gt
stringclasses
1 value
context
stringlengths
2.05k
161k
package cn.geekiot.sparklebike.ui; import android.content.Context; import android.content.res.TypedArray; import android.graphics.Canvas; import android.graphics.PointF; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; import android.net.wifi.WifiManager; import android.util.AttributeSet; import android.util.Log; import android.view.View; import android.view.animation.AnimationUtils; import cn.geekiot.sparklebike.R; import cn.geekiot.sparklebike.graphic.BlurDrawable; import cn.geekiot.sparklebike.graphic.WaveDrawable; /** * Created by Cocoonshu on 2016/4/17. */ public class DiscoverDeviceAnimView extends View { private static final String TAG = "DiscoverDeviceAnimView"; private WaveDrawable mDblWave = null; private Drawable mDblMasterDevice = null; private Drawable mDblSlaveDevice = null; private int mWaveColor = 0xFF000000; private int mBackgroundBlurRadius = 5; private PointF mMasterPosition = new PointF(); private PointF mSlavePosition = new PointF(); private float mMasterSweepDistance = 0; private float mSlaveSweepDistance = 0; private long mMasterAnimStartTime = 0; private long mSlaveAnimStartTime = 0; private boolean mIsDiscoverAnimationOn = false; private float mWaveSpeed = 1f; // pixel/ms private long mMasterWaveInterval = 1000; // ms private boolean mIsSlaveResponeActived = false; public DiscoverDeviceAnimView(Context context) { this(context, null); } public DiscoverDeviceAnimView(Context context, AttributeSet attrs) { this(context, attrs, 0); } public DiscoverDeviceAnimView(Context context, AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr, 0); initialize(context, attrs, defStyleAttr); } private void initialize(Context context, AttributeSet attrs, int defStyleAttr) { TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.DiscoverDeviceAnimView, defStyleAttr, 0); int indexCount = typedArray.getIndexCount(); for (int i = 0; i < indexCount; i++) { int key = 
typedArray.getIndex(i); switch (key) { case R.styleable.DiscoverDeviceAnimView_master_device_drawable: mDblMasterDevice = typedArray.getDrawable(i); if (mDblMasterDevice != null) { mDblMasterDevice.setBounds(0, 0, mDblMasterDevice.getIntrinsicWidth(), mDblMasterDevice.getIntrinsicHeight()); } break; case R.styleable.DiscoverDeviceAnimView_slaver_device_drawable: mDblSlaveDevice = typedArray.getDrawable(i); if (mDblSlaveDevice != null) { mDblSlaveDevice.setBounds(0, 0, mDblSlaveDevice.getIntrinsicWidth(), mDblSlaveDevice.getIntrinsicHeight()); } break; case R.styleable.DiscoverDeviceAnimView_wave_color: mWaveColor = typedArray.getColor(key, mWaveColor); break; case R.styleable.DiscoverDeviceAnimView_background_blur_radius: mBackgroundBlurRadius = typedArray.getInt(key, mBackgroundBlurRadius); break; default: break; } } typedArray.recycle(); mDblWave = new WaveDrawable(); mDblWave.setWaveColor(mWaveColor); replaceBackgroundDrawable(); } private void replaceBackgroundDrawable() { BlurDrawable blurDrawable = null; BitmapDrawable backgroundDrawable = null; if (getBackground() != null && getBackground() instanceof BitmapDrawable) { backgroundDrawable = (BitmapDrawable) getBackground(); blurDrawable = new BlurDrawable(backgroundDrawable.getBitmap(), mBackgroundBlurRadius); setBackground(blurDrawable); } } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { int backgroundWidth = 0; int backgroundHeight = 0; int widthSpecMode = MeasureSpec.getMode(widthMeasureSpec); int heightSpecMode = MeasureSpec.getMode(heightMeasureSpec); int widthSpecSize = MeasureSpec.getSize(widthMeasureSpec); int heightSpecSize = MeasureSpec.getSize(heightMeasureSpec); int measuredWidth = 0; int measuredHeight = 0; final Drawable background = getBackground(); if (background != null) { backgroundWidth = background.getIntrinsicWidth(); backgroundHeight = background.getIntrinsicHeight(); } // Width switch (widthSpecMode) { case MeasureSpec.UNSPECIFIED: measuredWidth = 
backgroundWidth; break; case MeasureSpec.AT_MOST: measuredWidth = backgroundWidth < widthSpecSize ? backgroundWidth : widthSpecSize; break; case MeasureSpec.EXACTLY: measuredWidth = widthSpecSize; break; } // Height switch (heightSpecMode) { case MeasureSpec.UNSPECIFIED: measuredHeight = backgroundHeight; break; case MeasureSpec.AT_MOST: measuredHeight = backgroundHeight < heightSpecSize ? backgroundHeight : heightSpecSize; break; case MeasureSpec.EXACTLY: measuredHeight = heightSpecSize; break; } setMeasuredDimension(measuredWidth, measuredHeight); refreshMasterAndSlaverPosition(); refreshMasterWaveTimeLine(); } @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas); boolean hasMoreFrame = false; hasMoreFrame |= drawDiscoverWave(canvas); hasMoreFrame |= drawDeviceResponseWave(canvas); hasMoreFrame |= drawDevices(canvas); if (hasMoreFrame) { postInvalidateOnAnimation(); } } private boolean drawDiscoverWave(Canvas canvas) { if (mIsDiscoverAnimationOn) { if (mMasterAnimStartTime == 0) { mMasterAnimStartTime = AnimationUtils.currentAnimationTimeMillis(); } long currentAnimTime = AnimationUtils.currentAnimationTimeMillis(); long deltaAnimTime = currentAnimTime - mMasterAnimStartTime; float currentMasterWaveDistance = mWaveSpeed * deltaAnimTime; float masterWaveIterator = currentMasterWaveDistance; float tempMaxDistance = (float) Math.hypot(getWidth(), getHeight()); tempMaxDistance += tempMaxDistance * mDblWave.getThicknessPercent(); while (masterWaveIterator > tempMaxDistance) { masterWaveIterator -= mWaveSpeed * mMasterWaveInterval; } while (masterWaveIterator > 0) { mDblWave.setPivotPoint(mMasterPosition.x, mMasterPosition.y); mDblWave.setRadius(masterWaveIterator); mDblWave.draw(canvas); masterWaveIterator -= mWaveSpeed * mMasterWaveInterval; } } return true; } private boolean drawDeviceResponseWave(Canvas canvas) { if (mIsDiscoverAnimationOn && mIsSlaveResponeActived) { long currentAnimTime = AnimationUtils.currentAnimationTimeMillis(); long 
deltaAnimTime = currentAnimTime - mSlaveAnimStartTime; float currentSlaveWaveDistance = mWaveSpeed * deltaAnimTime; float tempMaxDistance = (float) Math.hypot(getWidth(), getHeight()); tempMaxDistance += tempMaxDistance * mDblWave.getThicknessPercent(); if (currentSlaveWaveDistance > tempMaxDistance) { mIsSlaveResponeActived = false; return false; } else { mDblWave.setPivotPoint(mSlavePosition.x, mSlavePosition.y); mDblWave.setRadius(currentSlaveWaveDistance); mDblWave.draw(canvas); } } return true; } private boolean drawDevices(Canvas canvas) { if (mDblMasterDevice != null) { float drawableWidth = mDblMasterDevice.getIntrinsicWidth(); float drawableHeight = mDblMasterDevice.getIntrinsicHeight(); canvas.save(); canvas.translate(mMasterPosition.x - drawableWidth * 0.5f, mMasterPosition.y - drawableHeight * 0.5f); mDblMasterDevice.draw(canvas); canvas.restore(); } if (mDblSlaveDevice != null) { float drawableWidth = mDblSlaveDevice.getIntrinsicWidth(); float drawableHeight = mDblSlaveDevice.getIntrinsicHeight(); canvas.save(); canvas.translate(mSlavePosition.x - drawableWidth * 0.5f, mSlavePosition.y - drawableHeight * 0.5f); mDblSlaveDevice.draw(canvas); canvas.restore(); } return false; } private float computeLongthSweepPath(float currentX, float currentY, float thicknessPercent) { float viewWidth = getWidth(); float viewHeight = getHeight(); float minXBorder = getPaddingLeft(); float maxXBorder = viewWidth - getPaddingRight(); float minYBorder = getPaddingTop(); float maxYBorder = viewHeight - getPaddingBottom(); float centerX = (maxXBorder - minXBorder) * 0.5f + minXBorder; float centerY = (maxYBorder - minYBorder) * 0.5f + minYBorder; float destinationX = (currentX < centerX) ? minXBorder : maxXBorder; float destinationY = (currentY < centerY) ? 
minYBorder : maxYBorder; return (float)Math.hypot(Math.abs(destinationX - currentX), Math.abs(destinationY - currentY)) * (1.0f + thicknessPercent); } private int computeMasterSweepCount() { float wholeDistanceDurationMS = mMasterSweepDistance / mWaveSpeed; float sweepCount = wholeDistanceDurationMS / mMasterWaveInterval; return (int) Math.ceil(sweepCount); } private void refreshMasterWaveTimeLine() { mMasterSweepDistance = computeLongthSweepPath(mMasterPosition.x, mMasterPosition.y, mDblWave.getThicknessPercent()); mSlaveSweepDistance = computeLongthSweepPath(mSlavePosition.x, mSlavePosition.y, mDblWave.getThicknessPercent()); } private void refreshMasterAndSlaverPosition() { float viewWidth = getWidth(); float viewHeight = getHeight(); float minXBorder = getPaddingLeft(); float maxXBorder = viewWidth - getPaddingRight(); float minYBorder = getPaddingTop(); float maxYBorder = viewHeight - getPaddingBottom(); float centerY = (maxYBorder - minYBorder) * 0.5f + minYBorder; float masterWidth = mDblMasterDevice != null ? mDblMasterDevice.getIntrinsicWidth() : 0; float slaverWidth = mDblSlaveDevice != null ? mDblSlaveDevice.getIntrinsicWidth() : 0; mMasterPosition.x = minXBorder + masterWidth * 0.5f; mSlavePosition.x = maxXBorder - slaverWidth * 0.5f; mMasterPosition.y = mSlavePosition.y = centerY; mDblWave.setBounds(0, 0, (int)viewWidth, (int)viewHeight); } public void startDiscover() { mIsDiscoverAnimationOn = true; postInvalidateOnAnimation(); } public void stopDiscover() { mIsDiscoverAnimationOn = false; } public void toggleDeviceResponse() { if (!mIsSlaveResponeActived) { mIsSlaveResponeActived = true; mSlaveAnimStartTime = AnimationUtils.currentAnimationTimeMillis(); postInvalidateOnAnimation(); } } }
/* * This file is part of "lunisolar-magma". * * (C) Copyright 2014-2022 Lunisolar (http://lunisolar.eu/). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package eu.lunisolar.magma.func.function.to; import javax.annotation.Nonnull; // NOSONAR import javax.annotation.Nullable; // NOSONAR import javax.annotation.concurrent.NotThreadSafe; // NOSONAR import java.util.Comparator; // NOSONAR import java.util.Objects; // NOSONAR import eu.lunisolar.magma.basics.*; //NOSONAR import eu.lunisolar.magma.basics.builder.*; // NOSONAR import eu.lunisolar.magma.basics.exceptions.*; // NOSONAR import eu.lunisolar.magma.basics.meta.*; // NOSONAR import eu.lunisolar.magma.basics.meta.aType.*; // NOSONAR import eu.lunisolar.magma.basics.meta.functional.*; // NOSONAR import eu.lunisolar.magma.basics.meta.functional.type.*; // NOSONAR import eu.lunisolar.magma.basics.meta.functional.domain.*; // NOSONAR import eu.lunisolar.magma.func.IA; import eu.lunisolar.magma.func.SA; import eu.lunisolar.magma.func.*; // NOSONAR import eu.lunisolar.magma.func.tuple.*; // NOSONAR import java.util.concurrent.*; // NOSONAR import java.util.function.*; // NOSONAR import java.util.*; // NOSONAR import java.lang.reflect.*; // NOSONAR import eu.lunisolar.magma.func.action.*; // NOSONAR import eu.lunisolar.magma.func.consumer.*; // NOSONAR import eu.lunisolar.magma.func.consumer.primitives.*; // NOSONAR import eu.lunisolar.magma.func.consumer.primitives.bi.*; // NOSONAR import 
eu.lunisolar.magma.func.consumer.primitives.obj.*; // NOSONAR import eu.lunisolar.magma.func.consumer.primitives.tri.*; // NOSONAR import eu.lunisolar.magma.func.function.*; // NOSONAR import eu.lunisolar.magma.func.function.conversion.*; // NOSONAR import eu.lunisolar.magma.func.function.from.*; // NOSONAR import eu.lunisolar.magma.func.function.to.*; // NOSONAR import eu.lunisolar.magma.func.operator.binary.*; // NOSONAR import eu.lunisolar.magma.func.operator.ternary.*; // NOSONAR import eu.lunisolar.magma.func.operator.unary.*; // NOSONAR import eu.lunisolar.magma.func.predicate.*; // NOSONAR import eu.lunisolar.magma.func.supplier.*; // NOSONAR /** * Non-throwing functional interface (lambda) LOiToFltFunction for Java 8. * * Type: function * * Domain (lvl: 2): T a1,int a2 * * Co-domain: float * * Special case of function that corresponds to expressions like (list, index) -> List::get * */ @FunctionalInterface @SuppressWarnings("UnusedDeclaration") public interface LOiToFltFunction<T> extends MetaFunction, MetaInterface.NonThrowing, OiFunction<T, aFloat>, Codomain<aFloat>, Domain2<a<T>, aInt> { // NOSONAR String DESCRIPTION = "LOiToFltFunction: float applyAsFlt(T a1,int a2)"; // float applyAsFlt(T a1,int a2) ; default float applyAsFlt(T a1, int a2) { // return nestingApplyAsFlt(a1,a2); try { return this.applyAsFltX(a1, a2); } catch (Throwable e) { // NOSONAR throw Handling.nestCheckedAndThrow(e); } } /** * Implement this, but call applyAsFlt(T a1,int a2) */ float applyAsFltX(T a1, int a2) throws Throwable; default float tupleApplyAsFlt(LObjIntPair<T> args) { return applyAsFlt(args.first(), args.second()); } /** Function call that handles exceptions according to the instructions. 
*/ default float handlingApplyAsFlt(T a1, int a2, HandlingInstructions<Throwable, RuntimeException> handling) { try { return this.applyAsFltX(a1, a2); } catch (Throwable e) { // NOSONAR throw Handler.handleOrNest(e, handling); } } default LOiToFltFunction<T> handling(HandlingInstructions<Throwable, RuntimeException> handling) { return (a1, a2) -> handlingApplyAsFlt(a1, a2, handling); } default float applyAsFlt(T a1, int a2, @Nonnull ExWMF<RuntimeException> factory, @Nonnull String newMessage) { try { return this.applyAsFltX(a1, a2); } catch (Throwable e) { // NOSONAR throw Handling.wrap(e, factory, newMessage); } } default float applyAsFlt(T a1, int a2, @Nonnull ExWMF<RuntimeException> factory, @Nonnull String newMessage, @Nullable Object param1) { try { return this.applyAsFltX(a1, a2); } catch (Throwable e) { // NOSONAR throw Handling.wrap(e, factory, newMessage, param1); } } default float applyAsFlt(T a1, int a2, @Nonnull ExWMF<RuntimeException> factory, @Nonnull String newMessage, @Nullable Object param1, @Nullable Object param2) { try { return this.applyAsFltX(a1, a2); } catch (Throwable e) { // NOSONAR throw Handling.wrap(e, factory, newMessage, param1, param2); } } default float applyAsFlt(T a1, int a2, @Nonnull ExWMF<RuntimeException> factory, @Nonnull String newMessage, @Nullable Object param1, @Nullable Object param2, @Nullable Object param3) { try { return this.applyAsFltX(a1, a2); } catch (Throwable e) { // NOSONAR throw Handling.wrap(e, factory, newMessage, param1, param2, param3); } } default LOiToFltFunction<T> trying(@Nonnull ExWMF<RuntimeException> factory, @Nonnull String newMessage) { return (a1, a2) -> applyAsFlt(a1, a2, factory, newMessage); } default LOiToFltFunction<T> trying(@Nonnull ExWMF<RuntimeException> factory, @Nonnull String newMessage, @Nullable Object param1) { return (a1, a2) -> applyAsFlt(a1, a2, factory, newMessage, param1); } default LOiToFltFunction<T> trying(@Nonnull ExWMF<RuntimeException> factory, @Nonnull String newMessage, 
@Nullable Object param1, @Nullable Object param2) { return (a1, a2) -> applyAsFlt(a1, a2, factory, newMessage, param1, param1); } default LOiToFltFunction<T> trying(@Nonnull ExWMF<RuntimeException> factory, @Nonnull String newMessage, @Nullable Object param1, @Nullable Object param2, @Nullable Object param3) { return (a1, a2) -> applyAsFlt(a1, a2, factory, newMessage, param1, param2, param3); } default float applyAsFlt(T a1, int a2, @Nonnull ExWF<RuntimeException> factory) { try { return this.applyAsFltX(a1, a2); } catch (Throwable e) { // NOSONAR throw Handling.wrap(e, factory); } } default LOiToFltFunction<T> trying(@Nonnull ExWF<RuntimeException> factory) { return (a1, a2) -> applyAsFlt(a1, a2, factory); } default float applyAsFltThen(T a1, int a2, @Nonnull LToFltFunction<Throwable> handler) { try { return this.applyAsFltX(a1, a2); } catch (Throwable e) { // NOSONAR Handling.handleErrors(e); return handler.applyAsFlt(e); } } default LOiToFltFunction<T> tryingThen(@Nonnull LToFltFunction<Throwable> handler) { return (a1, a2) -> applyAsFltThen(a1, a2, handler); } /** Function call that handles exceptions by always nesting checked exceptions and propagating the others as is. */ default float nestingApplyAsFlt(T a1, int a2) { try { return this.applyAsFltX(a1, a2); } catch (Throwable e) { // NOSONAR throw Handling.nestCheckedAndThrow(e); } } /** Function call that handles exceptions by always propagating them as is, even when they are undeclared checked ones. 
*/ default float shovingApplyAsFlt(T a1, int a2) { try { return this.applyAsFltX(a1, a2); } catch (Throwable e) { // NOSONAR throw Handling.shoveIt(e); } } static <T> float shovingApplyAsFlt(T a1, int a2, LOiToFltFunction<T> func) { Null.nonNullArg(func, "func"); return func.shovingApplyAsFlt(a1, a2); } static <T> float handlingApplyAsFlt(T a1, int a2, LOiToFltFunction<T> func, HandlingInstructions<Throwable, RuntimeException> handling) { // <- Null.nonNullArg(func, "func"); return func.handlingApplyAsFlt(a1, a2, handling); } static <T> float tryApplyAsFlt(T a1, int a2, LOiToFltFunction<T> func) { Null.nonNullArg(func, "func"); return func.nestingApplyAsFlt(a1, a2); } static <T> float tryApplyAsFlt(T a1, int a2, LOiToFltFunction<T> func, @Nonnull ExWMF<RuntimeException> factory, @Nonnull String newMessage) { Null.nonNullArg(func, "func"); return func.applyAsFlt(a1, a2, factory, newMessage); } static <T> float tryApplyAsFlt(T a1, int a2, LOiToFltFunction<T> func, @Nonnull ExWMF<RuntimeException> factory, @Nonnull String newMessage, @Nullable Object param1) { Null.nonNullArg(func, "func"); return func.applyAsFlt(a1, a2, factory, newMessage, param1); } static <T> float tryApplyAsFlt(T a1, int a2, LOiToFltFunction<T> func, @Nonnull ExWMF<RuntimeException> factory, @Nonnull String newMessage, @Nullable Object param1, @Nullable Object param2) { Null.nonNullArg(func, "func"); return func.applyAsFlt(a1, a2, factory, newMessage, param1, param2); } static <T> float tryApplyAsFlt(T a1, int a2, LOiToFltFunction<T> func, @Nonnull ExWMF<RuntimeException> factory, @Nonnull String newMessage, @Nullable Object param1, @Nullable Object param2, @Nullable Object param3) { Null.nonNullArg(func, "func"); return func.applyAsFlt(a1, a2, factory, newMessage, param1, param2, param3); } static <T> float tryApplyAsFlt(T a1, int a2, LOiToFltFunction<T> func, @Nonnull ExWF<RuntimeException> factory) { Null.nonNullArg(func, "func"); return func.applyAsFlt(a1, a2, factory); } static <T> float 
tryApplyAsFltThen(T a1, int a2, LOiToFltFunction<T> func, @Nonnull LToFltFunction<Throwable> handler) { Null.nonNullArg(func, "func"); return func.applyAsFltThen(a1, a2, handler); } default float failSafeApplyAsFlt(T a1, int a2, @Nonnull LOiToFltFunction<T> failSafe) { try { return applyAsFlt(a1, a2); } catch (Throwable e) { // NOSONAR Handling.handleErrors(e); return failSafe.applyAsFlt(a1, a2); } } static <T> float failSafeApplyAsFlt(T a1, int a2, LOiToFltFunction<T> func, @Nonnull LOiToFltFunction<T> failSafe) { Null.nonNullArg(failSafe, "failSafe"); if (func == null) { return failSafe.applyAsFlt(a1, a2); } else { return func.failSafeApplyAsFlt(a1, a2, failSafe); } } static <T> LOiToFltFunction<T> failSafe(LOiToFltFunction<T> func, @Nonnull LOiToFltFunction<T> failSafe) { Null.nonNullArg(failSafe, "failSafe"); return (a1, a2) -> failSafeApplyAsFlt(a1, a2, func, failSafe); } /** Just to mirror the method: Ensures the result is not null */ default float nonNullApplyAsFlt(T a1, int a2) { return applyAsFlt(a1, a2); } /** Returns description of the functional interface. */ @Nonnull default String functionalInterfaceDescription() { return LOiToFltFunction.DESCRIPTION; } /** From-To. Intended to be used with non-capturing lambda. */ public static <T> void fromTo(int min_a2, int max_a2, T a1, @Nonnull LOiToFltFunction<T> func) { Null.nonNullArg(func, "func"); if (min_a2 <= max_a2) { for (int a2 = min_a2; a2 <= max_a2; a2++) { func.applyAsFlt(a1, a2); } } else { for (int a2 = min_a2; a2 >= max_a2; a2--) { func.applyAsFlt(a1, a2); } } } /** From-To. Intended to be used with non-capturing lambda. */ public static <T> void fromTill(int min_a2, int max_a2, T a1, @Nonnull LOiToFltFunction<T> func) { Null.nonNullArg(func, "func"); if (min_a2 <= max_a2) { for (int a2 = min_a2; a2 < max_a2; a2++) { func.applyAsFlt(a1, a2); } } else { for (int a2 = min_a2; a2 > max_a2; a2--) { func.applyAsFlt(a1, a2); } } } /** From-To. Intended to be used with non-capturing lambda. 
*/ public static <T> void times(int max_a2, T a1, @Nonnull LOiToFltFunction<T> func) { if (max_a2 < 0) return; fromTill(0, max_a2, a1, func); } /** Extract and apply function. */ public static <M, K, V> float from(@Nonnull M container, LBiFunction<M, K, V> extractor, K key, int a2, @Nonnull LOiToFltFunction<V> function, float orElse) { Null.nonNullArg(container, "container"); Null.nonNullArg(function, "function"); V value = extractor.apply(container, key); if (value != null) { return function.applyAsFlt(value, a2); } return orElse; } default LIntToFltFunction lShrink(@Nonnull LIntFunction<T> left) { Null.nonNullArg(left, "left"); return a2 -> applyAsFlt(left.apply(a2), a2); } default LIntToFltFunction lShrink_(T a1) { return a2 -> applyAsFlt(a1, a2); } public static <T> LIntToFltFunction lShrunken(@Nonnull LIntFunction<T> left, @Nonnull LOiToFltFunction<T> func) { Null.nonNullArg(left, "left"); Null.nonNullArg(func, "func"); return func.lShrink(left); } public static <T> LIntToFltFunction lShrunken_(T a1, @Nonnull LOiToFltFunction<T> func) { Null.nonNullArg(func, "func"); return func.lShrink_(a1); } default LToFltFunction<T> rShrink(@Nonnull LToIntFunction<T> right) { Null.nonNullArg(right, "right"); return a1 -> applyAsFlt(a1, right.applyAsInt(a1)); } default LToFltFunction<T> rShrink_(int a2) { return a1 -> applyAsFlt(a1, a2); } public static <T> LToFltFunction<T> rShrunken(@Nonnull LToIntFunction<T> right, @Nonnull LOiToFltFunction<T> func) { Null.nonNullArg(right, "right"); Null.nonNullArg(func, "func"); return func.rShrink(right); } public static <T> LToFltFunction<T> rShrunken_(int a2, @Nonnull LOiToFltFunction<T> func) { Null.nonNullArg(func, "func"); return func.rShrink_(a2); } /** */ public static <T> LOiToFltFunction<T> uncurry(@Nonnull LFunction<T, LIntToFltFunction> func) { Null.nonNullArg(func, "func"); return (T a1, int a2) -> func.apply(a1).applyAsFlt(a2); } /** Cast that removes generics. 
*/ default LOiToFltFunction untyped() { return this; } /** Cast that replace generics. */ default <V2> LOiToFltFunction<V2> cast() { return untyped(); } /** Cast that replace generics. */ public static <V2> LOiToFltFunction<V2> cast(LOiToFltFunction<?> function) { return (LOiToFltFunction) function; } /** Change function to consumer that ignores output. */ default LObjIntConsumer<T> toConsumer() { return this::applyAsFlt; } /** Calls domain consumer before main function. */ default LOiToFltFunction<T> beforeDo(@Nonnull LObjIntConsumer<T> before) { Null.nonNullArg(before, "before"); return (T a1, int a2) -> { before.accept(a1, a2); return applyAsFlt(a1, a2); }; } /** Calls codomain consumer after main function. */ default LOiToFltFunction<T> afterDo(@Nonnull LFltConsumer after) { Null.nonNullArg(after, "after"); return (T a1, int a2) -> { final float retval = applyAsFlt(a1, a2); after.accept(retval); return retval; }; } /** Captures arguments but delays the evaluation. */ default LFltSupplier capture(T a1, int a2) { return () -> this.applyAsFlt(a1, a2); } /** Creates function that always returns the same value. */ static <T> LOiToFltFunction<T> constant(float r) { return (a1, a2) -> r; } /** Captures single parameter function into this interface where only 1st parameter will be used. */ @Nonnull static <T> LOiToFltFunction<T> apply1stAsFlt(@Nonnull LToFltFunction<T> func) { return (a1, a2) -> func.applyAsFlt(a1); } /** Captures single parameter function into this interface where only 2nd parameter will be used. */ @Nonnull static <T> LOiToFltFunction<T> apply2ndAsFlt(@Nonnull LIntToFltFunction func) { return (a1, a2) -> func.applyAsFlt(a2); } /** Convenient method in case lambda expression is ambiguous for the compiler (that might happen for overloaded methods accepting different interfaces). 
*/ @Nonnull static <T> LOiToFltFunction<T> oiToFltFunc(final @Nonnull LOiToFltFunction<T> lambda) { Null.nonNullArg(lambda, "lambda"); return lambda; } /** A completely inconvenient method in case lambda expression and generic arguments are ambiguous for the compiler. */ @Nonnull static <T> LOiToFltFunction<T> oiToFltFunc(@Nullable Class<T> c1, final @Nonnull LOiToFltFunction<T> lambda) { Null.nonNullArg(lambda, "lambda"); return lambda; } final class S<T> implements LOiToFltFunction<T> { private LOiToFltFunction<T> target = null; @Override public float applyAsFltX(T a1, int a2) throws Throwable { return target.applyAsFltX(a1, a2); } } @Nonnull static <T> LOiToFltFunction<T> recursive(final @Nonnull LFunction<LOiToFltFunction<T>, LOiToFltFunction<T>> selfLambda) { final S<T> single = new S(); LOiToFltFunction<T> func = selfLambda.apply(single); single.target = func; return func; } public static <T> M<T> mementoOf(T a1, int a2, LOiToFltFunction<T> function) { var initialValue = function.applyAsFlt(a1, a2); return initializedMementoOf(initialValue, function); } public static <T> M<T> initializedMementoOf(float initialValue, LOiToFltFunction<T> function) { return memento(initialValue, initialValue, function, (m, x1, x2) -> x2); } public static <T> M<T> deltaOf(T a1, int a2, LOiToFltFunction<T> function, LFltBinaryOperator deltaFunction) { var initialValue = function.applyAsFlt(a1, a2); return initializedDeltaOf(initialValue, function, deltaFunction); } public static <T> M<T> deltaOf(T a1, int a2, LOiToFltFunction<T> function) { var initialValue = function.applyAsFlt(a1, a2); return initializedDeltaOf(initialValue, function, (x1, x2) -> (x2 - x1)); } public static <T> M<T> initializedDeltaOf(float initialValue, LOiToFltFunction<T> function, LFltBinaryOperator deltaFunction) { return memento(initialValue, deltaFunction.applyAsFlt(initialValue, initialValue), function, (m, x1, x2) -> deltaFunction.applyAsFlt(x1, x2)); } public static <T> M<T> memento(float 
initialBaseValue, float initialValue, LOiToFltFunction<T> baseFunction, LFltTernaryOperator mementoFunction) { return new M(initialBaseValue, initialValue, baseFunction, mementoFunction); } /** * Implementation that allows to create derivative functions (do not confuse it with math concepts). Very short name is intended to be used with parent (LOiToFltFunction.M) */ @NotThreadSafe final class M<T> implements LOiToFltFunction<T> { private final LOiToFltFunction<T> baseFunction; private float lastBaseValue; private float lastValue; private final LFltTernaryOperator mementoFunction; private M(float lastBaseValue, float lastValue, LOiToFltFunction<T> baseFunction, LFltTernaryOperator mementoFunction) { this.baseFunction = baseFunction; this.lastBaseValue = lastBaseValue; this.lastValue = lastValue; this.mementoFunction = mementoFunction; } @Override public float applyAsFltX(T a1, int a2) throws Throwable { float x1 = lastBaseValue; float x2 = lastBaseValue = baseFunction.applyAsFltX(a1, a2); return lastValue = mementoFunction.applyAsFlt(lastValue, x1, x2); } public float lastValue() { return lastValue; }; public float lastBaseValue() { return lastBaseValue; }; } @Nonnull static <T> LOiToFltFunction<T> oiToFltFuncThrowing(final @Nonnull ExF<Throwable> exF) { Null.nonNullArg(exF, "exF"); return (a1, a2) -> { throw exF.produce(); }; } @Nonnull static <T> LOiToFltFunction<T> oiToFltFuncThrowing(final String message, final @Nonnull ExMF<Throwable> exF) { Null.nonNullArg(exF, "exF"); return (a1, a2) -> { throw exF.produce(message); }; } // <editor-fold desc="wrap variants"> /** Convenient method in case lambda expression is ambiguous for the compiler (that might happen for overloaded methods accepting different interfaces). 
*/ @Nonnull static <T> LOiToFltFunction.LIntObjToFltFunc<T> intObjToFltFunc(final @Nonnull LOiToFltFunction.LIntObjToFltFunc<T> lambda) { Null.nonNullArg(lambda, "lambda"); return lambda; } // </editor-fold> static <T> float call(T a1, int a2, final @Nonnull LOiToFltFunction<T> lambda) { Null.nonNullArg(lambda, "lambda"); return lambda.applyAsFlt(a1, a2); } // <editor-fold desc="wrap"> // </editor-fold> // <editor-fold desc="compose (functional)"> /** Allows to manipulate the domain of the function. */ @Nonnull default <V1> LOiToFltFunction<V1> compose(@Nonnull final LFunction<? super V1, ? extends T> before1, @Nonnull final LIntUnaryOperator before2) { Null.nonNullArg(before1, "before1"); Null.nonNullArg(before2, "before2"); return (v1, v2) -> this.applyAsFlt(before1.apply(v1), before2.applyAsInt(v2)); } public static <V1, T> LOiToFltFunction<V1> composed(@Nonnull final LFunction<? super V1, ? extends T> before1, @Nonnull final LIntUnaryOperator before2, LOiToFltFunction<T> after) { return after.compose(before1, before2); } /** Allows to manipulate the domain of the function. */ @Nonnull default <V1, V2> LToFltBiFunction<V1, V2> oiToFltFuncCompose(@Nonnull final LFunction<? super V1, ? extends T> before1, @Nonnull final LToIntFunction<? super V2> before2) { Null.nonNullArg(before1, "before1"); Null.nonNullArg(before2, "before2"); return (v1, v2) -> this.applyAsFlt(before1.apply(v1), before2.applyAsInt(v2)); } public static <V1, V2, T> LToFltBiFunction<V1, V2> composed(@Nonnull final LFunction<? super V1, ? extends T> before1, @Nonnull final LToIntFunction<? super V2> before2, LOiToFltFunction<T> after) { return after.oiToFltFuncCompose(before1, before2); } // </editor-fold> // <editor-fold desc="then (functional)"> /** Combines two functions together in a order. */ @Nonnull default <V> LOiFunction<T, V> then(@Nonnull LFltFunction<? 
extends V> after) {
	Null.nonNullArg(after, "after");
	return (a1, a2) -> after.apply(this.applyAsFlt(a1, a2));
}

/** Composes this function with {@code after}: applies this first, then {@code after} to the float result. */
@Nonnull
default LOiToByteFunction<T> thenToByte(@Nonnull LFltToByteFunction after) {
	Null.nonNullArg(after, "after");
	return (a1, a2) -> after.applyAsByte(this.applyAsFlt(a1, a2));
}

/** Composes this function with {@code after}: applies this first, then {@code after} to the float result. */
@Nonnull
default LOiToSrtFunction<T> thenToSrt(@Nonnull LFltToSrtFunction after) {
	Null.nonNullArg(after, "after");
	return (a1, a2) -> after.applyAsSrt(this.applyAsFlt(a1, a2));
}

/** Composes this function with {@code after}: applies this first, then {@code after} to the float result. */
@Nonnull
default LOiToIntFunction<T> thenToInt(@Nonnull LFltToIntFunction after) {
	Null.nonNullArg(after, "after");
	return (a1, a2) -> after.applyAsInt(this.applyAsFlt(a1, a2));
}

/** Composes this function with {@code after}: applies this first, then {@code after} to the float result. */
@Nonnull
default LOiToLongFunction<T> thenToLong(@Nonnull LFltToLongFunction after) {
	Null.nonNullArg(after, "after");
	return (a1, a2) -> after.applyAsLong(this.applyAsFlt(a1, a2));
}

/** Composes this function with {@code after}: applies this first, then {@code after} to the float result. */
@Nonnull
default LOiToFltFunction<T> thenToFlt(@Nonnull LFltUnaryOperator after) {
	Null.nonNullArg(after, "after");
	return (a1, a2) -> after.applyAsFlt(this.applyAsFlt(a1, a2));
}

/** Composes this function with {@code after}: applies this first, then {@code after} to the float result. */
@Nonnull
default LOiToDblFunction<T> thenToDbl(@Nonnull LFltToDblFunction after) {
	Null.nonNullArg(after, "after");
	return (a1, a2) -> after.applyAsDbl(this.applyAsFlt(a1, a2));
}

/** Composes this function with {@code after}: applies this first, then {@code after} to the float result. */
@Nonnull
default LOiToCharFunction<T> thenToChar(@Nonnull LFltToCharFunction after) {
	Null.nonNullArg(after, "after");
	return (a1, a2) -> after.applyAsChar(this.applyAsFlt(a1, a2));
}

/** Composes this function with {@code after}: applies this first, then tests the float result with {@code after}. */
@Nonnull
default LObjIntPredicate<T> thenToBool(@Nonnull LFltPredicate after) {
	Null.nonNullArg(after, "after");
	return (a1, a2) -> after.test(this.applyAsFlt(a1, a2));
}

// </editor-fold>

// <editor-fold desc="variant conversions">

// </editor-fold>

// <editor-fold desc="interface variants">

/** Permutation of LOiToFltFunction for method references (arguments swapped: int first, object second). */
@FunctionalInterface
interface LIntObjToFltFunc<T> extends LOiToFltFunction<T> {

	/**
	 * Implement this, but call applyAsFlt(T a1,int a2)
	 */
	default float applyAsFltX(T a1, int a2) {
		// delegate to the swapped-argument variant that implementors provide
		return this.applyAsFltIntObj(a2, a1);
	}

	// float applyAsFltIntObj(int a2,T a1) ;
	default float applyAsFltIntObj(int a2, T a1) {
		// return nestingApplyAsFltIntObj(a2,a1);
		try {
			return this.applyAsFltIntObjX(a2, a1);
		} catch (Throwable e) { // NOSONAR
			// checked exceptions are rewrapped as unchecked by the library's Handling utility
			throw Handling.nestCheckedAndThrow(e);
		}
	}

	/**
	 * Implement this, but call applyAsFltIntObj(int a2,T a1)
	 */
	float applyAsFltIntObjX(int a2, T a1) throws Throwable;
}

// </editor-fold>

/** Does nothing (LOiToFltFunction) Function; always returns the library default float. */
public static <T> float doNothing(T a1, int a2) {
	return Function4U.defaultFloat;
}

/** Does nothing (LOiToFltFunction.LIntObjToFltFunc) Function; always returns the library default float. */
public static <T> float doNothing(int a2, T a1) {
	return Function4U.defaultFloat;
}

/**
 * For each element (or tuple) from arguments, calls the function and passes the result to consumer.
 * Iterates min(size(source1), size(source2)) pairs by index.
 * Thread safety, fail-fast, fail-safety of this method is not expected.
 */
default <C1, C2> void forEach(IndexedRead<C1, a<T>> ia1, C1 source1, IndexedRead<C2, aInt> ia2, C2 source2, LFltConsumer consumer) {
	int size = ia1.size(source1);
	// raw casts below bridge the erased accessor types of the generated IndexedRead/SequentialRead adapters
	LOiFunction<Object, T> oiFunc1 = (LOiFunction) ia1.getter();
	size = Integer.min(size, ia2.size(source2));
	LOiToIntFunction<Object> oiFunc2 = (LOiToIntFunction) ia2.getter();
	int i = 0;
	for (; i < size; i++) {
		T a1 = oiFunc1.apply(source1, i);
		int a2 = oiFunc2.applyAsInt(source2, i);
		consumer.accept(this.applyAsFlt(a1, a2));
	}
}

/**
 * For each element (or tuple) from arguments, calls the function and passes the result to consumer.
 * Sequential iteration over source1, indexed access into source2; stops when either is exhausted.
 * Thread safety, fail-fast, fail-safety of this method is not expected.
 */
default <C1, I1, C2> void iterate(SequentialRead<C1, I1, a<T>> sa1, C1 source1, IndexedRead<C2, aInt> ia2, C2 source2, LFltConsumer consumer) {
	Object iterator1 = ((LFunction) sa1.adapter()).apply(source1);
	LPredicate<Object> testFunc1 = (LPredicate) sa1.tester();
	LFunction<Object, T> nextFunc1 = (LFunction) sa1.supplier();
	int size = ia2.size(source2);
	LOiToIntFunction<Object> oiFunc2 = (LOiToIntFunction) ia2.getter();
	int i = 0;
	while (testFunc1.test(iterator1) && i < size) {
		T a1 = nextFunc1.apply(iterator1);
		int a2 = oiFunc2.applyAsInt(source2, i);
		consumer.accept(this.applyAsFlt(a1, a2));
		i++;
	}
}

/**
 * For each element (or tuple) from arguments, calls the function and passes the result to consumer.
 * Indexed access into source1, sequential iteration over source2; stops when either is exhausted.
 * Thread safety, fail-fast, fail-safety of this method is not expected.
 */
default <C1, C2, I2> void iterate(IndexedRead<C1, a<T>> ia1, C1 source1, SequentialRead<C2, I2, aInt> sa2, C2 source2, LFltConsumer consumer) {
	int size = ia1.size(source1);
	LOiFunction<Object, T> oiFunc1 = (LOiFunction) ia1.getter();
	Object iterator2 = ((LFunction) sa2.adapter()).apply(source2);
	LPredicate<Object> testFunc2 = (LPredicate) sa2.tester();
	LToIntFunction<Object> nextFunc2 = (LToIntFunction) sa2.supplier();
	int i = 0;
	while (i < size && testFunc2.test(iterator2)) {
		T a1 = oiFunc1.apply(source1, i);
		int a2 = nextFunc2.applyAsInt(iterator2);
		consumer.accept(this.applyAsFlt(a1, a2));
		i++;
	}
}

/**
 * For each element (or tuple) from arguments, calls the function and passes the result to consumer.
 * Both sources iterated sequentially; stops when either is exhausted.
 * Thread safety, fail-fast, fail-safety of this method depends highly on the arguments.
 */
default <C1, I1, C2, I2> void iterate(SequentialRead<C1, I1, a<T>> sa1, C1 source1, SequentialRead<C2, I2, aInt> sa2, C2 source2, LFltConsumer consumer) {
	Object iterator1 = ((LFunction) sa1.adapter()).apply(source1);
	LPredicate<Object> testFunc1 = (LPredicate) sa1.tester();
	LFunction<Object, T> nextFunc1 = (LFunction) sa1.supplier();
	Object iterator2 = ((LFunction) sa2.adapter()).apply(source2);
	LPredicate<Object> testFunc2 = (LPredicate) sa2.tester();
	LToIntFunction<Object> nextFunc2 = (LToIntFunction) sa2.supplier();
	while (testFunc1.test(iterator1) && testFunc2.test(iterator2)) {
		T a1 = nextFunc1.apply(iterator1);
		int a2 = nextFunc2.applyAsInt(iterator2);
		consumer.accept(this.applyAsFlt(a1, a2));
	}
}

// closes the LOiToFltFunction interface (its declaration precedes this chunk)
}
package jenkins.install; import static org.apache.commons.io.FileUtils.readFileToString; import static org.apache.commons.lang.StringUtils.defaultIfBlank; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.Locale; import java.util.Map; import java.util.UUID; import java.util.logging.Level; import java.util.logging.Logger; import edu.umd.cs.findbugs.annotations.CheckForNull; import javax.servlet.Filter; import javax.servlet.FilterChain; import javax.servlet.FilterConfig; import javax.servlet.ServletException; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequestWrapper; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import edu.umd.cs.findbugs.annotations.NonNull; import hudson.security.csrf.GlobalCrumbIssuerConfiguration; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import jenkins.model.JenkinsLocationConfiguration; import jenkins.security.ApiTokenProperty; import jenkins.security.apitoken.TokenUuidAndPlainValue; import jenkins.security.seed.UserSeedProperty; import jenkins.util.SystemProperties; import jenkins.util.UrlHelper; import org.kohsuke.accmod.Restricted; import org.kohsuke.accmod.restrictions.NoExternalUse; import org.kohsuke.stapler.HttpResponse; import org.kohsuke.stapler.QueryParameter; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.StaplerResponse; import hudson.BulkChange; import hudson.Extension; import hudson.FilePath; import hudson.ProxyConfiguration; import hudson.model.PageDecorator; import hudson.model.UpdateCenter; import hudson.model.UpdateSite; import hudson.model.User; import hudson.security.AccountCreationFailedException; import hudson.security.FullControlOnceLoggedInAuthorizationStrategy; import hudson.security.HudsonPrivateSecurityRealm; import hudson.security.SecurityRealm; import 
hudson.security.csrf.CrumbIssuer;
import hudson.util.HttpResponses;
import hudson.util.PluginServletFilter;
import hudson.util.VersionNumber;
import java.io.File;
import java.net.HttpRetryException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.util.Iterator;
import java.util.List;
import jenkins.model.Jenkins;
import jenkins.security.s2m.AdminWhitelistRule;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.kohsuke.accmod.restrictions.DoNotUse;
import org.kohsuke.stapler.interceptor.RequirePOST;
import org.kohsuke.stapler.verb.POST;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.core.userdetails.UsernameNotFoundException;

/**
 * A Jenkins instance used during first-run to provide a limited set of services while
 * initial installation is in progress
 *
 * @since 2.0
 */
@Restricted(NoExternalUse.class)
@Extension
public class SetupWizard extends PageDecorator {
    /** Registers the setup-wizard servlet filter when setup has not yet completed. */
    public SetupWizard() {
        checkFilter();
    }

    /**
     * Login name of the admin account created on a fresh install.
     * NOTE(review): not final — presumably mutable so scripts can override it; confirm before changing.
     */
    public static String initialSetupAdminUserName = "admin";

    private static final Logger LOGGER = Logger.getLogger(SetupWizard.class.getName());

    // System property key controlling initial admin API Token creation (see javadoc below).
    private static final String ADMIN_INITIAL_API_TOKEN_PROPERTY_NAME = SetupWizard.class.getName() + ".adminInitialApiToken";

    /**
     * This property determines the behavior during the SetupWizard install phase concerning the API Token creation
     * for the initial admin account.
     * The behavior depends on the provided value:
     * - true
     *   A token is generated using random value at startup and the information is put
     *   in the file "$JENKINS_HOME/secrets/initialAdminApiToken".
     * - [2-char hash version][32-hex-char of secret], where the hash version is currently only 11.
     *   E.g. 110123456789abcdef0123456789abcdef.
     *   A fixed API Token will be created for the user with that plain value as the token.
     *   It is strongly recommended to use it to generate a new one (random) and then revoke it.
     *   See {@link ApiTokenProperty#generateNewToken(String)} and {@link ApiTokenProperty#revokeAllTokensExceptOne(String)}
     *   for scripting methods or using the web API calls:
     *   /user/[user-login]/descriptorByName/jenkins.security.ApiTokenProperty/generateNewToken and
     *   /user/[user-login]/descriptorByName/jenkins.security.ApiTokenProperty/revokeAllExcept
     * - @[file-location] where the file contains plain text value of the token, all stuff explained above is applicable
     *   The application will not delete the file after read, so the script is responsible to clean up the stuff
     *
     * When the API Token is generated using this system property, it's strongly recommended that you are revoking it
     * during your installation script using the other ways at your disposal so that you have a fresh token
     * with less traces for your script.
     *
     * If you do not provide any value to that system property, the default admin account will not have an API Token.
     *
     * @since TODO (for the existence of the sysprop, not the availability to plugin)
     */
    @Restricted(NoExternalUse.class)
    @SuppressFBWarnings(value = "MS_SHOULD_BE_FINAL", justification = "Accessible via System Groovy Scripts")
    private static /* not final */ String ADMIN_INITIAL_API_TOKEN = SystemProperties.getString(ADMIN_INITIAL_API_TOKEN_PROPERTY_NAME);

    @NonNull
    @Override
    public String getDisplayName() {
        return Messages.SetupWizard_DisplayName();
    }

    /**
     * Initialize the setup wizard, this will process any current state initializations.
     * On a new install: records the current version level, creates the default admin
     * account with a random password (written to secrets/initialAdminPassword), locks
     * the instance down, and logs the setup key. Always refreshes update-site metadata.
     */
    /*package*/ void init(boolean newInstall) throws IOException, InterruptedException {
        Jenkins jenkins = Jenkins.get();

        if(newInstall) {
            // this was determined to be a new install, don't run the update wizard here
            setCurrentLevel(Jenkins.getVersion());

            // Create an admin user by default with a
            // difficult password
            FilePath iapf = getInitialAdminPasswordFile();
            if(jenkins.getSecurityRealm() == null || jenkins.getSecurityRealm() == SecurityRealm.NO_AUTHENTICATION) { // this seems very fragile
                try (BulkChange bc = new BulkChange(jenkins)) {
                    HudsonPrivateSecurityRealm securityRealm = new HudsonPrivateSecurityRealm(false, false, null);
                    jenkins.setSecurityRealm(securityRealm);
                    // 32 lowercase hex chars used as the one-time admin password
                    String randomUUID = UUID.randomUUID().toString().replace("-", "").toLowerCase(Locale.ENGLISH);

                    // create an admin user
                    User initialAdmin = securityRealm.createAccount(SetupWizard.initialSetupAdminUserName, randomUUID);
                    if (ADMIN_INITIAL_API_TOKEN != null) {
                        createInitialApiToken(initialAdmin);
                    }

                    // JENKINS-33599 - write to a file in the jenkins home directory
                    // most native packages of Jenkins creates a machine user account 'jenkins' to run Jenkins,
                    // and use group 'jenkins' for admins. So we allow groups to read this file
                    iapf.touch(System.currentTimeMillis());
                    iapf.chmod(0640);
                    iapf.write(randomUUID + System.lineSeparator(), "UTF-8");

                    // Lock Jenkins down:
                    FullControlOnceLoggedInAuthorizationStrategy authStrategy = new FullControlOnceLoggedInAuthorizationStrategy();
                    authStrategy.setAllowAnonymousRead(false);
                    jenkins.setAuthorizationStrategy(authStrategy);

                    // Disable jnlp by default, but honor system properties
                    jenkins.setSlaveAgentPort(SystemProperties.getInteger(Jenkins.class.getName()+".slaveAgentPort",-1));

                    // require a crumb issuer
                    jenkins.setCrumbIssuer(GlobalCrumbIssuerConfiguration.createDefaultCrumbIssuer());

                    // set master -> slave security:
                    jenkins.getInjector().getInstance(AdminWhitelistRule.class)
                        .setMasterKillSwitch(false);

                    jenkins.save(); // TODO could probably be removed since some of the above setters already call save
                    bc.commit();
                }
            }

            if(iapf.exists()) {
                String setupKey = iapf.readToString().trim();
                String ls = System.lineSeparator();
                LOGGER.info(ls + ls + "*************************************************************" + ls
                        + "*************************************************************" + ls
                        + "*************************************************************" + ls
                        + ls
                        + "Jenkins initial setup is required. An admin user has been created and "
                        + "a password generated." + ls
                        + "Please use the following password to proceed to installation:" + ls
                        + ls
                        + setupKey + ls
                        + ls
                        + "This may also be found at: " + iapf.getRemote() + ls
                        + ls
                        + "*************************************************************" + ls
                        + "*************************************************************" + ls
                        + "*************************************************************" + ls);
            }
        }

        try {
            // Make sure plugin metadata is up to date
            UpdateCenter.updateDefaultSite();
        } catch (Exception e) {
            LOGGER.log(Level.WARNING, e.getMessage(), e);
        }
    }

    /**
     * Creates the initial admin API Token as driven by {@link #ADMIN_INITIAL_API_TOKEN}:
     * "true" generates a random token written to secrets/initialAdminApiToken;
     * "@file" reads the plain token value from the file; any other value is used verbatim.
     */
    private void createInitialApiToken(User user) throws IOException, InterruptedException {
        ApiTokenProperty apiTokenProperty = user.getProperty(ApiTokenProperty.class);

        String sysProp = ADMIN_INITIAL_API_TOKEN;
        if (sysProp.equals("true")) {
            TokenUuidAndPlainValue tokenUuidAndPlainValue = apiTokenProperty.generateNewToken("random-generation-during-setup-wizard");

            FilePath fp = getInitialAdminApiTokenFile();
            // same comment as in the init method
            // JENKINS-33599 - write to a file in the jenkins home directory
            // most native packages of Jenkins creates a machine user account 'jenkins' to run Jenkins,
            // and use group 'jenkins' for admins. So we allow groups to read this file
            fp.touch(System.currentTimeMillis());
            fp.chmod(0640);
            fp.write(tokenUuidAndPlainValue.plainValue, StandardCharsets.UTF_8.name());
            LOGGER.log(Level.INFO, "The API Token was randomly generated and the information was put in {0}", fp.getRemote());
        } else {
            String plainText;
            if (sysProp.startsWith("@")) {
                // no need for path traversal check as it's coming from the instance creator only
                File apiTokenFile = new File(sysProp.substring(1));
                if (!apiTokenFile.exists()) {
                    LOGGER.log(Level.WARNING, "The API Token cannot be retrieved from a non-existing file: {0}", apiTokenFile);
                    return;
                }

                try {
                    plainText = FileUtils.readFileToString(apiTokenFile, StandardCharsets.UTF_8);
                    LOGGER.log(Level.INFO, "API Token generated using contents of file: {0}", apiTokenFile.getAbsolutePath());
                } catch (IOException e) {
                    LOGGER.log(Level.WARNING, String.format("The API Token cannot be retrieved from the file: %s", apiTokenFile), e);
                    return;
                }
            } else {
                LOGGER.log(Level.INFO, "API Token generated using system property: {0}", ADMIN_INITIAL_API_TOKEN_PROPERTY_NAME);
                plainText = sysProp;
            }

            try {
                // addFixedNewToken validates the token format and may reject the provided value
                apiTokenProperty.addFixedNewToken("fix-generation-during-setup-wizard", plainText);
            } catch (IllegalArgumentException e) {
                String constraintFailureMessage = e.getMessage();
                LOGGER.log(Level.WARNING, "The API Token cannot be generated using the provided value due to: {0}", constraintFailureMessage);
            }
        }
    }

    /** Registers {@link #FORCE_SETUP_WIZARD_FILTER} if it is not already installed. */
    private void setUpFilter() {
        try {
            if (!PluginServletFilter.hasFilter(FORCE_SETUP_WIZARD_FILTER)) {
                PluginServletFilter.addFilter(FORCE_SETUP_WIZARD_FILTER);
            }
        } catch (ServletException e) {
            throw new RuntimeException("Unable to add PluginServletFilter for the SetupWizard", e);
        }
    }

    /** Unregisters {@link #FORCE_SETUP_WIZARD_FILTER} if it is currently installed. */
    private void tearDownFilter() {
        try {
            if (PluginServletFilter.hasFilter(FORCE_SETUP_WIZARD_FILTER)) {
                PluginServletFilter.removeFilter(FORCE_SETUP_WIZARD_FILTER);
            }
        } catch (ServletException e) {
            throw new RuntimeException("Unable to remove PluginServletFilter for the SetupWizard", e);
        }
    }

    /**
     * Indicates a generated password should be used - e.g. this is a new install, no security realm set up
     */
    @SuppressWarnings("unused") // used by jelly
    public boolean isUsingSecurityToken() {
        try {
            return !Jenkins.get().getInstallState().isSetupComplete()
                    && isUsingSecurityDefaults();
        } catch (Exception e) {
            // ignore: any failure here simply means "not using the token", which is the safe answer
        }
        return false;
    }

    /**
     * Determines if the security settings seem to match the defaults. Here, we only
     * really care about and test for HudsonPrivateSecurityRealm and the user setup.
     * Other settings are irrelevant.
     */
    /*package*/ boolean isUsingSecurityDefaults() {
        Jenkins j = Jenkins.get();
        if (j.getSecurityRealm() instanceof HudsonPrivateSecurityRealm) {
            HudsonPrivateSecurityRealm securityRealm = (HudsonPrivateSecurityRealm)j.getSecurityRealm();
            try {
                if(securityRealm.getAllUsers().size() == 1) {
                    HudsonPrivateSecurityRealm.Details details = securityRealm.load(SetupWizard.initialSetupAdminUserName);
                    FilePath iapf = getInitialAdminPasswordFile();
                    if (iapf.exists()) {
                        // defaults confirmed only if the on-disk setup key still matches the admin password
                        if (details.isPasswordCorrect(iapf.readToString().trim())) {
                            return true;
                        }
                    }
                }
            } catch(UsernameNotFoundException | IOException | InterruptedException e) {
                return false; // Not initial security setup if no transitional admin user / password found
            }
        }
        return false;
    }

    /**
     * Called during the initial setup to create an admin user.
     * Deletes the transitional 'admin' account (keeping its API token property, if any),
     * creates the real account from the submitted form, removes the temporary secrets
     * files, logs the new user in (with session-fixation protection), and returns the
     * crumb information as JSON. On validation failure responds 422 and re-renders the form.
     */
    @POST
    @Restricted(NoExternalUse.class)
    public HttpResponse doCreateAdminUser(StaplerRequest req, StaplerResponse rsp) throws IOException {
        Jenkins j = Jenkins.get();
        j.checkPermission(Jenkins.ADMINISTER);

        // This will be set up by default. if not, something changed, ok to fail
        HudsonPrivateSecurityRealm securityRealm = (HudsonPrivateSecurityRealm) j.getSecurityRealm();

        User admin = securityRealm.getUser(SetupWizard.initialSetupAdminUserName);
        try {
            ApiTokenProperty initialApiTokenProperty = null;

            if (admin != null) {
                initialApiTokenProperty = admin.getProperty(ApiTokenProperty.class);
                admin.delete(); // assume the new user may well be 'admin'
            }

            User newUser = securityRealm.createAccountFromSetupWizard(req);
            if (admin != null) {
                // creation succeeded: null out so the finally block does not resurrect the old account
                admin = null;
            }

            if (initialApiTokenProperty != null) {
                // actually it will remove the current one and replace it with the one from initial admin
                newUser.addProperty(initialApiTokenProperty);
            }

            // Success! Delete the temporary password file:
            try {
                getInitialAdminPasswordFile().delete();
            } catch (InterruptedException e) {
                throw new IOException(e);
            }

            try {
                FilePath fp = getInitialAdminApiTokenFile();
                // no care about TOCTOU as it's done during instance creation process only (i.e. not yet user reachable)
                if (fp.exists()) {
                    fp.delete();
                }
            } catch (InterruptedException e) {
                throw new IOException(e);
            }

            InstallUtil.proceedToNextStateFrom(InstallState.CREATE_ADMIN_USER);

            // ... and then login
            Authentication auth = new UsernamePasswordAuthenticationToken(newUser.getId(), req.getParameter("password1"));
            auth = securityRealm.getSecurityComponents().manager2.authenticate(auth);
            SecurityContextHolder.getContext().setAuthentication(auth);

            HttpSession session = req.getSession(false);
            if (session != null) {
                // avoid session fixation
                session.invalidate();
            }
            HttpSession newSession = req.getSession(true);

            UserSeedProperty userSeed = newUser.getProperty(UserSeedProperty.class);
            String sessionSeed = userSeed.getSeed();
            // include the new seed
            newSession.setAttribute(UserSeedProperty.USER_SESSION_SEED, sessionSeed);

            CrumbIssuer crumbIssuer = Jenkins.get().getCrumbIssuer();
            JSONObject data = new JSONObject();
            if (crumbIssuer != null) {
                data.accumulate("crumbRequestField", crumbIssuer.getCrumbRequestField()).accumulate("crumb", crumbIssuer.getCrumb(req));
            }
            return HttpResponses.okJSON(data);
        } catch (AccountCreationFailedException e) {
            /*
            Return Unprocessable Entity from WebDAV. While this is not technically in the HTTP/1.1 standard, browsers
            seem to accept this. 400 Bad Request is technically inappropriate because that
            implies invalid *syntax*, not incorrect data. The client only cares about it being >200 anyways.
             */
            rsp.setStatus(422);
            return HttpResponses.forwardToView(securityRealm, "/jenkins/install/SetupWizard/setupWizardFirstUser.jelly");
        } finally {
            if (admin != null) {
                admin.save(); // recreate this initial user if something failed
            }
        }
    }

    /**
     * Validates and applies the instance root URL submitted by the wizard,
     * then advances the install state. Returns crumb info as JSON, or a
     * validation-error JSON payload.
     */
    @POST
    @Restricted(NoExternalUse.class)
    public HttpResponse doConfigureInstance(StaplerRequest req, @QueryParameter String rootUrl) {
        Jenkins.get().checkPermission(Jenkins.ADMINISTER);

        Map<String, String> errors = new HashMap<>();
        // pre-check data
        checkRootUrl(errors, rootUrl);

        if(!errors.isEmpty()){
            return HttpResponses.errorJSON(Messages.SetupWizard_ConfigureInstance_ValidationErrors(), errors);
        }

        // use the parameters to configure the instance
        useRootUrl(errors, rootUrl);

        if(!errors.isEmpty()){
            return HttpResponses.errorJSON(Messages.SetupWizard_ConfigureInstance_ValidationErrors(), errors);
        }

        InstallUtil.proceedToNextStateFrom(InstallState.CONFIGURE_INSTANCE);

        CrumbIssuer crumbIssuer = Jenkins.get().getCrumbIssuer();
        JSONObject data = new JSONObject();
        if (crumbIssuer != null) {
            data.accumulate("crumbRequestField", crumbIssuer.getCrumbRequestField()).accumulate("crumb", crumbIssuer.getCrumb(req));
        }
        return HttpResponses.okJSON(data);
    }

    /** Adds a localized message to {@code errors} when {@code rootUrl} is missing or malformed. */
    private void checkRootUrl(Map<String, String> errors, @CheckForNull String rootUrl){
        if(rootUrl == null){
            errors.put("rootUrl", Messages.SetupWizard_ConfigureInstance_RootUrl_Empty());
            return;
        }
        if(!UrlHelper.isValidRootUrl(rootUrl)){
            errors.put("rootUrl", Messages.SetupWizard_ConfigureInstance_RootUrl_Invalid());
        }
    }

    /** Persists the validated root URL into the global location configuration. */
    private void useRootUrl(Map<String, String> errors, @CheckForNull String rootUrl){
        LOGGER.log(Level.FINE, "Root URL set during SetupWizard to {0}", new Object[]{ rootUrl });
        JenkinsLocationConfiguration.getOrDie().setUrl(rootUrl);
    }

    /** Records the given version as the current upgrade level in the state file. */
    /*package*/ void setCurrentLevel(VersionNumber v) throws IOException {
        // NOTE(review): charset-less writeStringToFile uses the platform default charset — consider the UTF-8 overload
        FileUtils.writeStringToFile(getUpdateStateFile(), v.toString());
    }

    /**
     * File that captures the state of upgrade.
     *
     * This file records the version number that the installation has upgraded to.
     */
    /*package*/ static File getUpdateStateFile() {
        return new File(Jenkins.get().getRootDir(),"jenkins.install.UpgradeWizard.state");
    }

    /**
     * What is the version the upgrade wizard has run the last time and upgraded to?.
     * If {@link #getUpdateStateFile()} is missing, presumes the baseline is 1.0
     * @return Current baseline. {@code null} if it cannot be retrieved.
     */
    @Restricted(NoExternalUse.class)
    @CheckForNull
    public VersionNumber getCurrentLevel() {
        VersionNumber from = new VersionNumber("1.0");
        File state = getUpdateStateFile();
        if (state.exists()) {
            try {
                from = new VersionNumber(defaultIfBlank(readFileToString(state), "1.0").trim());
            } catch (IOException ex) {
                LOGGER.log(Level.SEVERE, "Cannot read the current version file", ex);
                return null;
            }
        }
        return from;
    }

    /**
     * Returns the initial plugin list in JSON format.
     * In the UPGRADE state, returns only plugins added since the recorded level.
     */
    @Restricted(DoNotUse.class) // WebOnly
    public HttpResponse doPlatformPluginList() throws IOException {
        SetupWizard setupWizard = Jenkins.get().getSetupWizard();
        if (setupWizard != null) {
            if (InstallState.UPGRADE.equals(Jenkins.get().getInstallState())) {
                JSONArray initialPluginData = getPlatformPluginUpdates();
                if(initialPluginData != null) {
                    return HttpResponses.okJSON(initialPluginData);
                }
            } else {
                JSONArray initialPluginData = getPlatformPluginList();
                if(initialPluginData != null) {
                    return HttpResponses.okJSON(initialPluginData);
                }
            }
        }
        return HttpResponses.okJSON();
    }

    /**
     * Returns whether the system needs a restart, and if it is supported
     * e.g. { restartRequired: true, restartSupported: false }
     */
    @Restricted(DoNotUse.class) // WebOnly
    public HttpResponse doRestartStatus() throws IOException {
        JSONObject response = new JSONObject();
        Jenkins jenkins = Jenkins.get();
        response.put("restartRequired", jenkins.getUpdateCenter().isRestartRequiredForCompletion());
        response.put("restartSupported", jenkins.getLifecycle().canRestart());
        return HttpResponses.okJSON(response);
    }

    /**
     * Provides the list of platform plugin updates from the last time
     * the upgrade was run.
     * @return {@code null} if the version range cannot be retrieved.
     */
    @CheckForNull
    public JSONArray getPlatformPluginUpdates() {
        final VersionNumber version = getCurrentLevel();
        if (version == null) {
            return null;
        }
        return getPlatformPluginsForUpdate(version, Jenkins.getVersion());
    }

    /**
     * Gets the suggested plugin list from the update sites, falling back to a local version
     * @return JSON array with the categorized plugin list
     */
    @CheckForNull
    /*package*/ JSONArray getPlatformPluginList() {
        Jenkins.get().checkPermission(Jenkins.ADMINISTER);
        JSONArray initialPluginList = null;

        // first update site that answers wins; failures fall through to the bundled copy
        updateSiteList: for (UpdateSite updateSite : Jenkins.get().getUpdateCenter().getSiteList()) {
            String updateCenterJsonUrl = updateSite.getUrl();
            String suggestedPluginUrl = updateCenterJsonUrl.replace("/update-center.json", "/platform-plugins.json");
            try {
                URLConnection connection = ProxyConfiguration.open(new URL(suggestedPluginUrl));

                try {
                    if(connection instanceof HttpURLConnection) {
                        int responseCode = ((HttpURLConnection)connection).getResponseCode();
                        if(HttpURLConnection.HTTP_OK != responseCode) {
                            throw new HttpRetryException("Invalid response code (" + responseCode + ") from URL: " + suggestedPluginUrl, responseCode);
                        }
                    }

                    String initialPluginJson = IOUtils.toString(connection.getInputStream(), StandardCharsets.UTF_8);
                    initialPluginList = JSONArray.fromObject(initialPluginJson);
                    break updateSiteList;
                } catch(Exception e) {
                    // not found or otherwise unavailable
                    LOGGER.log(Level.FINE, e.getMessage(), e);
                    continue updateSiteList;
                }
            } catch(Exception e) {
                LOGGER.log(Level.FINE, e.getMessage(), e);
            }
        }
        if (initialPluginList == null) {
            // fall back to local file
            try {
                ClassLoader cl = getClass().getClassLoader();
                URL localPluginData = cl.getResource("jenkins/install/platform-plugins.json");
                String initialPluginJson = IOUtils.toString(localPluginData.openStream(), StandardCharsets.UTF_8);
                initialPluginList = JSONArray.fromObject(initialPluginJson);
            } catch (Exception e) {
                LOGGER.log(Level.SEVERE, e.getMessage(), e);
            }
        }
        return initialPluginList;
    }

    /**
     * Get the platform plugins added in the version range (from, to]:
     * keeps only plugins whose "added" version is in range, not yet installed,
     * and available in a compatible version from some update site.
     */
    /*package*/ JSONArray getPlatformPluginsForUpdate(VersionNumber from, VersionNumber to) {
        Jenkins jenkins = Jenkins.get();
        // deep copy via JSON round-trip so the cached list is not mutated by the removals below
        JSONArray pluginCategories = JSONArray.fromObject(getPlatformPluginList().toString());
        for (Iterator<?> categoryIterator = pluginCategories.iterator(); categoryIterator.hasNext();) {
            Object category = categoryIterator.next();
            if (category instanceof JSONObject) {
                JSONObject cat = (JSONObject)category;
                JSONArray plugins = cat.getJSONArray("plugins");

                nextPlugin: for (Iterator<?> pluginIterator = plugins.iterator(); pluginIterator.hasNext();) {
                    Object pluginData = pluginIterator.next();
                    if (pluginData instanceof JSONObject) {
                        JSONObject plugin = (JSONObject)pluginData;
                        if (plugin.has("added")) {
                            String sinceVersion = plugin.getString("added");
                            if (sinceVersion != null) {
                                VersionNumber v = new VersionNumber(sinceVersion);
                                if(v.compareTo(to) <= 0 && v.compareTo(from) > 0) {
                                    // This plugin is valid, we'll leave "suggested" state
                                    // to match the experience during install
                                    // but only add it if it's currently uninstalled
                                    String pluginName = plugin.getString("name");
                                    if (null == jenkins.getPluginManager().getPlugin(pluginName)) {
                                        // Also check that a compatible version exists in an update site
                                        boolean foundCompatibleVersion = false;
                                        for (UpdateSite site : jenkins.getUpdateCenter().getSiteList()) {
                                            UpdateSite.Plugin sitePlug = site.getPlugin(pluginName);
                                            if (sitePlug != null
                                                    && !sitePlug.isForNewerHudson() && !sitePlug.isForNewerJava()
                                                    && !sitePlug.isNeededDependenciesForNewerJenkins()) {
                                                foundCompatibleVersion = true;
                                                break;
                                            }
                                        }
                                        if (foundCompatibleVersion) {
                                            // keep this plugin in the result set
                                            continue nextPlugin;
                                        }
                                    }
                                }
                            }
                        }
                    }
                    // anything that did not 'continue nextPlugin' above is filtered out
                    pluginIterator.remove();
                }

                if (plugins.isEmpty()) {
                    categoryIterator.remove();
                }
            }
        }
        return pluginCategories;
    }

    /**
     * Gets the file used to store the initial admin password
     */
    public FilePath getInitialAdminPasswordFile() {
        return Jenkins.get().getRootPath().child("secrets/initialAdminPassword");
    }

    /**
     * Gets the file used to store the initial admin API Token, in case the system property
     * {@link #ADMIN_INITIAL_API_TOKEN} is set to "true" (and only in this case).
     */
    @Restricted(NoExternalUse.class)
    public FilePath getInitialAdminApiTokenFile() {
        return Jenkins.get().getRootPath().child("secrets/initialAdminApiToken");
    }

    /**
     * Remove the setupWizard filter, ensure all updates are written to disk, etc
     */
    @RequirePOST
    public HttpResponse doCompleteInstall() throws IOException, ServletException {
        completeSetup();
        return HttpResponses.okJSON();
    }

    /** Finalizes setup: records the executed version/level and advances to the completed state. */
    /*package*/ void completeSetup() throws IOException, ServletException {
        Jenkins.get().checkPermission(Jenkins.ADMINISTER);
        InstallUtil.saveLastExecVersion();
        setCurrentLevel(Jenkins.getVersion());
        InstallUtil.proceedToNextStateFrom(InstallState.INITIAL_SETUP_COMPLETED);
    }

    /**
     * Gets all the install states
     */
    public List<InstallState> getInstallStates() {
        return InstallState.all();
    }

    /**
     * Returns an installState by name
     */
    public InstallState getInstallState(String name) {
        if (name == null) {
            return null;
        }
        return InstallState.valueOf(name);
    }

    /**
     * Called upon install state update.
     * @param state the new install state.
     * @since 2.94
     */
    public void onInstallStateUpdate(InstallState state) {
        if (state.isSetupComplete()) {
            tearDownFilter();
        } else {
            setUpFilter();
        }
    }

    /**
     * Returns whether the setup wizard filter is currently registered.
     * @since 2.94
     */
    public boolean hasSetupWizardFilter() {
        return PluginServletFilter.hasFilter(FORCE_SETUP_WIZARD_FILTER);
    }

    /**
     * This filter will validate that the security token is provided
     */
    private final Filter FORCE_SETUP_WIZARD_FILTER = new Filter() {
        @Override
        public void init(FilterConfig cfg) throws ServletException {
        }

        @Override
        public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
            // Force root requests to the setup wizard
            if (request instanceof HttpServletRequest && !Jenkins.get().getInstallState().isSetupComplete()) {
                HttpServletRequest req = (HttpServletRequest) request;
                String requestURI = req.getRequestURI();
                if (requestURI.equals(req.getContextPath()) && !requestURI.endsWith("/")) {
                    // normalize the context-path request to end with a slash
                    ((HttpServletResponse) response).sendRedirect(req.getContextPath() + "/");
                    return;
                } else if (req.getRequestURI().equals(req.getContextPath() + "/")) {
                    Jenkins.get().checkPermission(Jenkins.ADMINISTER);
                    // rewrite the root request so it renders the setup wizard view
                    chain.doFilter(new HttpServletRequestWrapper(req) {
                        public String getRequestURI() {
                            return getContextPath() + "/setupWizard/";
                        }
                    }, response);
                    return;
                }
                // fall through to handling the request normally
            }
            chain.doFilter(request, response);
        }

        @Override
        public void destroy() {
        }
    };

    /**
     * Sets up the Setup Wizard filter if the current state requires it.
     */
    private void checkFilter() {
        if (!Jenkins.get().getInstallState().isSetupComplete()) {
            setUpFilter();
        }
    }
}
/**
 * Licensed to Cloudera, Inc. under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  Cloudera, Inc. licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.cloudera.flume.handlers.hive;

import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;

import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.cloudera.flume.conf.Context;
import com.cloudera.flume.conf.FlumeConfiguration;
import com.cloudera.flume.conf.FlumeSpecException;
import com.cloudera.flume.conf.SinkFactory.SinkBuilder;
import com.cloudera.flume.core.Event;
import com.cloudera.flume.core.EventSink;
import com.cloudera.flume.handlers.avro.AvroJsonOutputFormat;
import com.cloudera.flume.handlers.hdfs.CustomDfsSink;
import com.cloudera.flume.handlers.text.FormatFactory;
import com.cloudera.flume.handlers.text.output.OutputFormat;
import com.cloudera.flume.handlers.text.output.RawOutputFormat;
import com.google.common.base.Preconditions;

/**
 * Writes events to a file given a hadoop uri path. If no uri is specified, it
 * defaults to the one configured by the fs.default.name config
 * variable. The user can specify an output format for the file. If none is
 * specified, the default set by flume.collector.outputformat in the flume
 * configuration file is used.
 *
 *
 *
 * TODO (jon) refactor this to be sane. Not happening now.
 */
public class HiveNotifyingDfsSink extends EventSink.Base {
  static final Logger LOG = LoggerFactory.getLogger(HiveNotifyingDfsSink.class);
  // directory part of the (possibly templated) output path
  final String dirpath;
  final OutputFormat format;
  // Hive table name reported in the dir-created notifications
  final String hivetable;
  final HiveDirCreatedHandler handler;

  EventSink writer = null;

  // We keep a - potentially unbounded - set of writers around to deal with
  // different tags on events. Therefore this feature should be used with some
  // care (where the set of possible paths is small) until we do something
  // more sensible with resource management.
  final Map<String, EventSink> sfWriters = new HashMap<String, EventSink>();

  // Used to short-circuit around doing regex matches when we know there are
  // no templates to be replaced.
  private String filename = "";
  protected String absolutePath = "";

  /**
   * Builds a sink writing to {@code path}/{@code filename} (separator inserted
   * if needed) that notifies {@code handler} whenever a new directory/file is
   * opened for the given Hive table.
   */
  public HiveNotifyingDfsSink(String path, String filename, String hivetable,
      OutputFormat o, HiveDirCreatedHandler handler) {
    this.dirpath = path;
    this.filename = filename;
    this.format = o;
    absolutePath = path;
    this.hivetable = hivetable;
    if (filename != null && filename.length() > 0) {
      if (!absolutePath.endsWith(Path.SEPARATOR)) {
        absolutePath += Path.SEPARATOR;
      }
      absolutePath += this.filename;
    }

    if (!(o instanceof AvroJsonOutputFormat)) {
      // warn only; any format is accepted, but Hive-side reads may not work
      LOG
          .warn("Currently, hive only supports only AvroJson output format SerDe.");
    }
    this.handler = handler;
  }

  /** Convenience constructor using the configured default output format and the default handler. */
  public HiveNotifyingDfsSink(String path, String filename, String hivetable) {
    this(path, filename, hivetable, getDefaultOutputFormat(),
        new DefaultHandler());
  }

  /** Default notification handler: currently just logs the event. */
  static class DefaultHandler implements HiveDirCreatedHandler {
    @Override
    public void handleNotification(HiveDirCreatedNotification notif) {
      LOG.info("Notifying Hive Metastore with ready event " + notif);
      // TODO (carl) This should be queued to a scheduler that will spawn
      // a thread to handle the notification.
    }
  };

  /** Decorator that suppresses duplicate notifications for directories already seen. */
  static class DedupDefaultHandler implements HiveDirCreatedHandler {
    HashSet<String> cache = new HashSet<String>();
    HiveDirCreatedHandler simple;

    public DedupDefaultHandler(HiveDirCreatedHandler hfrh) {
      this.simple = hfrh;
    }

    public void handleNotification(HiveDirCreatedNotification notif) {
      if (cache.contains(notif.getNotifDir())) {
        return;
      }
      simple.handleNotification(notif);
      cache.add(notif.getNotifDir());
    }
  }

  /** Resolves the configured default output format, falling back to raw output on bad config. */
  static protected OutputFormat getDefaultOutputFormat() {
    try {
      return FormatFactory.get().getOutputFormat(
          FlumeConfiguration.get().getDefaultOutputFormat());
    } catch (FlumeSpecException e) {
      LOG.warn("format from conf file not found, using default", e);
      return new RawOutputFormat();
    }
  }

  /**
   * Returns the (cached) sink for the event's escaped path, creating and
   * opening it on first use. New sinks are wrapped so that closing them fires
   * a HiveDirCreatedNotification for the escaped directory and its partitions.
   */
  protected EventSink getWriter(Event e) throws IOException,
      InterruptedException {
    final Event evt = e;
    String realpath = e.escapeString(absolutePath);
    EventSink w = sfWriters.get(realpath);
    if (w != null) {
      // uses already existing sink.
      return w;
    }

    // no sink exists for this event's escaped path yet; create and open one.
    LOG.info("Opening " + realpath);
    w = new CustomDfsSink(realpath, format);
    SinkCloseNotifier<EventSink, HiveDirCreatedNotification> notif = new SinkCloseNotifier<EventSink, HiveDirCreatedNotification>(
        w) {
      @Override
      public HiveDirCreatedNotification getNotificationEvent() {
        // take the dir part of the path
        String escdirpath = evt.escapeString(dirpath);
        Map<String, String> partitions = evt.getEscapeMapping(dirpath);
        return new HiveDirCreatedNotification(hivetable, escdirpath, partitions);
      }

      @Override
      public void notify(HiveDirCreatedNotification e) {
        handler.handleNotification(e);
      }
    };
    notif.open();
    sfWriters.put(realpath, notif);
    return notif;
  }

  /**
   * Writes the message to an HDFS file whose path is substituted with tags
   * drawn from the supplied event
   */
  @Override
  public void append(Event e) throws IOException, InterruptedException {
    EventSink w = getWriter(e);
    w.append(e);
    super.append(e);
  }

  /**
   * Closes every cached per-path writer.
   * NOTE(review): sfWriters is not cleared here, so closed sinks remain cached —
   * confirm whether this sink is expected to be reusable after close().
   */
  @Override
  public void close() throws IOException, InterruptedException {
    for (Entry<String, EventSink> e : sfWriters.entrySet()) {
      LOG.info("Closing " + e.getKey());
      e.getValue().close();
    }
  }

  /** No-op: per-path writers are opened lazily in getWriter(). */
  @Override
  public void open() throws IOException {
    // Initialize parent directory.
  }

  /**
   * Builder for the 'hivedfs' sink spec:
   * hivedfs("[scheme://namenode[:port]]/path", "hivetable" [, outputformat]).
   */
  public static SinkBuilder builder() {
    return new SinkBuilder() {
      @Override
      public EventSink build(Context context, String... args) {
        Preconditions.checkArgument(args.length >= 2 && args.length <= 3,
            "usage: hivedfs(\"[(hdfs|file|s3n|...)://namenode[:port]]/path\""
                + ", \"hivetable\", [, outputformat ])");
        String format = FlumeConfiguration.get().getDefaultOutputFormat();
        String hivetable = args[1];
        if (args.length >= 3) {
          format = args[2];
        }
        OutputFormat o;
        try {
          o = FormatFactory.get().getOutputFormat(format);
        } catch (FlumeSpecException e) {
          LOG.warn("Illegal format type " + format + ".", e);
          o = null;
        }
        Preconditions.checkArgument(o != null, "Illegal format type " + format
            + ".");

        return new HiveNotifyingDfsSink(args[0], "", hivetable, o,
            new DefaultHandler());
      }
    };
  }
}
// Copyright (C) 2017 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.server.group.db; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import static com.google.gerrit.truth.OptionalSubject.assertThat; import static org.hamcrest.CoreMatchers.instanceOf; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; import com.google.gerrit.common.Nullable; import com.google.gerrit.common.data.GroupDescription; import com.google.gerrit.reviewdb.client.Account; import com.google.gerrit.reviewdb.client.AccountGroup; import com.google.gerrit.reviewdb.client.Project; import com.google.gerrit.reviewdb.client.RefNames; import com.google.gerrit.server.extensions.events.GitReferenceUpdated; import com.google.gerrit.server.git.meta.MetaDataUpdate; import com.google.gerrit.server.group.InternalGroup; import com.google.gerrit.server.group.testing.InternalGroupSubject; import com.google.gerrit.server.util.time.TimeUtil; import com.google.gerrit.truth.OptionalSubject; import com.google.gwtorm.client.KeyUtil; import com.google.gwtorm.server.StandardKeyEncoder; import java.io.IOException; import java.sql.Timestamp; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.Month; import java.time.ZoneId; import java.util.Optional; import java.util.TimeZone; import org.eclipse.jgit.errors.ConfigInvalidException; import 
org.eclipse.jgit.internal.storage.dfs.DfsRepositoryDescription; import org.eclipse.jgit.internal.storage.dfs.InMemoryRepository; import org.eclipse.jgit.junit.TestRepository; import org.eclipse.jgit.lib.PersonIdent; import org.eclipse.jgit.lib.Ref; import org.eclipse.jgit.lib.Repository; import org.eclipse.jgit.revwalk.RevCommit; import org.eclipse.jgit.revwalk.RevWalk; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; public class GroupConfigTest { static { // Necessary so that toString() methods of ReviewDb entities work correctly. KeyUtil.setEncoderImpl(new StandardKeyEncoder()); } @Rule public ExpectedException expectedException = ExpectedException.none(); private Project.NameKey projectName; private Repository repository; private TestRepository<?> testRepository; private final AccountGroup.UUID groupUuid = new AccountGroup.UUID("users-XYZ"); private final AccountGroup.NameKey groupName = new AccountGroup.NameKey("users"); private final AccountGroup.Id groupId = new AccountGroup.Id(123); private final AuditLogFormatter auditLogFormatter = AuditLogFormatter.createBackedBy(ImmutableSet.of(), ImmutableSet.of(), "server-id"); private final TimeZone timeZone = TimeZone.getTimeZone("America/Los_Angeles"); @Before public void setUp() throws Exception { projectName = new Project.NameKey("Test Repository"); repository = new InMemoryRepository(new DfsRepositoryDescription("Test Repository")); testRepository = new TestRepository<>(repository); } @Test public void specifiedGroupUuidIsRespectedForNewGroup() throws Exception { InternalGroupCreation groupCreation = getPrefilledGroupCreationBuilder().setGroupUUID(groupUuid).build(); createGroup(groupCreation); Optional<InternalGroup> group = loadGroup(groupUuid); assertThatGroup(group).value().groupUuid().isEqualTo(groupUuid); } @Test public void specifiedNameIsRespectedForNewGroup() throws Exception { InternalGroupCreation groupCreation = 
getPrefilledGroupCreationBuilder().setNameKey(groupName).build(); createGroup(groupCreation); Optional<InternalGroup> group = loadGroup(groupCreation.getGroupUUID()); assertThatGroup(group).value().nameKey().isEqualTo(groupName); } @Test public void nameOfGroupUpdateOverridesGroupCreation() throws Exception { AccountGroup.NameKey anotherName = new AccountGroup.NameKey("Another name"); InternalGroupCreation groupCreation = getPrefilledGroupCreationBuilder().setNameKey(groupName).build(); InternalGroupUpdate groupUpdate = InternalGroupUpdate.builder().setName(anotherName).build(); createGroup(groupCreation, groupUpdate); Optional<InternalGroup> group = loadGroup(groupCreation.getGroupUUID()); assertThatGroup(group).value().nameKey().isEqualTo(anotherName); } @Test public void nameOfNewGroupMustNotBeEmpty() throws Exception { InternalGroupCreation groupCreation = getPrefilledGroupCreationBuilder().setNameKey(new AccountGroup.NameKey("")).build(); GroupConfig groupConfig = GroupConfig.createForNewGroup(projectName, repository, groupCreation); try (MetaDataUpdate metaDataUpdate = createMetaDataUpdate()) { expectedException.expectCause(instanceOf(ConfigInvalidException.class)); expectedException.expectMessage("Name of the group " + groupUuid); groupConfig.commit(metaDataUpdate); } } @Test public void nameOfNewGroupMustNotBeNull() throws Exception { InternalGroupCreation groupCreation = getPrefilledGroupCreationBuilder().setNameKey(new AccountGroup.NameKey(null)).build(); GroupConfig groupConfig = GroupConfig.createForNewGroup(projectName, repository, groupCreation); try (MetaDataUpdate metaDataUpdate = createMetaDataUpdate()) { expectedException.expectCause(instanceOf(ConfigInvalidException.class)); expectedException.expectMessage("Name of the group " + groupUuid); groupConfig.commit(metaDataUpdate); } } @Test public void specifiedIdIsRespectedForNewGroup() throws Exception { InternalGroupCreation groupCreation = getPrefilledGroupCreationBuilder().setId(groupId).build(); 
createGroup(groupCreation); Optional<InternalGroup> group = loadGroup(groupCreation.getGroupUUID()); assertThatGroup(group).value().id().isEqualTo(groupId); } @Test public void idOfNewGroupMustNotBeNegative() throws Exception { InternalGroupCreation groupCreation = getPrefilledGroupCreationBuilder().setId(new AccountGroup.Id(-2)).build(); GroupConfig groupConfig = GroupConfig.createForNewGroup(projectName, repository, groupCreation); try (MetaDataUpdate metaDataUpdate = createMetaDataUpdate()) { expectedException.expectCause(instanceOf(ConfigInvalidException.class)); expectedException.expectMessage("ID of the group " + groupUuid); groupConfig.commit(metaDataUpdate); } } @Test public void descriptionDefaultsToNull() throws Exception { InternalGroupCreation groupCreation = InternalGroupCreation.builder() .setGroupUUID(groupUuid) .setNameKey(groupName) .setId(groupId) .build(); createGroup(groupCreation); Optional<InternalGroup> group = loadGroup(groupCreation.getGroupUUID()); assertThatGroup(group).value().description().isNull(); } @Test public void specifiedDescriptionIsRespectedForNewGroup() throws Exception { String description = "This is a test group."; InternalGroupCreation groupCreation = getPrefilledGroupCreationBuilder().build(); InternalGroupUpdate groupUpdate = InternalGroupUpdate.builder().setDescription(description).build(); createGroup(groupCreation, groupUpdate); Optional<InternalGroup> group = loadGroup(groupCreation.getGroupUUID()); assertThatGroup(group).value().description().isEqualTo(description); } @Test public void emptyDescriptionForNewGroupIsIgnored() throws Exception { InternalGroupCreation groupCreation = getPrefilledGroupCreationBuilder().build(); InternalGroupUpdate groupUpdate = InternalGroupUpdate.builder().setDescription("").build(); createGroup(groupCreation, groupUpdate); Optional<InternalGroup> group = loadGroup(groupCreation.getGroupUUID()); assertThatGroup(group).value().description().isNull(); } @Test public void 
ownerGroupUuidDefaultsToGroupItself() throws Exception { InternalGroupCreation groupCreation = InternalGroupCreation.builder() .setGroupUUID(groupUuid) .setNameKey(groupName) .setId(groupId) .build(); createGroup(groupCreation); Optional<InternalGroup> group = loadGroup(groupCreation.getGroupUUID()); assertThatGroup(group).value().ownerGroupUuid().isEqualTo(groupUuid); } @Test public void specifiedOwnerGroupUuidIsRespectedForNewGroup() throws Exception { AccountGroup.UUID ownerGroupUuid = new AccountGroup.UUID("anotherOwnerUuid"); InternalGroupCreation groupCreation = getPrefilledGroupCreationBuilder().build(); InternalGroupUpdate groupUpdate = InternalGroupUpdate.builder().setOwnerGroupUUID(ownerGroupUuid).build(); createGroup(groupCreation, groupUpdate); Optional<InternalGroup> group = loadGroup(groupCreation.getGroupUUID()); assertThatGroup(group).value().ownerGroupUuid().isEqualTo(ownerGroupUuid); } @Test public void ownerGroupUuidOfNewGroupMustNotBeNull() throws Exception { InternalGroupCreation groupCreation = getPrefilledGroupCreationBuilder().build(); InternalGroupUpdate groupUpdate = InternalGroupUpdate.builder().setOwnerGroupUUID(new AccountGroup.UUID(null)).build(); GroupConfig groupConfig = GroupConfig.createForNewGroup(projectName, repository, groupCreation); groupConfig.setGroupUpdate(groupUpdate, auditLogFormatter); try (MetaDataUpdate metaDataUpdate = createMetaDataUpdate()) { expectedException.expectCause(instanceOf(ConfigInvalidException.class)); expectedException.expectMessage("Owner UUID of the group " + groupUuid); groupConfig.commit(metaDataUpdate); } } @Test public void ownerGroupUuidOfNewGroupMustNotBeEmpty() throws Exception { InternalGroupCreation groupCreation = getPrefilledGroupCreationBuilder().build(); InternalGroupUpdate groupUpdate = InternalGroupUpdate.builder().setOwnerGroupUUID(new AccountGroup.UUID("")).build(); GroupConfig groupConfig = GroupConfig.createForNewGroup(projectName, repository, groupCreation); 
groupConfig.setGroupUpdate(groupUpdate, auditLogFormatter); try (MetaDataUpdate metaDataUpdate = createMetaDataUpdate()) { expectedException.expectCause(instanceOf(ConfigInvalidException.class)); expectedException.expectMessage("Owner UUID of the group " + groupUuid); groupConfig.commit(metaDataUpdate); } } @Test public void visibleToAllDefaultsToFalse() throws Exception { InternalGroupCreation groupCreation = InternalGroupCreation.builder() .setGroupUUID(groupUuid) .setNameKey(groupName) .setId(groupId) .build(); createGroup(groupCreation); Optional<InternalGroup> group = loadGroup(groupCreation.getGroupUUID()); assertThatGroup(group).value().visibleToAll().isFalse(); } @Test public void specifiedVisibleToAllIsRespectedForNewGroup() throws Exception { InternalGroupCreation groupCreation = getPrefilledGroupCreationBuilder().build(); InternalGroupUpdate groupUpdate = InternalGroupUpdate.builder().setVisibleToAll(true).build(); createGroup(groupCreation, groupUpdate); Optional<InternalGroup> group = loadGroup(groupCreation.getGroupUUID()); assertThatGroup(group).value().visibleToAll().isTrue(); } @Test public void createdOnDefaultsToNow() throws Exception { // Git timestamps are only precise to the second. 
Timestamp testStart = TimeUtil.truncateToSecond(TimeUtil.nowTs()); InternalGroupCreation groupCreation = InternalGroupCreation.builder() .setGroupUUID(groupUuid) .setNameKey(groupName) .setId(groupId) .build(); createGroup(groupCreation); Optional<InternalGroup> group = loadGroup(groupCreation.getGroupUUID()); assertThatGroup(group).value().createdOn().isAtLeast(testStart); } @Test public void specifiedCreatedOnIsRespectedForNewGroup() throws Exception { Timestamp createdOn = toTimestamp(LocalDate.of(2017, Month.DECEMBER, 11).atTime(13, 44, 10)); InternalGroupCreation groupCreation = getPrefilledGroupCreationBuilder().build(); InternalGroupUpdate groupUpdate = InternalGroupUpdate.builder().setUpdatedOn(createdOn).build(); createGroup(groupCreation, groupUpdate); Optional<InternalGroup> group = loadGroup(groupCreation.getGroupUUID()); assertThatGroup(group).value().createdOn().isEqualTo(createdOn); } @Test public void specifiedMembersAreRespectedForNewGroup() throws Exception { Account.Id member1 = new Account.Id(1); Account.Id member2 = new Account.Id(2); InternalGroupCreation groupCreation = getPrefilledGroupCreationBuilder().build(); InternalGroupUpdate groupUpdate = InternalGroupUpdate.builder() .setMemberModification(members -> ImmutableSet.of(member1, member2)) .build(); createGroup(groupCreation, groupUpdate); Optional<InternalGroup> group = loadGroup(groupCreation.getGroupUUID()); assertThatGroup(group).value().members().containsExactly(member1, member2); } @Test public void specifiedSubgroupsAreRespectedForNewGroup() throws Exception { AccountGroup.UUID subgroup1 = new AccountGroup.UUID("subgroup1"); AccountGroup.UUID subgroup2 = new AccountGroup.UUID("subgroup2"); InternalGroupCreation groupCreation = getPrefilledGroupCreationBuilder().build(); InternalGroupUpdate groupUpdate = InternalGroupUpdate.builder() .setSubgroupModification(subgroups -> ImmutableSet.of(subgroup1, subgroup2)) .build(); createGroup(groupCreation, groupUpdate); Optional<InternalGroup> 
group = loadGroup(groupCreation.getGroupUUID()); assertThatGroup(group).value().subgroups().containsExactly(subgroup1, subgroup2); } @Test public void nameInConfigMayBeUndefined() throws Exception { populateGroupConfig(groupUuid, "[group]\n\tid = 42\n\townerGroupUuid = owners\n"); Optional<InternalGroup> group = loadGroup(groupUuid); assertThatGroup(group).value().name().isEmpty(); } @Test public void nameInConfigMayBeEmpty() throws Exception { populateGroupConfig(groupUuid, "[group]\n\tname=\n\tid = 42\n\townerGroupUuid = owners\n"); Optional<InternalGroup> group = loadGroup(groupUuid); assertThatGroup(group).value().name().isEmpty(); } @Test public void idInConfigMustBeDefined() throws Exception { populateGroupConfig(groupUuid, "[group]\n\tname = users\n\townerGroupUuid = owners\n"); expectedException.expect(ConfigInvalidException.class); expectedException.expectMessage("ID of the group " + groupUuid); GroupConfig.loadForGroup(projectName, repository, groupUuid); } @Test public void idInConfigMustNotBeNegative() throws Exception { populateGroupConfig( groupUuid, "[group]\n\tname = users\n\tid = -5\n\townerGroupUuid = owners\n"); expectedException.expect(ConfigInvalidException.class); expectedException.expectMessage("ID of the group " + groupUuid); GroupConfig.loadForGroup(projectName, repository, groupUuid); } @Test public void descriptionInConfigMayBeUndefined() throws Exception { populateGroupConfig(groupUuid, "[group]\n\tid = 42\n\townerGroupUuid = owners\n"); Optional<InternalGroup> group = loadGroup(groupUuid); assertThatGroup(group).value().description().isNull(); } @Test public void descriptionInConfigMayBeEmpty() throws Exception { populateGroupConfig( groupUuid, "[group]\n\tdescription=\n\tid = 42\n\townerGroupUuid = owners\n"); Optional<InternalGroup> group = loadGroup(groupUuid); assertThatGroup(group).value().description().isNull(); } @Test public void ownerGroupUuidInConfigMustBeDefined() throws Exception { populateGroupConfig(groupUuid, 
"[group]\n\tname = users\n\tid = 42\n"); expectedException.expect(ConfigInvalidException.class); expectedException.expectMessage("Owner UUID of the group " + groupUuid); GroupConfig.loadForGroup(projectName, repository, groupUuid); } @Test public void membersFileNeedNotExist() throws Exception { populateGroupConfig(groupUuid, "[group]\n\tname=users\n\tid = 42\n\townerGroupUuid = owners\n"); Optional<InternalGroup> group = loadGroup(groupUuid); assertThatGroup(group).value().members().isEmpty(); } @Test public void membersFileMayBeEmpty() throws Exception { populateGroupConfig(groupUuid, "[group]\n\tname=users\n\tid = 42\n\townerGroupUuid = owners\n"); populateSubgroupsFile(groupUuid, ""); Optional<InternalGroup> group = loadGroup(groupUuid); assertThatGroup(group).value().members().isEmpty(); } @Test public void membersFileMayContainOnlyWhitespace() throws Exception { populateGroupConfig(groupUuid, "[group]\n\tname=users\n\tid = 42\n\townerGroupUuid = owners\n"); populateMembersFile(groupUuid, "\n\t\n\n"); Optional<InternalGroup> group = loadGroup(groupUuid); assertThatGroup(group).value().members().isEmpty(); } @Test public void membersFileMayUseAnyLineBreakCharacters() throws Exception { populateGroupConfig(groupUuid, "[group]\n\tname=users\n\tid = 42\n\townerGroupUuid = owners\n"); populateMembersFile(groupUuid, "1\n2\n3\r4\r\n5\u20296"); Optional<InternalGroup> group = loadGroup(groupUuid); assertThatGroup(group) .value() .members() .containsExactly( new Account.Id(1), new Account.Id(2), new Account.Id(3), new Account.Id(4), new Account.Id(5), new Account.Id(6)); } @Test public void membersFileMustContainIntegers() throws Exception { populateGroupConfig(groupUuid, "[group]\n\tname=users\n\tid = 42\n\townerGroupUuid = owners\n"); populateMembersFile(groupUuid, "One"); expectedException.expect(ConfigInvalidException.class); expectedException.expectMessage("Invalid file members"); loadGroup(groupUuid); } @Test public void 
membersFileUsesLineBreaksToSeparateMembers() throws Exception { populateGroupConfig(groupUuid, "[group]\n\tname=users\n\tid = 42\n\townerGroupUuid = owners\n"); populateMembersFile(groupUuid, "1\t2"); expectedException.expect(ConfigInvalidException.class); expectedException.expectMessage("Invalid file members"); loadGroup(groupUuid); } @Test public void subgroupsFileNeedNotExist() throws Exception { populateGroupConfig(groupUuid, "[group]\n\tname=users\n\tid = 42\n\townerGroupUuid = owners\n"); Optional<InternalGroup> group = loadGroup(groupUuid); assertThatGroup(group).value().subgroups().isEmpty(); } @Test public void subgroupsFileMayBeEmpty() throws Exception { populateGroupConfig(groupUuid, "[group]\n\tname=users\n\tid = 42\n\townerGroupUuid = owners\n"); populateMembersFile(groupUuid, ""); Optional<InternalGroup> group = loadGroup(groupUuid); assertThatGroup(group).value().subgroups().isEmpty(); } @Test public void subgroupsFileMayContainOnlyWhitespace() throws Exception { populateGroupConfig(groupUuid, "[group]\n\tname=users\n\tid = 42\n\townerGroupUuid = owners\n"); populateSubgroupsFile(groupUuid, "\n\t\n\n"); Optional<InternalGroup> group = loadGroup(groupUuid); assertThatGroup(group).value().subgroups().isEmpty(); } @Test public void subgroupsFileMayUseAnyLineBreakCharacters() throws Exception { populateGroupConfig(groupUuid, "[group]\n\tname=users\n\tid = 42\n\townerGroupUuid = owners\n"); populateSubgroupsFile(groupUuid, "1\n2\n3\r4\r\n5\u20296"); Optional<InternalGroup> group = loadGroup(groupUuid); assertThatGroup(group) .value() .subgroups() .containsExactly( new AccountGroup.UUID("1"), new AccountGroup.UUID("2"), new AccountGroup.UUID("3"), new AccountGroup.UUID("4"), new AccountGroup.UUID("5"), new AccountGroup.UUID("6")); } @Test public void subgroupsFileMayContainSubgroupsWithWhitespaceInUuid() throws Exception { populateGroupConfig(groupUuid, "[group]\n\tname=users\n\tid = 42\n\townerGroupUuid = owners\n"); populateSubgroupsFile(groupUuid, "1\t2 
3"); Optional<InternalGroup> group = loadGroup(groupUuid); assertThatGroup(group).value().subgroups().containsExactly(new AccountGroup.UUID("1\t2 3")); } @Test public void subgroupsFileUsesLineBreaksToSeparateSubgroups() throws Exception { populateGroupConfig(groupUuid, "[group]\n\tname=users\n\tid = 42\n\townerGroupUuid = owners\n"); populateSubgroupsFile(groupUuid, "1\t2\n3"); Optional<InternalGroup> group = loadGroup(groupUuid); assertThatGroup(group) .value() .subgroups() .containsExactly(new AccountGroup.UUID("1\t2"), new AccountGroup.UUID("3")); } @Test public void nameCanBeUpdated() throws Exception { createArbitraryGroup(groupUuid); AccountGroup.NameKey newName = new AccountGroup.NameKey("New name"); InternalGroupUpdate groupUpdate = InternalGroupUpdate.builder().setName(newName).build(); updateGroup(groupUuid, groupUpdate); Optional<InternalGroup> group = loadGroup(groupUuid); assertThatGroup(group).value().nameKey().isEqualTo(newName); } @Test public void nameCannotBeUpdatedToNull() throws Exception { createArbitraryGroup(groupUuid); GroupConfig groupConfig = GroupConfig.loadForGroup(projectName, repository, groupUuid); InternalGroupUpdate groupUpdate = InternalGroupUpdate.builder().setName(new AccountGroup.NameKey(null)).build(); groupConfig.setGroupUpdate(groupUpdate, auditLogFormatter); try (MetaDataUpdate metaDataUpdate = createMetaDataUpdate()) { expectedException.expectCause(instanceOf(ConfigInvalidException.class)); expectedException.expectMessage("Name of the group " + groupUuid); groupConfig.commit(metaDataUpdate); } } @Test public void nameCannotBeUpdatedToEmptyString() throws Exception { createArbitraryGroup(groupUuid); GroupConfig groupConfig = GroupConfig.loadForGroup(projectName, repository, groupUuid); InternalGroupUpdate groupUpdate = InternalGroupUpdate.builder().setName(new AccountGroup.NameKey("")).build(); groupConfig.setGroupUpdate(groupUpdate, auditLogFormatter); try (MetaDataUpdate metaDataUpdate = createMetaDataUpdate()) { 
expectedException.expectCause(instanceOf(ConfigInvalidException.class)); expectedException.expectMessage("Name of the group " + groupUuid); groupConfig.commit(metaDataUpdate); } } @Test public void nameCanBeUpdatedToEmptyStringIfExplicitlySpecified() throws Exception { createArbitraryGroup(groupUuid); AccountGroup.NameKey emptyName = new AccountGroup.NameKey(""); GroupConfig groupConfig = GroupConfig.loadForGroup(projectName, repository, groupUuid); groupConfig.setAllowSaveEmptyName(); InternalGroupUpdate groupUpdate = InternalGroupUpdate.builder().setName(emptyName).build(); groupConfig.setGroupUpdate(groupUpdate, auditLogFormatter); commit(groupConfig); Optional<InternalGroup> group = loadGroup(groupUuid); assertThatGroup(group).value().nameKey().isEqualTo(emptyName); } @Test public void descriptionCanBeUpdated() throws Exception { createArbitraryGroup(groupUuid); String newDescription = "New description"; InternalGroupUpdate groupUpdate = InternalGroupUpdate.builder().setDescription(newDescription).build(); updateGroup(groupUuid, groupUpdate); Optional<InternalGroup> group = loadGroup(groupUuid); assertThatGroup(group).value().description().isEqualTo(newDescription); } @Test public void descriptionCanBeRemoved() throws Exception { createArbitraryGroup(groupUuid); InternalGroupUpdate groupUpdate = InternalGroupUpdate.builder().setDescription("").build(); Optional<InternalGroup> group = updateGroup(groupUuid, groupUpdate); assertThatGroup(group).value().description().isNull(); } @Test public void ownerGroupUuidCanBeUpdated() throws Exception { createArbitraryGroup(groupUuid); AccountGroup.UUID newOwnerGroupUuid = new AccountGroup.UUID("New owner"); InternalGroupUpdate groupUpdate = InternalGroupUpdate.builder().setOwnerGroupUUID(newOwnerGroupUuid).build(); updateGroup(groupUuid, groupUpdate); Optional<InternalGroup> group = loadGroup(groupUuid); assertThatGroup(group).value().ownerGroupUuid().isEqualTo(newOwnerGroupUuid); } @Test public void 
ownerGroupUuidCannotBeUpdatedToNull() throws Exception { createArbitraryGroup(groupUuid); GroupConfig groupConfig = GroupConfig.loadForGroup(projectName, repository, groupUuid); InternalGroupUpdate groupUpdate = InternalGroupUpdate.builder().setOwnerGroupUUID(new AccountGroup.UUID(null)).build(); groupConfig.setGroupUpdate(groupUpdate, auditLogFormatter); try (MetaDataUpdate metaDataUpdate = createMetaDataUpdate()) { expectedException.expectCause(instanceOf(ConfigInvalidException.class)); expectedException.expectMessage("Owner UUID of the group " + groupUuid); groupConfig.commit(metaDataUpdate); } } @Test public void ownerGroupUuidCannotBeUpdatedToEmptyString() throws Exception { createArbitraryGroup(groupUuid); GroupConfig groupConfig = GroupConfig.loadForGroup(projectName, repository, groupUuid); InternalGroupUpdate groupUpdate = InternalGroupUpdate.builder().setOwnerGroupUUID(new AccountGroup.UUID("")).build(); groupConfig.setGroupUpdate(groupUpdate, auditLogFormatter); try (MetaDataUpdate metaDataUpdate = createMetaDataUpdate()) { expectedException.expectCause(instanceOf(ConfigInvalidException.class)); expectedException.expectMessage("Owner UUID of the group " + groupUuid); groupConfig.commit(metaDataUpdate); } } @Test public void visibleToAllCanBeUpdated() throws Exception { createArbitraryGroup(groupUuid); boolean oldVisibleAll = loadGroup(groupUuid).map(InternalGroup::isVisibleToAll).orElse(false); InternalGroupUpdate groupUpdate = InternalGroupUpdate.builder().setVisibleToAll(!oldVisibleAll).build(); updateGroup(groupUuid, groupUpdate); Optional<InternalGroup> group = loadGroup(groupUuid); assertThatGroup(group).value().visibleToAll().isEqualTo(!oldVisibleAll); } @Test public void createdOnIsNotAffectedByFurtherUpdates() throws Exception { Timestamp createdOn = toTimestamp(LocalDate.of(2017, Month.MAY, 11).atTime(13, 44, 10)); Timestamp updatedOn = toTimestamp(LocalDate.of(2017, Month.DECEMBER, 12).atTime(10, 21, 49)); InternalGroupCreation groupCreation = 
getPrefilledGroupCreationBuilder().build(); InternalGroupUpdate initialGroupUpdate = InternalGroupUpdate.builder().setUpdatedOn(createdOn).build(); createGroup(groupCreation, initialGroupUpdate); InternalGroupUpdate laterGroupUpdate = InternalGroupUpdate.builder() .setName(new AccountGroup.NameKey("Another name")) .setUpdatedOn(updatedOn) .build(); Optional<InternalGroup> group = updateGroup(groupCreation.getGroupUUID(), laterGroupUpdate); assertThatGroup(group).value().createdOn().isEqualTo(createdOn); Optional<InternalGroup> reloadedGroup = loadGroup(groupUuid); assertThatGroup(reloadedGroup).value().createdOn().isEqualTo(createdOn); } @Test public void membersCanBeAdded() throws Exception { createArbitraryGroup(groupUuid); Account.Id member1 = new Account.Id(1); Account.Id member2 = new Account.Id(2); InternalGroupUpdate groupUpdate1 = InternalGroupUpdate.builder() .setMemberModification(members -> ImmutableSet.of(member1)) .build(); updateGroup(groupUuid, groupUpdate1); InternalGroupUpdate groupUpdate2 = InternalGroupUpdate.builder() .setMemberModification(members -> Sets.union(members, ImmutableSet.of(member2))) .build(); updateGroup(groupUuid, groupUpdate2); Optional<InternalGroup> group = loadGroup(groupUuid); assertThatGroup(group).value().members().containsExactly(member1, member2); } @Test public void membersCanBeDeleted() throws Exception { createArbitraryGroup(groupUuid); Account.Id member1 = new Account.Id(1); Account.Id member2 = new Account.Id(2); InternalGroupUpdate groupUpdate1 = InternalGroupUpdate.builder() .setMemberModification(members -> ImmutableSet.of(member1, member2)) .build(); updateGroup(groupUuid, groupUpdate1); InternalGroupUpdate groupUpdate2 = InternalGroupUpdate.builder() .setMemberModification(members -> Sets.difference(members, ImmutableSet.of(member1))) .build(); updateGroup(groupUuid, groupUpdate2); Optional<InternalGroup> group = loadGroup(groupUuid); assertThatGroup(group).value().members().containsExactly(member2); } @Test 
public void subgroupsCanBeAdded() throws Exception {
  createArbitraryGroup(groupUuid);
  AccountGroup.UUID subgroup1 = new AccountGroup.UUID("subgroups1");
  AccountGroup.UUID subgroup2 = new AccountGroup.UUID("subgroups2");
  InternalGroupUpdate groupUpdate1 =
      InternalGroupUpdate.builder()
          .setSubgroupModification(subgroups -> ImmutableSet.of(subgroup1))
          .build();
  updateGroup(groupUuid, groupUpdate1);
  // The second update extends the subgroups it is handed, so both must survive.
  InternalGroupUpdate groupUpdate2 =
      InternalGroupUpdate.builder()
          .setSubgroupModification(subgroups -> Sets.union(subgroups, ImmutableSet.of(subgroup2)))
          .build();
  updateGroup(groupUuid, groupUpdate2);
  Optional<InternalGroup> group = loadGroup(groupUuid);
  assertThatGroup(group).value().subgroups().containsExactly(subgroup1, subgroup2);
}

// Subgroup removal is expressed as a set difference on the current subgroups.
@Test
public void subgroupsCanBeDeleted() throws Exception {
  createArbitraryGroup(groupUuid);
  AccountGroup.UUID subgroup1 = new AccountGroup.UUID("subgroups1");
  AccountGroup.UUID subgroup2 = new AccountGroup.UUID("subgroups2");
  InternalGroupUpdate groupUpdate1 =
      InternalGroupUpdate.builder()
          .setSubgroupModification(members -> ImmutableSet.of(subgroup1, subgroup2))
          .build();
  updateGroup(groupUuid, groupUpdate1);
  InternalGroupUpdate groupUpdate2 =
      InternalGroupUpdate.builder()
          .setSubgroupModification(
              members -> Sets.difference(members, ImmutableSet.of(subgroup1)))
          .build();
  updateGroup(groupUuid, groupUpdate2);
  Optional<InternalGroup> group = loadGroup(groupUuid);
  assertThatGroup(group).value().subgroups().containsExactly(subgroup2);
}

// After a commit, GroupConfig already exposes the created group without an
// explicit reload.
@Test
public void createdGroupIsLoadedAutomatically() throws Exception {
  InternalGroupCreation groupCreation = getPrefilledGroupCreationBuilder().build();
  Optional<InternalGroup> group = createGroup(groupCreation);
  assertThat(group).isPresent();
}

// The group returned right after creation must equal the one read back from
// the repository (no lossy round trip) when only mandatory properties are set.
@Test
public void loadedNewGroupWithMandatoryPropertiesDoesNotChangeOnReload() throws Exception {
  InternalGroupCreation groupCreation = getPrefilledGroupCreationBuilder().build();
  Optional<InternalGroup> createdGroup = createGroup(groupCreation);
  Optional<InternalGroup> reloadedGroup = loadGroup(groupCreation.getGroupUUID());
  assertThat(createdGroup).isEqualTo(reloadedGroup);
}

// Same round-trip check with every updatable property populated.
@Test
public void loadedNewGroupWithAllPropertiesDoesNotChangeOnReload() throws Exception {
  InternalGroupCreation groupCreation = getPrefilledGroupCreationBuilder().build();
  InternalGroupUpdate groupUpdate =
      InternalGroupUpdate.builder()
          .setDescription("A test group")
          .setOwnerGroupUUID(new AccountGroup.UUID("another owner"))
          .setVisibleToAll(true)
          .setName(new AccountGroup.NameKey("Another name"))
          .setUpdatedOn(new Timestamp(92900892))
          .setMemberModification(members -> ImmutableSet.of(new Account.Id(1), new Account.Id(2)))
          .setSubgroupModification(
              subgroups -> ImmutableSet.of(new AccountGroup.UUID("subgroup")))
          .build();
  Optional<InternalGroup> createdGroup = createGroup(groupCreation, groupUpdate);
  Optional<InternalGroup> reloadedGroup = loadGroup(groupCreation.getGroupUUID());
  assertThat(createdGroup).isEqualTo(reloadedGroup);
}

// The group returned by an update that touches every property must equal the
// reloaded state as well.
@Test
public void loadedGroupAfterUpdatesForAllPropertiesDoesNotChangeOnReload() throws Exception {
  createArbitraryGroup(groupUuid);
  InternalGroupUpdate groupUpdate =
      InternalGroupUpdate.builder()
          .setDescription("A test group")
          .setOwnerGroupUUID(new AccountGroup.UUID("another owner"))
          .setVisibleToAll(true)
          .setName(new AccountGroup.NameKey("Another name"))
          .setUpdatedOn(new Timestamp(92900892))
          .setMemberModification(members -> ImmutableSet.of(new Account.Id(1), new Account.Id(2)))
          .setSubgroupModification(
              subgroups -> ImmutableSet.of(new AccountGroup.UUID("subgroup")))
          .build();
  Optional<InternalGroup> updatedGroup = updateGroup(groupUuid, groupUpdate);
  Optional<InternalGroup> reloadedGroup = loadGroup(groupUuid);
  assertThat(updatedGroup).isEqualTo(reloadedGroup);
}

@Test
public void loadedGroupWithAllPropertiesAndUpdateOfSinglePropertyDoesNotChangeOnReload()
    throws Exception {
  // Create a group with all properties set.
InternalGroupCreation groupCreation = getPrefilledGroupCreationBuilder().build();
  InternalGroupUpdate initialGroupUpdate =
      InternalGroupUpdate.builder()
          .setDescription("A test group")
          .setOwnerGroupUUID(new AccountGroup.UUID("another owner"))
          .setVisibleToAll(true)
          .setName(new AccountGroup.NameKey("Another name"))
          .setUpdatedOn(new Timestamp(92900892))
          .setMemberModification(members -> ImmutableSet.of(new Account.Id(1), new Account.Id(2)))
          .setSubgroupModification(
              subgroups -> ImmutableSet.of(new AccountGroup.UUID("subgroup")))
          .build();
  createGroup(groupCreation, initialGroupUpdate);
  // Only update one of the properties.
  InternalGroupUpdate groupUpdate =
      InternalGroupUpdate.builder().setName(new AccountGroup.NameKey("Another name")).build();
  Optional<InternalGroup> updatedGroup = updateGroup(groupCreation.getGroupUUID(), groupUpdate);
  Optional<InternalGroup> reloadedGroup = loadGroup(groupCreation.getGroupUUID());
  assertThat(updatedGroup).isEqualTo(reloadedGroup);
}

// A single GroupConfig instance may be committed repeatedly with fresh
// updates; all of them must end up persisted.
@Test
public void groupConfigMayBeReusedForFurtherUpdates() throws Exception {
  InternalGroupCreation groupCreation =
      getPrefilledGroupCreationBuilder().setGroupUUID(groupUuid).setId(groupId).build();
  GroupConfig groupConfig = GroupConfig.createForNewGroup(projectName, repository, groupCreation);
  commit(groupConfig);
  AccountGroup.NameKey name = new AccountGroup.NameKey("Robots");
  InternalGroupUpdate groupUpdate1 = InternalGroupUpdate.builder().setName(name).build();
  groupConfig.setGroupUpdate(groupUpdate1, auditLogFormatter);
  commit(groupConfig);
  String description = "Test group for robots";
  InternalGroupUpdate groupUpdate2 =
      InternalGroupUpdate.builder().setDescription(description).build();
  groupConfig.setGroupUpdate(groupUpdate2, auditLogFormatter);
  commit(groupConfig);
  Optional<InternalGroup> group = loadGroup(groupUuid);
  assertThatGroup(group).value().id().isEqualTo(groupId);
  assertThatGroup(group).value().nameKey().isEqualTo(name);
  assertThatGroup(group).value().description().isEqualTo(description);
}

// Creating a group produces a ref whose tip is a root commit (no parents).
@Test
public void newGroupIsRepresentedByARefPointingToARootCommit() throws Exception {
  createArbitraryGroup(groupUuid);
  Ref ref = repository.exactRef(RefNames.refsGroups(groupUuid));
  assertThat(ref.getObjectId()).isNotNull();
  try (RevWalk revWalk = new RevWalk(repository)) {
    RevCommit revCommit = revWalk.parseCommit(ref.getObjectId());
    assertThat(revCommit.getParentCount()).isEqualTo(0);
  }
}

// Each update appends exactly one commit whose parent is the previous tip.
@Test
public void updatedGroupIsRepresentedByARefPointingToACommitSequence() throws Exception {
  createArbitraryGroup(groupUuid);
  RevCommit commitAfterCreation = getLatestCommitForGroup(groupUuid);
  InternalGroupUpdate groupUpdate =
      InternalGroupUpdate.builder().setName(new AccountGroup.NameKey("Another name")).build();
  updateGroup(groupUuid, groupUpdate);
  RevCommit commitAfterUpdate = getLatestCommitForGroup(groupUuid);
  assertThat(commitAfterUpdate).isNotEqualTo(commitAfterCreation);
  assertThat(commitAfterUpdate.getParents()).asList().containsExactly(commitAfterCreation);
}

// An update that changes nothing must not create a new commit.
@Test
public void newCommitIsNotCreatedForEmptyUpdate() throws Exception {
  createArbitraryGroup(groupUuid);
  InternalGroupUpdate groupUpdate = InternalGroupUpdate.builder().build();
  RevCommit commitBeforeUpdate = getLatestCommitForGroup(groupUuid);
  updateGroup(groupUuid, groupUpdate);
  RevCommit commitAfterUpdate = getLatestCommitForGroup(groupUuid);
  assertThat(commitAfterUpdate).isEqualTo(commitBeforeUpdate);
}

// An update that only sets updatedOn (no real property change) is a no-op.
@Test
public void newCommitIsNotCreatedForPureUpdatedOnUpdate() throws Exception {
  createArbitraryGroup(groupUuid);
  Timestamp updatedOn = toTimestamp(LocalDate.of(3017, Month.DECEMBER, 12).atTime(10, 21, 49));
  InternalGroupUpdate groupUpdate =
      InternalGroupUpdate.builder().setUpdatedOn(updatedOn).build();
  RevCommit commitBeforeUpdate = getLatestCommitForGroup(groupUuid);
  updateGroup(groupUuid, groupUpdate);
  RevCommit commitAfterUpdate = getLatestCommitForGroup(groupUuid);
  assertThat(commitAfterUpdate).isEqualTo(commitBeforeUpdate);
}

// Re-applying the same name is redundant and must not create a commit.
@Test
public void newCommitIsNotCreatedForRedundantNameUpdate() throws Exception {
  createArbitraryGroup(groupUuid);
  InternalGroupUpdate groupUpdate = InternalGroupUpdate.builder().setName(groupName).build();
  updateGroup(groupUuid, groupUpdate);
  RevCommit commitBeforeUpdate = getLatestCommitForGroup(groupUuid);
  updateGroup(groupUuid, groupUpdate);
  RevCommit commitAfterUpdate = getLatestCommitForGroup(groupUuid);
  assertThat(commitAfterUpdate).isEqualTo(commitBeforeUpdate);
}

// Re-applying the same description is redundant and must not create a commit.
@Test
public void newCommitIsNotCreatedForRedundantDescriptionUpdate() throws Exception {
  createArbitraryGroup(groupUuid);
  InternalGroupUpdate groupUpdate =
      InternalGroupUpdate.builder().setDescription("A test group").build();
  updateGroup(groupUuid, groupUpdate);
  RevCommit commitBeforeUpdate = getLatestCommitForGroup(groupUuid);
  updateGroup(groupUuid, groupUpdate);
  RevCommit commitAfterUpdate = getLatestCommitForGroup(groupUuid);
  assertThat(commitAfterUpdate).isEqualTo(commitBeforeUpdate);
}

// Re-applying the same visibleToAll value must not create a commit.
@Test
public void newCommitIsNotCreatedForRedundantVisibleToAllUpdate() throws Exception {
  createArbitraryGroup(groupUuid);
  InternalGroupUpdate groupUpdate = InternalGroupUpdate.builder().setVisibleToAll(true).build();
  updateGroup(groupUuid, groupUpdate);
  RevCommit commitBeforeUpdate = getLatestCommitForGroup(groupUuid);
  updateGroup(groupUuid, groupUpdate);
  RevCommit commitAfterUpdate = getLatestCommitForGroup(groupUuid);
  assertThat(commitAfterUpdate).isEqualTo(commitBeforeUpdate);
}

// Re-applying the same owner UUID must not create a commit.
@Test
public void newCommitIsNotCreatedForRedundantOwnerGroupUuidUpdate() throws Exception {
  createArbitraryGroup(groupUuid);
  InternalGroupUpdate groupUpdate =
      InternalGroupUpdate.builder()
          .setOwnerGroupUUID(new AccountGroup.UUID("Another owner"))
          .build();
  updateGroup(groupUuid, groupUpdate);
  RevCommit commitBeforeUpdate = getLatestCommitForGroup(groupUuid);
  updateGroup(groupUuid, groupUpdate);
  RevCommit commitAfterUpdate = getLatestCommitForGroup(groupUuid);
  assertThat(commitAfterUpdate).isEqualTo(commitBeforeUpdate);
}
// A member modification that yields the same member set must not create a commit.
@Test
public void newCommitIsNotCreatedForRedundantMemberUpdate() throws Exception {
  createArbitraryGroup(groupUuid);
  InternalGroupUpdate groupUpdate =
      InternalGroupUpdate.builder()
          .setMemberModification(
              members -> Sets.union(members, ImmutableSet.of(new Account.Id(10))))
          .build();
  updateGroup(groupUuid, groupUpdate);
  RevCommit commitBeforeUpdate = getLatestCommitForGroup(groupUuid);
  updateGroup(groupUuid, groupUpdate);
  RevCommit commitAfterUpdate = getLatestCommitForGroup(groupUuid);
  assertThat(commitAfterUpdate).isEqualTo(commitBeforeUpdate);
}

// A subgroup modification that yields the same subgroup set must not create a commit.
@Test
public void newCommitIsNotCreatedForRedundantSubgroupsUpdate() throws Exception {
  createArbitraryGroup(groupUuid);
  InternalGroupUpdate groupUpdate =
      InternalGroupUpdate.builder()
          .setSubgroupModification(
              subgroups ->
                  Sets.union(subgroups, ImmutableSet.of(new AccountGroup.UUID("subgroup"))))
          .build();
  updateGroup(groupUuid, groupUpdate);
  RevCommit commitBeforeUpdate = getLatestCommitForGroup(groupUuid);
  updateGroup(groupUuid, groupUpdate);
  RevCommit commitAfterUpdate = getLatestCommitForGroup(groupUuid);
  assertThat(commitAfterUpdate).isEqualTo(commitBeforeUpdate);
}

// Committing the same GroupConfig (creation) a second time is idempotent.
@Test
public void newCommitIsNotCreatedWhenCommittingGroupCreationTwice() throws Exception {
  InternalGroupCreation groupCreation =
      getPrefilledGroupCreationBuilder().setGroupUUID(groupUuid).build();
  InternalGroupUpdate groupUpdate =
      InternalGroupUpdate.builder().setName(new AccountGroup.NameKey("Another name")).build();
  GroupConfig groupConfig = GroupConfig.createForNewGroup(projectName, repository, groupCreation);
  groupConfig.setGroupUpdate(groupUpdate, auditLogFormatter);
  commit(groupConfig);
  RevCommit commitBeforeSecondCommit = getLatestCommitForGroup(groupUuid);
  commit(groupConfig);
  RevCommit commitAfterSecondCommit = getLatestCommitForGroup(groupUuid);
  assertThat(commitAfterSecondCommit).isEqualTo(commitBeforeSecondCommit);
}

// Committing the same GroupConfig (update) a second time is idempotent.
@Test
public void newCommitIsNotCreatedWhenCommittingGroupUpdateTwice() throws Exception {
  createArbitraryGroup(groupUuid);
  InternalGroupUpdate groupUpdate =
      InternalGroupUpdate.builder().setDescription("A test group").build();
  GroupConfig groupConfig = GroupConfig.loadForGroup(projectName, repository, groupUuid);
  groupConfig.setGroupUpdate(groupUpdate, auditLogFormatter);
  commit(groupConfig);
  RevCommit commitBeforeSecondCommit = getLatestCommitForGroup(groupUuid);
  commit(groupConfig);
  RevCommit commitAfterSecondCommit = getLatestCommitForGroup(groupUuid);
  assertThat(commitAfterSecondCommit).isEqualTo(commitBeforeSecondCommit);
}

// Without an explicit createdOn, the commit time defaults to "now".
@Test
public void commitTimeMatchesDefaultCreatedOnOfNewGroup() throws Exception {
  // Git timestamps are only precise to the second.
  long testStartAsSecondsSinceEpoch = TimeUtil.nowTs().getTime() / 1000;
  InternalGroupCreation groupCreation =
      InternalGroupCreation.builder()
          .setGroupUUID(groupUuid)
          .setNameKey(groupName)
          .setId(groupId)
          .build();
  createGroup(groupCreation);
  RevCommit revCommit = getLatestCommitForGroup(groupUuid);
  assertThat(revCommit.getCommitTime()).isAtLeast((int) testStartAsSecondsSinceEpoch);
}

// An explicit createdOn (via updatedOn on the initial update) becomes the commit time.
@Test
public void commitTimeMatchesSpecifiedCreatedOnOfNewGroup() throws Exception {
  // Git timestamps are only precise to the second.
  long createdOnAsSecondsSinceEpoch = 9082093;
  InternalGroupCreation groupCreation =
      InternalGroupCreation.builder()
          .setGroupUUID(groupUuid)
          .setNameKey(groupName)
          .setId(groupId)
          .build();
  InternalGroupUpdate groupUpdate =
      InternalGroupUpdate.builder()
          .setUpdatedOn(new Timestamp(createdOnAsSecondsSinceEpoch * 1000))
          .build();
  createGroup(groupCreation, groupUpdate);
  RevCommit revCommit = getLatestCommitForGroup(groupUuid);
  assertThat(revCommit.getCommitTime()).isEqualTo(createdOnAsSecondsSinceEpoch);
}

// The specified createdOn overrides the timestamp of a caller-supplied
// committer ident, but the ident's time zone is preserved.
@Test
public void timestampOfCommitterMatchesSpecifiedCreatedOnOfNewGroup() throws Exception {
  Timestamp committerTimestamp =
      toTimestamp(LocalDate.of(2017, Month.DECEMBER, 13).atTime(15, 5, 27));
  Timestamp createdOn = toTimestamp(LocalDate.of(2016, Month.MARCH, 11).atTime(23, 49, 11));
  InternalGroupCreation groupCreation =
      InternalGroupCreation.builder()
          .setGroupUUID(groupUuid)
          .setNameKey(groupName)
          .setId(groupId)
          .build();
  InternalGroupUpdate groupUpdate =
      InternalGroupUpdate.builder()
          .setName(new AccountGroup.NameKey("Another name"))
          .setUpdatedOn(createdOn)
          .build();
  GroupConfig groupConfig = GroupConfig.createForNewGroup(projectName, repository, groupCreation);
  groupConfig.setGroupUpdate(groupUpdate, auditLogFormatter);
  PersonIdent committerIdent =
      new PersonIdent("Jane", "Jane@gerritcodereview.com", committerTimestamp, timeZone);
  try (MetaDataUpdate metaDataUpdate = createMetaDataUpdate()) {
    metaDataUpdate.getCommitBuilder().setCommitter(committerIdent);
    groupConfig.commit(metaDataUpdate);
  }
  RevCommit revCommit = getLatestCommitForGroup(groupUuid);
  assertThat(revCommit.getCommitterIdent().getWhen()).isEqualTo(createdOn);
  assertThat(revCommit.getCommitterIdent().getTimeZone().getRawOffset())
      .isEqualTo(timeZone.getRawOffset());
}

// Same check for the author ident: createdOn wins over the supplied timestamp.
@Test
public void timestampOfAuthorMatchesSpecifiedCreatedOnOfNewGroup() throws Exception {
  Timestamp authorTimestamp =
      toTimestamp(LocalDate.of(2017, Month.DECEMBER, 13).atTime(15, 5, 27));
  Timestamp createdOn =
toTimestamp(LocalDate.of(2016, Month.MARCH, 11).atTime(23, 49, 11));
  InternalGroupCreation groupCreation =
      InternalGroupCreation.builder()
          .setGroupUUID(groupUuid)
          .setNameKey(groupName)
          .setId(groupId)
          .build();
  InternalGroupUpdate groupUpdate =
      InternalGroupUpdate.builder()
          .setName(new AccountGroup.NameKey("Another name"))
          .setUpdatedOn(createdOn)
          .build();
  GroupConfig groupConfig = GroupConfig.createForNewGroup(projectName, repository, groupCreation);
  groupConfig.setGroupUpdate(groupUpdate, auditLogFormatter);
  PersonIdent authorIdent =
      new PersonIdent("Jane", "Jane@gerritcodereview.com", authorTimestamp, timeZone);
  try (MetaDataUpdate metaDataUpdate = createMetaDataUpdate()) {
    metaDataUpdate.getCommitBuilder().setAuthor(authorIdent);
    groupConfig.commit(metaDataUpdate);
  }
  RevCommit revCommit = getLatestCommitForGroup(groupUuid);
  assertThat(revCommit.getAuthorIdent().getWhen()).isEqualTo(createdOn);
  assertThat(revCommit.getAuthorIdent().getTimeZone().getRawOffset())
      .isEqualTo(timeZone.getRawOffset());
}

// Without an explicit updatedOn, an update's commit time defaults to "now".
@Test
public void commitTimeMatchesDefaultUpdatedOnOfUpdatedGroup() throws Exception {
  // Git timestamps are only precise to the second.
  long testStartAsSecondsSinceEpoch = TimeUtil.nowTs().getTime() / 1000;
  createArbitraryGroup(groupUuid);
  InternalGroupUpdate groupUpdate =
      InternalGroupUpdate.builder().setName(new AccountGroup.NameKey("Another name")).build();
  updateGroup(groupUuid, groupUpdate);
  RevCommit revCommit = getLatestCommitForGroup(groupUuid);
  assertThat(revCommit.getCommitTime()).isAtLeast((int) testStartAsSecondsSinceEpoch);
}

// An explicit updatedOn becomes the commit time of the update commit.
@Test
public void commitTimeMatchesSpecifiedUpdatedOnOfUpdatedGroup() throws Exception {
  // Git timestamps are only precise to the second.
  long updatedOnAsSecondsSinceEpoch = 9082093;
  createArbitraryGroup(groupUuid);
  InternalGroupUpdate groupUpdate =
      InternalGroupUpdate.builder()
          .setName(new AccountGroup.NameKey("Another name"))
          .setUpdatedOn(new Timestamp(updatedOnAsSecondsSinceEpoch * 1000))
          .build();
  updateGroup(groupUuid, groupUpdate);
  RevCommit revCommit = getLatestCommitForGroup(groupUuid);
  assertThat(revCommit.getCommitTime()).isEqualTo(updatedOnAsSecondsSinceEpoch);
}

// updatedOn overrides the timestamp of a caller-supplied committer ident;
// the ident's time zone is preserved.
@Test
public void timestampOfCommitterMatchesSpecifiedUpdatedOnOfUpdatedGroup() throws Exception {
  Timestamp committerTimestamp =
      toTimestamp(LocalDate.of(2017, Month.DECEMBER, 13).atTime(15, 5, 27));
  Timestamp updatedOn = toTimestamp(LocalDate.of(2016, Month.MARCH, 11).atTime(23, 49, 11));
  createArbitraryGroup(groupUuid);
  InternalGroupUpdate groupUpdate =
      InternalGroupUpdate.builder()
          .setName(new AccountGroup.NameKey("Another name"))
          .setUpdatedOn(updatedOn)
          .build();
  GroupConfig groupConfig = GroupConfig.loadForGroup(projectName, repository, groupUuid);
  groupConfig.setGroupUpdate(groupUpdate, auditLogFormatter);
  PersonIdent committerIdent =
      new PersonIdent("Jane", "Jane@gerritcodereview.com", committerTimestamp, timeZone);
  try (MetaDataUpdate metaDataUpdate = createMetaDataUpdate()) {
    metaDataUpdate.getCommitBuilder().setCommitter(committerIdent);
    groupConfig.commit(metaDataUpdate);
  }
  RevCommit revCommit = getLatestCommitForGroup(groupUuid);
  assertThat(revCommit.getCommitterIdent().getWhen()).isEqualTo(updatedOn);
  assertThat(revCommit.getCommitterIdent().getTimeZone().getRawOffset())
      .isEqualTo(timeZone.getRawOffset());
}

// Same check for the author ident of an update commit.
@Test
public void timestampOfAuthorMatchesSpecifiedUpdatedOnOfUpdatedGroup() throws Exception {
  Timestamp authorTimestamp =
      toTimestamp(LocalDate.of(2017, Month.DECEMBER, 13).atTime(15, 5, 27));
  Timestamp updatedOn = toTimestamp(LocalDate.of(2016, Month.MARCH, 11).atTime(23, 49, 11));
  createArbitraryGroup(groupUuid);
  InternalGroupUpdate groupUpdate =
      InternalGroupUpdate.builder()
          .setName(new AccountGroup.NameKey("Another name"))
          .setUpdatedOn(updatedOn)
          .build();
  GroupConfig groupConfig = GroupConfig.loadForGroup(projectName, repository, groupUuid);
  groupConfig.setGroupUpdate(groupUpdate, auditLogFormatter);
  PersonIdent authorIdent =
      new PersonIdent("Jane", "Jane@gerritcodereview.com", authorTimestamp, timeZone);
  try (MetaDataUpdate metaDataUpdate = createMetaDataUpdate()) {
    metaDataUpdate.getCommitBuilder().setAuthor(authorIdent);
    groupConfig.commit(metaDataUpdate);
  }
  RevCommit revCommit = getLatestCommitForGroup(groupUuid);
  assertThat(revCommit.getAuthorIdent().getWhen()).isEqualTo(updatedOn);
  assertThat(revCommit.getAuthorIdent().getTimeZone().getRawOffset())
      .isEqualTo(timeZone.getRawOffset());
}

// A loaded group's refState carries the SHA-1 of the ref's tip commit.
@Test
public void refStateOfLoadedGroupIsPopulatedWithCommitSha1() throws Exception {
  createArbitraryGroup(groupUuid);
  Optional<InternalGroup> group = loadGroup(groupUuid);
  RevCommit revCommit = getLatestCommitForGroup(groupUuid);
  assertThatGroup(group).value().refState().isEqualTo(revCommit.copy());
}

// loadForGroupSnapshot reads the group state as of a given commit, not the tip.
@Test
public void groupCanBeLoadedAtASpecificRevision() throws Exception {
  createArbitraryGroup(groupUuid);
  AccountGroup.NameKey firstName = new AccountGroup.NameKey("Bots");
  InternalGroupUpdate groupUpdate1 = InternalGroupUpdate.builder().setName(firstName).build();
  updateGroup(groupUuid, groupUpdate1);
  RevCommit commitAfterUpdate1 = getLatestCommitForGroup(groupUuid);
  InternalGroupUpdate groupUpdate2 =
      InternalGroupUpdate.builder().setName(new AccountGroup.NameKey("Robots")).build();
  updateGroup(groupUuid, groupUpdate2);
  GroupConfig groupConfig =
      GroupConfig.loadForGroupSnapshot(
          projectName, repository, groupUuid, commitAfterUpdate1.copy());
  Optional<InternalGroup> group = groupConfig.getLoadedGroup();
  assertThatGroup(group).value().nameKey().isEqualTo(firstName);
  assertThatGroup(group).value().refState().isEqualTo(commitAfterUpdate1.copy());
}

@Test
public void commitMessageOfNewGroupWithoutMembersOrSubgroupsContainsNoFooters()
    throws Exception {
InternalGroupCreation groupCreation =
      getPrefilledGroupCreationBuilder().setGroupUUID(groupUuid).build();
  createGroup(groupCreation);
  RevCommit revCommit = getLatestCommitForGroup(groupUuid);
  assertThat(revCommit.getFullMessage()).isEqualTo("Create group");
}

// Setting a name at creation time is not audit-worthy, so no footers appear.
@Test
public void commitMessageOfNewGroupWithAdditionalNameSpecificationContainsNoFooters()
    throws Exception {
  InternalGroupCreation groupCreation =
      getPrefilledGroupCreationBuilder().setGroupUUID(groupUuid).build();
  InternalGroupUpdate groupUpdate =
      InternalGroupUpdate.builder().setName(new AccountGroup.NameKey("Another name")).build();
  createGroup(groupCreation, groupUpdate);
  RevCommit revCommit = getLatestCommitForGroup(groupUuid);
  assertThat(revCommit.getFullMessage()).isEqualTo("Create group");
}

// Initial members are recorded as "Add:" footers, formatted by the
// AuditLogFormatter with account name and <id@server-id>.
@Test
public void commitMessageOfNewGroupWithMembersContainsFooters() throws Exception {
  Account account13 = createAccount(new Account.Id(13), "John");
  Account account7 = createAccount(new Account.Id(7), "Jane");
  ImmutableSet<Account> accounts = ImmutableSet.of(account13, account7);
  AuditLogFormatter auditLogFormatter =
      AuditLogFormatter.createBackedBy(accounts, ImmutableSet.of(), "server-id");
  InternalGroupCreation groupCreation =
      getPrefilledGroupCreationBuilder().setGroupUUID(groupUuid).build();
  InternalGroupUpdate groupUpdate =
      InternalGroupUpdate.builder()
          .setMemberModification(members -> ImmutableSet.of(account13.getId(), account7.getId()))
          .build();
  GroupConfig groupConfig = GroupConfig.createForNewGroup(projectName, repository, groupCreation);
  groupConfig.setGroupUpdate(groupUpdate, auditLogFormatter);
  commit(groupConfig);
  RevCommit revCommit = getLatestCommitForGroup(groupUuid);
  assertThat(revCommit.getFullMessage())
      .isEqualTo("Create group\n\nAdd: Jane <7@server-id>\nAdd: John <13@server-id>");
}

// Initial subgroups are recorded as "Add-group:" footers with name and UUID.
@Test
public void commitMessageOfNewGroupWithSubgroupsContainsFooters() throws Exception {
  GroupDescription.Basic group1 = createGroup(new AccountGroup.UUID("129403"), "Bots");
  GroupDescription.Basic group2 = createGroup(new AccountGroup.UUID("8903493"), "Verifiers");
  ImmutableSet<GroupDescription.Basic> groups = ImmutableSet.of(group1, group2);
  AuditLogFormatter auditLogFormatter =
      AuditLogFormatter.createBackedBy(ImmutableSet.of(), groups, "serverId");
  InternalGroupCreation groupCreation =
      getPrefilledGroupCreationBuilder().setGroupUUID(groupUuid).build();
  InternalGroupUpdate groupUpdate =
      InternalGroupUpdate.builder()
          .setSubgroupModification(
              subgroups -> ImmutableSet.of(group1.getGroupUUID(), group2.getGroupUUID()))
          .build();
  GroupConfig groupConfig = GroupConfig.createForNewGroup(projectName, repository, groupCreation);
  groupConfig.setGroupUpdate(groupUpdate, auditLogFormatter);
  commit(groupConfig);
  RevCommit revCommit = getLatestCommitForGroup(groupUuid);
  assertThat(revCommit.getFullMessage())
      .isEqualTo("Create group\n\nAdd-group: Bots <129403>\nAdd-group: Verifiers <8903493>");
}

// Member additions on an update commit are likewise recorded as "Add:" footers.
@Test
public void commitMessageOfMemberAdditionContainsFooters() throws Exception {
  Account account13 = createAccount(new Account.Id(13), "John");
  Account account7 = createAccount(new Account.Id(7), "Jane");
  ImmutableSet<Account> accounts = ImmutableSet.of(account13, account7);
  createArbitraryGroup(groupUuid);
  AuditLogFormatter auditLogFormatter =
      AuditLogFormatter.createBackedBy(accounts, ImmutableSet.of(), "GerritServer1");
  InternalGroupUpdate groupUpdate =
      InternalGroupUpdate.builder()
          .setMemberModification(members -> ImmutableSet.of(account13.getId(), account7.getId()))
          .build();
  updateGroup(groupUuid, groupUpdate, auditLogFormatter);
  RevCommit revCommit = getLatestCommitForGroup(groupUuid);
  assertThat(revCommit.getFullMessage())
      .isEqualTo("Update group\n\nAdd: Jane <7@GerritServer1>\nAdd: John <13@GerritServer1>");
}

// Member removals are recorded as "Remove:" footers.
@Test
public void commitMessageOfMemberRemovalContainsFooters() throws Exception {
  Account account13 = createAccount(new Account.Id(13), "John");
  Account account7 = createAccount(new Account.Id(7), "Jane");
  ImmutableSet<Account> accounts = ImmutableSet.of(account13, account7);
  createArbitraryGroup(groupUuid);
  AuditLogFormatter auditLogFormatter =
      AuditLogFormatter.createBackedBy(accounts, ImmutableSet.of(), "server-id");
  InternalGroupUpdate groupUpdate1 =
      InternalGroupUpdate.builder()
          .setMemberModification(members -> ImmutableSet.of(account13.getId(), account7.getId()))
          .build();
  updateGroup(groupUuid, groupUpdate1, auditLogFormatter);
  InternalGroupUpdate groupUpdate2 =
      InternalGroupUpdate.builder()
          .setMemberModification(members -> ImmutableSet.of(account7.getId()))
          .build();
  updateGroup(groupUuid, groupUpdate2, auditLogFormatter);
  RevCommit revCommit = getLatestCommitForGroup(groupUuid);
  assertThat(revCommit.getFullMessage()).isEqualTo("Update group\n\nRemove: John <13@server-id>");
}

// Subgroup additions on an update commit are recorded as "Add-group:" footers.
@Test
public void commitMessageOfSubgroupAdditionContainsFooters() throws Exception {
  GroupDescription.Basic group1 = createGroup(new AccountGroup.UUID("129403"), "Bots");
  GroupDescription.Basic group2 = createGroup(new AccountGroup.UUID("8903493"), "Verifiers");
  ImmutableSet<GroupDescription.Basic> groups = ImmutableSet.of(group1, group2);
  createArbitraryGroup(groupUuid);
  AuditLogFormatter auditLogFormatter =
      AuditLogFormatter.createBackedBy(ImmutableSet.of(), groups, "serverId");
  InternalGroupUpdate groupUpdate =
      InternalGroupUpdate.builder()
          .setSubgroupModification(
              subgroups -> ImmutableSet.of(group1.getGroupUUID(), group2.getGroupUUID()))
          .build();
  updateGroup(groupUuid, groupUpdate, auditLogFormatter);
  RevCommit revCommit = getLatestCommitForGroup(groupUuid);
  assertThat(revCommit.getFullMessage())
      .isEqualTo("Update group\n\nAdd-group: Bots <129403>\nAdd-group: Verifiers <8903493>");
}

// Subgroup removals are recorded as "Remove-group:" footers.
@Test
public void commitMessageOfSubgroupRemovalContainsFooters() throws Exception {
  GroupDescription.Basic group1 = createGroup(new AccountGroup.UUID("129403"), "Bots");
  GroupDescription.Basic group2 = createGroup(new AccountGroup.UUID("8903493"), "Verifiers");
  ImmutableSet<GroupDescription.Basic> groups = ImmutableSet.of(group1, group2);
createArbitraryGroup(groupUuid);
  AuditLogFormatter auditLogFormatter =
      AuditLogFormatter.createBackedBy(ImmutableSet.of(), groups, "serverId");
  InternalGroupUpdate groupUpdate1 =
      InternalGroupUpdate.builder()
          .setSubgroupModification(
              subgroups -> ImmutableSet.of(group1.getGroupUUID(), group2.getGroupUUID()))
          .build();
  updateGroup(groupUuid, groupUpdate1, auditLogFormatter);
  InternalGroupUpdate groupUpdate2 =
      InternalGroupUpdate.builder()
          .setSubgroupModification(subgroups -> ImmutableSet.of(group1.getGroupUUID()))
          .build();
  updateGroup(groupUuid, groupUpdate2, auditLogFormatter);
  RevCommit revCommit = getLatestCommitForGroup(groupUuid);
  assertThat(revCommit.getFullMessage())
      .isEqualTo("Update group\n\nRemove-group: Verifiers <8903493>");
}

// A rename is recorded as a human-readable "Rename from ... to ..." footer.
@Test
public void commitMessageOfGroupRenameContainsFooters() throws Exception {
  createArbitraryGroup(groupUuid);
  InternalGroupUpdate groupUpdate1 =
      InternalGroupUpdate.builder().setName(new AccountGroup.NameKey("Old name")).build();
  updateGroup(groupUuid, groupUpdate1);
  InternalGroupUpdate groupUpdate2 =
      InternalGroupUpdate.builder().setName(new AccountGroup.NameKey("New name")).build();
  updateGroup(groupUuid, groupUpdate2);
  RevCommit revCommit = getLatestCommitForGroup(groupUuid);
  assertThat(revCommit.getFullMessage())
      .isEqualTo("Update group\n\nRename from Old name to New name");
}

// When one update mixes member, subgroup, and name changes, all footers show
// up in a single commit message in a fixed order.
@Test
public void commitMessageFootersCanBeMixed() throws Exception {
  Account account13 = createAccount(new Account.Id(13), "John");
  Account account7 = createAccount(new Account.Id(7), "Jane");
  ImmutableSet<Account> accounts = ImmutableSet.of(account13, account7);
  GroupDescription.Basic group1 = createGroup(new AccountGroup.UUID("129403"), "Bots");
  GroupDescription.Basic group2 = createGroup(new AccountGroup.UUID("8903493"), "Verifiers");
  ImmutableSet<GroupDescription.Basic> groups = ImmutableSet.of(group1, group2);
  createArbitraryGroup(groupUuid);
  AuditLogFormatter auditLogFormatter =
      AuditLogFormatter.createBackedBy(accounts, groups, "serverId");
  InternalGroupUpdate groupUpdate1 =
      InternalGroupUpdate.builder()
          .setName(new AccountGroup.NameKey("Old name"))
          .setMemberModification(members -> ImmutableSet.of(account7.getId()))
          .setSubgroupModification(subgroups -> ImmutableSet.of(group2.getGroupUUID()))
          .build();
  updateGroup(groupUuid, groupUpdate1, auditLogFormatter);
  InternalGroupUpdate groupUpdate2 =
      InternalGroupUpdate.builder()
          .setName(new AccountGroup.NameKey("New name"))
          .setMemberModification(members -> ImmutableSet.of(account13.getId()))
          .setSubgroupModification(subgroups -> ImmutableSet.of(group1.getGroupUUID()))
          .build();
  updateGroup(groupUuid, groupUpdate2, auditLogFormatter);
  RevCommit revCommit = getLatestCommitForGroup(groupUuid);
  assertThat(revCommit.getFullMessage())
      .isEqualTo(
          "Update group\n"
              + "\n"
              + "Add-group: Bots <129403>\n"
              + "Add: John <13@serverId>\n"
              + "Remove-group: Verifiers <8903493>\n"
              + "Remove: Jane <7@serverId>\n"
              + "Rename from Old name to New name");
}

// Converts a local wall-clock time (in the system default zone) to a Timestamp.
private static Timestamp toTimestamp(LocalDateTime localDateTime) {
  return Timestamp.from(localDateTime.atZone(ZoneId.systemDefault()).toInstant());
}

// Writes a raw group.config file directly into the group's ref, bypassing
// GroupConfig, so tests can set up arbitrary (even invalid) on-disk state.
private void populateGroupConfig(AccountGroup.UUID uuid, String fileContent) throws Exception {
  testRepository
      .branch(RefNames.refsGroups(uuid))
      .commit()
      .message("Prepopulate group.config")
      .add(GroupConfig.GROUP_CONFIG_FILE, fileContent)
      .create();
}

// Writes a raw members file directly into the group's ref.
private void populateMembersFile(AccountGroup.UUID uuid, String fileContent) throws Exception {
  testRepository
      .branch(RefNames.refsGroups(uuid))
      .commit()
      .message("Prepopulate members")
      .add(GroupConfig.MEMBERS_FILE, fileContent)
      .create();
}

// Writes a raw subgroups file directly into the group's ref.
private void populateSubgroupsFile(AccountGroup.UUID uuid, String fileContent) throws Exception {
  testRepository
      .branch(RefNames.refsGroups(uuid))
      .commit()
      .message("Prepopulate subgroups")
      .add(GroupConfig.SUBGROUPS_FILE, fileContent)
      .create();
}

// Creates a minimal valid group with the given UUID via the regular API.
private void createArbitraryGroup(AccountGroup.UUID uuid) throws Exception {
  InternalGroupCreation groupCreation =
getPrefilledGroupCreationBuilder().setGroupUUID(uuid).build();
  createGroup(groupCreation);
}

// Builder pre-filled with the test fixture's UUID, name, and id.
private InternalGroupCreation.Builder getPrefilledGroupCreationBuilder() {
  return InternalGroupCreation.builder()
      .setGroupUUID(groupUuid)
      .setNameKey(groupName)
      .setId(groupId);
}

// Creates a group without any additional update and returns its loaded state.
private Optional<InternalGroup> createGroup(InternalGroupCreation groupCreation)
    throws Exception {
  GroupConfig groupConfig = GroupConfig.createForNewGroup(projectName, repository, groupCreation);
  commit(groupConfig);
  return groupConfig.getLoadedGroup();
}

// Creates a group with an initial update applied in the creation commit.
private Optional<InternalGroup> createGroup(
    InternalGroupCreation groupCreation, InternalGroupUpdate groupUpdate) throws Exception {
  GroupConfig groupConfig = GroupConfig.createForNewGroup(projectName, repository, groupCreation);
  groupConfig.setGroupUpdate(groupUpdate, auditLogFormatter);
  commit(groupConfig);
  return groupConfig.getLoadedGroup();
}

// Applies an update using the fixture's default audit log formatter.
private Optional<InternalGroup> updateGroup(
    AccountGroup.UUID uuid, InternalGroupUpdate groupUpdate) throws Exception {
  return updateGroup(uuid, groupUpdate, auditLogFormatter);
}

// Loads the group, applies the update with the given formatter, commits, and
// returns the resulting group state.
private Optional<InternalGroup> updateGroup(
    AccountGroup.UUID uuid, InternalGroupUpdate groupUpdate, AuditLogFormatter auditLogFormatter)
    throws Exception {
  GroupConfig groupConfig = GroupConfig.loadForGroup(projectName, repository, uuid);
  groupConfig.setGroupUpdate(groupUpdate, auditLogFormatter);
  commit(groupConfig);
  return groupConfig.getLoadedGroup();
}

// Reads the current persisted state of the group from the repository.
private Optional<InternalGroup> loadGroup(AccountGroup.UUID uuid) throws Exception {
  GroupConfig groupConfig = GroupConfig.loadForGroup(projectName, repository, uuid);
  return groupConfig.getLoadedGroup();
}

// Commits pending changes; the MetaDataUpdate is closed via try-with-resources.
private void commit(GroupConfig groupConfig) throws IOException {
  try (MetaDataUpdate metaDataUpdate = createMetaDataUpdate()) {
    groupConfig.commit(metaDataUpdate);
  }
}

// Builds a MetaDataUpdate with a fixed server identity as both committer and
// author; ref update events are disabled for tests.
private MetaDataUpdate createMetaDataUpdate() {
  PersonIdent serverIdent =
      new PersonIdent(
          "Gerrit Server", "noreply@gerritcodereview.com", TimeUtil.nowTs(), timeZone);
  MetaDataUpdate metaDataUpdate =
      new MetaDataUpdate(
          GitReferenceUpdated.DISABLED, new Project.NameKey("Test Repository"), repository);
  metaDataUpdate.getCommitBuilder().setCommitter(serverIdent);
  metaDataUpdate.getCommitBuilder().setAuthor(serverIdent);
  return metaDataUpdate;
}

// Resolves the tip commit of the group's ref; fails fast when the ref is missing.
private RevCommit getLatestCommitForGroup(AccountGroup.UUID uuid) throws IOException {
  Ref ref = repository.exactRef(RefNames.refsGroups(uuid));
  assertWithMessage("Precondition: Assumed that ref for group " + uuid + " exists.")
      .that(ref.getObjectId())
      .isNotNull();
  try (RevWalk revWalk = new RevWalk(repository)) {
    return revWalk.parseCommit(ref.getObjectId());
  }
}

// Fabricates an Account with the given id and full name for footer formatting.
private static Account createAccount(Account.Id id, String name) {
  Account account = new Account(id, TimeUtil.nowTs());
  account.setFullName(name);
  return account;
}

// Fabricates a minimal GroupDescription.Basic (no email, no URL) for footer
// formatting of subgroup additions/removals.
private static GroupDescription.Basic createGroup(AccountGroup.UUID uuid, String name) {
  return new GroupDescription.Basic() {
    @Override
    public AccountGroup.UUID getGroupUUID() {
      return uuid;
    }

    @Override
    public String getName() {
      return name;
    }

    @Nullable
    @Override
    public String getEmailAddress() {
      return null;
    }

    @Nullable
    @Override
    public String getUrl() {
      return null;
    }
  };
}

// Truth subject over Optional<InternalGroup> for fluent group assertions.
private static OptionalSubject<InternalGroupSubject, InternalGroup> assertThatGroup(
    Optional<InternalGroup> loadedGroup) {
  return assertThat(loadedGroup, InternalGroupSubject::assertThat);
}
}
/*
 * Sleuth Kit Data Model
 *
 * Copyright 2011 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.datamodel;

import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.HashMap;
import java.util.Map;
import java.util.TimeZone;
import org.sleuthkit.datamodel.TskData.TSK_FS_ATTR_TYPE_ENUM;

/**
 * Interfaces with the Sleuthkit TSK c/c++ libraries Supports case management,
 * add image process, reading data off content objects Setting up Hash database
 * parameters and updating / reading values
 *
 * Caches image and filesystem handles and reuses them for the duration of the
 * application
 */
public class SleuthkitJNI {

    // NOTE(review): not referenced anywhere in this file; kept for
    // backward compatibility in case native code or callers rely on it.
    private static final int MAX_DATABASES = 256;

    //Native methods
    private static native String getVersionNat();

    private static native void startVerboseLoggingNat(String logPath);

    //database
    private static native long newCaseDbNat(String dbPath) throws TskCoreException;

    private static native long openCaseDbNat(String path) throws TskCoreException;

    private static native void closeCaseDbNat(long db) throws TskCoreException;

    private static native int hashDbOpenNat(String hashDbPath) throws TskCoreException;

    private static native int hashDbNewNat(String hashDbPath) throws TskCoreException;

    private static native int hashDbBeginTransactionNat(int dbHandle) throws TskCoreException;

    private static native int hashDbCommitTransactionNat(int dbHandle) throws TskCoreException;

    private static native int hashDbRollbackTransactionNat(int dbHandle) throws TskCoreException;

    private static native int hashDbAddEntryNat(String filename,
            String hashMd5, String hashSha1, String hashSha256,
            String comment, int dbHandle) throws TskCoreException;

    private static native boolean hashDbIsUpdateableNat(int dbHandle);

    private static native boolean hashDbIsReindexableNat(int dbHandle);

    private static native String hashDbPathNat(int dbHandle);

    private static native String hashDbIndexPathNat(int dbHandle);

    private static native String hashDbGetDisplayName(int dbHandle) throws TskCoreException;

    private static native void hashDbCloseAll() throws TskCoreException;

    private static native void hashDbClose(int dbHandle) throws TskCoreException;

    //hash-lookup database functions
    private static native void hashDbCreateIndexNat(int dbHandle) throws TskCoreException;

    private static native boolean hashDbIndexExistsNat(int dbHandle) throws TskCoreException;

    private static native boolean hashDbIsIdxOnlyNat(int dbHandle) throws TskCoreException;

    private static native boolean hashDbLookup(String hash, int dbHandle) throws TskCoreException;

    private static native HashHitInfo hashDbLookupVerbose(String hash, int dbHandle) throws TskCoreException;

    //load image
    private static native long initAddImgNat(long db, String timezone, boolean processUnallocSpace, boolean noFatFsOrphans) throws TskCoreException;

    private static native void runAddImgNat(long process, String[] imgPath, int splits, String timezone) throws TskCoreException, TskDataException; // if runAddImg finishes without being stopped, revertAddImg or commitAddImg MUST be called

    private static native void stopAddImgNat(long process) throws TskCoreException;

    private static native void revertAddImgNat(long process) throws TskCoreException;

    private static native long commitAddImgNat(long process) throws TskCoreException;

    //open functions
    private static native long openImgNat(String[] imgPath, int splits) throws TskCoreException;

    private static native long openVsNat(long imgHandle, long vsOffset) throws TskCoreException;

    private static native long openVolNat(long vsHandle, long volId) throws TskCoreException;

    private static native long openFsNat(long imgHandle, long fsId) throws TskCoreException;

    private static native long openFileNat(long fsHandle, long fileId, int attrType, int attrId) throws TskCoreException;

    //read functions
    private static native int readImgNat(long imgHandle, byte[] readBuffer, long offset, long len) throws TskCoreException;

    private static native int readVsNat(long vsHandle, byte[] readBuffer, long offset, long len) throws TskCoreException;

    private static native int readVolNat(long volHandle, byte[] readBuffer, long offset, long len) throws TskCoreException;

    private static native int readFsNat(long fsHandle, byte[] readBuffer, long offset, long len) throws TskCoreException;

    private static native int readFileNat(long fileHandle, byte[] readBuffer, long offset, long len) throws TskCoreException;

    //close functions
    private static native void closeImgNat(long imgHandle);

    private static native void closeVsNat(long vsHandle);

    private static native void closeFsNat(long fsHandle);

    private static native void closeFileNat(long fileHandle);

    //util functions
    private static native long findDeviceSizeNat(String devicePath) throws TskCoreException;

    private static native String getCurDirNat(long process);

    //Linked library loading
    static {
        LibraryUtils.loadSleuthkitJNI();
    }

    private SleuthkitJNI() {
    }

    /**
     * Handle to TSK Case database
     */
    public static class CaseDbHandle {

        private long caseDbPointer;
        //map concat. image paths to cached image handle
        private static final Map<String, Long> imageHandleCache = new HashMap<String, Long>();
        //map image and offsets to cached fs handle
        private static final Map<Long, Map<Long, Long>> fsHandleCache = new HashMap<Long, Map<Long, Long>>();

        private CaseDbHandle(long pointer) {
            this.caseDbPointer = pointer;
        }

        /**
         * Close the case database
         *
         * @throws TskCoreException exception thrown if critical error occurs
         * within TSK
         */
        void free() throws TskCoreException {
            SleuthkitJNI.closeCaseDbNat(caseDbPointer);
        }

        /******************** Hash Database Methods ***********************/

        /**
         * Start the process of adding a disk image to the case
         *
         * @param timezone Timezone that image was from
         * @param processUnallocSpace true if to process unallocated space in
         * the image
         * @param noFatFsOrphans true if to skip processing of orphans on FAT
         * filesystems
         *
         * @return Object that can be used to manage the process.
         */
        AddImageProcess initAddImageProcess(String timezone, boolean processUnallocSpace, boolean noFatFsOrphans) {
            return new AddImageProcess(timezone, processUnallocSpace, noFatFsOrphans);
        }

        /**
         * Encapsulates a multi-step process to add a disk image. Adding a disk
         * image takes a while and this object has objects to manage that
         * process. Methods within this class are intended to be threadsafe.
         */
        public class AddImageProcess {

            private String timezone;
            private boolean processUnallocSpace;
            private boolean noFatFsOrphans;
            private volatile long autoDbPointer;

            private AddImageProcess(String timezone, boolean processUnallocSpace, boolean noFatFsOrphans) {
                this.timezone = timezone;
                this.processUnallocSpace = processUnallocSpace;
                this.noFatFsOrphans = noFatFsOrphans;
                autoDbPointer = 0;
            }

            /**
             * Start the process of adding an image to the case database. MUST
             * call either commit() or revert() after calling run().
             *
             * @param imgPath Full path(s) to the image file(s).
             * @throws TskCoreException exception thrown if critical error
             * occurs within TSK
             * @throws TskDataException exception thrown if non-critical error
             * occurs within TSK (should be OK to continue)
             */
            public void run(String[] imgPath) throws TskCoreException, TskDataException {
                // NOTE(review): this pre-check and the synchronized assignment
                // below are separate, so two concurrent run() calls could in
                // theory both pass the check; kept as-is to preserve behavior.
                if (autoDbPointer != 0) {
                    throw new TskCoreException("AddImgProcess:run: AutoDB pointer is already set");
                }

                synchronized (this) {
                    autoDbPointer = initAddImgNat(caseDbPointer, timezoneLongToShort(timezone), processUnallocSpace, noFatFsOrphans);
                }
                if (autoDbPointer == 0) {
                    //additional check in case initAddImgNat didn't throw exception
                    throw new TskCoreException("AddImgProcess::run: AutoDB pointer is NULL after initAddImgNat");
                }
                runAddImgNat(autoDbPointer, imgPath, imgPath.length, timezone);
            }

            /**
             * Call while run() is executing in another thread to prematurely
             * halt the process. Must call revert() in the other thread once the
             * stopped run() returns.
             *
             * @throws TskCoreException exception thrown if critical error
             * occurs within TSK
             */
            public void stop() throws TskCoreException {
                if (autoDbPointer == 0) {
                    throw new TskCoreException("AddImgProcess::stop: AutoDB pointer is NULL");
                }
                stopAddImgNat(autoDbPointer);
            }

            /**
             * Rollback a process that has already been run(), reverting the
             * database. This releases the C++ object and no additional
             * operations can be performed. This method is threadsafe.
             *
             * @throws TskCoreException exception thrown if critical error
             * occurs within TSK
             */
            public synchronized void revert() throws TskCoreException {
                if (autoDbPointer == 0) {
                    throw new TskCoreException("AddImgProcess::revert: AutoDB pointer is NULL");
                }
                revertAddImgNat(autoDbPointer);
                // the native code deleted the object
                autoDbPointer = 0;
            }

            /**
             * Finish off a process that has already been run(), closing the
             * transaction and committing the new image data to the database.
             *
             * @return The id of the image that was added. This releases the C++
             * object and no additional operations can be performed. This method
             * is threadsafe.
             *
             * @throws TskCoreException exception thrown if critical error
             * occurs within TSK
             */
            public synchronized long commit() throws TskCoreException {
                if (autoDbPointer == 0) {
                    throw new TskCoreException("AddImgProcess::commit: AutoDB pointer is NULL");
                }

                long id = commitAddImgNat(autoDbPointer);
                // the native code deleted the object
                autoDbPointer = 0;
                return id;
            }

            /**
             * Gets the directory currently being processed by TSK. This method
             * is threadsafe.
             *
             * @return the currently processing directory
             */
            public synchronized String currentDirectory() {
                return autoDbPointer == 0 ? "NO_INFO" : getCurDirNat(autoDbPointer); //NON-NLS
            }
        }
    }

    /**
     * Creates a new case database. Must call .free() on CaseDbHandle instance
     * when done.
     *
     * @param path Location to create the database at.
     * @return Handle for a new TskCaseDb instance.
     * @throws TskCoreException exception thrown if critical error occurs within
     * TSK
     */
    static CaseDbHandle newCaseDb(String path) throws TskCoreException {
        return new CaseDbHandle(newCaseDbNat(path));
    }

    /**
     * Opens an existing case database. Must call .free() on CaseDbHandle
     * instance when done.
     *
     * @param path Location of the existing database.
     * @return Handle for a new TskCaseDb instance.
     * @throws TskCoreException exception thrown if critical error occurs within
     * TSK
     */
    static CaseDbHandle openCaseDb(String path) throws TskCoreException {
        return new CaseDbHandle(openCaseDbNat(path));
    }

    /**
     * get the Sleuth Kit version string
     *
     * @return the version string
     */
    public static String getVersion() {
        return getVersionNat();
    }

    /**
     * Enable verbose logging and redirect stderr to the given log file.
     *
     * @param logPath path of the log file that stderr is redirected to
     */
    public static void startVerboseLogging(String logPath) {
        startVerboseLoggingNat(logPath);
    }

    /**
     * open the image and return the image info pointer. Handles are cached per
     * concatenated path key and reused for the duration of the application.
     *
     * @param imageFiles the paths to the images
     * @return the image info pointer
     * @throws TskCoreException exception thrown if critical error occurs within
     * TSK
     */
    public synchronized static long openImage(String[] imageFiles) throws TskCoreException {
        long imageHandle = 0;

        // Cache key is simply all image paths concatenated in order.
        StringBuilder keyBuilder = new StringBuilder();
        for (int i = 0; i < imageFiles.length; ++i) {
            keyBuilder.append(imageFiles[i]);
        }
        final String imageKey = keyBuilder.toString();

        if (CaseDbHandle.imageHandleCache.containsKey(imageKey)) {
            //get from cache
            imageHandle = CaseDbHandle.imageHandleCache.get(imageKey);
        } else {
            //open new handle and cache it
            imageHandle = openImgNat(imageFiles, imageFiles.length);
            CaseDbHandle.fsHandleCache.put(imageHandle, new HashMap<Long, Long>());
            CaseDbHandle.imageHandleCache.put(imageKey, imageHandle);
        }
        return imageHandle;
    }

    /**
     * Get volume system Handle
     *
     * @param imgHandle a handle to previously opened image
     * @param vsOffset byte offset in the image to the volume system (usually 0)
     * @return pointer to a vsHandle structure in the sleuthkit
     * @throws TskCoreException exception thrown if critical error occurs within
     * TSK
     */
    public static long openVs(long imgHandle, long vsOffset) throws TskCoreException {
        return openVsNat(imgHandle, vsOffset);
    }

    //get pointers
    /**
     * Get volume Handle
     *
     * @param vsHandle pointer to the volume system structure in the sleuthkit
     * @param volId id of the volume
     * @return pointer to a volHandle structure in the sleuthkit
     * @throws TskCoreException exception thrown if critical error occurs within
     * TSK
     */
    public static long openVsPart(long vsHandle, long volId) throws TskCoreException {
        //returned long is ptr to vs Handle object in tsk
        return openVolNat(vsHandle, volId);
    }

    /**
     * Get file system Handle Opened handle is cached (transparently) so it does
     * not need be reopened next time for the duration of the application
     *
     * @param imgHandle pointer to imgHandle in sleuthkit
     * @param fsOffset byte offset to the file system
     * @return pointer to a fsHandle structure in the sleuthkit
     * @throws TskCoreException exception thrown if critical error occurs within
     * TSK
     */
    public synchronized static long openFs(long imgHandle, long fsOffset) throws TskCoreException {
        long fsHandle = 0;
        Map<Long, Long> imgOffSetToFsHandle = CaseDbHandle.fsHandleCache.get(imgHandle);
        if (imgOffSetToFsHandle == null) {
            // FIX: the per-image map only exists if the handle came from
            // openImage(); previously a handle obtained any other way caused a
            // NullPointerException on the containsKey() call below. Create the
            // map lazily instead.
            imgOffSetToFsHandle = new HashMap<Long, Long>();
            CaseDbHandle.fsHandleCache.put(imgHandle, imgOffSetToFsHandle);
        }
        if (imgOffSetToFsHandle.containsKey(fsOffset)) {
            //return cached
            fsHandle = imgOffSetToFsHandle.get(fsOffset);
        } else {
            fsHandle = openFsNat(imgHandle, fsOffset);
            //cache it
            imgOffSetToFsHandle.put(fsOffset, fsHandle);
        }
        return fsHandle;
    }

    /**
     * Get file Handle
     *
     * @param fsHandle fsHandle pointer in the sleuthkit
     * @param fileId id of the file
     * @param attrType file attribute type to open
     * @param attrId file attribute id to open
     * @return pointer to a file structure in the sleuthkit
     * @throws TskCoreException exception thrown if critical error occurs within
     * TSK
     */
    public static long openFile(long fsHandle, long fileId, TSK_FS_ATTR_TYPE_ENUM attrType, int attrId) throws TskCoreException {
        return openFileNat(fsHandle, fileId, attrType.getValue(), attrId);
    }

    //do reads
    /**
     * reads data from an image
     *
     * @param imgHandle handle to a previously opened image
     * @param readBuffer buffer to read to
     * @param offset byte offset in the image to start at
     * @param len amount of data to read
     * @return the number of characters read, or -1 if the end of the stream has
     * been reached
     * @throws TskCoreException exception thrown if critical error occurs within
     * TSK
     */
    public static int readImg(long imgHandle, byte[] readBuffer, long offset, long len) throws TskCoreException {
        //returned byte[] is the data buffer
        return readImgNat(imgHandle, readBuffer, offset, len);
    }

    /**
     * reads data from an volume system
     *
     * @param vsHandle pointer to a volume system structure in the sleuthkit
     * @param readBuffer buffer to read to
     * @param offset sector offset in the image to start at
     * @param len amount of data to read
     * @return the number of characters read, or -1 if the end of the stream has
     * been reached
     * @throws TskCoreException exception thrown if critical error occurs within
     * TSK
     */
    public static int readVs(long vsHandle, byte[] readBuffer, long offset, long len) throws TskCoreException {
        return readVsNat(vsHandle, readBuffer, offset, len);
    }

    /**
     * reads data from an volume
     *
     * @param volHandle pointer to a volume structure in the sleuthkit
     * @param readBuffer buffer to read to
     * @param offset byte offset in the image to start at
     * @param len amount of data to read
     * @return the number of characters read, or -1 if the end of the stream has
     * been reached
     * @throws TskCoreException exception thrown if critical error occurs within
     * TSK
     */
    public static int readVsPart(long volHandle, byte[] readBuffer, long offset, long len) throws TskCoreException {
        //returned byte[] is the data buffer
        return readVolNat(volHandle, readBuffer, offset, len);
    }

    /**
     * reads data from an file system
     *
     * @param fsHandle pointer to a file system structure in the sleuthkit
     * @param readBuffer buffer to read to
     * @param offset byte offset in the image to start at
     * @param len amount of data to read
     * @return the number of characters read, or -1 if the end of the stream has
     * been reached
     * @throws TskCoreException exception thrown if critical error occurs within
     * TSK
     */
    public static int readFs(long fsHandle, byte[] readBuffer, long offset, long len) throws TskCoreException {
        //returned byte[] is the data buffer
        return readFsNat(fsHandle, readBuffer, offset, len);
    }

    /**
     * reads data from an file
     *
     * @param fileHandle pointer to a file structure in the sleuthkit
     * @param readBuffer pre-allocated buffer to read to
     * @param offset byte offset in the image to start at
     * @param len amount of data to read
     * @return the number of characters read, or -1 if the end of the stream has
     * been reached
     * @throws TskCoreException exception thrown if critical error occurs within
     * TSK
     */
    public static int readFile(long fileHandle, byte[] readBuffer, long offset, long len) throws TskCoreException {
        return readFileNat(fileHandle, readBuffer, offset, len);
    }

    //free pointers
    /**
     * frees the imgHandle pointer currently does not close the image, until the
     * application terminates (image handle is cached)
     *
     * @param imgHandle to close the image
     */
    public static void closeImg(long imgHandle) {
        //@@@ TODO close the image handle when Case is closed instead
        //currently the image handle is not being freed, it's cached for duration of the application
        //closeImgNat(imgHandle);
    }

    /**
     * frees the vsHandle pointer
     *
     * @param vsHandle pointer to volume system structure in sleuthkit
     */
    public static void closeVs(long vsHandle) {
        closeVsNat(vsHandle);
    }

    /**
     * frees the fsHandle pointer Currently does not do anything - preserves the
     * cached object for the duration of the application
     *
     * @param fsHandle pointer to file system structure in sleuthkit
     */
    public static void closeFs(long fsHandle) {
        //@@@ TODO close the fs handle when Case is closed instead
        //currently the fs handle is not being freed, it's cached for duration of the application
        //closeFsNat(fsHandle);
    }

    /**
     * frees the fileHandle pointer
     *
     * @param fileHandle pointer to file structure in sleuthkit
     */
    public static void closeFile(long fileHandle) {
        closeFileNat(fileHandle);
    }

    /****************************** Hash database methods *****************/
    /**
     * Create an index for the given hash database.
     *
     * @param dbHandle handle of a previously opened hash database
     * @throws TskCoreException if a critical error occurs within TSK core
     */
    public static void createLookupIndexForHashDatabase(int dbHandle) throws TskCoreException {
        hashDbCreateIndexNat(dbHandle);
    }

    /**
     * Check if an index exists for the given hash database.
     *
     * @param dbHandle handle of a previously opened hash database
     * @return true if index exists
     * @throws TskCoreException if a critical error occurs within TSK core
     */
    public static boolean hashDatabaseHasLookupIndex(int dbHandle) throws TskCoreException {
        return hashDbIndexExistsNat(dbHandle);
    }

    /**
     * hashDatabaseCanBeReindexed
     *
     * @param dbHandle previously opened hash db handle
     * @return Does this database have a source database that is different than
     * the index?
     * @throws TskCoreException if a critical error occurs within TSK core
     */
    public static boolean hashDatabaseCanBeReindexed(int dbHandle) throws TskCoreException {
        return hashDbIsReindexableNat(dbHandle);
    }

    /**
     * getHashDatabasePath
     *
     * @param dbHandle previously opened hash db handle
     * @return Hash db file path
     * @throws TskCoreException if a critical error occurs within TSK core
     */
    public static String getHashDatabasePath(int dbHandle) throws TskCoreException {
        return hashDbPathNat(dbHandle);
    }

    /**
     * getHashDatabaseIndexPath
     *
     * @param dbHandle previously opened hash db handle
     * @return Index file path
     * @throws TskCoreException if a critical error occurs within TSK core
     */
    public static String getHashDatabaseIndexPath(int dbHandle) throws TskCoreException {
        return hashDbIndexPathNat(dbHandle);
    }

    /**
     * Opens an existing hash database.
     *
     * @param path The path to the database
     * @return a handle for that database
     * @throws TskCoreException if a critical error occurs within TSK core
     */
    public static int openHashDatabase(String path) throws TskCoreException {
        return hashDbOpenNat(path);
    }

    /**
     * Creates a hash database. Will be of the default TSK hash database type.
     *
     * @param path The path to the database
     * @return a handle for that database
     * @throws TskCoreException if a critical error occurs within TSK core
     */
    public static int createHashDatabase(String path) throws TskCoreException {
        return hashDbNewNat(path);
    }

    /**
     * Close the currently open lookup databases. Resets the handle counting.
     *
     * @throws TskCoreException exception thrown if critical error occurs within
     * TSK
     */
    public static void closeAllHashDatabases() throws TskCoreException {
        hashDbCloseAll();
    }

    /**
     * Close a particular open lookup database. Existing handles are not
     * affected.
     *
     * @param dbHandle handle of the database to close
     * @throws TskCoreException exception thrown if critical error occurs within
     * TSK
     */
    public static void closeHashDatabase(int dbHandle) throws TskCoreException {
        hashDbClose(dbHandle);
    }

    /**
     * Get the name of the database
     *
     * @param dbHandle previously opened hash db handle
     * @return display name of the database
     * @throws TskCoreException if a critical error occurs within TSK core
     */
    public static String getHashDatabaseDisplayName(int dbHandle) throws TskCoreException {
        return hashDbGetDisplayName(dbHandle);
    }

    /**
     * Lookup the given hash value and get basic answer
     *
     * @param hash Hash value to search for
     * @param dbHandle Handle of database to lookup in.
     * @return True if hash was found in database.
     * @throws TskCoreException
     */
    public static boolean lookupInHashDatabase(String hash, int dbHandle) throws TskCoreException {
        return hashDbLookup(hash, dbHandle);
    }

    /**
     * Lookup hash value in DB and return details on results (more time
     * consuming than basic lookup)
     *
     * @param hash Hash value to search for
     * @param dbHandle Handle of database to lookup in.
     * @return Details on hash if it was in DB or null if it was not found.
     * @throws TskCoreException
     */
    public static HashHitInfo lookupInHashDatabaseVerbose(String hash, int dbHandle) throws TskCoreException {
        return hashDbLookupVerbose(hash, dbHandle);
    }

    /**
     * Adds a hash value to a hash database.
     *
     * @param filename Name of file (can be null)
     * @param md5 Text of MD5 hash (can be null)
     * @param sha1 Text of SHA1 hash (can be null)
     * @param sha256 Text of SHA256 hash (can be null)
     * @param comment Comment to associate with the entry (can be null)
     * @param dbHandle Handle to DB
     * @throws TskCoreException
     */
    public static void addToHashDatabase(String filename, String md5, String sha1, String sha256, String comment, int dbHandle) throws TskCoreException {
        hashDbAddEntryNat(filename, md5, sha1, sha256, comment, dbHandle);
    }

    /**
     * Adds a batch of hash entries to a hash database inside a single
     * transaction. On failure the transaction is rolled back and the original
     * exception is rethrown (or attached as the cause of a rollback failure).
     *
     * @param hashes entries to add
     * @param dbHandle Handle to DB
     * @throws TskCoreException if adding, committing, or rolling back fails
     */
    public static void addToHashDatabase(List<HashEntry> hashes, int dbHandle) throws TskCoreException {
        hashDbBeginTransactionNat(dbHandle);
        try {
            for (HashEntry entry : hashes) {
                hashDbAddEntryNat(entry.getFileName(), entry.getMd5Hash(), entry.getSha1Hash(), entry.getSha256Hash(), entry.getComment(), dbHandle);
            }
            hashDbCommitTransactionNat(dbHandle);
        } catch (TskCoreException ex) {
            try {
                hashDbRollbackTransactionNat(dbHandle);
            } catch (TskCoreException ex2) {
                // Preserve the original failure as the cause of the rollback failure.
                ex2.initCause(ex);
                throw ex2;
            }
            throw ex;
        }
    }

    /**
     * Whether the given hash database accepts updates.
     *
     * @param dbHandle previously opened hash db handle
     * @return true if the database can be written to
     * @throws TskCoreException if a critical error occurs within TSK core
     */
    public static boolean isUpdateableHashDatabase(int dbHandle) throws TskCoreException {
        return hashDbIsUpdateableNat(dbHandle);
    }

    /**
     * Whether the given hash database consists only of an index.
     *
     * @param dbHandle previously opened hash db handle
     * @return true if the database is index-only
     * @throws TskCoreException if a critical error occurs within TSK core
     */
    public static boolean hashDatabaseIsIndexOnly(int dbHandle) throws TskCoreException {
        return hashDbIsIdxOnlyNat(dbHandle);
    }

    /**
     * Convert this timezone from long to short form Convert timezoneLongForm
     * passed in from long to short form
     *
     * @param timezoneLongForm the long form (e.g., America/New_York)
     * @return the short form (e.g., EST5EDT) string representation, or an empty
     * string if empty long form was passed in
     */
    private static String timezoneLongToShort(String timezoneLongForm) {
        if (timezoneLongForm == null || timezoneLongForm.isEmpty()) {
            return "";
        }

        String timezoneShortForm = "";
        TimeZone zone = TimeZone.getTimeZone(timezoneLongForm);
        int offset = zone.getRawOffset() / 1000;
        int hour = offset / 3600;
        // FIX: take the magnitude of the minutes component. Java integer
        // remainder keeps the sign of the dividend, so zones with a negative
        // fractional-hour offset (e.g. America/St_Johns, -3:30) produced a
        // negative min and emitted garbage like "NST3:0-30".
        int min = Math.abs((offset % 3600) / 60);

        DateFormat dfm = new SimpleDateFormat("z");
        dfm.setTimeZone(zone);
        boolean hasDaylight = zone.useDaylightTime();
        // Sample one winter and one summer date to capture the standard and
        // daylight abbreviations (GregorianCalendar months are 0-based:
        // 1 = February, 6 = July).
        String first = dfm.format(new GregorianCalendar(2010, 1, 1).getTime()).substring(0, 3); // make it only 3 letters code
        String second = dfm.format(new GregorianCalendar(2011, 6, 6).getTime()).substring(0, 3); // make it only 3 letters code
        // POSIX TZ strings invert the sign of the UTC offset (hours west of
        // Greenwich are positive), hence the negation.
        int mid = hour * -1;
        timezoneShortForm = first + Integer.toString(mid);
        if (min != 0) {
            timezoneShortForm = timezoneShortForm + ":" + (min < 10 ? "0" : "") + Integer.toString(min);
        }
        if (hasDaylight) {
            timezoneShortForm = timezoneShortForm + second;
        }
        return timezoneShortForm;
    }

    /**
     * Get size of a device (physical, logical device, image) pointed to by
     * devPath
     *
     * @param devPath device path pointing to the device
     * @return size of the device in bytes
     * @throws TskCoreException exception thrown if the device size could not be
     * queried
     */
    public static long findDeviceSize(String devPath) throws TskCoreException {
        return findDeviceSizeNat(devPath);
    }
}
/*
 * Copyright (C) 2013 Morihiro Soft
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package jp.morihirosoft.twwb;

import android.app.PendingIntent;
import android.app.Service;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.Color;
import android.graphics.PixelFormat;
import android.os.Bundle;
import android.os.IBinder;
import android.os.PowerManager;
import android.os.SystemClock;
import android.support.v4.content.LocalBroadcastManager;
import android.telephony.PhoneStateListener;
import android.telephony.TelephonyManager;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.ImageView;

import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GooglePlayServicesClient;
import com.google.android.gms.location.ActivityRecognitionClient;
import com.google.android.gms.location.DetectedActivity;

/**
 * Background service that subscribes to Google Play Services activity
 * recognition and, depending on the detected activity and the user's
 * settings, lays a semi-transparent "block" view over the whole screen
 * via the {@link WindowManager}.
 *
 * <p>Lifecycle, as visible in this class:
 * <ul>
 *   <li>recognition runs only while the screen is on (SCREEN_ON/SCREEN_OFF
 *       receivers start/stop it);</li>
 *   <li>an incoming or active phone call immediately unblocks the screen
 *       and suppresses blocking while the call lasts;</li>
 *   <li>activity results arrive through a local broadcast
 *       ({@code Constants.ACTION_RECOGNITION}) sent by
 *       {@code TwwbRecognition}.</li>
 * </ul>
 */
public class TwwbService extends Service implements
GooglePlayServicesClient.ConnectionCallbacks,
GooglePlayServicesClient.OnConnectionFailedListener,
View.OnClickListener
{
	private static final boolean DEBUG = false;
	private static final String TAG = "TwwbService";

	//-------------------------------------------------------------------------
	// MEMBER
	//-------------------------------------------------------------------------
	// User preferences (which activities block, alpha, grace period).
	private TwwbSettings mSettings = null;
	// The overlay currently attached to the WindowManager; null when not blocking.
	private View mBlockScreen = null;
	// Play Services client; non-null only while recognition is active.
	private ActivityRecognitionClient mARClient = null;
	// PendingIntent delivered to TwwbRecognition with activity updates.
	private PendingIntent mIntentRecognition = null;
	// Guards double (un)registration of the receivers/listeners below.
	private boolean mIsRegistered = false;
	// True while a phone call is ringing or off-hook; blocking is suppressed.
	private boolean mIsCalling = false;
	// uptimeMillis() when recognition (re)started; used for the "pending" grace period.
	private long mStartTime = -1;

	//-------------------------------------------------------------------------
	// PUBLIC METHOD
	//-------------------------------------------------------------------------

	/** Caches settings from the application object and logs/announces service start. */
	@Override
	public void onCreate() {
		if (DEBUG) Log.d(TAG, "onCreate");
		super.onCreate();
		TwwbApplication app = (TwwbApplication)getApplicationContext();
		mSettings = app.getSettings();
		app.log(Constants.LOG_START);
		app.updateStatusbarIcon();
	}

	/**
	 * Registers receivers and, if the screen is already on, starts activity
	 * recognition right away (there will be no SCREEN_ON broadcast to do it).
	 */
	@Override
	public int onStartCommand(Intent intent, int flags, int startId) {
		if (DEBUG) Log.d(TAG, "onStartCommand: intent="+intent);
		registerReceivers();
		PowerManager pm = (PowerManager)getSystemService(Context.POWER_SERVICE);
		if (pm.isScreenOn()) {
			startRecognition();
		}
		return super.onStartCommand(intent, flags, startId);
	}

	/** Stops recognition, tears down receivers and removes any overlay. */
	@Override
	public void onDestroy() {
		if (DEBUG) Log.d(TAG, "onDestroy");
		stopRecognition();
		unregisterReceivers();
		TwwbApplication app = (TwwbApplication)getApplicationContext();
		app.log(Constants.LOG_STOP);
		app.updateStatusbarIcon(false);
		super.onDestroy();
	}

	/** Not a bound service. */
	@Override
	public IBinder onBind(Intent arg0) {
		if (DEBUG) Log.d(TAG, "onBind");
		return null;
	}

	/**
	 * Play Services connected: request activity updates every 1000 ms,
	 * delivered to the {@code TwwbRecognition} intent service.
	 */
	@Override
	public void onConnected(Bundle arg0) {
		if (DEBUG) Log.d(TAG, "onConnected");
		Intent i = new Intent(this, TwwbRecognition.class);
		mIntentRecognition = PendingIntent.getService(this, 0, i,
				PendingIntent.FLAG_UPDATE_CURRENT);
		mARClient.requestActivityUpdates(1000, mIntentRecognition);
	}

	@Override
	public void onDisconnected() {
		if (DEBUG) Log.d(TAG, "onDisconnected");
	}

	@Override
	public void onConnectionFailed(ConnectionResult arg0) {
		if (DEBUG) Log.d(TAG, "onConnectionFailed");
	}

	/** Handles the "unblock" button on the overlay. */
	@Override
	public void onClick(View v) {
		if (DEBUG) Log.d(TAG, "onClick: v="+v);
		switch(v.getId()) {
		case R.id.btn_unblock:
			temporaryUnblockScreen();
			break;
		}
	}

	//-------------------------------------------------------------------------
	// PRIVATE...
	//-------------------------------------------------------------------------

	// Screen turned on -> (re)start recognition, which also resets the grace period.
	private final BroadcastReceiver mReceiverScreenOn = new BroadcastReceiver() {
		@Override
		public void onReceive(Context context, Intent intent) {
			if (DEBUG) Log.d(TAG, "onReceive: ACTION_SCREEN_ON: intent="+intent);
			startRecognition();
		}
	};

	// Screen turned off -> stop recognition (also removes any overlay).
	private final BroadcastReceiver mReceiverScreenOff = new BroadcastReceiver() {
		@Override
		public void onReceive(Context context, Intent intent) {
			if (DEBUG) Log.d(TAG, "onReceive: ACTION_SCREEN_OFF: intent="+intent);
			stopRecognition();
		}
	};

	// Tracks call state; any ringing/off-hook call unblocks and suppresses blocking.
	private final PhoneStateListener mPhoneStateListener = new PhoneStateListener() {
		@Override
		public void onCallStateChanged(int state, String number) {
			if (DEBUG) Log.d(TAG, "onCallStateChanged: state="+state);
			switch(state) {
			case TelephonyManager.CALL_STATE_IDLE:
				mIsCalling = false;
				break;
			case TelephonyManager.CALL_STATE_RINGING:
			case TelephonyManager.CALL_STATE_OFFHOOK:
				mIsCalling = true;
				break;
			}
			if (mIsCalling) {
				unblockScreen();
			}
		}
	};

	// Receives the detected activity type and maps it to the user's block settings.
	// STILL and UNKNOWN never block.
	private final BroadcastReceiver mReceiverRecognition = new BroadcastReceiver() {
		@Override
		public void onReceive(Context context, Intent intent) {
			if (DEBUG) Log.d(TAG, "onReceive: ACTION_RECOGNITION: intent="+intent);
			boolean block = false;
			switch(intent.getIntExtra(Constants.EXTRA_TYPE, 0)) {
			case DetectedActivity.IN_VEHICLE:
				block = mSettings.getBlockInVehicle();
				break;
			case DetectedActivity.ON_BICYCLE:
				block = mSettings.getBlockOnBicycle();
				break;
			case DetectedActivity.ON_FOOT:
				block = mSettings.getBlockOnFoot();
				break;
			case DetectedActivity.STILL:
				break;
			case DetectedActivity.UNKNOWN:
				break;
			case DetectedActivity.TILTING:
				block = mSettings.getBlockTilting();
				break;
			}
			if (block) {
				blockScreen();
			} else {
				unblockScreen();
			}
		}
	};

	/**
	 * Registers the screen on/off receivers, the phone-state listener and the
	 * local recognition receiver. Idempotent via {@code mIsRegistered}.
	 */
	private void registerReceivers() {
		if (DEBUG) Log.d(TAG, "registerReceivers");
		if (!mIsRegistered) {
			mIsRegistered = true;
			registerReceiver(mReceiverScreenOn,
					new IntentFilter(Intent.ACTION_SCREEN_ON));
			registerReceiver(mReceiverScreenOff,
					new IntentFilter(Intent.ACTION_SCREEN_OFF));
			TelephonyManager tm = (TelephonyManager)getSystemService(Context.TELEPHONY_SERVICE);
			tm.listen(mPhoneStateListener, PhoneStateListener.LISTEN_CALL_STATE);
			LocalBroadcastManager lbm = LocalBroadcastManager.getInstance(this);
			lbm.registerReceiver(mReceiverRecognition,
					new IntentFilter(Constants.ACTION_RECOGNITION));
		}
	}

	/** Mirror of {@link #registerReceivers()}; safe to call when not registered. */
	private void unregisterReceivers() {
		if (DEBUG) Log.d(TAG, "unregisterReceivers");
		if (mIsRegistered) {
			mIsRegistered = false;
			unregisterReceiver(mReceiverScreenOn);
			unregisterReceiver(mReceiverScreenOff);
			TelephonyManager tm = (TelephonyManager)getSystemService(Context.TELEPHONY_SERVICE);
			tm.listen(mPhoneStateListener, PhoneStateListener.LISTEN_NONE);
			LocalBroadcastManager lbm = LocalBroadcastManager.getInstance(this);
			lbm.unregisterReceiver(mReceiverRecognition);
		}
	}

	/**
	 * Connects the activity-recognition client (updates are requested in
	 * {@link #onConnected}) and restarts the grace-period clock. Always resets
	 * {@code mStartTime}, even when the client already exists.
	 */
	private void startRecognition() {
		if (DEBUG) Log.d(TAG, "startRecognition");
		if (mARClient == null) {
			mARClient = new ActivityRecognitionClient(this, this, this);
			mARClient.connect();
		}
		mStartTime = SystemClock.uptimeMillis();
	}

	/** Cancels activity updates, disconnects the client and removes the overlay. */
	private void stopRecognition() {
		if (DEBUG) Log.d(TAG, "stopRecognition");
		if (mARClient != null && mARClient.isConnected()) {
			mARClient.removeActivityUpdates(mIntentRecognition);
			mARClient.disconnect();
			mARClient = null;
		}
		unblockScreen();
	}

	/**
	 * Attaches the blocking overlay as a TYPE_SYSTEM_ALERT window, unless one
	 * is already showing, a call is in progress, or the configured grace
	 * period (settings "pending", in seconds) since recognition started has
	 * not yet elapsed.
	 */
	private void blockScreen() {
		if (DEBUG) Log.d(TAG, "blockScreen");
		if (mBlockScreen == null) {
			if (mIsCalling) {
				return;
			}
			// Grace period: don't block within getPending() seconds of starting.
			if (SystemClock.uptimeMillis() - mStartTime < mSettings.getPending() * 1000) {
				return;
			}
			mBlockScreen = LayoutInflater.from(this).inflate(R.layout.view_block, null);
			// Settings alpha is a percentage; scale to the 0-255 ARGB range.
			int alpha = 255 * mSettings.getAlpha() / 100;
			mBlockScreen.setBackgroundColor(Color.argb(alpha, 0, 0, 0));
			// NOTE(review): the logo reuses the overlay's alpha value via the
			// deprecated ImageView.setAlpha(int) — presumably intentional; confirm.
			((ImageView)mBlockScreen.findViewById(R.id.img_logo)).setAlpha(alpha);
			((Button)mBlockScreen.findViewById(R.id.btn_unblock)).setOnClickListener(this);
			WindowManager.LayoutParams params = new WindowManager.LayoutParams();
			params.width  = WindowManager.LayoutParams.MATCH_PARENT;
			params.height = WindowManager.LayoutParams.MATCH_PARENT;
			params.type   = WindowManager.LayoutParams.TYPE_SYSTEM_ALERT;
			params.flags  = 0;
			params.format = PixelFormat.TRANSLUCENT;
			WindowManager wm = (WindowManager)getSystemService(Context.WINDOW_SERVICE);
			wm.addView(mBlockScreen, params);
		}
	}

	/** Removes the overlay if present; safe no-op otherwise. */
	private void unblockScreen() {
		if (DEBUG) Log.d(TAG, "unblockScreen");
		if (mBlockScreen != null) {
			WindowManager wm = (WindowManager)getSystemService(Context.WINDOW_SERVICE);
			wm.removeView(mBlockScreen);
			mBlockScreen = null;
		}
	}

	/**
	 * "Unblock" button handler: removes the overlay and resets the grace
	 * clock so blocking stays off for another pending-period.
	 */
	private void temporaryUnblockScreen() {
		if (DEBUG) Log.d(TAG, "temporaryUnblockScreen");
		unblockScreen();
		mStartTime = SystemClock.uptimeMillis();
	}
}
/* * Copyright 2021 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.ads.googleads.v8.services; import com.google.ads.googleads.v8.resources.KeywordThemeConstant; import com.google.ads.googleads.v8.services.stub.KeywordThemeConstantServiceStubSettings; import com.google.api.core.ApiFunction; import com.google.api.core.BetaApi; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.ClientSettings; import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import java.io.IOException; import java.util.List; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link KeywordThemeConstantServiceClient}. * * <p>The default instance has everything set to sensible defaults: * * <ul> * <li> The default service address (googleads.googleapis.com) and default port (443) are used. * <li> Credentials are acquired automatically through Application Default Credentials. * <li> Retries are configured for idempotent methods but not for non-idempotent methods. 
* </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. When * build() is called, the tree of builders is called to create the complete settings object. * * <p>For example, to set the total timeout of getKeywordThemeConstant to 30 seconds: * * <pre>{@code * KeywordThemeConstantServiceSettings.Builder keywordThemeConstantServiceSettingsBuilder = * KeywordThemeConstantServiceSettings.newBuilder(); * keywordThemeConstantServiceSettingsBuilder * .getKeywordThemeConstantSettings() * .setRetrySettings( * keywordThemeConstantServiceSettingsBuilder * .getKeywordThemeConstantSettings() * .getRetrySettings() * .toBuilder() * .setTotalTimeout(Duration.ofSeconds(30)) * .build()); * KeywordThemeConstantServiceSettings keywordThemeConstantServiceSettings = * keywordThemeConstantServiceSettingsBuilder.build(); * }</pre> */ @Generated("by gapic-generator-java") public class KeywordThemeConstantServiceSettings extends ClientSettings<KeywordThemeConstantServiceSettings> { /** Returns the object with the settings used for calls to getKeywordThemeConstant. */ public UnaryCallSettings<GetKeywordThemeConstantRequest, KeywordThemeConstant> getKeywordThemeConstantSettings() { return ((KeywordThemeConstantServiceStubSettings) getStubSettings()) .getKeywordThemeConstantSettings(); } /** Returns the object with the settings used for calls to suggestKeywordThemeConstants. */ public UnaryCallSettings< SuggestKeywordThemeConstantsRequest, SuggestKeywordThemeConstantsResponse> suggestKeywordThemeConstantsSettings() { return ((KeywordThemeConstantServiceStubSettings) getStubSettings()) .suggestKeywordThemeConstantsSettings(); } public static final KeywordThemeConstantServiceSettings create( KeywordThemeConstantServiceStubSettings stub) throws IOException { return new KeywordThemeConstantServiceSettings.Builder(stub.toBuilder()).build(); } /** Returns a builder for the default ExecutorProvider for this service. 
*/ public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() { return KeywordThemeConstantServiceStubSettings.defaultExecutorProviderBuilder(); } /** Returns the default service endpoint. */ public static String getDefaultEndpoint() { return KeywordThemeConstantServiceStubSettings.getDefaultEndpoint(); } /** Returns the default service scopes. */ public static List<String> getDefaultServiceScopes() { return KeywordThemeConstantServiceStubSettings.getDefaultServiceScopes(); } /** Returns a builder for the default credentials for this service. */ public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() { return KeywordThemeConstantServiceStubSettings.defaultCredentialsProviderBuilder(); } /** Returns a builder for the default ChannelProvider for this service. */ public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() { return KeywordThemeConstantServiceStubSettings.defaultGrpcTransportProviderBuilder(); } public static TransportChannelProvider defaultTransportChannelProvider() { return KeywordThemeConstantServiceStubSettings.defaultTransportChannelProvider(); } @BetaApi("The surface for customizing headers is not stable yet and may change in the future.") public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() { return KeywordThemeConstantServiceStubSettings.defaultApiClientHeaderProviderBuilder(); } /** Returns a new builder for this class. */ public static Builder newBuilder() { return Builder.createDefault(); } /** Returns a new builder for this class. */ public static Builder newBuilder(ClientContext clientContext) { return new Builder(clientContext); } /** Returns a builder containing all the values of this settings class. 
*/ public Builder toBuilder() { return new Builder(this); } protected KeywordThemeConstantServiceSettings(Builder settingsBuilder) throws IOException { super(settingsBuilder); } /** Builder for KeywordThemeConstantServiceSettings. */ public static class Builder extends ClientSettings.Builder<KeywordThemeConstantServiceSettings, Builder> { protected Builder() throws IOException { this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(KeywordThemeConstantServiceStubSettings.newBuilder(clientContext)); } protected Builder(KeywordThemeConstantServiceSettings settings) { super(settings.getStubSettings().toBuilder()); } protected Builder(KeywordThemeConstantServiceStubSettings.Builder stubSettings) { super(stubSettings); } private static Builder createDefault() { return new Builder(KeywordThemeConstantServiceStubSettings.newBuilder()); } public KeywordThemeConstantServiceStubSettings.Builder getStubSettingsBuilder() { return ((KeywordThemeConstantServiceStubSettings.Builder) getStubSettings()); } /** * Applies the given settings updater function to all of the unary API methods in this service. * * <p>Note: This method does not support applying settings to streaming methods. */ public Builder applyToAllUnaryMethods( ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) { super.applyToAllUnaryMethods( getStubSettingsBuilder().unaryMethodSettingsBuilders(), settingsUpdater); return this; } /** Returns the builder for the settings used for calls to getKeywordThemeConstant. */ public UnaryCallSettings.Builder<GetKeywordThemeConstantRequest, KeywordThemeConstant> getKeywordThemeConstantSettings() { return getStubSettingsBuilder().getKeywordThemeConstantSettings(); } /** Returns the builder for the settings used for calls to suggestKeywordThemeConstants. 
*/ public UnaryCallSettings.Builder< SuggestKeywordThemeConstantsRequest, SuggestKeywordThemeConstantsResponse> suggestKeywordThemeConstantsSettings() { return getStubSettingsBuilder().suggestKeywordThemeConstantsSettings(); } @Override public KeywordThemeConstantServiceSettings build() throws IOException { return new KeywordThemeConstantServiceSettings(this); } } }
package fortunedog.mail.proxy.servlet; import java.io.IOException; import java.sql.SQLException; import java.sql.Statement; import java.util.Comparator; import java.util.Iterator; import java.util.TreeSet; import javax.mail.MessagingException; import javax.naming.Context; import javax.naming.InitialContext; import javax.naming.NamingException; import javax.servlet.ServletException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import fortunedog.mail.proxy.MailClient; import fortunedog.mail.proxy.MailStatus; import fortunedog.mail.proxy.net.MailSummary; import fortunedog.mail.proxy.net.Result; import fortunedog.util.DbHelper; public class SyncupMail extends DatabaseServlet { private static int MAIL_COUNT_PER_PACKET = 10; static Logger log = LoggerFactory.getLogger(SyncupMail.class); @Override Result dbService(ServiceData d) throws ServletException, IOException, SQLException, NeedLoginException { // log.log(Level.INFO, // "Servlet Version:"+this.getServletContext().getMajorVersion()+"."+this.getServletContext().getMinorVersion()); // Servelet version 2.5 in tomcat6 log.info( "Entering SyncupMail servelet"); checkSession(d, true); checkMailClient(d); int normalMaxIdx = Integer.parseInt(d.request.getParameter("nm")); int deleteMaxIdx = Integer.parseInt(d.request.getParameter("dm")); int mailCount = MAIL_COUNT_PER_PACKET; if (d.request.getParameter("mc") != null) { mailCount = Integer.parseInt(d.request.getParameter("mc")); } long LE = Long.MAX_VALUE; if (d.request.getParameter("LE") != null) { LE = Integer.parseInt(d.request.getParameter("LE")); } String checkNew = d.request.getParameter("cn"); MailClient.ClientRequestData reqData = new MailClient.ClientRequestData(); reqData.uidxMax = normalMaxIdx; reqData.requestedMailCount = mailCount; reqData.requestedUidxCeil = LE; if(d.request.getParameter("foldername")==null) reqData.folderName="INBOX"; else reqData.folderName=new String(d.request.getParameter("foldername").getBytes("ISO8859-1"), "UTF-8"); // 
d.mailClient.setClientRequestData(reqData); Thread.currentThread().setName("Syncup_"+d.mailClient.connData.accountName); try { d.mailClient.enterUserState(); } catch (InterruptedException e1) { return null; } try { boolean checkNewInitated = false; if ("1".equals(checkNew)/* User ask to check new mail */|| d.mailClient.isImap() ) { try { int r = d.mailClient.checkNewMails(reqData); if (r != Result.SUCCESSED) { if (r == Result.AUTH_FAIL) throw new NeedLoginException("Login Failed"); else throw new ServletException("Failed to check new mail"); } checkNewInitated = true; } catch (MessagingException e) { e.printStackTrace(); throw new ServletException("Failed to check new mail:" + e.getMessage()); } } TreeSet<MailSummary> waitingMail=new TreeSet<MailSummary>(new Comparator<MailSummary>() { @Override public int compare(MailSummary o1, MailSummary o2) { int r = o1.uidx>(o2.uidx)?-1:(o1.uidx==(o2.uidx)?0:1); return r; } }); StringBuilder sqlWhere = new StringBuilder(" where `uidx` > ").append(normalMaxIdx); if (LE != Long.MAX_VALUE) { sqlWhere.append(" and `uidx` <= ").append(LE); } /////sqlite_refactor sqlWhere.append(" and state !=").append(MailStatus.MAIL_TO_DEL) .append(" and foldername='" + reqData.folderName + "'"); Statement varSt = d.sqliteStat; if(!DbHelper.useSqlite) { sqlWhere.append(" and accountId="+d.mailClient.connData.accountId); varSt = d.dbStat; } String sql = "select * from mails " + sqlWhere.append(" order by uidx DESC limit ").append(mailCount + 1); // System.err.println(sql); d.dbRst = varSt.executeQuery(sql.toString()); while (d.dbRst.next()) { //log.info( "Select mail:"); MailSummary s = new MailSummary(d.dbRst.getString("uid")); s.date = d.dbRst.getTimestamp("date"); if(s.date==null||s.date.toString()=="") break; s.from = d.dbRst.getString("from"); s.to = d.dbRst.getString("to"); s.cc = d.dbRst.getString("cc"); s.subject = d.dbRst.getString("subject"); s.uidx =d.dbRst.getInt("uidx"); s.index = d.dbRst.getInt("index"); s.state = 
d.dbRst.getInt("state"); s.attachmentFlag = d.dbRst.getInt("attachmentFlag"); s.folderName = d.dbRst.getString("foldername"); waitingMail.add(s); //log.info( "\t\tSubject:"+s.subject); } DbHelper.close(d.dbRst); //get all deleted mails, also obey the limitation sqlWhere = new StringBuilder(" where `uidx` > ").append(deleteMaxIdx); ///sqlite_refactor sqlWhere.append(" and state=").append(MailStatus.MAIL_TO_DEL).append(" and foldername='" + reqData.folderName + "'"); if(!DbHelper.useSqlite) sqlWhere.append(" and accountId="+d.mailClient.connData.accountId); sql = "select * from mails " + sqlWhere.append(" order by uidx DESC limit ").append(mailCount + 1); System.err.println(sql); d.dbRst = varSt.executeQuery(sql.toString()); while (d.dbRst.next()) { MailSummary s = new MailSummary(d.dbRst.getString("uid")); s.date = d.dbRst.getTimestamp("date"); if(s.date==null||s.date.toString()=="") break; s.from = d.dbRst.getString("from"); s.to = d.dbRst.getString("to"); s.cc = d.dbRst.getString("cc"); s.subject = d.dbRst.getString("subject"); s.uidx =d.dbRst.getInt("uidx"); s.index = d.dbRst.getInt("index"); s.state = d.dbRst.getInt("state"); s.attachmentFlag = d.dbRst.getInt("attachmentFlag"); s.folderName = d.dbRst.getString("foldername"); waitingMail.add(s); } d.response.setContentType("text/xml"); int mailTransfered = 0; d.out.write("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"); d.out.write("<result class=\"fortunedog.mail.proxy.net.MailListResult\">\n"); d.out.write("<status code=\"" + Result.SUCCESSED + "\"/>\n"); d.out.write("<content>\n"); if(!DbHelper.shareSqliteConnection) { DbHelper.close(d.sqliteStat); DbHelper.close(d.sqliteConn); d.sqliteStat=null; d.sqliteConn = null; } if (checkNewInitated ) { Iterator<MailSummary> syncedMails = reqData.syncState.getSyncedMails(); iterateNewMails: while(syncedMails.hasNext()) { MailSummary mailSummary = syncedMails.next(); //log.info( "synced new mail:" + mailSummary.subject); waitingMail.add(mailSummary); do{ MailSummary s = 
waitingMail.pollFirst(); if(s.state == MailStatus.MAIL_TO_DEL) { d.out.write(s.toXml()); continue; } if(mailTransfered==mailCount) { s.state |= MailStatus.FLAG_HAS_MORE_PLACEHOLD; d.out.write(s.toXml()); mailTransfered++; break iterateNewMails; } else { d.out.write(s.toXml()); } mailTransfered++; }while(!waitingMail.isEmpty() && waitingMail.first().uidx >= mailSummary.uidx); } } Iterator<MailSummary> iterator=waitingMail.iterator(); while(iterator.hasNext() && mailTransfered<=mailCount) { MailSummary mailSummary=iterator.next(); //log.info( "waiting mail:" + mailSummary.subject); if(mailSummary.state == MailStatus.MAIL_TO_DEL) { d.out.write(mailSummary.toXml()); continue; } if(mailTransfered==mailCount) { mailSummary.state |= MailStatus.FLAG_HAS_MORE_PLACEHOLD; d.out.write(mailSummary.toXml()); break; } d.out.write(mailSummary.toXml()); mailTransfered++; } d.out.write("</content></result>"); // System.out.println(buffer.toString()); d.out.flush(); d.stream.flush(); } catch(Throwable t) { if(d.session != null) SessionListener.removeStoredMailClient(d.session); log.error( "Fail syncup mail" ,t ); } finally { d.mailClient.quiteUserState(); } return null; } @Override public void init() throws ServletException { Context env; try { env = (Context) new InitialContext().lookup("java:comp/env"); MAIL_COUNT_PER_PACKET = (Integer) env.lookup("maxMailCountPerPacket"); } catch (NamingException e) { e.printStackTrace(); } } }
/* * Created on Dec 8, 2007 * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * Copyright @2007-2013 the original author or authors. */ package org.fest.swing.fixture; import java.awt.Dimension; import java.awt.Point; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.swing.JInternalFrame; import org.fest.swing.core.Robot; import org.fest.swing.driver.JInternalFrameDriver; import org.fest.swing.exception.ActionFailedException; import org.fest.swing.exception.ComponentLookupException; /** * Supports functional testing of {@code JInternalFrame}s * * @author Alex Ruiz */ public class JInternalFrameFixture extends AbstractJPopupMenuInvokerFixture<JInternalFrameFixture, JInternalFrame, JInternalFrameDriver> implements FrameLikeFixture<JInternalFrameFixture> { /** * Creates a new {@link JInternalFrameFixture}. * * @param robot performs simulation of user events on a {@code JInternalFrame}. * @param internalFrameName the name of the {@code JInternalFrame} to find using the given {@code Robot}. * @throws NullPointerException if {@code robot} is {@code null}. * @throws ComponentLookupException if a matching {@code JInternalFrame} could not be found. * @throws ComponentLookupException if more than one matching {@code JInternalFrame} is found. */ public JInternalFrameFixture(@Nonnull Robot robot, @Nullable String internalFrameName) { super(JInternalFrameFixture.class, robot, internalFrameName, JInternalFrame.class); } /** * Creates a new {@link JInternalFrameFixture}. 
* * @param robot performs simulation of user events on the given {@code JInternalFrame}. * @param target the {@code JInternalFrame} to be managed by this fixture. * @throws NullPointerException if {@code robot} is {@code null}. * @throws NullPointerException if {@code target} is {@code null}. */ public JInternalFrameFixture(@Nonnull Robot robot, @Nonnull JInternalFrame target) { super(JInternalFrameFixture.class, robot, target); } @Override protected @Nonnull JInternalFrameDriver createDriver(@Nonnull Robot robot) { return new JInternalFrameDriver(robot); } /** * Brings this fixture's {@code JInternalFrame} to the front. * * @return this fixture. */ @Override public @Nonnull JInternalFrameFixture moveToFront() { driver().moveToFront(target()); return this; } /** * Brings this fixture's {@code JInternalFrame} to the back. * * @return this fixture. */ @Override public @Nonnull JInternalFrameFixture moveToBack() { driver().moveToBack(target()); return this; } /** * Simulates a user deiconifying this fixture's {@code JInternalFrame}. * * @return this fixture. * @throws ActionFailedException if the {@code JInternalFrame} vetoes the action. */ @Override public @Nonnull JInternalFrameFixture deiconify() { driver().deiconify(target()); return this; } /** * Simulates a user iconifying this fixture's {@code JInternalFrame}. * * @return this fixture. * @throws ActionFailedException if the given {@code JInternalFrame} is not iconifiable. * @throws ActionFailedException if the {@code JInternalFrame} vetoes the action. */ @Override public @Nonnull JInternalFrameFixture iconify() { driver().iconify(target()); return this; } /** * Simulates a user maximizing this fixture's {@code JInternalFrame}, deconifying it first if it is iconified. * * @return this fixture. * @throws ActionFailedException if the given {@code JInternalFrame} is not maximizable. * @throws ActionFailedException if the {@code JInternalFrame} vetoes the action. 
*/ @Override public @Nonnull JInternalFrameFixture maximize() { driver().maximize(target()); return this; } /** * Simulates a user normalizing this fixture's {@code JInternalFrame}, deconifying it first if it is iconified. * * @return this fixture. * @throws ActionFailedException if the {@code JInternalFrame} vetoes the action. */ @Override public @Nonnull JInternalFrameFixture normalize() { driver().normalize(target()); return this; } /** * Simulates a user closing this fixture's {@code JInternalFrame}. * * @throws ActionFailedException if the {@code JInternalFrame} is not closable. */ @Override public void close() { driver().close(target()); } /** * Asserts that the size of this fixture's {@code JInternalFrame} is equal to given one. * * @param size the given size to match. * @return this fixture. * @throws AssertionError if the size of this fixture's {@code JInternalFrame} is not equal to the given size. */ @Override public @Nonnull JInternalFrameFixture requireSize(@Nonnull Dimension size) { driver().requireSize(target(), size); return this; } /** * Simulates a user resizing horizontally this fixture's {@code JInternalFrame}. * * @param width the width that this fixture's {@code JInternalFrame} should have after being resized. * @return this fixture. */ @Override public @Nonnull JInternalFrameFixture resizeWidthTo(int width) { driver().resizeWidth(target(), width); return this; } /** * Simulates a user resizing vertically this fixture's {@code JInternalFrame}. * * @param height the height that this fixture's {@code JInternalFrame} should have after being resized. * @return this fixture. */ @Override public @Nonnull JInternalFrameFixture resizeHeightTo(int height) { driver().resizeHeight(target(), height); return this; } /** * Simulates a user resizing this fixture's {@code JInternalFrame}. * * @param size the size that the target {@code JInternalFrame} should have after being resized. * @return this fixture. 
*/ @Override public @Nonnull JInternalFrameFixture resizeTo(@Nonnull Dimension size) { driver().resizeTo(target(), size); return this; } /** * Simulates a user moving this fixture's {@code JInternalFrame} to the given point. * * @param p the point to move this fixture's {@code JInternalFrame} to. * @return this fixture. */ @Override public @Nonnull JInternalFrameFixture moveTo(@Nonnull Point p) { driver().move(target(), p); return this; } }
/* * Copyright 2005-2014 The Kuali Foundation. * * Licensed under the Educational Community License, Version 1.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.kra.s2s.generator.impl; import gov.grants.apply.forms.nsfCoverPage13V13.NSFCoverPage13Document; import gov.grants.apply.forms.nsfCoverPage13V13.NSFCoverPage13Document.NSFCoverPage13; import gov.grants.apply.forms.nsfCoverPage13V13.NSFCoverPage13Document.NSFCoverPage13.NSFUnitConsideration; import gov.grants.apply.forms.nsfCoverPage13V13.NSFCoverPage13Document.NSFCoverPage13.OtherInfo; import gov.grants.apply.forms.nsfCoverPage13V13.NSFCoverPage13Document.NSFCoverPage13.PIInfo; import gov.grants.apply.system.attachmentsV10.AttachedFileDataType; import gov.grants.apply.system.attachmentsV10.AttachmentGroupMin1Max100DataType; import gov.grants.apply.system.globalLibraryV20.YesNoDataType; import org.apache.xmlbeans.XmlObject; import org.kuali.kra.bo.Organization; import org.kuali.kra.bo.OrganizationYnq; import org.kuali.kra.infrastructure.KraServiceLocator; import org.kuali.kra.proposaldevelopment.bo.DevelopmentProposal; import org.kuali.kra.proposaldevelopment.bo.Narrative; import org.kuali.kra.proposaldevelopment.bo.ProposalPerson; import org.kuali.kra.proposaldevelopment.bo.ProposalSite; import org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument; import org.kuali.kra.proposaldevelopment.questionnaire.ProposalPersonModuleQuestionnaireBean; import org.kuali.kra.questionnaire.answer.Answer; import 
org.kuali.kra.questionnaire.answer.AnswerHeader;
import org.kuali.kra.questionnaire.answer.QuestionnaireAnswerService;
import org.kuali.kra.s2s.generator.S2SQuestionnairing;
import org.kuali.kra.s2s.util.S2SConstants;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Generates the XML Document object for the grants.gov NSFCoverPageV1.3 form.
 * The form is built with the XMLBean APIs produced by compiling the
 * NSFCoverPageV1.3 schema.
 *
 * @author Kuali Research Administration Team (kualidev@oncourse.iu.edu)
 */
public class NSFCoverPageV1_3Generator extends NSFCoverPageBaseGenerator implements S2SQuestionnairing {

    /**
     * Builds the NSFCoverPage13Document from the proposal development document:
     * funding opportunity number, due date, NSF unit consideration, PI/other
     * info and the single-copy document attachments.
     *
     * @return the populated {@link NSFCoverPage13Document}
     */
    private NSFCoverPage13Document getNSFCoverPage13() {
        NSFCoverPage13Document nsfCoverPage13Document = NSFCoverPage13Document.Factory.newInstance();
        NSFCoverPage13 nsfCoverPage13 = NSFCoverPage13.Factory.newInstance();
        nsfCoverPage13.setFormVersion(S2SConstants.FORMVERSION_1_3);
        setFundingOpportunityNumber(nsfCoverPage13);
        if (pdDoc.getDevelopmentProposal().getS2sOpportunity() != null
                && pdDoc.getDevelopmentProposal().getS2sOpportunity().getClosingDate() != null) {
            nsfCoverPage13.setDueDate(dateTimeService.getCalendar(
                    pdDoc.getDevelopmentProposal().getS2sOpportunity().getClosingDate()));
        }
        nsfCoverPage13.setNSFUnitConsideration(getNSFUnitConsideration());
        setOtherInfo(nsfCoverPage13);
        AttachmentGroupMin1Max100DataType attachmentGroup =
                AttachmentGroupMin1Max100DataType.Factory.newInstance();
        attachmentGroup.setAttachedFileArray(getAttachedFileDataTypes());
        // The group type requires at least one attachment, so only set it when
        // something was actually attached.
        if (attachmentGroup.getAttachedFileArray().length > 0) {
            nsfCoverPage13.setSingleCopyDocuments(attachmentGroup);
        }
        nsfCoverPage13Document.setNSFCoverPage13(nsfCoverPage13);
        return nsfCoverPage13Document;
    }

    /**
     * Sets the funding opportunity number from the program announcement number,
     * truncated to the schema's maximum length when necessary. Leaves the field
     * unset when the proposal has no announcement number.
     *
     * @param nsfCoverPage13 the form being populated
     */
    private void setFundingOpportunityNumber(NSFCoverPage13 nsfCoverPage13) {
        String announcementNumber = pdDoc.getDevelopmentProposal().getProgramAnnouncementNumber();
        if (announcementNumber != null) {
            if (announcementNumber.length() > PROGRAM_ANNOUNCEMENT_NUMBER_MAX_LENGTH) {
                announcementNumber = announcementNumber.substring(0, PROGRAM_ANNOUNCEMENT_NUMBER_MAX_LENGTH);
            }
            nsfCoverPage13.setFundingOpportunityNumber(announcementNumber);
        }
    }

    /**
     * Populates the PIInfo (current PI) and OtherInfo (beginning investigator,
     * early concept grant, rapid response grant, accomplishment renewal, high
     * resolution graphics) sections from the form's questionnaire answers, and
     * sets the lobbying disclosure answer.
     *
     * @param nsfCoverPage13 the form being populated
     */
    private void setOtherInfo(NSFCoverPage13 nsfCoverPage13) {
        OtherInfo otherInfo = OtherInfo.Factory.newInstance();
        PIInfo pInfo = PIInfo.Factory.newInstance();
        for (Answer questionnaireAnswer : s2sUtilService.getQuestionnaireAnswers(
                pdDoc.getDevelopmentProposal(), getNamespace(), getFormName())) {
            String answer = questionnaireAnswer.getAnswer();
            if (answer == null) {
                continue;
            }
            int questionId = questionnaireAnswer.getQuestionNumber();
            switch (questionId) {
                case QUESTION_CURRENT_PI:
                    pInfo.setIsCurrentPI(toYesNo(answer));
                    break;
                case QUESTION_BEGIN_INVESTIGATOR:
                    otherInfo.setIsBeginInvestigator(toYesNo(answer));
                    break;
                case QUESTION_EARLY_CONCEPT_GRANT:
                    otherInfo.setIsEarlyConceptGrant(toYesNo(answer));
                    break;
                case QUESTION_RAPIDRESPONSE_GRANT:
                    otherInfo.setIsRapidResponseGrant(toYesNo(answer));
                    break;
                case QUESTION_ACCOMPLISHMENT_RENEWAL:
                    otherInfo.setIsAccomplishmentRenewal(toYesNo(answer));
                    break;
                case QUESTION_RESOLUTION_GRAPHICS:
                    otherInfo.setIsHighResolutionGraphics(toYesNo(answer));
                    break;
                default:
                    break;
            }
        }
        nsfCoverPage13.setPIInfo(pInfo);
        otherInfo.setIsDisclosureLobbyingActivities(getLobbyingAnswer());
        nsfCoverPage13.setOtherInfo(otherInfo);
    }

    /**
     * Maps a questionnaire answer string to the schema yes/no enumeration.
     *
     * @param answer a non-null questionnaire answer
     * @return Y_YES when the answer equals the proposal "Y" constant, N_NO otherwise
     */
    private YesNoDataType.Enum toYesNo(String answer) {
        return S2SConstants.PROPOSAL_YNQ_ANSWER_Y.equals(answer) ? YesNoDataType.Y_YES : YesNoDataType.N_NO;
    }

    /**
     * Returns the lobbying-activities disclosure answer. Checks the
     * certification questionnaire of every PI / co-PI first, then falls back to
     * the applicant organization's YNQ answers.
     *
     * @return Y_YES if any source answered yes, N_NO otherwise
     */
    private YesNoDataType.Enum getLobbyingAnswer() {
        for (ProposalPerson proposalPerson : pdDoc.getDevelopmentProposal().getProposalPersons()) {
            String roleId = proposalPerson.getProposalPersonRoleId();
            // FIX: the original condition "roleId != null && roleId.equals(PI)
            // || roleId.equals(CoPI)" still evaluated the second equals() when
            // roleId was null (&& binds tighter than ||), throwing an NPE.
            if (roleId == null
                    || !(roleId.equals(PRINCIPAL_INVESTIGATOR) || roleId.equals(PI_C0_INVESTIGATOR))) {
                continue;
            }
            ProposalPersonModuleQuestionnaireBean moduleQuestionnaireBean =
                    new ProposalPersonModuleQuestionnaireBean(pdDoc.getDevelopmentProposal(), proposalPerson);
            List<AnswerHeader> headers =
                    getQuestionnaireAnswerService().getQuestionnaireAnswer(moduleQuestionnaireBean);
            // FIX: guard against a person with no questionnaire answer headers
            // before dereferencing headers.get(0).
            if (headers == null || headers.isEmpty()) {
                continue;
            }
            for (Answer certificationAnswer : headers.get(0).getAnswers()) {
                if (certificationAnswer != null
                        && PROPOSAL_YNQ_LOBBYING_ACTIVITIES.equals(
                                certificationAnswer.getQuestion().getQuestionId())
                        && S2SConstants.PROPOSAL_YNQ_ANSWER_Y.equals(certificationAnswer.getAnswer())) {
                    return YesNoDataType.Y_YES;
                }
            }
        }
        Organization organization = getOrganizationFromDevelopmentProposal(pdDoc.getDevelopmentProposal());
        // FIX: default to an empty list so the loop below no longer NPEs when
        // the proposal has no applicant organization.
        List<OrganizationYnq> organizationYnqs = new ArrayList<OrganizationYnq>();
        if (organization != null && organization.getOrganizationId() != null) {
            organizationYnqs = getOrganizationYNQ(organization.getOrganizationId());
        }
        for (OrganizationYnq organizationYnq : organizationYnqs) {
            if (organizationYnq.getQuestionId().equals(LOBBYING_QUESTION_ID)
                    && getAnswerFromOrganizationYnq(organizationYnq)) {
                return YesNoDataType.Y_YES;
            }
        }
        return YesNoDataType.N_NO;
    }

    /**
     * @return the questionnaire answer service from the service locator
     */
    private QuestionnaireAnswerService getQuestionnaireAnswerService() {
        return KraServiceLocator.getService(QuestionnaireAnswerService.class);
    }

    /*
     * Returns true when the organization YNQ answer matches the indicator
     * value. Comparing constant-first also tolerates a null answer.
     */
    protected boolean getAnswerFromOrganizationYnq(OrganizationYnq organizationYnq) {
        return ANSWER_INDICATOR_VALUE.equals(organizationYnq.getAnswer());
    }

    /*
     * Fetches the organization YNQ rows for the given organization id.
     * NOTE(review): the original parameter was named "questionId", but every
     * caller passes an organization id and the value is used under the
     * ORGANIZATION_ID_PARAMETER key — renamed accordingly.
     */
    private List<OrganizationYnq> getOrganizationYNQ(String organizationId) {
        Map<String, String> criteria = new HashMap<String, String>();
        criteria.put(ORGANIZATION_ID_PARAMETER, organizationId);
        return (List<OrganizationYnq>) businessObjectService.findMatching(OrganizationYnq.class, criteria);
    }

    /*
     * Returns the applicant organization of the development proposal, or null
     * when the proposal has no applicant organization site.
     */
    private Organization getOrganizationFromDevelopmentProposal(DevelopmentProposal developmentProposal) {
        ProposalSite proposalSite = developmentProposal.getApplicantOrganization();
        return proposalSite == null ? null : proposalSite.getOrganization();
    }

    /**
     * Builds the NSFUnitConsideration element from the proposal's agency
     * division and program codes.
     *
     * @return the populated NSFUnitConsideration
     */
    private NSFUnitConsideration getNSFUnitConsideration() {
        NSFUnitConsideration nsfConsideration = NSFUnitConsideration.Factory.newInstance();
        nsfConsideration.setDivisionCode(pdDoc.getDevelopmentProposal().getAgencyDivisionCode());
        nsfConsideration.setProgramCode(pdDoc.getDevelopmentProposal().getAgencyProgramCode());
        return nsfConsideration;
    }

    /**
     * Collects the single-copy attachments (personal data, proprietary
     * information and single-copy documents) from the proposal narratives.
     *
     * @return the attachments in narrative order; possibly empty, never null
     */
    private AttachedFileDataType[] getAttachedFileDataTypes() {
        List<AttachedFileDataType> attachedFiles = new ArrayList<AttachedFileDataType>();
        for (Narrative narrative : pdDoc.getDevelopmentProposal().getNarratives()) {
            if (narrative.getNarrativeTypeCode() == null) {
                continue;
            }
            int narrativeTypeCode = Integer.parseInt(narrative.getNarrativeTypeCode());
            if (narrativeTypeCode == PERSONAL_DATA
                    || narrativeTypeCode == PROPRIETARY_INFORMATION
                    || narrativeTypeCode == SINGLE_COPY_DOCUMENT) {
                AttachedFileDataType attachedFile = getAttachedFileType(narrative);
                if (attachedFile != null) {
                    attachedFiles.add(attachedFile);
                }
            }
        }
        return attachedFiles.toArray(new AttachedFileDataType[attachedFiles.size()]);
    }

    /**
     * Creates the {@link XmlObject} of type {@link NSFCoverPage13Document} from
     * the given {@link ProposalDevelopmentDocument}.
     *
     * @param proposalDevelopmentDocument the proposal to generate the form for
     * @return the generated {@link XmlObject}
     * @see org.kuali.kra.s2s.generator.S2SFormGenerator#getFormObject(ProposalDevelopmentDocument)
     */
    public XmlObject getFormObject(ProposalDevelopmentDocument proposalDevelopmentDocument) {
        this.pdDoc = proposalDevelopmentDocument;
        return getNSFCoverPage13();
    }

    /**
     * Wraps an existing {@link NSFCoverPage13} element in a new
     * {@link NSFCoverPage13Document}.
     *
     * @param xmlObject the element to wrap; must be an NSFCoverPage13
     * @return the wrapping document
     * @see org.kuali.kra.s2s.generator.S2SFormGenerator#getFormObject(XmlObject)
     */
    public XmlObject getFormObject(XmlObject xmlObject) {
        NSFCoverPage13 nsfCoverPage13 = (NSFCoverPage13) xmlObject;
        NSFCoverPage13Document nsfCoverPage13Document = NSFCoverPage13Document.Factory.newInstance();
        nsfCoverPage13Document.setNSFCoverPage13(nsfCoverPage13);
        return nsfCoverPage13Document;
    }

    /** @return the grants.gov form name for this generator */
    public String getFormName() {
        return "NSF_CoverPage_1_3-V1.3";
    }

    /** @return the XML namespace of the NSFCoverPageV1.3 schema */
    public String getNamespace() {
        return "http://apply.grants.gov/forms/NSF_CoverPage_1_3-V1.3";
    }
}
// Copyright (c) 2014 Intel Corporation. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.xwalk.embedding.test;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.xwalk.core.XWalkResourceClient;
import org.xwalk.core.XWalkUIClient;
import org.xwalk.embedding.MainActivity;
import org.xwalk.embedding.base.XWalkViewTestBase;

import android.annotation.SuppressLint;
import android.content.Intent;
import android.os.Bundle;
import android.test.suitebuilder.annotation.SmallTest;
import android.util.Pair;
import android.view.KeyEvent;

/**
 * Instrumentation tests for the public XWalkView API: JavaScript bridge,
 * cache control, lifecycle callbacks, save/restore state, version strings,
 * client setters and key events.
 */
@SuppressLint("NewApi")
public class XWalkViewTest extends XWalkViewTestBase {

    /**
     * Runs the given action synchronously on the UI thread and fails the test
     * on any exception, printing the stack trace first so it is visible in
     * the logs. (Several original tests asserted before printing, which made
     * the printStackTrace() call unreachable.)
     */
    private void runOnUiThreadExpectingSuccess(Runnable action) {
        try {
            getInstrumentation().runOnMainSync(action);
        } catch (Exception e) {
            e.printStackTrace();
            assertTrue(false);
        }
    }

    @SmallTest
    public void testAddJavascriptInterface() {
        try {
            final String name = "add_js_interface.html";
            addJavascriptInterface();
            loadAssetFile(name);
            assertEquals(mExpectedStr, getTitleOnUiThread());
        } catch (Exception e) {
            e.printStackTrace();
            assertTrue(false);
        }
    }

    // FIX: was the only test in this class missing @SmallTest.
    @SmallTest
    public void testAddJavascriptInterfaceWithAnnotation() {
        try {
            final String name = "index.html";
            final String xwalkStr = "\"xwalk\"";
            addJavascriptInterface();
            loadAssetFile(name);
            String result = executeJavaScriptAndWaitForResult("testInterface.getText()");
            assertEquals(xwalkStr, result);
            // Calling a bridge method without the annotation must raise an
            // exception on the JS side; the page records it in the title.
            raisesExceptionAndSetTitle("testInterface.getTextWithoutAnnotation()");
            String title = getTitleOnUiThread();
            assertEquals(mExpectedStr, title);
        } catch (Exception e) {
            e.printStackTrace();
            assertTrue(false);
        } catch (Throwable e) {
            // FIX: the original swallowed the Throwable without logging it.
            e.printStackTrace();
            assertTrue(false);
        }
    }

    @SmallTest
    public void testEvaluateJavascript() {
        try {
            String changedTitle = "testEvaluateJavascript_ChangeTitle";
            String url = "file:///android_asset/p2bar.html";
            loadUrlSync(url, null);
            executeJavaScriptAndWaitForResult("document.title='" + changedTitle + "';");
            assertEquals(changedTitle, getTitleOnUiThread());
        } catch (Exception e) {
            e.printStackTrace();
            assertTrue(false);
        }
    }

    @SmallTest
    public void testClearCache() {
        try {
            final String pagePath = "/clear_cache_test.html";
            List<Pair<String, String>> headers = new ArrayList<Pair<String, String>>();
            // Set Cache-Control headers to cache this request. One century should be long enough.
            headers.add(Pair.create("Cache-Control", "max-age=3153600000"));
            headers.add(Pair.create("Last-Modified", "Tues, 12 September 2014 00:00:00 GMT"));
            final String pageUrl = mWebServer.setResponse(
                    pagePath, "<html><body>foo</body></html>", headers);

            // First load to populate cache.
            clearCacheOnUiThread(true);
            loadUrlSync(pageUrl);
            assertEquals(1, mWebServer.getRequestCount(pagePath));

            // Load about:blank so next load is not treated as reload by XWalkView and force
            // revalidate with the server.
            loadUrlSync("about:blank");

            // No clearCache call, so should be loaded from cache.
            loadUrlSync(pageUrl);
            assertEquals(1, mWebServer.getRequestCount(pagePath));

            // Same as above.
            loadUrlSync("about:blank");

            // Clear cache, so should hit server again.
            clearCacheOnUiThread(true);
            loadUrlSync(pageUrl);
            assertEquals(2, mWebServer.getRequestCount(pagePath));

            // Same as above.
            loadUrlSync("about:blank");

            // Do not clear cache, so should be loaded from cache.
            clearCacheOnUiThread(false);
            loadUrlSync(pageUrl);
            assertEquals(2, mWebServer.getRequestCount(pagePath));
        } catch (Exception e) {
            e.printStackTrace();
            assertTrue(false);
        }
    }

    @SmallTest
    public void testPauseTimers() {
        runOnUiThreadExpectingSuccess(new Runnable() {
            @Override
            public void run() {
                mXWalkView.pauseTimers();
            }
        });
    }

    @SmallTest
    public void testResumeTimers() {
        runOnUiThreadExpectingSuccess(new Runnable() {
            @Override
            public void run() {
                mXWalkView.pauseTimers();
                mXWalkView.resumeTimers();
            }
        });
    }

    @SmallTest
    public void testOnHide() {
        runOnUiThreadExpectingSuccess(new Runnable() {
            @Override
            public void run() {
                mXWalkView.onHide();
            }
        });
    }

    @SmallTest
    public void testOnShow() {
        runOnUiThreadExpectingSuccess(new Runnable() {
            @Override
            public void run() {
                mXWalkView.onShow();
            }
        });
    }

    @SmallTest
    public void testOnDestroy() {
        runOnUiThreadExpectingSuccess(new Runnable() {
            @Override
            public void run() {
                mXWalkView.onDestroy();
            }
        });
    }

    @SmallTest
    public void testOnActivityResult() {
        runOnUiThreadExpectingSuccess(new Runnable() {
            @Override
            public void run() {
                mXWalkView.onActivityResult(WAIT_TIMEOUT_SECONDS, NUM_NAVIGATIONS, null);
            }
        });
    }

    @SmallTest
    public void testOnNewIntent() {
        runOnUiThreadExpectingSuccess(new Runnable() {
            @Override
            public void run() {
                Intent intent = new Intent();
                intent.setClassName("org.xwalk.embedding", MainActivity.class.getName());
                mXWalkView.onNewIntent(intent);
            }
        });
    }

    @SmallTest
    public void testSaveState() {
        try {
            final Bundle state = new Bundle();
            state.putByteArray("XWALKVIEW_STATE", "valid state".getBytes());
            boolean result = runTestOnUiThreadAndGetResult(new Callable<Boolean>() {
                @Override
                public Boolean call() throws Exception {
                    return mXWalkView.saveState(state);
                }
            });
            assertTrue(result);
        } catch (Exception e) {
            e.printStackTrace();
            assertTrue(false);
        }
    }

    @SmallTest
    public void testSaveState_loadUrl() {
        try {
            setServerResponseAndLoad(NUM_NAVIGATIONS);
            saveAndRestoreStateOnUiThread();
            checkHistoryItemList();
        } catch (Throwable e) {
            e.printStackTrace();
            assertTrue(false);
        }
    }

    @SmallTest
    public void testRestoreState_trueResult() {
        try {
            final Bundle state = new Bundle();
            state.putByteArray("XWALKVIEW_STATE", "valid state".getBytes());
            loadUrlSync("file:///android_asset/p1bar.html/");
            boolean result = runTestOnUiThreadAndGetResult(new Callable<Boolean>() {
                @Override
                public Boolean call() throws Exception {
                    // Saving first makes the bundle restorable.
                    mXWalkView.saveState(state);
                    return mXWalkView.restoreState(state);
                }
            });
            assertTrue(result);
        } catch (Exception e) {
            e.printStackTrace();
            assertTrue(false);
        }
    }

    @SmallTest
    public void testRestoreState_falseResult() {
        try {
            final Bundle state = new Bundle();
            state.putByteArray("XWALKVIEW_STATE", "valid state".getBytes());
            loadUrlSync("file:///android_asset/p1bar.html/");
            boolean result = runTestOnUiThreadAndGetResult(new Callable<Boolean>() {
                @Override
                public Boolean call() throws Exception {
                    // Restoring a bundle that was never saved must fail.
                    return mXWalkView.restoreState(state);
                }
            });
            assertFalse(result);
        } catch (Exception e) {
            e.printStackTrace();
            assertTrue(false);
        }
    }

    @SmallTest
    public void testRestoreState_notLoadFirst() {
        try {
            final Bundle state = new Bundle();
            state.putByteArray("XWALKVIEW_STATE", "valid state".getBytes());
            boolean result = runTestOnUiThreadAndGetResult(new Callable<Boolean>() {
                @Override
                public Boolean call() throws Exception {
                    // Without a prior page load there is no state to save,
                    // so restore must fail.
                    mXWalkView.saveState(state);
                    return mXWalkView.restoreState(state);
                }
            });
            assertFalse(result);
        } catch (Exception e) {
            e.printStackTrace();
            assertTrue(false);
        }
    }

    @SmallTest
    public void testGetAPIVersion() {
        try {
            String version = getAPIVersionOnUiThread();
            // FIX: the original pattern "^[0-9]+(.[0-9]+)$" used an unescaped
            // '.', which matches any character (e.g. "1x2" would have passed).
            Pattern pattern = Pattern.compile("^[0-9]+(\\.[0-9]+)$");
            Matcher matcher = pattern.matcher(version);
            assertTrue("The API version is invalid.", matcher.find());
        } catch (Exception e) {
            e.printStackTrace();
            assertTrue(false);
        }
    }

    @SmallTest
    public void testGetXWalkVersion() {
        try {
            String version = getXWalkVersionOnUiThread();
            Pattern pattern = Pattern.compile("\\d+\\.\\d+\\.\\d+\\.\\d+");
            Matcher matcher = pattern.matcher(version);
            assertTrue("The Crosswalk version is invalid.", matcher.find());
        } catch (Exception e) {
            e.printStackTrace();
            assertTrue(false);
        }
    }

    @SmallTest
    public void testSetResourceClient() {
        runOnUiThreadExpectingSuccess(new Runnable() {
            @Override
            public void run() {
                mXWalkView.setResourceClient(new XWalkResourceClient(mXWalkView));
            }
        });
    }

    @SmallTest
    public void testSetUIClient() {
        runOnUiThreadExpectingSuccess(new Runnable() {
            @Override
            public void run() {
                mXWalkView.setUIClient(new XWalkUIClient(mXWalkView));
            }
        });
    }

    @SmallTest
    public void testOnKeyUp() {
        runOnUiThreadExpectingSuccess(new Runnable() {
            @Override
            public void run() {
                mXWalkView.onKeyUp(0, null);
            }
        });
    }

    @SmallTest
    public void testOnKeyDown() {
        runOnUiThreadExpectingSuccess(new Runnable() {
            @Override
            public void run() {
                mXWalkView.onKeyDown(65, new KeyEvent(0, 65));
            }
        });
    }
}
/* * Copyright (c) 2002-2022, City of Paris * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright notice * and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice * and the following disclaimer in the documentation and/or other materials * provided with the distribution. * * 3. Neither the name of 'Mairie de Paris' nor 'Lutece' nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
* * License 1.0 */ package fr.paris.lutece.portal.web.features; import fr.paris.lutece.portal.business.right.FeatureGroup; import fr.paris.lutece.portal.business.right.FeatureGroupHome; import fr.paris.lutece.portal.business.right.Right; import fr.paris.lutece.portal.business.right.RightHome; import fr.paris.lutece.portal.service.admin.AccessDeniedException; import fr.paris.lutece.portal.service.message.AdminMessage; import fr.paris.lutece.portal.service.message.AdminMessageService; import fr.paris.lutece.portal.service.security.SecurityTokenService; import fr.paris.lutece.portal.service.template.AppTemplateService; import fr.paris.lutece.portal.web.admin.AdminFeaturesPageJspBean; import fr.paris.lutece.portal.web.constants.Messages; import fr.paris.lutece.portal.web.dashboard.AdminDashboardJspBean; import fr.paris.lutece.util.ReferenceList; import fr.paris.lutece.util.html.HtmlTemplate; import fr.paris.lutece.util.url.UrlItem; import java.io.Serializable; import java.util.HashMap; import java.util.Map; import javax.servlet.http.HttpServletRequest; import org.apache.commons.collections.CollectionUtils; /** * FeaturesGroupJspBean */ public class FeaturesGroupJspBean extends AdminFeaturesPageJspBean { public static final String RIGHT_FEATURES_MANAGEMENT = "CORE_FEATURES_MANAGEMENT"; private static final long serialVersionUID = -8573499137269541850L; private static final String TEMPLATE_CREATE_GROUP = "admin/features/create_group.html"; private static final String TEMPLATE_MODIFY_GROUP = "admin/features/modify_group.html"; private static final String PARAMETER_GROUP_ID = "group_id"; private static final String PARAMETER_GROUP_NAME = "group_name"; private static final String PARAMETER_GROUP_DESCRIPTION = "group_description"; private static final String PARAMETER_GROUP_ORDER = "group_order"; private static final String PARAMETER_ORDER_ID = "order_id"; private static final String PARAMETER_RIGHT_ID = "right_id"; private static final String JSP_REMOVE_GROUPS = 
"jsp/admin/features/DoRemoveGroup.jsp"; private static final String MESSAGE_CONFIRM_DELETE = "portal.features.message.confirmDeleteGroup"; private static final String MESSAGE_RIGHT_ALREADY_ASSIGN = "portal.features.message.rightAlreadyAssign"; private static final String MARK_ORDER_LIST = "order_list"; private static final String MARK_FEATURE_GROUP = "feature_group"; private static final String MARK_DEFAULT_ORDER = "order_default"; private static final String REGEX_ID = "^[\\d]+$"; private static final String ANCHOR_ADMIN_DASHBOARDS = "features_management"; /** * Dispatch a feature to a given group * * @param request * The HTTP request * @return The next URL to redirect after processing * @throws AccessDeniedException * if the security token is invalid */ public String doDispatchFeature( HttpServletRequest request ) throws AccessDeniedException { if ( !SecurityTokenService.getInstance( ).validate( request, AdminDashboardJspBean.TEMPLATE_MANAGE_DASHBOARDS ) ) { throw new AccessDeniedException( ERROR_INVALID_TOKEN ); } String strRightId = request.getParameter( PARAMETER_RIGHT_ID ); String strGroupName = request.getParameter( PARAMETER_GROUP_NAME ); String strOrderId = request.getParameter( PARAMETER_ORDER_ID ); Right right = RightHome.findByPrimaryKey( strRightId ); UrlItem url = new UrlItem( getDashboardUrl( request ) ); if ( ( strGroupName != null ) ) { // Set the old group as anchor url.setAnchor( right.getFeatureGroup( ) ); right.setFeatureGroup( strGroupName.equals( "" ) ? 
null : strGroupName ); } if ( ( strOrderId != null ) && strOrderId.matches( REGEX_ID ) ) { right.setOrder( Integer.parseInt( strOrderId ) ); } RightHome.update( right ); return url.getUrl( ); } /** * Dispatch a feature group * * @param request * The HTTP request * @return The next URL to redirect after processing * @throws AccessDeniedException * if the security token is invalid */ public String doDispatchFeatureGroup( HttpServletRequest request ) throws AccessDeniedException { if ( !SecurityTokenService.getInstance( ).validate( request, AdminDashboardJspBean.TEMPLATE_MANAGE_DASHBOARDS ) ) { throw new AccessDeniedException( ERROR_INVALID_TOKEN ); } String strGroupId = request.getParameter( PARAMETER_GROUP_ID ); String strOrderId = request.getParameter( PARAMETER_ORDER_ID ); FeatureGroup featureGroup = FeatureGroupHome.findByPrimaryKey( strGroupId ); UrlItem url = new UrlItem( getDashboardUrl( request ) ); if ( ( strOrderId != null ) && strOrderId.matches( REGEX_ID ) ) { featureGroup.setOrder( Integer.parseInt( strOrderId ) ); } FeatureGroupHome.update( featureGroup ); return url.getUrl( ); } /** * Reinitialize feature orders * * @param request * The {@link HttpServletRequest} * @return The next URL to redirect after processing * @throws AccessDeniedException * if the security token is invalid */ public String doReinitFeatures( HttpServletRequest request ) throws AccessDeniedException { if ( !SecurityTokenService.getInstance( ).validate( request, AdminDashboardJspBean.TEMPLATE_MANAGE_DASHBOARDS ) ) { throw new AccessDeniedException( ERROR_INVALID_TOKEN ); } String strGroupId = request.getParameter( PARAMETER_GROUP_ID ); RightHome.reinitFeatureOrders( strGroupId ); UrlItem url = new UrlItem( getDashboardUrl( request ) ); if ( ( strGroupId != null ) ) { url.setAnchor( strGroupId ); } return url.getUrl( ); } /** * Returns the Create Group page * * @param request * The HTTP request * @return The HTML page */ public String getCreateGroup( HttpServletRequest request ) { 
int nCount = FeatureGroupHome.getFeatureGroupsCount( ) + 1; Map<String, Serializable> model = new HashMap<>( ); model.put( MARK_ORDER_LIST, getOrderRefList( ) ); model.put( MARK_DEFAULT_ORDER, String.valueOf( nCount ) ); model.put( SecurityTokenService.MARK_TOKEN, SecurityTokenService.getInstance( ).getToken( request, AdminDashboardJspBean.TEMPLATE_MANAGE_DASHBOARDS ) ); HtmlTemplate t = AppTemplateService.getTemplate( TEMPLATE_CREATE_GROUP, getLocale( ), model ); return getAdminPage( t.getHtml( ) ); } /** * Returns the Modify Group page * * @param request * The HTTP request * @return The HTML page */ public String getModifyGroup( HttpServletRequest request ) { String strGroupId = request.getParameter( PARAMETER_GROUP_ID ); FeatureGroup group = FeatureGroupHome.findByPrimaryKey( strGroupId ); if ( group == null ) { return getDashboardUrl( request ); } Map<String, Object> model = new HashMap<>( ); model.put( MARK_ORDER_LIST, getOrderRefList( ) ); model.put( MARK_FEATURE_GROUP, group ); model.put( SecurityTokenService.MARK_TOKEN, SecurityTokenService.getInstance( ).getToken( request, AdminDashboardJspBean.TEMPLATE_MANAGE_DASHBOARDS ) ); HtmlTemplate t = AppTemplateService.getTemplate( TEMPLATE_MODIFY_GROUP, getLocale( ), model ); return getAdminPage( t.getHtml( ) ); } /** * Create the group * * @param request * The HTTP request * @return The next URL to redirect after processing * @throws AccessDeniedException * if the security token is invalid */ public String doCreateGroup( HttpServletRequest request ) throws AccessDeniedException { String strGroupId = request.getParameter( PARAMETER_GROUP_ID ); String strGroupName = request.getParameter( PARAMETER_GROUP_NAME ); String strGroupDescription = request.getParameter( PARAMETER_GROUP_DESCRIPTION ); String strGroupOrder = request.getParameter( PARAMETER_GROUP_ORDER ); // Mandatory fields if ( strGroupId.equals( "" ) || strGroupName.equals( "" ) || strGroupDescription.equals( "" ) ) { return 
AdminMessageService.getMessageUrl( request, Messages.MANDATORY_FIELDS, AdminMessage.TYPE_STOP ); } if ( !SecurityTokenService.getInstance( ).validate( request, AdminDashboardJspBean.TEMPLATE_MANAGE_DASHBOARDS ) ) { throw new AccessDeniedException( ERROR_INVALID_TOKEN ); } FeatureGroup group = new FeatureGroup( ); group.setId( strGroupId ); group.setLabelKey( strGroupName ); group.setDescriptionKey( strGroupDescription ); FeatureGroupHome.create( group ); group.setOrder( Integer.parseInt( strGroupOrder ) ); FeatureGroupHome.update( group ); return getDashboardUrl( request ); } /** * Modify the group * * @param request * The HTTP request * @return The next URL to redirect after processing * @throws AccessDeniedException * is the security token is invalid */ public String doModifyGroup( HttpServletRequest request ) throws AccessDeniedException { String strGroupId = request.getParameter( PARAMETER_GROUP_ID ); String strGroupName = request.getParameter( PARAMETER_GROUP_NAME ); String strGroupDescription = request.getParameter( PARAMETER_GROUP_DESCRIPTION ); String strGroupOrder = request.getParameter( PARAMETER_GROUP_ORDER ); // Mandatory fields if ( strGroupId.equals( "" ) || strGroupName.equals( "" ) || strGroupDescription.equals( "" ) ) { return AdminMessageService.getMessageUrl( request, Messages.MANDATORY_FIELDS, AdminMessage.TYPE_STOP ); } if ( !SecurityTokenService.getInstance( ).validate( request, AdminDashboardJspBean.TEMPLATE_MANAGE_DASHBOARDS ) ) { throw new AccessDeniedException( ERROR_INVALID_TOKEN ); } FeatureGroup group = FeatureGroupHome.findByPrimaryKey( strGroupId ); group.setLabelKey( strGroupName ); group.setDescriptionKey( strGroupDescription ); group.setOrder( Integer.parseInt( strGroupOrder ) ); FeatureGroupHome.update( group ); return getDashboardUrl( request ); } /** * Generate an HTML combo of available group order * * @return The reference list of orders */ private ReferenceList getOrderRefList( ) { int nGroupsCount = 
FeatureGroupHome.getFeatureGroupsCount( ); ReferenceList listOrders = new ReferenceList( ); for ( int i = 0; i < nGroupsCount; i++ ) { listOrders.addItem( i + 1, Integer.toString( i + 1 ) ); } return listOrders; } /** * Returns the Remove page * * @param request * The HTTP request * @return The HTML page */ public String getRemoveGroup( HttpServletRequest request ) { String strGroupId = request.getParameter( PARAMETER_GROUP_ID ); String strUrl = JSP_REMOVE_GROUPS; Map<String, Object> parameters = new HashMap<>( ); parameters.put( PARAMETER_GROUP_ID, strGroupId ); parameters.put( SecurityTokenService.PARAMETER_TOKEN, SecurityTokenService.getInstance( ).getToken( request, AdminDashboardJspBean.TEMPLATE_MANAGE_DASHBOARDS ) ); FeatureGroup group = FeatureGroupHome.findByPrimaryKey( strGroupId ); group.setLocale( getUser( ).getLocale( ) ); Object [ ] messageArgs = { group.getLabel( ) }; return AdminMessageService.getMessageUrl( request, MESSAGE_CONFIRM_DELETE, messageArgs, null, strUrl, "", AdminMessage.TYPE_CONFIRMATION, parameters ); } /** * Remove the group * * @param request * The HTTP request * @return The next URL to redirect after processing * @throws AccessDeniedException * if the security token is invalid */ public String doRemoveGroup( HttpServletRequest request ) throws AccessDeniedException { String strGroupId = request.getParameter( PARAMETER_GROUP_ID ); if ( CollectionUtils.isNotEmpty( RightHome.getRightsList( strGroupId ) ) ) { return AdminMessageService.getMessageUrl( request, MESSAGE_RIGHT_ALREADY_ASSIGN, AdminMessage.TYPE_STOP ); } if ( !SecurityTokenService.getInstance( ).validate( request, AdminDashboardJspBean.TEMPLATE_MANAGE_DASHBOARDS ) ) { throw new AccessDeniedException( ERROR_INVALID_TOKEN ); } FeatureGroupHome.remove( strGroupId ); return getDashboardUrl( request ); } /** * Returns the dashboard URL * * @param request * The HTTP request * @return the dashboard URL */ private String getDashboardUrl( HttpServletRequest request ) { return 
getAdminDashboardsUrl( request, ANCHOR_ADMIN_DASHBOARDS ); } }
package edu.psu.compbio.seqcode.gse.deepseq.utilities;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

import org.apache.log4j.Logger;

import edu.psu.compbio.seqcode.gse.datasets.general.NamedRegion;
import edu.psu.compbio.seqcode.gse.datasets.general.Region;
import edu.psu.compbio.seqcode.gse.datasets.seqdata.SeqLocator;
import edu.psu.compbio.seqcode.gse.datasets.species.Genome;
import edu.psu.compbio.seqcode.gse.datasets.species.Organism;
import edu.psu.compbio.seqcode.gse.deepseq.DeepSeqExpt;
import edu.psu.compbio.seqcode.gse.deepseq.ReadHit;
import edu.psu.compbio.seqcode.gse.deepseq.discovery.SingleConditionFeatureFinder;
import edu.psu.compbio.seqcode.gse.ewok.verbs.ChromRegionIterator;
import edu.psu.compbio.seqcode.gse.tools.utils.Args;
import edu.psu.compbio.seqcode.gse.utils.ArgParser;
import edu.psu.compbio.seqcode.gse.utils.NotFoundException;
import edu.psu.compbio.seqcode.gse.utils.Pair;

/**
 * Outputs a fixed-step WIG format file for a deep-seq experiment.
 *
 * <p>Reads are loaded per 100Mbp chromosome chunk, optionally extended at the
 * 5'/3' ends, binned into fixed windows and written as fixedStep WIG records.
 *
 * @author Shaun Mahony
 * @version %I%, %G%
 */
public class WIGExporter {

    private Organism org;                     // currently unset; kept for subclass/interface stability
    private Genome gen;                       // genome providing chromosome names and lengths
    protected DeepSeqExpt expt;               // experiment whose hits are exported
    protected int [] stackedHitCounts;        // unused here; execute() builds a local landscape instead
    private int winSize=20, winStep=20;       // WIG bin size and step (step is forced equal to size)
    private int readLength=1, read5PrimeExt=0, read3PrimeExt=200;
    private String outName="out";
    private String trackName="out";           // defaults to outName if left as "out"
    private String trackDesc="out";           // defaults to outName if left as "out"
    private String trackColor="0,0,255";
    private int trackYMax=-1;                 // <=0 means let the browser auto-scale
    private boolean dbconnected=false;
    private int perBaseMax=10;                // per-base weight cap, active only with needle filtering
    private boolean needlefiltering=false;    // enabled iff --pbmax was given explicitly
    // FIX: log under WIGExporter rather than SingleConditionFeatureFinder (copy-paste slip).
    private static final Logger logger = Logger.getLogger(WIGExporter.class);

    public static void main(String[] args) throws SQLException, NotFoundException {
        WIGExporter wig = new WIGExporter(args);
        wig.execute();
    }

    /**
     * Parses command-line options, loads the experiment and configures read extensions.
     * Exits the JVM with status 1 on missing arguments or an invalid experiment spec.
     *
     * @param args command-line arguments (see usage message)
     */
    public WIGExporter(String [] args) {
        if (args.length == 0) {
            System.err.println("WIGExporter usage:\n" +
                    "\t--species <organism;genome>\n" +
                    "\t--(rdb)expt <experiment names>\n" +
                    "\t--read5ext <Extension on the 5' end>\n" +
                    "\t--read3ext <Extension on the 3' end>\n" +
                    "\t--pbmax <max read count per base>\n" +
                    "\t--winsize <window size/step in WIG file>\n" +
                    "\t--name <string to use as track name>\n" +
                    "\t--description <string to use as track description>\n" +
                    "\t--ylimit <default track y max>\n" +
                    "\t--color <R,G,B>\n" +
                    "\t--out <output file name>");
            System.exit(1);
        }
        ArgParser ap = new ArgParser(args);
        try {
            if (ap.hasKey("species")) {
                Pair<Organism, Genome> pair = Args.parseGenome(args);
                if (pair != null) {
                    gen = pair.cdr();
                    dbconnected = true;
                }
            } else {
                // No DB genome: build a fake genome from a chromosome-length file, if provided.
                if (ap.hasKey("geninfo") || ap.hasKey("g")) {
                    String fName = ap.hasKey("geninfo") ? ap.getKeyValue("geninfo") : ap.getKeyValue("g");
                    gen = new Genome("Genome", new File(fName), true);
                } else {
                    gen = null;
                }
            }
        } catch (NotFoundException e) {
            // FIX: route through the logger instead of a bare stack trace.
            logger.error("Genome not found", e);
        }
        outName       = Args.parseString(args, "out", outName);
        trackName     = Args.parseString(args, "name", trackName);
        trackDesc     = Args.parseString(args, "description", trackDesc);
        trackColor    = Args.parseString(args, "color", trackColor);
        read5PrimeExt = Args.parseInteger(args, "read5ext", read5PrimeExt);
        read3PrimeExt = Args.parseInteger(args, "read3ext", read3PrimeExt);
        readLength    = Args.parseInteger(args, "readlen", readLength);
        winSize       = Args.parseInteger(args, "winsize", winSize);
        perBaseMax    = Args.parseInteger(args, "pbmax", perBaseMax);
        if (ap.hasKey("pbmax")) { needlefiltering = true; }
        if (ap.hasKey("ylimit")) { trackYMax = Args.parseInteger(args, "ylimit", -1); }
        winStep = winSize;

        // Load the experiments
        List<SeqLocator> dbexpts  = Args.parseSeqExpt(args, "dbexpt");
        List<SeqLocator> rdbexpts = Args.parseSeqExpt(args, "rdbexpt");
        List<File> expts          = Args.parseFileHandles(args, "expt");
        // FIX: redundant "? true : false" removed.
        boolean nonUnique = ap.hasKey("nonunique");
        String fileFormat = Args.parseString(args, "format", "ELAND");
        if (expts.size() > 0 && dbexpts.size() == 0 && rdbexpts.size() == 0) {
            expt = new DeepSeqExpt(gen, expts, nonUnique, fileFormat, readLength);
        } else if (dbexpts.size() > 0 && expts.size() == 0) {
            expt = new DeepSeqExpt(gen, dbexpts, "db", readLength);
            dbconnected = true;
        } else if (rdbexpts.size() > 0 && expts.size() == 0) {
            expt = new DeepSeqExpt(gen, rdbexpts, "readdb", -1);
            dbconnected = true;
        } else {
            logger.error("Must provide either an aligner output file or Gifford lab DB experiment name for the signal experiment (but not both)");
            System.exit(1);
        }
        logger.info("Expt hit count: " + (int) expt.getHitCount() + ", weight: " + (int) expt.getWeightTotal());

        // --read3ext is the total 3' footprint; convert to extension beyond the read itself.
        read3PrimeExt = Math.max(0, read3PrimeExt - readLength);
        expt.setFivePrimeExt(read5PrimeExt);
        expt.setThreePrimeExt(read3PrimeExt);
    }

    /**
     * Writes the WIG file: a track header followed by fixedStep blocks for every
     * run of non-zero windows. Progress dots are printed to stdout.
     */
    public void execute() {
        // FIX: try-with-resources guarantees the writer is closed even if a write fails
        // (the original leaked the FileWriter on the exception path).
        try (FileWriter fw = new FileWriter(outName + ".wig")) {
            double basesDone = 0, printStep = 10000000, numPrint = 0;
            if (trackName.equals("out")) trackName = outName;
            if (trackDesc.equals("out")) trackDesc = outName;

            // Print the header
            fw.write("track type=wiggle_0 name=\"" + trackName + "\" description=\"" + trackDesc
                    + " summary\"" + " visibility=full color=" + trackColor + " ");
            if (trackYMax > 0)
                fw.write("autoScale=off viewLimits=0:" + trackYMax + " ");
            fw.write("\n");

            ChromRegionIterator chroms = new ChromRegionIterator(gen);
            while (chroms.hasNext()) {
                NamedRegion currentRegion = chroms.next();
                // Split the job up into chunks of 100Mbp to bound memory use.
                for (int x = currentRegion.getStart(); x <= currentRegion.getEnd(); x += 100000000) {
                    int y = x + 100000000;
                    if (y > currentRegion.getEnd()) { y = currentRegion.getEnd(); }
                    Region currSubRegion = new Region(gen, currentRegion.getChrom(), x, y);

                    ArrayList<ReadHit> hits = new ArrayList<ReadHit>();
                    hits.addAll(expt.loadExtHits(currSubRegion));
                    // FIX: renamed local so it no longer shadows the stackedHitCounts field.
                    double[] landscape = makeHitLandscape(hits, currSubRegion, perBaseMax, '.');

                    boolean recording = false;
                    // Scan windows; open a new fixedStep block at each zero->non-zero transition.
                    for (int i = currSubRegion.getStart(); i < currSubRegion.getEnd() - winSize; i += winStep) {
                        Region currWin = new Region(gen, currentRegion.getChrom(), i, i + winSize - 1);
                        int binid = (int) Math.max(0, ((double) (currWin.getStart() - currSubRegion.getStart()) / winStep));
                        double winHits = landscape[binid];
                        if (winHits > 0) {
                            if (!recording) {
                                // WIG coordinates are 1-based, hence i+1.
                                fw.write("fixedStep chrom=chr" + currSubRegion.getChrom() + " start=" + (i + 1)
                                        + " step=" + winStep + " span=" + winSize + "\n");
                                recording = true;
                            }
                            fw.write(String.format("%.1f\n", winHits));
                        } else {
                            recording = false;
                        }
                        // Print out progress
                        basesDone += winStep;
                        if (basesDone > numPrint * printStep) {
                            if (numPrint % 10 == 0) { System.out.print(String.format("(%.0f)", (numPrint * printStep))); }
                            else { System.out.print("."); }
                            if (numPrint % 50 == 0 && numPrint != 0) { System.out.print("\n"); }
                            numPrint++;
                        }
                    }
                }
            }
            System.out.print("\n");
        } catch (IOException e) {
            // FIX: report through the logger (with cause) instead of printStackTrace().
            logger.error("Failed writing WIG file " + outName + ".wig", e);
        }
    }

    /**
     * Bins (extended) read hits into winStep-sized bins over currReg.
     *
     * @param hits       read hits overlapping currReg
     * @param currReg    region being scanned
     * @param perBaseMax maximum summed weight allowed per 5' position (needle filtering)
     * @param strand     '.' for both strands, otherwise restrict to the matching strand
     * @return per-bin summed read weights (length numBins+1)
     */
    protected double[] makeHitLandscape(ArrayList<ReadHit> hits, Region currReg, int perBaseMax, char strand) {
        int numBins = currReg.getWidth() / winStep;
        // Java zero-initializes arrays, so no explicit clearing loops are needed.
        double[] landscape = new double[numBins + 1];
        double[] startcounts = new double[currReg.getWidth() + 1];
        for (ReadHit r : hits) {
            if (strand == '.' || r.getStrand() == strand) {
                int offset5 = inBounds(r.getFivePrime() - currReg.getStart(), 0, currReg.getWidth());
                if (!needlefiltering || (startcounts[offset5] <= perBaseMax)) {
                    int binstart = inBounds((int) ((double) (r.getStart() - currReg.getStart()) / winStep), 0, numBins);
                    int binend   = inBounds((int) ((double) (r.getEnd() - currReg.getStart()) / winStep), 0, numBins);
                    for (int i = binstart; i <= binend; i++) {
                        landscape[i] += r.getWeight();
                    }
                    // Saturate the per-base counter at perBaseMax so the cap test above stays simple.
                    if (needlefiltering && (startcounts[offset5] + r.getWeight() > perBaseMax))
                        startcounts[offset5] = perBaseMax;
                    else
                        startcounts[offset5] += r.getWeight();
                }
            }
        }
        return landscape;
    }

    /** Clamps x into [min, max]. */
    protected final double inBounds(double x, double min, double max) {
        if (x < min) { return min; }
        if (x > max) { return max; }
        return x;
    }

    /** Clamps x into [min, max]. */
    protected final int inBounds(int x, int min, int max) {
        if (x < min) { return min; }
        if (x > max) { return max; }
        return x;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.management.internal.cli.commands;

import static org.apache.geode.distributed.ConfigurationProperties.HTTP_SERVICE_BIND_ADDRESS;
import static org.apache.geode.distributed.ConfigurationProperties.HTTP_SERVICE_PORT;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;

import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Properties;
import java.util.Set;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import org.apache.geode.distributed.ConfigurationProperties;
import org.apache.geode.distributed.LocatorLauncher;
import org.apache.geode.distributed.internal.DistributionConfig;
import org.apache.geode.internal.lang.SystemUtils;
import org.apache.geode.test.junit.categories.UnitTest;

/**
 * Unit tests for {@code StartLocatorCommand}: locator classpath ordering, the generated
 * {@code LocatorLauncher} command line (default, REST-API and all-options variants), and the
 * JVM options added to handle OutOfMemoryError per VM vendor.
 */
@Category(UnitTest.class)
public class StartLocatorCommandTest {

  // Fresh instance per test (created in setup, cleared in tearDown).
  private StartLocatorCommand locatorCommands;

  @Before
  public void setup() {
    locatorCommands = new StartLocatorCommand();
  }

  @After
  public void tearDown() {
    locatorCommands = null;
  }

  /**
   * Classpath order must be: geode jar, user classpath, system classpath, core dependencies jar.
   */
  @Test
  public void testLocatorClasspathOrder() {
    String userClasspath = "/path/to/user/lib/app.jar:/path/to/user/classes";
    String expectedClasspath =
        StartMemberUtils.getGemFireJarPath().concat(File.pathSeparator).concat(userClasspath)
            .concat(File.pathSeparator).concat(System.getProperty("java.class.path"))
            .concat(File.pathSeparator).concat(StartMemberUtils.CORE_DEPENDENCIES_JAR_PATHNAME);
    String actualClasspath = locatorCommands.getLocatorClasspath(true, userClasspath);
    assertEquals(expectedClasspath, actualClasspath);
  }

  /**
   * HTTP service properties must surface as -Dgemfire.* system properties on the command line.
   * Elements are lower-cased before removal, so every expected entry is written in lower case.
   */
  @Test
  public void testLocatorCommandLineWithRestAPI() throws Exception {
    LocatorLauncher locatorLauncher =
        new LocatorLauncher.Builder().setCommand(LocatorLauncher.Command.START)
            .setMemberName("testLocatorCommandLineWithRestAPI").setBindAddress("localhost")
            .setPort(11111).build();

    Properties gemfireProperties = new Properties();
    gemfireProperties.setProperty(HTTP_SERVICE_PORT, "8089");
    gemfireProperties.setProperty(HTTP_SERVICE_BIND_ADDRESS, "localhost");

    String[] commandLineElements = locatorCommands.createStartLocatorCommandLine(locatorLauncher,
        null, null, gemfireProperties, null, false, new String[0], null, null);

    assertNotNull(commandLineElements);
    assertTrue(commandLineElements.length > 0);

    Set<String> expectedCommandLineElements = new HashSet<>(6);
    expectedCommandLineElements.add(locatorLauncher.getCommand().getName());
    expectedCommandLineElements.add(locatorLauncher.getMemberName().toLowerCase());
    expectedCommandLineElements.add(String.format("--port=%1$d", locatorLauncher.getPort()));
    // "-d" (not "-D") because the comparison below lower-cases actual elements first.
    expectedCommandLineElements
        .add("-d" + DistributionConfig.GEMFIRE_PREFIX + "" + HTTP_SERVICE_PORT + "=" + "8089");
    expectedCommandLineElements.add("-d" + DistributionConfig.GEMFIRE_PREFIX + ""
        + HTTP_SERVICE_BIND_ADDRESS + "=" + "localhost");

    // Every expected element must appear in the actual command line (set drains to empty).
    for (String commandLineElement : commandLineElements) {
      expectedCommandLineElements.remove(commandLineElement.toLowerCase());
    }
    assertTrue(String.format("Expected ([]); but was (%1$s)", expectedCommandLineElements),
        expectedCommandLineElements.isEmpty());
  }

  /**
   * Verifies the OutOfMemoryError JVM option chosen for the current VM vendor/OS.
   * NOTE(review): this exercises the private mirror helper below, not production code — confirm
   * the helper stays in sync with StartMemberUtils.
   */
  @Test
  public void testAddJvmOptionsForOutOfMemoryErrors() {
    final List<String> jvmOptions = new ArrayList<>(1);
    addJvmOptionsForOutOfMemoryErrors(jvmOptions);
    if (SystemUtils.isHotSpotVM()) {
      if (SystemUtils.isWindows()) {
        assertTrue(jvmOptions.contains("-XX:OnOutOfMemoryError=taskkill /F /PID %p"));
      } else {
        assertTrue(jvmOptions.contains("-XX:OnOutOfMemoryError=kill -KILL %p"));
      }
    } else if (SystemUtils.isJ9VM()) {
      assertEquals(1, jvmOptions.size());
      assertTrue(jvmOptions.contains("-Xcheck:memory"));
    } else if (SystemUtils.isJRockitVM()) {
      assertEquals(1, jvmOptions.size());
      assertTrue(jvmOptions.contains("-XXexitOnOutOfMemory"));
    } else {
      assertTrue(jvmOptions.isEmpty());
    }
  }

  /** Appends the vendor-specific JVM option that kills/flags the process on OutOfMemoryError. */
  private void addJvmOptionsForOutOfMemoryErrors(final List<String> commandLine) {
    if (SystemUtils.isHotSpotVM()) {
      if (SystemUtils.isWindows()) {
        // ProcessBuilder "on Windows" needs every word (space separated) to be
        // a different element in the array/list. See #47312. Need to study why!
        commandLine.add("-XX:OnOutOfMemoryError=taskkill /F /PID %p");
      } else { // All other platforms (Linux, Mac OS X, UNIX, etc)
        commandLine.add("-XX:OnOutOfMemoryError=kill -KILL %p");
      }
    } else if (SystemUtils.isJ9VM()) {
      // NOTE IBM states the following IBM J9 JVM command-line option/switch has side-effects on
      // "performance", as noted in the reference documentation...
      // http://publib.boulder.ibm.com/infocenter/javasdk/v6r0/index.jsp?topic=/com.ibm.java.doc.diagnostics.60/diag/appendixes/cmdline/commands_jvm.html
      commandLine.add("-Xcheck:memory");
    } else if (SystemUtils.isJRockitVM()) {
      // NOTE the following Oracle JRockit JVM documentation was referenced to identify the
      // appropriate JVM option to set when handling OutOfMemoryErrors.
      // http://docs.oracle.com/cd/E13150_01/jrockit_jvm/jrockit/jrdocs/refman/optionXX.html
      commandLine.add("-XXexitOnOutOfMemory");
    }
  }

  /** Default launcher build: checks the exact, complete set of generated command-line elements. */
  @Test
  public void testCreateStartLocatorCommandLine() throws Exception {
    LocatorLauncher locatorLauncher = new LocatorLauncher.Builder().setMemberName("defaultLocator")
        .setCommand(LocatorLauncher.Command.START).build();

    String[] commandLineElements = locatorCommands.createStartLocatorCommandLine(locatorLauncher,
        null, null, new Properties(), null, false, null, null, null);

    Set<String> expectedCommandLineElements = new HashSet<>();
    expectedCommandLineElements.add(StartMemberUtils.getJavaPath());
    expectedCommandLineElements.add("-server");
    expectedCommandLineElements.add("-classpath");
    expectedCommandLineElements.add(StartMemberUtils.getGemFireJarPath().concat(File.pathSeparator)
        .concat(StartMemberUtils.CORE_DEPENDENCIES_JAR_PATHNAME));
    expectedCommandLineElements.add("-Dgemfire.launcher.registerSignalHandlers=true");
    expectedCommandLineElements.add("-Djava.awt.headless=true");
    expectedCommandLineElements.add("-Dsun.rmi.dgc.server.gcInterval=9223372036854775806");
    expectedCommandLineElements.add("org.apache.geode.distributed.LocatorLauncher");
    expectedCommandLineElements.add("start");
    expectedCommandLineElements.add("defaultLocator");
    expectedCommandLineElements.add("--port=10334");

    assertNotNull(commandLineElements);
    assertTrue(commandLineElements.length > 0);
    // Exact match: same count AND every actual element drains the expected set to empty.
    assertEquals(commandLineElements.length, expectedCommandLineElements.size());
    for (String commandLineElement : commandLineElements) {
      expectedCommandLineElements.remove(commandLineElement);
    }
    assertTrue(String.format("Expected ([]); but was (%1$s)", expectedCommandLineElements),
        expectedCommandLineElements.isEmpty());
  }

  /**
   * All-options launcher build: property files, custom classpath, heap sizes and JVM arguments
   * must all surface on the generated command line (subset check via assertThat/containsAll).
   */
  @Test
  public void testCreateStartLocatorCommandLineWithAllOptions() throws Exception {
    LocatorLauncher locatorLauncher =
        new LocatorLauncher.Builder().setCommand(LocatorLauncher.Command.START)
            .setDebug(Boolean.TRUE).setDeletePidFileOnStop(Boolean.TRUE).setForce(Boolean.TRUE)
            .setHostnameForClients("localhost").setMemberName("customLocator").setPort(10101)
            .setRedirectOutput(Boolean.TRUE).build();

    // Mocked files supply only getAbsolutePath(); no real files are touched.
    File gemfirePropertiesFile = spy(mock(File.class));
    when(gemfirePropertiesFile.getAbsolutePath()).thenReturn("/config/customGemfire.properties");

    File gemfireSecurityPropertiesFile = spy(mock(File.class));
    when(gemfireSecurityPropertiesFile.getAbsolutePath())
        .thenReturn("/config/customGemfireSecurity.properties");

    Properties gemfireProperties = new Properties();
    gemfireProperties.setProperty(ConfigurationProperties.STATISTIC_SAMPLE_RATE, "1500");
    gemfireProperties.setProperty(ConfigurationProperties.DISABLE_AUTO_RECONNECT, "true");

    String heapSize = "1024m";
    String customClasspath = "/temp/domain-1.0.0.jar";
    String[] jvmArguments = new String[] {"-verbose:gc", "-Xloggc:member-gc.log",
        "-XX:+PrintGCDateStamps", "-XX:+PrintGCDetails"};

    String[] commandLineElements = locatorCommands.createStartLocatorCommandLine(locatorLauncher,
        gemfirePropertiesFile, gemfireSecurityPropertiesFile, gemfireProperties, customClasspath,
        Boolean.FALSE, jvmArguments, heapSize, heapSize);

    Set<String> expectedCommandLineElements = new HashSet<>();
    expectedCommandLineElements.add(StartMemberUtils.getJavaPath());
    expectedCommandLineElements.add("-server");
    expectedCommandLineElements.add("-classpath");
    expectedCommandLineElements
        .add(StartMemberUtils.getGemFireJarPath().concat(File.pathSeparator).concat(customClasspath)
            .concat(File.pathSeparator).concat(StartMemberUtils.CORE_DEPENDENCIES_JAR_PATHNAME));
    expectedCommandLineElements
        .add("-DgemfirePropertyFile=".concat(gemfirePropertiesFile.getAbsolutePath()));
    expectedCommandLineElements.add(
        "-DgemfireSecurityPropertyFile=".concat(gemfireSecurityPropertiesFile.getAbsolutePath()));
    expectedCommandLineElements.add("-Dgemfire.statistic-sample-rate=1500");
    expectedCommandLineElements.add("-Dgemfire.disable-auto-reconnect=true");
    expectedCommandLineElements.addAll(Arrays.asList(jvmArguments));
    expectedCommandLineElements.add("org.apache.geode.distributed.LocatorLauncher");
    expectedCommandLineElements.add("start");
    expectedCommandLineElements.add("customLocator");
    expectedCommandLineElements.add("--debug");
    expectedCommandLineElements.add("--force");
    expectedCommandLineElements.add("--hostname-for-clients=localhost");
    expectedCommandLineElements.add("--port=10101");
    expectedCommandLineElements.add("--redirect-output");

    assertNotNull(commandLineElements);
    assertTrue(commandLineElements.length > 0);
    assertThat(commandLineElements).containsAll(expectedCommandLineElements);
  }
}
package de.kleppmann.maniation.scene;

/**
 * Data-binding implementation of the {@code <animation>} scene element
 * (namespace http://kleppmann.de/maniation/scene).
 *
 * <p>NOTE(review): this looks like machine-generated databinding code (fully-qualified names
 * everywhere, mirrored attribute/child plumbing) — confirm before hand-editing; regeneration
 * would overwrite changes.
 */
class AnimationImpl implements de.kleppmann.maniation.scene.Animation, de.kleppmann.maniation.scene.XMLElement {

    private javax.xml.namespace.QName _tagName = new javax.xml.namespace.QName("http://kleppmann.de/maniation/scene", "animation");
    private de.realityinabox.databinding.libs.AttributeMap _attributes = new de.realityinabox.databinding.libs.AttributeMap(new de.kleppmann.maniation.scene.AnimationImpl.MyAttributes());
    private de.kleppmann.maniation.scene.XMLElement _parent;
    private de.kleppmann.maniation.scene.XMLDocument _document;
    private de.kleppmann.maniation.scene.AnimationImpl.MyHandler _handler = new de.kleppmann.maniation.scene.AnimationImpl.MyHandler();
    private de.kleppmann.maniation.scene.AnimationImpl.MyChildren _children = new de.kleppmann.maniation.scene.AnimationImpl.MyChildren();

    AnimationImpl(de.kleppmann.maniation.scene.XMLDocument document, de.kleppmann.maniation.scene.XMLElement parent) {
        this._document = document;
        this._parent = parent;
    }

    // Mapped XML attributes: start, finish (units not stated here — confirm against the schema)
    // and loop; child <keyframe> elements accumulate in 'keyframes'.
    private double start;
    private double finish;
    private boolean loop;
    private java.util.List<de.kleppmann.maniation.scene.Keyframe> keyframes = new java.util.ArrayList<de.kleppmann.maniation.scene.Keyframe>();

    public double getStart() {
        return start;
    }

    public void setStart(double start) {
        this.start = start;
    }

    public double getFinish() {
        return finish;
    }

    public void setFinish(double finish) {
        this.finish = finish;
    }

    public boolean isLoop() {
        return loop;
    }

    public void setLoop(boolean loop) {
        this.loop = loop;
    }

    public java.util.List<de.kleppmann.maniation.scene.Keyframe> getKeyframes() {
        return keyframes;
    }

    public de.realityinabox.databinding.libs.Document getDocument() {
        return this._document;
    }

    public de.kleppmann.maniation.scene.XMLElement getParent() {
        return this._parent;
    }

    public javax.xml.namespace.QName getTagName() {
        return this._tagName;
    }

    public void setTagName(javax.xml.namespace.QName tagName) {
        this._tagName = tagName;
    }

    public java.util.List<de.realityinabox.databinding.libs.XMLChild> getChildren() {
        return this._children;
    }

    public java.util.Map<javax.xml.namespace.QName, java.lang.String> getAttributes() {
        return this._attributes;
    }

    public org.xml.sax.ContentHandler getParseHandler() {
        return this._handler;
    }

    /**
     * Live view over the three XML attributes. add() doubles as "set": it stores the parsed
     * value on the enclosing instance and returns the previous value serialized as a string.
     */
    private class MyAttributes extends de.realityinabox.databinding.libs.AttributeSet {
        javax.xml.namespace.QName _startAttribute = new javax.xml.namespace.QName("", "start");
        javax.xml.namespace.QName _finishAttribute = new javax.xml.namespace.QName("", "finish");
        javax.xml.namespace.QName _loopAttribute = new javax.xml.namespace.QName("", "loop");

        MyAttributes() {
            super(null);
        }

        public int size() {
            return 3;
        }

        public java.lang.String add(javax.xml.namespace.QName key, java.lang.String value) {
            java.lang.String _result = null;
            if (key.equals(_startAttribute)) {
                _result = java.lang.Double.toString(getStart());
                setStart(java.lang.Double.parseDouble(value));
            } else if (key.equals(_finishAttribute)) {
                _result = java.lang.Double.toString(getFinish());
                setFinish(java.lang.Double.parseDouble(value));
            } else if (key.equals(_loopAttribute)) {
                _result = (isLoop() ? "true" : "false");
                // Accepts "1" or "true" as true; anything else is false.
                setLoop(value.equals("1") || value.equals("true"));
            } else throw new java.lang.IllegalArgumentException("XML attribute '" +
                key.getLocalPart() + "' is unknown");
            return _result;
        }

        public javax.xml.namespace.QName getKey(int index) {
            if (index == 0) return _startAttribute;
            if (index == 1) return _finishAttribute;
            if (index == 2) return _loopAttribute;
            throw new IllegalArgumentException();
        }

        public java.lang.String getValue(int index) {
            if (index == 0) return java.lang.Double.toString(getStart());
            if (index == 1) return java.lang.Double.toString(getFinish());
            if (index == 2) return (isLoop() ? "true" : "false");
            throw new IllegalArgumentException();
        }

        // Attributes are fixed; remove() is deliberately a no-op for valid indices.
        public void remove(int index) {
            if ((index >= 0) && (index < 3)) return;
            throw new IllegalArgumentException();
        }
    }

    /**
     * Live child list backed directly by 'keyframes'; indices past the keyframe range
     * are invalid because <animation> has no other child kinds.
     */
    private class MyChildren extends java.util.AbstractList<de.realityinabox.databinding.libs.XMLChild> {

        private int ownSize() {
            int _i = 0;
            _i += keyframes.size();
            return _i;
        }

        public de.realityinabox.databinding.libs.XMLChild get(int index) {
            try {
                if ((index >= 0) && (index < keyframes.size())) return (XMLChild) keyframes.get(index);
                index -= keyframes.size();
            } catch (ClassCastException e) {
                assert(false); // generated invariant: stored keyframes always implement XMLChild
            }
            throw new IllegalArgumentException();
        }

        public int size() {
            return ownSize();
        }

        public de.realityinabox.databinding.libs.XMLChild set(int index, de.realityinabox.databinding.libs.XMLChild element) {
            de.kleppmann.maniation.scene.XMLChild _result;
            try {
                if ((index >= 0) && (index < keyframes.size())) {
                    _result = (de.kleppmann.maniation.scene.XMLChild) keyframes.get(index);
                    keyframes.set(index, (de.kleppmann.maniation.scene.KeyframeImpl) element);
                    return _result;
                }
                index -= keyframes.size();
                throw new java.lang.IllegalArgumentException();
            } catch (java.lang.ClassCastException e) {
                throw new java.lang.IllegalArgumentException(e);
            }
        }

        public void add(int index, de.realityinabox.databinding.libs.XMLChild element) {
            // Only <keyframe> children are accepted; insertion index is ignored (append-only).
            if (element instanceof de.kleppmann.maniation.scene.KeyframeImpl) {
                if (((de.kleppmann.maniation.scene.KeyframeImpl) element).getTagName().equals(_handler._keyframeChild)) {
                    keyframes.add((de.kleppmann.maniation.scene.KeyframeImpl) element);
                    return;
                }
            }
            if (element instanceof de.kleppmann.maniation.scene.XMLElement)
                throw new java.lang.IllegalArgumentException("XML element '" +
                    ((de.kleppmann.maniation.scene.XMLElement) element).getTagName().getLocalPart() + "' is unknown");
        }

        public de.realityinabox.databinding.libs.XMLChild remove(int index) {
            try {
                if ((index >= 0) && (index < keyframes.size())) return (XMLChild) keyframes.remove(index);
                index -= keyframes.size();
            } catch (ClassCastException e) {
                assert(false); // generated invariant: stored keyframes always implement XMLChild
            }
            throw new java.lang.IllegalArgumentException();
        }
    }

    /**
     * SAX handler: pushes a new KeyframeImpl on the document parse stack for each
     * <keyframe> child and registers this element with its parent when it closes.
     */
    private class MyHandler extends org.xml.sax.helpers.DefaultHandler {
        javax.xml.namespace.QName _keyframeChild = new javax.xml.namespace.QName("http://kleppmann.de/maniation/scene", "keyframe");

        public void startElement(java.lang.String namespaceURI, java.lang.String localName,
                java.lang.String qName, org.xml.sax.Attributes atts) throws org.xml.sax.SAXException {
            de.kleppmann.maniation.scene.XMLElement _el = null;
            if (namespaceURI.equals(_keyframeChild.getNamespaceURI()) &&
                localName.equals(_keyframeChild.getLocalPart()))
                _el = new de.kleppmann.maniation.scene.KeyframeImpl(_document, AnimationImpl.this);
            // Unknown elements push null so endElement pairs up; their attributes are skipped.
            getDocument().getParseStack().push(_el);
            if (_el == null) return;
            _el.setTagName(new javax.xml.namespace.QName(namespaceURI, localName));
            for (int i=0; i < atts.getLength(); i++) {
                javax.xml.namespace.QName n = new javax.xml.namespace.QName(atts.getURI(i), atts.getLocalName(i));
                _el.getAttributes().put(n, atts.getValue(i));
            }
        }

        public void endElement(java.lang.String namespaceURI, java.lang.String localName,
                java.lang.String qName) throws org.xml.sax.SAXException {
            if (getParent() != null) getParent().getChildren().add(AnimationImpl.this);
            getDocument().getParseStack().pop();
        }

        public void characters(char[] ch, int start, int length) throws org.xml.sax.SAXException {
            // Text content is intentionally discarded for this element type.
            //getChildren().add(new CDataImpl(document, new String(ch, start, length)));
        }
    }
}
package org.folio.okapi.managers;

import io.vertx.core.Future;
import io.vertx.core.MultiMap;
import io.vertx.core.Vertx;
import io.vertx.core.eventbus.EventBus;
import io.vertx.core.http.HttpMethod;
import io.vertx.core.json.Json;
import io.vertx.core.json.JsonObject;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.apache.logging.log4j.Logger;
import org.folio.okapi.bean.InterfaceDescriptor;
import org.folio.okapi.bean.ModuleDescriptor;
import org.folio.okapi.bean.ModuleInstance;
import org.folio.okapi.bean.RoutingEntry;
import org.folio.okapi.bean.TimerDescriptor;
import org.folio.okapi.common.ErrorType;
import org.folio.okapi.common.GenericCompositeFuture;
import org.folio.okapi.common.OkapiLogger;
import org.folio.okapi.service.TimerStore;
import org.folio.okapi.util.LockedTypedMap1;
import org.folio.okapi.util.OkapiError;

/**
 * Manages per-tenant timers declared by modules via the {@code _timer} system interface:
 * loads patched descriptors from storage, schedules Vert.x timers, and fires module calls.
 */
public class TimerManager {

  private final Logger logger = OkapiLogger.get();
  // Separates tenant/product from the sequence number in composite timer keys.
  private static final String TIMER_ENTRY_SEP = "_";
  private static final String MAP_NAME = "org.folio.okapi.timer.map";
  private static final String EVENT_NAME = "org.folio.okapi.timer.event";
  // tenant id -> shared map of timer descriptors for that tenant
  private final Map<String,LockedTypedMap1<TimerDescriptor>> tenantTimers = new HashMap<>();
  // "<tenant>_<timerId>" -> Vert.x timer id for timers scheduled in this instance
  private final Map<String,Long> timerRunning = new HashMap<>();
  private final TimerStore timerStore;
  private final boolean local; // true when the shared maps are instance-local (no cluster)
  private TenantManager tenantManager;
  private DiscoveryManager discoveryManager;
  private ProxyService proxyService;
  private Vertx vertx;

  public TimerManager(TimerStore timerStore, boolean local) {
    this.timerStore = timerStore;
    this.local = local;
  }

  /**
   * Initialize timer manager.
   * @param vertx Vert.x handle
   * @param tenantManager tenant manager to observe for module changes
   * @param discoveryManager discovery manager (used for cluster leader check)
   * @param proxyService proxy service used to invoke module timer endpoints
   * @return future result
   */
  public Future<Void> init(Vertx vertx, TenantManager tenantManager,
      DiscoveryManager discoveryManager, ProxyService proxyService) {
    this.vertx = vertx;
    this.tenantManager = tenantManager;
    this.discoveryManager = discoveryManager;
    this.proxyService = proxyService;
    consumePatchTimer(); // event-bus consumer; defined later in this file (outside this view)
    tenantManager.setTenantChange(this::tenantChange);
    // Start timers for all known tenants sequentially, loading stored patches once.
    return tenantManager.allTenants().compose(list -> {
      Future<Void> future = Future.succeededFuture();
      for (String id : list) {
        future = future.compose(y -> startTimers(id, true));
      }
      return future;
    });
  }

  /**
   * Handle module change for tenant.
   * @param tenantId tenant identifier
   */
  private void tenantChange(String tenantId) {
    // Fire-and-forget: no storage reload on module change.
    startTimers(tenantId, false);
  }

  /**
   * Loads persisted (patched) timer descriptors for a tenant into its shared map.
   * Only descriptors marked modified are restored; stored ids are prefixed with
   * "&lt;tenant&gt;_" and the prefix is stripped before insertion.
   */
  private Future<Void> loadFromStorage(String tenantId) {
    final LockedTypedMap1<TimerDescriptor> timerMap = tenantTimers.get(tenantId);
    return timerStore.getAll().compose(list -> {
      List<Future<Void>> futures = new LinkedList<>();
      String prefix = tenantId + TIMER_ENTRY_SEP;
      for (TimerDescriptor timerDescriptor : list) {
        String tenantTimerId = timerDescriptor.getId();
        if (tenantTimerId.startsWith(prefix)) {
          timerDescriptor.setId(tenantTimerId.substring(prefix.length()));
          if (timerDescriptor.isModified()) {
            futures.add(timerMap.put(timerDescriptor.getId(), timerDescriptor));
          }
        }
      }
      return GenericCompositeFuture.all(futures).mapEmpty();
    }
    );
  }

  /**
   * Removes timers whose owning module is no longer enabled for the tenant:
   * cancels any locally scheduled Vert.x timer, drops the map entry and the
   * persisted descriptor.
   */
  private Future<Void> removeStale(String tenantId, List<ModuleDescriptor> mdList) {
    final LockedTypedMap1<TimerDescriptor> timerMap = tenantTimers.get(tenantId);
    return timerMap.getAll().compose(list -> {
      List<Future<Void>> futures = new LinkedList<>();
      for (String timerId : list.keySet()) {
        ModuleDescriptor md = getModuleForTimer(mdList, timerId);
        if (md == null) {
          final String runId = tenantId + TIMER_ENTRY_SEP + timerId;
          Long id = timerRunning.remove(runId);
          if (id != null) {
            vertx.cancelTimer(id);
          }
          // NOTE(review): timerMap.remove's future is not collected — looks fire-and-forget; confirm.
          timerMap.remove(timerId);
          futures.add(timerStore.delete(runId).mapEmpty());
        }
      }
      return GenericCompositeFuture.all(futures).mapEmpty();
    });
  }

  /**
   * Creates/refreshes timers for every {@code _timer} routing entry of the tenant's
   * enabled modules. A patched (modified) descriptor in the map wins over the
   * module-provided routing entry; unchanged running timers are left alone.
   */
  private Future<Void> handleNew(String tenantId, List<ModuleDescriptor> mdList) {
    final LockedTypedMap1<TimerDescriptor> timerMap = tenantTimers.get(tenantId);
    Future<Void> future = Future.succeededFuture();
    for (ModuleDescriptor md : mdList) {
      InterfaceDescriptor timerInt = md.getSystemInterface("_timer");
      if (timerInt != null) {
        List<RoutingEntry> routingEntries = timerInt.getAllRoutingEntries();
        int seq = 0;
        for (RoutingEntry re : routingEntries) {
          // Timer id is "<product>_<index of routing entry>".
          String timerId = md.getProduct() + TIMER_ENTRY_SEP + seq;
          future = future
              .compose(y -> timerMap.get(timerId))
              .compose(existing -> {
                // patched timer descriptor takes precedence over updated module
                final String runId = tenantId + TIMER_ENTRY_SEP + timerId;
                if (existing != null && existing.isModified()) {
                  // see if timers already going for this one.
                  if (timerRunning.containsKey(runId)) {
                    return Future.succeededFuture();
                  }
                  return waitTimer(tenantId, existing);
                }
                // non-patched timer descriptor for module's routing entry
                TimerDescriptor newTimerDescriptor = new TimerDescriptor();
                newTimerDescriptor.setId(timerId);
                newTimerDescriptor.setRoutingEntry(re);
                if (timerRunning.containsKey(runId)) {
                  if (isSimilar(existing, newTimerDescriptor)) {
                    return Future.succeededFuture();
                  }
                  // Routing entry changed: cancel the running timer and reschedule below.
                  vertx.cancelTimer(timerRunning.get(runId));
                }
                return timerMap.put(timerId, newTimerDescriptor)
                    .compose(x -> waitTimer(tenantId, newTimerDescriptor));
              });
          seq++;
        }
      }
    }
    return future;
  }

  /**
   * enable timers for enabled modules for a tenant.
   * @param tenantId Tenant identifier
   * @param load whether to load from storage
   * @return async result
   */
  private Future<Void> startTimers(String tenantId, boolean load) {
    return tenantManager.getEnabledModules(tenantId).compose(mdList -> {
      // lazily create and initialize the tenant's shared timer map on first use
      if (!tenantTimers.containsKey(tenantId)) {
        LockedTypedMap1<TimerDescriptor> timerMap = new LockedTypedMap1<>(TimerDescriptor.class);
        tenantTimers.put(tenantId, timerMap);
        Future<Void> future = Future.succeededFuture();
        future = future.compose(x -> timerMap.init(vertx, MAP_NAME + "." + tenantId, local));
        if (load) {
          future = future.compose(x -> loadFromStorage(tenantId));
        }
        return future
            .compose(x -> removeStale(tenantId, mdList))
            .compose(x -> handleNew(tenantId, mdList));
      }
      Future<Void> future = Future.succeededFuture();
      if (load) {
        future = future.compose(x -> loadFromStorage(tenantId));
      }
      return future
          .compose(x -> removeStale(tenantId, mdList))
          .compose(x -> handleNew(tenantId, mdList));
    });
  }

  /**
   * Fire a timer - invoke module.
   * @param tenantId tenant identifier
   * @param md module descriptor of module to invoke
   * @param timerDescriptor timer descriptor in use
   */
  private void fireTimer(String tenantId, ModuleDescriptor md, TimerDescriptor timerDescriptor) {
    RoutingEntry routingEntry = timerDescriptor.getRoutingEntry();
    String path = routingEntry.getStaticPath();
    HttpMethod httpMethod = routingEntry.getDefaultMethod(HttpMethod.POST);
    ModuleInstance inst = new ModuleInstance(md, routingEntry, path, httpMethod, true);
    MultiMap headers = MultiMap.caseInsensitiveMultiMap();
    logger.info("timer {} call start module {} for tenant {}",
        timerDescriptor.getId(), md.getId(), tenantId);
    // fire-and-forget: outcome is only logged, never propagated to the scheduler
    proxyService.callSystemInterface(headers, tenantId, inst, "")
        .onFailure(cause -> logger.info("timer call failed to module {} for tenant {} : {}",
            md.getId(), tenantId, cause.getMessage()))
        .onSuccess(res -> logger.info("timer call succeeded to module {} for tenant {}",
            md.getId(), tenantId));
  }

  /**
   * Resolve the module that owns a timer, or null when the module is no longer enabled.
   * NOTE(review): failures from getEnabledModules are deliberately mapped to null here.
   */
  Future<ModuleDescriptor> getModuleForTimer(String tenantId, String timerId) {
    return tenantManager.getEnabledModules(tenantId)
        .map(list -> getModuleForTimer(list, timerId))
        .recover(cause -> Future.succeededFuture(null));
  }

  /**
   * Find the module whose product and routing-entry sequence match a timer id.
   * The timer id has the form {@code product + TIMER_ENTRY_SEP + seq}.
   * @return the owning module, or null if none matches
   */
  ModuleDescriptor getModuleForTimer(List<ModuleDescriptor> list, String timerId) {
    String product = timerId.substring(0, timerId.indexOf(TIMER_ENTRY_SEP));
    int seq = Integer.parseInt(timerId.substring(timerId.indexOf(TIMER_ENTRY_SEP) + 1));
    for (ModuleDescriptor md : list) {
      if (product.equals(md.getProduct())) {
        InterfaceDescriptor timerInt = md.getSystemInterface("_timer");
        if (timerInt != null) {
          List<RoutingEntry> routingEntries = timerInt.getAllRoutingEntries();
          if (seq < routingEntries.size()) {
            return md;
          }
        }
      }
    }
    return null;
  }

  /**
   * Handle a timer firing.
   *
   * <p>This method is called for each timer in each tenant and for each instance in
   * the Okapi cluster.
   * @param tenantId tenant identifier
   * @param timerDescriptor descriptor that this handling
   */
  private void handleTimer(String tenantId, TimerDescriptor timerDescriptor) {
    logger.info("timer {} handle for tenant {}", timerDescriptor.getId(), tenantId);
    final String timerId = timerDescriptor.getId();
    // re-read the descriptor from the shared map so a concurrent patch is honored
    tenantTimers.get(tenantId).get(timerId)
        .compose(currentDescriptor -> {
          // this timer is latest and current .. do the work..
          // find module for this timer.. If module is not found, it was disabled
          // in the meantime and timer is stopped.
          return getModuleForTimer(tenantId, timerId).compose(md -> {
            if (md == null) {
              final String runId = tenantId + TIMER_ENTRY_SEP + timerId;
              timerRunning.remove(runId);
              return Future.succeededFuture();
            }
            if (discoveryManager.isLeader()) {
              // only fire timer in one instance (of the Okapi cluster)
              fireTimer(tenantId, md, currentDescriptor);
            }
            // roll on.. wait and redo..
            // NOTE(review): reschedules with the original timerDescriptor, not
            // currentDescriptor; a patched delay appears to be applied via the
            // patch event instead — confirm.
            return waitTimer(tenantId, timerDescriptor);
          });
        })
        .onFailure(cause -> logger.warn("handleTimer id={} {}", timerId, cause.getMessage(), cause));
  }

  /**
   * Schedule the next firing of a timer.
   *
   * <p>This method is called for each timer in each tenant and for each instance in
   * the Okapi cluster. If the tenant descriptor has a zero delay, that will
   * stop/disable the timer.
* @param tenantId tenant identifier * @param timerDescriptor descriptor that this handling */ private Future<Void> waitTimer(String tenantId, TimerDescriptor timerDescriptor) { RoutingEntry routingEntry = timerDescriptor.getRoutingEntry(); final long delay = routingEntry.getDelayMilliSeconds(); final String runId = tenantId + TIMER_ENTRY_SEP + timerDescriptor.getId(); logger.info("waitTimer {} delay {} for tenant {}", timerDescriptor.getId(), delay, tenantId); if (delay > 0) { timerRunning.put(runId, vertx.setTimer(delay, res -> handleTimer(tenantId, timerDescriptor))); } else { timerRunning.remove(runId); } return Future.succeededFuture(); } /** * get timer descriptor. * @param tenantId tenant identifier * @param timerId timer identifier * @return timer descriptor */ public Future<TimerDescriptor> getTimer(String tenantId, String timerId) { LockedTypedMap1<TimerDescriptor> timerMap = tenantTimers.get(tenantId); if (timerMap == null) { return Future.failedFuture(new OkapiError(ErrorType.NOT_FOUND, tenantId)); } return timerMap.getNotFound(timerId); } /** * timer list. * @param tenantId tenant identifier * @return timer descriptors for the tenant */ public Future<Collection<TimerDescriptor>> listTimers(String tenantId) { LockedTypedMap1<TimerDescriptor> timerMap = tenantTimers.get(tenantId); if (timerMap == null) { return Future.succeededFuture(Collections.emptyList()); } return timerMap.getAll().map(LinkedHashMap::values); } static boolean isSimilar(TimerDescriptor a, TimerDescriptor b) { if (a == null) { return false; } return Json.encode(a).equals(Json.encode(b)); } static boolean isPatchReset(RoutingEntry patchEntry) { return patchEntry.getDelay() == null && patchEntry.getUnit() == null && patchEntry.getSchedule() == null; } /** * timer PATCH. 
   * @param tenantId tenant identifier
   * @param timerDescriptor timer descriptor
   * @return future
   */
  public Future<Void> patchTimer(String tenantId, TimerDescriptor timerDescriptor) {
    return getTimer(tenantId, timerDescriptor.getId())
        .compose(existing -> {
          // snapshot the current state so a no-op patch can be detected below
          final String existingJson = Json.encode(existing);
          final String timerId = timerDescriptor.getId();
          RoutingEntry patchEntry = timerDescriptor.getRoutingEntry();
          Future<TimerDescriptor> future;
          if (isPatchReset(patchEntry)) {
            // reset to original value of module descriptor
            future = tenantManager.getEnabledModules(tenantId).compose(mdList -> {
              timerDescriptor.setModified(false);
              for (ModuleDescriptor md : mdList) {
                InterfaceDescriptor timerInt = md.getSystemInterface("_timer");
                if (timerInt != null) {
                  List<RoutingEntry> routingEntries = timerInt.getAllRoutingEntries();
                  int seq = 0;
                  for (RoutingEntry re : routingEntries) {
                    // reconstruct each candidate timer id until it matches
                    String gotTimerId = md.getProduct() + TIMER_ENTRY_SEP + seq;
                    if (gotTimerId.equals(timerId)) {
                      timerDescriptor.setRoutingEntry(re);
                      return Future.succeededFuture(timerDescriptor);
                    }
                    seq++;
                  }
                }
              }
              return Future.failedFuture(new OkapiError(ErrorType.NOT_FOUND, timerId));
            });
          } else {
            // overlay only the schedule-related fields onto the existing entry,
            // keeping path/method etc. from the stored descriptor
            RoutingEntry existingEntry = existing.getRoutingEntry();
            timerDescriptor.setRoutingEntry(existingEntry);
            timerDescriptor.setModified(true);
            existingEntry.setUnit(patchEntry.getUnit());
            existingEntry.setDelay(patchEntry.getDelay());
            existingEntry.setSchedule(patchEntry.getSchedule());
            future = Future.succeededFuture(timerDescriptor);
          }
          return future.compose(newDescriptor -> {
            String newJson = Json.encode(newDescriptor);
            if (existingJson.equals(newJson)) {
              // nothing changed: skip storage write and event publication
              return Future.succeededFuture();
            }
            // round-trip through JSON to get an independent copy for storage
            TimerDescriptor newDescriptorStorage = new JsonObject(newJson)
                .mapTo(TimerDescriptor.class);
            // storage keys are prefixed with the tenant id to keep tenants apart
            String newId = tenantId + TIMER_ENTRY_SEP + newDescriptor.getId();
            newDescriptorStorage.setId(newId);
            return timerStore.put(newDescriptorStorage)
                .compose(y -> tenantTimers.get(tenantId).put(newDescriptor.getId(), newDescriptor)
                    .onSuccess(x -> {
JsonObject o = new JsonObject();
                      o.put("tenantId", tenantId);
                      o.put("timerDescriptor", newJson);
                      EventBus eb = vertx.eventBus();
                      eb.publish(EVENT_NAME, o.encode());
                    }));
          });
        });
  }

  /**
   * Consume patch event and start a new timer ..
   *
   * <p>Every instance in the cluster receives the event; an instance that has a
   * pending Vert.x timer for the patched entry cancels it before rescheduling.
   */
  private void consumePatchTimer() {
    EventBus eb = vertx.eventBus();
    eb.consumer(EVENT_NAME, res -> {
      JsonObject o = new JsonObject((String) res.body());
      String tenantId = o.getString("tenantId");
      String timerDescriptorVal = o.getString("timerDescriptor");
      TimerDescriptor timerDescriptor = Json.decodeValue(timerDescriptorVal, TimerDescriptor.class);
      final String runId = tenantId + TIMER_ENTRY_SEP + timerDescriptor.getId();
      // Fix: timerRunning may have no entry for runId (e.g. the timer had a zero
      // delay, or this instance never scheduled it); the previous unconditional
      // vertx.cancelTimer(timerRunning.get(runId)) threw NullPointerException on
      // unboxing. Sibling code in handleNew guards the same call with containsKey.
      Long pendingTimer = timerRunning.get(runId);
      if (pendingTimer != null) {
        vertx.cancelTimer(pendingTimer);
      }
      waitTimer(tenantId, timerDescriptor);
    });
  }
}
/** * Copyright 2005-2011 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.kim.test.service; import org.junit.Before; import org.junit.Test; import org.kuali.rice.core.api.membership.MemberType; import org.kuali.rice.kim.api.role.Role; import org.kuali.rice.kim.api.role.RoleMembership; import org.kuali.rice.kim.api.role.RoleService; import org.kuali.rice.kim.api.services.KimApiServiceLocator; import org.kuali.rice.kim.api.permission.PermissionService; import org.kuali.rice.kim.test.KIMTestCase; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import static org.junit.Assert.*; /** * This is a description of what this class does - kellerj don't forget to fill this in. 
 *
 * @author Kuali Rice Team (rice.collab@kuali.org)
 *
 */
public class AuthorizationServiceImplTest extends KIMTestCase {

	// services under test, resolved from the KIM service locator in setUp()
	private PermissionService permissionService;
	private RoleService roleService;

	// fixture identifiers; the data itself is expected to come from the
	// KIMTestCase test data set (the programmatic fixture below is disabled)
	private String principal1Id = "p1";
	private String principal2Id = "p2";
	private String principal3Id = "p3";
	private String group1Id = "g1";
	private String role1Id = "r1";
	private String role1NamespaceCode = "AUTH_SVC_TEST1";
	private String role1Description = "Role 1 Description";
	private String role1Name = "RoleOne";
	private String role2Id = "r2";
	private String role2NamespaceCode = "AUTH_SVC_TEST2";
	private String role2Description = "Role 2 Description";
	private String role2Name = "RoleTwo";
	private String permission1Name = "perm1";
	private String permission1NamespaceCode = "KR-NS";
	private String permission1Id = "p1";
	private String permission2Name = "perm2";
	private String permission2NamespaceCode = "KR-NS";
	private String permission2Id = "p2";
	private String permission3Name = "perm3";
	private String permission3NamespaceCode = "KR-NS";
	private String permission3Id = "p3";

	@Before
	public void setUp() throws Exception {
		super.setUp();
		permissionService = KimApiServiceLocator.getPermissionService();
		roleService = KimApiServiceLocator.getRoleService();
		// The commented block below is the legacy programmatic fixture kept for
		// reference; it documents the role/permission graph the tests rely on.
//
//
//		// set up Role "r1" with principal p1
//		RoleBo role1 = new RoleBo();
//		role1.setId(role1Id);
//		role1.setActive(true);
//		role1.setKimTypeId(getDefaultKimType().getId());
//		role1.setNamespaceCode(role1NamespaceCode);
//		role1.setDescription(role1Description);
//		role1.setName(role1Name);
//		List<RoleMemberBo> members1 = new ArrayList<RoleMemberBo>();
//		role1.setMembers(members1);
//		RoleMemberBo p1Member = new RoleMemberBo();
//		p1Member.setMemberId(principal1Id);
//		p1Member.setMemberTypeCode("P");
//		p1Member.setRoleId(role1Id);
//		p1Member.setRoleMemberId(getNewRoleMemberId());
//		members1.add(p1Member);
//		KRADServiceLocator.getBusinessObjectService().save(role1);
//
//		// set up Role "r2" with principal p3, group g1 and role r1
//		RoleBo role2 = new RoleBo();
//		role2.setId(role2Id);
//		role2.setActive(true);
//		role2.setKimTypeId(getDefaultKimType().getId());
//		role2.setNamespaceCode(role2NamespaceCode);
//		role2.setDescription(role2Description);
//		role2.setName(role2Name);
//		List<RoleMemberBo> members2 = new ArrayList<RoleMemberBo>();
//		role2.setMembers(members2);
//		RoleMemberBo p3Member = new RoleMemberBo();
//		p3Member.setMemberId(principal3Id);
//		p3Member.setMemberTypeCode("P");
//		p3Member.setRoleId(role2Id);
//		p3Member.setRoleMemberId(getNewRoleMemberId());
//		members2.add(p3Member);
//		RoleMemberBo g1Member = new RoleMemberBo();
//		g1Member.setMemberId(group1Id);
//		g1Member.setMemberTypeCode("G");
//		g1Member.setRoleId(role2Id);
//		g1Member.setRoleMemberId(getNewRoleMemberId());
//		members2.add(g1Member);
//		RoleMemberBo r1Member = new RoleMemberBo();
//		r1Member.setMemberId(role1Id);
//		r1Member.setMemberTypeCode("R");
//		r1Member.setRoleId(role2Id);
//		r1Member.setRoleMemberId(getNewRoleMemberId());
//		members2.add(r1Member);
//		KRADServiceLocator.getBusinessObjectService().save(role2);
//
//		// setup permissions
//
//		KimPermissionTemplateImpl defaultTemplate = getDefaultPermissionTemplate();
//
//		KimPermissionImpl permission1 = new KimPermissionImpl();
//		permission1.setActive(true);
//		permission1.setDescription("permission1");
//		permission1.setName(permission1Name);
//		permission1.setNamespaceCode(permission1NamespaceCode);
//		permission1.setPermissionId(permission1Id);
//		permission1.setTemplateId(defaultTemplate.getPermissionTemplateId());
//		permission1.setTemplate(defaultTemplate);
//		KRADServiceLocator.getBusinessObjectService().save(permission1);
//
//		KimPermissionImpl permission2 = new KimPermissionImpl();
//		permission2.setActive(true);
//		permission2.setDescription("permission2");
//		permission2.setName(permission2Name);
//		permission2.setNamespaceCode(permission2NamespaceCode);
//		permission2.setPermissionId(permission2Id);
//
permission2.setTemplateId(defaultTemplate.getPermissionTemplateId()); // permission2.setTemplate(defaultTemplate); // KRADServiceLocator.getBusinessObjectService().save(permission2); // // KimPermissionImpl permission3 = new KimPermissionImpl(); // permission3.setActive(true); // permission3.setDescription("permission3"); // permission3.setName(permission3Name); // permission3.setNamespaceCode(permission3NamespaceCode); // permission3.setPermissionId(permission3Id); // permission3.setTemplateId(defaultTemplate.getPermissionTemplateId()); // permission3.setTemplate(defaultTemplate); // KRADServiceLocator.getBusinessObjectService().save(permission3); // // // assign permissions to roles // // p1 -> r1 // // p2 -> r1 // // p3 -> r2 // // RolePermissionImpl role1Perm1 = new RolePermissionImpl(); // role1Perm1.setActive(true); // role1Perm1.setRoleId(role1Id); // role1Perm1.setPermissionId(permission1Id); // role1Perm1.setRolePermissionId(getNewRolePermissionId()); // KRADServiceLocator.getBusinessObjectService().save(role1Perm1); // // RolePermissionImpl role1Perm2 = new RolePermissionImpl(); // role1Perm2.setActive(true); // role1Perm2.setRoleId(role1Id); // role1Perm2.setPermissionId(permission2Id); // role1Perm2.setRolePermissionId(getNewRolePermissionId()); // KRADServiceLocator.getBusinessObjectService().save(role1Perm2); // // RolePermissionImpl role2Perm3 = new RolePermissionImpl(); // role2Perm3.setActive(true); // role2Perm3.setRoleId(role2Id); // role2Perm3.setPermissionId(permission3Id); // role2Perm3.setRolePermissionId(getNewRolePermissionId()); // KRADServiceLocator.getBusinessObjectService().save(role2Perm3); } @Test public void testRoleMembership() { Role role = roleService.getRole( role2Id ); assertNotNull( "r2 must exist", role ); ArrayList<String> roleList = new ArrayList<String>( 1 ); roleList.add( role2Id ); Collection<String> memberPrincipalIds = roleService.getRoleMemberPrincipalIds(role2NamespaceCode, role2Name, Collections.<String, 
String>emptyMap()); assertNotNull(memberPrincipalIds); assertEquals("RoleTwo should have 6 principal ids", 5, memberPrincipalIds.size()); assertTrue( "p3 must belong to role", memberPrincipalIds.contains(principal3Id) ); assertTrue( "p2 must belong to role (assigned via group)", memberPrincipalIds.contains(principal2Id) ); assertTrue( "p1 must belong to r2 (via r1)", memberPrincipalIds.contains(principal1Id) ); Collection<RoleMembership> members = roleService.getRoleMembers( roleList, Collections.<String, String>emptyMap() ); assertNotNull( "returned list may not be null", members ); assertFalse( "list must not be empty", members.isEmpty() ); assertEquals("Returned list must have 4 members.", 4, members.size()); boolean foundP3 = false; boolean foundG1 = false; boolean foundR1 = false; for (RoleMembership member : members) { if (member.getMemberId().equals(principal3Id) && member.getType().equals(MemberType.PRINCIPAL)) { foundP3 = true; } else if (member.getMemberId().equals(group1Id) && member.getType().equals(MemberType.GROUP)) { foundG1 = true; } else if (member.getMemberId().equals(principal1Id) && member.getType().equals(MemberType.PRINCIPAL)) { foundR1 = true; assertEquals("Should have r1 embedded role id.", role1Id, member.getEmbeddedRoleId()); } } assertTrue("Failed to find p3 principal member", foundP3); assertTrue("Failed to find g1 group member", foundG1); assertTrue("Failed to find r1 role member", foundR1); role = roleService.getRole( role1Id ); assertNotNull( "r1 must exist", role ); roleList.clear(); roleList.add( role1Id ); members = roleService.getRoleMembers( roleList, Collections.<String, String>emptyMap() ); assertNotNull( "returned list may not be null", members ); assertEquals("Should have 2 members", 2, members.size()); Iterator<RoleMembership> iter = members.iterator(); assertTrue("One of those members should be p1.", principal1Id.equals(iter.next().getMemberId()) || principal1Id.equals(iter.next().getMemberId())); } // @Test // public void 
//	testGetPermissionsForRole() {
//		List<PermissionDetailInfo> perms = authorizationService.getPermissionsForRole( "r1" );
//		System.out.println( "r1: " + perms );
//		assertTrue( "r1 must have perm1 (direct)", hasPermission( perms, "perm1" ) );
//		assertTrue( "r1 must have perm2 (direct)", hasPermission( perms, "perm2" ) );
//		assertTrue( "r1 must have perm3 (via r2)", hasPermission( perms, "perm3" ) );
//		perms = authorizationService.getPermissionsForRole( "r2" );
//		System.out.println( "r2: " + perms );
//		assertTrue( "r2 must have perm3 (direct)", hasPermission( perms, "perm3" ) );
//		assertFalse( "r2 must not have perm1", hasPermission( perms, "perm1" ) );
//		assertFalse( "r2 must not have perm2", hasPermission( perms, "perm2" ) );
//	}

	/**
	 * Verifies permission resolution through direct and nested role membership:
	 * p1 gets perm1/perm2 via r1 and perm3 via r2; p3 gets only perm3 via r2.
	 */
	@Test
	public void testHasPermission() {
		assertTrue( "p1 must have perm1 (via r1)", permissionService.hasPermission( "p1", "KR-NS", "perm1" ));
		assertTrue( "p1 must have perm2 (via r1)", permissionService.hasPermission( "p1", "KR-NS", "perm2" ) );
		assertTrue( "p1 must have perm3 (via r2)", permissionService.hasPermission( "p1", "KR-NS", "perm3" ) );
		assertTrue( "p3 must have perm3 (via r2)", permissionService.hasPermission( "p3", "KR-NS", "perm3" ) );
		assertFalse( "p3 must not have perm1", permissionService.hasPermission( "p3", "KR-NS", "perm1") );
		assertFalse( "p3 must not have perm2", permissionService.hasPermission( "p3", "KR-NS", "perm2") );
	}

//	protected boolean hasPermission( List<PermissionDetailsInfo> perms, String permissionId ) {
//		for ( PermissionDetailsInfo perm : perms ) {
//			if ( perm.getPermissionId().equals( permissionId ) ) {
//				return true;
//			}
//		}
//		return false;
//	}

	// test that only active roles/permissions are used
	// test that only roles attached to active groups are returned
	// check that implied/implying lists are correct
	// check qualification matching
	// need hierarchical test for qualification matching
	// check namespace filters
	// non-qualified role/permission checks
	// qualified role/permission checks
	// add type services in test spring startup? - how in rice?
}
/* * Copyright 2016 MovingBlocks * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.terasology.logic.particles; import org.lwjgl.opengl.GL11; import org.lwjgl.opengl.GL13; import org.terasology.utilities.Assets; import org.terasology.config.Config; import org.terasology.entitySystem.entity.EntityManager; import org.terasology.entitySystem.entity.EntityRef; import org.terasology.entitySystem.entity.lifecycleEvents.BeforeDeactivateComponent; import org.terasology.entitySystem.entity.lifecycleEvents.OnActivatedComponent; import org.terasology.entitySystem.event.ReceiveEvent; import org.terasology.entitySystem.systems.BaseComponentSystem; import org.terasology.entitySystem.systems.RegisterMode; import org.terasology.entitySystem.systems.RegisterSystem; import org.terasology.entitySystem.systems.RenderSystem; import org.terasology.entitySystem.systems.UpdateSubscriberSystem; import org.terasology.logic.location.LocationComponent; import org.terasology.logic.particles.BlockParticleEffectComponent.Particle; import org.terasology.math.geom.Vector2f; import org.terasology.math.geom.Vector3f; import org.terasology.math.geom.Vector3i; import org.terasology.math.geom.Vector4f; import org.terasology.registry.In; import org.terasology.rendering.assets.material.Material; import org.terasology.rendering.assets.texture.Texture; import org.terasology.rendering.logic.NearestSortingList; import org.terasology.rendering.opengl.OpenGLUtils; import 
org.terasology.rendering.world.WorldRenderer;
import org.terasology.utilities.random.FastRandom;
import org.terasology.utilities.random.Random;
import org.terasology.world.WorldProvider;
import org.terasology.world.biomes.Biome;
import org.terasology.world.block.Block;
import org.terasology.world.block.BlockManager;
import org.terasology.world.block.BlockPart;
import org.terasology.world.block.tiles.WorldAtlas;

import java.math.RoundingMode;
import java.util.Arrays;
import java.util.Iterator;

import static org.lwjgl.opengl.GL11.GL_ONE;
import static org.lwjgl.opengl.GL11.GL_ONE_MINUS_SRC_ALPHA;
import static org.lwjgl.opengl.GL11.GL_QUADS;
import static org.lwjgl.opengl.GL11.GL_SRC_ALPHA;
import static org.lwjgl.opengl.GL11.glBegin;
import static org.lwjgl.opengl.GL11.glBindTexture;
import static org.lwjgl.opengl.GL11.glBlendFunc;
import static org.lwjgl.opengl.GL11.glCallList;
import static org.lwjgl.opengl.GL11.glDeleteLists;
import static org.lwjgl.opengl.GL11.glDisable;
import static org.lwjgl.opengl.GL11.glEnable;
import static org.lwjgl.opengl.GL11.glEnd;
import static org.lwjgl.opengl.GL11.glEndList;
import static org.lwjgl.opengl.GL11.glGenLists;
import static org.lwjgl.opengl.GL11.glNewList;
import static org.lwjgl.opengl.GL11.glPopMatrix;
import static org.lwjgl.opengl.GL11.glPushMatrix;
import static org.lwjgl.opengl.GL11.glScalef;
import static org.lwjgl.opengl.GL11.glTranslated;
import static org.lwjgl.opengl.GL11.glTranslatef;

/**
 * Client-side system that simulates and renders block-based particle effects
 * using fixed-function OpenGL display lists.
 */
// TODO: Generalise for non-block particles
// TODO: Dispose display lists
@RegisterSystem(RegisterMode.CLIENT)
public class BlockParticleEmitterSystem extends BaseComponentSystem implements UpdateSubscriberSystem, RenderSystem {
    // upper bound on particles spawned per effect per update tick
    private static final int PARTICLES_PER_UPDATE = 32;

    @In
    private EntityManager entityManager;
    @In
    private WorldProvider worldProvider;
    @In
    private WorldAtlas worldAtlas;
    // TODO: lose dependency on worldRenderer?
    @In
    private WorldRenderer worldRenderer;
    @In
    private Config config;
    @In
    private BlockManager blockManager;

    private Random random = new FastRandom();
    private NearestSortingList sorter = new NearestSortingList();
    // shared GL display list holding one textured quad; 0 means not yet built
    private int displayList;

    @Override
    public void initialise() {
        if (displayList == 0) {
            // compile the particle quad once; all particles reuse this list
            displayList = glGenLists(1);
            glNewList(displayList, GL11.GL_COMPILE);
            drawParticle();
            glEndList();
        }
        sorter.initialise(worldRenderer.getActiveCamera());
    }

    @Override
    public void shutdown() {
        glDeleteLists(displayList, 1);
        sorter.stop();
    }

    /**
     * Ages, moves and spawns particles for every effect entity; destroys the
     * entity once all particles expired (when configured to do so).
     */
    @Override
    public void update(float delta) {
        for (EntityRef entity : entityManager.getEntitiesWith(BlockParticleEffectComponent.class, LocationComponent.class)) {
            BlockParticleEffectComponent particleEffect = entity.getComponent(BlockParticleEffectComponent.class);
            Iterator<Particle> iterator = particleEffect.particles.iterator();
            while (iterator.hasNext()) {
                BlockParticleEffectComponent.Particle p = iterator.next();
                p.lifeRemaining -= delta;
                if (p.lifeRemaining <= 0) {
                    // expired: remove via the iterator to avoid ConcurrentModificationException
                    iterator.remove();
                } else {
                    updateVelocity(entity, particleEffect, p, delta);
                    updatePosition(p, delta);
                }
            }
            // spawn at most PARTICLES_PER_UPDATE new particles this tick
            for (int i = 0; particleEffect.spawnCount > 0 && i < PARTICLES_PER_UPDATE; ++i) {
                spawnParticle(particleEffect);
            }
            if (particleEffect.particles.size() == 0 && particleEffect.destroyEntityOnCompletion) {
                entity.destroy();
            }
        }
    }

    @ReceiveEvent(components = {BlockParticleEffectComponent.class, LocationComponent.class})
    public void onActivated(OnActivatedComponent event, EntityRef entity) {
        sorter.add(entity);
    }

    @ReceiveEvent(components = {BlockParticleEffectComponent.class, LocationComponent.class})
    public void onDeactivated(BeforeDeactivateComponent event, EntityRef entity) {
        sorter.remove(entity);
    }

    /**
     * Creates one particle with randomized lifespan, velocity, size and
     * position, and (for block effects) texture atlas coordinates.
     */
    private void spawnParticle(BlockParticleEffectComponent particleEffect) {
        Particle p = new Particle();
        p.lifeRemaining = random.nextFloat() * (particleEffect.maxLifespan - particleEffect.minLifespan) + particleEffect.minLifespan;
        p.velocity = random.nextVector3f();
        p.size =
random.nextFloat() * (particleEffect.maxSize - particleEffect.minSize) + particleEffect.minSize; p.position.set( random.nextFloat(-particleEffect.spawnRange.x, particleEffect.spawnRange.x), random.nextFloat(-particleEffect.spawnRange.y, particleEffect.spawnRange.y), random.nextFloat(-particleEffect.spawnRange.z, particleEffect.spawnRange.z)); p.color = particleEffect.color; if (particleEffect.blockType != null) { final float tileSize = worldAtlas.getRelativeTileSize(); p.texSize.set(tileSize, tileSize); Block b = blockManager.getBlock(particleEffect.blockType).getBlockFamily().getArchetypeBlock(); p.texOffset.set(b.getPrimaryAppearance().getTextureAtlasPos(BlockPart.FRONT)); if (particleEffect.randBlockTexDisplacement) { final float relTileSize = worldAtlas.getRelativeTileSize(); Vector2f particleTexSize = new Vector2f( relTileSize * particleEffect.randBlockTexDisplacementScale.y, relTileSize * particleEffect.randBlockTexDisplacementScale.y); p.texSize.x *= particleEffect.randBlockTexDisplacementScale.x; p.texSize.y *= particleEffect.randBlockTexDisplacementScale.y; p.texOffset.set( p.texOffset.x + random.nextFloat() * (tileSize - particleTexSize.x), p.texOffset.y + random.nextFloat() * (tileSize - particleTexSize.y)); } } //p.texSize.set(TEX_SIZE,TEX_SIZE); particleEffect.particles.add(p); particleEffect.spawnCount--; } protected void updateVelocity(EntityRef entity, BlockParticleEffectComponent particleEffect, Particle particle, float delta) { Vector3f diff = new Vector3f(particleEffect.targetVelocity); diff.sub(particle.velocity); diff.x *= particleEffect.acceleration.x * delta; diff.y *= particleEffect.acceleration.y * delta; diff.z *= particleEffect.acceleration.z * delta; particle.velocity.add(diff); if (particleEffect.collideWithBlocks) { LocationComponent location = entity.getComponent(LocationComponent.class); Vector3f pos = location.getWorldPosition(); pos.add(particle.position); if (worldProvider.getBlock(new Vector3f(pos.x, pos.y + 2 * 
Math.signum(particle.velocity.y) * particle.size, pos.z)).getId() != 0x0) {
                // probe point ahead of travel direction hit a non-air block
                // (id 0x0 presumably means air — TODO confirm): stop falling/rising
                particle.velocity.y = 0;
            }
        }
    }

    /** Simple Euler integration of position from velocity. */
    protected void updatePosition(Particle particle, float delta) {
        particle.position.x += particle.velocity.x * delta;
        particle.position.y += particle.velocity.y * delta;
        particle.position.z += particle.velocity.z * delta;
    }

    @Override
    public void renderAlphaBlend() {
        if (config.getRendering().isRenderNearest()) {
            // cap the number of rendered effects to the nearest N entities
            render(Arrays.asList(sorter.getNearest(config.getRendering().getParticleEffectLimit())));
        } else {
            render(entityManager.getEntitiesWith(BlockParticleEffectComponent.class, LocationComponent.class));
        }
    }

    /**
     * Binds the particle material/texture and renders each relevant effect,
     * dispatching to block- or plain-particle rendering.
     */
    private void render(Iterable<EntityRef> particleEntities) {
        Assets.getMaterial("engine:prog.particle").get().enable();
        glDisable(GL11.GL_CULL_FACE);
        Vector3f cameraPosition = worldRenderer.getActiveCamera().getPosition();
        for (EntityRef entity : particleEntities) {
            LocationComponent location = entity.getComponent(LocationComponent.class);
            if (null == location) {
                continue;
            }
            Vector3f worldPos = location.getWorldPosition();
            if (!worldProvider.isBlockRelevant(worldPos)) {
                continue;
            }
            BlockParticleEffectComponent particleEffect = entity.getComponent(BlockParticleEffectComponent.class);
            if (particleEffect.texture == null) {
                // no per-effect texture: fall back to the terrain atlas
                Texture terrainTex = Assets.getTexture("engine:terrain").get();
                if (terrainTex == null || !terrainTex.isLoaded()) {
                    return;
                }
                GL13.glActiveTexture(GL13.GL_TEXTURE0);
                glBindTexture(GL11.GL_TEXTURE_2D, terrainTex.getId());
            } else if (particleEffect.texture.isLoaded()) {
                GL13.glActiveTexture(GL13.GL_TEXTURE0);
                glBindTexture(GL11.GL_TEXTURE_2D, particleEffect.texture.getId());
            } else {
                return;
            }
            if (particleEffect.blendMode == BlockParticleEffectComponent.ParticleBlendMode.ADD) {
                // additive blending for glow-style effects
                glBlendFunc(GL_ONE, GL_ONE);
            }
            if (particleEffect.blockType != null) {
                renderBlockParticles(worldPos, cameraPosition, particleEffect);
            } else {
                renderParticles(worldPos, cameraPosition, particleEffect);
            }
            if (particleEffect.blendMode ==
BlockParticleEffectComponent.ParticleBlendMode.ADD) {
                // restore the default alpha blend function after additive effects
                glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
            }
        }
        glEnable(GL11.GL_CULL_FACE);
    }

    /**
     * Renders block-textured particles, tinted by the biome color of the
     * effect's world position.
     */
    private void renderBlockParticles(Vector3f worldPos, Vector3f cameraPosition, BlockParticleEffectComponent particleEffect) {
        Vector3i worldPos3i = new Vector3i(worldPos, RoundingMode.HALF_UP);
        Biome biome = worldProvider.getBiome(worldPos3i);
        glPushMatrix();
        // translate into camera-relative space
        glTranslated(worldPos.x - cameraPosition.x, worldPos.y - cameraPosition.y, worldPos.z - cameraPosition.z);
        for (Particle particle : particleEffect.particles) {
            glPushMatrix();
            glTranslatef(particle.position.x, particle.position.y, particle.position.z);
            OpenGLUtils.applyBillboardOrientation();
            glScalef(particle.size, particle.size, particle.size);
            float light = worldRenderer.getRenderingLightIntensityAt(new Vector3f(worldPos.x + particle.position.x,
                    worldPos.y + particle.position.y, worldPos.z + particle.position.z));
            renderParticle(particle, blockManager.getBlock(particleEffect.blockType).getBlockFamily().getArchetypeBlock(), biome, light);
            glPopMatrix();
        }
        glPopMatrix();
    }

    /** Renders plain (non-block) particles with per-particle lighting. */
    private void renderParticles(Vector3f worldPos, Vector3f cameraPosition, BlockParticleEffectComponent particleEffect) {
        glPushMatrix();
        glTranslated(worldPos.x - cameraPosition.x, worldPos.y - cameraPosition.y, worldPos.z - cameraPosition.z);
        for (Particle particle : particleEffect.particles) {
            glPushMatrix();
            glTranslatef(particle.position.x, particle.position.y, particle.position.z);
            OpenGLUtils.applyBillboardOrientation();
            glScalef(particle.size, particle.size, particle.size);
            float light = worldRenderer.getRenderingLightIntensityAt(new Vector3f(worldPos.x + particle.position.x,
                    worldPos.y + particle.position.y, worldPos.z + particle.position.z));
            renderParticle(particle, light);
            glPopMatrix();
        }
        glPopMatrix();
    }

    /** Uploads per-particle shader uniforms and draws the shared quad list. */
    protected void renderParticle(Particle particle, float light) {
        Material mat = Assets.getMaterial("engine:prog.particle").get();
        mat.setFloat4("colorOffset", particle.color.x,
particle.color.y, particle.color.z, particle.color.w, true);
        mat.setFloat2("texOffset", particle.texOffset.x, particle.texOffset.y, true);
        mat.setFloat2("texScale", particle.texSize.x, particle.texSize.y, true);
        mat.setFloat("light", light, true);
        glCallList(displayList);
    }

    /**
     * Block-particle variant: modulates the particle color by the block's
     * biome-dependent color offset before drawing.
     */
    protected void renderParticle(Particle particle, Block block, Biome biome, float light) {
        Material mat = Assets.getMaterial("engine:prog.particle").get();
        Vector4f colorMod = block.calcColorOffsetFor(BlockPart.FRONT, biome);
        mat.setFloat4("colorOffset", particle.color.x * colorMod.x, particle.color.y * colorMod.y,
                particle.color.z * colorMod.z, particle.color.w * colorMod.w, true);
        mat.setFloat2("texOffset", particle.texOffset.x, particle.texOffset.y, true);
        mat.setFloat2("texScale", particle.texSize.x, particle.texSize.y, true);
        mat.setFloat("light", light, true);
        glCallList(displayList);
    }

    /** Emits the unit billboard quad recorded into the display list. */
    private void drawParticle() {
        glBegin(GL_QUADS);
        GL11.glTexCoord2f(0.0f, 0.0f);
        GL11.glVertex3f(-0.5f, 0.5f, 0.0f);
        GL11.glTexCoord2f(1.0f, 0.0f);
        GL11.glVertex3f(0.5f, 0.5f, 0.0f);
        GL11.glTexCoord2f(1.0f, 1.0f);
        GL11.glVertex3f(0.5f, -0.5f, 0.0f);
        GL11.glTexCoord2f(0.0f, 1.0f);
        GL11.glVertex3f(-0.5f, -0.5f, 0.0f);
        glEnd();
    }

    // particles only render in the alpha-blend pass; the remaining passes are no-ops
    @Override
    public void renderOpaque() {
    }

    @Override
    public void renderOverlay() {
    }

    @Override
    public void renderFirstPerson() {
    }

    @Override
    public void renderShadows() {
    }
}
package mstparser;

import gnu.trove.TIntIntHashMap;

import java.util.ArrayList;
import java.util.Arrays;

/**
 * Decodes dependency parses from edge-factored scores.
 *
 * <p>Two decoders are provided: a projective K-best chart decoder
 * ({@link #decodeProjective}) and a non-projective decoder based on the
 * Chu-Liu-Edmonds maximum spanning tree algorithm ({@link #decodeNonProjective}).
 * Throughout, indices into the 4-dimensional {@code nt_probs}/{@code nt_fvs}
 * arrays follow the convention [word][label][direction][head-or-child].
 */
public class DependencyDecoder {

    // Supplies the label alphabet (pipe.types) and the labeled/unlabeled flag.
    DependencyPipe pipe;

    public DependencyDecoder(DependencyPipe pipe) {
        this.pipe = pipe;
    }

    /**
     * Precomputes, for every ordered word pair (i,j), the single best dependency
     * label for an edge between them, using only the label scores (the "static"
     * typing trick that keeps labeled decoding at O(n^3 + T*n^2)).
     *
     * @param nt_probs label scores indexed [word][label][direction][head(1)/child(0)]
     * @param len      sentence length
     * @return matrix of best label indices; diagonal entries are 0 (unused)
     */
    protected int[][] getTypes(double[][][][] nt_probs, int len) {
        int[][] static_types = new int[len][len];
        for (int i = 0; i < len; i++) {
            for (int j = 0; j < len; j++) {
                if (i == j) {
                    static_types[i][j] = 0;
                    continue;
                }
                int wh = -1;
                double best = Double.NEGATIVE_INFINITY;
                for (int t = 0; t < pipe.types.length; t++) {
                    double score = 0.0;
                    // i < j: i is head of a right-pointing edge; otherwise left-pointing.
                    if (i < j) score = nt_probs[i][t][0][1] + nt_probs[j][t][0][0];
                    else score = nt_probs[i][t][1][1] + nt_probs[j][t][1][0];
                    if (score > best) {
                        wh = t;
                        best = score;
                    }
                }
                static_types[i][j] = wh;
            }
        }
        return static_types;
    }

    // static type for each edge: run time O(n^3 + Tn^2) T is number of types
    /**
     * K-best projective decoding with an Eisner-style span chart.
     *
     * @param inst     the sentence to decode
     * @param fvs      edge feature vectors indexed [s][t][direction]
     * @param probs    edge scores indexed [s][t][direction]
     * @param nt_fvs   label feature vectors [word][label][direction][head/child]
     * @param nt_probs label scores [word][label][direction][head/child]
     * @param K        number of parses to return
     * @return the K best parses as produced by {@link KBestParseForest#getBestParses()}
     */
    public Object[][] decodeProjective(DependencyInstance inst, FeatureVector[][][] fvs, double[][][] probs, FeatureVector[][][][] nt_fvs, double[][][][] nt_probs, int K) {
        String[] forms = inst.forms;
        String[] pos = inst.postags;

        int[][] static_types = null;
        if (pipe.labeled) {
            static_types = getTypes(nt_probs, forms.length);
        }

        KBestParseForest pf = new KBestParseForest(0, forms.length - 1, inst, K);

        // Seed the chart with zero-score single-word items in both directions.
        for (int s = 0; s < forms.length; s++) {
            pf.add(s, -1, 0, 0.0, new FeatureVector());
            pf.add(s, -1, 1, 0.0, new FeatureVector());
        }

        // Bottom-up over span widths j, spans [s, t] with t = s + j.
        for (int j = 1; j < forms.length; j++) {
            for (int s = 0; s < forms.length && s + j < forms.length; s++) {
                int t = s + j;

                FeatureVector prodFV_st = fvs[s][t][0];
                FeatureVector prodFV_ts = fvs[s][t][1];
                double prodProb_st = probs[s][t][0];
                double prodProb_ts = probs[s][t][1];

                int type1 = pipe.labeled ? static_types[s][t] : 0;
                int type2 = pipe.labeled ? static_types[t][s] : 0;

                FeatureVector nt_fv_s_01 = nt_fvs[s][type1][0][1];
                FeatureVector nt_fv_s_10 = nt_fvs[s][type2][1][0];
                FeatureVector nt_fv_t_00 = nt_fvs[t][type1][0][0];
                FeatureVector nt_fv_t_11 = nt_fvs[t][type2][1][1];

                double nt_prob_s_01 = nt_probs[s][type1][0][1];
                double nt_prob_s_10 = nt_probs[s][type2][1][0];
                double nt_prob_t_00 = nt_probs[t][type1][0][0];
                double nt_prob_t_11 = nt_probs[t][type2][1][1];

                double prodProb = 0.0;

                // Phase 1: build INCOMPLETE items by attaching an edge over split r.
                for (int r = s; r <= t; r++) {
                    /** first is direction, second is complete */
                    /** _s means s is the parent */
                    if (r != t) {
                        ParseForestItem[] b1 = pf.getItems(s, r, 0, 0);
                        ParseForestItem[] c1 = pf.getItems(r + 1, t, 1, 0);
                        if (b1 != null && c1 != null) {
                            int[][] pairs = pf.getKBestPairs(b1, c1);
                            for (int k = 0; k < pairs.length; k++) {
                                // pairs are emitted best-first; -1 marks exhaustion.
                                if (pairs[k][0] == -1 || pairs[k][1] == -1) break;
                                int comp1 = pairs[k][0];
                                int comp2 = pairs[k][1];
                                double bc = b1[comp1].prob + c1[comp2].prob;

                                // s --> t edge.
                                double prob_fin = bc + prodProb_st;
                                FeatureVector fv_fin = prodFV_st;
                                if (pipe.labeled) {
                                    fv_fin = nt_fv_s_01.cat(nt_fv_t_00.cat(fv_fin));
                                    prob_fin += nt_prob_s_01 + nt_prob_t_00;
                                }
                                pf.add(s, r, t, type1, 0, 1, prob_fin, fv_fin, b1[comp1], c1[comp2]);

                                // t --> s edge.
                                prob_fin = bc + prodProb_ts;
                                fv_fin = prodFV_ts;
                                if (pipe.labeled) {
                                    fv_fin = nt_fv_t_11.cat(nt_fv_s_10.cat(fv_fin));
                                    prob_fin += nt_prob_t_11 + nt_prob_s_10;
                                }
                                pf.add(s, r, t, type2, 1, 1, prob_fin, fv_fin, b1[comp1], c1[comp2]);
                            }
                        }
                    }
                }

                // Phase 2: build COMPLETE items by joining an incomplete with a complete item.
                for (int r = s; r <= t; r++) {
                    if (r != s) {
                        ParseForestItem[] b1 = pf.getItems(s, r, 0, 1);
                        ParseForestItem[] c1 = pf.getItems(r, t, 0, 0);
                        if (b1 != null && c1 != null) {
                            int[][] pairs = pf.getKBestPairs(b1, c1);
                            for (int k = 0; k < pairs.length; k++) {
                                if (pairs[k][0] == -1 || pairs[k][1] == -1) break;
                                int comp1 = pairs[k][0];
                                int comp2 = pairs[k][1];
                                double bc = b1[comp1].prob + c1[comp2].prob;
                                // add() returning false means the K-best list is saturated.
                                if (!pf.add(s, r, t, -1, 0, 0, bc, new FeatureVector(), b1[comp1], c1[comp2])) break;
                            }
                        }
                    }
                    if (r != t) {
                        ParseForestItem[] b1 = pf.getItems(s, r, 1, 0);
                        ParseForestItem[] c1 = pf.getItems(r, t, 1, 1);
                        if (b1 != null && c1 != null) {
                            int[][] pairs = pf.getKBestPairs(b1, c1);
                            for (int k = 0; k < pairs.length; k++) {
                                if (pairs[k][0] == -1 || pairs[k][1] == -1) break;
                                int comp1 = pairs[k][0];
                                int comp2 = pairs[k][1];
                                double bc = b1[comp1].prob + c1[comp2].prob;
                                if (!pf.add(s, r, t, -1, 1, 0, bc, new FeatureVector(), b1[comp1], c1[comp2])) break;
                            }
                        }
                    }
                }
            }
        }

        return pf.getBestParses();
    }

    /**
     * Non-projective decoding via Chu-Liu-Edmonds, followed by a greedy
     * single-edge-swap approximation ({@link #getKChanges}) to obtain up to K
     * near-best alternative trees.
     *
     * @return array of {FeatureVector, "head|child:label ..." string} pairs,
     *         best tree first
     */
    public Object[][] decodeNonProjective(DependencyInstance inst, FeatureVector[][][] fvs, double[][][] probs, FeatureVector[][][][] nt_fvs, double[][][][] nt_probs, int K) {
        String[] pos = inst.postags;

        int numWords = inst.length();
        int[][] oldI = new int[numWords][numWords];
        int[][] oldO = new int[numWords][numWords];
        // scoreMatrix is destructively updated during cycle contraction;
        // orig_scoreMatrix keeps the untouched scores for getKChanges.
        double[][] scoreMatrix = new double[numWords][numWords];
        double[][] orig_scoreMatrix = new double[numWords][numWords];
        boolean[] curr_nodes = new boolean[numWords];
        TIntIntHashMap[] reps = new TIntIntHashMap[numWords];

        int[][] static_types = null;
        if (pipe.labeled) {
            static_types = getTypes(nt_probs, pos.length);
        }

        for (int i = 0; i < numWords; i++) {
            curr_nodes[i] = true;
            reps[i] = new TIntIntHashMap();
            reps[i].put(i, 0);
            for (int j = 0; j < numWords; j++) {
                // score of edge (i,j) i --> j
                scoreMatrix[i][j] = probs[i < j ? i : j][i < j ? j : i][i < j ? 0 : 1]
                        + (pipe.labeled ? nt_probs[i][static_types[i][j]][i < j ? 0 : 1][1] + nt_probs[j][static_types[i][j]][i < j ? 0 : 1][0] : 0.0);
                orig_scoreMatrix[i][j] = probs[i < j ? i : j][i < j ? j : i][i < j ? 0 : 1]
                        + (pipe.labeled ? nt_probs[i][static_types[i][j]][i < j ? 0 : 1][1] + nt_probs[j][static_types[i][j]][i < j ? 0 : 1][0] : 0.0);
                oldI[i][j] = i;
                oldO[i][j] = j;
                if (i == j || j == 0) continue; // no self loops of i --> 0
            }
        }

        TIntIntHashMap final_edges = chuLiuEdmonds(scoreMatrix, curr_nodes, oldI, oldO, false, new TIntIntHashMap(), reps);
        int[] par = new int[numWords];
        int[] ns = final_edges.keys();
        for (int i = 0; i < ns.length; i++) {
            int ch = ns[i];
            int pr = final_edges.get(ns[i]);
            par[ch] = pr;
        }

        int[] n_par = getKChanges(par, orig_scoreMatrix, Math.min(K, par.length));
        int new_k = 1;
        for (int i = 0; i < n_par.length; i++)
            if (n_par[i] > -1) new_k++;

        // Create Feature Vectors;
        int[][] fin_par = new int[new_k][numWords];
        FeatureVector[][] fin_fv = new FeatureVector[new_k][numWords];
        fin_par[0] = par;
        int c = 1;
        // Each alternative tree differs from the best one by exactly one parent swap.
        for (int i = 0; i < n_par.length; i++) {
            if (n_par[i] > -1) {
                int[] t_par = new int[par.length];
                for (int j = 0; j < t_par.length; j++) t_par[j] = par[j];
                t_par[i] = n_par[i];
                fin_par[c] = t_par;
                c++;
            }
        }

        for (int k = 0; k < fin_par.length; k++) {
            for (int i = 0; i < fin_par[k].length; i++) {
                int ch = i;
                int pr = fin_par[k][i];
                if (pr != -1) {
                    fin_fv[k][ch] = fvs[ch < pr ? ch : pr][ch < pr ? pr : ch][ch < pr ? 1 : 0];
                    if (pipe.labeled) {
                        fin_fv[k][ch] = fin_fv[k][ch].cat(nt_fvs[ch][static_types[pr][ch]][ch < pr ? 1 : 0][0]);
                        fin_fv[k][ch] = fin_fv[k][ch].cat(nt_fvs[pr][static_types[pr][ch]][ch < pr ? 1 : 0][1]);
                    }
                } else fin_fv[k][ch] = new FeatureVector();
            }
        }

        FeatureVector[] fin = new FeatureVector[new_k];
        String[] result = new String[new_k];
        for (int k = 0; k < fin.length; k++) {
            fin[k] = new FeatureVector();
            // Word 0 is the artificial root; it contributes no edge of its own.
            for (int i = 1; i < fin_fv[k].length; i++) fin[k] = fin_fv[k][i].cat(fin[k]);
            result[k] = "";
            for (int i = 1; i < par.length; i++)
                result[k] += fin_par[k][i] + "|" + i + (pipe.labeled ? ":" + static_types[fin_par[k][i]][i] : ":0") + " ";
        }

        // create d.
        Object[][] d = new Object[new_k][2];
        for (int k = 0; k < new_k; k++) {
            d[k][0] = fin[k];
            d[k][1] = result[k].trim();
        }
        return d;
    }

    /**
     * For the best tree {@code par}, greedily finds up to K highest-scoring
     * single-parent changes that keep the structure a tree (a node may not be
     * re-attached to itself, its current parent, or any of its descendants).
     *
     * @return per-node alternative parent, or -1 where no change was selected
     */
    private int[] getKChanges(int[] par, double[][] scoreMatrix, int K) {
        int[] result = new int[par.length];
        int[] n_par = new int[par.length];
        double[] n_score = new double[par.length];
        for (int i = 0; i < par.length; i++) {
            result[i] = -1;
            n_par[i] = -1;
            n_score[i] = Double.NEGATIVE_INFINITY;
        }

        boolean[][] isChild = calcChilds(par);

        // Best legal alternative parent for every non-root node.
        for (int i = 1; i < n_par.length; i++) {
            double max = Double.NEGATIVE_INFINITY;
            int wh = -1;
            for (int j = 0; j < n_par.length; j++) {
                if (i == j || par[i] == j || isChild[i][j]) continue;
                if (scoreMatrix[j][i] > max) {
                    max = scoreMatrix[j][i];
                    wh = j;
                }
            }
            n_par[i] = wh;
            n_score[i] = max;
        }

        // Pick the K best of those alternatives, consuming each node at most once.
        for (int k = 0; k < K; k++) {
            double max = Double.NEGATIVE_INFINITY;
            int wh = -1;
            int whI = -1;
            for (int i = 0; i < n_par.length; i++) {
                if (n_par[i] == -1) continue;
                double score = scoreMatrix[n_par[i]][i];
                if (score > max) {
                    max = score;
                    whI = i;
                    wh = n_par[i];
                }
            }
            if (max == Double.NEGATIVE_INFINITY) break;
            result[whI] = wh;
            n_par[whI] = -1;
        }
        return result;
    }

    /**
     * Computes the transitive ancestor relation of the tree: isChild[l][i] is
     * true when l is an ancestor of i (so attaching l under i would create a cycle).
     */
    private boolean[][] calcChilds(int[] par) {
        boolean[][] isChild = new boolean[par.length][par.length];
        for (int i = 1; i < par.length; i++) {
            int l = par[i];
            while (l != -1) {
                isChild[l][i] = true;
                l = par[l];
            }
        }
        return isChild;
    }

    /**
     * Recursive Chu-Liu-Edmonds maximum spanning tree: pick the best incoming
     * edge for every node; if that graph is a tree, done; otherwise contract the
     * largest cycle into its representative node, recurse, and expand the cycle.
     * {@code oldI}/{@code oldO} map contracted edges back to original endpoints;
     * {@code reps} tracks which original nodes each representative stands for.
     * NOTE: scoreMatrix, curr_nodes, oldI and oldO are mutated in place.
     *
     * @return final_edges mapping child -> parent in the original graph
     */
    private static TIntIntHashMap chuLiuEdmonds(double[][] scoreMatrix, boolean[] curr_nodes, int[][] oldI, int[][] oldO, boolean print, TIntIntHashMap final_edges, TIntIntHashMap[] reps) {
        // need to construct for each node list of nodes they represent (here only!)

        int[] par = new int[curr_nodes.length];
        int numWords = curr_nodes.length;

        // create best graph
        par[0] = -1;
        for (int i = 1; i < par.length; i++) {
            // only interested in current nodes
            if (!curr_nodes[i]) continue;
            double maxScore = scoreMatrix[0][i];
            par[i] = 0;
            for (int j = 0; j < par.length; j++) {
                if (j == i) continue;
                if (!curr_nodes[j]) continue;
                double newScore = scoreMatrix[j][i];
                if (newScore > maxScore) {
                    maxScore = newScore;
                    par[i] = j;
                }
            }
        }

        if (print) {
            System.out.println("After init");
            for (int i = 0; i < par.length; i++) {
                if (curr_nodes[i]) System.out.print(par[i] + "|" + i + " ");
            }
            System.out.println();
        }

        // Find a cycle
        ArrayList cycles = new ArrayList();
        boolean[] added = new boolean[numWords];
        for (int i = 0; i < numWords && cycles.size() == 0; i++) {
            // if I have already considered this or
            // This is not a valid node (i.e. has been contracted)
            if (added[i] || !curr_nodes[i]) continue;
            added[i] = true;
            TIntIntHashMap cycle = new TIntIntHashMap();
            cycle.put(i, 0);
            int l = i;
            while (true) {
                if (par[l] == -1) {
                    added[l] = true;
                    break;
                }
                if (cycle.contains(par[l])) {
                    // Walk the cycle once more to collect exactly its members.
                    cycle = new TIntIntHashMap();
                    int lorg = par[l];
                    cycle.put(lorg, par[lorg]);
                    added[lorg] = true;
                    int l1 = par[lorg];
                    while (l1 != lorg) {
                        cycle.put(l1, par[l1]);
                        added[l1] = true;
                        l1 = par[l1];
                    }
                    cycles.add(cycle);
                    break;
                }
                cycle.put(l, 0);
                l = par[l];
                if (added[l] && !cycle.contains(l)) break;
                added[l] = true;
            }
        }

        // get all edges and return them
        if (cycles.size() == 0) {
            // System.out.println("TREE:");
            for (int i = 0; i < par.length; i++) {
                if (!curr_nodes[i]) continue;
                if (par[i] != -1) {
                    int pr = oldI[par[i]][i];
                    int ch = oldO[par[i]][i];
                    final_edges.put(ch, pr);
                    // System.out.print(pr+"|"+ch + " ");
                } else final_edges.put(0, -1);
            }
            // System.out.println();
            return final_edges;
        }

        // Contract the largest cycle found.
        int max_cyc = 0;
        int wh_cyc = 0;
        for (int i = 0; i < cycles.size(); i++) {
            TIntIntHashMap cycle = (TIntIntHashMap) cycles.get(i);
            if (cycle.size() > max_cyc) {
                max_cyc = cycle.size();
                wh_cyc = i;
            }
        }

        TIntIntHashMap cycle = (TIntIntHashMap) cycles.get(wh_cyc);
        int[] cyc_nodes = cycle.keys();
        int rep = cyc_nodes[0];

        if (print) {
            System.out.println("Found Cycle");
            for (int i = 0; i < cyc_nodes.length; i++) System.out.print(cyc_nodes[i] + " ");
            System.out.println();
        }

        double cyc_weight = 0.0;
        for (int j = 0; j < cyc_nodes.length; j++) {
            cyc_weight += scoreMatrix[par[cyc_nodes[j]]][cyc_nodes[j]];
        }

        // Re-score edges into/out of the contracted cycle against the representative.
        for (int i = 0; i < numWords; i++) {
            if (!curr_nodes[i] || cycle.contains(i)) continue;

            double max1 = Double.NEGATIVE_INFINITY;
            int wh1 = -1;
            double max2 = Double.NEGATIVE_INFINITY;
            int wh2 = -1;

            for (int j = 0; j < cyc_nodes.length; j++) {
                int j1 = cyc_nodes[j];

                if (scoreMatrix[j1][i] > max1) {
                    max1 = scoreMatrix[j1][i];
                    wh1 = j1;// oldI[j1][i];
                }

                // cycle weight + new edge - removal of old
                double scr = cyc_weight + scoreMatrix[i][j1] - scoreMatrix[par[j1]][j1];
                if (scr > max2) {
                    max2 = scr;
                    wh2 = j1;// oldO[i][j1];
                }
            }

            scoreMatrix[rep][i] = max1;
            oldI[rep][i] = oldI[wh1][i];// wh1;
            oldO[rep][i] = oldO[wh1][i];// oldO[wh1][i];
            scoreMatrix[i][rep] = max2;
            oldO[i][rep] = oldO[i][wh2];// wh2;
            oldI[i][rep] = oldI[i][wh2];// oldI[i][wh2];
        }

        // Snapshot the representative sets of each cycle node for the expansion step.
        TIntIntHashMap[] rep_cons = new TIntIntHashMap[cyc_nodes.length];
        for (int i = 0; i < cyc_nodes.length; i++) {
            rep_cons[i] = new TIntIntHashMap();
            int[] keys = reps[cyc_nodes[i]].keys();
            Arrays.sort(keys);
            if (print) System.out.print(cyc_nodes[i] + ": ");
            for (int j = 0; j < keys.length; j++) {
                rep_cons[i].put(keys[j], 0);
                if (print) System.out.print(keys[j] + " ");
            }
            if (print) System.out.println();
        }

        // don't consider not representative nodes
        // these nodes have been folded
        for (int i = 1; i < cyc_nodes.length; i++) {
            curr_nodes[cyc_nodes[i]] = false;
            int[] keys = reps[cyc_nodes[i]].keys();
            for (int j = 0; j < keys.length; j++) reps[rep].put(keys[j], 0);
        }

        chuLiuEdmonds(scoreMatrix, curr_nodes, oldI, oldO, print, final_edges, reps);

        // check each node in cycle, if one of its representatives
        // is a key in the final_edges, it is the one.
        int wh = -1;
        boolean found = false;
        for (int i = 0; i < rep_cons.length && !found; i++) {
            int[] keys = rep_cons[i].keys();
            for (int j = 0; j < keys.length && !found; j++) {
                if (final_edges.contains(keys[j])) {
                    wh = cyc_nodes[i];
                    found = true;
                }
            }
        }

        // Expand the cycle: keep every cycle edge except the one into wh.
        int l = par[wh];
        while (l != wh) {
            int ch = oldO[par[l]][l];
            int pr = oldI[par[l]][l];
            final_edges.put(ch, pr);
            l = par[l];
        }

        if (print) {
            int[] keys = final_edges.keys();
            Arrays.sort(keys);
            for (int i = 0; i < keys.length; i++) System.out.print(final_edges.get(keys[i]) + "|" + keys[i] + " ");
            System.out.println();
        }

        return final_edges;
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.http4; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.Serializable; import java.io.UnsupportedEncodingException; import java.net.URI; import java.net.URISyntaxException; import java.util.HashMap; import java.util.Map; import org.apache.camel.CamelExchangeException; import org.apache.camel.Exchange; import org.apache.camel.Message; import org.apache.camel.component.file.GenericFile; import org.apache.camel.component.http4.helper.HttpHelper; import org.apache.camel.converter.IOConverter; import org.apache.camel.converter.stream.CachedOutputStream; import org.apache.camel.impl.DefaultProducer; import org.apache.camel.spi.HeaderFilterStrategy; import org.apache.camel.util.ExchangeHelper; import org.apache.camel.util.GZIPHelper; import org.apache.camel.util.IOHelper; import org.apache.http.Header; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpEntityEnclosingRequestBase; import org.apache.http.client.methods.HttpRequestBase; import org.apache.http.client.methods.HttpUriRequest; 
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.entity.FileEntity;
import org.apache.http.entity.InputStreamEntity;
import org.apache.http.entity.StringEntity;
import org.apache.http.params.CoreProtocolPNames;
import org.apache.http.util.EntityUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Camel producer that sends the IN message to a remote HTTP server using
 * Apache HttpClient 4.x and populates the OUT message from the response.
 *
 * @version
 */
public class HttpProducer extends DefaultProducer {
    private static final transient Logger LOG = LoggerFactory.getLogger(HttpProducer.class);
    private HttpClient httpClient;
    // When true, non-2xx responses are raised as HttpOperationFailedException.
    private boolean throwException;
    // When true, a serialized java Exception in the response body is rethrown as-is.
    private boolean transferException;

    public HttpProducer(HttpEndpoint endpoint) {
        super(endpoint);
        this.httpClient = endpoint.getHttpClient();
        this.throwException = endpoint.isThrowExceptionOnFailure();
        this.transferException = endpoint.isTransferException();
    }

    /**
     * Builds the HTTP request from the exchange, executes it, and either
     * populates the response or throws on failure codes (when configured).
     * The response entity is always consumed so the connection is released.
     */
    public void process(Exchange exchange) throws Exception {
        if (getEndpoint().isBridgeEndpoint()) {
            // In bridge mode the raw (possibly gzipped) payload is passed through untouched.
            exchange.setProperty(Exchange.SKIP_GZIP_ENCODING, Boolean.TRUE);
        }
        HttpRequestBase httpRequest = createMethod(exchange);
        Message in = exchange.getIn();
        String httpProtocolVersion = in.getHeader(Exchange.HTTP_PROTOCOL_VERSION, String.class);
        if (httpProtocolVersion != null) {
            // set the HTTP protocol version
            httpRequest.getParams().setParameter(CoreProtocolPNames.PROTOCOL_VERSION, HttpHelper.parserHttpVersion(httpProtocolVersion));
        }
        HeaderFilterStrategy strategy = getEndpoint().getHeaderFilterStrategy();

        // propagate headers as HTTP headers
        for (Map.Entry<String, Object> entry : in.getHeaders().entrySet()) {
            String headerValue = in.getHeader(entry.getKey(), String.class);
            if (strategy != null && !strategy.applyFilterToCamelHeaders(entry.getKey(), headerValue, exchange)) {
                httpRequest.addHeader(entry.getKey(), headerValue);
            }
        }

        // lets store the result in the output message.
        HttpResponse httpResponse = null;
        try {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Executing http {} method: {}", httpRequest.getMethod(), httpRequest.getURI().toString());
            }
            httpResponse = executeMethod(httpRequest);
            int responseCode = httpResponse.getStatusLine().getStatusCode();
            LOG.debug("Http responseCode: {}", responseCode);

            if (throwException && (responseCode < 100 || responseCode >= 300)) {
                throw populateHttpOperationFailedException(exchange, httpRequest, httpResponse, responseCode);
            } else {
                populateResponse(exchange, httpRequest, httpResponse, in, strategy, responseCode);
            }
        } finally {
            // Consume the entity so HttpClient can release/reuse the connection.
            if (httpResponse != null) {
                try {
                    EntityUtils.consume(httpResponse.getEntity());
                } catch (IOException e) {
                    // nothing we could do
                }
            }
        }
    }

    @Override
    public HttpEndpoint getEndpoint() {
        return (HttpEndpoint) super.getEndpoint();
    }

    /**
     * Copies the HTTP response (body, status code, headers) onto the OUT message,
     * applying the endpoint's header filter strategy to response headers.
     */
    protected void populateResponse(Exchange exchange, HttpRequestBase httpRequest, HttpResponse httpResponse, Message in, HeaderFilterStrategy strategy, int responseCode) throws IOException, ClassNotFoundException {
        // We just make the out message is not create when extractResponseBody throws exception
        Object response = extractResponseBody(httpRequest, httpResponse, exchange);
        Message answer = exchange.getOut();

        answer.setHeaders(in.getHeaders());
        answer.setHeader(Exchange.HTTP_RESPONSE_CODE, responseCode);
        answer.setBody(response);

        // propagate HTTP response headers
        Header[] headers = httpResponse.getAllHeaders();
        for (Header header : headers) {
            String name = header.getName();
            String value = header.getValue();
            // Normalize Content-Type to the Camel header constant.
            if (name.toLowerCase().equals("content-type")) {
                name = Exchange.CONTENT_TYPE;
            }
            if (strategy != null && !strategy.applyFilterToExternalHeaders(name, value, exchange)) {
                answer.setHeader(name, value);
            }
        }
    }

    /**
     * Builds the exception to throw for a failure response. Redirect responses
     * (3xx with a Location header) include the redirect location; if
     * transferException is on and the body is a serialized Exception, that
     * exception is returned directly.
     */
    protected Exception populateHttpOperationFailedException(Exchange exchange, HttpRequestBase httpRequest, HttpResponse httpResponse, int responseCode) throws IOException, ClassNotFoundException {
        Exception answer;
        String uri = httpRequest.getURI().toString();
        String statusText = httpResponse.getStatusLine() != null ? httpResponse.getStatusLine().getReasonPhrase() : null;
        Map<String, String> headers = extractResponseHeaders(httpResponse.getAllHeaders());

        Object responseBody = extractResponseBody(httpRequest, httpResponse, exchange);
        if (transferException && responseBody != null && responseBody instanceof Exception) {
            // if the response was a serialized exception then use that
            return (Exception) responseBody;
        }

        // make a defensive copy of the response body in the exception so its detached from the cache
        String copy = null;
        if (responseBody != null) {
            copy = exchange.getContext().getTypeConverter().convertTo(String.class, exchange, responseBody);
        }

        Header locationHeader = httpResponse.getFirstHeader("location");
        if (locationHeader != null && (responseCode >= 300 && responseCode < 400)) {
            answer = new HttpOperationFailedException(uri, responseCode, statusText, locationHeader.getValue(), headers, copy);
        } else {
            answer = new HttpOperationFailedException(uri, responseCode, statusText, null, headers, copy);
        }

        return answer;
    }

    /**
     * Strategy when executing the method (calling the remote server).
     *
     * @param httpRequest the http Request to execute
     * @return the response
     * @throws IOException can be thrown
     */
    protected HttpResponse executeMethod(HttpUriRequest httpRequest) throws IOException {
        return httpClient.execute(httpRequest);
    }

    /**
     * Extracts the response headers
     *
     * @param responseHeaders the headers
     * @return the extracted headers or <tt>null</tt> if no headers existed
     */
    protected static Map<String, String> extractResponseHeaders(Header[] responseHeaders) {
        if (responseHeaders == null || responseHeaders.length == 0) {
            return null;
        }

        Map<String, String> answer = new HashMap<String, String>();
        for (Header header : responseHeaders) {
            answer.put(header.getName(), header.getValue());
        }

        return answer;
    }

    /**
     * Extracts the response from the method as a InputStream.
     *
     * @param httpRequest the method that was executed
     * @return the response either as a stream, or as a deserialized java object
     * @throws IOException can be thrown
     */
    protected static Object extractResponseBody(HttpRequestBase httpRequest, HttpResponse httpResponse, Exchange exchange) throws IOException, ClassNotFoundException {
        HttpEntity entity = httpResponse.getEntity();
        if (entity == null) {
            return null;
        }

        InputStream is = entity.getContent();
        if (is == null) {
            return null;
        }

        Header header = httpResponse.getFirstHeader(Exchange.CONTENT_ENCODING);
        String contentEncoding = header != null ? header.getValue() : null;

        // Skip decompression in bridge mode (SKIP_GZIP_ENCODING set in process()).
        if (!exchange.getProperty(Exchange.SKIP_GZIP_ENCODING, Boolean.FALSE, Boolean.class)) {
            is = GZIPHelper.uncompressGzip(contentEncoding, is);
        }
        // Honor the character encoding
        String contentType = null;
        header = httpRequest.getFirstHeader("content-type");
        if (header != null) {
            contentType = header.getValue();
            // find the charset and set it to the Exchange
            HttpHelper.setCharsetFromContentType(contentType, exchange);
        }
        InputStream response = doExtractResponseBodyAsStream(is, exchange);
        // if content type is a serialized java object then de-serialize it back to a Java object
        if (contentType != null && contentType.equals(HttpConstants.CONTENT_TYPE_JAVA_SERIALIZED_OBJECT)) {
            return HttpHelper.deserializeJavaObjectFromStream(response);
        } else {
            return response;
        }
    }

    private static InputStream doExtractResponseBodyAsStream(InputStream is, Exchange exchange) throws IOException {
        // As httpclient is using a AutoCloseInputStream, it will be closed when the connection is closed
        // we need to cache the stream for it.
        try {
            // This CachedOutputStream will not be closed when the exchange is onCompletion
            CachedOutputStream cos = new CachedOutputStream(exchange, false);
            IOHelper.copy(is, cos);
            // When the InputStream is closed, the CachedOutputStream will be closed
            return cos.getWrappedInputStream();
        } finally {
            IOHelper.close(is, "Extracting response body", LOG);
        }
    }

    /**
     * Creates the HttpMethod to use to call the remote server, either its GET or POST.
     *
     * @param exchange the exchange
     * @return the created method as either GET or POST
     * @throws URISyntaxException is thrown if the URI is invalid
     * @throws CamelExchangeException is thrown if error creating RequestEntity
     */
    protected HttpRequestBase createMethod(Exchange exchange) throws URISyntaxException, CamelExchangeException {
        String url = HttpHelper.createURL(exchange, getEndpoint());
        URI uri = new URI(url);

        HttpEntity requestEntity = createRequestEntity(exchange);
        HttpMethods methodToUse = HttpHelper.createMethod(exchange, getEndpoint(), requestEntity != null);

        // is a query string provided in the endpoint URI or in a header (header overrules endpoint)
        String queryString = exchange.getIn().getHeader(Exchange.HTTP_QUERY, String.class);
        if (queryString == null) {
            queryString = getEndpoint().getHttpUri().getRawQuery();
        }

        // Rebuild the URL from its parts so the (possibly overridden) query string is applied.
        StringBuilder builder = new StringBuilder(uri.getScheme()).append("://").append(uri.getHost());

        if (uri.getPort() != -1) {
            builder.append(":").append(uri.getPort());
        }

        if (uri.getPath() != null) {
            builder.append(uri.getRawPath());
        }

        if (queryString != null) {
            builder.append('?');
            builder.append(queryString);
        }

        HttpRequestBase httpRequest = methodToUse.createMethod(builder.toString());

        if (methodToUse.isEntityEnclosing()) {
            ((HttpEntityEnclosingRequestBase) httpRequest).setEntity(requestEntity);
            if (requestEntity != null && requestEntity.getContentType() == null) {
                LOG.debug("No Content-Type provided for URL: {} with exchange: {}", url, exchange);
            }
        }

        return httpRequest;
    }

    /**
     * Creates a holder object for the data to send to the remote server.
     *
     * @param exchange the exchange with the IN message with data to send
     * @return the data holder
     * @throws CamelExchangeException is thrown if error creating RequestEntity
     */
    protected HttpEntity createRequestEntity(Exchange exchange) throws CamelExchangeException {
        Message in = exchange.getIn();
        if (in.getBody() == null) {
            return null;
        }

        HttpEntity answer = in.getBody(HttpEntity.class);
        if (answer == null) {
            try {
                Object data = in.getBody();
                if (data != null) {
                    String contentType = ExchangeHelper.getContentType(exchange);

                    if (contentType != null && HttpConstants.CONTENT_TYPE_JAVA_SERIALIZED_OBJECT.equals(contentType)) {
                        // serialized java object
                        Serializable obj = in.getMandatoryBody(Serializable.class);
                        // write object to output stream
                        ByteArrayOutputStream bos = new ByteArrayOutputStream();
                        HttpHelper.writeObjectToStream(bos, obj);
                        ByteArrayEntity entity = new ByteArrayEntity(bos.toByteArray());
                        entity.setContentType(HttpConstants.CONTENT_TYPE_JAVA_SERIALIZED_OBJECT);
                        IOHelper.close(bos);
                        answer = entity;
                    } else if (data instanceof File || data instanceof GenericFile) {
                        // file based (could potentially also be a FTP file etc)
                        File file = in.getBody(File.class);
                        if (file != null) {
                            answer = new FileEntity(file, contentType);
                        }
                    } else if (data instanceof String) {
                        // be a bit careful with String as any type can most likely be converted to String
                        // so we only do an instanceof check and accept String if the body is really a String
                        // do not fallback to use the default charset as it can influence the request
                        // (for example application/x-www-form-urlencoded forms being sent)
                        String charset = IOConverter.getCharsetName(exchange, false);
                        StringEntity entity = new StringEntity((String) data, charset);
                        entity.setContentType(contentType);
                        answer = entity;
                    }

                    // fallback as input stream
                    if (answer == null) {
                        // force the body as an input stream since this is the fallback
                        InputStream is = in.getMandatoryBody(InputStream.class);
                        InputStreamEntity entity = new InputStreamEntity(is, -1);
                        entity.setContentType(contentType);
                        answer = entity;
                    }
                }
            } catch (UnsupportedEncodingException e) {
                throw new CamelExchangeException("Error creating RequestEntity from message body", exchange, e);
            } catch (IOException e) {
                throw new CamelExchangeException("Error serializing message body", exchange, e);
            }
        }
        return answer;
    }

    public HttpClient getHttpClient() {
        return httpClient;
    }

    public void setHttpClient(HttpClient httpClient) {
        this.httpClient = httpClient;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.tests.integration.client; import org.junit.Before; import org.junit.Test; import org.junit.Assert; import org.apache.activemq.artemis.api.core.Message; import org.apache.activemq.artemis.api.core.SimpleString; import org.apache.activemq.artemis.api.core.client.ClientConsumer; import org.apache.activemq.artemis.api.core.client.ClientMessage; import org.apache.activemq.artemis.api.core.client.ClientProducer; import org.apache.activemq.artemis.api.core.client.ClientSession; import org.apache.activemq.artemis.api.core.client.ClientSessionFactory; import org.apache.activemq.artemis.api.core.client.ServerLocator; import org.apache.activemq.artemis.core.server.ActiveMQServer; import org.apache.activemq.artemis.core.server.Queue; import org.apache.activemq.artemis.core.settings.impl.AddressSettings; import org.apache.activemq.artemis.tests.util.ActiveMQTestBase; import org.apache.activemq.artemis.utils.RandomUtil; public class MessageExpirationTest extends ActiveMQTestBase { private static final int EXPIRATION = 1000; private ActiveMQServer server; private ClientSession session; private ClientSessionFactory sf; private ServerLocator locator; @Test public void 
testMessageExpiredWithoutExpiryAddress() throws Exception {
   SimpleString address = RandomUtil.randomSimpleString();
   SimpleString queue = RandomUtil.randomSimpleString();

   session.createQueue(address, queue, false);

   ClientProducer producer = session.createProducer(address);
   ClientMessage message = session.createMessage(false);
   // Stamp an absolute expiry instant on the message; with no expiry address
   // configured the broker must silently discard it once that instant passes.
   message.setExpiration(System.currentTimeMillis() + MessageExpirationTest.EXPIRATION);
   producer.send(message);

   // Wait long enough that the expiration instant is safely in the past.
   Thread.sleep(MessageExpirationTest.EXPIRATION * 2);

   session.start();

   ClientConsumer consumer = session.createConsumer(queue);
   ClientMessage message2 = consumer.receiveImmediate();
   // The expired message must never reach a consumer.
   Assert.assertNull(message2);

   consumer.close();
   session.deleteQueue(queue);
}

@Test
public void testMessageExpiredWithoutExpiryAddressWithExpiryDelayOverride() throws Exception {
   SimpleString address = RandomUtil.randomSimpleString();
   SimpleString queue = RandomUtil.randomSimpleString();

   // Re-create the session as transacted (no auto-commit for sends or acks)
   // so that commit/rollback below control message visibility.
   session.close();
   session = addClientSession(sf.createSession(false, false, false));

   session.createQueue(address, queue, false);

   ClientProducer producer = session.createProducer(address);
   ClientMessage message = session.createMessage(false);

   // Address-level expiry-delay override: a message sent WITHOUT an explicit
   // expiration gets one of EXPIRATION ms applied by the broker.
   AddressSettings addressSettings = new AddressSettings().setExpiryDelay((long) MessageExpirationTest.EXPIRATION);
   server.getAddressSettingsRepository().addMatch(address.toString(), addressSettings);
   producer.send(message);

   // second message, this message shouldn't be overridden
   // (it carries its own, later expiration of EXPIRATION * 3)
   message = session.createMessage(false);
   message.setExpiration(System.currentTimeMillis() + EXPIRATION * 3);
   producer.send(message);

   session.commit();

   session.start();

   ClientConsumer consumer = session.createConsumer(queue);

   // non expired.. should receive both
   assertNotNull(consumer.receiveImmediate());
   assertNotNull(consumer.receiveImmediate());

   // stopping the consumer to cleanup the client's buffer
   session.stop();

   // we receive the message and then rollback... then we wait some time > expiration, the message must be gone
   session.rollback();

   Thread.sleep(MessageExpirationTest.EXPIRATION * 2);

   session.start();

   // one expired as we changed the expiry in one of the messages... should receive just one
   assertNotNull(consumer.receiveImmediate());
   assertNull(consumer.receiveImmediate());

   session.stop();
   session.rollback();

   Thread.sleep(MessageExpirationTest.EXPIRATION * 2);

   session.start();

   // both expired... nothing should be received
   assertNull(consumer.receiveImmediate());

   consumer.close();
   session.deleteQueue(queue);
}

@Test
public void testMessageExpirationOnServer() throws Exception {
   SimpleString address = RandomUtil.randomSimpleString();
   SimpleString queue = RandomUtil.randomSimpleString();

   session.createQueue(address, queue, false);

   ClientProducer producer = session.createProducer(address);
   // Consumer is created BEFORE the message expires: this exercises the
   // server-side expiry reaper rather than client-side filtering.
   ClientConsumer consumer = session.createConsumer(queue);
   ClientMessage message = session.createMessage(false);
   message.setExpiration(System.currentTimeMillis() + MessageExpirationTest.EXPIRATION);
   producer.send(message);

   Thread.sleep(MessageExpirationTest.EXPIRATION * 2);

   session.start();

   // Give the server's expiry scanner a moment to run before inspecting counts.
   Thread.sleep(500);

   // The queue must be fully drained on the server: nothing in delivery,
   // nothing left on the queue.
   Assert.assertEquals(0, ((Queue) server.getPostOffice().getBinding(queue).getBindable()).getDeliveringCount());
   Assert.assertEquals(0, getMessageCount(((Queue) server.getPostOffice().getBinding(queue).getBindable())));

   ClientMessage message2 = consumer.receiveImmediate();
   Assert.assertNull(message2);

   consumer.close();
   session.deleteQueue(queue);
}

@Test
public void testMessageExpirationOnClient() throws Exception {
   SimpleString address = RandomUtil.randomSimpleString();
   SimpleString queue = RandomUtil.randomSimpleString();

   session.createQueue(address, queue, false);

   ClientProducer producer = session.createProducer(address);
   ClientMessage message = session.createMessage(false);
   message.setExpiration(System.currentTimeMillis() + MessageExpirationTest.EXPIRATION);
   producer.send(message);

   session.start();

   // Let the message expire while it may already sit in the client buffer;
   // the consumer is only created afterwards.
   Thread.sleep(MessageExpirationTest.EXPIRATION * 2);

   ClientConsumer consumer = session.createConsumer(queue);
   ClientMessage message2 = consumer.receiveImmediate();
   Assert.assertNull(message2);

   // Server-side accounting must also show the message as gone.
   Assert.assertEquals(0, ((Queue) server.getPostOffice().getBinding(queue).getBindable()).getDeliveringCount());
   Assert.assertEquals(0, getMessageCount(((Queue) server.getPostOffice().getBinding(queue).getBindable())));

   consumer.close();
   session.deleteQueue(queue);
}

@Test
public void testMessageExpiredWithExpiryAddress() throws Exception {
   SimpleString address = RandomUtil.randomSimpleString();
   SimpleString queue = RandomUtil.randomSimpleString();
   final SimpleString expiryAddress = RandomUtil.randomSimpleString();
   SimpleString expiryQueue = RandomUtil.randomSimpleString();

   // Route expired messages from 'address' to 'expiryAddress' instead of
   // discarding them.
   server.getAddressSettingsRepository().addMatch(address.toString(), new AddressSettings() {
      private static final long serialVersionUID = -6476053400596299130L;

      @Override
      public SimpleString getExpiryAddress() {
         return expiryAddress;
      }
   });

   session.createQueue(address, queue, false);
   session.createQueue(expiryAddress, expiryQueue, false);

   ClientProducer producer = session.createProducer(address);
   ClientMessage message = session.createMessage(false);
   message.setExpiration(System.currentTimeMillis() + MessageExpirationTest.EXPIRATION);
   producer.send(message);

   Thread.sleep(MessageExpirationTest.EXPIRATION * 2);

   session.start();

   // Nothing on the original queue any more...
   ClientConsumer consumer = session.createConsumer(queue);
   ClientMessage message2 = consumer.receiveImmediate();
   Assert.assertNull(message2);

   // ...but the message must have moved to the expiry queue, annotated with
   // the actual expiry time and its original address/queue.
   ClientConsumer expiryConsumer = session.createConsumer(expiryQueue);
   ClientMessage expiredMessage = expiryConsumer.receive(500);
   Assert.assertNotNull(expiredMessage);
   Assert.assertNotNull(expiredMessage.getObjectProperty(Message.HDR_ACTUAL_EXPIRY_TIME));
   Assert.assertEquals(address, expiredMessage.getObjectProperty(Message.HDR_ORIGINAL_ADDRESS));
   Assert.assertEquals(queue, expiredMessage.getObjectProperty(Message.HDR_ORIGINAL_QUEUE));
   consumer.close();
   expiryConsumer.close();
   session.deleteQueue(queue);
   session.deleteQueue(expiryQueue);
}

// Package protected ---------------------------------------------

// Protected -----------------------------------------------------

@Override
@Before
public void setUp() throws Exception {
   super.setUp();

   // Non-persistent embedded server plus an auto-commit session used by most
   // tests (the expiry-delay test replaces it with a transacted one).
   server = createServer(false);
   server.start();
   locator = createInVMNonHALocator();
   sf = createSessionFactory(locator);
   session = addClientSession(sf.createSession(false, true, true));
}
}
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/channel/v1/service.proto package com.google.cloud.channel.v1; /** * * * <pre> * TransferableOffer represents an Offer that can be used in Transfer. * Read-only. * </pre> * * Protobuf type {@code google.cloud.channel.v1.TransferableOffer} */ public final class TransferableOffer extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.channel.v1.TransferableOffer) TransferableOfferOrBuilder { private static final long serialVersionUID = 0L; // Use TransferableOffer.newBuilder() to construct. 
private TransferableOffer(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private TransferableOffer() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new TransferableOffer(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private TransferableOffer( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.channel.v1.Offer.Builder subBuilder = null; if (offer_ != null) { subBuilder = offer_.toBuilder(); } offer_ = input.readMessage(com.google.cloud.channel.v1.Offer.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(offer_); offer_ = subBuilder.buildPartial(); } break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_TransferableOffer_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_TransferableOffer_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.channel.v1.TransferableOffer.class, com.google.cloud.channel.v1.TransferableOffer.Builder.class); } public static final int OFFER_FIELD_NUMBER = 1; private com.google.cloud.channel.v1.Offer offer_; /** * * * <pre> * Offer with parameter constraints updated to allow the Transfer. * </pre> * * <code>.google.cloud.channel.v1.Offer offer = 1;</code> * * @return Whether the offer field is set. */ @java.lang.Override public boolean hasOffer() { return offer_ != null; } /** * * * <pre> * Offer with parameter constraints updated to allow the Transfer. * </pre> * * <code>.google.cloud.channel.v1.Offer offer = 1;</code> * * @return The offer. */ @java.lang.Override public com.google.cloud.channel.v1.Offer getOffer() { return offer_ == null ? com.google.cloud.channel.v1.Offer.getDefaultInstance() : offer_; } /** * * * <pre> * Offer with parameter constraints updated to allow the Transfer. 
* </pre> * * <code>.google.cloud.channel.v1.Offer offer = 1;</code> */ @java.lang.Override public com.google.cloud.channel.v1.OfferOrBuilder getOfferOrBuilder() { return getOffer(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (offer_ != null) { output.writeMessage(1, getOffer()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (offer_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getOffer()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.channel.v1.TransferableOffer)) { return super.equals(obj); } com.google.cloud.channel.v1.TransferableOffer other = (com.google.cloud.channel.v1.TransferableOffer) obj; if (hasOffer() != other.hasOffer()) return false; if (hasOffer()) { if (!getOffer().equals(other.getOffer())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasOffer()) { hash = (37 * hash) + OFFER_FIELD_NUMBER; hash = (53 * hash) + getOffer().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.channel.v1.TransferableOffer parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data); } public static com.google.cloud.channel.v1.TransferableOffer parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.channel.v1.TransferableOffer parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.channel.v1.TransferableOffer parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.channel.v1.TransferableOffer parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.channel.v1.TransferableOffer parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.channel.v1.TransferableOffer parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.channel.v1.TransferableOffer parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.channel.v1.TransferableOffer parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static 
com.google.cloud.channel.v1.TransferableOffer parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.channel.v1.TransferableOffer parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.channel.v1.TransferableOffer parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.channel.v1.TransferableOffer prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * TransferableOffer represents an Offer that can be used in Transfer. * Read-only. 
* </pre> * * Protobuf type {@code google.cloud.channel.v1.TransferableOffer} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.channel.v1.TransferableOffer) com.google.cloud.channel.v1.TransferableOfferOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_TransferableOffer_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_TransferableOffer_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.channel.v1.TransferableOffer.class, com.google.cloud.channel.v1.TransferableOffer.Builder.class); } // Construct using com.google.cloud.channel.v1.TransferableOffer.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); if (offerBuilder_ == null) { offer_ = null; } else { offer_ = null; offerBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_TransferableOffer_descriptor; } @java.lang.Override public com.google.cloud.channel.v1.TransferableOffer getDefaultInstanceForType() { return com.google.cloud.channel.v1.TransferableOffer.getDefaultInstance(); } @java.lang.Override public com.google.cloud.channel.v1.TransferableOffer build() { 
com.google.cloud.channel.v1.TransferableOffer result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.channel.v1.TransferableOffer buildPartial() { com.google.cloud.channel.v1.TransferableOffer result = new com.google.cloud.channel.v1.TransferableOffer(this); if (offerBuilder_ == null) { result.offer_ = offer_; } else { result.offer_ = offerBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.channel.v1.TransferableOffer) { return mergeFrom((com.google.cloud.channel.v1.TransferableOffer) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.channel.v1.TransferableOffer other) { if (other == com.google.cloud.channel.v1.TransferableOffer.getDefaultInstance()) return this; if (other.hasOffer()) { mergeOffer(other.getOffer()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public 
final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.channel.v1.TransferableOffer parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.channel.v1.TransferableOffer) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private com.google.cloud.channel.v1.Offer offer_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.channel.v1.Offer, com.google.cloud.channel.v1.Offer.Builder, com.google.cloud.channel.v1.OfferOrBuilder> offerBuilder_; /** * * * <pre> * Offer with parameter constraints updated to allow the Transfer. * </pre> * * <code>.google.cloud.channel.v1.Offer offer = 1;</code> * * @return Whether the offer field is set. */ public boolean hasOffer() { return offerBuilder_ != null || offer_ != null; } /** * * * <pre> * Offer with parameter constraints updated to allow the Transfer. * </pre> * * <code>.google.cloud.channel.v1.Offer offer = 1;</code> * * @return The offer. */ public com.google.cloud.channel.v1.Offer getOffer() { if (offerBuilder_ == null) { return offer_ == null ? com.google.cloud.channel.v1.Offer.getDefaultInstance() : offer_; } else { return offerBuilder_.getMessage(); } } /** * * * <pre> * Offer with parameter constraints updated to allow the Transfer. 
* </pre> * * <code>.google.cloud.channel.v1.Offer offer = 1;</code> */ public Builder setOffer(com.google.cloud.channel.v1.Offer value) { if (offerBuilder_ == null) { if (value == null) { throw new NullPointerException(); } offer_ = value; onChanged(); } else { offerBuilder_.setMessage(value); } return this; } /** * * * <pre> * Offer with parameter constraints updated to allow the Transfer. * </pre> * * <code>.google.cloud.channel.v1.Offer offer = 1;</code> */ public Builder setOffer(com.google.cloud.channel.v1.Offer.Builder builderForValue) { if (offerBuilder_ == null) { offer_ = builderForValue.build(); onChanged(); } else { offerBuilder_.setMessage(builderForValue.build()); } return this; } /** * * * <pre> * Offer with parameter constraints updated to allow the Transfer. * </pre> * * <code>.google.cloud.channel.v1.Offer offer = 1;</code> */ public Builder mergeOffer(com.google.cloud.channel.v1.Offer value) { if (offerBuilder_ == null) { if (offer_ != null) { offer_ = com.google.cloud.channel.v1.Offer.newBuilder(offer_).mergeFrom(value).buildPartial(); } else { offer_ = value; } onChanged(); } else { offerBuilder_.mergeFrom(value); } return this; } /** * * * <pre> * Offer with parameter constraints updated to allow the Transfer. * </pre> * * <code>.google.cloud.channel.v1.Offer offer = 1;</code> */ public Builder clearOffer() { if (offerBuilder_ == null) { offer_ = null; onChanged(); } else { offer_ = null; offerBuilder_ = null; } return this; } /** * * * <pre> * Offer with parameter constraints updated to allow the Transfer. * </pre> * * <code>.google.cloud.channel.v1.Offer offer = 1;</code> */ public com.google.cloud.channel.v1.Offer.Builder getOfferBuilder() { onChanged(); return getOfferFieldBuilder().getBuilder(); } /** * * * <pre> * Offer with parameter constraints updated to allow the Transfer. 
* </pre> * * <code>.google.cloud.channel.v1.Offer offer = 1;</code> */ public com.google.cloud.channel.v1.OfferOrBuilder getOfferOrBuilder() { if (offerBuilder_ != null) { return offerBuilder_.getMessageOrBuilder(); } else { return offer_ == null ? com.google.cloud.channel.v1.Offer.getDefaultInstance() : offer_; } } /** * * * <pre> * Offer with parameter constraints updated to allow the Transfer. * </pre> * * <code>.google.cloud.channel.v1.Offer offer = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.channel.v1.Offer, com.google.cloud.channel.v1.Offer.Builder, com.google.cloud.channel.v1.OfferOrBuilder> getOfferFieldBuilder() { if (offerBuilder_ == null) { offerBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.channel.v1.Offer, com.google.cloud.channel.v1.Offer.Builder, com.google.cloud.channel.v1.OfferOrBuilder>( getOffer(), getParentForChildren(), isClean()); offer_ = null; } return offerBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.channel.v1.TransferableOffer) } // @@protoc_insertion_point(class_scope:google.cloud.channel.v1.TransferableOffer) private static final com.google.cloud.channel.v1.TransferableOffer DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.channel.v1.TransferableOffer(); } public static com.google.cloud.channel.v1.TransferableOffer getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<TransferableOffer> PARSER = new com.google.protobuf.AbstractParser<TransferableOffer>() { @java.lang.Override public TransferableOffer parsePartialFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new TransferableOffer(input, extensionRegistry); } }; public static com.google.protobuf.Parser<TransferableOffer> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<TransferableOffer> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.channel.v1.TransferableOffer getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
package org.jivesoftware.util.cert; import org.bouncycastle.asn1.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.security.cert.CertificateParsingException; import java.security.cert.X509Certificate; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; /** * Certificate identity mapping that uses SubjectAlternativeName as the identity credentials. * This implementation returns all subjectAltName entries that are a: * <ul> * <li>GeneralName of type otherName with the "id-on-xmppAddr" Object Identifier</li> * <li>GeneralName of type otherName with the "id-on-dnsSRV" Object Identifier</li> * <li>GeneralName of type DNSName</li> * <li>GeneralName of type UniformResourceIdentifier</li> * </ul> * * @author Victor Hong * @author Guus der Kinderen, guus@goodbytes.nl */ public class SANCertificateIdentityMapping implements CertificateIdentityMapping { private static final Logger Log = LoggerFactory.getLogger( SANCertificateIdentityMapping.class ); /** * id-on-xmppAddr Object Identifier. * * @see <a href="http://tools.ietf.org/html/rfc6120">RFC 6120</a> */ public static final String OTHERNAME_XMPP_OID = "1.3.6.1.5.5.7.8.5"; /** * id-on-dnsSRV Object Identifier. * * @see <a href="https://tools.ietf.org/html/rfc4985">RFC 4985</a> */ public static final String OTHERNAME_SRV_OID = "1.3.6.1.5.5.7.8.7"; /** * User Principal Name (UPN) Object Identifier. * * @see <a href="http://www.oid-info.com/get/1.3.6.1.4.1.311.20.2.3">User Principal Name (UPN)</a> */ public static final String OTHERNAME_UPN_OID = "1.3.6.1.4.1.311.20.2.3"; /** * Returns the JID representation of an XMPP entity contained as a SubjectAltName extension * in the certificate. If none was found then return an empty list. * * @param certificate the certificate presented by the remote entity. * @return the JID representation of an XMPP entity contained as a SubjectAltName extension * in the certificate. 
If none was found then return an empty list. */ @Override public List<String> mapIdentity( X509Certificate certificate ) { List<String> identities = new ArrayList<>(); try { Collection<List<?>> altNames = certificate.getSubjectAlternativeNames(); // Check that the certificate includes the SubjectAltName extension if ( altNames == null ) { return Collections.emptyList(); } for ( List<?> item : altNames ) { final Integer type = (Integer) item.get( 0 ); final Object value = item.get( 1 ); // this is either a string, or a byte-array that represents the ASN.1 DER encoded form. final String result; switch ( type ) { case 0: // OtherName: search for "id-on-xmppAddr" or 'sRVName' or 'userPrincipalName' result = parseOtherName( (byte[]) value ); break; case 2: // DNS result = (String) value; break; case 6: // URI result = (String) value; break; default: // Not applicable to XMPP, so silently ignore them result = null; break; } if ( result != null ) { identities.add( result ); } } } catch ( CertificateParsingException e ) { Log.error( "Error parsing SubjectAltName in certificate: " + certificate.getSubjectDN(), e ); } return identities; } /** * Returns the short name of mapping. * * @return The short name of the mapping (never null). */ @Override public String name() { return "Subject Alternative Name Mapping"; } /** * Parses the byte-array representation of a subjectAltName 'otherName' entry. * <p> * The provided 'OtherName' is expected to have this format: * <pre>{@code * OtherName ::= SEQUENCE { * type-id OBJECT IDENTIFIER, * value [0] EXPLICIT ANY DEFINED BY type-id } * }</pre> * * @param item A byte array representation of a subjectAltName 'otherName' entry (cannot be null). * @return an xmpp address, or null when the otherName entry does not relate to XMPP (or fails to parse). 
*/ public String parseOtherName( byte[] item ) { if ( item == null || item.length == 0 ) { return null; } try ( final ASN1InputStream decoder = new ASN1InputStream( item ) ) { // By specification, OtherName instances must always be an ASN.1 Sequence. final ASN1Primitive object = decoder.readObject(); final ASN1Sequence otherNameSeq = (ASN1Sequence) object; // By specification, an OtherName instance consists of: // - the type-id (which is an Object Identifier), followed by: // - a tagged value, of which the tag number is 0 (zero) and the value is defined by the type-id. final ASN1ObjectIdentifier typeId = (ASN1ObjectIdentifier) otherNameSeq.getObjectAt( 0 ); final ASN1TaggedObject taggedValue = (ASN1TaggedObject) otherNameSeq.getObjectAt( 1 ); final int tagNo = taggedValue.getTagNo(); if ( tagNo != 0 ) { throw new IllegalArgumentException( "subjectAltName 'otherName' sequence's second object is expected to be a tagged value of which the tag number is 0. The tag number that was detected: " + tagNo ); } final ASN1Primitive value = taggedValue.getObject(); switch ( typeId.getId() ) { case OTHERNAME_SRV_OID: return parseOtherNameDnsSrv( value ); case OTHERNAME_XMPP_OID: return parseOtherNameXmppAddr( value ); case OTHERNAME_UPN_OID: return parseOtherNameUpn( value ); default: String otherName = parseOtherName(typeId, value); if (otherName != null) { return otherName; } Log.debug( "Ignoring subjectAltName 'otherName' type-id '{}' that's neither id-on-xmppAddr nor id-on-dnsSRV.", typeId.getId() ); return null; } } catch ( Exception e ) { Log.warn( "Unable to parse a byte array (of length {}) as a subjectAltName 'otherName'. It is ignored.", item.length, e ); return null; } } /** * Allow sub-class to support additional OID values, possibly taking typeId into account * * @param typeId The ASN.1 object identifier (cannot be null). * @param value The ASN.1 representation of the value (cannot be null). * @return The parsed otherName String value. 
*/ protected String parseOtherName(ASN1ObjectIdentifier typeId, ASN1Primitive value) { return null; } /** * Parses a SRVName value as specified by RFC 4985. * * This method parses the argument value as a DNS SRV Resource Record. Only when the parsed value refers to an XMPP * related service, the corresponding DNS domain name is returned (minus the service name). * * @param srvName The ASN.1 representation of the srvName value (cannot be null). * @return an XMPP address value, or null when the record does not relate to XMPP. */ protected String parseOtherNameDnsSrv( ASN1Primitive srvName ) { // RFC 4985 says that this should be a IA5 String. Lets be tolerant and allow all text-based values. final String value = ( (ASN1String) srvName ).getString(); if ( value.toLowerCase().startsWith( "_xmpp-server." ) ) { return value.substring( "_xmpp-server.".length() ); } else if ( value.toLowerCase().startsWith( "_xmpp-client." ) ) { return value.substring( "_xmpp-client.".length() ); } else { // Not applicable to XMPP. Ignore. Log.debug( "srvName value '{}' of id-on-dnsSRV record is neither _xmpp-server nor _xmpp-client. It is being ignored.", value ); return null; } } /** * Parse a XmppAddr value as specified in RFC 6120. * * @param xmppAddr The ASN.1 representation of the xmppAddr value (cannot be null). * @return The parsed xmppAddr value. */ protected String parseOtherNameXmppAddr( ASN1Primitive xmppAddr ) { // Get the nested object if the value is an ASN1TaggedObject or a sub-type of it if (ASN1TaggedObject.class.isAssignableFrom(xmppAddr.getClass())) { ASN1TaggedObject taggedObject = (ASN1TaggedObject) xmppAddr; ASN1Primitive objectPrimitive = taggedObject.getObject(); if (ASN1String.class.isAssignableFrom(objectPrimitive.getClass())) { return ((ASN1String) objectPrimitive).getString(); } } // RFC 6120 says that this should be a UTF8String. Lets be tolerant and allow all text-based values. 
return ( (ASN1String) xmppAddr ).getString(); } /** * Parse a UPN value * * @param value The ASN.1 representation of the UPN (cannot be null). * @return The parsed UPN value. */ protected String parseOtherNameUpn( ASN1Primitive value ) { String otherName = null; if (value instanceof ASN1TaggedObject) { ASN1TaggedObject taggedObject = (ASN1TaggedObject) value; ASN1Primitive objectPrimitive = taggedObject.getObject(); if (objectPrimitive instanceof ASN1String) { otherName = ((ASN1String)objectPrimitive).getString(); } } if (otherName == null) { Log.warn("UPN type unexpected, UPN extraction failed: " + value.getClass().getName() + ":" + value.toString()); } else { Log.debug("UPN from certificate has value of: " + otherName ); } return otherName; } }
/*
 * External Code Formatter
 * Copyright (c) 2007-2009 Esko Luontola, www.orfjackal.net
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package net.orfjackal.extformatter;

import net.orfjackal.extformatter.util.*;
import org.jetbrains.annotations.*;

import java.io.File;
import java.util.*;

/**
 * Uses as few reformat method calls as possible to reformat all the queued files.
 *
 * <p>Files are queued with {@link #reformatOne(File)} and actually reformatted
 * on {@link #flush()}, which tries the delegate formatter's batch operations
 * (many / recursive / directory) before falling back to one-by-one calls.</p>
 *
 * @author Esko Luontola
 * @since 7.12.2007
 */
public class OptimizingReformatQueue implements ReformatQueue {

    /** Delegate formatter whose capabilities decide which batch strategy is usable. */
    @NotNull private final CodeFormatter formatter;

    /** Files queued for reformatting; emptied by {@link #flush()}. */
    @NotNull private final List<File> fileQueue = new ArrayList<File>();

    public OptimizingReformatQueue(@NotNull CodeFormatter formatter) {
        this.formatter = formatter;
    }

    public boolean supportsFileType(@NotNull File file) {
        return formatter.supportsFileType(file);
    }

    public boolean supportsReformatOne() {
        return true;
    }

    /**
     * Queues a file; the actual reformatting happens in {@link #flush()}.
     *
     * @param file a file whose type the delegate formatter supports.
     */
    public void reformatOne(@NotNull File file) {
        assert supportsFileType(file);
        fileQueue.add(file);
    }

    public boolean isEmpty() {
        return fileQueue.isEmpty();
    }

    /**
     * Reformats all queued files, preferring the cheapest strategies first
     * (many, recursive, whole-directory) and finishing with one-by-one calls.
     *
     * @throws IllegalStateException if some queued files could not be
     *         reformatted by any supported strategy; the queue is cleared
     *         either way.
     */
    public void flush() {
        fileQueue.removeAll(useReformatMany());
        fileQueue.removeAll(useReformatRecursively());
        fileQueue.removeAll(useReformatDirectory());
        fileQueue.removeAll(useReformatOne());
        mustBeEmpty(fileQueue);
    }

    /** Fallback strategy: reformat each queued file individually. */
    @NotNull
    private List<File> useReformatOne() {
        List<File> reformatted = new ArrayList<File>();
        if (formatter.supportsReformatOne()) {
            for (File file : fileQueue) {
                formatter.reformatOne(file);
                reformatted.add(file);
            }
        }
        return reformatted;
    }

    /** Reformat all queued files with one varargs call, if supported. */
    @NotNull
    private List<File> useReformatMany() {
        List<File> reformatted = new ArrayList<File>();
        if (formatter.supportsReformatMany() && fileQueue.size() > 0) {
            formatter.reformatMany(toArray(fileQueue));
            reformatted.addAll(fileQueue);
        }
        return reformatted;
    }

    /**
     * Reformat whole directories, but only those whose every supported file
     * is in the queue — otherwise unrelated files would also be reformatted.
     */
    @NotNull
    private List<File> useReformatDirectory() {
        List<File> reformatted = new ArrayList<File>();
        if (formatter.supportsReformatDirectory()) {
            Map<File, List<File>> groups = groupByDirectory(fileQueue);
            for (Map.Entry<File, List<File>> group : groups.entrySet()) {
                File directory = group.getKey();
                List<File> files = group.getValue();
                if (noOthersInTheSameDirectory(directory, files)) {
                    formatter.reformatDirectory(directory);
                    reformatted.addAll(files);
                }
            }
        }
        return reformatted;
    }

    /**
     * Reformat the common parent directory recursively, but only when every
     * supported file in that tree is queued.
     */
    @NotNull
    private List<File> useReformatRecursively() {
        List<File> reformatted = new ArrayList<File>();
        if (formatter.supportsReformatRecursively()) {
            File directory = commonParentDirectory(fileQueue);
            if (directory != null && noOthersInTheSameDirectoryTree(directory, fileQueue)) {
                formatter.reformatRecursively(directory);
                reformatted.addAll(fileQueue);
            }
        }
        return reformatted;
    }

    /**
     * Fails loudly if any files remain unformatted; clears the list so the
     * queue is usable again even after a failure.
     */
    private static void mustBeEmpty(@NotNull List<File> files) {
        if (!files.isEmpty()) {
            try {
                throw new IllegalStateException("The following files could not be reformatted: " + files);
            } finally {
                files.clear();
            }
        }
    }

    /**
     * @return true if every supported file in {@code directory} and all of its
     *         subdirectories (recursively) is contained in {@code files}.
     */
    private boolean noOthersInTheSameDirectoryTree(@NotNull File directory, @NotNull List<File> files) {
        if (!noOthersInTheSameDirectory(directory, files)) {
            return false;
        }
        // listFiles() returns null if the directory vanished or is unreadable;
        // then we cannot prove the tree is "clean", so report false instead of
        // throwing a NullPointerException.
        File[] subDirs = directory.listFiles(new Directories());
        if (subDirs == null) {
            return false;
        }
        for (File subDir : subDirs) {
            if (!noOthersInTheSameDirectoryTree(subDir, files)) {
                return false;
            }
        }
        return true;
    }

    /**
     * @return true if every supported file directly inside {@code directory}
     *         is contained in {@code files}.
     */
    private boolean noOthersInTheSameDirectory(@NotNull File directory, @NotNull List<File> files) {
        // Null listing (nonexistent/unreadable directory): play it safe and
        // assume there may be other files, rather than NPE on iteration.
        File[] allFilesInDir = directory.listFiles(new FilesSupportedBy(this));
        if (allFilesInDir == null) {
            return false;
        }
        for (File fileInDir : allFilesInDir) {
            if (!files.contains(fileInDir)) {
                return false;
            }
        }
        return true;
    }

    /**
     * @return the shallowest parent directory among the queued files, or null
     *         for an empty list.
     */
    @Nullable
    private static File commonParentDirectory(@NotNull List<File> files) {
        // Assumes that the common parent directory contains some files.
        // Otherwise there should be some check that this does not go
        // up to the root directory and possibly by accident reformat
        // files outside the project source directories.
        File commonParent = null;
        for (File file : files) {
            if (commonParent == null) {
                commonParent = file.getParentFile();
            }
            if (isParent(file.getParentFile(), commonParent)) {
                commonParent = file.getParentFile();
            }
        }
        return commonParent;
    }

    /** @return true if {@code parent} is {@code child} or one of its ancestors. */
    private static boolean isParent(@NotNull File parent, @NotNull File child) {
        for (File dir = child; dir.getParentFile() != null; dir = dir.getParentFile()) {
            if (dir.equals(parent)) {
                return true;
            }
        }
        return false;
    }

    /** Groups the files by their parent directory. */
    @NotNull
    private static Map<File, List<File>> groupByDirectory(@NotNull List<File> files) {
        Map<File, List<File>> groups = new HashMap<File, List<File>>();
        for (File file : files) {
            File directory = file.getParentFile();
            putToList(groups, directory, file);
        }
        return groups;
    }

    /** Multimap helper: appends {@code value} to the list stored under {@code key}. */
    private static <T> void putToList(@NotNull Map<T, List<T>> map, @NotNull T key, @NotNull T value) {
        List<T> list = map.get(key);
        if (list == null) {
            list = new ArrayList<T>();
            map.put(key, list);
        }
        list.add(value);
    }

    @NotNull
    private static File[] toArray(@NotNull List<File> files) {
        return files.toArray(new File[files.size()]);
    }

    // Unsupported operations — this queue only exposes reformatOne/flush.

    public boolean supportsReformatMany() {
        return false;
    }

    public void reformatMany(@NotNull File... files) {
        throw new UnsupportedOperationException();
    }

    public boolean supportsReformatDirectory() {
        return false;
    }

    public void reformatDirectory(@NotNull File directory) {
        throw new UnsupportedOperationException();
    }

    public boolean supportsReformatRecursively() {
        return false;
    }

    public void reformatRecursively(@NotNull File directory) {
        throw new UnsupportedOperationException();
    }
}
/* * Copyright 2006 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.directwebremoting.spring; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.directwebremoting.create.NewCreator; import org.directwebremoting.filter.ExtraLatencyAjaxFilter; import org.springframework.beans.FatalBeanException; import org.springframework.beans.PropertyValue; import org.springframework.beans.factory.BeanFactory; import org.springframework.beans.factory.BeanInitializationException; import org.springframework.beans.factory.HierarchicalBeanFactory; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.BeanDefinitionHolder; import org.springframework.beans.factory.config.RuntimeBeanReference; import org.springframework.beans.factory.support.BeanDefinitionBuilder; import org.springframework.beans.factory.support.BeanDefinitionReaderUtils; import org.springframework.beans.factory.support.BeanDefinitionRegistry; import org.springframework.beans.factory.support.ManagedList; import org.springframework.beans.factory.support.ManagedMap; import org.springframework.beans.factory.xml.BeanDefinitionDecorator; import org.springframework.beans.factory.xml.BeanDefinitionParser; import 
org.springframework.beans.factory.xml.BeanDefinitionParserDelegate;
import org.springframework.beans.factory.xml.NamespaceHandlerSupport;
import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.util.ClassUtils;
import org.springframework.util.StringUtils;
import org.springframework.util.xml.DomUtils;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

/**
 * The Spring namespace handler which handles all elements that are defined as
 * part of the DWR namespace, except &lt;dwr:annotation-config /&gt;. <br/>
 * The DWR namespace is defined in the <code>spring-dwr-X.X.xsd</code> file. All
 * elements that are encountered in Spring configuration files are automatically
 * converted to their actual bean representation in the Spring bean registry.
 *
 * @author Erik Wiersma
 * @author Bram Smeets
 * @author Jose Noheda
 * @author Joe Walker [joe at getahead dot ltd dot uk]
 */
public abstract class DwrNamespaceHandler extends NamespaceHandlerSupport
{
    /* (non-Javadoc)
     * @see org.springframework.beans.factory.xml.NamespaceHandler#init()
     */
    public void init()
    {
        // register bean definition parsers and decorators for all dwr namespace elements
        registerBeanDefinitionParser("configuration", new ConfigurationBeanDefinitionParser());
        registerBeanDefinitionParser("controller", new ControllerBeanDefinitionParser());
        registerBeanDefinitionParser("url-mapping", new UrlMappingBeanDefinitionParser());
        registerBeanDefinitionParser("proxy-ref", new ProxyBeanDefinitionParser());

        registerBeanDefinitionDecorator("init", new InitDefinitionDecorator());
        registerBeanDefinitionDecorator("create", new CreatorBeanDefinitionDecorator());
        registerBeanDefinitionDecorator("convert", new ConverterBeanDefinitionDecorator());
        registerBeanDefinitionDecorator("signatures", new SignaturesBeanDefinitionDecorator());
        registerBeanDefinitionDecorator("remote", new RemoteBeanDefinitionDecorator());
    }

    /**
     * Registers the shared {@link SpringConfigurator} bean definition (with
     * empty creator/converter maps) if it is not present yet, and returns it.
     *
     * @param registry the bean definition registry
     * @return the (possibly freshly registered) configurator definition
     */
    protected static BeanDefinition registerSpringConfiguratorIfNecessary(BeanDefinitionRegistry registry)
    {
        if (!registry.containsBeanDefinition(DEFAULT_SPRING_CONFIGURATOR_ID))
        {
            BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(SpringConfigurator.class);
            builder.addPropertyValue("creators", new ManagedMap());
            builder.addPropertyValue("converters", new ManagedMap());
            registry.registerBeanDefinition(DEFAULT_SPRING_CONFIGURATOR_ID, builder.getBeanDefinition());
        }
        return registry.getBeanDefinition(DEFAULT_SPRING_CONFIGURATOR_ID);
    }

    /**
     * Registers a new {@link org.directwebremoting.extend.Creator} in the registry using name <code>javascript</code>.
     * Nested dwr:* elements (filters, include/exclude, auth, convert, param)
     * are folded into the supplied creator config before it is registered as
     * bean "__&lt;javascript&gt;".
     *
     * @param registry The definition of all the Beans
     * @param javascript The name of the bean in the registry.
     * @param creatorConfig builder for the CreatorConfig bean being assembled
     * @param params free-form creator parameters, augmented from nested dwr:param elements
     * @param children The node list to check for nested elements
     */
    @SuppressWarnings("unchecked")
    protected void registerCreator(BeanDefinitionRegistry registry, String javascript, BeanDefinitionBuilder creatorConfig, Map<String, String> params, NodeList children)
    {
        registerSpringConfiguratorIfNecessary(registry);

        List<String> includes = new ArrayList<String>();
        creatorConfig.addPropertyValue("includes", includes);

        List<String> excludes = new ArrayList<String>();
        creatorConfig.addPropertyValue("excludes", excludes);

        Map<String, List<String>> auth = new HashMap<String, List<String>>();
        creatorConfig.addPropertyValue("auth", auth);

        // check to see if there are any nested elements here
        for (int i = 0; i < children.getLength(); i++)
        {
            Node node = children.item(i);
            if (node.getNodeType() == Node.TEXT_NODE || node.getNodeType() == Node.COMMENT_NODE)
            {
                continue;
            }
            Element child = (Element) node;
            if ("dwr:latencyfilter".equals(node.getNodeName()))
            {
                BeanDefinitionBuilder beanFilter = BeanDefinitionBuilder.rootBeanDefinition(ExtraLatencyAjaxFilter.class);
                beanFilter.addPropertyValue("delay", child.getAttribute("delay"));
                BeanDefinitionHolder holder2 = new BeanDefinitionHolder(beanFilter.getBeanDefinition(), "__latencyFilter_" + javascript);
                BeanDefinitionReaderUtils.registerBeanDefinition(holder2, registry);

                // NOTE(review): this replaces any "filters" value previously set
                // on creatorConfig (e.g. by a dwr:filter sibling) — confirm
                // whether combining both filter kinds is intended.
                ManagedList filterList = new ManagedList();
                filterList.add(new RuntimeBeanReference("__latencyFilter_" + javascript));
                creatorConfig.addPropertyValue("filters", filterList);
            }
            else if ("dwr:include".equals(node.getNodeName()))
            {
                includes.add(child.getAttribute("method"));
            }
            else if ("dwr:exclude".equals(node.getNodeName()))
            {
                excludes.add(child.getAttribute("method"));
            }
            else if ("dwr:auth".equals(node.getNodeName()))
            {
                String method = child.getAttribute("method");
                if (auth.get(method) == null)
                {
                    auth.put(method, new ArrayList<String>());
                }
                auth.get(method).add(child.getAttribute("role"));
            }
            else if ("dwr:convert".equals(node.getNodeName()))
            {
                Element element = (Element) node;
                String type = element.getAttribute("type");
                String className = element.getAttribute("class");
                ConverterConfig converterConfig = new ConverterConfig();
                converterConfig.setType(type);
                parseConverterSettings(converterConfig, element);
                lookupConverters(registry).put(className, converterConfig);
            }
            else if ("dwr:filter".equals(node.getNodeName()))
            {
                Element element = (Element) node;
                String filterClass = element.getAttribute("class");
                List<Element> filterParamElements = DomUtils.getChildElementsByTagName(element, "param");
                BeanDefinitionBuilder beanFilter;
                try
                {
                    beanFilter = BeanDefinitionBuilder.rootBeanDefinition(ClassUtils.forName(filterClass));
                }
                catch (ClassNotFoundException e)
                {
                    throw new IllegalArgumentException("DWR filter class '" + filterClass + "' was not found. " + "Check the class name specified in <dwr:filter class=\"" + filterClass + "\" /> exists");
                }
                for (Element filterParamElement : filterParamElements)
                {
                    beanFilter.addPropertyValue(filterParamElement.getAttribute("name"), filterParamElement.getAttribute("value"));
                }
                BeanDefinitionHolder holder2 = new BeanDefinitionHolder(beanFilter.getBeanDefinition(), "__filter_" + filterClass + "_" + javascript);
                BeanDefinitionReaderUtils.registerBeanDefinition(holder2, registry);
                ManagedList filterList = new ManagedList();
                filterList.add(new RuntimeBeanReference("__filter_" + filterClass + "_" + javascript));
                creatorConfig.addPropertyValue("filters", filterList);
            }
            else if ("dwr:param".equals(node.getNodeName()))
            {
                Element element = (Element) node;
                String name = element.getAttribute("name");
                String value = element.getAttribute("value");
                params.put(name, value);
            }
            else
            {
                // Typo fix: was "fouund"
                throw new RuntimeException("an unknown dwr:remote sub node was found: " + node.getNodeName());
            }
        }
        creatorConfig.addPropertyValue("params", params);

        String creatorConfigName = "__" + javascript;
        BeanDefinitionHolder holder3 = new BeanDefinitionHolder(creatorConfig.getBeanDefinition(), creatorConfigName);
        BeanDefinitionReaderUtils.registerBeanDefinition(holder3, registry);
        lookupCreators(registry).put(javascript, new RuntimeBeanReference(creatorConfigName));
    }

    /**
     * Handles &lt;dwr:configuration&gt;: ensures the configurator bean exists
     * and delegates nested init/create/convert/signatures elements to their
     * registered decorators.
     */
    protected class ConfigurationBeanDefinitionParser implements BeanDefinitionParser
    {
        /* (non-Javadoc)
         * @see org.springframework.beans.factory.xml.BeanDefinitionParser#parse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)
         */
        @SuppressWarnings("unchecked")
        public BeanDefinition parse(Element element, ParserContext parserContext)
        {
            BeanDefinitionRegistry registry = parserContext.getRegistry();
            BeanDefinition beanDefinition = registerSpringConfiguratorIfNecessary(registry);

            Element initElement = DomUtils.getChildElementByTagName(element, "init");
            if (initElement != null)
            {
                decorate(initElement, new BeanDefinitionHolder(beanDefinition, DEFAULT_SPRING_CONFIGURATOR_ID), parserContext);
            }

            List<Element> createElements = DomUtils.getChildElementsByTagName(element, "create");
            for (Element createElement : createElements)
            {
                decorate(createElement, new BeanDefinitionHolder(beanDefinition, DEFAULT_SPRING_CONFIGURATOR_ID), parserContext);
            }

            List<Element> convertElements = DomUtils.getChildElementsByTagName(element, "convert");
            for (Element convertElement : convertElements)
            {
                decorate(convertElement, new BeanDefinitionHolder(beanDefinition, DEFAULT_SPRING_CONFIGURATOR_ID), parserContext);
            }

            List<Element> signatureElements = DomUtils.getChildElementsByTagName(element, "signatures");
            for (Element signatureElement : signatureElements)
            {
                decorate(signatureElement, new BeanDefinitionHolder(beanDefinition, DEFAULT_SPRING_CONFIGURATOR_ID), parserContext);
            }

            return beanDefinition;
        }
    }

    /**
     * Handles &lt;dwr:controller&gt;: registers a {@link DwrController} wired
     * to the shared configurator, honouring id/name attributes and aliases.
     */
    protected static class ControllerBeanDefinitionParser implements BeanDefinitionParser
    {
        /* (non-Javadoc)
         * @see org.springframework.beans.factory.xml.BeanDefinitionParser#parse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)
         */
        @SuppressWarnings("unchecked")
        public BeanDefinition parse(Element element, ParserContext parserContext)
        {
            BeanDefinitionBuilder dwrController = BeanDefinitionBuilder.rootBeanDefinition(DwrController.class);
            List<Object> configurators = new ManagedList();
            configurators.add(new RuntimeBeanReference(DEFAULT_SPRING_CONFIGURATOR_ID));
            dwrController.addPropertyValue("configurators", configurators);

            String debug = element.getAttribute("debug");
            if (StringUtils.hasText(debug))
            {
                dwrController.addPropertyValue("debug", debug);
            }

            String beanName = element.getAttribute(BeanDefinitionParserDelegate.ID_ATTRIBUTE);
            String[] aliases = null;
            if (!StringUtils.hasText(beanName))
            {
                // No id: fall back to the name attribute, then to a default.
                beanName = element.getAttribute("name");
                if (!StringUtils.hasText(beanName))
                {
                    beanName = "dwrController"; // Offer a sensible default if no id was specified
                }
            }
            else
            {
                // id present: the name attribute (if any) supplies aliases.
                // (Consistency fix: previously one variable was checked and a
                // second, identically-valued variable was tokenized.)
                String aliasName = element.getAttribute("name");
                if (StringUtils.hasText(aliasName))
                {
                    aliases = StringUtils.tokenizeToStringArray(aliasName, BeanDefinitionParserDelegate.BEAN_NAME_DELIMITERS);
                }
            }

            parseControllerParameters(dwrController, element);

            BeanDefinitionHolder holder = new BeanDefinitionHolder(dwrController.getBeanDefinition(), beanName, aliases);
            BeanDefinitionReaderUtils.registerBeanDefinition(holder, parserContext.getRegistry());
            return dwrController.getBeanDefinition();
        }

        /**
         * Collects nested &lt;dwr:config-param&gt; name/value pairs into the
         * controller's "configParams" property.
         *
         * @param dwrControllerDefinition builder for the controller bean
         * @param parent the &lt;dwr:controller&gt; element
         */
        protected void parseControllerParameters(BeanDefinitionBuilder dwrControllerDefinition, Element parent)
        {
            NodeList children = parent.getChildNodes();
            Map<String, String> params = new HashMap<String, String>();
            for (int i = 0; i < children.getLength(); i++)
            {
                Node node = children.item(i);
                if (node.getNodeType() == Node.TEXT_NODE || node.getNodeType() == Node.COMMENT_NODE)
                {
                    continue;
                }
                Element child = (Element) node;
                if ("dwr:config-param".equals(child.getNodeName()))
                {
                    String paramName = child.getAttribute("name");
                    String value = child.getAttribute("value");
                    params.put(paramName, value);
                }
                else
                {
                    throw new RuntimeException("an unknown dwr:controller sub node was found: " + node.getNodeName());
                }
            }
            dwrControllerDefinition.addPropertyValue("configParams", params);
        }
    }

    /**
     * Registers a new bean definition based on &lt;dwr:url-mapping /&gt; schema.
     *
     * @author Jose Noheda [jose.noheda@gmail.com]
     */
    protected class UrlMappingBeanDefinitionParser implements BeanDefinitionParser
    {
        /**
         * Converts &lt;dwr:url-mapping /&gt; tag in the adequate DwrHandlerMapping bean definition.
         *
         * @param element the &lt;dwr:url-mapping /&gt; tag
         * @param parserContext access to the registry
         * @return a DwrHandlerMapping bean definition
         */
        public BeanDefinition parse(Element element, ParserContext parserContext)
        {
            BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(DwrHandlerMapping.class);
            String interceptors = element.getAttribute("interceptors");
            if (StringUtils.hasText(interceptors))
            {
                builder.addPropertyReference("interceptors", interceptors);
            }
            parserContext.getRegistry().registerBeanDefinition("DwrAnnotationURLMapper", builder.getBeanDefinition());
            return parserContext.getRegistry().getBeanDefinition("DwrAnnotationURLMapper");
        }
    }

    /**
     * Registers a bean proxy based in &lt;dwr:proxy-ref /&gt;
     *
     * @author Jose Noheda [jose.noheda@gmail.com]
     */
    protected class ProxyBeanDefinitionParser implements BeanDefinitionParser
    {
        public BeanDefinition parse(Element element, ParserContext parserContext)
        {
            String beanRef = element.getAttribute("bean");
            BeanDefinitionRegistry registry = parserContext.getRegistry();
            BeanDefinition beanRefDefinition = findParentDefinition(beanRef, registry);

            String javascript = element.getAttribute("javascript");
            if (!StringUtils.hasText(javascript))
            {
                if (log.isDebugEnabled())
                {
                    log.debug("No javascript name provided. Remoting using bean id [" + beanRef + "]");
                }
                javascript = StringUtils.capitalize(beanRef);
            }

            BeanDefinitionBuilder beanCreator = BeanDefinitionBuilder.rootBeanDefinition(BeanCreator.class);
            beanCreator.addPropertyValue("beanClass", resolveBeanClassname(beanRefDefinition, registry));
            beanCreator.addPropertyValue("beanId", beanRef);
            beanCreator.addDependsOn(beanRef);
            beanCreator.addPropertyValue("javascript", javascript);

            BeanDefinitionBuilder creatorConfig = BeanDefinitionBuilder.rootBeanDefinition(CreatorConfig.class);
            creatorConfig.addPropertyValue("creator", beanCreator.getBeanDefinition());
            registerCreator(parserContext.getRegistry(), javascript, creatorConfig, new HashMap<String, String>(), element.getChildNodes());

            return creatorConfig.getBeanDefinition();
        }
    }

    protected class RemoteBeanDefinitionDecorator implements BeanDefinitionDecorator
    {
        /**
         * Registers an &lt;dwr:remote ... /&gt; element.
         */
        public BeanDefinitionHolder decorate(Node node, BeanDefinitionHolder definition, ParserContext parserContext)
        {
            Element element = (Element) node;
            String javascript = element.getAttribute("javascript");

            BeanDefinitionBuilder beanCreator = BeanDefinitionBuilder.rootBeanDefinition(BeanCreator.class);
            try
            {
                String beanClassName = resolveBeanClassname(definition.getBeanDefinition(), parserContext.getRegistry());
                if (beanClassName == null)
                {
                    // Typo fix: was "Unabled"
                    throw new FatalBeanException("Unable to find type for beanName '" + definition.getBeanName() + "'. " + "Check your bean has a correctly configured parent or provide a class for " + " the bean definition");
                }
                beanCreator.addPropertyValue("beanClass", ClassUtils.forName(beanClassName));
            }
            catch (ClassNotFoundException e)
            {
                throw new FatalBeanException("Unable to create DWR bean creator for '" + definition.getBeanName() + "'.", e);
            }

            // Strip Spring's scoped-proxy prefix so DWR sees the logical bean id.
            String name = definition.getBeanName();
            if (name.startsWith("scopedTarget."))
            {
                name = name.substring(name.indexOf(".") + 1);
            }
            beanCreator.addPropertyValue("beanId", name);

            if (!StringUtils.hasText(javascript))
            {
                if (log.isDebugEnabled())
                {
                    log.debug("No javascript name provided. Remoting using bean id [" + name + "]");
                }
                javascript = StringUtils.capitalize(name);
            }
            beanCreator.addPropertyValue("javascript", javascript);

            BeanDefinitionBuilder creatorConfig = BeanDefinitionBuilder.rootBeanDefinition(CreatorConfig.class);
            creatorConfig.addPropertyValue("creator", beanCreator.getBeanDefinition());
            registerCreator(parserContext.getRegistry(), javascript, creatorConfig, new HashMap<String, String>(), node.getChildNodes());

            return definition;
        }
    }

    /**
     * Try getting the beanClassName from the definition and if that fails try to get it from
     * the parent (and even parent BeanFactory if we have to).
     *
     * @param definition the definition whose class name is wanted
     * @param registry the registry used to resolve parent definitions
     * @return class name or null if not found
     * @throws FatalBeanException if a parent lookup fails while walking up
     */
    protected static String resolveBeanClassname(BeanDefinition definition, BeanDefinitionRegistry registry)
    {
        String beanClassName = definition.getBeanClassName();
        while (!StringUtils.hasText(beanClassName))
        {
            try
            {
                // getParentName() is accessed reflectively because it is not on
                // the BeanDefinition interface in all supported Spring versions.
                Method m = definition.getClass().getMethod("getParentName", new Class[0]);
                String parentName = (String) m.invoke(definition, new Object[0]);
                BeanDefinition parentDefinition = findParentDefinition(parentName, registry);
                beanClassName = parentDefinition.getBeanClassName();
                definition = parentDefinition;
            }
            catch (Exception e)
            {
                throw new FatalBeanException("No parent bean could be found for " + definition, e);
            }
        }
        return beanClassName;
    }

    /**
     * Looks up a bean definition by name in the given registry, walking up
     * the parent BeanFactory chain when the registry is hierarchical.
     *
     * @return the definition, or null if it cannot be found anywhere
     */
    protected static BeanDefinition findParentDefinition(String parentName, BeanDefinitionRegistry registry)
    {
        if (registry != null)
        {
            if (registry.containsBeanDefinition(parentName))
            {
                return registry.getBeanDefinition(parentName);
            }
            else if (registry instanceof HierarchicalBeanFactory)
            {
                // Try to get parent definition from the parent BeanFactory. This could return null
                // NOTE(review): the cast assumes the parent factory is also a
                // BeanDefinitionRegistry — confirm for exotic factory setups.
                BeanFactory parentBeanFactory = ((HierarchicalBeanFactory) registry).getParentBeanFactory();
                return findParentDefinition(parentName, (BeanDefinitionRegistry) parentBeanFactory);
            }
        }
        // we've exhausted all possibilities
        return null;
    }

    /**
     * Handles &lt;dwr:convert&gt; attached to a bean: records a
     * {@link ConverterConfig} for the given class in the configurator.
     */
    protected class ConverterBeanDefinitionDecorator implements BeanDefinitionDecorator
    {
        public BeanDefinitionHolder decorate(Node node, BeanDefinitionHolder definition, ParserContext parserContext)
        {
            Element element = (Element) node;

            String type = element.getAttribute("type");
            if ("preconfigured".equals(type))
            {
                type += ":" + element.getAttribute("ref");
            }
            String className = element.getAttribute("class");
            String javascriptClassName = element.getAttribute("javascript");

            BeanDefinitionRegistry registry = parserContext.getRegistry();
            ConverterConfig converterConfig = new ConverterConfig();
            converterConfig.setType(type);
            converterConfig.setJavascriptClassName(javascriptClassName);
            parseConverterSettings(converterConfig, element);
            lookupConverters(registry).put(className, converterConfig);

            return definition;
        }
    }

    /**
     * Collects nested include/exclude method names into the converter config.
     *
     * @param converterConfig the config being populated
     * @param parent the &lt;dwr:convert&gt; element
     */
    protected void parseConverterSettings(ConverterConfig converterConfig, Element parent)
    {
        NodeList children = parent.getChildNodes();

        // check to see if there are any nested elements here
        for (int i = 0; i < children.getLength(); i++)
        {
            Node node = children.item(i);
            if (node.getNodeType() == Node.TEXT_NODE || node.getNodeType() == Node.COMMENT_NODE)
            {
                continue;
            }
            Element child = (Element) node;
            if ("dwr:include".equals(child.getNodeName()))
            {
                converterConfig.addInclude(child.getAttribute("method"));
            }
            else if ("dwr:exclude".equals(child.getNodeName()))
            {
                converterConfig.addExclude(child.getAttribute("method"));
            }
            /* TODO Why is this only a property of ObjectConverter?
            else if (child.getNodeName().equals("dwr:force"))
            {
                converterConfig.setForce(Boolean.parseBoolean(child.getAttribute("value")));
            }
            */
            else
            {
                throw new RuntimeException("an unknown dwr:remote sub node was found: " + node.getNodeName());
            }
        }
    }

    /**
     * Parse the <code>&lt;dwr:init&gt;</code> elements
     */
    protected class InitDefinitionDecorator implements BeanDefinitionDecorator
    {
        /* (non-Javadoc)
         * @see org.springframework.beans.factory.xml.BeanDefinitionDecorator#decorate(org.w3c.dom.Node, org.springframework.beans.factory.config.BeanDefinitionHolder, org.springframework.beans.factory.xml.ParserContext)
         */
        public BeanDefinitionHolder decorate(Node parent, BeanDefinitionHolder definition, ParserContext parserContext)
        {
            Map<String, String> converters = new HashMap<String, String>();
            Map<String, String> creators = new HashMap<String, String>();
            NodeList inits = parent.getChildNodes();
            for (int j = 0; j < inits.getLength(); j++)
            {
                Node node = inits.item(j);
                if (node.getNodeType() == Node.TEXT_NODE || node.getNodeType() == Node.COMMENT_NODE)
                {
                    continue;
                }
                Element child = (Element) inits.item(j);
                if (child.getNodeName().equals(ELEMENT_CREATOR))
                {
                    String id = child.getAttribute(ATTRIBUTE_ID);
                    String className = child.getAttribute(ATTRIBUTE_CLASS);
                    creators.put(id, className);
                }
                else if (child.getNodeName().equals(ELEMENT_CONVERTER))
                {
                    String id = child.getAttribute(ATTRIBUTE_ID);
                    String className = child.getAttribute(ATTRIBUTE_CLASS);
                    converters.put(id, className);
                }
                else
                {
                    throw new RuntimeException("An unknown sub node '" + child.getNodeName() + "' was found while parsing dwr:init");
                }
            }
            BeanDefinition configurator = registerSpringConfiguratorIfNecessary(parserContext.getRegistry());
            configurator.getPropertyValues().addPropertyValue("creatorTypes", creators);
            configurator.getPropertyValues().addPropertyValue("converterTypes", converters);
            return definition;
        }
    }

    /**
     * Uses the BeanDefinitionDecorator since we need access to the name of the parent definition??
     * Register the creatores: spring, new, null, scripted, jsf, struts, pageflow
     */
    protected class CreatorBeanDefinitionDecorator implements BeanDefinitionDecorator
    {
        /* (non-Javadoc)
         * @see org.springframework.beans.factory.xml.BeanDefinitionDecorator#decorate(org.w3c.dom.Node, org.springframework.beans.factory.config.BeanDefinitionHolder, org.springframework.beans.factory.xml.ParserContext)
         */
        @SuppressWarnings("unchecked")
        public BeanDefinitionHolder decorate(Node node, BeanDefinitionHolder definition, ParserContext parserContext)
        {
            Element element = (Element) node;
            String javascript = element.getAttribute("javascript");
            String creatorType = element.getAttribute("type");

            BeanDefinitionBuilder creatorConfig = BeanDefinitionBuilder.rootBeanDefinition(CreatorConfig.class);

            // Configure "known" creators in the CreatorConfig. If unknown then just create the configuration
            // and leave it up DWR itself to decide if it's a valid creator type
            BeanDefinitionBuilder creator;
            Map<String, String> params = new HashMap<String, String>();
            if ("spring".equals(creatorType))
            {
                // TODO Refactor so that both spring creators use the same code...
                BeanDefinitionBuilder springCreator = BeanDefinitionBuilder.rootBeanDefinition(BeanCreator.class);
                springCreator.addPropertyValue("javascript", javascript);
                NodeList children = element.getChildNodes();
                for (int i = 0; i < children.getLength(); i++)
                {
                    Node childNode = children.item(i);
                    if (childNode.getNodeType() == Node.TEXT_NODE || childNode.getNodeType() == Node.COMMENT_NODE)
                    {
                        continue;
                    }
                    Element child = (Element) childNode;
                    String paramName = child.getAttribute("name");
                    String value = child.getAttribute("value");
                    if ("beanName".equals(paramName) || "beanId".equals(paramName))
                    {
                        springCreator.addPropertyValue("beanId", value);
                    }
                    else
                    {
                        params.put(paramName, value);
                    }
                }
                creatorConfig.addPropertyValue("creator", springCreator.getBeanDefinition());
            }
            else if ("new".equals(creatorType))
            {
                creator = BeanDefinitionBuilder.rootBeanDefinition(NewCreator.class);
                creator.addPropertyValue("className", node.getAttributes().getNamedItem("class").getNodeValue());
                creator.addPropertyValue("javascript", javascript);
                creatorConfig.addPropertyValue("creator", creator.getBeanDefinition());
            }
            else if ("null".equals(creatorType))
            {
                creatorConfig.addPropertyValue("creatorType", "none");
                String className = element.getAttribute("class");
                if (className == null || "".equals(className))
                {
                    throw new BeanInitializationException("'class' is a required attribute for the declaration <dwr:creator type=\"null\"" + " javascript=\"" + javascript + "\" ... />");
                }
                params.put("class", className);
            }
            else if ("pageflow".equals(creatorType))
            {
                creatorConfig.addPropertyValue("creatorType", creatorType);
            }
            else if ("jsf".equals(creatorType) || "scripted".equals(creatorType) || "struts".equals(creatorType))
            {
                creatorConfig.addPropertyValue("creatorType", creatorType);
            }
            else
            {
                if (log.isDebugEnabled())
                {
                    log.debug("Looking up creator type '" + creatorType + "'");
                }
                // TODO We should delay the initialization of the creatorClass until after the bean
                // definitions have been parsed.
                BeanDefinition configurator = registerSpringConfiguratorIfNecessary(parserContext.getRegistry());
                PropertyValue registeredCreators = configurator.getPropertyValues().getPropertyValue("creatorTypes");
                Map<String, String> registeredCreatorMap = (Map<String, String>) registeredCreators.getValue();
                String creatorClass = registeredCreatorMap.get(creatorType);
                if (creatorClass == null)
                {
                    // the creator type should have been registered
                    throw new UnsupportedOperationException("Type " + creatorType + " is not supported " + " or the custom creator has not been registered dwr:init");
                }
                else
                {
                    try
                    {
                        Class<?> clazz = Class.forName(creatorClass);
                        creator = BeanDefinitionBuilder.rootBeanDefinition(clazz);
                        creatorConfig.addPropertyValue("creator", creator.getBeanDefinition());
                        String className = element.getAttribute("class");
                        if (StringUtils.hasText(className))
                        {
                            params.put("class", className);
                        }
                    }
                    catch (ClassNotFoundException ex)
                    {
                        throw new FatalBeanException("ClassNotFoundException trying to register " + " creator '" + creatorClass + "' for javascript type '" + javascript + "'. Check the " + " class in the classpath and that the creator is register in dwr:init", ex);
                    }
                }
            }
            registerCreator(parserContext.getRegistry(), javascript, creatorConfig, params, node.getChildNodes());
            return definition;
        }
    }

    /**
     * Handles &lt;dwr:signatures&gt;: concatenates all text/CDATA children and
     * stores them on the configurator's "signatures" property.
     */
    protected class SignaturesBeanDefinitionDecorator implements BeanDefinitionDecorator
    {
        public BeanDefinitionHolder decorate(Node node, BeanDefinitionHolder definition, ParserContext parserContext)
        {
            BeanDefinitionRegistry registry = parserContext.getRegistry();
            BeanDefinition config = registerSpringConfiguratorIfNecessary(registry);

            StringBuffer sigtext = new StringBuffer();
            NodeList children = node.getChildNodes();
            for (int i = 0; i < children.getLength(); i++)
            {
                Node child = children.item(i);
                if (child.getNodeType() != Node.TEXT_NODE && child.getNodeType() != Node.CDATA_SECTION_NODE)
                {
                    log.warn("Ignoring illegal node type: " + child.getNodeType());
                    continue;
                }
                sigtext.append(child.getNodeValue());
            }

            config.getPropertyValues().addPropertyValue("signatures", sigtext.toString());
            return definition;
        }
    }

    /**
     * @param registry the bean definition registry
     * @return Get a list of the defined Creators
     */
    @SuppressWarnings("unchecked")
    protected static Map<String, RuntimeBeanReference> lookupCreators(BeanDefinitionRegistry registry)
    {
        BeanDefinition config = registerSpringConfiguratorIfNecessary(registry);
        return (Map<String, RuntimeBeanReference>) config.getPropertyValues().getPropertyValue("creators").getValue();
    }

    /**
     * @param registry the bean definition registry
     * @return Get a list of the defined Converters
     */
    @SuppressWarnings("unchecked")
    protected static Map<String, ConverterConfig> lookupConverters(BeanDefinitionRegistry registry)
    {
        BeanDefinition config = registerSpringConfiguratorIfNecessary(registry);
        return (Map<String, ConverterConfig>) config.getPropertyValues().getPropertyValue("converters").getValue();
    }

    /** Bean id of the shared SpringConfigurator definition. */
    protected static final String DEFAULT_SPRING_CONFIGURATOR_ID = "__dwrConfiguration";

    /**
     * The log stream
     */
    private static final Log log = LogFactory.getLog(DwrNamespaceHandler.class);

    /*
     * The element names
     */
    private static final String ELEMENT_CONVERTER = "dwr:converter";

    private static final String ELEMENT_CREATOR = "dwr:creator";

    /*
     * The attribute names
     */
    private static final String ATTRIBUTE_ID = "id";

    private static final String ATTRIBUTE_CLASS = "class";
}
/*
 * Copyright 2005-2014 The Kuali Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.opensource.org/licenses/ecl1.php
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kuali.kra.printing.util;

import org.kuali.kra.award.paymentreports.awardreports.reporting.service.ReportTrackingType;
import org.kuali.kra.award.printing.AwardPrintType;
import org.kuali.kra.budget.printing.BudgetPrintType;
import org.kuali.kra.coi.print.CoiDisclosureType;
import org.kuali.kra.common.committee.print.CommitteeReportType;
import org.kuali.kra.infrastructure.KraServiceLocator;
import org.kuali.kra.institutionalproposal.printing.InstitutionalProposalPrintType;
import org.kuali.kra.institutionalproposal.proposallog.service.ProposalLogPrintingService;
import org.kuali.kra.irb.actions.print.ProtocolPrintType;
import org.kuali.kra.negotiations.printing.NegotiationActivityPrintType;
import org.kuali.kra.printing.service.CurrentAndPendingReportService;
import org.kuali.kra.proposaldevelopment.bo.AttachmentDataSource;
import org.kuali.kra.proposaldevelopment.bo.ProposalPerson;
import org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument;
import org.kuali.kra.proposaldevelopment.printing.service.ProposalDevelopmentPrintingService;
import org.kuali.kra.subawardReporting.printing.SubAwardPrintType;
import org.kuali.rice.coreservice.framework.parameter.ParameterService;
import org.kuali.rice.kns.util.WebUtils;
import org.kuali.rice.krad.service.BusinessObjectService;
import org.kuali.rice.location.api.country.Country;
import org.kuali.rice.location.api.country.CountryService;
import org.kuali.rice.location.api.state.State;
import org.kuali.rice.location.api.state.StateService;

import javax.servlet.http.HttpServletResponse;
import javax.xml.transform.Source;
import javax.xml.transform.stream.StreamSource;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * Static helpers shared by the printing/report services: system-parameter
 * lookup, country/state resolution, XSL stylesheet resolution for the various
 * report types, and streaming of generated documents to the HTTP response.
 */
public class PrintingUtils {

    /** Classpath directory containing all report stylesheets (note the trailing slash). */
    private static final String XSL_CONTEXT_DIR = "/org/kuali/kra/printing/stylesheet/";

    private static final String XSL_BUDGET_SUMMARY = "BudgetSummaryReport.xsl";
    private static final String XSL_BUDGET_SALARY = "BudgetSalary.xsl";
    private static final String XSL_BUDGET_TOTAL = "BudgetSummaryTotalPage.xsl";
    private static final String XSL_BUDGET_CUMULATIVE = "CumulativeSummary.xsl";
    private static final String XSL_INDUSTRIAL_BUDGET = "IndstlBudgetSummary.xsl";
    private static final String XSL_BUDGET_COSTSHARING_SUMMARY = "CostSharingBudgetSummary.xsl";
    private static final String XSL_AWARD_NOTICE = "AwardNotice.xsl";
    private static final String XSL_AWARD_DELTA = "AwardModification.xsl";
    private static final String XSL_AWARD_BUDGET_HIERARCHY = "awardBudgetHierarchy.xsl";
    private static final String XSL_AWARD_BUDGET_HISTORY_TRANSACTION = "awardBudgetModification.xsl";
    private static final String XSL_AWARD_TEMPLATE = "awardTemplate.xsl";
    private static final String XSL_MONEY_AND_END_DATES_HISTORY = "awardMoneyAndEndDatesHistory.xsl";
    private static final String XSL_PRINT_CERTIFICATION = "printCertification.xsl";
    private static final String XSL_CURRENT_REPORT = "CurrentSupport.xsl";
    private static final String XSL_PENDING_REPORT = "PendingSupport.xsl";
    private static final String XSL_INSTITUTIONAL_PROPOSAL_REPORT = "instituteProposal.xsl";
    private static final String XSL_COMMITTEE_ROSTER = "CommitteeRoster.xsl";
    private static final String XSL_FUTURE_SCHEDULED_MEETINGS = "CommitteeFutureScheduledMeetings.xsl";
    private static final String XSL_PROPOSAL_LOG_REPORT = "proposalLog.xsl";
    private static final String PRINCIPAL_INVESTIGATOR = "PI";
    private static final String XSL_PRINT_NEGOTIATION_ACTIVITY_REPORT = "NegotiationActivityReport.xsl";
    private static final String XSL_PRINT_SUB_AWARD_SF_294_REPORT = "294.xsl";
    private static final String XSL_PRINT_SUB_AWARD_SF_295_REPORT = "295.xsl";
    private static final String XSL_COI_APPROVED_DISCLOSURE = "ApprovedDisclosure.xsl";
    private static final String XSL_AWARD_REPORT_TRACKING = "AwardReportingRequirements.xsl";
    private static final String XSL_SUB_AWARD_FDP_AGREEMENT = "FDP_Template_Agreement.xsl";
    private static final String XSL_SUB_AWARD_FDP_MODIFICATION = "FDP_Modification_Template.xsl";

    /**
     * Fetches a system-constant parameter scoped to {@link ProposalDevelopmentDocument}.
     *
     * @param parameter name of the parameter to look up
     * @return the parameter value as a String
     * @see org.kuali.kra.s2s.service.S2SUtilService#getParameterValue(java.lang.String)
     */
    public static String getParameterValue(String parameter) {
        ParameterService parameterService = KraServiceLocator
                .getService(ParameterService.class);
        return parameterService.getParameterValueAsString(
                ProposalDevelopmentDocument.class, parameter);
    }

    /**
     * Fetches a system-constant parameter scoped to an arbitrary component class.
     * Used when the parameter does not belong to Proposal Development (e.g. Award).
     *
     * @param clazz component class the parameter is registered under
     * @param parameter name of the parameter to look up
     * @return the parameter value as a String
     */
    public static String getParameterValue(Class clazz, String parameter) {
        ParameterService parameterService = KraServiceLocator.getService(ParameterService.class);
        return parameterService.getParameterValueAsString(clazz, parameter);
    }

    /**
     * Resolves a {@link State} from a country alternate code and a state name.
     * NOTE(review): throws NPE if the country alternate code is unknown
     * (getCountryByAlternateCode may return null) — confirm callers always pass
     * valid codes.
     *
     * @param countryAlternateCode alternate code of the country
     * @param stateName name/code of the state within that country
     * @return the matching State
     * @see org.kuali.kra.s2s.service.S2SUtilService#getStateFromName(java.lang.String)
     */
    public static State getStateFromName(String countryAlternateCode, String stateName) {
        Country country = getCountryService().getCountryByAlternateCode(countryAlternateCode);
        return getStateService().getState(country.getCode(), stateName);
    }

    private static CountryService getCountryService() {
        return KraServiceLocator.getService(CountryService.class);
    }

    private static StateService getStateService() {
        return KraServiceLocator.getService(StateService.class);
    }

    /**
     * Resolves the stylesheet for a given report type and returns it as a
     * single-element list of {@link Source}.
     *
     * @param reportType report for which the stylesheet is to be fetched
     * @return {@link ArrayList} containing one stylesheet {@link Source}
     */
    public static ArrayList<Source> getXSLTforReport(String reportType) {
        return toSourceList(lookupStylesheetName(reportType));
    }

    /**
     * Returns the given stylesheet file (relative to the stylesheet classpath
     * directory) as a single-element list of {@link Source}.
     *
     * @param reportTemplate stylesheet file name, e.g. {@code "AwardNotice.xsl"}
     * @return {@link ArrayList} containing one stylesheet {@link Source}
     */
    public static ArrayList<Source> getXSLTforReportTemplate(String reportTemplate) {
        return toSourceList(reportTemplate);
    }

    /**
     * Maps a report type to its stylesheet file name, or null when the type is
     * unrecognized.
     */
    private static String lookupStylesheetName(String reportType) {
        String xsl = null;
        if (reportType.equals(AwardPrintType.AWARD_NOTICE_REPORT.getAwardPrintType())) {
            xsl = XSL_AWARD_NOTICE;
        } else if (reportType.equals(AwardPrintType.AWARD_DELTA_REPORT.getAwardPrintType())) {
            xsl = XSL_AWARD_DELTA;
        } else if (reportType.equals(AwardPrintType.AWARD_BUDGET_HIERARCHY.getAwardPrintType())) {
            xsl = XSL_AWARD_BUDGET_HIERARCHY;
        } else if (reportType.equals(AwardPrintType.AWARD_BUDGET_HISTORY_TRANSACTION.getAwardPrintType())) {
            xsl = XSL_AWARD_BUDGET_HISTORY_TRANSACTION;
        } else if (reportType.equals(AwardPrintType.AWARD_TEMPLATE.getAwardPrintType())) {
            xsl = XSL_AWARD_TEMPLATE;
        } else if (reportType.equals(AwardPrintType.MONEY_AND_END_DATES_HISTORY.getAwardPrintType())) {
            xsl = XSL_MONEY_AND_END_DATES_HISTORY;
        } else if (reportType.equals(BudgetPrintType.BUDGET_SUMMARY_REPORT.getBudgetPrintType())) {
            xsl = XSL_BUDGET_SUMMARY;
        } else if (reportType.equals(BudgetPrintType.BUDGET_SALARY_REPORT.getBudgetPrintType())) {
            xsl = XSL_BUDGET_SALARY;
        } else if (reportType.equals(BudgetPrintType.BUDGET_TOTAL_REPORT.getBudgetPrintType())) {
            xsl = XSL_BUDGET_TOTAL;
        } else if (reportType.equals(BudgetPrintType.BUDGET_SUMMARY_TOTAL_REPORT.getBudgetPrintType())) {
            xsl = XSL_BUDGET_TOTAL;
        } else if (reportType.equals(BudgetPrintType.INDUSTRIAL_CUMULATIVE_BUDGET_REPORT.getBudgetPrintType())) {
            // Intentionally shares the summary-total stylesheet, as in the original mapping.
            xsl = XSL_BUDGET_TOTAL;
        } else if (reportType.equals(BudgetPrintType.BUDGET_CUMULATIVE_REPORT.getBudgetPrintType())) {
            xsl = XSL_BUDGET_CUMULATIVE;
        } else if (reportType.equals(BudgetPrintType.INDUSTRIAL_BUDGET_REPORT.getBudgetPrintType())) {
            xsl = XSL_INDUSTRIAL_BUDGET;
        } else if (reportType.equals(BudgetPrintType.BUDGET_COST_SHARE_SUMMARY_REPORT.getBudgetPrintType())) {
            xsl = XSL_BUDGET_COSTSHARING_SUMMARY;
        } else if (reportType.equals(CurrentAndPendingReportService.CURRENT_REPORT_TYPE)) {
            xsl = XSL_CURRENT_REPORT;
        } else if (reportType.equals(CurrentAndPendingReportService.PENDING_REPORT_TYPE)) {
            xsl = XSL_PENDING_REPORT;
        } else if (reportType.equals(
                InstitutionalProposalPrintType.INSTITUTIONAL_PROPOSAL_REPORT.getInstitutionalProposalPrintType())) {
            xsl = XSL_INSTITUTIONAL_PROPOSAL_REPORT;
        } else if (reportType.equals(ProposalLogPrintingService.PROPOSAL_LOG_REPORT_TYPE)) {
            xsl = XSL_PROPOSAL_LOG_REPORT;
        } else if (reportType.equals(ProposalDevelopmentPrintingService.PRINT_CERTIFICATION_REPORT)) {
            xsl = XSL_PRINT_CERTIFICATION;
        } else if (reportType.equals(CoiDisclosureType.APPROVED_DISCLOSURE_TYPE.getCoiDisclosureType())) {
            xsl = XSL_COI_APPROVED_DISCLOSURE;
        } else if (reportType.equals(CommitteeReportType.ROSTER.getCommitteeReportType())) {
            xsl = XSL_COMMITTEE_ROSTER;
        } else if (reportType.equals(CommitteeReportType.FUTURE_SCHEDULED_MEETINGS.getCommitteeReportType())) {
            xsl = XSL_FUTURE_SCHEDULED_MEETINGS;
        } else if (reportType.equals(ReportTrackingType.AWARD_REPORT_TRACKING.getReportTrackingType())) {
            xsl = XSL_AWARD_REPORT_TRACKING;
        } else if (reportType.equals(
                NegotiationActivityPrintType.NEGOTIATION_ACTIVITY_REPORT.getNegotiationActivityPrintType())) {
            xsl = XSL_PRINT_NEGOTIATION_ACTIVITY_REPORT;
        } else if (reportType.equals(SubAwardPrintType.SUB_AWARD_SF_294_PRINT_TYPE.getSubAwardPrintType())) {
            xsl = XSL_PRINT_SUB_AWARD_SF_294_REPORT;
        } else if (reportType.equals(SubAwardPrintType.SUB_AWARD_SF_295_PRINT_TYPE.getSubAwardPrintType())) {
            xsl = XSL_PRINT_SUB_AWARD_SF_295_REPORT;
        } else if (reportType.equals(SubAwardPrintType.SUB_AWARD_FDP_TEMPLATE.getSubAwardPrintType())) {
            xsl = XSL_SUB_AWARD_FDP_AGREEMENT;
        } else if (reportType.equals(SubAwardPrintType.SUB_AWARD_FDP_MODIFICATION.getSubAwardPrintType())) {
            xsl = XSL_SUB_AWARD_FDP_MODIFICATION;
        } else if (ProtocolPrintType.getReportTypes().contains(reportType)) {
            for (ProtocolPrintType protocolPrintType : ProtocolPrintType.values()) {
                if (reportType.equals(protocolPrintType.getProtocolPrintType())) {
                    xsl = protocolPrintType.getTemplate();
                    break;
                }
            }
        }
        return xsl;
    }

    /**
     * Wraps a stylesheet from the classpath in a StreamSource.
     * Fixes two issues in the original implementation: the resource was looked
     * up via {@code new PrintingUtils().getClass()} (needless instantiation)
     * and the path concatenated an extra '/' onto XSL_CONTEXT_DIR, which
     * already ends with one, yielding a double-slash resource name.
     * An unknown stylesheet still produces a StreamSource over a null stream,
     * matching the original behavior (the failure surfaces at transform time).
     */
    private static ArrayList<Source> toSourceList(String stylesheetName) {
        Source src = new StreamSource(
                PrintingUtils.class.getResourceAsStream(XSL_CONTEXT_DIR + stylesheetName));
        ArrayList<Source> sourceList = new ArrayList<Source>();
        sourceList.add(src);
        return sourceList;
    }

    /**
     * Resolves a {@link Country} from its alternate country code.
     *
     * @param countryCode alternate code of the country
     * @param businessObjectService unused; retained for signature compatibility with callers
     * @return the matching Country, or null if none matches
     * @see org.kuali.kra.s2s.service.S2SUtilService#getCountryFromCode(java.lang.String)
     */
    public static Country getCountryFromCode(String countryCode, BusinessObjectService businessObjectService) {
        CountryService countryService = KraServiceLocator.getService(CountryService.class);
        return countryService.getCountryByAlternateCode(countryCode);
    }

    /**
     * Returns the Principal Investigator from a list of proposal persons.
     * If several persons carry the PI role, the last one in list order wins
     * (preserved from the original implementation).
     *
     * @param proposalPersons persons on the proposal; may be null
     * @return the PI, or null when the list is null or contains no PI
     */
    public static ProposalPerson getPrincipalInvestigator(List<ProposalPerson> proposalPersons) {
        ProposalPerson proposalPerson = null;
        if (proposalPersons != null) {
            for (ProposalPerson person : proposalPersons) {
                if (person.getProposalPersonRoleId().equals(PRINCIPAL_INVESTIGATOR)) {
                    proposalPerson = person;
                }
            }
        }
        return proposalPerson;
    }

    /*
     * This method is copied from KraTransactionalDocumentBase. It is referenced by meeting.
     * TODO : refactor other references from KraTransactionalDocumentBase to this method ?
     */
    public static void streamToResponse(AttachmentDataSource attachmentDataSource,
            HttpServletResponse response) throws Exception {
        byte[] xbts = attachmentDataSource.getContent();
        ByteArrayOutputStream baos = null;
        try {
            baos = new ByteArrayOutputStream(xbts.length);
            baos.write(xbts);
            WebUtils.saveMimeOutputStreamAsFile(response, attachmentDataSource.getContentType(),
                    baos, attachmentDataSource.getFileName());
        } finally {
            try {
                if (baos != null) {
                    baos.close();
                    baos = null;
                }
            } catch (IOException ignored) {
                // Best-effort close; a ByteArrayOutputStream close cannot fail,
                // and the original deliberately swallowed this exception.
            }
        }
    }

    /**
     * Streams raw file contents to the HTTP response as a downloadable file.
     *
     * @param fileContents bytes to send
     * @param fileName name presented to the browser
     * @param fileContentType MIME type of the content
     * @param response servlet response to write to
     * @throws Exception if writing the response fails; close failures are
     *         wrapped in a RuntimeException (original behavior)
     */
    public static void streamToResponse(byte[] fileContents, String fileName,
            String fileContentType, HttpServletResponse response) throws Exception {
        ByteArrayOutputStream baos = null;
        try {
            baos = new ByteArrayOutputStream(fileContents.length);
            baos.write(fileContents);
            WebUtils.saveMimeOutputStreamAsFile(response, fileContentType, baos, fileName);
        } finally {
            try {
                if (baos != null) {
                    baos.close();
                    baos = null;
                }
            } catch (IOException ioEx) {
                throw new RuntimeException("IOException occurred while downloading attachment", ioEx);
            }
        }
    }
}
/*
 * Copyright 2016 Karl Bennett
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package shiver.me.timbers.spring.security.fields;

import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

import java.lang.reflect.Field;

import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.BDDMockito.given;
import static org.mockito.BDDMockito.willThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static shiver.me.timbers.data.random.RandomDoubles.someDouble;
import static shiver.me.timbers.data.random.RandomIntegers.someInteger;
import static shiver.me.timbers.data.random.RandomShorts.someShort;
import static shiver.me.timbers.data.random.RandomStrings.someString;
import static shiver.me.timbers.data.random.RandomThings.someThing;

/**
 * Unit tests for ReflectionFieldMutator. Each test stubs the FieldFinder and
 * FieldGetSetter collaborators and checks that retrieve/replace/update/copy
 * delegate correctly, and that reflection failures are translated into
 * IllegalArgumentException (field not found) or IllegalStateException
 * (field not accessible) with the original exception as the cause.
 */
public class ReflectionFieldMutatorTest {

    private static final Field ONE = getField("one");
    private static final Field TWO = getField("two");
    private static final Field THREE = getField("three");
    private static final Field FOUR = getField("four");

    @Rule
    public final ExpectedException expectedException = ExpectedException.none();

    private FieldFinder fieldFinder;
    private FieldGetSetter fieldGetSetter;
    private FieldMutator mutator;

    @Before
    public void setUp() {
        fieldGetSetter = mock(FieldGetSetter.class);
        fieldFinder = mock(FieldFinder.class);
        mutator = new ReflectionFieldMutator(fieldFinder, fieldGetSetter);
    }

    @Test
    public void Can_retrieve_a_field() throws NoSuchFieldException, IllegalAccessException {

        final Object target = someObject();
        final String fieldName = someString();
        final Class fieldType = someClass();

        final Field found = someField();

        final Object expected = someObject();

        // Given
        given(fieldFinder.findField(target, fieldName, fieldType)).willReturn(found);
        given(fieldGetSetter.get(target, found)).willReturn(expected);

        // When
        final Object actual = mutator.retrieve(target, fieldName, fieldType);

        // Then
        assertThat(actual, is(expected));
    }

    @Test
    public void Can_fail_to_find_a_field_to_retrieve() throws NoSuchFieldException {

        final Object target = someObject();
        final String fieldName = someString();
        final Class fieldType = someClass();

        final NoSuchFieldException cause = new NoSuchFieldException();

        // Given
        given(fieldFinder.findField(target, fieldName, fieldType)).willThrow(cause);
        expectedException.expect(IllegalArgumentException.class);
        expectedException.expectCause(is(cause));

        // When
        mutator.retrieve(target, fieldName, fieldType);
    }

    @Test
    public void Can_fail_to_get_a_field_to_retrieve() throws NoSuchFieldException, IllegalAccessException {

        final Object target = someObject();
        final String fieldName = someString();
        final Class fieldType = someClass();

        final Field found = someField();

        final IllegalAccessException cause = new IllegalAccessException();

        // Given
        given(fieldFinder.findField(target, fieldName, fieldType)).willReturn(found);
        given(fieldGetSetter.get(target, found)).willThrow(cause);
        expectedException.expect(IllegalStateException.class);
        expectedException.expectCause(is(cause));

        // When
        mutator.retrieve(target, fieldName, fieldType);
    }

    @Test
    public void Can_replace_a_field() throws NoSuchFieldException, IllegalAccessException {

        final Object target = someObject();
        final String fieldName = someString();
        final Class fieldType = someClass();
        final Object newValue = someObject();

        final Field found = someField();

        // Given
        given(fieldFinder.findField(target, fieldName, fieldType)).willReturn(found);

        // When
        mutator.replace(target, fieldName, fieldType, newValue);

        // Then
        verify(fieldGetSetter).set(target, found, newValue);
    }

    @Test
    public void Can_fail_to_find_a_field_to_replace() throws NoSuchFieldException {

        final Object target = someObject();
        final String fieldName = someString();
        final Class fieldType = someClass();
        final Object newValue = someObject();

        final NoSuchFieldException cause = new NoSuchFieldException();

        // Given
        given(fieldFinder.findField(target, fieldName, fieldType)).willThrow(cause);
        expectedException.expect(IllegalArgumentException.class);
        expectedException.expectCause(is(cause));

        // When
        mutator.replace(target, fieldName, fieldType, newValue);
    }

    @Test
    public void Can_fail_to_set_a_field_to_replace() throws NoSuchFieldException, IllegalAccessException {

        final Object target = someObject();
        final String fieldName = someString();
        final Class fieldType = someClass();
        final Object newValue = someObject();

        final Field found = someField();

        final IllegalAccessException cause = new IllegalAccessException();

        // Given
        given(fieldFinder.findField(target, fieldName, fieldType)).willReturn(found);
        willThrow(cause).given(fieldGetSetter).set(target, found, newValue);
        expectedException.expect(IllegalStateException.class);
        expectedException.expectCause(is(cause));

        // When
        mutator.replace(target, fieldName, fieldType, newValue);
    }

    @Test
    @SuppressWarnings("unchecked")
    public void Can_update_a_field() throws NoSuchFieldException, IllegalAccessException {

        final Object target = someObject();
        final String fieldName = someString();
        final Class fieldType = someClass();
        final Updater updater = mock(Updater.class);

        final Field found = someField();
        final Object currentValue = someObject();
        final Object updatedValue = someObject();

        // Given
        given(fieldFinder.findField(target, fieldName, fieldType)).willReturn(found);
        given(fieldGetSetter.get(target, found)).willReturn(currentValue);
        given(updater.update(currentValue)).willReturn(updatedValue);

        // When
        mutator.update(target, fieldName, fieldType, updater);

        // Then
        verify(fieldGetSetter).set(target, found, updatedValue);
    }

    @Test
    public void Can_copy_fields() throws IllegalAccessException {

        final FieldTest source = new FieldTest();
        final FieldTest destination = new FieldTest();

        final Object firstValue = new Object();
        final Object secondValue = new Object();
        final Object thirdValue = new Object();
        final Object fourthValue = new Object();

        // Given
        given(fieldGetSetter.get(source, ONE)).willReturn(firstValue);
        given(fieldGetSetter.get(source, TWO)).willReturn(secondValue);
        given(fieldGetSetter.get(source, THREE)).willReturn(thirdValue);
        given(fieldGetSetter.get(source, FOUR)).willReturn(fourthValue);

        // When
        mutator.copy(source, destination);

        // Then every instance field is read from the source and written to the
        // destination, and nothing else touches the getter/setter (the static
        // FieldTest.ZERO field must be skipped).
        verify(fieldGetSetter).get(source, ONE);
        verify(fieldGetSetter).get(source, TWO);
        verify(fieldGetSetter).get(source, THREE);
        verify(fieldGetSetter).get(source, FOUR);
        verify(fieldGetSetter).set(destination, ONE, firstValue);
        verify(fieldGetSetter).set(destination, TWO, secondValue);
        verify(fieldGetSetter).set(destination, THREE, thirdValue);
        verify(fieldGetSetter).set(destination, FOUR, fourthValue);
        verifyNoMoreInteractions(fieldGetSetter);
    }

    @Test
    public void Can_fail_to_copy_fields() throws IllegalAccessException {

        final FieldTest source = new FieldTest();
        final FieldTest destination = new FieldTest();

        final IllegalAccessException cause = new IllegalAccessException();

        // Given
        given(fieldGetSetter.get(source, ONE)).willThrow(cause);
        expectedException.expect(IllegalArgumentException.class);
        expectedException.expectCause(is(cause));

        // When
        mutator.copy(source, destination);
    }

    private static Object someObject() {
        return someThing(someInteger(), someDouble(), someString());
    }

    private static Field someField() {
        return getField(someThing("one", "two", "three", "four"));
    }

    private static Field getField(String name) {
        try {
            return FieldTest.class.getDeclaredField(name);
        } catch (NoSuchFieldException e) {
            throw new RuntimeException(e);
        }
    }

    private static Class someClass() {
        return someThing(Integer.class, Double.class, String.class);
    }

    private static class FieldTest {
        private static final short ZERO = someShort();
        private final int one = someInteger();
        private final String two = someString();
        private final double three = someDouble();
        private final String four = someString();
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.aries.subsystem.core.internal; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.net.URISyntaxException; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.EnumSet; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.apache.aries.subsystem.AriesSubsystem; import org.apache.aries.subsystem.core.archive.AriesSubsystemParentsHeader; import org.apache.aries.subsystem.core.archive.DeployedContentHeader; import org.apache.aries.subsystem.core.archive.DeploymentManifest; import org.apache.aries.subsystem.core.archive.Header; import org.apache.aries.subsystem.core.archive.SubsystemContentHeader; import org.apache.aries.subsystem.core.archive.SubsystemManifest; import org.apache.aries.util.filesystem.FileSystem; import org.apache.aries.util.filesystem.IDirectory; import org.apache.aries.util.io.IOUtils; import org.osgi.framework.BundleContext; import org.osgi.framework.BundleException; import org.osgi.framework.InvalidSyntaxException; import org.osgi.framework.Version; import org.osgi.framework.namespace.IdentityNamespace; import 
org.osgi.resource.Capability; import org.osgi.resource.Requirement; import org.osgi.resource.Resource; import org.osgi.service.coordinator.Coordination; import org.osgi.service.coordinator.Participant; import org.osgi.service.resolver.ResolutionException; import org.osgi.service.subsystem.Subsystem; import org.osgi.service.subsystem.SubsystemConstants; import org.osgi.service.subsystem.SubsystemException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class BasicSubsystem implements Resource, AriesSubsystem { private static final Logger logger = LoggerFactory.getLogger(BasicSubsystem.class); public static final String ROOT_SYMBOLIC_NAME = "org.osgi.service.subsystem.root"; public static final Version ROOT_VERSION = Version.parseVersion("1.0.0"); public static final String ROOT_LOCATION = "subsystem://?" + SubsystemConstants.SUBSYSTEM_SYMBOLICNAME + '=' + ROOT_SYMBOLIC_NAME + '&' + SubsystemConstants.SUBSYSTEM_VERSION + '=' + ROOT_VERSION; private DeploymentManifest deploymentManifest; private SubsystemResource resource; private SubsystemManifest subsystemManifest; private final IDirectory directory; public BasicSubsystem(SubsystemResource resource) throws URISyntaxException, IOException, BundleException, InvalidSyntaxException { this.resource = resource; final File file = new File(Activator.getInstance().getBundleContext().getDataFile(""), Long.toString(resource.getId())); file.mkdirs(); Coordination coordination = Activator.getInstance().getCoordinator().peek(); if (coordination != null) { coordination.addParticipant(new Participant() { @Override public void ended(Coordination c) throws Exception { // Nothing } @Override public void failed(Coordination c) throws Exception { IOUtils.deleteRecursive(file); } }); } directory = FileSystem.getFSRoot(file); setSubsystemManifest(resource.getSubsystemManifest()); SubsystemManifestValidator.validate(this, getSubsystemManifest()); setDeploymentManifest(new DeploymentManifest.Builder() 
.manifest(resource.getSubsystemManifest()) .manifest(resource.getDeploymentManifest()) .location(resource.getLocation()) .autostart(false) .id(resource.getId()) .lastId(SubsystemIdentifier.getLastId()) .region(resource.getRegion().getName()) .state(State.INSTALLING) .build()); } public BasicSubsystem(File file) throws IOException, URISyntaxException, ResolutionException { this(FileSystem.getFSRoot(file)); } public BasicSubsystem(IDirectory directory) throws IOException, URISyntaxException, ResolutionException { this.directory = directory; setDeploymentManifest(new DeploymentManifest.Builder().manifest(getDeploymentManifest()).build()); } /* BEGIN Resource interface methods. */ @Override public boolean equals(Object o) { if (o == this) return true; if (!(o instanceof BasicSubsystem)) return false; BasicSubsystem that = (BasicSubsystem)o; return getLocation().equals(that.getLocation()); } @Override public List<Capability> getCapabilities(String namespace) { SubsystemManifest manifest = getSubsystemManifest(); List<Capability> result = manifest.toCapabilities(this); if (namespace != null) for (Iterator<Capability> i = result.iterator(); i.hasNext();) if (!i.next().getNamespace().equals(namespace)) i.remove(); // TODO Somehow, exposing the capabilities of content resources of a // feature is causing an infinite regression of feature2 installations // in FeatureTest.testSharedContent() under certain conditions. 
if (isScoped() || IdentityNamespace.IDENTITY_NAMESPACE.equals(namespace)) return result; SubsystemContentHeader header = manifest.getSubsystemContentHeader(); for (Resource constituent : getConstituents()) if (header.contains(constituent)) for (Capability capability : constituent.getCapabilities(namespace)) result.add(new BasicCapability(capability, this)); return result; } @Override public List<Requirement> getRequirements(String namespace) { SubsystemManifest manifest = getSubsystemManifest(); List<Requirement> result = manifest.toRequirements(this); if (namespace != null) for (Iterator<Requirement> i = result.iterator(); i.hasNext();) if (!i.next().getNamespace().equals(namespace)) i.remove(); if (isScoped()) return result; SubsystemContentHeader header = manifest.getSubsystemContentHeader(); for (Resource constituent : getConstituents()) if (header.contains(constituent)) for (Requirement requirement : constituent.getRequirements(namespace)) result.add(new BasicRequirement(requirement, this)); return result; } @Override public int hashCode() { int result = 17; result = 31 * result + getLocation().hashCode(); return result; } /* END Resource interface methods. */ /* BEGIN Subsystem interface methods. 
*/
// ---------------------------------------------------------------------------
// NOTE(review): this chunk is the interior of BasicSubsystem. The class
// declaration and the fields referenced below (directory, deploymentManifest,
// subsystemManifest, resource, logger, ROOT_LOCATION) are declared outside
// this view — confirm against the full file.
// ---------------------------------------------------------------------------

/** Returns this subsystem's region bundle context; context permission is checked first. */
@Override
public BundleContext getBundleContext() {
    SecurityManager.checkContextPermission(this);
    // Privileged because the action may touch framework state on behalf of the caller.
    return AccessController.doPrivileged(new GetBundleContextAction(this));
}

/** Child subsystems are tracked centrally by the Subsystems registry, not locally. */
@Override
public Collection<Subsystem> getChildren() {
    return Activator.getInstance().getSubsystems().getChildren(this);
}

/** Returns the subsystem manifest headers; metadata permission is checked first. */
@Override
public Map<String, String> getSubsystemHeaders(Locale locale) {
    SecurityManager.checkMetadataPermission(this);
    return AccessController.doPrivileged(new GetSubsystemHeadersAction(this));
}

/** The install location is persisted in the deployment manifest. */
@Override
public String getLocation() {
    SecurityManager.checkMetadataPermission(this);
    return getDeploymentManifestHeaderValue(DeploymentManifest.ARIESSUBSYSTEM_LOCATION);
}

/**
 * Resolves parents from the AriesSubsystem-Parents deployment header.
 * Parents whose id no longer resolves to a live subsystem are silently skipped.
 */
@Override
public Collection<Subsystem> getParents() {
    AriesSubsystemParentsHeader header = getDeploymentManifest().getAriesSubsystemParentsHeader();
    if (header == null)
        return Collections.emptyList();
    Collection<Subsystem> result = new ArrayList<Subsystem>(header.getClauses().size());
    for (AriesSubsystemParentsHeader.Clause clause : header.getClauses()) {
        BasicSubsystem subsystem = Activator.getInstance().getSubsystems().getSubsystemById(clause.getId());
        if (subsystem == null)
            continue;
        result.add(subsystem);
    }
    return result;
}

/** Constituents are tracked centrally by the Subsystems registry. */
@Override
public Collection<Resource> getConstituents() {
    return Activator.getInstance().getSubsystems().getConstituents(this);
}

/** State is persisted as a deployment manifest header and parsed on every read. */
@Override
public State getState() {
    return State.valueOf(getDeploymentManifestHeaderValue(DeploymentManifest.ARIESSUBSYSTEM_STATE));
}

/** The id is persisted as a deployment manifest header. */
@Override
public long getSubsystemId() {
    return Long.parseLong(getDeploymentManifestHeaderValue(DeploymentManifest.ARIESSUBSYSTEM_ID));
}

@Override
public String getSymbolicName() {
    return getSubsystemManifest().getSubsystemSymbolicNameHeader().getSymbolicName();
}

@Override
public String getType() {
    return getSubsystemManifest().getSubsystemTypeHeader().getType();
}

@Override
public Version getVersion() {
    return getSubsystemManifest().getSubsystemVersionHeader().getVersion();
}

/** Installs from a location string only; content is resolved by the framework. */
@Override
public AriesSubsystem install(String location) {
    return install(location, (InputStream)null);
}

/**
 * Installs from an explicit content stream. The stream is always closed,
 * whether or not the install succeeds.
 */
@Override
public AriesSubsystem install(String location, final InputStream content) {
    try {
        return install(location, content == null ? null : AccessController.doPrivileged(new PrivilegedAction<IDirectory>() {
            @Override
            public IDirectory run() {
                return FileSystem.getFSRoot(content);
            }
        }));
    }
    finally {
        // This method must guarantee the content input stream was closed.
        IOUtils.close(content);
    }
}

@Override
public void start() {
    SecurityManager.checkExecutePermission(this);
    // Changing the autostart setting must be privileged because of file IO.
    // It cannot be done within StartAction because we only want to change it
    // on an explicit start operation but StartAction is also used for
    // implicit operations.
    AccessController.doPrivileged(new PrivilegedAction<Object>() {
        @Override
        public Object run() {
            setAutostart(true);
            return null;
        }
    });
    AccessController.doPrivileged(new StartAction(this, this, this));
}

@Override
public void stop() {
    SecurityManager.checkExecutePermission(this);
    // Changing the autostart setting must be privileged because of file IO.
    // It cannot be done within StopAction because we only want to change it
    // on an explicit stop operation but StopAction is also used for
    // implicit operations.
    AccessController.doPrivileged(new PrivilegedAction<Object>() {
        @Override
        public Object run() {
            setAutostart(false);
            return null;
        }
    });
    AccessController.doPrivileged(new StopAction(this, this, !isRoot()));
}

@Override
public void uninstall() {
    SecurityManager.checkLifecyclePermission(this);
    AccessController.doPrivileged(new UninstallAction(this, this, false));
}
/* END Subsystem interface methods. */

/** Records a new constituent in the persisted deployment manifest. */
void addedConstituent(Resource resource, boolean referenced) {
    try {
        if (logger.isDebugEnabled())
            logger.debug("Adding constituent {} to deployment manifest...", resource);
        // Rebuild-and-replace keeps manifest mutation atomic under the instance lock.
        synchronized (this) {
            setDeploymentManifest(new DeploymentManifest.Builder()
                    .manifest(getDeploymentManifest()).content(resource, referenced).build());
        }
        if (logger.isDebugEnabled())
            logger.debug("Added constituent {} to deployment manifest", resource);
    }
    catch (Exception e) {
        throw new SubsystemException(e);
    }
}

/** Records a new parent in the persisted deployment manifest. */
void addedParent(BasicSubsystem subsystem, boolean referenceCount) {
    try {
        if (logger.isDebugEnabled())
            logger.debug("Adding parent {} to deployment manifest...", subsystem.getSymbolicName());
        synchronized (this) {
            setDeploymentManifest(new DeploymentManifest.Builder()
                    .manifest(getDeploymentManifest()).parent(subsystem, referenceCount).build());
        }
        if (logger.isDebugEnabled())
            logger.debug("Added parent {} to deployment manifest", subsystem.getSymbolicName());
    }
    catch (Exception e) {
        throw new SubsystemException(e);
    }
}

/** Lazily loads OSGI-INF/DEPLOYMENT.MF; wraps any failure in SubsystemException. */
synchronized DeploymentManifest getDeploymentManifest() {
    if (deploymentManifest == null) {
        try {
            deploymentManifest = new DeploymentManifest(directory.getFile("OSGI-INF/DEPLOYMENT.MF").open());
        }
        catch (Throwable t) {
            throw new SubsystemException(t);
        }
    }
    return deploymentManifest;
}

/** Converts the backing IDirectory into a java.io.File via its URL. */
File getDirectory() {
    try {
        return new File(directory.toURL().toURI());
    }
    catch (Exception e) {
        throw new SubsystemException(e);
    }
}

/** Looks up this subsystem's Equinox region by the persisted region name. */
org.eclipse.equinox.region.Region getRegion() {
    return Activator.getInstance().getRegionDigraph().getRegion(getRegionName());
}

/** Region name comes from the deployment manifest; null if the header is absent. */
String getRegionName() {
    DeploymentManifest manifest = getDeploymentManifest();
    Header<?> header = manifest.getHeaders().get(DeploymentManifest.ARIESSUBSYSTEM_REGION);
    if (header == null)
        return null;
    return header.getValue();
}

/**
 * Lazily builds the SubsystemResource from the backing directory.
 * Missing resources are tolerated for the root subsystem but fatal otherwise.
 */
synchronized SubsystemResource getResource() {
    if (resource == null) {
        try {
            resource = new SubsystemResource(directory);
        }
        catch (Exception e) {
            throw new SubsystemException(e);
        }
        Collection<DeployedContentHeader.Clause> missingResources = resource.getMissingResources();
        if (!missingResources.isEmpty()) {
            if (isRoot())
                // We don't care if the root subsystem has missing resources
                // because they are either (1) extraneous bundles outside of
                // the subsystems API or (2) provisioned dependencies of
                // other subsystems. Those that fall in the latter category
                // will be detected by the dependent subsystems.
                removedContent(missingResources);
            else
                // If a non-root subsystem has missing dependencies, let's
                // fail fast for now.
                throw new SubsystemException("Missing resources: " + missingResources);
        }
    }
    return resource;
}

/** Lazily loads OSGI-INF/SUBSYSTEM.MF; wraps any failure in SubsystemException. */
synchronized SubsystemManifest getSubsystemManifest() {
    if (subsystemManifest == null) {
        try {
            subsystemManifest = new SubsystemManifest(directory.getFile("OSGI-INF/SUBSYSTEM.MF").open());
        }
        catch (Throwable t) {
            throw new SubsystemException(t);
        }
    }
    return subsystemManifest;
}

boolean isApplication() {
    return getSubsystemManifest().getSubsystemTypeHeader().isApplication();
}

// NOTE(review): if the autostart header is ever absent, header.getValue()
// throws NPE here — confirm the header is always written at install time.
boolean isAutostart() {
    DeploymentManifest manifest = getDeploymentManifest();
    Header<?> header = manifest.getHeaders().get(DeploymentManifest.ARIESSUBSYSTEM_AUTOSTART);
    return Boolean.valueOf(header.getValue());
}

boolean isComposite() {
    return getSubsystemManifest().getSubsystemTypeHeader().isComposite();
}

boolean isFeature() {
    return getSubsystemManifest().getSubsystemTypeHeader().isFeature();
}

/** Ready when root, or when autostart and at least one parent is STARTING/ACTIVE. */
boolean isReadyToStart() {
    if (isRoot())
        return true;
    for (Subsystem parent : getParents())
        if (EnumSet.of(State.STARTING, State.ACTIVE).contains(parent.getState()) && isAutostart())
            return true;
    return false;
}

boolean isReferenced(Resource resource) {
    // Everything is referenced for the root subsystem during initialization.
    if (isRoot() && EnumSet.of(State.INSTALLING, State.INSTALLED).contains(getState()))
        return true;
    DeployedContentHeader header = getDeploymentManifest().getDeployedContentHeader();
    if (header == null)
        return false;
    return header.isReferenced(resource);
}

/** The root subsystem is identified by its well-known location string. */
boolean isRoot() {
    return ROOT_LOCATION.equals(getLocation());
}

/** Scoped = application or composite (features are unscoped). */
boolean isScoped() {
    return isApplication() || isComposite();
}

/** Removes a single resource's clause from the deployed-content header, if present. */
void removedContent(Resource resource) {
    DeploymentManifest manifest = getDeploymentManifest();
    DeployedContentHeader header = manifest.getDeployedContentHeader();
    if (header == null)
        return;
    DeployedContentHeader.Clause clause = header.getClause(resource);
    if (clause == null)
        return;
    removedContent(Collections.singleton(clause));
}

/**
 * Rewrites the deployment manifest with the given deployed-content clauses removed.
 * NOTE(review): the break below means only the FIRST matching clause is removed
 * even when 'content' contains several (as in getResource's missing-resources
 * path) — confirm this is intentional.
 */
synchronized void removedContent(Collection<DeployedContentHeader.Clause> content) {
    DeploymentManifest manifest = getDeploymentManifest();
    DeployedContentHeader header = manifest.getDeployedContentHeader();
    if (header == null)
        return;
    Collection<DeployedContentHeader.Clause> clauses = new ArrayList<DeployedContentHeader.Clause>(header.getClauses());
    for (Iterator<DeployedContentHeader.Clause> i = clauses.iterator(); i.hasNext();)
        if (content.contains(i.next())) {
            i.remove();
            break;
        }
    // Copy every header except the old deployed-content header, then re-add the
    // trimmed clause list (if any clauses survive).
    DeploymentManifest.Builder builder = new DeploymentManifest.Builder();
    for (Entry<String, Header<?>> entry : manifest.getHeaders().entrySet()) {
        if (DeployedContentHeader.NAME.equals(entry.getKey()))
            continue;
        builder.header(entry.getValue());
    }
    if (!clauses.isEmpty())
        builder.header(new DeployedContentHeader(clauses));
    try {
        setDeploymentManifest(builder.build());
    }
    catch (Exception e) {
        throw new SubsystemException(e);
    }
}

/** Persists the autostart flag into the deployment manifest. */
void setAutostart(boolean value) {
    try {
        synchronized (this) {
            setDeploymentManifest(new DeploymentManifest.Builder()
                    .manifest(getDeploymentManifest()).autostart(value).build());
        }
    }
    catch (Exception e) {
        throw new SubsystemException(e);
    }
}

/**
 * Replaces the in-memory manifest and either saves it immediately (no active
 * coordination) or defers the save to a SaveManifestParticipant registered on
 * the current coordination, batching all dirty subsystems per coordination.
 */
synchronized void setDeploymentManifest(DeploymentManifest value) throws IOException {
    deploymentManifest = value;
    Coordination coordination = Activator.getInstance().getCoordinator().peek();
    if (logger.isDebugEnabled())
        logger.debug("Setting deployment manifest for subsystem {} using coordination {}", getSymbolicName(), coordination == null ? null : coordination.getName());
    if (coordination == null) {
        saveDeploymentManifest();
    }
    else {
        Map<Class<?>, Object> variables = coordination.getVariables();
        // The coordination's variable map is shared; guard the check-then-act.
        synchronized (variables) {
            @SuppressWarnings("unchecked")
            Set<BasicSubsystem> dirtySubsystems = (Set<BasicSubsystem>) variables.get(SaveManifestParticipant.class);
            if (dirtySubsystems == null) {
                // currently no dirty subsystems found;
                // create a list to hold them and store it as a variable
                dirtySubsystems = new HashSet<BasicSubsystem>();
                variables.put(SaveManifestParticipant.class, dirtySubsystems);
                // add the save manifest participant
                coordination.addParticipant(new SaveManifestParticipant());
            }
            dirtySubsystems.add(this);
        }
    }
}

/** Writes the in-memory deployment manifest to OSGI-INF/DEPLOYMENT.MF on disk. */
synchronized void saveDeploymentManifest() throws IOException {
    File file = new File(getDirectory(), "OSGI-INF");
    if (!file.exists())
        file.mkdirs();
    BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(new File(file, "DEPLOYMENT.MF")));
    try {
        if (logger.isDebugEnabled())
            logger.debug("Writing deployment manifest for subsystem {} in state {}", getSymbolicName(), getState());
        deploymentManifest.write(out);
        if (logger.isDebugEnabled())
            logger.debug("Wrote deployment manifest for subsystem {} in state {}", getSymbolicName(), getState());
    }
    finally {
        IOUtils.close(out);
    }
}

/**
 * Persists a state transition, republishes the subsystem service, and wakes any
 * threads waiting (via wait()) for a state change. No-op if the state is unchanged.
 */
void setState(State value) {
    if (logger.isDebugEnabled())
        logger.debug("Setting state of subsystem {} to {}", getSymbolicName(), value);
    State state = getState();
    if (value.equals(state)) {
        if (logger.isDebugEnabled())
            logger.debug("Requested state {} equals current state {}", value, state);
        return;
    }
    try {
        if (logger.isDebugEnabled())
            logger.debug("Setting the deployment manifest...");
        synchronized (this) {
            setDeploymentManifest(new DeploymentManifest.Builder()
                    .manifest(getDeploymentManifest()).state(value).build());
        }
    }
    catch (Exception e) {
        throw new SubsystemException(e);
    }
    Activator.getInstance().getSubsystemServiceRegistrar().update(this);
    synchronized (this) {
        if (logger.isDebugEnabled())
            logger.debug("Notifying all waiting for state change of subsystem {}", getSymbolicName());
        notifyAll();
    }
}

/** Writes and caches a new OSGI-INF/SUBSYSTEM.MF; cache updated only on successful write. */
synchronized void setSubsystemManifest(SubsystemManifest value) throws URISyntaxException, IOException {
    File file = new File(getDirectory(), "OSGI-INF");
    if (!file.exists())
        file.mkdirs();
    FileOutputStream fos = new FileOutputStream(new File(file, "SUBSYSTEM.MF"));
    try {
        value.write(fos);
        subsystemManifest = value;
    }
    finally {
        IOUtils.close(fos);
    }
}

/** Null-safe lookup of a deployment manifest header's value. */
private String getDeploymentManifestHeaderValue(String name) {
    DeploymentManifest manifest = getDeploymentManifest();
    if (manifest == null)
        return null;
    Header<?> header = manifest.getHeaders().get(name);
    if (header == null)
        return null;
    return header.getValue();
}

/** Adds sharing requirements to this scoped subsystem's region boundary. */
@Override
public synchronized void addRequirements(Collection<Requirement> requirements) {
    // The root subsystem has no requirements (there is no parent to import from).
    if (isRoot())
        throw new UnsupportedOperationException("The root subsystem does not accept additional requirements");
    // Unscoped subsystems import everything already.
    if (!isScoped())
        return;
    RegionUpdater updater = new RegionUpdater(getRegion(), ((BasicSubsystem)getParents().iterator().next()).getRegion());
    try {
        updater.addRequirements(requirements);
    }
    catch (Exception e) {
        throw new SubsystemException(e);
    }
}

/** Installs from an explicit directory; captures the caller's access context. */
@Override
public AriesSubsystem install(String location, IDirectory content) {
    return AccessController.doPrivileged(new InstallAction(location, content, this, AccessController.getContext()));
}

/**
 * Coordination participant that flushes every dirty subsystem's deployment
 * manifest when the coordination ends successfully; on failure nothing is saved.
 */
private static class SaveManifestParticipant implements Participant {
    protected SaveManifestParticipant() {}

    @Override
    public void ended(Coordination coordination) throws Exception {
        if (logger.isDebugEnabled())
            logger.debug("Saving deployment manifests because coordination {} ended", coordination.getName());
        Map<Class<?>, Object> variables = coordination.getVariables();
        Set<BasicSubsystem> dirtySubsystems;
        // Remove the batch under the same lock setDeploymentManifest uses to add to it.
        synchronized (variables) {
            @SuppressWarnings("unchecked")
            Set<BasicSubsystem> temp = (Set<BasicSubsystem>) variables.remove(SaveManifestParticipant.class);
            dirtySubsystems = temp == null ? Collections.<BasicSubsystem>emptySet() : temp;
        }
        for (BasicSubsystem dirtySubsystem : dirtySubsystems) {
            if (logger.isDebugEnabled())
                logger.debug("Saving deployment manifest of subsystem {} for coordination {}", dirtySubsystem.getSymbolicName(), coordination.getName());
            dirtySubsystem.saveDeploymentManifest();
        }
    }

    @Override
    public void failed(Coordination coordination) throws Exception {
        // Do no saving
    }
}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.cache.query.internal;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.*;

import it.unimi.dsi.fastutil.objects.AbstractObjectIterator;
import it.unimi.dsi.fastutil.objects.ObjectIterator;
import it.unimi.dsi.fastutil.objects.ObjectOpenCustomHashSet;

import org.apache.geode.DataSerializer;
import org.apache.geode.cache.query.*;
import org.apache.geode.cache.query.internal.types.*;
import org.apache.geode.cache.query.types.*;
import org.apache.geode.internal.DataSerializableFixedID;
import org.apache.geode.internal.Version;
import org.apache.geode.internal.i18n.LocalizedStrings;

/**
 * A Set constrained to contain Structs of all the same type. To conserve on objects, we store the
 * StructType once and reuse it to generate Struct instances on demand.
 *
 * The values in this set are stored as Object[] and get wrapped in Structs as necessary.
 *
 * @since GemFire 4.0
 */
public class StructSet /* extends ObjectOpenCustomHashSet */ implements Set, SelectResults,
    DataSerializableFixedID, StructFields {
  private static final long serialVersionUID = -1228835506930611510L;

  // Shared type descriptor for every element; elements themselves are stored
  // as raw Object[] field-value arrays and wrapped in StructImpl on demand.
  protected StructType structType;

  /**
   * Holds value of property modifiable.
   */
  private boolean modifiable = true;

  /**
   * Holds the actual contents of the StructSet
   */
  private ObjectOpenCustomHashSet contents;

  /**
   * Empty constructor to satisfy <code>DataSerializer</code> requirements
   */
  public StructSet() {}

  /**
   * This implementation uses Arrays.equals(Object[]) as its hashing strategy.
   */
  protected static class ObjectArrayHashingStrategy implements ObjectOpenCustomHashSet.Strategy {
    private static final long serialVersionUID = -6407549977968716071L;

    public int hashCode(Object o) {
      // throws ClassCastException if not Object[]
      // compute hash code based on all elements
      // NOTE: order-insensitive sum of element hashes (nulls contribute 0).
      Object[] oa = (Object[]) o;
      int h = 0;
      for (int i = 0; i < oa.length; i++) {
        Object obj = oa[i];
        if (obj != null)
          h += obj.hashCode();
      }
      return h;
    }

    public boolean equals(Object o1, Object o2) {
      // throws ClassCastException if not Object[]
      if (o1 == null)
        return o2 == null;
      if (!(o1 instanceof Object[]) || !(o2 instanceof Object[])) {
        return o1.equals(o2);
      }
      return Arrays.equals((Object[]) o1, (Object[]) o2);
    }
  }

  /** Creates a new instance of StructSet */
  public StructSet(StructType structType) {
    this.contents = new ObjectOpenCustomHashSet(new ObjectArrayHashingStrategy());
    if (structType == null) {
      throw new IllegalArgumentException(
          LocalizedStrings.StructSet_STRUCTTYPE_MUST_NOT_BE_NULL.toLocalizedString());
    }
    this.structType = structType;
  }

  /** takes collection of Object[] fieldValues *or* another StructSet */
  public StructSet(Collection c, StructType structType) {
    this.contents = new ObjectOpenCustomHashSet(c, new ObjectArrayHashingStrategy());
    if (structType == null) {
      throw new IllegalArgumentException(
          LocalizedStrings.StructSet_STRUCTTYPE_MUST_NOT_BE_NULL.toLocalizedString());
    }
    this.structType = structType;
  }

  /**
   * Creates a StructSet directly from a StructBag; (internal use)
   *
   * @since GemFire 5.1
   */
  StructSet(StructBag bag) {
    this.contents = new ObjectOpenCustomHashSet(new ObjectArrayHashingStrategy());
    this.structType = (StructType) bag.elementType;
    if (bag.hasLimitIterator) {
      // Asif: Since the number of unique keys which
      // will be returned by Bag with limit in place
      // cannot be more than the size of the bag ( i.e
      // the limit) , we can safely assume HashMap size
      // to equal to bag's size
      Iterator itr = bag.fieldValuesIterator();
      while (itr.hasNext()) {
        addFieldValues((Object[]) itr.next());
      }
    } else {
      Set keys = bag.map.keySet();
      for (Object key : keys) {
        addFieldValues((Object[]) key);
      }
    }
  }

  public StructSet(int initialCapacity, StructType structType) {
    this.contents = new ObjectOpenCustomHashSet(initialCapacity, new ObjectArrayHashingStrategy());
    if (structType == null) {
      throw new IllegalArgumentException(
          LocalizedStrings.StructSet_STRUCTTYPE_MUST_NOT_BE_NULL.toLocalizedString());
    }
    this.structType = structType;
  }

  public StructSet(int initialCapacity, float loadFactor, StructType structType) {
    this.contents =
        new ObjectOpenCustomHashSet(initialCapacity, loadFactor, new ObjectArrayHashingStrategy());
    if (structType == null) {
      throw new IllegalArgumentException(
          LocalizedStrings.StructSet_STRUCTTYPE_MUST_NOT_BE_NULL.toLocalizedString());
    }
    this.structType = structType;
  }

  // NOTE(review): equality requires the other object to be exactly StructSet
  // (subclasses compare unequal), same structType, and equal contents.
  @Override
  public boolean equals(Object other) {
    if (!(other instanceof StructSet)) {
      return false;
    }
    if (!this.structType.equals(((StructSet) other).structType)) {
      return false;
    }
    if (other.getClass() == StructSet.class) {
      return this.contents.equals(((StructSet) other).contents);
    } else {
      return false;
    }
  }

  // NOTE(review): hash is based on the element type only, not the contents.
  // Consistent with equals (equal sets share a type) but collides heavily for
  // sets of the same type — presumably intentional; confirm before changing.
  @Override
  public int hashCode() {
    return this.structType.hashCode();
  }

  /** Add a Struct */
  @Override
  public boolean add(Object obj) {
    if (!(obj instanceof StructImpl)) {
      throw new IllegalArgumentException(
          LocalizedStrings.StructSet_THIS_SET_ONLY_ACCEPTS_STRUCTIMPL.toLocalizedString());
    }
    StructImpl s = (StructImpl) obj;
    if (!s.getStructType().equals(this.structType)) {
      throw new IllegalArgumentException(
          LocalizedStrings.StructSet_OBJ_DOES_NOT_HAVE_THE_SAME_STRUCTTYPE_REQUIRED_0_ACTUAL_1
              .toLocalizedString(new Object[] {this.structType, s.getStructType()}));
    }
    return addFieldValues(s.getFieldValues());
  }

  /**
   * For internal use. Just add the Object[] values for a struct with same type
   */
  public boolean addFieldValues(Object[] fieldValues) {
    return this.contents.add(fieldValues);
  }

  /** Does this set contain specified struct? */
  @Override
  public boolean contains(Object obj) {
    if (!(obj instanceof Struct)) {
      return false;
    }
    Struct s = (Struct) obj;
    if (!this.structType.equals(StructTypeImpl.typeFromStruct(s))) {
      return false;
    }
    return containsFieldValues(s.getFieldValues());
  }

  /**
   * Does this set contain a Struct of the correct type with the specified values?
   */
  public boolean containsFieldValues(Object[] fieldValues) {
    return this.contents.contains(fieldValues);
  }

  /** Remove the specified Struct */
  public boolean removeEntry(Object o) {
    if (!(o instanceof Struct)) {
      return false;
    }
    Struct s = (Struct) o;
    if (!this.structType.equals(StructTypeImpl.typeFromStruct(s))) {
      return false;
    }
    return removeFieldValues(s.getFieldValues());
  }

  /** Remove the field values from a struct of the correct type */
  public boolean removeFieldValues(Object[] fieldValues) {
    return this.contents.remove(fieldValues);
  }

  // downcast StructSets to call more efficient methods
  public boolean addAll(Collection c) {
    if (c instanceof StructSet) {
      return addAll((StructSet) c);
    } else {
      boolean modified = false;
      for (Object o : c) {
        modified |= add(o);
      }
      return modified;
    }
  }

  public boolean removeAll(Collection c) {
    if (c instanceof StructSet) {
      return removeAll((StructSet) c);
    } else {
      boolean modified = false;
      for (Object o : c) {
        modified |= remove(o);
      }
      return modified;
    }
  }

  public boolean retainAll(Collection c) {
    if (c instanceof StructSet) {
      return retainAll((StructSet) c);
    }
    return this.contents.retainAll(c);
  }

  /** Bulk add from another StructSet; the types must match exactly. */
  public boolean addAll(StructSet ss) {
    boolean modified = false;
    if (!this.structType.equals(ss.structType)) {
      throw new IllegalArgumentException(
          LocalizedStrings.StructSet_TYPES_DONT_MATCH.toLocalizedString());
    }
    for (Iterator itr = ss.fieldValuesIterator(); itr.hasNext();) {
      Object[] vals = (Object[]) itr.next();
      if (this.contents.add(vals)) {
        modified = true;
      }
    }
    return modified;
  }

  /** Bulk remove; a type mismatch is a silent no-op rather than an error. */
  public boolean removeAll(StructSet ss) {
    boolean modified = false;
    if (!this.structType.equals(ss.structType)) {
      return false; // nothing
      // modified
    }
    for (Iterator itr = ss.fieldValuesIterator(); itr.hasNext();) {
      Object[] vals = (Object[]) itr.next();
      if (this.contents.remove(vals)) {
        modified = true;
      }
    }
    return modified;
  }

  /** Retain only elements present in ss; a type mismatch clears this set entirely. */
  public boolean retainAll(StructSet ss) {
    if (!this.structType.equals(ss.structType)) {
      if (isEmpty()) {
        return false; // nothing modified
      } else {
        clear();
        return true; // nothing retained in receiver collection
      }
    }
    boolean changed = false;
    int size = size();
    Iterator it;
    it = fieldValuesIterator();
    while (size-- > 0) {
      Object[] vals = (Object[]) it.next();
      if (!ss.containsFieldValues(vals)) {
        it.remove();
        changed = true;
      }
    }
    return changed;
  }

  /** Returns an Iterator over the Structs in this set */
  @Override
  public ObjectIterator iterator() {
    return new StructIterator(fieldValuesIterator());
  }

  /** Returns an iterator over the fieldValues Object[] instances */
  public Iterator fieldValuesIterator() {
    return this.contents.iterator();
  }

  public CollectionType getCollectionType() {
    return new CollectionTypeImpl(StructSet.class, this.structType);
  }

  // note: this method is dangerous in that it could result in undefined
  // behavior if the new struct type is not compatible with the data.
  // For now just trust that the application knows what it is doing if it
  // is overriding the element type in a set of structs
  public void setElementType(ObjectType elementType) {
    if (!(elementType instanceof StructTypeImpl)) {
      throw new IllegalArgumentException(
          LocalizedStrings.StructSet_ELEMENT_TYPE_MUST_BE_STRUCT.toLocalizedString());
    }
    this.structType = (StructType) elementType;
  }

  public List asList() {
    return new ArrayList(this);
  }

  public Set asSet() {
    return this;
  }

  /**
   * Getter for property modifiable.
   *
   * @return Value of property modifiable.
   */
  public boolean isModifiable() {
    return this.modifiable;
  }

  // A set holds at most one copy of an element, so occurrences is 0 or 1.
  public int occurrences(Object element) {
    return contains(element) ? 1 : 0;
  }

  /**
   * Setter for property modifiable.
   *
   * @param modifiable New value of property modifiable.
   */
  public void setModifiable(boolean modifiable) {
    this.modifiable = modifiable;
  }

  @Override
  public String toString() {
    StringBuffer buf = new StringBuffer();
    buf.append("[");
    Iterator i = iterator();
    boolean hasNext = i.hasNext();
    while (hasNext) {
      Object o = i.next();
      // Guard against self-containing collection recursion, as AbstractCollection does.
      buf.append(o == this ? "(this Collection)" : String.valueOf(o));
      hasNext = i.hasNext();
      if (hasNext)
        buf.append(", ");
    }
    buf.append("]");
    return buf.toString();
  }

  /**
   * Iterator wrapper to construct Structs on demand.
   */
  private class StructIterator extends AbstractObjectIterator {
    private final Iterator itr;

    /**
     * @param itr iterator over the Object[] instances of fieldValues
     */
    StructIterator(Iterator itr) {
      this.itr = itr;
    }

    public boolean hasNext() {
      return this.itr.hasNext();
    }

    public Object next() {
      return new StructImpl((StructTypeImpl) StructSet.this.structType,
          (Object[]) this.itr.next());
    }

    public void remove() {
      this.itr.remove();
    }
  }

  public int getDSFID() {
    return STRUCT_SET;
  }

  // Wire format (must mirror toData): int size, then structType, then
  // 'size' serialized Struct elements which are re-added via add().
  public void fromData(DataInput in) throws IOException, ClassNotFoundException {
    this.contents = new ObjectOpenCustomHashSet(new ObjectArrayHashingStrategy());
    int size = in.readInt();
    this.structType = (StructTypeImpl) DataSerializer.readObject(in);
    for (int j = size; j > 0; j--) {
      this.add(DataSerializer.readObject(in));
    }
  }

  public void toData(DataOutput out) throws IOException {
    out.writeInt(this.size());
    DataSerializer.writeObject(this.structType, out);
    for (Iterator i = this.iterator(); i.hasNext();) {
      DataSerializer.writeObject(i.next(), out);
    }
  }

  @Override
  public Version[] getSerializationVersions() {
    return null;
  }

  @Override
  public int size() {
    return this.contents.size();
  }

  @Override
  public boolean isEmpty() {
    return this.contents.isEmpty();
  }

  @Override
  public Object[] toArray() {
    Struct[] structs = new Struct[this.contents.size()];
    int i = 0;
    for (Iterator iter = this.iterator(); iter.hasNext();) {
      structs[i++] = (Struct) iter.next();
    }
    return structs;
  }

  // NOTE(review): if a.length > size(), contents.toArray pads with nulls and
  // the loop below wraps each trailing null into a StructImpl with null field
  // values instead of leaving it null — confirm callers never pass an
  // oversized array.
  @Override
  public Object[] toArray(Object[] a) {
    Object[] array = this.contents.toArray(a);
    int i = 0;
    for (Object o : array) {
      array[i++] = new StructImpl((StructTypeImpl) this.structType, (Object[]) o);
    }
    return array;
  }

  @Override
  public boolean remove(Object o) {
    // Accepts either a Struct (unwrapped to its field values) or a raw Object[].
    if (o instanceof Struct) {
      o = ((Struct) o).getFieldValues();
    }
    return this.contents.remove(o);
  }

  @Override
  public boolean containsAll(Collection c) {
    // TODO: Asif : This is wrong, we need to fix this.
    // NOTE(review): delegates on raw elements, so Struct elements in c are not
    // unwrapped to field-value arrays the way contains(Object) does.
    return this.contents.containsAll(c);
  }

  @Override
  public void clear() {
    this.contents.clear();
  }
}
/*
Copyright 2009 Wallace Wadge

This file is part of BoneCP.

BoneCP is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

BoneCP is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with BoneCP.  If not, see <http://www.gnu.org/licenses/>.
*/

package org.itas.core.dbpool;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.util.Properties;

import javax.sql.DataSource;

import org.easymock.EasyMock;
import org.junit.BeforeClass;
import org.junit.Test;

import com.jolbox.bonecp.CommonTestUtils;

/** Tests config object.
 * @author wwadge
 *
 */
public class TestBoneCPConfig {
	/** Config handle, shared across tests (mutated by the setter/sanitize tests). */
	static BoneCPConfig config;

	/** Stub out any calls to logger.
	 * @throws SecurityException
	 * @throws NoSuchFieldException
	 * @throws IllegalArgumentException
	 * @throws IllegalAccessException
	 * @throws CloneNotSupportedException
	 */
	@BeforeClass
	public static void setup() throws SecurityException, NoSuchFieldException, IllegalArgumentException, IllegalAccessException, CloneNotSupportedException{
		config = CommonTestUtils.getConfigClone();
	}

	/** Tests configs using xml setups. Expected value 99 comes from the
	 * "specialApp" section of the bonecp-config.xml fixture on the classpath.
	 * @throws Exception
	 */
	@Test
	public void testXMLConfig() throws Exception{
		// read off from the default bonecp-config.xml
		BoneCPConfig config = new BoneCPConfig("specialApp");
		assertEquals(99, config.getMinConnectionsPerPartition());
	}

	/** Tests configs using xml setups (second named section of the fixture).
	 * @throws Exception
	 */
	@Test
	public void testXMLConfig2() throws Exception{
		// read off from the default bonecp-config.xml
		BoneCPConfig config = new BoneCPConfig("specialApp2");
		assertEquals(123, config.getMinConnectionsPerPartition());
	}

	/**
	 * Load properties via a given stream.
	 * @throws Exception
	 */
	@Test
	public void testXmlConfigViaInputStream() throws Exception{
		// read off from an input stream
		BoneCPConfig config = new BoneCPConfig(this.getClass().getResourceAsStream("/bonecp-config.xml"), "specialApp");
		assertEquals(99, config.getMinConnectionsPerPartition());
	}

	/** XML based config: an unknown section name falls back to the defaults.
	 * @throws Exception
	 */
	@Test
	public void testXMLConfigWithUnfoundSection() throws Exception{
		BoneCPConfig config = new BoneCPConfig("non-existant");
		assertEquals(20, config.getMinConnectionsPerPartition());
	}

	/**
	 * Test error condition for xml config: a null stream must be rejected.
	 */
	@Test
	public void testXmlConfigWithInvalidStream(){
		// throw errors
		try{
			new BoneCPConfig(null, "specialApp");
			fail("Should have thrown an exception");
		}catch (Exception e){
			// do nothing -- expected path
		}
	}

	/** Tests property-based config: plain and "bonecp."-prefixed keys are both
	 * honoured, while unparseable numeric values are ignored without error.
	 * @throws Exception
	 */
	@Test
	public void testPropertyBasedConfig() throws Exception{
		Properties props = new Properties();
		props.setProperty("minConnectionsPerPartition", "123");
		props.setProperty("bonecp.maxConnectionsPerPartition", "456");
		props.setProperty("idleConnectionTestPeriod", "999");
		props.setProperty("username", "test");
		props.setProperty("partitionCount", "an int which is invalid");
		props.setProperty("idleMaxAge", "a long which is invalid");
		BoneCPConfig config = new BoneCPConfig(props);
		assertEquals(123, config.getMinConnectionsPerPartition());
		assertEquals(456, config.getMaxConnectionsPerPartition());
	}

	/**
	 * Property get/set round-trip for every exposed setting (including the
	 * deprecated aliases, hence the suppression).
	 */
	@SuppressWarnings("deprecation")
	@Test
	public void testGettersSetters(){
		Properties driverProperties = new Properties();
		DataSource mockDataSource = EasyMock.createNiceMock(DataSource.class);
		config.setJdbcUrl(CommonTestUtils.url);
		config.setUsername(CommonTestUtils.username);
		config.setPassword(CommonTestUtils.password);
		config.setIdleConnectionTestPeriod(1);
		config.setIdleMaxAge(1);
		config.setStatementsCacheSize(2);
		config.setReleaseHelperThreads(3);
		config.setMaxConnectionsPerPartition(5);
		config.setMinConnectionsPerPartition(5);
		config.setPartitionCount(1);
		config.setConnectionTestStatement("test");
		config.setAcquireIncrement(6);
		config.setStatementsCachedPerConnection(7);
		config.setPreparedStatementsCacheSize(2);
		config.setStatementCacheSize(2);
		config.setPoolName("foo");
		config.setDisableJMX(false);
		config.setDatasourceBean(mockDataSource);
		config.setQueryExecuteTimeLimit(123);
		config.setDisableConnectionTracking(true);
		config.setConnectionTimeout(9999);
		config.setDriverProperties(driverProperties);
		assertEquals("foo", config.getPoolName());
		assertEquals(CommonTestUtils.url, config.getJdbcUrl());
		assertEquals(CommonTestUtils.username, config.getUsername());
		assertEquals(CommonTestUtils.password, config.getPassword());
		// NOTE(review): minutes-based setters appear to be stored in ms
		// internally (1 -> 60*1000) — confirm against BoneCPConfig.
		assertEquals(60*1000, config.getIdleConnectionTestPeriod());
		assertEquals(60*1000, config.getIdleMaxAge());
		assertEquals(2, config.getStatementsCacheSize());
		assertEquals(2, config.getStatementCacheSize());
		assertEquals(2, config.getPreparedStatementsCacheSize());
		assertEquals(3, config.getReleaseHelperThreads());
		assertEquals(5, config.getMaxConnectionsPerPartition());
		assertEquals(5, config.getMinConnectionsPerPartition());
		assertEquals(6, config.getAcquireIncrement());
		assertEquals(9999, config.getConnectionTimeout());
		assertEquals(true, config.isDisableConnectionTracking());
		assertEquals(7, config.getStatementsCachedPerConnection());
		assertEquals(123, config.getQueryExecuteTimeLimit());
		assertEquals(1, config.getPartitionCount());
		assertEquals("test", config.getConnectionTestStatement());
		assertEquals(mockDataSource, config.getDatasourceBean());
		assertEquals(driverProperties, config.getDriverProperties());
	}

	/**
	 * Config file scrubbing: sanitize() must replace every invalid value with a
	 * usable default, and cap min connections at max connections.
	 */
	@Test
	public void testConfigSanitize(){
		config.setMaxConnectionsPerPartition(-1);
		config.setMinConnectionsPerPartition(-1);
		config.setPartitionCount(-1);
		config.setStatementsCacheSize(-1);
		config.setConnectionTestStatement("");
		config.setJdbcUrl(null);
		config.setUsername(null);
		config.setAcquireIncrement(0);
		config.setPassword(null);
		config.setPoolAvailabilityThreshold(-50);
		config.setReleaseHelperThreads(-1);
		config.sanitize();

		assertNotNull(config.toString());
		assertFalse(config.getAcquireIncrement() == 0);
		assertFalse(config.getReleaseHelperThreads() == -1);
		assertFalse(config.getMaxConnectionsPerPartition() == -1);
		assertFalse(config.getMinConnectionsPerPartition() == -1);
		assertFalse(config.getPartitionCount() == -1);
		assertFalse(config.getStatementsCacheSize() == -1);

		// min > max is clamped down to max
		config.setMinConnectionsPerPartition(config.getMaxConnectionsPerPartition()+1);
		config.sanitize();
		assertEquals(config.getMinConnectionsPerPartition(), config.getMaxConnectionsPerPartition());
		assertEquals(20, config.getPoolAvailabilityThreshold());
	}

	/**
	 * Tests that setting driver properties handles username/password correctly:
	 * the pool-level credentials win over (or are copied into) driver properties.
	 */
	@Test
	public void testDriverPropertiesConfigSanitize(){
		config.setDatasourceBean(null);
		config.setUsername("foo");
		config.setPassword("bar");
		config.setMaxConnectionsPerPartition(2);
		config.setMinConnectionsPerPartition(2);
		config.setJdbcUrl("test");
		config.sanitize();
		Properties props = new Properties();
		props.setProperty("user", "something different");
		props.setProperty("password", "something different");
		config.setDriverProperties(props);
		config.sanitize();
		// if they don't match, the pool config wins
		assertEquals("foo", config.getDriverProperties().getProperty("user"));
		assertEquals("bar", config.getDriverProperties().getProperty("password"));

		config.setDriverProperties(new Properties());
		config.sanitize();
		// if not found, copied over from pool config
		assertEquals("foo", config.getDriverProperties().getProperty("user"));
		assertEquals("bar", config.getDriverProperties().getProperty("password"));

		config.setUsername(null);
		config.setPassword(null);
		config.setDriverProperties(new Properties());
		config.sanitize();
		// if not set, should be blanked out
		assertEquals("", config.getUsername());
		assertEquals("", config.getPassword());
	}

	/**
	 * Tests general methods: clone/equals/hashCode contract of the config object.
	 * @throws CloneNotSupportedException
	 */
	@Test
	public void testCloneEqualsHashCode() throws CloneNotSupportedException{
		BoneCPConfig clone = config.clone();
		assertTrue(clone.equals(config));
		assertEquals(clone.hashCode(), config.hashCode());

		assertFalse(clone.equals(null));
		assertTrue(clone.equals(clone));

		clone.setJdbcUrl("something else");
		assertFalse(clone.equals(config));
	}
}
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.simplesystemsmanagement.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * A summary of compliance information by compliance type. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ssm-2014-11-06/ComplianceSummaryItem" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ComplianceSummaryItem implements Serializable, Cloneable, StructuredPojo { /** * <p> * The type of compliance item. For example, the compliance type can be Association, Patch, or Custom:string. * </p> */ private String complianceType; /** * <p> * A list of COMPLIANT items for the specified compliance type. * </p> */ private CompliantSummary compliantSummary; /** * <p> * A list of NON_COMPLIANT items for the specified compliance type. * </p> */ private NonCompliantSummary nonCompliantSummary; /** * <p> * The type of compliance item. For example, the compliance type can be Association, Patch, or Custom:string. * </p> * * @param complianceType * The type of compliance item. For example, the compliance type can be Association, Patch, or Custom:string. */ public void setComplianceType(String complianceType) { this.complianceType = complianceType; } /** * <p> * The type of compliance item. 
For example, the compliance type can be Association, Patch, or Custom:string. * </p> * * @return The type of compliance item. For example, the compliance type can be Association, Patch, or * Custom:string. */ public String getComplianceType() { return this.complianceType; } /** * <p> * The type of compliance item. For example, the compliance type can be Association, Patch, or Custom:string. * </p> * * @param complianceType * The type of compliance item. For example, the compliance type can be Association, Patch, or Custom:string. * @return Returns a reference to this object so that method calls can be chained together. */ public ComplianceSummaryItem withComplianceType(String complianceType) { setComplianceType(complianceType); return this; } /** * <p> * A list of COMPLIANT items for the specified compliance type. * </p> * * @param compliantSummary * A list of COMPLIANT items for the specified compliance type. */ public void setCompliantSummary(CompliantSummary compliantSummary) { this.compliantSummary = compliantSummary; } /** * <p> * A list of COMPLIANT items for the specified compliance type. * </p> * * @return A list of COMPLIANT items for the specified compliance type. */ public CompliantSummary getCompliantSummary() { return this.compliantSummary; } /** * <p> * A list of COMPLIANT items for the specified compliance type. * </p> * * @param compliantSummary * A list of COMPLIANT items for the specified compliance type. * @return Returns a reference to this object so that method calls can be chained together. */ public ComplianceSummaryItem withCompliantSummary(CompliantSummary compliantSummary) { setCompliantSummary(compliantSummary); return this; } /** * <p> * A list of NON_COMPLIANT items for the specified compliance type. * </p> * * @param nonCompliantSummary * A list of NON_COMPLIANT items for the specified compliance type. 
*/ public void setNonCompliantSummary(NonCompliantSummary nonCompliantSummary) { this.nonCompliantSummary = nonCompliantSummary; } /** * <p> * A list of NON_COMPLIANT items for the specified compliance type. * </p> * * @return A list of NON_COMPLIANT items for the specified compliance type. */ public NonCompliantSummary getNonCompliantSummary() { return this.nonCompliantSummary; } /** * <p> * A list of NON_COMPLIANT items for the specified compliance type. * </p> * * @param nonCompliantSummary * A list of NON_COMPLIANT items for the specified compliance type. * @return Returns a reference to this object so that method calls can be chained together. */ public ComplianceSummaryItem withNonCompliantSummary(NonCompliantSummary nonCompliantSummary) { setNonCompliantSummary(nonCompliantSummary); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getComplianceType() != null) sb.append("ComplianceType: ").append(getComplianceType()).append(","); if (getCompliantSummary() != null) sb.append("CompliantSummary: ").append(getCompliantSummary()).append(","); if (getNonCompliantSummary() != null) sb.append("NonCompliantSummary: ").append(getNonCompliantSummary()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof ComplianceSummaryItem == false) return false; ComplianceSummaryItem other = (ComplianceSummaryItem) obj; if (other.getComplianceType() == null ^ this.getComplianceType() == null) return false; if (other.getComplianceType() != null && other.getComplianceType().equals(this.getComplianceType()) == false) return false; if (other.getCompliantSummary() == null ^ this.getCompliantSummary() == null) return false; if (other.getCompliantSummary() != null && other.getCompliantSummary().equals(this.getCompliantSummary()) == false) return false; if (other.getNonCompliantSummary() == null ^ this.getNonCompliantSummary() == null) return false; if (other.getNonCompliantSummary() != null && other.getNonCompliantSummary().equals(this.getNonCompliantSummary()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getComplianceType() == null) ? 0 : getComplianceType().hashCode()); hashCode = prime * hashCode + ((getCompliantSummary() == null) ? 0 : getCompliantSummary().hashCode()); hashCode = prime * hashCode + ((getNonCompliantSummary() == null) ? 
0 : getNonCompliantSummary().hashCode()); return hashCode; } @Override public ComplianceSummaryItem clone() { try { return (ComplianceSummaryItem) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.simplesystemsmanagement.model.transform.ComplianceSummaryItemMarshaller.getInstance().marshall(this, protocolMarshaller); } }
/* * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. * * Copyright 1997-2010 Oracle and/or its affiliates. All rights reserved. * * Oracle and Java are registered trademarks of Oracle and/or its affiliates. * Other names may be trademarks of their respective owners. * * The contents of this file are subject to the terms of either the GNU * General Public License Version 2 only ("GPL") or the Common * Development and Distribution License("CDDL") (collectively, the * "License"). You may not use this file except in compliance with the * License. You can obtain a copy of the License at * http://www.netbeans.org/cddl-gplv2.html * or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the * specific language governing permissions and limitations under the * License. When distributing the software, include this License Header * Notice in each file and include the License file at * nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the GPL Version 2 section of the License file that * accompanied this code. If applicable, add the following below the * License Header, with the fields enclosed by brackets [] replaced by * your own identifying information: * "Portions Copyrighted [year] [name of copyright owner]" * * Contributor(s): * * The Original Software is NetBeans. The Initial Developer of the Original * Software is Sun Microsystems, Inc. Portions Copyright 1997-2006 Sun * Microsystems, Inc. All Rights Reserved. * * If you wish your version of this file to be governed by only the CDDL * or only the GPL Version 2, indicate your decision by adding * "[Contributor] elects to include this software in this distribution * under the [CDDL or GPL Version 2] license." 
If you do not indicate a * single choice of license, a recipient has the option to distribute * your version of this file under either the CDDL, the GPL Version 2 or * to extend the choice of license to its licensees as provided above. * However, if you add GPL Version 2 code and therefore, elected the GPL * Version 2 license, then the option applies only if the new code is * made subject to such option by the copyright holder. */ package com.bearsoft.org.netbeans.modules.form; import com.bearsoft.org.netbeans.modules.form.actions.*; import com.eas.client.forms.Form; import com.eas.design.Designable; import com.eas.design.Undesignable; import com.eas.script.ScriptFunction; import java.awt.datatransfer.Transferable; import java.beans.BeanInfo; import java.beans.Introspector; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.beans.PropertyDescriptor; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.ResourceBundle; import java.util.TreeMap; import javax.swing.Action; import org.openide.actions.PasteAction; import org.openide.actions.ReorderAction; import org.openide.nodes.*; import org.openide.util.Exceptions; import org.openide.util.NbBundle; import org.openide.util.actions.SystemAction; import org.openide.util.datatransfer.PasteType; /** * This class represents the root node of the form (displayed as root in * Component Inspector). 
* * @author Tomas Pavek */ public class FormRootNode extends FormNode { private Map<String, FormProperty<?>> propsByName; private List<PropertySet> propSets; private final PropertyChangeListener propsListener = (PropertyChangeEvent evt) -> { if (evt.getSource() instanceof FormRootProperty<?> && FormProperty.PROP_VALUE.equals(evt.getPropertyName())) { formModel.fireFormPropertyChanged(FormRootNode.this, ((FormRootProperty<?>)evt.getSource()).getName(), evt.getOldValue(), evt.getNewValue()); } }; public FormRootNode(FormModel aFormModel) { super(new RootChildren(aFormModel), aFormModel); setName("Form Root Node"); // NOI18N setIconBaseWithExtension("com/bearsoft/org/netbeans/modules/form/resources/formDesigner.gif"); // NOI18N updateName(formModel.getName()); } public FormProperty<?> getProperty(String propertyName) { return propsByName.get(propertyName); } public FormProperty[] getFormProperties(){ checkPropertiesSets(); return propsByName.values().toArray(new FormProperty[]{}); } @Override public PropertySet[] getPropertySets() { checkPropertiesSets(); return propSets.toArray(new PropertySet[]{}); } protected void checkPropertiesSets() { if (propSets == null) { try { propSets = new ArrayList<>(); propsByName = new HashMap<>(); Map<String, List<FormProperty<?>>> propsByCategory = new HashMap<>(); BeanInfo bi = Introspector.getBeanInfo(Form.class, java.beans.Introspector.IGNORE_ALL_BEANINFO); for (PropertyDescriptor descriptor : bi.getPropertyDescriptors()) { if (descriptor.getReadMethod() != null && descriptor.getWriteMethod() != null) { Designable designable = descriptor.getReadMethod().getAnnotation(Designable.class); ScriptFunction scriptFunction = descriptor.getReadMethod().getAnnotation(ScriptFunction.class); if (designable == null) { designable = descriptor.getWriteMethod().getAnnotation(Designable.class); } if (scriptFunction == null) { scriptFunction = descriptor.getWriteMethod().getAnnotation(ScriptFunction.class); } if ((designable != null || 
scriptFunction != null) && !descriptor.getReadMethod().isAnnotationPresent(Undesignable.class) && !descriptor.getWriteMethod().isAnnotationPresent(Undesignable.class)) { String category = "general"; if (designable != null && designable.category() != null && !designable.category().isEmpty()) { category = designable.category(); } List<FormProperty<?>> catProps = propsByCategory.get(category); if (catProps == null) { catProps = new ArrayList<>(); propsByCategory.put(category, catProps); } FormProperty<?> prop = new FormRootProperty<>(formModel, descriptor); prop.addPropertyChangeListener(propsListener); catProps.add(prop); propsByName.put(prop.getName(), prop); } } } final ResourceBundle bundle = FormUtils.getBundle(); propsByCategory.entrySet().stream().forEach((Map.Entry<String, List<FormProperty<?>>> aEntry) -> { final String category = aEntry.getKey(); final List<FormProperty<?>> props = aEntry.getValue(); if (props.size() > 0) { propSets.add(new Node.PropertySet(category, bundle.getString("CTL_" + category), bundle.getString("CTL_" + category + "Hint")) { @Override public Node.Property<?>[] getProperties() { return props.toArray(new Node.Property<?>[]{}); } }); } }); } catch (Exception ex) { Exceptions.printStackTrace(ex); } } } @Override public boolean canRename() { return false; } @Override public boolean canDestroy() { return false; } @Override public Action[] getActions(boolean context) { if (actions == null) { // from AbstractNode List<Action> l = new ArrayList<>(); if (isModifiableContainer()) { l.add(SystemAction.get(AddAction.class)); l.add(null); l.add(SystemAction.get(PasteAction.class)); l.add(null); l.add(SystemAction.get(ReorderAction.class)); l.add(null); } l.add(null); l.addAll(Arrays.asList(super.getActions(context))); actions = l.toArray(new Action[l.size()]); } return actions; } void updateName(String name) { setDisplayName(FormUtils.getFormattedBundleString("FMT_FormNodeName", // NOI18N new Object[]{name})); } FormOthersNode getOthersNode() { 
return ((RootChildren) getChildren()).othersNode; } @Override protected void createPasteTypes(Transferable t, java.util.List<PasteType> s) { if (isModifiableContainer()) { CopySupport.createPasteTypes(t, s, formModel, null); } } /** * Returns whether "other components" can be added under this node (i.e. * there is no Other Components node, the components appear directly under * root node). */ private boolean isModifiableContainer() { return !formModel.isReadOnly() && !shouldHaveOthersNode(formModel); } /** * Returns true if the Other Components node should be used, or false if all * the "other" components should be shown directly under the root node. The * latter is the case when the root component either does not exists (the * form class extends Object) or if it is not a visual container. Here all * the components can be presented on the same level. OTOH if the root * component is a visual container (e.g. extends JPanel or JFrame), then it * has its hierarchy (the node can be expanded) and it seems better to have * the other components presented separately under Other Components node. 
*/ private static boolean shouldHaveOthersNode(FormModel formModel) { return formModel.getTopRADComponent() instanceof RADVisualContainer<?>; } // ---------------- /** * The children nodes of the root node can have 3 variants: */ static class RootChildren extends FormNodeChildren { static final RADVisualContainer<?> OTHERS_ROOT = new RADVisualContainer<>(); private final FormModel formModel; private FormOthersNode othersNode; protected RootChildren(FormModel aFormModel) { super(); formModel = aFormModel; updateKeys(); } // FormNodeChildren implementation @Override protected void updateKeys() { othersNode = null; List<RADComponent<?>> keys = new ArrayList<>(); boolean otherComps = shouldHaveOthersNode(formModel); if (otherComps) { keys.add(OTHERS_ROOT); } RADComponent<?> rootComp = formModel.getTopRADComponent(); if (rootComp != null) { keys.add(rootComp); } if (!otherComps) { keys.addAll(formModel.getOtherComponents()); } setKeys(keys.toArray(new RADComponent<?>[]{})); } @Override protected Node[] createNodes(RADComponent<?> key) { Node node; if (key == OTHERS_ROOT) { node = othersNode = new FormOthersNode(formModel); } else { assert key instanceof RADVisualComponent<?>; node = new RADComponentNode((RADVisualComponent<?>) key); key.setNodeReference((RADComponentNode) node); } node.getChildren().getNodes(); // enforce subnodes creation return new Node[]{node}; } protected final FormModel getFormModel() { return formModel; } } }
/* * Copyright 2014 The Netty Project * * The Netty Project licenses this file to you under the Apache License, version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package io.netty.handler.codec.http2; import io.netty.buffer.ByteBuf; import io.netty.channel.ChannelHandlerContext; import io.netty.handler.codec.http2.Http2Exception.ClosedStreamCreationException; import io.netty.util.internal.UnstableApi; import io.netty.util.internal.logging.InternalLogger; import io.netty.util.internal.logging.InternalLoggerFactory; import java.util.List; import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_PRIORITY_WEIGHT; import static io.netty.handler.codec.http2.Http2Error.PROTOCOL_ERROR; import static io.netty.handler.codec.http2.Http2Error.STREAM_CLOSED; import static io.netty.handler.codec.http2.Http2Exception.connectionError; import static io.netty.handler.codec.http2.Http2Exception.streamError; import static io.netty.handler.codec.http2.Http2PromisedRequestVerifier.ALWAYS_VERIFY; import static io.netty.handler.codec.http2.Http2Stream.State.CLOSED; import static io.netty.handler.codec.http2.Http2Stream.State.HALF_CLOSED_REMOTE; import static io.netty.util.internal.ObjectUtil.checkNotNull; /** * Provides the default implementation for processing inbound frame events and delegates to a * {@link Http2FrameListener} * <p> * This class will read HTTP/2 frames and delegate the events to a {@link Http2FrameListener} * <p> * This interface enforces inbound flow control functionality through * {@link Http2LocalFlowController} 
*/ @UnstableApi public class DefaultHttp2ConnectionDecoder implements Http2ConnectionDecoder { private static final InternalLogger logger = InternalLoggerFactory.getInstance(DefaultHttp2ConnectionDecoder.class); private Http2FrameListener internalFrameListener = new PrefaceFrameListener(); private final Http2Connection connection; private Http2LifecycleManager lifecycleManager; private final Http2ConnectionEncoder encoder; private final Http2FrameReader frameReader; private Http2FrameListener listener; private final Http2PromisedRequestVerifier requestVerifier; public DefaultHttp2ConnectionDecoder(Http2Connection connection, Http2ConnectionEncoder encoder, Http2FrameReader frameReader) { this(connection, encoder, frameReader, ALWAYS_VERIFY); } public DefaultHttp2ConnectionDecoder(Http2Connection connection, Http2ConnectionEncoder encoder, Http2FrameReader frameReader, Http2PromisedRequestVerifier requestVerifier) { this.connection = checkNotNull(connection, "connection"); this.frameReader = checkNotNull(frameReader, "frameReader"); this.encoder = checkNotNull(encoder, "encoder"); this.requestVerifier = checkNotNull(requestVerifier, "requestVerifier"); if (connection.local().flowController() == null) { connection.local().flowController(new DefaultHttp2LocalFlowController(connection)); } connection.local().flowController().frameWriter(encoder.frameWriter()); } @Override public void lifecycleManager(Http2LifecycleManager lifecycleManager) { this.lifecycleManager = checkNotNull(lifecycleManager, "lifecycleManager"); } @Override public Http2Connection connection() { return connection; } @Override public final Http2LocalFlowController flowController() { return connection.local().flowController(); } @Override public void frameListener(Http2FrameListener listener) { this.listener = checkNotNull(listener, "listener"); } @Override public Http2FrameListener frameListener() { return listener; } // Visible for testing Http2FrameListener internalFrameListener() { return 
internalFrameListener; } @Override public boolean prefaceReceived() { return FrameReadListener.class == internalFrameListener.getClass(); } @Override public void decodeFrame(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Http2Exception { frameReader.readFrame(ctx, in, internalFrameListener); } @Override public Http2Settings localSettings() { Http2Settings settings = new Http2Settings(); Http2FrameReader.Configuration config = frameReader.configuration(); Http2HeaderTable headerTable = config.headerTable(); Http2FrameSizePolicy frameSizePolicy = config.frameSizePolicy(); settings.initialWindowSize(flowController().initialWindowSize()); settings.maxConcurrentStreams(connection.remote().maxActiveStreams()); settings.headerTableSize(headerTable.maxHeaderTableSize()); settings.maxFrameSize(frameSizePolicy.maxFrameSize()); settings.maxHeaderListSize(headerTable.maxHeaderListSize()); if (!connection.isServer()) { // Only set the pushEnabled flag if this is a client endpoint. settings.pushEnabled(connection.local().allowPushTo()); } return settings; } @Override public void localSettings(Http2Settings settings) throws Http2Exception { Boolean pushEnabled = settings.pushEnabled(); Http2FrameReader.Configuration config = frameReader.configuration(); Http2HeaderTable inboundHeaderTable = config.headerTable(); Http2FrameSizePolicy inboundFrameSizePolicy = config.frameSizePolicy(); if (pushEnabled != null) { if (connection.isServer()) { throw connectionError(PROTOCOL_ERROR, "Server sending SETTINGS frame with ENABLE_PUSH specified"); } connection.local().allowPushTo(pushEnabled); } Long maxConcurrentStreams = settings.maxConcurrentStreams(); if (maxConcurrentStreams != null) { int value = (int) Math.min(maxConcurrentStreams, Integer.MAX_VALUE); connection.remote().maxActiveStreams(value); } Long headerTableSize = settings.headerTableSize(); if (headerTableSize != null) { inboundHeaderTable.maxHeaderTableSize((int) Math.min(headerTableSize, Integer.MAX_VALUE)); } 
Integer maxHeaderListSize = settings.maxHeaderListSize(); if (maxHeaderListSize != null) { inboundHeaderTable.maxHeaderListSize(maxHeaderListSize); } Integer maxFrameSize = settings.maxFrameSize(); if (maxFrameSize != null) { inboundFrameSizePolicy.maxFrameSize(maxFrameSize); } Integer initialWindowSize = settings.initialWindowSize(); if (initialWindowSize != null) { flowController().initialWindowSize(initialWindowSize); } } @Override public void close() { frameReader.close(); } private int unconsumedBytes(Http2Stream stream) { return flowController().unconsumedBytes(stream); } void onGoAwayRead0(ChannelHandlerContext ctx, int lastStreamId, long errorCode, ByteBuf debugData) throws Http2Exception { listener.onGoAwayRead(ctx, lastStreamId, errorCode, debugData); connection.goAwayReceived(lastStreamId, errorCode, debugData); } void onUnknownFrame0(ChannelHandlerContext ctx, byte frameType, int streamId, Http2Flags flags, ByteBuf payload) throws Http2Exception { listener.onUnknownFrame(ctx, frameType, streamId, flags, payload); } /** * Handles all inbound frames from the network. */ private final class FrameReadListener implements Http2FrameListener { @Override public int onDataRead(final ChannelHandlerContext ctx, int streamId, ByteBuf data, int padding, boolean endOfStream) throws Http2Exception { Http2Stream stream = connection.stream(streamId); Http2LocalFlowController flowController = flowController(); int bytesToReturn = data.readableBytes() + padding; boolean shouldIgnore = true; try { shouldIgnore = shouldIgnoreHeadersOrDataFrame(ctx, streamId, stream, "DATA"); } finally { if (shouldIgnore) { // Ignoring this frame. We still need to count the frame towards the connection flow control // window, but we immediately mark all bytes as consumed. flowController.receiveFlowControlledFrame(stream, data, padding, endOfStream); flowController.consumeBytes(stream, bytesToReturn); // Verify that the stream may have existed after we apply flow control. 
verifyStreamMayHaveExisted(streamId); // All bytes have been consumed. return bytesToReturn; } } Http2Exception error = null; switch (stream.state()) { case OPEN: case HALF_CLOSED_LOCAL: break; case HALF_CLOSED_REMOTE: case CLOSED: error = streamError(stream.id(), STREAM_CLOSED, "Stream %d in unexpected state: %s", stream.id(), stream.state()); break; default: error = streamError(stream.id(), PROTOCOL_ERROR, "Stream %d in unexpected state: %s", stream.id(), stream.state()); break; } int unconsumedBytes = unconsumedBytes(stream); try { flowController.receiveFlowControlledFrame(stream, data, padding, endOfStream); // Update the unconsumed bytes after flow control is applied. unconsumedBytes = unconsumedBytes(stream); // If the stream is in an invalid state to receive the frame, throw the error. if (error != null) { throw error; } // Call back the application and retrieve the number of bytes that have been // immediately processed. bytesToReturn = listener.onDataRead(ctx, streamId, data, padding, endOfStream); return bytesToReturn; } catch (Http2Exception e) { // If an exception happened during delivery, the listener may have returned part // of the bytes before the error occurred. If that's the case, subtract that from // the total processed bytes so that we don't return too many bytes. int delta = unconsumedBytes - unconsumedBytes(stream); bytesToReturn -= delta; throw e; } catch (RuntimeException e) { // If an exception happened during delivery, the listener may have returned part // of the bytes before the error occurred. If that's the case, subtract that from // the total processed bytes so that we don't return too many bytes. int delta = unconsumedBytes - unconsumedBytes(stream); bytesToReturn -= delta; throw e; } finally { // If appropriate, return the processed bytes to the flow controller. 
flowController.consumeBytes(stream, bytesToReturn); if (endOfStream) { lifecycleManager.closeStreamRemote(stream, ctx.newSucceededFuture()); } } } @Override public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int padding, boolean endOfStream) throws Http2Exception { onHeadersRead(ctx, streamId, headers, 0, DEFAULT_PRIORITY_WEIGHT, false, padding, endOfStream); } @Override public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int streamDependency, short weight, boolean exclusive, int padding, boolean endOfStream) throws Http2Exception { Http2Stream stream = connection.stream(streamId); boolean allowHalfClosedRemote = false; if (stream == null && !connection.streamMayHaveExisted(streamId)) { stream = connection.remote().createStream(streamId, endOfStream); // Allow the state to be HALF_CLOSE_REMOTE if we're creating it in that state. allowHalfClosedRemote = stream.state() == HALF_CLOSED_REMOTE; } if (shouldIgnoreHeadersOrDataFrame(ctx, streamId, stream, "HEADERS")) { return; } switch (stream.state()) { case RESERVED_REMOTE: stream.open(endOfStream); break; case OPEN: case HALF_CLOSED_LOCAL: // Allowed to receive headers in these states. break; case HALF_CLOSED_REMOTE: if (!allowHalfClosedRemote) { throw streamError(stream.id(), STREAM_CLOSED, "Stream %d in unexpected state: %s", stream.id(), stream.state()); } break; case CLOSED: throw streamError(stream.id(), STREAM_CLOSED, "Stream %d in unexpected state: %s", stream.id(), stream.state()); default: // Connection error. throw connectionError(PROTOCOL_ERROR, "Stream %d in unexpected state: %s", stream.id(), stream.state()); } try { // This call will create a stream for streamDependency if necessary. // For this reason it must be done before notifying the listener. 
stream.setPriority(streamDependency, weight, exclusive); } catch (ClosedStreamCreationException ignored) { // It is possible that either the stream for this frame or the parent stream is closed. // In this case we should ignore the exception and allow the frame to be sent. } listener.onHeadersRead(ctx, streamId, headers, streamDependency, weight, exclusive, padding, endOfStream); // If the headers completes this stream, close it. if (endOfStream) { lifecycleManager.closeStreamRemote(stream, ctx.newSucceededFuture()); } } @Override public void onPriorityRead(ChannelHandlerContext ctx, int streamId, int streamDependency, short weight, boolean exclusive) throws Http2Exception { Http2Stream stream = connection.stream(streamId); try { if (stream == null) { if (connection.streamMayHaveExisted(streamId)) { logger.info("{} ignoring PRIORITY frame for stream {}. Stream doesn't exist but may " + " have existed", ctx.channel(), streamId); return; } // PRIORITY frames always identify a stream. This means that if a PRIORITY frame is the // first frame to be received for a stream that we must create the stream. stream = connection.remote().createIdleStream(streamId); } else if (streamCreatedAfterGoAwaySent(streamId)) { logger.info("{} ignoring PRIORITY frame for stream {}. Stream created after GOAWAY sent. " + "Last known stream by peer {}", ctx.channel(), streamId, connection.remote().lastStreamKnownByPeer()); return; } // This call will create a stream for streamDependency if necessary. // For this reason it must be done before notifying the listener. stream.setPriority(streamDependency, weight, exclusive); } catch (ClosedStreamCreationException ignored) { // It is possible that either the stream for this frame or the parent stream is closed. // In this case we should ignore the exception and allow the frame to be sent. 
} listener.onPriorityRead(ctx, streamId, streamDependency, weight, exclusive); } @Override public void onRstStreamRead(ChannelHandlerContext ctx, int streamId, long errorCode) throws Http2Exception { Http2Stream stream = connection.stream(streamId); if (stream == null) { verifyStreamMayHaveExisted(streamId); return; } switch(stream.state()) { case IDLE: throw connectionError(PROTOCOL_ERROR, "RST_STREAM received for IDLE stream %d", streamId); case CLOSED: return; // RST_STREAM frames must be ignored for closed streams. default: break; } listener.onRstStreamRead(ctx, streamId, errorCode); lifecycleManager.closeStream(stream, ctx.newSucceededFuture()); } @Override public void onSettingsAckRead(ChannelHandlerContext ctx) throws Http2Exception { // Apply oldest outstanding local settings here. This is a synchronization point between endpoints. Http2Settings settings = encoder.pollSentSettings(); if (settings != null) { applyLocalSettings(settings); } listener.onSettingsAckRead(ctx); } /** * Applies settings sent from the local endpoint. 
*/
private void applyLocalSettings(Http2Settings settings) throws Http2Exception {
    Boolean pushEnabled = settings.pushEnabled();
    final Http2FrameReader.Configuration config = frameReader.configuration();
    final Http2HeaderTable headerTable = config.headerTable();
    final Http2FrameSizePolicy frameSizePolicy = config.frameSizePolicy();
    if (pushEnabled != null) {
        // ENABLE_PUSH is rejected when this endpoint is a server: only clients may advertise it.
        if (connection.isServer()) {
            throw connectionError(PROTOCOL_ERROR, "Server sending SETTINGS frame with ENABLE_PUSH specified");
        }
        connection.local().allowPushTo(pushEnabled);
    }
    // Our advertised MAX_CONCURRENT_STREAMS bounds how many streams the *remote* peer may keep active.
    Long maxConcurrentStreams = settings.maxConcurrentStreams();
    if (maxConcurrentStreams != null) {
        // Clamp the 32-bit-unsigned settings value into the int range.
        int value = (int) Math.min(maxConcurrentStreams, Integer.MAX_VALUE);
        connection.remote().maxActiveStreams(value);
    }
    Long headerTableSize = settings.headerTableSize();
    if (headerTableSize != null) {
        headerTable.maxHeaderTableSize((int) Math.min(headerTableSize, Integer.MAX_VALUE));
    }
    Integer maxHeaderListSize = settings.maxHeaderListSize();
    if (maxHeaderListSize != null) {
        headerTable.maxHeaderListSize(maxHeaderListSize);
    }
    Integer maxFrameSize = settings.maxFrameSize();
    if (maxFrameSize != null) {
        frameSizePolicy.maxFrameSize(maxFrameSize);
    }
    Integer initialWindowSize = settings.initialWindowSize();
    if (initialWindowSize != null) {
        flowController().initialWindowSize(initialWindowSize);
    }
}

@Override
public void onSettingsRead(ChannelHandlerContext ctx, Http2Settings settings) throws Http2Exception {
    // Apply the peer's settings to the outbound encoder before acknowledging them.
    encoder.remoteSettings(settings);

    // Acknowledge receipt of the settings.
    encoder.writeSettingsAck(ctx, ctx.newPromise());

    listener.onSettingsRead(ctx, settings);
}

@Override
public void onPingRead(ChannelHandlerContext ctx, ByteBuf data) throws Http2Exception {
    // Send an ack back to the remote client.
    // Need to retain the buffer here since it will be released after the write completes.
    encoder.writePing(ctx, true, data.retainedSlice(), ctx.newPromise());

    listener.onPingRead(ctx, data);
}

@Override
public void onPingAckRead(ChannelHandlerContext ctx, ByteBuf data) throws Http2Exception {
    // PING acks are not re-acknowledged; just notify the user listener.
    listener.onPingAckRead(ctx, data);
}

@Override
public void onPushPromiseRead(ChannelHandlerContext ctx, int streamId, int promisedStreamId,
        Http2Headers headers, int padding) throws Http2Exception {
    // A PUSH_PROMISE frame modifies the state of the parent (associated) stream.
    Http2Stream parentStream = connection.stream(streamId);

    if (shouldIgnoreHeadersOrDataFrame(ctx, streamId, parentStream, "PUSH_PROMISE")) {
        return;
    }

    if (parentStream == null) {
        throw connectionError(PROTOCOL_ERROR, "Stream %d does not exist", streamId);
    }

    switch (parentStream.state()) {
        case OPEN:
        case HALF_CLOSED_LOCAL:
            // Allowed to receive push promise in these states.
            break;
        default:
            // Connection error.
            throw connectionError(PROTOCOL_ERROR,
                    "Stream %d in unexpected state for receiving push promise: %s",
                    parentStream.id(), parentStream.state());
    }

    // All three verifier checks yield a stream error on the *promised* stream, not the parent.
    if (!requestVerifier.isAuthoritative(ctx, headers)) {
        throw streamError(promisedStreamId, PROTOCOL_ERROR,
                "Promised request on stream %d for promised stream %d is not authoritative",
                streamId, promisedStreamId);
    }
    if (!requestVerifier.isCacheable(headers)) {
        throw streamError(promisedStreamId, PROTOCOL_ERROR,
                "Promised request on stream %d for promised stream %d is not known to be cacheable",
                streamId, promisedStreamId);
    }
    if (!requestVerifier.isSafe(headers)) {
        throw streamError(promisedStreamId, PROTOCOL_ERROR,
                "Promised request on stream %d for promised stream %d is not known to be safe",
                streamId, promisedStreamId);
    }

    // Reserve the push stream with a priority based on the current stream's priority.
    connection.remote().reservePushStream(promisedStreamId, parentStream);

    listener.onPushPromiseRead(ctx, streamId, promisedStreamId, headers, padding);
}

@Override
public void onGoAwayRead(ChannelHandlerContext ctx, int lastStreamId, long errorCode, ByteBuf debugData)
        throws Http2Exception {
    onGoAwayRead0(ctx, lastStreamId, errorCode, debugData);
}

@Override
public void onWindowUpdateRead(ChannelHandlerContext ctx, int streamId, int windowSizeIncrement)
        throws Http2Exception {
    Http2Stream stream = connection.stream(streamId);
    if (stream == null || stream.state() == CLOSED || streamCreatedAfterGoAwaySent(streamId)) {
        // Ignore this frame.
        verifyStreamMayHaveExisted(streamId);
        return;
    }

    // Update the outbound flow control window.
    encoder.flowController().incrementWindowSize(stream, windowSizeIncrement);

    listener.onWindowUpdateRead(ctx, streamId, windowSizeIncrement);
}

@Override
public void onUnknownFrame(ChannelHandlerContext ctx, byte frameType, int streamId, Http2Flags flags,
        ByteBuf payload) throws Http2Exception {
    onUnknownFrame0(ctx, frameType, streamId, flags, payload);
}

/**
 * Helper method to determine if a frame that has the semantics of headers or data should be ignored for the
 * {@code stream} (which may be {@code null}) associated with {@code streamId}.
 */
private boolean shouldIgnoreHeadersOrDataFrame(ChannelHandlerContext ctx, int streamId, Http2Stream stream,
        String frameName) throws Http2Exception {
    if (stream == null) {
        if (streamCreatedAfterGoAwaySent(streamId)) {
            logger.info("{} ignoring {} frame for stream {}. Stream sent after GOAWAY sent",
                    ctx.channel(), frameName, streamId);
            return true;
        }
        // It's possible that this frame would result in stream ID out of order creation (PROTOCOL ERROR) and it's
        // also possible that this frame is received on a CLOSED stream (STREAM_CLOSED after a RST_STREAM is
        // sent). We don't have enough information to know for sure, so we choose the lesser of the two errors.
throw streamError(streamId, STREAM_CLOSED, "Received %s frame for an unknown stream %d", frameName, streamId); } else if (stream.isResetSent() || streamCreatedAfterGoAwaySent(streamId)) { if (logger.isInfoEnabled()) { logger.info("{} ignoring {} frame for stream {} {}", ctx.channel(), frameName, stream.isResetSent() ? "RST_STREAM sent." : ("Stream created after GOAWAY sent. Last known stream by peer " + connection.remote().lastStreamKnownByPeer())); } return true; } return false; } /** * Helper method for determining whether or not to ignore inbound frames. A stream is considered to be created * after a {@code GOAWAY} is sent if the following conditions hold: * <p/> * <ul> * <li>A {@code GOAWAY} must have been sent by the local endpoint</li> * <li>The {@code streamId} must identify a legitimate stream id for the remote endpoint to be creating</li> * <li>{@code streamId} is greater than the Last Known Stream ID which was sent by the local endpoint * in the last {@code GOAWAY} frame</li> * </ul> * <p/> */ private boolean streamCreatedAfterGoAwaySent(int streamId) { Http2Connection.Endpoint<?> remote = connection.remote(); return connection.goAwaySent() && remote.isValidStreamId(streamId) && streamId > remote.lastStreamKnownByPeer(); } private void verifyStreamMayHaveExisted(int streamId) throws Http2Exception { if (!connection.streamMayHaveExisted(streamId)) { throw connectionError(PROTOCOL_ERROR, "Stream %d does not exist", streamId); } } } private final class PrefaceFrameListener implements Http2FrameListener { /** * Verifies that the HTTP/2 connection preface has been received from the remote endpoint. * It is possible that the current call to * {@link Http2FrameReader#readFrame(ChannelHandlerContext, ByteBuf, Http2FrameListener)} will have multiple * frames to dispatch. So it may be OK for this class to get legitimate frames for the first readFrame. 
*/
    private void verifyPrefaceReceived() throws Http2Exception {
        if (!prefaceReceived()) {
            throw connectionError(PROTOCOL_ERROR, "Received non-SETTINGS as first frame.");
        }
    }

    // Every callback below (except GOAWAY/unknown) first checks that the preface has arrived,
    // then delegates to the currently-installed internalFrameListener.

    @Override
    public int onDataRead(ChannelHandlerContext ctx, int streamId, ByteBuf data, int padding,
            boolean endOfStream) throws Http2Exception {
        verifyPrefaceReceived();
        return internalFrameListener.onDataRead(ctx, streamId, data, padding, endOfStream);
    }

    @Override
    public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int padding,
            boolean endOfStream) throws Http2Exception {
        verifyPrefaceReceived();
        internalFrameListener.onHeadersRead(ctx, streamId, headers, padding, endOfStream);
    }

    @Override
    public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers,
            int streamDependency, short weight, boolean exclusive, int padding,
            boolean endOfStream) throws Http2Exception {
        verifyPrefaceReceived();
        internalFrameListener.onHeadersRead(ctx, streamId, headers, streamDependency, weight, exclusive,
                padding, endOfStream);
    }

    @Override
    public void onPriorityRead(ChannelHandlerContext ctx, int streamId, int streamDependency, short weight,
            boolean exclusive) throws Http2Exception {
        verifyPrefaceReceived();
        internalFrameListener.onPriorityRead(ctx, streamId, streamDependency, weight, exclusive);
    }

    @Override
    public void onRstStreamRead(ChannelHandlerContext ctx, int streamId, long errorCode)
            throws Http2Exception {
        verifyPrefaceReceived();
        internalFrameListener.onRstStreamRead(ctx, streamId, errorCode);
    }

    @Override
    public void onSettingsAckRead(ChannelHandlerContext ctx) throws Http2Exception {
        verifyPrefaceReceived();
        internalFrameListener.onSettingsAckRead(ctx);
    }

    @Override
    public void onSettingsRead(ChannelHandlerContext ctx, Http2Settings settings) throws Http2Exception {
        // The first settings should change the internalFrameListener to the "real" listener
        // that expects the preface to be verified.
        if (!prefaceReceived()) {
            internalFrameListener = new FrameReadListener();
        }
        internalFrameListener.onSettingsRead(ctx, settings);
    }

    @Override
    public void onPingRead(ChannelHandlerContext ctx, ByteBuf data) throws Http2Exception {
        verifyPrefaceReceived();
        internalFrameListener.onPingRead(ctx, data);
    }

    @Override
    public void onPingAckRead(ChannelHandlerContext ctx, ByteBuf data) throws Http2Exception {
        verifyPrefaceReceived();
        internalFrameListener.onPingAckRead(ctx, data);
    }

    @Override
    public void onPushPromiseRead(ChannelHandlerContext ctx, int streamId, int promisedStreamId,
            Http2Headers headers, int padding) throws Http2Exception {
        verifyPrefaceReceived();
        internalFrameListener.onPushPromiseRead(ctx, streamId, promisedStreamId, headers, padding);
    }

    @Override
    public void onGoAwayRead(ChannelHandlerContext ctx, int lastStreamId, long errorCode, ByteBuf debugData)
            throws Http2Exception {
        // GOAWAY is handled the same way whether or not the preface has been received.
        onGoAwayRead0(ctx, lastStreamId, errorCode, debugData);
    }

    @Override
    public void onWindowUpdateRead(ChannelHandlerContext ctx, int streamId, int windowSizeIncrement)
            throws Http2Exception {
        verifyPrefaceReceived();
        internalFrameListener.onWindowUpdateRead(ctx, streamId, windowSizeIncrement);
    }

    @Override
    public void onUnknownFrame(ChannelHandlerContext ctx, byte frameType, int streamId, Http2Flags flags,
            ByteBuf payload) throws Http2Exception {
        // Unknown frames are forwarded without a preface check.
        onUnknownFrame0(ctx, frameType, streamId, flags, payload);
    }
}
}
/* * * * Licensed to the Apache Software Foundation (ASF) under one * * or more contributor license agreements. See the NOTICE file * * distributed with this work for additional information * * regarding copyright ownership. The ASF licenses this file * * to you under the Apache License, Version 2.0 (the * * "License"); you may not use this file except in compliance * * with the License. You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, * * software distributed under the License is distributed on an * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * * KIND, either express or implied. See the License for the * * specific language governing permissions and limitations * * under the License. * */ package org.apache.usergrid.persistence.core.astyanax; import java.util.Arrays; import java.util.Comparator; import java.util.HashMap; import org.apache.usergrid.persistence.core.CassandraConfig; import org.apache.usergrid.persistence.core.CassandraFig; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.apache.usergrid.persistence.core.guice.TestCommonModule; import org.apache.usergrid.persistence.core.scope.ApplicationScope; import org.apache.usergrid.persistence.core.test.ITRunner; import org.apache.usergrid.persistence.core.test.UseModules; import org.apache.usergrid.persistence.model.util.UUIDGenerator; import com.google.inject.Inject; import com.netflix.astyanax.Keyspace; import com.netflix.astyanax.MutationBatch; import com.netflix.astyanax.connectionpool.exceptions.ConnectionException; import com.netflix.astyanax.model.Column; import com.netflix.astyanax.model.ColumnFamily; import com.netflix.astyanax.model.ConsistencyLevel; import com.netflix.astyanax.query.RowQuery; import com.netflix.astyanax.serializers.LongSerializer; import com.netflix.astyanax.serializers.StringSerializer; import 
com.netflix.astyanax.util.RangeBuilder; import rx.Observable; import rx.schedulers.Schedulers; import static org.junit.Assert.assertEquals; @RunWith( ITRunner.class ) @UseModules( TestCommonModule.class ) public class MultiKeyColumnNameIteratorTest { @Inject public CassandraFig cassandraFig; @Inject public CassandraCluster cassandraCluster; protected static Keyspace keyspace; protected ApplicationScope scope; protected static ColumnFamily<String, Long> COLUMN_FAMILY = new ColumnFamily<>( "MultiKeyLongTests", StringSerializer.get(), LongSerializer.get() ); protected static final boolean TRUE = true; @Before public void setup() throws ConnectionException { final CassandraConfig cassandraConfig = new CassandraConfig() { @Override public ConsistencyLevel getReadCL() { return ConsistencyLevel.CL_LOCAL_ONE; } @Override public ConsistencyLevel getConsistentReadCL() { return ConsistencyLevel.CL_LOCAL_QUORUM; } @Override public ConsistencyLevel getWriteCL() { return ConsistencyLevel.CL_QUORUM; } @Override public com.datastax.driver.core.ConsistencyLevel getDataStaxReadCl() { return com.datastax.driver.core.ConsistencyLevel.LOCAL_ONE; } @Override public com.datastax.driver.core.ConsistencyLevel getDataStaxReadConsistentCl() { return com.datastax.driver.core.ConsistencyLevel.ALL; } @Override public com.datastax.driver.core.ConsistencyLevel getDataStaxWriteCl() { return com.datastax.driver.core.ConsistencyLevel.QUORUM; } @Override public int[] getShardSettings() { return new int[]{20}; } @Override public String getApplicationKeyspace() { return cassandraFig.getApplicationKeyspace(); } @Override public String getApplicationLocalKeyspace() { return cassandraFig.getApplicationKeyspace() + "us_east"; } @Override public String getLocalDataCenter() { return cassandraFig.getLocalDataCenter(); } @Override public int getConnections() { return cassandraFig.getConnections(); } @Override public int getTimeout() { return cassandraFig.getTimeout(); } @Override public int getPoolTimeout() { 
return cassandraFig.getPoolTimeout(); } @Override public String getClusterName() { return cassandraFig.getClusterName(); } @Override public String getHosts() { return cassandraFig.getHosts(); } @Override public String getVersion() { return cassandraFig.getVersion(); } @Override public String getUsername() { return cassandraFig.getUsername(); } @Override public String getPassword() { return cassandraFig.getPassword(); } @Override public String getStrategy() { return cassandraFig.getStrategy(); } @Override public String getStrategyOptions() { return cassandraFig.getStrategyOptions(); } @Override public String getStrategyLocal() { return cassandraFig.getStrategyLocal(); } @Override public String getStrategyOptionsLocal() { return cassandraFig.getStrategyOptionsLocal(); } }; AstyanaxKeyspaceProvider astyanaxKeyspaceProvider = new AstyanaxKeyspaceProvider( cassandraCluster ); keyspace = astyanaxKeyspaceProvider.get(); TestUtils.createKeyspace( keyspace ); TestUtils.createColumnFamiliy( keyspace, COLUMN_FAMILY, new HashMap<String, Object>() ); } @Test public void multiIterator() { final String rowKey1 = UUIDGenerator.newTimeUUID().toString(); final String rowKey2 = UUIDGenerator.newTimeUUID().toString(); final String rowKey3 = UUIDGenerator.newTimeUUID().toString(); final long maxValue = 10000; /** * Write to both rows in parallel */ Observable.from( new String[] { rowKey1, rowKey2, rowKey3 } ) //perform a flatmap .flatMap( stringObservable -> Observable.just( stringObservable ).doOnNext( key -> { final MutationBatch batch = keyspace.prepareMutationBatch(); for ( long i = 0; i < maxValue; i++ ) { batch.withRow( COLUMN_FAMILY, key ).putColumn( i, TRUE ); if ( i % 1000 == 0 ) { try { batch.execute(); } catch ( ConnectionException e ) { throw new RuntimeException( e ); } } } try { batch.execute(); } catch ( ConnectionException e ) { throw new RuntimeException( e ); } } ).subscribeOn( Schedulers.io() ) ).toBlocking().last(); //create 3 iterators ColumnNameIterator<Long, 
Long> row1Iterator = createIterator( rowKey1, false );
        ColumnNameIterator<Long, Long> row2Iterator = createIterator( rowKey2, false );
        ColumnNameIterator<Long, Long> row3Iterator = createIterator( rowKey3, false );

        final Comparator<Long> ascendingComparator = new Comparator<Long>() {
            @Override
            public int compare( final Long o1, final Long o2 ) {
                return Long.compare( o1, o2 );
            }
        };

        // Arbitrary buffer size (900) chosen so that internal buffering kicks in at some point.
        final MultiKeyColumnNameIterator<Long, Long> ascendingItr =
            new MultiKeyColumnNameIterator<>( Arrays.asList( row1Iterator, row2Iterator, row3Iterator ),
                ascendingComparator, 900 );

        //ensure we have to make several trips, purposefully set to a nonsensical value to ensure we make all the
        // trips required
        for ( long i = 0; i < maxValue; i++ ) {
            // Each value was written to all 3 rows, but the aggregation yields it exactly once, in order.
            assertEquals( i, ascendingItr.next().longValue() );
        }

        //now test it in reverse
        ColumnNameIterator<Long, Long> row1IteratorDesc = createIterator( rowKey1, true );
        ColumnNameIterator<Long, Long> row2IteratorDesc = createIterator( rowKey2, true );
        ColumnNameIterator<Long, Long> row3IteratorDesc = createIterator( rowKey3, true );

        final Comparator<Long> descendingComparator = new Comparator<Long>() {
            @Override
            public int compare( final Long o1, final Long o2 ) {
                // Invert the ascending comparator.
                return ascendingComparator.compare( o1, o2 ) * -1;
            }
        };

        // Arbitrary buffer size (900) chosen so that internal buffering kicks in at some point.
        final MultiKeyColumnNameIterator<Long, Long> descendingItr =
            new MultiKeyColumnNameIterator<>( Arrays.asList( row1IteratorDesc, row2IteratorDesc, row3IteratorDesc ),
                descendingComparator, 900 );

        for ( long i = maxValue - 1; i > -1; i-- ) {
            assertEquals( i, descendingItr.next().longValue() );
        }
    }


    @Test
    public void singleIterator() {

        final String rowKey1 = UUIDGenerator.newTimeUUID().toString();

        final long maxValue = 10000;

        // Write to the single row on the IO scheduler.
        Observable.just( rowKey1 ).flatMap( rowKey -> Observable.just( rowKey ).doOnNext( key -> {

            final MutationBatch batch = keyspace.prepareMutationBatch();

            for ( long i = 0; i < maxValue; i++ ) {
                batch.withRow( COLUMN_FAMILY, key ).putColumn( i, TRUE );

                // Flush periodically so individual batches stay small.
                if ( i % 1000 == 0 ) {
                    try {
                        batch.execute();
                    }
                    catch ( ConnectionException e ) {
                        throw new RuntimeException( e );
                    }
                }
            }

            try {
                batch.execute();
            }
            catch ( ConnectionException e ) {
                throw new RuntimeException( e );
            }} ).subscribeOn( Schedulers.io() ) )
            .toBlocking().last();

        //create 3 iterators
        ColumnNameIterator<Long, Long> row1Iterator = createIterator( rowKey1, false );

        final Comparator<Long> ascendingComparator = new Comparator<Long>() {
            @Override
            public int compare( final Long o1, final Long o2 ) {
                return Long.compare( o1, o2 );
            }
        };

        // Arbitrary buffer size (900) chosen so that internal buffering kicks in at some point.
        final MultiKeyColumnNameIterator<Long, Long> ascendingItr =
            new MultiKeyColumnNameIterator<>( Arrays.asList( row1Iterator ), ascendingComparator, 900 );

        //ensure we have to make several trips, purposefully set to a nonsensical value to ensure we make all the
        // trips required
        for ( long i = 0; i < maxValue; i++ ) {
            // With a single source iterator each value appears exactly once in the aggregation.
            // (Original comment claimed "3 iterators"; that applied to the multiIterator test.)
            assertEquals( i, ascendingItr.next().longValue() );
        }

        //now test it in reverse
        ColumnNameIterator<Long, Long> row1IteratorDesc = createIterator( rowKey1, true );

        final Comparator<Long> descendingComparator = new Comparator<Long>() {
            @Override
            public int compare( final Long o1, final Long o2 ) {
                // Invert the ascending comparator.
                return ascendingComparator.compare( o1, o2 ) * -1;
            }
        };

        // Arbitrary buffer size (900) chosen so that internal buffering kicks in at some point.
        final MultiKeyColumnNameIterator<Long, Long> descendingItr =
            new MultiKeyColumnNameIterator<>( Arrays.asList( row1IteratorDesc), descendingComparator, 900 );

        for ( long i = maxValue - 1; i > -1; i-- ) {
            assertEquals( i, descendingItr.next().longValue() );
        }
    }


    /**
     * Builds a ColumnNameIterator over {@code rowKey} that pages through columns
     * 720 at a time, optionally in reversed (descending) order.
     */
    private static ColumnNameIterator<Long, Long> createIterator( final String rowKey, final boolean reversed ) {

        // Parses each column into its Long name.
        final ColumnParser<Long, Long> longParser = new ColumnParser<Long, Long>() {
            @Override
            public Long parseColumn( final Column<Long> column ) {
                return column.getName();
            }
        };

        final RangeBuilder forwardRange = new RangeBuilder().setLimit( 720 ).setReversed( reversed );

        final RowQuery<String, Long> forwardQuery =
            keyspace.prepareQuery( COLUMN_FAMILY ).getKey( rowKey ).withColumnRange( forwardRange.build() );

        ColumnNameIterator<Long, Long> itr = new ColumnNameIterator<>( forwardQuery, longParser, false );

        return itr;
    }
}
package org.projectspinoza.twitterswissarmyknife; import static org.junit.Assert.assertEquals; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.Mockito; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PowerMockIgnore; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; import org.projectspinoza.twitterswissarmyknife.command.BaseCommand; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpAccountSettings; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpAvailableTrends; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpBlockList; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpFavourites; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpFollowerIDs; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpFollowersList; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpFriendIDs; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpFriendsList; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpGeoDetails; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpHomeTimeLine; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpIncomingFriendships; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpMemberSuggestions; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpMentionsTimeLine; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpMutesIDs; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpMutesList; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpOutgoingFriendships; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpOwnRetweets; 
import org.projectspinoza.twitterswissarmyknife.command.CommandDumpPlaceTrends; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpRetweeters; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpSavedSearches; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpSimilarPlaces; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpStatus; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpSuggestedUserCats; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpUserListMembers; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpUserListMemberships; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpUserListSubscribers; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpUserListSubscriptions; import org.projectspinoza.twitterswissarmyknife.command.CommandDumpUserSuggestions; import org.projectspinoza.twitterswissarmyknife.command.CommandSearchUsers; import org.projectspinoza.twitterswissarmyknife.command.CommandShowFriendShip; import org.projectspinoza.twitterswissarmyknife.util.TsakResponse; import twitter4j.AccountSettings; import twitter4j.Category; import twitter4j.GeoLocation; import twitter4j.GeoQuery; import twitter4j.IDs; import twitter4j.Location; import twitter4j.PagableResponseList; import twitter4j.Paging; import twitter4j.Place; import twitter4j.Query; import twitter4j.QueryResult; import twitter4j.RateLimitStatus; import twitter4j.Relationship; import twitter4j.ResponseList; import twitter4j.SavedSearch; import twitter4j.Status; import twitter4j.Trends; import twitter4j.Twitter; import twitter4j.TwitterException; import twitter4j.User; import twitter4j.UserList; @PowerMockIgnore("javax.management.*") @RunWith(PowerMockRunner.class) @PrepareForTest({ Paging.class, Query.class, QueryResult.class, GeoLocation.class, GeoQuery.class, CommandDumpSimilarPlaces.class, BaseCommand.class }) public class TsakUnitTest { long 
testUserId = 101010111;
    long testSlugId = 101010111;
    long testListId = 101010111;
    long testStatusId = 1;
    String testUserName = "bit-whacker";
    String testOutput = "testOutput.txt";
    String testPlaceId = "5a110d312052166f";
    String testSlug = "sports";
    String testPlaceName = "New York";
    String testKeywords = "Cofee"; // NOTE(review): looks like a typo for "Coffee", but it is a runtime value — left as-is.
    String testSource = "test source";
    String testTarget = "test target";
    Double testLongitude = 74.0059;
    Double testLatitude = 40.7127;
    int testWoeId = 1;
    int testLimit = 1;

    // Mocks shared by all test cases; (re)initialized in setup().
    @Mock
    Twitter twitter;
    IDs ids;
    RateLimitStatus rateLimitStatus;
    AccountSettings settings;
    ResponseList<Location> locationsList;
    PagableResponseList<User> blockList;
    Paging page;
    QueryResult queryResult;
    Query query;
    GeoLocation geoLocation;
    ResponseList<Status> statusList;
    List<PagableResponseList<User>> userList;
    PagableResponseList<User> userPagableList;
    Place place;
    ResponseList<Place> placeResponseList;
    ResponseList<User> userRList;
    Trends trends;
    ResponseList<SavedSearch> savedSearchResponseList;
    Status status;
    ResponseList<Category> categoryResponseList;
    PagableResponseList<UserList> userPRLists;
    Relationship relationship;

    /**
     * Creates fresh Mockito mocks for every test and routes {@code new GeoLocation(lat, lng)}
     * to the mocked instance via PowerMock.
     */
    @SuppressWarnings("unchecked")
    @Before
    public void setup() throws Exception {
        twitter = Mockito.mock(Twitter.class);
        settings = Mockito.mock(AccountSettings.class);
        ids = Mockito.mock(IDs.class);
        rateLimitStatus = Mockito.mock(RateLimitStatus.class);
        locationsList = Mockito.mock(ResponseList.class);
        statusList = Mockito.mock(ResponseList.class);
        blockList = Mockito.mock(PagableResponseList.class);
        page = Mockito.mock(Paging.class);
        queryResult = Mockito.mock(QueryResult.class);
        geoLocation = Mockito.mock(GeoLocation.class);
        query = Mockito.mock(Query.class);
        userList = Mockito.mock(List.class);
        userRList = Mockito.mock(ResponseList.class);
        userPagableList = Mockito.mock(PagableResponseList.class);
        userPRLists = Mockito.mock(PagableResponseList.class);
        place = Mockito.mock(Place.class);
        page = Mockito.mock(Paging.class); // NOTE(review): duplicate assignment — 'page' was already mocked above.
        trends = Mockito.mock(Trends.class);
        status = Mockito.mock(Status.class);
        savedSearchResponseList = Mockito.mock(ResponseList.class);
        categoryResponseList = Mockito.mock(ResponseList.class);
        relationship = Mockito.mock(Relationship.class);
        PowerMockito.whenNew(GeoLocation.class).withArguments(testLatitude, testLongitude).thenReturn(geoLocation);
    }

    /* CommandDumpFollowerIDs */
    @Test
    public void testCase_1() throws TwitterException {
        CommandDumpFollowerIDs testCommand = new CommandDumpFollowerIDs();
        testCommand.setScreenName(testUserName);
        testCommand.setOutputFile(testOutput);
        testCommand.setLimit(testLimit);
        TsakResponse expected = new TsakResponse(0, new ArrayList<IDs>(Arrays.asList(ids)));
        expected.setCommandDetails(testCommand.toString());
        // Stub the first (and only) cursor page of follower IDs.
        Mockito.when(twitter.getFollowersIDs(testUserName, -1)).thenReturn(ids);
        Mockito.when(ids.getNextCursor()).thenReturn(0L);
        Mockito.when(ids.getRateLimitStatus()).thenReturn(rateLimitStatus);
        Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0);
        TsakResponse result = testCommand.execute(twitter);
        assertEquals(expected.getRemApiLimits(), result.getRemApiLimits());
        assertEquals(expected.getResponseData(), result.getResponseData());
        assertEquals(expected.getCommandDetails(), result.getCommandDetails());
    }

    /* CommandDumpAccountSettings */
    @Test
    public void testCase_2() throws TwitterException {
        CommandDumpAccountSettings testCommand = new CommandDumpAccountSettings();
        testCommand.setHelp(true);
        testCommand.setOutputFile(testOutput);
        TsakResponse expected = new TsakResponse(0, settings);
        expected.setCommandDetails(testCommand.toString());
        Mockito.when(twitter.getAccountSettings()).thenReturn(settings);
        Mockito.when(settings.getRateLimitStatus()).thenReturn(rateLimitStatus);
        Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0);
        TsakResponse result = testCommand.execute(twitter);
        assertEquals(expected.getRemApiLimits(), result.getRemApiLimits());
        assertEquals(expected.getResponseData(), result.getResponseData());
        assertEquals(expected.getCommandDetails(), result.getCommandDetails());
    }

    /* CommandDumpAvailableTrends */
    @Test
    public void testCase_3() throws TwitterException {
        CommandDumpAvailableTrends testCommand = new CommandDumpAvailableTrends();
        testCommand.setHelp(true);
        testCommand.setOutputFile(testOutput);
        // NOTE(review): expected remaining-API-limit is -1 here (no rate-limit stubbing for this command).
        TsakResponse expected = new TsakResponse(-1, locationsList);
        expected.setCommandDetails(testCommand.toString());
        Mockito.when(twitter.getAvailableTrends()).thenReturn(locationsList);
        TsakResponse result = testCommand.execute(twitter);
        assertEquals(expected.getResponseData(), result.getResponseData());
        assertEquals(expected.getCommandDetails(), result.getCommandDetails());
    }

    /* CommandDumpBlockList */
    @Test
    public void testCase_4() throws TwitterException {
        CommandDumpBlockList testCommand = new CommandDumpBlockList();
        testCommand.setHelp(true);
        testCommand.setOutputFile(testOutput);
        TsakResponse expected = new TsakResponse(0, blockList);
        expected.setCommandDetails(testCommand.toString());
        Mockito.when(twitter.getBlocksList()).thenReturn(blockList);
        Mockito.when(blockList.getRateLimitStatus()).thenReturn(rateLimitStatus);
        Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0);
        TsakResponse result = testCommand.execute(twitter);
        assertEquals(expected.getRemApiLimits(), result.getRemApiLimits());
        assertEquals(expected.getResponseData(), result.getResponseData());
        assertEquals(expected.getCommandDetails(), result.getCommandDetails());
    }

    // TODO(review): empty test body — either implement or remove.
    @Test
    public void testCase_5() throws TwitterException {
    }

    /* CommandDumpFavourites */
    @Test
    public void testCase_6() throws TwitterException {
        CommandDumpFavourites testCommand = new CommandDumpFavourites();
        testCommand.setHelp(true);
        testCommand.setOutputFile(testOutput);
        TsakResponse expected = new TsakResponse(0, statusList);
        expected.setCommandDetails(testCommand.toString());
        Mockito.when(twitter.getFavorites()).thenReturn(statusList);
        Mockito.when(statusList.getRateLimitStatus()).thenReturn(rateLimitStatus);
        Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0);
        TsakResponse result = testCommand.execute(twitter);
        assertEquals(expected.getRemApiLimits(), result.getRemApiLimits());
assertEquals(expected.getResponseData(), result.getResponseData()); assertEquals(expected.getCommandDetails(), result.getCommandDetails()); } @Test public void testCase_7() throws TwitterException { List<PagableResponseList<User>> followersList = new ArrayList<PagableResponseList<User>>(); followersList.add(userPagableList); CommandDumpFollowersList testCommand = new CommandDumpFollowersList(); testCommand.setHelp(true); testCommand.setOutputFile(testOutput); testCommand.setLimit(testLimit); testCommand.setUserId(testUserId); TsakResponse expected = new TsakResponse(0, followersList); expected.setCommandDetails(testCommand.toString()); Mockito.when(twitter.getFollowersList(testUserId, -1)).thenReturn(userPagableList); Mockito.when(userPagableList.getRateLimitStatus()).thenReturn(rateLimitStatus); Mockito.when(userPagableList.getNextCursor()).thenReturn(0L); Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0); TsakResponse result = testCommand.execute(twitter); assertEquals(expected.getRemApiLimits(), result.getRemApiLimits()); assertEquals(expected.getResponseData(), result.getResponseData()); assertEquals(expected.getCommandDetails(), result.getCommandDetails()); } @Test public void testCase_8() throws TwitterException { List<IDs> friendsIDsCollection = new ArrayList<IDs>(); friendsIDsCollection.add(ids); CommandDumpFriendIDs testCommand = new CommandDumpFriendIDs(); testCommand.setHelp(true); testCommand.setOutputFile(testOutput); testCommand.setLimit(testLimit); testCommand.setUserId(testUserId); TsakResponse expected = new TsakResponse(0, friendsIDsCollection); expected.setCommandDetails(testCommand.toString()); Mockito.when(twitter.getFriendsIDs(testUserId, -1)).thenReturn(ids); Mockito.when(ids.getRateLimitStatus()).thenReturn(rateLimitStatus); Mockito.when(userPagableList.getNextCursor()).thenReturn(0L); Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0); TsakResponse result = testCommand.execute(twitter); 
assertEquals(expected.getRemApiLimits(), result.getRemApiLimits());
        assertEquals(expected.getResponseData(), result.getResponseData());
        assertEquals(expected.getCommandDetails(), result.getCommandDetails());
    }

    /* CommandDumpFriendsList */
    @Test
    public void testCase_9() throws TwitterException {
        List<PagableResponseList<User>> friendList = new ArrayList<PagableResponseList<User>>();
        friendList.add(userPagableList);
        CommandDumpFriendsList testCommand = new CommandDumpFriendsList();
        testCommand.setHelp(true);
        testCommand.setOutputFile(testOutput);
        testCommand.setLimit(testLimit);
        testCommand.setUserId(testUserId);
        TsakResponse expected = new TsakResponse(0, friendList);
        expected.setCommandDetails(testCommand.toString());
        // One cursor page of friends; next cursor 0 terminates paging.
        Mockito.when(twitter.getFriendsList(testUserId, -1)).thenReturn(userPagableList);
        Mockito.when(userPagableList.getRateLimitStatus()).thenReturn(rateLimitStatus);
        Mockito.when(userPagableList.getNextCursor()).thenReturn(0L);
        Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0);
        TsakResponse result = testCommand.execute(twitter);
        assertEquals(expected.getRemApiLimits(), result.getRemApiLimits());
        assertEquals(expected.getResponseData(), result.getResponseData());
        assertEquals(expected.getCommandDetails(), result.getCommandDetails());
    }

    /* CommandDumpGeoDetails */
    @Test
    public void testCase_10() throws TwitterException {
        CommandDumpGeoDetails testCommand = new CommandDumpGeoDetails();
        testCommand.setHelp(true);
        testCommand.setOutputFile(testOutput);
        testCommand.setPlaceId(testPlaceId);
        TsakResponse expected = new TsakResponse(0, place);
        expected.setCommandDetails(testCommand.toString());
        Mockito.when(twitter.getGeoDetails(testPlaceId)).thenReturn(place);
        Mockito.when(place.getRateLimitStatus()).thenReturn(rateLimitStatus);
        Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0);
        TsakResponse result = testCommand.execute(twitter);
        assertEquals(expected.getRemApiLimits(), result.getRemApiLimits());
        assertEquals(expected.getResponseData(), result.getResponseData());
        assertEquals(expected.getCommandDetails(), result.getCommandDetails());
    }

    /* CommandDumpHomeTimeLine */
    @Test
    public void testCase_11() throws TwitterException {
        CommandDumpHomeTimeLine testCommand = new CommandDumpHomeTimeLine();
        testCommand.setHelp(true);
        testCommand.setOutputFile(testOutput);
        TsakResponse expected = new TsakResponse(0, statusList);
        expected.setCommandDetails(testCommand.toString());
        Mockito.when(twitter.getHomeTimeline()).thenReturn(statusList);
        Mockito.when(statusList.getRateLimitStatus()).thenReturn(rateLimitStatus);
        Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0);
        TsakResponse result = testCommand.execute(twitter);
        assertEquals(expected.getRemApiLimits(), result.getRemApiLimits());
        assertEquals(expected.getResponseData(), result.getResponseData());
        assertEquals(expected.getCommandDetails(), result.getCommandDetails());
    }

    /* CommandDumpIncomingFriendships */
    @Test
    public void testCase_12() throws TwitterException {
        List<IDs> IncomingFriendshipsCollection = new ArrayList<IDs>();
        IncomingFriendshipsCollection.add(ids);
        CommandDumpIncomingFriendships testCommand = new CommandDumpIncomingFriendships();
        testCommand.setHelp(true);
        testCommand.setOutputFile(testOutput);
        testCommand.setLimit(testLimit);
        TsakResponse expected = new TsakResponse(0, IncomingFriendshipsCollection);
        expected.setCommandDetails(testCommand.toString());
        Mockito.when(twitter.getIncomingFriendships(-1)).thenReturn(ids);
        Mockito.when(ids.getNextCursor()).thenReturn(0L);
        Mockito.when(ids.getRateLimitStatus()).thenReturn(rateLimitStatus);
        Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0);
        TsakResponse result = testCommand.execute(twitter);
        assertEquals(expected.getRemApiLimits(), result.getRemApiLimits());
        assertEquals(expected.getResponseData(), result.getResponseData());
        assertEquals(expected.getCommandDetails(), result.getCommandDetails());
    }

    /* CommandDumpListStatuses */
    // TODO(review): empty test body — either implement or remove.
    @Test
    public void testCase_13() throws Exception {
    }

    /* CommandDumpMemberSuggestions */
    @Test
    public void testCase_14() throws Exception {
        CommandDumpMemberSuggestions testCommand = new CommandDumpMemberSuggestions();
        testCommand.setHelp(true);
        testCommand.setOutputFile(testOutput);
        testCommand.setSlug(testSlug);
        TsakResponse expected = new TsakResponse(0, userRList);
        expected.setCommandDetails(testCommand.toString());
        Mockito.when(twitter.getMemberSuggestions(testSlug)).thenReturn(userRList);
        Mockito.when(userRList.getRateLimitStatus()).thenReturn(rateLimitStatus);
        Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0);
        TsakResponse result = testCommand.execute(twitter);
        assertEquals(expected.getRemApiLimits(), result.getRemApiLimits());
        assertEquals(expected.getResponseData(), result.getResponseData());
        assertEquals(expected.getCommandDetails(), result.getCommandDetails());
    }

    /* CommandDumpMentionsTimeLine */
    @Test
    public void testCase_15() throws Exception {
        CommandDumpMentionsTimeLine testCommand = new CommandDumpMentionsTimeLine();
        testCommand.setHelp(true);
        testCommand.setOutputFile(testOutput);
        TsakResponse expected = new TsakResponse(0, statusList);
        expected.setCommandDetails(testCommand.toString());
        Mockito.when(twitter.getMentionsTimeline()).thenReturn(statusList);
        Mockito.when(statusList.getRateLimitStatus()).thenReturn(rateLimitStatus);
        Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0);
        TsakResponse result = testCommand.execute(twitter);
        assertEquals(expected.getRemApiLimits(), result.getRemApiLimits());
        assertEquals(expected.getResponseData(), result.getResponseData());
        assertEquals(expected.getCommandDetails(), result.getCommandDetails());
    }

    /* CommandDumpMutesIDs (section label fixed: was mislabeled CommandDumpMentionsTimeLine) */
    @Test
    public void testCase_16() throws Exception {
        List<IDs> mutesIDsCollection = new ArrayList<IDs>();
        mutesIDsCollection.add(ids);
        CommandDumpMutesIDs testCommand = new CommandDumpMutesIDs();
        testCommand.setHelp(true);
        testCommand.setOutputFile(testOutput);
        testCommand.setLimit(testLimit);
        TsakResponse expected = new TsakResponse(0, mutesIDsCollection);
        expected.setCommandDetails(testCommand.toString());
        Mockito.when(twitter.getMutesIDs(-1)).thenReturn(ids);
        Mockito.when(ids.getRateLimitStatus()).thenReturn(rateLimitStatus);
        Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0);
        TsakResponse result = testCommand.execute(twitter);
        assertEquals(expected.getRemApiLimits(), result.getRemApiLimits());
        assertEquals(expected.getResponseData(), result.getResponseData());
        assertEquals(expected.getCommandDetails(), result.getCommandDetails());
    }

    /* CommandDumpMutesList (section label fixed: was mislabeled CommandDumpMutesIDs) */
    @Test
    public void testCase_17() throws Exception {
        List<PagableResponseList<User>> MutesListCollection = new ArrayList<PagableResponseList<User>>();
        MutesListCollection.add(userPagableList);
        CommandDumpMutesList testCommand = new CommandDumpMutesList();
        testCommand.setHelp(true);
        testCommand.setOutputFile(testOutput);
        testCommand.setLimit(testLimit);
        TsakResponse expected = new TsakResponse(0, MutesListCollection);
        expected.setCommandDetails(testCommand.toString());
        Mockito.when(twitter.getMutesList(-1)).thenReturn(userPagableList);
        Mockito.when(userPagableList.getRateLimitStatus()).thenReturn(rateLimitStatus);
        Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0);
        TsakResponse result = testCommand.execute(twitter);
        assertEquals(expected.getRemApiLimits(), result.getRemApiLimits());
        assertEquals(expected.getResponseData(), result.getResponseData());
        assertEquals(expected.getCommandDetails(), result.getCommandDetails());
    }

    /* CommandDumpOutgoingFriendships */
    @Test
    public void testCase_20() throws Exception {
        List<IDs> outGoingFriendshipsCollection = new ArrayList<IDs>();
        outGoingFriendshipsCollection.add(ids);
        CommandDumpOutgoingFriendships testCommand = new CommandDumpOutgoingFriendships();
        testCommand.setHelp(true);
        testCommand.setOutputFile(testOutput);
        testCommand.setLimit(testLimit);
        TsakResponse expected = new TsakResponse(0, outGoingFriendshipsCollection);
        expected.setCommandDetails(testCommand.toString());
Mockito.when(twitter.getOutgoingFriendships(-1)).thenReturn(ids); Mockito.when(ids.getRateLimitStatus()).thenReturn(rateLimitStatus); Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0); TsakResponse result = testCommand.execute(twitter); assertEquals(expected.getRemApiLimits(), result.getRemApiLimits()); assertEquals(expected.getResponseData(), result.getResponseData()); assertEquals(expected.getCommandDetails(), result.getCommandDetails()); } /* CommandDumpOwnRetweets */ @Test public void testCase_21() throws Exception { CommandDumpOwnRetweets testCommand = new CommandDumpOwnRetweets(); testCommand.setHelp(true); testCommand.setOutputFile(testOutput); TsakResponse expected = new TsakResponse(0, statusList); expected.setCommandDetails(testCommand.toString()); Mockito.when(twitter.getRetweetsOfMe()).thenReturn(statusList); Mockito.when(statusList.getRateLimitStatus()).thenReturn(rateLimitStatus); Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0); TsakResponse result = testCommand.execute(twitter); assertEquals(expected.getRemApiLimits(), result.getRemApiLimits()); assertEquals(expected.getResponseData(), result.getResponseData()); assertEquals(expected.getCommandDetails(), result.getCommandDetails()); } /* CommandDumpPlaceTrends */ @Test public void testCase_22() throws Exception { CommandDumpPlaceTrends testCommand = new CommandDumpPlaceTrends(); testCommand.setHelp(true); testCommand.setOutputFile(testOutput); testCommand.setWoeId(testWoeId); TsakResponse expected = new TsakResponse(0, trends); expected.setCommandDetails(testCommand.toString()); Mockito.when(twitter.getPlaceTrends(testWoeId)).thenReturn(trends); Mockito.when(trends.getRateLimitStatus()).thenReturn(rateLimitStatus); Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0); TsakResponse result = testCommand.execute(twitter); assertEquals(expected.getRemApiLimits(), result.getRemApiLimits()); assertEquals(expected.getResponseData(), result.getResponseData()); 
assertEquals(expected.getCommandDetails(), result.getCommandDetails()); } /* CommandDumpRetweeters */ @Test public void testCase_23() throws Exception { List<IDs> retweetersIDs = new ArrayList<IDs>(); retweetersIDs.add(ids); CommandDumpRetweeters testCommand = new CommandDumpRetweeters(); testCommand.setHelp(true); testCommand.setOutputFile(testOutput); testCommand.setStatusId(testStatusId); TsakResponse expected = new TsakResponse(0, retweetersIDs); expected.setCommandDetails(testCommand.toString()); Mockito.when(twitter.getRetweeterIds(testStatusId, -1)).thenReturn(ids); Mockito.when(ids.getRateLimitStatus()).thenReturn(rateLimitStatus); Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0); TsakResponse result = testCommand.execute(twitter); assertEquals(expected.getRemApiLimits(), result.getRemApiLimits()); assertEquals(expected.getResponseData(), result.getResponseData()); assertEquals(expected.getCommandDetails(), result.getCommandDetails()); } /* CommandDumpSavedSearches */ @Test public void testCase_24() throws Exception { CommandDumpSavedSearches testCommand = new CommandDumpSavedSearches(); testCommand.setHelp(true); testCommand.setOutputFile(testOutput); TsakResponse expected = new TsakResponse(0, savedSearchResponseList); expected.setCommandDetails(testCommand.toString()); Mockito.when(twitter.getSavedSearches()).thenReturn(savedSearchResponseList); Mockito.when(savedSearchResponseList.getRateLimitStatus()).thenReturn(rateLimitStatus); Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0); TsakResponse result = testCommand.execute(twitter); assertEquals(expected.getRemApiLimits(), result.getRemApiLimits()); assertEquals(expected.getResponseData(), result.getResponseData()); assertEquals(expected.getCommandDetails(), result.getCommandDetails()); } /* CommandDumpSimilarPlaces */ @Test public void testCase_25() throws Exception { } /* CommandDumpStatus */ @Test public void testCase_26() throws Exception { CommandDumpStatus testCommand = new 
CommandDumpStatus(); testCommand.setHelp(true); testCommand.setOutputFile(testOutput); testCommand.setStatusId(testStatusId); TsakResponse expected = new TsakResponse(0, status); expected.setCommandDetails(testCommand.toString()); Mockito.when(twitter.showStatus(testStatusId)).thenReturn(status); Mockito.when(status.getRateLimitStatus()).thenReturn(rateLimitStatus); Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0); TsakResponse result = testCommand.execute(twitter); assertEquals(expected.getRemApiLimits(), result.getRemApiLimits()); assertEquals(expected.getResponseData(), result.getResponseData()); assertEquals(expected.getCommandDetails(), result.getCommandDetails()); } /* CommandDumpSuggestedUserCats */ @Test public void testCase_27() throws Exception { CommandDumpSuggestedUserCats testCommand = new CommandDumpSuggestedUserCats(); testCommand.setHelp(true); testCommand.setOutputFile(testOutput); TsakResponse expected = new TsakResponse(0, categoryResponseList); expected.setCommandDetails(testCommand.toString()); Mockito.when(twitter.getSuggestedUserCategories()).thenReturn(categoryResponseList); Mockito.when(categoryResponseList.getRateLimitStatus()).thenReturn(rateLimitStatus); Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0); TsakResponse result = testCommand.execute(twitter); assertEquals(expected.getRemApiLimits(), result.getRemApiLimits()); assertEquals(expected.getResponseData(), result.getResponseData()); assertEquals(expected.getCommandDetails(), result.getCommandDetails()); } /* CommandDumpTweets */ @Test public void testCase_28() throws Exception { } /* CommandDumpSuggestedUserCats */ @Test public void testCase_29() throws Exception { CommandDumpSuggestedUserCats testCommand = new CommandDumpSuggestedUserCats(); testCommand.setHelp(true); testCommand.setOutputFile(testOutput); TsakResponse expected = new TsakResponse(0, categoryResponseList); expected.setCommandDetails(testCommand.toString()); 
Mockito.when(twitter.getSuggestedUserCategories()).thenReturn(categoryResponseList); Mockito.when(categoryResponseList.getRateLimitStatus()).thenReturn(rateLimitStatus); Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0); TsakResponse result = testCommand.execute(twitter); assertEquals(expected.getRemApiLimits(), result.getRemApiLimits()); assertEquals(expected.getResponseData(), result.getResponseData()); assertEquals(expected.getCommandDetails(), result.getCommandDetails()); } /* CommandDumpTweets */ @Test public void testCase_30() throws Exception { } /* CommandDumpUserListMembers */ @Test public void testCase_31() throws Exception { List<PagableResponseList<User>> ListMembersCollection = new ArrayList<PagableResponseList<User>>(); ListMembersCollection.add(userPagableList); CommandDumpUserListMembers testCommand = new CommandDumpUserListMembers(); testCommand.setHelp(true); testCommand.setOutputFile(testOutput); testCommand.setLimit(testLimit); testCommand.setListId(testListId); TsakResponse expected = new TsakResponse(0, ListMembersCollection); expected.setCommandDetails(testCommand.toString()); Mockito.when(twitter.getUserListMembers(testListId, -1)).thenReturn(userPagableList); Mockito.when(userPagableList.getRateLimitStatus()).thenReturn(rateLimitStatus); Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0); TsakResponse result = testCommand.execute(twitter); assertEquals(expected.getRemApiLimits(), result.getRemApiLimits()); assertEquals(expected.getResponseData(), result.getResponseData()); assertEquals(expected.getCommandDetails(), result.getCommandDetails()); } /* CommandDumpUserListMemberships */ @Test public void testCase_32() throws Exception { List<PagableResponseList<UserList>> listMemberships = new ArrayList<PagableResponseList<UserList>>(); listMemberships.add(userPRLists); CommandDumpUserListMemberships testCommand = new CommandDumpUserListMemberships(); testCommand.setHelp(true); testCommand.setOutputFile(testOutput); 
testCommand.setLimit(testLimit); testCommand.setUserId(testUserId); TsakResponse expected = new TsakResponse(0, listMemberships); expected.setCommandDetails(testCommand.toString()); Mockito.when(twitter.getUserListMemberships(testListId, -1)).thenReturn(userPRLists); Mockito.when(userPRLists.getRateLimitStatus()).thenReturn(rateLimitStatus); Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0); TsakResponse result = testCommand.execute(twitter); assertEquals(expected.getRemApiLimits(), result.getRemApiLimits()); assertEquals(expected.getResponseData(), result.getResponseData()); assertEquals(expected.getCommandDetails(), result.getCommandDetails()); } /* CommandDumpUserListSubscribers */ @Test public void testCase_33() throws Exception { List<PagableResponseList<User>> listSubscribers = new ArrayList<PagableResponseList<User>>(); listSubscribers.add(userPagableList); CommandDumpUserListSubscribers testCommand = new CommandDumpUserListSubscribers(); testCommand.setHelp(true); testCommand.setOutputFile(testOutput); testCommand.setListId(testListId); testCommand.setLimit(testLimit); TsakResponse expected = new TsakResponse(0, listSubscribers); expected.setCommandDetails(testCommand.toString()); Mockito.when(twitter.getUserListSubscribers(testListId, -1)).thenReturn(userPagableList); Mockito.when(userPagableList.getRateLimitStatus()).thenReturn(rateLimitStatus); Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0); TsakResponse result = testCommand.execute(twitter); assertEquals(expected.getRemApiLimits(), result.getRemApiLimits()); assertEquals(expected.getResponseData(), result.getResponseData()); assertEquals(expected.getCommandDetails(), result.getCommandDetails()); } /* CommandDumpUserListSubscriptions */ @Test public void testCase_34() throws Exception { List<PagableResponseList<UserList>> listSubscriptions = new ArrayList<PagableResponseList<UserList>>(); listSubscriptions.add(userPRLists); CommandDumpUserListSubscriptions testCommand = new 
CommandDumpUserListSubscriptions(); testCommand.setHelp(true); testCommand.setOutputFile(testOutput); testCommand.setScreenName(testUserName); testCommand.setLimit(testLimit); TsakResponse expected = new TsakResponse(0, listSubscriptions); expected.setCommandDetails(testCommand.toString()); Mockito.when(twitter.getUserListSubscriptions(testUserName, -1)).thenReturn(userPRLists); Mockito.when(userPRLists.getRateLimitStatus()).thenReturn(rateLimitStatus); Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0); TsakResponse result = testCommand.execute(twitter); assertEquals(expected.getRemApiLimits(), result.getRemApiLimits()); assertEquals(expected.getResponseData(), result.getResponseData()); assertEquals(expected.getCommandDetails(), result.getCommandDetails()); } /* CommandDumpUserSuggestions */ @Test public void testCase_35() throws Exception { CommandDumpUserSuggestions testCommand = new CommandDumpUserSuggestions(); testCommand.setHelp(true); testCommand.setOutputFile(testOutput); testCommand.setSlug(testSlug); TsakResponse expected = new TsakResponse(0, userPagableList); expected.setCommandDetails(testCommand.toString()); Mockito.when(twitter.getUserSuggestions(testSlug)).thenReturn(userPagableList); Mockito.when(userPagableList.getRateLimitStatus()).thenReturn(rateLimitStatus); Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0); TsakResponse result = testCommand.execute(twitter); assertEquals(expected.getRemApiLimits(), result.getRemApiLimits()); assertEquals(expected.getResponseData(), result.getResponseData()); assertEquals(expected.getCommandDetails(), result.getCommandDetails()); } /* CommandDumpUserSuggestions */ @Test public void testCase_36() throws Exception { CommandDumpUserSuggestions testCommand = new CommandDumpUserSuggestions(); testCommand.setHelp(true); testCommand.setOutputFile(testOutput); testCommand.setSlug(testSlug); TsakResponse expected = new TsakResponse(0, userPagableList); 
expected.setCommandDetails(testCommand.toString()); Mockito.when(twitter.getUserSuggestions(testSlug)).thenReturn(userPagableList); Mockito.when(userPagableList.getRateLimitStatus()).thenReturn(rateLimitStatus); Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0); TsakResponse result = testCommand.execute(twitter); assertEquals(expected.getRemApiLimits(), result.getRemApiLimits()); assertEquals(expected.getResponseData(), result.getResponseData()); assertEquals(expected.getCommandDetails(), result.getCommandDetails()); } /* CommandDumpUserTimeLine */ @Test public void testCase_37() throws Exception { } /* CommandLookupFriendShip */ @Test public void testCase_38() throws Exception { } /* CommandSearchPlace */ @Test public void testCase_39() throws Exception { } /* CommandSearchUsers */ @Test public void testCase40() throws Exception { List<ResponseList<User>> usersCollection = new ArrayList<ResponseList<User>>(); usersCollection.add(userRList); CommandSearchUsers testCommand = new CommandSearchUsers(); testCommand.setHelp(true); testCommand.setOutputFile(testOutput); testCommand.setKeywords(testKeywords); TsakResponse expected = new TsakResponse(0, usersCollection); expected.setCommandDetails(testCommand.toString()); Mockito.when(twitter.searchUsers(testKeywords, 1)).thenReturn(userRList); Mockito.when(userRList.getRateLimitStatus()).thenReturn(rateLimitStatus); Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0); TsakResponse result = testCommand.execute(twitter); assertEquals(expected.getRemApiLimits(), result.getRemApiLimits()); assertEquals(expected.getResponseData(), result.getResponseData()); assertEquals(expected.getCommandDetails(), result.getCommandDetails()); } /* CommandShowFriendShip */ @Test public void testCase41() throws Exception { CommandShowFriendShip testCommand = new CommandShowFriendShip(); testCommand.setHelp(true); testCommand.setOutputFile(testOutput); testCommand.setSource(testSource); testCommand.setTarget(testTarget); 
TsakResponse expected = new TsakResponse(0, relationship); expected.setCommandDetails(testCommand.toString()); Mockito.when(twitter.showFriendship(testSource, testTarget)).thenReturn(relationship); Mockito.when(relationship.getRateLimitStatus()).thenReturn(rateLimitStatus); Mockito.when(rateLimitStatus.getRemaining()).thenReturn(0); TsakResponse result = testCommand.execute(twitter); assertEquals(expected.getRemApiLimits(), result.getRemApiLimits()); assertEquals(expected.getResponseData(), result.getResponseData()); assertEquals(expected.getCommandDetails(), result.getCommandDetails()); } }
/*=========================================================================
 * Copyright Copyright (c) 2000-2014 Pivotal Software, Inc. All Rights Reserved.
 * This product is protected by U.S. and international copyright
 * and intellectual property laws. Pivotal products are covered by
 * more patents listed at http://www.pivotal.io/patents.
 * $Id: CompiledUndefined.java,v 1.2 2005/02/01 17:19:20 vaibhav Exp $
 *=========================================================================
 */
package com.gemstone.gemfire.cache.query.internal;

import java.util.*;
import com.gemstone.gemfire.cache.query.*;
import com.gemstone.gemfire.cache.query.internal.IndexInfo;
import com.gemstone.gemfire.cache.query.internal.index.IndexData;
import com.gemstone.gemfire.cache.query.internal.index.IndexProtocol;
import com.gemstone.gemfire.cache.query.internal.index.IndexUtils;
import com.gemstone.gemfire.cache.query.internal.types.StructTypeImpl;
import com.gemstone.gemfire.cache.query.types.ObjectType;
import com.gemstone.gemfire.cache.query.types.StructType;

/**
 * Predefined function for identity of the UNDEFINED literal.
 * Implements both IS_DEFINED(expr) and IS_UNDEFINED(expr); the {@code _is_defined}
 * flag selects which of the two this instance represents, and {@link #negate()}
 * flips between them.
 *
 * @version $Revision: 1.2 $
 * @author ericz
 * @author asif
 */
public class CompiledUndefined extends AbstractCompiledValue implements
    Negatable, Indexable {

  // The operand expression whose (un)definedness is tested.
  private CompiledValue _value;
  // true => IS_DEFINED semantics, false => IS_UNDEFINED semantics.
  private boolean _is_defined;

  public CompiledUndefined(CompiledValue value, boolean is_defined) {
    _value = value;
    _is_defined = is_defined;
  }

  @Override
  public List getChildren() {
    return Collections.singletonList(this._value);
  }

  public int getType() {
    return FUNCTION;
  }

  /**
   * Evaluates the operand and compares it against QueryService.UNDEFINED.
   * Returns Boolean.TRUE/FALSE, inverted when this instance is IS_DEFINED.
   */
  public Object evaluate(ExecutionContext context) throws FunctionDomainException,
      TypeMismatchException, NameResolutionException,
      QueryInvocationTargetException {
    boolean b = _value.evaluate(context) == QueryService.UNDEFINED;
    return Boolean.valueOf(_is_defined ? !b : b);
  }

  /**
   * Asif : Evaluates as a filter taking advantage of indexes if appropriate.
   * This function has a meaningful implementation only in CompiledComparison &
   * CompiledUndefined . It is unsupported in other classes. The additional
   * parameters which it takes are a boolean which is used to indicate whether
   * the index result set needs to be expanded to the top level or not. The
   * second is a CompiledValue representing the operands which are only iter
   * evaluatable. The CompiledValue passed will be null except if a
   * GroupJunction has only one filter evaluatable condition & rest are iter
   * operands. In such cases , the iter operands will be evaluated while
   * expanding/cutting down the index resultset
   *
   * @return SelectResults
   */
  @Override
  public SelectResults filterEvaluate(ExecutionContext context,
      SelectResults intermediateResults, boolean completeExpansionNeeded,
      CompiledValue iterOperands, RuntimeIterator[] indpndntItrs,
      boolean isIntersection, boolean conditioningNeeded,
      boolean evaluateProjAttrib) throws FunctionDomainException,
      TypeMismatchException, NameResolutionException,
      QueryInvocationTargetException {
    // this method is called if we are independent of the iterator,
    // or if we can use an index.
    // if we are independent, then we should not have been here in the first
    // place
    Support
        .Assert(
            this._value.isDependentOnCurrentScope(context),
            "For a condition which does not depend on any RuntimeIterator of current scope , we should not have been in this function");
    IndexInfo idxInfo[] = getIndexInfo(context);
    // The result collection's shape must match the index's result type:
    // a struct collection for composite results, a plain collection otherwise.
    ObjectType resultType = idxInfo[0]._index.getResultSetType();
    int indexFieldsSize = -1;
    SelectResults set = null;
    if (resultType instanceof StructType) {
      set = QueryUtils.createStructCollection(context, (StructTypeImpl) resultType);
      indexFieldsSize = ((StructTypeImpl) resultType).getFieldNames().length;
    }
    else {
      set = QueryUtils.createResultCollection(context, resultType);
      indexFieldsSize = 1;
    }
    // IS_DEFINED translates to key != UNDEFINED; IS_UNDEFINED to key == UNDEFINED.
    int op = _is_defined ? TOK_NE : TOK_EQ;
    Object key = QueryService.UNDEFINED;
    QueryObserver observer = QueryObserverHolder.getInstance();
    try {
      observer.beforeIndexLookup(idxInfo[0]._index, op, key);
      context.cachePut(CompiledValue.INDEX_INFO, idxInfo[0]);
      idxInfo[0]._index.query(key, op, set, context);
    }
    finally {
      // Observer must always be notified, even if the index lookup throws.
      observer.afterIndexLookup(set);
    }
    return QueryUtils.getconditionedIndexResults(set, idxInfo[0], context,
        indexFieldsSize, completeExpansionNeeded, iterOperands, indpndntItrs);
  }

  /**
   * Estimates the number of entries this condition would select, used by the
   * planner to order filters. A query hint on the index name short-circuits
   * the estimate with the hint's size.
   */
  public int getSizeEstimate(ExecutionContext context)
      throws FunctionDomainException, TypeMismatchException,
      NameResolutionException, QueryInvocationTargetException {
    IndexInfo[] idxInfo = getIndexInfo(context);
    assert idxInfo.length == 1;
    if (context instanceof QueryExecutionContext) {
      QueryExecutionContext qcontext = (QueryExecutionContext) context;
      if (qcontext.isHinted(idxInfo[0]._index.getName())) {
        return qcontext.getHintSize(idxInfo[0]._index.getName());
      }
    }
    int op = _is_defined ? TOK_NE : TOK_EQ;
    return idxInfo[0]._index.getSizeEstimate(QueryService.UNDEFINED, op,
        idxInfo[0]._matchLevel);
  }

  public int getOperator() {
    return _is_defined ? TOK_NE : TOK_EQ;
  }

  /**
   * evaluate as a filter, producing an intermediate result set. This may
   * require iteration if there is no index available. Asif :The boolean true
   * implies that CompiledComparsion when existing on its own always requires a
   * Complete expansion to top level iterators. This flag can get toggled to
   * false only from inside a GroupJunction
   *
   * <p>param intermediateResults if this parameter is provided, and we have to
   * iterate, then iterate over this result set instead of the entire
   * base collection.
   */
  @Override
  public SelectResults filterEvaluate(ExecutionContext context,
      SelectResults iterationLimit) throws FunctionDomainException,
      TypeMismatchException, NameResolutionException,
      QueryInvocationTargetException {
    return filterEvaluate(context, iterationLimit, true/* Complete Expansion needed */,
        null, null, true, isConditioningNeededForIndex(null, context, true), false);
  }

  /*
   * Asif : This function should never get invoked as now if a CompiledJunction
   * or GroupJunction contains a single filterable CompiledUndefined, it should
   * directly call filterEvaluate rather than auxFilterEvalutae. Overriding this
   * function just for ensuring that auxFilterEvaluate is not being called by
   * mistake.
   */
  @Override
  public SelectResults auxFilterEvaluate(ExecutionContext context,
      SelectResults intermediateResults) throws FunctionDomainException,
      TypeMismatchException, NameResolutionException,
      QueryInvocationTargetException {
    Support
        .assertionFailed(" This auxFilterEvaluate of CompiledComparison should never have got invoked.");
    return null;
  }

  @Override
  public Set computeDependencies(ExecutionContext context)
      throws TypeMismatchException, AmbiguousNameException,
      NameResolutionException {
    return context.addDependencies(this, this._value
        .computeDependencies(context));
  }

  // Flips IS_DEFINED <-> IS_UNDEFINED (used when a NOT is pushed down).
  public void negate() {
    _is_defined = !_is_defined;
  }

  // Invariant: the receiver is dependent on the current iterator.
  @Override
  protected PlanInfo protGetPlanInfo(ExecutionContext context)
      throws TypeMismatchException, AmbiguousNameException,
      NameResolutionException {
    PlanInfo result = new PlanInfo();
    IndexInfo[] indexInfo = getIndexInfo(context);
    if (indexInfo == null)
      return result;
    Support
        .Assert(
            indexInfo.length == 1,
            "For a CompiledUndefined we cannot have a join of two indexes. There should be only a single index to use");
    result.indexes.add(indexInfo[0]._index);
    result.evalAsFilter = true;
    return result;
  }

  /**
   * Returns the (cached) index usable for this condition, or null if none.
   * The lookup result — including "no index" — is memoized per context via
   * privGetIndexInfo/privSetIndexInfo, with NO_INDEXES_IDENTIFIER as the
   * negative-cache sentinel.
   */
  public IndexInfo[] getIndexInfo(ExecutionContext context)
      throws TypeMismatchException, AmbiguousNameException,
      NameResolutionException {
    IndexInfo[] indexInfo = privGetIndexInfo(context);
    if (indexInfo != null) {
      if (indexInfo == NO_INDEXES_IDENTIFIER) {
        return null;
      }
      else {
        return indexInfo;
      }
    }
    if (!IndexUtils.indexesEnabled)
      return null;
    //TODO:Asif : Check if the condition is such that Primary Key Index is used
    // & its key is DEFINED
    //, then are we returning all the values of the region ?
    // & that if the key is UNDEFINED are we returning an empty set.?
    IndexData indexData = QueryUtils.getAvailableIndexIfAny(this._value,
        context, _is_defined ? TOK_NE : TOK_EQ);
    IndexProtocol index = null;
    IndexInfo[] newIndexInfo = null;
    if (indexData != null) {
      index = indexData.getIndex();
    }
    if (index != null && index.isValid()) {
      newIndexInfo = new IndexInfo[1];
      /*
       * Pass the Key as null as the key is not of type CompiledValue( but of
       * type QueryService.UNDEFINED)
       */
      newIndexInfo[0] = new IndexInfo(null, this._value, index, indexData
          .getMatchLevel(), indexData.getMapping(), _is_defined ? TOK_NE
          : TOK_EQ);
    }
    if (newIndexInfo != null) {
      privSetIndexInfo(newIndexInfo, context);
    }
    else {
      // Cache the negative result so the index lookup is not repeated.
      privSetIndexInfo(NO_INDEXES_IDENTIFIER, context);
    }
    return newIndexInfo;
  }

  @Override
  public void generateCanonicalizedExpression(StringBuffer clauseBuffer,
      ExecutionContext context) throws AmbiguousNameException,
      TypeMismatchException, NameResolutionException {
    // Builds the text right-to-left: operand first, then the function name.
    clauseBuffer.insert(0, ')');
    _value.generateCanonicalizedExpression(clauseBuffer, context);
    if (_is_defined)
      clauseBuffer.insert(0, "IS_DEFINED(");
    else
      clauseBuffer.insert(0, "IS_UNDEFINED(");
  }

  //_indexInfo is a transient field
  // if this is just faulted in then can be null
  private IndexInfo[] privGetIndexInfo(ExecutionContext context) {
    return (IndexInfo[]) context.cacheGet(this);
  }

  private void privSetIndexInfo(IndexInfo[] indexInfo, ExecutionContext context) {
    context.cachePut(this, indexInfo);
  }

  public boolean isRangeEvaluatable() {
    return false;
  }

  public boolean isProjectionEvaluationAPossibility(ExecutionContext context) {
    return true;
  }

  //TODO:Asif: This should ideally be treated like CompiledComparison in terms evaluation of
  // iter operands etc
  public boolean isConditioningNeededForIndex(RuntimeIterator independentIter,
      ExecutionContext context, boolean completeExpnsNeeded)
      throws AmbiguousNameException, TypeMismatchException,
      NameResolutionException {
    return true;
  }

  /**
   * Decides whether this filter should run before {@code comparedTo} when the
   * planner orders conditions. Equality-style competitors are compared by size
   * estimate; range competitors (LITERAL_and, LE/LT/GE/GT) win over a NOT-EQ
   * form of this filter because ranges are assumed to fetch less data.
   */
  public boolean isBetterFilter(Filter comparedTo, ExecutionContext context,
      int thisSize) throws FunctionDomainException, TypeMismatchException,
      NameResolutionException, QueryInvocationTargetException {
    //If the current filter is equality & comparedTo filter is also equality based , then
    // return the one with lower size estimate is better
    boolean isThisBetter = true;
    int thisOperator = this.getOperator();
    int thatSize = comparedTo.getSizeEstimate(context);
    int thatOperator = comparedTo.getOperator();
    //Go with the lowest cost when hint is used.
    if (context instanceof QueryExecutionContext
        && ((QueryExecutionContext) context).hasHints()) {
      return thisSize <= thatSize;
    }
    switch (thatOperator) {
    case TOK_EQ:
    case TOK_NE:
    case TOK_NE_ALT:
      isThisBetter = thisSize <= thatSize;
      break;
    case LITERAL_and:
      //This is possible only in case of RangeJunction
      if (thisOperator == TOK_NE || thisOperator == TOK_NE_ALT) {
        //Asif: Give preference to range as I am assuming that range will fetch less data
        // as compared to NOT EQUALs
        isThisBetter = false;
      }
      break;
    case TOK_LE:
    case TOK_LT:
    case TOK_GE:
    case TOK_GT:
      //Give preference to this rather than that as this is more deterministic
      break;
    default:
      throw new IllegalArgumentException("The operator type =" + thatOperator
          + " is unknown");
    }
    return isThisBetter;
  }
}
/*
 * Copyright 2000-2010 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.refactoring.extractMethod;

import com.intellij.ui.TableUtil;
import com.intellij.ui.ToolbarDecorator;
import com.intellij.ui.table.JBTable;
import com.intellij.util.ui.EditableModel;
import org.jetbrains.annotations.NonNls;

import javax.swing.*;
import javax.swing.table.AbstractTableModel;
import javax.swing.table.DefaultTableCellRenderer;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;

/**
 * Two-column parameter table for the Extract Method dialog: a checkbox column
 * ("pass as parameter") and an editable parameter-name column. Subclasses wire
 * it to a concrete language by implementing the three abstract callbacks.
 *
 * @author oleg
 * It`s a modified copy of com.intellij.refactoring.util.ParameterTablePanel
 */
public abstract class AbstractParameterTablePanel extends JPanel {
  private AbstractVariableData[] myVariableData;

  private JBTable myTable;
  private MyTableModel myTableModel;
  private final ExtractMethodValidator myValidator;

  /** Called whenever the table content changes so the dialog can refresh the preview. */
  protected abstract void updateSignature();

  protected abstract void doEnterAction();

  protected abstract void doCancelAction();

  public void setVariableData(AbstractVariableData[] variableData) {
    myVariableData = variableData;
  }

  public AbstractParameterTablePanel(final ExtractMethodValidator validator) {
    super(new BorderLayout());
    myValidator = validator;
  }

  /**
   * Builds the table UI. Must be called after {@link #setVariableData}
   * (the model reads {@code myVariableData} directly).
   */
  public void init() {
    myTableModel = new MyTableModel();
    myTable = new JBTable(myTableModel);
    DefaultCellEditor defaultEditor = (DefaultCellEditor)myTable.getDefaultEditor(Object.class);
    // Single click starts editing the name cell.
    defaultEditor.setClickCountToStart(1);

    myTable.setTableHeader(null);
    myTable.getSelectionModel().setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
    // Checkbox column is only as wide as a checkbox.
    myTable.getColumnModel().getColumn(MyTableModel.CHECKMARK_COLUMN).setMaxWidth(new JCheckBox().getPreferredSize().width);
    myTable.getColumnModel().getColumn(MyTableModel.PARAMETER_NAME_COLUMN).setCellRenderer(new DefaultTableCellRenderer() {
      @Override
      public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) {
        super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column);
        AbstractVariableData data = myVariableData[row];
        setText(data.name);
        return this;
      }
    });
    myTable.setPreferredScrollableViewportSize(new Dimension(250, myTable.getRowHeight() * 5));
    myTable.setShowGrid(false);
    myTable.setIntercellSpacing(new Dimension(0, 0));

    // Space toggles the "pass as parameter" checkbox for all selected rows.
    // If any selected row is unchecked, everything becomes checked; otherwise unchecked.
    @NonNls final InputMap inputMap = myTable.getInputMap();
    inputMap.put(KeyStroke.getKeyStroke(KeyEvent.VK_SPACE, 0), "enable_disable");
    @NonNls final ActionMap actionMap = myTable.getActionMap();
    actionMap.put("enable_disable", new AbstractAction() {
      @Override
      public void actionPerformed(ActionEvent e) {
        if (myTable.isEditing()) return;
        int[] rows = myTable.getSelectedRows();
        if (rows.length > 0) {
          boolean valueToBeSet = false;
          for (int row : rows) {
            if (!myVariableData[row].passAsParameter) {
              valueToBeSet = true;
              break;
            }
          }
          for (int row : rows) {
            myVariableData[row].passAsParameter = valueToBeSet;
          }
          myTableModel.fireTableRowsUpdated(rows[0], rows[rows.length - 1]);
          TableUtil.selectRows(myTable, rows);
        }
      }
    });
    // F2 should edit the name
    inputMap.put(KeyStroke.getKeyStroke(KeyEvent.VK_F2, 0), "edit_parameter_name");
    actionMap.put("edit_parameter_name", new AbstractAction() {
      @Override
      public void actionPerformed(ActionEvent e) {
        if (!myTable.isEditing()) {
          int row = myTable.getSelectedRow();
          if (row >= 0 && row < myTableModel.getRowCount()) {
            TableUtil.editCellAt(myTable, row, MyTableModel.PARAMETER_NAME_COLUMN);
          }
        }
      }
    });

    // Add/remove are intentionally disabled: the set of candidate variables is fixed,
    // only their order and inclusion can change.
    JPanel listPanel = ToolbarDecorator.createDecorator(myTable).disableAddAction().disableRemoveAction().createPanel();
    add(listPanel, BorderLayout.CENTER);

    if (myVariableData.length > 1) {
      myTable.getSelectionModel().setSelectionInterval(0, 0);
    }
  }

  @Override
  public void setEnabled(boolean enabled) {
    myTable.setEnabled(enabled);
    super.setEnabled(enabled);
  }

  public AbstractVariableData[] getVariableData() {
    return myVariableData;
  }

  /** Table model backed directly by {@code myVariableData}; supports row reordering only. */
  private class MyTableModel extends AbstractTableModel implements EditableModel {
    public static final int CHECKMARK_COLUMN = 0;
    public static final int PARAMETER_NAME_COLUMN = 1;

    @Override
    public int getRowCount() {
      return myVariableData.length;
    }

    @Override
    public int getColumnCount() {
      return 2;
    }

    @Override
    public Object getValueAt(int rowIndex, int columnIndex) {
      switch (columnIndex) {
        case CHECKMARK_COLUMN: {
          return myVariableData[rowIndex].passAsParameter ? Boolean.TRUE : Boolean.FALSE;
        }
        case PARAMETER_NAME_COLUMN: {
          return myVariableData[rowIndex].name;
        }
      }
      assert false;
      return null;
    }

    @Override
    public void setValueAt(Object aValue, int rowIndex, int columnIndex) {
      switch (columnIndex) {
        case CHECKMARK_COLUMN: {
          myVariableData[rowIndex].passAsParameter = ((Boolean)aValue).booleanValue();
          fireTableRowsUpdated(rowIndex, rowIndex);
          myTable.getSelectionModel().setSelectionInterval(rowIndex, rowIndex);
          updateSignature();
          break;
        }
        case PARAMETER_NAME_COLUMN: {
          AbstractVariableData data = myVariableData[rowIndex];
          String name = (String)aValue;
          // Invalid identifiers are silently ignored; the signature preview is
          // refreshed either way so the dialog reflects the current state.
          if (myValidator.isValidName(name)) {
            data.name = name;
          }
          updateSignature();
          break;
        }
      }
    }

    @Override
    public boolean isCellEditable(int rowIndex, int columnIndex) {
      switch (columnIndex) {
        case CHECKMARK_COLUMN:
          return isEnabled();
        case PARAMETER_NAME_COLUMN:
          // Renaming only makes sense for variables actually passed as parameters.
          return isEnabled() && myVariableData[rowIndex].passAsParameter;
        default:
          return false;
      }
    }

    @Override
    public Class<?> getColumnClass(int columnIndex) {
      if (columnIndex == CHECKMARK_COLUMN) {
        return Boolean.class;
      }
      return super.getColumnClass(columnIndex);
    }

    @Override
    public void addRow() {
      // Rows are fixed; the toolbar's add action is disabled in init().
      // Was IllegalAccessError (a JVM linkage Error) — UnsupportedOperationException
      // is the correct signal for an unsupported optional operation.
      throw new UnsupportedOperationException("Not implemented");
    }

    @Override
    public void removeRow(int index) {
      // See addRow(): removal is intentionally unsupported.
      throw new UnsupportedOperationException("Not implemented");
    }

    @Override
    public void exchangeRows(int row, int targetRow) {
      if (row < 0 || row >= getVariableData().length) return;
      if (targetRow < 0 || targetRow >= getVariableData().length) return;
      final AbstractVariableData currentItem = getVariableData()[row];
      getVariableData()[row] = getVariableData()[targetRow];
      getVariableData()[targetRow] = currentItem;
      myTableModel.fireTableRowsUpdated(Math.min(targetRow, row), Math.max(targetRow, row));
      myTable.getSelectionModel().setSelectionInterval(targetRow, targetRow);
      updateSignature();
    }

    @Override
    public boolean canExchangeRows(int row, int targetRow) {
      // Both indices must be valid positions in the data array.
      return row >= 0 && row < getVariableData().length
             && targetRow >= 0 && targetRow < getVariableData().length;
    }
  }
}
package com.oklab.gitjourney.activities;

import android.content.Intent;
import android.content.SharedPreferences;
import android.net.Uri;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.TabLayout;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
import android.support.v4.view.ViewPager;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;

import com.google.android.gms.maps.SupportMapFragment;
import com.google.firebase.analytics.FirebaseAnalytics;
import com.oklab.gitjourney.R;
import com.oklab.gitjourney.adapters.FirebaseAnalyticsWrapper;
import com.oklab.gitjourney.fragments.FeedListFragment;
import com.oklab.gitjourney.fragments.FollowersListFragment;
import com.oklab.gitjourney.fragments.FollowingListFragment;
import com.oklab.gitjourney.fragments.LocationsReadyCallback;
import com.oklab.gitjourney.fragments.RepositoriesListFragment;
import com.oklab.gitjourney.fragments.StarsListFragment;
import com.oklab.gitjourney.services.TakeScreenshotService;

import java.util.TimeZone;

/**
 * Main tabbed screen of the app. Hosts up to six pages in a {@link ViewPager}
 * (feed, repositories, following, followers, stars, and — when the
 * "map_switch" preference is on — a Google map), logs page changes to
 * Firebase Analytics, and implements the interaction-listener interfaces of
 * every hosted list fragment.
 */
public class GeneralActivity extends AppCompatActivity implements FeedListFragment.OnFragmentInteractionListener,
        RepositoriesListFragment.OnFragmentInteractionListener,
        StarsListFragment.OnFragmentInteractionListener,
        FollowersListFragment.OnFragmentInteractionListener,
        FollowingListFragment.OnFragmentInteractionListener {

    private static final String TAG = GeneralActivity.class.getSimpleName();

    // Wraps the framework screenshot capture; wired to the (currently hidden) FAB below.
    private TakeScreenshotService takeScreenshotService;
    // Thin wrapper around FirebaseAnalytics used for event/screen logging.
    private FirebaseAnalyticsWrapper firebaseAnalytics;

    /**
     * The {@link android.support.v4.view.PagerAdapter} that will provide
     * fragments for each of the sections. We use a
     * {@link FragmentPagerAdapter} derivative, which will keep every
     * loaded fragment in memory. If this becomes too memory intensive, it
     * may be best to switch to a
     * {@link android.support.v4.app.FragmentStatePagerAdapter}.
     */
    private SectionsPagerAdapter mSectionsPagerAdapter;

    /**
     * The {@link ViewPager} that will host the section contents.
     */
    private ViewPager mViewPager;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        Log.v(TAG, "onCreate");
        // Inflate the layout first — all findViewById calls below depend on it.
        setContentView(R.layout.activity_general);
        firebaseAnalytics = new FirebaseAnalyticsWrapper(this);
        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        // Create the adapter that will return a fragment for each of the three
        // primary sections of the activity.
        // NOTE(review): toString() logs the full TimeZone object, not just its ID;
        // if only the zone ID is wanted, getID() would be the usual call — confirm intent.
        String timezone = TimeZone.getDefault().toString();
        Log.v(TAG, "onCreate timezone = " + timezone);
        mSectionsPagerAdapter = new SectionsPagerAdapter(getSupportFragmentManager());
        // Set up the ViewPager with the sections adapter.
        mViewPager = (ViewPager) findViewById(R.id.container);
        mViewPager.setAdapter(mSectionsPagerAdapter);
        // Report every tab change as a VIEW_ITEM analytics event.
        mViewPager.addOnPageChangeListener(new ViewPager.OnPageChangeListener() {
            @Override
            public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
            }

            @Override
            public void onPageSelected(int position) {
                Log.v(TAG, "onPageSelected, position = " + position);
                Bundle bundle = new Bundle();
                bundle.putString(FirebaseAnalytics.Param.ITEM_ID, Integer.toString(position));
                bundle.putString(FirebaseAnalytics.Param.ITEM_CATEGORY, "GeneralActivityTabChange");
                firebaseAnalytics.logEvent(FirebaseAnalytics.Event.VIEW_ITEM, bundle);
            }

            @Override
            public void onPageScrollStateChanged(int state) {
            }
        });
        // Keep all sibling pages alive so switching tabs does not recreate fragments.
        mViewPager.setOffscreenPageLimit(5);
        TabLayout tabLayout = (TabLayout) findViewById(R.id.tabs);
        tabLayout.setupWithViewPager(mViewPager);
        takeScreenshotService = new TakeScreenshotService(this);
        // Screenshot FAB: wired up but hidden (View.GONE) — presumably disabled
        // pending a release decision; the click handler is still attached.
        FloatingActionButton fab = (FloatingActionButton) findViewById(R.id.fab);
        fab.setVisibility(View.GONE);
        fab.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                Log.v(TAG, "takeScreenShot");
                takeScreenshotService.takeScreenShot();
            }
        });
    }

    @Override
    protected void onStart() {
        super.onStart();
        // Tag this screen for Firebase screen-view reporting.
        firebaseAnalytics.setCurrentScreen(this, "GeneralActivityFirebaseAnalytics");
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_general, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();

        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            Intent intent = new Intent(this, SettingsActivity.class);
            startActivity(intent);
            return true;
        }

        return super.onOptionsItemSelected(item);
    }

    /**
     * Shared callback for all hosted list fragments; currently log-only.
     */
    @Override
    public void onFragmentInteraction(Uri uri) {
        Log.v(TAG, "onFragmentInteraction ");
    }

    /**
     * A placeholder fragment containing a simple view.
     */
    public static class PlaceholderFragment extends Fragment {
        /**
         * The fragment argument representing the section number for this
         * fragment.
         */
        private static final String ARG_SECTION_NUMBER = "section_number";

        public PlaceholderFragment() {
        }

        /**
         * Returns a new instance of this fragment for the given section
         * number.
         */
        public static PlaceholderFragment newInstance(int sectionNumber) {
            Log.v(TAG, "newInstance ");
            PlaceholderFragment fragment = new PlaceholderFragment();
            Bundle args = new Bundle();
            args.putInt(ARG_SECTION_NUMBER, sectionNumber);
            fragment.setArguments(args);
            return fragment;
        }

        @Override
        public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
            Log.v(TAG, "onCreateView ");
            View rootView = inflater.inflate(R.layout.fragment_general_list, container, false);
            return rootView;
        }
    }

    /**
     * A {@link FragmentPagerAdapter} that returns a fragment corresponding to
     * one of the sections/tabs/pages.
     */
    public class SectionsPagerAdapter extends FragmentPagerAdapter {

        public SectionsPagerAdapter(FragmentManager fm) {
            super(fm);
        }

        @Override
        public Fragment getItem(int position) {
            Log.v(TAG, "getItem, position = " + position);
            // getItem is called to instantiate the fragment for the given page.
            // Return a PlaceholderFragment (defined as a static inner class below).
            switch (position) {
                case 0:
                    return FeedListFragment.newInstance();
                case 1:
                    return RepositoriesListFragment.newInstance();
                case 2:
                    return FollowingListFragment.newInstance();
                case 3:
                    return FollowersListFragment.newInstance();
                case 4:
                    return StarsListFragment.newInstance();
                case 5:
                    // Map page: attach the async map callback before returning the fragment.
                    LocationsReadyCallback callback = new LocationsReadyCallback(GeneralActivity.this);
                    SupportMapFragment fragment = SupportMapFragment.newInstance();
                    fragment.getMapAsync(callback);
                    return fragment;
            }
            // Unreachable while getCount() <= 6; kept as a defensive fallback.
            return PlaceholderFragment.newInstance(position + 1);
        }

        @Override
        public int getCount() {
            // Re-read on every call so toggling "map_switch" in settings
            // shows/hides the map tab without restarting the activity.
            SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(GeneralActivity.this);
            boolean map = sharedPref.getBoolean("map_switch", true);
            Log.v(TAG, "map value = " + map);
            if (map) {
                return 6;
            } else {
                return 5;
            }
        }

        @Override
        public CharSequence getPageTitle(int position) {
            Log.v(TAG, "getPageTitle ");
            switch (position) {
                case 0:
                    return getApplicationContext().getString(R.string.feed);
                case 1:
                    return getApplicationContext().getString(R.string.repositories);
                case 2:
                    return getApplicationContext().getString(R.string.following);
                case 3:
                    return getApplicationContext().getString(R.string.followers);
                case 4:
                    return getApplicationContext().getString(R.string.stars);
                case 5:
                    return getApplicationContext().getString(R.string.map);
            }
            return null;
        }
    }
}
/*
 * Copyright 2015 Comcast Cable Communications Management, LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.comcast.cdn.traffic_control.traffic_router.core.loc;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

import com.comcast.cdn.traffic_control.traffic_router.core.util.CidrAddress;
import com.comcast.cdn.traffic_control.traffic_router.geolocation.Geolocation;
import org.apache.log4j.Logger;
import org.apache.wicket.ajax.json.JSONArray;
import org.apache.wicket.ajax.json.JSONException;
import org.apache.wicket.ajax.json.JSONObject;
import org.apache.wicket.ajax.json.JSONTokener;

import com.comcast.cdn.traffic_control.traffic_router.core.cache.CacheLocation;

/**
 * One node of the coverage-zone lookup tree. Each node holds a CIDR range
 * plus an optional location id, cache location, and geolocation; children
 * are kept in a {@link TreeMap} keyed by the nodes themselves, so ordering
 * and equality of keys are entirely defined by {@link #compareTo} (i.e. by
 * {@code CidrAddress.compareTo}).
 *
 * NOTE(review): the tree logic below treats {@code compareTo(...) == 0} as
 * "this range matches/contains the other" — confirm against
 * {@code CidrAddress.compareTo} before changing any of it.
 */
public class NetworkNode implements Comparable<NetworkNode> {
    private static final Logger LOGGER = Logger.getLogger(NetworkNode.class);
    // The catch-all root range: matches every IPv4 address.
    private static final String DEFAULT_SUB_STR = "0.0.0.0/0";
    // Process-wide tree root, set by generateTree() or lazily by getInstance().
    private static NetworkNode instance;

    private CidrAddress cidrAddress;
    private String loc;
    private CacheLocation cacheLocation = null;
    private Geolocation geolocation = null;
    // Child ranges contained in this node's range; null until first add().
    protected Map<NetworkNode,NetworkNode> children;

    /**
     * Returns the shared tree root, lazily creating an empty default-route
     * node if no tree has been generated yet.
     * NOTE(review): not synchronized — assumes first call happens before any
     * concurrent access; verify against callers.
     */
    public static NetworkNode getInstance() {
        if (instance != null) {
            return instance;
        }
        try {
            instance = new NetworkNode(DEFAULT_SUB_STR);
        } catch (NetworkNodeException e) {
            LOGGER.warn(e);
        }
        return instance;
    }

    /**
     * Builds the coverage-zone tree from a JSON file on disk.
     */
    public static NetworkNode generateTree(final File f) throws NetworkNodeException, FileNotFoundException, JSONException  {
        return generateTree(new JSONObject(new JSONTokener(new FileReader(f))));
    }

    /**
     * Builds the coverage-zone tree from a parsed coverage zone file.
     * For each entry under "coverageZones": reads optional "coordinates"
     * (latitude/longitude) into a Geolocation, then inserts every "network6"
     * CIDR into the IPv6 side and every "network" CIDR into the IPv4 side of
     * a fresh {@link SuperNode} root. Individual bad CIDRs are logged and
     * skipped; a missing network/network6 key is logged as a warning.
     * Replaces the shared {@link #instance} on success; returns null if the
     * top-level JSON is unusable.
     */
    @SuppressWarnings("PMD.CyclomaticComplexity")
    public static NetworkNode generateTree(final JSONObject json) {
        try {
            final JSONObject coverageZones = json.getJSONObject("coverageZones");

            final SuperNode root = new SuperNode();

            for (final String loc : JSONObject.getNames(coverageZones)) {
                final JSONObject locData = coverageZones.getJSONObject(loc);
                final JSONObject coordinates = locData.optJSONObject("coordinates");
                Geolocation geolocation = null;

                if (coordinates != null && coordinates.has("latitude") && coordinates.has("longitude")) {
                    final double latitude = coordinates.optDouble("latitude");
                    final double longitude = coordinates.optDouble("longitude");
                    geolocation = new Geolocation(latitude, longitude);
                }

                try {
                    final JSONArray network6 = locData.getJSONArray("network6");

                    for (int i = 0; i < network6.length(); i++) {
                        final String ip = network6.getString(i);

                        try {
                            root.add6(new NetworkNode(ip, loc, geolocation));
                        } catch (NetworkNodeException ex) {
                            LOGGER.error(ex, ex);
                        }
                    }
                } catch (JSONException ex) {
                    LOGGER.warn("An exception was caught while accessing the network6 key of " + loc + " in the incoming coverage zone file: " + ex.getMessage());
                }

                try {
                    final JSONArray network = locData.getJSONArray("network");

                    for (int i = 0; i < network.length(); i++) {
                        final String ip = network.getString(i);

                        try {
                            root.add(new NetworkNode(ip, loc, geolocation));
                        } catch (NetworkNodeException ex) {
                            LOGGER.error(ex, ex);
                        }
                    }
                } catch (JSONException ex) {
                    LOGGER.warn("An exception was caught while accessing the network key of " + loc + " in the incoming coverage zone file: " + ex.getMessage());
                }
            }

            instance = root;
            return root;
        } catch (JSONException e) {
            LOGGER.warn(e,e);
        } catch (NetworkNodeException ex) {
            LOGGER.fatal(ex, ex);
        }

        return null;
    }

    public NetworkNode(final String str) throws NetworkNodeException {
        this(str, null);
    }

    public NetworkNode(final String str, final String loc) throws NetworkNodeException {
        this(str, loc, null);
    }

    /**
     * @param str CIDR string, e.g. "10.0.0.0/8" (parsed by CidrAddress.fromString)
     * @param loc coverage-zone location id; may be null
     * @param geolocation optional coordinates for this zone; may be null
     */
    public NetworkNode(final String str, final String loc, final Geolocation geolocation) throws NetworkNodeException {
        this.loc = loc;
        this.geolocation = geolocation;
        cidrAddress = CidrAddress.fromString(str);
    }

    public NetworkNode getNetwork(final String ip) throws NetworkNodeException {
        return getNetwork(new NetworkNode(ip));
    }

    /**
     * Walks the tree toward the most specific node whose range matches
     * {@code ipnn}; returns null if this node does not match at all, or the
     * deepest matching node otherwise.
     */
    public NetworkNode getNetwork(final NetworkNode ipnn) {
        if (this.compareTo(ipnn) != 0) {
            return null;
        }

        if (children == null) {
            return this;
        }

        final NetworkNode c = children.get(ipnn);

        if (c == null) {
            return this;
        }

        return c.getNetwork(ipnn);
    }

    /**
     * Inserts {@code nn} into this subtree; synchronized so concurrent
     * inserts do not race on lazily creating/mutating {@link #children}.
     */
    public Boolean add(final NetworkNode nn) {
        synchronized(this) {
            if (children == null) {
                children = new TreeMap<NetworkNode,NetworkNode>();
            }

            return add(children, nn);
        }
    }

    /**
     * Core insert: rejects nodes outside this range or duplicating an existing
     * child range; reparents any current children that fall inside the new
     * node's range; then either delegates to a child that contains the new
     * node, or adds it as a direct child. The reparent-then-remove order
     * matters — do not reorder these loops.
     */
    protected Boolean add(final Map<NetworkNode,NetworkNode> children, final NetworkNode networkNode) {
        if (compareTo(networkNode) != 0) {
            return false;
        }

        for (final NetworkNode child : children.values()) {
            if (child.cidrAddress.equals(networkNode.cidrAddress)) {
                return false;
            }
        }

        final List<NetworkNode> movedChildren = new ArrayList<NetworkNode>();

        for (final NetworkNode child : children.values()) {
            if (networkNode.cidrAddress.includesAddress(child.cidrAddress)) {
                movedChildren.add(child);
                networkNode.add(child);
            }
        }

        for (final NetworkNode movedChild : movedChildren) {
            children.remove(movedChild);
        }

        for (final NetworkNode child : children.values()) {
            if (child.cidrAddress.includesAddress(networkNode.cidrAddress)) {
                return child.add(networkNode);
            }
        }

        children.put(networkNode, networkNode);
        return true;
    }

    public String getLoc() {
        return loc;
    }

    public Geolocation getGeolocation() {
        return geolocation;
    }

    public CacheLocation getCacheLocation() {
        return cacheLocation;
    }

    public void setCacheLocation(final CacheLocation cacheLocation) {
        this.cacheLocation = cacheLocation;
    }

    /**
     * Total node count of this subtree, including this node.
     */
    public int size() {
        if (children == null) {
            return 1;
        }

        int size = 1;

        for (final NetworkNode child : children.keySet()) {
            size += child.size();
        }

        return size;
    }

    /**
     * Recursively clears cacheLocation on this subtree (both IPv4 children
     * and, for the root SuperNode, the IPv6 children).
     */
    public void clearCacheLocations() {
        synchronized(this) {
            cacheLocation = null;

            if (this instanceof SuperNode) {
                final SuperNode superNode = (SuperNode) this;

                if (superNode.children6 != null) {
                    for (final NetworkNode child : superNode.children6.keySet()) {
                        child.clearCacheLocations();
                    }
                }
            }

            if (children != null) {
                for (final NetworkNode child : children.keySet()) {
                    child.clearCacheLocations();
                }
            }
        }
    }

    /**
     * Tree root that keeps a second child map for IPv6 ranges alongside the
     * inherited IPv4 {@code children}; its own range is the IPv4 default route.
     */
    public static class SuperNode extends NetworkNode {
        private Map<NetworkNode, NetworkNode> children6;

        public SuperNode() throws NetworkNodeException {
            super(DEFAULT_SUB_STR);
        }

        // NOTE(review): unlike NetworkNode.add, this is not synchronized —
        // confirm single-threaded tree construction before relying on it.
        public Boolean add6(final NetworkNode nn) {
            if(children6 == null) {
                children6 = new TreeMap<NetworkNode,NetworkNode>();
            }

            return add(children6, nn);
        }

        // Dispatches IPv6 lookups to the v6 side, everything else to the v4 side.
        public NetworkNode getNetwork(final String ip) throws NetworkNodeException {
            final NetworkNode nn = new NetworkNode(ip);

            if (nn.cidrAddress.isIpV6()) {
                return getNetwork6(nn);
            }

            return getNetwork(nn);
        }

        public NetworkNode getNetwork6(final NetworkNode networkNode) {
            if (children6 == null) {
                return this;
            }

            final NetworkNode c = children6.get(networkNode);

            if (c == null) {
                return this;
            }

            return c.getNetwork(networkNode);
        }
    }

    // Ordering (and TreeMap key equality) delegates entirely to CidrAddress.
    @Override
    public int compareTo(final NetworkNode other) {
        return cidrAddress.compareTo(other.cidrAddress);
    }

    /**
     * Human-readable form: "[host/netmaskLength] - location:loc".
     */
    public String toString() {
        String str = "";

        try {
            str = InetAddress.getByAddress(cidrAddress.getHostBytes()).toString().replace("/", "");
        } catch (UnknownHostException e) {
            LOGGER.warn(e,e);
        }

        return "[" + str + "/" + cidrAddress.getNetmaskLength() + "] - location:" + this.getLoc();
    }
}
package org.ripple.bouncycastle.x509; import java.io.IOException; import java.math.BigInteger; import java.security.cert.CertificateExpiredException; import java.security.cert.CertificateNotYetValidException; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import org.ripple.bouncycastle.asn1.ASN1InputStream; import org.ripple.bouncycastle.asn1.ASN1Primitive; import org.ripple.bouncycastle.asn1.DEROctetString; import org.ripple.bouncycastle.asn1.x509.GeneralName; import org.ripple.bouncycastle.asn1.x509.Target; import org.ripple.bouncycastle.asn1.x509.TargetInformation; import org.ripple.bouncycastle.asn1.x509.Targets; import org.ripple.bouncycastle.asn1.x509.X509Extensions; import org.ripple.bouncycastle.util.Selector; /** * This class is an <code>Selector</code> like implementation to select * attribute certificates from a given set of criteria. * * @see org.ripple.bouncycastle.x509.X509AttributeCertificate * @see org.ripple.bouncycastle.x509.X509Store * @deprecated use org.bouncycastle.cert.X509AttributeCertificateSelector and org.bouncycastle.cert.X509AttributeCertificateSelectorBuilder. */ public class X509AttributeCertStoreSelector implements Selector { // TODO: name constraints??? private AttributeCertificateHolder holder; private AttributeCertificateIssuer issuer; private BigInteger serialNumber; private Date attributeCertificateValid; private X509AttributeCertificate attributeCert; private Collection targetNames = new HashSet(); private Collection targetGroups = new HashSet(); public X509AttributeCertStoreSelector() { super(); } /** * Decides if the given attribute certificate should be selected. * * @param obj The attribute certificate which should be checked. * @return <code>true</code> if the attribute certificate can be selected, * <code>false</code> otherwise. 
*/ public boolean match(Object obj) { if (!(obj instanceof X509AttributeCertificate)) { return false; } X509AttributeCertificate attrCert = (X509AttributeCertificate) obj; if (this.attributeCert != null) { if (!this.attributeCert.equals(attrCert)) { return false; } } if (serialNumber != null) { if (!attrCert.getSerialNumber().equals(serialNumber)) { return false; } } if (holder != null) { if (!attrCert.getHolder().equals(holder)) { return false; } } if (issuer != null) { if (!attrCert.getIssuer().equals(issuer)) { return false; } } if (attributeCertificateValid != null) { try { attrCert.checkValidity(attributeCertificateValid); } catch (CertificateExpiredException e) { return false; } catch (CertificateNotYetValidException e) { return false; } } if (!targetNames.isEmpty() || !targetGroups.isEmpty()) { byte[] targetInfoExt = attrCert .getExtensionValue(X509Extensions.TargetInformation.getId()); if (targetInfoExt != null) { TargetInformation targetinfo; try { targetinfo = TargetInformation .getInstance(new ASN1InputStream( ((DEROctetString) DEROctetString .fromByteArray(targetInfoExt)).getOctets()) .readObject()); } catch (IOException e) { return false; } catch (IllegalArgumentException e) { return false; } Targets[] targetss = targetinfo.getTargetsObjects(); if (!targetNames.isEmpty()) { boolean found = false; for (int i=0; i<targetss.length; i++) { Targets t = targetss[i]; Target[] targets = t.getTargets(); for (int j=0; j<targets.length; j++) { if (targetNames.contains(GeneralName.getInstance(targets[j] .getTargetName()))) { found = true; break; } } } if (!found) { return false; } } if (!targetGroups.isEmpty()) { boolean found = false; for (int i=0; i<targetss.length; i++) { Targets t = targetss[i]; Target[] targets = t.getTargets(); for (int j=0; j<targets.length; j++) { if (targetGroups.contains(GeneralName.getInstance(targets[j] .getTargetGroup()))) { found = true; break; } } } if (!found) { return false; } } } } return true; } /** * Returns a clone of this 
object. * * @return the clone. */ public Object clone() { X509AttributeCertStoreSelector sel = new X509AttributeCertStoreSelector(); sel.attributeCert = attributeCert; sel.attributeCertificateValid = getAttributeCertificateValid(); sel.holder = holder; sel.issuer = issuer; sel.serialNumber = serialNumber; sel.targetGroups = getTargetGroups(); sel.targetNames = getTargetNames(); return sel; } /** * Returns the attribute certificate which must be matched. * * @return Returns the attribute certificate. */ public X509AttributeCertificate getAttributeCert() { return attributeCert; } /** * Set the attribute certificate to be matched. If <code>null</code> is * given any will do. * * @param attributeCert The attribute certificate to set. */ public void setAttributeCert(X509AttributeCertificate attributeCert) { this.attributeCert = attributeCert; } /** * Get the criteria for the validity. * * @return Returns the attributeCertificateValid. */ public Date getAttributeCertificateValid() { if (attributeCertificateValid != null) { return new Date(attributeCertificateValid.getTime()); } return null; } /** * Set the time, when the certificate must be valid. If <code>null</code> * is given any will do. * * @param attributeCertificateValid The attribute certificate validation * time to set. */ public void setAttributeCertificateValid(Date attributeCertificateValid) { if (attributeCertificateValid != null) { this.attributeCertificateValid = new Date(attributeCertificateValid .getTime()); } else { this.attributeCertificateValid = null; } } /** * Gets the holder. * * @return Returns the holder. */ public AttributeCertificateHolder getHolder() { return holder; } /** * Sets the holder. If <code>null</code> is given any will do. * * @param holder The holder to set. */ public void setHolder(AttributeCertificateHolder holder) { this.holder = holder; } /** * Returns the issuer criterion. * * @return Returns the issuer. 
*/ public AttributeCertificateIssuer getIssuer() { return issuer; } /** * Sets the issuer the attribute certificate must have. If <code>null</code> * is given any will do. * * @param issuer The issuer to set. */ public void setIssuer(AttributeCertificateIssuer issuer) { this.issuer = issuer; } /** * Gets the serial number the attribute certificate must have. * * @return Returns the serialNumber. */ public BigInteger getSerialNumber() { return serialNumber; } /** * Sets the serial number the attribute certificate must have. If * <code>null</code> is given any will do. * * @param serialNumber The serialNumber to set. */ public void setSerialNumber(BigInteger serialNumber) { this.serialNumber = serialNumber; } /** * Adds a target name criterion for the attribute certificate to the target * information extension criteria. The <code>X509AttributeCertificate</code> * must contain at least one of the specified target names. * <p> * Each attribute certificate may contain a target information extension * limiting the servers where this attribute certificate can be used. If * this extension is not present, the attribute certificate is not targeted * and may be accepted by any server. * * @param name The name as a GeneralName (not <code>null</code>) */ public void addTargetName(GeneralName name) { targetNames.add(name); } /** * Adds a target name criterion for the attribute certificate to the target * information extension criteria. The <code>X509AttributeCertificate</code> * must contain at least one of the specified target names. * <p> * Each attribute certificate may contain a target information extension * limiting the servers where this attribute certificate can be used. If * this extension is not present, the attribute certificate is not targeted * and may be accepted by any server. * * @param name a byte array containing the name in ASN.1 DER encoded form of a GeneralName * @throws IOException if a parsing error occurs. 
*/ public void addTargetName(byte[] name) throws IOException { addTargetName(GeneralName.getInstance(ASN1Primitive.fromByteArray(name))); } /** * Adds a collection with target names criteria. If <code>null</code> is * given any will do. * <p> * The collection consists of either GeneralName objects or byte[] arrays representing * DER encoded GeneralName structures. * * @param names A collection of target names. * @throws IOException if a parsing error occurs. * @see #addTargetName(byte[]) * @see #addTargetName(GeneralName) */ public void setTargetNames(Collection names) throws IOException { targetNames = extractGeneralNames(names); } /** * Gets the target names. The collection consists of <code>GeneralName</code> * objects. * <p> * The returned collection is immutable. * * @return The collection of target names * @see #setTargetNames(Collection) */ public Collection getTargetNames() { return Collections.unmodifiableCollection(targetNames); } /** * Adds a target group criterion for the attribute certificate to the target * information extension criteria. The <code>X509AttributeCertificate</code> * must contain at least one of the specified target groups. * <p> * Each attribute certificate may contain a target information extension * limiting the servers where this attribute certificate can be used. If * this extension is not present, the attribute certificate is not targeted * and may be accepted by any server. * * @param group The group as GeneralName form (not <code>null</code>) */ public void addTargetGroup(GeneralName group) { targetGroups.add(group); } /** * Adds a target group criterion for the attribute certificate to the target * information extension criteria. The <code>X509AttributeCertificate</code> * must contain at least one of the specified target groups. * <p> * Each attribute certificate may contain a target information extension * limiting the servers where this attribute certificate can be used. 
If * this extension is not present, the attribute certificate is not targeted * and may be accepted by any server. * * @param name a byte array containing the group in ASN.1 DER encoded form of a GeneralName * @throws IOException if a parsing error occurs. */ public void addTargetGroup(byte[] name) throws IOException { addTargetGroup(GeneralName.getInstance(ASN1Primitive.fromByteArray(name))); } /** * Adds a collection with target groups criteria. If <code>null</code> is * given any will do. * <p> * The collection consists of <code>GeneralName</code> objects or <code>byte[]</code representing DER * encoded GeneralNames. * * @param names A collection of target groups. * @throws IOException if a parsing error occurs. * @see #addTargetGroup(byte[]) * @see #addTargetGroup(GeneralName) */ public void setTargetGroups(Collection names) throws IOException { targetGroups = extractGeneralNames(names); } /** * Gets the target groups. The collection consists of <code>GeneralName</code> objects. * <p> * The returned collection is immutable. * * @return The collection of target groups. * @see #setTargetGroups(Collection) */ public Collection getTargetGroups() { return Collections.unmodifiableCollection(targetGroups); } private Set extractGeneralNames(Collection names) throws IOException { if (names == null || names.isEmpty()) { return new HashSet(); } Set temp = new HashSet(); for (Iterator it = names.iterator(); it.hasNext();) { Object o = it.next(); if (o instanceof GeneralName) { temp.add(o); } else { temp.add(GeneralName.getInstance(ASN1Primitive.fromByteArray((byte[])o))); } } return temp; } }
/* Generated by camel build tools - do NOT edit this file! */ package org.apache.camel.component.ssh; import java.util.Map; import org.apache.camel.CamelContext; import org.apache.camel.spi.ExtendedPropertyConfigurerGetter; import org.apache.camel.spi.PropertyConfigurerGetter; import org.apache.camel.spi.ConfigurerStrategy; import org.apache.camel.spi.GeneratedPropertyConfigurer; import org.apache.camel.util.CaseInsensitiveMap; import org.apache.camel.support.component.PropertyConfigurerSupport; /** * Generated by camel build tools - do NOT edit this file! */ @SuppressWarnings("unchecked") public class SshEndpointConfigurer extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter { @Override public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) { SshEndpoint target = (SshEndpoint) obj; switch (ignoreCase ? name.toLowerCase() : name) { case "backofferrorthreshold": case "backoffErrorThreshold": target.setBackoffErrorThreshold(property(camelContext, int.class, value)); return true; case "backoffidlethreshold": case "backoffIdleThreshold": target.setBackoffIdleThreshold(property(camelContext, int.class, value)); return true; case "backoffmultiplier": case "backoffMultiplier": target.setBackoffMultiplier(property(camelContext, int.class, value)); return true; case "bridgeerrorhandler": case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true; case "certresource": case "certResource": target.getConfiguration().setCertResource(property(camelContext, java.lang.String.class, value)); return true; case "certresourcepassword": case "certResourcePassword": target.getConfiguration().setCertResourcePassword(property(camelContext, java.lang.String.class, value)); return true; case "channeltype": case "channelType": target.getConfiguration().setChannelType(property(camelContext, java.lang.String.class, value)); return true; 
case "delay": target.setDelay(property(camelContext, long.class, value)); return true; case "exceptionhandler": case "exceptionHandler": target.setExceptionHandler(property(camelContext, org.apache.camel.spi.ExceptionHandler.class, value)); return true; case "exchangepattern": case "exchangePattern": target.setExchangePattern(property(camelContext, org.apache.camel.ExchangePattern.class, value)); return true; case "failonunknownhost": case "failOnUnknownHost": target.getConfiguration().setFailOnUnknownHost(property(camelContext, boolean.class, value)); return true; case "greedy": target.setGreedy(property(camelContext, boolean.class, value)); return true; case "initialdelay": case "initialDelay": target.setInitialDelay(property(camelContext, long.class, value)); return true; case "keypairprovider": case "keyPairProvider": target.getConfiguration().setKeyPairProvider(property(camelContext, org.apache.sshd.common.keyprovider.KeyPairProvider.class, value)); return true; case "keytype": case "keyType": target.getConfiguration().setKeyType(property(camelContext, java.lang.String.class, value)); return true; case "knownhostsresource": case "knownHostsResource": target.getConfiguration().setKnownHostsResource(property(camelContext, java.lang.String.class, value)); return true; case "lazystartproducer": case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true; case "password": target.getConfiguration().setPassword(property(camelContext, java.lang.String.class, value)); return true; case "pollcommand": case "pollCommand": target.getConfiguration().setPollCommand(property(camelContext, java.lang.String.class, value)); return true; case "pollstrategy": case "pollStrategy": target.setPollStrategy(property(camelContext, org.apache.camel.spi.PollingConsumerPollStrategy.class, value)); return true; case "repeatcount": case "repeatCount": target.setRepeatCount(property(camelContext, long.class, value)); return true; case 
"runlogginglevel": case "runLoggingLevel": target.setRunLoggingLevel(property(camelContext, org.apache.camel.LoggingLevel.class, value)); return true; case "scheduledexecutorservice": case "scheduledExecutorService": target.setScheduledExecutorService(property(camelContext, java.util.concurrent.ScheduledExecutorService.class, value)); return true; case "scheduler": target.setScheduler(property(camelContext, java.lang.Object.class, value)); return true; case "schedulerproperties": case "schedulerProperties": target.setSchedulerProperties(property(camelContext, java.util.Map.class, value)); return true; case "sendemptymessagewhenidle": case "sendEmptyMessageWhenIdle": target.setSendEmptyMessageWhenIdle(property(camelContext, boolean.class, value)); return true; case "shellprompt": case "shellPrompt": target.getConfiguration().setShellPrompt(property(camelContext, java.lang.String.class, value)); return true; case "sleepforshellprompt": case "sleepForShellPrompt": target.getConfiguration().setSleepForShellPrompt(property(camelContext, long.class, value)); return true; case "startscheduler": case "startScheduler": target.setStartScheduler(property(camelContext, boolean.class, value)); return true; case "synchronous": target.setSynchronous(property(camelContext, boolean.class, value)); return true; case "timeunit": case "timeUnit": target.setTimeUnit(property(camelContext, java.util.concurrent.TimeUnit.class, value)); return true; case "timeout": target.getConfiguration().setTimeout(property(camelContext, long.class, value)); return true; case "usefixeddelay": case "useFixedDelay": target.setUseFixedDelay(property(camelContext, boolean.class, value)); return true; case "username": target.getConfiguration().setUsername(property(camelContext, java.lang.String.class, value)); return true; default: return false; } } @Override public Class<?> getOptionType(String name, boolean ignoreCase) { switch (ignoreCase ? 
name.toLowerCase() : name) { case "backofferrorthreshold": case "backoffErrorThreshold": return int.class; case "backoffidlethreshold": case "backoffIdleThreshold": return int.class; case "backoffmultiplier": case "backoffMultiplier": return int.class; case "bridgeerrorhandler": case "bridgeErrorHandler": return boolean.class; case "certresource": case "certResource": return java.lang.String.class; case "certresourcepassword": case "certResourcePassword": return java.lang.String.class; case "channeltype": case "channelType": return java.lang.String.class; case "delay": return long.class; case "exceptionhandler": case "exceptionHandler": return org.apache.camel.spi.ExceptionHandler.class; case "exchangepattern": case "exchangePattern": return org.apache.camel.ExchangePattern.class; case "failonunknownhost": case "failOnUnknownHost": return boolean.class; case "greedy": return boolean.class; case "initialdelay": case "initialDelay": return long.class; case "keypairprovider": case "keyPairProvider": return org.apache.sshd.common.keyprovider.KeyPairProvider.class; case "keytype": case "keyType": return java.lang.String.class; case "knownhostsresource": case "knownHostsResource": return java.lang.String.class; case "lazystartproducer": case "lazyStartProducer": return boolean.class; case "password": return java.lang.String.class; case "pollcommand": case "pollCommand": return java.lang.String.class; case "pollstrategy": case "pollStrategy": return org.apache.camel.spi.PollingConsumerPollStrategy.class; case "repeatcount": case "repeatCount": return long.class; case "runlogginglevel": case "runLoggingLevel": return org.apache.camel.LoggingLevel.class; case "scheduledexecutorservice": case "scheduledExecutorService": return java.util.concurrent.ScheduledExecutorService.class; case "scheduler": return java.lang.Object.class; case "schedulerproperties": case "schedulerProperties": return java.util.Map.class; case "sendemptymessagewhenidle": case "sendEmptyMessageWhenIdle": 
return boolean.class; case "shellprompt": case "shellPrompt": return java.lang.String.class; case "sleepforshellprompt": case "sleepForShellPrompt": return long.class; case "startscheduler": case "startScheduler": return boolean.class; case "synchronous": return boolean.class; case "timeunit": case "timeUnit": return java.util.concurrent.TimeUnit.class; case "timeout": return long.class; case "usefixeddelay": case "useFixedDelay": return boolean.class; case "username": return java.lang.String.class; default: return null; } } @Override public Object getOptionValue(Object obj, String name, boolean ignoreCase) { SshEndpoint target = (SshEndpoint) obj; switch (ignoreCase ? name.toLowerCase() : name) { case "backofferrorthreshold": case "backoffErrorThreshold": return target.getBackoffErrorThreshold(); case "backoffidlethreshold": case "backoffIdleThreshold": return target.getBackoffIdleThreshold(); case "backoffmultiplier": case "backoffMultiplier": return target.getBackoffMultiplier(); case "bridgeerrorhandler": case "bridgeErrorHandler": return target.isBridgeErrorHandler(); case "certresource": case "certResource": return target.getConfiguration().getCertResource(); case "certresourcepassword": case "certResourcePassword": return target.getConfiguration().getCertResourcePassword(); case "channeltype": case "channelType": return target.getConfiguration().getChannelType(); case "delay": return target.getDelay(); case "exceptionhandler": case "exceptionHandler": return target.getExceptionHandler(); case "exchangepattern": case "exchangePattern": return target.getExchangePattern(); case "failonunknownhost": case "failOnUnknownHost": return target.getConfiguration().isFailOnUnknownHost(); case "greedy": return target.isGreedy(); case "initialdelay": case "initialDelay": return target.getInitialDelay(); case "keypairprovider": case "keyPairProvider": return target.getConfiguration().getKeyPairProvider(); case "keytype": case "keyType": return 
target.getConfiguration().getKeyType(); case "knownhostsresource": case "knownHostsResource": return target.getConfiguration().getKnownHostsResource(); case "lazystartproducer": case "lazyStartProducer": return target.isLazyStartProducer(); case "password": return target.getConfiguration().getPassword(); case "pollcommand": case "pollCommand": return target.getConfiguration().getPollCommand(); case "pollstrategy": case "pollStrategy": return target.getPollStrategy(); case "repeatcount": case "repeatCount": return target.getRepeatCount(); case "runlogginglevel": case "runLoggingLevel": return target.getRunLoggingLevel(); case "scheduledexecutorservice": case "scheduledExecutorService": return target.getScheduledExecutorService(); case "scheduler": return target.getScheduler(); case "schedulerproperties": case "schedulerProperties": return target.getSchedulerProperties(); case "sendemptymessagewhenidle": case "sendEmptyMessageWhenIdle": return target.isSendEmptyMessageWhenIdle(); case "shellprompt": case "shellPrompt": return target.getConfiguration().getShellPrompt(); case "sleepforshellprompt": case "sleepForShellPrompt": return target.getConfiguration().getSleepForShellPrompt(); case "startscheduler": case "startScheduler": return target.isStartScheduler(); case "synchronous": return target.isSynchronous(); case "timeunit": case "timeUnit": return target.getTimeUnit(); case "timeout": return target.getConfiguration().getTimeout(); case "usefixeddelay": case "useFixedDelay": return target.isUseFixedDelay(); case "username": return target.getConfiguration().getUsername(); default: return null; } } }
package com.afollestad.cabinet.fragments;

import android.app.AlertDialog;
import android.app.Dialog;
import android.app.DialogFragment;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.text.Html;
import android.text.Spanned;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.TextView;

import com.afollestad.cabinet.R;
import com.afollestad.cabinet.file.base.File;
import com.afollestad.cabinet.file.root.RootFile;
import com.afollestad.cabinet.utils.Perm;
import com.afollestad.cabinet.utils.TimeUtils;
import com.afollestad.cabinet.utils.Utils;

import java.util.GregorianCalendar;
import java.util.List;

import eu.chainfire.libsuperuser.Shell;

/**
 * Dialog showing details (name, path, size, last-modified time) for a file or directory.
 * For regular files, when superuser access is available, it additionally shows the rwx
 * permission bits as nine checkboxes and applies any changes via {@code chmod} when the
 * dialog is confirmed.
 * <p>
 * Threading: slow work (directory size computation, root {@code ls -l}) runs on ad-hoc
 * background threads; results are posted back via {@code runOnUiThread}.
 */
public class DetailsDialog extends DialogFragment implements CompoundButton.OnCheckedChangeListener {

    public DetailsDialog() {
    }

    /**
     * Factory method: packs the target file into the fragment arguments so it survives
     * configuration changes (the framework re-instantiates via the no-arg constructor).
     */
    public static DetailsDialog create(File file) {
        DetailsDialog dialog = new DetailsDialog();
        Bundle args = new Bundle();
        args.putSerializable("file", file);
        dialog.setArguments(args);
        return dialog;
    }

    // Body text view of the dialog; updated asynchronously as details are loaded.
    private TextView body;
    // The file whose details are displayed (restored from arguments in onCreateDialog).
    private File file;
    // Permission checkboxes: owner / group / other x read / write / execute.
    public CheckBox ownerR;
    public CheckBox ownerW;
    public CheckBox ownerX;
    public CheckBox groupR;
    public CheckBox groupW;
    public CheckBox groupX;
    public CheckBox otherR;
    public CheckBox otherW;
    public CheckBox otherX;
    // Dual-purpose field: holds either a status message (loading / SU unavailable) shown in
    // the body text, or the current three-digit octal-style permission string (e.g. "755").
    public String permissionsString;
    // Permission string as originally parsed from "ls -l"; compared against
    // permissionsString to decide whether a chmod is needed on dismissal.
    public String initialPermission;

    /**
     * Builds the HTML body describing the file.
     *
     * @param loadDirContents when {@code true} and {@code file} is a local directory,
     *                        compute the (potentially slow) directory size synchronously;
     *                        when {@code false}, show a "loading" placeholder and spawn a
     *                        background thread that recurses with {@code true} and then
     *                        updates {@link #body} on the UI thread.
     * @param view            dialog root view, used to hide the permissions group when
     *                        superuser access is unavailable; may be {@code null} on
     *                        background refreshes.
     * @return the formatted body, or {@code null} if the fragment is detached.
     */
    private Spanned getBody(boolean loadDirContents, final View view) {
        if (getActivity() == null) return null;
        String content;
        GregorianCalendar cal = new GregorianCalendar();
        cal.setTimeInMillis(file.lastModified());
        if (file.isDirectory()) {
            // Remote directory sizes stay "unavailable"; local ones are computed lazily.
            String size = getString(R.string.unavailable);
            if (!file.isRemote()) {
                if (loadDirContents) {
                    size = file.getSizeString();
                } else {
                    size = getString(R.string.loading);
                    new Thread(new Runnable() {
                        @Override
                        public void run() {
                            // Recurse with loadDirContents=true to compute the real size off the UI thread.
                            final Spanned newBody = getBody(true, null);
                            if (getActivity() == null) return;
                            getActivity().runOnUiThread(new Runnable() {
                                @Override
                                public void run() {
                                    body.setText(newBody);
                                }
                            });
                        }
                    }).start();
                }
            }
            content = getString(R.string.details_body_dir, file.getName(), file.getPath(), size, TimeUtils.toStringLong(cal));
        } else {
            if (permissionsString == null) {
                // Disable the checkboxes until the real permissions have been loaded.
                ownerR.setEnabled(false);
                ownerW.setEnabled(false);
                ownerX.setEnabled(false);
                groupR.setEnabled(false);
                groupW.setEnabled(false);
                groupX.setEnabled(false);
                otherR.setEnabled(false);
                otherW.setEnabled(false);
                otherX.setEnabled(false);
                // NOTE(review): Shell.SU.available() is a blocking libsuperuser call; here it
                // appears to run on the calling (UI) thread — confirm this is acceptable.
                if (!Shell.SU.available()) {
                    permissionsString = getString(R.string.superuser_not_available);
                    if (view != null)
                        view.findViewById(R.id.permissionsGroup).setVisibility(View.GONE);
                } else {
                    permissionsString = getString(R.string.loading);
                    new Thread(new Runnable() {
                        @Override
                        public void run() {
                            // Load real permissions via root "ls -l", then rebuild the body.
                            invalidatePermissions(true);
                            final Spanned newBody = getBody(false, view);
                            if (getActivity() == null) return;
                            getActivity().runOnUiThread(new Runnable() {
                                @Override
                                public void run() {
                                    // Permissions are known now; re-enable editing.
                                    ownerR.setEnabled(true);
                                    ownerW.setEnabled(true);
                                    ownerX.setEnabled(true);
                                    groupR.setEnabled(true);
                                    groupW.setEnabled(true);
                                    groupX.setEnabled(true);
                                    otherR.setEnabled(true);
                                    otherW.setEnabled(true);
                                    otherX.setEnabled(true);
                                    body.setText(newBody);
                                    // Sync permissionsString with the checkbox state and attach listeners.
                                    invalidatePermissions(false);
                                }
                            });
                        }
                    }).start();
                }
            }
            content = getString(R.string.details_body_file, file.getName(), file.getPath(), file.getSizeString(), TimeUtils.toStringLong(cal), permissionsString);
        }
        return Html.fromHtml(content);
    }

    /**
     * Inflates the custom layout, binds the permission checkboxes, kicks off the async body
     * load, and applies any permission changes when OK is pressed.
     */
    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        file = (File) getArguments().getSerializable("file");
        file.setContext(getActivity());
        LayoutInflater layoutInflater = getActivity().getLayoutInflater();
        View rootView = layoutInflater.inflate(R.layout.dialog_custom, null);
        TextView title = (TextView) rootView.findViewById(R.id.title);
        ownerR = (CheckBox) rootView.findViewById(R.id.ownerR);
        ownerW = (CheckBox) rootView.findViewById(R.id.ownerW);
        ownerX = (CheckBox) rootView.findViewById(R.id.ownerX);
        groupR = (CheckBox) rootView.findViewById(R.id.groupR);
        groupW = (CheckBox) rootView.findViewById(R.id.groupW);
        groupX = (CheckBox) rootView.findViewById(R.id.groupX);
        otherR = (CheckBox) rootView.findViewById(R.id.otherR);
        otherW = (CheckBox) rootView.findViewById(R.id.otherW);
        otherX = (CheckBox) rootView.findViewById(R.id.otherX);
        title.setText(R.string.details);
        body = (TextView) rootView.findViewById(R.id.body);
        body.setText(getBody(false, rootView));
        return new AlertDialog.Builder(getActivity())
                .setView(rootView)
                .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int i) {
                        dialog.dismiss();
                        applyPermissionsIfNecessary();
                    }
                }).create();
    }

    /**
     * Either reloads the permission string from disk ({@code reload == true}) or recomputes
     * it from the checkbox state ({@code reload == false}).
     * <p>
     * Reload path: runs root {@code ls -l} on the file (expected to be invoked from a
     * background thread — see {@link #getBody}) and parses the result on the UI thread,
     * recording it as both the current and the initial permission.
     * Recompute path: folds the nine checkboxes into a three-digit string, refreshes the
     * body text, and (re-)registers this fragment as the change listener on each checkbox.
     */
    private void invalidatePermissions(boolean reload) {
        if (reload) {
            try {
                final List<String> results = RootFile.runAsRoot(getActivity(), "ls -l \"" + file.getPath() + "\"", file.getParent());
                if (results.size() > 0 && getActivity() != null) {
                    getActivity().runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            permissionsString = Perm.parse(results.get(0), DetailsDialog.this);
                            initialPermission = permissionsString;
                        }
                    });
                }
            } catch (Exception e) {
                // Best effort: a failed permission load leaves the previous status text in place.
                e.printStackTrace();
            }
        } else {
            // Each digit is the sum of READ/WRITE/EXECUTE flags for owner, group, other.
            int owner = 0;
            if (ownerR.isChecked()) owner += Perm.READ;
            if (ownerW.isChecked()) owner += Perm.WRITE;
            if (ownerX.isChecked()) owner += Perm.EXECUTE;
            int group = 0;
            if (groupR.isChecked()) group += Perm.READ;
            if (groupW.isChecked()) group += Perm.WRITE;
            if (groupX.isChecked()) group += Perm.EXECUTE;
            int other = 0;
            if (otherR.isChecked()) other += Perm.READ;
            if (otherW.isChecked()) other += Perm.WRITE;
            if (otherX.isChecked()) other += Perm.EXECUTE;
            permissionsString = owner + "" + group + "" + other;
            body.setText(getBody(false, getView()));
            // Registering here (after the initial load) avoids listener callbacks while the
            // checkboxes are being programmatically initialized. Re-registering is harmless:
            // setOnCheckedChangeListener replaces the previous listener.
            ownerR.setOnCheckedChangeListener(this);
            ownerW.setOnCheckedChangeListener(this);
            ownerX.setOnCheckedChangeListener(this);
            groupR.setOnCheckedChangeListener(this);
            groupW.setOnCheckedChangeListener(this);
            groupX.setOnCheckedChangeListener(this);
            otherR.setOnCheckedChangeListener(this);
            otherW.setOnCheckedChangeListener(this);
            otherX.setOnCheckedChangeListener(this);
        }
    }

    /**
     * Runs {@code chmod} with the edited permission string, but only when both the current
     * and initial strings are valid three-digit values, they differ, and superuser access is
     * available. Shows a progress dialog while the change is applied and an error dialog on
     * failure. (The {@code permissionsString != null} in the third clause is redundant after
     * the first clause, but harmless.)
     */
    private void applyPermissionsIfNecessary() {
        if ((permissionsString == null || permissionsString.length() != 3) ||
                (initialPermission == null || initialPermission.length() != 3) ||
                (permissionsString != null && permissionsString.equals(initialPermission)) ||
                !Shell.SU.available()) {
            return;
        }
        final ProgressDialog mDialog = new ProgressDialog(getActivity());
        mDialog.setCancelable(false);
        mDialog.setMessage(getString(R.string.applying_permissions));
        mDialog.setIndeterminate(true);
        mDialog.show();
        Perm.chmod(file, permissionsString, new Perm.Callback() {
            @Override
            public void onComplete(boolean result, String error) {
                mDialog.dismiss();
                if (!result) Utils.showErrorDialog(getActivity(), error);
            }
        });
    }

    /**
     * Any checkbox toggle recomputes the permission string and refreshes the body preview.
     */
    @Override
    public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
        invalidatePermissions(false);
    }
}
/*
 * Copyright 2016 LINE Corporation
 *
 * LINE Corporation licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.linecorp.armeria.client.circuitbreaker;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

import java.time.Duration;

import org.junit.Test;

import com.google.common.testing.FakeTicker;

import com.linecorp.armeria.common.util.Exceptions;
import com.linecorp.armeria.testing.internal.AnticipatedException;

/**
 * Tests the CLOSED -> OPEN -> HALF_OPEN -> CLOSED state machine of
 * {@code NonBlockingCircuitBreaker}, driving time with a {@link FakeTicker}.
 * <p>
 * NOTE: {@code ticker} and {@code listener} are shared static state. The tests only ever
 * advance the ticker relatively and create a fresh breaker per test (and
 * {@code testNotification} resets the listener), so test ordering does not matter.
 */
public class NonBlockingCircuitBreakerTest {

    private static final String remoteServiceName = "testService";

    private static final FakeTicker ticker = new FakeTicker();

    // Time the circuit stays OPEN before a trial request may probe the remote service.
    private static final Duration circuitOpenWindow = Duration.ofSeconds(1);

    // Minimum interval between trial requests while HALF_OPEN.
    private static final Duration trialRequestInterval = Duration.ofSeconds(1);

    // Event counts only become visible after this interval elapses, hence the
    // ticker.advance(...) calls before the final onFailure() in the helpers below.
    private static final Duration counterUpdateInterval = Duration.ofSeconds(1);

    private static final CircuitBreakerListener listener = mock(CircuitBreakerListener.class);

    /** Builds a breaker wired to the shared fake ticker and mock listener. */
    private static NonBlockingCircuitBreaker create(long minimumRequestThreshold, double failureRateThreshold) {
        return (NonBlockingCircuitBreaker) new CircuitBreakerBuilder(remoteServiceName)
                .failureRateThreshold(failureRateThreshold)
                .minimumRequestThreshold(minimumRequestThreshold)
                .circuitOpenWindow(circuitOpenWindow)
                .trialRequestInterval(trialRequestInterval)
                .counterSlidingWindow(Duration.ofSeconds(10))
                .counterUpdateInterval(counterUpdateInterval)
                .listener(listener)
                .ticker(ticker)
                .build();
    }

    /** Returns a freshly created breaker, asserting it starts CLOSED and accepting requests. */
    private static CircuitBreaker closedState(long minimumRequestThreshold, double failureRateThreshold) {
        NonBlockingCircuitBreaker cb = create(minimumRequestThreshold, failureRateThreshold);
        assertThat(cb.state().isClosed()).isTrue();
        assertThat(cb.canRequest()).isTrue();
        return cb;
    }

    /**
     * Drives a fresh breaker into OPEN: 1 success + 3 failures (75% failure rate), with a
     * counter-interval advance so the counts are published before the tripping failure.
     */
    private static NonBlockingCircuitBreaker openState(long minimumRequestThreshold, double failureRateThreshold) {
        NonBlockingCircuitBreaker cb = create(minimumRequestThreshold, failureRateThreshold);
        cb.onSuccess();
        cb.onFailure();
        cb.onFailure();
        ticker.advance(counterUpdateInterval.toNanos());
        cb.onFailure();
        assertThat(cb.state().isOpen()).isTrue();
        assertThat(cb.canRequest()).isFalse();
        return cb;
    }

    /**
     * Drives a breaker into HALF_OPEN by letting the open window elapse. Note the state
     * does not flip until a request actually probes it: isHalfOpen() is still false right
     * after the advance, and only the first canRequest() performs the transition.
     */
    private static NonBlockingCircuitBreaker halfOpenState(long minimumRequestThreshold, double failureRateThreshold) {
        NonBlockingCircuitBreaker cb = openState(minimumRequestThreshold, failureRateThreshold);
        ticker.advance(circuitOpenWindow.toNanos());
        assertThat(cb.state().isHalfOpen()).isFalse();
        assertThat(cb.canRequest()).isTrue(); // first request is allowed
        assertThat(cb.state().isHalfOpen()).isTrue();
        assertThat(cb.canRequest()).isFalse(); // second request is refused
        return cb;
    }

    @Test
    public void testClosed() {
        closedState(2, 0.5);
    }

    /** Failures below the minimum request threshold must not trip the circuit. */
    @Test
    public void testMinimumRequestThreshold() {
        NonBlockingCircuitBreaker cb = create(4, 0.5);
        assertThat(cb.state().isClosed() && cb.canRequest()).isTrue();
        // Only 2 requests so far (< threshold 4): stays CLOSED despite 100% failures.
        cb.onFailure();
        ticker.advance(counterUpdateInterval.toNanos());
        cb.onFailure();
        assertThat(cb.state().isClosed()).isTrue();
        assertThat(cb.canRequest()).isTrue();
        // 5 failed requests (>= threshold): trips OPEN.
        cb.onFailure();
        cb.onFailure();
        ticker.advance(counterUpdateInterval.toNanos());
        cb.onFailure();
        assertThat(cb.state().isOpen()).isTrue();
        assertThat(cb.canRequest()).isFalse();
    }

    /** The circuit must trip only when the failure rate exceeds (not merely reaches) the threshold. */
    @Test
    public void testFailureRateThreshold() {
        NonBlockingCircuitBreaker cb = create(10, 0.5);
        for (int i = 0; i < 10; i++) {
            cb.onSuccess();
        }
        for (int i = 0; i < 9; i++) {
            cb.onFailure();
        }
        ticker.advance(counterUpdateInterval.toNanos());
        cb.onFailure();
        assertThat(cb.state().isClosed()).isTrue(); // 10 vs 9 (0.47)
        assertThat(cb.canRequest()).isTrue();
        ticker.advance(counterUpdateInterval.toNanos());
        cb.onFailure();
        assertThat(cb.state().isClosed()).isTrue(); // 10 vs 10 (0.5)
        assertThat(cb.canRequest()).isTrue();
        ticker.advance(counterUpdateInterval.toNanos());
        cb.onFailure();
        assertThat(cb.state().isOpen()).isTrue(); // 10 vs 11 (0.52)
        assertThat(cb.canRequest()).isFalse();
    }

    @Test
    public void testClosedToOpen() {
        openState(2, 0.5);
    }

    @Test
    public void testOpenToHalfOpen() {
        halfOpenState(2, 0.5);
    }

    /** A successful trial request while HALF_OPEN closes the circuit. */
    @Test
    public void testHalfOpenToClosed() {
        NonBlockingCircuitBreaker cb = halfOpenState(2, 0.5);
        cb.onSuccess();
        assertThat(cb.state().isClosed()).isTrue();
        assertThat(cb.canRequest()).isTrue();
    }

    /** A failed trial request while HALF_OPEN re-opens the circuit. */
    @Test
    public void testHalfOpenToOpen() {
        NonBlockingCircuitBreaker cb = halfOpenState(2, 0.5);
        cb.onFailure();
        assertThat(cb.state().isOpen()).isTrue();
        assertThat(cb.canRequest()).isFalse();
    }

    /** After the trial interval elapses, HALF_OPEN allows exactly one more trial request. */
    @Test
    public void testHalfOpenRetryRequest() {
        NonBlockingCircuitBreaker cb = halfOpenState(2, 0.5);
        ticker.advance(trialRequestInterval.toNanos());
        assertThat(cb.state().isHalfOpen()).isTrue();
        assertThat(cb.canRequest()).isTrue(); // first request is allowed
        assertThat(cb.state().isHalfOpen()).isTrue();
        assertThat(cb.canRequest()).isFalse(); // second request is refused
    }

    /**
     * An exception filter that itself throws must not propagate out of onFailure();
     * the test passes as long as no exception escapes.
     */
    @Test
    public void testFailureOfExceptionFilter() {
        NonBlockingCircuitBreaker cb = (NonBlockingCircuitBreaker) new CircuitBreakerBuilder()
                .exceptionFilter(cause -> {
                    throw Exceptions.clearTrace(new AnticipatedException("exception filter failed"));
                })
                .ticker(ticker)
                .build();
        cb.onFailure(new Exception());
    }

    /** Verifies the listener is notified on every count update and state transition. */
    @Test
    public void testNotification() throws Exception {
        reset(listener);

        NonBlockingCircuitBreaker cb = create(4, 0.5);

        // Notify initial state
        verify(listener, times(1)).onEventCountUpdated(cb, EventCount.ZERO);
        verify(listener, times(1)).onStateChanged(cb, CircuitState.CLOSED);
        reset(listener);

        cb.onFailure();
        ticker.advance(counterUpdateInterval.toNanos());
        cb.onFailure();

        // Notify updated event count
        verify(listener, times(1)).onEventCountUpdated(cb, new EventCount(0, 1));
        reset(listener);

        // Notify circuit tripped
        cb.onFailure();
        cb.onFailure();
        ticker.advance(counterUpdateInterval.toNanos());
        cb.onFailure();

        verify(listener, times(1)).onEventCountUpdated(cb, EventCount.ZERO);
        verify(listener, times(1)).onStateChanged(cb, CircuitState.OPEN);
        reset(listener);

        // Notify request rejected
        cb.canRequest();
        verify(listener, times(1)).onRequestRejected(cb);

        ticker.advance(circuitOpenWindow.toNanos());

        // Notify half open
        cb.canRequest();

        verify(listener, times(1)).onEventCountUpdated(cb, EventCount.ZERO);
        verify(listener, times(1)).onStateChanged(cb, CircuitState.HALF_OPEN);
        reset(listener);

        // Notify circuit closed
        cb.onSuccess();

        verify(listener, times(1)).onEventCountUpdated(cb, EventCount.ZERO);
        verify(listener, times(1)).onStateChanged(cb, CircuitState.CLOSED);
    }
}
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.flowable.variable.service.impl; import java.util.ArrayList; import java.util.List; import org.flowable.common.engine.api.FlowableIllegalArgumentException; import org.flowable.common.engine.api.query.Query; import org.flowable.common.engine.impl.interceptor.CommandContext; import org.flowable.common.engine.impl.interceptor.CommandExecutor; import org.flowable.common.engine.impl.query.AbstractQuery; import org.flowable.variable.service.VariableServiceConfiguration; /** * Abstract query class that adds methods to query for variable values. 
* * @author Frederik Heremans */ public abstract class AbstractVariableQueryImpl<T extends Query<?, ?>, U> extends AbstractQuery<T, U> { private static final long serialVersionUID = 1L; protected VariableServiceConfiguration variableServiceConfiguration; protected List<QueryVariableValue> queryVariableValues = new ArrayList<>(); public AbstractVariableQueryImpl() { } public AbstractVariableQueryImpl(CommandContext commandContext, VariableServiceConfiguration variableServiceConfiguration) { super(commandContext); this.variableServiceConfiguration = variableServiceConfiguration; } public AbstractVariableQueryImpl(CommandExecutor commandExecutor, VariableServiceConfiguration variableServiceConfiguration) { super(commandExecutor); this.variableServiceConfiguration = variableServiceConfiguration; } @Override public abstract long executeCount(CommandContext commandContext); @Override public abstract List<U> executeList(CommandContext commandContext); public T variableValueEquals(String name, Object value) { return variableValueEquals(name, value, true); } @SuppressWarnings("unchecked") protected T variableValueEquals(String name, Object value, boolean localScope) { addVariable(name, value, QueryOperator.EQUALS, localScope); return (T) this; } public T variableValueEquals(Object value) { return variableValueEquals(value, true); } @SuppressWarnings("unchecked") protected T variableValueEquals(Object value, boolean localScope) { queryVariableValues.add(new QueryVariableValue(null, value, QueryOperator.EQUALS, localScope)); return (T) this; } public T variableValueEqualsIgnoreCase(String name, String value) { return variableValueEqualsIgnoreCase(name, value, true); } @SuppressWarnings("unchecked") protected T variableValueEqualsIgnoreCase(String name, String value, boolean localScope) { if (value == null) { throw new FlowableIllegalArgumentException("value is null"); } addVariable(name, value.toLowerCase(), QueryOperator.EQUALS_IGNORE_CASE, localScope); return (T) this; } 
public T variableValueNotEqualsIgnoreCase(String name, String value) { return variableValueNotEqualsIgnoreCase(name, value, true); } @SuppressWarnings("unchecked") protected T variableValueNotEqualsIgnoreCase(String name, String value, boolean localScope) { if (value == null) { throw new FlowableIllegalArgumentException("value is null"); } addVariable(name, value.toLowerCase(), QueryOperator.NOT_EQUALS_IGNORE_CASE, localScope); return (T) this; } public T variableValueNotEquals(String name, Object value) { return variableValueNotEquals(name, value, true); } @SuppressWarnings("unchecked") protected T variableValueNotEquals(String name, Object value, boolean localScope) { addVariable(name, value, QueryOperator.NOT_EQUALS, localScope); return (T) this; } public T variableValueGreaterThan(String name, Object value) { return variableValueGreaterThan(name, value, true); } @SuppressWarnings("unchecked") protected T variableValueGreaterThan(String name, Object value, boolean localScope) { addVariable(name, value, QueryOperator.GREATER_THAN, localScope); return (T) this; } public T variableValueGreaterThanOrEqual(String name, Object value) { return variableValueGreaterThanOrEqual(name, value, true); } @SuppressWarnings("unchecked") protected T variableValueGreaterThanOrEqual(String name, Object value, boolean localScope) { addVariable(name, value, QueryOperator.GREATER_THAN_OR_EQUAL, localScope); return (T) this; } public T variableValueLessThan(String name, Object value) { return variableValueLessThan(name, value, true); } @SuppressWarnings("unchecked") protected T variableValueLessThan(String name, Object value, boolean localScope) { addVariable(name, value, QueryOperator.LESS_THAN, localScope); return (T) this; } public T variableValueLessThanOrEqual(String name, Object value) { return variableValueLessThanOrEqual(name, value, true); } @SuppressWarnings("unchecked") protected T variableValueLessThanOrEqual(String name, Object value, boolean localScope) { 
addVariable(name, value, QueryOperator.LESS_THAN_OR_EQUAL, localScope); return (T) this; } public T variableValueLike(String name, String value) { return variableValueLike(name, value, true); } public T variableValueLikeIgnoreCase(String name, String value) { return variableValueLikeIgnoreCase(name, value, true); } @SuppressWarnings("unchecked") protected T variableValueLike(String name, String value, boolean localScope) { addVariable(name, value, QueryOperator.LIKE, localScope); return (T) this; } @SuppressWarnings("unchecked") protected T variableValueLikeIgnoreCase(String name, String value, boolean localScope) { if (value == null) { throw new FlowableIllegalArgumentException("value is null"); } addVariable(name, value.toLowerCase(), QueryOperator.LIKE_IGNORE_CASE, localScope); return (T) this; } public T variableExists(String name) { return variableExists(name, true); } @SuppressWarnings("unchecked") protected T variableExists(String name, boolean localScope) { addVariable(name, null, QueryOperator.EXISTS, localScope); return (T) this; } public T variableNotExists(String name) { return variableNotExists(name, true); } @SuppressWarnings("unchecked") protected T variableNotExists(String name, boolean localScope) { addVariable(name, null, QueryOperator.NOT_EXISTS, localScope); return (T) this; } protected void addVariable(String name, Object value, QueryOperator operator, boolean localScope) { if (name == null) { throw new FlowableIllegalArgumentException("name is null"); } if (value == null || isBoolean(value)) { // Null-values and booleans can only be used in EQUALS, NOT_EQUALS, EXISTS and NOT_EXISTS switch (operator) { case GREATER_THAN: throw new FlowableIllegalArgumentException("Booleans and null cannot be used in 'greater than' condition"); case LESS_THAN: throw new FlowableIllegalArgumentException("Booleans and null cannot be used in 'less than' condition"); case GREATER_THAN_OR_EQUAL: throw new FlowableIllegalArgumentException("Booleans and null cannot be 
used in 'greater than or equal' condition"); case LESS_THAN_OR_EQUAL: throw new FlowableIllegalArgumentException("Booleans and null cannot be used in 'less than or equal' condition"); } if (operator == QueryOperator.EQUALS_IGNORE_CASE && !(value instanceof String)) { throw new FlowableIllegalArgumentException("Only string values can be used with 'equals ignore case' condition"); } if (operator == QueryOperator.NOT_EQUALS_IGNORE_CASE && !(value instanceof String)) { throw new FlowableIllegalArgumentException("Only string values can be used with 'not equals ignore case' condition"); } if ((operator == QueryOperator.LIKE || operator == QueryOperator.LIKE_IGNORE_CASE) && !(value instanceof String)) { throw new FlowableIllegalArgumentException("Only string values can be used with 'like' condition"); } } queryVariableValues.add(new QueryVariableValue(name, value, operator, localScope)); } protected boolean isBoolean(Object value) { if (value == null) { return false; } return Boolean.class.isAssignableFrom(value.getClass()) || boolean.class.isAssignableFrom(value.getClass()); } protected void ensureVariablesInitialized() { if (!queryVariableValues.isEmpty()) { for (QueryVariableValue queryVariableValue : queryVariableValues) { queryVariableValue.initialize(variableServiceConfiguration); } } } public List<QueryVariableValue> getQueryVariableValues() { return queryVariableValues; } public boolean hasValueComparisonQueryVariables() { for (QueryVariableValue qvv : queryVariableValues) { if (!QueryOperator.EXISTS.toString().equals(qvv.getOperator()) && !QueryOperator.NOT_EXISTS.toString().equals(qvv.getOperator())) { return true; } } return false; } public boolean hasLocalQueryVariableValue() { for (QueryVariableValue qvv : queryVariableValues) { if (qvv.isLocal()) { return true; } } return false; } public boolean hasNonLocalQueryVariableValue() { for (QueryVariableValue qvv : queryVariableValues) { if (!qvv.isLocal()) { return true; } } return false; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.client.cli; import org.apache.flink.api.common.JobID; import org.apache.flink.client.cli.util.MockedCliFrontend; import org.apache.flink.client.deployment.StandaloneClusterId; import org.apache.flink.client.program.ClusterClient; import org.apache.flink.client.program.rest.RestClusterClient; import org.apache.flink.configuration.Configuration; import org.apache.flink.runtime.concurrent.FutureUtils; import org.apache.flink.runtime.messages.Acknowledge; import org.apache.flink.util.ExceptionUtils; import org.apache.flink.util.FlinkException; import org.apache.flink.util.Preconditions; import org.hamcrest.Matchers; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.PrintStream; import java.util.concurrent.CompletableFuture; import java.util.function.Function; import java.util.zip.ZipOutputStream; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.ArgumentMatchers.nullable; import static 
org.mockito.Matchers.any; import static org.mockito.Matchers.eq; import static org.mockito.Matchers.isNull; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** Tests for the SAVEPOINT command. */ public class CliFrontendSavepointTest extends CliFrontendTestBase { private static PrintStream stdOut; private static PrintStream stdErr; private static ByteArrayOutputStream buffer; @Rule public TemporaryFolder tmp = new TemporaryFolder(); // ------------------------------------------------------------------------ // Trigger savepoint // ------------------------------------------------------------------------ @Test public void testTriggerSavepointSuccess() throws Exception { replaceStdOutAndStdErr(); JobID jobId = new JobID(); String savepointPath = "expectedSavepointPath"; final ClusterClient<String> clusterClient = createClusterClient(savepointPath); try { MockedCliFrontend frontend = new MockedCliFrontend(clusterClient); String[] parameters = {jobId.toString()}; frontend.savepoint(parameters); verify(clusterClient, times(1)).triggerSavepoint(eq(jobId), isNull(String.class)); assertTrue(buffer.toString().contains(savepointPath)); } finally { clusterClient.close(); restoreStdOutAndStdErr(); } } @Test public void testTriggerSavepointFailure() throws Exception { replaceStdOutAndStdErr(); JobID jobId = new JobID(); String expectedTestException = "expectedTestException"; Exception testException = new Exception(expectedTestException); final ClusterClient<String> clusterClient = createFailingClusterClient(testException); try { MockedCliFrontend frontend = new MockedCliFrontend(clusterClient); String[] parameters = {jobId.toString()}; try { frontend.savepoint(parameters); fail("Savepoint should have failed."); } catch (FlinkException e) { assertTrue( ExceptionUtils.findThrowableWithMessage(e, expectedTestException) .isPresent()); } } finally { 
clusterClient.close(); restoreStdOutAndStdErr(); } } @Test public void testTriggerSavepointFailureIllegalJobID() throws Exception { replaceStdOutAndStdErr(); try { CliFrontend frontend = new MockedCliFrontend( new RestClusterClient<>( getConfiguration(), StandaloneClusterId.getInstance())); String[] parameters = {"invalid job id"}; try { frontend.savepoint(parameters); fail("Should have failed."); } catch (CliArgsException e) { assertThat(e.getMessage(), Matchers.containsString("Cannot parse JobID")); } } finally { restoreStdOutAndStdErr(); } } /** * Tests that a CLI call with a custom savepoint directory target is forwarded correctly to the * cluster client. */ @Test public void testTriggerSavepointCustomTarget() throws Exception { replaceStdOutAndStdErr(); JobID jobId = new JobID(); String savepointDirectory = "customTargetDirectory"; final ClusterClient<String> clusterClient = createClusterClient(savepointDirectory); try { MockedCliFrontend frontend = new MockedCliFrontend(clusterClient); String[] parameters = {jobId.toString(), savepointDirectory}; frontend.savepoint(parameters); verify(clusterClient, times(1)).triggerSavepoint(eq(jobId), eq(savepointDirectory)); assertTrue(buffer.toString().contains(savepointDirectory)); } finally { clusterClient.close(); restoreStdOutAndStdErr(); } } // ------------------------------------------------------------------------ // Dispose savepoint // ------------------------------------------------------------------------ @Test public void testDisposeSavepointSuccess() throws Exception { replaceStdOutAndStdErr(); String savepointPath = "expectedSavepointPath"; ClusterClient clusterClient = new DisposeSavepointClusterClient( (String path) -> CompletableFuture.completedFuture(Acknowledge.get()), getConfiguration()); try { CliFrontend frontend = new MockedCliFrontend(clusterClient); String[] parameters = {"-d", savepointPath}; frontend.savepoint(parameters); String outMsg = buffer.toString(); 
assertTrue(outMsg.contains(savepointPath)); assertTrue(outMsg.contains("disposed")); } finally { clusterClient.close(); restoreStdOutAndStdErr(); } } /** Tests disposal with a JAR file. */ @Test public void testDisposeWithJar() throws Exception { replaceStdOutAndStdErr(); final CompletableFuture<String> disposeSavepointFuture = new CompletableFuture<>(); final DisposeSavepointClusterClient clusterClient = new DisposeSavepointClusterClient( (String savepointPath) -> { disposeSavepointFuture.complete(savepointPath); return CompletableFuture.completedFuture(Acknowledge.get()); }, getConfiguration()); try { CliFrontend frontend = new MockedCliFrontend(clusterClient); // Fake JAR file File f = tmp.newFile(); ZipOutputStream out = new ZipOutputStream(new FileOutputStream(f)); out.close(); final String disposePath = "any-path"; String[] parameters = {"-d", disposePath, "-j", f.getAbsolutePath()}; frontend.savepoint(parameters); final String actualSavepointPath = disposeSavepointFuture.get(); assertEquals(disposePath, actualSavepointPath); } finally { clusterClient.close(); restoreStdOutAndStdErr(); } } @Test public void testDisposeSavepointFailure() throws Exception { replaceStdOutAndStdErr(); String savepointPath = "expectedSavepointPath"; Exception testException = new Exception("expectedTestException"); DisposeSavepointClusterClient clusterClient = new DisposeSavepointClusterClient( (String path) -> FutureUtils.completedExceptionally(testException), getConfiguration()); try { CliFrontend frontend = new MockedCliFrontend(clusterClient); String[] parameters = {"-d", savepointPath}; try { frontend.savepoint(parameters); fail("Savepoint should have failed."); } catch (Exception e) { assertTrue( ExceptionUtils.findThrowableWithMessage(e, testException.getMessage()) .isPresent()); } } finally { clusterClient.close(); restoreStdOutAndStdErr(); } } // ------------------------------------------------------------------------ private static final class 
DisposeSavepointClusterClient extends RestClusterClient<StandaloneClusterId> { private final Function<String, CompletableFuture<Acknowledge>> disposeSavepointFunction; DisposeSavepointClusterClient( Function<String, CompletableFuture<Acknowledge>> disposeSavepointFunction, Configuration configuration) throws Exception { super(configuration, StandaloneClusterId.getInstance()); this.disposeSavepointFunction = Preconditions.checkNotNull(disposeSavepointFunction); } @Override public CompletableFuture<Acknowledge> disposeSavepoint(String savepointPath) { return disposeSavepointFunction.apply(savepointPath); } } private static void replaceStdOutAndStdErr() { stdOut = System.out; stdErr = System.err; buffer = new ByteArrayOutputStream(); PrintStream capture = new PrintStream(buffer); System.setOut(capture); System.setErr(capture); } private static void restoreStdOutAndStdErr() { System.setOut(stdOut); System.setErr(stdErr); } private static ClusterClient<String> createClusterClient(String expectedResponse) throws Exception { final ClusterClient<String> clusterClient = mock(ClusterClient.class); when(clusterClient.triggerSavepoint(any(JobID.class), nullable(String.class))) .thenReturn(CompletableFuture.completedFuture(expectedResponse)); return clusterClient; } private static ClusterClient<String> createFailingClusterClient(Exception expectedException) throws Exception { final ClusterClient<String> clusterClient = mock(ClusterClient.class); when(clusterClient.triggerSavepoint(any(JobID.class), nullable(String.class))) .thenReturn(FutureUtils.completedExceptionally(expectedException)); return clusterClient; } }
package org.knowm.xchange.currency;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
import java.io.Serializable;
import org.knowm.xchange.instrument.Instrument;

/**
 * Value object to provide the following to API:
 *
 * <ul>
 *   <li>Provision of major currency symbol pairs (EUR/USD, GBP/USD etc)
 *   <li>Provision of arbitrary symbol pairs for exchange index trading, notional currencies etc
 * </ul>
 *
 * <p>Symbol pairs are quoted, for example, as EUR/USD 1.25 such that 1 EUR can be purchased with
 * 1.25 USD
 */
public class CurrencyPair extends Instrument implements Comparable<CurrencyPair>, Serializable {

  private static final long serialVersionUID = 414711266389792746L;

  // Provide some standard major symbols
  public static final CurrencyPair EUR_USD = new CurrencyPair(Currency.EUR, Currency.USD);
  public static final CurrencyPair GBP_USD = new CurrencyPair(Currency.GBP, Currency.USD);
  public static final CurrencyPair USD_JPY = new CurrencyPair(Currency.USD, Currency.JPY);
  public static final CurrencyPair JPY_USD = new CurrencyPair(Currency.JPY, Currency.USD);
  public static final CurrencyPair USD_CHF = new CurrencyPair(Currency.USD, Currency.CHF);
  public static final CurrencyPair USD_AUD = new CurrencyPair(Currency.USD, Currency.AUD);
  public static final CurrencyPair USD_CAD = new CurrencyPair(Currency.USD, Currency.CAD);
  public static final CurrencyPair USD_RUR = new CurrencyPair(Currency.USD, Currency.RUR);
  public static final CurrencyPair EUR_RUR = new CurrencyPair(Currency.EUR, Currency.RUR);
  public static final CurrencyPair USD_XRP = new CurrencyPair(Currency.USD, Currency.XRP);
  public static final CurrencyPair EUR_XRP = new CurrencyPair(Currency.EUR, Currency.XRP);
  public static final CurrencyPair USD_XVN = new CurrencyPair(Currency.USD, Currency.XVN);
  public static final CurrencyPair EUR_XVN = new CurrencyPair(Currency.EUR, Currency.XVN);
  public static final CurrencyPair KRW_XRP = new CurrencyPair(Currency.KRW, Currency.XRP);

  // Provide some courtesy BTC major symbols
  public static final CurrencyPair BTC_USD = new CurrencyPair(Currency.BTC, Currency.USD);
  public static final CurrencyPair BTC_GBP = new CurrencyPair(Currency.BTC, Currency.GBP);
  public static final CurrencyPair BTC_EUR = new CurrencyPair(Currency.BTC, Currency.EUR);
  public static final CurrencyPair BTC_JPY = new CurrencyPair(Currency.BTC, Currency.JPY);
  public static final CurrencyPair BTC_CHF = new CurrencyPair(Currency.BTC, Currency.CHF);
  public static final CurrencyPair BTC_AUD = new CurrencyPair(Currency.BTC, Currency.AUD);
  public static final CurrencyPair BTC_CAD = new CurrencyPair(Currency.BTC, Currency.CAD);
  public static final CurrencyPair BTC_CNY = new CurrencyPair(Currency.BTC, Currency.CNY);
  public static final CurrencyPair BTC_DKK = new CurrencyPair(Currency.BTC, Currency.DKK);
  public static final CurrencyPair BTC_HKD = new CurrencyPair(Currency.BTC, Currency.HKD);
  public static final CurrencyPair BTC_MXN = new CurrencyPair(Currency.BTC, Currency.MXN);
  public static final CurrencyPair BTC_NZD = new CurrencyPair(Currency.BTC, Currency.NZD);
  public static final CurrencyPair BTC_PLN = new CurrencyPair(Currency.BTC, Currency.PLN);
  public static final CurrencyPair BTC_RUB = new CurrencyPair(Currency.BTC, Currency.RUB);
  public static final CurrencyPair BTC_SEK = new CurrencyPair(Currency.BTC, Currency.SEK);
  public static final CurrencyPair BTC_SGD = new CurrencyPair(Currency.BTC, Currency.SGD);
  public static final CurrencyPair BTC_NOK = new CurrencyPair(Currency.BTC, Currency.NOK);
  public static final CurrencyPair BTC_THB = new CurrencyPair(Currency.BTC, Currency.THB);
  public static final CurrencyPair BTC_RUR = new CurrencyPair(Currency.BTC, Currency.RUR);
  public static final CurrencyPair BTC_ZAR = new CurrencyPair(Currency.BTC, Currency.ZAR);
  public static final CurrencyPair BTC_BRL = new CurrencyPair(Currency.BTC, Currency.BRL);
  public static final CurrencyPair BTC_BRZ = new CurrencyPair(Currency.BTC, Currency.BRZ);
  public static final CurrencyPair BTC_CZK = new CurrencyPair(Currency.BTC, Currency.CZK);
  public static final CurrencyPair BTC_ILS = new CurrencyPair(Currency.BTC, Currency.ILS);
  public static final CurrencyPair BTC_KRW = new CurrencyPair(Currency.BTC, Currency.KRW);
  public static final CurrencyPair BTC_LTC = new CurrencyPair(Currency.BTC, Currency.LTC);
  public static final CurrencyPair BTC_XRP = new CurrencyPair(Currency.BTC, Currency.XRP);
  public static final CurrencyPair BTC_NMC = new CurrencyPair(Currency.BTC, Currency.NMC);
  public static final CurrencyPair BTC_XVN = new CurrencyPair(Currency.BTC, Currency.XVN);
  public static final CurrencyPair BTC_IDR = new CurrencyPair(Currency.BTC, Currency.IDR);
  public static final CurrencyPair BTC_PHP = new CurrencyPair(Currency.BTC, Currency.PHP);
  public static final CurrencyPair BTC_STR = new CurrencyPair(Currency.BTC, Currency.STR);
  public static final CurrencyPair BTC_INR = new CurrencyPair(Currency.BTC, Currency.INR);
  public static final CurrencyPair BTC_XAUR = new CurrencyPair(Currency.BTC, Currency.XAUR);
  public static final CurrencyPair BTC_IOC = new CurrencyPair(Currency.BTC, Currency.IOC);
  public static final CurrencyPair BTC_ZMW = new CurrencyPair(Currency.BTC, Currency.ZMW);
  public static final CurrencyPair BTC_TRY = new CurrencyPair(Currency.BTC, Currency.TRY);
  public static final CurrencyPair ETH_TRY = new CurrencyPair(Currency.ETH, Currency.TRY);

  // BCH / BCA pairs
  public static final CurrencyPair BCH_USD = new CurrencyPair(Currency.BCH, Currency.USD);
  public static final CurrencyPair BCH_AUD = new CurrencyPair(Currency.BCH, Currency.AUD);
  public static final CurrencyPair BCH_CAD = new CurrencyPair(Currency.BCH, Currency.CAD);
  public static final CurrencyPair BCH_GBP = new CurrencyPair(Currency.BCH, Currency.GBP);
  public static final CurrencyPair BCH_EUR = new CurrencyPair(Currency.BCH, Currency.EUR);
  public static final CurrencyPair BCH_BTC = new CurrencyPair(Currency.BCH, Currency.BTC);
  public static final CurrencyPair BCH_ETH = new CurrencyPair(Currency.BCH, Currency.ETH);
  public static final CurrencyPair BCH_CZK = new CurrencyPair(Currency.BCH, Currency.CZK);
  public static final CurrencyPair BCA_USD = new CurrencyPair(Currency.BCA, Currency.USD);
  public static final CurrencyPair BCA_EUR = new CurrencyPair(Currency.BCA, Currency.EUR);
  public static final CurrencyPair BCA_CNY = new CurrencyPair(Currency.BCA, Currency.CNY);
  public static final CurrencyPair BCA_JPY = new CurrencyPair(Currency.BCA, Currency.JPY);
  public static final CurrencyPair BCA_BTC = new CurrencyPair(Currency.BCA, Currency.BTC);
  public static final CurrencyPair BCA_ETH = new CurrencyPair(Currency.BCA, Currency.ETH);

  // ETH pairs
  public static final CurrencyPair ETH_BTC = new CurrencyPair(Currency.ETH, Currency.BTC);
  public static final CurrencyPair ETH_GBP = new CurrencyPair(Currency.ETH, Currency.GBP);
  public static final CurrencyPair ETH_USD = new CurrencyPair(Currency.ETH, Currency.USD);
  public static final CurrencyPair ETH_USDT = new CurrencyPair(Currency.ETH, Currency.USDT);
  public static final CurrencyPair ETH_EUR = new CurrencyPair(Currency.ETH, Currency.EUR);
  public static final CurrencyPair ETH_JPY = new CurrencyPair(Currency.ETH, Currency.JPY);
  public static final CurrencyPair ETH_CNY = new CurrencyPair(Currency.ETH, Currency.CNY);
  public static final CurrencyPair ETH_AUD = new CurrencyPair(Currency.ETH, Currency.AUD);
  public static final CurrencyPair ETH_NZD = new CurrencyPair(Currency.ETH, Currency.NZD);
  public static final CurrencyPair ETH_CZK = new CurrencyPair(Currency.ETH, Currency.CZK);

  public static final CurrencyPair XAUR_BTC = new CurrencyPair(Currency.XAUR, Currency.BTC);
  public static final CurrencyPair XDC_BTC = new CurrencyPair(Currency.XDC, Currency.BTC);
  public static final CurrencyPair SC_BTC = new CurrencyPair(Currency.SC, Currency.BTC);
  public static final CurrencyPair DCR_BTC = new CurrencyPair(Currency.DCR, Currency.BTC);

  // XRP pairs
  public static final CurrencyPair XRP_BTC = new CurrencyPair(Currency.XRP, Currency.BTC);
  public static final CurrencyPair XRP_ETH = new CurrencyPair(Currency.XRP, Currency.ETH);
  public static final CurrencyPair XRP_EUR = new CurrencyPair(Currency.XRP, Currency.EUR);
  public static final CurrencyPair XRP_USD = new CurrencyPair(Currency.XRP, Currency.USD);
  public static final CurrencyPair XRP_USDT = new CurrencyPair(Currency.XRP, Currency.USDT);
  public static final CurrencyPair XRP_GBP = new CurrencyPair(Currency.XRP, Currency.GBP);
  public static final CurrencyPair XRP_CZK = new CurrencyPair(Currency.XRP, Currency.CZK);

  // LTC pairs
  public static final CurrencyPair LTC_AUD = new CurrencyPair(Currency.LTC, Currency.AUD);
  public static final CurrencyPair LTC_GBP = new CurrencyPair(Currency.LTC, Currency.GBP);
  public static final CurrencyPair LTC_USD = new CurrencyPair(Currency.LTC, Currency.USD);
  public static final CurrencyPair LTC_KRW = new CurrencyPair(Currency.LTC, Currency.KRW);
  public static final CurrencyPair LTC_CNY = new CurrencyPair(Currency.LTC, Currency.CNY);
  public static final CurrencyPair LTC_RUR = new CurrencyPair(Currency.LTC, Currency.RUR);
  public static final CurrencyPair LTC_EUR = new CurrencyPair(Currency.LTC, Currency.EUR);
  public static final CurrencyPair LTC_BTC = new CurrencyPair(Currency.LTC, Currency.BTC);
  public static final CurrencyPair LTC_XRP = new CurrencyPair(Currency.LTC, Currency.XRP);
  public static final CurrencyPair LTC_TRY = new CurrencyPair(Currency.LTC, Currency.TRY);
  public static final CurrencyPair LTC_CZK = new CurrencyPair(Currency.LTC, Currency.CZK);

  // NMC pairs
  public static final CurrencyPair NMC_USD = new CurrencyPair(Currency.NMC, Currency.USD);
  public static final CurrencyPair NMC_CNY = new CurrencyPair(Currency.NMC, Currency.CNY);
  public static final CurrencyPair NMC_EUR = new CurrencyPair(Currency.NMC, Currency.EUR);
  public static final CurrencyPair NMC_KRW = new CurrencyPair(Currency.NMC, Currency.KRW);
  public static final CurrencyPair NMC_BTC = new CurrencyPair(Currency.NMC, Currency.BTC);
  public static final CurrencyPair NMC_LTC = new CurrencyPair(Currency.NMC, Currency.LTC);
  public static final CurrencyPair NMC_XRP = new CurrencyPair(Currency.NMC, Currency.XRP);

  public static final CurrencyPair NVC_USD = new CurrencyPair(Currency.NVC, Currency.USD);
  public static final CurrencyPair NVC_BTC = new CurrencyPair(Currency.NVC, Currency.BTC);
  public static final CurrencyPair TRC_BTC = new CurrencyPair(Currency.TRC, Currency.BTC);
  public static final CurrencyPair PPC_USD = new CurrencyPair(Currency.PPC, Currency.USD);
  public static final CurrencyPair PPC_BTC = new CurrencyPair(Currency.PPC, Currency.BTC);
  public static final CurrencyPair PPC_LTC = new CurrencyPair(Currency.PPC, Currency.LTC);
  public static final CurrencyPair FTC_USD = new CurrencyPair(Currency.FTC, Currency.USD);
  public static final CurrencyPair FTC_CNY = new CurrencyPair(Currency.FTC, Currency.CNY);
  public static final CurrencyPair FTC_BTC = new CurrencyPair(Currency.FTC, Currency.BTC);
  public static final CurrencyPair FTC_LTC = new CurrencyPair(Currency.FTC, Currency.LTC);

  // XEM pairs
  public static final CurrencyPair XEM_USD = new CurrencyPair(Currency.XEM, Currency.USD);
  public static final CurrencyPair XEM_USDT = new CurrencyPair(Currency.XEM, Currency.USDT);
  public static final CurrencyPair XEM_KRW = new CurrencyPair(Currency.XEM, Currency.KRW);
  public static final CurrencyPair XEM_JPY = new CurrencyPair(Currency.XEM, Currency.JPY);
  public static final CurrencyPair XEM_BTC = new CurrencyPair(Currency.XEM, Currency.BTC);
  public static final CurrencyPair XEM_ETH = new CurrencyPair(Currency.XEM, Currency.ETH);
  public static final CurrencyPair XEM_EUR = new CurrencyPair(Currency.XEM, Currency.EUR);

  // XMR pairs
  public static final CurrencyPair XMR_BTC = new CurrencyPair(Currency.XMR, Currency.BTC);
  public static final CurrencyPair XMR_ETH = new CurrencyPair(Currency.XMR, Currency.ETH);
  public static final CurrencyPair XMR_USD = new CurrencyPair(Currency.XMR, Currency.USD);
  public static final CurrencyPair XMR_USDT = new CurrencyPair(Currency.XMR, Currency.USDT);

  // XPM pairs
  public static final CurrencyPair XPM_USD = new CurrencyPair(Currency.XPM, Currency.USD);
  public static final CurrencyPair XPM_CNY = new CurrencyPair(Currency.XPM, Currency.CNY);
  public static final CurrencyPair XPM_BTC = new CurrencyPair(Currency.XPM, Currency.BTC);
  public static final CurrencyPair XPM_LTC = new CurrencyPair(Currency.XPM, Currency.LTC);
  public static final CurrencyPair XPM_PPC = new CurrencyPair(Currency.XPM, Currency.PPC);

  public static final CurrencyPair XVN_XRP = new CurrencyPair(Currency.XVN, Currency.XRP);

  // STEEM pairs
  public static final CurrencyPair STEEM_BTC = new CurrencyPair(Currency.STEEM, Currency.BTC);
  public static final CurrencyPair STEEM_USD = new CurrencyPair(Currency.STEEM, Currency.USD);
  public static final CurrencyPair STEEM_USDT = new CurrencyPair(Currency.STEEM, Currency.USDT);
  public static final CurrencyPair STEEM_ETH = new CurrencyPair(Currency.STEEM, Currency.ETH);
  public static final CurrencyPair STEEM_BNB = new CurrencyPair(Currency.STEEM, Currency.BNB);
  public static final CurrencyPair STEEM_KRW = new CurrencyPair(Currency.STEEM, Currency.KRW);

  // VET pairs
  public static final CurrencyPair VET_BTC = new CurrencyPair(Currency.VET, Currency.BTC);
  public static final CurrencyPair VET_USDT = new CurrencyPair(Currency.VET, Currency.USDT);
  public static final CurrencyPair VET_ETH = new CurrencyPair(Currency.VET, Currency.ETH);
  public static final CurrencyPair VET_BNB = new CurrencyPair(Currency.VET, Currency.BNB);

  // ADA pairs
  public static final CurrencyPair ADA_BTC = new CurrencyPair(Currency.ADA, Currency.BTC);
  public static final CurrencyPair ADA_USDT = new CurrencyPair(Currency.ADA, Currency.USDT);
  public static final CurrencyPair ADA_ETH = new CurrencyPair(Currency.ADA, Currency.ETH);
  public static final CurrencyPair ADA_BNB = new CurrencyPair(Currency.ADA, Currency.BNB);

  // TRX pairs
  public static final CurrencyPair TRX_BTC = new CurrencyPair(Currency.TRX, Currency.BTC);
  public static final CurrencyPair TRX_USDT = new CurrencyPair(Currency.TRX, Currency.USDT);
  public static final CurrencyPair TRX_ETH = new CurrencyPair(Currency.TRX, Currency.ETH);
  public static final CurrencyPair TRX_BNB = new CurrencyPair(Currency.TRX, Currency.BNB);

  // start of extra ANX supported pair
  // BTC
  public static final CurrencyPair BTC_XDC = new CurrencyPair(Currency.BTC, Currency.XDC);
  public static final CurrencyPair BTC_PPC = new CurrencyPair(Currency.BTC, Currency.PPC);
  public static final CurrencyPair STR_BTC = new CurrencyPair(Currency.STR, Currency.BTC);
  // LTC
  public static final CurrencyPair LTC_HKD = new CurrencyPair(Currency.LTC, Currency.HKD);
  public static final CurrencyPair LTC_XDC = new CurrencyPair(Currency.LTC, Currency.XDC);
  public static final CurrencyPair LTC_NMC = new CurrencyPair(Currency.LTC, Currency.NMC);
  public static final CurrencyPair LTC_PPC = new CurrencyPair(Currency.LTC, Currency.PPC);
  // DOGE
  public static final CurrencyPair DOGE_HKD = new CurrencyPair(Currency.DOGE, Currency.HKD);
  public static final CurrencyPair DOGE_BTC = new CurrencyPair(Currency.DOGE, Currency.BTC);
  public static final CurrencyPair DOGE_LTC = new CurrencyPair(Currency.DOGE, Currency.LTC);
  public static final CurrencyPair DOGE_NMC = new CurrencyPair(Currency.DOGE, Currency.NMC);
  public static final CurrencyPair DOGE_PPC = new CurrencyPair(Currency.DOGE, Currency.PPC);
  public static final CurrencyPair DOGE_USD = new CurrencyPair(Currency.DOGE, Currency.USD);
  public static final CurrencyPair DOGE_TRY = new CurrencyPair(Currency.DOGE, Currency.TRY);
  // XDC
  public static final CurrencyPair XDC_HKD = new CurrencyPair(Currency.XDC, Currency.HKD);
  public static final CurrencyPair XDC_LTC = new CurrencyPair(Currency.XDC, Currency.LTC);
  public static final CurrencyPair XDC_NMC = new CurrencyPair(Currency.XDC, Currency.NMC);
  public static final CurrencyPair XDC_PPC = new CurrencyPair(Currency.XDC, Currency.PPC);
  public static final CurrencyPair XDC_USD = new CurrencyPair(Currency.XDC, Currency.USD);
  // NMC
  public static final CurrencyPair NMC_HKD = new CurrencyPair(Currency.NMC, Currency.HKD);
  public static final CurrencyPair NMC_XDC = new CurrencyPair(Currency.NMC, Currency.XDC);
  public static final CurrencyPair NMC_PPC = new CurrencyPair(Currency.NMC, Currency.PPC);
  // PPC
  public static final CurrencyPair PPC_HKD = new CurrencyPair(Currency.PPC, Currency.HKD);
  public static final CurrencyPair PPC_XDC = new CurrencyPair(Currency.PPC, Currency.XDC);
  public static final CurrencyPair PPC_NMC = new CurrencyPair(Currency.PPC, Currency.NMC);
  // end

  // IOTA (note: base symbol is Currency.IOT)
  public static final CurrencyPair IOTA_USD = new CurrencyPair(Currency.IOT, Currency.USD);
  public static final CurrencyPair IOTA_BTC = new CurrencyPair(Currency.IOT, Currency.BTC);
  public static final CurrencyPair IOTA_ETH = new CurrencyPair(Currency.IOT, Currency.ETH);
  // end

  // OMG
  public static final CurrencyPair OMG_USD = new CurrencyPair(Currency.OMG, Currency.USD);
  public static final CurrencyPair OMG_BTC = new CurrencyPair(Currency.OMG, Currency.BTC);
  public static final CurrencyPair OMG_ETH = new CurrencyPair(Currency.OMG, Currency.ETH);
  // end

  // NEO
  public static final CurrencyPair NEO_USD = new CurrencyPair(Currency.NEO, Currency.USD);
  public static final CurrencyPair NEO_USDT = new CurrencyPair(Currency.NEO, Currency.USDT);
  public static final CurrencyPair NEO_BTC = new CurrencyPair(Currency.NEO, Currency.BTC);
  public static final CurrencyPair NEO_ETH = new CurrencyPair(Currency.NEO, Currency.ETH);
  // end

  // not real currencies, but tradable commodities (GH/s)
  public static final CurrencyPair GHs_BTC = new CurrencyPair(Currency.GHs, Currency.BTC);
  public static final CurrencyPair GHs_NMC = new CurrencyPair(Currency.GHs, Currency.NMC);

  public static final CurrencyPair CNC_BTC = new CurrencyPair(Currency.CNC, Currency.BTC);
  public static final CurrencyPair WDC_USD = new CurrencyPair(Currency.WDC, Currency.USD);
  public static final CurrencyPair WDC_BTC = new CurrencyPair(Currency.WDC, Currency.BTC);
  public static final CurrencyPair DVC_BTC = new CurrencyPair(Currency.DVC, Currency.BTC);
  public static final CurrencyPair DGC_BTC = new CurrencyPair(Currency.DGC, Currency.BTC);
  public static final CurrencyPair UTC_USD = new CurrencyPair(Currency.UTC, Currency.USD);
  public static final CurrencyPair UTC_EUR = new CurrencyPair(Currency.UTC, Currency.EUR);
  public static final CurrencyPair UTC_BTC = new CurrencyPair(Currency.UTC, Currency.BTC);
  public static final CurrencyPair UTC_LTC = new CurrencyPair(Currency.UTC, Currency.LTC);

  // Kraken additional pairs
  public static final CurrencyPair ADA_USD = new CurrencyPair(Currency.ADA, Currency.USD);
  public static final CurrencyPair ADA_EUR = new CurrencyPair(Currency.ADA, Currency.EUR);
  public static final CurrencyPair ADA_CAD = new CurrencyPair(Currency.ADA, Currency.CAD);
  public static final CurrencyPair ATOM_BTC = new CurrencyPair(Currency.ATOM, Currency.BTC);
  public static final CurrencyPair ATOM_ETH = new CurrencyPair(Currency.ATOM, Currency.ETH);
  public static final CurrencyPair ATOM_USD = new CurrencyPair(Currency.ATOM, Currency.USD);
  public static final CurrencyPair ATOM_EUR = new CurrencyPair(Currency.ATOM, Currency.EUR);
  public static final CurrencyPair ATOM_CAD = new CurrencyPair(Currency.ATOM, Currency.CAD);
  public static final CurrencyPair ETC_BTC = new CurrencyPair(Currency.ETC, Currency.BTC);
  public static final CurrencyPair ETC_EUR = new CurrencyPair(Currency.ETC, Currency.EUR);
  public static final CurrencyPair ETC_ETH = new CurrencyPair(Currency.ETC, Currency.ETH);
  public static final CurrencyPair ETC_USD = new CurrencyPair(Currency.ETC, Currency.USD);
  public static final CurrencyPair ICN_BTC = new CurrencyPair(Currency.ICN, Currency.BTC);
  public static final CurrencyPair ICN_ETH = new CurrencyPair(Currency.ICN, Currency.ETH);
  public static final CurrencyPair DASH_USD = new CurrencyPair(Currency.DASH, Currency.USD);
  public static final CurrencyPair DASH_EUR = new CurrencyPair(Currency.DASH, Currency.EUR);
  public static final CurrencyPair DASH_BTC = new CurrencyPair(Currency.DASH, Currency.BTC);
  public static final CurrencyPair DASH_TRY = new CurrencyPair(Currency.DASH, Currency.TRY);
  public static final CurrencyPair DASH_CZK = new CurrencyPair(Currency.DASH, Currency.CZK);
  public static final CurrencyPair MLN_ETH = new CurrencyPair(Currency.MLN, Currency.ETH);
  public static final CurrencyPair MLN_BTC = new CurrencyPair(Currency.MLN, Currency.BTC);
  public static final CurrencyPair ZEC_EUR = new CurrencyPair(Currency.ZEC, Currency.EUR);
  public static final CurrencyPair ZEC_USD = new CurrencyPair(Currency.ZEC, Currency.USD);
  public static final CurrencyPair ZEC_BTC = new CurrencyPair(Currency.ZEC, Currency.BTC);
  public static final CurrencyPair ZEN_USD = new CurrencyPair(Currency.ZEN, Currency.USD);
  public static final CurrencyPair ZEN_BTC = new CurrencyPair(Currency.ZEN, Currency.BTC);
  public static final CurrencyPair GNO_ETH = new CurrencyPair(Currency.GNO, Currency.ETH);
  public static final CurrencyPair GNO_BTC = new CurrencyPair(Currency.GNO, Currency.BTC);
  public static final CurrencyPair EOS_ETH = new CurrencyPair(Currency.EOS, Currency.ETH);
  public static final CurrencyPair EOS_BTC = new CurrencyPair(Currency.EOS, Currency.BTC);
  public static final CurrencyPair BCC_USD = new CurrencyPair(Currency.BCC, Currency.USD);
  public static final CurrencyPair BCC_BTC = new CurrencyPair(Currency.BCC, Currency.BTC);
  public static final CurrencyPair BAT_USD = new CurrencyPair(Currency.BAT, Currency.USD);
  public static final CurrencyPair BAT_BTC = new CurrencyPair(Currency.BAT, Currency.BTC);

  // Tether Pairs
  public static final CurrencyPair BTC_USDT = new CurrencyPair(Currency.BTC, Currency.USDT);
  public static final CurrencyPair DASH_USDT = new CurrencyPair(Currency.DASH, Currency.USDT);

  // UAH pairs
  public static final CurrencyPair BTC_UAH = new CurrencyPair(Currency.BTC, Currency.UAH);
  public static final CurrencyPair ETH_UAH = new CurrencyPair(Currency.ETH, Currency.UAH);
  public static final CurrencyPair BCH_UAH = new CurrencyPair(Currency.BCH, Currency.UAH);

  // Bitmex futures contracts
  public static final CurrencyPair XBT_USD = new CurrencyPair(Currency.XBT, Currency.USD);
  public static final CurrencyPair XBT_H18 = new CurrencyPair(Currency.XBT, Currency.H18);
  public static final CurrencyPair XBT_M18 = new CurrencyPair(Currency.XBT, Currency.M18);
  public static final CurrencyPair XBT_U18 = new CurrencyPair(Currency.XBT, Currency.U18);
  public static final CurrencyPair XBT_Z18 = new CurrencyPair(Currency.XBT, Currency.Z18);
  public static final CurrencyPair ADA_H18 = new CurrencyPair(Currency.ADA, Currency.H18);
  public static final CurrencyPair ADA_M18 = new CurrencyPair(Currency.ADA, Currency.M18);
  public static final CurrencyPair ADA_H19 = new CurrencyPair(Currency.ADA, Currency.H19);
  public static final CurrencyPair BCH_H18 = new CurrencyPair(Currency.BCH, Currency.H18);
  public static final CurrencyPair BCH_M18 = new CurrencyPair(Currency.BCH, Currency.M18);
  public static final CurrencyPair BCH_H19 = new CurrencyPair(Currency.BCH, Currency.H19);
  public static final CurrencyPair EOS_H19 = new CurrencyPair(Currency.EOS, Currency.H19);
  public static final CurrencyPair ETH_H18 = new CurrencyPair(Currency.ETH, Currency.H18);
  public static final CurrencyPair ETH_M18 = new CurrencyPair(Currency.ETH, Currency.M18);
  public static final CurrencyPair LTC_H18 = new CurrencyPair(Currency.LTC, Currency.H18);
  public static final CurrencyPair LTC_M18 = new CurrencyPair(Currency.LTC, Currency.M18);
  public static final CurrencyPair LTC_H19 = new CurrencyPair(Currency.LTC, Currency.H19);
  public static final CurrencyPair TRX_H19 = new CurrencyPair(Currency.TRX, Currency.H19);
  public static final CurrencyPair XRP_H18 = new CurrencyPair(Currency.XRP, Currency.H18);
  public static final CurrencyPair XRP_M18 = new CurrencyPair(Currency.XRP, Currency.M18);
  public static final CurrencyPair XRP_H19 = new CurrencyPair(Currency.XRP, Currency.H19);
  public static final CurrencyPair DASH_H18 = new CurrencyPair(Currency.DASH, Currency.H18);
  public static final CurrencyPair NEO_H18 = new CurrencyPair(Currency.NEO, Currency.H18);
  public static final CurrencyPair XMR_H18 = new CurrencyPair(Currency.XMR, Currency.H18);
  public static final CurrencyPair XLM_H18 = new CurrencyPair(Currency.XLM, Currency.H18);
  public static final CurrencyPair ZEC_H18 = new CurrencyPair(Currency.ZEC, Currency.H18);
  // "7D" has no named Currency constant, hence the getInstance lookup.
  public static final CurrencyPair ETC_7D = new CurrencyPair(Currency.ETC, Currency.getInstance("7D"));

  // Bankera Exchange pairs
  public static final CurrencyPair BNK_BTC = new CurrencyPair(Currency.BNK, Currency.BTC);
  public static final CurrencyPair BNK_ETH = new CurrencyPair(Currency.BNK, Currency.ETH);
  public static final CurrencyPair BNK_USDT = new CurrencyPair(Currency.BNK, Currency.USDT);
  public static final CurrencyPair XRP_BNK = new CurrencyPair(Currency.XRP, Currency.BNK);
  public static final CurrencyPair XLM_BNK = new CurrencyPair(Currency.XLM, Currency.BNK);
  public static final CurrencyPair LTC_BNK = new CurrencyPair(Currency.LTC, Currency.BNK);
  public static final CurrencyPair ZEC_BNK = new CurrencyPair(Currency.ZEC, Currency.BNK);
  public static final CurrencyPair XLM_BTC = new CurrencyPair(Currency.XLM, Currency.BTC);
  public static final CurrencyPair XLM_ETH = new CurrencyPair(Currency.XLM, Currency.ETH);
  public static final CurrencyPair LTC_ETH = new CurrencyPair(Currency.LTC, Currency.ETH);
  public static final CurrencyPair ZEC_ETH = new CurrencyPair(Currency.ZEC, Currency.ETH);
  public static final CurrencyPair XLM_USDT = new CurrencyPair(Currency.XLM, Currency.USDT);
  public static final CurrencyPair LTC_USDT = new CurrencyPair(Currency.LTC, Currency.USDT);
  public static final CurrencyPair ZEC_USDT = new CurrencyPair(Currency.ZEC, Currency.USDT);
  public static final CurrencyPair XLM_USD = new CurrencyPair(Currency.XLM, Currency.USD);
  public static final CurrencyPair XLM_EUR = new CurrencyPair(Currency.XLM, Currency.EUR);
  public static final CurrencyPair LINK_USD = new CurrencyPair(Currency.LINK, Currency.USD);
  public static final CurrencyPair LINK_EUR = new CurrencyPair(Currency.LINK, Currency.EUR);
  public static final CurrencyPair LINK_BTC = new CurrencyPair(Currency.LINK, Currency.BTC);
  public static final CurrencyPair LINK_ETH = new CurrencyPair(Currency.LINK, Currency.ETH);

  // dydx Exchange Spot and Perpetual Pairs
  public static final CurrencyPair WETH_USDC = new CurrencyPair(Currency.WETH, Currency.USDC);
  public static final CurrencyPair WETH_DAI = new CurrencyPair(Currency.WETH, Currency.DAI);
  public static final CurrencyPair DAI_USDC = new CurrencyPair(Currency.DAI, Currency.USDC);
  public static final CurrencyPair PBTC_USDC = new CurrencyPair(Currency.PBTC, Currency.USDC);
  public static final CurrencyPair WETH_PUSD = new CurrencyPair(Currency.WETH, Currency.PUSD);
  public static final CurrencyPair PLINK_USDC = new CurrencyPair(Currency.PLINK, Currency.USDC);

  // What you're wanting to buy/sell; never null-checked by the constructors.
  public final Currency base;
  // What currency you pay/receive with; never null-checked by the constructors.
  public final Currency counter;

  /**
   * Full constructor In general the CurrencyPair.base is what you're wanting to buy/sell. The
   * CurrencyPair.counter is what currency you want to use to pay/receive for your purchase/sale.
   *
   * @param base The base currency is what you're wanting to buy/sell
   * @param counter The counter currency is what currency you want to use to pay/receive for your
   *     purchase/sale.
   */
  public CurrencyPair(Currency base, Currency counter) {

    this.base = base;
    this.counter = counter;
  }

  /**
   * String constructor In general the CurrencyPair.base is what you're wanting to buy/sell. The
   * CurrencyPair.counter is what currency you want to use to pay/receive for your purchase/sale.
   *
   * @param baseSymbol The base symbol is what you're wanting to buy/sell
   * @param counterSymbol The counter symbol is what currency you want to use to pay/receive for
   *     your purchase/sale.
*/ public CurrencyPair(String baseSymbol, String counterSymbol) { this(Currency.getInstance(baseSymbol), Currency.getInstance(counterSymbol)); } /** * Parse currency pair from a string in the same format as returned by toString() method - ABC/XYZ */ @JsonCreator public CurrencyPair(String currencyPair) { int split; if (currencyPair.contains("-")) { split = currencyPair.indexOf('-'); } else { split = currencyPair.indexOf('/'); } if (split < 1) { throw new IllegalArgumentException( "Could not parse currency pair from '" + currencyPair + "'"); } String base = currencyPair.substring(0, split); String counter = currencyPair.substring(split + 1); this.base = Currency.getInstance(base); this.counter = Currency.getInstance(counter); } @JsonValue @Override public String toString() { return base + "/" + counter; } public boolean contains(Currency currency) { return base.equals(currency) || counter.equals(currency); } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((base == null) ? 0 : base.hashCode()); result = prime * result + ((counter == null) ? 0 : counter.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } CurrencyPair other = (CurrencyPair) obj; if (base == null) { if (other.base != null) { return false; } } else if (!base.equals(other.base)) { return false; } if (counter == null) { if (other.counter != null) { return false; } } else if (!counter.equals(other.counter)) { return false; } return true; } @Override public int compareTo(CurrencyPair o) { return (base.compareTo(o.base) << 16) + counter.compareTo(o.counter); } }
/* * The MIT License (MIT) * * Copyright (c) 2017 Daniel Gomez-Sanchez * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package org.magicdgs.readtools.utils.fastq; import org.magicdgs.readtools.RTDefaults; import org.magicdgs.readtools.utils.read.RTReadUtils; import com.google.common.annotations.VisibleForTesting; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.broadinstitute.hellbender.exceptions.GATKException; import org.broadinstitute.hellbender.utils.read.GATKRead; import java.util.Arrays; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Enum for encoding of the read name in the FASTQ format. Some of this names contain important * information that cannot be lost in the processing step. Because there are very strict formatting * of headers in the BAM file, processing read names from FASTQ files is important for lossy * transformation. 
 *
 * @author Daniel Gomez-Sanchez (magicDGS)
 */
public enum FastqReadNameEncoding {

    // CASAVA should go first because ILLUMINA detects all kind of read names even without barcode pair/information
    // regexp for CASAVA is as follows:
    // first group  = ([\\S]+)   -> any run of non-space characters is the read name
    // no group     = \\s+       -> whitespace separating the name from the comment
    // the following groups are separated by ':'
    // second group = ([012])    -> the pair-information ('1', '2', or '0')
    // third group  = ([YN])     -> PF flag (vendor quality: 'Y' = fails filter)
    // no group     = [0-9]+     -> numeric control field, not used by this framework
    // fourth group = ([^\\s]+)  -> the barcode information (any non-space characters, so barcode separators are allowed)
    CASAVA("([\\S]+)\\s+([012]):([YN]):[0-9]+:([^\\s]+).?", 2, 4, 3),
    // this ILLUMINA pattern matches with/without barcodes
    // first group  = ([^#/]+)         -> any character that is not the barcode marker or pair-info separator
    // second group = (#([^/\\s]+))?   -> one or none of '#' followed by something that is not a '/' or whitespace
    // third group  = nested in second -> the string between '#' and '/' or space (the barcode)
    // fourth group = (/([012]){1})?   -> '/0', '/1', '/2' or nothing
    // fifth group  = nested in fourth -> '0', '1' or '2'
    ILLUMINA("([^#/]+)(" + RTFastqConstants.ILLUMINA_NAME_BARCODE_DELIMITER
            + "([^/\\s]+))?(/([012]){1})?.?", 5, 3, -1);

    private static Logger logger = LogManager.getLogger(FastqReadNameEncoding.class);

    // pre-compiled pattern for this encoding
    private final Pattern pattern;

    // 1-based group indices (within 'pattern') where the pair information, barcode
    // and PF flag live; -1 means the encoding does not carry that field
    private final int pairInfoGroup;
    private final int barcodeGroup;
    private final int pfGroup;

    /** Enum constructor, which indicates in which group is the information (-1 if not available). */
    FastqReadNameEncoding(final String pattern, final int pairInfoGroup, final int barcodeGroup,
            final int pfGroup) {
        this.pattern = Pattern.compile(pattern);
        this.pairInfoGroup = pairInfoGroup;
        this.barcodeGroup = barcodeGroup;
        this.pfGroup = pfGroup;
    }

    /**
     * Gets the plain read name for this encoding, trimming everything after the white-space.
     *
     * @return illumina read name without comments.
     *
     * @throws IllegalArgumentException if the name is wrongly encoded.
     */
    @VisibleForTesting
    String getPlainName(final String readName) {
        final Matcher matcher = pattern.matcher(readName);
        if (!matcher.find()) {
            throw new IllegalArgumentException(
                    "Wrong encoded read name for " + name() + " encoding: " + readName);
        }
        // remove trailing white spaces if present (ILLUMINA's first group may span a space)
        final String plainName = matcher.group(1);
        final int index = plainName.indexOf(" ");
        return (index == -1) ? plainName : plainName.substring(0, index);
    }

    /**
     * Gets the PF flag from the read name.
     *
     * <p>Note: this information is only encoded in the {@link #CASAVA} formatting
     * (ILLUMINA has {@code pfGroup == -1}, so this always returns {@code false} for it).
     *
     * @param readName the read name.
     *
     * @return {@code true} if 'Y' is found in CASAVA formatting; {@code false} otherwise.
     */
    public boolean isPF(final String readName) {
        if (pfGroup != -1) {
            final Matcher matcher = pattern.matcher(readName);
            if (matcher.find()) {
                return matcher.group(pfGroup).equals("Y");
            }
        }
        return false;
    }

    /**
     * Returns the paired state for this read (0, 1 or 2).
     *
     * @param readName the read name to extract the pair state.
     *
     * @return the pair state 1 or 2; 0 if no information.
     */
    public String getPairedState(final String readName) {
        if (pairInfoGroup == -1) {
            return "0";
        }
        final Matcher matcher = pattern.matcher(readName);
        String pairInfo = null;
        if (matcher.find()) {
            // may still be null: the pair-info group is optional in the ILLUMINA pattern
            pairInfo = matcher.group(pairInfoGroup);
        }
        return pairInfo == null ? "0" : pairInfo;
    }

    /**
     * Returns the second of pair status of the read.
     *
     * <p>Note: {@code false} does not mean that the read is not paired or has the '1' mark.
     *
     * @param readName the read name to extract the information.
     *
     * @return {@code true} if the read has the '2' mark; {@code false} otherwise.
     */
    public boolean isSecondOfPair(final String readName) {
        return getPairedState(readName).equals("2");
    }

    /**
     * Returns the first of pair status of the read.
     *
     * <p>Note: {@code false} does not mean that the read is not paired or has the '2' mark.
     *
     * @param readName the read name to extract the information.
     *
     * @return {@code true} if the read has the '1' mark; {@code false} otherwise.
     */
    public boolean isFirstOfPair(final String readName) {
        return getPairedState(readName).equals("1");
    }

    /**
     * Returns the barcodes in the read name (split by
     * {@link RTReadUtils#DEFAULT_BARCODE_INDEX_SPLIT}).
     *
     * @param readName the read name to extract the information.
     *
     * @return the barcodes in the read name; empty array if information is not present.
     */
    public String[] getBarcodes(final String readName) {
        if (barcodeGroup != -1) {
            final Matcher matcher = pattern.matcher(readName);
            if (matcher.find()) {
                final String barcode = matcher.group(barcodeGroup);
                if (barcode != null) {
                    return RTReadUtils.DEFAULT_BARCODE_INDEX_SPLIT.split(barcode);
                }
            }
        }
        return new String[0];
    }

    /**
     * Detects the format for the read name provided, and updates the read information.
     *
     * <p>The following information will be updated:
     * <ul>
     * <li>Read name according to SAM specs (no barcode or pair-end information), without
     * white-space.</li>
     * <li>Pair-end information in the bitwise flag (using {@link #getPairedState(String)}).</li>
     * <li>PF information in the bitwise flag (using {@link #isPF(String)}).</li>
     * <li>Barcode information in the default tag (using {@link #getBarcodes(String)}).</li>
     * </ul>
     *
     * @param read the read to update.
     * @param readName the read name from a FASTQ file.
     */
    public static void updateReadFromReadName(final GATKRead read, final String readName) {
        // gets the first encoding that matches, in the order of the enum
        // (CASAVA is declared first on purpose — ILLUMINA's pattern matches almost anything)
        final FastqReadNameEncoding encoding = Arrays.stream(FastqReadNameEncoding.values())
                .filter(e -> e.pattern.matcher(readName).find())
                .findFirst().orElse(null);
        if (encoding == null) {
            throw new GATKException.ShouldNeverReachHereException("Encoding should not be null.");
        } else {
            logger.debug("Detected encoding: {}", encoding);
            read.setName(encoding.getPlainName(readName));
            final String pairState = encoding.getPairedState(readName);
            switch (pairState) {
                case "0":
                    read.setIsPaired(false);
                    break;
                case "1":
                    read.setIsFirstOfPair();
                    break;
                case "2":
                    read.setIsSecondOfPair();
                    break;
                default:
                    throw new GATKException.ShouldNeverReachHereException(
                            "Incorrect detection of pair-state: " + pairState);
            }
            read.setFailsVendorQualityCheck(encoding.isPF(readName));
            RTReadUtils.addBarcodesTagToRead(read, encoding.getBarcodes(readName));
        }
    }
}
package stepDefintions;

import java.io.IOException;

import methods.TestCaseFailed;
import cucumber.api.java.en.Then;
import env.BaseTest;

/**
 * Predefined Cucumber step definitions that delegate to the helper objects
 * (navigationObj, assertionObj, inputObj, clickObj, progressObj, javascriptObj,
 * screenshotObj, configObj) and the WebDriver instance made available through
 * {@link env.BaseTest} — presumably as interface constants; confirm in env.BaseTest.
 *
 * <p>Each method only validates its locator type (where applicable) and forwards
 * the captured arguments; all real work happens in the helper classes.
 */
public class PredefinedStepDefinitions implements BaseTest {
	//Navigation Steps

	//Step to navigate to specified URL
	@Then("^I navigate to \"([^\"]*)\"$")
	public void navigate_to(String link) {
		navigationObj.navigateTo(link);
	}

	//Step to navigate forward
	@Then("^I navigate forward")
	public void navigate_forward() {
		navigationObj.navigate("forward");
	}

	//Step to navigate backward
	@Then("^I navigate back")
	public void navigate_back() {
		navigationObj.navigate("back");
	}

	// steps to refresh page
	@Then("^I refresh page$")
	public void refresh_page() {
		driver.navigate().refresh();
	}

	// Switch between windows
	//Switch to new window
	@Then("^I switch to new window$")
	public void switch_to_new_window() {
		navigationObj.switchToNewWindow();
	}

	//Switch to old window
	@Then("^I switch to previous window$")
	public void switch_to_old_window() {
		navigationObj.switchToOldWindow();
	}

	//Switch to new window by window title
	@Then("^I switch to window having title \"(.*?)\"$")
	public void switch_to_window_by_title(String windowTitle) throws Exception {
		navigationObj.switchToWindowByTitle(windowTitle);
	}

	//Close new window
	@Then("^I close new window$")
	public void close_new_window() {
		navigationObj.closeNewWindow();
	}

	// Switch between frames
	// Step to switch to frame by web element
	@Then("^I switch to frame having (.+) \"(.*?)\"$")
	public void switch_frame_by_element(String method, String value) {
		navigationObj.switchFrame(method, value);
	}

	// step to switch to main content
	@Then("^I switch to main content$")
	public void switch_to_default_content() {
		navigationObj.switchToDefaultContent();
	}

	// To interact with browser
	// step to resize browser
	@Then("^I resize browser window size to width (\\d+) and height (\\d+)$")
	public void resize_browser(int width, int heigth) {
		navigationObj.resizeBrowser(width, heigth);
	}

	// step to maximize browser
	@Then("^I maximize browser window$")
	public void maximize_browser() {
		navigationObj.maximizeBrowser();
	}

	//Step to close the browser
	@Then("^I close browser$")
	public void close_browser() {
		navigationObj.closeDriver();
	}

	// zoom in/out page
	// steps to zoom in page
	@Then("^I zoom in page$")
	public void zoom_in() {
		navigationObj.zoomInOut("ADD");
	}

	// steps to zoom out page
	@Then("^I zoom out page$")
	public void zoom_out() {
		navigationObj.zoomInOut("SUBTRACT");
	}

	// zoom out webpage till necessary element displays
	// steps to zoom out till element displays
	// NOTE(review): the literal "substract" looks misspelled, but it is a runtime key —
	// verify it matches what zoomInOutTillElementDisplay expects before changing it
	@Then("^I zoom out page till I see element having (.+) \"(.*?)\"$")
	public void zoom_till_element_display(String type, String accessName) throws Exception {
		miscmethodObj.validateLocator(type);
		navigationObj.zoomInOutTillElementDisplay(type,"substract", accessName);
	}

	// reset webpage view
	@Then("^I reset page view$")
	public void reset_page_zoom() {
		navigationObj.zoomInOut("reset");
	}

	// scroll webpage
	@Then("^I scroll to (top|end) of page$")
	public void scroll_page(String to) throws Exception {
		navigationObj.scrollPage(to);
	}

	// scroll webpage to specific element
	@Then("^I scroll to element having (.+) \"(.*?)\"$")
	public void scroll_to_element(String type, String accessName) throws Exception {
		miscmethodObj.validateLocator(type);
		navigationObj.scrollToElement(type, accessName);
	}

	// hover over element
	// Note: Doesn't work on Windows firefox
	@Then("^I hover over element having (.+) \"(.*?)\"$")
	public void hover_over_element(String type, String accessName) throws Exception {
		miscmethodObj.validateLocator(type);
		navigationObj.hoverOverElement(type, accessName);
	}

	//Assertion steps
	/** page title checking
	 * @param present : empty string means the title SHOULD match; "not" inverts the check
	 * @param title : the expected page title
	 */
	@Then("^I should\\s*((?:not)?)\\s+see page title as \"(.+)\"$")
	public void check_title(String present,String title) throws TestCaseFailed {
		//System.out.println("Present :" + present.isEmpty());
		assertionObj.checkTitle(title,present.isEmpty());
	}

	// step to check element partial text
	@Then("^I should\\s*((?:not)?)\\s+see page title having partial text as \"(.*?)\"$")
	public void check_partial_text(String present, String partialTextTitle) throws TestCaseFailed {
		//System.out.println("Present :" + present.isEmpty());
		assertionObj.checkPartialTitle(partialTextTitle, present.isEmpty());
	}

	// step to check element text
	@Then("^element having (.+) \"([^\"]*)\" should\\s*((?:not)?)\\s+have text as \"(.*?)\"$")
	public void check_element_text(String type, String accessName,String present,String value) throws Exception {
		miscmethodObj.validateLocator(type);
		assertionObj.checkElementText(type, value, accessName,present.isEmpty());
	}

	//step to check element partial text
	@Then("^element having (.+) \"([^\"]*)\" should\\s*((?:not)?)\\s+have partial text as \"(.*?)\"$")
	public void check_element_partial_text(String type,String accessName,String present,String value) throws Exception {
		miscmethodObj.validateLocator(type);
		assertionObj.checkElementPartialText(type, value, accessName, present.isEmpty());
	}

	// step to check attribute value
	@Then("^element having (.+) \"([^\"]*)\" should\\s*((?:not)?)\\s+have attribute \"(.*?)\" with value \"(.*?)\"$")
	public void check_element_attribute(String type,String accessName,String present,String attrb,String value) throws Exception {
		miscmethodObj.validateLocator(type);
		assertionObj.checkElementAttribute(type, attrb, value, accessName, present.isEmpty());
	}

	// step to check element enabled or not
	@Then("^element having (.+) \"([^\"]*)\" should\\s*((?:not)?)\\s+be (enabled|disabled)$")
	public void check_element_enable(String type, String accessName,String present,String state) throws Exception {
		miscmethodObj.validateLocator(type);
		// "not disabled" / "not enabled" invert the expected state
		boolean flag = state.equals("enabled");
		if(!present.isEmpty()) {
			flag = !flag;
		}
		assertionObj.checkElementEnable(type, accessName, flag);
	}

	//step to check element present or not
	@Then("^element having (.+) \"(.*?)\" should\\s*((?:not)?)\\s+be present$")
	public void check_element_presence(String type,String accessName,String present) throws Exception {
		miscmethodObj.validateLocator(type);
		assertionObj.checkElementPresence(type, accessName, present.isEmpty());
	}

	//step to assert checkbox is checked or unchecked
	@Then("^checkbox having (.+) \"(.*?)\" should be (checked|unchecked)$")
	public void is_checkbox_checked(String type, String accessName,String state) throws Exception {
		miscmethodObj.validateLocator(type);
		boolean flag = state.equals("checked");
		assertionObj.isCheckboxChecked(type, accessName, flag);
	}

	//steps to assert radio button checked or unchecked
	@Then("^radio button having (.+) \"(.*?)\" should be (selected|unselected)$")
	public void is_radio_button_selected(String type,String accessName,String state) throws Exception {
		miscmethodObj.validateLocator(type);
		boolean flag = state.equals("selected");
		assertionObj.isRadioButtonSelected(type, accessName, flag);
	}

	//steps to assert option by text from radio button group selected/unselected
	@Then("^option \"(.*?)\" by (.+) from radio button group having (.+) \"(.*?)\" should be (selected|unselected)$")
	public void is_option_from_radio_button_group_selected(String option,String attrb,String type,String accessName,String state) throws Exception {
		miscmethodObj.validateLocator(type);
		boolean flag = state.equals("selected");
		assertionObj.isOptionFromRadioButtonGroupSelected(type,attrb,option,accessName,flag);
	}

	//steps to check link presence (overload of check_element_presence with a fixed locator type)
	@Then("^link having text \"(.*?)\" should\\s*((?:not)?)\\s+be present$")
	public void check_element_presence(String accessName,String present) throws TestCaseFailed, Exception {
		assertionObj.checkElementPresence("linkText",accessName,present.isEmpty());
	}

	//steps to check partial link presence
	@Then("^link having partial text \"(.*?)\" should\\s*((?:not)?)\\s+be present$")
	public void check_partial_element_presence(String accessName,String present) throws TestCaseFailed, Exception {
		assertionObj.checkElementPresence("partialLinkText", accessName, present.isEmpty());
	}

	//step to assert javascript pop-up alert text
	@Then("^I should see alert text as \"(.*?)\"$")
	public void check_alert_text(String actualValue) throws TestCaseFailed {
		assertionObj.checkAlertText(actualValue);
	}

	// step to check dropdown selection state
	@Then("^option \"(.*?)\" by (.+) from dropdown having (.+) \"(.*?)\" should be (selected|unselected)$")
	public void is_option_from_dropdown_selected(String option,String by,String type,String accessName,String state) throws Exception {
		miscmethodObj.validateLocator(type);
		boolean flag = state.equals("selected");
		assertionObj.isOptionFromDropdownSelected(type,by,option,accessName,flag);
	}

	//Input steps
	// enter text into input field steps
	@Then("^I enter \"([^\"]*)\" into input field having (.+) \"([^\"]*)\"$")
	public void enter_text(String text, String type,String accessName) throws Exception {
		miscmethodObj.validateLocator(type);
		inputObj.enterText(type, text, accessName);
	}

	// clear input field steps
	@Then("^I clear input field having (.+) \"([^\"]*)\"$")
	public void clear_text(String type, String accessName) throws Exception {
		miscmethodObj.validateLocator(type);
		inputObj.clearText(type, accessName);
	}

	// select option by text/value from dropdown
	@Then("^I select \"(.*?)\" option by (.+) from dropdown having (.+) \"(.*?)\"$")
	public void select_option_from_dropdown(String option,String optionBy,String type,String accessName) throws Exception {
		miscmethodObj.validateLocator(type);
		miscmethodObj.validateOptionBy(optionBy);
		inputObj.selectOptionFromDropdown(type,optionBy, option, accessName);
	}

	// select option by index from dropdown
	@Then("^I select (\\d+) option by index from dropdown having (.+) \"(.*?)\"$")
	public void select_option_from_dropdown_by_index(String option, String type, String accessName) throws Exception {
		miscmethodObj.validateLocator(type);
		inputObj.selectOptionFromDropdown(type,"selectByIndex", option, accessName);
	}

	// select option by text/value from multiselect
	@Then("^I select \"(.*?)\" option by (.+) from multiselect dropdown having (.+) \"(.*?)\"$")
	public void select_option_from_multiselect_dropdown(String option,String optionBy, String type,String accessName) throws Exception {
		miscmethodObj.validateLocator(type);
		miscmethodObj.validateOptionBy(optionBy);
		inputObj.selectOptionFromDropdown(type,optionBy, option, accessName);
	}

	// select option by index from multiselect
	@Then("^I select (\\d+) option by index from multiselect dropdown having (.+) \"(.*?)\"$")
	public void select_option_from_multiselect_dropdown_by_index(String option, String type, String accessName) throws Exception {
		miscmethodObj.validateLocator(type);
		inputObj.selectOptionFromDropdown(type,"selectByIndex", option, accessName);
	}

	// deselect option by text/value from multiselect
	@Then("^I deselect \"(.*?)\" option by (.+) from multiselect dropdown having (.+) \"(.*?)\"$")
	public void deselect_option_from_multiselect_dropdown(String option,String optionBy, String type,String accessName) throws Exception {
		miscmethodObj.validateLocator(type);
		miscmethodObj.validateOptionBy(optionBy);
		inputObj.deselectOptionFromDropdown(type, optionBy, option, accessName);
	}

	// deselect option by index from multiselect
	@Then("^I deselect (\\d+) option by index from multiselect dropdown having (.+) \"(.*?)\"$")
	public void deselect_option_from_multiselect_dropdown_by_index(String option, String type, String accessName) throws Exception {
		miscmethodObj.validateLocator(type);
		inputObj.deselectOptionFromDropdown(type, "selectByIndex", option, accessName);
	}

	// step to select option from multiselect dropdown list (not implemented yet)
	/*@Then("^I select all options from multiselect dropdown having (.+) \"(.*?)\"$")
	public void select_all_option_from_multiselect_dropdown(String type,String accessName) throws Exception {
		miscmethod.validateLocator(type);
		//inputObj.
		//select_all_option_from_multiselect_dropdown(type, access_name)
	}*/

	// step to unselect option from multiselect dropdown list
	@Then("^I deselect all options from multiselect dropdown having (.+) \"(.*?)\"$")
	public void unselect_all_option_from_multiselect_dropdown(String type, String accessName) throws Exception {
		miscmethodObj.validateLocator(type);
		inputObj.unselectAllOptionFromMultiselectDropdown(type, accessName);
	}

	//check checkbox steps
	@Then("^I check the checkbox having (.+) \"(.*?)\"$")
	public void check_checkbox(String type, String accessName) throws Exception {
		miscmethodObj.validateLocator(type);
		inputObj.checkCheckbox(type, accessName);
	}

	//uncheck checkbox steps
	@Then("^I uncheck the checkbox having (.+) \"(.*?)\"$")
	public void uncheck_checkbox(String type,String accessName) throws Exception {
		miscmethodObj.validateLocator(type);
		inputObj.uncheckCheckbox(type, accessName);
	}

	//steps to toggle checkbox
	@Then("^I toggle checkbox having (.+) \"(.*?)\"$")
	public void toggle_checkbox(String type,String accessName) throws Exception {
		miscmethodObj.validateLocator(type);
		inputObj.toggleCheckbox(type, accessName);
	}

	// step to select radio button
	@Then("^I select radio button having (.+) \"(.*?)\"$")
	public void select_radio_button(String type, String accessName) throws Exception {
		miscmethodObj.validateLocator(type);
		inputObj.selectRadioButton(type, accessName);
	}

	// steps to select option by text from radio button group
	@Then("^I select \"(.*?)\" option by (.+) from radio button group having (.+) \"(.*?)\"$")
	public void select_option_from_radio_btn_group(String option,String by, String type, String accessName) throws Exception {
		miscmethodObj.validateLocator(type);
		//miscmethodObj.validateOptionBy(optionBy);
		inputObj.selectOptionFromRadioButtonGroup(type, option, by, accessName);
	}

	//Click element Steps
	// click on web element
	@Then("^I click on element having (.+) \"(.*?)\"$")
	public void click(String type,String accessName) throws Exception {
		miscmethodObj.validateLocator(type);
		clickObj.click(type, accessName);
	}

	//Forcefully click on element
	@Then("^I forcefully click on element having (.+) \"(.*?)\"$")
	public void click_forcefully(String type,String accessName) throws Exception {
		miscmethodObj.validateLocator(type);
		clickObj.clickForcefully(type,accessName);
	}

	// double click on web element
	@Then("^I double click on element having (.+) \"(.*?)\"$")
	public void double_click(String type, String accessValue) throws Exception {
		miscmethodObj.validateLocator(type);
		clickObj.doubleClick(type, accessValue);
	}

	// steps to click on link
	@Then("^I click on link having text \"(.*?)\"$")
	public void click_link(String accessName) {
		clickObj.click("linkText", accessName);
	}

	//Step to click on partial link
	@Then("^I click on link having partial text \"(.*?)\"$")
	public void click_partial_link(String accessName) {
		clickObj.click("partialLinkText", accessName);
	}

	//Progress methods
	// wait for specific period of time
	// NOTE(review): this overloads (not overrides) Object.wait — legal because the
	// signature differs, but an easily-confused name; kept for step compatibility
	@Then("^I wait for (\\d+) sec$")
	public void wait(String time) throws NumberFormatException, InterruptedException {
		progressObj.wait(time);
	}

	//wait for specific element to display for specific period of time
	@Then("^I wait (\\d+) seconds for element having (.+) \"(.*?)\" to display$")
	public void wait_for_ele_to_display(String duration, String type, String accessName) throws Exception {
		miscmethodObj.validateLocator(type);
		progressObj.waitForElementToDisplay(type, accessName, duration);
	}

	// wait for specific element to enable for specific period of time
	@Then("^I wait (\\d+) seconds for element having (.+) \"(.*?)\" to be enabled$")
	public void wait_for_ele_to_click(String duration, String type, String accessName) throws Exception {
		miscmethodObj.validateLocator(type);
		progressObj.waitForElementToClick(type, accessName, duration);
	}

	//JavaScript handling steps
	//Step to accept javascript alert
	@Then("^I accept alert$")
	public void handle_alert() {
		javascriptObj.handleAlert("accept");
	}

	//Steps to dismiss javascript alert
	@Then("^I dismiss alert$")
	public void dismiss_alert() {
		javascriptObj.handleAlert("dismiss");
	}

	//Screenshot methods
	@Then("^I take screenshot$")
	public void take_screenshot() throws IOException {
		screenshotObj.takeScreenShot();
	}

	//Configuration steps
	// step to print configuration
	@Then("^I print configuration$")
	public void print_config() {
		configObj.printDesktopConfiguration();
	}
}
package com.planet_ink.coffee_mud.Areas; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.Basic.StdItem; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.Libraries.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.lang.ref.WeakReference; import java.util.*; /* Copyright 2000-2014 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
 */
/**
 * A "thin" area that acts as an instance template: when a player enters a room of the
 * parent copy, a per-group child copy of the whole area is cloned on the fly and the
 * player is redirected into it. Child copies load their rooms lazily from the parent's
 * database records and are flushed once empty.
 */
public class StdThinInstance extends StdThinArea
{
	@Override public String ID(){ return "StdThinInstance";}

	private long flags=Area.FLAG_THIN|Area.FLAG_INSTANCE_PARENT;

	@Override public long flags(){return flags;}

	// live child instances of this (parent) area, plus the mobs assigned to each
	private final SVector<AreaInstanceChild> instanceChildren = new SVector<AreaInstanceChild>();
	// monotonically increasing suffix source for child area names ("<n>_<parentName>")
	private volatile int instanceCounter=0;
	// tick countdown until the next sweep of empty child instances
	private long childCheckDown=CMProps.getMillisPerMudHour()/CMProps.getTickMillis();
	// weak cache of the parent area looked up from this child's name
	private WeakReference<Area> parentArea = null;

	// Returns the "#..." suffix of a room ID, or null if no '#' is present.
	protected String getStrippedRoomID(String roomID)
	{
		final int x=roomID.indexOf('#');
		if(x<0)
			return null;
		return roomID.substring(x);
	}

	// Rewrites a parent-area room ID into the equivalent ID inside this area.
	protected String convertToMyArea(String roomID)
	{
		final String strippedID=getStrippedRoomID(roomID);
		if(strippedID==null)
			return null;
		return Name()+strippedID;
	}

	// Resolves the parent area for a child instance. Child names have the form
	// "<number>_<parentName>"; returns null when this area is not a valid child.
	protected Area getParentArea()
	{
		if((parentArea!=null)&&(parentArea.get()!=null))
			return parentArea.get();
		final int x=Name().indexOf('_');
		if(x<0)
			return null;
		if(!CMath.isNumber(Name().substring(0,x)))
			return null;
		final Area parentA = CMLib.map().getArea(Name().substring(x+1));
		if((parentA==null)
		||(!CMath.bset(parentA.flags(),Area.FLAG_INSTANCE_PARENT))
		||(CMath.bset(parentA.flags(),Area.FLAG_INSTANCE_CHILD)))
			return null;
		parentArea=new WeakReference<Area>(parentA);
		return parentA;
	}

	/**
	 * For child instances, lazily clones the requested room from the parent area's
	 * database record (contents, exits, inhabitants), re-homes it into this area,
	 * and relinks its exits to local copies. Parent areas just defer to super.
	 */
	@Override
	public Room getRoom(String roomID)
	{
		if(!CMath.bset(flags(),Area.FLAG_INSTANCE_CHILD))
			return super.getRoom(roomID);
		if(!isRoom(roomID))
			return null;
		Room R=super.getRoom(roomID);
		if(((R==null)||(R.amDestroyed()))&&(roomID!=null))
		{
			final Area parentA=getParentArea();
			if(parentA==null)
				return null;
			if(roomID.toUpperCase().startsWith(Name().toUpperCase()+"#"))
				roomID=Name()+roomID.substring(Name().length()); // for case sensitive situations
			R=parentA.getRoom(parentA.Name()+getStrippedRoomID(roomID));
			if(R==null)
				return null;
			final Room origRoom=R;
			R=CMLib.database().DBReadRoomObject(R.roomID(), false);
			final TreeMap<String,Room> V=new TreeMap<String,Room>();
			V.put(roomID,R);
			CMLib.database().DBReadRoomExits(R.roomID(), R, false);
			CMLib.database().DBReadContent(R.roomID(), R, true);
			R.clearSky();
			if(R instanceof GridLocale)
				((GridLocale)R).clearGrid(null);
			// detach the parent's exits before re-homing the room into this area
			for(int d=Directions.NUM_DIRECTIONS()-1;d>=0;d--)
				R.rawDoors()[d]=null;
			R.setRoomID(roomID);
			R.setArea(this);
			addProperRoom(R);
			// lock on the interned ID string so two threads don't relink the same room
			synchronized(("SYNC"+roomID).intern())
			{
				for(int d=Directions.NUM_DIRECTIONS()-1;d>=0;d--)
				{
					final Room dirR=origRoom.rawDoors()[d];
					if(dirR!=null)
					{
						final String myRID=dirR.roomID();
						if((myRID!=null)&&(myRID.length()>0)&&(dirR.getArea()==parentA))
						{
							final String localDirRID=convertToMyArea(myRID);
							final Room localDirR=getProperRoom(localDirRID);
							if(localDirR!=null)
								R.rawDoors()[d]=localDirR;
							else
							if(localDirRID==null)
								Log.errOut("StdThinInstance","Error in linked room ID "+origRoom.roomID()+", dir="+d);
							else
							{
								// neighbor not loaded yet: leave a thin placeholder to be faulted in later
								R.rawDoors()[d]=CMClass.getLocale("ThinRoom");
								R.rawDoors()[d].setRoomID(localDirRID);
								R.rawDoors()[d].setArea(this);
							}
						}
						else
							R.rawDoors()[d]=dirR;
					}
				}
			}
			for(final Enumeration<MOB> e=R.inhabitants();e.hasMoreElements();)
				e.nextElement().bringToLife(R,true);
			R.startItemRejuv();
			fillInAreaRoom(R);
			R.setExpirationDate(System.currentTimeMillis()+WorldMap.ROOM_EXPIRATION_MILLIS);
		}
		return R;
	}

	/**
	 * Destroys the child instance at the given index if it is inactive and no tracked
	 * mob is still inside it; stranded mobs are sent back to their start rooms.
	 * Returns true if the child was actually flushed.
	 */
	protected boolean flushInstance(int index)
	{
		final Area childA=instanceChildren.elementAt(index).A;
		if(childA.getAreaState() != Area.State.ACTIVE)
		{
			final List<WeakReference<MOB>> V=instanceChildren.elementAt(index).mobs;
			boolean anyInside=false;
			for(final WeakReference<MOB> wmob : V)
			{
				final MOB M=wmob.get();
				if((M!=null)
				&&CMLib.flags().isInTheGame(M,true)
				&&(M.location()!=null)
				&&(M.location().getArea()==childA))
				{
					anyInside=true;
					break;
				}
			}
			if(!anyInside)
			{
				instanceChildren.remove(index);
				for(final WeakReference<MOB> wmob : V)
				{
					final MOB M=wmob.get();
					if((M!=null)
					&&(M.location()!=null)
					&&(M.location().getArea()==this))
						M.setLocation(M.getStartRoom());
				}
				final MOB mob=CMClass.sampleMOB();
				for(final Enumeration<Room> e=childA.getProperMap();e.hasMoreElements();)
				{
					final Room R=e.nextElement();
					R.executeMsg(mob,CMClass.getMsg(mob,R,null,CMMsg.MSG_EXPIRE,null));
				}
				CMLib.map().delArea(childA);
				childA.destroy();
				return true;
			}
		}
		return false;
	}

	/**
	 * Parent areas periodically (about once per mud-hour) sweep their child
	 * instances and flush the ones that have emptied out.
	 */
	@Override
	public boolean tick(Tickable ticking, int tickID)
	{
		if(!super.tick(ticking, tickID))
			return false;
		if(CMath.bset(flags(),Area.FLAG_INSTANCE_CHILD))
			return true;
		if((--childCheckDown)<=0)
		{
			childCheckDown=CMProps.getMillisPerMudHour()/CMProps.getTickMillis();
			synchronized(instanceChildren)
			{
				for(int i=instanceChildren.size()-1;i>=0;i--)
					flushInstance(i);
			}
		}
		return true;
	}

	/**
	 * Child instances listen for a non-magical spoken "RESET INSTANCE" at an
	 * entrance room; everyone tracked in the instance is moved out and the child
	 * is flushed so a fresh copy can be created.
	 */
	@Override
	public void executeMsg(final Environmental myHost, final CMMsg msg)
	{
		super.executeMsg(myHost, msg);
		if(CMath.bset(flags(),Area.FLAG_INSTANCE_CHILD)
		&&(msg.sourceMinor()==CMMsg.TYP_SPEAK)
		&&(msg.sourceMessage()!=null)
		&&((msg.sourceMajor()&CMMsg.MASK_MAGIC)==0))
		{
			final String said=CMStrings.getSayFromMessage(msg.sourceMessage());
			if("RESET INSTANCE".equalsIgnoreCase(said))
			{
				// the speaker must be in a room bordering a non-instance room (an "entrance")
				Room returnToRoom=null;
				final Room thisRoom=msg.source().location();
				if(thisRoom.getArea()==this)
				{
					for(int d=0;d<Directions.NUM_DIRECTIONS();d++)
					{
						final Room R=thisRoom.getRoomInDir(d);
						if((R!=null)&&(R.getArea()!=null)&&(R.getArea()!=this))
							returnToRoom=R;
					}
				}
				if(returnToRoom==null)
				{
					msg.addTrailerMsg(CMClass.getMsg(msg.source(),null,null,CMMsg.MSG_OK_ACTION,CMMsg.NO_EFFECT,CMMsg.NO_EFFECT, _("You must be at an entrance to reset the area.")));
					return;
				}
				final Area A=this.getParentArea();
				if(A instanceof StdThinInstance)
				{
					final StdThinInstance parentA=(StdThinInstance)A;
					synchronized(instanceChildren)
					{
						for(int i=0;i<parentA.instanceChildren.size();i++)
						{
							final List<WeakReference<MOB>> V=parentA.instanceChildren.elementAt(i).mobs;
							if(parentA.instanceChildren.elementAt(i).A==this)
							{
								for(final WeakReference<MOB> wM : V)
								{
									final MOB M=wM.get();
									if((M!=null)
									&&CMLib.flags().isInTheGame(M,true)
									&&(M.location()!=null)
									&&(M.location()!=returnToRoom)
									&&(M.location().getArea()==this))
									{
										returnToRoom.bringMobHere(M, true);
										CMLib.commands().postLook(M, true);
									}
								}
								// deactivate so flushInstance() will consider this child flushable
								setAreaState(Area.State.PASSIVE);
								if(flushInstance(i))
									msg.addTrailerMsg(CMClass.getMsg(msg.source(),CMMsg.MSG_OK_ACTION,_("The instance has been reset.")));
								else
									msg.addTrailerMsg(CMClass.getMsg(msg.source(),CMMsg.MSG_OK_ACTION,_("The instance was unable to be reset.")));
								return;
							}
						}
					}
				}
				msg.addTrailerMsg(CMClass.getMsg(msg.source(),CMMsg.MSG_OK_ACTION,_("The instance failed to reset.")));
			}
		}
	}

	/**
	 * Parent areas report stats averaged over their children's stats (cached in
	 * Resources under "STATS_<NAME>"); child instances defer to super.
	 */
	@Override
	public int[] getAreaIStats()
	{
		if(!CMProps.getBoolVar(CMProps.Bool.MUDSTARTED))
			return emptyStats;
		final Area parentArea=getParentArea();
		final String areaName = (parentArea==null)?Name():parentArea.Name();
		int[] statData=(int[])Resources.getResource("STATS_"+areaName.toUpperCase());
		if(statData!=null)
			return statData;
		synchronized(("STATS_"+areaName).intern())
		{
			if(parentArea==null)
			{
				statData=super.getAreaIStats();
				if(statData==emptyStats)
				{
					// average the stats of all child instances that have stats
					final Enumeration<AreaInstanceChild> childE=instanceChildren.elements();
					int ct=0;
					if(childE.hasMoreElements())
					{
						statData=new int[Area.Stats.values().length];
						for(;childE.hasMoreElements();)
						{
							final int[] theseStats=childE.nextElement().A.getAreaIStats();
							if(theseStats != emptyStats)
							{
								ct++;
								for(int i=0;i<theseStats.length;i++)
									statData[i]+=theseStats[i];
							}
						}
					}
					if(ct==0)
						return emptyStats;
					for(int i=0;i<statData.length;i++)
						statData[i]=statData[i]/ct;
				}
				Resources.removeResource("HELP_"+areaName.toUpperCase());
				Resources.submitResource("STATS_"+areaName.toUpperCase(),statData);
			}
			else
			{
				return super.getAreaIStats();
			}
		}
		return statData;
	}

	/**
	 * On a qualifying ENTER into a parent instance area, finds (or clones) the
	 * child instance belonging to the entering mob's group and redirects the move
	 * into it. NOTE(review): continues beyond this excerpt.
	 */
	@Override
	public boolean okMessage(final Environmental myHost, final CMMsg msg)
	{
		if(!super.okMessage(myHost, msg))
			return false;
		if(CMath.bset(flags(),Area.FLAG_INSTANCE_CHILD))
			return true;
		setAreaState(Area.State.PASSIVE);
		if((msg.sourceMinor()==CMMsg.TYP_ENTER)
		&&(msg.target() instanceof Room)
		&&(CMath.bset(flags(),Area.FLAG_INSTANCE_PARENT))
		&&(isRoom((Room)msg.target()))
		&&(!CMSecurity.isAllowed(msg.source(),(Room)msg.target(),CMSecurity.SecFlag.CMDAREAS))
		&&(((msg.source().getStartRoom()==null)||(msg.source().getStartRoom().getArea()!=this))))
		{
			synchronized(instanceChildren)
			{
				// find the child instance this mob is already assigned to, if any
				int myDex=-1;
				for(int i=0;i<instanceChildren.size();i++)
				{
					final List<WeakReference<MOB>> V=instanceChildren.elementAt(i).mobs;
					for (final WeakReference<MOB> weakReference : V)
						if(msg.source() == weakReference.get())
						{
							myDex=i;
							break;
						}
				}
				// otherwise adopt a group member's instance, migrating stale entries
				final Set<MOB> grp = msg.source().getGroupMembers(new HashSet<MOB>());
				for(int i=0;i<instanceChildren.size();i++)
				{
					if(i!=myDex)
					{
						final List<WeakReference<MOB>> V=instanceChildren.elementAt(i).mobs;
						for(int v=V.size()-1;v>=0;v--)
						{
							final WeakReference<MOB> wmob=V.get(v);
							if(wmob==null)
								continue;
							final MOB M=wmob.get();
							if(grp.contains(M))
							{
								if(myDex<0)
								{
									myDex=i;
									break;
								}
								else
								if((CMLib.flags().isInTheGame(M,true))
								&&(M.location().getArea()!=instanceChildren.elementAt(i).A))
								{
									V.remove(M);
									instanceChildren.get(myDex).mobs.add(new WeakReference<MOB>(M));
								}
							}
						}
					}
				}
				Area redirectA = null;
				if(myDex<0)
				{
					// no instance for this group yet: clone this area as a fresh child
					final StdThinInstance newA=(StdThinInstance)this.copyOf();
					newA.properRooms=new STreeMap<String, Room>(new Area.RoomIDComparator());
					newA.parentArea=null;
					newA.properRoomIDSet = null;
					newA.metroRoomIDSet = null;
					newA.blurbFlags=new STreeMap<String,String>();
					newA.setName((++instanceCounter)+"_"+Name());
					newA.flags |= Area.FLAG_INSTANCE_CHILD;
					for(final Enumeration<String> e=getProperRoomnumbers().getRoomIDs();e.hasMoreElements();)
						newA.addProperRoomnumber(newA.convertToMyArea(e.nextElement()));
					redirectA=newA;
					CMLib.map().addArea(newA);
					newA.setAreaState(Area.State.ACTIVE); // starts ticking
					final List<WeakReference<MOB>> newMobList = new SVector<WeakReference<MOB>>(5);
					newMobList.add(new WeakReference<MOB>(msg.source()));
					final AreaInstanceChild child = new AreaInstanceChild(redirectA,newMobList);
					instanceChildren.add(child);
				}
				else
					redirectA=instanceChildren.get(myDex).A;
				if(redirectA instanceof StdThinInstance)
				{
					final Room
R=redirectA.getRoom(((StdThinInstance)redirectA).convertToMyArea(CMLib.map().getExtendedRoomID((Room)msg.target()))); if(R!=null) msg.setTarget(R); } } } return true; } }
/*
 * Copyright 2017 Crown Copyright
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package stroom.legacy.model_6_1;

import javax.xml.bind.annotation.XmlRootElement;
import java.util.ArrayList;
import java.util.List;

/**
 * <p>
 * Criteria Object for find streams that is a bit more advanced than find by
 * example.
 * </p>
 */
@XmlRootElement
@Deprecated
public class OldFindStreamCriteria extends BaseCriteria {
    public static final String FIELD_CREATE_MS = "Create";
    private static final long serialVersionUID = -4777723504698304778L;

    /**
     * Keep up to date as it's used to cache SQL queries.
     */
    private EntityIdSet<StreamProcessor> streamProcessorIdSet;
    private OldFolderIdSet folderIdSet;

    /**
     * You must use feeds instead this is here for compatibility with previous
     * versions.
     **/
    @Deprecated
    private EntityIdSet<Feed> feedIdSet;

    private IncludeExcludeEntityIdSet<Feed> feeds;
    private EntityIdSet<PipelineEntity> pipelineIdSet;
    private EntityIdSet<StreamType> streamTypeIdSet;
    private EntityIdSet<Stream> streamIdSet;
    private CriteriaSet<StreamStatus> statusSet;
    private IdRange streamIdRange;
    private EntityIdSet<Stream> parentStreamIdSet;
    private Period createPeriod;
    private Period effectivePeriod;
    private Period statusPeriod;
    private List<StreamAttributeCondition> attributeConditionList;
    private QueryData queryData;

    public OldFindStreamCriteria() {
    }

    public EntityIdSet<StreamProcessor> getStreamProcessorIdSet() {
        return streamProcessorIdSet;
    }

    public void setStreamProcessorIdSet(final EntityIdSet<StreamProcessor> streamProcessorIdSet) {
        this.streamProcessorIdSet = streamProcessorIdSet;
    }

    // "obtain" accessors lazily create the criteria object on first use.
    public EntityIdSet<StreamProcessor> obtainStreamProcessorIdSet() {
        if (streamProcessorIdSet == null) {
            streamProcessorIdSet = new EntityIdSet<>();
        }
        return streamProcessorIdSet;
    }

    public CriteriaSet<StreamStatus> getStatusSet() {
        return statusSet;
    }

    public void setStatusSet(final CriteriaSet<StreamStatus> statusSet) {
        this.statusSet = statusSet;
    }

    public CriteriaSet<StreamStatus> obtainStatusSet() {
        if (statusSet == null) {
            statusSet = new CriteriaSet<>();
        }
        return statusSet;
    }

    public OldFolderIdSet getFolderIdSet() {
        return folderIdSet;
    }

    public void setFolderIdSet(final OldFolderIdSet folderIdSet) {
        this.folderIdSet = folderIdSet;
    }

    public OldFolderIdSet obtainFolderIdSet() {
        if (folderIdSet == null) {
            folderIdSet = new OldFolderIdSet();
        }
        return folderIdSet;
    }

    /**
     * Migrates the deprecated {@link #feedIdSet} into {@link #feeds} (as its
     * include set) the first time either feeds accessor is used after
     * deserialisation of an old-format criteria object.
     */
    private void migrateLegacyFeedIdSet() {
        if (feedIdSet != null) {
            feeds = new IncludeExcludeEntityIdSet<>();
            feeds.setInclude(feedIdSet);
            this.feedIdSet = null;
        }
    }

    public IncludeExcludeEntityIdSet<Feed> getFeeds() {
        migrateLegacyFeedIdSet();
        return feeds;
    }

    public void setFeeds(final IncludeExcludeEntityIdSet<Feed> feeds) {
        this.feeds = feeds;
    }

    public IncludeExcludeEntityIdSet<Feed> obtainFeeds() {
        migrateLegacyFeedIdSet();
        if (feeds == null) {
            feeds = new IncludeExcludeEntityIdSet<>();
        }
        return feeds;
    }

    /**
     * You must use getFeeds() instead this is here for compatibility with
     * previous versions.
     **/
    @Deprecated
    public EntityIdSet<Feed> getFeedIdSet() {
        return feedIdSet;
    }

    /**
     * You must use setFeeds() instead this is here for compatibility with
     * previous versions.
     **/
    @Deprecated
    public void setFeedIdSet(final EntityIdSet<Feed> feedIdSet) {
        // immediately fold the legacy set into the new include/exclude form
        if (feedIdSet != null) {
            feeds = new IncludeExcludeEntityIdSet<>();
            feeds.setInclude(feedIdSet);
        }
        this.feedIdSet = null;
    }

    public EntityIdSet<PipelineEntity> getPipelineIdSet() {
        return pipelineIdSet;
    }

    public void setPipelineIdSet(final EntityIdSet<PipelineEntity> pipelineIdSet) {
        this.pipelineIdSet = pipelineIdSet;
    }

    public EntityIdSet<PipelineEntity> obtainPipelineIdSet() {
        if (pipelineIdSet == null) {
            pipelineIdSet = new EntityIdSet<>();
        }
        return pipelineIdSet;
    }

    public EntityIdSet<StreamType> getStreamTypeIdSet() {
        return streamTypeIdSet;
    }

    public void setStreamTypeIdSet(final EntityIdSet<StreamType> streamTypeIdSet) {
        this.streamTypeIdSet = streamTypeIdSet;
    }

    public EntityIdSet<StreamType> obtainStreamTypeIdSet() {
        if (streamTypeIdSet == null) {
            streamTypeIdSet = new EntityIdSet<>();
        }
        return streamTypeIdSet;
    }

    public EntityIdSet<Stream> getStreamIdSet() {
        return streamIdSet;
    }

    public void setStreamIdSet(final EntityIdSet<Stream> streamIdSet) {
        this.streamIdSet = streamIdSet;
    }

    public EntityIdSet<Stream> obtainStreamIdSet() {
        if (streamIdSet == null) {
            streamIdSet = new EntityIdSet<>();
        }
        return streamIdSet;
    }

    public IdRange getStreamIdRange() {
        return streamIdRange;
    }

    public void setStreamIdRange(final IdRange streamIdRange) {
        this.streamIdRange = streamIdRange;
    }

    public IdRange obtainStreamIdRange() {
        if (streamIdRange == null) {
            streamIdRange = new IdRange();
        }
        return streamIdRange;
    }

    public EntityIdSet<Stream> getParentStreamIdSet() {
        return parentStreamIdSet;
    }

    public void setParentStreamIdSet(final EntityIdSet<Stream> parentStreamIdSet) {
        this.parentStreamIdSet = parentStreamIdSet;
    }

    public EntityIdSet<Stream> obtainParentStreamIdSet() {
        if (parentStreamIdSet == null) {
            parentStreamIdSet = new EntityIdSet<>();
        }
        return parentStreamIdSet;
    }

    public Period getCreatePeriod() {
        return createPeriod;
    }

    public void setCreatePeriod(final Period createPeriod) {
        this.createPeriod = createPeriod;
    }

    public Period obtainCreatePeriod() {
        if (createPeriod == null) {
            createPeriod = new Period();
        }
        return createPeriod;
    }

    public Period getEffectivePeriod() {
        return effectivePeriod;
    }

    public void setEffectivePeriod(final Period effectivePeriod) {
        this.effectivePeriod = effectivePeriod;
    }

    public Period obtainEffectivePeriod() {
        if (effectivePeriod == null) {
            effectivePeriod = new Period();
        }
        return effectivePeriod;
    }

    public Period getStatusPeriod() {
        return statusPeriod;
    }

    public void setStatusPeriod(final Period statusPeriod) {
        this.statusPeriod = statusPeriod;
    }

    public Period obtainStatusPeriod() {
        if (statusPeriod == null) {
            statusPeriod = new Period();
        }
        return statusPeriod;
    }

    public List<StreamAttributeCondition> getAttributeConditionList() {
        return attributeConditionList;
    }

    public void setAttributeConditionList(final List<StreamAttributeCondition> attributeConditionList) {
        this.attributeConditionList = attributeConditionList;
    }

    public List<StreamAttributeCondition> obtainAttributeConditionList() {
        if (attributeConditionList == null) {
            attributeConditionList = new ArrayList<>();
        }
        return attributeConditionList;
    }

    public QueryData getQueryData() {
        return queryData;
    }

    public void setQueryData(final QueryData queryData) {
        this.queryData = queryData;
    }
}
/*
Copyright 2007-2009 Selenium committers

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
 */

package org.openqa.selenium;

import org.junit.Before;
import org.junit.Test;
import org.openqa.selenium.environment.DomainHelper;
import org.openqa.selenium.testing.Ignore;
import org.openqa.selenium.testing.JUnit4TestBase;
import org.openqa.selenium.testing.JavascriptEnabled;

import java.net.URI;
import java.util.Date;
import java.util.Random;
import java.util.Set;

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;
import static org.openqa.selenium.testing.Ignore.Driver.ALL;
import static org.openqa.selenium.testing.Ignore.Driver.ANDROID;
import static org.openqa.selenium.testing.Ignore.Driver.CHROME;
import static org.openqa.selenium.testing.Ignore.Driver.FIREFOX;
import static org.openqa.selenium.testing.Ignore.Driver.HTMLUNIT;
import static org.openqa.selenium.testing.Ignore.Driver.IE;
import static org.openqa.selenium.testing.Ignore.Driver.IPHONE;
import static org.openqa.selenium.testing.Ignore.Driver.OPERA;
import static org.openqa.selenium.testing.Ignore.Driver.OPERA_MOBILE;
import static org.openqa.selenium.testing.Ignore.Driver.PHANTOMJS;
import static org.openqa.selenium.testing.Ignore.Driver.REMOTE;
import static org.openqa.selenium.testing.Ignore.Driver.SAFARI;

/**
 * End-to-end tests for the WebDriver cookie API (add/get/delete, domain,
 * path, expiry, secure and httpOnly handling).  Cookies are cross-checked
 * against {@code document.cookie} where JavaScript execution is available.
 */
public class CookieImplementationTest extends JUnit4TestBase {

  private DomainHelper domainHelper;
  private String cookiePage;
  private static final Random random = new Random();

  @Before
  public void setUp() throws Exception {
    domainHelper = new DomainHelper(appServer);
    assumeTrue(domainHelper.checkIsOnValidHostname());
    cookiePage = domainHelper.getUrlForFirstValidHostname("/common/cookie");

    deleteAllCookiesOnServerSide();

    // This page is the deepest page we go to in the cookie tests
    // We go to it to ensure that cookies with /common/... paths are deleted
    // Do not write test in this class which use pages other than under /common
    // without ensuring that cookies are deleted on those pages as required
    try {
      driver.get(domainHelper.getUrlForFirstValidHostname("/common/animals"));
    } catch (IllegalArgumentException e) {
      // Ideally we would throw an IgnoredTestError or something here,
      // but our test runner doesn't pay attention to those.
      // Rely on the tests skipping themselves if they need to be on a useful page.
      return;
    }
    driver.manage().deleteAllCookies();
    assertNoCookiesArePresent();
  }

  @JavascriptEnabled
  @Test
  public void testShouldGetCookieByName() {
    String key = generateUniqueKey();
    String value = "set";
    assertCookieIsNotPresentWithName(key);

    addCookieOnServerSide(new Cookie(key, value));

    Cookie cookie = driver.manage().getCookieNamed(key);
    assertEquals(value, cookie.getValue());
  }

  @JavascriptEnabled
  @Test
  public void testShouldBeAbleToAddCookie() {
    String key = generateUniqueKey();
    String value = "foo";
    Cookie cookie = new Cookie.Builder(key, value).build();
    assertCookieIsNotPresentWithName(key);

    driver.manage().addCookie(cookie);

    assertCookieHasValue(key, value);
    openAnotherPage();
    assertCookieHasValue(key, value);
  }

  @Test
  public void testGetAllCookies() {
    String key1 = generateUniqueKey();
    String key2 = generateUniqueKey();

    assertCookieIsNotPresentWithName(key1);
    assertCookieIsNotPresentWithName(key2);

    Set<Cookie> cookies = driver.manage().getCookies();
    int countBefore = cookies.size();

    Cookie one = new Cookie.Builder(key1, "value").build();
    Cookie two = new Cookie.Builder(key2, "value").build();

    driver.manage().addCookie(one);
    driver.manage().addCookie(two);

    openAnotherPage();
    cookies = driver.manage().getCookies();
    assertEquals(countBefore + 2, cookies.size());

    assertTrue(cookies.contains(one));
    assertTrue(cookies.contains(two));
  }

  @JavascriptEnabled
  @Test
  public void testDeleteAllCookies() {
    addCookieOnServerSide(new Cookie("foo", "set"));
    assertSomeCookiesArePresent();

    driver.manage().deleteAllCookies();

    assertNoCookiesArePresent();

    openAnotherPage();
    assertNoCookiesArePresent();
  }

  @JavascriptEnabled
  @Test
  public void testDeleteCookieWithName() {
    String key1 = generateUniqueKey();
    String key2 = generateUniqueKey();

    addCookieOnServerSide(new Cookie(key1, "set"));
    addCookieOnServerSide(new Cookie(key2, "set"));

    assertCookieIsPresentWithName(key1);
    assertCookieIsPresentWithName(key2);

    driver.manage().deleteCookieNamed(key1);

    assertCookieIsNotPresentWithName(key1);
    assertCookieIsPresentWithName(key2);

    openAnotherPage();
    assertCookieIsNotPresentWithName(key1);
    assertCookieIsPresentWithName(key2);
  }

  @Test
  public void testShouldNotDeleteCookiesWithASimilarName() {
    String cookieOneName = "fish";
    Cookie cookie1 = new Cookie.Builder(cookieOneName, "cod").build();
    Cookie cookie2 = new Cookie.Builder(cookieOneName + "x", "earth").build();
    WebDriver.Options options = driver.manage();
    assertCookieIsNotPresentWithName(cookie1.getName());

    options.addCookie(cookie1);
    options.addCookie(cookie2);

    assertCookieIsPresentWithName(cookie1.getName());

    options.deleteCookieNamed(cookieOneName);
    Set<Cookie> cookies = options.getCookies();

    assertFalse(cookies.toString(), cookies.contains(cookie1));
    assertTrue(cookies.toString(), cookies.contains(cookie2));
  }

  @Ignore(OPERA)
  @Test
  public void testAddCookiesWithDifferentPathsThatAreRelatedToOurs() {
    driver.get(domainHelper.getUrlForFirstValidHostname("/common/animals"));
    Cookie cookie1 = new Cookie.Builder("fish", "cod").path("/common/animals").build();
    Cookie cookie2 = new Cookie.Builder("planet", "earth").path("/common/").build();
    WebDriver.Options options = driver.manage();
    options.addCookie(cookie1);
    options.addCookie(cookie2);

    driver.get(domainHelper.getUrlForFirstValidHostname("/common/animals"));

    assertCookieIsPresentWithName(cookie1.getName());
    assertCookieIsPresentWithName(cookie2.getName());

    driver.get(domainHelper.getUrlForFirstValidHostname("/common/simplePage.html"));
    assertCookieIsNotPresentWithName(cookie1.getName());
  }

  @Ignore({CHROME, OPERA})
  @Test
  public void testCannotGetCookiesWithPathDifferingOnlyInCase() {
    String cookieName = "fish";
    Cookie cookie = new Cookie.Builder(cookieName, "cod").path("/Common/animals").build();
    driver.manage().addCookie(cookie);

    driver.get(domainHelper.getUrlForFirstValidHostname("/common/animals"));
    assertNull(driver.manage().getCookieNamed(cookieName));
  }

  @Test
  public void testShouldNotGetCookieOnDifferentDomain() {
    assumeTrue(domainHelper.checkHasValidAlternateHostname());

    String cookieName = "fish";
    driver.manage().addCookie(new Cookie.Builder(cookieName, "cod").build());
    assertCookieIsPresentWithName(cookieName);

    driver.get(domainHelper.getUrlForSecondValidHostname("simpleTest.html"));

    assertCookieIsNotPresentWithName(cookieName);
  }

  @Ignore(value = {ANDROID, CHROME, HTMLUNIT, IE, IPHONE, OPERA}, reason = "Untested browsers.")
  @Test
  public void testShouldBeAbleToAddToADomainWhichIsRelatedToTheCurrentDomain() {
    String cookieName = "name";
    assertCookieIsNotPresentWithName(cookieName);

    // e.g. "sub.example.com" -> ".example.com"
    String shorter = domainHelper.getHostName().replaceFirst(".*?\\.", ".");
    Cookie cookie = new Cookie.Builder(cookieName, "value").domain(shorter).build();
    driver.manage().addCookie(cookie);

    assertCookieIsPresentWithName(cookieName);
  }

  @Ignore(value = {ALL})
  @Test
  public void testsShouldNotGetCookiesRelatedToCurrentDomainWithoutLeadingPeriod() {
    String cookieName = "name";
    assertCookieIsNotPresentWithName(cookieName);

    String shorter = domainHelper.getHostName().replaceFirst(".*?\\.", "");
    Cookie cookie = new Cookie.Builder(cookieName, "value").domain(shorter).build();
    driver.manage().addCookie(cookie);

    assertCookieIsNotPresentWithName(cookieName);
  }

  @Ignore({REMOTE, IE})
  @Test
  public void testShouldBeAbleToIncludeLeadingPeriodInDomainName() throws Exception {
    String cookieName = "name";
    assertCookieIsNotPresentWithName(cookieName);

    String shorter = domainHelper.getHostName().replaceFirst(".*?\\.", ".");
    // consistency: use the cookieName variable instead of repeating the literal
    Cookie cookie = new Cookie.Builder(cookieName, "value").domain(shorter).build();

    driver.manage().addCookie(cookie);

    assertCookieIsPresentWithName(cookieName);
  }

  @Ignore(IE)
  @Test
  public void testShouldBeAbleToSetDomainToTheCurrentDomain() throws Exception {
    URI url = new URI(driver.getCurrentUrl());
    String host = url.getHost() + ":" + url.getPort();

    Cookie cookie = new Cookie.Builder("fish", "cod").domain(host).build();
    driver.manage().addCookie(cookie);

    driver.get(domainHelper.getUrlForFirstValidHostname("javascriptPage.html"));
    Set<Cookie> cookies = driver.manage().getCookies();
    assertTrue(cookies.contains(cookie));
  }

  @Test
  public void testShouldWalkThePathToDeleteACookie() {
    Cookie cookie1 = new Cookie.Builder("fish", "cod").build();
    driver.manage().addCookie(cookie1);

    driver.get(domainHelper.getUrlForFirstValidHostname("child/childPage.html"));
    Cookie cookie2 = new Cookie("rodent", "hamster", "/common/child");
    driver.manage().addCookie(cookie2);

    driver.get(domainHelper.getUrlForFirstValidHostname("child/grandchild/grandchildPage.html"));
    Cookie cookie3 = new Cookie("dog", "dalmation", "/common/child/grandchild/");
    driver.manage().addCookie(cookie3);

    driver.get(domainHelper.getUrlForFirstValidHostname("child/grandchild/grandchildPage.html"));
    driver.manage().deleteCookieNamed("rodent");

    assertNull(driver.manage().getCookies().toString(), driver.manage().getCookieNamed("rodent"));

    Set<Cookie> cookies = driver.manage().getCookies();
    assertEquals(2, cookies.size());
    assertTrue(cookies.contains(cookie1));
    assertTrue(cookies.contains(cookie3));

    driver.manage().deleteAllCookies();
    driver.get(domainHelper.getUrlForFirstValidHostname("child/grandchild/grandchildPage.html"));
    assertNoCookiesArePresent();
  }

  @Ignore(IE)
  @Test
  public void testShouldIgnoreThePortNumberOfTheHostWhenSettingTheCookie() throws Exception {
    URI uri = new URI(driver.getCurrentUrl());
    String host = String.format("%s:%d", uri.getHost(), uri.getPort());

    String cookieName = "name";
    assertCookieIsNotPresentWithName(cookieName);
    Cookie cookie = new Cookie.Builder(cookieName, "value").domain(host).build();
    driver.manage().addCookie(cookie);

    assertCookieIsPresentWithName(cookieName);
  }

  @Ignore(OPERA)
  @Test
  public void testCookieEqualityAfterSetAndGet() {
    driver.get(domainHelper.getUrlForFirstValidHostname("animals"));

    driver.manage().deleteAllCookies();

    Cookie addedCookie = new Cookie.Builder("fish", "cod")
        .path("/common/animals")
        .expiresOn(someTimeInTheFuture())
        .build();
    driver.manage().addCookie(addedCookie);

    Set<Cookie> cookies = driver.manage().getCookies();
    Cookie retrievedCookie = null;
    for (Cookie temp : cookies) {
      if (addedCookie.equals(temp)) {
        retrievedCookie = temp;
        break;
      }
    }

    assertNotNull("Cookie was null", retrievedCookie);
    // Cookie.equals only compares name, domain and path
    assertEquals(addedCookie, retrievedCookie);
  }

  @Ignore(value = {ANDROID, IE, OPERA}, reason =
      "Selenium, which use JavaScript to retrieve cookies, cannot return expiry info; " +
      "Other suppressed browsers have not been tested.")
  @Test
  public void testRetainsCookieExpiry() {
    Cookie addedCookie =
        new Cookie.Builder("fish", "cod")
            .path("/common/animals")
            .expiresOn(someTimeInTheFuture())
            .build();
    driver.manage().addCookie(addedCookie);

    Cookie retrieved = driver.manage().getCookieNamed("fish");
    assertNotNull(retrieved);
    assertEquals(addedCookie.getExpiry(), retrieved.getExpiry());
  }

  @Ignore(value = {ANDROID, IE, OPERA, OPERA_MOBILE, PHANTOMJS, SAFARI})
  @Test
  public void testRetainsCookieSecure() {
    driver.get(domainHelper.getSecureUrlForFirstValidHostname("animals"));

    Cookie addedCookie =
        new Cookie.Builder("fish", "cod")
            .path("/common/animals")
            .isSecure(true)
            .build();
    driver.manage().addCookie(addedCookie);

    driver.navigate().refresh();

    Cookie retrieved = driver.manage().getCookieNamed("fish");
    assertNotNull(retrieved);
    assertTrue(retrieved.isSecure());
  }

  @Ignore(value = {ANDROID, CHROME, FIREFOX, HTMLUNIT, IE, OPERA, OPERA_MOBILE, PHANTOMJS, SAFARI})
  @Test
  public void testRetainsHttpOnlyFlag() {
    Cookie addedCookie =
        new Cookie.Builder("fish", "cod")
            .path("/common/animals")
            .isHttpOnly(true)
            .build();

    // httpOnly cookies cannot be created from JS, so set via the server
    addCookieOnServerSide(addedCookie);

    Cookie retrieved = driver.manage().getCookieNamed("fish");
    assertNotNull(retrieved);
    assertTrue(retrieved.isHttpOnly());
  }

  @Ignore(ANDROID)
  @Test
  public void testSettingACookieThatExpiredInThePast() {
    long expires = System.currentTimeMillis() - 1000;
    Cookie cookie = new Cookie.Builder("expired", "yes").expiresOn(new Date(expires)).build();
    driver.manage().addCookie(cookie);

    // FIX: look up the cookie that was actually added ("expired", not "fish");
    // the old lookup made this test vacuously pass.
    cookie = driver.manage().getCookieNamed("expired");
    assertNull(
        "Cookie expired before it was set, so nothing should be returned: " + cookie, cookie);
  }

  @Test
  public void testCanSetCookieWithoutOptionalFieldsSet() {
    String key = generateUniqueKey();
    String value = "foo";
    Cookie cookie = new Cookie(key, value);
    assertCookieIsNotPresentWithName(key);

    driver.manage().addCookie(cookie);

    assertCookieHasValue(key, value);
  }

  @Test
  public void testDeleteNotExistedCookie() {
    String key = generateUniqueKey();
    assertCookieIsNotPresentWithName(key);

    driver.manage().deleteCookieNamed(key);
  }

  @Ignore(value = {ANDROID, CHROME, FIREFOX, IE, OPERA, OPERA_MOBILE, PHANTOMJS, SAFARI})
  @Test
  public void testShouldDeleteOneOfTheCookiesWithTheSameName() {
    driver.get(domainHelper.getUrlForFirstValidHostname("/common/animals"));
    Cookie cookie1 = new Cookie.Builder("fish", "cod")
        .domain(domainHelper.getHostName()).path("/common/animals").build();
    Cookie cookie2 = new Cookie.Builder("fish", "tune")
        .domain(domainHelper.getHostName()).path("/common/").build();
    WebDriver.Options options = driver.manage();
    options.addCookie(cookie1);
    options.addCookie(cookie2);
    // JUnit convention: expected value first
    assertEquals(2, driver.manage().getCookies().size());

    driver.manage().deleteCookie(cookie1);

    assertEquals(1, driver.manage().getCookies().size());
    Cookie retrieved = driver.manage().getCookieNamed("fish");
    assertNotNull("Cookie was null", retrieved);
    assertEquals(cookie2, retrieved);
  }

  /** Random key so parallel test runs cannot collide on cookie names. */
  private String generateUniqueKey() {
    return String.format("key_%d", random.nextInt());
  }

  private void assertNoCookiesArePresent() {
    Set<Cookie> cookies = driver.manage().getCookies();
    assertTrue("Cookies were not empty, present: " + cookies, cookies.isEmpty());
    String documentCookie = getDocumentCookieOrNull();
    if (documentCookie != null) {
      assertEquals("Cookies were not empty", "", documentCookie);
    }
  }

  private void assertSomeCookiesArePresent() {
    assertFalse("Cookies were empty", driver.manage().getCookies().isEmpty());
    String documentCookie = getDocumentCookieOrNull();
    if (documentCookie != null) {
      assertNotSame("Cookies were empty", "", documentCookie);
    }
  }

  private void assertCookieIsNotPresentWithName(final String key) {
    assertNull("Cookie was present with name " + key, driver.manage().getCookieNamed(key));
    String documentCookie = getDocumentCookieOrNull();
    if (documentCookie != null) {
      assertThat("Cookie was present with name " + key,
                 documentCookie,
                 not(containsString(key + "=")));
    }
  }

  private void assertCookieIsPresentWithName(final String key) {
    assertNotNull("Cookie was not present with name " + key, driver.manage().getCookieNamed(key));
    String documentCookie = getDocumentCookieOrNull();
    if (documentCookie != null) {
      assertThat("Cookie was not present with name " + key + ", got: " + documentCookie,
                 documentCookie,
                 containsString(key + "="));
    }
  }

  private void assertCookieHasValue(final String key, final String value) {
    assertEquals("Cookie had wrong value",
                 value,
                 driver.manage().getCookieNamed(key).getValue());
    String documentCookie = getDocumentCookieOrNull();
    if (documentCookie != null) {
      assertThat("Cookie was present with name " + key,
                 documentCookie,
                 containsString(key + "=" + value));
    }
  }

  /** Reads document.cookie via JS, or null when the driver cannot execute JS. */
  private String getDocumentCookieOrNull() {
    if (!(driver instanceof JavascriptExecutor)) {
      return null;
    }
    try {
      return (String) ((JavascriptExecutor) driver).executeScript("return document.cookie");
    } catch (UnsupportedOperationException e) {
      return null;
    }
  }

  private Date someTimeInTheFuture() {
    return new Date(System.currentTimeMillis() + 100000);
  }

  private void openAnotherPage() {
    driver.get(domainHelper.getUrlForFirstValidHostname("simpleTest.html"));
  }

  private void deleteAllCookiesOnServerSide() {
    driver.get(cookiePage + "?action=deleteAll");
  }

  /** Sets a cookie via the test server so httpOnly etc. can be exercised. */
  private void addCookieOnServerSide(Cookie cookie) {
    StringBuilder url = new StringBuilder(cookiePage);
    url.append("?action=add");
    url.append("&name=").append(cookie.getName());
    url.append("&value=").append(cookie.getValue());
    if (cookie.getDomain() != null) {
      url.append("&domain=").append(cookie.getDomain());
    }
    if (cookie.getPath() != null) {
      url.append("&path=").append(cookie.getPath());
    }
    if (cookie.getExpiry() != null) {
      url.append("&expiry=").append(cookie.getExpiry().getTime());
    }
    if (cookie.isSecure()) {
      url.append("&secure=").append(cookie.isSecure());
    }
    if (cookie.isHttpOnly()) {
      url.append("&httpOnly=").append(cookie.isHttpOnly());
    }
    driver.get(url.toString());
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.cache; import java.io.File; import java.io.FilenameFilter; import java.util.ArrayList; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import it.unimi.dsi.fastutil.longs.LongOpenHashSet; import org.apache.logging.log4j.Logger; import org.apache.geode.cache.DiskAccessException; import org.apache.geode.internal.cache.DiskStoreImpl.OplogEntryIdSet; import org.apache.geode.internal.cache.entries.DiskEntry; import org.apache.geode.internal.cache.entries.DiskEntry.Helper.ValueWrapper; import org.apache.geode.internal.cache.persistence.DiskRecoveryStore; import org.apache.geode.internal.cache.persistence.DiskRegionView; import org.apache.geode.internal.cache.persistence.DiskStoreFilter; import org.apache.geode.internal.cache.persistence.OplogType; import org.apache.geode.internal.cache.versions.RegionVersionVector; import 
org.apache.geode.internal.i18n.LocalizedStrings; import org.apache.geode.internal.logging.LogService; import org.apache.geode.internal.logging.log4j.LocalizedMessage; import org.apache.geode.internal.logging.log4j.LogMarker; import org.apache.geode.internal.sequencelog.EntryLogger; public class PersistentOplogSet implements OplogSet { private static final Logger logger = LogService.getLogger(); /** The active oplog * */ protected volatile Oplog child; /** variable to generate sequential unique oplogEntryId's* */ private final AtomicLong oplogEntryId = new AtomicLong(DiskStoreImpl.INVALID_ID); /** counter used for round-robin logic * */ int dirCounter = -1; /** * Contains all the oplogs that only have a drf (i.e. the crf has been deleted). */ final Map<Long, Oplog> drfOnlyOplogs = new LinkedHashMap<Long, Oplog>(); /** oplogs that are ready to compact */ final Map<Long, Oplog> oplogIdToOplog = new LinkedHashMap<Long, Oplog>(); /** oplogs that are done being written to but not yet ready to compact */ private final Map<Long, Oplog> inactiveOplogs = new LinkedHashMap<Long, Oplog>(16, 0.75f, true); private final DiskStoreImpl parent; final AtomicInteger inactiveOpenCount = new AtomicInteger(); private final Map<Long, DiskRecoveryStore> pendingRecoveryMap = new HashMap<Long, DiskRecoveryStore>(); private final Map<Long, DiskRecoveryStore> currentRecoveryMap = new HashMap<Long, DiskRecoveryStore>(); final AtomicBoolean alreadyRecoveredOnce = new AtomicBoolean(false); /** * The maximum oplog id we saw while recovering */ private volatile long maxRecoveredOplogId = 0; public PersistentOplogSet(DiskStoreImpl parent) { this.parent = parent; } /** * returns the active child */ public Oplog getChild() { return this.child; } /** * set the child to a new oplog * */ void setChild(Oplog oplog) { this.child = oplog; // oplogSetAdd(oplog); } public Oplog[] getAllOplogs() { synchronized (this.oplogIdToOplog) { int rollNum = this.oplogIdToOplog.size(); int inactiveNum = 
this.inactiveOplogs.size(); int drfOnlyNum = this.drfOnlyOplogs.size(); int num = rollNum + inactiveNum + drfOnlyNum + 1; Oplog[] oplogs = new Oplog[num]; oplogs[0] = getChild(); { Iterator<Oplog> itr = this.oplogIdToOplog.values().iterator(); for (int i = 1; i <= rollNum; i++) { oplogs[i] = itr.next(); } } { Iterator<Oplog> itr = this.inactiveOplogs.values().iterator(); for (int i = 1; i <= inactiveNum; i++) { oplogs[i + rollNum] = itr.next(); } } { Iterator<Oplog> itr = this.drfOnlyOplogs.values().iterator(); for (int i = 1; i <= drfOnlyNum; i++) { oplogs[i + rollNum + inactiveNum] = itr.next(); } } // Special case - no oplogs found if (oplogs.length == 1 && oplogs[0] == null) { return new Oplog[0]; } return oplogs; } } private TreeSet<Oplog> getSortedOplogs() { TreeSet<Oplog> result = new TreeSet<Oplog>(new Comparator() { public int compare(Object arg0, Object arg1) { return Long.signum(((Oplog) arg1).getOplogId() - ((Oplog) arg0).getOplogId()); } }); for (Oplog oplog : getAllOplogs()) { if (oplog != null) { result.add(oplog); } } return result; } /** * Get the oplog specified * * @param id int oplogId to be got * @return Oplogs the oplog corresponding to the oplodId, id */ public Oplog getChild(long id) { Oplog localOplog = this.child; if (localOplog != null && id == localOplog.getOplogId()) { return localOplog; } else { Long key = Long.valueOf(id); synchronized (this.oplogIdToOplog) { Oplog result = oplogIdToOplog.get(key); if (result == null) { result = inactiveOplogs.get(key); } return result; } } } @Override public void create(InternalRegion region, DiskEntry entry, ValueWrapper value, boolean async) { getChild().create(region, entry, value, async); } @Override public void modify(InternalRegion region, DiskEntry entry, ValueWrapper value, boolean async) { getChild().modify(region, entry, value, async); } public void offlineModify(DiskRegionView drv, DiskEntry entry, byte[] value, boolean isSerializedObject) { getChild().offlineModify(drv, entry, value, 
isSerializedObject); } @Override public void remove(InternalRegion region, DiskEntry entry, boolean async, boolean isClear) { getChild().remove(region, entry, async, isClear); } public void forceRoll(DiskRegion dr) { Oplog child = getChild(); if (child != null) { child.forceRolling(dr); } } public Map<File, DirectoryHolder> findFiles(String partialFileName) { this.dirCounter = 0; Map<File, DirectoryHolder> backupFiles = new HashMap<File, DirectoryHolder>(); FilenameFilter backupFileFilter = getFileNameFilter(partialFileName); for (DirectoryHolder dh : parent.directories) { File[] backupList = dh.getDir().listFiles(backupFileFilter); if (backupList != null) { for (File f : backupList) { backupFiles.put(f, dh); } } } return backupFiles; } protected FilenameFilter getFileNameFilter(String partialFileName) { return new DiskStoreFilter(OplogType.BACKUP, false, partialFileName); } public void createOplogs(boolean needsOplogs, Map<File, DirectoryHolder> backupFiles) { LongOpenHashSet foundCrfs = new LongOpenHashSet(); LongOpenHashSet foundDrfs = new LongOpenHashSet(); for (Map.Entry<File, DirectoryHolder> entry : backupFiles.entrySet()) { File file = entry.getKey(); String absolutePath = file.getAbsolutePath(); int underscorePosition = absolutePath.lastIndexOf("_"); int pointPosition = absolutePath.lastIndexOf("."); String opid = absolutePath.substring(underscorePosition + 1, pointPosition); long oplogId = Long.parseLong(opid); maxRecoveredOplogId = Math.max(maxRecoveredOplogId, oplogId); // here look diskinit file and check if this opid already deleted or not // if deleted then don't process it. 

      // CRF (create/modify records) file: if the disk init file says this oplog id is no
      // longer present, a delete evidently failed on a previous run -- retry it and skip.
      if (Oplog.isCRFFile(file.getName())) {
        if (!isCrfOplogIdPresent(oplogId)) {
          deleteFileOnRecovery(file);
          try {
            // A krf (key file) may accompany the crf; best-effort delete it as well.
            String krfFileName = Oplog.getKRFFilenameFromCRFFilename(file.getAbsolutePath());
            File krfFile = new File(krfFileName);
            deleteFileOnRecovery(krfFile);
          } catch (Exception ex) {// ignore
          }
          continue; // this file we unable to delete earlier
        }
      } else if (Oplog.isDRFFile(file.getName())) {
        // DRF (delete records) file: same retry-then-skip treatment.
        if (!isDrfOplogIdPresent(oplogId)) {
          deleteFileOnRecovery(file);
          continue; // this file we unable to delete earlier
        }
      }

      // Find or create the in-memory Oplog for this id and attach the file to it.
      Oplog oplog = getChild(oplogId);
      if (oplog == null) {
        oplog = new Oplog(oplogId, this);
        // oplogSet.add(oplog);
        addRecoveredOplog(oplog);
      }
      // NOTE(review): addRecoveredFile presumably returns true when the file is a crf and
      // false for a drf -- TODO confirm against Oplog; the result only feeds the id sets
      // handed to verifyOplogs below.
      if (oplog.addRecoveredFile(file, entry.getValue())) {
        foundCrfs.add(oplogId);
      } else {
        foundDrfs.add(oplogId);
      }
    }
    if (needsOplogs) {
      verifyOplogs(foundCrfs, foundDrfs);
    }
  }

  /** Asks the disk init file whether the given oplog id still has a live drf. */
  protected boolean isDrfOplogIdPresent(long oplogId) {
    return parent.getDiskInitFile().isDRFOplogIdPresent(oplogId);
  }

  /** Asks the disk init file whether the given oplog id still has a live crf. */
  protected boolean isCrfOplogIdPresent(long oplogId) {
    return parent.getDiskInitFile().isCRFOplogIdPresent(oplogId);
  }

  /** Hands the recovered crf/drf id sets to the disk init file for verification. */
  protected void verifyOplogs(LongOpenHashSet foundCrfs, LongOpenHashSet foundDrfs) {
    parent.getDiskInitFile().verifyOplogs(foundCrfs, foundDrfs);
  }

  // Best-effort delete during recovery; a failure is simply retried on the next recovery.
  private void deleteFileOnRecovery(File f) {
    try {
      f.delete();
    } catch (Exception e) {
      // ignore, one more attempt to delete the file failed
    }
  }

  void addRecoveredOplog(Oplog oplog) {
    basicAddToBeCompacted(oplog);
    // don't schedule a compaction here. Wait for recovery to complete
  }

  /**
   * Taking a lock on the LinkedHashMap oplogIdToOplog as it the operation of adding an Oplog to the
   * Map & notifying the Compactor thread , if not already compaction has to be an atomic operation.
   * add the oplog to the to be compacted set. 
if compactor thread is active and recovery is not * going on then the compactor thread is notified of the addition */ void addToBeCompacted(Oplog oplog) { basicAddToBeCompacted(oplog); parent.scheduleCompaction(); } private void basicAddToBeCompacted(Oplog oplog) { if (!oplog.isRecovering() && oplog.hasNoLiveValues()) { oplog.cancelKrf(); oplog.close(); // fix for bug 41687 oplog.deleteFiles(oplog.getHasDeletes()); } else { int inactivePromotedCount = 0; parent.getStats().incCompactableOplogs(1); Long key = Long.valueOf(oplog.getOplogId()); synchronized (this.oplogIdToOplog) { if (this.inactiveOplogs.remove(key) != null) { if (oplog.isRAFOpen()) { inactiveOpenCount.decrementAndGet(); } inactivePromotedCount++; } this.oplogIdToOplog.put(key, oplog); } if (inactivePromotedCount > 0) { parent.getStats().incInactiveOplogs(-inactivePromotedCount); } } } public void recoverRegionsThatAreReady() { // The following sync also prevents concurrent recoveries by multiple regions // which is needed currently. synchronized (this.alreadyRecoveredOnce) { // need to take a snapshot of DiskRecoveryStores we will recover synchronized (this.pendingRecoveryMap) { this.currentRecoveryMap.clear(); this.currentRecoveryMap.putAll(this.pendingRecoveryMap); this.pendingRecoveryMap.clear(); } if (this.currentRecoveryMap.isEmpty() && this.alreadyRecoveredOnce.get()) { // no recovery needed return; } for (DiskRecoveryStore drs : this.currentRecoveryMap.values()) { // Call prepare early to fix bug 41119. drs.getDiskRegionView().prepareForRecovery(); } if (!this.alreadyRecoveredOnce.get()) { initOplogEntryId(); // Fix for #43026 - make sure we don't reuse an entry // id that has been marked as cleared. 
updateOplogEntryId(parent.getDiskInitFile().getMaxRecoveredClearEntryId()); } final long start = parent.getStats().startRecovery(); long byteCount = 0; EntryLogger.setSource(parent.getDiskStoreID(), "recovery"); try { byteCount = recoverOplogs(byteCount); } finally { Map<String, Integer> prSizes = null; Map<String, Integer> prBuckets = null; if (parent.isValidating()) { prSizes = new HashMap<String, Integer>(); prBuckets = new HashMap<String, Integer>(); } for (DiskRecoveryStore drs : this.currentRecoveryMap.values()) { for (Oplog oplog : getAllOplogs()) { if (oplog != null) { // Need to do this AFTER recovery to protect from concurrent compactions // trying to remove the oplogs. // We can't remove a dr from the oplog's unrecoveredRegionCount // until it is fully recovered. // This fixes bug 41119. oplog.checkForRecoverableRegion(drs.getDiskRegionView()); } } if (parent.isValidating()) { if (drs instanceof ValidatingDiskRegion) { ValidatingDiskRegion vdr = ((ValidatingDiskRegion) drs); if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY)) { vdr.dump(System.out); } if (vdr.isBucket()) { String prName = vdr.getPrName(); if (prSizes.containsKey(prName)) { int oldSize = prSizes.get(prName); oldSize += vdr.size(); prSizes.put(prName, oldSize); int oldBuckets = prBuckets.get(prName); oldBuckets++; prBuckets.put(prName, oldBuckets); } else { prSizes.put(prName, vdr.size()); prBuckets.put(prName, 1); } } else { parent.incLiveEntryCount(vdr.size()); System.out.println(vdr.getName() + ": entryCount=" + vdr.size()); } } } } if (parent.isValidating()) { for (Map.Entry<String, Integer> me : prSizes.entrySet()) { parent.incLiveEntryCount(me.getValue()); System.out.println(me.getKey() + " entryCount=" + me.getValue() + " bucketCount=" + prBuckets.get(me.getKey())); } } parent.getStats().endRecovery(start, byteCount); this.alreadyRecoveredOnce.set(true); this.currentRecoveryMap.clear(); EntryLogger.clearSource(); } } } private long recoverOplogs(long byteCount) { OplogEntryIdSet 
deletedIds = new OplogEntryIdSet(); TreeSet<Oplog> oplogSet = getSortedOplogs(); Set<Oplog> oplogsNeedingValueRecovery = new HashSet<Oplog>(); if (!this.alreadyRecoveredOnce.get()) { if (getChild() != null && !getChild().hasBeenUsed()) { // Then remove the current child since it is empty // and does not need to be recovered from // and it is important to not call initAfterRecovery on it. oplogSet.remove(getChild()); } } if (oplogSet.size() > 0) { long startOpLogRecovery = System.currentTimeMillis(); // first figure out all entries that have been destroyed boolean latestOplog = true; for (Oplog oplog : oplogSet) { byteCount += oplog.recoverDrf(deletedIds, this.alreadyRecoveredOnce.get(), latestOplog); latestOplog = false; if (!this.alreadyRecoveredOnce.get()) { updateOplogEntryId(oplog.getMaxRecoveredOplogEntryId()); } } parent.incDeadRecordCount(deletedIds.size()); // now figure out live entries latestOplog = true; for (Oplog oplog : oplogSet) { long startOpLogRead = parent.getStats().startOplogRead(); long bytesRead = oplog.recoverCrf(deletedIds, // @todo make recoverValues per region recoverValues(), recoverValuesSync(), this.alreadyRecoveredOnce.get(), oplogsNeedingValueRecovery, latestOplog); latestOplog = false; if (!this.alreadyRecoveredOnce.get()) { updateOplogEntryId(oplog.getMaxRecoveredOplogEntryId()); } byteCount += bytesRead; parent.getStats().endOplogRead(startOpLogRead, bytesRead); // Callback to the disk regions to indicate the oplog is recovered // Used for offline export for (DiskRecoveryStore drs : this.currentRecoveryMap.values()) { drs.getDiskRegionView().oplogRecovered(oplog.oplogId); } } long endOpLogRecovery = System.currentTimeMillis(); long elapsed = endOpLogRecovery - startOpLogRecovery; logger.info(LocalizedMessage.create(LocalizedStrings.DiskRegion_OPLOG_LOAD_TIME, elapsed)); } if (!parent.isOfflineCompacting()) { long startRegionInit = System.currentTimeMillis(); // create the oplogs now so that loadRegionData can have them available // 
Create an array of Oplogs so that we are able to add it in a single shot // to the map for (DiskRecoveryStore drs : this.currentRecoveryMap.values()) { drs.getDiskRegionView().initRecoveredEntryCount(); } if (!this.alreadyRecoveredOnce.get()) { for (Oplog oplog : oplogSet) { if (oplog != getChild()) { oplog.initAfterRecovery(parent.isOffline()); } } if (getChild() == null) { setFirstChild(getSortedOplogs(), false); } } if (!parent.isOffline()) { if (recoverValues() && !recoverValuesSync()) { // TODO DAN - should we defer compaction until after // value recovery is complete? Or at least until after // value recovery for a given oplog is complete? // Right now, that's effectively what we're doing // because this uses up the compactor thread. parent.scheduleValueRecovery(oplogsNeedingValueRecovery, this.currentRecoveryMap); } if (!this.alreadyRecoveredOnce.get()) { // Create krfs for oplogs that are missing them for (Oplog oplog : oplogSet) { if (oplog.needsKrf()) { oplog.createKrfAsync(); } } parent.scheduleCompaction(); } long endRegionInit = System.currentTimeMillis(); logger.info(LocalizedMessage.create(LocalizedStrings.DiskRegion_REGION_INIT_TIME, endRegionInit - startRegionInit)); } } return byteCount; } protected boolean recoverValuesSync() { return parent.RECOVER_VALUES_SYNC; } protected boolean recoverValues() { return parent.RECOVER_VALUES; } private void setFirstChild(TreeSet<Oplog> oplogSet, boolean force) { if (parent.isOffline() && !parent.isOfflineCompacting() && !parent.isOfflineModify()) return; if (!oplogSet.isEmpty()) { Oplog first = oplogSet.first(); DirectoryHolder dh = first.getDirectoryHolder(); dirCounter = dh.getArrayIndex(); dirCounter = (++dirCounter) % parent.dirLength; // we want the first child to go in the directory after the directory // used by the existing oplog with the max id. // This fixes bug 41822. 
} if (force || maxRecoveredOplogId > 0) { setChild(new Oplog(maxRecoveredOplogId + 1, this, getNextDir())); } } private void initOplogEntryId() { this.oplogEntryId.set(DiskStoreImpl.INVALID_ID); } /** * Sets the last created oplogEntryId to the given value if and only if the given value is greater * than the current last created oplogEntryId */ private void updateOplogEntryId(long v) { long curVal; do { curVal = this.oplogEntryId.get(); if (curVal >= v) { // no need to set return; } } while (!this.oplogEntryId.compareAndSet(curVal, v)); } /** * Returns the last created oplogEntryId. Returns INVALID_ID if no oplogEntryId has been created. */ long getOplogEntryId() { parent.initializeIfNeeded(); return this.oplogEntryId.get(); } /** * Creates and returns a new oplogEntryId for the given key. An oplogEntryId is needed when * storing a key/value pair on disk. A new one is only needed if the key is new. Otherwise the * oplogEntryId already allocated for a key can be reused for the same key. * * @return A disk id that can be used to access this key/value pair on disk */ long newOplogEntryId() { long result = this.oplogEntryId.incrementAndGet(); return result; } /** * Returns the next available DirectoryHolder which has space. If no dir has space then it will * return one anyway if compaction is enabled. * * @param minAvailableSpace the minimum amount of space we need in this directory. */ DirectoryHolder getNextDir(int minAvailableSpace, boolean checkForWarning) { DirectoryHolder dirHolder = null; DirectoryHolder selectedHolder = null; synchronized (parent.directories) { for (int i = 0; i < parent.dirLength; ++i) { dirHolder = parent.directories[this.dirCounter]; // Asif :Increment the directory counter to next position so that next // time when this operation is invoked, it checks for the Directory // Space in a cyclical fashion. 
dirCounter = (++dirCounter) % parent.dirLength; // if the current directory has some space, then quit and // return the dir if (dirHolder.getAvailableSpace() >= minAvailableSpace) { if (checkForWarning && !parent.isDirectoryUsageNormal(dirHolder)) { if (logger.isDebugEnabled()) { logger.debug("Ignoring directory {} due to insufficient disk space", dirHolder); } } else { selectedHolder = dirHolder; break; } } } if (selectedHolder == null) { // we didn't find a warning-free directory, try again ignoring the check if (checkForWarning) { return getNextDir(minAvailableSpace, false); } if (parent.isCompactionEnabled()) { /* * try { this.isThreadWaitingForSpace = true; this.directories.wait(MAX_WAIT_FOR_SPACE); } * catch (InterruptedException ie) { throw new DiskAccessException(LocalizedStrings. * DiskRegion_UNABLE_TO_GET_FREE_SPACE_FOR_CREATING_AN_OPLOG_AS_THE_THREAD_ENCOUNETERD_EXCEPTION_WHILE_WAITING_FOR_COMPACTOR_THREAD_TO_FREE_SPACE * .toLocalizedString(), ie); } */ selectedHolder = parent.directories[this.dirCounter]; // Increment the directory counter to next position this.dirCounter = (++this.dirCounter) % parent.dirLength; if (selectedHolder.getAvailableSpace() < minAvailableSpace) { /* * throw new DiskAccessException(LocalizedStrings. 
* DiskRegion_UNABLE_TO_GET_FREE_SPACE_FOR_CREATING_AN_OPLOG_AFTER_WAITING_FOR_0_1_2_SECONDS * .toLocalizedString(new Object[] {MAX_WAIT_FOR_SPACE, /, (1000)})); */ logger.warn(LocalizedMessage.create( LocalizedStrings.DiskRegion_COMPLEXDISKREGIONGETNEXTDIR_MAX_DIRECTORY_SIZE_WILL_GET_VIOLATED__GOING_AHEAD_WITH_THE_SWITCHING_OF_OPLOG_ANY_WAYS_CURRENTLY_AVAILABLE_SPACE_IN_THE_DIRECTORY_IS__0__THE_CAPACITY_OF_DIRECTORY_IS___1, new Object[] {Long.valueOf(selectedHolder.getUsedSpace()), Long.valueOf(selectedHolder.getCapacity())})); } } else { throw new DiskAccessException( LocalizedStrings.DiskRegion_DISK_IS_FULL_COMPACTION_IS_DISABLED_NO_SPACE_CAN_BE_CREATED .toLocalizedString(), parent); } } } return selectedHolder; } DirectoryHolder getNextDir() { return getNextDir(DiskStoreImpl.MINIMUM_DIR_SIZE, true); } void addDrf(Oplog oplog) { synchronized (this.oplogIdToOplog) { this.drfOnlyOplogs.put(Long.valueOf(oplog.getOplogId()), oplog); } } void removeDrf(Oplog oplog) { synchronized (this.oplogIdToOplog) { this.drfOnlyOplogs.remove(Long.valueOf(oplog.getOplogId())); } } /** * Return true if id is less than all the ids in the oplogIdToOplog map. Since the oldest one is * in the LINKED hash map is first we only need to compare ourselves to it. */ boolean isOldestExistingOplog(long id) { synchronized (this.oplogIdToOplog) { Iterator<Long> it = this.oplogIdToOplog.keySet().iterator(); while (it.hasNext()) { long otherId = it.next().longValue(); if (id > otherId) { return false; } } // since the inactiveOplogs map is an LRU we need to check each one it = this.inactiveOplogs.keySet().iterator(); while (it.hasNext()) { long otherId = it.next().longValue(); if (id > otherId) { return false; } } } return true; } /** * Destroy all the oplogs that are: 1. the oldest (based on smallest oplog id) 2. 
empty (have no * live values) */ void destroyOldestReadyToCompact() { synchronized (this.oplogIdToOplog) { if (this.drfOnlyOplogs.isEmpty()) return; } Oplog oldestLiveOplog = getOldestLiveOplog(); ArrayList<Oplog> toDestroy = new ArrayList<Oplog>(); if (oldestLiveOplog == null) { // remove all oplogs that are empty synchronized (this.oplogIdToOplog) { toDestroy.addAll(this.drfOnlyOplogs.values()); } } else { // remove all empty oplogs that are older than the oldest live one synchronized (this.oplogIdToOplog) { for (Oplog oplog : this.drfOnlyOplogs.values()) { if (oplog.getOplogId() < oldestLiveOplog.getOplogId()) { toDestroy.add(oplog); // } else { // // since drfOnlyOplogs is sorted any other ones will be even // bigger // // so we can break out of this loop // break; } } } } for (Oplog oplog : toDestroy) { oplog.destroy(); } } /** * Returns the oldest oplog that is ready to compact. Returns null if no oplogs are ready to * compact. Age is based on the oplog id. */ private Oplog getOldestReadyToCompact() { Oplog oldest = null; synchronized (this.oplogIdToOplog) { Iterator<Oplog> it = this.oplogIdToOplog.values().iterator(); while (it.hasNext()) { Oplog oldestCompactable = it.next(); if (oldest == null || oldestCompactable.getOplogId() < oldest.getOplogId()) { oldest = oldestCompactable; } } it = this.drfOnlyOplogs.values().iterator(); while (it.hasNext()) { Oplog oldestDrfOnly = it.next(); if (oldest == null || oldestDrfOnly.getOplogId() < oldest.getOplogId()) { oldest = oldestDrfOnly; } } } return oldest; } private Oplog getOldestLiveOplog() { Oplog result = null; synchronized (this.oplogIdToOplog) { Iterator<Oplog> it = this.oplogIdToOplog.values().iterator(); while (it.hasNext()) { Oplog n = it.next(); if (result == null || n.getOplogId() < result.getOplogId()) { result = n; } } // since the inactiveOplogs map is an LRU we need to check each one it = this.inactiveOplogs.values().iterator(); while (it.hasNext()) { Oplog n = it.next(); if (result == null || 
n.getOplogId() < result.getOplogId()) { result = n; } } } return result; } void inactiveAccessed(Oplog oplog) { Long key = Long.valueOf(oplog.getOplogId()); synchronized (this.oplogIdToOplog) { // update last access time this.inactiveOplogs.get(key); } } void inactiveReopened(Oplog oplog) { addInactive(oplog, true); } void addInactive(Oplog oplog) { addInactive(oplog, false); } private void addInactive(Oplog oplog, boolean reopen) { Long key = Long.valueOf(oplog.getOplogId()); ArrayList<Oplog> openlist = null; synchronized (this.oplogIdToOplog) { boolean isInactive = true; if (reopen) { // It is possible that 'oplog' is compactable instead of inactive. // So set the isInactive. isInactive = this.inactiveOplogs.get(key) != null; } else { this.inactiveOplogs.put(key, oplog); } if ((reopen && isInactive) || oplog.isRAFOpen()) { if (inactiveOpenCount.incrementAndGet() > DiskStoreImpl.MAX_OPEN_INACTIVE_OPLOGS) { openlist = new ArrayList<Oplog>(); for (Oplog o : this.inactiveOplogs.values()) { if (o.isRAFOpen()) { // add to my list openlist.add(o); } } } } } if (openlist != null) { for (Oplog o : openlist) { if (o.closeRAF()) { if (inactiveOpenCount.decrementAndGet() <= DiskStoreImpl.MAX_OPEN_INACTIVE_OPLOGS) { break; } } } } if (!reopen) { parent.getStats().incInactiveOplogs(1); } } public void clear(DiskRegion dr, RegionVersionVector rvv) { // call clear on each oplog // to fix bug 44336 put them in another collection ArrayList<Oplog> oplogsToClear = new ArrayList<Oplog>(); synchronized (this.oplogIdToOplog) { for (Oplog oplog : this.oplogIdToOplog.values()) { oplogsToClear.add(oplog); } for (Oplog oplog : this.inactiveOplogs.values()) { oplogsToClear.add(oplog); } { Oplog child = getChild(); if (child != null) { oplogsToClear.add(child); } } } for (Oplog oplog : oplogsToClear) { oplog.clear(dr, rvv); } if (rvv != null) { parent.getDiskInitFile().clearRegion(dr, rvv); } else { long clearedOplogEntryId = getOplogEntryId(); parent.getDiskInitFile().clearRegion(dr, 
clearedOplogEntryId);
    }
  }

  /**
   * Closes every oplog in this set: first all non-current oplogs, then the active child
   * (finishing its krf before closing).
   *
   * @return the first RuntimeException encountered while closing, or null if all closed
   *         cleanly. Bug fix: the previous code tested {@code rte != null} before recording
   *         an exception; since {@code rte} starts out null, no exception was ever captured
   *         and this method always returned null, silently swallowing close failures. The
   *         guard must be {@code rte == null} so the first failure is preserved.
   */
  public RuntimeException close() {
    RuntimeException rte = null;
    try {
      closeOtherOplogs();
    } catch (RuntimeException e) {
      if (rte == null) {
        rte = e;
      }
    }

    if (this.child != null) {
      try {
        this.child.finishKrf();
      } catch (RuntimeException e) {
        if (rte == null) {
          rte = e;
        }
      }

      try {
        this.child.close();
      } catch (RuntimeException e) {
        if (rte == null) {
          rte = e;
        }
      }
    }
    return rte;
  }

  /** closes all the oplogs except the current one * */
  private void closeOtherOplogs() {
    // get a snapshot to prevent CME
    Oplog[] oplogs = getAllOplogs();
    // if there are oplogs which are to be compacted, destroy them
    // do not do oplogs[0]
    for (int i = 1; i < oplogs.length; i++) {
      oplogs[i].finishKrf();
      oplogs[i].close();
      removeOplog(oplogs[i].getOplogId());
    }
  }

  /**
   * Removes the oplog from the map given the oplogId
   *
   * @param id id of the oplog to be removed from the list
   * @return oplog Oplog which has been removed
   */
  Oplog removeOplog(long id) {
    return removeOplog(id, false, null);
  }

  /**
   * Removes the oplog with the given id from whichever map holds it (compactable,
   * inactive, or drf-only), adjusting the stats counters accordingly.
   *
   * @param id id of the oplog to remove
   * @param deleting true when the oplog's files are being deleted by the caller
   * @param olgToAddToDrfOnly if non-null, added to the drf-only map under the same lock
   * @return the removed oplog, or null if no map contained it
   */
  Oplog removeOplog(long id, boolean deleting, Oplog olgToAddToDrfOnly) {
    Oplog oplog = null;
    boolean drfOnly = false;
    boolean inactive = false;
    Long key = Long.valueOf(id);
    synchronized (this.oplogIdToOplog) {
      oplog = this.oplogIdToOplog.remove(key);
      if (oplog == null) {
        oplog = this.inactiveOplogs.remove(key);
        if (oplog != null) {
          if (oplog.isRAFOpen()) {
            inactiveOpenCount.decrementAndGet();
          }
          inactive = true;
        } else {
          oplog = this.drfOnlyOplogs.remove(key);
          if (oplog != null) {
            drfOnly = true;
          }
        }
      }
      if (olgToAddToDrfOnly != null) {
        addDrf(olgToAddToDrfOnly);
      }
    }
    if (oplog != null) {
      if (!drfOnly) {
        if (inactive) {
          parent.getStats().incInactiveOplogs(-1);
        } else {
          parent.getStats().incCompactableOplogs(-1);
        }
      }
      if (!deleting && !oplog.isOplogEmpty()) {
        // we are removing an oplog whose files are not deleted
        parent.undeletedOplogSize.addAndGet(oplog.getOplogSize());
      }
    }
    return oplog;
  }

  public void basicClose(DiskRegion dr) {
    ArrayList<Oplog> oplogsToClose = new ArrayList<Oplog>();
    
synchronized (this.oplogIdToOplog) { oplogsToClose.addAll(this.oplogIdToOplog.values()); oplogsToClose.addAll(this.inactiveOplogs.values()); oplogsToClose.addAll(this.drfOnlyOplogs.values()); { Oplog child = getChild(); if (child != null) { oplogsToClose.add(child); } } } for (Oplog oplog : oplogsToClose) { oplog.close(dr); } } public void prepareForClose() { ArrayList<Oplog> oplogsToPrepare = new ArrayList<Oplog>(); synchronized (this.oplogIdToOplog) { oplogsToPrepare.addAll(this.oplogIdToOplog.values()); oplogsToPrepare.addAll(this.inactiveOplogs.values()); } boolean childPreparedForClose = false; long child_oplogid = this.getChild() == null ? -1 : this.getChild().oplogId; for (Oplog oplog : oplogsToPrepare) { oplog.prepareForClose(); if (child_oplogid != -1 && oplog.oplogId == child_oplogid) { childPreparedForClose = true; } } if (!childPreparedForClose && this.getChild() != null) { this.getChild().prepareForClose(); } } public void basicDestroy(DiskRegion dr) { ArrayList<Oplog> oplogsToDestroy = new ArrayList<Oplog>(); synchronized (this.oplogIdToOplog) { for (Oplog oplog : this.oplogIdToOplog.values()) { oplogsToDestroy.add(oplog); } for (Oplog oplog : this.inactiveOplogs.values()) { oplogsToDestroy.add(oplog); } for (Oplog oplog : this.drfOnlyOplogs.values()) { oplogsToDestroy.add(oplog); } { Oplog child = getChild(); if (child != null) { oplogsToDestroy.add(child); } } } for (Oplog oplog : oplogsToDestroy) { oplog.destroy(dr); } } public void destroyAllOplogs() { // get a snapshot to prevent CME for (Oplog oplog : getAllOplogs()) { if (oplog != null) { oplog.destroy(); removeOplog(oplog.getOplogId()); } } } /** * Add compactable oplogs to the list, up to the maximum size. */ public void getCompactableOplogs(List<CompactableOplog> l, int max) { synchronized (this.oplogIdToOplog) { // Sort this list so we compact the oldest first instead of the one // that was // compactable first. 
// ArrayList<CompactableOplog> l = new // ArrayList<CompactableOplog>(this.oplogIdToOplog.values()); // Collections.sort(l); // Iterator<Oplog> itr = l.iterator(); { Iterator<Oplog> itr = this.oplogIdToOplog.values().iterator(); while (itr.hasNext() && l.size() < max) { Oplog oplog = itr.next(); if (oplog.needsCompaction()) { l.add(oplog); } } } } } public void scheduleForRecovery(DiskRecoveryStore drs) { DiskRegionView dr = drs.getDiskRegionView(); if (dr.isRecreated() && (dr.getMyPersistentID() != null || dr.getMyInitializingID() != null)) { // If a region does not have either id then don't pay the cost // of scanning the oplogs for recovered data. DiskRecoveryStore p_drs = drs; synchronized (this.pendingRecoveryMap) { this.pendingRecoveryMap.put(dr.getId(), p_drs); } } } /** * Returns null if we are not currently recovering the DiskRegion with the given drId. */ public DiskRecoveryStore getCurrentlyRecovering(long drId) { return this.currentRecoveryMap.get(drId); } public void initChild() { if (getChild() == null) { setFirstChild(getSortedOplogs(), true); } } public void offlineCompact() { if (getChild() != null) { // check active oplog and if it is empty delete it getChild().krfClose(); if (getChild().isOplogEmpty()) { getChild().destroy(); } } { // remove any oplogs that only have a drf to fix bug 42036 ArrayList<Oplog> toDestroy = new ArrayList<Oplog>(); synchronized (this.oplogIdToOplog) { Iterator<Oplog> it = this.oplogIdToOplog.values().iterator(); while (it.hasNext()) { Oplog n = it.next(); if (n.isDrfOnly()) { toDestroy.add(n); } } } for (Oplog oplog : toDestroy) { oplog.destroy(); } destroyOldestReadyToCompact(); } } public DiskStoreImpl getParent() { return parent; } public void updateDiskRegion(AbstractDiskRegion dr) { for (Oplog oplog : getAllOplogs()) { if (oplog != null) { oplog.updateDiskRegion(dr); } } } public void flushChild() { Oplog oplog = getChild(); if (oplog != null) { oplog.flushAll(); } } public String getPrefix() { return 
OplogType.BACKUP.getPrefix();
  }

  /** Records in the disk init file that a crf was created for the given oplog id. */
  public void crfCreate(long oplogId) {
    getParent().getDiskInitFile().crfCreate(oplogId);
  }

  /** Records in the disk init file that a drf was created for the given oplog id. */
  public void drfCreate(long oplogId) {
    getParent().getDiskInitFile().drfCreate(oplogId);
  }

  /** Records in the disk init file that the crf for the given oplog id was deleted. */
  public void crfDelete(long oplogId) {
    getParent().getDiskInitFile().crfDelete(oplogId);
  }

  /** Records in the disk init file that the drf for the given oplog id was deleted. */
  public void drfDelete(long oplogId) {
    getParent().getDiskInitFile().drfDelete(oplogId);
  }

  /** Delegates to the parent disk store: can krf (key) files be produced here? */
  public boolean couldHaveKrf() {
    return getParent().couldHaveKrf();
  }

  /** Delegates to the parent disk store: is compaction currently possible? */
  public boolean isCompactionPossible() {
    return getParent().isCompactionPossible();
  }
}
package fr.openwide.core.wicket.more.condition;

import static com.google.common.base.Preconditions.checkNotNull;

import java.io.Serializable;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;

import org.apache.wicket.Component;
import org.apache.wicket.MarkupContainer;
import org.apache.wicket.behavior.Behavior;
import org.apache.wicket.injection.Injector;
import org.apache.wicket.markup.html.form.FormComponent;
import org.apache.wicket.markup.repeater.data.IDataProvider;
import org.apache.wicket.model.AbstractReadOnlyModel;
import org.apache.wicket.model.IDetachable;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.spring.injection.annot.SpringBean;
import org.springframework.security.acls.domain.PermissionFactory;
import org.springframework.security.acls.model.Permission;

import com.google.common.base.Equivalence;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;

import fr.openwide.core.commons.util.functional.Predicates2;
import fr.openwide.core.jpa.security.service.IAuthenticationService;
import fr.openwide.core.wicket.more.markup.html.basic.ComponentBooleanProperty;
import fr.openwide.core.wicket.more.markup.html.basic.ComponentBooleanPropertyBehavior;
import fr.openwide.core.wicket.more.markup.html.basic.impl.AbstractConfigurableComponentBooleanPropertyBehavior.Operator;
import fr.openwide.core.wicket.more.markup.repeater.sequence.ISequenceProvider;
import fr.openwide.core.wicket.more.model.BindingModel;
import fr.openwide.core.wicket.more.util.Detach;
import fr.openwide.core.wicket.more.util.binding.CoreWicketMoreBindings;

/**
 * A serializable, lazily-evaluated boolean predicate, re-evaluated on each call to
 * {@link #applies()}.
 *
 * <p>Conditions are composable: instances can be combined with the boolean combinators
 * ({@link #or(Condition)}, {@link #and(Condition)}, {@link #nor(Condition)},
 * {@link #nand(Condition)}, {@link #negate()}), turned into conditional models with
 * {@link #then(IModel)}{@code .otherwise(...)}, or attached to components as
 * visibility/enablement {@link Behavior}s via {@link #thenShow()}, {@link #thenEnable()} and
 * friends.
 *
 * <p>Numerous static factories build conditions from models, predicates, component state,
 * data providers, and Spring Security roles/permissions. Note that
 * {@code BooleanOperator} is resolved from this package (not imported here).
 *
 * <p>This class implements {@link IModel} only so that a {@code Condition} can be passed
 * where Wicket expects an {@code IModel<Boolean>}; the model accessors are deprecated and
 * {@link #setObject(Boolean)} always throws.
 */
public abstract class Condition implements IModel<Boolean>, IDetachable {

	private static final long serialVersionUID = -3315852580233582804L;

	// Shared joiner used by the various toString() implementations below.
	private static final Joiner COMMA_JOINER = Joiner.on(',');

	/**
	 * Evaluates this condition.
	 *
	 * @return whether the condition currently holds; may differ between calls since
	 *         implementations typically read live state (models, components, security
	 *         context).
	 */
	public abstract boolean applies();

	/**
	 * @deprecated Provided only to satisfy the {@link IModel} interface. Use {@link #applies()} instead.
	 */
	@Override
	@Deprecated
	public final Boolean getObject() {
		return applies();
	}

	/**
	 * @deprecated Provided only to satisfy the {@link IModel} interface. Not supported.
	 */
	@Override
	@Deprecated
	public void setObject(Boolean object) {
		throw new UnsupportedOperationException("Cannot set the value of a condition");
	}

	// Default no-op; composite/wrapping conditions override this to propagate
	// detachment to their operands and models.
	@Override
	public void detach() {
	}

	/** Logical OR of the given conditions (short-circuiting, see BooleanOperator). */
	public static Condition or(Condition firstCondition, Condition ... otherConditions) {
		return composite(BooleanOperator.OR, Lists.asList(firstCondition, otherConditions));
	}

	/** {@code this OR operand}. */
	public Condition or(Condition operand) {
		return or(this, operand);
	}

	/** Logical NOR of the given conditions. */
	public static Condition nor(Condition firstCondition, Condition ... otherConditions) {
		return composite(BooleanOperator.NOR, Lists.asList(firstCondition, otherConditions));
	}

	/** {@code this NOR operand}. */
	public Condition nor(Condition operand) {
		return nor(this, operand);
	}

	/** Logical AND of the given conditions. */
	public static Condition and(Condition firstCondition, Condition ... otherConditions) {
		return composite(BooleanOperator.AND, Lists.asList(firstCondition, otherConditions));
	}

	/** {@code this AND operand}. */
	public Condition and(Condition operand) {
		return and(this, operand);
	}

	/** Logical NAND of the given conditions. */
	public static Condition nand(Condition firstCondition, Condition ... otherConditions) {
		return composite(BooleanOperator.NAND, Lists.asList(firstCondition, otherConditions));
	}

	/** {@code this NAND operand}. */
	public Condition nand(Condition operand) {
		return nand(this, operand);
	}

	/** Logical negation of this condition. */
	public Condition negate() {
		return new NegateCondition(this);
	}

	/** Wraps a condition and inverts its result; detaches the wrapped condition. */
	private static class NegateCondition extends Condition {
		private static final long serialVersionUID = 1L;

		private final Condition condition;

		public NegateCondition(Condition condition) {
			super();
			this.condition = condition;
		}

		@Override
		public boolean applies() {
			return !condition.applies();
		}

		@Override
		public void detach() {
			super.detach();
			condition.detach();
		}

		@Override
		public String toString() {
			return "not(" + condition + ")";
		}
	}

	/**
	 * Starts a fluent if/else-if/else chain producing an {@link IModel} whose value is
	 * {@code modelIfTrue}'s value while this condition applies.
	 */
	public <T> IfElseBuilder<T> then(IModel<? extends T> modelIfTrue) {
		return new IfElseBuilder<>(this, modelIfTrue);
	}

	/** Same as {@link #then(IModel)}, wrapping the constant value in a {@link Model}. */
	public <T extends Serializable> ValueIfElseBuilder<T> then(T valueIfTrue) {
		return new ValueIfElseBuilder<>(this, Model.of(valueIfTrue));
	}

	/**
	 * Builder for {@code condition.then(a).elseIf(other, b).otherwise(c)} chains.
	 *
	 * <p>{@link #elseIf(Condition, IModel)} nests builders so that, when terminated with
	 * {@link #otherwise(IModel)}, the conditions are tested in declaration order and the
	 * first applicable branch wins.
	 */
	public static class IfElseBuilder<T> {
		private final Condition condition;
		private final IModel<? extends T> modelIfTrue;

		public IfElseBuilder(Condition condition, IModel<? extends T> modelIfTrue) {
			super();
			this.condition = condition;
			this.modelIfTrue = modelIfTrue;
		}

		public IfElseBuilder<T> elseIf(Condition condition, IModel<? extends T> model) {
			// The returned builder, once terminated, becomes this builder's "false" branch.
			return new IfElseBuilder<T>(condition, model) {
				@Override
				public IModel<T> otherwise(IModel<? extends T> modelIfFalse) {
					return IfElseBuilder.this.otherwise(super.otherwise(modelIfFalse));
				}
			};
		}

		public IModel<T> otherwise(IModel<? extends T> modelIfFalse) {
			return condition.asValue(modelIfTrue, modelIfFalse);
		}
	}

	/** {@link IfElseBuilder} variant accepting constant {@link Serializable} values. */
	public static class ValueIfElseBuilder<T extends Serializable> extends IfElseBuilder<T> {
		public ValueIfElseBuilder(Condition condition, IModel<? extends T> modelIfTrue) {
			super(condition, modelIfTrue);
		}

		@Override
		public ValueIfElseBuilder<T> elseIf(Condition condition, IModel<? extends T> model) {
			return new ValueIfElseBuilder<T>(condition, model) {
				@Override
				public IModel<T> otherwise(IModel<? extends T> modelIfFalse) {
					return ValueIfElseBuilder.this.otherwise(super.otherwise(modelIfFalse));
				}
			};
		}

		public ValueIfElseBuilder<T> elseIf(Condition condition, T valueIfFalse) {
			return elseIf(condition, Model.of(valueIfFalse));
		}

		public IModel<T> otherwise(T valueIfFalse) {
			return otherwise(Model.of(valueIfFalse));
		}
	}

	/**
	 * @deprecated Use .then(...).otherwise(...) instead.
	 */
	@Deprecated
	public <T> IModel<T> asValue(IModel<? extends T> modelIfTrue, IModel<? extends T> modelIfFalse) {
		return new ConditionalModel<>(this, modelIfTrue, modelIfFalse);
	}

	/**
	 * @deprecated Use .then(...).otherwise(...) instead.
	 */
	@Deprecated
	public <T extends Serializable> IModel<T> asValue(T valueIfTrue, T valueIfFalse) {
		return new ConditionalModel<>(this, Model.of(valueIfTrue), Model.of(valueIfFalse));
	}

	/**
	 * Read-only model selecting between two models based on the condition, re-evaluated
	 * on every {@link #getObject()} call. All three collaborators are detached together.
	 */
	private static final class ConditionalModel<T> extends AbstractReadOnlyModel<T> {
		private static final long serialVersionUID = 4696234484508240728L;

		private final Condition condition;
		private IModel<? extends T> modelIfTrue;
		private IModel<? extends T> modelIfFalse;

		private ConditionalModel(Condition condition, IModel<? extends T> modelIfTrue,
				IModel<? extends T> modelIfFalse) {
			super();
			this.condition = checkNotNull(condition);
			this.modelIfTrue = checkNotNull(modelIfTrue);
			this.modelIfFalse = checkNotNull(modelIfFalse);
		}

		@Override
		public T getObject() {
			if (condition.applies()) {
				return modelIfTrue.getObject();
			} else {
				return modelIfFalse.getObject();
			}
		}

		@Override
		public void detach() {
			super.detach();
			condition.detach();
			modelIfTrue.detach();
			modelIfFalse.detach();
		}
	}

	/** Combines operands with the given operator. */
	public static Condition composite(BooleanOperator operator, Condition ... operands) {
		return new CompositeCondition(operator, Arrays.asList(operands));
	}

	/** Combines operands with the given operator; the operand iterable is copied defensively. */
	public static Condition composite(BooleanOperator operator, Iterable<? extends Condition> operands) {
		return new CompositeCondition(operator, operands);
	}

	/** Applies a {@code BooleanOperator} over an immutable snapshot of its operands. */
	private static class CompositeCondition extends Condition {
		private static final long serialVersionUID = 1L;

		private final BooleanOperator operator;
		private final Iterable<? extends Condition> operands;

		public CompositeCondition(BooleanOperator operator, Iterable<? extends Condition> operands) {
			super();
			this.operator = operator;
			this.operands = ImmutableList.copyOf(operands);
		}

		@Override
		public boolean applies() {
			return operator.apply(operands);
		}

		@Override
		public void detach() {
			super.detach();
			for (Condition operand : operands) {
				operand.detach();
			}
		}

		@Override
		public String toString() {
			StringBuilder builder = new StringBuilder();
			builder.append(operator.name().toLowerCase()).append("(").append(COMMA_JOINER.join(operands)).append(")");
			return builder.toString();
		}
	}

	/** Returns one of the two shared constant conditions. */
	public static Condition constant(boolean value) {
		return value ? alwaysTrue() : alwaysFalse();
	}

	/** A condition that always applies (singleton). */
	public static Condition alwaysTrue() {
		return ConstantCondition.ALWAYS_TRUE;
	}

	/** A condition that never applies (singleton). */
	public static Condition alwaysFalse() {
		return ConstantCondition.ALWAYS_FALSE;
	}

	/**
	 * Fixed-result condition. The two instances use {@code readResolve()} so that Java
	 * deserialization preserves the singleton property.
	 */
	private static class ConstantCondition extends Condition {
		private static final long serialVersionUID = -7678144550356610455L;

		private static final ConstantCondition ALWAYS_TRUE = new ConstantCondition(true) {
			private static final long serialVersionUID = -8786829515620843503L;
			private Object readResolve() {
				return ALWAYS_TRUE;
			}
		};

		private static final ConstantCondition ALWAYS_FALSE = new ConstantCondition(false) {
			private static final long serialVersionUID = -6055735778127387150L;
			private Object readResolve() {
				return ALWAYS_FALSE;
			}
		};

		private final boolean value;

		public ConstantCondition(boolean value) {
			super();
			this.value = value;
		}

		@Override
		public boolean applies() {
			return value;
		}

		@Override
		public String toString() {
			return String.valueOf(value);
		}
	}

	/** True when the two model values are equal per {@link Equivalence#equals()}. */
	public static <T> Condition isEqual(IModel<? extends T> leftModel, IModel<? extends T> rightModel) {
		return isEquivalent(leftModel, rightModel, Equivalence.equals());
	}

	/** True when the two model values are equivalent under the given {@link Equivalence}. */
	public static <T> Condition isEquivalent(IModel<? extends T> leftModel, IModel<? extends T> rightModel,
			Equivalence<? super T> equivalence) {
		return new EquivalenceCondition<>(leftModel, rightModel, equivalence);
	}

	/**
	 * Compares two model values with an {@link Equivalence}. If the equivalence itself is
	 * {@link IDetachable} it is detached along with the models.
	 */
	private static class EquivalenceCondition<T> extends Condition {
		private static final long serialVersionUID = 1L;

		private final IModel<? extends T> leftModel;
		private final IModel<? extends T> rightModel;
		private final Equivalence<? super T> equivalence;

		public EquivalenceCondition(IModel<? extends T> leftModel, IModel<? extends T> rightModel,
				Equivalence<? super T> equivalence) {
			super();
			this.leftModel = leftModel;
			this.rightModel = rightModel;
			this.equivalence = equivalence;
		}

		@Override
		public boolean applies() {
			return equivalence.equivalent(leftModel.getObject(), rightModel.getObject());
		}

		@Override
		public void detach() {
			super.detach();
			if (equivalence instanceof IDetachable) {
				((IDetachable)equivalence).detach();
			}
			leftModel.detach();
			rightModel.detach();
		}

		@Override
		public String toString() {
			StringBuilder builder = new StringBuilder();
			builder.append(equivalence).append("(").append(COMMA_JOINER.join(leftModel, rightModel)).append(")");
			return builder.toString();
		}
	}

	/** True when the collection model's value contains the element model's value. */
	public static <T> Condition contains(IModel<? extends Collection<? super T>> collectionModel,
			IModel<? extends T> elementModel) {
		return new ContainsCondition<>(collectionModel, elementModel);
	}

	/** Membership test; a null collection is treated as not containing anything. */
	private static class ContainsCondition<T> extends Condition {
		private static final long serialVersionUID = 1L;

		private final IModel<? extends Collection<? super T>> collectionModel;
		private final IModel<? extends T> elementModel;

		public ContainsCondition(IModel<? extends Collection<? super T>> collectionModel,
				IModel<? extends T> elementModel) {
			super();
			this.collectionModel = collectionModel;
			this.elementModel = elementModel;
		}

		@Override
		public boolean applies() {
			Collection<? super T> collection = collectionModel.getObject();
			T element = elementModel.getObject();
			try {
				return collection != null && collection.contains(element);
			} catch (NullPointerException e) {
				// We need to catch the NPE to deal with collections and maps which don't permit null keys.
				return false;
			}
		}

		@Override
		public void detach() {
			super.detach();
			collectionModel.detach();
			elementModel.detach();
		}

		@Override
		public String toString() {
			StringBuilder builder = new StringBuilder();
			builder.append("contains(").append(COMMA_JOINER.join(collectionModel, elementModel)).append(")");
			return builder.toString();
		}
	}

	/** True when {@code predicate} accepts the model's value; the model is detached with the condition. */
	public static <T> Condition predicate(IModel<? extends T> model, Predicate<? super T> predicate) {
		return predicate(model, Detach.YES, predicate);
	}

	/** As {@link #predicate(IModel, Predicate)}, testing {@code function(modelValue)} instead. */
	public static <T1, T2> Condition predicate(IModel<? extends T2> model,
			Function<? super T2, ? extends T1> function, Predicate<? super T1> predicate) {
		return predicate(model, Detach.YES, function, predicate);
	}

	/**
	 * True when {@code predicate} accepts the model's value.
	 *
	 * @param detachModel whether the model should be detached when this condition is
	 *        detached (pass {@link Detach#NO} for externally-managed models).
	 */
	public static <T> Condition predicate(IModel<? extends T> model, Detach detachModel,
			Predicate<? super T> predicate) {
		return new PredicateCondition<>(model, detachModel, predicate);
	}

	/** True when {@code predicate} accepts the form component's converted input. */
	public static <T> Condition convertedInputPredicate(final FormComponent<? extends T> formComponent,
			Predicate<? super T> predicate) {
		return convertedInputPredicate(formComponent, Detach.YES, predicate);
	}

	/** Composition variant: tests {@code predicate(function(modelValue))}. */
	public static <T1, T2> Condition predicate(IModel<? extends T2> model, Detach detachModel,
			Function<? super T2, ? extends T1> function, Predicate<? super T1> predicate) {
		return predicate(model, detachModel, Predicates.compose(predicate, function));
	}

	/**
	 * True when {@code predicate} accepts {@link FormComponent#getConvertedInput()} —
	 * i.e. the raw submitted value after conversion, read live at evaluation time.
	 */
	public static <T> Condition convertedInputPredicate(final FormComponent<? extends T> formComponent,
			Detach detachModel, Predicate<? super T> predicate) {
		return predicate(
				new AbstractReadOnlyModel<T>() {
					private static final long serialVersionUID = 1L;
					@Override
					public T getObject() {
						return formComponent.getConvertedInput();
					}
					@Override
					public String toString() {
						return formComponent.toString();
					}
				},
				detachModel, predicate
		);
	}

	/**
	 * Applies a Guava {@link Predicate} to a model's value. Detaches the predicate when it
	 * is {@link IDetachable}, and the model only when {@code detachModel} is {@link Detach#YES}.
	 */
	private static class PredicateCondition<T> extends Condition {
		private static final long serialVersionUID = 1L;

		private final IModel<? extends T> model;
		private final Detach detachModel;
		private final Predicate<? super T> predicate;

		public PredicateCondition(IModel<? extends T> model, Detach detachModel, Predicate<? super T> predicate) {
			super();
			this.model = model;
			this.detachModel = detachModel;
			this.predicate = predicate;
		}

		@Override
		public boolean applies() {
			return predicate.apply(model.getObject());
		}

		@Override
		public void detach() {
			super.detach();
			if (predicate instanceof IDetachable) {
				((IDetachable)predicate).detach();
			}
			if (Detach.YES.equals(detachModel)) {
				model.detach();
			}
		}

		@Override
		public String toString() {
			StringBuilder builder = new StringBuilder();
			builder.append(predicate).append("(").append(model).append(")");
			return builder.toString();
		}
	}

	/**
	 * @see Predicates2#isTrue()
	 */
	public static Condition isTrue(IModel<Boolean> model) {
		return predicate(model, Predicates2.isTrue());
	}

	/**
	 * @see Predicates2#isTrueOrNull()
	 */
	public static Condition isTrueOrNull(IModel<Boolean> model) {
		return predicate(model, Predicates2.isTrueOrNull());
	}

	/**
	 * @see Predicates2#isFalse()
	 */
	public static Condition isFalse(IModel<Boolean> model) {
		return predicate(model, Predicates2.isFalse());
	}

	/**
	 * @see Predicates2#isFalseOrNull()
	 */
	public static Condition isFalseOrNull(IModel<Boolean> model) {
		return predicate(model, Predicates2.isFalseOrNull());
	}

	/** True when the data provider reports a size of 0. */
	public static Condition isEmpty(IDataProvider<?> dataProvider) {
		return predicate(BindingModel.of(dataProvider, CoreWicketMoreBindings.iBindableDataProvider().size()),
				Predicates.equalTo(0L));
	}

	/** Negation of {@link #isEmpty(IDataProvider)}. */
	public static Condition isNotEmpty(IDataProvider<?> dataProvider) {
		return predicate(BindingModel.of(dataProvider, CoreWicketMoreBindings.iBindableDataProvider().size()),
				Predicates.equalTo(0L)).negate();
	}

	/** True when the sequence provider reports a size of 0. */
	public static Condition isEmpty(final ISequenceProvider<?> sequenceProvider) {
		return new Condition() {
			private static final long serialVersionUID = 1L;
			@Override
			public boolean applies() {
				return sequenceProvider.size() == 0;
			}
			@Override
			public void detach() {
				sequenceProvider.detach();
			}
		};
	}

	/** True when the sequence provider reports a size greater than 0. */
	public static Condition isNotEmpty(final ISequenceProvider<?> sequenceProvider) {
		return new Condition() {
			private static final long serialVersionUID = 1L;
			@Override
			public boolean applies() {
				return sequenceProvider.size() > 0;
			}
			@Override
			public void detach() {
				sequenceProvider.detach();
			}
		};
	}

	/** True when the component would be visible (after {@code configure()}). */
	public static Condition visible(Component component) {
		return new VisibleCondition(component);
	}

	/**
	 * Checks a component's effective visibility. Calls {@code configure()} first so that
	 * {@code onConfigure()}-driven visibility is taken into account.
	 */
	private static class VisibleCondition extends Condition {
		private static final long serialVersionUID = 1L;

		private final Component component;

		public VisibleCondition(Component component) {
			super();
			this.component = component;
		}

		@Override
		public boolean applies() {
			component.configure();
			return component.determineVisibility();
		}

		@Override
		public String toString() {
			return "visible(" + component + ")";
		}
	}

	/** True when at least one direct child of the container is visible. */
	public static Condition anyChildVisible(MarkupContainer container) {
		return new AnyChildVisibleCondition(container);
	}

	/** Iterates the container's direct children, configuring each before the visibility check. */
	private static class AnyChildVisibleCondition extends Condition {
		private static final long serialVersionUID = 1L;

		private final MarkupContainer container;

		public AnyChildVisibleCondition(MarkupContainer container) {
			super();
			this.container = container;
		}

		@Override
		public boolean applies() {
			for (Component child : container) {
				child.configure();
				if (child.determineVisibility()) {
					return true;
				}
			}
			return false;
		}

		@Override
		public String toString() {
			return "anyChildVisible(" + container + ")";
		}
	}

	/** True when at least one direct child of the container is enabled (and allowed to be). */
	public static Condition anyChildEnabled(MarkupContainer container) {
		return new AnyChildEnabledCondition(container);
	}

	/** Iterates the container's direct children, configuring each before the enablement check. */
	private static class AnyChildEnabledCondition extends Condition {
		private static final long serialVersionUID = 1L;

		private final MarkupContainer container;

		public AnyChildEnabledCondition(MarkupContainer container) {
			super();
			this.container = container;
		}

		@Override
		public boolean applies() {
			for (Component child : container) {
				child.configure();
				if (child.isEnabled() && child.isEnableAllowed()) {
					return true;
				}
			}
			return false;
		}

		@Override
		public String toString() {
			return "anyChildEnabled(" + container + ")";
		}
	}

	/** True when the authenticated user has the given role. */
	public static Condition role(String role) {
		return AnyRoleCondition.fromStrings(ImmutableList.of(role));
	}

	/** True when the authenticated user has at least one of the given roles. */
	public static Condition anyRole(String role, String ... otherRoles) {
		return AnyRoleCondition.fromStrings(Lists.asList(role, otherRoles));
	}

	/** Model-based variant of {@link #role(String)}; the role is read at evaluation time. */
	public static Condition role(IModel<String> roleModel) {
		return AnyRoleCondition.fromModels(ImmutableList.of(roleModel));
	}

	/** Model-based variant of {@link #anyRole(String, String...)}. */
	@SafeVarargs
	public static Condition anyRole(IModel<String> roleModel, IModel<String> ... otherRoleModels) {
		return AnyRoleCondition.fromModels(Lists.asList(roleModel, otherRoleModels));
	}

	/**
	 * Delegates to the injected {@link IAuthenticationService}; applies when the current
	 * user has any of the configured roles. Role models are detached with the condition.
	 */
	private static class AnyRoleCondition extends Condition {
		private static final long serialVersionUID = 1L;

		@SpringBean
		private IAuthenticationService authenticationService;

		private final Iterable<? extends IModel<String>> roleModels;

		public static AnyRoleCondition fromStrings(Iterable<String> roles) {
			return new AnyRoleCondition(Iterables.transform(roles, new Function<String, IModel<String>>() {
				@Override
				public IModel<String> apply(String input) {
					return Model.of(input);
				}
			}));
		}

		public static AnyRoleCondition fromModels(Iterable<? extends IModel<String>> roleModels) {
			return new AnyRoleCondition(roleModels);
		}

		private AnyRoleCondition(Iterable<? extends IModel<String>> roleModels) {
			super();
			Injector.get().inject(this);
			// ImmutableSet.copyOf also forces the (possibly lazy) transform above.
			this.roleModels = ImmutableSet.copyOf(roleModels);
		}

		@Override
		public boolean applies() {
			for (IModel<String> roleModel : roleModels) {
				if (authenticationService.hasRole(roleModel.getObject())) {
					return true;
				}
			}
			return false;
		}

		@Override
		public void detach() {
			super.detach();
			for (IModel<String> roleModel : roleModels) {
				roleModel.detach();
			}
		}

		@Override
		public String toString() {
			return "anyRole(" + COMMA_JOINER.join(roleModels) + ")";
		}
	}

	/** True when the current user holds the named global permission. */
	public static Condition permission(String permissionName) {
		return new AnyGlobalPermissionCondition(ImmutableList.of(permissionName));
	}

	/** True when the current user holds at least one of the named global permissions. */
	public static Condition anyPermission(String permissionName, String ... otherPermissionNames) {
		return new AnyGlobalPermissionCondition(Lists.asList(permissionName, otherPermissionNames));
	}

	/** Iterable variant of {@link #anyPermission(String, String...)}. */
	public static Condition anyPermission(Iterable<String> permissionNames) {
		return new AnyGlobalPermissionCondition(permissionNames);
	}

	/** True when the current user holds the given global {@link Permission}. */
	public static Condition permission(Permission permission) {
		return new AnyGlobalPermissionCondition(permission);
	}

	/**
	 * Global (non-object) permission check. Permission names are resolved once, at
	 * construction time, through the injected {@link PermissionFactory}.
	 */
	private static class AnyGlobalPermissionCondition extends Condition {
		private static final long serialVersionUID = 1L;

		@SpringBean
		private PermissionFactory permissionFactory;

		@SpringBean
		private IAuthenticationService authenticationService;

		private final Iterable<Permission> permissions;

		public AnyGlobalPermissionCondition(Iterable<String> permissionNames) {
			super();
			Injector.get().inject(this);
			this.permissions = permissionFactory.buildFromNames(ImmutableList.copyOf(permissionNames));
		}

		public AnyGlobalPermissionCondition(Permission permission) {
			super();
			Injector.get().inject(this);
			this.permissions = ImmutableList.of(permission);
		}

		@Override
		public boolean applies() {
			for (Permission permission : permissions) {
				if (authenticationService.hasPermission(permission)) {
					return true;
				}
			}
			return false;
		}

		@Override
		public String toString() {
			return "anyGlobalPermission(" + COMMA_JOINER.join(permissions) + ")";
		}
	}

	/** True when the current user holds the named permission on the secured object. */
	public static Condition permission(IModel<?> securedObjectModel, String permissionName) {
		return new AnyObjectPermissionCondition(securedObjectModel, ImmutableList.of(permissionName));
	}

	/** True when the current user holds any of the named permissions on the secured object. */
	public static Condition anyPermission(IModel<?> securedObjectModel, String permissionName,
			String ... otherPermissionNames) {
		return new AnyObjectPermissionCondition(securedObjectModel,
				Lists.asList(permissionName, otherPermissionNames));
	}

	/** Iterable variant of {@link #anyPermission(IModel, String, String...)}. */
	public static Condition anyPermission(IModel<?> securedObjectModel, Iterable<String> permissionNames) {
		return new AnyObjectPermissionCondition(securedObjectModel, permissionNames);
	}

	/** True when the current user holds the given {@link Permission} on the secured object. */
	public static Condition permission(IModel<?> securedObjectModel, Permission permission) {
		return new AnyObjectPermissionCondition(securedObjectModel, permission);
	}

	/**
	 * Object-level (ACL) permission check: the secured object is read from its model at
	 * evaluation time, and the model is detached with the condition.
	 */
	private static class AnyObjectPermissionCondition extends Condition {
		private static final long serialVersionUID = 1L;

		@SpringBean
		private PermissionFactory permissionFactory;

		@SpringBean
		private IAuthenticationService authenticationService;

		private final IModel<?> securedObjectModel;
		private final Iterable<Permission> permissions;

		public AnyObjectPermissionCondition(IModel<?> securedObjectModel, Iterable<String> permissionNames) {
			super();
			Injector.get().inject(this);
			this.securedObjectModel = securedObjectModel;
			this.permissions = permissionFactory.buildFromNames(ImmutableList.copyOf(permissionNames));
		}

		public AnyObjectPermissionCondition(IModel<?> securedObjectModel, Permission permission) {
			super();
			Injector.get().inject(this);
			this.securedObjectModel = securedObjectModel;
			this.permissions = ImmutableList.of(permission);
		}

		@Override
		public boolean applies() {
			Object securedObject = securedObjectModel.getObject();
			for (Permission permission : permissions) {
				if (authenticationService.hasPermission(securedObject, permission)) {
					return true;
				}
			}
			return false;
		}

		@Override
		public void detach() {
			super.detach();
			securedObjectModel.detach();
		}

		@Override
		public String toString() {
			return "anyObjectPermission(" + securedObjectModel + "," + COMMA_JOINER.join(permissions) + ")";
		}
	}

	/** True when the model's value is non-null. */
	public static Condition modelNotNull(IModel<?> model) {
		return predicate(model, Predicates.notNull());
	}

	/** True when the collection model's value is non-empty (see {@link Predicates2#notEmpty()}). */
	public static <C extends Collection<?>> Condition collectionModelNotEmpty(IModel<C> collectionModel) {
		return predicate(collectionModel, Predicates2.notEmpty());
	}

	/** True when the map model's value is non-empty (see {@link Predicates2#mapNotEmpty()}). */
	public static <M extends Map<?, ?>> Condition mapModelNotEmpty(IModel<M> mapModel) {
		return predicate(mapModel, Predicates2.mapNotEmpty());
	}

	/** True when at least one of the given models has a non-null value. */
	public static Condition modelsAnyNotNull(IModel<?> firstModel, IModel<?>... otherModels) {
		Condition condition = Condition.alwaysFalse();
		for (IModel<?> model : Lists.asList(firstModel, otherModels)) {
			condition = condition.or(modelNotNull(model));
		}
		return condition;
	}

	/**
	 * True when the predicate accepts at least one of the models' values.
	 * NOTE(review): this is an instance method despite not using instance state —
	 * presumably for call-site convenience; it behaves like a static factory.
	 */
	@SafeVarargs
	public final <T> Condition predicateAnyTrue(Predicate<? super T> predicate, IModel<? extends T> firstModel,
			IModel<? extends T>... otherModels) {
		Condition condition = Condition.alwaysFalse();
		for (IModel<? extends T> model : Lists.asList(firstModel, otherModels)) {
			condition = condition.or(predicate(model, predicate));
		}
		return condition;
	}

	/** Alias for {@link #visible(Component)}. */
	public static Condition componentVisible(Component component) {
		return visible(component);
	}

	/** True when at least one of the given components is visible. */
	public static Condition componentsAnyVisible(Component firstComponent, Component... otherComponents) {
		return componentsAnyVisible(Lists.asList(firstComponent, otherComponents));
	}

	/** True when at least one component in the collection is visible. */
	public static Condition componentsAnyVisible(Collection<? extends Component> targetComponents) {
		Condition condition = Condition.alwaysFalse();
		for (Component component : targetComponents) {
			condition = condition.or(visible(component));
		}
		return condition;
	}

	/**
	 * Toggle component's visibilityAllowed property.
	 *
	 * @see #thenHide()
	 * @see #thenShowInternal()
	 */
	public Behavior thenShow() {
		return thenProperty(ComponentBooleanProperty.VISIBILITY_ALLOWED);
	}

	/**
	 * Toggle component's visibilityAllowed property.
	 *
	 * @see #thenShow()
	 * @see #thenHideInternal()
	 */
	public Behavior thenHide() {
		return thenPropertyNegate(ComponentBooleanProperty.VISIBILITY_ALLOWED);
	}

	/**
	 * Toggle component's visible property.
	 *
	 * Recommended way to manipulate component visibility is to use {@link #thenShow()}. This method may be used
	 * for compatibility needs or if {@link #thenShow()} is already used and cannot be overriden.
	 *
	 * @see #thenShow()
	 */
	public Behavior thenShowInternal() {
		return thenProperty(ComponentBooleanProperty.VISIBLE);
	}

	/**
	 * Toggle component's visible property.
	 *
	 * Recommended way to manipulate component visibility is to use {@link #thenHide()}. This method may be used
	 * for compatibility needs or if {@link #thenHide()} is already used and cannot be overriden.
	 *
	 * @see #thenHide()
	 */
	public Behavior thenHideInternal() {
		return thenPropertyNegate(ComponentBooleanProperty.VISIBLE);
	}

	/**
	 * Toggle component's enabled property.
	 */
	public Behavior thenEnable() {
		return thenProperty(ComponentBooleanProperty.ENABLE);
	}

	/**
	 * Toggle component's enabled property.
	 */
	public Behavior thenDisable() {
		return thenPropertyNegate(ComponentBooleanProperty.ENABLE);
	}

	/**
	 * Toggle component's provided property.
	 *
	 * @see #thenShow()
	 * @see #thenHide()
	 * @see #thenShowInternal()
	 * @see #thenHideInternal()
	 * @see #thenEnable()
	 * @see #thenDisable()
	 */
	public Behavior thenProperty(ComponentBooleanProperty property) {
		return new ComponentBooleanPropertyBehavior(property, Operator.WHEN_ALL_TRUE).condition(this);
	}

	/** As {@link #thenProperty(ComponentBooleanProperty)}, but driven by the negated condition. */
	public Behavior thenPropertyNegate(ComponentBooleanProperty property) {
		return new ComponentBooleanPropertyBehavior(property, Operator.WHEN_ALL_TRUE).condition(this.negate());
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.shindig.gadgets.servlet; import static org.easymock.EasyMock.capture; import static org.easymock.EasyMock.expect; import static org.easymock.EasyMock.reportMatcher; import java.util.Arrays; import java.util.Collections; import java.util.Map; import java.util.concurrent.ExecutionException; import org.apache.shindig.common.EasyMockTestCase; import org.apache.shindig.common.JsonAssert; import org.apache.shindig.common.testing.FakeGadgetToken; import org.apache.shindig.common.uri.Uri; import org.apache.shindig.gadgets.AuthType; import org.apache.shindig.gadgets.FeedProcessor; import org.apache.shindig.gadgets.FeedProcessorImpl; import org.apache.shindig.gadgets.http.HttpRequest; import org.apache.shindig.gadgets.http.HttpResponse; import org.apache.shindig.gadgets.http.HttpResponseBuilder; import org.apache.shindig.gadgets.http.RequestPipeline; import org.apache.shindig.gadgets.oauth.OAuthArguments; import org.apache.shindig.gadgets.rewrite.CaptureRewriter; import org.apache.shindig.gadgets.rewrite.DefaultResponseRewriterRegistry; import org.apache.shindig.gadgets.rewrite.ResponseRewriter; import org.apache.shindig.gadgets.rewrite.ResponseRewriterRegistry; import 
org.apache.shindig.protocol.DefaultHandlerRegistry; import org.apache.shindig.protocol.HandlerExecutionListener; import org.apache.shindig.protocol.HandlerRegistry; import org.apache.shindig.protocol.ProtocolException; import org.apache.shindig.protocol.RpcHandler; import org.apache.shindig.protocol.conversion.BeanJsonConverter; import org.apache.shindig.protocol.multipart.FormDataItem; import org.easymock.Capture; import org.easymock.IArgumentMatcher; import org.json.JSONArray; import org.json.JSONObject; import org.junit.Before; import org.junit.Test; import com.google.common.base.Objects; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.inject.Guice; import com.google.inject.Injector; import com.google.inject.Provider; /** * Has coverage for all tests in MakeRequestHandlerTest and should be maintained in sync until * MakeRequestHandler is eliminated. */ public class HttpRequestHandlerTest extends EasyMockTestCase { private BeanJsonConverter converter; private FakeGadgetToken token; private final RequestPipeline pipeline = mock(RequestPipeline.class); private final CaptureRewriter rewriter = new CaptureRewriter(); private final ResponseRewriterRegistry rewriterRegistry = new DefaultResponseRewriterRegistry(Arrays.<ResponseRewriter>asList(rewriter), null); private HandlerRegistry registry; private HttpResponseBuilder builder; private final Map<String,FormDataItem> emptyFormItems = Collections.emptyMap(); private final Provider<FeedProcessor> feedProcessorProvider = new Provider<FeedProcessor>() { public FeedProcessor get() { return new FeedProcessorImpl(); } }; @Before public void setUp() throws Exception { token = new FakeGadgetToken(); token.setAppUrl("http://www.example.com/gadget.xml"); Injector injector = Guice.createInjector(); converter = new BeanJsonConverter(injector); HttpRequestHandler handler = new HttpRequestHandler(pipeline, rewriterRegistry, feedProcessorProvider); registry = new 
DefaultHandlerRegistry(injector, converter, new HandlerExecutionListener.NoOpHandler()); registry.addHandlers(ImmutableSet.<Object>of(handler)); builder = new HttpResponseBuilder().setResponseString("CONTENT"); } @Test public void testSimpleGet() throws Exception { JSONObject request = new JSONObject("{method:http.get, id:req1, params : {" + "href:'http://www.example.org/somecontent'" + "}}"); HttpRequest httpRequest = new HttpRequest(Uri.parse("http://www.example.org/somecontent")); httpRequest.setMethod("GET"); expect(pipeline.execute(eqRequest(httpRequest))).andReturn(builder.create()).anyTimes(); replay(); RpcHandler operation = registry.getRpcHandler(request); HttpRequestHandler.HttpApiResponse httpApiResponse = (HttpRequestHandler.HttpApiResponse)operation.execute(emptyFormItems, token, converter).get(); verify(); JsonAssert.assertJsonEquals("{ headers : {}, status : 200, content : 'CONTENT' }}", converter.convertToString(httpApiResponse)); } @Test public void testFailGetWithBodyGet() throws Exception { JSONObject request = new JSONObject("{method:http.get, id:req1, params : {" + "href:'http://www.example.org/somecontent'," + "body:'POSTBODY'" + "}}"); HttpRequest httpRequest = new HttpRequest(Uri.parse("http://www.example.org/somecontent")); httpRequest.setMethod("GET"); RpcHandler operation = registry.getRpcHandler(request); try { operation.execute(emptyFormItems, token, converter).get(); fail("Body should not be allowed in GET request"); } catch (ExecutionException ee) { assertTrue(ee.getCause() instanceof ProtocolException); } } @Test public void testSimplePost() throws Exception { JSONObject request = new JSONObject("{method:http.post, id:req1, params : {" + "href:'http://www.example.org/somecontent'," + "body:'POSTBODY'" + "}}"); HttpRequest httpRequest = new HttpRequest(Uri.parse("http://www.example.org/somecontent")); httpRequest.setMethod("POST"); httpRequest.setPostBody("POSTBODY".getBytes()); 
expect(pipeline.execute(eqRequest(httpRequest))).andReturn(builder.create()).anyTimes(); replay(); RpcHandler operation = registry.getRpcHandler(request); HttpRequestHandler.HttpApiResponse httpApiResponse = (HttpRequestHandler.HttpApiResponse)operation.execute(emptyFormItems, token, converter).get(); verify(); JsonAssert.assertJsonEquals("{ headers : {}, status : 200, content : 'CONTENT' }}", converter.convertToString(httpApiResponse)); } @Test public void testPostWithHeaders() throws Exception { JSONObject request = new JSONObject("{method:http.post, id:req1, params : {" + "href:'http://www.example.org/somecontent'," + "body:'POSTBODY'," + "headers:{goodheader:[good], host : [iamstripped], 'Content-Length':['1000']}" + "}}"); HttpRequest httpRequest = new HttpRequest(Uri.parse("http://www.example.org/somecontent")); httpRequest.setMethod("POST"); httpRequest.setPostBody("POSTBODY".getBytes()); httpRequest.setHeader("goodheader", "good"); httpRequest.setHeader("Content-Length", "1000"); expect(pipeline.execute(eqRequest(httpRequest))).andReturn(builder.create()).anyTimes(); replay(); RpcHandler operation = registry.getRpcHandler(request); HttpRequestHandler.HttpApiResponse httpApiResponse = (HttpRequestHandler.HttpApiResponse)operation.execute(emptyFormItems, token, converter).get(); verify(); JsonAssert.assertJsonEquals("{ headers : {}, status : 200, content : 'CONTENT' }}", converter.convertToString(httpApiResponse)); } @Test public void testFetchContentTypeFeed() throws Exception { JSONObject request = new JSONObject("{method:http.get, id:req1, params : {" + "href:'http://www.example.org/somecontent'," + "format : FEED" + "}}"); String entryTitle = "Feed title"; String entryLink = "http://example.org/entry/0/1"; String entrySummary = "This is the summary"; String rss = "<?xml version=\"1.0\" encoding=\"utf-8\"?>" + "<rss version=\"2.0\"><channel>" + "<title>dummy</title>" + "<link>http://example.org/</link>" + "<item>" + "<title>" + entryTitle + "</title>" + 
"<link>" + entryLink + "</link>" + "<description>" + entrySummary + "</description>" + "</item>" + "</channel></rss>"; builder.setResponseString(rss); HttpRequest httpRequest = new HttpRequest(Uri.parse("http://www.example.org/somecontent")); httpRequest.setMethod("GET"); expect(pipeline.execute(eqRequest(httpRequest))).andReturn(builder.create()).anyTimes(); replay(); RpcHandler operation = registry.getRpcHandler(request); HttpRequestHandler.HttpApiResponse httpApiResponse = (HttpRequestHandler.HttpApiResponse)operation.execute(emptyFormItems, token, converter).get(); verify(); JSONObject feed = (JSONObject) httpApiResponse.getContent(); JSONObject entry = feed.getJSONArray("Entry").getJSONObject(0); assertEquals(entryTitle, entry.getString("Title")); assertEquals(entryLink, entry.getString("Link")); assertNull("getSummaries has the wrong default value (should be false).", entry.optString("Summary", null)); assertTrue(rewriter.responseWasRewritten()); } @Test public void testFetchFeedWithParameters() throws Exception { JSONObject request = new JSONObject("{method:http.get, id:req1, params : {" + "href:'http://www.example.org/somecontent'," + "format : FEED," + "summarize : true," + "entryCount : 2" + "}}"); String entryTitle = "Feed title"; String entryLink = "http://example.org/entry/0/1"; String entrySummary = "This is the summary"; String rss = "<?xml version=\"1.0\" encoding=\"utf-8\"?>" + "<rss version=\"2.0\"><channel>" + "<title>dummy</title>" + "<link>http://example.org/</link>" + "<item>" + "<title>" + entryTitle + "</title>" + "<link>" + entryLink + "</link>" + "<description>" + entrySummary + "</description>" + "</item>" + "<item>" + "<title>" + entryTitle + "</title>" + "<link>" + entryLink + "</link>" + "<description>" + entrySummary + "</description>" + "</item>" + "<item>" + "<title>" + entryTitle + "</title>" + "<link>" + entryLink + "</link>" + "<description>" + entrySummary + "</description>" + "</item>" + "</channel></rss>"; 
builder.setResponseString(rss); HttpRequest httpRequest = new HttpRequest(Uri.parse("http://www.example.org/somecontent")); httpRequest.setMethod("GET"); expect(pipeline.execute(eqRequest(httpRequest))).andReturn(builder.create()).anyTimes(); replay(); RpcHandler operation = registry.getRpcHandler(request); HttpRequestHandler.HttpApiResponse httpApiResponse = (HttpRequestHandler.HttpApiResponse)operation.execute(emptyFormItems, token, converter).get(); verify(); JSONObject feed = (JSONObject) httpApiResponse.getContent(); JSONArray feeds = feed.getJSONArray("Entry"); assertEquals("numEntries not parsed correctly.", 2, feeds.length()); JSONObject entry = feeds.getJSONObject(1); assertEquals(entryTitle, entry.getString("Title")); assertEquals(entryLink, entry.getString("Link")); assertTrue("getSummaries not parsed correctly.", entry.has("Summary")); assertEquals(entrySummary, entry.getString("Summary")); assertTrue(rewriter.responseWasRewritten()); } @Test public void testJsonObjectGet() throws Exception { JSONObject request = new JSONObject("{method:http.get, id:req1, params : {" + "href:'http://www.example.org/somecontent', format:'json'" + "}}"); HttpRequest httpRequest = new HttpRequest(Uri.parse("http://www.example.org/somecontent")); httpRequest.setMethod("GET"); builder.setResponseString("{key:1}"); expect(pipeline.execute(eqRequest(httpRequest))).andReturn(builder.create()).anyTimes(); replay(); RpcHandler operation = registry.getRpcHandler(request); HttpRequestHandler.HttpApiResponse httpApiResponse = (HttpRequestHandler.HttpApiResponse)operation.execute(emptyFormItems, token, converter).get(); verify(); JsonAssert.assertJsonEquals("{ headers : {}, status : 200, content : {key: 1}}}", converter.convertToString(httpApiResponse)); } @Test public void testJsonArrayGet() throws Exception { JSONObject request = new JSONObject("{method:http.get, id:req1, params : {" + "href:'http://www.example.org/somecontent', format:'json'" + "}}"); HttpRequest httpRequest = new 
HttpRequest(Uri.parse("http://www.example.org/somecontent")); httpRequest.setMethod("GET"); builder.setResponseString("[{key:1},{key:2}]"); expect(pipeline.execute(eqRequest(httpRequest))).andReturn(builder.create()).anyTimes(); replay(); RpcHandler operation = registry.getRpcHandler(request); HttpRequestHandler.HttpApiResponse httpApiResponse = (HttpRequestHandler.HttpApiResponse)operation.execute(emptyFormItems, token, converter).get(); verify(); JsonAssert.assertJsonEquals("{ headers : {}, status : 200, content : [{key:1},{key:2}]}}", converter.convertToString(httpApiResponse)); } @Test public void testSignedGetRequest() throws Exception { JSONObject request = new JSONObject("{method:http.get, id:req1, params : {" + "href:'http://www.example.org/somecontent'," + "authz : 'signed' }" + "}}"); HttpRequest httpRequest = new HttpRequest(Uri.parse("http://www.example.org/somecontent")); httpRequest.setMethod("GET"); httpRequest.setAuthType(AuthType.SIGNED); httpRequest.setOAuthArguments( new OAuthArguments(AuthType.SIGNED, ImmutableMap.<String, String>of())); expect(pipeline.execute(eqRequest(httpRequest))).andReturn(builder.create()).anyTimes(); replay(); RpcHandler operation = registry.getRpcHandler(request); HttpRequestHandler.HttpApiResponse httpApiResponse = (HttpRequestHandler.HttpApiResponse)operation.execute(emptyFormItems, token, converter).get(); verify(); JsonAssert.assertJsonEquals("{ headers : {}, status : 200, content : 'CONTENT' }}", converter.convertToString(httpApiResponse)); assertTrue(rewriter.responseWasRewritten()); } @Test public void testSignedPostAndUpdateSecurityToken() throws Exception { token.setUpdatedToken("updated"); JSONObject request = new JSONObject("{method:http.post, id:req1, params : {" + "href:'http://www.example.org/somecontent'," + "body:'POSTBODY'," + "authz: 'signed' }" + "}}"); HttpRequest httpRequest = new HttpRequest(Uri.parse("http://www.example.org/somecontent")); httpRequest.setMethod("POST"); 
httpRequest.setAuthType(AuthType.SIGNED); httpRequest.setOAuthArguments( new OAuthArguments(AuthType.SIGNED, ImmutableMap.<String, String>of())); httpRequest.setPostBody("POSTBODY".getBytes()); expect(pipeline.execute(eqRequest(httpRequest))).andReturn(builder.create()).anyTimes(); replay(); RpcHandler operation = registry.getRpcHandler(request); HttpRequestHandler.HttpApiResponse httpApiResponse = (HttpRequestHandler.HttpApiResponse)operation.execute(emptyFormItems, token, converter).get(); verify(); JsonAssert.assertJsonEquals("{ headers : {}, status : 200, content : 'CONTENT', token : updated }}", converter.convertToString(httpApiResponse)); assertTrue(rewriter.responseWasRewritten()); } @Test public void testOAuthRequest() throws Exception { JSONObject request = new JSONObject("{method:http.post, id:req1, params : {" + "href:'http://www.example.org/somecontent'," + "body:'POSTBODY'," + "authz: 'oauth' }" + "}}"); HttpRequest httpRequest = new HttpRequest(Uri.parse("http://www.example.org/somecontent")); httpRequest.setMethod("POST"); httpRequest.setAuthType(AuthType.OAUTH); httpRequest.setOAuthArguments( new OAuthArguments(AuthType.OAUTH, ImmutableMap.<String, String>of())); httpRequest.setPostBody("POSTBODY".getBytes()); expect(pipeline.execute(eqRequest(httpRequest))).andReturn(builder.create()).anyTimes(); replay(); RpcHandler operation = registry.getRpcHandler(request); operation.execute(emptyFormItems, token, converter).get(); verify(); } @Test public void testOAuthRequestWithParameters() throws Exception { JSONObject request = new JSONObject("{method:http.post, id:req1, params : {" + "href:'http://www.example.org/somecontent'," + "body:'POSTBODY'," + "sign_owner:'false'," + "sign_viewer:'true'," + "oauth_service_name:'oauthService'," + "authz: 'oauth' }" + "}}"); HttpRequest httpRequest = new HttpRequest(Uri.parse("http://www.example.org/somecontent")); httpRequest.setMethod("POST"); httpRequest.setAuthType(AuthType.OAUTH); OAuthArguments oauthArgs = new 
OAuthArguments(AuthType.OAUTH, ImmutableMap.<String, String>of()); oauthArgs.setSignOwner(false); oauthArgs.setServiceName("oauthService"); httpRequest.setOAuthArguments(oauthArgs); httpRequest.setPostBody("POSTBODY".getBytes()); Capture<HttpRequest> requestCapture = new Capture<HttpRequest>(); expect(pipeline.execute(capture(requestCapture))).andReturn(builder.create()); replay(); RpcHandler operation = registry.getRpcHandler(request); operation.execute(emptyFormItems, token, converter).get(); verify(); assertEquals(httpRequest.getOAuthArguments(), requestCapture.getValue().getOAuthArguments()); } @Test public void testInvalidSigningTypeTreatedAsNone() throws Exception { JSONObject request = new JSONObject("{method:http.post, id:req1, params : {" + "href:'http://www.example.org/somecontent'," + "body:'POSTBODY'," + "authz : 'rubbish' }" + "}}"); HttpRequest httpRequest = new HttpRequest(Uri.parse("http://www.example.org/somecontent")); httpRequest.setMethod("POST"); httpRequest.setAuthType(AuthType.NONE); httpRequest.setPostBody("POSTBODY".getBytes()); expect(pipeline.execute(eqRequest(httpRequest))).andReturn(builder.create()).anyTimes(); replay(); RpcHandler operation = registry.getRpcHandler(request); operation.execute(emptyFormItems, token, converter).get(); verify(); } @Test public void testSignedGetRequestNoSecurityToken() throws Exception { JSONObject request = new JSONObject("{method:http.get, id:req1, params : {" + "href:'http://www.example.org/somecontent'," + "authz : 'signed'}" + "}}"); HttpRequest httpRequest = new HttpRequest(Uri.parse("http://www.example.org/somecontent")); httpRequest.setMethod("GET"); httpRequest.setAuthType(AuthType.SIGNED); expect(pipeline.execute(eqRequest(httpRequest))).andReturn(builder.create()).anyTimes(); replay(); RpcHandler operation = registry.getRpcHandler(request); try { operation.execute(emptyFormItems, null, converter).get(); fail("Cannot execute a request without a security token"); } catch (ExecutionException ee) 
{ assertTrue(ee.getCause() instanceof ProtocolException); } verify(); } @Test public void testBadHttpResponseIsPropagated() throws Exception { JSONObject request = new JSONObject("{method:http.get, id:req1, params : {" + "href:'http://www.example.org/somecontent'" + "}}"); HttpRequest httpRequest = new HttpRequest(Uri.parse("http://www.example.org/somecontent")); httpRequest.setMethod("GET"); httpRequest.setAuthType(AuthType.NONE); builder.setHttpStatusCode(HttpResponse.SC_INTERNAL_SERVER_ERROR); builder.setResponseString("I AM AN ERROR MESSAGE"); expect(pipeline.execute(eqRequest(httpRequest))).andReturn(builder.create()).anyTimes(); replay(); RpcHandler operation = registry.getRpcHandler(request); HttpRequestHandler.HttpApiResponse httpApiResponse = (HttpRequestHandler.HttpApiResponse)operation.execute(emptyFormItems, token, converter).get(); verify(); JsonAssert.assertJsonEquals("{ headers : {}, status : 500, content : 'I AM AN ERROR MESSAGE' }}", converter.convertToString(httpApiResponse)); } @Test public void testMetadataCopied() throws Exception { JSONObject request = new JSONObject("{method:http.get, id:req1, params : {" + "href:'http://www.example.org/somecontent'" + "}}"); HttpRequest httpRequest = new HttpRequest(Uri.parse("http://www.example.org/somecontent")); httpRequest.setMethod("GET"); builder.setMetadata("foo", "CONTENT"); expect(pipeline.execute(eqRequest(httpRequest))).andReturn(builder.create()).anyTimes(); replay(); RpcHandler operation = registry.getRpcHandler(request); HttpRequestHandler.HttpApiResponse httpApiResponse = (HttpRequestHandler.HttpApiResponse)operation.execute(emptyFormItems, token, converter).get(); verify(); JsonAssert.assertJsonEquals("{ headers : {}, status : 200, content : 'CONTENT', metadata : { foo : 'CONTENT' }}", converter.convertToString(httpApiResponse)); } @Test public void testSetCookiesReturned() throws Exception { JSONObject request = new JSONObject("{method:http.get, id:req1, params : {" + 
"href:'http://www.example.org/somecontent'," + "}}"); HttpRequest httpRequest = new HttpRequest(Uri.parse("http://www.example.org/somecontent")); httpRequest.setMethod("GET"); builder.addHeader("Set-Cookie", "foo=bar; Secure"); builder.addHeader("Set-Cookie", "name=value"); expect(pipeline.execute(eqRequest(httpRequest))).andReturn(builder.create()).anyTimes(); replay(); RpcHandler operation = registry.getRpcHandler(request); HttpRequestHandler.HttpApiResponse httpApiResponse = (HttpRequestHandler.HttpApiResponse)operation.execute(emptyFormItems, token, converter).get(); verify(); JsonAssert.assertJsonEquals( "{ headers : { 'set-cookie' : ['foo=bar; Secure','name=value'] }," + " status : 200, content : 'CONTENT' }", converter.convertToString(httpApiResponse)); } @Test public void testLocationReturned() throws Exception { JSONObject request = new JSONObject("{method:http.get, id:req1, params : {" + "href:'http://www.example.org/somecontent'," + "}}"); HttpRequest httpRequest = new HttpRequest(Uri.parse("http://www.example.org/somecontent")); httpRequest.setMethod("GET"); builder.addHeader("Location", "here"); expect(pipeline.execute(eqRequest(httpRequest))).andReturn(builder.create()).anyTimes(); replay(); RpcHandler operation = registry.getRpcHandler(request); HttpRequestHandler.HttpApiResponse httpApiResponse = (HttpRequestHandler.HttpApiResponse)operation.execute(emptyFormItems, token, converter).get(); verify(); JsonAssert.assertJsonEquals("{ headers : { 'location' : ['here'] }," + " status : 200, content : 'CONTENT' }", converter.convertToString(httpApiResponse)); } private static HttpRequest eqRequest(HttpRequest request) { reportMatcher(new RequestMatcher(request)); return null; } private static class RequestMatcher implements IArgumentMatcher { private final HttpRequest req; public RequestMatcher(HttpRequest request) { this.req = request; } public void appendTo(StringBuffer buffer) { buffer.append("eqRequest[]"); } public boolean matches(Object obj) { 
HttpRequest match = (HttpRequest)obj; return (match.getMethod().equals(req.getMethod()) && match.getUri().equals(req.getUri()) && match.getAuthType().equals(req.getAuthType()) && match.getPostBodyAsString().equals(req.getPostBodyAsString()) && Objects.equal(match.getOAuthArguments(), req.getOAuthArguments()) && match.getHeaders().equals(req.getHeaders())); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.storm.kafka.spout; import java.io.Serializable; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.regex.Pattern; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.common.serialization.StringDeserializer; import org.apache.storm.kafka.spout.KafkaSpoutRetryExponentialBackoff.TimeInterval; import org.apache.storm.kafka.spout.subscription.ManualPartitionSubscription; import org.apache.storm.kafka.spout.subscription.NamedTopicFilter; import org.apache.storm.kafka.spout.subscription.PatternTopicFilter; import org.apache.storm.kafka.spout.subscription.RoundRobinManualPartitioner; import org.apache.storm.kafka.spout.subscription.Subscription; import org.apache.storm.tuple.Fields; /** * KafkaSpoutConfig defines the required configuration to connect a consumer to a consumer group, as well as the subscribing topics. 
*/ public class KafkaSpoutConfig<K, V> implements Serializable { private static final long serialVersionUID = 141902646130682494L; // 200ms public static final long DEFAULT_POLL_TIMEOUT_MS = 200; // 30s public static final long DEFAULT_OFFSET_COMMIT_PERIOD_MS = 30_000; // Retry forever public static final int DEFAULT_MAX_RETRIES = Integer.MAX_VALUE; // 10,000,000 records => 80MBs of memory footprint in the worst case public static final int DEFAULT_MAX_UNCOMMITTED_OFFSETS = 10_000_000; // 2s public static final long DEFAULT_PARTITION_REFRESH_PERIOD_MS = 2_000; public static final FirstPollOffsetStrategy DEFAULT_FIRST_POLL_OFFSET_STRATEGY = FirstPollOffsetStrategy.UNCOMMITTED_EARLIEST; public static final KafkaSpoutRetryService DEFAULT_RETRY_SERVICE = new KafkaSpoutRetryExponentialBackoff(TimeInterval.seconds(0), TimeInterval.milliSeconds(2), DEFAULT_MAX_RETRIES, TimeInterval.seconds(10)); public static final KafkaTupleListener DEFAULT_TUPLE_LISTENER = new EmptyKafkaTupleListener(); // Kafka consumer configuration private final Map<String, Object> kafkaProps; private final Subscription subscription; private final long pollTimeoutMs; // Kafka spout configuration private final RecordTranslator<K, V> translator; private final long offsetCommitPeriodMs; private final int maxUncommittedOffsets; private final FirstPollOffsetStrategy firstPollOffsetStrategy; private final KafkaSpoutRetryService retryService; private final KafkaTupleListener tupleListener; private final long partitionRefreshPeriodMs; private final boolean emitNullTuples; /** * Creates a new KafkaSpoutConfig using a Builder. 
* * @param builder The Builder to construct the KafkaSpoutConfig from */ public KafkaSpoutConfig(Builder<K, V> builder) { this.kafkaProps = setDefaultsAndGetKafkaProps(builder.kafkaProps); this.subscription = builder.subscription; this.translator = builder.translator; this.pollTimeoutMs = builder.pollTimeoutMs; this.offsetCommitPeriodMs = builder.offsetCommitPeriodMs; this.firstPollOffsetStrategy = builder.firstPollOffsetStrategy; this.maxUncommittedOffsets = builder.maxUncommittedOffsets; this.retryService = builder.retryService; this.tupleListener = builder.tupleListener; this.partitionRefreshPeriodMs = builder.partitionRefreshPeriodMs; this.emitNullTuples = builder.emitNullTuples; } /** * The offset used by the Kafka spout in the first poll to Kafka broker. The choice of this parameter will affect the number of consumer * records returned in the first poll. By default this parameter is set to UNCOMMITTED_EARLIEST. <br/><br/> * The allowed values are EARLIEST, LATEST, UNCOMMITTED_EARLIEST, UNCOMMITTED_LATEST. <br/> * <ul> * <li>EARLIEST means that the kafka spout polls records starting in the first offset of the partition, regardless of previous * commits</li> * <li>LATEST means that the kafka spout polls records with offsets greater than the last offset in the partition, regardless of * previous commits</li> * <li>UNCOMMITTED_EARLIEST means that the kafka spout polls records from the last committed offset, if any. If no offset has been * committed, it behaves as EARLIEST.</li> * <li>UNCOMMITTED_LATEST means that the kafka spout polls records from the last committed offset, if any. 
If no offset has been * committed, it behaves as LATEST.</li> * </ul> * */ public static enum FirstPollOffsetStrategy { EARLIEST, LATEST, UNCOMMITTED_EARLIEST, UNCOMMITTED_LATEST } public static class Builder<K, V> { private final Map<String, Object> kafkaProps; private final Subscription subscription; private RecordTranslator<K, V> translator; private long pollTimeoutMs = DEFAULT_POLL_TIMEOUT_MS; private long offsetCommitPeriodMs = DEFAULT_OFFSET_COMMIT_PERIOD_MS; private FirstPollOffsetStrategy firstPollOffsetStrategy = DEFAULT_FIRST_POLL_OFFSET_STRATEGY; private int maxUncommittedOffsets = DEFAULT_MAX_UNCOMMITTED_OFFSETS; private KafkaSpoutRetryService retryService = DEFAULT_RETRY_SERVICE; private KafkaTupleListener tupleListener = DEFAULT_TUPLE_LISTENER; private long partitionRefreshPeriodMs = DEFAULT_PARTITION_REFRESH_PERIOD_MS; private boolean emitNullTuples = false; public Builder(String bootstrapServers, String... topics) { this(bootstrapServers, new ManualPartitionSubscription(new RoundRobinManualPartitioner(), new NamedTopicFilter(topics))); } public Builder(String bootstrapServers, Set<String> topics) { this(bootstrapServers, new ManualPartitionSubscription(new RoundRobinManualPartitioner(), new NamedTopicFilter(topics))); } public Builder(String bootstrapServers, Pattern topics) { this(bootstrapServers, new ManualPartitionSubscription(new RoundRobinManualPartitioner(), new PatternTopicFilter(topics))); } /** * Create a KafkaSpoutConfig builder with default property values and no key/value deserializers. * * @param bootstrapServers The bootstrap servers the consumer will use * @param subscription The subscription defining which topics and partitions each spout instance will read. 
*/ public Builder(String bootstrapServers, Subscription subscription) { kafkaProps = new HashMap<>(); if (bootstrapServers == null || bootstrapServers.isEmpty()) { throw new IllegalArgumentException("bootstrap servers cannot be null"); } kafkaProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); this.subscription = subscription; this.translator = new DefaultRecordTranslator<>(); } /** * Set a {@link KafkaConsumer} property. */ public Builder<K, V> setProp(String key, Object value) { kafkaProps.put(key, value); return this; } /** * Set multiple {@link KafkaConsumer} properties. */ public Builder<K, V> setProp(Map<String, Object> props) { kafkaProps.putAll(props); return this; } /** * Set multiple {@link KafkaConsumer} properties. */ public Builder<K, V> setProp(Properties props) { props.forEach((key, value) -> { if (key instanceof String) { kafkaProps.put((String) key, value); } else { throw new IllegalArgumentException("Kafka Consumer property keys must be Strings"); } }); return this; } //Spout Settings /** * Specifies the time, in milliseconds, spent waiting in poll if data is not available. Default is 2s. * * @param pollTimeoutMs time in ms */ public Builder<K, V> setPollTimeoutMs(long pollTimeoutMs) { this.pollTimeoutMs = pollTimeoutMs; return this; } /** * Specifies the period, in milliseconds, the offset commit task is periodically called. Default is 15s. * * @param offsetCommitPeriodMs time in ms */ public Builder<K, V> setOffsetCommitPeriodMs(long offsetCommitPeriodMs) { this.offsetCommitPeriodMs = offsetCommitPeriodMs; return this; } /** * Defines the max number of polled offsets (records) that can be pending commit, before another poll can take place. Once this * limit is reached, no more offsets (records) can be polled until the next successful commit(s) sets the number of pending offsets * below the threshold. The default is {@link #DEFAULT_MAX_UNCOMMITTED_OFFSETS}. 
Note that this limit can in some cases be exceeded, * but no partition will exceed this limit by more than maxPollRecords - 1. * * @param maxUncommittedOffsets max number of records that can be be pending commit */ public Builder<K, V> setMaxUncommittedOffsets(int maxUncommittedOffsets) { this.maxUncommittedOffsets = maxUncommittedOffsets; return this; } /** * Sets the offset used by the Kafka spout in the first poll to Kafka broker upon process start. Please refer to to the * documentation in {@link FirstPollOffsetStrategy} * * @param firstPollOffsetStrategy Offset used by Kafka spout first poll */ public Builder<K, V> setFirstPollOffsetStrategy(FirstPollOffsetStrategy firstPollOffsetStrategy) { this.firstPollOffsetStrategy = firstPollOffsetStrategy; return this; } /** * Sets the retry service for the spout to use. * * @param retryService the new retry service * @return the builder (this). */ public Builder<K, V> setRetry(KafkaSpoutRetryService retryService) { if (retryService == null) { throw new NullPointerException("retryService cannot be null"); } this.retryService = retryService; return this; } /** * Sets the tuple listener for the spout to use. * * @param tupleListener the tuple listener * @return the builder (this). */ public Builder<K, V> setTupleListener(KafkaTupleListener tupleListener) { if (tupleListener == null) { throw new NullPointerException("KafkaTupleListener cannot be null"); } this.tupleListener = tupleListener; return this; } public Builder<K, V> setRecordTranslator(RecordTranslator<K, V> translator) { this.translator = translator; return this; } /** * Configure a translator with tuples to be emitted on the default stream. 
* * @param func extracts and turns a Kafka ConsumerRecord into a list of objects to be emitted * @param fields the names of the fields extracted * @return this to be able to chain configuration */ public Builder<K, V> setRecordTranslator(Func<ConsumerRecord<K, V>, List<Object>> func, Fields fields) { return setRecordTranslator(new SimpleRecordTranslator<>(func, fields)); } /** * Configure a translator with tuples to be emitted to a given stream. * * @param func extracts and turns a Kafka ConsumerRecord into a list of objects to be emitted * @param fields the names of the fields extracted * @param stream the stream to emit the tuples on * @return this to be able to chain configuration */ public Builder<K, V> setRecordTranslator(Func<ConsumerRecord<K, V>, List<Object>> func, Fields fields, String stream) { return setRecordTranslator(new SimpleRecordTranslator<>(func, fields, stream)); } /** * Sets partition refresh period in milliseconds. This is how often kafka will be polled to check for new topics and/or new * partitions. This is mostly for Subscription implementations that manually assign partitions. NamedSubscription and * PatternSubscription rely on kafka to handle this instead. * * @param partitionRefreshPeriodMs time in milliseconds * @return the builder (this) */ public Builder<K, V> setPartitionRefreshPeriodMs(long partitionRefreshPeriodMs) { this.partitionRefreshPeriodMs = partitionRefreshPeriodMs; return this; } /** * Specifies if the spout should emit null tuples to the component downstream, or rather not emit and directly ack them. By default * this parameter is set to false, which means that null tuples are not emitted. 
* * @param emitNullTuples sets if null tuples should or not be emitted downstream */ public Builder<K, V> setEmitNullTuples(boolean emitNullTuples) { this.emitNullTuples = emitNullTuples; return this; } public KafkaSpoutConfig<K, V> build() { return new KafkaSpoutConfig<>(this); } } /** * Factory method that creates a Builder with String key/value deserializers. * * @param bootstrapServers The bootstrap servers for the consumer * @param topics The topics to subscribe to * @return The new builder */ public static Builder<String, String> builder(String bootstrapServers, String... topics) { return setStringDeserializers(new Builder<>(bootstrapServers, topics)); } /** * Factory method that creates a Builder with String key/value deserializers. * * @param bootstrapServers The bootstrap servers for the consumer * @param topics The topics to subscribe to * @return The new builder */ public static Builder<String, String> builder(String bootstrapServers, Set<String> topics) { return setStringDeserializers(new Builder<>(bootstrapServers, topics)); } /** * Factory method that creates a Builder with String key/value deserializers. 
* * @param bootstrapServers The bootstrap servers for the consumer * @param topics The topic pattern to subscribe to * @return The new builder */ public static Builder<String, String> builder(String bootstrapServers, Pattern topics) { return setStringDeserializers(new Builder<>(bootstrapServers, topics)); } private static Builder<String, String> setStringDeserializers(Builder<String, String> builder) { builder.setProp(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); builder.setProp(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); return builder; } private static Map<String, Object> setDefaultsAndGetKafkaProps(Map<String, Object> kafkaProps) { // set defaults for properties not specified if (!kafkaProps.containsKey(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG)) { kafkaProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false"); } return kafkaProps; } /** * Gets the properties that will be passed to the KafkaConsumer. * * @return The Kafka properties map */ public Map<String, Object> getKafkaProps() { return kafkaProps; } public Subscription getSubscription() { return subscription; } public RecordTranslator<K, V> getTranslator() { return translator; } public long getPollTimeoutMs() { return pollTimeoutMs; } public long getOffsetsCommitPeriodMs() { return offsetCommitPeriodMs; } public boolean isConsumerAutoCommitMode() { return kafkaProps.get(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG) == null // default is false || Boolean.valueOf((String) kafkaProps.get(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG)); } public String getConsumerGroupId() { return (String) kafkaProps.get(ConsumerConfig.GROUP_ID_CONFIG); } public FirstPollOffsetStrategy getFirstPollOffsetStrategy() { return firstPollOffsetStrategy; } public int getMaxUncommittedOffsets() { return maxUncommittedOffsets; } public KafkaSpoutRetryService getRetryService() { return retryService; } public KafkaTupleListener getTupleListener() { return tupleListener; } public long 
getPartitionRefreshPeriodMs() { return partitionRefreshPeriodMs; } public boolean isEmitNullTuples() { return emitNullTuples; } @Override public String toString() { return "KafkaSpoutConfig{" + "kafkaProps=" + kafkaProps + ", pollTimeoutMs=" + pollTimeoutMs + ", offsetCommitPeriodMs=" + offsetCommitPeriodMs + ", maxUncommittedOffsets=" + maxUncommittedOffsets + ", firstPollOffsetStrategy=" + firstPollOffsetStrategy + ", subscription=" + subscription + ", translator=" + translator + ", retryService=" + retryService + ", tupleListener=" + tupleListener + '}'; } }
package io.github.luizgrp.sectionedrecyclerviewadapter.compat;

import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.Spy;

import io.github.luizgrp.sectionedrecyclerviewadapter.Section;
import io.github.luizgrp.sectionedrecyclerviewadapter.SectionAdapter;

import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Unit tests for {@link SectionedRecyclerViewAdapterV2Compat}.
 *
 * <p>Every test verifies the same delegation pattern: a compat method taking either a
 * {@link Section} instance or a String tag must forward to the corresponding method on the
 * {@link SectionAdapter} resolved via {@code getAdapterForSection(...)} (stubbed in
 * {@link #setUp()}), and, where applicable, return that adapter's result unchanged.
 */
public class SectionedRecyclerViewAdapterV2CompatTest {

    private final String tag = "tag";

    @Mock
    private Section section;
    @Mock
    private SectionAdapter sectionAdapter;
    // Spied class under test: only getAdapterForSection is stubbed; the compat methods
    // themselves run their real implementations.
    @Spy
    private SectionedRecyclerViewAdapterV2Compat cut;

    @Before
    public void setUp() {
        MockitoAnnotations.initMocks(this);
        // Both lookup overloads (by tag and by Section) resolve to the same mock adapter,
        // so tag-based and section-based tests can share identical verifications.
        doReturn(sectionAdapter).when(cut).getAdapterForSection(tag);
        doReturn(sectionAdapter).when(cut).getAdapterForSection(section);
    }

    // --- position getters ---

    @Test
    public void givenSection_whenGetFooterPositionInAdapterWithSection_thenCallsNewClass() {
        // Given
        final int index = 10;
        when(sectionAdapter.getFooterPosition()).thenReturn(index);

        // When
        int result = cut.getFooterPositionInAdapter(section);

        // Then
        verify(sectionAdapter).getFooterPosition();
        assertThat(result, is(index));
    }

    @Test
    public void givenTag_whenGetFooterPositionInAdapterWithTag_thenCallsNewClass() {
        // Given
        final int index = 10;
        when(sectionAdapter.getFooterPosition()).thenReturn(index);

        // When
        int result = cut.getFooterPositionInAdapter(tag);

        // Then
        verify(sectionAdapter).getFooterPosition();
        assertThat(result, is(index));
    }

    @Test
    public void givenSection_whenGetHeaderPositionInAdapterWithSection_thenCallsNewClass() {
        // Given
        final int index = 10;
        when(sectionAdapter.getHeaderPosition()).thenReturn(index);

        // When
        int result = cut.getHeaderPositionInAdapter(section);

        // Then
        verify(sectionAdapter).getHeaderPosition();
        assertThat(result, is(index));
    }

    @Test
    public void givenTag_whenGetHeaderPositionInAdapterWithTag_thenCallsNewClass() {
        // Given
        final int index = 10;
        when(sectionAdapter.getHeaderPosition()).thenReturn(index);

        // When
        int result = cut.getHeaderPositionInAdapter(tag);

        // Then
        verify(sectionAdapter).getHeaderPosition();
        assertThat(result, is(index));
    }

    @Test
    public void givenSection_whenGetPositionInAdapter_thenCallsNewClass() {
        // Given
        final int position = 10;
        final int adapterPosition = 20;
        when(sectionAdapter.getPositionInAdapter(position)).thenReturn(adapterPosition);

        // When
        int result = cut.getPositionInAdapter(section, position);

        // Then
        verify(sectionAdapter).getPositionInAdapter(position);
        assertThat(result, is(adapterPosition));
    }

    @Test
    public void givenTag_whenGetPositionInAdapter_thenCallsNewClass() {
        // Given
        final int position = 10;
        final int adapterPosition = 20;
        when(sectionAdapter.getPositionInAdapter(position)).thenReturn(adapterPosition);

        // When
        int result = cut.getPositionInAdapter(tag, position);

        // Then
        verify(sectionAdapter).getPositionInAdapter(position);
        assertThat(result, is(adapterPosition));
    }

    @Test
    public void givenSection_whenGetSectionPosition_thenCallsNewClass() {
        // Given
        final int position = 10;
        when(sectionAdapter.getSectionPosition()).thenReturn(position);

        // When
        int result = cut.getSectionPosition(section);

        // Then
        verify(sectionAdapter).getSectionPosition();
        assertThat(result, is(position));
    }

    @Test
    public void givenTag_whenGetSectionPosition_thenCallsNewClass() {
        // Given
        final int position = 10;
        when(sectionAdapter.getSectionPosition()).thenReturn(position);

        // When
        int result = cut.getSectionPosition(tag);

        // Then
        verify(sectionAdapter).getSectionPosition();
        assertThat(result, is(position));
    }

    // --- notify* delegation (no return value, verification only) ---

    @Test
    public void givenSection_whenNotifyAllItemsChangedInSection_thenCallsNewClass() {
        // When
        cut.notifyAllItemsChangedInSection(section);

        // Then
        verify(sectionAdapter).notifyAllItemsChanged();
    }

    @Test
    public void givenTag_whenNotifyAllItemsChangedInSection_thenCallsNewClass() {
        // When
        cut.notifyAllItemsChangedInSection(tag);

        // Then
        verify(sectionAdapter).notifyAllItemsChanged();
    }

    @Test
    public void givenSection_whenNotifyAllItemsInsertedInSection_thenCallsNewClass() {
        // When
        cut.notifyAllItemsInsertedInSection(section);

        // Then
        verify(sectionAdapter).notifyAllItemsInserted();
    }

    @Test
    public void givenTag_whenNotifyAllItemsInsertedInSection_thenCallsNewClass() {
        // When
        cut.notifyAllItemsInsertedInSection(tag);

        // Then
        verify(sectionAdapter).notifyAllItemsInserted();
    }

    @Test
    public void givenSection_whenNotifyFooterChangedInSection_thenCallsNewClass() {
        // When
        cut.notifyFooterChangedInSection(section);

        // Then
        verify(sectionAdapter).notifyFooterChanged();
    }

    @Test
    public void givenTag_whenNotifyFooterChangedInSection_thenCallsNewClass() {
        // When
        cut.notifyFooterChangedInSection(tag);

        // Then
        verify(sectionAdapter).notifyFooterChanged();
    }

    @Test
    public void givenSection_whenNotifyFooterInsertedInSection_thenCallsNewClass() {
        // When
        cut.notifyFooterInsertedInSection(section);

        // Then
        verify(sectionAdapter).notifyFooterInserted();
    }

    @Test
    public void givenTag_whenNotifyFooterInsertedInSection_thenCallsNewClass() {
        // When
        cut.notifyFooterInsertedInSection(tag);

        // Then
        verify(sectionAdapter).notifyFooterInserted();
    }

    @Test
    public void givenSection_whenNotifyFooterRemovedFromSection_thenCallsNewClass() {
        // When
        cut.notifyFooterRemovedFromSection(section);

        // Then
        verify(sectionAdapter).notifyFooterRemoved();
    }

    @Test
    public void givenTag_whenNotifyFooterRemovedFromSection_thenCallsNewClass() {
        // When
        cut.notifyFooterRemovedFromSection(tag);

        // Then
        verify(sectionAdapter).notifyFooterRemoved();
    }

    @Test
    public void givenSection_whenNotifyHeaderChangedInSection_thenCallsNewClass() {
        // When
        cut.notifyHeaderChangedInSection(section);

        // Then
        verify(sectionAdapter).notifyHeaderChanged();
    }

    @Test
    public void givenTag_whenNotifyHeaderChangedInSection_thenCallsNewClass() {
        // When
        cut.notifyHeaderChangedInSection(tag);

        // Then
        verify(sectionAdapter).notifyHeaderChanged();
    }

    @Test
    public void givenSection_whenNotifyHeaderInsertedInSection_thenCallsNewClass() {
        // When
        cut.notifyHeaderInsertedInSection(section);

        // Then
        verify(sectionAdapter).notifyHeaderInserted();
    }

    @Test
    public void givenTag_whenNotifyHeaderInsertedInSection_thenCallsNewClass() {
        // When
        cut.notifyHeaderInsertedInSection(tag);

        // Then
        verify(sectionAdapter).notifyHeaderInserted();
    }

    @Test
    public void givenSection_whenNotifyHeaderRemovedFromSection_thenCallsNewClass() {
        // When
        cut.notifyHeaderRemovedFromSection(section);

        // Then
        verify(sectionAdapter).notifyHeaderRemoved();
    }

    @Test
    public void givenTag_whenNotifyHeaderRemovedFromSection_thenCallsNewClass() {
        // When
        cut.notifyHeaderRemovedFromSection(tag);

        // Then
        verify(sectionAdapter).notifyHeaderRemoved();
    }

    @Test
    public void givenSection_whenNotifyItemChangedInSection_thenCallsNewClass() {
        // Given
        final int position = 10;

        // When
        cut.notifyItemChangedInSection(section, position);

        // Then
        verify(sectionAdapter).notifyItemChanged(position);
    }

    @Test
    public void givenTag_whenNotifyItemChangedInSection_thenCallsNewClass() {
        // Given
        final int position = 10;

        // When
        cut.notifyItemChangedInSection(tag, position);

        // Then
        verify(sectionAdapter).notifyItemChanged(position);
    }

    @Test
    public void givenSection_whenNotifyItemInsertedInSection_thenCallsNewClass() {
        // Given
        final int position = 10;

        // When
        cut.notifyItemInsertedInSection(section, position);

        // Then
        verify(sectionAdapter).notifyItemInserted(position);
    }

    @Test
    public void givenTag_whenNotifyItemInsertedInSection_thenCallsNewClass() {
        // Given
        final int position = 10;

        // When
        cut.notifyItemInsertedInSection(tag, position);

        // Then
        verify(sectionAdapter).notifyItemInserted(position);
    }

    @Test
    public void givenSection_whenNotifyItemMovedInSection_thenCallsNewClass() {
        // Given
        final int fromPosition = 10;
        final int toPosition = 20;

        // When
        cut.notifyItemMovedInSection(section, fromPosition, toPosition);

        // Then
        verify(sectionAdapter).notifyItemMoved(fromPosition, toPosition);
    }

    @Test
    public void givenTag_whenNotifyItemMovedInSection_thenCallsNewClass() {
        // Given
        final int fromPosition = 10;
        final int toPosition = 20;

        // When
        cut.notifyItemMovedInSection(tag, fromPosition, toPosition);

        // Then
        verify(sectionAdapter).notifyItemMoved(fromPosition, toPosition);
    }

    @Test
    public void givenSection_whenNotifyItemRangeChangedInSection_thenCallsNewClass() {
        // Given
        final int positionStart = 10;
        final int itemCount = 20;

        // When
        cut.notifyItemRangeChangedInSection(section, positionStart, itemCount);

        // Then
        verify(sectionAdapter).notifyItemRangeChanged(positionStart, itemCount);
    }

    @Test
    public void givenSectionAndPayload_whenNotifyItemRangeChangedInSection_thenCallsNewClass() {
        // Given
        final int positionStart = 10;
        final int itemCount = 20;
        final Object payload = new Object();

        // When
        cut.notifyItemRangeChangedInSection(section, positionStart, itemCount, payload);

        // Then
        verify(sectionAdapter).notifyItemRangeChanged(positionStart, itemCount, payload);
    }

    @Test
    public void givenTag_whenNotifyItemRangeChangedInSection_thenCallsNewClass() {
        // Given
        final int positionStart = 10;
        final int itemCount = 20;

        // When
        cut.notifyItemRangeChangedInSection(tag, positionStart, itemCount);

        // Then
        verify(sectionAdapter).notifyItemRangeChanged(positionStart, itemCount);
    }

    @Test
    public void givenTagAndPayload_whenNotifyItemRangeChangedInSection_thenCallsNewClass() {
        // Given
        final int positionStart = 10;
        final int itemCount = 20;
        final Object payload = new Object();

        // When
        cut.notifyItemRangeChangedInSection(tag, positionStart, itemCount, payload);

        // Then
        verify(sectionAdapter).notifyItemRangeChanged(positionStart, itemCount, payload);
    }

    @Test
    public void givenSection_whenNotifyItemRangeInsertedInSection_thenCallsNewClass() {
        // Given
        final int positionStart = 10;
        final int itemCount = 20;

        // When
        cut.notifyItemRangeInsertedInSection(section, positionStart, itemCount);

        // Then
        verify(sectionAdapter).notifyItemRangeInserted(positionStart, itemCount);
    }

    @Test
    public void givenTag_whenNotifyItemRangeInsertedInSection_thenCallsNewClass() {
        // Given
        final int positionStart = 10;
        final int itemCount = 20;

        // When
        cut.notifyItemRangeInsertedInSection(tag, positionStart, itemCount);

        // Then
        verify(sectionAdapter).notifyItemRangeInserted(positionStart, itemCount);
    }

    @Test
    public void givenSection_whenNotifyItemRangeRemovedFromSection_thenCallsNewClass() {
        // Given
        final int positionStart = 10;
        final int itemCount = 20;

        // When
        cut.notifyItemRangeRemovedFromSection(section, positionStart, itemCount);

        // Then
        verify(sectionAdapter).notifyItemRangeRemoved(positionStart, itemCount);
    }

    @Test
    public void givenTag_whenNotifyItemRangeRemovedFromSection_thenCallsNewClass() {
        // Given
        final int positionStart = 10;
        final int itemCount = 20;

        // When
        cut.notifyItemRangeRemovedFromSection(tag, positionStart, itemCount);

        // Then
        verify(sectionAdapter).notifyItemRangeRemoved(positionStart, itemCount);
    }

    @Test
    public void givenSection_whenNotifyItemRemovedFromSection_thenCallsNewClass() {
        // Given
        final int position = 10;

        // When
        cut.notifyItemRemovedFromSection(section, position);

        // Then
        verify(sectionAdapter).notifyItemRemoved(position);
    }

    @Test
    public void givenTag_whenNotifyItemRemovedFromSection_thenCallsNewClass() {
        // Given
        final int position = 10;

        // When
        cut.notifyItemRemovedFromSection(tag, position);

        // Then
        verify(sectionAdapter).notifyItemRemoved(position);
    }

    @Test
    public void givenSection_whenNotifyNotLoadedStateChanged_thenCallsNewClass() {
        // Given
        final Section.State previousState = Section.State.EMPTY;

        // When
        cut.notifyNotLoadedStateChanged(section, previousState);

        // Then
        verify(sectionAdapter).notifyNotLoadedStateChanged(previousState);
    }

    @Test
    public void givenTag_whenNotifyNotLoadedStateChanged_thenCallsNewClass() {
        // Given
        final Section.State previousState = Section.State.EMPTY;

        // When
        cut.notifyNotLoadedStateChanged(tag, previousState);

        // Then
        verify(sectionAdapter).notifyNotLoadedStateChanged(previousState);
    }

    @Test
    public void givenSection_whenNotifySectionChangedToInvisible_thenCallsNewClass() {
        // Given
        final int previousSectionPosition = 10;

        // When
        cut.notifySectionChangedToInvisible(section, previousSectionPosition);

        // Then
        verify(sectionAdapter).notifySectionChangedToInvisible(previousSectionPosition);
    }

    @Test
    public void givenTag_whenNotifySectionChangedToInvisible_thenCallsNewClass() {
        // Given
        final int previousSectionPosition = 10;

        // When
        cut.notifySectionChangedToInvisible(tag, previousSectionPosition);

        // Then
        verify(sectionAdapter).notifySectionChangedToInvisible(previousSectionPosition);
    }

    @Test
    public void givenSection_whenNotifySectionChangedToVisible_thenCallsNewClass() {
        // When
        cut.notifySectionChangedToVisible(section);

        // Then
        verify(sectionAdapter).notifySectionChangedToVisible();
    }

    @Test
    public void givenTag_whenNotifySectionChangedToVisible_thenCallsNewClass() {
        // When
        cut.notifySectionChangedToVisible(tag);

        // Then
        verify(sectionAdapter).notifySectionChangedToVisible();
    }

    @Test
    public void givenSection_whenNotifyStateChangedFromLoaded_thenCallsNewClass() {
        // Given
        final int previousSectionPosition = 10;

        // When
        cut.notifyStateChangedFromLoaded(section, previousSectionPosition);

        // Then
        verify(sectionAdapter).notifyStateChangedFromLoaded(previousSectionPosition);
    }

    @Test
    public void givenTag_whenNotifyStateChangedFromLoaded_thenCallsNewClass() {
        // Given
        final int previousSectionPosition = 10;

        // When
        cut.notifyStateChangedFromLoaded(tag, previousSectionPosition);

        // Then
        verify(sectionAdapter).notifyStateChangedFromLoaded(previousSectionPosition);
    }

    @Test
    public void givenSection_whenNotifyStateChangedToLoaded_thenCallsNewClass() {
        // Given
        final Section.State previousState = Section.State.EMPTY;

        // When
        cut.notifyStateChangedToLoaded(section, previousState);

        // Then
        verify(sectionAdapter).notifyStateChangedToLoaded(previousState);
    }

    @Test
    public void givenTag_whenNotifyStateChangedToLoaded_thenCallsNewClass() {
        // Given
        final Section.State previousState = Section.State.EMPTY;

        // When
        cut.notifyStateChangedToLoaded(tag, previousState);

        // Then
        verify(sectionAdapter).notifyStateChangedToLoaded(previousState);
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.datanode.checker; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyObject; import static org.mockito.Matchers.anySet; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.timeout; import static org.mockito.Mockito.verify; import com.google.common.base.Optional; import com.google.common.util.concurrent.FutureCallback; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.LogVerificationAppender; import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi; import org.apache.hadoop.util.FakeTimer; import org.apache.log4j.Logger; import org.apache.log4j.spi.LoggingEvent; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.*; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestName; import org.junit.rules.Timeout; import org.slf4j.LoggerFactory; import java.util.Set; 
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantLock;

/**
 * Tests the timeout behaviour of {@code ThrottledAsyncChecker}.
 *
 * <p>Each test schedules a {@link DummyCheckable} whose {@code check()} blocks on
 * {@link #lock} until the test releases it; holding the lock past
 * {@link #DISK_CHECK_TIMEOUT} is how a "hung disk check" is simulated.
 */
public class TestThrottledAsyncCheckerTimeout {
  public static final org.slf4j.Logger LOG =
      LoggerFactory.getLogger(TestThrottledAsyncCheckerTimeout.class);

  @Rule
  public TestName testName = new TestName();
  // Hard upper bound so a wedged check cannot hang the test suite.
  @Rule
  public Timeout testTimeout = new Timeout(300_000);

  // Timeout (ms) after which a scheduled check is failed with TimeoutException.
  private static final long DISK_CHECK_TIMEOUT = 10;
  // Shared between the test thread and DummyCheckable.check(); re-created per test.
  private ReentrantLock lock;

  private ExecutorService getExecutorService() {
    return new ScheduledThreadPoolExecutor(1);
  }

  @Before
  public void initializeLock() {
    lock = new ReentrantLock();
  }

  /**
   * A check that never returns (blocked on {@link #lock}) must invoke the failure
   * callback exactly once with a {@link TimeoutException}, and never the success one.
   */
  @Test
  public void testDiskCheckTimeout() throws Exception {
    LOG.info("Executing {}", testName.getMethodName());
    final DummyCheckable target = new DummyCheckable();
    final FakeTimer timer = new FakeTimer();
    ThrottledAsyncChecker<Boolean, Boolean> checker =
        new ThrottledAsyncChecker<>(timer, 0, DISK_CHECK_TIMEOUT,
            getExecutorService());

    // Acquire lock to halt checker. Release after timeout occurs.
    lock.lock();

    final Optional<ListenableFuture<Boolean>> olf = checker
        .schedule(target, true);

    final AtomicLong numCallbackInvocationsSuccess = new AtomicLong(0);
    final AtomicLong numCallbackInvocationsFailure = new AtomicLong(0);
    AtomicBoolean callbackResult = new AtomicBoolean(false);
    final Throwable[] throwable = new Throwable[1];

    assertTrue(olf.isPresent());
    Futures.addCallback(olf.get(), new FutureCallback<Boolean>() {
      @Override
      public void onSuccess(Boolean result) {
        numCallbackInvocationsSuccess.incrementAndGet();
        callbackResult.set(true);
      }

      @Override
      public void onFailure(Throwable t) {
        // Capture the failure cause so the main thread can assert its type.
        throwable[0] = t;
        numCallbackInvocationsFailure.incrementAndGet();
        callbackResult.set(true);
      }
    });

    while (!callbackResult.get()) {
      // Wait for the callback
      Thread.sleep(DISK_CHECK_TIMEOUT);
    }

    // Unblock the (still running) check; the future has already timed out by now.
    lock.unlock();

    assertThat(numCallbackInvocationsFailure.get(), is(1L));
    assertThat(numCallbackInvocationsSuccess.get(), is(0L));
    assertTrue(throwable[0] instanceof TimeoutException);
  }

  /**
   * A timed-out check must fire onFailure exactly once, and a later successful
   * re-check of the same target must fire onSuccess without repeating onFailure.
   */
  @Test
  public void testDiskCheckTimeoutInvokesOneCallbackOnly() throws Exception {
    LOG.info("Executing {}", testName.getMethodName());
    final DummyCheckable target = new DummyCheckable();
    final FakeTimer timer = new FakeTimer();
    ThrottledAsyncChecker<Boolean, Boolean> checker =
        new ThrottledAsyncChecker<>(timer, 0, DISK_CHECK_TIMEOUT,
            getExecutorService());
    FutureCallback<Boolean> futureCallback = mock(FutureCallback.class);

    // Acquire lock to halt disk checker. Release after timeout occurs.
    lock.lock();

    final Optional<ListenableFuture<Boolean>> olf1 = checker
        .schedule(target, true);

    assertTrue(olf1.isPresent());
    Futures.addCallback(olf1.get(), futureCallback);

    // Verify that timeout results in only 1 onFailure call and 0 onSuccess
    // calls.
    verify(futureCallback, timeout((int) DISK_CHECK_TIMEOUT*10).times(1))
        .onFailure(any());
    verify(futureCallback, timeout((int) DISK_CHECK_TIMEOUT*10).times(0))
        .onSuccess(any());

    // Release lock so that target can acquire it.
    lock.unlock();

    final Optional<ListenableFuture<Boolean>> olf2 = checker
        .schedule(target, true);

    assertTrue(olf2.isPresent());
    Futures.addCallback(olf2.get(), futureCallback);

    // Verify that normal check (dummy) results in only 1 onSuccess call.
    // Number of times onFailure is invoked should remain the same i.e. 1.
    verify(futureCallback, timeout((int) DISK_CHECK_TIMEOUT*10).times(1))
        .onFailure(any());
    verify(futureCallback, timeout((int) DISK_CHECK_TIMEOUT*10).times(1))
        .onSuccess(any());
  }

  /**
   * A check that completes promptly (lock is free) must not be failed with a
   * {@link TimeoutException}: the captured throwable stays null.
   */
  @Test
  public void testTimeoutExceptionIsNotThrownForGoodDisk() throws Exception {
    LOG.info("Executing {}", testName.getMethodName());
    final DummyCheckable target = new DummyCheckable();
    final FakeTimer timer = new FakeTimer();
    ThrottledAsyncChecker<Boolean, Boolean> checker =
        new ThrottledAsyncChecker<>(timer, 0, DISK_CHECK_TIMEOUT,
            getExecutorService());

    final Optional<ListenableFuture<Boolean>> olf = checker
        .schedule(target, true);

    AtomicBoolean callbackResult = new AtomicBoolean(false);
    final Throwable[] throwable = new Throwable[1];

    assertTrue(olf.isPresent());
    Futures.addCallback(olf.get(), new FutureCallback<Boolean>() {
      @Override
      public void onSuccess(Boolean result) {
        callbackResult.set(true);
      }

      @Override
      public void onFailure(Throwable t) {
        throwable[0] = t;
        callbackResult.set(true);
      }
    });

    while (!callbackResult.get()) {
      // Wait for the callback
      Thread.sleep(DISK_CHECK_TIMEOUT);
    }

    assertTrue(throwable[0] == null);
  }

  /**
   * A dummy Checkable that just returns true after acquiring lock.
   */
  protected class DummyCheckable implements Checkable<Boolean,Boolean> {

    @Override
    public Boolean check(Boolean context) throws Exception {
      // Wait to acquire lock
      lock.lock();
      lock.unlock();
      return true;
    }
  }
}
/* * Copyright (C) 2017 grandcentrix GmbH * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.grandcentrix.thirtyinch.internal; import net.grandcentrix.thirtyinch.TiConfiguration; import net.grandcentrix.thirtyinch.TiPresenter; import net.grandcentrix.thirtyinch.TiView; import org.junit.Test; import android.support.annotation.NonNull; import android.view.LayoutInflater; import static org.assertj.core.api.Java6Assertions.assertThat; import static org.mockito.Mockito.mock; /** * Same test cases as {@link SingleTiFragmentPresenterDestroyTest} but with the "don't keep * Activities option enabled". This means that the lifecycle may be different but the final * assertions must be identical */ public class SingleTiFragmentPresenterDestroyTestIgnoreKeepDontKeepActivities extends AbstractPresenterDestroyTest { /** * Activity changing configuration without retain (don't keep Activities enabled) */ @Test public void activityChangingConfiguration_retainFalse_dkATrue() { final HostingActivity hostingActivity = new HostingActivity(); // Given a Presenter that does use a static savior but does not retain itself. final TestPresenter presenter = new TestPresenter(new TiConfiguration.Builder() .setRetainPresenterEnabled(false) .build()); // And given a Fragment. final TestTiFragment fragment = new TestTiFragment.Builder() .setHostingActivity(hostingActivity) .setSavior(mSavior) .setPresenter(presenter) .build(); // When the Fragment is added to the Activity. 
fragment.onCreate(null); fragment.setAdded(true); fragment.onCreateView(mock(LayoutInflater.class), null, null); fragment.onStart(); // Then the presenter will *not* be stored in the savior assertThat(mSavior.getPresenterCount()).isEqualTo(0); assertThat(mSavior.mActivityInstanceObserver).isNull(); // And when the Activity is changing its configuration. fragment.onSaveInstanceState(mFragmentSavedState); fragment.onStop(); fragment.onDestroyView(); fragment.onDestroy(); // Then assert that the presenter is destroyed and not saved in the savior. assertThat(fragment.getPresenter().isDestroyed()).isTrue(); assertThat(mSavior.getPresenterCount()).isEqualTo(0); assertThat(mSavior.mActivityInstanceObserver).isNull(); // When the Activity is recreated. final HostingActivity hostingActivity2 = new HostingActivity(); // And generates a new Fragment instance. final TestPresenter presenter2 = new TestPresenter(new TiConfiguration.Builder() .setRetainPresenterEnabled(false) .build()); final TestTiFragment fragment2 = new TestTiFragment.Builder() .setHostingActivity(hostingActivity2) .setSavior(mSavior) .setPresenter(presenter2) .build(); // And the fragment will be resumed fragment2.setAdded(true); fragment2.onCreate(mFragmentSavedState); fragment2.onCreateView(mock(LayoutInflater.class), null, mFragmentSavedState); fragment2.onStart(); // Then a new Presenter instance will be generated and the old presenter isn't used assertThat(fragment2.getPresenter()).isNotEqualTo(presenter).isEqualTo(presenter2); assertThat(mSavior.getPresenterCount()).isEqualTo(0); } /** * Activity changing configuration Default config (don't keep Activities enabled) */ @Test public void activityChangingConfiguration_retainTrue_dkATrue() { final HostingActivity hostingActivity = new HostingActivity(); // Given a Presenter that uses a static savior to retain itself. 
final TestPresenter presenter = new TestPresenter(new TiConfiguration.Builder() .setRetainPresenterEnabled(true) .build()); // And given a Fragment. final TestTiFragment fragment = new TestTiFragment.Builder() .setHostingActivity(hostingActivity) .setSavior(mSavior) .setPresenter(presenter) .build(); // When the fragment is added to the activity. fragment.onCreate(null); fragment.setAdded(true); fragment.onCreateView(mock(LayoutInflater.class), null, null); fragment.onStart(); // Then the presenter will be stored in the savior assertThat(mSavior.getPresenterCount()).isEqualTo(1); // And when the Activity is changing its configuration. mSavior.mActivityInstanceObserver.onActivitySaveInstanceState( hostingActivity.getMockActivityInstance(), mActivitySavedState); fragment.onSaveInstanceState(mFragmentSavedState); fragment.onStop(); fragment.onDestroyView(); fragment.onDestroy(); // Then the presenter will be retained and saved in the savior. assertThat(fragment.getPresenter().isDestroyed()).isFalse(); assertThat(mSavior.getPresenterCount()).isEqualTo(1); // When the Activity is recreated. final HostingActivity hostingActivity2 = new HostingActivity(); mSavior.mActivityInstanceObserver.onActivityCreated( hostingActivity2.getMockActivityInstance(), mActivitySavedState); // And generates a new Fragment instance. 
final TestPresenter presenter2 = new TestPresenter(new TiConfiguration.Builder() .setRetainPresenterEnabled(false) .build()); final TestTiFragment fragment2 = new TestTiFragment.Builder() .setHostingActivity(hostingActivity2) .setSavior(mSavior) .setPresenter(presenter2) .build(); // And the fragment will be resumed fragment2.setAdded(true); fragment2.onCreate(mFragmentSavedState); fragment2.onCreateView(mock(LayoutInflater.class), null, mFragmentSavedState); fragment2.onStart(); // Then the Presenter is the same assertThat(fragment.getPresenter().isDestroyed()).isFalse(); assertThat(fragment.getPresenter()).isEqualTo(presenter); assertThat(mSavior.getPresenterCount()).isEqualTo(1); } /** * Activity finishing without retain (don't keep Activities enabled) */ @Test public void activityFinishing_retainFalse_dkATrue() { final HostingActivity hostingActivity = new HostingActivity(); // Given a Presenter without retain. final TestPresenter presenter = new TestPresenter(new TiConfiguration.Builder() .setRetainPresenterEnabled(false) .build()); // And given a Fragment. final TestTiFragment fragment = new TestTiFragment.Builder() .setHostingActivity(hostingActivity) .setSavior(mSavior) .setPresenter(presenter) .build(); // When the Fragment is added to the Activity. fragment.onCreate(null); fragment.setAdded(true); fragment.onCreateView(mock(LayoutInflater.class), null, null); fragment.onStart(); // Then the presenter will *not* be stored in the savior assertThat(mSavior.getPresenterCount()).isEqualTo(0); // And when the Activity is finishing. hostingActivity.setFinishing(true); assertThat(mSavior.mActivityInstanceObserver).isNull(); fragment.onStop(); fragment.onDestroyView(); fragment.onDestroy(); // Then assert that the Presenter is destroyed and not saved in the savior. 
assertThat(fragment.getPresenter().isDestroyed()).isTrue(); assertThat(mSavior.getPresenterCount()).isEqualTo(0); } /** * Activity finish Default config (don't keep Activities enabled) */ @Test public void activityFinishing_retainTrue_dkATrue() { final HostingActivity hostingActivity = new HostingActivity(); // Check that the default config matches this test case final TiConfiguration config = new TiConfiguration.Builder() .setRetainPresenterEnabled(true) .build(); assertThat(TiConfiguration.DEFAULT).isEqualTo(config); // Given a Presenter that uses a static savior to retain itself (default config). final TestPresenter presenter = new TestPresenter(config); // And given a Fragment. final TestTiFragment fragment = new TestTiFragment.Builder() .setHostingActivity(hostingActivity) .setSavior(mSavior) .setPresenter(presenter) .build(); // When the Fragment is added to the Activity. fragment.onCreate(null); fragment.setAdded(true); fragment.onCreateView(mock(LayoutInflater.class), null, null); fragment.onStart(); // Then the presenter will be stored in the savior assertThat(mSavior.getPresenterCount()).isEqualTo(1); // And when the Activity is finishing. hostingActivity.setFinishing(true); mSavior.mActivityInstanceObserver.onActivityDestroyed( hostingActivity.getMockActivityInstance()); fragment.onStop(); fragment.onDestroyView(); fragment.onDestroy(); // Then the presenter is destroyed and not saved in the savior. assertThat(fragment.getPresenter().isDestroyed()).isTrue(); assertThat(mSavior.getPresenterCount()).isEqualTo(0); } /** * Activity move to background -> move to foreground without retain(don't keep Activities * enabled) */ @Test public void moveToBackground_moveToForeground_retainFalse_dkATrue() { final HostingActivity hostingActivity = new HostingActivity(); // Given a Presenter that does use a static savior but does not retain itself. 
// NOTE(review): this is the tail of a test method whose signature sits above this chunk.
// It verifies that a presenter configured with retain=false is NOT restored after the
// hosting Activity is recreated — a brand-new presenter instance is used instead.
final TestPresenter presenter = new TestPresenter(new TiConfiguration.Builder()
        .setRetainPresenterEnabled(false)
        .build());

// And given a Fragment.
final TestTiFragment fragment = new TestTiFragment.Builder()
        .setHostingActivity(hostingActivity)
        .setSavior(mSavior)
        .setPresenter(presenter)
        .build();

// When the Fragment is added to the Activity.
fragment.onCreate(null);
fragment.setAdded(true);
fragment.onCreateView(mock(LayoutInflater.class), null, null);
fragment.onStart();

// Then the Presenter will *not* be stored in the savior
assertThat(mSavior.getPresenterCount()).isEqualTo(0);
assertThat(mSavior.mActivityInstanceObserver).isNull();

// When the Activity is moved to background
fragment.onSaveInstanceState(mFragmentSavedState);
fragment.onStop();
fragment.onDestroyView();
fragment.onDestroy();

// Then the Presenter gets destroyed.
assertThat(fragment.getPresenter().isDestroyed()).isTrue();
assertThat(mSavior.getPresenterCount()).isEqualTo(0);

// When the Activity is recreated.
final HostingActivity hostingActivity2 = new HostingActivity();

// And generates a new Fragment instance.
final TestPresenter presenter2 = new TestPresenter(new TiConfiguration.Builder()
        .setRetainPresenterEnabled(false)
        .build());
final TestTiFragment fragment2 = new TestTiFragment.Builder()
        .setHostingActivity(hostingActivity2)
        .setSavior(mSavior)
        .setPresenter(presenter2)
        .build();
fragment2.setAdded(true);
fragment2.onCreate(mFragmentSavedState);
fragment2.onCreateView(mock(LayoutInflater.class), null, mFragmentSavedState);
fragment2.onStart();

// Then the new Presenter does not equals the previous Presenter.
assertThat(fragment2.getPresenter()).isNotEqualTo(presenter).isEqualTo(presenter2);
assertThat(mSavior.getPresenterCount()).isEqualTo(0);
}

/**
 * Activity move to background -> move to foreground Default config (don't keep Activities
 * enabled)
 */
@Test
public void moveToBackground_moveToForeground_retainTrue_dkATrue() {
    final HostingActivity hostingActivity = new HostingActivity();

    // Given a Presenter that uses the static savior and retains itself
    // (retain=true — the previous comment claiming "does not retain itself" was stale).
    final TestPresenter presenter = new TestPresenter(new TiConfiguration.Builder()
            .setRetainPresenterEnabled(true)
            .build());

    // And given a Fragment.
    final TestTiFragment fragment = new TestTiFragment.Builder()
            .setHostingActivity(hostingActivity)
            .setSavior(mSavior)
            .setPresenter(presenter)
            .build();

    // When the Fragment is added to the Activity.
    fragment.onCreate(null);
    fragment.setAdded(true);
    fragment.onCreateView(mock(LayoutInflater.class), null, null);
    fragment.onStart();

    // Then the presenter will be stored in the savior
    assertThat(mSavior.getPresenterCount()).isEqualTo(1);

    // When the Activity gets moved to background
    mSavior.mActivityInstanceObserver.onActivitySaveInstanceState(
            hostingActivity.getMockActivityInstance(), mActivitySavedState);
    fragment.onSaveInstanceState(mFragmentSavedState);
    fragment.onStop();
    fragment.onDestroyView();
    fragment.onDestroy();

    // Then the presenter stays alive and is saved in the savior.
    assertThat(fragment.getPresenter().isDestroyed()).isFalse();
    assertThat(mSavior.getPresenterCount()).isEqualTo(1);

    // When the Activity moves to foreground again
    // A new Activity gets created by the Android Framework.
    final HostingActivity hostingActivity2 = new HostingActivity();
    mSavior.mActivityInstanceObserver.onActivityCreated(
            hostingActivity2.getMockActivityInstance(), mActivitySavedState);

    // And generates a new Fragment instance.
    // NOTE(review): presenter2 is a decoy — it is expected to be ignored because the
    // retained presenter is recovered from the savior (asserted below).
    final TestPresenter presenter2 = new TestPresenter(new TiConfiguration.Builder()
            .setRetainPresenterEnabled(false)
            .build());
    final TestTiFragment fragment2 = new TestTiFragment.Builder()
            .setHostingActivity(hostingActivity2)
            .setSavior(mSavior)
            .setPresenter(presenter2)
            .build();
    fragment2.setAdded(true);
    fragment2.onCreate(mFragmentSavedState);
    fragment2.onCreateView(mock(LayoutInflater.class), null, mFragmentSavedState);
    fragment2.onStart();

    // Then the Presenter is the same as in the previous fragment instance
    assertThat(fragment2.getPresenter()).isNotEqualTo(presenter2).isEqualTo(presenter);
    assertThat(fragment2.getPresenter().isDestroyed()).isFalse();
    assertThat(mSavior.getPresenterCount()).isEqualTo(1);
}

/**
 * removed the added fragment from the Activity without retain (don't keep Activities enabled)
 */
@Test
public void remove_fragment_retainFalse_dkATrue() {
    final HostingActivity hostingActivity = new HostingActivity();

    // Given a Presenter does not retain itself.
    final TestPresenter presenter = new TestPresenter(new TiConfiguration.Builder()
            .setRetainPresenterEnabled(false)
            .build());

    // And given a Fragment.
    final TestTiFragment fragment = new TestTiFragment.Builder()
            .setHostingActivity(hostingActivity)
            .setSavior(mSavior)
            .setPresenter(presenter)
            .build();

    // When the Fragment is added to the Activity.
    fragment.onCreate(null);
    fragment.setAdded(true);
    fragment.onCreateView(mock(LayoutInflater.class), null, null);
    fragment.onStart();

    // Then the presenter will not be stored in the savior
    assertThat(mSavior.getPresenterCount()).isEqualTo(0);

    // When the fragment will be removed
    fragment.setRemoving(true);
    fragment.onStop();
    fragment.onDestroyView();
    fragment.onDestroy();

    // Then the presenter is destroyed and not saved
    assertThat(fragment.getPresenter().isDestroyed()).isTrue();
    assertThat(mSavior.getPresenterCount()).isEqualTo(0);
}

/**
 * removed the added fragment from the Activity Default config (don't keep Activities enabled)
 */
@Test
public void remove_fragment_retainTrue_dkATrue() {
    final HostingActivity hostingActivity = new HostingActivity();

    // Given a Presenter that retains itself via the savior
    // (retain=true — the previous "does not retain itself" comment was stale).
    final TestPresenter presenter = new TestPresenter(new TiConfiguration.Builder()
            .setRetainPresenterEnabled(true)
            .build());

    // And given a Fragment.
    final TestTiFragment fragment = new TestTiFragment.Builder()
            .setHostingActivity(hostingActivity)
            .setSavior(mSavior)
            .setPresenter(presenter)
            .build();

    // When the Fragment is added to the Activity.
    fragment.onCreate(null);
    fragment.setAdded(true);
    fragment.onCreateView(mock(LayoutInflater.class), null, null);
    fragment.onStart();

    // Then the presenter will be stored in the savior
    assertThat(mSavior.getPresenterCount()).isEqualTo(1);

    // When the fragment will be removed
    fragment.setRemoving(true);
    fragment.onStop();
    fragment.onDestroyView();
    fragment.onDestroy();

    // Then the presenter is destroyed and not saved
    assertThat(fragment.getPresenter().isDestroyed()).isTrue();
    assertThat(mSavior.getPresenterCount()).isEqualTo(0);
}

/**
 * A fragment will be added to UI, then removed and added again resulting in two provideView
 * calls
 */
@Test
public void reuse_fragment_retainFalse() throws Exception {
    final HostingActivity hostingActivity = new HostingActivity();

    // Given a config that does not retain the presenter (this is NOT the default config;
    // the retainTrue sibling test asserts TiConfiguration.DEFAULT instead).
    final TiConfiguration config = new TiConfiguration.Builder()
            .setRetainPresenterEnabled(false)
            .build();

    // And given a Fragment.
    final TestTiFragment fragment = new TestTiFragment.Builder()
            .setHostingActivity(hostingActivity)
            .setSavior(mSavior)
            .setPresenterProvider(new TiPresenterProvider<TiPresenter<TiView>>() {
                @NonNull
                @Override
                public TiPresenter<TiView> providePresenter() {
                    return new TestPresenter(config);
                }
            })
            .build();

    // When the Fragment is added to the Activity.
    fragment.onCreate(null);
    fragment.setAdded(true);
    fragment.onCreateView(mock(LayoutInflater.class), null, null);
    fragment.onStart();

    // Then the presenter will not be stored in the savior
    assertThat(mSavior.getPresenterCount()).isEqualTo(0);
    final TiPresenter<TiView> firstPresenter = fragment.getPresenter();

    // When the fragment will be removed from the Activity.
    fragment.setAdded(false);
    fragment.setRemoving(true);
    fragment.onStop();
    fragment.onDestroyView();
    fragment.onDestroy();

    // Then the presenter is removed from the savior and the presenter gets destroyed
    assertThat(mSavior.getPresenterCount()).isEqualTo(0);
    assertThat(fragment.getPresenter().isDestroyed()).isTrue();

    // When the same fragment instance is added again
    // NOTE(review): setAdded(true) is not called again here — confirm that is intended.
    fragment.onCreate(null);
    fragment.onCreateView(mock(LayoutInflater.class), null, null);
    fragment.onStart();

    // A new presenter is generated.
    assertThat(fragment.getPresenter().isDestroyed()).isFalse();
    assertThat(fragment.getPresenter()).isNotEqualTo(firstPresenter);
}

/**
 * A fragment will be added to UI, then removed and added again resulting in two provideView
 * calls
 * Default config
 */
@Test
public void reuse_fragment_retainTrue() throws Exception {
    final HostingActivity hostingActivity = new HostingActivity();

    // Check that the default config matches this test case
    final TiConfiguration config = new TiConfiguration.Builder()
            .setRetainPresenterEnabled(true)
            .build();
    assertThat(TiConfiguration.DEFAULT).isEqualTo(config);

    // And given a Fragment.
    final TestTiFragment fragment = new TestTiFragment.Builder()
            .setHostingActivity(hostingActivity)
            .setSavior(mSavior)
            .setPresenterProvider(new TiPresenterProvider<TiPresenter<TiView>>() {
                @NonNull
                @Override
                public TiPresenter<TiView> providePresenter() {
                    return new TestPresenter(config);
                }
            })
            .build();

    // When the Fragment is added to the Activity.
    fragment.onCreate(null);
    fragment.setAdded(true);
    fragment.onCreateView(mock(LayoutInflater.class), null, null);
    fragment.onStart();

    // Then the presenter will be stored in the savior
    assertThat(mSavior.getPresenterCount()).isEqualTo(1);
    final TiPresenter<TiView> firstPresenter = fragment.getPresenter();

    // When the fragment will be removed from the Activity.
    fragment.setAdded(false);
    fragment.setRemoving(true);
    fragment.onStop();
    fragment.onDestroyView();
    fragment.onDestroy();

    // Then the presenter is removed from the savior and the presenter gets destroyed
    assertThat(mSavior.getPresenterCount()).isEqualTo(0);
    assertThat(fragment.getPresenter().isDestroyed()).isTrue();

    // When the same fragment instance is added again
    // NOTE(review): setAdded(true) is not called again here — confirm that is intended.
    fragment.onCreate(null);
    fragment.onCreateView(mock(LayoutInflater.class), null, null);
    fragment.onStart();

    // A new presenter is generated.
    assertThat(fragment.getPresenter().isDestroyed()).isFalse();
    assertThat(fragment.getPresenter()).isNotEqualTo(firstPresenter);
}
}
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.chrome.browser.ntp;

import android.test.suitebuilder.annotation.LargeTest;
import android.text.TextUtils;
import android.view.View;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.ListView;

import org.chromium.base.ThreadUtils;
import org.chromium.base.test.util.CommandLineFlags;
import org.chromium.base.test.util.DisabledTest;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.BookmarksBridge.BookmarkItem;
import org.chromium.chrome.browser.ChromeSwitches;
import org.chromium.chrome.browser.UrlConstants;
import org.chromium.chrome.browser.bookmark.AddEditBookmarkFragment;
import org.chromium.chrome.browser.bookmark.ManageBookmarkActivity;
import org.chromium.chrome.browser.bookmark.SelectBookmarkFolderFragment;
import org.chromium.chrome.browser.ntp.BrowserBookmarksRecyclerView;
import org.chromium.chrome.browser.tab.Tab;
import org.chromium.chrome.test.ChromeTabbedActivityTestBase;
import org.chromium.chrome.test.util.ActivityUtils;
import org.chromium.chrome.test.util.BookmarkTestUtils;
import org.chromium.chrome.test.util.ChromeTabUtils;
import org.chromium.chrome.test.util.TestHttpServerClient;
import org.chromium.content.browser.test.util.Criteria;
import org.chromium.content.browser.test.util.CriteriaHelper;
import org.chromium.content.browser.test.util.TestTouchUtils;
import org.chromium.content.browser.test.util.TouchCommon;
import org.chromium.content.browser.test.util.UiUtils;

import java.util.concurrent.Callable;

/**
 * Tests for the old bookmarks page.
 *
 * <p>Enhanced bookmarks are explicitly disabled via the command-line flag below, so these
 * instrumentation tests exercise the legacy bookmarks native page only.
 */
@CommandLineFlags.Add(ChromeSwitches.ENABLE_ENHANCED_BOOKMARKS + "=0")
public class BookmarksPageTest extends ChromeTabbedActivityTestBase {

    // Test fixture page served by the local test HTTP server.
    private static final String TEST_PAGE =
            TestHttpServerClient.getUrl("chrome/test/data/android/about.html");
    private static final String TEST_PAGE_TITLE = "About";
    private static final String TEST_FOLDER_TITLE = "Test Folder";
    private static final String TEST_PAGE_TITLE_2 = "About 2";
    private static final String MOBILE_BOOKMARKS_TITLE = "Mobile bookmarks";
    private static final String BOOKMARKS_TITLE = "Bookmarks";

    // NOTE(review): declared as BrowserBookmarksRecyclerView but accessed below through
    // ListView-style getCount()/getItemAtPosition() — confirm the type exposes that API.
    private BrowserBookmarksRecyclerView mBookmarksList;
    // Breadcrumb row showing the current folder hierarchy; last child = current folder.
    private LinearLayout mHierarchyLayout;

    @Override
    public void startMainActivity() throws InterruptedException {
        startMainActivityOnBlankPage();
    }

    // Bookmarks TEST_PAGE via the app menu, then navigates to the Mobile bookmarks folder.
    private void addBookmark() throws InterruptedException {
        loadUrl(TEST_PAGE);
        BookmarkTestUtils.addCurrentUrlAsBookmark(this, getActivity());
        loadMobileBookmarksPage();
    }

    // Creates TEST_FOLDER_TITLE through the add/edit bookmark UI and saves TEST_PAGE into it,
    // then navigates to the Mobile bookmarks folder.
    private void addFolderAndAddBookmark() throws InterruptedException {
        loadUrl(TEST_PAGE);
        ManageBookmarkActivity addActivity = BookmarkTestUtils.selectBookmarkItemFromMenu(
                getInstrumentation(), getActivity());
        final AddEditBookmarkFragment addFragment =
                BookmarkTestUtils.loadAddEditFragment(addActivity);
        BookmarkTestUtils.clickSelectFolderButton(this, addFragment);
        SelectBookmarkFolderFragment selectedFolder = BookmarkTestUtils.loadSelectFragment(
                addActivity);
        BookmarkTestUtils.clickNewFolderButton(this, selectedFolder);
        final AddEditBookmarkFragment addNewFolderFragment =
                BookmarkTestUtils.loadAddFolderFragment(addActivity);
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                ((EditText) addNewFolderFragment.getView().findViewById(R.id.bookmark_title_input))
                        .setText(TEST_FOLDER_TITLE);
            }
        });
        BookmarkTestUtils.clickOkButton(this, addNewFolderFragment);
        BookmarkTestUtils.clickOkButton(this, addFragment);
        loadMobileBookmarksPage();
    }

    // Opens the Mobile bookmarks folder native page and caches the list/hierarchy views.
    private void loadMobileBookmarksPage() throws InterruptedException {
        // "2" is the hard-coded id of the Mobile bookmarks folder appended to the folder URL.
        final String mobileFolderUrl = UrlConstants.BOOKMARKS_FOLDER_URL + "2";
        loadUrl(mobileFolderUrl);
        Tab tab = getActivity().getActivityTab();
        assertTrue(tab.getNativePage() instanceof BookmarksPage
                || tab.getNativePage() instanceof BrowserNewTabPage);
        mHierarchyLayout = (LinearLayout) getActivity().findViewById(
                R.id.bookmark_folder_structure);
        mBookmarksList =
                (BrowserBookmarksRecyclerView) getActivity().findViewById(R.id.bookmarks_list_view);
    }

    // Clicks a bookmark row and waits until the page finishes loading in the current tab.
    private void openBookmarkInCurrentTab(final View itemView) throws InterruptedException {
        ChromeTabUtils.waitForTabPageLoaded(getActivity().getActivityTab(), new Runnable() {
            @Override
            public void run() {
                TouchCommon.singleClickView(itemView);
            }
        });
        BookmarkTestUtils.assertUrlBarEquals(
                getActivity(), "urlBar string not matching the bookmarked page", TEST_PAGE);
    }

    // Adds TEST_PAGE as a bookmark and long-presses its row to open the context menu.
    private void addBookmarkAndLongClickForContextMenu() throws InterruptedException {
        addBookmark();
        View itemView = (View) BookmarkTestUtils.getViewWithText(
                mBookmarksList, TEST_PAGE_TITLE);
        TouchCommon.longPressView(itemView, itemView.getWidth() / 2, itemView.getHeight() / 2);
    }

    // Returns the title of the deepest (currently open) folder in the breadcrumb row.
    private String getCurrentFolderTitle() {
        return ThreadUtils.runOnUiThreadBlockingNoException(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return ((BookmarkFolderHierarchyItem) mHierarchyLayout.getChildAt(
                        mHierarchyLayout.getChildCount() - 1)).getText().toString();
            }
        });
    }

    // Waits for the given folder crumb to appear, clicks it, and verifies navigation.
    private void clickFolderInFolderHierarchy(final String folderToSelect)
            throws InterruptedException {
        CriteriaHelper.pollForCriteria(new Criteria() {
            @Override
            public boolean isSatisfied() {
                return (BookmarkTestUtils.getViewWithText(mHierarchyLayout, folderToSelect)
                        != null);
            }
        });
        final BookmarkFolderHierarchyItem itemView =
                (BookmarkFolderHierarchyItem) BookmarkTestUtils.getViewWithText(
                        mHierarchyLayout, folderToSelect);
        TouchCommon.singleClickView(itemView);
        assertEquals(folderToSelect, getCurrentFolderTitle());
    }

    // Waits for the given folder row to appear in the list, clicks it, and verifies navigation.
    private void clickFolderInBookmarksList(final String folderToSelect)
            throws InterruptedException {
        CriteriaHelper.pollForCriteria(new Criteria() {
            @Override
            public boolean isSatisfied() {
                return (BookmarkTestUtils.getViewWithText(mBookmarksList, folderToSelect)
                        != null);
            }
        });
        final View itemView = (View) BookmarkTestUtils.getViewWithText(
                mBookmarksList, folderToSelect);
        TouchCommon.singleClickView(itemView);
        assertEquals(folderToSelect, getCurrentFolderTitle());
    }

    // True if any row of the bookmarks list has exactly the given title (checked on UI thread).
    private boolean isItemPresentInBookmarksList(final String expectedTitle) {
        return ThreadUtils.runOnUiThreadBlockingNoException(new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
                for (int i = 0; i < mBookmarksList.getCount(); i++) {
                    String actualTitle =
                            ((BookmarkItem) mBookmarksList.getItemAtPosition(i)).getTitle();
                    if (TextUtils.equals(actualTitle, expectedTitle)) {
                        return true;
                    }
                }
                return false;
            }
        });
    }

    @LargeTest
    public void testCreateAndOpenBookmark() throws InterruptedException {
        addBookmark();
        // Assert "About" item is listed in the bookmarks list.
        assertTrue(isItemPresentInBookmarksList(TEST_PAGE_TITLE));
        // Click the item "About".
        openBookmarkInCurrentTab((View) BookmarkTestUtils.getViewWithText(
                mBookmarksList, TEST_PAGE_TITLE));
    }

    @LargeTest
    public void testNavigateFoldersInFolderHierarchy() throws InterruptedException {
        addFolderAndAddBookmark();
        // Click on "Mobile bookmarks" in the Folder hierarchy.
        clickFolderInFolderHierarchy(MOBILE_BOOKMARKS_TITLE);
        // Assert "Test Folder" is listed in the bookmarks list.
        assertTrue(isItemPresentInBookmarksList(TEST_FOLDER_TITLE));
        // Click on "Bookmarks" in the Folder hierarchy.
        clickFolderInFolderHierarchy(BOOKMARKS_TITLE);
        // Assert "Desktop Bookmarks" is listed in the bookmarks list.
        assertTrue(isItemPresentInBookmarksList(MOBILE_BOOKMARKS_TITLE));
    }

    /*
     * @LargeTest
     * Disabled because of repeated flakes on ICS bot. http://crbug.com/384126
     */
    @DisabledTest
    public void testNavigateFoldersInBookmarksListView() throws InterruptedException {
        addFolderAndAddBookmark();
        // Click on "Bookmarks" in the Folder hierarchy.
        clickFolderInFolderHierarchy(BOOKMARKS_TITLE);
        // Assert "Mobile Bookmarks" is listed in the bookmarks list.
        assertTrue(isItemPresentInBookmarksList(MOBILE_BOOKMARKS_TITLE));
        // Click on "Mobile bookmarks" in the bookmarks list view.
        clickFolderInBookmarksList(MOBILE_BOOKMARKS_TITLE);
        // Assert "Test Folder" is listed in the bookmarks list.
        assertTrue(isItemPresentInBookmarksList(TEST_FOLDER_TITLE));
        // Click on "Test Folder" in the bookmarks list view.
        clickFolderInBookmarksList(TEST_FOLDER_TITLE);
        // Assert "About" is listed in the bookmarks list.
        assertTrue(isItemPresentInBookmarksList(TEST_PAGE_TITLE));
    }

    @LargeTest
    public void testContextMenuOptionOpenInANewTab() throws InterruptedException {
        addBookmark();
        View itemView = (View) BookmarkTestUtils.getViewWithText(
                mBookmarksList, TEST_PAGE_TITLE);
        invokeContextMenuAndOpenInANewTab(itemView, BookmarkItemView.ID_OPEN_IN_NEW_TAB,
                false, TEST_PAGE);
    }

    @LargeTest
    public void testContextMenuOptionOpenInAnIncognitoTab() throws InterruptedException {
        addBookmark();
        View itemView = (View) BookmarkTestUtils.getViewWithText(
                mBookmarksList, TEST_PAGE_TITLE);
        invokeContextMenuAndOpenInANewTab(itemView, BookmarkItemView.ID_OPEN_IN_INCOGNITO_TAB,
                true, TEST_PAGE);
    }

    @LargeTest
    public void testContextMenuOptionEditBookmark() throws InterruptedException {
        addBookmarkAndLongClickForContextMenu();
        // Invoke the "Edit Bookmark" context menu option.
        final ManageBookmarkActivity activity = ActivityUtils.waitForActivity(
                getInstrumentation(), ManageBookmarkActivity.class,
                new Runnable() {
                    @Override
                    public void run() {
                        getInstrumentation().invokeContextMenuAction(
                                getActivity(), BookmarkItemView.ID_EDIT, 0);
                    }
                }
        );
        UiUtils.settleDownUI(getInstrumentation());
        // Edit the bookmark title.
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                ((EditText) activity.findViewById(R.id.bookmark_title_input))
                        .setText(TEST_PAGE_TITLE_2);
            }
        });
        TestTouchUtils.clickView(this, activity.findViewById(R.id.ok));
        // Assert "About 2" is listed in the bookmarks list.
        assertTrue(isItemPresentInBookmarksList(TEST_PAGE_TITLE_2));
    }

    @LargeTest
    public void testContextMenuOptionDeleteBookmark() throws InterruptedException {
        addBookmarkAndLongClickForContextMenu();
        // Invoke the "Delete Bookmark" context menu option.
        getInstrumentation().invokeContextMenuAction(
                getActivity(), BookmarkItemView.ID_DELETE, 0);
        UiUtils.settleDownUI(getInstrumentation());
        // Assert no bookmarks exist in the current folder.
        assertTrue(mBookmarksList.getCount() == 0);
    }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.cognitoidp;

import javax.annotation.Generated;

import com.amazonaws.services.cognitoidp.model.*;

/**
 * Abstract implementation of {@code AWSCognitoIdentityProviderAsync}. Convenient method forms pass through to the
 * corresponding overload that takes a request object and an {@code AsyncHandler}, which throws an
 * {@code UnsupportedOperationException}.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AbstractAWSCognitoIdentityProviderAsync extends AbstractAWSCognitoIdentityProvider implements AWSCognitoIdentityProviderAsync {

    // NOTE(review): machine-generated code (see @Generated above) — do not hand-edit;
    // changes will be lost on the next SDK regeneration. Every operation follows the same
    // pattern: the single-argument overload delegates to the (request, asyncHandler)
    // overload with a null handler, and that overload throws
    // UnsupportedOperationException so subclasses override only what they need.
    protected AbstractAWSCognitoIdentityProviderAsync() {
    }

    @Override
    public java.util.concurrent.Future<AddCustomAttributesResult> addCustomAttributesAsync(AddCustomAttributesRequest request) {
        return addCustomAttributesAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AddCustomAttributesResult> addCustomAttributesAsync(AddCustomAttributesRequest request,
            com.amazonaws.handlers.AsyncHandler<AddCustomAttributesRequest, AddCustomAttributesResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminAddUserToGroupResult> adminAddUserToGroupAsync(AdminAddUserToGroupRequest request) {
        return adminAddUserToGroupAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminAddUserToGroupResult> adminAddUserToGroupAsync(AdminAddUserToGroupRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminAddUserToGroupRequest, AdminAddUserToGroupResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminConfirmSignUpResult> adminConfirmSignUpAsync(AdminConfirmSignUpRequest request) {
        return adminConfirmSignUpAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminConfirmSignUpResult> adminConfirmSignUpAsync(AdminConfirmSignUpRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminConfirmSignUpRequest, AdminConfirmSignUpResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminCreateUserResult> adminCreateUserAsync(AdminCreateUserRequest request) {
        return adminCreateUserAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminCreateUserResult> adminCreateUserAsync(AdminCreateUserRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminCreateUserRequest, AdminCreateUserResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminDeleteUserResult> adminDeleteUserAsync(AdminDeleteUserRequest request) {
        return adminDeleteUserAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminDeleteUserResult> adminDeleteUserAsync(AdminDeleteUserRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminDeleteUserRequest, AdminDeleteUserResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminDeleteUserAttributesResult> adminDeleteUserAttributesAsync(AdminDeleteUserAttributesRequest request) {
        return adminDeleteUserAttributesAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminDeleteUserAttributesResult> adminDeleteUserAttributesAsync(AdminDeleteUserAttributesRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminDeleteUserAttributesRequest, AdminDeleteUserAttributesResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminDisableProviderForUserResult> adminDisableProviderForUserAsync(AdminDisableProviderForUserRequest request) {
        return adminDisableProviderForUserAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminDisableProviderForUserResult> adminDisableProviderForUserAsync(AdminDisableProviderForUserRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminDisableProviderForUserRequest, AdminDisableProviderForUserResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminDisableUserResult> adminDisableUserAsync(AdminDisableUserRequest request) {
        return adminDisableUserAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminDisableUserResult> adminDisableUserAsync(AdminDisableUserRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminDisableUserRequest, AdminDisableUserResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminEnableUserResult> adminEnableUserAsync(AdminEnableUserRequest request) {
        return adminEnableUserAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminEnableUserResult> adminEnableUserAsync(AdminEnableUserRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminEnableUserRequest, AdminEnableUserResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminForgetDeviceResult> adminForgetDeviceAsync(AdminForgetDeviceRequest request) {
        return adminForgetDeviceAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminForgetDeviceResult> adminForgetDeviceAsync(AdminForgetDeviceRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminForgetDeviceRequest, AdminForgetDeviceResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminGetDeviceResult> adminGetDeviceAsync(AdminGetDeviceRequest request) {
        return adminGetDeviceAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminGetDeviceResult> adminGetDeviceAsync(AdminGetDeviceRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminGetDeviceRequest, AdminGetDeviceResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminGetUserResult> adminGetUserAsync(AdminGetUserRequest request) {
        return adminGetUserAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminGetUserResult> adminGetUserAsync(AdminGetUserRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminGetUserRequest, AdminGetUserResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminInitiateAuthResult> adminInitiateAuthAsync(AdminInitiateAuthRequest request) {
        return adminInitiateAuthAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminInitiateAuthResult> adminInitiateAuthAsync(AdminInitiateAuthRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminInitiateAuthRequest, AdminInitiateAuthResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminLinkProviderForUserResult> adminLinkProviderForUserAsync(AdminLinkProviderForUserRequest request) {
        return adminLinkProviderForUserAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminLinkProviderForUserResult> adminLinkProviderForUserAsync(AdminLinkProviderForUserRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminLinkProviderForUserRequest, AdminLinkProviderForUserResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminListDevicesResult> adminListDevicesAsync(AdminListDevicesRequest request) {
        return adminListDevicesAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminListDevicesResult> adminListDevicesAsync(AdminListDevicesRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminListDevicesRequest, AdminListDevicesResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminListGroupsForUserResult> adminListGroupsForUserAsync(AdminListGroupsForUserRequest request) {
        return adminListGroupsForUserAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminListGroupsForUserResult> adminListGroupsForUserAsync(AdminListGroupsForUserRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminListGroupsForUserRequest, AdminListGroupsForUserResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminListUserAuthEventsResult> adminListUserAuthEventsAsync(AdminListUserAuthEventsRequest request) {
        return adminListUserAuthEventsAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminListUserAuthEventsResult> adminListUserAuthEventsAsync(AdminListUserAuthEventsRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminListUserAuthEventsRequest, AdminListUserAuthEventsResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminRemoveUserFromGroupResult> adminRemoveUserFromGroupAsync(AdminRemoveUserFromGroupRequest request) {
        return adminRemoveUserFromGroupAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminRemoveUserFromGroupResult> adminRemoveUserFromGroupAsync(AdminRemoveUserFromGroupRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminRemoveUserFromGroupRequest, AdminRemoveUserFromGroupResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminResetUserPasswordResult> adminResetUserPasswordAsync(AdminResetUserPasswordRequest request) {
        return adminResetUserPasswordAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminResetUserPasswordResult> adminResetUserPasswordAsync(AdminResetUserPasswordRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminResetUserPasswordRequest, AdminResetUserPasswordResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminRespondToAuthChallengeResult> adminRespondToAuthChallengeAsync(AdminRespondToAuthChallengeRequest request) {
        return adminRespondToAuthChallengeAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminRespondToAuthChallengeResult> adminRespondToAuthChallengeAsync(AdminRespondToAuthChallengeRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminRespondToAuthChallengeRequest, AdminRespondToAuthChallengeResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminSetUserMFAPreferenceResult> adminSetUserMFAPreferenceAsync(AdminSetUserMFAPreferenceRequest request) {
        return adminSetUserMFAPreferenceAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminSetUserMFAPreferenceResult> adminSetUserMFAPreferenceAsync(AdminSetUserMFAPreferenceRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminSetUserMFAPreferenceRequest, AdminSetUserMFAPreferenceResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminSetUserPasswordResult> adminSetUserPasswordAsync(AdminSetUserPasswordRequest request) {
        return adminSetUserPasswordAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminSetUserPasswordResult> adminSetUserPasswordAsync(AdminSetUserPasswordRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminSetUserPasswordRequest, AdminSetUserPasswordResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminSetUserSettingsResult> adminSetUserSettingsAsync(AdminSetUserSettingsRequest request) {
        return adminSetUserSettingsAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminSetUserSettingsResult> adminSetUserSettingsAsync(AdminSetUserSettingsRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminSetUserSettingsRequest, AdminSetUserSettingsResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminUpdateAuthEventFeedbackResult> adminUpdateAuthEventFeedbackAsync(AdminUpdateAuthEventFeedbackRequest request) {
        return adminUpdateAuthEventFeedbackAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminUpdateAuthEventFeedbackResult> adminUpdateAuthEventFeedbackAsync(AdminUpdateAuthEventFeedbackRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminUpdateAuthEventFeedbackRequest, AdminUpdateAuthEventFeedbackResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminUpdateDeviceStatusResult> adminUpdateDeviceStatusAsync(AdminUpdateDeviceStatusRequest request) {
        return adminUpdateDeviceStatusAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminUpdateDeviceStatusResult> adminUpdateDeviceStatusAsync(AdminUpdateDeviceStatusRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminUpdateDeviceStatusRequest, AdminUpdateDeviceStatusResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminUpdateUserAttributesResult> adminUpdateUserAttributesAsync(AdminUpdateUserAttributesRequest request) {
        return adminUpdateUserAttributesAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminUpdateUserAttributesResult> adminUpdateUserAttributesAsync(AdminUpdateUserAttributesRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminUpdateUserAttributesRequest, AdminUpdateUserAttributesResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AdminUserGlobalSignOutResult> adminUserGlobalSignOutAsync(AdminUserGlobalSignOutRequest request) {
        return adminUserGlobalSignOutAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AdminUserGlobalSignOutResult> adminUserGlobalSignOutAsync(AdminUserGlobalSignOutRequest request,
            com.amazonaws.handlers.AsyncHandler<AdminUserGlobalSignOutRequest, AdminUserGlobalSignOutResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<AssociateSoftwareTokenResult> associateSoftwareTokenAsync(AssociateSoftwareTokenRequest request) {
        return associateSoftwareTokenAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<AssociateSoftwareTokenResult> associateSoftwareTokenAsync(AssociateSoftwareTokenRequest request,
            com.amazonaws.handlers.AsyncHandler<AssociateSoftwareTokenRequest, AssociateSoftwareTokenResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<ChangePasswordResult> changePasswordAsync(ChangePasswordRequest request) {
        return changePasswordAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<ChangePasswordResult> changePasswordAsync(ChangePasswordRequest request,
            com.amazonaws.handlers.AsyncHandler<ChangePasswordRequest, ChangePasswordResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<ConfirmDeviceResult> confirmDeviceAsync(ConfirmDeviceRequest request) {
        return confirmDeviceAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<ConfirmDeviceResult> confirmDeviceAsync(ConfirmDeviceRequest request,
            com.amazonaws.handlers.AsyncHandler<ConfirmDeviceRequest, ConfirmDeviceResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<ConfirmForgotPasswordResult> confirmForgotPasswordAsync(ConfirmForgotPasswordRequest request) {
        return confirmForgotPasswordAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<ConfirmForgotPasswordResult> confirmForgotPasswordAsync(ConfirmForgotPasswordRequest request,
            com.amazonaws.handlers.AsyncHandler<ConfirmForgotPasswordRequest, ConfirmForgotPasswordResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<ConfirmSignUpResult> confirmSignUpAsync(ConfirmSignUpRequest request) {
        return confirmSignUpAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<ConfirmSignUpResult> confirmSignUpAsync(ConfirmSignUpRequest request,
            com.amazonaws.handlers.AsyncHandler<ConfirmSignUpRequest, ConfirmSignUpResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<CreateGroupResult> createGroupAsync(CreateGroupRequest request) {
        return createGroupAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<CreateGroupResult> createGroupAsync(CreateGroupRequest request,
            com.amazonaws.handlers.AsyncHandler<CreateGroupRequest, CreateGroupResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<CreateIdentityProviderResult> createIdentityProviderAsync(CreateIdentityProviderRequest request) {
        return createIdentityProviderAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<CreateIdentityProviderResult> createIdentityProviderAsync(CreateIdentityProviderRequest request,
            com.amazonaws.handlers.AsyncHandler<CreateIdentityProviderRequest, CreateIdentityProviderResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<CreateResourceServerResult> createResourceServerAsync(CreateResourceServerRequest request) {
        return createResourceServerAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<CreateResourceServerResult> createResourceServerAsync(CreateResourceServerRequest request,
            com.amazonaws.handlers.AsyncHandler<CreateResourceServerRequest, CreateResourceServerResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<CreateUserImportJobResult> createUserImportJobAsync(CreateUserImportJobRequest request) {
        return createUserImportJobAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<CreateUserImportJobResult> createUserImportJobAsync(CreateUserImportJobRequest request,
            com.amazonaws.handlers.AsyncHandler<CreateUserImportJobRequest, CreateUserImportJobResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<CreateUserPoolResult> createUserPoolAsync(CreateUserPoolRequest request) {
        return createUserPoolAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<CreateUserPoolResult> createUserPoolAsync(CreateUserPoolRequest request,
            com.amazonaws.handlers.AsyncHandler<CreateUserPoolRequest, CreateUserPoolResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<CreateUserPoolClientResult> createUserPoolClientAsync(CreateUserPoolClientRequest request) {
        return createUserPoolClientAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<CreateUserPoolClientResult> createUserPoolClientAsync(CreateUserPoolClientRequest request,
            com.amazonaws.handlers.AsyncHandler<CreateUserPoolClientRequest, CreateUserPoolClientResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<CreateUserPoolDomainResult> createUserPoolDomainAsync(CreateUserPoolDomainRequest request) {
        return createUserPoolDomainAsync(request, null);
    }

    // (continues past this chunk)
    @Override public
java.util.concurrent.Future<CreateUserPoolDomainResult> createUserPoolDomainAsync(CreateUserPoolDomainRequest request, com.amazonaws.handlers.AsyncHandler<CreateUserPoolDomainRequest, CreateUserPoolDomainResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DeleteGroupResult> deleteGroupAsync(DeleteGroupRequest request) { return deleteGroupAsync(request, null); } @Override public java.util.concurrent.Future<DeleteGroupResult> deleteGroupAsync(DeleteGroupRequest request, com.amazonaws.handlers.AsyncHandler<DeleteGroupRequest, DeleteGroupResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DeleteIdentityProviderResult> deleteIdentityProviderAsync(DeleteIdentityProviderRequest request) { return deleteIdentityProviderAsync(request, null); } @Override public java.util.concurrent.Future<DeleteIdentityProviderResult> deleteIdentityProviderAsync(DeleteIdentityProviderRequest request, com.amazonaws.handlers.AsyncHandler<DeleteIdentityProviderRequest, DeleteIdentityProviderResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DeleteResourceServerResult> deleteResourceServerAsync(DeleteResourceServerRequest request) { return deleteResourceServerAsync(request, null); } @Override public java.util.concurrent.Future<DeleteResourceServerResult> deleteResourceServerAsync(DeleteResourceServerRequest request, com.amazonaws.handlers.AsyncHandler<DeleteResourceServerRequest, DeleteResourceServerResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DeleteUserResult> deleteUserAsync(DeleteUserRequest request) { return deleteUserAsync(request, null); } @Override public java.util.concurrent.Future<DeleteUserResult> deleteUserAsync(DeleteUserRequest request, com.amazonaws.handlers.AsyncHandler<DeleteUserRequest, 
DeleteUserResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DeleteUserAttributesResult> deleteUserAttributesAsync(DeleteUserAttributesRequest request) { return deleteUserAttributesAsync(request, null); } @Override public java.util.concurrent.Future<DeleteUserAttributesResult> deleteUserAttributesAsync(DeleteUserAttributesRequest request, com.amazonaws.handlers.AsyncHandler<DeleteUserAttributesRequest, DeleteUserAttributesResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DeleteUserPoolResult> deleteUserPoolAsync(DeleteUserPoolRequest request) { return deleteUserPoolAsync(request, null); } @Override public java.util.concurrent.Future<DeleteUserPoolResult> deleteUserPoolAsync(DeleteUserPoolRequest request, com.amazonaws.handlers.AsyncHandler<DeleteUserPoolRequest, DeleteUserPoolResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DeleteUserPoolClientResult> deleteUserPoolClientAsync(DeleteUserPoolClientRequest request) { return deleteUserPoolClientAsync(request, null); } @Override public java.util.concurrent.Future<DeleteUserPoolClientResult> deleteUserPoolClientAsync(DeleteUserPoolClientRequest request, com.amazonaws.handlers.AsyncHandler<DeleteUserPoolClientRequest, DeleteUserPoolClientResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DeleteUserPoolDomainResult> deleteUserPoolDomainAsync(DeleteUserPoolDomainRequest request) { return deleteUserPoolDomainAsync(request, null); } @Override public java.util.concurrent.Future<DeleteUserPoolDomainResult> deleteUserPoolDomainAsync(DeleteUserPoolDomainRequest request, com.amazonaws.handlers.AsyncHandler<DeleteUserPoolDomainRequest, DeleteUserPoolDomainResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } 
@Override public java.util.concurrent.Future<DescribeIdentityProviderResult> describeIdentityProviderAsync(DescribeIdentityProviderRequest request) { return describeIdentityProviderAsync(request, null); } @Override public java.util.concurrent.Future<DescribeIdentityProviderResult> describeIdentityProviderAsync(DescribeIdentityProviderRequest request, com.amazonaws.handlers.AsyncHandler<DescribeIdentityProviderRequest, DescribeIdentityProviderResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DescribeResourceServerResult> describeResourceServerAsync(DescribeResourceServerRequest request) { return describeResourceServerAsync(request, null); } @Override public java.util.concurrent.Future<DescribeResourceServerResult> describeResourceServerAsync(DescribeResourceServerRequest request, com.amazonaws.handlers.AsyncHandler<DescribeResourceServerRequest, DescribeResourceServerResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DescribeRiskConfigurationResult> describeRiskConfigurationAsync(DescribeRiskConfigurationRequest request) { return describeRiskConfigurationAsync(request, null); } @Override public java.util.concurrent.Future<DescribeRiskConfigurationResult> describeRiskConfigurationAsync(DescribeRiskConfigurationRequest request, com.amazonaws.handlers.AsyncHandler<DescribeRiskConfigurationRequest, DescribeRiskConfigurationResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DescribeUserImportJobResult> describeUserImportJobAsync(DescribeUserImportJobRequest request) { return describeUserImportJobAsync(request, null); } @Override public java.util.concurrent.Future<DescribeUserImportJobResult> describeUserImportJobAsync(DescribeUserImportJobRequest request, com.amazonaws.handlers.AsyncHandler<DescribeUserImportJobRequest, DescribeUserImportJobResult> 
asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DescribeUserPoolResult> describeUserPoolAsync(DescribeUserPoolRequest request) { return describeUserPoolAsync(request, null); } @Override public java.util.concurrent.Future<DescribeUserPoolResult> describeUserPoolAsync(DescribeUserPoolRequest request, com.amazonaws.handlers.AsyncHandler<DescribeUserPoolRequest, DescribeUserPoolResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DescribeUserPoolClientResult> describeUserPoolClientAsync(DescribeUserPoolClientRequest request) { return describeUserPoolClientAsync(request, null); } @Override public java.util.concurrent.Future<DescribeUserPoolClientResult> describeUserPoolClientAsync(DescribeUserPoolClientRequest request, com.amazonaws.handlers.AsyncHandler<DescribeUserPoolClientRequest, DescribeUserPoolClientResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DescribeUserPoolDomainResult> describeUserPoolDomainAsync(DescribeUserPoolDomainRequest request) { return describeUserPoolDomainAsync(request, null); } @Override public java.util.concurrent.Future<DescribeUserPoolDomainResult> describeUserPoolDomainAsync(DescribeUserPoolDomainRequest request, com.amazonaws.handlers.AsyncHandler<DescribeUserPoolDomainRequest, DescribeUserPoolDomainResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<ForgetDeviceResult> forgetDeviceAsync(ForgetDeviceRequest request) { return forgetDeviceAsync(request, null); } @Override public java.util.concurrent.Future<ForgetDeviceResult> forgetDeviceAsync(ForgetDeviceRequest request, com.amazonaws.handlers.AsyncHandler<ForgetDeviceRequest, ForgetDeviceResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public 
java.util.concurrent.Future<ForgotPasswordResult> forgotPasswordAsync(ForgotPasswordRequest request) { return forgotPasswordAsync(request, null); } @Override public java.util.concurrent.Future<ForgotPasswordResult> forgotPasswordAsync(ForgotPasswordRequest request, com.amazonaws.handlers.AsyncHandler<ForgotPasswordRequest, ForgotPasswordResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<GetCSVHeaderResult> getCSVHeaderAsync(GetCSVHeaderRequest request) { return getCSVHeaderAsync(request, null); } @Override public java.util.concurrent.Future<GetCSVHeaderResult> getCSVHeaderAsync(GetCSVHeaderRequest request, com.amazonaws.handlers.AsyncHandler<GetCSVHeaderRequest, GetCSVHeaderResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<GetDeviceResult> getDeviceAsync(GetDeviceRequest request) { return getDeviceAsync(request, null); } @Override public java.util.concurrent.Future<GetDeviceResult> getDeviceAsync(GetDeviceRequest request, com.amazonaws.handlers.AsyncHandler<GetDeviceRequest, GetDeviceResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<GetGroupResult> getGroupAsync(GetGroupRequest request) { return getGroupAsync(request, null); } @Override public java.util.concurrent.Future<GetGroupResult> getGroupAsync(GetGroupRequest request, com.amazonaws.handlers.AsyncHandler<GetGroupRequest, GetGroupResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<GetIdentityProviderByIdentifierResult> getIdentityProviderByIdentifierAsync( GetIdentityProviderByIdentifierRequest request) { return getIdentityProviderByIdentifierAsync(request, null); } @Override public java.util.concurrent.Future<GetIdentityProviderByIdentifierResult> getIdentityProviderByIdentifierAsync( 
GetIdentityProviderByIdentifierRequest request, com.amazonaws.handlers.AsyncHandler<GetIdentityProviderByIdentifierRequest, GetIdentityProviderByIdentifierResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<GetSigningCertificateResult> getSigningCertificateAsync(GetSigningCertificateRequest request) { return getSigningCertificateAsync(request, null); } @Override public java.util.concurrent.Future<GetSigningCertificateResult> getSigningCertificateAsync(GetSigningCertificateRequest request, com.amazonaws.handlers.AsyncHandler<GetSigningCertificateRequest, GetSigningCertificateResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<GetUICustomizationResult> getUICustomizationAsync(GetUICustomizationRequest request) { return getUICustomizationAsync(request, null); } @Override public java.util.concurrent.Future<GetUICustomizationResult> getUICustomizationAsync(GetUICustomizationRequest request, com.amazonaws.handlers.AsyncHandler<GetUICustomizationRequest, GetUICustomizationResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<GetUserResult> getUserAsync(GetUserRequest request) { return getUserAsync(request, null); } @Override public java.util.concurrent.Future<GetUserResult> getUserAsync(GetUserRequest request, com.amazonaws.handlers.AsyncHandler<GetUserRequest, GetUserResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<GetUserAttributeVerificationCodeResult> getUserAttributeVerificationCodeAsync( GetUserAttributeVerificationCodeRequest request) { return getUserAttributeVerificationCodeAsync(request, null); } @Override public java.util.concurrent.Future<GetUserAttributeVerificationCodeResult> getUserAttributeVerificationCodeAsync( GetUserAttributeVerificationCodeRequest request, 
com.amazonaws.handlers.AsyncHandler<GetUserAttributeVerificationCodeRequest, GetUserAttributeVerificationCodeResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<GetUserPoolMfaConfigResult> getUserPoolMfaConfigAsync(GetUserPoolMfaConfigRequest request) { return getUserPoolMfaConfigAsync(request, null); } @Override public java.util.concurrent.Future<GetUserPoolMfaConfigResult> getUserPoolMfaConfigAsync(GetUserPoolMfaConfigRequest request, com.amazonaws.handlers.AsyncHandler<GetUserPoolMfaConfigRequest, GetUserPoolMfaConfigResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<GlobalSignOutResult> globalSignOutAsync(GlobalSignOutRequest request) { return globalSignOutAsync(request, null); } @Override public java.util.concurrent.Future<GlobalSignOutResult> globalSignOutAsync(GlobalSignOutRequest request, com.amazonaws.handlers.AsyncHandler<GlobalSignOutRequest, GlobalSignOutResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<InitiateAuthResult> initiateAuthAsync(InitiateAuthRequest request) { return initiateAuthAsync(request, null); } @Override public java.util.concurrent.Future<InitiateAuthResult> initiateAuthAsync(InitiateAuthRequest request, com.amazonaws.handlers.AsyncHandler<InitiateAuthRequest, InitiateAuthResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<ListDevicesResult> listDevicesAsync(ListDevicesRequest request) { return listDevicesAsync(request, null); } @Override public java.util.concurrent.Future<ListDevicesResult> listDevicesAsync(ListDevicesRequest request, com.amazonaws.handlers.AsyncHandler<ListDevicesRequest, ListDevicesResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<ListGroupsResult> 
listGroupsAsync(ListGroupsRequest request) { return listGroupsAsync(request, null); } @Override public java.util.concurrent.Future<ListGroupsResult> listGroupsAsync(ListGroupsRequest request, com.amazonaws.handlers.AsyncHandler<ListGroupsRequest, ListGroupsResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<ListIdentityProvidersResult> listIdentityProvidersAsync(ListIdentityProvidersRequest request) { return listIdentityProvidersAsync(request, null); } @Override public java.util.concurrent.Future<ListIdentityProvidersResult> listIdentityProvidersAsync(ListIdentityProvidersRequest request, com.amazonaws.handlers.AsyncHandler<ListIdentityProvidersRequest, ListIdentityProvidersResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<ListResourceServersResult> listResourceServersAsync(ListResourceServersRequest request) { return listResourceServersAsync(request, null); } @Override public java.util.concurrent.Future<ListResourceServersResult> listResourceServersAsync(ListResourceServersRequest request, com.amazonaws.handlers.AsyncHandler<ListResourceServersRequest, ListResourceServersResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<ListTagsForResourceResult> listTagsForResourceAsync(ListTagsForResourceRequest request) { return listTagsForResourceAsync(request, null); } @Override public java.util.concurrent.Future<ListTagsForResourceResult> listTagsForResourceAsync(ListTagsForResourceRequest request, com.amazonaws.handlers.AsyncHandler<ListTagsForResourceRequest, ListTagsForResourceResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<ListUserImportJobsResult> listUserImportJobsAsync(ListUserImportJobsRequest request) { return listUserImportJobsAsync(request, null); } @Override public 
java.util.concurrent.Future<ListUserImportJobsResult> listUserImportJobsAsync(ListUserImportJobsRequest request, com.amazonaws.handlers.AsyncHandler<ListUserImportJobsRequest, ListUserImportJobsResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<ListUserPoolClientsResult> listUserPoolClientsAsync(ListUserPoolClientsRequest request) { return listUserPoolClientsAsync(request, null); } @Override public java.util.concurrent.Future<ListUserPoolClientsResult> listUserPoolClientsAsync(ListUserPoolClientsRequest request, com.amazonaws.handlers.AsyncHandler<ListUserPoolClientsRequest, ListUserPoolClientsResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<ListUserPoolsResult> listUserPoolsAsync(ListUserPoolsRequest request) { return listUserPoolsAsync(request, null); } @Override public java.util.concurrent.Future<ListUserPoolsResult> listUserPoolsAsync(ListUserPoolsRequest request, com.amazonaws.handlers.AsyncHandler<ListUserPoolsRequest, ListUserPoolsResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<ListUsersResult> listUsersAsync(ListUsersRequest request) { return listUsersAsync(request, null); } @Override public java.util.concurrent.Future<ListUsersResult> listUsersAsync(ListUsersRequest request, com.amazonaws.handlers.AsyncHandler<ListUsersRequest, ListUsersResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<ListUsersInGroupResult> listUsersInGroupAsync(ListUsersInGroupRequest request) { return listUsersInGroupAsync(request, null); } @Override public java.util.concurrent.Future<ListUsersInGroupResult> listUsersInGroupAsync(ListUsersInGroupRequest request, com.amazonaws.handlers.AsyncHandler<ListUsersInGroupRequest, ListUsersInGroupResult> asyncHandler) { throw new 
java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<ResendConfirmationCodeResult> resendConfirmationCodeAsync(ResendConfirmationCodeRequest request) { return resendConfirmationCodeAsync(request, null); } @Override public java.util.concurrent.Future<ResendConfirmationCodeResult> resendConfirmationCodeAsync(ResendConfirmationCodeRequest request, com.amazonaws.handlers.AsyncHandler<ResendConfirmationCodeRequest, ResendConfirmationCodeResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<RespondToAuthChallengeResult> respondToAuthChallengeAsync(RespondToAuthChallengeRequest request) { return respondToAuthChallengeAsync(request, null); } @Override public java.util.concurrent.Future<RespondToAuthChallengeResult> respondToAuthChallengeAsync(RespondToAuthChallengeRequest request, com.amazonaws.handlers.AsyncHandler<RespondToAuthChallengeRequest, RespondToAuthChallengeResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<RevokeTokenResult> revokeTokenAsync(RevokeTokenRequest request) { return revokeTokenAsync(request, null); } @Override public java.util.concurrent.Future<RevokeTokenResult> revokeTokenAsync(RevokeTokenRequest request, com.amazonaws.handlers.AsyncHandler<RevokeTokenRequest, RevokeTokenResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<SetRiskConfigurationResult> setRiskConfigurationAsync(SetRiskConfigurationRequest request) { return setRiskConfigurationAsync(request, null); } @Override public java.util.concurrent.Future<SetRiskConfigurationResult> setRiskConfigurationAsync(SetRiskConfigurationRequest request, com.amazonaws.handlers.AsyncHandler<SetRiskConfigurationRequest, SetRiskConfigurationResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public 
java.util.concurrent.Future<SetUICustomizationResult> setUICustomizationAsync(SetUICustomizationRequest request) { return setUICustomizationAsync(request, null); } @Override public java.util.concurrent.Future<SetUICustomizationResult> setUICustomizationAsync(SetUICustomizationRequest request, com.amazonaws.handlers.AsyncHandler<SetUICustomizationRequest, SetUICustomizationResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<SetUserMFAPreferenceResult> setUserMFAPreferenceAsync(SetUserMFAPreferenceRequest request) { return setUserMFAPreferenceAsync(request, null); } @Override public java.util.concurrent.Future<SetUserMFAPreferenceResult> setUserMFAPreferenceAsync(SetUserMFAPreferenceRequest request, com.amazonaws.handlers.AsyncHandler<SetUserMFAPreferenceRequest, SetUserMFAPreferenceResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<SetUserPoolMfaConfigResult> setUserPoolMfaConfigAsync(SetUserPoolMfaConfigRequest request) { return setUserPoolMfaConfigAsync(request, null); } @Override public java.util.concurrent.Future<SetUserPoolMfaConfigResult> setUserPoolMfaConfigAsync(SetUserPoolMfaConfigRequest request, com.amazonaws.handlers.AsyncHandler<SetUserPoolMfaConfigRequest, SetUserPoolMfaConfigResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<SetUserSettingsResult> setUserSettingsAsync(SetUserSettingsRequest request) { return setUserSettingsAsync(request, null); } @Override public java.util.concurrent.Future<SetUserSettingsResult> setUserSettingsAsync(SetUserSettingsRequest request, com.amazonaws.handlers.AsyncHandler<SetUserSettingsRequest, SetUserSettingsResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<SignUpResult> signUpAsync(SignUpRequest request) { return signUpAsync(request, 
null); } @Override public java.util.concurrent.Future<SignUpResult> signUpAsync(SignUpRequest request, com.amazonaws.handlers.AsyncHandler<SignUpRequest, SignUpResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<StartUserImportJobResult> startUserImportJobAsync(StartUserImportJobRequest request) { return startUserImportJobAsync(request, null); } @Override public java.util.concurrent.Future<StartUserImportJobResult> startUserImportJobAsync(StartUserImportJobRequest request, com.amazonaws.handlers.AsyncHandler<StartUserImportJobRequest, StartUserImportJobResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<StopUserImportJobResult> stopUserImportJobAsync(StopUserImportJobRequest request) { return stopUserImportJobAsync(request, null); } @Override public java.util.concurrent.Future<StopUserImportJobResult> stopUserImportJobAsync(StopUserImportJobRequest request, com.amazonaws.handlers.AsyncHandler<StopUserImportJobRequest, StopUserImportJobResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<TagResourceResult> tagResourceAsync(TagResourceRequest request) { return tagResourceAsync(request, null); } @Override public java.util.concurrent.Future<TagResourceResult> tagResourceAsync(TagResourceRequest request, com.amazonaws.handlers.AsyncHandler<TagResourceRequest, TagResourceResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<UntagResourceResult> untagResourceAsync(UntagResourceRequest request) { return untagResourceAsync(request, null); } @Override public java.util.concurrent.Future<UntagResourceResult> untagResourceAsync(UntagResourceRequest request, com.amazonaws.handlers.AsyncHandler<UntagResourceRequest, UntagResourceResult> asyncHandler) { throw new 
java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<UpdateAuthEventFeedbackResult> updateAuthEventFeedbackAsync(UpdateAuthEventFeedbackRequest request) { return updateAuthEventFeedbackAsync(request, null); } @Override public java.util.concurrent.Future<UpdateAuthEventFeedbackResult> updateAuthEventFeedbackAsync(UpdateAuthEventFeedbackRequest request, com.amazonaws.handlers.AsyncHandler<UpdateAuthEventFeedbackRequest, UpdateAuthEventFeedbackResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<UpdateDeviceStatusResult> updateDeviceStatusAsync(UpdateDeviceStatusRequest request) { return updateDeviceStatusAsync(request, null); } @Override public java.util.concurrent.Future<UpdateDeviceStatusResult> updateDeviceStatusAsync(UpdateDeviceStatusRequest request, com.amazonaws.handlers.AsyncHandler<UpdateDeviceStatusRequest, UpdateDeviceStatusResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<UpdateGroupResult> updateGroupAsync(UpdateGroupRequest request) { return updateGroupAsync(request, null); } @Override public java.util.concurrent.Future<UpdateGroupResult> updateGroupAsync(UpdateGroupRequest request, com.amazonaws.handlers.AsyncHandler<UpdateGroupRequest, UpdateGroupResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<UpdateIdentityProviderResult> updateIdentityProviderAsync(UpdateIdentityProviderRequest request) { return updateIdentityProviderAsync(request, null); } @Override public java.util.concurrent.Future<UpdateIdentityProviderResult> updateIdentityProviderAsync(UpdateIdentityProviderRequest request, com.amazonaws.handlers.AsyncHandler<UpdateIdentityProviderRequest, UpdateIdentityProviderResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public 
java.util.concurrent.Future<UpdateResourceServerResult> updateResourceServerAsync(UpdateResourceServerRequest request) { return updateResourceServerAsync(request, null); } @Override public java.util.concurrent.Future<UpdateResourceServerResult> updateResourceServerAsync(UpdateResourceServerRequest request, com.amazonaws.handlers.AsyncHandler<UpdateResourceServerRequest, UpdateResourceServerResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<UpdateUserAttributesResult> updateUserAttributesAsync(UpdateUserAttributesRequest request) { return updateUserAttributesAsync(request, null); } @Override public java.util.concurrent.Future<UpdateUserAttributesResult> updateUserAttributesAsync(UpdateUserAttributesRequest request, com.amazonaws.handlers.AsyncHandler<UpdateUserAttributesRequest, UpdateUserAttributesResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<UpdateUserPoolResult> updateUserPoolAsync(UpdateUserPoolRequest request) { return updateUserPoolAsync(request, null); } @Override public java.util.concurrent.Future<UpdateUserPoolResult> updateUserPoolAsync(UpdateUserPoolRequest request, com.amazonaws.handlers.AsyncHandler<UpdateUserPoolRequest, UpdateUserPoolResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<UpdateUserPoolClientResult> updateUserPoolClientAsync(UpdateUserPoolClientRequest request) { return updateUserPoolClientAsync(request, null); } @Override public java.util.concurrent.Future<UpdateUserPoolClientResult> updateUserPoolClientAsync(UpdateUserPoolClientRequest request, com.amazonaws.handlers.AsyncHandler<UpdateUserPoolClientRequest, UpdateUserPoolClientResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<UpdateUserPoolDomainResult> 
updateUserPoolDomainAsync(UpdateUserPoolDomainRequest request) { return updateUserPoolDomainAsync(request, null); } @Override public java.util.concurrent.Future<UpdateUserPoolDomainResult> updateUserPoolDomainAsync(UpdateUserPoolDomainRequest request, com.amazonaws.handlers.AsyncHandler<UpdateUserPoolDomainRequest, UpdateUserPoolDomainResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<VerifySoftwareTokenResult> verifySoftwareTokenAsync(VerifySoftwareTokenRequest request) { return verifySoftwareTokenAsync(request, null); } @Override public java.util.concurrent.Future<VerifySoftwareTokenResult> verifySoftwareTokenAsync(VerifySoftwareTokenRequest request, com.amazonaws.handlers.AsyncHandler<VerifySoftwareTokenRequest, VerifySoftwareTokenResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<VerifyUserAttributeResult> verifyUserAttributeAsync(VerifyUserAttributeRequest request) { return verifyUserAttributeAsync(request, null); } @Override public java.util.concurrent.Future<VerifyUserAttributeResult> verifyUserAttributeAsync(VerifyUserAttributeRequest request, com.amazonaws.handlers.AsyncHandler<VerifyUserAttributeRequest, VerifyUserAttributeResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } }
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.location.suplclient.asn1.supl2.rrlp_components;

// Copyright 2008 Google Inc. All Rights Reserved.

/*
 * This class is AUTOMATICALLY GENERATED. Do NOT EDIT.
 */

import com.google.location.suplclient.asn1.base.Asn1Object;
import com.google.location.suplclient.asn1.base.Asn1Sequence;
import com.google.location.suplclient.asn1.base.Asn1Tag;
import com.google.location.suplclient.asn1.base.BitStream;
import com.google.location.suplclient.asn1.base.BitStreamReader;
import com.google.location.suplclient.asn1.base.SequenceComponent;
import com.google.common.collect.ImmutableList;
import java.util.Collection;
import javax.annotation.Nullable;

/**
 * Generated model for the RRLP ASN.1 SEQUENCE "GPSTime", holding two mandatory
 * components: a GPSTOW23b (time-of-week) and a GPSWeek (week number).
 * Supports PER aligned/unaligned encode and decode via the Asn1Sequence base.
 */
public class GPSTime extends Asn1Sequence {
  //
  // Sentinel tag (class -1, number -1): presumably means "no explicit tag" so
  // base-class defaults apply — TODO confirm against Asn1Tag semantics.
  private static final Asn1Tag TAG_GPSTime = Asn1Tag.fromClassAndNumber(-1, -1);

  public GPSTime() {
    super();
  }

  @Override
  @Nullable
  protected Asn1Tag getTag() {
    return TAG_GPSTime;
  }

  @Override
  protected boolean isTagImplicit() {
    return true;
  }

  public static Collection<Asn1Tag> getPossibleFirstTags() {
    if (TAG_GPSTime != null) {
      return ImmutableList.of(TAG_GPSTime);
    } else {
      return Asn1Sequence.getPossibleFirstTags();
    }
  }

  /**
   * Creates a new GPSTime from encoded stream.
   */
  public static GPSTime fromPerUnaligned(byte[] encodedBytes) {
    GPSTime result = new GPSTime();
    result.decodePerUnaligned(new BitStreamReader(encodedBytes));
    return result;
  }

  /**
   * Creates a new GPSTime from encoded stream.
   */
  public static GPSTime fromPerAligned(byte[] encodedBytes) {
    GPSTime result = new GPSTime();
    result.decodePerAligned(new BitStreamReader(encodedBytes));
    return result;
  }

  // The ASN.1 definition carries no extension marker ("..."), so this
  // sequence is closed.
  @Override
  protected boolean isExtensible() {
    return false;
  }

  @Override
  public boolean containsExtensionValues() {
    for (SequenceComponent extensionComponent : getExtensionComponents()) {
      if (extensionComponent.isExplicitlySet()) return true;
    }
    return false;
  }

  // Mandatory component #0: GPS time of week (23-bit form).
  private GPSTOW23b gpsTOW23b_;

  public GPSTOW23b getGpsTOW23b() {
    return gpsTOW23b_;
  }

  /**
   * @throws ClassCastException if value is not a GPSTOW23b
   */
  public void setGpsTOW23b(Asn1Object value) {
    this.gpsTOW23b_ = (GPSTOW23b) value;
  }

  public GPSTOW23b setGpsTOW23bToNewInstance() {
    gpsTOW23b_ = new GPSTOW23b();
    return gpsTOW23b_;
  }

  // Mandatory component #1: GPS week number.
  private GPSWeek gpsWeek_;

  public GPSWeek getGpsWeek() {
    return gpsWeek_;
  }

  /**
   * @throws ClassCastException if value is not a GPSWeek
   */
  public void setGpsWeek(Asn1Object value) {
    this.gpsWeek_ = (GPSWeek) value;
  }

  public GPSWeek setGpsWeekToNewInstance() {
    gpsWeek_ = new GPSWeek();
    return gpsWeek_;
  }

  // Describes the two sequence components (context tags 0 and 1, implicit,
  // both required) for the generic encode/decode machinery in Asn1Sequence.
  @Override
  public Iterable<? extends SequenceComponent> getComponents() {
    ImmutableList.Builder<SequenceComponent> builder = ImmutableList.builder();

    builder.add(new SequenceComponent() {
      Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 0);

      @Override
      public boolean isExplicitlySet() {
        return getGpsTOW23b() != null;
      }

      @Override
      public boolean hasDefaultValue() {
        return false;
      }

      @Override
      public boolean isOptional() {
        return false;
      }

      @Override
      public Asn1Object getComponentValue() {
        return getGpsTOW23b();
      }

      @Override
      public void setToNewInstance() {
        setGpsTOW23bToNewInstance();
      }

      @Override
      public Collection<Asn1Tag> getPossibleFirstTags() {
        return tag == null ? GPSTOW23b.getPossibleFirstTags() : ImmutableList.of(tag);
      }

      @Override
      public Asn1Tag getTag() {
        return tag;
      }

      @Override
      public boolean isImplicitTagging() {
        return true;
      }

      @Override
      public String toIndentedString(String indent) {
        return "gpsTOW23b : " + getGpsTOW23b().toIndentedString(indent);
      }
    });

    builder.add(new SequenceComponent() {
      Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 1);

      @Override
      public boolean isExplicitlySet() {
        return getGpsWeek() != null;
      }

      @Override
      public boolean hasDefaultValue() {
        return false;
      }

      @Override
      public boolean isOptional() {
        return false;
      }

      @Override
      public Asn1Object getComponentValue() {
        return getGpsWeek();
      }

      @Override
      public void setToNewInstance() {
        setGpsWeekToNewInstance();
      }

      @Override
      public Collection<Asn1Tag> getPossibleFirstTags() {
        return tag == null ? GPSWeek.getPossibleFirstTags() : ImmutableList.of(tag);
      }

      @Override
      public Asn1Tag getTag() {
        return tag;
      }

      @Override
      public boolean isImplicitTagging() {
        return true;
      }

      @Override
      public String toIndentedString(String indent) {
        return "gpsWeek : " + getGpsWeek().toIndentedString(indent);
      }
    });

    return builder.build();
  }

  // No extension components: the sequence is not extensible.
  @Override
  public Iterable<? extends SequenceComponent> getExtensionComponents() {
    ImmutableList.Builder<SequenceComponent> builder = ImmutableList.builder();
    return builder.build();
  }

  @Override
  public Iterable<BitStream> encodePerUnaligned() {
    return super.encodePerUnaligned();
  }

  @Override
  public Iterable<BitStream> encodePerAligned() {
    return super.encodePerAligned();
  }

  @Override
  public void decodePerUnaligned(BitStreamReader reader) {
    super.decodePerUnaligned(reader);
  }

  @Override
  public void decodePerAligned(BitStreamReader reader) {
    super.decodePerAligned(reader);
  }

  @Override
  public String toString() {
    return toIndentedString("");
  }

  public String toIndentedString(String indent) {
    StringBuilder builder = new StringBuilder();
    builder.append("GPSTime = {\n");
    final String internalIndent = indent + " ";
    for (SequenceComponent component : getComponents()) {
      if (component.isExplicitlySet()) {
        builder.append(internalIndent)
            .append(component.toIndentedString(internalIndent));
      }
    }
    if (isExtensible()) {
      builder.append(internalIndent).append("...\n");
      for (SequenceComponent component : getExtensionComponents()) {
        if (component.isExplicitlySet()) {
          builder.append(internalIndent)
              .append(component.toIndentedString(internalIndent));
        }
      }
    }
    builder.append(indent).append("};\n");
    return builder.toString();
  }
}
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package com.exacttarget.fuelsdk;

import com.exacttarget.fuelsdk.internal.ExtractOptions;
import com.exacttarget.fuelsdk.internal.ExtractParameter;
import com.exacttarget.fuelsdk.internal.ExtractRequest;
import com.exacttarget.fuelsdk.internal.ExtractRequestMsg;
import com.exacttarget.fuelsdk.internal.ExtractResponseMsg;
import com.exacttarget.fuelsdk.internal.Soap;
import java.lang.reflect.Field;
import java.lang.reflect.Type;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * An <code>ETDataExtract</code> object can perform data extract activity for data extension and tracking data
 * in the Salesforce Marketing Cloud.
 *
 * <p>IMPORTANT: the PascalCase field names below (e.g. {@code DECustomerKey},
 * {@code StartDate}) are load-bearing — {@link #performDataExtract} looks them
 * up by name via reflection and sends the field name as the SOAP parameter
 * name. Do not rename them.
 */
public class ETDataExtract {
    private Soap soap;
    /** Maps extract-definition names (e.g. "Tracking Extract") to their IDs. */
    public HashMap<String, String> extractType;
    private SimpleDateFormat dateFormat;
    private Date StartDate;
    private Date EndDate;
    private String DECustomerKey;
    private String _AsyncID;
    private String OutputFileName;
    private boolean HasColumnHeaders;
    private boolean ExtractBounces;
    private boolean ExtractClicks;
    private boolean ExtractConversions;
    private boolean ExtractOpens;
    private boolean ExtractSends;
    private boolean ExtractSendJobs;
    private boolean ExtractSurveyResponses;
    private boolean IncludeTestSends;
    private boolean ExtractUnsubs;

    /**
     * Class constructor, Initializes a new instance of the class.
     * @param client the ETCleint object
     */
    public ETDataExtract(ETClient client) {
        try {
            extractType = new HashMap<String, String>();
            soap = client.getSoapConnection().getSoap("Extract");
            dateFormat = new SimpleDateFormat("yyyy-MM-dd hh:mm aa");
            // Default flag values: everything on except bounces and test sends.
            ExtractBounces = false;
            ExtractClicks = true;
            ExtractConversions = true;
            ExtractOpens = true;
            ExtractSendJobs = true;
            ExtractSends = true;
            ExtractSurveyResponses = true;
            IncludeTestSends = false;
            ExtractUnsubs = true;
            HasColumnHeaders = true;
            _AsyncID = "0";
            populateExtractType(client);
        } catch (Exception ex) {
            // NOTE(review): failure here leaves the instance half-initialized
            // (soap may be null); kept best-effort to preserve existing behavior.
            Logger.getLogger(ETDataExtract.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Sends a data extension extract request.
     * @param deCustomerKey the customer key of the data extension
     * @param outputFileName the output file name
     * @param hasColumnHeaders true if the output file should contain column headers, false otherwise
     * @return ExtractResponseMsg object which holds the status, request ID, etc
     * @throws Exception
     */
    public ExtractResponseMsg extractDataExtension(
            String deCustomerKey, String outputFileName, boolean hasColumnHeaders) throws Exception {
        setDECustomerKey(deCustomerKey);
        setHasColumnHeaders(hasColumnHeaders);
        setOutputFileName(outputFileName);
        return extractDataExtension();
    }

    /**
     * Sends a data extension extract request.
     *
     * <p>If no start/end date has been supplied via {@link #setStartDate} /
     * {@link #setEndDate}, both default to 2017-08-01 12:00 AM. (Previously the
     * hard-coded default unconditionally overwrote caller-supplied dates,
     * making the setters no-ops — fixed to honor them.)
     *
     * @return ExtractResponseMsg object which holds the status, request ID, etc
     * @throws Exception
     */
    public ExtractResponseMsg extractDataExtension() throws Exception {
        validate();
        String dateInString = "2017-08-01 12:00 AM";
        if (StartDate == null) {
            StartDate = dateFormat.parse(dateInString);
        }
        if (EndDate == null) {
            EndDate = dateFormat.parse(dateInString);
        }
        String[] paramNames = {
            "DECustomerKey", "HasColumnHeaders", "_AsyncID",
            "OutputFileName", "StartDate", "EndDate"
        };
        return performDataExtract("Data Extension Extract", paramNames);
    }

    /**
     * Sends an extract request for tracking data.
     * @return ExtractResponseMsg object which holds the status, request ID, etc
     * @throws Exception
     */
    public ExtractResponseMsg extractTrackingData() throws Exception {
        validateTrackingDataParams();
        String[] paramNames = {
            "ExtractClicks", "ExtractBounces", "ExtractConversions",
            "ExtractSendJobs", "ExtractSends", "ExtractSurveyResponses",
            "IncludeTestSends", "ExtractUnsubs", "ExtractOpens",
            "OutputFileName", "StartDate", "EndDate"
        };
        return performDataExtract("Tracking Extract", paramNames);
    }

    /**
     * Builds and sends an ExtractRequest whose parameters are read from this
     * object's fields by reflection (field name == SOAP parameter name).
     * Null-valued fields are skipped; Date values are formatted with
     * {@link #dateFormat}.
     */
    private ExtractResponseMsg performDataExtract(String extractName, String[] paramNames) throws Exception {
        ArrayList<ExtractParameter> extractParameters = new ArrayList<>();
        Class<?> obj = ETDataExtract.class;
        for (String param : paramNames) {
            Field field = obj.getDeclaredField(param);
            Object value = field.get(this);
            if (value == null) {
                continue; // unset optional parameter — omit from the request
            }
            ExtractParameter extractParam = new ExtractParameter();
            extractParam.setName(field.getName());
            if (value instanceof Date) {
                extractParam.setValue(dateFormat.format(value));
            } else {
                extractParam.setValue(value.toString());
            }
            extractParameters.add(extractParam);
        }
        ExtractRequest.Parameters eparams = new ExtractRequest.Parameters();
        eparams.getParameter().addAll(extractParameters);
        ExtractRequest request = new ExtractRequest();
        request.setOptions(new ExtractOptions());
        request.setID(extractType.get(extractName));
        request.setParameters(eparams);
        ExtractRequestMsg erm = new ExtractRequestMsg();
        erm.getRequests().add(request);
        return soap.extract(erm);
    }

    /** Validates the data-extension extract preconditions. */
    private void validate() throws ETSdkException {
        if (this.getDECustomerKey() == null || this.getOutputFileName() == null)
            throw new ETSdkException("Customer Key and Output file name needs to be set.");
        String ext = this.getOutputFileName().toLowerCase();
        if (!ext.endsWith(".csv") && !ext.endsWith(".zip"))
            throw new ETSdkException("Invalid file extension. Only csv or zip allowed.");
    }

    /** Validates the tracking-extract preconditions. */
    private void validateTrackingDataParams() throws ETSdkException {
        // Fixed: previously compared trim() == "" with reference equality,
        // which is false for any non-interned empty string.
        if (this.getOutputFileName() == null || this.getOutputFileName().trim().isEmpty())
            throw new ETSdkException("Output file name can not be empty or null.");
        String ext = this.getOutputFileName().toLowerCase();
        if (!ext.endsWith(".zip"))
            throw new ETSdkException("Invalid file extension. Only zip allowed.");
    }

    /** Fills {@link #extractType} from the server's extract descriptions. */
    private void populateExtractType(ETClient client) throws ETSdkException {
        ETResponse<ETExtractDescription> response = client.retrieve(ETExtractDescription.class);
        for (ETResult<ETExtractDescription> r : response.getResults()) {
            extractType.put(r.getObject().getName(), r.getObject().getId());
        }
    }

    /**
     * @return the outputFileName
     */
    public String getOutputFileName() {
        return OutputFileName;
    }

    /**
     * @param outputFileName the outputFileName to set
     */
    public void setOutputFileName(String outputFileName) {
        this.OutputFileName = outputFileName;
    }

    /**
     * @param StartDate the StartDate to set
     */
    public void setStartDate(Date StartDate) {
        this.StartDate = StartDate;
    }

    /**
     * @param EndDate the EndDate to set
     */
    public void setEndDate(Date EndDate) {
        this.EndDate = EndDate;
    }

    /**
     * @return the DECustomerKey
     */
    public String getDECustomerKey() {
        return DECustomerKey;
    }

    /**
     * @param DECustomerKey the DECustomerKey to set
     */
    public void setDECustomerKey(String DECustomerKey) {
        this.DECustomerKey = DECustomerKey;
    }

    /**
     * @return the ExtractBounces
     */
    public boolean isExtractBounces() {
        return ExtractBounces;
    }

    /**
     * @param ExtractBounces the ExtractBounces to set
     */
    public void setExtractBounces(boolean ExtractBounces) {
        this.ExtractBounces = ExtractBounces;
    }

    /**
     * @return the ExtractClicks
     */
    public boolean isExtractClicks() {
        return ExtractClicks;
    }

    /**
     * @param ExtractClicks the ExtractClicks to set
     */
    public void setExtractClicks(boolean ExtractClicks) {
        this.ExtractClicks = ExtractClicks;
    }

    /**
     * @return the ExtractConversions
     */
    public boolean isExtractConversions() {
        return ExtractConversions;
    }

    /**
     * @param ExtractConversions the ExtractConversions to set
     */
    public void setExtractConversions(boolean ExtractConversions) {
        this.ExtractConversions = ExtractConversions;
    }

    /**
     * @return the ExtractOpens
     */
    public boolean isExtractOpens() {
        return ExtractOpens;
    }

    /**
     * @param ExtractOpens the ExtractOpens to set
     */
    public void setExtractOpens(boolean ExtractOpens) {
        this.ExtractOpens = ExtractOpens;
    }

    /**
     * @return the ExtractSends
     */
    public boolean isExtractSends() {
        return ExtractSends;
    }

    /**
     * @param ExtractSends the ExtractSends to set
     */
    public void setExtractSends(boolean ExtractSends) {
        this.ExtractSends = ExtractSends;
    }

    /**
     * @return the ExtractSendJobs
     */
    public boolean isExtractSendJobs() {
        return ExtractSendJobs;
    }

    /**
     * @param ExtractSendJobs the ExtractSendJobs to set
     */
    public void setExtractSendJobs(boolean ExtractSendJobs) {
        this.ExtractSendJobs = ExtractSendJobs;
    }

    /**
     * @return the ExtractSurveyResponses
     */
    public boolean isExtractSurveyResponses() {
        return ExtractSurveyResponses;
    }

    /**
     * @param ExtractSurveyResponses the ExtractSurveyResponses to set
     */
    public void setExtractSurveyResponses(boolean ExtractSurveyResponses) {
        this.ExtractSurveyResponses = ExtractSurveyResponses;
    }

    /**
     * @return the IncludeTestSends
     */
    public boolean isIncludeTestSends() {
        return IncludeTestSends;
    }

    /**
     * @param IncludeTestSends the IncludeTestSends to set
     */
    public void setIncludeTestSends(boolean IncludeTestSends) {
        this.IncludeTestSends = IncludeTestSends;
    }

    /**
     * @return the ExtractUnsubs
     */
    public boolean isExtractUnsubs() {
        return ExtractUnsubs;
    }

    /**
     * @param ExtractUnsubs the ExtractUnsubs to set
     */
    public void setExtractUnsubs(boolean ExtractUnsubs) {
        this.ExtractUnsubs = ExtractUnsubs;
    }

    /**
     * @return the StartDate
     */
    public Date getStartDate() {
        return StartDate;
    }

    /**
     * @return the EndDate
     */
    public Date getEndDate() {
        return EndDate;
    }

    /**
     * @return the _AsyncID
     */
    public String getAsyncID() {
        return _AsyncID;
    }

    /**
     * @param _AsyncID the _AsyncID to set
     */
    private void setAsyncID(String _AsyncID) {
        this._AsyncID = _AsyncID;
    }

    /**
     * @return the HasColumnHeaders
     */
    public boolean isHasColumnHeaders() {
        return HasColumnHeaders;
    }

    /**
     * @param HasColumnHeaders the HasColumnHeaders to set
     */
    public void setHasColumnHeaders(boolean HasColumnHeaders) {
        this.HasColumnHeaders = HasColumnHeaders;
    }
}
package com.surevine.buddycloud.payload.validator;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.StringReader;
import java.util.Date;
import java.util.concurrent.LinkedBlockingQueue;

import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import org.xmpp.packet.IQ;
import org.xmpp.packet.JID;
import org.xmpp.packet.Packet;
import org.apache.commons.io.IOUtils;
import org.buddycloud.channelserver.channel.ChannelManager;
import org.buddycloud.channelserver.packetprocessor.iq.namespace.pubsub.get.RepliesGet;
import org.buddycloud.channelserver.pubsub.model.GlobalItemID;
import org.buddycloud.channelserver.pubsub.model.NodeItem;
import org.buddycloud.channelserver.pubsub.model.impl.GlobalItemIDImpl;
import org.buddycloud.channelserver.pubsub.model.impl.NodeItemImpl;
import org.buddycloud.channelserver.utils.node.item.payload.ActivityStreams;
import org.dom4j.DocumentException;
import org.dom4j.Element;
import org.dom4j.dom.DOMElement;
import org.dom4j.io.SAXReader;

import junit.framework.Assert;
import junit.framework.TestCase;

/**
 * Unit tests for the AtomEntry payload validator: verifies that incoming Atom
 * entries are validated, normalized (ids, titles, timestamps, author,
 * activity-stream metadata added) and that reply/rating semantics are enforced.
 *
 * TODO Additional work required:
 *
 * - Check for (and add) activity stream verb
 *
 * - Test 'media' if present
 *
 * - Test 'meta' if present
 *
 */
public class AtomEntryTest {

    public final static String STANZA_PATH = "src/test/resources/stanzas";

    private AtomEntry validator;
    // Fixture stanzas: a plain publish, a reply, and a rating.
    private IQ publishRequest;
    private Element publishEntry;
    private IQ replyRequest;
    private Element replyEntry;
    private IQ ratingRequest;
    private Element ratingEntry;
    private ChannelManager channelManager;

    JID jid = new JID("juliet@shakespeare.lit/balcony");
    String node = "/users/romeo@shakespeare.lit/posts";
    String server = "channels.shakespeare.lit";
    private Element emptyEntry;

    @Before
    public void setUp() throws Exception {
        publishRequest = readStanzaAsIq("/iq/pubsub/publish/request.stanza");
        publishEntry = publishRequest.getChildElement().element("publish")
                .element("item");
        replyRequest = readStanzaAsIq("/iq/pubsub/publish/reply.stanza");
        replyEntry = replyRequest.getChildElement().element("publish")
                .element("item");
        ratingRequest = readStanzaAsIq("/iq/pubsub/publish/rating.stanza");
        ratingEntry = ratingRequest.getChildElement().element("publish")
                .element("item");
        // By default every item lookup succeeds with a top-level post.
        channelManager = Mockito.mock(ChannelManager.class);
        NodeItem item = new NodeItemImpl(node, "1", new Date(), "<entry/>");
        Mockito.when(
                channelManager.getNodeItem(Mockito.eq(node),
                        Mockito.anyString())).thenReturn(item);
    }

    // Builds a validator wired to the mocked channel manager and fixture ids.
    private AtomEntry getEntryObject(Element item) {
        AtomEntry validate = new AtomEntry(item);
        validate.setNode(node);
        validate.setTo(server);
        validate.setUser(jid);
        validate.setChannelManager(channelManager);
        return validate;
    }

    @Test
    public void notProvidingAnEntryReturnsError() throws Exception {
        validator = new AtomEntry(null);
        Assert.assertFalse(validator.isValid());
        Assert.assertEquals(AtomEntry.MISSING_ENTRY_ELEMENT,
                validator.getErrorMessage());
    }

    @Test
    public void missingIdAttributeGetsAdded() throws Exception {
        Assert.assertEquals("96da02ee1baef61e767742844207bec4", publishEntry
                .element("entry").elementText("id"));
        Element item = (Element) this.publishEntry.clone();
        item.element("entry").element("id").detach();
        validator = getEntryObject(item);
        Assert.assertTrue(validator.isValid());
        Element entry = validator.getPayload();
        Assert.assertTrue(entry.elementText("id").contains(
                "tag:" + server + "," + node + ","));
    }

    @Test
    public void emptyIdElementHasValueAdded() throws Exception {
        Assert.assertEquals("96da02ee1baef61e767742844207bec4", publishEntry
                .element("entry").elementText("id"));
        Element item = (Element) this.publishEntry.clone();
        item.element("entry").element("id").detach();
        item.element("entry").addElement("id");
        validator = getEntryObject(item);
        Assert.assertTrue(validator.isValid());
        Element entry = validator.getPayload();
        Assert.assertTrue(entry.elementText("id").contains(
                "tag:" + server + "," + node + ","));
    }

    @Test
    public void idElementIsIgnored() throws Exception {
        String id = "96da02ee1baef61e767742844207bec4";
        Assert.assertEquals(id, publishEntry.element("entry").elementText("id"));
        Element item = (Element) this.publishEntry.clone();
        validator = getEntryObject(item);
        Assert.assertTrue(validator.isValid());
        Element entry = validator.getPayload();
        Assert.assertFalse(entry.elementText("id").contains(id));
    }

    @Test
    public void missingTitleElementIsAdded() throws Exception {
        Assert.assertEquals("Post title", publishEntry.element("entry")
                .elementText("title"));
        Element item = (Element) this.publishEntry.clone();
        item.element("entry").element("title").detach();
        validator = getEntryObject(item);
        Assert.assertTrue(validator.isValid());
        Element entry = validator.getPayload();
        Assert.assertEquals("Post", entry.elementText("title"));
    }

    @Test
    public void missingContentElementReturnsInvalid() throws Exception {
        Assert.assertNotNull(publishEntry.element("entry").element("content"));
        Element item = (Element) this.publishEntry.clone();
        item.element("entry").element("content").detach();
        validator = getEntryObject(item);
        Assert.assertFalse(validator.isValid());
        Assert.assertEquals(AtomEntry.MISSING_CONTENT_ELEMENT,
                validator.getErrorMessage());
    }

    @Test
    public void missingUpdatedElementHasValueAdded() throws Exception {
        Assert.assertEquals("2014-01-01T00:00:00.000Z",
                publishEntry.element("entry").elementText("updated"));
        Element item = (Element) this.publishEntry.clone();
        item.element("entry").element("updated").detach();
        validator = getEntryObject(item);
        Assert.assertTrue(validator.isValid());
        Element entry = validator.getPayload();
        Assert.assertTrue(entry
                .elementText("updated")
                .matches(
                        "[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z"));
    }

    @Test
    public void updateDateIsIgnored() throws Exception {
        String dateString = "2014-01-01T00:00:00.000Z";
        Assert.assertEquals(dateString, publishEntry.element("entry")
                .elementText("updated"));
        Element item = (Element) this.publishEntry.clone();
        validator = getEntryObject(item);
        Assert.assertTrue(validator.isValid());
        Element entry = validator.getPayload();
        Assert.assertFalse(entry.elementText("updated").equals(dateString));
    }

    @Test
    public void authorEntryIsAdded() throws Exception {
        Element item = (Element) this.publishEntry.clone();
        validator = getEntryObject(item);
        Assert.assertTrue(validator.isValid());
        Element entry = validator.getPayload();
        Element author = entry.element("author");
        Assert.assertNotNull(author);
        Assert.assertEquals(AtomEntry.AUTHOR_URI_PREFIX + jid.toBareJID(),
                author.elementText("uri"));
        Assert.assertEquals(jid.toBareJID(), author.elementText("name"));
        Assert.assertEquals(AtomEntry.AUTHOR_TYPE,
                author.elementText("object-type"));
    }

    @Test
    public void geolocationEntryIsAdded() throws Exception {
        // Geoloc is kept on top-level posts but stripped from replies.
        Element item = (Element) this.publishEntry.clone();
        validator = getEntryObject(item);
        Assert.assertTrue(validator.isValid());
        Element entry = validator.getPayload();
        Assert.assertNotNull(entry.element("geoloc"));
        item = (Element) this.replyEntry.clone();
        validator = getEntryObject(item);
        Assert.assertTrue(validator.isValid());
        entry = validator.getPayload();
        Assert.assertNull(entry.element("geoloc"));
    }

    @Test
    public void globalInReplyToIdIsMadeALocalId() throws Exception {
        Assert.assertEquals(
                "tag:channels.shakespeare.lit,/users/romeo@shakespeare.lit/posts,fc362eb42085f017ed9ccd9c4004b095",
                replyEntry.element("entry").element("in-reply-to")
                        .attributeValue("ref"));
        Element item = (Element) this.replyEntry.clone();
        validator = getEntryObject(item);
        Assert.assertTrue(validator.isValid());
        Element entry = validator.getPayload();
        Assert.assertEquals("fc362eb42085f017ed9ccd9c4004b095",
                entry.element("in-reply-to").attributeValue("ref"));
    }

    @Test
    public void postGetsNoteTypeReplyGetsCommentType() throws Exception {
        Element item = (Element) this.publishEntry.clone();
        validator = getEntryObject(item);
        Assert.assertTrue(validator.isValid());
        Element entry = validator.getPayload();
        Assert.assertEquals(AtomEntry.POST_TYPE_NOTE, entry.element("object")
                .elementText("object-type"));
        item = (Element) this.replyEntry.clone();
        validator = getEntryObject(item);
        Assert.assertTrue(validator.isValid());
        entry = validator.getPayload();
        Assert.assertEquals(AtomEntry.POST_TYPE_COMMENT, entry
                .element("object").elementText("object-type"));
    }

    @Test
    public void badContentTypeReturnsError() throws Exception {
        Element item = (Element) this.publishEntry.clone();
        item.element("entry").element("content").attribute("type")
                .setText("emojis");
        validator = getEntryObject(item);
        Assert.assertFalse(validator.isValid());
        Assert.assertEquals(AtomEntry.UNSUPPORTED_CONTENT_TYPE,
                validator.getErrorMessage());
    }

    @Test
    public void noContentTypeGetsDefaultedToText() throws Exception {
        Element item = (Element) this.publishEntry.clone();
        item.element("entry").element("content").attribute("type").detach();
        validator = getEntryObject(item);
        Assert.assertTrue(validator.isValid());
        Element entry = validator.getPayload();
        Assert.assertEquals(AtomEntry.CONTENT_TEXT, entry.element("content")
                .attributeValue("type"));
    }

    @Test
    public void contentTypeGetsAddedAsRequired() throws Exception {
        Element item = (Element) this.publishEntry.clone();
        item.element("entry").element("content").attribute("type")
                .setText(AtomEntry.CONTENT_XHTML);
        validator = getEntryObject(item);
        Assert.assertTrue(validator.isValid());
        Element entry = validator.getPayload();
        Assert.assertEquals(AtomEntry.CONTENT_XHTML, entry.element("content")
                .attributeValue("type"));
    }

    @Test
    public void activityVerbIsAdded() throws Exception {
        Element item = (Element) this.publishEntry.clone();
        validator = getEntryObject(item);
        Assert.assertTrue(validator.isValid());
        Element entry = validator.getPayload();
        Assert.assertEquals(AtomEntry.ACTIVITY_VERB_POST,
                entry.elementText("verb"));
    }

    @Test
    public void replyParentItemNotFoundResultsInError() throws Exception {
        Mockito.when(
                channelManager.getNodeItem(Mockito.eq(node),
                        Mockito.anyString())).thenReturn(null);
        Element item = (Element) this.replyEntry.clone();
        validator = getEntryObject(item);
        Assert.assertFalse(validator.isValid());
        Assert.assertEquals(AtomEntry.PARENT_ITEM_NOT_FOUND,
                validator.getErrorMessage());
    }

    @Test
    public void canNotReplyToAReply() throws Exception {
        // Parent item itself has an in-reply-to ("1"), i.e. it is already a reply.
        NodeItem nodeItem = new NodeItemImpl(node, "2", new Date(), "<entry/>",
                "1");
        Mockito.when(
                channelManager.getNodeItem(Mockito.eq(node),
                        Mockito.anyString())).thenReturn(nodeItem);
        Element item = (Element) this.replyEntry.clone();
        validator = getEntryObject(item);
        Assert.assertFalse(validator.isValid());
        Assert.assertEquals(AtomEntry.MAX_THREAD_DEPTH_EXCEEDED,
                validator.getErrorMessage());
    }

    @Test
    public void targetElementWithoutInReplyToReturnsError() throws Exception {
        NodeItem nodeItem = new NodeItemImpl(node, "2", new Date(), "<entry/>",
                "1");
        Mockito.when(
                channelManager.getNodeItem(Mockito.eq(node), Mockito.eq("1")))
                .thenReturn(nodeItem);
        Mockito.when(
                channelManager.getNodeItem(Mockito.eq(node), Mockito.eq("2")))
                .thenReturn(null);
        Element item = (Element) this.ratingEntry.clone();
        item.element("entry").element("in-reply-to").detach();
        validator = getEntryObject(item);
        Assert.assertFalse(validator.isValid());
        Assert.assertEquals(AtomEntry.IN_REPLY_TO_MISSING,
                validator.getErrorMessage());
    }

    @Test
    public void missingTargetIdElementReturnsError() throws Exception {
        NodeItem nodeItem = new NodeItemImpl(node, "2", new Date(), "<entry/>");
        Mockito.when(
                channelManager.getNodeItem(Mockito.eq(node), Mockito.eq("1")))
                .thenReturn(nodeItem);
        Mockito.when(
                channelManager.getNodeItem(Mockito.eq(node), Mockito.eq("2")))
                .thenReturn(null);
        Element item = (Element) this.ratingEntry.clone();
        item.element("entry").element("target").element("id").detach();
        validator = getEntryObject(item);
        Assert.assertFalse(validator.isValid());
        Assert.assertEquals(AtomEntry.MISSING_TARGET_ID,
                validator.getErrorMessage());
    }

    @Test
    public void emptyTargetIdElementReturnsError() throws
            Exception {
        NodeItem nodeItem = new NodeItemImpl(node, "2", new Date(), "<entry/>");
        Mockito.when(
                channelManager.getNodeItem(Mockito.eq(node), Mockito.eq("1")))
                .thenReturn(nodeItem);
        Mockito.when(
                channelManager.getNodeItem(Mockito.eq(node), Mockito.eq("2")))
                .thenReturn(null);
        Element item = (Element) this.ratingEntry.clone();
        item.element("entry").element("target").element("id").detach();
        item.element("entry").element("target").addElement("id");
        validator = getEntryObject(item);
        Assert.assertFalse(validator.isValid());
        Assert.assertEquals(AtomEntry.MISSING_TARGET_ID,
                validator.getErrorMessage());
    }

    @Test
    public void ifTargetedPostDoesntExistErrorIsReturned() throws Exception {
        NodeItem nodeItem = new NodeItemImpl(node, "1", new Date(), "<entry/>");
        Mockito.when(
                channelManager.getNodeItem(Mockito.eq(node), Mockito.eq("1")))
                .thenReturn(nodeItem);
        Mockito.when(
                channelManager.getNodeItem(Mockito.eq(node), Mockito.eq("2")))
                .thenReturn(null);
        Element item = (Element) this.ratingEntry.clone();
        validator = getEntryObject(item);
        Assert.assertFalse(validator.isValid());
        Assert.assertEquals(AtomEntry.TARGETED_ITEM_NOT_FOUND,
                validator.getErrorMessage());
    }

    @Test
    public void ifTargetedPostIsntInSameThreadErrorIsReturnedParentCheck()
            throws Exception {
        NodeItem nodeItem = new NodeItemImpl(node, "1", new Date(), "<entry/>");
        Mockito.when(
                channelManager.getNodeItem(Mockito.eq(node), Mockito.eq("1")))
                .thenReturn(nodeItem);
        Mockito.when(
                channelManager.getNodeItem(Mockito.eq(node), Mockito.eq("2")))
                .thenReturn(nodeItem);
        Element item = (Element) this.ratingEntry.clone();
        validator = getEntryObject(item);
        Assert.assertFalse(validator.isValid());
        Assert.assertEquals(AtomEntry.TARGET_MUST_BE_IN_SAME_THREAD,
                validator.getErrorMessage());
    }

    @Test
    public void ifTargetedPostIsntInSameThreadErrorIsReturnedThreadCheck()
            throws Exception {
        NodeItem nodeItem1 = new NodeItemImpl(node, "1", new Date(), "<entry/>");
        // "B" belongs to a different thread rooted at "A".
        NodeItem nodeItem2 = new NodeItemImpl(node, "B", new Date(),
                "<entry/>", "A");
        Mockito.when(
                channelManager.getNodeItem(Mockito.eq(node), Mockito.eq("1")))
                .thenReturn(nodeItem1);
        Mockito.when(
                channelManager.getNodeItem(Mockito.eq(node), Mockito.eq("2")))
                .thenReturn(nodeItem2);
        Element item = (Element) this.ratingEntry.clone();
        item.element("entry").element("target").element("id").setText("B");
        validator = getEntryObject(item);
        Assert.assertFalse(validator.isValid());
        Assert.assertEquals(AtomEntry.TARGET_MUST_BE_IN_SAME_THREAD,
                validator.getErrorMessage());
    }

    @Test
    public void ifTargetedIdIsSameAsReplyToIdOnlyOneDatabaseLookupPerformed()
            throws Exception {
        NodeItem nodeItem = new NodeItemImpl(node, "1", new Date(), "<entry/>");
        Mockito.when(
                channelManager.getNodeItem(Mockito.eq(node), Mockito.eq("1")))
                .thenReturn(nodeItem);
        Element item = (Element) this.ratingEntry.clone();
        item.element("entry").element("target").element("id").setText("1");
        validator = getEntryObject(item);
        validator.isValid();
        Mockito.verify(channelManager, Mockito.times(1)).getNodeItem(
                Mockito.eq(node), Mockito.eq("1"));
    }

    @Test
    public void targetElementGetsAddedAsExpected() throws Exception {
        Element item = (Element) this.ratingEntry.clone();
        item.element("entry").element("target").element("id").setText("1");
        validator = getEntryObject(item);
        Assert.assertTrue(validator.isValid());
        Element payload = validator.getPayload();
        String expectedId = "tag:channels.shakespeare.lit,/users/romeo@shakespeare.lit/posts,1";
        Assert.assertEquals(expectedId,
                payload.element("target").elementText("id"));
        Assert.assertEquals("post",
                payload.element("target").elementText("object-type"));
    }

    @Test
    public void missingInReplyToErrorsIfRatingElementPresent() throws Exception {
        Element item = (Element) this.ratingEntry.clone();
        item.element("entry").element("in-reply-to").detach();
        validator = getEntryObject(item);
        Assert.assertFalse(validator.isValid());
        Assert.assertEquals(AtomEntry.IN_REPLY_TO_MISSING,
                validator.getErrorMessage());
    }

    @Test
    public void missingTargetErrorsIfRatingElementPresent() throws Exception {
        Element item = (Element) this.ratingEntry.clone();
        item.element("entry").element("target").element("id").setText("1");
        item.element("entry").element("target").detach();
        validator = getEntryObject(item);
        Assert.assertFalse(validator.isValid());
        Assert.assertEquals(AtomEntry.TARGET_ELEMENT_MISSING,
                validator.getErrorMessage());
    }

    @Test
    public void invalidRatingValueReturnsError() throws Exception {
        Element item = (Element) this.ratingEntry.clone();
        item.element("entry").element("target").element("id").setText("1");
        item.element("entry").element("rating").setText("awesome");
        validator = getEntryObject(item);
        Assert.assertFalse(validator.isValid());
        Assert.assertEquals(AtomEntry.INVALID_RATING_VALUE,
                validator.getErrorMessage());
    }

    @Test
    public void outOfRangeRatingReturnsError() throws Exception {
        Element item = (Element) this.ratingEntry.clone();
        item.element("entry").element("rating").setText("6.0");
        item.element("entry").element("target").element("id").setText("1");
        validator = getEntryObject(item);
        Assert.assertFalse(validator.isValid());
        Assert.assertEquals(AtomEntry.RATING_OUT_OF_RANGE,
                validator.getErrorMessage());
    }

    @Test
    public void nonWholeNumberRatingReturnsError() throws Exception {
        Element item = (Element) this.ratingEntry.clone();
        item.element("entry").element("rating").setText("4.1");
        item.element("entry").element("target").element("id").setText("1");
        validator = getEntryObject(item);
        Assert.assertFalse(validator.isValid());
        Assert.assertEquals(AtomEntry.INVALID_RATING_VALUE,
                validator.getErrorMessage());
    }

    @Test
    public void ratingElementGetsAddedToPayloadAsExpected() throws Exception {
        String rating = "4";
        Element item = (Element) this.ratingEntry.clone();
        item.element("entry").element("rating").setText(rating);
        item.element("entry").element("target").element("id").setText("1");
        validator = getEntryObject(item);
        Assert.assertTrue(validator.isValid());
        Element payload = validator.getPayload();
        Assert.assertEquals(ActivityStreams.NS_REVIEW, payload
                .getNamespaceForPrefix("review").getText());
        Assert.assertEquals(rating + ".0", payload.element("rating")
                .getTextTrim());
    }

    @Test
    public void postTitleIsSetToRatingWhenRated() throws Exception {
        Element item = (Element) this.ratingEntry.clone();
        item.element("entry").element("target").element("id").setText("1");
        validator = getEntryObject(item);
        Assert.assertTrue(validator.isValid());
        Element payload = validator.getPayload();
        Assert.assertEquals(ActivityStreams.NS_REVIEW, payload
                .getNamespaceForPrefix("review").getText());
        Assert.assertEquals("Rating", payload.elementText("title"));
    }

    @Test
    public void postVerbGetsSwitchedToRated() throws Exception {
        Element item = (Element) this.ratingEntry.clone();
        item.element("entry").element("target").element("id").setText("1");
        validator = getEntryObject(item);
        Assert.assertTrue(validator.isValid());
        Element payload = validator.getPayload();
        Assert.assertEquals(AtomEntry.ACTIVITY_VERB_RATED,
                payload.elementText("verb"));
    }

    @Test
    public void postContentGetsReplacedWithRating() throws Exception {
        Element item = (Element) this.ratingEntry.clone();
        item.element("entry").element("target").element("id").setText("1");
        String expectedContent = "rating:5.0";
        Assert.assertFalse(item.element("entry").elementText("content")
                .equals(expectedContent));
        validator = getEntryObject(item);
        Assert.assertTrue(validator.isValid());
        Element payload = validator.getPayload();
        Assert.assertEquals(expectedContent, payload.elementText("content"));
    }

    @Test
    public void canNotRateARating() throws Exception {
        // Targeted item already carries a review:rating, so rating it again is refused.
        String testPayload = "<entry xmlns=\"http://www.w3.org/2005/Atom\" "
                + "xmlns:review=\"http://activitystrea.ms/schema/1.0/review\">"
                + "<review:rating>5.0</review:rating></entry>";
        NodeItem nodeItem = new NodeItemImpl(node, "1", new Date(), testPayload);
        Mockito.when(
                channelManager.getNodeItem(Mockito.eq(node),
                        Mockito.anyString())).thenReturn(nodeItem);
        Element item = (Element) this.ratingEntry.clone();
        item.element("entry").element("target").element("id").setText("1");
        validator = getEntryObject(item);
        Assert.assertFalse(validator.isValid());
        Assert.assertEquals(AtomEntry.CAN_ONLY_RATE_A_POST,
                validator.getErrorMessage());
    }

    @Test
    public void onlyAllowsSingleRatingPerPersonPerPost() throws Exception {
        Mockito.when(
                channelManager.userHasRatedPost(Mockito.anyString(),
                        Mockito.any(JID.class), Mockito.any(GlobalItemID.class)))
                .thenReturn(true);
        Element item = (Element) this.ratingEntry.clone();
        item.element("entry").element("target").element("id").setText("1");
        validator = getEntryObject(item);
        Assert.assertFalse(validator.isValid());
        Assert.assertEquals(AtomEntry.ITEM_ALREADY_RATED,
                validator.getErrorMessage());
    }

    @Test
    public void suppliesLocalItemId() throws Exception {
        Element item = (Element) this.publishEntry.clone();
        validator = getEntryObject(item);
        String localItemId = validator.getLocalItemId();
        Assert.assertNotNull(localItemId);
    }

    @Test
    public void suppliesGlobalItemId() throws Exception {
        Element item = (Element) this.publishEntry.clone();
        validator = getEntryObject(item);
        String globalItemId = validator.getGlobalItemId();
        Assert.assertNotNull(globalItemId);
        Assert.assertTrue(GlobalItemIDImpl.isGlobalId(globalItemId));
    }

    @Test
    public void suppliesInReplyTo() throws Exception {
        Assert.assertEquals(
                "tag:channels.shakespeare.lit,/users/romeo@shakespeare.lit/posts,fc362eb42085f017ed9ccd9c4004b095",
                replyEntry.element("entry").element("in-reply-to")
                        .attributeValue("ref"));
        NodeItem nodeItem = new NodeItemImpl(node, "2", new Date(), "<entry/>");
        Mockito.when(
                channelManager.getNodeItem(Mockito.eq(node),
                        Mockito.anyString())).thenReturn(nodeItem);
        Element item = (Element) this.replyEntry.clone();
        validator = getEntryObject(item);
        Assert.assertEquals("fc362eb42085f017ed9ccd9c4004b095",
                validator.getInReplyTo());
    }

    // Loads a fixture stanza from STANZA_PATH and parses it as an IQ.
    public static IQ readStanzaAsIq(String stanzaPath) throws IOException,
            DocumentException {
        String stanzaStr =
IOUtils.toString( new FileInputStream(STANZA_PATH + stanzaPath)); return toIq(stanzaStr); } public static IQ toIq(String stanzaStr) throws DocumentException { return new IQ(parseXml(stanzaStr)); } public static Element parseXml(String stanzaStr) throws DocumentException { SAXReader xmlReader = new SAXReader(); xmlReader.setMergeAdjacentText(true); xmlReader.setStringInternEnabled(true); xmlReader.setStripWhitespaceText(true); return xmlReader.read(new StringReader(stanzaStr)).getRootElement(); } }
/*
 * Copyright (c) 2010-2014 Evolveum
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.evolveum.midpoint.init;

import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.joran.JoranConfigurator;
import ch.qos.logback.core.joran.spi.JoranException;
import ch.qos.logback.core.util.StatusPrinter;
import com.evolveum.midpoint.common.configuration.api.MidpointConfiguration;
import com.evolveum.midpoint.util.ClassPathUtil;
import com.evolveum.midpoint.util.DOMUtil;
import com.evolveum.midpoint.util.QNameUtil;
import com.evolveum.midpoint.util.exception.SystemException;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import org.apache.commons.configuration.*;
import org.apache.wss4j.dom.WSSConfig;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import javax.xml.parsers.DocumentBuilder;
import java.io.File;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Iterator;

/**
 * Startup-time implementation of {@link MidpointConfiguration}.
 *
 * Resolves the {@code midpoint.home} directory (falling back to defaults when the
 * system property is unset), bootstraps logging from an optional
 * {@code logback.xml} found there, and loads {@code config.xml} into a
 * {@link CompositeConfiguration} that components query by section name.
 */
public class StartupConfiguration implements MidpointConfiguration {

    private static final Trace LOGGER = TraceManager.getTrace(StartupConfiguration.class);

    // System property names and well-known file/section names used below.
    private static final String USER_HOME = "user.home";
    private static final String MIDPOINT_HOME = "midpoint.home";
    private static final String MIDPOINT_SECTION = "midpoint";
    private static final String SAFE_MODE = "safeMode";
    private static final String DEFAULT_CONFIG_FILE_NAME = "config.xml";
    private static final String LOGBACK_CONFIG_FILENAME = "logback.xml";

    // Aggregated configuration; rebuilt (cleared and re-filled) on every loadConfiguration() call.
    private CompositeConfiguration config = null;
    private Document xmlConfigAsDocument = null; // just in case when we need to access original XML document
    private String configFilename = null;

    /**
     * Default constructor — uses {@code config.xml} as the configuration file name.
     */
    public StartupConfiguration() {
        this.configFilename = DEFAULT_CONFIG_FILE_NAME;
    }

    /**
     * Constructor
     *
     * @param configFilename alternative configuration file
     */
    public StartupConfiguration(String configFilename) {
        this.configFilename = configFilename;
    }

    /**
     * Get current configuration file name
     *
     * @return the configuration file name (relative to midpoint.home, or to the
     *         current directory when midpoint.home is not used)
     */
    public String getConfigFilename() {
        return this.configFilename;
    }

    /**
     * Set configuration filename
     *
     * @param configFilename the configuration file name to load on {@link #init()}
     */
    public void setConfigFilename(String configFilename) {
        this.configFilename = configFilename;
    }

    /**
     * Returns the current value of the {@code midpoint.home} system property,
     * which may be null before {@link #init()} has run.
     */
    @Override
    public String getMidpointHome() {
        return System.getProperty(MIDPOINT_HOME);
    }

    /**
     * Returns the configuration subset for the given component.
     *
     * When midpoint.home is set, it is injected into the subset under the
     * {@code midpoint.home} key; otherwise every occurrence of the
     * {@code ${midpoint.home}} placeholder in property values is stripped.
     *
     * @param componentName name of the component section; must not be null
     * @throws IllegalArgumentException if componentName is null
     */
    @Override
    public Configuration getConfiguration(String componentName) {
        if (null == componentName) {
            throw new IllegalArgumentException("NULL argument");
        }
        Configuration sub = config.subset(componentName);
        // Insert replacement for relative path to midpoint.home else clean
        // replace
        if (getMidpointHome() != null) {
            sub.addProperty(MIDPOINT_HOME, getMidpointHome());
        } else {
            // No home directory: remove the "${midpoint.home}/" and "${midpoint.home}"
            // placeholders so the remaining values are plain relative paths.
            @SuppressWarnings("unchecked")
            Iterator<String> i = sub.getKeys();
            while (i.hasNext()) {
                String key = i.next();
                sub.setProperty(key, sub.getString(key).replace("${" + MIDPOINT_HOME + "}/", ""));
                sub.setProperty(key, sub.getString(key).replace("${" + MIDPOINT_HOME + "}", ""));
            }
        }

        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Configuration for {} :", componentName);
            @SuppressWarnings("unchecked")
            Iterator<String> i = sub.getKeys();
            while (i.hasNext()) {
                String key = i.next();
                LOGGER.debug(" {} = {}", key, sub.getString(key));
            }
        }
        return sub;
    }

    /**
     * Initialize system configuration.
     *
     * Order matters here: the welcome banner is printed first, then
     * midpoint.home is resolved (and set as a system property if it was
     * missing), then logging is re-configured from that directory, then the
     * configuration file is loaded, and finally WSS4J is initialized.
     */
    public void init() {
        welcome();
        if (System.getProperty(MIDPOINT_HOME) == null || System.getProperty(MIDPOINT_HOME).isEmpty()) {
            // midpoint.home not provided: warn loudly on both the log and stdout.
            LOGGER.warn("*****************************************************************************************");
            LOGGER.warn(MIDPOINT_HOME + " is not set ! Using default configuration, for more information see http://wiki.evolveum.com/display/midPoint/");
            LOGGER.warn("*****************************************************************************************");

            System.out.println("*******************************************************************************");
            System.out.println(MIDPOINT_HOME + " is not set ! Using default configuration, for more information");
            System.out.println(" see http://wiki.evolveum.com/display/midPoint/");
            System.out.println("*******************************************************************************");

            if (getConfigFilename().startsWith("test")) {
                // Test runs get a throwaway home under the build output directory.
                String midpointHome = "./target/midpoint-home";
                System.setProperty(MIDPOINT_HOME, midpointHome);
                System.out.println("Using " + MIDPOINT_HOME + " for test runs: '" + midpointHome + "'.");
            } else {
                // Default to <user.home>/midpoint.
                String userHome = System.getProperty(USER_HOME);
                if (!userHome.endsWith("/")) {
                    userHome += "/";
                }
                userHome += "midpoint";
                System.setProperty(MIDPOINT_HOME, userHome);
                LOGGER.warn("Setting {} to '{}'.", new Object[] { MIDPOINT_HOME, userHome });
                System.out.println("Setting " + MIDPOINT_HOME + " to '" + userHome + "'.");
            }
        }

        String midpointHomeString = System.getProperty(MIDPOINT_HOME);
        if (midpointHomeString != null) {
            //Fix missing last slash in path
            if (!midpointHomeString.endsWith("/")) {
                midpointHomeString = midpointHomeString + "/";
                System.setProperty(MIDPOINT_HOME, midpointHomeString);
            }
        }

        File midpointHome = new File(midpointHomeString);

        setupInitialLogging(midpointHome);

        loadConfiguration(midpointHome);

        if (isSafeMode()) {
            LOGGER.info("Safe mode is ON; setting tolerateUndeclaredPrefixes to TRUE");
            QNameUtil.setTolerateUndeclaredPrefixes(true);
        }

        // Make sure that this is called very early in the startup sequence.
        // This is needed to properly initialize the resources
        // (the "org/apache/xml/security/resource/xmlsecurity" resource bundle error)
        WSSConfig.init();
    }

    /**
     * Loading logic.
     *
     * Re-initializes {@link #config} and loads the configuration file from
     * midpoint.home (extracting the default one from the classpath if it does
     * not exist there yet), or from the current directory when midpointHome is
     * null. Any {@link ConfigurationException} is rethrown as
     * {@link SystemException} — startup cannot continue without configuration.
     *
     * @param midpointHome resolved midpoint.home directory, may be null
     */
    private void loadConfiguration(File midpointHome) {
        if (config != null) {
            config.clear();
        } else {
            config = new CompositeConfiguration();
        }

        DocumentBuilder documentBuilder = DOMUtil.createDocumentBuilder();          // we need namespace-aware document builder (see GeneralChangeProcessor.java)

        if (midpointHome != null) {
            /* configuration logic */
            File f = new File(midpointHome, this.getConfigFilename());
            System.out.println("Loading midPoint configuration from file "+f);
            LOGGER.info("Loading midPoint configuration from file {}", f);
            try {
                if (!f.exists()) {
                    // First run: create the home directory layout and extract the
                    // bundled default config file from the classpath.
                    LOGGER.warn("Configuration file {} does not exists. Need to do extraction ...", f);
                    ApplicationHomeSetup ah = new ApplicationHomeSetup();
                    ah.init(MIDPOINT_HOME);
                    ClassPathUtil.extractFileFromClassPath(this.getConfigFilename(), f.getPath());
                }
                //Load and parse properties
                config.addProperty(MIDPOINT_HOME, System.getProperty(MIDPOINT_HOME));
                createXmlConfiguration(documentBuilder, f.getPath());
            } catch (ConfigurationException e) {
                String message = "Unable to read configuration file [" + f + "]: " + e.getMessage();
                LOGGER.error(message);
                System.out.println(message);
                throw new SystemException(message, e);      // there's no point in continuing with midpoint initialization
            }

        } else {
            // Load from current directory
            try {
                createXmlConfiguration(documentBuilder, this.getConfigFilename());
            } catch (ConfigurationException e) {
                String message = "Unable to read configuration file [" + this.getConfigFilename() + "]: " + e.getMessage();
                LOGGER.error(message);
                System.out.println(message);
                throw new SystemException(message, e);
            }
        }
    }

    /**
     * Reconfigures logback from {@code <midpointHome>/logback.xml} when that file
     * exists; a missing file is silently ignored (default logging stays active).
     * Joran errors are reported via StatusPrinter rather than thrown.
     *
     * @param midpointHome resolved midpoint.home directory
     */
    private void setupInitialLogging(File midpointHome) {
        File logbackConfigFile = new File(midpointHome, LOGBACK_CONFIG_FILENAME);
        if (!logbackConfigFile.exists()) {
            return;
        }
        LOGGER.info("Loading additional logging configuration from {}", logbackConfigFile);
        LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();
        try {
            JoranConfigurator configurator = new JoranConfigurator();
            configurator.setContext(context);
            configurator.doConfigure(logbackConfigFile);
        } catch (JoranException je) {
            // StatusPrinter will handle this
        } catch (Exception ex) {
            ex.printStackTrace();
        }
        StatusPrinter.printInCaseOfErrorsOrWarnings(context);
    }

    /**
     * Loads the given XML file into the composite configuration and also keeps
     * a parsed DOM copy in {@link #xmlConfigAsDocument}.
     *
     * @param documentBuilder namespace-aware document builder used for parsing
     * @param filename path of the configuration XML file
     * @throws ConfigurationException if the file cannot be read or parsed
     */
    private void createXmlConfiguration(DocumentBuilder documentBuilder, String filename) throws ConfigurationException {
        XMLConfiguration xmlConfig = new XMLConfiguration();
        xmlConfig.setDocumentBuilder(documentBuilder);
        xmlConfig.setFileName(filename);
        xmlConfig.load();
        config.addConfiguration(xmlConfig);

        xmlConfigAsDocument = DOMUtil.parseFile(filename);
    }

    /** Returns the raw configuration XML as a DOM document (parsed at load time). */
    @Override
    public Document getXmlConfigAsDocument() {
        return xmlConfigAsDocument;
    }

    /**
     * Returns the value of the {@code safeMode} flag from the {@code midpoint}
     * configuration section; defaults to false.
     */
    @Override
    public boolean isSafeMode() {
        Configuration c = getConfiguration(MIDPOINT_SECTION);
        if (c == null) {
            return false;           // should not occur
        }
        return c.getBoolean(SAFE_MODE, false);
    }

    /** Dumps all configuration keys and values as "key = value; " pairs. */
    @Override
    public String toString() {
        @SuppressWarnings("unchecked")
        Iterator<String> i = config.getKeys();
        StringBuilder sb = new StringBuilder();
        while (i.hasNext()) {
            String key = i.next();
            sb.append(key);
            sb.append(" = ");
            sb.append(config.getString(key));
            sb.append("; ");
        }
        return sb.toString();
    }

    /**
     * Prints the startup banner and build information read from the
     * {@code midpoint.info} properties resource; silently skipped when that
     * resource cannot be loaded.
     */
    private void welcome() {
        try {
            Configuration info = new PropertiesConfiguration("midpoint.info");
            // NOTE(review): formatter is only used by the commented-out build-info
            // block below — confirm before removing.
            DateFormat formatter = new SimpleDateFormat("dd/MM/yyyy hh:mm:ss.SSS");
            LOGGER.info("+--------------------------------------------------------------------------------------------+");
            LOGGER.info("| _ | | _ \\ _ _| |_");
            LOGGER.info("| ___ ____ (_) __| | |_) |___ (_)___|_ _|");
            LOGGER.info("| | _ ` _ `| |/ _ | __/ _ \\| | _` | |");
            LOGGER.info("| | | | | | | | (_| | | | (_) | | | | | |_");
            LOGGER.info("| |_| |_| |_|_|\\____|_| \\____/|_|_| |_|\\__| by Evolveum and partners");
            LOGGER.info("|");
            LOGGER.info("| Licensed under the Apache License, Version 2.0 see: http://www.apache.org/licenses/LICENSE-2.0");
            LOGGER.info("| Version : " + info.getString("midpoint.version"));
//            try {
//                LOGGER.info("| Build   : " + info.getString("midpoint.build") + " at "
//                        + formatter.format(new Date(info.getLong("midpoint.timestamp"))));
//            } catch (NumberFormatException ex) {
//                LOGGER.info("| Build   : " + info.getString("midpoint.build"));
//            }
            LOGGER.info("| Sources : " + info.getString("midpoint.scm") + " branch: " + info.getString("midpoint.branch"));
            LOGGER.info("| Bug reporting system : " + info.getString("midpoint.jira"));
            LOGGER.info("| Product information : http://wiki.evolveum.com/display/midPoint");
            LOGGER.info("+---------------------------------------------------------------------------------------------+");
        } catch (ConfigurationException e) {
            //NOTHING just skip
        }
    }
}
/*
 * ModeShape (http://www.modeshape.org)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.modeshape.jdbc;

import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsInstanceOf.instanceOf;
import static org.hamcrest.core.IsNull.notNullValue;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.when;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
import java.util.Set;
import javax.jcr.nodetype.NodeType;
import javax.jcr.query.QueryResult;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.modeshape.jdbc.delegate.ConnectionInfo;
import org.modeshape.jdbc.delegate.RepositoryDelegate;

/**
 * Unit tests for {@link JcrStatement}: defaults, setters, unsupported-update
 * behavior, and lifecycle. A stub {@link RepositoryDelegate} backs the mocked
 * connection so that {@code execute}/{@code executeQuery} return the mocked
 * query result.
 */
public class JcrStatementTest {

    private JcrStatement stmt;

    @Mock
    private JcrConnection connection;
    @Mock
    private QueryResult queryResult;

    /** Close and drop the statement after each test so tests stay independent. */
    @After
    public void afterEach() {
        if (stmt != null) {
            stmt.close();
            stmt = null;
        }
    }

    /** Create a fresh statement wired to a mocked connection and delegate stub. */
    @Before
    public void beforeEach() throws Exception {
        MockitoAnnotations.initMocks(this);
        stmt = new JcrStatement(connection);
        when(connection.getRepositoryDelegate()).thenReturn(new TestJcrCommRepositoryInterface());
        when(queryResult.getColumnNames()).thenReturn(TestUtil.COLUMN_NAMES);
    }

    @Test
    public void shouldHaveStatement() {
        assertThat(stmt, is(notNullValue()));
    }

    @Test
    public void shouldBeAbleToClearWarnings() throws SQLException {
        stmt.clearWarnings();
    }

    @Test
    public void shouldHaveConnection() throws SQLException {
        assertThat(stmt.getConnection(), is(notNullValue()));
    }

    @Test
    public void shouldReturnDefaultForFetchDirection() throws SQLException {
        assertThat(stmt.getFetchDirection(), is(ResultSet.FETCH_FORWARD));
    }

    @Test
    public void shouldHaveFetchSize() throws SQLException {
        assertThat(stmt.getFetchSize(), is(0));
    }

    @Test
    public void shouldReturnDefaultForMaxRows() throws SQLException {
        assertThat(stmt.getMaxRows(), is(0));
    }

    @Test
    public void shouldHaveMoreResults() throws SQLException {
        assertThat(stmt.getMoreResults(), is(false));
    }

    @Test
    public void shouldHaveMoreResultsAtPostion() throws SQLException {
        assertThat(stmt.getMoreResults(Statement.CLOSE_CURRENT_RESULT), is(false));
    }

    @Test
    public void shouldReturnDefaultForMaxFieldSize() throws SQLException {
        assertThat(stmt.getMaxFieldSize(), is(0));
    }

    @Test
    public void shouldReturnDefaultForQueryTimeout() throws SQLException {
        assertThat(stmt.getQueryTimeout(), is(0));
    }

    @Test
    public void shouldReturnDefaultForUpdateCount() throws SQLException {
        assertThat(stmt.getUpdateCount(), is(-1));
    }

    @Test
    public void shouldExcute() throws SQLException {
        stmt.execute(TestUtil.SQL_SELECT);
    }

    // FIX: this method was missing its @Test annotation, so JUnit silently
    // skipped it; its sibling shouldExcute() above is annotated.
    @Test
    public void shouldExcuteQuery() throws SQLException {
        stmt.executeQuery(TestUtil.SQL_SELECT);
    }

    /**
     * Because updates are not supported, this test should throw an exception.
     *
     * @throws SQLException
     */
    @Test( expected = SQLException.class )
    public void shouldThrowExceptionForAddBatch() throws SQLException {
        stmt.addBatch("Update sql");
    }

    /**
     * Because updates are not supported, this test should throw an exception.
     *
     * @throws SQLException
     */
    @Test( expected = SQLException.class )
    public void shouldThrowExceptionForExcuteBatch() throws SQLException {
        stmt.executeBatch();
    }

    /**
     * Because updates are not supported, this test should throw an exception.
     *
     * @throws SQLException
     */
    @Test( expected = SQLException.class )
    public void shouldThrowExceptionForUpdate() throws SQLException {
        stmt.executeUpdate("Update sql");
    }

    /**
     * Because updates are not supported, this test should throw an exception.
     *
     * @throws SQLException
     */
    @Test( expected = SQLException.class )
    public void shouldThrowExceptionForClearBatch() throws SQLException {
        stmt.clearBatch();
    }

    @Test
    public void shouldReturnResultSetConcurreny() throws SQLException {
        assertThat(stmt.getResultSetConcurrency(), is(ResultSet.CONCUR_READ_ONLY));
    }

    @Test
    public void shouldReturnResultSetHoldability() throws SQLException {
        assertThat(stmt.getResultSetHoldability(), is(ResultSet.CLOSE_CURSORS_AT_COMMIT));
    }

    @Test
    public void shouldReturnResultSetType() throws SQLException {
        assertThat(stmt.getResultSetType(), is(ResultSet.TYPE_SCROLL_INSENSITIVE));
    }

    @Test
    public void shouldReturnDefaultForGeneratedKeys() {
        assertThat(stmt.getGeneratedKeys(), instanceOf(ResultSet.class));
    }

    @Test
    public void shouldReturnDefaultResultSet() throws SQLException {
        assertNull(stmt.getResultSet());
    }

    @Test
    public void shouldReturnDefaultForWarnings() throws SQLException {
        assertNull(stmt.getWarnings());
    }

    /**
     * Cancel is supported and should not throw.
     * (Previous Javadoc here was a copy-paste of the "updates are not
     * supported" comment, which contradicted the unannotated expectation.)
     *
     * @throws SQLException
     */
    @Test
    public void shouldSupportCancel() throws SQLException {
        stmt.cancel();
    }

    @Test
    public void shouldSupportEquals() {
        assertTrue(stmt.equals(stmt));

        JcrStatement stmt2 = null;
        try {
            stmt2 = new JcrStatement(connection);
            assertFalse(stmt.equals(stmt2));
        } finally {
            if (stmt2 != null) {
                stmt2.close();
            }
        }
    }

    @Test
    public void shouldBeAbleToClose() {
        stmt.close();
    }

    @Test
    public void shouldSetFetchSize() throws SQLException {
        stmt.setFetchSize(100);
    }

    /**
     * Because cursor names are not supported, this call should throw.
     *
     * @throws SQLException
     */
    @Test( expected = SQLException.class )
    public void shouldSetCursorName() throws SQLException {
        stmt.setCursorName("CursorName");
    }

    @Test
    public void shouldSetEscapeProcessingTrue() throws SQLException {
        stmt.setEscapeProcessing(true);
    }

    @Test
    public void shouldSetEscapeProcessingFalse() throws SQLException {
        stmt.setEscapeProcessing(false);
    }

    @Test
    public void shouldSetFetchDirectionReverse() throws SQLException {
        stmt.setFetchDirection(ResultSet.FETCH_REVERSE);
    }

    @Test
    public void shouldSetFetchDirectionUnknown() throws SQLException {
        stmt.setFetchDirection(ResultSet.FETCH_UNKNOWN);
    }

    @Test
    public void shouldSetFetchDirectionForward() throws SQLException {
        stmt.setFetchDirection(ResultSet.FETCH_FORWARD);
    }

    @Test
    public void shouldSetMaxFieldSize() throws SQLException {
        stmt.setMaxFieldSize(30);
    }

    @Test
    public void shouldSetMaxRows() throws SQLException {
        stmt.setMaxRows(200);
    }

    @Test
    public void shouldSetPoolableTrue() throws SQLException {
        stmt.setPoolable(true);
    }

    @Test
    public void shouldSetPoolableFalse() throws SQLException {
        stmt.setPoolable(false);
    }

    @Test
    public void shouldSetQueryTimeout() throws SQLException {
        stmt.setQueryTimeout(60);
    }

    /**
     * Stub delegate: returns the mocked query result for {@code execute} and
     * nulls/false for everything else.
     */
    public class TestJcrCommRepositoryInterface implements RepositoryDelegate {

        /**
         * {@inheritDoc}
         *
         * @see org.modeshape.jdbc.delegate.RepositoryDelegate#createConnection(org.modeshape.jdbc.DriverInfo)
         */
        @Override
        public Connection createConnection( DriverInfo info ) {
            return null;
        }

        @SuppressWarnings( "synthetic-access" )
        @Override
        public QueryResult execute( String query,
                                    String language ) {
            return queryResult;
        }

        @SuppressWarnings( "synthetic-access" )
        @Override
        public String explain( String query,
                               String language ) {
            return ((org.modeshape.jcr.api.query.QueryResult)queryResult).getPlan();
        }

        @Override
        public ConnectionInfo getConnectionInfo() {
            return null;
        }

        @Override
        public void close() {
        }

        /**
         * {@inheritDoc}
         *
         * @see org.modeshape.jdbc.delegate.RepositoryDelegate#closeStatement()
         */
        @Override
        public void closeStatement() {
        }

        @Override
        public void commit() {
        }

        @Override
        public boolean isValid( int timeout ) {
            return false;
        }

        @Override
        public NodeType nodeType( String name ) {
            return null;
        }

        @Override
        public List<NodeType> nodeTypes() {
            return null;
        }

        @Override
        public void rollback() {
        }

        /**
         * {@inheritDoc}
         *
         * @see org.modeshape.jdbc.delegate.RepositoryDelegate#isWrapperFor(java.lang.Class)
         */
        @Override
        public boolean isWrapperFor( Class<?> iface ) {
            return false;
        }

        /**
         * {@inheritDoc}
         *
         * @see org.modeshape.jdbc.delegate.RepositoryDelegate#unwrap(java.lang.Class)
         */
        @Override
        public <T> T unwrap( Class<T> iface ) {
            return null;
        }

        @Override
        public Set<String> getRepositoryNames() {
            return null;
        }

        /**
         * {@inheritDoc}
         *
         * @see org.modeshape.jdbc.delegate.RepositoryDelegate#getDescriptor(java.lang.String)
         */
        @Override
        public String getDescriptor( String descriptorKey ) {
            return null;
        }
    }
}
package org.ihtsdo.otf.mapping.jpa.services;

import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import javax.persistence.NoResultException;

import org.apache.log4j.Logger;
import org.ihtsdo.otf.mapping.helpers.LocalException;
import org.ihtsdo.otf.mapping.helpers.MapUserList;
import org.ihtsdo.otf.mapping.helpers.MapUserListJpa;
import org.ihtsdo.otf.mapping.helpers.MapUserRole;
import org.ihtsdo.otf.mapping.helpers.PfsParameter;
import org.ihtsdo.otf.mapping.jpa.MapUserJpa;
import org.ihtsdo.otf.mapping.jpa.MapUserPreferencesJpa;
import org.ihtsdo.otf.mapping.model.MapUser;
import org.ihtsdo.otf.mapping.model.MapUserPreferences;
import org.ihtsdo.otf.mapping.services.MappingService;
import org.ihtsdo.otf.mapping.services.SecurityService;
import org.ihtsdo.otf.mapping.services.helpers.ConfigUtility;
import org.ihtsdo.otf.mapping.services.helpers.SecurityServiceHandler;

/**
 * Reference implementation of the {@link SecurityService}.
 *
 * Authentication is delegated to a pluggable {@link SecurityServiceHandler}
 * (lazily created from the "security.handler" config property). Auth tokens
 * are tracked in static, synchronized token-to-username and token-to-expiry
 * maps shared across all instances of this service.
 */
public class SecurityServiceJpa extends RootServiceJpa implements
    SecurityService {

  // The token -> userName map. Static: shared by every service instance.
  private static Map<String, String> tokenMapUsernameMap = Collections
      .synchronizedMap(new HashMap<String, String>());

  // The token -> expiry time map. Entries are refreshed on each validated use.
  private static Map<String, Date> tokenTimeoutMap = Collections
      .synchronizedMap(new HashMap<String, Date>());

  // The pluggable authentication handler; lazily initialized from config.
  private static SecurityServiceHandler handler = null;

  // Token lifetime in milliseconds, read from "security.timeout" alongside the
  // handler. NOTE(review): only set when the handler is first created.
  private static int timeout;

  /**
   * Instantiates an empty {@link SecurityServiceJpa}.
   *
   * @throws Exception the exception
   */
  public SecurityServiceJpa() throws Exception {
    super();
  }

  /* see superclass */
  // Validates credentials via the configured handler, then syncs the
  // authenticated user into the local database and issues an auth token.
  @Override
  public MapUser authenticate(String userName, String password)
    throws Exception {
    // Check userName and password are not null
    if (userName == null || userName.isEmpty())
      throw new LocalException("Invalid userName: null");
    if (password == null || password.isEmpty())
      throw new LocalException("Invalid password: null");

    Properties config = ConfigUtility.getConfigProperties();
    if (handler == null) {
      // Lazy one-time setup of the handler and the token timeout.
      timeout = Integer.valueOf(config.getProperty("security.timeout"));
      String handlerName = config.getProperty("security.handler");
      handler =
          ConfigUtility.newStandardHandlerInstanceWithConfiguration(
              "security.handler", handlerName, SecurityServiceHandler.class);
    }

    //
    // Call the security service
    //
    MapUser authMapUser = handler.authenticate(userName, password);
    return authHelper(authMapUser);
  }

  /**
   * Auth helper. Creates or updates the local MapUser record to match the
   * authenticated user, registers a fresh token in the static maps, and
   * returns the reloaded user with the token attached.
   *
   * @param authMapUser the auth user (as returned by the handler)
   * @return the user, or null when authentication failed (null input)
   * @throws Exception the exception
   */
  private MapUser authHelper(MapUser authMapUser) throws Exception {
    if (authMapUser == null)
      return null;

    // check if authenticated user exists
    MapUser userFound = getMapUser(authMapUser.getUserName());

    // if user was found, update to match settings
    Long userId = null;
    if (userFound != null) {
      handleLazyInit(userFound);
      Logger.getLogger(getClass()).info("update");
      userFound.setEmail(authMapUser.getEmail());
      userFound.setName(authMapUser.getName());
      userFound.setUserName(authMapUser.getUserName());
      userFound.setApplicationRole(authMapUser.getApplicationRole());

      updateMapUser(userFound);
      // if (userFound.getUserPreferences() == null) {
      //   MapUserPreferences newMapUserPreferences = new MapUserPreferencesJpa();
      //   newMapUserPreferences.setUser(userFound);
      //   addMapUserPreferences(newMapUserPreferences);
      // }
      userId = userFound.getId();
    }
    // if MapUser not found, create one for our use
    else {
      Logger.getLogger(getClass()).info("add");
      MapUser newMapUser = new MapUserJpa();
      newMapUser.setEmail(authMapUser.getEmail());
      newMapUser.setName(authMapUser.getName());
      newMapUser.setUserName(authMapUser.getUserName());
      newMapUser.setApplicationRole(authMapUser.getApplicationRole());
      newMapUser = addMapUser(newMapUser);

      // New users get default preferences created alongside the account.
      MapUserPreferences newMapUserPreferences = new MapUserPreferencesJpa();
      newMapUserPreferences.setMapUser(newMapUser);
      newMapUserPreferences.setLastLogin(new Date().getTime());
      newMapUserPreferences.setLastMapProjectId(0L);
      newMapUserPreferences.setNotifiedByEmail(false);
      addMapUserPreferences(newMapUserPreferences);

      userId = newMapUser.getId();
    }
    // Detach all managed entities so the reload below reads fresh state.
    manager.clear();

    // Generate application-managed token
    String token = handler.computeTokenForUser(authMapUser.getUserName());
    tokenMapUsernameMap.put(token, authMapUser.getUserName());
    tokenTimeoutMap.put(token, new Date(new Date().getTime() + timeout));

    Logger.getLogger(getClass()).debug(
        "MapUser = " + authMapUser.getUserName() + ", " + authMapUser);

    // Reload the user to populate MapUserPreferences
    final MapUser result = getMapUser(userId);
    handleLazyInit(result);
    result.setAuthToken(token);

    return result;
  }

  /* see superclass */
  // Drops the token from both maps and returns the handler's logout URL,
  // lazily creating the handler if it has not been initialized yet.
  @Override
  public String logout(String authToken) throws Exception {
    tokenMapUsernameMap.remove(authToken);
    tokenTimeoutMap.remove(authToken);
    if (handler == null) {
      Properties config = ConfigUtility.getConfigProperties();
      String handlerName = config.getProperty("security.handler");
      handler =
          ConfigUtility.newStandardHandlerInstanceWithConfiguration(
              "security.handler", handlerName, SecurityServiceHandler.class);
      return handler.getLogoutUrl();
    } else {
      return handler.getLogoutUrl();
    }
  }

  /* see superclass */
  // Resolves an auth token to a user name, enforcing (and sliding) the token
  // timeout when the handler requires it.
  @Override
  public String getUsernameForToken(String authToken) throws Exception {
    // use guest user for null auth token
    if (authToken == null)
      throw new LocalException(
          "Attempt to access a service without an AuthToken, the user is likely not logged in.");

    // handle guest user, unless guest access is disabled in configuration
    if (authToken.equals("guest")
        && "false".equals(ConfigUtility.getConfigProperties().getProperty(
            "security.guest.disabled"))) {
      return "guest";
    }

    // Replace double quotes in auth token.
    final String parsedToken = authToken.replace("\"", "");

    // Check auth token against the userName map
    if (tokenMapUsernameMap.containsKey(parsedToken)) {
      String userName = tokenMapUsernameMap.get(parsedToken);

      // Validate that the user has not timed out.
      if (handler.timeoutUser(userName)) {

        if (tokenTimeoutMap.get(parsedToken) == null) {
          throw new LocalException("No login timeout set for authToken.");
        }

        if (tokenTimeoutMap.get(parsedToken).before(new Date())) {
          throw new LocalException(
              "AuthToken has expired. Please reload and log in again.");
        }
        // Still valid: slide the expiry forward by the configured timeout.
        tokenTimeoutMap.put(parsedToken, new Date(new Date().getTime()
            + timeout));
      }
      return userName;
    } else {
      throw new LocalException("AuthToken does not have a valid userName.");
    }
  }

  /* see superclass */
  // Maps a token to the user's application-wide role; unknown users (and the
  // guest token, when enabled) get VIEWER.
  @Override
  public MapUserRole getApplicationRoleForToken(String authToken)
    throws Exception {

    if (authToken == null) {
      throw new LocalException(
          "Attempt to access a service without an AuthToken, the user is likely not logged in.");
    }

    // Handle "guest" user
    if (authToken.equals("guest")
        && "false".equals(ConfigUtility.getConfigProperties().getProperty(
            "security.guest.disabled"))) {
      return MapUserRole.VIEWER;
    }

    final String parsedToken = authToken.replace("\"", "");
    final String userName = getUsernameForToken(parsedToken);

    // check for null userName
    if (userName == null) {
      throw new LocalException("Unable to find user for the AuthToken");
    }
    final MapUser user = getMapUser(userName.toLowerCase());
    if (user == null) {
      return MapUserRole.VIEWER;
      // throw new
      // LocalException("Unable to obtain user information for userName = " +
      // userName);
    }
    return user.getApplicationRole();
  }

  /* see superclass */
  // Delegates the project-level role lookup to MappingService, which is
  // always closed afterwards.
  @Override
  public MapUserRole getMapProjectRoleForToken(String authToken, Long projectId)
    throws Exception {

    if (authToken == null) {
      throw new LocalException(
          "Attempt to access a service without an AuthToken, the user is likely not logged in.");
    }
    if (projectId == null) {
      throw new Exception("Unexpected null project id");
    }

    // Ask mapping service for this info.
    final String userName = getUsernameForToken(authToken);
    final MappingService mappingService = new MappingServiceJpa();
    try {
      return mappingService.getMapUserRoleForMapProject(userName, projectId);
    } catch (Exception e) {
      throw e;
    } finally {
      mappingService.close();
    }
  }

  /* see superclass */
  // Looks up a user by primary key.
  @Override
  public MapUser getMapUser(Long id) throws Exception {
    return manager.find(MapUserJpa.class, id);
  }

  /* see superclass */
  // Looks up a user by userName; returns null when no match exists.
  @Override
  public MapUser getMapUser(String userName) throws Exception {
    final javax.persistence.Query query =
        manager
            .createQuery("select u from MapUserJpa u where userName = :userName");
    query.setParameter("userName", userName);
    try {
      final List<?> list = query.getResultList();
      if (list.isEmpty()) {
        return null;
      }
      return (MapUser) list.iterator().next();
    } catch (NoResultException e) {
      return null;
    }
  }

  /* see superclass */
  // Persists a new user; manages its own transaction when configured for
  // transaction-per-operation, rolling back on failure.
  @Override
  public MapUser addMapUser(MapUser user) {
    Logger.getLogger(getClass()).debug("Security Service - add user " + user);
    try {
      if (getTransactionPerOperation()) {
        tx = manager.getTransaction();
        tx.begin();
        manager.persist(user);
        tx.commit();
      } else {
        manager.persist(user);
      }
    } catch (Exception e) {
      if (tx.isActive()) {
        tx.rollback();
      }
      throw e;
    }
    return user;
  }

  /* see superclass */
  // Removes a user by id; merges first when the entity is detached.
  @Override
  public void removeMapUser(Long id) {
    Logger.getLogger(getClass()).debug("Security Service - remove user " + id);
    tx = manager.getTransaction();
    // retrieve this user
    final MapUser mu = manager.find(MapUserJpa.class, id);
    try {
      if (getTransactionPerOperation()) {
        tx.begin();
        if (manager.contains(mu)) {
          manager.remove(mu);
        } else {
          manager.remove(manager.merge(mu));
        }
        tx.commit();
      } else {
        if (manager.contains(mu)) {
          manager.remove(mu);
        } else {
          manager.remove(manager.merge(mu));
        }
      }
    } catch (Exception e) {
      if (tx.isActive()) {
        tx.rollback();
      }
      throw e;
    }
  }

  /* see superclass */
  // Merges updated user state; same transaction handling as addMapUser.
  @Override
  public void updateMapUser(MapUser user) {
    Logger.getLogger(getClass())
        .debug("Security Service - update user " + user);
    try {
      if (getTransactionPerOperation()) {
        tx = manager.getTransaction();
        tx.begin();
        manager.merge(user);
        tx.commit();
      } else {
        manager.merge(user);
      }
    } catch (Exception e) {
      if (tx.isActive()) {
        tx.rollback();
      }
      throw e;
    }
  }

  /* see superclass */
  // Returns all users wrapped in a MapUserList with total count set.
  @SuppressWarnings("unchecked")
  @Override
  public MapUserList getMapUsers() {
    javax.persistence.Query query =
        manager.createQuery("select u from MapUserJpa u");
    final List<MapUser> m = query.getResultList();
    final MapUserListJpa mapMapUserList = new MapUserListJpa();
    mapMapUserList.setMapUsers(m);
    mapMapUserList.setTotalCount(m.size());
    return mapMapUserList;
  }

  /* see superclass */
  // Full-text/paged user search; an empty query matches all ids.
  @SuppressWarnings("unchecked")
  @Override
  public MapUserList findMapUsersForQuery(String query, PfsParameter pfs)
    throws Exception {
    Logger.getLogger(getClass()).info(
        "Security Service - find users " + query + ", pfs= " + pfs);

    int[] totalCt = new int[1];
    final List<MapUser> list =
        (List<MapUser>) getQueryResults(query == null || query.isEmpty()
            ? "id:[* TO *]" : query, MapUserJpa.class, MapUserJpa.class, pfs,
            totalCt);
    final MapUserList result = new MapUserListJpa();
    result.setTotalCount(totalCt[0]);
    result.setMapUsers(list);
    for (final MapUser user : result.getMapUsers()) {
      handleLazyInit(user);
    }
    return result;
  }

  /* see superclass */
  // Persists user preferences; same transaction handling as addMapUser.
  @Override
  public MapUserPreferences addMapUserPreferences(
    MapUserPreferences userPreferences) {
    Logger.getLogger(getClass()).debug(
        "Security Service - add user preferences " + userPreferences);
    try {
      if (getTransactionPerOperation()) {
        tx = manager.getTransaction();
        tx.begin();
        manager.persist(userPreferences);
        tx.commit();
      } else {
        manager.persist(userPreferences);
      }
    } catch (Exception e) {
      if (tx.isActive()) {
        tx.rollback();
      }
      throw e;
    }
    return userPreferences;
  }

  /* see superclass */
  // Removes user preferences by id; merges first when detached.
  @Override
  public void removeMapUserPreferences(Long id) {
    Logger.getLogger(getClass()).debug(
        "Security Service - remove user preferences " + id);
    tx = manager.getTransaction();
    // retrieve this user
    final MapUserPreferences mu =
        manager.find(MapUserPreferencesJpa.class, id);
    try {
      if (getTransactionPerOperation()) {
        tx.begin();
        if (manager.contains(mu)) {
          manager.remove(mu);
        } else {
          manager.remove(manager.merge(mu));
        }
        tx.commit();
      } else {
        if (manager.contains(mu)) {
          manager.remove(mu);
        } else {
          manager.remove(manager.merge(mu));
        }
      }
    } catch (Exception e) {
      if (tx.isActive()) {
        tx.rollback();
      }
      throw e;
    }
  }

  /* see superclass */
  // Merges updated preferences; same transaction handling as addMapUser.
  @Override
  public void updateMapUserPreferences(MapUserPreferences userPreferences) {
    Logger.getLogger(getClass()).debug(
        "Security Service - update user preferences " + userPreferences);
    try {
      if (getTransactionPerOperation()) {
        tx = manager.getTransaction();
        tx.begin();
        manager.merge(userPreferences);
        tx.commit();
      } else {
        manager.merge(userPreferences);
      }
    } catch (Exception e) {
      if (tx.isActive()) {
        tx.rollback();
      }
      throw e;
    }
  }

  /**
   * Handle lazy init.
   *
   * @param user the user
   */
  @Override
  public void handleLazyInit(MapUser user) {
    // n/a - no objects connected
  }
}
package com.github.maybeec.lexeme.systemtest; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.util.List; import org.jdom2.Document; import org.jdom2.Element; import org.jdom2.Namespace; import org.junit.Test; import com.github.maybeec.lexeme.ConflictHandlingType; import com.github.maybeec.lexeme.LeXeMerger; import com.github.maybeec.lexeme.common.util.JDom2Util; /** * * @author sholzer (16.03.2015) */ public class FirstWorkingBuild { /** * Path to the used resources folder */ String resources = "src/test/resources/systemtests/"; /** * Test for a simple use case. Used to ensure the basic implementation works as expected. Test case: First * .Expecting * * <pre> * &lt;A> * &lt;B id =''abc''/> * &lt;B id =''def''/> * &lt;C id =''def''/> * &lt;/A> * </pre * @author sholzer (16.03.2015) * @throws Exception * when something somewhere goes wrong */ @Test public void FirstTest() throws Exception { String mergeSchemaLocation = resources + "mergeschemas/FirstMergeSchema.xml"; Document base = JDom2Util.getInstance().getDocument(resources + "bases/FirstBase.xml"); Document patch = JDom2Util.getInstance().getDocument(resources + "patches/FirstPatch.xml"); String uri = "http://www.example.org/FirstXMLSchema"; LeXeMerger testMerger = new LeXeMerger(mergeSchemaLocation); Element result = testMerger.merge(base.getRootElement(), patch.getRootElement(), ConflictHandlingType.PATCHATTACHOROVERWRITE); List<Element> bList = result.getChildren("B", Namespace.getNamespace(uri)); assertTrue("Wrong number of B elements found. Excpected 2, found " + bList.size(), bList.size() == 2); assertTrue("Wrong id attribute at first B element", bList.get(0).getAttribute("id").getValue() .equals("abc")); assertTrue("Wrong id attribute at first B element", bList.get(1).getAttribute("id").getValue() .equals("def")); List<Element> cList = result.getChildren("C", Namespace.getNamespace(uri)); assertTrue("Wrong number of C elements found. 
Excpected 1, found " + cList.size(), cList.size() == 1); assertTrue("Wrong id attribute at first C element", cList.get(0).getAttribute("id").getValue() .equals("def")); } /** * Test for a more complex use case. Scenario 'Second'. Expecting * * <pre> * &lt;A> * &lt;B id="abc;def"/> * &lt;C> * &lt;Ca>0&lt;/> * &lt;Cb>0&lt;/> * &lt;Cb>1&lt;/> * &lt;Cb>2&lt;/> * &lt;/> * &lt;/> * </> * @throws Exception when something somewhere goes wrong * @author sholzer (18.03.2015) */ @Test public void SecondTest() throws Exception { String mergeSchemaLocation = resources + "mergeschemas/SecondMergeSchema.xml"; String uri = "http://www.example.org/SecondSchema"; Document base = JDom2Util.getInstance().getDocument(resources + "bases/SecondBase.xml"); Document patch = JDom2Util.getInstance().getDocument(resources + "patches/SecondPatch.xml"); LeXeMerger testMerger = new LeXeMerger(mergeSchemaLocation); Element result = testMerger.merge(base.getRootElement(), patch.getRootElement(), ConflictHandlingType.PATCHATTACHOROVERWRITE); assertTrue("No B Element found", result.getChildren("B", Namespace.getNamespace(uri)).size() == 1); Element b = result.getChildren("B", Namespace.getNamespace(uri)).get(0); assertTrue("Wrong id at B element", b.getAttribute("id").getValue().equals("abc;def")); assertTrue("No B Element found", result.getChildren("C", Namespace.getNamespace(uri)).size() == 1); Element c = result.getChildren("C", Namespace.getNamespace(uri)).get(0); assertTrue("Wrong number of Ca elements", c.getChildren("Ca", Namespace.getNamespace(uri)).size() == 1); assertTrue("Wrong content of Ca", JDom2Util.getInstance().getTextNodes(c.getChildren("Ca", Namespace.getNamespace(uri)).get(0)) .get(0).getText().equals("0")); int numberOfCb = c.getChildren("Cb", Namespace.getNamespace(uri)).size(); assertEquals("Wrong number of Cb elements. Found " + numberOfCb, 3, numberOfCb); } /** * Test case as in {@link FirstWorkingBuild#SecondTest()}. Patch uses another prefix for its namespace. 
* The prefix used in the result should be the same as in the base * @throws Exception * when something somewhere goes wrong * @author sholzer (31.03.2015) */ @Test public void SecondTestWithDifferentPrefixes() throws Exception { String mergeSchemaLocation = resources + "mergeschemas/SecondMergeSchema.xml"; String uri = "http://www.example.org/SecondSchema"; Document base = JDom2Util.getInstance().getDocument(resources + "bases/SecondBase.xml"); Document patch = JDom2Util.getInstance().getDocument(resources + "patches/SecondPatchWithOtherPrefixes.xml"); LeXeMerger testMerger = new LeXeMerger(mergeSchemaLocation); Element result = testMerger.merge(base.getRootElement(), patch.getRootElement(), ConflictHandlingType.PATCHATTACHOROVERWRITE); assertTrue("No B Element found", result.getChildren("B", Namespace.getNamespace(uri)).size() == 1); Element b = result.getChildren("B", Namespace.getNamespace(uri)).get(0); assertTrue("Wrong id at B element", b.getAttribute("id").getValue().equals("abc;def")); assertTrue("No B Element found", (result.getChildren("C", Namespace.getNamespace(uri))).size() == 1); Element c = result.getChildren("C", Namespace.getNamespace(uri)).get(0); assertTrue("Wrong number of Ca elements", (c.getChildren("Ca", Namespace.getNamespace(uri))).size() == 1); assertTrue("Wrong content of Ca", JDom2Util.getInstance().getTextNodes(c.getChildren("Ca", Namespace.getNamespace(uri)).get(0)) .get(0).getText().equals("0")); int numberOfCb = (c.getChildren("Cb", Namespace.getNamespace(uri))).size(); assertTrue("Wrong number of Cb elements. Found " + numberOfCb, numberOfCb == 3); } /** * Simple Spring Beans test case. 
* @throws Exception * when something somewhere goes wrong * @author sholzer (31.03.2015) */ @Test public void FirstBeanTest() throws Exception { String mergeSchemaLocation = resources + "mergeschemas/BeansMergeSchema.xml"; Document base = JDom2Util.getInstance().getDocument(resources + "bases/Beans1.xml"); Document patch = JDom2Util.getInstance().getDocument(resources + "patches/Beans1.xml"); LeXeMerger testMerger = new LeXeMerger(mergeSchemaLocation); Element result = testMerger.merge(base.getRootElement(), patch.getRootElement(), ConflictHandlingType.PATCHOVERWRITE); assertEquals("Not all bean elements found", 3, result.getChildren("bean", Namespace.getNamespace("http://www.springframework.org/schema/beans")) .size()); List<Element> beanNodeList = result.getChildren("bean", Namespace.getNamespace("http://www.springframework.org/schema/beans")); assertEquals( "Not all properties at bean1 found", 2, beanNodeList .get(0) .getChildren("property", Namespace.getNamespace("http://www.springframework.org/schema/beans")).size()); } /** * Simple Spring Beans test case * @throws Exception * when something somewhere goes wrong * @author sholzer (31.03.2015) */ @Test public void SecondBeanTest() throws Exception { String mergeSchemaLocation = resources + "mergeschemas/BeansMergeSchema.xml"; String namespaceUri = "http://www.springframework.org/schema/beans"; Document base = JDom2Util.getInstance().getDocument(resources + "bases/Beans2.xml"); Document patch = JDom2Util.getInstance().getDocument(resources + "patches/Beans2.xml"); LeXeMerger testMerger = new LeXeMerger(mergeSchemaLocation); Element result = testMerger.merge(base.getRootElement(), patch.getRootElement(), ConflictHandlingType.PATCHOVERWRITE); assertEquals("Not all bean elements found", 2, result.getChildren("bean", Namespace.getNamespace("http://www.springframework.org/schema/beans")) .size()); List<Element> beanNodeList = result.getChildren("bean", Namespace.getNamespace(namespaceUri)); Element bean1 = 
beanNodeList.get(0); assertEquals("wrong parent for bean1", "bean2", bean1.getAttribute("parent").getValue()); assertEquals("No properties found", 1, bean1.getChildren("property", Namespace.getNamespace(namespaceUri)).size()); } }
/*
 * Copyright 2014-2022 Aleksandr Mashchenko.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.amashchenko.maven.plugin.gitflow;

import java.util.HashMap;
import java.util.Map;

import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.ArtifactUtils;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.codehaus.plexus.components.interactivity.PrompterException;
import org.codehaus.plexus.util.StringUtils;

/**
 * The git flow release mojo. Performs a full release in one pass: bumps the
 * version on develop, merges into the production branch, tags, bumps to the
 * next snapshot, and optionally pushes.
 *
 * @since 1.2.0
 */
@Mojo(name = "release", aggregator = true)
public class GitFlowReleaseMojo extends AbstractGitFlowMojo {

    /** Whether to skip tagging the release in Git. */
    @Parameter(property = "skipTag", defaultValue = "false")
    private boolean skipTag = false;

    /**
     * Whether to skip calling Maven test goal before releasing.
     *
     * @since 1.0.5
     */
    @Parameter(property = "skipTestProject", defaultValue = "false")
    private boolean skipTestProject = false;

    /**
     * Whether to allow SNAPSHOT versions in dependencies.
     *
     * @since 1.2.2
     */
    @Parameter(property = "allowSnapshots", defaultValue = "false")
    private boolean allowSnapshots = false;

    /**
     * Whether to rebase branch or merge. If <code>true</code> then rebase will
     * be performed.
     *
     * @since 1.2.3
     */
    @Parameter(property = "releaseRebase", defaultValue = "false")
    private boolean releaseRebase = false;

    /**
     * Whether to use <code>--no-ff</code> option when merging.
     *
     * @since 1.2.3
     */
    @Parameter(property = "releaseMergeNoFF", defaultValue = "true")
    private boolean releaseMergeNoFF = true;

    /**
     * Whether to push to the remote.
     *
     * @since 1.3.0
     */
    // NOTE: unlike the other flags this field has no Java initializer; its
    // default comes solely from the @Parameter defaultValue injection.
    @Parameter(property = "pushRemote", defaultValue = "true")
    private boolean pushRemote;

    /**
     * Release version to use instead of the default next release version in non
     * interactive mode.
     *
     * @since 1.3.1
     */
    @Parameter(property = "releaseVersion", defaultValue = "")
    private String releaseVersion = "";

    /**
     * Whether to use <code>--ff-only</code> option when merging.
     *
     * @since 1.4.0
     */
    @Parameter(property = "releaseMergeFFOnly", defaultValue = "false")
    private boolean releaseMergeFFOnly = false;

    /**
     * Whether to remove qualifiers from the next development version.
     *
     * @since 1.6.0
     */
    @Parameter(property = "digitsOnlyDevVersion", defaultValue = "false")
    private boolean digitsOnlyDevVersion = false;

    /**
     * Development version to use instead of the default next development
     * version in non interactive mode.
     *
     * @since 1.6.0
     */
    @Parameter(property = "developmentVersion", defaultValue = "")
    private String developmentVersion = "";

    /**
     * Which digit to increment in the next development version. Starts from
     * zero.
     *
     * @since 1.6.0
     */
    @Parameter(property = "versionDigitToIncrement")
    private Integer versionDigitToIncrement;

    /**
     * Maven goals to execute before the release.
     *
     * @since 1.8.0
     */
    @Parameter(property = "preReleaseGoals")
    private String preReleaseGoals;

    /**
     * Maven goals to execute after the release.
     *
     * @since 1.8.0
     */
    @Parameter(property = "postReleaseGoals")
    private String postReleaseGoals;

    /**
     * Whether to make a GPG-signed tag.
     *
     * @since 1.9.0
     */
    @Parameter(property = "gpgSignTag", defaultValue = "false")
    private boolean gpgSignTag = false;

    /**
     * Whether to skip merging release into the production branch.
     *
     * @since 1.15.0
     */
    @Parameter(property = "skipReleaseMergeProdBranch", defaultValue = "false")
    private boolean skipReleaseMergeProdBranch = false;

    /** {@inheritDoc} */
    @Override
    public void execute() throws MojoExecutionException, MojoFailureException {
        validateConfiguration(preReleaseGoals, postReleaseGoals);

        try {
            // set git flow configuration
            initGitFlowConfig();

            // check uncommitted changes
            checkUncommittedChanges();

            // git for-each-ref --count=1 refs/heads/release/*
            final String releaseBranch = gitFindBranches(
                    gitFlowConfig.getReleaseBranchPrefix(), true);
            // An in-flight release branch means a previous release was started
            // but never finished — refuse to start another.
            if (StringUtils.isNotBlank(releaseBranch)) {
                throw new MojoFailureException(
                        "Release branch already exists. Cannot start release.");
            }

            if (fetchRemote) {
                // checkout from remote if doesn't exist
                gitFetchRemoteAndCreate(gitFlowConfig.getDevelopmentBranch());

                // fetch and check remote
                gitFetchRemoteAndCompare(gitFlowConfig.getDevelopmentBranch());

                if (notSameProdDevName()) {
                    // checkout from remote if doesn't exist
                    gitFetchRemoteAndCreate(gitFlowConfig.getProductionBranch());

                    // fetch and check remote
                    gitFetchRemoteAndCompare(gitFlowConfig
                            .getProductionBranch());
                }
            }

            // need to be in develop to check snapshots and to get correct
            // project version
            // git checkout develop
            gitCheckout(gitFlowConfig.getDevelopmentBranch());

            // check snapshots dependencies
            if (!allowSnapshots) {
                checkSnapshotDependencies();
            }

            if (!skipTestProject) {
                // mvn clean test
                mvnCleanTest();
            }

            // get current project version from pom
            final String currentVersion = getCurrentProjectVersion();

            String defaultVersion = null;
            if (tychoBuild) {
                // Tycho builds keep their current version as the release.
                defaultVersion = currentVersion;
            } else {
                // get default release version
                defaultVersion = new GitFlowVersionInfo(currentVersion,
                        getVersionPolicy()).getReleaseVersionString();
            }

            if (defaultVersion == null) {
                throw new MojoFailureException(
                        "Cannot get default project version.");
            }

            String version = null;
            if (settings.isInteractiveMode()) {
                try {
                    // Re-prompt until the user supplies a valid version (an
                    // empty answer accepts the default below).
                    while (version == null) {
                        version = prompter.prompt("What is release version? ["
                                + defaultVersion + "]");
                        if (!"".equals(version)
                                && (!GitFlowVersionInfo.isValidVersion(version)
                                        || !validBranchName(version))) {
                            getLog().info("The version is not valid.");
                            version = null;
                        }
                    }
                } catch (PrompterException e) {
                    throw new MojoFailureException("release", e);
                }
            } else {
                version = releaseVersion;
            }

            if (StringUtils.isBlank(version)) {
                getLog().info("Version is blank. Using default version.");
                version = defaultVersion;
            }

            // maven goals before release
            if (StringUtils.isNotBlank(preReleaseGoals)) {
                mvnRun(preReleaseGoals);
            }

            Map<String, String> messageProperties = new HashMap<>();
            messageProperties.put("version", version);

            // execute if version changed
            if (!version.equals(currentVersion)) {
                // mvn set version
                mvnSetVersions(version);

                // git commit -a -m updating versions for release
                gitCommit(commitMessages.getReleaseStartMessage(),
                        messageProperties);
            }

            if (!skipReleaseMergeProdBranch && notSameProdDevName()) {
                // git checkout master
                gitCheckout(gitFlowConfig.getProductionBranch());

                gitMerge(gitFlowConfig.getDevelopmentBranch(), releaseRebase,
                        releaseMergeNoFF, releaseMergeFFOnly,
                        commitMessages.getReleaseFinishMergeMessage(),
                        messageProperties);
            }

            if (!skipTag) {
                if (tychoBuild && ArtifactUtils.isSnapshot(version)) {
                    // Tycho releases tag the non-SNAPSHOT form of the version.
                    version = version.replace("-" + Artifact.SNAPSHOT_VERSION,
                            "");
                }

                messageProperties.put("version", version);

                // git tag -a ...
                gitTag(gitFlowConfig.getVersionTagPrefix() + version,
                        commitMessages.getTagReleaseMessage(), gpgSignTag,
                        messageProperties);
            }

            // maven goals after release
            if (StringUtils.isNotBlank(postReleaseGoals)) {
                mvnRun(postReleaseGoals);
            }

            if (notSameProdDevName()) {
                // git checkout develop
                gitCheckout(gitFlowConfig.getDevelopmentBranch());
            }

            // get next snapshot version
            final String nextSnapshotVersion;
            if (!settings.isInteractiveMode()
                    && StringUtils.isNotBlank(developmentVersion)) {
                nextSnapshotVersion = developmentVersion;
            } else {
                GitFlowVersionInfo versionInfo = new GitFlowVersionInfo(version,
                        getVersionPolicy());
                if (digitsOnlyDevVersion) {
                    versionInfo = versionInfo.digitsVersionInfo();
                }

                nextSnapshotVersion = versionInfo
                        .nextSnapshotVersion(versionDigitToIncrement);
            }

            if (StringUtils.isBlank(nextSnapshotVersion)) {
                throw new MojoFailureException(
                        "Next snapshot version is blank.");
            }

            // mvn set version
            mvnSetVersions(nextSnapshotVersion);

            messageProperties.put("version", nextSnapshotVersion);

            // git commit -a -m updating for next development version
            gitCommit(commitMessages.getReleaseFinishMessage(),
                    messageProperties);

            if (installProject) {
                // mvn clean install
                mvnCleanInstall();
            }

            if (pushRemote) {
                // Push production (with tags unless tagging was skipped), then
                // develop if it is a distinct branch.
                gitPush(gitFlowConfig.getProductionBranch(), !skipTag);

                if (notSameProdDevName()) {
                    gitPush(gitFlowConfig.getDevelopmentBranch(), !skipTag);
                }
            }
        } catch (Exception e) {
            throw new MojoFailureException("release", e);
        }
    }
}
/* * Copyright 2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.social.facebook.api; import static org.junit.Assert.*; import static org.springframework.http.HttpMethod.*; import static org.springframework.test.web.client.match.MockRestRequestMatchers.*; import static org.springframework.test.web.client.response.MockRestResponseCreators.*; import org.junit.Test; import org.springframework.http.MediaType; public class SocialContextTemplateTest extends AbstractFacebookApiTest { @Test public void getMutualFriends() { mockServer.expect(requestTo(fbUrl("12345?fields=context.fields%28mutual_friends.limit%2825%29%29"))) .andExpect(method(GET)) .andExpect(header("Authorization", "OAuth someAccessToken")) .andRespond(withSuccess(jsonResource("mutual_friends"), MediaType.APPLICATION_JSON)); CountedList<Reference> mutualFriends = facebook.socialContextOperations().getMutualFriends("12345"); assertEquals(2, mutualFriends.size()); assertEquals(177, mutualFriends.getTotalCount().intValue()); Reference friend = mutualFriends.get(0); assertEquals("708598160", friend.getId()); assertEquals("Josh Long", friend.getName()); friend = mutualFriends.get(1); assertEquals("1255689239", friend.getId()); assertEquals("Keith Donald", friend.getName()); mockServer.verify(); } @Test public void getMutualFriends_withLimit() { mockServer.expect(requestTo(fbUrl("12345?fields=context.fields%28mutual_friends.limit%2810%29%29"))) 
.andExpect(method(GET)) .andExpect(header("Authorization", "OAuth someAccessToken")) .andRespond(withSuccess(jsonResource("mutual_friends"), MediaType.APPLICATION_JSON)); CountedList<Reference> mutualFriends = facebook.socialContextOperations().getMutualFriends("12345", 10); assertEquals(2, mutualFriends.size()); assertEquals(177, mutualFriends.getTotalCount().intValue()); Reference friend = mutualFriends.get(0); assertEquals("708598160", friend.getId()); assertEquals("Josh Long", friend.getName()); friend = mutualFriends.get(1); assertEquals("1255689239", friend.getId()); assertEquals("Keith Donald", friend.getName()); mockServer.verify(); } @Test public void getAllMutualFriends() { mockServer.expect(requestTo(fbUrl("12345?fields=context.fields%28all_mutual_friends.limit%2825%29%29"))) .andExpect(method(GET)) .andExpect(header("Authorization", "OAuth someAccessToken")) .andRespond(withSuccess(jsonResource("all_mutual_friends"), MediaType.APPLICATION_JSON)); CountedList<Reference> mutualFriends = facebook.socialContextOperations().getAllMutualFriends("12345"); assertEquals(2, mutualFriends.size()); assertEquals(177, mutualFriends.getTotalCount().intValue()); Reference friend = mutualFriends.get(0); assertEquals("708598160", friend.getId()); assertEquals("Josh Long", friend.getName()); friend = mutualFriends.get(1); assertEquals("1255689239", friend.getId()); assertEquals("Keith Donald", friend.getName()); mockServer.verify(); } @Test public void getAllMutualFriends_withLimit() { mockServer.expect(requestTo(fbUrl("12345?fields=context.fields%28all_mutual_friends.limit%2810%29%29"))) .andExpect(method(GET)) .andExpect(header("Authorization", "OAuth someAccessToken")) .andRespond(withSuccess(jsonResource("all_mutual_friends"), MediaType.APPLICATION_JSON)); CountedList<Reference> mutualFriends = facebook.socialContextOperations().getAllMutualFriends("12345", 10); assertEquals(2, mutualFriends.size()); assertEquals(177, mutualFriends.getTotalCount().intValue()); 
Reference friend = mutualFriends.get(0); assertEquals("708598160", friend.getId()); assertEquals("Josh Long", friend.getName()); friend = mutualFriends.get(1); assertEquals("1255689239", friend.getId()); assertEquals("Keith Donald", friend.getName()); mockServer.verify(); } @Test public void getMutualLikes() { mockServer.expect(requestTo(fbUrl("12345?fields=context.fields%28mutual_likes.limit%2825%29%29"))) .andExpect(method(GET)) .andExpect(header("Authorization", "OAuth someAccessToken")) .andRespond(withSuccess(jsonResource("mutual_likes"), MediaType.APPLICATION_JSON)); CountedList<Reference> mutualLikes = facebook.socialContextOperations().getMutualLikes("12345"); assertEquals(2, mutualLikes.size()); assertEquals(68, mutualLikes.getTotalCount().intValue()); Reference like = mutualLikes.get(0); assertEquals("36643253738", like.getId()); assertEquals("Mulch, Sweat, & Shears", like.getName()); like = mutualLikes.get(1); assertEquals("188620089283", like.getId()); assertEquals("Torchy's Tacos", like.getName()); mockServer.verify(); } @Test public void getMutualLikes_withLimit() { mockServer.expect(requestTo(fbUrl("12345?fields=context.fields%28mutual_likes.limit%2810%29%29"))) .andExpect(method(GET)) .andExpect(header("Authorization", "OAuth someAccessToken")) .andRespond(withSuccess(jsonResource("mutual_likes"), MediaType.APPLICATION_JSON)); CountedList<Reference> mutualLikes = facebook.socialContextOperations().getMutualLikes("12345", 10); assertEquals(2, mutualLikes.size()); assertEquals(68, mutualLikes.getTotalCount().intValue()); Reference like = mutualLikes.get(0); assertEquals("36643253738", like.getId()); assertEquals("Mulch, Sweat, & Shears", like.getName()); like = mutualLikes.get(1); assertEquals("188620089283", like.getId()); assertEquals("Torchy's Tacos", like.getName()); mockServer.verify(); } @Test public void getFriendsUsingApp() { mockServer.expect(requestTo(fbUrl("12345?fields=context.fields%28friends_using_app.limit%2825%29%29"))) 
.andExpect(method(GET)) .andExpect(header("Authorization", "OAuth someAccessToken")) .andRespond(withSuccess(jsonResource("friends_using_app"), MediaType.APPLICATION_JSON)); CountedList<Reference> friends = facebook.socialContextOperations().getFriendsUsingApp("12345"); assertEquals(2, friends.size()); assertEquals(7, friends.getTotalCount().intValue()); Reference friend = friends.get(0); assertEquals("726452090", friend.getId()); assertEquals("Guillaume Laforge", friend.getName()); friend = friends.get(1); assertEquals("1255689239", friend.getId()); assertEquals("Keith Donald", friend.getName()); mockServer.verify(); } @Test public void getFriendsUsingApp_withLimit() { mockServer.expect(requestTo(fbUrl("12345?fields=context.fields%28friends_using_app.limit%2815%29%29"))) .andExpect(method(GET)) .andExpect(header("Authorization", "OAuth someAccessToken")) .andRespond(withSuccess(jsonResource("friends_using_app"), MediaType.APPLICATION_JSON)); CountedList<Reference> friends = facebook.socialContextOperations().getFriendsUsingApp("12345", 15); assertEquals(2, friends.size()); assertEquals(7, friends.getTotalCount().intValue()); Reference friend = friends.get(0); assertEquals("726452090", friend.getId()); assertEquals("Guillaume Laforge", friend.getName()); friend = friends.get(1); assertEquals("1255689239", friend.getId()); assertEquals("Keith Donald", friend.getName()); mockServer.verify(); } @Test public void getFriendsWhoLike() { mockServer.expect(requestTo(fbUrl("12345?fields=context.fields%28friends_who_like.limit%2825%29%29"))) .andExpect(method(GET)) .andExpect(header("Authorization", "OAuth someAccessToken")) .andRespond(withSuccess(jsonResource("friends_who_like"), MediaType.APPLICATION_JSON)); CountedList<Reference> friends = facebook.socialContextOperations().getFriendsWhoLike("12345"); assertEquals(2, friends.size()); assertEquals(7, friends.getTotalCount().intValue()); Reference friend = friends.get(0); assertEquals("726452090", friend.getId()); 
assertEquals("Guillaume Laforge", friend.getName()); friend = friends.get(1); assertEquals("1255689239", friend.getId()); assertEquals("Keith Donald", friend.getName()); mockServer.verify(); } @Test public void getFriendsWhoLike_withLimit() { mockServer.expect(requestTo(fbUrl("12345?fields=context.fields%28friends_who_like.limit%2815%29%29"))) .andExpect(method(GET)) .andExpect(header("Authorization", "OAuth someAccessToken")) .andRespond(withSuccess(jsonResource("friends_who_like"), MediaType.APPLICATION_JSON)); CountedList<Reference> friends = facebook.socialContextOperations().getFriendsWhoLike("12345", 15); assertEquals(2, friends.size()); assertEquals(7, friends.getTotalCount().intValue()); Reference friend = friends.get(0); assertEquals("726452090", friend.getId()); assertEquals("Guillaume Laforge", friend.getName()); friend = friends.get(1); assertEquals("1255689239", friend.getId()); assertEquals("Keith Donald", friend.getName()); mockServer.verify(); } @Test public void getFriendsWhoWatched() { mockServer.expect(requestTo(fbUrl("12345?fields=context.fields%28video_watch_friends.limit%2825%29%29"))) .andExpect(method(GET)) .andExpect(header("Authorization", "OAuth someAccessToken")) .andRespond(withSuccess(jsonResource("video_watch_friends"), MediaType.APPLICATION_JSON)); CountedList<Reference> friends = facebook.socialContextOperations().getFriendsWhoWatched("12345"); assertEquals(2, friends.size()); assertEquals(7, friends.getTotalCount().intValue()); Reference friend = friends.get(0); assertEquals("726452090", friend.getId()); assertEquals("Guillaume Laforge", friend.getName()); friend = friends.get(1); assertEquals("1255689239", friend.getId()); assertEquals("Keith Donald", friend.getName()); mockServer.verify(); } @Test public void getFriendsWhoWatched_withLimit() { mockServer.expect(requestTo(fbUrl("12345?fields=context.fields%28video_watch_friends.limit%2815%29%29"))) .andExpect(method(GET)) .andExpect(header("Authorization", "OAuth 
someAccessToken")) .andRespond(withSuccess(jsonResource("video_watch_friends"), MediaType.APPLICATION_JSON)); CountedList<Reference> friends = facebook.socialContextOperations().getFriendsWhoWatched("12345", 15); assertEquals(2, friends.size()); assertEquals(7, friends.getTotalCount().intValue()); Reference friend = friends.get(0); assertEquals("726452090", friend.getId()); assertEquals("Guillaume Laforge", friend.getName()); friend = friends.get(1); assertEquals("1255689239", friend.getId()); assertEquals("Keith Donald", friend.getName()); mockServer.verify(); } @Test public void getFriendsWhoListenTo() { mockServer.expect(requestTo(fbUrl("12345?fields=context.fields%28music_listen_friends.limit%2825%29%29"))) .andExpect(method(GET)) .andExpect(header("Authorization", "OAuth someAccessToken")) .andRespond(withSuccess(jsonResource("music_listen_friends"), MediaType.APPLICATION_JSON)); CountedList<Reference> friends = facebook.socialContextOperations().getFriendsWhoListenTo("12345"); assertEquals(2, friends.size()); assertEquals(7, friends.getTotalCount().intValue()); Reference friend = friends.get(0); assertEquals("726452090", friend.getId()); assertEquals("Guillaume Laforge", friend.getName()); friend = friends.get(1); assertEquals("1255689239", friend.getId()); assertEquals("Keith Donald", friend.getName()); mockServer.verify(); } @Test public void getFriendsWhoListenTo_withLimit() { mockServer.expect(requestTo(fbUrl("12345?fields=context.fields%28music_listen_friends.limit%2815%29%29"))) .andExpect(method(GET)) .andExpect(header("Authorization", "OAuth someAccessToken")) .andRespond(withSuccess(jsonResource("music_listen_friends"), MediaType.APPLICATION_JSON)); CountedList<Reference> friends = facebook.socialContextOperations().getFriendsWhoListenTo("12345", 15); assertEquals(2, friends.size()); assertEquals(7, friends.getTotalCount().intValue()); Reference friend = friends.get(0); assertEquals("726452090", friend.getId()); assertEquals("Guillaume Laforge", 
friend.getName()); friend = friends.get(1); assertEquals("1255689239", friend.getId()); assertEquals("Keith Donald", friend.getName()); mockServer.verify(); } @Test public void getFriendsTaggedAt() { mockServer.expect(requestTo(fbUrl("12345?fields=context.fields%28friends_tagged_at.limit%2825%29%29"))) .andExpect(method(GET)) .andExpect(header("Authorization", "OAuth someAccessToken")) .andRespond(withSuccess(jsonResource("friends_tagged_at"), MediaType.APPLICATION_JSON)); CountedList<Reference> friends = facebook.socialContextOperations().getFriendsTaggedAt("12345"); assertEquals(2, friends.size()); assertEquals(7, friends.getTotalCount().intValue()); Reference friend = friends.get(0); assertEquals("726452090", friend.getId()); assertEquals("Guillaume Laforge", friend.getName()); friend = friends.get(1); assertEquals("1255689239", friend.getId()); assertEquals("Keith Donald", friend.getName()); mockServer.verify(); } @Test public void getFriendsTaggedAt_withLimit() { mockServer.expect(requestTo(fbUrl("12345?fields=context.fields%28friends_tagged_at.limit%2815%29%29"))) .andExpect(method(GET)) .andExpect(header("Authorization", "OAuth someAccessToken")) .andRespond(withSuccess(jsonResource("friends_tagged_at"), MediaType.APPLICATION_JSON)); CountedList<Reference> friends = facebook.socialContextOperations().getFriendsTaggedAt("12345", 15); assertEquals(2, friends.size()); assertEquals(7, friends.getTotalCount().intValue()); Reference friend = friends.get(0); assertEquals("726452090", friend.getId()); assertEquals("Guillaume Laforge", friend.getName()); friend = friends.get(1); assertEquals("1255689239", friend.getId()); assertEquals("Keith Donald", friend.getName()); mockServer.verify(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.events; import org.apache.nifi.reporting.Bulletin; import org.apache.nifi.reporting.BulletinQuery; import org.apache.nifi.reporting.BulletinRepository; import org.apache.nifi.reporting.ComponentType; import org.apache.nifi.util.RingBuffer; import org.apache.nifi.util.RingBuffer.Filter; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Set; import java.util.TreeSet; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.regex.Pattern; public class VolatileBulletinRepository implements BulletinRepository { private static final int CONTROLLER_BUFFER_SIZE = 10; private static final int COMPONENT_BUFFER_SIZE = 5; private static final String CONTROLLER_BULLETIN_STORE_KEY = "CONTROLLER"; private static final String SERVICE_BULLETIN_STORE_KEY = "SERVICE"; private static final String REPORTING_TASK_BULLETIN_STORE_KEY = "REPORTING_TASK"; private final ConcurrentMap<String, ConcurrentMap<String, RingBuffer<Bulletin>>> bulletinStoreMap = new ConcurrentHashMap<>(); private 
volatile BulletinProcessingStrategy processingStrategy = new DefaultBulletinProcessingStrategy(); private final AtomicLong maxId = new AtomicLong(-1L); @Override public void addBulletin(final Bulletin bulletin) { processingStrategy.update(bulletin); final long bulletinId = bulletin.getId(); maxId.getAndAccumulate(bulletinId, Math::max); } @Override public long getMaxBulletinId() { return maxId.get(); } @Override public int getControllerBulletinCapacity() { return CONTROLLER_BUFFER_SIZE; } @Override public int getComponentBulletinCapacity() { return COMPONENT_BUFFER_SIZE; } @Override public List<Bulletin> findBulletins(final BulletinQuery bulletinQuery) { final Filter<Bulletin> filter = createFilter(bulletinQuery); final Set<Bulletin> selected = new TreeSet<>(); int max = bulletinQuery.getLimit() == null ? Integer.MAX_VALUE : bulletinQuery.getLimit(); for (final ConcurrentMap<String, RingBuffer<Bulletin>> componentMap : bulletinStoreMap.values()) { for (final RingBuffer<Bulletin> ringBuffer : componentMap.values()) { final List<Bulletin> bulletinsForComponent = ringBuffer.getSelectedElements(filter, max); selected.addAll(bulletinsForComponent); max -= bulletinsForComponent.size(); if (max <= 0) { break; } } } return new ArrayList<>(selected); } private Filter<Bulletin> createFilter(final BulletinQuery bulletinQuery) { final long fiveMinutesAgo = System.currentTimeMillis() - TimeUnit.MINUTES.toMillis(5); return new Filter<Bulletin>() { @Override public boolean select(final Bulletin bulletin) { // only include bulletins after the specified id if (bulletinQuery.getAfter() != null && bulletin.getId() <= bulletinQuery.getAfter()) { return false; } // if group pattern was specified see if it should be excluded if (bulletinQuery.getGroupIdPattern() != null) { // exclude if this bulletin doesnt have a group or if it doesnt match if (bulletin.getGroupId() == null || !bulletinQuery.getGroupIdPattern().matcher(bulletin.getGroupId()).find()) { return false; } } // if a message 
pattern was specified see if it should be excluded if (bulletinQuery.getMessagePattern() != null) { // exclude if this bulletin doesnt have a message or if it doesnt match if (bulletin.getMessage() == null || !bulletinQuery.getMessagePattern().matcher(bulletin.getMessage()).find()) { return false; } } // if a name pattern was specified see if it should be excluded if (bulletinQuery.getNamePattern() != null) { // exclude if this bulletin doesnt have a source name or if it doesnt match if (bulletin.getSourceName() == null || !bulletinQuery.getNamePattern().matcher(bulletin.getSourceName()).find()) { return false; } } if (bulletin.getTimestamp().getTime() < fiveMinutesAgo) { return false; } // if a source id was specified see if it should be excluded if (bulletinQuery.getSourceIdPattern() != null) { // exclude if this bulletin doesn't have a source id or if it doesn't match if (bulletin.getSourceId() == null || !bulletinQuery.getSourceIdPattern().matcher(bulletin.getSourceId()).find()) { return false; } } // if a source component type was specified see if it should be excluded if (bulletinQuery.getSourceType() != null) { // exclude if this bulletin source type doesn't match if (bulletin.getSourceType() == null || !bulletinQuery.getSourceType().equals(bulletin.getSourceType())) { return false; } } return true; } }; } @Override public List<Bulletin> findBulletinsForSource(final String sourceId, final String groupId) { final BulletinQuery bulletinQuery = new BulletinQuery.Builder().sourceIdMatches(Pattern.quote(sourceId)).groupIdMatches(Pattern.quote(groupId)).limit(COMPONENT_BUFFER_SIZE).build(); final ConcurrentMap<String, RingBuffer<Bulletin>> componentMap = bulletinStoreMap.get(groupId); if (componentMap == null) { return Collections.emptyList(); } return findBulletinsForSource(sourceId, bulletinQuery, Collections.singleton(componentMap)); } @Override public List<Bulletin> findBulletinsForSource(final String sourceId) { final BulletinQuery bulletinQuery = new 
BulletinQuery.Builder().sourceIdMatches(Pattern.quote(sourceId)).limit(COMPONENT_BUFFER_SIZE).build(); return findBulletinsForSource(sourceId, bulletinQuery, this.bulletinStoreMap.values()); } private List<Bulletin> findBulletinsForSource(final String sourceId, final BulletinQuery bulletinQuery, final Collection<ConcurrentMap<String, RingBuffer<Bulletin>>> bulletinStoreMaps) { final Filter<Bulletin> filter = createFilter(bulletinQuery); final int max = bulletinQuery.getLimit() == null ? Integer.MAX_VALUE : bulletinQuery.getLimit(); for (final ConcurrentMap<String, RingBuffer<Bulletin>> componentMap : bulletinStoreMaps) { final RingBuffer<Bulletin> ringBuffer = componentMap.get(sourceId); if (ringBuffer == null) { continue; } final List<Bulletin> bulletinsForComponent = ringBuffer.getSelectedElements(filter, max); Collections.sort(bulletinsForComponent); return bulletinsForComponent; } return Collections.emptyList(); } @Override public List<Bulletin> findBulletinsForGroupBySource(String groupId) { return findBulletinsForGroupBySource(groupId, COMPONENT_BUFFER_SIZE); } @Override public List<Bulletin> findBulletinsForGroupBySource(final String groupId, final int maxPerComponent) { final long fiveMinutesAgo = System.currentTimeMillis() - TimeUnit.MINUTES.toMillis(5); final ConcurrentMap<String, RingBuffer<Bulletin>> componentMap = bulletinStoreMap.get(groupId); if (componentMap == null) { return Collections.<Bulletin>emptyList(); } final List<Bulletin> allComponentBulletins = new ArrayList<>(); for (final RingBuffer<Bulletin> ringBuffer : componentMap.values()) { allComponentBulletins.addAll(ringBuffer.getSelectedElements(new Filter<Bulletin>() { @Override public boolean select(final Bulletin bulletin) { return bulletin.getTimestamp().getTime() >= fiveMinutesAgo; } }, maxPerComponent)); } return allComponentBulletins; } @Override public List<Bulletin> findBulletinsForController() { return findBulletinsForController(CONTROLLER_BUFFER_SIZE); } @Override public 
List<Bulletin> findBulletinsForController(final int max) { final long fiveMinutesAgo = System.currentTimeMillis() - TimeUnit.MINUTES.toMillis(5); final Filter<Bulletin> filter = new Filter<Bulletin>() { @Override public boolean select(final Bulletin bulletin) { return bulletin.getTimestamp().getTime() >= fiveMinutesAgo; } }; final List<Bulletin> controllerBulletins = new ArrayList<>(); final ConcurrentMap<String, RingBuffer<Bulletin>> controllerBulletinMap = bulletinStoreMap.get(CONTROLLER_BULLETIN_STORE_KEY); if (controllerBulletinMap != null) { final RingBuffer<Bulletin> buffer = controllerBulletinMap.get(CONTROLLER_BULLETIN_STORE_KEY); if (buffer != null) { controllerBulletins.addAll(buffer.getSelectedElements(filter, max)); } } // We only want the newest bulletin, so we sort based on time and take the top 'max' entries Collections.sort(controllerBulletins); if (controllerBulletins.size() > max) { return controllerBulletins.subList(0, max); } return controllerBulletins; } private List<RingBuffer<Bulletin>> getBulletinBuffers(final Bulletin bulletin) { final String storageKey = getBulletinStoreKey(bulletin); ConcurrentMap<String, RingBuffer<Bulletin>> componentMap = bulletinStoreMap.get(storageKey); if (componentMap == null) { componentMap = new ConcurrentHashMap<>(); final ConcurrentMap<String, RingBuffer<Bulletin>> existing = bulletinStoreMap.putIfAbsent(storageKey, componentMap); if (existing != null) { componentMap = existing; } } final List<RingBuffer<Bulletin>> buffers = new ArrayList<>(2); if (isControllerBulletin(bulletin)) { RingBuffer<Bulletin> bulletinBuffer = componentMap.get(CONTROLLER_BULLETIN_STORE_KEY); if (bulletinBuffer == null) { bulletinBuffer = new RingBuffer<>(CONTROLLER_BUFFER_SIZE); final RingBuffer<Bulletin> existingBuffer = componentMap.putIfAbsent(CONTROLLER_BULLETIN_STORE_KEY, bulletinBuffer); if (existingBuffer != null) { bulletinBuffer = existingBuffer; } } buffers.add(bulletinBuffer); } if (bulletin.getSourceType() != 
ComponentType.FLOW_CONTROLLER) { RingBuffer<Bulletin> bulletinBuffer = componentMap.get(bulletin.getSourceId()); if (bulletinBuffer == null) { bulletinBuffer = new RingBuffer<>(COMPONENT_BUFFER_SIZE); final RingBuffer<Bulletin> existingBuffer = componentMap.putIfAbsent(bulletin.getSourceId(), bulletinBuffer); if (existingBuffer != null) { bulletinBuffer = existingBuffer; } } buffers.add(bulletinBuffer); } return buffers; } private String getBulletinStoreKey(final Bulletin bulletin) { switch (bulletin.getSourceType()) { case FLOW_CONTROLLER: return CONTROLLER_BULLETIN_STORE_KEY; case CONTROLLER_SERVICE: return SERVICE_BULLETIN_STORE_KEY; case REPORTING_TASK: return REPORTING_TASK_BULLETIN_STORE_KEY; default: return bulletin.getGroupId(); } } private boolean isControllerBulletin(final Bulletin bulletin) { switch (bulletin.getSourceType()) { case FLOW_CONTROLLER: case CONTROLLER_SERVICE: case REPORTING_TASK: return true; default: return false; } } private class DefaultBulletinProcessingStrategy implements BulletinProcessingStrategy { @Override public void update(final Bulletin bulletin) { for (final RingBuffer<Bulletin> bulletinBuffer : getBulletinBuffers(bulletin)) { bulletinBuffer.add(bulletin); } } } }
package com.geekyouup.android.ustopwatch.wear.fragments; import android.animation.ValueAnimator; import android.annotation.TargetApi; import android.content.Context; import android.content.SharedPreferences; import android.content.res.Resources; import android.content.res.TypedArray; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.DashPathEffect; import android.graphics.Paint; import android.graphics.Path; import android.graphics.drawable.Drawable; import android.os.Build; import android.os.Bundle; import android.os.Handler; import android.os.Message; import android.os.Vibrator; import android.support.v4.view.ViewCompat; import android.support.v4.view.animation.FastOutSlowInInterpolator; import android.util.AttributeSet; import android.util.Log; import android.view.MotionEvent; import android.view.View; import android.view.WindowInsets; import android.view.animation.AccelerateDecelerateInterpolator; import com.geekyouup.android.ustopwatch.R; import com.geekyouup.android.ustopwatch.WearActivity; public class StopwatchCustomVectorView extends View { private boolean mIsStopwatch = true; //true=stopwatch, false=countdown private boolean mIsRunning = false; private static final String KEY_STATE = "state_bool"; private static final String KEY_LASTTIME = "lasttime"; private static final String KEY_NOWTIME = "currenttime_int"; private static final String KEY_COUNTDOWN_SUFFIX = "_cd"; private float mMinsAngle = 0; private float mSecsAngle = 0; private int mDisplayTimeMillis = 0; //max value is 100hours, 360000000ms private final float twoPI = (float) (Math.PI * 2.0); private boolean mStopwatchMode = true; private long mTouching = 0; private int mCanvasWidth = 320; private int mCanvasHeight = 480; private int mWatchfaceCenterX = 156; private int mWatchfaceCenterY = 230; private int mMinsCenterY = 185; private float mWatchFaceOuterRadius = 400; //the paint styles private Paint mPrimaryDashPaint; private 
Paint mSecondaryDashPaint; private Paint mTertiaryDashPaint; private Paint mClockNumeralsPaint; private Paint mPatternCircleSolidPaint; private Paint mPatternCircleDashPaint; private Paint m100thsPrimaryDashPaint; private Paint m100thsSecondaryDashPaint; private Paint m100thsNumeralsPaint; private Paint mMinsCirclePaint; private Paint mMinsNumeralsPaint; private Paint mWatchhandsPaint; //bitmaps private Drawable mBmpNameplate; private Bitmap mWatchBackground; private float mWatchBMPMarginTop = 0; private float mWatchBMPMarginLeft = 0; //metrics private static final int FULL_CANVAS_SIZE = 1000; //basing the custom view on a 1000x1000px canvas and scaling as needed private static final int FULL_WATCHFACE_OUTER_RADIUS = 400; private static final int FULL_WATCHFACE_OUTER_RADIUS_SQUARE = 500; private static final float FULL_PRIMARY_DASH_WIDTH = 3; private static final float FULL_SECONDARY_DASH_WIDTH = 2; private static final float FULL_60TH_1_DASH_HEIGHT = 50; private static final float FULL_60TH_2_DASH_HEIGHT = 50; private static final float FULL_60TH_3_DASH_HEIGHT = 40; private static final float FULL_PATTERN_CIRCLE_RADIUS = FULL_WATCHFACE_OUTER_RADIUS - FULL_60TH_1_DASH_HEIGHT - 18; private static final float FULL_PATTERN_CIRCLE_SOLID_THICKNESS = 18; private static final float FULL_PATTERN_CIRCLE_DASH_THICKNESS = 12; private static final float FULL_100TH_OUTER_RADIUS = FULL_PATTERN_CIRCLE_RADIUS - 30; private static final float FULL_100TH_OUTER_DIAMETER = FULL_100TH_OUTER_RADIUS * 2; private static final float FULL_100th_1_DASH_HEIGHT = 40; private static final float FULL_100th_2_DASH_HEIGHT = 20; private static final float FULL_60th_NUMERALS_SIZE = 50; private static final float FULL_60TH_NUMERALS_RADIUS = FULL_WATCHFACE_OUTER_RADIUS + FULL_60th_NUMERALS_SIZE - 10; //-10 for kerning private static final float FULL_100th_NUMERALS_SIZE = 30; private static final float FULL_100TH_NUMERALS_RADIUS = FULL_100TH_OUTER_RADIUS - FULL_100th_1_DASH_HEIGHT - 
FULL_100th_NUMERALS_SIZE; private static final float FULL_NAMEPLATE_Y = 640; private static final float FULL_MINS_CENTER_Y = 345; private static final float FULL_MINS_CIRCLE_RADIUS = 94; private static final float FULL_MINS_DASH_HEIGHT = 10; private static final float FULL_MINS_NUMERALS_RADIUS = FULL_MINS_CIRCLE_RADIUS - FULL_MINS_DASH_HEIGHT * 2.5f; private static final float FULL_MINS_NUMERALS_SIZE = 20; private static final float FULL_SECHAND_HALFBASEWIDTH = 10; private static final float FULL_MINHAND_HALFBASEWIDTH = 5; private float mScaleFactor = 0; //colors private final int COLOR_BACKGROUND; private final int COLOR_HANDS; private final int COLOR_60TH_NUMERALS; private final int COLOR_100TH_NUMERALS; private final int COLOR_60TH_PRI_DASH; private final int COLOR_60TH_SEC_DASH; private final int COLOR_100TH_PRI_DASH; private final int COLOR_100TH_SEC_DASH; private final int COLOR_MINS; private final int COLOR_PATTERN_CIRCLE_SOLID; private final int COLOR_PATTERN_CIRCLE_DASHED; //arrays of verticies for the watch hands private float[] mSecHandVerticies; private float[] mMinHandVerticies; private boolean isRoundDevice = true; //Used to figure out elapsed time between frames private long mLastTime = 0; //pass back messages to UI thread private Handler mHandler; public static final boolean IS_HONEYCOMB_OR_ABOVE = Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB; public StopwatchCustomVectorView(Context context, AttributeSet attrs) { super(context, attrs); //find out if this view is specificed as a stopwatch or countdown view TypedArray a = context.getTheme().obtainStyledAttributes( attrs, R.styleable.StopwatchCustomView, 0, 0); try { mIsStopwatch = a.getBoolean(R.styleable.StopwatchCustomView_watchType, true); } finally { a.recycle(); } Resources res = getResources(); COLOR_BACKGROUND = res.getColor(mIsStopwatch ? R.color.stopwatch_background : R.color.countdown_background); COLOR_HANDS = mIsStopwatch ? 
Color.BLACK : Color.WHITE; COLOR_60TH_NUMERALS = mIsStopwatch ? Color.rgb(52, 52, 52) : Color.rgb(238, 238, 238); COLOR_100TH_NUMERALS = mIsStopwatch ? Color.rgb(193, 103, 91) : Color.rgb(0, 172, 228); COLOR_MINS = mIsStopwatch ? Color.rgb(85, 85, 85) : Color.rgb(85, 85, 85); COLOR_60TH_PRI_DASH = mIsStopwatch ? Color.rgb(52, 52, 52) : Color.WHITE; COLOR_60TH_SEC_DASH = mIsStopwatch ? Color.rgb(197, 197, 197) : Color.rgb(63, 63, 63); COLOR_100TH_PRI_DASH = mIsStopwatch ? Color.rgb(193, 103, 91) : Color.rgb(0, 172, 228); COLOR_100TH_SEC_DASH = mIsStopwatch ? Color.rgb(232, 205, 200) : Color.rgb(96, 96, 96); COLOR_PATTERN_CIRCLE_SOLID = mIsStopwatch ? Color.rgb(52, 52, 52) : Color.rgb(112, 112, 112); COLOR_PATTERN_CIRCLE_DASHED = COLOR_BACKGROUND; init(); } private void init() { /* setOnApplyWindowInsetsListener(new OnApplyWindowInsetsListener() { @Override public WindowInsets onApplyWindowInsets(View view, WindowInsets windowInsets) { if(windowInsets!=null) isRoundDevice = windowInsets.isRound(); return null; } }); */ Resources res = getResources(); //the stopwatch graphics are square, so find the smallest dimension they must fit in and load appropriately float minDim = Math.min(mCanvasHeight, mCanvasWidth); mScaleFactor = minDim / FULL_CANVAS_SIZE; mWatchBMPMarginTop = (mCanvasHeight - minDim) / 2; mWatchBMPMarginLeft = (mCanvasWidth - minDim) / 2; float watchfaceOuterRadius = (isRoundDevice ? 
FULL_WATCHFACE_OUTER_RADIUS : FULL_WATCHFACE_OUTER_RADIUS_SQUARE) * mScaleFactor; double mClockDiameter = watchfaceOuterRadius * 2; mWatchfaceCenterX = mCanvasWidth / 2; mWatchfaceCenterY = mCanvasHeight / 2; mMinsCenterY = (int) ((FULL_MINS_CENTER_Y * mScaleFactor) + mWatchBMPMarginTop); float primaryDashWidth = FULL_PRIMARY_DASH_WIDTH * mScaleFactor; float secondaryDashWidth = FULL_SECONDARY_DASH_WIDTH * mScaleFactor; mPrimaryDashPaint = new Paint(); mPrimaryDashPaint.setColor(COLOR_60TH_PRI_DASH); mPrimaryDashPaint.setStrokeWidth(primaryDashWidth); mPrimaryDashPaint.setAntiAlias(true); mSecondaryDashPaint = new Paint(); mSecondaryDashPaint.setColor(COLOR_60TH_SEC_DASH); mSecondaryDashPaint.setStrokeWidth(primaryDashWidth); mSecondaryDashPaint.setAntiAlias(true); mTertiaryDashPaint = new Paint(); mTertiaryDashPaint.setColor(COLOR_60TH_SEC_DASH); mTertiaryDashPaint.setStrokeWidth(secondaryDashWidth); mTertiaryDashPaint.setAntiAlias(true); mClockNumeralsPaint = new Paint(); mClockNumeralsPaint.setColor(COLOR_60TH_NUMERALS); mClockNumeralsPaint.setTextSize(FULL_60th_NUMERALS_SIZE * mScaleFactor); mClockNumeralsPaint.setAntiAlias(true); mClockNumeralsPaint.setTextAlign(Paint.Align.CENTER); mPatternCircleSolidPaint = new Paint(); mPatternCircleSolidPaint.setColor(COLOR_PATTERN_CIRCLE_SOLID); mPatternCircleSolidPaint.setStrokeWidth(FULL_PATTERN_CIRCLE_SOLID_THICKNESS * mScaleFactor); mPatternCircleSolidPaint.setStyle(Paint.Style.STROKE); mPatternCircleSolidPaint.setAntiAlias(true); mPatternCircleDashPaint = new Paint(); mPatternCircleDashPaint.setColor(COLOR_PATTERN_CIRCLE_DASHED); mPatternCircleDashPaint.setStrokeWidth(FULL_PATTERN_CIRCLE_DASH_THICKNESS * mScaleFactor); mPatternCircleDashPaint.setStyle(Paint.Style.STROKE); mPatternCircleDashPaint.setAntiAlias(true); mPatternCircleDashPaint.setPathEffect(new DashPathEffect(new float[]{20 * mScaleFactor, 30 * mScaleFactor}, 0)); m100thsPrimaryDashPaint = new Paint(); 
m100thsPrimaryDashPaint.setColor(COLOR_100TH_PRI_DASH); m100thsPrimaryDashPaint.setStrokeWidth(primaryDashWidth); m100thsPrimaryDashPaint.setAntiAlias(true); m100thsSecondaryDashPaint = new Paint(); m100thsSecondaryDashPaint.setColor(COLOR_100TH_SEC_DASH); m100thsSecondaryDashPaint.setStrokeWidth(secondaryDashWidth); m100thsSecondaryDashPaint.setAntiAlias(true); m100thsNumeralsPaint = new Paint(); m100thsNumeralsPaint.setColor(COLOR_100TH_NUMERALS); m100thsNumeralsPaint.setTextSize(FULL_100th_NUMERALS_SIZE * mScaleFactor); m100thsNumeralsPaint.setAntiAlias(true); m100thsNumeralsPaint.setTextAlign(Paint.Align.CENTER); mMinsCirclePaint = new Paint(); mMinsCirclePaint.setColor(COLOR_MINS); mMinsCirclePaint.setStrokeWidth(2 * mScaleFactor); mMinsCirclePaint.setStyle(Paint.Style.STROKE); mMinsCirclePaint.setAntiAlias(true); mMinsNumeralsPaint = new Paint(); mMinsNumeralsPaint.setColor(COLOR_MINS); mMinsNumeralsPaint.setTextSize(FULL_MINS_NUMERALS_SIZE * mScaleFactor); mMinsNumeralsPaint.setAntiAlias(true); mMinsNumeralsPaint.setTextAlign(Paint.Align.CENTER); mWatchhandsPaint = new Paint(); mWatchhandsPaint.setColor(COLOR_HANDS); mWatchhandsPaint.setAntiAlias(true); mSecHandVerticies = new float[6]; mSecHandVerticies[0] = mWatchfaceCenterX - FULL_SECHAND_HALFBASEWIDTH * mScaleFactor; mSecHandVerticies[1] = mWatchfaceCenterY; mSecHandVerticies[2] = mWatchfaceCenterX + FULL_SECHAND_HALFBASEWIDTH * mScaleFactor; mSecHandVerticies[3] = mWatchfaceCenterY; mSecHandVerticies[4] = mWatchfaceCenterX; mSecHandVerticies[5] = mWatchfaceCenterY - (FULL_WATCHFACE_OUTER_RADIUS - FULL_60TH_3_DASH_HEIGHT) * mScaleFactor; mMinHandVerticies = new float[6]; mMinHandVerticies[0] = mWatchfaceCenterX - FULL_MINHAND_HALFBASEWIDTH * mScaleFactor; mMinHandVerticies[1] = mMinsCenterY; mMinHandVerticies[2] = mWatchfaceCenterX + FULL_MINHAND_HALFBASEWIDTH * mScaleFactor; mMinHandVerticies[3] = mMinsCenterY; mMinHandVerticies[4] = mWatchfaceCenterX; mMinHandVerticies[5] = mMinsCenterY - 
(FULL_MINS_CIRCLE_RADIUS) * mScaleFactor; //load the USW logo mBmpNameplate = res.getDrawable(mIsStopwatch ? R.drawable.nameplate : R.drawable.nameplate_cw); //draw the watchface mWatchBackground = Bitmap.createBitmap((int) minDim, (int) minDim, Bitmap.Config.ARGB_8888); Canvas canvas = new Canvas(mWatchBackground); drawWatchface(canvas); } //assumed that the incoming canvas is square private void drawWatchface(Canvas canvas) { int canvasWidth = canvas.getWidth(); int canvasHeight = canvas.getHeight(); float canvasCenter = canvasWidth / 2.0f; float watchfaceOuterRadius = FULL_WATCHFACE_OUTER_RADIUS * mScaleFactor; if (!isRoundDevice) watchfaceOuterRadius = 500 * mScaleFactor; float watchfaceDiameter = watchfaceOuterRadius * 2.0f; float pri60thDashHeight = FULL_60TH_1_DASH_HEIGHT * mScaleFactor; float sec60thDashHeight = FULL_60TH_2_DASH_HEIGHT * mScaleFactor; float ter60thDashHeight = FULL_60TH_3_DASH_HEIGHT * mScaleFactor; float yOffset60thCircle = (canvasHeight - watchfaceDiameter) / 2.0f + pri60thDashHeight; float yOffset100thCircle = (canvasHeight - (FULL_100TH_OUTER_DIAMETER * mScaleFactor)) / 2.0f; float pri100thDashHeight = FULL_100th_1_DASH_HEIGHT * mScaleFactor; float sec100thDashHeight = FULL_100th_2_DASH_HEIGHT * mScaleFactor; float patternCircleRadius = FULL_PATTERN_CIRCLE_RADIUS * mScaleFactor; float text60thHalfHeight = FULL_60th_NUMERALS_SIZE * mScaleFactor / 3; //one third height lines up better float text60thRadius = FULL_60TH_NUMERALS_RADIUS * mScaleFactor; if (!isRoundDevice) text60thRadius = canvasWidth / 2.0f + ter60thDashHeight; float text100thRadius = FULL_100TH_NUMERALS_RADIUS * mScaleFactor; float text100thHalfHeight = FULL_100th_NUMERALS_SIZE * mScaleFactor / 2; //minutes float minsCircleRadius = FULL_MINS_CIRCLE_RADIUS * mScaleFactor; float minsCenterY = FULL_MINS_CENTER_Y * mScaleFactor; float minsDashHeight = FULL_MINS_DASH_HEIGHT * mScaleFactor; float textMinsRadius = FULL_MINS_NUMERALS_RADIUS * mScaleFactor; float textMinsHalfHeigh = 
FULL_MINS_NUMERALS_SIZE * mScaleFactor / 3; canvas.drawColor(COLOR_BACKGROUND); //usw name plate if (mBmpNameplate != null) { int plateWidth = (int) (mBmpNameplate.getIntrinsicWidth() * mScaleFactor * 2); int plateHeight = (int) (mBmpNameplate.getIntrinsicHeight() * mScaleFactor * 2); int namePlateX = (canvasWidth - plateWidth) / 2; int namePlateY = (int) (FULL_NAMEPLATE_Y * mScaleFactor); mBmpNameplate.setBounds(namePlateX, namePlateY, namePlateX + plateWidth, namePlateY + plateHeight); mBmpNameplate.draw(canvas); } //draw the minutes canvas.drawCircle(canvasCenter, minsCenterY, minsCircleRadius, mMinsCirclePaint); //draw the minute ticks canvas.save(); for (int a = 0; a < 360; a += 12) { //draw the 60ths etc.. lines canvas.drawLine(canvasCenter, minsCenterY - minsCircleRadius, canvasCenter, minsCenterY - minsCircleRadius + minsDashHeight, mMinsCirclePaint); canvas.rotate(12, canvasCenter, minsCenterY); } canvas.restore(); //draw the minutes numerals for (int a = 0; a < 6; a++) { double xPos = canvasCenter + Math.cos(Math.toRadians(a * 60 - 30)) * textMinsRadius; double yPos = minsCenterY + Math.sin(Math.toRadians(a * 60 - 30)) * textMinsRadius + textMinsHalfHeigh; int numeral = (a + 1) * 5; canvas.drawText((numeral < 10 ? " " : "") + numeral, (float) xPos, (float) yPos, mMinsNumeralsPaint); } //draw the clock face canvas.save(); for (int a = 0; a < 360; a += 2) { //draw the 60ths etc.. 
lines if (a % 30 == 0) {//+60,+70,+90 if (isRoundDevice) canvas.drawLine(canvasCenter, yOffset60thCircle - pri60thDashHeight, canvasCenter, yOffset60thCircle, mPrimaryDashPaint); else canvas.drawLine(canvasCenter, 0, canvasCenter, pri60thDashHeight, mPrimaryDashPaint); } else if (a % 6 == 0) { if (isRoundDevice) canvas.drawLine(canvasCenter, yOffset60thCircle - sec60thDashHeight, canvasCenter, yOffset60thCircle, mSecondaryDashPaint); else canvas.drawLine(canvasCenter, 0, canvasCenter, sec60thDashHeight, mSecondaryDashPaint); } else if (a % 2 == 0) { if (isRoundDevice) canvas.drawLine(canvasCenter, yOffset60thCircle - ter60thDashHeight, canvasCenter, yOffset60thCircle, mTertiaryDashPaint); else canvas.drawLine(canvasCenter, 0, canvasCenter, ter60thDashHeight, mTertiaryDashPaint); } canvas.rotate(2, canvasCenter, canvasCenter); } canvas.restore(); //draw the black circle if (isRoundDevice) { canvas.drawCircle(canvasCenter, canvasCenter, patternCircleRadius, mPatternCircleSolidPaint); canvas.drawCircle(canvasCenter, canvasCenter, patternCircleRadius, mPatternCircleDashPaint); } else { canvas.drawCircle(canvasCenter, canvasCenter, mCanvasWidth / 2.0f - pri60thDashHeight, mPatternCircleSolidPaint); canvas.drawCircle(canvasCenter, canvasCenter, mCanvasWidth / 2.0f - pri60thDashHeight, mPatternCircleDashPaint); } //draw the 100ths lines canvas.save(); for (int a = 0; a < 200; a++) { if (a % 10 == 0) { canvas.drawLine(canvasCenter, yOffset100thCircle, canvasCenter, yOffset100thCircle + pri100thDashHeight, m100thsPrimaryDashPaint); } else if (a % 2 == 0) { canvas.drawLine(canvasCenter, yOffset100thCircle, canvasCenter, yOffset100thCircle + sec100thDashHeight, m100thsPrimaryDashPaint); } else { canvas.drawLine(canvasCenter, yOffset100thCircle, canvasCenter, yOffset100thCircle + sec100thDashHeight, m100thsSecondaryDashPaint); } canvas.rotate(1.8f, canvasCenter, canvasCenter);//200ths } canvas.restore(); //draw the 60ths numerals for (int a = 0; a < 360; a += 30) { double xPos 
= canvasCenter + Math.cos(Math.toRadians(a)) * text60thRadius; double yPos = canvasCenter + Math.sin(Math.toRadians(a)) * text60thRadius + text60thHalfHeight; int numeral = ((a / 6 + 15) % 60); if (numeral == 0) numeral = 60; canvas.drawText((numeral < 10 ? " " : "") + numeral, (float) xPos, (float) yPos, mClockNumeralsPaint); } //draw the 100ths numerals for (int a = 0; a < 10; a++) { if (a == 2 || a == 7) continue; //no text at 0 or 50 double xPos = canvasCenter + Math.cos(Math.toRadians(a * 36 + 18)) * text100thRadius; double yPos = canvasCenter + Math.sin(Math.toRadians(a * 36 + 18)) * text100thRadius + text100thHalfHeight; String numeral = (a * 10 + 30) % 100 + ""; canvas.drawText(numeral, (float) xPos, (float) yPos, m100thsNumeralsPaint); } } private void drawFilledTriangle(Canvas canvas, float[] verticies, Paint p) { Path path = new Path(); path.moveTo(verticies[0], verticies[1]); path.lineTo(verticies[2], verticies[3]); path.lineTo(verticies[4], verticies[5]); canvas.drawPath(path, p); } @Override protected void onSizeChanged(int w, int h, int oldw, int oldh) { // Account for padding int xpad = (getPaddingLeft() + getPaddingRight()); int ypad = (getPaddingTop() + getPaddingBottom()); mCanvasWidth = w - xpad; mCanvasHeight = h - ypad; init(); } @Override public boolean hasOverlappingRendering() { return false; } protected void onDraw(Canvas canvas) { super.onDraw(canvas); //draw the watch background canvas.drawBitmap(mWatchBackground, mWatchBMPMarginLeft, mWatchBMPMarginTop, null); // draw the mins hands with its current rotatiom canvas.save(); canvas.rotate((float) Math.toDegrees(mMinsAngle), mWatchfaceCenterX, mMinsCenterY); drawFilledTriangle(canvas, mMinHandVerticies, mWatchhandsPaint); canvas.restore(); canvas.drawCircle(mWatchfaceCenterX, mMinsCenterY, FULL_MINHAND_HALFBASEWIDTH * 2 * mScaleFactor, mWatchhandsPaint); // Draw the secs hand with its current rotation canvas.save(); canvas.rotate((float) Math.toDegrees(mSecsAngle), mWatchfaceCenterX, 
mWatchfaceCenterY); drawFilledTriangle(canvas, mSecHandVerticies, mWatchhandsPaint); canvas.restore(); canvas.drawCircle(mWatchfaceCenterX, mWatchfaceCenterY, FULL_SECHAND_HALFBASEWIDTH * 2 * mScaleFactor, mWatchhandsPaint); }

    /**
     * Sets the displayed time on the stopwatch/countdown face, animating the hands
     * when resetting a countdown. To make the animation feel right, the hands always
     * wind backwards when resetting (see shortestAngleToDestination).
     * Stops the running timer as a side effect (mIsRunning = false).
     */
    public void setTime(final int hours, final int minutes, final int seconds, boolean resetting) {
        mIsRunning = false;
        mLastTime = System.currentTimeMillis();
        if (IS_HONEYCOMB_OR_ABOVE) {
            // API 11+: animate hands smoothly via ValueAnimator
            animateWatchToAPI11(hours, minutes, seconds, resetting);
        } else {
            // Pre-Honeycomb fallback: jump straight to the target position.
            // To fix bug #42, the hands now reset even when paused.
            removeCallbacks(animator);
            post(new Runnable() {
                @Override
                public void run() {
                    // Set hand angles directly; no animation available on this API level.
                    mSecsAngle = (twoPI * ((float) seconds / 60.0f)); // ensure the hands end at the correct position
                    mMinsAngle = (twoPI * ((float) minutes / 30.0f)); // NOTE: minute face is 0-29, not 0-59
                    mDisplayTimeMillis = hours * 3600000 + minutes * 60000 + seconds * 1000;
                    // negative value signals "countdown" to the listening Activity
                    broadcastClockTime(mIsStopwatch ? mDisplayTimeMillis : -mDisplayTimeMillis);
                    invalidate();
                }
            });
        }
    }

    /**
     * Animates the second/minute hands and the broadcast clock value to the given
     * time using ValueAnimators (requires API 11). All three animators share one
     * duration proportional to the largest angular change, so they finish together.
     */
    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
    private void animateWatchToAPI11(final int hours, final int minutes, final int seconds, boolean resetting) {
        mSecsAngle = mSecsAngle % twoPI; // avoids more than 1 rotation
        mMinsAngle = mMinsAngle % twoPI; // avoids more than 1 rotation
        // forces hands to go back to 0, not forwards
        final float toSecsAngle = shortestAngleToDestination(mSecsAngle, twoPI * seconds / 60f, resetting);
        // avoid multiple minute-hand rotations as the face is 0-29, not 0-59
        final float toMinsAngle = shortestAngleToDestination(mMinsAngle, twoPI * ((minutes > 30 ? minutes - 30 : minutes) / 30f + seconds / 1800f), resetting);
        // duration scales with the biggest hand sweep, plus a 250ms floor
        float maxAngleChange = Math.max(Math.abs(mSecsAngle - toSecsAngle), Math.abs(toMinsAngle - mMinsAngle));
        long duration = (long) (maxAngleChange / twoPI * 1000) + 250;
        FastOutSlowInInterpolator fosiInterp = new FastOutSlowInInterpolator();
        final ValueAnimator secsAnimation = ValueAnimator.ofFloat(mSecsAngle, toSecsAngle);
        secsAnimation.setInterpolator(fosiInterp);
        secsAnimation.setDuration(duration);
        secsAnimation.start();
        final ValueAnimator minsAnimation = ValueAnimator.ofFloat(mMinsAngle, toMinsAngle);
        minsAnimation.setInterpolator(fosiInterp);
        minsAnimation.setDuration(duration);
        minsAnimation.start();
        // approach is to go from xMs to yMs: the displayed millis animate alongside the hands
        final ValueAnimator clockAnimation = ValueAnimator.ofInt(mDisplayTimeMillis, (hours * 3600000 + minutes * 60000 + seconds * 1000));
        clockAnimation.setInterpolator(fosiInterp);
        clockAnimation.setDuration(duration);
        clockAnimation.start();
        removeCallbacks(animator);
        post(new Runnable() {
            @Override
            public void run() {
                // During the animation also roll back the clock time to the current hand times.
                if (secsAnimation.isRunning() || minsAnimation.isRunning() || clockAnimation.isRunning()) {
                    mSecsAngle = (Float) secsAnimation.getAnimatedValue();
                    mMinsAngle = (Float) minsAnimation.getAnimatedValue();
                    broadcastClockTime(mIsStopwatch ? (Integer) clockAnimation.getAnimatedValue() : -(Integer) clockAnimation.getAnimatedValue());
                    invalidate();
                    postDelayed(this, 15); // ~60fps polling of the animators
                } else {
                    mSecsAngle = toSecsAngle; // ensure the hands have ended at the correct position
                    mMinsAngle = toMinsAngle;
                    mDisplayTimeMillis = hours * 3600000 + minutes * 60000 + seconds * 1000;
                    broadcastClockTime(mIsStopwatch ? mDisplayTimeMillis : -mDisplayTimeMillis);
                    invalidate();
                }
            }
        });
    }

    /**
     * Returns the angle in rads closest to fromAngle that is equivalent to toAngle
     * (toAngle +/- 2*Pi may be closer than toAngle itself) -- unless we are
     * animating a reset, as it feels better to always reset by reversing the hand
     * direction.
     * e.g. to get from -6 rads to 1 rads, the shortest distance is clockwise
     * through 0 rads; from 1 rads to 5 rads the shortest distance is CCW back
     * through 0 rads.
     */
    private float shortestAngleToDestination(final float fromAngle, final float toAngle, boolean resetting) {
        if (resetting && mIsStopwatch) // hands must always go backwards
        {
            return toAngle; // stopwatch reset always returns to 0,
        } else if (resetting && !mIsStopwatch) // hands must always go forwards
        {
            // countdown reset can be to any clock position, ensure CW rotation
            if (toAngle > fromAngle) return toAngle;
            else return (toAngle + twoPI);
        } else // not resetting: hands must take the shortest route
        {
            float absFromMinusTo = Math.abs(fromAngle - toAngle);
            // candidates: toAngle - twoPI, toAngle, toAngle + twoPI
            if (absFromMinusTo < Math.abs(fromAngle - (toAngle + twoPI))) {
                if (Math.abs(fromAngle - (toAngle - twoPI)) < absFromMinusTo) {
                    return (toAngle - twoPI);
                } else {
                    return toAngle;
                }
            } else return toAngle + twoPI;
        }
    }

    // Stopwatch and countdown animation runnable: advances the watch state and
    // reschedules itself on the next animation frame while running.
    private final Runnable animator = new Runnable() {
        @Override
        public void run() {
            updateWatchState(false);
            if (mIsRunning) {
                invalidate();
                removeCallbacks(this); // avoid duplicate callbacks before re-posting
                ViewCompat.postOnAnimation(StopwatchCustomVectorView.this, this);
            }
        }
    };

    /**
     * Updates the displayed time from the wall clock, recomputes hand angles,
     * broadcasts the new time, and fires countdown-complete when a running
     * countdown reaches zero.
     *
     * @param appResuming forwarded to notifyCountdownComplete so the receiver can
     *                    distinguish a completion detected during app resume
     */
    private void updateWatchState(boolean appResuming) {
        long now = System.currentTimeMillis();
        if (mIsRunning) {
            // stopwatch counts up, countdown counts down
            if (mIsStopwatch) mDisplayTimeMillis += (now - mLastTime);
            else mDisplayTimeMillis -= (now - mLastTime);
        } else {
            mLastTime = now;
        }
        // mins is 0 to 30 (half-hour minute face)
        mMinsAngle = twoPI * (mDisplayTimeMillis / 1800000.0f);
        mSecsAngle = twoPI * mDisplayTimeMillis / 60000.0f;
        if (mDisplayTimeMillis < 0) mDisplayTimeMillis = 0; // clamp countdown overshoot
        // send the time back to the Activity to update the other views
        broadcastClockTime(mIsStopwatch ? mDisplayTimeMillis : -mDisplayTimeMillis);
        mLastTime = now;
        // stop timer at end
        if (mIsRunning && !mIsStopwatch && mDisplayTimeMillis <= 0) {
            notifyCountdownComplete(appResuming);
        }
    }

    // Deal with touch events: a long press (>750ms) resets, any shorter tap toggles start/stop.
    @Override
    public boolean onTouchEvent(MotionEvent event) {
        if (event.getAction() == MotionEvent.ACTION_DOWN) {
            mTouching = System.currentTimeMillis();
        } else if (event.getAction() == MotionEvent.ACTION_UP) {
            if (System.currentTimeMillis() - mTouching > 750) notifyReset();
            if (mTouching > 0) startStop();
            mTouching = 0L;
        }
        return true;
    }

    /**
     * Toggles the running state. A countdown at 0 refuses to start.
     *
     * @return the new running state (false also when the start was refused)
     */
    public boolean startStop() {
        if (mIsRunning) {
            stop();
            notifyStateChanged();
        } else if (mIsStopwatch || mDisplayTimeMillis != 0) {
            // don't start the countdown if it is 0
            start();
            notifyStateChanged();
        } else { // mDisplayTimeMillis == 0
            return false;
        }
        return (mIsRunning);
    }

    // Starts ticking, with a short haptic confirmation.
    private void start() {
        mLastTime = System.currentTimeMillis();
        mIsRunning = true;
        Vibrator v = (Vibrator) getContext().getSystemService(Context.VIBRATOR_SERVICE);
        v.vibrate(20);
        removeCallbacks(animator);
        post(animator);
    }

    // Stops ticking, with a short haptic confirmation.
    protected void stop() {
        mIsRunning = false;
        Vibrator v = (Vibrator) getContext().getSystemService(Context.VIBRATOR_SERVICE);
        v.vibrate(20);
        removeCallbacks(animator);
    }

    public boolean isRunning() {
        return mIsRunning;
    }

    // Current displayed time in milliseconds.
    public double getWatchTime() {
        return mDisplayTimeMillis;
    }

    /**
     * Dump state to the provided editor. Typically called when the
     * Activity is being suspended. Keys are suffixed per mode so stopwatch and
     * countdown state do not collide.
     */
    public void saveState(SharedPreferences.Editor map) {
        if (!mIsStopwatch || mDisplayTimeMillis > 0) {
            /* if (!mIsStopwatch && mDisplayTimeMillis > 0 && mIsRunning) { AlarmUpdater.setCountdownAlarm(getContext(), (long) mDisplayTimeMillis); } else { AlarmUpdater.cancelCountdownAlarm(getContext()); //just to be sure } */
            map.putBoolean(KEY_STATE + (mStopwatchMode ? "" : KEY_COUNTDOWN_SUFFIX), mIsRunning);
            map.putLong(KEY_LASTTIME + (mStopwatchMode ? "" : KEY_COUNTDOWN_SUFFIX), mLastTime);
            map.putInt(KEY_NOWTIME + (mStopwatchMode ? "" : KEY_COUNTDOWN_SUFFIX), mDisplayTimeMillis);
        } else {
            // stopwatch at 0: nothing worth keeping
            map.clear();
        }
    }

    /**
     * Restores state from the indicated preferences. Called when
     * the Activity is being restored after having been previously
     * destroyed. Resumes the ticking runnable if the watch was running.
     */
    public synchronized void restoreState(SharedPreferences savedState) {
        if (savedState != null) {
            mIsRunning = (savedState.getBoolean(KEY_STATE + (mStopwatchMode ? "" : KEY_COUNTDOWN_SUFFIX), false));
            mLastTime = savedState.getLong(KEY_LASTTIME + (mStopwatchMode ? "" : KEY_COUNTDOWN_SUFFIX), System.currentTimeMillis());
            mDisplayTimeMillis = savedState.getInt(KEY_NOWTIME + (mStopwatchMode ? "" : KEY_COUNTDOWN_SUFFIX), 0);
            updateWatchState(true); // catch up elapsed wall-clock time
            removeCallbacks(animator);
            if (mIsRunning) post(animator);
        }
        notifyStateChanged();
        /*AlarmUpdater.cancelCountdownAlarm(getContext()); //just to be sure */
    }

    // for optimization purposes: the view always paints its full bounds
    @Override
    public boolean isOpaque() {
        return true;
    }

    // Message handling between Activity/Fragment and View.
    public void setHandler(Handler handler) {
        this.mHandler = handler;
    }

    private void notifyStateChanged() {
        Bundle b = new Bundle();
        b.putBoolean(WearActivity.MSG_STATE_CHANGE, true);
        sendMessageToHandler(b);
    }

    private void notifyReset() {
        stop();
        Bundle b = new Bundle();
        b.putBoolean(WearActivity.MSG_RESET, true);
        sendMessageToHandler(b);
    }

    // NOTE(review): body is intentionally commented out -- countdown completion
    // currently has no receiver here; confirm before re-enabling.
    private void notifyCountdownComplete(boolean appResuming) {
        /*Bundle b = new Bundle(); b.putBoolean(CountdownFragment.MSG_COUNTDOWN_COMPLETE, true); b.putBoolean(CountdownFragment.MSG_APP_RESUMING, appResuming); sendMessageToHandler(b);*/
    }

    // Send the latest time to the parent fragment to populate the digits.
    // Negative values denote countdown mode (see callers).
    private void broadcastClockTime(double mTime) {
        Bundle b = new Bundle();
        b.putBoolean(WearActivity.MSG_UPDATE_COUNTER_TIME, true);
        b.putDouble(WearActivity.MSG_NEW_TIME_DOUBLE, mTime);
        sendMessageToHandler(b);
    }

    // Null-safe delivery to the registered handler (no-op if none set).
    private void sendMessageToHandler(Bundle b) {
        if (mHandler != null) {
            Message msg = mHandler.obtainMessage();
            msg.setData(b);
            mHandler.sendMessage(msg);
        }
    }
}
package org.ovirt.engine.core.common.businessentities.network; import java.io.Serializable; import java.util.Map; import java.util.Set; import javax.validation.constraints.NotNull; import javax.validation.constraints.Pattern; import javax.validation.constraints.Size; import org.ovirt.engine.core.common.businessentities.BusinessEntitiesDefinitions; import org.ovirt.engine.core.common.utils.ObjectUtils; import org.ovirt.engine.core.common.utils.ValidationUtils; import org.ovirt.engine.core.common.validation.annotation.ValidNetworkConfiguration; import org.ovirt.engine.core.common.validation.annotation.ValidNetworkLabelFormat; import org.ovirt.engine.core.compat.Guid; /** * <code>VdsNetworkInterface</code> defines a type of {@link BaseNetworkInterface} for instances of {@link VDS}. * */ @ValidNetworkConfiguration public class VdsNetworkInterface extends NetworkInterface<VdsNetworkStatistics> { private static final long serialVersionUID = -6347816237220936283L; private Guid vdsId; private String vdsName; private NetworkBootProtocol bootProtocol; private String networkName; @Pattern(regexp = ValidationUtils.IP_PATTERN, message = "NETWORK_ADDR_IN_STATIC_IP_BAD_FORMAT") private String address; @Pattern(regexp = ValidationUtils.IP_PATTERN, message = "NETWORK_ADDR_IN_SUBNET_BAD_FORMAT") private String subnet; @Pattern(regexp = ValidationUtils.IP_PATTERN, message = "NETWORK_ADDR_IN_GATEWAY_BAD_FORMAT") private String gateway; private String baseInterface; private Integer vlanId; private Boolean bonded; private String bondName; private Integer bondType; private String bondOptions; private int mtu; private boolean bridged; private NetworkImplementationDetails networkImplementationDetails; private NetworkQoS qos; private boolean qosOverridden; private Map<String, String> customProperties; @ValidNetworkLabelFormat(message = "NETWORK_LABEL_FORMAT_INVALID") private Set<String> labels; public VdsNetworkInterface() { super(new VdsNetworkStatistics(), 
VdsInterfaceType.NONE.getValue()); } @Override @Size(min = 1, max = BusinessEntitiesDefinitions.HOST_NIC_NAME_LENGTH) @NotNull(message = "VALIDATION_NAME_NULL") public String getName() { return super.getName(); } /** * Returns if this is the management interface. * * @return <code>true</code> if this is the management interface */ public boolean getIsManagement() { return getType() != null && ((getType() & 2) > 0); } /** * Sets the related VDS id. * * @param vdsId * the id */ public void setVdsId(Guid vdsId) { this.vdsId = vdsId; this.statistics.setVdsId(vdsId); } /** * Returns the VDS id. * * @return the id */ public Guid getVdsId() { return vdsId; } /** * Sets the VDS entity's name. * * @param vdsName * the name */ public void setVdsName(String vdsName) { this.vdsName = vdsName; } /** * Returns the VDS entity's name. * * @return the name */ public String getVdsName() { return vdsName; } /** * Sets the boot protocol. * * @param bootProtocol * the boot protocol */ public void setBootProtocol(NetworkBootProtocol bootProtocol) { this.bootProtocol = bootProtocol; } /** * Returns the boot protocol. * * @return the boot protocol */ public NetworkBootProtocol getBootProtocol() { return bootProtocol; } /** * Sets the name of the network. * * @param networkName * the network name */ public void setNetworkName(String networkName) { this.networkName = networkName; } /** * Returns the name of the network. * * @return the network name */ public String getNetworkName() { return networkName; } /** * Sets the network address. * * @param address * the address */ public void setAddress(String address) { this.address = address; } /** * Returns the network address. * * @return the address */ public String getAddress() { return address; } /** * Sets the address's subnet. * * @param subnet * the subnet */ public void setSubnet(String subnet) { this.subnet = subnet; } /** * Returns the subnet. * * @return the subnet */ public String getSubnet() { return subnet; } /** * Sets the gateway. 
* * @param gateway * the gateway */ public void setGateway(String gateway) { this.gateway = gateway; } /** * Returns the gateway. * * @return the gateway */ public String getGateway() { return gateway; } /** * If the interface is vlan set its base interface name * * @param baseInterface * the base interface name */ public void setBaseInterface(String baseInterface) { this.baseInterface = baseInterface; } /** * Returns the base interface name. * * @return baseInterface */ public String getBaseInterface() { return baseInterface; } /** * Sets the VLAN id * * @param vlanId * the VLAN id */ public void setVlanId(Integer vlanId) { this.vlanId = vlanId; } /** * Returns the VLAN id. * * @return */ public Integer getVlanId() { return vlanId; } /** * Sets whether the interface is bonded or not. * * @param bonded * <code>true</code> if it is bonded */ public void setBonded(Boolean bonded) { this.bonded = bonded; } /** * Returns if the interface is bonded or not. * * @return <code>true</code> if it is bonded */ public Boolean getBonded() { return bonded; } /** * Sets the bond name. * * @param bondName * the bond name */ public void setBondName(String bondName) { this.bondName = bondName; } /** * Returns the bond name. * * @return the bond name */ public String getBondName() { return bondName; } /** * Sets the bond type. * * @param bondType * the bond type */ public void setBondType(Integer bondType) { this.bondType = bondType; } /** * Returns the bond type. * * @return the bond type */ public Integer getBondType() { return bondType; } /** * Sets the bond options. * * @param bondOptions * the bond options */ public void setBondOptions(String bondOptions) { this.bondOptions = bondOptions; } /** * Returns the bond options. 
* * @return the bond options */ public String getBondOptions() { return bondOptions; } @Override public Object getQueryableId() { return id; } public int getMtu() { return mtu; } public void setMtu(int mtu) { this.mtu = mtu; } public boolean isBridged() { return bridged; } public void setBridged(boolean bridged) { this.bridged = bridged; } public NetworkImplementationDetails getNetworkImplementationDetails() { return networkImplementationDetails; } public void setNetworkImplementationDetails(NetworkImplementationDetails networkImplementationDetails) { this.networkImplementationDetails = networkImplementationDetails; } /** * Gets the QoS configured on this interface, which overrides the one possibly configured on the network. */ public NetworkQoS getQos() { return qos; } /** * Sets the QoS configured on this interface, which overrides the one possibly configured on the network. */ public void setQos(NetworkQoS qos) { this.qos = qos; } /** * Gets whether QoS overriding is enabled. */ public boolean isQosOverridden() { return qosOverridden; } /** * Sets whether QoS overriding is enabled. 
*/ public void setQosOverridden(boolean qosOverridden) { this.qosOverridden = qosOverridden; } public Set<String> getLabels() { return labels; } public void setLabels(Set<String> labels) { this.labels = labels; } public boolean hasCustomProperties() { return customProperties != null && !customProperties.isEmpty(); } public Map<String, String> getCustomProperties() { return customProperties; } public void setCustomProperties (Map<String, String> customProperties) { this.customProperties = customProperties; } @Override public String toString() { StringBuilder builder = new StringBuilder(); builder.append(getName()) .append(" {id=") .append(getId()) .append(", vdsId=") .append(getVdsId()) .append(", macAddress=") .append(getMacAddress()) .append(", networkName=") .append(getNetworkName()) .append(", baseInterface=") .append(getBaseInterface()) .append(", vlanId=") .append(getVlanId()) .append(", bonded=") .append(getBonded()) .append(", bondName=") .append(getBondName()) .append(", bondOptions=") .append(getBondOptions()) .append(", bootProtocol=") .append(getBootProtocol()) .append(", address=") .append(getAddress()) .append(", subnet=") .append(getSubnet()) .append(", gateway=") .append(getGateway()) .append(", mtu=") .append(getMtu()) .append(", bridged=") .append(isBridged()) .append(", speed=") .append(getSpeed()) .append(", type=") .append(getType()) .append(", networkImplementationDetails=") .append(getNetworkImplementationDetails()) .append(", qos=") .append(getQos()) .append(" qosOverridden=") .append(isQosOverridden()) .append(", labels=") .append(getLabels()) .append(", customProperties=") .append(getCustomProperties()) .append("}"); return builder.toString(); } @Override public int hashCode() { final int prime = 31; int result = super.hashCode(); result = prime * result + ((address == null) ? 0 : address.hashCode()); result = prime * result + ((bondName == null) ? 0 : bondName.hashCode()); result = prime * result + ((bondOptions == null) ? 
0 : bondOptions.hashCode()); result = prime * result + ((bondType == null) ? 0 : bondType.hashCode()); result = prime * result + ((bonded == null) ? 0 : bonded.hashCode()); result = prime * result + ((bootProtocol == null) ? 0 : bootProtocol.hashCode()); result = prime * result + ((networkName == null) ? 0 : networkName.hashCode()); result = prime * result + (bridged ? 1231 : 1237); result = prime * result + ((gateway == null) ? 0 : gateway.hashCode()); result = prime * result + mtu; result = prime * result + ((subnet == null) ? 0 : subnet.hashCode()); result = prime * result + ((vdsId == null) ? 0 : vdsId.hashCode()); result = prime * result + ((baseInterface == null) ? 0 : baseInterface.hashCode()); result = prime * result + ((vlanId == null) ? 0 : vlanId.hashCode()); result = prime * result + ((qos == null) ? 0 : qos.hashCode()); result = prime * result + (qosOverridden ? 1231 : 1237); result = prime * result + ((labels == null) ? 0 : labels.hashCode()); result = prime * result + ((customProperties == null) ? 
0 : customProperties.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (!super.equals(obj)) { return false; } if (!(obj instanceof VdsNetworkInterface)) { return false; } VdsNetworkInterface other = (VdsNetworkInterface) obj; if (address == null) { if (other.address != null) { return false; } } else if (!address.equals(other.address)) { return false; } if (bondName == null) { if (other.bondName != null) { return false; } } else if (!bondName.equals(other.bondName)) { return false; } if (bondOptions == null) { if (other.bondOptions != null) { return false; } } else if (!bondOptions.equals(other.bondOptions)) { return false; } if (bondType == null) { if (other.bondType != null) { return false; } } else if (!bondType.equals(other.bondType)) { return false; } if (bonded == null) { if (other.bonded != null) { return false; } } else if (!bonded.equals(other.bonded)) { return false; } if (bootProtocol != other.bootProtocol) { return false; } if (!ObjectUtils.objectsEqual(networkName, other.networkName)) { return false; } if (bridged != other.bridged) { return false; } if (gateway == null) { if (other.gateway != null) { return false; } } else if (!gateway.equals(other.gateway)) { return false; } if (mtu != other.mtu) { return false; } if (subnet == null) { if (other.subnet != null) { return false; } } else if (!subnet.equals(other.subnet)) { return false; } if (vdsId == null) { if (other.vdsId != null) { return false; } } else if (!vdsId.equals(other.vdsId)) { return false; } if (baseInterface == null) { if (other.baseInterface != null) { return false; } } else if (!baseInterface.equals(other.baseInterface)) { return false; } if (vlanId == null) { if (other.vlanId != null) { return false; } } else if (!vlanId.equals(other.vlanId)) { return false; } if (!ObjectUtils.objectsEqual(qos, other.qos)) { return false; } if (qosOverridden != other.qosOverridden) { return false; } if (!ObjectUtils.objectsEqual(labels, 
other.labels)) { return false; } if (!ObjectUtils.objectsEqual(customProperties, other.customProperties)) { return false; } return true; } /** * Holds various details about regarding the logical network implementation on the device. */ public static class NetworkImplementationDetails implements Serializable{ private static final long serialVersionUID = 5213991878221362832L; private boolean inSync; private boolean managed; public NetworkImplementationDetails() { } public NetworkImplementationDetails(boolean inSync, boolean managed) { this.inSync = inSync; this.managed = managed; } /** * @return Is the network's physical definition on the device same as the logical definition. */ public boolean isInSync() { return inSync; } /** * @return Is the network that is defined on this interface managed by the engine, or some custom network which * exists solely on the host. */ public boolean isManaged() { return managed; } @Override public String toString() { StringBuilder builder = new StringBuilder(); builder.append("{inSync=") .append(isInSync()) .append(", managed=") .append(isManaged()) .append("}"); return builder.toString(); } } }
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.action.admin.indices.mapping.put;

import org.elasticsearch.ElasticSearchGenerationException;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.master.MasterNodeOperationRequest;
import org.elasticsearch.common.Required;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;

import java.io.IOException;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import static org.elasticsearch.action.ValidateActions.addValidationError;
import static org.elasticsearch.common.unit.TimeValue.readTimeValue;

/**
 * Puts a mapping definition registered under a specific type into one or more indices. Best created with
 * {@link org.elasticsearch.client.Requests#putMappingRequest(String...)}.
 * <p/>
 * <p>If a mapping already exists, the new mapping will be merged with the existing one. If elements
 * that cannot be merged are detected, the request will be rejected unless {@link #ignoreConflicts(boolean)}
 * is set. In such a case, the conflicting mappings will be discarded.
 *
 * @see org.elasticsearch.client.Requests#putMappingRequest(String...)
 * @see org.elasticsearch.client.IndicesAdminClient#putMapping(PutMappingRequest)
 * @see PutMappingResponse
 */
public class PutMappingRequest extends MasterNodeOperationRequest {

    // Target indices; null/empty means "all indices".
    private String[] indices;

    private String mappingType;

    // JSON mapping definition; required (see validate()).
    private String mappingSource;

    private TimeValue timeout = new TimeValue(10, TimeUnit.SECONDS);

    private boolean ignoreConflicts = false;

    PutMappingRequest() {
    }

    /**
     * Constructs a new put mapping request against one or more indices. If nothing is set then
     * it will be executed against all indices.
     */
    public PutMappingRequest(String... indices) {
        this.indices = indices;
    }

    /**
     * Both the mapping type and the mapping source are required.
     */
    @Override
    public ActionRequestValidationException validate() {
        ActionRequestValidationException validationException = null;
        if (mappingType == null) {
            validationException = addValidationError("mapping type is missing", validationException);
        }
        if (mappingSource == null) {
            validationException = addValidationError("mapping source is missing", validationException);
        }
        return validationException;
    }

    /**
     * Sets the indices this put mapping operation will execute on.
     */
    public PutMappingRequest indices(String[] indices) {
        this.indices = indices;
        return this;
    }

    /**
     * The indices the mappings will be put.
     */
    public String[] indices() {
        return indices;
    }

    /**
     * The mapping type.
     */
    public String type() {
        return mappingType;
    }

    /**
     * The type of the mappings.
     */
    @Required
    public PutMappingRequest type(String mappingType) {
        this.mappingType = mappingType;
        return this;
    }

    /**
     * The mapping source definition.
     */
    String source() {
        return mappingSource;
    }

    /**
     * The mapping source definition, serialized from the given builder.
     */
    @Required
    public PutMappingRequest source(XContentBuilder mappingBuilder) {
        try {
            return source(mappingBuilder.string());
        } catch (IOException e) {
            throw new ElasticSearchIllegalArgumentException("Failed to build json for mapping request", e);
        }
    }

    /**
     * The mapping source definition, serialized to JSON from the given map.
     */
    @Required
    public PutMappingRequest source(Map mappingSource) {
        try {
            XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
            builder.map(mappingSource);
            return source(builder.string());
        } catch (IOException e) {
            throw new ElasticSearchGenerationException("Failed to generate [" + mappingSource + "]", e);
        }
    }

    /**
     * The mapping source definition.
     */
    @Required
    public PutMappingRequest source(String mappingSource) {
        this.mappingSource = mappingSource;
        return this;
    }

    /**
     * Timeout to wait till the put mapping gets acknowledged of all current cluster nodes. Defaults to
     * <tt>10s</tt>.
     */
    TimeValue timeout() {
        return timeout;
    }

    /**
     * Timeout to wait till the put mapping gets acknowledged of all current cluster nodes. Defaults to
     * <tt>10s</tt>.
     */
    public PutMappingRequest timeout(TimeValue timeout) {
        this.timeout = timeout;
        return this;
    }

    /**
     * Timeout to wait till the put mapping gets acknowledged of all current cluster nodes. Defaults to
     * <tt>10s</tt>.
     */
    public PutMappingRequest timeout(String timeout) {
        return timeout(TimeValue.parseTimeValue(timeout, null));
    }

    /**
     * If there is already a mapping definition registered against the type, then it will be merged. If there are
     * elements that cannot be merged, the request will be rejected unless
     * {@link #ignoreConflicts(boolean)} is set. In such a case, the conflicting mappings will be discarded.
     */
    public boolean ignoreConflicts() {
        return ignoreConflicts;
    }

    /**
     * If there is already a mapping definition registered against the type, then it will be merged. If there are
     * elements that cannot be merged, the request will be rejected unless
     * {@link #ignoreConflicts(boolean)} is set. In such a case, the conflicting mappings will be discarded.
     */
    public PutMappingRequest ignoreConflicts(boolean ignoreDuplicates) {
        this.ignoreConflicts = ignoreDuplicates;
        return this;
    }

    // Wire format: read order must mirror writeTo exactly.
    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        indices = new String[in.readVInt()];
        for (int i = 0; i < indices.length; i++) {
            indices[i] = in.readUTF();
        }
        // boolean flag marks an optional mappingType
        if (in.readBoolean()) {
            mappingType = in.readUTF();
        }
        mappingSource = in.readUTF();
        timeout = readTimeValue(in);
        ignoreConflicts = in.readBoolean();
    }

    // Wire format: write order must mirror readFrom exactly.
    // NOTE(review): mappingSource has no null guard here (unlike mappingType);
    // validate() is presumed to run before serialization -- confirm callers.
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        if (indices == null) {
            out.writeVInt(0); // null indices serialized as empty list
        } else {
            out.writeVInt(indices.length);
            for (String index : indices) {
                out.writeUTF(index);
            }
        }
        if (mappingType == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            out.writeUTF(mappingType);
        }
        out.writeUTF(mappingSource);
        timeout.writeTo(out);
        out.writeBoolean(ignoreConflicts);
    }
}
package io.ddf.spark.ml;

import io.ddf.DDF;
import io.ddf.content.IHandleRepresentations.IGetResult;
import io.ddf.content.IHandleSchema;
import io.ddf.content.Schema;
import io.ddf.exception.DDFException;
import io.ddf.ml.CrossValidationSet;
import io.ddf.ml.IModel;
import io.ddf.types.TupleMatrixVector;
import io.ddf.util.Utils.MethodInfo.ParamInfo;
import io.ddf.spark.SparkDDF;
import io.ddf.spark.analytics.CrossValidation;
import org.apache.commons.lang.ArrayUtils;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.mllib.linalg.Vector;
import org.apache.spark.mllib.recommendation.Rating;
import org.apache.spark.mllib.regression.LabeledPoint;
import org.apache.spark.rdd.RDD;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

/**
 * Spark-backed ML support for DDF: converts DDFs into the RDD representation a
 * given ML algorithm expects, applies trained models to produce prediction
 * DDFs, and computes confusion matrices.
 */
public class MLSupporter extends io.ddf.ml.MLSupporter implements Serializable {

    public MLSupporter(DDF theDDF) {
        super(theDDF);
    }

    /**
     * Override this to return the approriate DDF representation matching that specified in {@link ParamInfo}. The base
     * implementation simply returns the DDF.
     *
     * @param paramInfo
     * @return the RDD representation of the DDF matching paramInfo's element type,
     *         or the superclass conversion when an RDD was not requested
     */
    @SuppressWarnings("unchecked")
    @Override
    protected Object convertDDF(ParamInfo paramInfo) throws DDFException {
        mLog.info(">>>> Running ConvertDDF of io.ddf.spark.ml.MLSupporter");
        if (paramInfo.argMatches(RDD.class)) {
            // Yay, our target data format is an RDD!
            // Dispatch on the requested element type; Object[] is the catch-all last.
            RDD<?> rdd = null;
            if (paramInfo.paramMatches(LabeledPoint.class)) {
                rdd = (RDD<LabeledPoint>) this.getDDF().getRepresentationHandler().get(RDD.class, LabeledPoint.class);
            } else if (paramInfo.paramMatches(Vector.class)) {
                rdd = (RDD<Vector>) this.getDDF().getRepresentationHandler().get(RDD.class, Vector.class);
            } else if (paramInfo.paramMatches(double[].class)) {
                rdd = (RDD<double[]>) this.getDDF().getRepresentationHandler().get(RDD.class, double[].class);
            } else if (paramInfo.paramMatches(io.ddf.types.Vector.class)) {
                rdd = (RDD<io.ddf.types.Vector>) this.getDDF().getRepresentationHandler()
                        .get(RDD.class, io.ddf.types.Vector.class);
            } else if (paramInfo.paramMatches(TupleMatrixVector.class)) {
                rdd = (RDD<TupleMatrixVector>) this.getDDF().getRepresentationHandler().get(RDD.class, TupleMatrixVector.class);
            } else if (paramInfo.paramMatches(Rating.class)) {
                rdd = (RDD<Rating>) this.getDDF().getRepresentationHandler().get(RDD.class, Rating.class);
            }
            // else if (paramInfo.paramMatches(TablePartition.class)) {
            // rdd = (RDD<TablePartition>) this.getDDF().getRepresentationHandler().get(RDD.class, TablePartition.class);
            // }
            else if (paramInfo.paramMatches(Object.class)) {
                rdd = (RDD<Object[]>) this.getDDF().getRepresentationHandler().get(RDD.class, Object[].class);
            }
            return rdd;
        } else {
            return super.convertDDF(paramInfo);
        }
    }

    @Override
    public DDF applyModel(IModel model) throws DDFException {
        return this.applyModel(model, false, false);
    }

    @Override
    public DDF applyModel(IModel model, boolean hasLabels) throws DDFException {
        return this.applyModel(model, hasLabels, false);
    }

    /**
     * Applies a trained model to this DDF and returns a prediction DDF.
     *
     * @param model           the trained model to apply
     * @param hasLabels       if true, the last column of each row is the true label and
     *                        is carried into the output as "ytrue"
     * @param includeFeatures if true, the feature columns are included in the output
     *                        (all retyped to double)
     */
    @SuppressWarnings("unchecked")
    @Override
    public DDF applyModel(IModel model, boolean hasLabels, boolean includeFeatures) throws DDFException {
        SparkDDF ddf = (SparkDDF) this.getDDF();
        // Ask for whichever supported representation is cheapest to obtain.
        IGetResult gr = ddf.getJavaRDD(Vector.class, double[].class, LabeledPoint.class, Object[].class);

        // Apply the appropriate mapper for the representation we actually got.
        JavaRDD<?> result = null;
        Class<?> resultUnitType = double[].class;
        if (LabeledPoint.class.equals(gr.getTypeSpecs()[0])) {
            mLog.info(">>> applyModel, inputClass= LabeledPoint");
            result = ((JavaRDD<LabeledPoint>) gr.getObject()).mapPartitions(new PredictMapper<LabeledPoint, double[]>(
                    LabeledPoint.class, double[].class, model, hasLabels, includeFeatures));
        } else if (double[].class.equals(gr.getTypeSpecs()[0])) {
            mLog.info(">>> applyModel, inputClass= double[]");
            result = ((JavaRDD<double[]>) gr.getObject()).mapPartitions(new PredictMapper<double[], double[]>(double[].class,
                    double[].class, model, hasLabels, includeFeatures));
        } else if (Vector.class.equals(gr.getTypeSpecs()[0])) {
            mLog.info(">>> applyModel, inputClass= Vector");
            result = ((JavaRDD<Vector>) gr.getObject()).mapPartitions(new PredictMapper<Vector, double[]>(Vector.class,
                    double[].class, model, hasLabels, includeFeatures));
        } else if (Object[].class.equals(gr.getTypeSpecs()[0])) {
            // Object[] rows keep their (possibly non-numeric) label type.
            result = ((JavaRDD<Object[]>) gr.getObject()).mapPartitions(new PredictMapper<Object[], Object[]>(Object[].class,
                    Object[].class, model, hasLabels, includeFeatures));
            resultUnitType = Object[].class;
        } else {
            throw new DDFException(String.format("Error apply model %s", model.getRawModel().getClass().getName()));
        }

        // Build the output schema: [features...][ytrue][yPredict]
        List<Schema.Column> outputColumns = new ArrayList<Schema.Column>();
        if (includeFeatures) {
            outputColumns = ddf.getSchema().getColumns();
            // set columns features of result ddf to Double type
            for (Schema.Column col : outputColumns) {
                col.setType(Schema.ColumnType.DOUBLE);
            }
        } else if (!includeFeatures && hasLabels) {
            outputColumns.add(new Schema.Column("ytrue", "double"));
        }
        outputColumns.add(new Schema.Column("yPredict", "double"));
        Schema schema = new Schema(null, outputColumns);
        if (double[].class.equals(resultUnitType)) {
            DDF resultDDF = this.getManager()
                    .newDDF(this.getManager(), result.rdd(), new Class<?>[] { RDD.class, double[].class }, null, schema);
            return resultDDF;
        } else if (Object[].class.equals(resultUnitType)) {
            DDF resultDDF = this.getManager()
                    .newDDF(this.getManager(), result.rdd(), new Class<?>[] { RDD.class, Object[].class }, null, schema);
            return resultDDF;
        } else return null;
    }

    /**
     * Per-partition prediction mapper: for each row, splits off the label (when
     * present), calls model.predict on the features, and emits
     * [features?][label?][prediction] rows of type O.
     */
    private static class PredictMapper<I, O> implements FlatMapFunction<Iterator<I>, O> {
        private static final long serialVersionUID = 1L;
        private IModel mModel;
        private boolean mHasLabels;
        private boolean mIncludeFeatures;
        private Class<?> mInputType;
        private Class<?> mOutputType;

        public PredictMapper(Class<I> inputType, Class<O> outputType, IModel model, boolean hasLabels,
                boolean includeFeatures) throws DDFException {
            mInputType = inputType;
            mOutputType = outputType;
            mModel = model;
            mHasLabels = hasLabels;
            mIncludeFeatures = includeFeatures;
        }

        @SuppressWarnings("unchecked")
        @Override
        public Iterable<O> call(Iterator<I> samples) throws DDFException {
            List<O> results = new ArrayList<O>();
            while (samples.hasNext()) {
                I sample = samples.next();
                O outputRow = null;
                try {
                    if (sample instanceof LabeledPoint || sample instanceof double[]) {
                        double label = 0;
                        double[] features;
                        if (sample instanceof LabeledPoint) {
                            LabeledPoint s = (LabeledPoint) sample;
                            label = s.label();
                            features = s.features().toArray();
                        } else {
                            double[] s = (double[]) sample;
                            if (mHasLabels) {
                                // label is the last element of the row
                                label = s[s.length - 1];
                                features = Arrays.copyOf(s, s.length - 1);
                            } else {
                                features = s;
                            }
                        }
                        if (double[].class.equals(mOutputType)) {
                            if (mHasLabels) {
                                outputRow = (O) new double[] { label, (Double) this.mModel.predict(features) };
                            } else {
                                outputRow = (O) new double[] { (Double) this.mModel.predict(features) };
                            }
                            if (mIncludeFeatures) {
                                // prepend the feature columns to the [label?, prediction] tail
                                outputRow = (O) ArrayUtils.addAll(features, (double[]) outputRow);
                            }
                        } else if (Object[].class.equals(mOutputType)) {
                            if (mHasLabels) {
                                outputRow = (O) new Object[] { label, this.mModel.predict(features) };
                            } else {
                                outputRow = (O) new Object[] { this.mModel.predict(features) };
                            }
                            if (mIncludeFeatures) {
                                Object[] oFeatures = new Object[features.length];
                                for (int i = 0; i < features.length; i++) {
                                    oFeatures[i] = (Object) features[i];
                                }
                                outputRow = (O) ArrayUtils.addAll(oFeatures, (Object[]) outputRow);
                            }
                        } else {
                            throw new DDFException(String.format("Unsupported output type %s", mOutputType));
                        }
                    } else if (sample instanceof Vector) {
                        Double label = 0.0;
                        double[] features;
                        Vector vector = (Vector) sample;
                        if (mHasLabels) {
                            label = vector.apply(vector.size() - 1);
                            features = Arrays.copyOf(vector.toArray(), vector.size() - 1);
                        } else {
                            features = vector.toArray();
                        }
                        // NOTE(review): Vector input only handles double[] output here;
                        // an Object[] output type would leave outputRow null -- confirm callers.
                        if (double[].class.equals(mOutputType)) {
                            if (mHasLabels) {
                                outputRow = (O) new double[] { label, (Double) this.mModel.predict(features) };
                            } else {
                                outputRow = (O) new double[] { (Double) this.mModel.predict(features) };
                            }
                            if (mIncludeFeatures) {
                                outputRow = (O) ArrayUtils.addAll(features, (double[]) outputRow);
                            }
                        }
                    } else if (sample instanceof Object[]) {
                        Object label = null;
                        Object[] features;
                        Object[] s = (Object[]) sample;
                        if (mHasLabels) {
                            label = s[s.length - 1];
                            features = Arrays.copyOf(s, s.length - 1);
                        } else {
                            features = s;
                        }
                        // features must be numeric even when boxed as Object
                        double[] dFeatures = new double[features.length];
                        for (int i = 0; i < features.length; i++) {
                            dFeatures[i] = (Double) features[i];
                        }
                        if (mHasLabels) {
                            outputRow = (O) new Object[] { label, this.mModel.predict(dFeatures) };
                        } else {
                            outputRow = (O) new Object[] { this.mModel.predict(dFeatures) };
                        }
                        if (mIncludeFeatures) {
                            outputRow = (O) ArrayUtils.addAll(features, (Object[]) outputRow);
                        }
                    } else {
                        throw new DDFException(String.format("Unsupported input type %s", mInputType));
                    }
                    results.add(outputRow);
                } catch (Exception e) {
                    throw new DDFException(String.format("Error predicting with model %s", this.mModel.getRawModel().getClass()
                            .getName()), e);
                }
            }
            return results;
        }
    }

    /**
     * Computes a 2x2 confusion matrix for a binary classifier at the given
     * decision threshold. Returns {{TP, FN}, {FP, TN}}.
     */
    @Override
    public long[][] getConfusionMatrix(IModel model, double threshold) throws DDFException {
        SparkDDF ddf = (SparkDDF) this.getDDF();
        SparkDDF predictions = (SparkDDF) ddf.ML.applyModel(model, true, false);

        // Now get the underlying RDD of [ytrue, ypred] pairs to compute
        JavaRDD<double[]> yTrueYPred = (JavaRDD<double[]>) predictions.getJavaRDD(double[].class);
        final double threshold1 = threshold;
        long[] cm = yTrueYPred.map(new Function<double[], long[]>() {
            @Override
            public long[] call(double[] params) {
                byte isPos = toByte(params[0] > threshold1);
                byte predPos = toByte(params[1] > threshold1);
                // one-hot count indexed by (actual, predicted): 0b00=TN .. 0b11=TP
                long[] result = new long[] { 0L, 0L, 0L, 0L };
                result[isPos << 1 | predPos] = 1L;
                return result;
            }
        }).reduce(new Function2<long[], long[], long[]>() {
            @Override
            public long[] call(long[] a, long[] b) {
                return new long[] { a[0] + b[0], a[1] + b[1], a[2] + b[2], a[3] + b[3] };
            }
        });

        return new long[][] { new long[] { cm[3], cm[2] }, new long[] { cm[1], cm[0] } };
    }

    // 1 if true, 0 if false (used to build the confusion-matrix index).
    private byte toByte(boolean exp) {
        if (exp) return 1;
        else return 0;
    }

    // K-fold cross-validation split of this DDF.
    public List<CrossValidationSet> CVKFold(int k, Long seed) throws DDFException {
        return CrossValidation.DDFKFoldSplit(this.getDDF(), k, seed);
    }

    // Random train/test split cross-validation of this DDF.
    public List<CrossValidationSet> CVRandom(int k, double trainingSize, Long seed) throws DDFException {
        return CrossValidation.DDFRandomSplit(this.getDDF(), k, trainingSize, seed);
    }
}
/*
 * TypeBindings.java
 *
 * Copyright (c) 2012 Mike Strobel
 *
 * This source code is subject to terms and conditions of the Apache License, Version 2.0.
 * A copy of the license can be found in the License.html file at the root of this distribution.
 * By using this source code in any fashion, you are agreeing to be bound by the terms of the
 * Apache License, Version 2.0.
 *
 * You must not remove this notice, or any other, from this software.
 */

package com.strobel.reflection;

import com.strobel.core.VerifyArgument;

/**
 * An immutable mapping from generic type parameters to the types bound to them.
 * The parameter list and the bound-type list are kept as two parallel
 * {@code TypeList}s of equal size; position {@code i} in one corresponds to
 * position {@code i} in the other.  Instances are value objects: all
 * "with..." methods return a new {@code TypeBindings} and never mutate the
 * receiver.
 *
 * @author Mike Strobel
 */
public final class TypeBindings {
    // Shared empty instance returned by empty().
    private final static TypeBindings EMPTY = new TypeBindings(TypeList.empty(), TypeList.empty());

    private final TypeList _genericParameters;
    private final TypeList _boundTypes;
    // Hash of the bound types, precomputed once in the constructor so that
    // equals() can reject mismatches cheaply before comparing element-wise.
    private final int _hashCode;

    /**
     * Creates a binding set from parallel lists.  Every element of
     * {@code genericParameters} must itself be a generic parameter type, and
     * both lists must have the same length.
     */
    private TypeBindings(final TypeList genericParameters, final TypeList boundTypes) {
        _genericParameters = genericParameters;
        _boundTypes = boundTypes;

        final int parameterCount = _genericParameters.size();

        if (parameterCount != boundTypes.size()) {
            throw Error.incorrectNumberOfTypeArguments();
        }

        for (int i = 0; i < parameterCount; i++) {
            if (!genericParameters.get(i).isGenericParameter()) {
                throw new IllegalArgumentException("All types in the 'genericParameters' list must be generic parameters types.");
            }
        }

        // Order-dependent 31-based hash over the bound types; null entries are
        // skipped (they contribute nothing to the hash).
        int hash = 1;

        for (final Type boundType : boundTypes) {
            if (boundType != null) {
                hash = hash * 31 + boundType.hashCode();
            }
        }

        _hashCode = hash;
    }

    /** Returns the shared empty binding set. */
    public static TypeBindings empty() {
        return EMPTY;
    }

    /**
     * Creates a binding set in which each generic parameter is bound to
     * itself (i.e., all parameters are still unbound).
     */
    public static TypeBindings createUnbound(final TypeList genericParameters) {
        return new TypeBindings(
            VerifyArgument.noNullElements(genericParameters, "genericParameters"),
            genericParameters
        );
    }

    /** Creates a binding set from a parameter list and a varargs array of bound types. */
    public static TypeBindings create(final TypeList genericParameters, final Type... boundTypes) {
        return new TypeBindings(
            VerifyArgument.noNullElements(genericParameters, "genericParameters"),
            Type.list(VerifyArgument.noNullElements(boundTypes, "boundTypes"))
        );
    }

    /** Creates a binding set from two parallel, null-free type lists. */
    public static TypeBindings create(final TypeList genericParameters, final TypeList boundTypes) {
        return new TypeBindings(
            VerifyArgument.noNullElements(genericParameters, "genericParameters"),
            VerifyArgument.noNullElements(boundTypes, "boundTypes")
        );
    }

    /** Returns the list of generic parameters (the "keys" of this binding set). */
    public TypeList getGenericParameters() {
        return _genericParameters;
    }

    /** Returns the list of bound types, parallel to {@link #getGenericParameters()}. */
    public TypeList getBoundTypes() {
        return _boundTypes;
    }

    /** Returns the generic parameter at {@code index}. */
    public Type getGenericParameter(final int index) {
        VerifyArgument.inRange(0, size(), index, "index");
        return _genericParameters.get(index);
    }

    /** Returns the type bound to the parameter at {@code index}. */
    public Type getBoundType(final int index) {
        VerifyArgument.inRange(0, size(), index, "index");
        return _boundTypes.get(index);
    }

    /** Returns {@code true} if {@code type} is one of the generic parameters. */
    public boolean containsGenericParameter(final Type type) {
        return type != null &&
               _genericParameters.contains(type);
    }

    /** Returns {@code true} if {@code type} appears among the bound types. */
    public boolean containsBoundType(final Type type) {
        return type != null &&
               _boundTypes.contains(type);
    }

    /**
     * Resolves each of the given parameters against this binding set and
     * returns a new binding set over them.  Parameters not known to this set
     * remain bound to themselves.
     */
    public TypeBindings bindingsFor(final TypeList genericParameters) {
        if (VerifyArgument.notNull(genericParameters, "genericParameters").isEmpty()) {
            return empty();
        }

        final Type[] boundTypes = new Type[genericParameters.size()];

        for (int i = 0, n = genericParameters.size(); i < n; i++) {
            final Type genericParameter = genericParameters.get(i);
            final int index = _genericParameters.indexOf(genericParameter);

            if (index == -1) {
                // Unknown to this set: leave the parameter unbound (bound to itself).
                boundTypes[i] = genericParameters.get(i);
            }
            else {
                boundTypes[i] = _boundTypes.get(index);
            }
        }

        return new TypeBindings(genericParameters, Type.list(boundTypes));
    }

    /**
     * Returns {@code true} if {@code genericParameter} is bound to a concrete
     * (non-generic-parameter) type in this set.
     */
    public boolean hasConcreteParameter(final Type genericParameter) {
        final int index = _genericParameters.indexOf(genericParameter);
        return index != -1 &&
               !_boundTypes.get(index).isGenericParameter();
    }

    /** Returns {@code true} if at least one parameter is bound to a concrete type. */
    public boolean hasConcreteParameters() {
        for (int i = 0, n = size(); i < n; i++) {
            final Type parameter = getBoundType(i);
            if (!parameter.isGenericParameter()) {
                return true;
            }
        }
        return false;
    }

    /** Returns {@code true} if at least one bound type is still a generic parameter. */
    public boolean hasUnboundParameters() {
        for (int i = 0, n = size(); i < n; i++) {
            final Type parameter = getBoundType(i);
            if (parameter.isGenericParameter()) {
                return true;
            }
        }
        return false;
    }

    /**
     * Returns {@code true} if {@code genericParameter} is present and bound to
     * something other than itself (identity comparison).
     */
    public boolean hasBoundParameter(final Type genericParameter) {
        final int index = _genericParameters.indexOf(genericParameter);
        return index != -1 &&
               _boundTypes.get(index) != genericParameter;
    }

    /** Returns {@code true} if any parameter is bound to something other than itself. */
    public boolean hasBoundParameters() {
        for (int i = 0, n = size(); i < n; i++) {
            final Type genericParameter = getGenericParameter(i);
            final Type parameter = getBoundType(i);
            if (parameter != genericParameter) {
                return true;
            }
        }
        return false;
    }

    /**
     * Returns a copy of this binding set with {@code genericParameter} bound
     * to {@code typeArgument}.  If the parameter is not yet present, it is
     * appended.  Any OTHER parameter currently bound to
     * {@code genericParameter} is then rebound to {@code typeArgument} as
     * well, via recursion, so that chains of parameter-to-parameter bindings
     * collapse onto the new argument.
     */
    public TypeBindings withAdditionalBinding(final Type genericParameter, final Type typeArgument) {
        final TypeList genericParameters;
        final Type[] boundTypes;

        int index = _genericParameters.indexOf(genericParameter);

        if (index == -1) {
            // Parameter not present: grow both arrays by one and append it.
            boundTypes = new Type[_genericParameters.size() + 1];
            _boundTypes.toArray(boundTypes);
            index = boundTypes.length - 1;
            final Type[] genericParameterArray = new Type[boundTypes.length];
            _genericParameters.toArray(genericParameterArray);
            genericParameterArray[index] = genericParameter;
            genericParameters = Type.list(genericParameterArray);
        }
        else {
            genericParameters = _genericParameters;
            boundTypes = _boundTypes.toArray(new Type[_boundTypes.size()]);
        }

        boundTypes[index] = typeArgument;

        final TypeBindings results = new TypeBindings(genericParameters, Type.list(boundTypes));

        for (int i = 0, n = boundTypes.length; i < n; i++) {
            if (boundTypes[i] == genericParameter && i != index) {
                // Another slot still points at the parameter we just bound:
                // rebind it too (recursively) so the substitution propagates.
                return results.withAdditionalBinding(genericParameters.get(i), typeArgument);
                // boundTypes[i] = typeArgument;
            }
        }

        return results;
    }

    /** Merges all bindings from {@code additionalBindings} into this set, one at a time. */
    public TypeBindings withAdditionalBindings(final TypeBindings additionalBindings) {
        TypeBindings bindings = this;

        for (final Type parameter : additionalBindings.getGenericParameters()) {
            bindings = bindings.withAdditionalBinding(parameter, additionalBindings.getBoundType(parameter));
        }

        return bindings;
    }

    /**
     * Returns a copy of this set with {@code genericParameter} appended,
     * bound to itself (unbound).  Returns {@code this} if already present.
     */
    public TypeBindings withAdditionalParameter(final Type genericParameter) {
        if (containsGenericParameter(genericParameter)) {
            return this;
        }

        final Type[] genericParameters;
        final Type[] boundTypes;
        final int newParameterCount = _genericParameters.size() + 1;

        boundTypes = new Type[newParameterCount];
        genericParameters = new Type[newParameterCount];

        _boundTypes.toArray(boundTypes);
        _genericParameters.toArray(genericParameters);

        genericParameters[newParameterCount - 1] = genericParameter;
        boundTypes[newParameterCount - 1] = genericParameter;

        return new TypeBindings(Type.list(genericParameters), Type.list(boundTypes));
    }

    /** Finds a generic parameter by its full name, or {@code null} if absent. */
    public Type findGenericParameter(final String genericParameterName) {
        for (int i = 0, n = _genericParameters.size(); i < n; i++) {
            final Type parameter = _genericParameters.get(i);
            if (parameter.getFullName().equals(genericParameterName)) {
                return parameter;
            }
        }
        return null;
    }

    /** Finds the type bound to the parameter with the given full name, or {@code null}. */
    public Type findBoundType(final String genericParameterName) {
        for (int i = 0, n = _genericParameters.size(); i < n; i++) {
            final Type parameter = _genericParameters.get(i);
            if (parameter.getFullName().equals(genericParameterName)) {
                return getBoundType(i);
            }
        }
        return null;
    }

    /**
     * Returns the type bound to {@code genericParameter}; throws if the
     * parameter is not part of this binding set.
     */
    public Type getBoundType(final Type genericParameter) {
        final int index = _genericParameters.indexOf(genericParameter);

        if (index == -1) {
            throw Error.typeParameterNotDefined(genericParameter);
        }

        return getBoundType(index);
    }

    /**
     * Renders the bindings as {@code <T1,T2,...>}; a null binding at position
     * {@code i} is rendered as {@code <i>}.  Empty bindings render as "".
     */
    @Override
    public String toString() {
        if (isEmpty()) {
            return "";
        }

        StringBuilder sb = new StringBuilder();
        sb.append('<');

        for (int i = 0, n = size(); i < n; ++i) {
            if (i > 0) {
                sb.append(',');
            }

            final Type binding = getBoundType(i);

            if (binding == null) {
                sb.append('<');
                sb.append(i);
                sb.append('>');
            }
            else {
                sb = binding.appendBriefDescription(sb);
            }
        }

        sb.append('>');

        return sb.toString();
    }

    @Override
    public int hashCode() {
        // Precomputed in the constructor from the bound types.
        return _hashCode;
    }

    /**
     * Element-wise equality over both the parameter list and the bound-type
     * list; the cached hash and sizes are compared first as cheap rejections.
     */
    @Override
    public boolean equals(final Object o) {
        if (o == this) {
            return true;
        }

        if (o == null || o.getClass() != getClass()) {
            return false;
        }

        final int size = size();
        final TypeBindings other = (TypeBindings)o;

        if (other._hashCode != _hashCode) {
            return false;
        }

        if (other.size() != size) {
            return false;
        }

        for (int i = 0; i < size; ++i) {
            final Type parameter = getGenericParameter(i);
            final Type otherParameter = other.getGenericParameter(i);

            if (otherParameter == null) {
                if (parameter != null) {
                    return false;
                }
            }
            else if (!otherParameter.equals(parameter)) {
                return false;
            }

            final Type binding = getBoundType(i);
            final Type otherBinding = other.getBoundType(i);

            if (otherBinding == null) {
                if (binding != null) {
                    return false;
                }
            }
            else if (!otherBinding.equals(binding)) {
                return false;
            }
        }

        return true;
    }

    /** Number of parameter/bound-type pairs. */
    public int size() {
        return _genericParameters.size();
    }

    /** Returns {@code true} if this binding set contains no parameters. */
    public boolean isEmpty() {
        return _genericParameters.isEmpty();
    }
}
package gr.iti.mklab.sm.storages;

import java.io.IOException;

import gr.iti.mklab.simmo.core.documents.Post;
import gr.iti.mklab.simmo.core.documents.Webpage;
import gr.iti.mklab.simmo.core.items.Image;
import gr.iti.mklab.simmo.core.items.Video;
import gr.iti.mklab.simmo.core.morphia.DAOManager;
import gr.iti.mklab.simmo.core.morphia.MorphiaManager;
import gr.iti.mklab.sm.Configuration;

import org.apache.log4j.Logger;

import com.mongodb.MongoException;

/**
 * {@link Storage} implementation that persists SIMMO objects (posts, images,
 * videos and web pages) into MongoDB through Morphia DAOs.
 *
 * @author manosetro
 * @email manosetro@iti.gr
 */
public class MongoDbStorage implements Storage {

    // Configuration parameter keys read from the supplied Configuration.
    private static final String HOST = "mongodb.host";
    private static final String DB = "mongodb.database";
    private static final String USERNAME = "mongodb.username";
    private static final String PASSWORD = "mongodb.password";

    private Logger logger = Logger.getLogger(MongoDbStorage.class);

    private String storageName = "Mongodb";

    private String host;
    private String database;
    private String username = null;
    private String password = null;

    private DAOManager dao = null;

    /**
     * Reads the MongoDB connection settings (host, database, optional
     * credentials) from the given configuration. No connection is made until
     * {@link #open()} is called.
     */
    public MongoDbStorage(Configuration config) {
        this.host = config.getParameter(MongoDbStorage.HOST);
        this.database = config.getParameter(MongoDbStorage.DB);
        this.username = config.getParameter(MongoDbStorage.USERNAME);
        this.password = config.getParameter(MongoDbStorage.PASSWORD);
    }

    /** Tears down the shared Morphia/Mongo connection. */
    @Override
    public void close() {
        MorphiaManager.tearDown();
    }

    /** Deletion is not supported by this storage; always returns {@code false}. */
    @Override
    public boolean delete(String id) throws IOException {
        return false;
    }

    /**
     * Connects to MongoDB (with credentials when both username and password
     * are configured) and creates the DAO manager for the configured database.
     *
     * @return {@code false} only when the connection attempt fails.
     */
    @Override
    public boolean open() {
        logger.info("Open MongoDB storage <host: " + host + ">");
        // NOTE(review): when no database is configured this returns true
        // without connecting, leaving 'dao' null — store() would then fail.
        // Confirm whether a missing database name should be treated as an error.
        if (database != null) {
            try {
                if (username != null && !username.isEmpty() && password != null && !password.isEmpty()) {
                    MorphiaManager.setup(host, username, password);
                } else {
                    MorphiaManager.setup(host);
                }
                dao = new DAOManager(database);
            } catch (Exception e) {
                // Keep the failure cause in the log instead of swallowing it.
                logger.error("MongoDB Storage failed to open!", e);
                return false;
            }
        }
        return true;
    }

    /**
     * Persists the given object through the DAO matching its concrete type.
     * Images and videos also persist their contributor; anything that is not
     * an Image, Video or Webpage is stored as a Post. Mongo failures are
     * logged (with stack trace) rather than propagated.
     */
    @Override
    public void store(gr.iti.mklab.simmo.core.Object object) throws IOException {
        try {
            if (object instanceof Image) {
                dao.userDAO.save(object.getContributor());
                dao.imageDAO.save((Image) object);
            } else if (object instanceof Video) {
                dao.userDAO.save(object.getContributor());
                dao.videoDAO.save((Video) object);
            } else if (object instanceof Webpage) {
                dao.saveWebpage((Webpage) object);
            } else {
                dao.savePost((Post) object);
            }
        } catch (MongoException e) {
            // Log through the configured logger (with the exception attached)
            // rather than printing to stderr via printStackTrace().
            logger.error("Storing item " + object.getId() + " failed.", e);
        }
    }

    /** This storage performs no health check; always reports healthy. */
    @Override
    public boolean checkStatus() {
        return true;
    }

    @Override
    public String getStorageName() {
        return this.storageName;
    }
}
/*
 * Copyright 2013 Haulmont
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package integration;

import com.haulmont.yarg.formatters.ReportFormatter;
import com.haulmont.yarg.formatters.factory.DefaultFormatterFactory;
import com.haulmont.yarg.formatters.factory.FormatterFactoryInput;
import com.haulmont.yarg.structure.ReportOutputType;
import com.haulmont.yarg.structure.BandData;
import com.haulmont.yarg.structure.BandOrientation;
import com.haulmont.yarg.structure.impl.ReportTemplateImpl;
import junit.framework.Assert;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.poi.hssf.usermodel.HSSFCell;
import org.apache.poi.hssf.usermodel.HSSFRow;
import org.apache.poi.hssf.usermodel.HSSFSheet;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.junit.Test;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.*;

/**
 * Integration tests for the XLS report formatter: each test renders a report
 * from a template into ./result/integration and compares the output against a
 * pre-rendered etalon (reference) workbook cell by cell.
 */
public class XlsIntegrationTest {

    /** Renders a report containing a date value and compares it with the etalon. */
    @Test
    public void testFormats() throws Exception {
        BandData root = new BandData("Root", null, BandOrientation.HORIZONTAL);
        HashMap<String, Object> rootData = new HashMap<String, Object>();
        root.setData(rootData);
        BandData band1 = new BandData("Band1", root, BandOrientation.HORIZONTAL);
        band1.addData("date", new SimpleDateFormat("dd-MM-yyyy").parse("12-04-1961"));
        root.addChild(band1);

        FileOutputStream outputStream = new FileOutputStream("./result/integration/result-formats.xls");
        ReportFormatter formatter = new DefaultFormatterFactory().createFormatter(new FormatterFactoryInput("xls", root,
                new ReportTemplateImpl("", "./modules/core/test/integration/test-formats.xls",
                        "./modules/core/test/integration/test-formats.xls", ReportOutputType.xls), outputStream));
        formatter.renderDocument();
        IOUtils.closeQuietly(outputStream);

        compareFiles("./modules/core/test/integration/etalon-formats.xls",
                "./result/integration/result-formats.xls");
    }

    /** Renders a template containing formulas and compares it with the etalon. */
    @Test
    public void testFormulas() throws Exception {
        BandData root = createRootBandForFormulas();

        FileOutputStream outputStream = new FileOutputStream("./result/integration/result-with-formulas.xls");
        ReportFormatter formatter = new DefaultFormatterFactory().createFormatter(new FormatterFactoryInput("xls", root,
                new ReportTemplateImpl("", "smoketest/test.xls",
                        "./modules/core/test/integration/test-with-formulas.xls", ReportOutputType.xls), outputStream));
        formatter.renderDocument();
        IOUtils.closeQuietly(outputStream);

        compareFiles("./modules/core/test/integration/etalon-with-formulas.xls",
                "./result/integration/result-with-formulas.xls");
    }

    /** Renders a template with aggregation functions and compares it with the etalon. */
    @Test
    public void testAggregations() throws Exception {
        BandData root = createRootBandForAggregation();

        FileOutputStream outputStream = new FileOutputStream("./result/integration/result-with-aggregation.xls");
        ReportFormatter formatter = new DefaultFormatterFactory().createFormatter(new FormatterFactoryInput("xls", root,
                new ReportTemplateImpl("", "smoketest/test.xls",
                        "./modules/core/test/integration/test-with-aggregation.xls", ReportOutputType.xls), outputStream));
        formatter.renderDocument();
        IOUtils.closeQuietly(outputStream);

        compareFiles("./modules/core/test/integration/etalon-with-aggregation.xls",
                "./result/integration/result-with-aggregation.xls");
    }

    /** Renders the aggregation template with no data bands at all. */
    @Test
    public void testAggregationsEmpty() throws Exception {
        BandData root = new BandData("Root", null, BandOrientation.HORIZONTAL);
        HashMap<String, Object> rootData = new HashMap<String, Object>();
        root.setData(rootData);

        FileOutputStream outputStream = new FileOutputStream("./result/integration/result-empty.xls");
        ReportFormatter formatter = new DefaultFormatterFactory().createFormatter(new FormatterFactoryInput("xls", root,
                new ReportTemplateImpl("", "smoketest/test.xls",
                        "./modules/core/test/integration/test-with-aggregation.xls", ReportOutputType.xls), outputStream));
        formatter.renderDocument();
        IOUtils.closeQuietly(outputStream);

        compareFiles("./modules/core/test/integration/etalon-empty.xls",
                "./result/integration/result-empty.xls");
    }

    /**
     * Compares the first 10x10 cell block of the first sheet of the etalon
     * (expected) workbook against the result (actual) workbook, failing on any
     * structural or numeric mismatch.
     */
    private void compareFiles(String etalonFile, String resultFile) throws IOException {
        // Bug fix: previously the 'result' workbook was loaded from etalonFile
        // and 'etalon' from resultFile, swapping expected/actual in every assertion.
        HSSFWorkbook etalon = new HSSFWorkbook(FileUtils.openInputStream(new File(etalonFile)));
        HSSFWorkbook result = new HSSFWorkbook(FileUtils.openInputStream(new File(resultFile)));

        HSSFSheet resultSheet = result.getSheetAt(0);
        HSSFSheet etalonSheet = etalon.getSheetAt(0);

        for (int row = 0; row < 10; row++) {
            HSSFRow resultRow = resultSheet.getRow(row);
            HSSFRow etalonRow = etalonSheet.getRow(row);
            if (resultRow == null && etalonRow == null) {
                continue;
            } else if ((resultRow == null) || (etalonRow == null)) {
                // Exactly one of the two rows exists.
                Assert.fail("fail on row [" + row + "]");
            }
            for (int cell = 0; cell < 10; cell++) {
                HSSFCell resultCell = resultRow.getCell(cell);
                HSSFCell etalonCell = etalonRow.getCell(cell);
                if (resultCell != null && etalonCell != null) {
                    Assert.assertEquals(String.format("fail on cell [%d,%d]", row, cell),
                            etalonCell.getNumericCellValue(), resultCell.getNumericCellValue());
                } else if (resultCell != null || etalonCell != null) {
                    // Exactly one of the two cells exists.
                    Assert.fail(String.format("fail on cell [%d,%d]", row, cell));
                }
            }
        }
    }

    /** Builds a root band with three Band1 rows (col1..col3) and a footer, for formula tests. */
    private BandData createRootBandForFormulas() {
        BandData root = new BandData("Root", null, BandOrientation.HORIZONTAL);
        HashMap<String, Object> rootData = new HashMap<String, Object>();
        root.setData(rootData);

        BandData band1_1 = new BandData("Band1", root, BandOrientation.HORIZONTAL);
        BandData band1_2 = new BandData("Band1", root, BandOrientation.HORIZONTAL);
        BandData band1_3 = new BandData("Band1", root, BandOrientation.HORIZONTAL);
        BandData footer = new BandData("Footer", root, BandOrientation.HORIZONTAL);

        Map<String, Object> datamap = new HashMap<String, Object>();
        datamap.put("col1", 1);
        datamap.put("col2", 2);
        datamap.put("col3", 3);
        band1_1.setData(datamap);

        Map<String, Object> datamap2 = new HashMap<String, Object>();
        datamap2.put("col1", 4);
        datamap2.put("col2", 5);
        datamap2.put("col3", 6);
        band1_2.setData(datamap2);

        Map<String, Object> datamap3 = new HashMap<String, Object>();
        datamap3.put("col1", 7);
        datamap3.put("col2", 8);
        datamap3.put("col3", 9);
        band1_3.setData(datamap3);

        root.addChild(band1_1);
        root.addChild(band1_2);
        root.addChild(band1_3);
        root.addChild(footer);

        root.setFirstLevelBandDefinitionNames(new HashSet<String>());
        root.getFirstLevelBandDefinitionNames().add("Band1");
        return root;
    }

    /** Builds a nested band hierarchy (Band1 > Band2 > Band3) used by the aggregation tests. */
    private BandData createRootBandForAggregation() {
        BandData root = new BandData("Root", null, BandOrientation.HORIZONTAL);
        HashMap<String, Object> rootData = new HashMap<String, Object>();
        root.setData(rootData);

        BandData band1_1 = band(1, 2, BandOrientation.HORIZONTAL, null, "Band1");
        BandData band2_1 = band(11, 22, BandOrientation.HORIZONTAL, null, "Band2");
        BandData band2_2 = band(12, 23, BandOrientation.HORIZONTAL, null, "Band2");
        band1_1.addChildren(Arrays.asList(band2_1, band2_2));

        BandData band1_2 = band(2, 3, BandOrientation.HORIZONTAL, null, "Band1");
        BandData band2_3 = band(13, 24, BandOrientation.HORIZONTAL, null, "Band2");
        BandData band3_1 = band(111, null, BandOrientation.VERTICAL, band2_3, "Band3");
        BandData band3_2 = band(222, null, BandOrientation.VERTICAL, band2_3, "Band3");
        band1_2.addChildren(Collections.singletonList(band2_3));
        band2_3.addChildren(Arrays.asList(band3_1, band3_2));

        BandData band1_3 = band(3, 4, BandOrientation.HORIZONTAL, null, "Band1");

        root.addChild(band1_1);
        root.addChild(band1_2);
        root.addChild(band1_3);

        root.setFirstLevelBandDefinitionNames(new HashSet<String>());
        root.getFirstLevelBandDefinitionNames().add("Band1");
        return root;
    }

    /** Creates a band carrying col1 (and optionally col2) under the given parent. */
    private BandData band(int col1, Integer col2, BandOrientation orientation, BandData parentBand, String name) {
        BandData band1_1 = new BandData(name, parentBand, orientation);
        Map<String, Object> datamap = new HashMap<String, Object>();
        datamap.put("col1", col1);
        datamap.put("col2", col2);
        band1_1.setData(datamap);
        return band1_1;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sysml.hops.rewrite; import java.util.Arrays; import java.util.List; import org.apache.sysml.hops.AggUnaryOp; import org.apache.sysml.hops.BinaryOp; import org.apache.sysml.hops.DataOp; import org.apache.sysml.hops.Hop; import org.apache.sysml.hops.Hop.AggOp; import org.apache.sysml.hops.Hop.Direction; import org.apache.sysml.hops.Hop.OpOp1; import org.apache.sysml.hops.HopsException; import org.apache.sysml.hops.IndexingOp; import org.apache.sysml.hops.LeftIndexingOp; import org.apache.sysml.hops.LiteralOp; import org.apache.sysml.hops.Hop.OpOp2; import org.apache.sysml.hops.UnaryOp; import org.apache.sysml.parser.ForStatement; import org.apache.sysml.parser.ForStatementBlock; import org.apache.sysml.parser.IfStatementBlock; import org.apache.sysml.parser.StatementBlock; import org.apache.sysml.parser.WhileStatementBlock; import org.apache.sysml.parser.Expression.DataType; /** * Rule: Simplify program structure by pulling if or else statement body out * (removing the if statement block ifself) in order to allow intra-procedure * analysis to propagate exact statistics. 
* */ public class RewriteForLoopVectorization extends StatementBlockRewriteRule { private static final OpOp2[] MAP_SCALAR_AGGREGATE_SOURCE_OPS = new OpOp2[]{OpOp2.PLUS, OpOp2.MULT, OpOp2.MIN, OpOp2.MAX}; private static final AggOp[] MAP_SCALAR_AGGREGATE_TARGET_OPS = new AggOp[]{AggOp.SUM, AggOp.PROD, AggOp.MIN, AggOp.MAX}; @Override public List<StatementBlock> rewriteStatementBlock(StatementBlock sb, ProgramRewriteStatus state) throws HopsException { if( sb instanceof ForStatementBlock ) { ForStatementBlock fsb = (ForStatementBlock) sb; ForStatement fs = (ForStatement) fsb.getStatement(0); Hop from = fsb.getFromHops(); Hop to = fsb.getToHops(); Hop incr = fsb.getIncrementHops(); String iterVar = fsb.getIterPredicate().getIterVar().getName(); if( fs.getBody()!=null && fs.getBody().size()==1 ) //single child block { StatementBlock csb = (StatementBlock) fs.getBody().get(0); if( !( csb instanceof WhileStatementBlock //last level block || csb instanceof IfStatementBlock || csb instanceof ForStatementBlock ) ) { //AUTO VECTORIZATION PATTERNS //Note: unnecessary row or column indexing then later removed via hop rewrites //e.g., for(i in a:b){s = s + as.scalar(X[i,2])} -> s = sum(X[a:b,2]) sb = vectorizeScalarAggregate(sb, csb, from, to, incr, iterVar); //e.g., for(i in a:b){X[i,2] = Y[i,1] + Z[i,3]} -> X[a:b,2] = Y[a:b,1] + Z[a:b,3]; sb = vectorizeElementwiseBinary(sb, csb, from, to, incr, iterVar); //e.g., for(i in a:b){X[i,2] = abs(Y[i,1])} -> X[a:b,2] = abs(Y[a:b,1]); sb = vectorizeElementwiseUnary(sb, csb, from, to, incr, iterVar); //e.g., for(i in a:b){X[7,i] = Y[1,i]} -> X[7,a:b] = Y[1,a:b]; sb = vectorizeIndexedCopy(sb, csb, from, to, incr, iterVar); } } } //if no rewrite applied sb is the original for loop otherwise a last level statement block //that includes the equivalent vectorized operations. 
return Arrays.asList(sb); } @Override public List<StatementBlock> rewriteStatementBlocks(List<StatementBlock> sbs, ProgramRewriteStatus sate) throws HopsException { return sbs; } private StatementBlock vectorizeScalarAggregate( StatementBlock sb, StatementBlock csb, Hop from, Hop to, Hop increment, String itervar ) throws HopsException { StatementBlock ret = sb; //check missing and supported increment values if( !(increment!=null && increment instanceof LiteralOp && ((LiteralOp)increment).getDoubleValue()==1.0) ) { return ret; } //check for applicability boolean leftScalar = false; boolean rightScalar = false; boolean rowIx = false; //row or col if( csb.get_hops()!=null && csb.get_hops().size()==1 ){ Hop root = csb.get_hops().get(0); if( root.getDataType()==DataType.SCALAR && root.getInput().get(0) instanceof BinaryOp ) { BinaryOp bop = (BinaryOp) root.getInput().get(0); Hop left = bop.getInput().get(0); Hop right = bop.getInput().get(1); //check for left scalar plus if( HopRewriteUtils.isValidOp(bop.getOp(), MAP_SCALAR_AGGREGATE_SOURCE_OPS) && left instanceof DataOp && left.getDataType() == DataType.SCALAR && root.getName().equals(left.getName()) && right instanceof UnaryOp && ((UnaryOp) right).getOp() == OpOp1.CAST_AS_SCALAR && right.getInput().get(0) instanceof IndexingOp ) { IndexingOp ix = (IndexingOp)right.getInput().get(0); if( ix.isRowLowerEqualsUpper() && ix.getInput().get(1) instanceof DataOp && ix.getInput().get(1).getName().equals(itervar) ){ leftScalar = true; rowIx = true; } else if( ix.isColLowerEqualsUpper() && ix.getInput().get(3) instanceof DataOp && ix.getInput().get(3).getName().equals(itervar) ){ leftScalar = true; rowIx = false; } } //check for right scalar plus else if( HopRewriteUtils.isValidOp(bop.getOp(), MAP_SCALAR_AGGREGATE_SOURCE_OPS) && right instanceof DataOp && right.getDataType() == DataType.SCALAR && root.getName().equals(right.getName()) && left instanceof UnaryOp && ((UnaryOp) left).getOp() == OpOp1.CAST_AS_SCALAR && 
left.getInput().get(0) instanceof IndexingOp ) {
					// rhs is a scalar cast over an indexed read X[i,]/X[,i];
					// detect which dimension is bound to the loop variable
					IndexingOp ix = (IndexingOp)left.getInput().get(0);
					// row case: row-lower==row-upper and the row index reads itervar
					if( ix.isRowLowerEqualsUpper() && ix.getInput().get(1) instanceof DataOp && ix.getInput().get(1).getName().equals(itervar) ){
						rightScalar = true;
						rowIx = true;
					}
					// column case: col-lower==col-upper and the col index reads itervar
					else if( ix.isColLowerEqualsUpper() && ix.getInput().get(3) instanceof DataOp && ix.getInput().get(3).getName().equals(itervar) ){
						rightScalar = true;
						rowIx = false;
					}
				}
			}
		}

		//apply rewrite if possible
		if( leftScalar || rightScalar )
		{
			Hop root = csb.get_hops().get(0);
			BinaryOp bop = (BinaryOp) root.getInput().get(0);
			// the scalar operand holds the cast over the indexing operation
			Hop cast = bop.getInput().get( leftScalar?1:0 );
			Hop ix = cast.getInput().get(0);
			// map the binary op (e.g., +) to its aggregate counterpart (e.g., sum)
			int aggOpPos = HopRewriteUtils.getValidOpPos(bop.getOp(), MAP_SCALAR_AGGREGATE_SOURCE_OPS);
			AggOp aggOp = MAP_SCALAR_AGGREGATE_TARGET_OPS[aggOpPos];

			//replace cast with sum (full aggregate over the widened index range)
			AggUnaryOp newSum = HopRewriteUtils.createAggUnaryOp(ix, aggOp, Direction.RowCol);
			HopRewriteUtils.removeChildReference(cast, ix);
			HopRewriteUtils.removeChildReference(bop, cast);
			HopRewriteUtils.addChildReference(bop, newSum, leftScalar?1:0 );

			//modify indexing expression according to loop predicate from-to
			//NOTE: any redundant index operations are removed via dynamic algebraic simplification rewrites
			// child positions 1/2 are row-lower/row-upper, 3/4 are col-lower/col-upper
			int index1 = rowIx ? 1 : 3;
			int index2 = rowIx ? 2 : 4;
			HopRewriteUtils.replaceChildReference(ix, ix.getInput().get(index1), from, index1);
			HopRewriteUtils.replaceChildReference(ix, ix.getInput().get(index2), to, index2);

			//update indexing size information (range no longer a single row/column)
			if( rowIx )
				((IndexingOp)ix).setRowLowerEqualsUpper(false);
			else
				((IndexingOp)ix).setColLowerEqualsUpper(false);
			ix.refreshSizeInformation();

			ret = csb;
			LOG.debug("Applied vectorizeScalarSumForLoop.");
		}

		return ret;
	}

	/**
	 * Rewrites a for loop whose body is a single elementwise binary row/column update,
	 * e.g., for(i) R[i,] = A[i,] op B[i,], into one vectorized left-indexing assignment
	 * over the loop range [from,to]. Only applied for a literal increment of 1.
	 *
	 * @param sb        the enclosing for statement block (returned if not applicable)
	 * @param csb       the loop-body statement block (returned if rewritten)
	 * @param from      loop lower bound hop
	 * @param to        loop upper bound hop
	 * @param increment loop increment hop (must be literal 1)
	 * @param itervar   name of the loop iteration variable
	 * @return the rewritten body block, or the original sb if not applicable
	 * @throws HopsException in case of invalid hop dag modifications
	 */
	private StatementBlock vectorizeElementwiseBinary( StatementBlock sb, StatementBlock csb, Hop from, Hop to, Hop increment, String itervar )
		throws HopsException
	{
		StatementBlock ret = sb;

		//check supported increment values
		if( !(increment instanceof LiteralOp && ((LiteralOp)increment).getDoubleValue()==1.0) ){
			return ret;
		}

		//check for applicability
		boolean apply = false;
		boolean rowIx = false; //row or col
		if( csb.get_hops()!=null && csb.get_hops().size()==1 )
		{
			Hop root = csb.get_hops().get(0);
			if( root.getDataType()==DataType.MATRIX && root.getInput().get(0) instanceof LeftIndexingOp )
			{
				LeftIndexingOp lix = (LeftIndexingOp) root.getInput().get(0);
				Hop lixlhs = lix.getInput().get(0);
				Hop lixrhs = lix.getInput().get(1);

				// require pattern X[..] = Y[..] op Z[..] with plain data reads underneath
				if( lixlhs instanceof DataOp && lixrhs instanceof BinaryOp
					&& lixrhs.getInput().get(0) instanceof IndexingOp
					&& lixrhs.getInput().get(1) instanceof IndexingOp
					&& lixrhs.getInput().get(0).getInput().get(0) instanceof DataOp
					&& lixrhs.getInput().get(1).getInput().get(0) instanceof DataOp)
				{
					IndexingOp rix0 = (IndexingOp) lixrhs.getInput().get(0);
					IndexingOp rix1 = (IndexingOp) lixrhs.getInput().get(1);

					//check for rowwise: all three indexings select the single row itervar
					if(    lix.isRowLowerEqualsUpper() && rix0.isRowLowerEqualsUpper() && rix1.isRowLowerEqualsUpper()
						&& lix.getInput().get(2).getName().equals(itervar)
						&& rix0.getInput().get(1).getName().equals(itervar)
						&& rix1.getInput().get(1).getName().equals(itervar))
					{
						apply = true;
						rowIx = true;
					}
					//check for colwise: all three indexings select the single column itervar
					if(    lix.isColLowerEqualsUpper() && rix0.isColLowerEqualsUpper() && rix1.isColLowerEqualsUpper()
						&& lix.getInput().get(4).getName().equals(itervar)
						&& rix0.getInput().get(3).getName().equals(itervar)
						&& rix1.getInput().get(3).getName().equals(itervar))
					{
						apply = true;
						rowIx = false;
					}
				}
			}
		}

		//apply rewrite if possible
		if( apply ) {
			Hop root = csb.get_hops().get(0);
			LeftIndexingOp lix = (LeftIndexingOp) root.getInput().get(0);
			BinaryOp bop = (BinaryOp) lix.getInput().get(1);
			IndexingOp rix0 = (IndexingOp) bop.getInput().get(0);
			IndexingOp rix1 = (IndexingOp) bop.getInput().get(1);
			// left-indexing children 2/3 are row bounds, 4/5 are col bounds
			// (right-indexing bounds sit one position earlier: index1-1 / index2-1)
			int index1 = rowIx ? 2 : 4;
			int index2 = rowIx ? 3 : 5;

			//modify left indexing bounds
			HopRewriteUtils.replaceChildReference(lix, lix.getInput().get(index1),from, index1);
			HopRewriteUtils.replaceChildReference(lix, lix.getInput().get(index2),to, index2);
			//modify both right indexing
			HopRewriteUtils.replaceChildReference(rix0, rix0.getInput().get(index1-1), from, index1-1);
			HopRewriteUtils.replaceChildReference(rix0, rix0.getInput().get(index2-1), to, index2-1);
			HopRewriteUtils.replaceChildReference(rix1, rix1.getInput().get(index1-1), from, index1-1);
			HopRewriteUtils.replaceChildReference(rix1, rix1.getInput().get(index2-1), to, index2-1);
			updateLeftAndRightIndexingSizes(rowIx, lix, rix0, rix1);
			bop.refreshSizeInformation();
			lix.refreshSizeInformation(); //after bop update

			ret = csb;
			//ret.liveIn().removeVariable(itervar);
			LOG.debug("Applied vectorizeElementwiseBinaryForLoop.");
		}

		return ret;
	}

	/**
	 * Rewrites a for loop whose body is a single elementwise unary row/column update,
	 * e.g., for(i) R[i,] = op(A[i,]), into one vectorized left-indexing assignment
	 * over the loop range [from,to]. Only applied for a literal increment of 1.
	 *
	 * @param sb        the enclosing for statement block (returned if not applicable)
	 * @param csb       the loop-body statement block (returned if rewritten)
	 * @param from      loop lower bound hop
	 * @param to        loop upper bound hop
	 * @param increment loop increment hop (must be literal 1)
	 * @param itervar   name of the loop iteration variable
	 * @return the rewritten body block, or the original sb if not applicable
	 * @throws HopsException in case of invalid hop dag modifications
	 */
	private StatementBlock vectorizeElementwiseUnary( StatementBlock sb, StatementBlock csb, Hop from, Hop to, Hop increment, String itervar )
		throws HopsException
	{
		StatementBlock ret = sb;

		//check supported increment values
		if( !(increment instanceof LiteralOp && ((LiteralOp)increment).getDoubleValue()==1.0) ){
			return ret;
		}

		//check for applicability
		boolean apply = false;
		boolean rowIx = false; //row or col
		if( csb.get_hops()!=null && csb.get_hops().size()==1 )
		{
			Hop root = csb.get_hops().get(0);
			if( root.getDataType()==DataType.MATRIX && root.getInput().get(0) instanceof LeftIndexingOp )
			{
				LeftIndexingOp lix = (LeftIndexingOp) root.getInput().get(0);
				Hop lixlhs = lix.getInput().get(0);
				Hop lixrhs = lix.getInput().get(1);

				// require pattern X[..] = op(Y[..]) with a plain data read underneath
				if( lixlhs instanceof DataOp && lixrhs instanceof UnaryOp
					&& lixrhs.getInput().get(0) instanceof IndexingOp
					&& lixrhs.getInput().get(0).getInput().get(0) instanceof DataOp )
				{
					boolean[] tmp = checkLeftAndRightIndexing(lix, (IndexingOp) lixrhs.getInput().get(0), itervar);
					apply = tmp[0];
					rowIx = tmp[1];
				}
			}
		}

		//apply rewrite if possible
		if( apply ) {
			Hop root = csb.get_hops().get(0);
			LeftIndexingOp lix = (LeftIndexingOp) root.getInput().get(0);
			UnaryOp uop = (UnaryOp) lix.getInput().get(1);
			IndexingOp rix = (IndexingOp) uop.getInput().get(0);
			// left-indexing children 2/3 are row bounds, 4/5 are col bounds
			int index1 = rowIx ? 2 : 4;
			int index2 = rowIx ? 3 : 5;

			//modify left indexing bounds
			HopRewriteUtils.replaceChildReference(lix, lix.getInput().get(index1), from, index1);
			HopRewriteUtils.replaceChildReference(lix, lix.getInput().get(index2), to, index2);
			//modify right indexing
			HopRewriteUtils.replaceChildReference(rix, rix.getInput().get(index1-1), from, index1-1);
			HopRewriteUtils.replaceChildReference(rix, rix.getInput().get(index2-1), to, index2-1);
			updateLeftAndRightIndexingSizes(rowIx, lix, rix);
			uop.refreshSizeInformation();
			lix.refreshSizeInformation(); //after uop update

			ret = csb;
			LOG.debug("Applied vectorizeElementwiseUnaryForLoop.");
		}

		return ret;
	}

	/**
	 * Rewrites a for loop whose body is a single indexed copy, e.g.,
	 * for(i) R[i,] = A[i,], into one vectorized left-indexing assignment over the
	 * loop range [from,to]. Only applied for a literal increment of 1.
	 *
	 * @param sb        the enclosing for statement block (returned if not applicable)
	 * @param csb       the loop-body statement block (returned if rewritten)
	 * @param from      loop lower bound hop
	 * @param to        loop upper bound hop
	 * @param increment loop increment hop (must be literal 1)
	 * @param itervar   name of the loop iteration variable
	 * @return the rewritten body block, or the original sb if not applicable
	 * @throws HopsException in case of invalid hop dag modifications
	 */
	private StatementBlock vectorizeIndexedCopy( StatementBlock sb, StatementBlock csb, Hop from, Hop to, Hop increment, String itervar )
		throws HopsException
	{
		StatementBlock ret = sb;

		//check supported increment values
		if( !(increment instanceof LiteralOp && ((LiteralOp)increment).getDoubleValue()==1.0) ) {
			return ret;
		}

		//check for applicability
		boolean apply = false;
		boolean rowIx = false; //row or col
		if( csb.get_hops()!=null && csb.get_hops().size()==1 )
		{
			Hop root = csb.get_hops().get(0);
			if( root.getDataType()==DataType.MATRIX && root.getInput().get(0) instanceof LeftIndexingOp )
			{
				LeftIndexingOp lix = (LeftIndexingOp) root.getInput().get(0);
				Hop lixlhs = lix.getInput().get(0);
				Hop lixrhs = lix.getInput().get(1);

				// require pattern X[..] = Y[..] with a plain data read on the rhs
				if( lixlhs instanceof DataOp && lixrhs instanceof IndexingOp
					&& lixrhs.getInput().get(0) instanceof DataOp )
				{
					boolean[] tmp = checkLeftAndRightIndexing(lix, (IndexingOp)lixrhs, itervar);
					apply = tmp[0];
					rowIx = tmp[1];
				}
			}
		}

		//apply rewrite if possible
		if( apply ) {
			Hop root = csb.get_hops().get(0);
			LeftIndexingOp lix = (LeftIndexingOp) root.getInput().get(0);
			IndexingOp rix = (IndexingOp) lix.getInput().get(1);
			// left-indexing children 2/3 are row bounds, 4/5 are col bounds
			int index1 = rowIx ? 2 : 4;
			int index2 = rowIx ? 3 : 5;

			//modify left indexing bounds
			HopRewriteUtils.replaceChildReference(lix, lix.getInput().get(index1), from, index1);
			HopRewriteUtils.replaceChildReference(lix, lix.getInput().get(index2), to, index2);
			//modify right indexing
			HopRewriteUtils.replaceChildReference(rix, rix.getInput().get(index1-1), from, index1-1);
			HopRewriteUtils.replaceChildReference(rix, rix.getInput().get(index2-1), to, index2-1);
			updateLeftAndRightIndexingSizes(rowIx, lix, rix);

			ret = csb;
			LOG.debug("Applied vectorizeIndexedCopy.");
		}

		return ret;
	}

	/**
	 * Checks whether left and right indexing both select the single row (or single
	 * column) bound to the given iteration variable.
	 *
	 * @param lix     left indexing operation of the candidate assignment
	 * @param rix     right indexing operation of the candidate assignment
	 * @param itervar name of the loop iteration variable
	 * @return boolean pair {apply, rowIx}: applicability flag and row(true)/col(false)
	 */
	private static boolean[] checkLeftAndRightIndexing(LeftIndexingOp lix, IndexingOp rix, String itervar) {
		boolean[] ret = new boolean[2]; //apply, rowIx

		//check for rowwise
		if( lix.isRowLowerEqualsUpper() && rix.isRowLowerEqualsUpper()
			&& lix.getInput().get(2).getName().equals(itervar)
			&& rix.getInput().get(1).getName().equals(itervar) ) {
			ret[0] = true;
			ret[1] = true;
		}
		//check for colwise
		if( lix.isColLowerEqualsUpper() && rix.isColLowerEqualsUpper()
			&& lix.getInput().get(4).getName().equals(itervar)
			&& rix.getInput().get(3).getName().equals(itervar) ) {
			ret[0] = true;
			ret[1] = false;
		}
		return ret;
	}

	/**
	 * Clears the single-row/single-column flags on the given left indexing and all
	 * right indexing operations (the rewritten ops now cover a range) and refreshes
	 * their size information.
	 *
	 * @param rowIx true for a rowwise rewrite, false for columnwise
	 * @param lix   left indexing operation to update
	 * @param rix   right indexing operations to update (zero or more)
	 */
	private static void updateLeftAndRightIndexingSizes(boolean rowIx, LeftIndexingOp lix, IndexingOp... rix) {
		//unset special flags
		if( rowIx ) {
			lix.setRowLowerEqualsUpper(false);
			for( IndexingOp rixi : rix )
				rixi.setRowLowerEqualsUpper(false);
		}
		else {
			lix.setColLowerEqualsUpper(false);
			for( IndexingOp rixi : rix )
				rixi.setColLowerEqualsUpper(false);
		}
		for( IndexingOp rixi : rix )
			rixi.refreshSizeInformation();
		lix.refreshSizeInformation();
	}
}
/*
 * ARX: Powerful Data Anonymization
 * Copyright 2012 - 2015 Florian Kohlmayer, Fabian Prasser
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.deidentifier.arx.metric.v2;

import java.util.List;
import java.util.Map;

import org.deidentifier.arx.metric.InformationLoss;
import org.deidentifier.arx.metric.Metric;
import org.deidentifier.arx.metric.Metric.AggregateFunction;

/**
 * Internal access point to version 2 of all metrics. Users of the API should
 * create metric instances via {@code org.deidentifier.arx.metric.Metric}
 * rather than through this class.
 *
 * @author Fabian Prasser
 */
public class __MetricV2 {

    /**
     * Creates a new instance of the AECS metric.
     *
     * @return the metric
     */
    public static Metric<ILSingleDimensional> createAECSMetric() {
        return new MetricSDAECS();
    }

    /**
     * Creates a new instance of the AECS metric, pre-initialized with a row count.
     *
     * @param rowCount number of rows in the input dataset
     * @return the metric
     */
    public static Metric<ILSingleDimensional> createAECSMetric(double rowCount) {
        return new MetricSDAECS(rowCount);
    }

    /**
     * Creates an instance of the ambiguity metric.
     *
     * @return the metric
     */
    public static Metric<ILSingleDimensional> createAmbiguityMetric() {
        return new MetricSDNMAmbiguity();
    }

    /**
     * Creates an instance of the non-monotonic discernability metric.
     *
     * @return the metric
     */
    public static Metric<ILSingleDimensional> createDiscernabilityMetric() {
        return createDiscernabilityMetric(false);
    }

    /**
     * Creates an instance of the discernability metric. The monotonic variant is DM*.
     *
     * @param monotonic if set to true, the monotonic variant (DM*) will be created
     * @return the metric
     */
    public static Metric<ILSingleDimensional> createDiscernabilityMetric(boolean monotonic) {
        return createDiscernabilityMetric(monotonic, 0);
    }

    /**
     * Creates an instance of the discernability metric, pre-initialized with a
     * tuple count. The monotonic variant is DM*.
     *
     * @param monotonic if set to true, the monotonic variant (DM*) will be created
     * @param numTuples pre-initialization value for the number of tuples
     * @return the metric
     */
    public static Metric<ILSingleDimensional> createDiscernabilityMetric(boolean monotonic, double numTuples) {
        if (monotonic) {
            MetricSDDiscernability metric = new MetricSDDiscernability();
            metric.setNumTuples(numTuples);
            return metric;
        }
        MetricSDNMDiscernability metric = new MetricSDNMDiscernability();
        metric.setNumTuples(numTuples);
        return metric;
    }

    /**
     * Creates an instance of the non-monotonic non-uniform entropy metric, comparing
     * results with the default sum aggregate function. Respects attribute weights
     * defined in the configuration.
     *
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createEntropyMetric() {
        return createEntropyMetric(false, AggregateFunction.SUM);
    }

    /**
     * Creates an instance of the non-uniform entropy metric, comparing results with
     * the default sum aggregate function. Respects attribute weights defined in the
     * configuration.
     *
     * @param monotonic if set to true, the monotonic variant will be created
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createEntropyMetric(boolean monotonic) {
        return createEntropyMetric(monotonic, AggregateFunction.SUM);
    }

    /**
     * Creates an instance of the non-uniform entropy metric. Respects attribute
     * weights defined in the configuration.
     *
     * @param monotonic if set to true, the monotonic variant will be created
     * @param function  the aggregate function used for comparing results
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createEntropyMetric(boolean monotonic, AggregateFunction function) {
        return monotonic ? new MetricMDNUEntropy(function)
                         : new MetricMDNUNMEntropy(function);
    }

    /**
     * Creates a pre-initialized instance of the non-uniform entropy metric, comparing
     * results with the default sum aggregate function. Respects attribute weights
     * defined in the configuration.
     *
     * @param monotonic     if set to true, the monotonic variant will be created
     * @param cache         precomputed entropy cache
     * @param cardinalities precomputed cardinalities
     * @param hierarchies   generalization hierarchies
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createEntropyMetric(boolean monotonic, double[][] cache, int[][][] cardinalities, int[][][] hierarchies) {
        MetricMDNUEntropyPrecomputed metric = (MetricMDNUEntropyPrecomputed) createEntropyMetric(monotonic, AggregateFunction.SUM);
        metric.initialize(cache, cardinalities, hierarchies);
        return metric;
    }

    /**
     * Creates an instance of the height metric, comparing results with the default
     * sum aggregate function. Respects attribute weights defined in the configuration.
     *
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createHeightMetric() {
        return new MetricMDHeight();
    }

    /**
     * Creates an instance of the height metric. Respects attribute weights defined
     * in the configuration.
     *
     * @param function the aggregate function used for comparing results
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createHeightMetric(AggregateFunction function) {
        return new MetricMDHeight(function);
    }

    /**
     * Creates a pre-initialized instance of the height metric, comparing results with
     * the default sum aggregate function. Respects attribute weights defined in the
     * configuration.
     *
     * @param minHeight minimal height over all quasi-identifiers
     * @param maxHeight maximal height over all quasi-identifiers
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createHeightMetric(int minHeight, int maxHeight) {
        MetricMDHeight metric = new MetricMDHeight();
        metric.initialize(minHeight, maxHeight);
        return metric;
    }

    /**
     * Helper method. Normally, there should be no need to call this.
     *
     * @param value the information loss value
     * @return the information loss
     */
    public static InformationLoss<?> createILMultiDimensionalArithmeticMean(double value) {
        return new ILMultiDimensionalArithmeticMean(value);
    }

    /**
     * Helper method. Normally, there should be no need to call this.
     *
     * @param value the information loss value
     * @return the information loss
     */
    public static InformationLoss<?> createILMultiDimensionalSum(double value) {
        return new ILMultiDimensionalSum(value);
    }

    /**
     * Helper method. Normally, there should be no need to call this.
     *
     * @param value the information loss value
     * @return the information loss
     */
    public static InformationLoss<?> createILSingleDimensional(double value) {
        return new ILSingleDimensional(value);
    }

    /**
     * Creates an instance of the loss metric treating generalization and suppression
     * equally, comparing results with the default rank aggregate function. Respects
     * attribute weights defined in the configuration.
     *
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createLossMetric() {
        return new MetricMDNMLoss();
    }

    /**
     * Creates an instance of the loss metric treating generalization and suppression
     * equally. Respects attribute weights defined in the configuration.
     *
     * @param function the aggregate function used for comparing results
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createLossMetric(AggregateFunction function) {
        return new MetricMDNMLoss(function);
    }

    /**
     * Creates an instance of the loss metric with a weighting factor, comparing
     * results with the default rank aggregate function. Respects attribute weights
     * defined in the configuration.
     *
     * @param gsFactor a factor in [0,1] weighting generalization and suppression;
     *                 0.5 (default) treats both equally, 0 favors suppression,
     *                 1 favors generalization
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createLossMetric(double gsFactor) {
        return new MetricMDNMLoss(gsFactor, AggregateFunction.RANK);
    }

    /**
     * Creates an instance of the loss metric with a weighting factor. Respects
     * attribute weights defined in the configuration.
     *
     * @param gsFactor a factor in [0,1] weighting generalization and suppression;
     *                 0.5 (default) treats both equally, 0 favors suppression,
     *                 1 favors generalization
     * @param function the aggregate function used for comparing results
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createLossMetric(double gsFactor, AggregateFunction function) {
        return new MetricMDNMLoss(gsFactor, function);
    }

    /**
     * Creates an instance of the normalized entropy metric, comparing results with
     * the default sum aggregate function. Respects attribute weights defined in the
     * configuration.
     *
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createNormalizedEntropyMetric() {
        return new MetricMDNUNMNormalizedEntropy();
    }

    /**
     * Creates an instance of the normalized entropy metric. Respects attribute
     * weights defined in the configuration.
     *
     * @param function the aggregate function used for comparing results
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createNormalizedEntropyMetric(AggregateFunction function) {
        return new MetricMDNUNMNormalizedEntropy(function);
    }

    /**
     * Creates an instance of the non-monotonic precision metric, comparing results
     * with the default arithmetic-mean aggregate function. Respects attribute weights
     * defined in the configuration.
     *
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createPrecisionMetric() {
        return createPrecisionMetric(false, AggregateFunction.ARITHMETIC_MEAN);
    }

    /**
     * Creates an instance of the non-monotonic precision metric. Respects attribute
     * weights defined in the configuration.
     *
     * @param function the aggregate function used for comparing results
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createPrecisionMetric(AggregateFunction function) {
        return createPrecisionMetric(false, function);
    }

    /**
     * Creates an instance of the precision metric, comparing results with the default
     * arithmetic-mean aggregate function. Respects attribute weights defined in the
     * configuration.
     *
     * @param monotonic if set to true, the monotonic variant will be created
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createPrecisionMetric(boolean monotonic) {
        return createPrecisionMetric(monotonic, AggregateFunction.ARITHMETIC_MEAN);
    }

    /**
     * Creates an instance of the precision metric. Respects attribute weights defined
     * in the configuration.
     *
     * @param monotonic if set to true, the monotonic variant will be created
     * @param function  the aggregate function used for comparing results
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createPrecisionMetric(boolean monotonic, AggregateFunction function) {
        return monotonic ? new MetricMDPrecision(function)
                         : new MetricMDNMPrecision(function);
    }

    /**
     * Creates a pre-initialized instance of the precision metric, comparing results
     * with the default arithmetic-mean aggregate function. Respects attribute weights
     * defined in the configuration.
     *
     * @param monotonic if set to true, the monotonic variant will be created
     * @param heights   hierarchy heights per quasi-identifier
     * @param cells     total number of cells
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createPrecisionMetric(boolean monotonic, int[] heights, double cells) {
        MetricMDNMPrecision metric = (MetricMDNMPrecision) createPrecisionMetric(monotonic, AggregateFunction.ARITHMETIC_MEAN);
        metric.initialize(heights, cells);
        return metric;
    }

    /**
     * Creates a potentially precomputed instance of the non-monotonic non-uniform
     * entropy metric, comparing results with the default sum aggregate function.
     * Respects attribute weights defined in the configuration.
     *
     * @param threshold the precomputed variant will be used if
     *                  #distinctValues / #rows &lt;= threshold for all quasi-identifiers
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createPrecomputedEntropyMetric(double threshold) {
        return createPrecomputedEntropyMetric(threshold, false, AggregateFunction.SUM);
    }

    /**
     * Creates a potentially precomputed instance of the non-uniform entropy metric,
     * comparing results with the default sum aggregate function. Respects attribute
     * weights defined in the configuration.
     *
     * @param threshold the precomputed variant will be used if
     *                  #distinctValues / #rows &lt;= threshold for all quasi-identifiers
     * @param monotonic if set to true, the monotonic variant will be created
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createPrecomputedEntropyMetric(double threshold, boolean monotonic) {
        return createPrecomputedEntropyMetric(threshold, monotonic, AggregateFunction.SUM);
    }

    /**
     * Creates a potentially precomputed instance of the non-uniform entropy metric.
     * Respects attribute weights defined in the configuration.
     *
     * @param threshold the precomputed variant will be used if
     *                  #distinctValues / #rows &lt;= threshold for all quasi-identifiers
     * @param monotonic if set to true, the monotonic variant will be created
     * @param function  the aggregate function used for comparing results
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createPrecomputedEntropyMetric(double threshold, boolean monotonic, AggregateFunction function) {
        return monotonic ? new MetricMDNUEntropyPotentiallyPrecomputed(threshold, function)
                         : new MetricMDNUNMEntropyPotentiallyPrecomputed(threshold, function);
    }

    /**
     * Creates a potentially precomputed instance of the loss metric treating
     * generalization and suppression equally, comparing results with the default rank
     * aggregate function. Respects attribute weights defined in the configuration.
     *
     * @param threshold the precomputed variant will be used if
     *                  #distinctValues / #rows &lt;= threshold for all quasi-identifiers
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createPrecomputedLossMetric(double threshold) {
        return new MetricMDNMLossPotentiallyPrecomputed(threshold);
    }

    /**
     * Creates a potentially precomputed instance of the loss metric treating
     * generalization and suppression equally. Respects attribute weights defined in
     * the configuration.
     *
     * @param threshold the precomputed variant will be used if
     *                  #distinctValues / #rows &lt;= threshold for all quasi-identifiers
     * @param function  the aggregate function used for comparing results
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createPrecomputedLossMetric(double threshold, AggregateFunction function) {
        return new MetricMDNMLossPotentiallyPrecomputed(threshold, function);
    }

    /**
     * Creates a potentially precomputed instance of the loss metric with a weighting
     * factor, comparing results with the default rank aggregate function. Respects
     * attribute weights defined in the configuration.
     *
     * @param threshold the precomputed variant will be used if
     *                  #distinctValues / #rows &lt;= threshold for all quasi-identifiers
     * @param gsFactor  a factor in [0,1] weighting generalization and suppression;
     *                  0.5 (default) treats both equally, 0 favors suppression,
     *                  1 favors generalization
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createPrecomputedLossMetric(double threshold, double gsFactor) {
        return new MetricMDNMLossPotentiallyPrecomputed(threshold, gsFactor, AggregateFunction.RANK);
    }

    /**
     * Creates a potentially precomputed instance of the loss metric with a weighting
     * factor. Respects attribute weights defined in the configuration.
     *
     * @param threshold the precomputed variant will be used if
     *                  #distinctValues / #rows &lt;= threshold for all quasi-identifiers
     * @param gsFactor  a factor in [0,1] weighting generalization and suppression;
     *                  0.5 (default) treats both equally, 0 favors suppression,
     *                  1 favors generalization
     * @param function  the aggregate function used for comparing results
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createPrecomputedLossMetric(double threshold, double gsFactor, AggregateFunction function) {
        return new MetricMDNMLossPotentiallyPrecomputed(threshold, gsFactor, function);
    }

    /**
     * Creates a potentially precomputed instance of the normalized entropy metric,
     * comparing results with the default sum aggregate function. Respects attribute
     * weights defined in the configuration.
     *
     * @param threshold the precomputed variant will be used if
     *                  #distinctValues / #rows &lt;= threshold for all quasi-identifiers
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createPrecomputedNormalizedEntropyMetric(double threshold) {
        return new MetricMDNUNMNormalizedEntropyPotentiallyPrecomputed(threshold);
    }

    /**
     * Creates a potentially precomputed instance of the normalized entropy metric.
     * Respects attribute weights defined in the configuration.
     *
     * @param threshold the precomputed variant will be used if
     *                  #distinctValues / #rows &lt;= threshold for all quasi-identifiers
     * @param function  the aggregate function used for comparing results
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createPrecomputedNormalizedEntropyMetric(double threshold, AggregateFunction function) {
        return new MetricMDNUNMNormalizedEntropyPotentiallyPrecomputed(threshold, function);
    }

    /**
     * Creates an instance of a metric with statically defined information loss,
     * comparing results with the default sum aggregate function. Respects attribute
     * weights defined in the configuration.
     *
     * @param loss user-defined information loss per attribute
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createStaticMetric(Map<String, List<Double>> loss) {
        return new MetricMDStatic(loss);
    }

    /**
     * Creates an instance of a metric with statically defined information loss.
     * Respects attribute weights defined in the configuration.
     *
     * @param loss     user-defined information loss per attribute
     * @param function the aggregate function used for comparing results
     * @return the metric
     */
    public static Metric<AbstractILMultiDimensional> createStaticMetric(Map<String, List<Double>> loss, AggregateFunction function) {
        return new MetricMDStatic(function, loss);
    }
}
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package Controlador;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.JOptionPane;
import vista.frmProducto;

/**
 * Product entity plus JDBC persistence (insert/update/delete/lookup) against the
 * "venta" ODBC data source.
 *
 * NOTE(review): every SQL statement in this class is built by string concatenation
 * of field values - this is vulnerable to SQL injection and breaks on values
 * containing quotes. Should be migrated to PreparedStatement with ? placeholders.
 *
 * @author claudio
 */
public class producto implements interfazInicial{
    // Shared JDBC handles, reused (and silently overwritten) by every operation.
    private Statement stmt= null;
    private Connection conn = null;
    private ResultSet rs = null;
    private String total;
    public int resultado=0;      // row count produced by consultarStock()
    public int stockPublico=1;
    public String codigo2=null;

    public int getCodproducto() {
        return codproducto;
    }

    public void setCodproducto(int codproducto) {
        this.codproducto = codproducto;
    }

    public String getSerieProducto() {
        return serieProducto;
    }

    public void setSerieProducto(String serieProducto) {
        this.serieProducto = serieProducto;
    }

    public String getMarca() {
        return marca;
    }

    public void setMarca(String marca) {
        this.marca = marca;
    }

    public String getTipo() {
        return tipo;
    }

    public void setTipo(String tipo) {
        this.tipo = tipo;
    }

    public int getStockActual() {
        return StockActual;
    }

    public void setStockActual(int StockActual) {
        this.StockActual = StockActual;
    }

    public int getPreciocosto() {
        return preciocosto;
    }

    public void setPreciocosto(int preciocosto) {
        this.preciocosto = preciocosto;
    }

    public int getPrecioventa() {
        return precioventa;
    }

    public void setPrecioventa(int precioventa) {
        this.precioventa = precioventa;
    }

    // NOTE(review): this getter ignores the stored fechaactualizacion field and
    // always returns "now" formatted as dd-MM-yyyy - confirm this is intentional.
    public String getFechaactualizacion() {
        Date ahora = new Date();
        SimpleDateFormat formateador = new SimpleDateFormat("dd-MM-yyyy");
        return formateador.format(ahora);
    }

    public void setFechaactualizacion(String fechaactualizacion) {
        this.fechaactualizacion = fechaactualizacion;
    }

    public int getIdProveedor() {
        return idProveedor;
    }

    public void setIdProveedor(int idProveedor) {
        this.idProveedor = idProveedor;
    }

    /**
     * Full constructor initializing every persisted field.
     */
    public producto(int codproducto, String serieProducto, String marca, String tipo, int StockActual, int preciocosto, int precioventa, String fechaactualizacion, int idProveedor) {
        this.codproducto = codproducto;
        this.serieProducto = serieProducto;
        this.marca = marca;
        this.tipo = tipo;
        this.StockActual = StockActual;
        this.preciocosto = preciocosto;
        this.precioventa = precioventa;
        this.fechaactualizacion = fechaactualizacion;
        this.idProveedor = idProveedor;
    }

    // Persisted entity fields (columns of table producto).
    private int codproducto;
    private String serieProducto;
    private String marca;
    private String tipo;
    private int StockActual;
    private int preciocosto;
    private int precioventa;
    private String fechaactualizacion;
    private int idProveedor;

    /**
     * Inserts the current field values as a new row in table producto.
     * Always returns true, even when the connection or the insert failed
     * (errors are only printed / shown in a dialog).
     */
    @Override
    public boolean insertar() {
        int exito=0,contador=0;
        String conexion = "jdbc:odbc:venta";
        try {
            conn= DriverManager.getConnection(conexion, "root","1234");
        } catch (Exception e) { JOptionPane.showMessageDialog(null,"no conecto");}
        // NOTE(review): SQL built via concatenation - injection risk.
        String consultaInsertar = "insert into producto(codproducto,serieProducto,marca,tipo," +
            "StockActual,preciocosto,precioventa,fechaactualizacion,idProveedor) " +
            "values ("+ this.codproducto+",'"+ this.serieProducto+"','"+ this.marca+"','"+ this.tipo+"',"+ this.StockActual+","+ this.preciocosto+","+ this.precioventa+",'"+ this.fechaactualizacion+"',"+ this.idProveedor+")";
        System.out.println(consultaInsertar);
        try{
            stmt = conn.createStatement();
            exito = stmt.executeUpdate(consultaInsertar);
            JOptionPane.showMessageDialog(null,"INSERTADO CORRECTAMENTE");
        }
        catch (Exception e ){ System.out.println("error en la insercion"+ e.getMessage()); }
        return true;
    }

    /**
     * Updates table Producto with the current field values.
     * NOTE(review): this UPDATE has no WHERE clause, so it modifies EVERY row in
     * the table - almost certainly a bug; compare actualizarProducto(int) below,
     * which does filter by CODPRODUCTO. Errors are swallowed and printed only.
     */
    @Override
    public boolean actualizar() {
        conectar();
        String sentenciaActualizacion = "update Producto " +
            "set CODPRODUCTO="+this.codproducto+","+
            "SERIEPRODUCTO='"+ this.serieProducto+"',"+
            " MARCA='"+ this.marca+"',"+
            " TIPO='"+ this.tipo+"',"+
            " STOCKACTUAL="+ this.StockActual+","+
            " PRECIOCOSTO="+ this.preciocosto+","+
            " PRECIOVENTA="+ this.precioventa+","+
            " FECHAACTUALIZACION='"+ this.fechaactualizacion+"',"+
            " IDPROVEEDOR="+ this.idProveedor+"";
        System.out.println(sentenciaActualizacion);
        try{
            stmt=conn.createStatement();
            stmt.executeUpdate(sentenciaActualizacion);
        }
        catch(Exception e ){System.out.println("error en la actualizacion");}
        return true;
    }

    /**
     * Updates the row identified by codProducto with the current field values.
     * Always returns true; connection/SQL errors are swallowed and printed only.
     *
     * @param codProducto primary key (CODPRODUCTO) of the row to update
     */
    public boolean actualizarProducto(int codProducto) {
        String conexion = "jdbc:odbc:venta";
        try {
            conn= DriverManager.getConnection(conexion, "root","1234");
        } catch (Exception e) { JOptionPane.showMessageDialog(null,"no conecto");}
        String sentenciaActualizacion = "update Producto " +
            "set CODPRODUCTO="+this.codproducto+","+
            "SERIEPRODUCTO='"+ this.serieProducto+"',"+
            " MARCA='"+ this.marca+"',"+
            " TIPO='"+ this.tipo+"',"+
            " STOCKACTUAL="+ this.StockActual+","+
            " PRECIOCOSTO="+ this.preciocosto+","+
            " PRECIOVENTA="+ this.precioventa+","+
            " FECHAACTUALIZACION='"+ this.fechaactualizacion+"',"+
            " IDPROVEEDOR="+ this.idProveedor+""+
            " where CODPRODUCTO="+codProducto;
        // JOptionPane.showMessageDialog(null,codProducto );
        System.out.println(sentenciaActualizacion);
        try{
            stmt=conn.createStatement();
            stmt.executeUpdate(sentenciaActualizacion);
        }
        catch(Exception e ){System.out.println("error en la actualizacion");}
        return true;
    }

    /**
     * Prompts the user (via dialog) for a product code and deletes that row.
     * Always returns true; parse, connection and SQL errors are swallowed.
     */
    @Override
    public boolean eliminar() {
        int exito=0;
        String sentenciaborrado;
        int codigo=0;
        /* String conexion = "jdbc:odbc:venta";
        try {
            conn= DriverManager.getConnection(conexion, "root","1234");
        } catch (Exception e) { JOptionPane.showMessageDialog(null,"no conecto");}*/
        conectar();
        try {codigo = Integer.parseInt(JOptionPane.showInputDialog("Ingrese Codigo a eliminar"));}
        catch(Exception e){System.out.println("codigo debe ser numerico");}
        sentenciaborrado = "delete from PRODUCTO where CODPRODUCTO = " +codigo;
        System.out.println(sentenciaborrado);
        try{
            stmt=conn.createStatement();
            stmt.executeUpdate(sentenciaborrado);
            JOptionPane.showMessageDialog(null, "PRODUCTO ELIMINADO CORRECTAMENTE");
        }
        catch (Exception e) { System.out.println("error en la eliminiacion"); }
        return true;
    }

    /**
     * Opens a connection to the "venta" ODBC data source and stores it in conn.
     * Always returns true; on failure conn stays null and a dialog is shown.
     * Credentials are hard-coded (root/1234).
     */
    @Override
    public boolean conectar() {
        String conexion = "jdbc:odbc:venta";
        try {
            conn= DriverManager.getConnection(conexion, "root","1234");
            System.out.println("CONECTADO EXITOSAMENTE");
        } catch (Exception e) { JOptionPane.showMessageDialog(null,"no conecto");}
        return true;
    }

    /** No-op; exists only to satisfy interfazInicial. */
    @Override
    public boolean consultar() {
        return true;
    }

    /**
     * Intended to check whether a product serial exists.
     * NOTE(review): this is broken as written - it calls executeUpdate on a SELECT
     * (which throws for most drivers) and then iterates rs, which was never
     * assigned from this query, so rs is null or stale; rs.next() can NPE or read
     * a previous query's results. Needs executeQuery + rs assignment.
     *
     * @param codigoValidacion serial number (serieProducto) to look for
     */
    public boolean validarProducto(String codigoValidacion) throws SQLException{
        int exito=0,contador=0;
        String conexion = "jdbc:odbc:venta";
        try {
            conn= DriverManager.getConnection(conexion, "root","1234");
        } catch (Exception e) { JOptionPane.showMessageDialog(null,"no conecto");}
        // String consultaInsertar = "insert into usuario(id,usuario,clave,nombre,apellido) values ('"+
        String consultaValidar = "select serieProducto from producto";
        System.out.println(consultaValidar);
        JOptionPane.showMessageDialog(null,consultaValidar );
        try{
            stmt = conn.createStatement();
            exito = stmt.executeUpdate(consultaValidar);
            JOptionPane.showMessageDialog(null,"VALIDADO");
        }
        catch (Exception e ){ System.out.println("error en la consulta"+ e.getMessage()); }
        while(rs.next()){
            if(codigoValidacion.equals(rs.getString("serieProducto"))) {
                System.out.println("existe");
            }
            else{
                System.out.println("no existe");
            }
        }
        return true;
    }

    /** Unimplemented interface member. */
    @Override
    public boolean consultar2(int codigo) {
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }

    /**
     * Loads the product identified by its barcode/serial into this object's fields.
     * NOTE(review): codigoBarra is concatenated WITHOUT quotes into the WHERE
     * clause; for a non-numeric serial the query is invalid SQL - confirm whether
     * serieproducto is a numeric column.
     *
     * @param codigoBarra serial/barcode to search for
     * @return true if a row was found and the fields were populated, false otherwise
     */
    public boolean Buscar(String codigoBarra) {
        String conexion="jdbc:odbc:venta";
        try {
            conn= DriverManager.getConnection(conexion, "root","1234");
        } catch (Exception e) { JOptionPane.showMessageDialog(null,"no conecto");}
        //conectar();
        String consulta = "select codproducto,serieProducto,marca,tipo,StockActual," +
            "preciocosto,precioventa,fechaactualizacion,idProveedor " +
            "from producto where serieproducto="+codigoBarra;
        // nombre='"+nombre+"'";
        // System.out.println(consulta);
        try {stmt = conn.createStatement();
            rs = stmt.executeQuery(consulta);
        }
        catch (Exception e) {System.out.println("error en la consulta: " + codigoBarra);}
        try {if (rs.next()) {
            // if(thi)
            this.codproducto = rs.getInt("codproducto");
            this.serieProducto = codigoBarra;
            this.marca = rs.getString("marca");
            this.tipo = rs.getString("tipo");
            this.StockActual = rs.getInt("StockActual");
            this.preciocosto = rs.getInt("preciocosto");
            this.precioventa = rs.getInt("precioventa");
            this.fechaactualizacion = rs.getString("fechaactualizacion");
            this.idProveedor=rs.getInt("idProveedor");
            // rs.close();
            // stmt.close();
            return true;}
        else
            return false;}
        catch(Exception e) {return false;}
    }

    // Default constructor: delegates to the full constructor with zero/empty values.
    public producto(){
        this(0,"","","",0,0,0,"",0);
    }

    /**
     * Counts rows whose serieProducto equals the given code and stores the count in
     * the public field resultado. Always returns true; query errors are printed only.
     *
     * @param codigo serial number to count occurrences of
     */
    public boolean consultarStock(String codigo) throws SQLException {
        /*String conexion="jdbc:odbc:venta";
        try {
            conn= DriverManager.getConnection(conexion, "root","1234");
        } catch (Exception e) { JOptionPane.showMessageDialog(null,"no conecto");}*/
        conectar();
        int valor=0;
        String consultaValor="select count(*)as Total from producto where serieProducto='"+codigo+"'";
        // String consultaValor="select count(*)as Total from producto";
        // System.out.println(consultaValor);
        try {stmt = conn.createStatement();
            rs = stmt.executeQuery(consultaValor);
        }
        catch (Exception e) {System.out.println("error en la consulta: " + codigo);}
        while (rs.next()) {
            resultado = Integer.parseInt(rs.getString("total"));
            System.out.println(resultado);
        }
        return true;}

    public boolean
sumar(String codigoBarra,int cantidad) throws SQLException{ conectar(); int valor=0; String consultaValor="select sum(precioventa)as Total from producto where serieProducto='"+codigoBarra+"'"; // String consultaValor="select count(*)as Total from producto"; // System.out.println(consultaValor); try {stmt = conn.createStatement(); rs = stmt.executeQuery(consultaValor); } catch (Exception e) {System.out.println("error en la consulta: " + codigoBarra);} int TotalProducto=0; int Total=0; while (rs.next()) { TotalProducto= rs.getInt("Total"); System.out.println(TotalProducto); Total=cantidad*TotalProducto; System.out.println(TotalProducto); } return true; } }
/**
 * Copyright (c) 2015 Source Auditor Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.spdx.compare;

import java.util.Arrays;
import java.util.Comparator;

import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.CellStyle;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.spdx.rdfparser.InvalidSPDXAnalysisException;
import org.spdx.rdfparser.model.Annotation;
import org.spdx.spdxspreadsheet.AbstractSheet;

/**
 * Sheet for document level annotations
 * @author Gary O'Neall
 *
 */
public class DocumentAnnotationSheet extends AbstractSheet {

	/**
	 * Orders annotations by annotator, then annotation type, then comment,
	 * with nulls sorting first.
	 */
	private static class AnnotationComparator implements Comparator<Annotation> {

		/* (non-Javadoc)
		 * @see java.util.Comparator#compare(java.lang.Object, java.lang.Object)
		 */
		@Override
		public int compare(Annotation o1, Annotation o2) {
			// FIX: the previous implementation returned -1 when BOTH arguments
			// were null, violating the Comparator contract (compare(x, x) must
			// be 0). Two nulls now compare equal.
			if (o1 == null) {
				return (o2 == null) ? 0 : -1;
			}
			if (o2 == null) {
				return 1;
			}
			// NOTE(review): assumes annotator, type and comment are non-null on
			// non-null annotations -- a null property here would NPE; confirm
			// against the Annotation model's guarantees.
			int retval = o1.getAnnotator().compareTo(o2.getAnnotator());
			if (retval != 0) {
				return retval;
			}
			retval = o1.getAnnotationType().toString().compareTo(o2.getAnnotationType().toString());
			if (retval != 0) {
				return retval;
			}
			return o1.getComment().compareTo(o2.getComment());
		}
	}

	AnnotationComparator annotationComparator = new AnnotationComparator();

	// Fixed columns, followed by one date column per compared document.
	static final int ANNOTATOR_COL = 0;
	static final int ANNOTATOR_COL_WIDTH = 40;
	static final String ANNOTATOR_COL_TEXT_TITLE = "Annotator";
	static final int TYPE_COL = 1;
	static final int TYPE_COL_WIDTH = 15;
	static final String TYPE_COL_TEXT_TITLE = "Type";
	static final int COMMENT_COL = 2;
	static final int COMMENT_COL_WIDTH = 70;
	static final String COMMENT_COL_TEXT_TITLE = "Comment";
	static final int FIRST_DATE_COL = 3;
	static final int DATE_COL_WIDTH = 25;

	public DocumentAnnotationSheet(Workbook workbook, String sheetName) {
		super(workbook, sheetName);
	}

	/**
	 * Creates (or re-creates) the sheet with the fixed header columns plus
	 * styled, empty date-column headers.
	 * @param wb
	 * @param sheetName
	 */
	public static void create(Workbook wb, String sheetName) {
		int sheetNum = wb.getSheetIndex(sheetName);
		if (sheetNum >= 0) {
			// Replace any pre-existing sheet of the same name.
			wb.removeSheetAt(sheetNum);
		}
		Sheet sheet = wb.createSheet(sheetName);
		CellStyle headerStyle = AbstractSheet.createHeaderStyle(wb);
		CellStyle defaultStyle = AbstractSheet.createLeftWrapStyle(wb);
		Row row = sheet.createRow(0);
		sheet.setColumnWidth(ANNOTATOR_COL, ANNOTATOR_COL_WIDTH*256);
		sheet.setDefaultColumnStyle(ANNOTATOR_COL, defaultStyle);
		Cell annotatorHeaderCell = row.createCell(ANNOTATOR_COL);
		annotatorHeaderCell.setCellStyle(headerStyle);
		annotatorHeaderCell.setCellValue(ANNOTATOR_COL_TEXT_TITLE);
		sheet.setColumnWidth(TYPE_COL, TYPE_COL_WIDTH*256);
		sheet.setDefaultColumnStyle(TYPE_COL, defaultStyle);
		Cell typeHeaderCell = row.createCell(TYPE_COL);
		typeHeaderCell.setCellStyle(headerStyle);
		typeHeaderCell.setCellValue(TYPE_COL_TEXT_TITLE);
		sheet.setColumnWidth(COMMENT_COL, COMMENT_COL_WIDTH*256);
		sheet.setDefaultColumnStyle(COMMENT_COL, defaultStyle);
		Cell commentHeaderCell = row.createCell(COMMENT_COL);
		commentHeaderCell.setCellStyle(headerStyle);
		commentHeaderCell.setCellValue(COMMENT_COL_TEXT_TITLE);
		// Document titles are filled in later by importCompareResults.
		// NOTE(review): the upper bound is MAX_DOCUMENTS, not
		// FIRST_DATE_COL + MAX_DOCUMENTS, so only MAX_DOCUMENTS - FIRST_DATE_COL
		// date columns are styled -- confirm whether that is intentional.
		for (int i = FIRST_DATE_COL; i < MultiDocumentSpreadsheet.MAX_DOCUMENTS; i++) {
			sheet.setColumnWidth(i, DATE_COL_WIDTH*256);
			sheet.setDefaultColumnStyle(i, defaultStyle);
			Cell cell = row.createCell(i);
			cell.setCellStyle(headerStyle);
		}
	}

	/**
	 * Fills the sheet with a merged, sorted view of all documents' annotations:
	 * one row per distinct (annotator, type, comment) triple, with each
	 * document's annotation date in its own column when that document contains
	 * the annotation.
	 * @param comparer source of the SPDX documents being compared
	 * @param docNames one display name per compared document
	 * @throws SpdxCompareException if docNames does not match the document count
	 * @throws InvalidSPDXAnalysisException
	 */
	public void importCompareResults(SpdxComparer comparer, String[] docNames) throws SpdxCompareException, InvalidSPDXAnalysisException {
		if (comparer.getNumSpdxDocs() != docNames.length) {
			throw(new SpdxCompareException("Number of document names does not match the number of SPDX documents"));
		}
		this.clear();
		Row header = sheet.getRow(0);
		// Per-document cursor into that document's sorted annotation array.
		int[] annotationIndexes = new int[comparer.getNumSpdxDocs()];
		Annotation[][] annotations = new Annotation[comparer.getNumSpdxDocs()][];
		for (int i = 0; i < annotations.length; i++) {
			Cell headerCell = header.getCell(FIRST_DATE_COL+i);
			headerCell.setCellValue(docNames[i]);
			Annotation[] docAnnotations = comparer.getSpdxDoc(i).getAnnotations();
			Arrays.sort(docAnnotations, annotationComparator);
			annotations[i] = docAnnotations;
			annotationIndexes[i] = 0;
		}
		// K-way merge: repeatedly emit the smallest remaining annotation and
		// advance every document cursor that matches it.
		while (!allAnnotationsExhausted(annotations, annotationIndexes)) {
			Row currentRow = this.addRow();
			Annotation nextAnnotation = getNextAnnotation(annotations, annotationIndexes);
			Cell annotatorCell = currentRow.createCell(ANNOTATOR_COL);
			annotatorCell.setCellValue(nextAnnotation.getAnnotator());
			Cell typeCell = currentRow.createCell(TYPE_COL);
			typeCell.setCellValue(nextAnnotation.getAnnotationType().getTag());
			Cell commentCell = currentRow.createCell(COMMENT_COL);
			commentCell.setCellValue(nextAnnotation.getComment());
			for (int i = 0; i < annotations.length; i++) {
				if (annotations[i].length > annotationIndexes[i]) {
					Annotation compareAnnotation = annotations[i][annotationIndexes[i]];
					if (annotationComparator.compare(nextAnnotation, compareAnnotation) == 0) {
						Cell dateCell = currentRow.createCell(FIRST_DATE_COL+i);
						dateCell.setCellValue(annotations[i][annotationIndexes[i]].getAnnotationDate());
						annotationIndexes[i]++;
					}
				}
			}
		}
	}

	/**
	 * Returns the smallest not-yet-consumed annotation across all documents
	 * (null only when every cursor is exhausted).
	 * @param annotations per-document sorted annotation arrays
	 * @param annotationIndexes per-document cursors
	 * @return smallest remaining annotation per annotationComparator
	 */
	private Annotation getNextAnnotation(Annotation[][] annotations, int[] annotationIndexes) {
		Annotation retval = null;
		for (int i = 0; i < annotations.length; i++) {
			if (annotations[i].length > annotationIndexes[i]) {
				Annotation candidate = annotations[i][annotationIndexes[i]];
				if (retval == null || this.annotationComparator.compare(retval, candidate) > 0) {
					retval = candidate;
				}
			}
		}
		return retval;
	}

	/**
	 * @param annotations per-document sorted annotation arrays
	 * @param annotationIndexes per-document cursors
	 * @return true once every document's cursor has passed its last annotation
	 */
	private boolean allAnnotationsExhausted(Annotation[][] annotations, int[] annotationIndexes) {
		for (int i = 0; i < annotations.length; i++) {
			if (annotationIndexes[i] < annotations[i].length) {
				return false;
			}
		}
		return true;
	}

	/* (non-Javadoc)
	 * @see org.spdx.spdxspreadsheet.AbstractSheet#verify()
	 */
	@Override
	public String verify() {
		return null;	// Nothing to verify
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.chukwa.rest.actions;

import java.util.*;
import org.apache.commons.logging.*;
import org.apache.commons.lang.time.*;

import java.lang.reflect.*;
import net.sf.json.*;
import net.sf.json.processors.JsonValueProcessor;

/**
 * Reflection-based serialization helpers shared by the REST action classes:
 * converts arbitrary beans (or collections of beans) to JSON, XML, or CSV.
 */
public class RestController {
  protected static Log log = LogFactory.getLog(RestController.class);

  /**
   * Serializes a single bean to an XML {@code <item>} element, one child
   * element per field that has a matching public getter.
   *
   * BUG FIX: the old code cast any {@code java.util.Date} to
   * {@code java.sql.Timestamp}, throwing ClassCastException for plain Date
   * getters; since Timestamp extends Date, {@code Date.getTime()} covers both.
   *
   * NOTE(review): field values are not XML-escaped, so values containing
   * {@code < > &} produce malformed XML -- confirm whether inputs are trusted.
   */
  private static String convertObjectToXml(Object obj) {
    StringBuilder s = new StringBuilder();
    s.append("<item>");
    try {
      Class<?> cls = obj.getClass();
      Field[] fieldlist = cls.getDeclaredFields();
      for (int i = 0; i < fieldlist.length; i++) {
        Field fld = fieldlist[i];
        String fldName = fld.getName();
        String functionName = "get"
            + fldName.substring(0, 1).toUpperCase() + fldName.substring(1);
        String value = "";
        Object oret = null;
        try {
          Method meth = cls.getMethod(functionName);
          if (meth == null) {
            continue;
          }
          oret = meth.invoke(obj);
        } catch (Exception e) {
          // Field without a public getter -- skip it.
          continue;
        }
        if (oret == null) {
          value = "";
        } else if (oret instanceof Date) {
          // Covers java.sql.Timestamp too (it extends java.util.Date).
          long time = ((Date) oret).getTime();
          value = DateFormatUtils.format(time, "yyyy-MM-dd HH:mm:ss");
        } else {
          value = oret.toString();
        }
        s.append("<" + fldName + ">" + value + "</" + fldName + ">");
      }
      s.append("\n");
    } catch (Throwable e) {
      System.err.println(e);
    }
    s.append("</item>");
    return s.toString();
  }

  /**
   * Builds the CSV header line: the quoted, comma-separated names (without the
   * "get" prefix) of the bean's getters.
   *
   * NOTE(review): relies on getDeclaredMethods() ordering, which the JVM does
   * not guarantee -- header/value alignment with getObjectValues holds only
   * because both iterate the same reflected list; confirm.
   */
  private static String getObjectFields(Object obj) {
    StringBuilder s = new StringBuilder();
    try {
      Class<?> cls = obj.getClass();
      Method[] methlist = cls.getDeclaredMethods();
      int count = 0;
      for (int i = 0; i < methlist.length; i++) {
        Method m = methlist[i];
        if (m.getName().startsWith("get")) {
          String name = m.getName().substring(3);
          if (count != 0) {
            s.append(",");
          }
          count += 1;
          s.append("\"" + name + "\"");
        }
      }
      s.append("\n");
    } catch (Throwable e) {
      System.err.println(e);
    }
    return s.toString();
  }

  /**
   * Builds one CSV data line: the quoted, comma-separated results of invoking
   * each getter, with Date/Timestamp values formatted as
   * {@code yyyy-MM-dd HH:mm:ss}.
   */
  private static String getObjectValues(Object obj) {
    StringBuilder s = new StringBuilder();
    try {
      Class<?> cls = obj.getClass();
      Method[] methlist = cls.getDeclaredMethods();
      int count = 0;
      for (int i = 0; i < methlist.length; i++) {
        Method m = methlist[i];
        if (m.getName().startsWith("get")) {
          String name = m.getName();
          Object oret = null;
          try {
            Method meth = cls.getMethod(name);
            if (meth == null) {
              continue;
            }
            oret = meth.invoke(obj);
          } catch (Exception e) {
            continue;
          }
          if (count != 0) {
            s.append(",");
          }
          count += 1;
          if (oret == null) {
            s.append("\"\"");
          } else if (oret instanceof Date) {
            // Covers java.sql.Timestamp too (it extends java.util.Date).
            long time = ((Date) oret).getTime();
            String date = DateFormatUtils.format(time, "yyyy-MM-dd HH:mm:ss");
            s.append("\"" + date + "\"");
          } else {
            s.append("\"" + oret.toString() + "\"");
          }
        }
      }
      s.append("\n");
    } catch (Throwable e) {
      System.err.println(e);
    }
    return s.toString();
  }

  /**
   * Serializes a bean or collection of beans to JSON, formatting
   * Date/Timestamp values through {@link JsonDateValueProcessor}.
   * @return JSON text, or "" when {@code obj} is null
   */
  protected static String convertToJson(Object obj) {
    String str = "";
    JsonConfig config = new JsonConfig();
    config.registerJsonValueProcessor(Date.class, new JsonDateValueProcessor());
    config.registerJsonValueProcessor(java.sql.Timestamp.class, new JsonDateValueProcessor());
    if (obj != null) {
      if (isArray(obj)) {
        JSONArray jsonArray = JSONArray.fromObject(obj, config);
        str = jsonArray.toString();
      } else {
        JSONObject jsonObject = JSONObject.fromObject(obj, config);
        str = jsonObject.toString();
      }
    }
    return str;
  }

  /**
   * Serializes a bean or collection of beans to an XML {@code <items>} document.
   * NOTE(review): isArray() also accepts true Java arrays, but this cast to
   * Collection would fail for them -- callers appear to pass Collections only.
   */
  protected static String convertToXml(Object obj) {
    StringBuilder s = new StringBuilder();
    s.append("<items>");
    if (obj != null) {
      if (isArray(obj)) {
        Iterator iterator = ((Collection) obj).iterator();
        while (iterator.hasNext()) {
          Object element = iterator.next();
          s.append(convertObjectToXml(element));
        }
      } else {
        s.append(convertObjectToXml(obj));
      }
    }
    s.append("</items>");
    return s.toString();
  }

  /**
   * Serializes a bean or collection of beans to CSV: one header line derived
   * from the first element's getters, then one line per element.
   */
  protected static String convertToCsv(Object obj) {
    StringBuilder str = new StringBuilder();
    if (obj != null) {
      if (isArray(obj)) {
        boolean first = true;
        Iterator iterator = ((Collection) obj).iterator();
        while (iterator.hasNext()) {
          Object element = iterator.next();
          if (first) {
            first = false;
            str.append(getObjectFields(element));
          }
          str.append(getObjectValues(element));
        }
      } else {
        str.append(getObjectFields(obj));
        str.append(getObjectValues(obj));
      }
    }
    return str.toString();
  }

  /** @return true when {@code obj} is a Collection or a Java array. */
  private static boolean isArray(Object obj) {
    return obj instanceof Collection || obj.getClass().isArray();
  }
}
/*
 * Copyright 2012-2021 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.web.embedded.undertow;

import java.io.File;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

import io.undertow.Handlers;
import io.undertow.Undertow;
import io.undertow.Undertow.Builder;
import io.undertow.UndertowOptions;

import org.springframework.boot.web.server.AbstractConfigurableWebServerFactory;
import org.springframework.boot.web.server.Compression;
import org.springframework.boot.web.server.Http2;
import org.springframework.boot.web.server.Shutdown;
import org.springframework.boot.web.server.Ssl;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;

/**
 * Delegate class used by {@link UndertowServletWebServerFactory} and
 * {@link UndertowReactiveWebServerFactory}.
 *
 * @author Phillip Webb
 * @author Andy Wilkinson
 */
class UndertowWebServerFactoryDelegate {

	// LinkedHashSet: customizers are applied in insertion order, deduplicated.
	private Set<UndertowBuilderCustomizer> builderCustomizers = new LinkedHashSet<>();

	// Nullable Undertow tuning knobs; null means "leave Undertow's default".
	private Integer bufferSize;

	private Integer ioThreads;

	private Integer workerThreads;

	private Boolean directBuffers;

	// Access-log configuration, consumed by AccessLogHttpHandlerFactory below.
	private File accessLogDirectory;

	private String accessLogPattern;

	private String accessLogPrefix;

	private String accessLogSuffix;

	private boolean accessLogEnabled = false;

	private boolean accessLogRotate = true;

	private boolean useForwardHeaders;

	// Replaces any previously registered customizers.
	void setBuilderCustomizers(Collection<? extends UndertowBuilderCustomizer> customizers) {
		Assert.notNull(customizers, "Customizers must not be null");
		this.builderCustomizers = new LinkedHashSet<>(customizers);
	}

	// Appends to the existing customizers, preserving registration order.
	void addBuilderCustomizers(UndertowBuilderCustomizer... customizers) {
		Assert.notNull(customizers, "Customizers must not be null");
		this.builderCustomizers.addAll(Arrays.asList(customizers));
	}

	Collection<UndertowBuilderCustomizer> getBuilderCustomizers() {
		return this.builderCustomizers;
	}

	void setBufferSize(Integer bufferSize) {
		this.bufferSize = bufferSize;
	}

	void setIoThreads(Integer ioThreads) {
		this.ioThreads = ioThreads;
	}

	void setWorkerThreads(Integer workerThreads) {
		this.workerThreads = workerThreads;
	}

	void setUseDirectBuffers(Boolean directBuffers) {
		this.directBuffers = directBuffers;
	}

	void setAccessLogDirectory(File accessLogDirectory) {
		this.accessLogDirectory = accessLogDirectory;
	}

	void setAccessLogPattern(String accessLogPattern) {
		this.accessLogPattern = accessLogPattern;
	}

	void setAccessLogPrefix(String accessLogPrefix) {
		this.accessLogPrefix = accessLogPrefix;
	}

	String getAccessLogPrefix() {
		return this.accessLogPrefix;
	}

	void setAccessLogSuffix(String accessLogSuffix) {
		this.accessLogSuffix = accessLogSuffix;
	}

	void setAccessLogEnabled(boolean accessLogEnabled) {
		this.accessLogEnabled = accessLogEnabled;
	}

	boolean isAccessLogEnabled() {
		return this.accessLogEnabled;
	}

	void setAccessLogRotate(boolean accessLogRotate) {
		this.accessLogRotate = accessLogRotate;
	}

	void setUseForwardHeaders(boolean useForwardHeaders) {
		this.useForwardHeaders = useForwardHeaders;
	}

	boolean isUseForwardHeaders() {
		return this.useForwardHeaders;
	}

	/**
	 * Builds the Undertow {@link Builder} from the factory's settings and this
	 * delegate's tuning knobs. Order matters: sizing options first, then HTTP/2,
	 * then either an SSL listener (via SslBuilderCustomizer) or a plain HTTP
	 * listener, then SHUTDOWN_TIMEOUT, and the user customizers last so they
	 * can override anything configured before them.
	 */
	Builder createBuilder(AbstractConfigurableWebServerFactory factory) {
		Ssl ssl = factory.getSsl();
		InetAddress address = factory.getAddress();
		int port = factory.getPort();
		Builder builder = Undertow.builder();
		if (this.bufferSize != null) {
			builder.setBufferSize(this.bufferSize);
		}
		if (this.ioThreads != null) {
			builder.setIoThreads(this.ioThreads);
		}
		if (this.workerThreads != null) {
			builder.setWorkerThreads(this.workerThreads);
		}
		if (this.directBuffers != null) {
			builder.setDirectBuffers(this.directBuffers);
		}
		Http2 http2 = factory.getHttp2();
		if (http2 != null) {
			builder.setServerOption(UndertowOptions.ENABLE_HTTP2, http2.isEnabled());
		}
		if (ssl != null && ssl.isEnabled()) {
			new SslBuilderCustomizer(factory.getPort(), address, ssl, factory.getSslStoreProvider()).customize(builder);
		}
		else {
			// No bind address configured means listen on all interfaces.
			builder.addHttpListener(port, (address != null) ? address.getHostAddress() : "0.0.0.0");
		}
		builder.setServerOption(UndertowOptions.SHUTDOWN_TIMEOUT, 0);
		for (UndertowBuilderCustomizer customizer : this.builderCustomizers) {
			customizer.customize(builder);
		}
		return builder;
	}

	/**
	 * Builds the handler-factory chain from the web-server factory's settings,
	 * appending the access-log factory when access logging is enabled.
	 */
	List<HttpHandlerFactory> createHttpHandlerFactories(AbstractConfigurableWebServerFactory webServerFactory,
			HttpHandlerFactory... initialHttpHandlerFactories) {
		List<HttpHandlerFactory> factories = createHttpHandlerFactories(webServerFactory.getCompression(),
				this.useForwardHeaders, webServerFactory.getServerHeader(), webServerFactory.getShutdown(),
				initialHttpHandlerFactories);
		if (isAccessLogEnabled()) {
			factories.add(new AccessLogHttpHandlerFactory(this.accessLogDirectory, this.accessLogPattern,
					this.accessLogPrefix, this.accessLogSuffix, this.accessLogRotate));
		}
		return factories;
	}

	/**
	 * Assembles the ordered handler-factory list: the initial factories, then
	 * (each only when applicable) compression, proxy-peer-address for forwarded
	 * headers, a Server response header, and graceful shutdown.
	 */
	static List<HttpHandlerFactory> createHttpHandlerFactories(Compression compression, boolean useForwardHeaders,
			String serverHeader, Shutdown shutdown, HttpHandlerFactory... initialHttpHandlerFactories) {
		List<HttpHandlerFactory> factories = new ArrayList<>(Arrays.asList(initialHttpHandlerFactories));
		if (compression != null && compression.getEnabled()) {
			factories.add(new CompressionHttpHandlerFactory(compression));
		}
		if (useForwardHeaders) {
			factories.add(Handlers::proxyPeerAddress);
		}
		if (StringUtils.hasText(serverHeader)) {
			factories.add((next) -> Handlers.header(next, "Server", serverHeader));
		}
		if (shutdown == Shutdown.GRACEFUL) {
			factories.add(Handlers::gracefulShutdown);
		}
		return factories;
	}

}
/* * This file is part of Sponge, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package org.spongepowered.common.registry.provider; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import com.google.common.base.CaseFormat; import net.minecraft.block.*; import net.minecraft.block.properties.IProperty; import org.apache.logging.log4j.LogManager; import org.spongepowered.common.registry.TypeProvider; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.util.HashMap; import java.util.IdentityHashMap; import java.util.Locale; import java.util.Optional; public class BlockPropertyIdProvider implements TypeProvider<IProperty<?>, String> { private final IdentityHashMap<IProperty<?>, String> propertyIdMap = new IdentityHashMap<>(); private final HashMap<String, IProperty<?>> idPropertyMap = new HashMap<>(); public static BlockPropertyIdProvider getInstance() { return Holder.INSTANCE; } @Override public Optional<String> get(IProperty<?> key) { return Optional.ofNullable(this.propertyIdMap.get(checkNotNull(key, "Property cannot be null!"))); } @Override public Optional<IProperty<?>> getKey(String value) { return Optional.ofNullable(this.idPropertyMap.get(checkNotNull(value, "Id cannot be null!").toLowerCase(Locale.ENGLISH))); } private boolean isRegistered(IProperty<?> property) { return this.propertyIdMap.containsKey(property); } public static String getIdAndTryRegistration(IProperty<?> property, Block block, String blockId) { BlockPropertyIdProvider instance = getInstance(); checkNotNull(property, "Property is null! 
Cannot retrieve a registration for a null property!"); checkNotNull(block, "Block cannot be null!"); checkNotNull(blockId, "Block id cannot be null!"); checkArgument(!blockId.isEmpty(), "Block id cannot be empty!"); if (instance.isRegistered(property)) { return instance.propertyIdMap.get(property); } else { final String lowerCasedBlockId = CaseFormat.UPPER_CAMEL.to(CaseFormat.LOWER_UNDERSCORE, blockId); final String modId = lowerCasedBlockId.split(":")[0]; final String propertyName = property.getName(); final String lastAttemptId = lowerCasedBlockId + "_" + property.getName(); try { // Seriously, don't look past this try state. just continue on with your day... // I warned you... final String originalClass = CaseFormat.UPPER_CAMEL.to(CaseFormat.LOWER_UNDERSCORE, block.getClass().getSimpleName()); Class<?> blockClass = block.getClass(); while (true) { if (blockClass == Object.class) { final String propertyId = modId + ":" + originalClass + "_" + property.getName(); LogManager.getLogger("Sponge").warn("Could not find {} owning class, assigning fallback id: {}", property.getName(), propertyId); instance.register(property, propertyId); return propertyId; } // Had enough? for (Field field : blockClass.getDeclaredFields()) { field.setAccessible(true); final boolean isStatic = Modifier.isStatic(field.getModifiers()); final Object o = isStatic ? field.get(null) : field.get(block); if (property != o) { continue; } final String className = field.getDeclaringClass().getSimpleName().replace("Block", "").replace("block", ""); final String classNameId = CaseFormat.UPPER_CAMEL.to(CaseFormat.LOWER_UNDERSCORE, className); final String propertyClassName = isStatic ? classNameId : originalClass; final String combinedId = modId + ":" + propertyClassName + "_" + propertyName.toLowerCase(Locale.ENGLISH); if (instance.idPropertyMap.containsKey(combinedId)) { // in this case, we really do have to fall back on the full block id... 
if (instance.idPropertyMap.containsKey(lastAttemptId)) { // we really are screwed... throw new IllegalArgumentException("Sorry! Someone is trying to re-register a block with the same property instances of" + "block: " + blockId + " , with property: " + propertyName); } else { instance.register((IProperty<?>) o, lastAttemptId); return lastAttemptId; } } instance.register(((IProperty<?>) o), combinedId); return combinedId; } blockClass = blockClass.getSuperclass(); } } catch (Exception e) { LogManager.getLogger("Sponge").warn("An exception was thrown while trying to resolve the property " + property.getName() +"'s owning class, assigning " + "fallback id: " + lastAttemptId, e); instance.register(property, lastAttemptId); return lastAttemptId; } } } private void register(IProperty<?> property, String id) { checkArgument(!this.propertyIdMap.containsKey(property), "Property is already registered! Property: " + property.getName() + " is registered as : " + this.propertyIdMap.get(property)); this.propertyIdMap.put(property, id.toLowerCase(Locale.ENGLISH)); this.idPropertyMap.put(id.toLowerCase(Locale.ENGLISH), property); } BlockPropertyIdProvider() { register(BlockRotatedPillar.AXIS, "minecraft:pillar_axis"); register(BlockDirectional.FACING, "minecraft:directional_facing"); register(BlockLog.LOG_AXIS, "minecraft:log_axis"); register(BlockNewLog.VARIANT, "minecraft:new_log_variant"); register(BlockOldLog.VARIANT, "minecraft:log_variant"); register(BlockFarmland.MOISTURE, "minecraft:farmland_moisture"); register(BlockPistonBase.FACING, "minecraft:piston_facing"); register(BlockPistonBase.EXTENDED, "minecraft:piston_extended"); register(BlockVine.NORTH, "minecraft:vine_north"); register(BlockVine.EAST, "minecraft:vine_east"); register(BlockVine.SOUTH, "minecraft:vine_south"); register(BlockVine.WEST, "minecraft:vine_west"); register(BlockVine.UP, "minecraft:vine_up"); register(BlockRedSandstone.TYPE, "minecraft:red_sandstone_type"); register(BlockLiquid.LEVEL, 
"minecraft:liquid_level"); register(BlockReed.AGE, "minecraft:reed_age"); register(BlockMycelium.SNOWY, "minecraft:mycelium_snowy"); register(BlockColored.COLOR, "minecraft:dyed_color"); register(BlockTorch.FACING, "minecraft:torch_facing"); register(BlockDirt.SNOWY, "minecraft:dirt_snowy"); register(BlockDirt.VARIANT, "minecraft:dirt_variant"); register(BlockEndPortalFrame.FACING, "minecraft:end_portal_facing"); register(BlockEndPortalFrame.EYE, "minecraft:end_portal_eye"); register(BlockCarpet.COLOR, "minecraft:carpet_color"); register(BlockStone.VARIANT, "minecraft:stone_variant"); register(BlockHugeMushroom.VARIANT, "minecraft:huge_mushroom_variant"); register(BlockSnow.LAYERS, "minecraft:snow_layer"); register(BlockWall.UP, "minecraft:wall_up"); register(BlockWall.NORTH, "minecraft:wall_north"); register(BlockWall.EAST, "minecraft:wall_east"); register(BlockWall.SOUTH, "minecraft:wall_south"); register(BlockWall.WEST, "minecraft:wall_west"); register(BlockWall.VARIANT, "minecraft:wall_variant"); register(BlockStairs.FACING, "minecraft:stairs_facing"); register(BlockStairs.HALF, "minecraft:stairs_half"); register(BlockStairs.SHAPE, "minecraft:stairs_shape"); register(BlockButton.FACING, "minecraft:button_facing"); register(BlockButton.POWERED, "minecraft:button_powered"); register(BlockCactus.AGE, "minecraft:cactus_age"); register(BlockCrops.AGE, "minecraft:crops_age"); register(BlockNetherWart.AGE, "minecraft:nether_wart_age"); register(BlockDoublePlant.VARIANT, "minecraft:double_plant_variant"); register(BlockDoublePlant.HALF, "minecraft:double_plant_half"); register(BlockStem.AGE, "minecraft:stem_age"); register(BlockStem.FACING, "minecraft:stem_facing"); register(BlockTallGrass.TYPE, "minecraft:tall_grass_type"); register(BlockSapling.TYPE, "minecraft:sapling_type"); register(BlockSapling.STAGE, "minecraft:sapling_stage"); register(BlockPrismarine.VARIANT, "minecraft:prismarine_variant"); register(BlockFence.NORTH, "minecraft:fence_north"); 
register(BlockFence.EAST, "minecraft:fence_east"); register(BlockFence.SOUTH, "minecraft:fence_south"); register(BlockFence.WEST, "minecraft:fence_west"); register(BlockSilverfish.VARIANT, "minecraft:disguised_variant"); register(BlockPane.NORTH, "minecraft:pane_north"); register(BlockPane.EAST, "minecraft:pane_east"); register(BlockPane.SOUTH, "minecraft:pane_south"); register(BlockPane.WEST, "minecraft:pane_west"); register(BlockStainedGlassPane.COLOR, "minecraft:stained_dyed_color"); register(BlockQuartz.VARIANT, "minecraft:quartz_variant"); register(BlockPistonExtension.FACING, "minecraft:piston_extension_facing"); register(BlockPistonExtension.TYPE, "minecraft:piston_extension_type"); register(BlockPistonExtension.SHORT, "minecraft:piston_extension_short"); register(BlockSandStone.TYPE, "minecraft:sand_stone_type"); register(BlockPlanks.VARIANT, "minecraft:plank_variant"); register(BlockPortal.AXIS, "minecraft:portal_axis"); register(BlockStainedGlass.COLOR, "minecraft:stained_glass_color"); register(BlockRail.SHAPE, "minecraft:rail_shape"); register(BlockRailPowered.POWERED, "minecraft:powered_rail_powered"); register(BlockRailPowered.SHAPE, "minecraft:powered_rail_shape"); register(BlockRailDetector.POWERED, "minecraft:detector_rail_powered"); register(BlockRailDetector.SHAPE, "minecraft:detector_rail_shape"); register(BlockLeaves.DECAYABLE, "minecraft:leaves_decay"); register(BlockLeaves.CHECK_DECAY, "minecraft:leaves_check_decay"); register(BlockOldLeaf.VARIANT, "minecraft:old_leaves_variant"); register(BlockNewLeaf.VARIANT, "minecraft:new_leaves_variant"); register(BlockGrass.SNOWY, "minecraft:grass_snowy"); register(BlockCauldron.LEVEL, "minecraft:cauldron_level"); register(BlockFurnace.FACING, "minecraft:furnace_facing"); register(BlockBanner.FACING, "minecraft:banner_facing"); register(BlockBanner.ROTATION, "minecraft:banner_rotation"); register(BlockCommandBlock.TRIGGERED, "minecraft:command_block_triggered"); register(BlockChest.FACING, 
"minecraft:chest_facing"); register(BlockSkull.FACING, "minecraft:skull_facing"); register(BlockSkull.NODROP, "minecraft:skull_no_drop"); register(BlockStandingSign.ROTATION, "minecraft:standing_sign_rotation"); register(BlockWallSign.FACING, "minecraft:wall_sign_facing"); register(BlockBrewingStand.HAS_BOTTLE[0], "minecraft:brewing_stand_1_has_bottle"); register(BlockBrewingStand.HAS_BOTTLE[1], "minecraft:brewing_stand_2_has_bottle"); register(BlockBrewingStand.HAS_BOTTLE[2], "minecraft:brewing_stand_3_has_bottle"); register(BlockHopper.ENABLED, "minecraft:hopper_enabled"); register(BlockHopper.FACING, "minecraft:hopper_facing"); register(BlockEnderChest.FACING, "minecraft:ender_chest_facing"); register(BlockFlowerPot.LEGACY_DATA, "minecraft:flower_pot_legacy"); register(BlockFlowerPot.CONTENTS, "minecraft:flower_pot_contents"); register(BlockDaylightDetector.POWER, "minecraft:daylight_detector_power"); register(BlockDispenser.FACING, "minecraft:dispenser_facing"); register(BlockDispenser.TRIGGERED, "minecraft:dispenser_triggered"); register(BlockJukebox.HAS_RECORD, "minecraft:jukebox_has_record"); register(BlockSand.VARIANT, "minecraft:sand_variant"); register(BlockAnvil.DAMAGE, "minecraft:anvil_damage"); register(BlockAnvil.FACING, "minecraft:anvil_facing"); register(BlockCake.BITES, "minecraft:cake_bites"); register(BlockFire.AGE, "minecraft:fire_age"); register(BlockFire.ALT, "minecraft:fire_alt"); register(BlockFire.NORTH, "minecraft:fire_north"); register(BlockFire.EAST, "minecraft:fire_east"); register(BlockFire.SOUTH, "minecraft:fire_south"); register(BlockFire.WEST, "minecraft:fire_west"); register(BlockFire.UPPER, "minecraft:fire_upper"); register(BlockFire.FLIP, "minecraft:fire_flip"); register(BlockSlab.HALF, "minecraft:slab_half"); register(BlockStoneSlabNew.SEAMLESS, "minecraft:stone_slab_new_seamless"); register(BlockStoneSlabNew.VARIANT, "minecraft:stone_slab_new_variant"); register(BlockStoneSlab.SEAMLESS, "minecraft:stone_slab_seamless"); 
register(BlockStoneSlab.VARIANT, "minecraft:stone_slab_variant"); register(BlockWoodSlab.VARIANT, "minecraft:wood_slab_variant"); register(BlockLadder.FACING, "minecraft:ladder_facing"); register(BlockSponge.WET, "minecraft:sponge_wet"); register(BlockTripWireHook.FACING, "minecraft:trip_wire_hook_facing"); register(BlockTripWireHook.ATTACHED, "minecraft:trip_wire_hook_attached"); register(BlockTripWireHook.POWERED, "minecraft:trip_wire_hook_powered"); register(BlockTripWireHook.SUSPENDED, "minecraft:trip_wire_hook_suspended"); register(BlockDoor.FACING, "minecraft:door_facing"); register(BlockDoor.OPEN, "minecraft:door_open"); register(BlockDoor.HINGE, "minecraft:door_hinge"); register(BlockDoor.POWERED, "minecraft:door_powered"); register(BlockDoor.HALF, "minecraft:door_half"); register(BlockStoneBrick.VARIANT, "minecraft:stone_brick_variant"); register(BlockLever.FACING, "minecraft:lever_variant"); register(BlockLever.POWERED, "minecraft:lever_powered"); register(BlockTNT.EXPLODE, "minecraft:tnt_explode"); register(BlockBed.PART, "minecraft:bed_part"); register(BlockBed.OCCUPIED, "minecraft:bed_occupied"); register(BlockRedstoneComparator.MODE, "minecraft:comparator_mode"); register(BlockRedstoneComparator.POWERED, "minecraft:comparator_powered"); register(BlockCocoa.AGE, "minecraft:cocoa_facing"); register(BlockFenceGate.IN_WALL, "minecraft:fence_gate_in_wall"); register(BlockFenceGate.OPEN, "minecraft:fence_gate_open"); register(BlockFenceGate.POWERED, "minecraft:fence_gate_powered"); register(BlockRedstoneWire.NORTH, "minecraft:redstone_north"); register(BlockRedstoneWire.EAST, "minecraft:redstone_east"); register(BlockRedstoneWire.SOUTH, "minecraft:redstone_south"); register(BlockRedstoneWire.WEST, "minecraft:redstone_west"); register(BlockRedstoneWire.POWER, "minecraft:redstone_power"); register(BlockTripWire.POWERED, "minecraft:trip_wire_powered"); register(BlockTripWire.SUSPENDED, "minecraft:trip_wire_suspended"); register(BlockTripWire.ATTACHED, 
"minecraft:trip_wire_attached"); register(BlockTripWire.DISARMED, "minecraft:trip_wire_disarmed"); register(BlockTripWire.NORTH, "minecraft:trip_wire_north"); register(BlockTripWire.EAST, "minecraft:trip_wire_east"); register(BlockTripWire.SOUTH, "minecraft:trip_wire_south"); register(BlockTripWire.WEST, "minecraft:trip_wire_west"); register(BlockPressurePlateWeighted.POWER, "minecraft:weighted_pressure_plate_power"); register(BlockPressurePlate.POWERED, "minecraft:pressure_plate_power"); register(BlockTrapDoor.FACING, "minecraft:trap_door_facing"); register(BlockTrapDoor.OPEN, "minecraft:trap_door_open"); register(BlockTrapDoor.HALF, "minecraft:trap_door_half"); register(BlockRedstoneRepeater.DELAY, "minecraft:redstone_repeater_delay"); register(BlockRedstoneRepeater.LOCKED, "minecraft:redstone_repeater_locked"); } private static final class Holder { static final BlockPropertyIdProvider INSTANCE = new BlockPropertyIdProvider(); } }
package org.apache.lucene.search.suggest;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.DoubleConstValueSource;
import org.apache.lucene.queries.function.valuesource.LongFieldSource;
import org.apache.lucene.queries.function.valuesource.SumFloatFunction;
import org.apache.lucene.search.spell.Dictionary;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;

/**
 * Tests for {@link DocumentValueSourceDictionary}: verifies that suggestion
 * entries, weights (computed from a {@link ValueSource}), payloads and
 * contexts are read back correctly from an index, including the empty-index
 * and deleted-document cases.
 */
@SuppressCodecs("Lucene3x")
public class DocumentValueSourceDictionaryTest extends LuceneTestCase {

  static final String FIELD_NAME = "f1";
  static final String WEIGHT_FIELD_NAME_1 = "w1";
  static final String WEIGHT_FIELD_NAME_2 = "w2";
  static final String WEIGHT_FIELD_NAME_3 = "w3";
  static final String PAYLOAD_FIELD_NAME = "p1";
  static final String CONTEXTS_FIELD_NAME = "c1";

  /**
   * Generates {@code ndocs} documents keyed by their suggestion text
   * ("field_i"), each carrying deterministic weights (10+i, 20+i, 30+i),
   * a stored payload and at least one context value.
   *
   * @param ndocs number of documents to generate
   * @return map from suggestion text to the generated document
   */
  private Map<String, Document> generateIndexDocuments(int ndocs) {
    Map<String, Document> docs = new HashMap<>();
    for (int i = 0; i < ndocs; i++) {
      Field field = new TextField(FIELD_NAME, "field_" + i, Field.Store.YES);
      Field payload = new StoredField(PAYLOAD_FIELD_NAME, new BytesRef("payload_" + i));
      Field weight1 = new NumericDocValuesField(WEIGHT_FIELD_NAME_1, 10 + i);
      Field weight2 = new NumericDocValuesField(WEIGHT_FIELD_NAME_2, 20 + i);
      Field weight3 = new NumericDocValuesField(WEIGHT_FIELD_NAME_3, 30 + i);
      Field contexts = new StoredField(CONTEXTS_FIELD_NAME, new BytesRef("ctx_" + i + "_0"));
      Document doc = new Document();
      doc.add(field);
      doc.add(payload);
      doc.add(weight1);
      doc.add(weight2);
      doc.add(weight3);
      doc.add(contexts);
      // BUGFIX: atLeast(3) was previously called inside the loop condition,
      // drawing a fresh random bound on every iteration. Hoist it so the
      // number of contexts per document is decided exactly once.
      int numContexts = atLeast(3);
      for (int j = 1; j < numContexts; j++) {
        contexts.setBytesValue(new BytesRef("ctx_" + i + "_" + j));
        doc.add(contexts);
      }
      docs.put(field.stringValue(), doc);
    }
    return docs;
  }

  /**
   * Indexes every document in {@code docs} into {@code dir}, commits, closes
   * the writer and returns a fresh reader over the result. Shared by the
   * tests that do not need to interleave deletions with indexing.
   */
  private IndexReader indexDocsAndOpenReader(Directory dir, Map<String, Document> docs)
      throws IOException {
    IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);
    for (Document doc : docs.values()) {
      writer.addDocument(doc);
    }
    writer.commit();
    writer.close();
    return DirectoryReader.open(dir);
  }

  /** An empty index must yield an exhausted iterator with zero weight and no payload. */
  @Test
  public void testEmptyReader() throws IOException {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
    iwc.setMergePolicy(newLogMergePolicy());
    // Make sure the index is created? (commit without adding any document)
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);
    writer.commit();
    writer.close();
    IndexReader ir = DirectoryReader.open(dir);
    Dictionary dictionary = new DocumentValueSourceDictionary(ir, FIELD_NAME,
        new DoubleConstValueSource(10), PAYLOAD_FIELD_NAME);
    InputIterator inputIterator = dictionary.getEntryIterator();
    assertNull(inputIterator.next());
    assertEquals(inputIterator.weight(), 0);
    assertNull(inputIterator.payload());
    ir.close();
    dir.close();
  }

  /** Weights must be the sum of the three numeric doc-values fields; payloads must round-trip. */
  @Test
  public void testBasic() throws IOException {
    Directory dir = newDirectory();
    Map<String, Document> docs = generateIndexDocuments(atLeast(100));
    IndexReader ir = indexDocsAndOpenReader(dir, docs);
    ValueSource[] toAdd = new ValueSource[] {
        new LongFieldSource(WEIGHT_FIELD_NAME_1),
        new LongFieldSource(WEIGHT_FIELD_NAME_2),
        new LongFieldSource(WEIGHT_FIELD_NAME_3)};
    Dictionary dictionary = new DocumentValueSourceDictionary(ir, FIELD_NAME,
        new SumFloatFunction(toAdd), PAYLOAD_FIELD_NAME);
    InputIterator inputIterator = dictionary.getEntryIterator();
    BytesRef f;
    while ((f = inputIterator.next()) != null) {
      Document doc = docs.remove(f.utf8ToString());
      long w1 = doc.getField(WEIGHT_FIELD_NAME_1).numericValue().longValue();
      long w2 = doc.getField(WEIGHT_FIELD_NAME_2).numericValue().longValue();
      long w3 = doc.getField(WEIGHT_FIELD_NAME_3).numericValue().longValue();
      assertTrue(f.equals(new BytesRef(doc.get(FIELD_NAME))));
      assertEquals(inputIterator.weight(), (w1 + w2 + w3));
      assertTrue(inputIterator.payload().equals(doc.getField(PAYLOAD_FIELD_NAME).binaryValue()));
    }
    // Every generated document must have been surfaced by the iterator.
    assertTrue(docs.isEmpty());
    ir.close();
    dir.close();
  }

  /** Same as {@link #testBasic()} but also verifies the context set per entry. */
  @Test
  public void testWithContext() throws IOException {
    Directory dir = newDirectory();
    Map<String, Document> docs = generateIndexDocuments(atLeast(100));
    IndexReader ir = indexDocsAndOpenReader(dir, docs);
    ValueSource[] toAdd = new ValueSource[] {
        new LongFieldSource(WEIGHT_FIELD_NAME_1),
        new LongFieldSource(WEIGHT_FIELD_NAME_2),
        new LongFieldSource(WEIGHT_FIELD_NAME_3)};
    Dictionary dictionary = new DocumentValueSourceDictionary(ir, FIELD_NAME,
        new SumFloatFunction(toAdd), PAYLOAD_FIELD_NAME, CONTEXTS_FIELD_NAME);
    InputIterator inputIterator = dictionary.getEntryIterator();
    BytesRef f;
    while ((f = inputIterator.next()) != null) {
      Document doc = docs.remove(f.utf8ToString());
      long w1 = doc.getField(WEIGHT_FIELD_NAME_1).numericValue().longValue();
      long w2 = doc.getField(WEIGHT_FIELD_NAME_2).numericValue().longValue();
      long w3 = doc.getField(WEIGHT_FIELD_NAME_3).numericValue().longValue();
      assertTrue(f.equals(new BytesRef(doc.get(FIELD_NAME))));
      assertEquals(inputIterator.weight(), (w1 + w2 + w3));
      assertTrue(inputIterator.payload().equals(doc.getField(PAYLOAD_FIELD_NAME).binaryValue()));
      Set<BytesRef> originalCtxs = new HashSet<>();
      for (IndexableField ctxf : doc.getFields(CONTEXTS_FIELD_NAME)) {
        originalCtxs.add(ctxf.binaryValue());
      }
      assertEquals(originalCtxs, inputIterator.contexts());
    }
    assertTrue(docs.isEmpty());
    ir.close();
    dir.close();
  }

  /** Without a payload field the iterator must report a null payload for every entry. */
  @Test
  public void testWithoutPayload() throws IOException {
    Directory dir = newDirectory();
    Map<String, Document> docs = generateIndexDocuments(atLeast(100));
    IndexReader ir = indexDocsAndOpenReader(dir, docs);
    ValueSource[] toAdd = new ValueSource[] {
        new LongFieldSource(WEIGHT_FIELD_NAME_1),
        new LongFieldSource(WEIGHT_FIELD_NAME_2),
        new LongFieldSource(WEIGHT_FIELD_NAME_3)};
    Dictionary dictionary = new DocumentValueSourceDictionary(ir, FIELD_NAME,
        new SumFloatFunction(toAdd));
    InputIterator inputIterator = dictionary.getEntryIterator();
    BytesRef f;
    while ((f = inputIterator.next()) != null) {
      Document doc = docs.remove(f.utf8ToString());
      long w1 = doc.getField(WEIGHT_FIELD_NAME_1).numericValue().longValue();
      long w2 = doc.getField(WEIGHT_FIELD_NAME_2).numericValue().longValue();
      long w3 = doc.getField(WEIGHT_FIELD_NAME_3).numericValue().longValue();
      assertTrue(f.equals(new BytesRef(doc.get(FIELD_NAME))));
      assertEquals(inputIterator.weight(), (w1 + w2 + w3));
      assertEquals(inputIterator.payload(), null);
    }
    assertTrue(docs.isEmpty());
    ir.close();
    dir.close();
  }

  /** Deleted documents must not be surfaced; only live documents are iterated. */
  @Test
  public void testWithDeletions() throws IOException {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);
    Map<String, Document> docs = generateIndexDocuments(atLeast(100));
    Random rand = random();
    List<String> termsToDel = new ArrayList<>();
    for (Document doc : docs.values()) {
      // Randomly mark documents for deletion, but always keep at least one alive.
      if (rand.nextBoolean() && termsToDel.size() < docs.size() - 1) {
        termsToDel.add(doc.get(FIELD_NAME));
      }
      writer.addDocument(doc);
    }
    writer.commit();
    Term[] delTerms = new Term[termsToDel.size()];
    for (int i = 0; i < termsToDel.size(); i++) {
      delTerms[i] = new Term(FIELD_NAME, termsToDel.get(i));
    }
    for (Term delTerm : delTerms) {
      writer.deleteDocuments(delTerm);
    }
    writer.commit();
    writer.close();
    // Mirror the deletions in the expectation map.
    for (String termToDel : termsToDel) {
      assertTrue(null != docs.remove(termToDel));
    }
    IndexReader ir = DirectoryReader.open(dir);
    assertTrue("NumDocs should be > 0 but was " + ir.numDocs(), ir.numDocs() > 0);
    assertEquals(ir.numDocs(), docs.size());
    ValueSource[] toAdd = new ValueSource[] {
        new LongFieldSource(WEIGHT_FIELD_NAME_1),
        new LongFieldSource(WEIGHT_FIELD_NAME_2)};
    Dictionary dictionary = new DocumentValueSourceDictionary(ir, FIELD_NAME,
        new SumFloatFunction(toAdd), PAYLOAD_FIELD_NAME);
    InputIterator inputIterator = dictionary.getEntryIterator();
    BytesRef f;
    while ((f = inputIterator.next()) != null) {
      Document doc = docs.remove(f.utf8ToString());
      long w1 = doc.getField(WEIGHT_FIELD_NAME_1).numericValue().longValue();
      long w2 = doc.getField(WEIGHT_FIELD_NAME_2).numericValue().longValue();
      assertTrue(f.equals(new BytesRef(doc.get(FIELD_NAME))));
      assertEquals(inputIterator.weight(), w2 + w1);
      assertTrue(inputIterator.payload().equals(doc.getField(PAYLOAD_FIELD_NAME).binaryValue()));
    }
    assertTrue(docs.isEmpty());
    ir.close();
    dir.close();
  }

  /** A constant value source must give every entry the same fixed weight. */
  @Test
  public void testWithValueSource() throws IOException {
    Directory dir = newDirectory();
    Map<String, Document> docs = generateIndexDocuments(atLeast(100));
    IndexReader ir = indexDocsAndOpenReader(dir, docs);
    Dictionary dictionary = new DocumentValueSourceDictionary(ir, FIELD_NAME,
        new DoubleConstValueSource(10), PAYLOAD_FIELD_NAME);
    InputIterator inputIterator = dictionary.getEntryIterator();
    BytesRef f;
    while ((f = inputIterator.next()) != null) {
      Document doc = docs.remove(f.utf8ToString());
      assertTrue(f.equals(new BytesRef(doc.get(FIELD_NAME))));
      assertEquals(inputIterator.weight(), 10);
      assertTrue(inputIterator.payload().equals(doc.getField(PAYLOAD_FIELD_NAME).binaryValue()));
    }
    assertTrue(docs.isEmpty());
    ir.close();
    dir.close();
  }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.azure.tools.checkstyle.checks;

import com.puppycrawl.tools.checkstyle.api.AbstractCheck;
import com.puppycrawl.tools.checkstyle.api.DetailAST;
import com.puppycrawl.tools.checkstyle.api.FullIdent;
import com.puppycrawl.tools.checkstyle.api.TokenTypes;
import com.puppycrawl.tools.checkstyle.checks.naming.AccessModifierOption;
import com.puppycrawl.tools.checkstyle.utils.CheckUtil;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/**
 * No external dependency exposed in public API.
 * <p>
 * Flags public/protected methods of public/protected classes whose return or
 * parameter types resolve (via this file's imports) to packages outside the
 * allow-list in {@link #VALID_DEPENDENCY_SET}.
 */
public class ExternalDependencyExposedCheck extends AbstractCheck {
    private static final String EXTERNAL_DEPENDENCY_ERROR =
        "Class ''%s'', is a class from external dependency. You should not use it as a %s type.";

    // Package prefixes that are allowed to appear in a public API surface.
    private static final Set<String> VALID_DEPENDENCY_SET = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
        "java", "com.azure", "reactor", "org.reactivestreams"
    )));

    // Maps a simple class name to its fully qualified name, rebuilt per file from IMPORT tokens.
    private final Map<String, String> simpleClassNameToQualifiedNameMap = new HashMap<>();

    // True while visiting members of a public or protected class.
    private boolean isPublicClass;

    @Override
    public void beginTree(DetailAST rootAST) {
        // Imports are per-file; reset the lookup table for every new tree.
        simpleClassNameToQualifiedNameMap.clear();
    }

    @Override
    public int[] getDefaultTokens() {
        return getRequiredTokens();
    }

    @Override
    public int[] getAcceptableTokens() {
        return getRequiredTokens();
    }

    @Override
    public int[] getRequiredTokens() {
        return new int[] {
            TokenTypes.IMPORT,
            TokenTypes.CLASS_DEF,
            TokenTypes.METHOD_DEF
        };
    }

    @Override
    public void visitToken(DetailAST token) {
        switch (token.getType()) {
            case TokenTypes.IMPORT:
                // Add all imported classes into a map, key is the name of class and value is the full package
                // path of class.
                final String importClassPath = FullIdent.createFullIdentBelow(token).getText();
                // Use the char overload of lastIndexOf; no need to search for a one-char String.
                final String className = importClassPath.substring(importClassPath.lastIndexOf('.') + 1);
                simpleClassNameToQualifiedNameMap.put(className, importClassPath);
                break;
            case TokenTypes.CLASS_DEF:
                // CLASS_DEF always has MODIFIERS
                final AccessModifierOption accessModifier = CheckUtil.getAccessModifierFromModifiersToken(token);
                // Enum constants are singletons; compare with == rather than equals().
                isPublicClass = accessModifier == AccessModifierOption.PUBLIC
                    || accessModifier == AccessModifierOption.PROTECTED;
                break;
            case TokenTypes.METHOD_DEF:
                // Members of non-public classes cannot leak types to external callers.
                if (!isPublicClass) {
                    return;
                }
                checkNoExternalDependencyExposed(token);
                break;
            default:
                // Checkstyle complains if there's no default block in switch
                break;
        }
    }

    /**
     * Checks for external dependency, log the error if it is an invalid external dependency.
     *
     * @param methodDefToken METHOD_DEF AST node
     */
    private void checkNoExternalDependencyExposed(DetailAST methodDefToken) {
        // Getting the modifier of the method to determine if it is 'public' or 'protected'.
        // Ignore the check if it is neither of 'public' nor 'protected'.
        final AccessModifierOption accessModifier = CheckUtil.getAccessModifierFromModifiersToken(methodDefToken);
        if (accessModifier != AccessModifierOption.PUBLIC && accessModifier != AccessModifierOption.PROTECTED) {
            return;
        }

        // Checks for the return type of method
        final DetailAST typeToken = methodDefToken.findFirstToken(TokenTypes.TYPE);
        if (typeToken != null) {
            getInvalidReturnTypes(typeToken).forEach(
                (token, returnTypeName) -> log(token,
                    String.format(EXTERNAL_DEPENDENCY_ERROR, returnTypeName, "return")));
        }

        // Checks for the parameters of the method
        final DetailAST parametersToken = methodDefToken.findFirstToken(TokenTypes.PARAMETERS);
        if (parametersToken != null) {
            getInvalidParameterTypes(parametersToken).forEach(
                (token, parameterTypeName) -> log(token,
                    String.format(EXTERNAL_DEPENDENCY_ERROR, parameterTypeName, "method argument")));
        }
    }

    /**
     * Get invalid return types from a given TYPE node.
     *
     * @param typeToken TYPE AST node
     * @return a map that maps the invalid TYPE node and the type name.
     */
    private Map<DetailAST, String> getInvalidReturnTypes(DetailAST typeToken) {
        final Map<DetailAST, String> invalidReturnTypeMap = new HashMap<>();

        // Add all invalid external return types to the map
        final DetailAST identToken = typeToken.findFirstToken(TokenTypes.IDENT);
        if (identToken == null) {
            // No IDENT child: primitive return type, nothing to validate.
            return invalidReturnTypeMap;
        }
        final String typeName = identToken.getText();
        if (!isValidClassDependency(typeName)) {
            invalidReturnTypeMap.put(typeToken, typeName);
        }

        // TYPE_ARGUMENTS, add all invalid external types to the map
        final DetailAST typeArgumentsToken = typeToken.findFirstToken(TokenTypes.TYPE_ARGUMENTS);
        if (typeArgumentsToken != null) {
            getInvalidParameterType(typeArgumentsToken, invalidReturnTypeMap);
        }

        return invalidReturnTypeMap;
    }

    /**
     * Get invalid parameter types from a given PARAMETERS node.
     *
     * @param parametersTypeToken PARAMETERS AST node
     * @return a map that maps all the invalid TYPE_ARGUMENT node and the type name
     */
    private Map<DetailAST, String> getInvalidParameterTypes(DetailAST parametersTypeToken) {
        final Map<DetailAST, String> invalidParameterTypesMap = new HashMap<>();
        for (DetailAST ast = parametersTypeToken.getFirstChild(); ast != null; ast = ast.getNextSibling()) {
            if (ast.getType() == TokenTypes.PARAMETER_DEF) {
                getInvalidParameterType(ast.findFirstToken(TokenTypes.TYPE), invalidParameterTypesMap);
            }
        }
        return invalidParameterTypesMap;
    }

    /**
     * Get all invalid AST nodes from a given token. DFS tree traversal used to find all invalid nodes.
     *
     * @param token TYPE_ARGUMENT, TYPE_ARGUMENTS or TYPE AST node
     * @param invalidTypesMap accumulator that collects every invalid node and its type name
     * @return the same {@code invalidTypesMap} accumulator, for call chaining
     */
    private Map<DetailAST, String> getInvalidParameterType(DetailAST token, Map<DetailAST, String> invalidTypesMap) {
        if (token == null) {
            return invalidTypesMap;
        }
        for (DetailAST ast = token.getFirstChild(); ast != null; ast = ast.getNextSibling()) {
            final int tokenType = ast.getType();
            if (tokenType == TokenTypes.IDENT) {
                final String identName = ast.getText();
                if (!isValidClassDependency(identName)) {
                    invalidTypesMap.put(ast, identName);
                }
            } else if (tokenType == TokenTypes.TYPE_ARGUMENT || tokenType == TokenTypes.TYPE_ARGUMENTS) {
                // Recurse into nested generics, e.g. Mono<Response<Foo>>.
                getInvalidParameterType(ast, invalidTypesMap);
            }
        }
        return invalidTypesMap;
    }

    /**
     * A helper function that checks for whether a class is from a valid internal dependency or is a suppression class.
     *
     * @param typeName the type name of class
     * @return true if the class is a suppression class, otherwise, return false.
     */
    private boolean isValidClassDependency(String typeName) {
        // Single lookup instead of containsKey + get. If the qualified class name
        // does not exist in the map, it implies the type is a primitive Java type
        // (ie. int, long, etc) or a same-package type with no import.
        final String qualifiedName = simpleClassNameToQualifiedNameMap.get(typeName);
        if (qualifiedName == null) {
            return true;
        }

        return VALID_DEPENDENCY_SET.stream()
            .anyMatch(qualifiedName::startsWith);
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v8/services/conversion_upload_service.proto package com.google.ads.googleads.v8.services; /** * <pre> * Identifying information for a successfully processed CallConversionUpload. * </pre> * * Protobuf type {@code google.ads.googleads.v8.services.CallConversionResult} */ public final class CallConversionResult extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v8.services.CallConversionResult) CallConversionResultOrBuilder { private static final long serialVersionUID = 0L; // Use CallConversionResult.newBuilder() to construct. private CallConversionResult(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CallConversionResult() { callerId_ = ""; callStartDateTime_ = ""; conversionAction_ = ""; conversionDateTime_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new CallConversionResult(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private CallConversionResult( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 42: { java.lang.String s = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; callerId_ = s; break; } case 50: { java.lang.String s = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; callStartDateTime_ = s; break; } case 58: { 
java.lang.String s = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; conversionAction_ = s; break; } case 66: { java.lang.String s = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; conversionDateTime_ = s; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v8.services.ConversionUploadServiceProto.internal_static_google_ads_googleads_v8_services_CallConversionResult_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v8.services.ConversionUploadServiceProto.internal_static_google_ads_googleads_v8_services_CallConversionResult_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v8.services.CallConversionResult.class, com.google.ads.googleads.v8.services.CallConversionResult.Builder.class); } private int bitField0_; public static final int CALLER_ID_FIELD_NUMBER = 5; private volatile java.lang.Object callerId_; /** * <pre> * The caller id from which this call was placed. Caller id is expected to be * in E.164 format with preceding '+' sign. * </pre> * * <code>optional string caller_id = 5;</code> * @return Whether the callerId field is set. */ @java.lang.Override public boolean hasCallerId() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * The caller id from which this call was placed. Caller id is expected to be * in E.164 format with preceding '+' sign. 
* </pre> * * <code>optional string caller_id = 5;</code> * @return The callerId. */ @java.lang.Override public java.lang.String getCallerId() { java.lang.Object ref = callerId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); callerId_ = s; return s; } } /** * <pre> * The caller id from which this call was placed. Caller id is expected to be * in E.164 format with preceding '+' sign. * </pre> * * <code>optional string caller_id = 5;</code> * @return The bytes for callerId. */ @java.lang.Override public com.google.protobuf.ByteString getCallerIdBytes() { java.lang.Object ref = callerId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); callerId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CALL_START_DATE_TIME_FIELD_NUMBER = 6; private volatile java.lang.Object callStartDateTime_; /** * <pre> * The date time at which the call occurred. The format is * "yyyy-mm-dd hh:mm:ss+|-hh:mm", e.g. "2019-01-01 12:32:45-08:00". * </pre> * * <code>optional string call_start_date_time = 6;</code> * @return Whether the callStartDateTime field is set. */ @java.lang.Override public boolean hasCallStartDateTime() { return ((bitField0_ & 0x00000002) != 0); } /** * <pre> * The date time at which the call occurred. The format is * "yyyy-mm-dd hh:mm:ss+|-hh:mm", e.g. "2019-01-01 12:32:45-08:00". * </pre> * * <code>optional string call_start_date_time = 6;</code> * @return The callStartDateTime. 
*/ @java.lang.Override public java.lang.String getCallStartDateTime() { java.lang.Object ref = callStartDateTime_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); callStartDateTime_ = s; return s; } } /** * <pre> * The date time at which the call occurred. The format is * "yyyy-mm-dd hh:mm:ss+|-hh:mm", e.g. "2019-01-01 12:32:45-08:00". * </pre> * * <code>optional string call_start_date_time = 6;</code> * @return The bytes for callStartDateTime. */ @java.lang.Override public com.google.protobuf.ByteString getCallStartDateTimeBytes() { java.lang.Object ref = callStartDateTime_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); callStartDateTime_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CONVERSION_ACTION_FIELD_NUMBER = 7; private volatile java.lang.Object conversionAction_; /** * <pre> * Resource name of the conversion action associated with this conversion. * </pre> * * <code>optional string conversion_action = 7;</code> * @return Whether the conversionAction field is set. */ @java.lang.Override public boolean hasConversionAction() { return ((bitField0_ & 0x00000004) != 0); } /** * <pre> * Resource name of the conversion action associated with this conversion. * </pre> * * <code>optional string conversion_action = 7;</code> * @return The conversionAction. */ @java.lang.Override public java.lang.String getConversionAction() { java.lang.Object ref = conversionAction_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); conversionAction_ = s; return s; } } /** * <pre> * Resource name of the conversion action associated with this conversion. 
 * </pre>
 *
 * <code>optional string conversion_action = 7;</code>
 * @return The bytes for conversionAction.
 */
@java.lang.Override
public com.google.protobuf.ByteString
    getConversionActionBytes() {
  java.lang.Object ref = conversionAction_;
  if (ref instanceof java.lang.String) {
    // Lazily convert the cached String form to ByteString and memoize it.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    conversionAction_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

public static final int CONVERSION_DATE_TIME_FIELD_NUMBER = 8;
// Holds either a String or a ByteString; converted lazily by the accessors below.
private volatile java.lang.Object conversionDateTime_;
/**
 * <pre>
 * The date time at which the conversion occurred. The format is
 * "yyyy-mm-dd hh:mm:ss+|-hh:mm", e.g. "2019-01-01 12:32:45-08:00".
 * </pre>
 *
 * <code>optional string conversion_date_time = 8;</code>
 * @return Whether the conversionDateTime field is set.
 */
@java.lang.Override
public boolean hasConversionDateTime() {
  return ((bitField0_ & 0x00000008) != 0);
}
/**
 * <code>optional string conversion_date_time = 8;</code>
 * @return The conversionDateTime.
 */
@java.lang.Override
public java.lang.String getConversionDateTime() {
  java.lang.Object ref = conversionDateTime_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // First String access: decode the ByteString once and cache the result.
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    conversionDateTime_ = s;
    return s;
  }
}
/**
 * <code>optional string conversion_date_time = 8;</code>
 * @return The bytes for conversionDateTime.
 */
@java.lang.Override
public com.google.protobuf.ByteString
    getConversionDateTimeBytes() {
  java.lang.Object ref = conversionDateTime_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    conversionDateTime_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

// Memoized result of isInitialized(): -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;

  memoizedIsInitialized = 1;
  return true;
}

@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
                    throws java.io.IOException {
  // Serialize only the fields whose presence bits are set, in field-number order.
  if (((bitField0_ & 0x00000001) != 0)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 5, callerId_);
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 6, callStartDateTime_);
  }
  if (((bitField0_ & 0x00000004) != 0)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 7, conversionAction_);
  }
  if (((bitField0_ & 0x00000008) != 0)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 8, conversionDateTime_);
  }
  unknownFields.writeTo(output);
}

@java.lang.Override
public int getSerializedSize() {
  // Size is cached in memoizedSize after the first computation.
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, callerId_);
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, callStartDateTime_);
  }
  if (((bitField0_ & 0x00000004) != 0)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(7, conversionAction_);
  }
  if (((bitField0_ & 0x00000008) != 0)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(8, conversionDateTime_);
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
   return true;
  }
  if (!(obj instanceof com.google.ads.googleads.v8.services.CallConversionResult)) {
    return super.equals(obj);
  }
  com.google.ads.googleads.v8.services.CallConversionResult other = (com.google.ads.googleads.v8.services.CallConversionResult) obj;

  // Two messages are equal only when field presence AND values match.
  if (hasCallerId() != other.hasCallerId()) return false;
  if (hasCallerId()) {
    if (!getCallerId()
        .equals(other.getCallerId())) return false;
  }
  if (hasCallStartDateTime() != other.hasCallStartDateTime()) return false;
  if (hasCallStartDateTime()) {
    if (!getCallStartDateTime()
        .equals(other.getCallStartDateTime())) return false;
  }
  if (hasConversionAction() != other.hasConversionAction()) return false;
  if (hasConversionAction()) {
    if (!getConversionAction()
        .equals(other.getConversionAction())) return false;
  }
  if (hasConversionDateTime() != other.hasConversionDateTime()) return false;
  if (hasConversionDateTime()) {
    if (!getConversionDateTime()
        .equals(other.getConversionDateTime())) return false;
  }
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}

@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  // Only fields that are present contribute to the hash, mirroring equals().
  if (hasCallerId()) {
    hash = (37 * hash) + CALLER_ID_FIELD_NUMBER;
    hash = (53 * hash) + getCallerId().hashCode();
  }
  if (hasCallStartDateTime()) {
    hash = (37 * hash) + CALL_START_DATE_TIME_FIELD_NUMBER;
    hash = (53 * hash) + getCallStartDateTime().hashCode();
  }
  if (hasConversionAction()) {
    hash = (37 * hash) + CONVERSION_ACTION_FIELD_NUMBER;
    hash = (53 * hash) + getConversionAction().hashCode();
  }
  if (hasConversionDateTime()) {
    hash = (37 * hash) + CONVERSION_DATE_TIME_FIELD_NUMBER;
    hash = (53 * hash) + getConversionDateTime().hashCode();
  }
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}

// ---- Static parse entry points; all delegate to PARSER. ----

public static com.google.ads.googleads.v8.services.CallConversionResult parseFrom(
    java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v8.services.CallConversionResult parseFrom(
    java.nio.ByteBuffer data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v8.services.CallConversionResult parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v8.services.CallConversionResult parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v8.services.CallConversionResult parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v8.services.CallConversionResult parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v8.services.CallConversionResult parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v8.services.CallConversionResult parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v8.services.CallConversionResult parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v8.services.CallConversionResult parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v8.services.CallConversionResult parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v8.services.CallConversionResult parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}

@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v8.services.CallConversionResult prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // The default instance yields a fresh empty Builder; anything else is copied.
  return this == DEFAULT_INSTANCE
      ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * <pre>
 * Identifying information for a successfully processed CallConversionUpload.
 * </pre>
 *
 * Protobuf type {@code google.ads.googleads.v8.services.CallConversionResult}
 */
public static final class Builder extends
    com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
    // @@protoc_insertion_point(builder_implements:google.ads.googleads.v8.services.CallConversionResult)
    com.google.ads.googleads.v8.services.CallConversionResultOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v8.services.ConversionUploadServiceProto.internal_static_google_ads_googleads_v8_services_CallConversionResult_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v8.services.ConversionUploadServiceProto.internal_static_google_ads_googleads_v8_services_CallConversionResult_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v8.services.CallConversionResult.class, com.google.ads.googleads.v8.services.CallConversionResult.Builder.class);
  }

  // Construct using com.google.ads.googleads.v8.services.CallConversionResult.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  private void maybeForceBuilderInitialization() {
    // No nested-message fields, so there are no field builders to force.
    if (com.google.protobuf.GeneratedMessageV3
            .alwaysUseFieldBuilders) {
    }
  }
  @java.lang.Override
  public Builder clear() {
    // Reset every field to its default and clear its presence bit.
    super.clear();
    callerId_ = "";
    bitField0_ = (bitField0_ & ~0x00000001);
    callStartDateTime_ = "";
    bitField0_ = (bitField0_ & ~0x00000002);
    conversionAction_ = "";
    bitField0_ = (bitField0_ & ~0x00000004);
    conversionDateTime_ = "";
    bitField0_ = (bitField0_ & ~0x00000008);
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return com.google.ads.googleads.v8.services.ConversionUploadServiceProto.internal_static_google_ads_googleads_v8_services_CallConversionResult_descriptor;
  }

  @java.lang.Override
  public com.google.ads.googleads.v8.services.CallConversionResult getDefaultInstanceForType() {
    return com.google.ads.googleads.v8.services.CallConversionResult.getDefaultInstance();
  }

  @java.lang.Override
  public com.google.ads.googleads.v8.services.CallConversionResult build() {
    com.google.ads.googleads.v8.services.CallConversionResult result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.ads.googleads.v8.services.CallConversionResult buildPartial() {
    com.google.ads.googleads.v8.services.CallConversionResult result = new com.google.ads.googleads.v8.services.CallConversionResult(this);
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    // Copy field values and translate the builder's presence bits to the message's.
    if (((from_bitField0_ & 0x00000001) != 0)) {
      to_bitField0_ |= 0x00000001;
    }
    result.callerId_ = callerId_;
    if (((from_bitField0_ & 0x00000002) != 0)) {
      to_bitField0_ |= 0x00000002;
    }
    result.callStartDateTime_ = callStartDateTime_;
    if (((from_bitField0_ & 0x00000004) != 0)) {
      to_bitField0_ |= 0x00000004;
    }
    result.conversionAction_ = conversionAction_;
    if (((from_bitField0_ & 0x00000008) != 0)) {
      to_bitField0_ |= 0x00000008;
    }
    result.conversionDateTime_ = conversionDateTime_;
    result.bitField0_ = to_bitField0_;
    onBuilt();
    return result;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }
  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return super.setField(field, value);
  }
  @java.lang.Override
  public Builder clearField(
      com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }
  @java.lang.Override
  public Builder clearOneof(
      com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }
  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field,
      int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }
  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.ads.googleads.v8.services.CallConversionResult) {
      return mergeFrom((com.google.ads.googleads.v8.services.CallConversionResult)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  public Builder mergeFrom(com.google.ads.googleads.v8.services.CallConversionResult other) {
    if (other == com.google.ads.googleads.v8.services.CallConversionResult.getDefaultInstance()) return this;
    // Only fields present in `other` overwrite this builder's values.
    if (other.hasCallerId()) {
      bitField0_ |= 0x00000001;
      callerId_ = other.callerId_;
      onChanged();
    }
    if (other.hasCallStartDateTime()) {
      bitField0_ |= 0x00000002;
      callStartDateTime_ = other.callStartDateTime_;
      onChanged();
    }
    if (other.hasConversionAction()) {
      bitField0_ |= 0x00000004;
      conversionAction_ = other.conversionAction_;
      onChanged();
    }
    if (other.hasConversionDateTime()) {
      bitField0_ |= 0x00000008;
      conversionDateTime_ = other.conversionDateTime_;
      onChanged();
    }
    this.mergeUnknownFields(other.unknownFields);
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    com.google.ads.googleads.v8.services.CallConversionResult parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      // Keep whatever was parsed before the failure so the finally block can merge it.
      parsedMessage = (com.google.ads.googleads.v8.services.CallConversionResult) e.getUnfinishedMessage();
      throw e.unwrapIOException();
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }
  private int bitField0_;

  private java.lang.Object callerId_ = "";
  /**
   * The caller id (E.164 format with preceding '+') from which this call was placed.
   *
   * <code>optional string caller_id = 5;</code>
   * @return Whether the callerId field is set.
   */
  public boolean hasCallerId() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * <code>optional string caller_id = 5;</code>
   * @return The callerId.
   */
  public java.lang.String getCallerId() {
    java.lang.Object ref = callerId_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      callerId_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   * <code>optional string caller_id = 5;</code>
   * @return The bytes for callerId.
   */
  public com.google.protobuf.ByteString
      getCallerIdBytes() {
    java.lang.Object ref = callerId_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      callerId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   * <code>optional string caller_id = 5;</code>
   * @param value The callerId to set.
   * @return This builder for chaining.
   */
  public Builder setCallerId(
      java.lang.String value) {
    if (value == null) {
  throw new NullPointerException();
}
  bitField0_ |= 0x00000001;
    callerId_ = value;
    onChanged();
    return this;
  }
  /**
   * <code>optional string caller_id = 5;</code>
   * @return This builder for chaining.
   */
  public Builder clearCallerId() {
    bitField0_ = (bitField0_ & ~0x00000001);
    callerId_ = getDefaultInstance().getCallerId();
    onChanged();
    return this;
  }
  /**
   * <code>optional string caller_id = 5;</code>
   * @param value The bytes for callerId to set.
   * @return This builder for chaining.
   */
  public Builder setCallerIdBytes(
      com.google.protobuf.ByteString value) {
    if (value == null) {
  throw new NullPointerException();
}
  checkByteStringIsUtf8(value);
    bitField0_ |= 0x00000001;
    callerId_ = value;
    onChanged();
    return this;
  }

  private java.lang.Object callStartDateTime_ = "";
  /**
   * The date time at which the call occurred, in
   * "yyyy-mm-dd hh:mm:ss+|-hh:mm" format.
   *
   * <code>optional string call_start_date_time = 6;</code>
   * @return Whether the callStartDateTime field is set.
   */
  public boolean hasCallStartDateTime() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   * <code>optional string call_start_date_time = 6;</code>
   * @return The callStartDateTime.
   */
  public java.lang.String getCallStartDateTime() {
    java.lang.Object ref = callStartDateTime_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      callStartDateTime_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   * <code>optional string call_start_date_time = 6;</code>
   * @return The bytes for callStartDateTime.
   */
  public com.google.protobuf.ByteString
      getCallStartDateTimeBytes() {
    java.lang.Object ref = callStartDateTime_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      callStartDateTime_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   * <code>optional string call_start_date_time = 6;</code>
   * @param value The callStartDateTime to set.
   * @return This builder for chaining.
   */
  public Builder setCallStartDateTime(
      java.lang.String value) {
    if (value == null) {
  throw new NullPointerException();
}
  bitField0_ |= 0x00000002;
    callStartDateTime_ = value;
    onChanged();
    return this;
  }
  /**
   * <code>optional string call_start_date_time = 6;</code>
   * @return This builder for chaining.
   */
  public Builder clearCallStartDateTime() {
    bitField0_ = (bitField0_ & ~0x00000002);
    callStartDateTime_ = getDefaultInstance().getCallStartDateTime();
    onChanged();
    return this;
  }
  /**
   * <code>optional string call_start_date_time = 6;</code>
   * @param value The bytes for callStartDateTime to set.
   * @return This builder for chaining.
   */
  public Builder setCallStartDateTimeBytes(
      com.google.protobuf.ByteString value) {
    if (value == null) {
  throw new NullPointerException();
}
  checkByteStringIsUtf8(value);
    bitField0_ |= 0x00000002;
    callStartDateTime_ = value;
    onChanged();
    return this;
  }

  private java.lang.Object conversionAction_ = "";
  /**
   * Resource name of the conversion action associated with this conversion.
   *
   * <code>optional string conversion_action = 7;</code>
   * @return Whether the conversionAction field is set.
   */
  public boolean hasConversionAction() {
    return ((bitField0_ & 0x00000004) != 0);
  }
  /**
   * <code>optional string conversion_action = 7;</code>
   * @return The conversionAction.
   */
  public java.lang.String getConversionAction() {
    java.lang.Object ref = conversionAction_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      conversionAction_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   * <code>optional string conversion_action = 7;</code>
   * @return The bytes for conversionAction.
   */
  public com.google.protobuf.ByteString
      getConversionActionBytes() {
    java.lang.Object ref = conversionAction_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      conversionAction_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   * <code>optional string conversion_action = 7;</code>
   * @param value The conversionAction to set.
   * @return This builder for chaining.
   */
  public Builder setConversionAction(
      java.lang.String value) {
    if (value == null) {
  throw new NullPointerException();
}
  bitField0_ |= 0x00000004;
    conversionAction_ = value;
    onChanged();
    return this;
  }
  /**
   * <code>optional string conversion_action = 7;</code>
   * @return This builder for chaining.
   */
  public Builder clearConversionAction() {
    bitField0_ = (bitField0_ & ~0x00000004);
    conversionAction_ = getDefaultInstance().getConversionAction();
    onChanged();
    return this;
  }
  /**
   * <code>optional string conversion_action = 7;</code>
   * @param value The bytes for conversionAction to set.
   * @return This builder for chaining.
   */
  public Builder setConversionActionBytes(
      com.google.protobuf.ByteString value) {
    if (value == null) {
  throw new NullPointerException();
}
  checkByteStringIsUtf8(value);
    bitField0_ |= 0x00000004;
    conversionAction_ = value;
    onChanged();
    return this;
  }

  private java.lang.Object conversionDateTime_ = "";
  /**
   * The date time at which the conversion occurred, in
   * "yyyy-mm-dd hh:mm:ss+|-hh:mm" format.
   *
   * <code>optional string conversion_date_time = 8;</code>
   * @return Whether the conversionDateTime field is set.
   */
  public boolean hasConversionDateTime() {
    return ((bitField0_ & 0x00000008) != 0);
  }
  /**
   * <code>optional string conversion_date_time = 8;</code>
   * @return The conversionDateTime.
   */
  public java.lang.String getConversionDateTime() {
    java.lang.Object ref = conversionDateTime_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      conversionDateTime_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   * <code>optional string conversion_date_time = 8;</code>
   * @return The bytes for conversionDateTime.
   */
  public com.google.protobuf.ByteString
      getConversionDateTimeBytes() {
    java.lang.Object ref = conversionDateTime_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      conversionDateTime_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   * <code>optional string conversion_date_time = 8;</code>
   * @param value The conversionDateTime to set.
   * @return This builder for chaining.
   */
  public Builder setConversionDateTime(
      java.lang.String value) {
    if (value == null) {
  throw new NullPointerException();
}
  bitField0_ |= 0x00000008;
    conversionDateTime_ = value;
    onChanged();
    return this;
  }
  /**
   * <code>optional string conversion_date_time = 8;</code>
   * @return This builder for chaining.
   */
  public Builder clearConversionDateTime() {
    bitField0_ = (bitField0_ & ~0x00000008);
    conversionDateTime_ = getDefaultInstance().getConversionDateTime();
    onChanged();
    return this;
  }
  /**
   * <code>optional string conversion_date_time = 8;</code>
   * @param value The bytes for conversionDateTime to set.
   * @return This builder for chaining.
   */
  public Builder setConversionDateTimeBytes(
      com.google.protobuf.ByteString value) {
    if (value == null) {
  throw new NullPointerException();
}
  checkByteStringIsUtf8(value);
    bitField0_ |= 0x00000008;
    conversionDateTime_ = value;
    onChanged();
    return this;
  }
  @java.lang.Override
  public final Builder setUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }


  // @@protoc_insertion_point(builder_scope:google.ads.googleads.v8.services.CallConversionResult)
}

// @@protoc_insertion_point(class_scope:google.ads.googleads.v8.services.CallConversionResult)
private static final com.google.ads.googleads.v8.services.CallConversionResult DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.ads.googleads.v8.services.CallConversionResult();
}

public static com.google.ads.googleads.v8.services.CallConversionResult getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

private static final com.google.protobuf.Parser<CallConversionResult>
    PARSER = new com.google.protobuf.AbstractParser<CallConversionResult>() {
  @java.lang.Override
  public CallConversionResult parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new CallConversionResult(input, extensionRegistry);
  }
};

public static com.google.protobuf.Parser<CallConversionResult> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<CallConversionResult> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.ads.googleads.v8.services.CallConversionResult getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}

}
/*
 * Copyright 2019 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.cloud.compute.v1;

import com.google.api.core.BetaApi;
import com.google.api.gax.httpjson.ApiMessage;
import java.util.List;
import java.util.Objects;
import javax.annotation.Generated;
import javax.annotation.Nullable;

@Generated("by GAPIC")
@BetaApi
/**
 * Request object for method compute.nodeGroups.get. Returns the specified NodeGroup. Get a list of
 * available NodeGroups by making a list() request. Note: the "nodes" field should not be used. Use
 * nodeGroups.listNodes instead.
 */
public final class GetNodeGroupHttpRequest implements ApiMessage {
  // All fields are immutable; null means "not set" throughout this class.
  private final String access_token;
  private final String callback;
  private final String fields;
  private final String key;
  private final String nodeGroup;
  private final String prettyPrint;
  private final String quotaUser;
  private final String userIp;

  // No-arg constructor used only to build the all-null DEFAULT_INSTANCE.
  private GetNodeGroupHttpRequest() {
    this.access_token = null;
    this.callback = null;
    this.fields = null;
    this.key = null;
    this.nodeGroup = null;
    this.prettyPrint = null;
    this.quotaUser = null;
    this.userIp = null;
  }

  private GetNodeGroupHttpRequest(
      String access_token,
      String callback,
      String fields,
      String key,
      String nodeGroup,
      String prettyPrint,
      String quotaUser,
      String userIp) {
    this.access_token = access_token;
    this.callback = callback;
    this.fields = fields;
    this.key = key;
    this.nodeGroup = nodeGroup;
    this.prettyPrint = prettyPrint;
    this.quotaUser = quotaUser;
    this.userIp = userIp;
  }

  // Reflective field lookup by wire name; returns null for unknown names.
  @Override
  public Object getFieldValue(String fieldName) {
    if ("access_token".equals(fieldName)) {
      return access_token;
    }
    if ("callback".equals(fieldName)) {
      return callback;
    }
    if ("fields".equals(fieldName)) {
      return fields;
    }
    if ("key".equals(fieldName)) {
      return key;
    }
    if ("nodeGroup".equals(fieldName)) {
      return nodeGroup;
    }
    if ("prettyPrint".equals(fieldName)) {
      return prettyPrint;
    }
    if ("quotaUser".equals(fieldName)) {
      return quotaUser;
    }
    if ("userIp".equals(fieldName)) {
      return userIp;
    }
    return null;
  }

  // GET request: there is no HTTP body to serialize.
  @Nullable
  @Override
  public ApiMessage getApiMessageRequestBody() {
    return null;
  }

  @Nullable
  @Override
  /**
   * The fields that should be serialized (even if they have empty values). If the containing
   * message object has a non-null fieldmask, then all the fields in the field mask (and only those
   * fields in the field mask) will be serialized. If the containing object does not have a
   * fieldmask, then only non-empty fields will be serialized.
   */
  public List<String> getFieldMask() {
    return null;
  }

  /** OAuth 2.0 token for the current user. */
  public String getAccessToken() {
    return access_token;
  }

  /** Name of the JavaScript callback function that handles the response. */
  public String getCallback() {
    return callback;
  }

  /** Selector specifying a subset of fields to include in the response. */
  public String getFields() {
    return fields;
  }

  /** API key. Required unless you provide an OAuth 2.0 token. */
  public String getKey() {
    return key;
  }

  /**
   * Name of the node group to return. It must have the format
   * `{project}/zones/{zone}/nodeGroups/{nodeGroup}`. \`{nodeGroup}\` must start with a letter, and
   * contain only letters (\`[A-Za-z]\`), numbers (\`[0-9]\`), dashes (\`-\`), &#42; underscores
   * (\`_\`), periods (\`.\`), tildes (\`~\`), plus (\`+\`) or percent &#42; signs (\`%\`). It must
   * be between 3 and 255 characters in length, and it &#42; must not start with \`"goog"\`.
   */
  public String getNodeGroup() {
    return nodeGroup;
  }

  /** Returns response with indentations and line breaks. */
  public String getPrettyPrint() {
    return prettyPrint;
  }

  /** Alternative to userIp. */
  public String getQuotaUser() {
    return quotaUser;
  }

  /** IP address of the end user for whom the API call is being made. */
  public String getUserIp() {
    return userIp;
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(GetNodeGroupHttpRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  public Builder toBuilder() {
    // Default instance yields an empty Builder; anything else is copied field by field.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  public static GetNodeGroupHttpRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final GetNodeGroupHttpRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new GetNodeGroupHttpRequest();
  }

  /** Mutable companion used to assemble an immutable {@link GetNodeGroupHttpRequest}. */
  public static class Builder {
    private String access_token;
    private String callback;
    private String fields;
    private String key;
    private String nodeGroup;
    private String prettyPrint;
    private String quotaUser;
    private String userIp;

    Builder() {}

    // Copies only non-null fields from `other` into this builder.
    public Builder mergeFrom(GetNodeGroupHttpRequest other) {
      if (other == GetNodeGroupHttpRequest.getDefaultInstance()) return this;
      if (other.getAccessToken() != null) {
        this.access_token = other.access_token;
      }
      if (other.getCallback() != null) {
        this.callback = other.callback;
      }
      if (other.getFields() != null) {
        this.fields = other.fields;
      }
      if (other.getKey() != null) {
        this.key = other.key;
      }
      if (other.getNodeGroup() != null) {
        this.nodeGroup = other.nodeGroup;
      }
      if (other.getPrettyPrint() != null) {
        this.prettyPrint = other.prettyPrint;
      }
      if (other.getQuotaUser() != null) {
        this.quotaUser = other.quotaUser;
      }
      if (other.getUserIp() != null) {
        this.userIp = other.userIp;
      }
      return this;
    }

    Builder(GetNodeGroupHttpRequest source) {
      this.access_token = source.access_token;
      this.callback = source.callback;
      this.fields = source.fields;
      this.key = source.key;
      this.nodeGroup = source.nodeGroup;
      this.prettyPrint = source.prettyPrint;
      this.quotaUser = source.quotaUser;
      this.userIp = source.userIp;
    }

    /** OAuth 2.0 token for the current user. */
    public String getAccessToken() {
      return access_token;
    }

    /** OAuth 2.0 token for the current user. */
    public Builder setAccessToken(String access_token) {
      this.access_token = access_token;
      return this;
    }

    /** Name of the JavaScript callback function that handles the response. */
    public String getCallback() {
      return callback;
    }

    /** Name of the JavaScript callback function that handles the response. */
    public Builder setCallback(String callback) {
      this.callback = callback;
      return this;
    }

    /** Selector specifying a subset of fields to include in the response. */
    public String getFields() {
      return fields;
    }

    /** Selector specifying a subset of fields to include in the response. */
    public Builder setFields(String fields) {
      this.fields = fields;
      return this;
    }

    /** API key. Required unless you provide an OAuth 2.0 token. */
    public String getKey() {
      return key;
    }

    /** API key. Required unless you provide an OAuth 2.0 token. */
    public Builder setKey(String key) {
      this.key = key;
      return this;
    }

    /**
     * Name of the node group to return. It must have the format
     * `{project}/zones/{zone}/nodeGroups/{nodeGroup}`. \`{nodeGroup}\` must start with a letter,
     * and contain only letters (\`[A-Za-z]\`), numbers (\`[0-9]\`), dashes (\`-\`), &#42;
     * underscores (\`_\`), periods (\`.\`), tildes (\`~\`), plus (\`+\`) or percent &#42; signs
     * (\`%\`). It must be between 3 and 255 characters in length, and it &#42; must not start with
     * \`"goog"\`.
     */
    public String getNodeGroup() {
      return nodeGroup;
    }

    /**
     * Name of the node group to return. It must have the format
     * `{project}/zones/{zone}/nodeGroups/{nodeGroup}`. \`{nodeGroup}\` must start with a letter,
     * and contain only letters (\`[A-Za-z]\`), numbers (\`[0-9]\`), dashes (\`-\`), &#42;
     * underscores (\`_\`), periods (\`.\`), tildes (\`~\`), plus (\`+\`) or percent &#42; signs
     * (\`%\`). It must be between 3 and 255 characters in length, and it &#42; must not start with
     * \`"goog"\`.
     */
    public Builder setNodeGroup(String nodeGroup) {
      this.nodeGroup = nodeGroup;
      return this;
    }

    /** Returns response with indentations and line breaks. */
    public String getPrettyPrint() {
      return prettyPrint;
    }

    /** Returns response with indentations and line breaks. */
    public Builder setPrettyPrint(String prettyPrint) {
      this.prettyPrint = prettyPrint;
      return this;
    }

    /** Alternative to userIp. */
    public String getQuotaUser() {
      return quotaUser;
    }

    /** Alternative to userIp. */
    public Builder setQuotaUser(String quotaUser) {
      this.quotaUser = quotaUser;
      return this;
    }

    /** IP address of the end user for whom the API call is being made. */
    public String getUserIp() {
      return userIp;
    }

    /** IP address of the end user for whom the API call is being made. */
    public Builder setUserIp(String userIp) {
      this.userIp = userIp;
      return this;
    }

    // Validates that the one required field (nodeGroup) is present before constructing.
    public GetNodeGroupHttpRequest build() {
      String missing = "";
      if (nodeGroup == null) {
        missing += " nodeGroup";
      }
      if (!missing.isEmpty()) {
        throw new IllegalStateException("Missing required properties:" + missing);
      }
      return new GetNodeGroupHttpRequest(
          access_token, callback, fields, key, nodeGroup, prettyPrint, quotaUser, userIp);
    }

    public Builder clone() {
      Builder newBuilder = new Builder();
      newBuilder.setAccessToken(this.access_token);
      newBuilder.setCallback(this.callback);
      newBuilder.setFields(this.fields);
      newBuilder.setKey(this.key);
      newBuilder.setNodeGroup(this.nodeGroup);
      newBuilder.setPrettyPrint(this.prettyPrint);
      newBuilder.setQuotaUser(this.quotaUser);
      newBuilder.setUserIp(this.userIp);
      return newBuilder;
    }
  }

  @Override
  public String toString() {
    return "GetNodeGroupHttpRequest{"
        + "access_token="
        + access_token
        + ", "
        + "callback="
        + callback
        + ", "
        + "fields="
        + fields
        + ", "
        + "key="
        + key
        + ", "
        + "nodeGroup="
        + nodeGroup
        + ", "
        + "prettyPrint="
        + prettyPrint
        + ", "
        + "quotaUser="
        + quotaUser
        + ", "
        + "userIp="
        + userIp
        + "}";
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o instanceof GetNodeGroupHttpRequest) {
      GetNodeGroupHttpRequest that = (GetNodeGroupHttpRequest) o;
      return Objects.equals(this.access_token, that.getAccessToken())
          && Objects.equals(this.callback, that.getCallback())
          && Objects.equals(this.fields, that.getFields())
          && Objects.equals(this.key, that.getKey())
          && Objects.equals(this.nodeGroup, that.getNodeGroup())
          && Objects.equals(this.prettyPrint, that.getPrettyPrint())
          && Objects.equals(this.quotaUser, that.getQuotaUser())
          && Objects.equals(this.userIp, that.getUserIp());
    }
    return false;
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        access_token, callback, fields, key, nodeGroup, prettyPrint, quotaUser, userIp);
  }
}
package com.configx.web.model; import java.util.Date; public class ConfigItemHistory { /** * This field was generated by MyBatis Generator. * This field corresponds to the database column config_item_history.revision * * @mbggenerated */ private Long revision; /** * This field was generated by MyBatis Generator. * This field corresponds to the database column config_item_history.last_revision * * @mbggenerated */ private Long lastRevision; /** * This field was generated by MyBatis Generator. * This field corresponds to the database column config_item_history.app_id * * @mbggenerated */ private Integer appId; /** * This field was generated by MyBatis Generator. * This field corresponds to the database column config_item_history.app_name * * @mbggenerated */ private String appName; /** * This field was generated by MyBatis Generator. * This field corresponds to the database column config_item_history.env_id * * @mbggenerated */ private Integer envId; /** * This field was generated by MyBatis Generator. * This field corresponds to the database column config_item_history.env_name * * @mbggenerated */ private String envName; /** * This field was generated by MyBatis Generator. * This field corresponds to the database column config_item_history.profile_id * * @mbggenerated */ private Integer profileId; /** * This field was generated by MyBatis Generator. * This field corresponds to the database column config_item_history.profile_name * * @mbggenerated */ private String profileName; /** * This field was generated by MyBatis Generator. * This field corresponds to the database column config_item_history.config_id * * @mbggenerated */ private Long configId; /** * This field was generated by MyBatis Generator. * This field corresponds to the database column config_item_history.config_name * * @mbggenerated */ private String configName; /** * This field was generated by MyBatis Generator. 
* This field corresponds to the database column config_item_history.config_value_id * * @mbggenerated */ private Long configValueId; /** * This field was generated by MyBatis Generator. * This field corresponds to the database column config_item_history.config_value_type * * @mbggenerated */ private Byte configValueType; /** * This field was generated by MyBatis Generator. * This field corresponds to the database column config_item_history.config_tags * * @mbggenerated */ private String configTags; /** * This field was generated by MyBatis Generator. * This field corresponds to the database column config_item_history.create_time * * @mbggenerated */ private Date createTime; /** * This field was generated by MyBatis Generator. * This field corresponds to the database column config_item_history.config_value * * @mbggenerated */ private String configValue; /** * This method was generated by MyBatis Generator. * This method returns the value of the database column config_item_history.revision * * @return the value of config_item_history.revision * * @mbggenerated */ public Long getRevision() { return revision; } /** * This method was generated by MyBatis Generator. * This method sets the value of the database column config_item_history.revision * * @param revision the value for config_item_history.revision * * @mbggenerated */ public void setRevision(Long revision) { this.revision = revision; } /** * This method was generated by MyBatis Generator. * This method returns the value of the database column config_item_history.last_revision * * @return the value of config_item_history.last_revision * * @mbggenerated */ public Long getLastRevision() { return lastRevision; } /** * This method was generated by MyBatis Generator. 
* This method sets the value of the database column config_item_history.last_revision * * @param lastRevision the value for config_item_history.last_revision * * @mbggenerated */ public void setLastRevision(Long lastRevision) { this.lastRevision = lastRevision; } /** * This method was generated by MyBatis Generator. * This method returns the value of the database column config_item_history.app_id * * @return the value of config_item_history.app_id * * @mbggenerated */ public Integer getAppId() { return appId; } /** * This method was generated by MyBatis Generator. * This method sets the value of the database column config_item_history.app_id * * @param appId the value for config_item_history.app_id * * @mbggenerated */ public void setAppId(Integer appId) { this.appId = appId; } /** * This method was generated by MyBatis Generator. * This method returns the value of the database column config_item_history.app_name * * @return the value of config_item_history.app_name * * @mbggenerated */ public String getAppName() { return appName; } /** * This method was generated by MyBatis Generator. * This method sets the value of the database column config_item_history.app_name * * @param appName the value for config_item_history.app_name * * @mbggenerated */ public void setAppName(String appName) { this.appName = appName == null ? null : appName.trim(); } /** * This method was generated by MyBatis Generator. * This method returns the value of the database column config_item_history.env_id * * @return the value of config_item_history.env_id * * @mbggenerated */ public Integer getEnvId() { return envId; } /** * This method was generated by MyBatis Generator. * This method sets the value of the database column config_item_history.env_id * * @param envId the value for config_item_history.env_id * * @mbggenerated */ public void setEnvId(Integer envId) { this.envId = envId; } /** * This method was generated by MyBatis Generator. 
* This method returns the value of the database column config_item_history.env_name * * @return the value of config_item_history.env_name * * @mbggenerated */ public String getEnvName() { return envName; } /** * This method was generated by MyBatis Generator. * This method sets the value of the database column config_item_history.env_name * * @param envName the value for config_item_history.env_name * * @mbggenerated */ public void setEnvName(String envName) { this.envName = envName == null ? null : envName.trim(); } /** * This method was generated by MyBatis Generator. * This method returns the value of the database column config_item_history.profile_id * * @return the value of config_item_history.profile_id * * @mbggenerated */ public Integer getProfileId() { return profileId; } /** * This method was generated by MyBatis Generator. * This method sets the value of the database column config_item_history.profile_id * * @param profileId the value for config_item_history.profile_id * * @mbggenerated */ public void setProfileId(Integer profileId) { this.profileId = profileId; } /** * This method was generated by MyBatis Generator. * This method returns the value of the database column config_item_history.profile_name * * @return the value of config_item_history.profile_name * * @mbggenerated */ public String getProfileName() { return profileName; } /** * This method was generated by MyBatis Generator. * This method sets the value of the database column config_item_history.profile_name * * @param profileName the value for config_item_history.profile_name * * @mbggenerated */ public void setProfileName(String profileName) { this.profileName = profileName == null ? null : profileName.trim(); } /** * This method was generated by MyBatis Generator. 
* This method returns the value of the database column config_item_history.config_id * * @return the value of config_item_history.config_id * * @mbggenerated */ public Long getConfigId() { return configId; } /** * This method was generated by MyBatis Generator. * This method sets the value of the database column config_item_history.config_id * * @param configId the value for config_item_history.config_id * * @mbggenerated */ public void setConfigId(Long configId) { this.configId = configId; } /** * This method was generated by MyBatis Generator. * This method returns the value of the database column config_item_history.config_name * * @return the value of config_item_history.config_name * * @mbggenerated */ public String getConfigName() { return configName; } /** * This method was generated by MyBatis Generator. * This method sets the value of the database column config_item_history.config_name * * @param configName the value for config_item_history.config_name * * @mbggenerated */ public void setConfigName(String configName) { this.configName = configName == null ? null : configName.trim(); } /** * This method was generated by MyBatis Generator. * This method returns the value of the database column config_item_history.config_value_id * * @return the value of config_item_history.config_value_id * * @mbggenerated */ public Long getConfigValueId() { return configValueId; } /** * This method was generated by MyBatis Generator. * This method sets the value of the database column config_item_history.config_value_id * * @param configValueId the value for config_item_history.config_value_id * * @mbggenerated */ public void setConfigValueId(Long configValueId) { this.configValueId = configValueId; } /** * This method was generated by MyBatis Generator. 
* This method returns the value of the database column config_item_history.config_value_type * * @return the value of config_item_history.config_value_type * * @mbggenerated */ public Byte getConfigValueType() { return configValueType; } /** * This method was generated by MyBatis Generator. * This method sets the value of the database column config_item_history.config_value_type * * @param configValueType the value for config_item_history.config_value_type * * @mbggenerated */ public void setConfigValueType(Byte configValueType) { this.configValueType = configValueType; } /** * This method was generated by MyBatis Generator. * This method returns the value of the database column config_item_history.config_tags * * @return the value of config_item_history.config_tags * * @mbggenerated */ public String getConfigTags() { return configTags; } /** * This method was generated by MyBatis Generator. * This method sets the value of the database column config_item_history.config_tags * * @param configTags the value for config_item_history.config_tags * * @mbggenerated */ public void setConfigTags(String configTags) { this.configTags = configTags == null ? null : configTags.trim(); } /** * This method was generated by MyBatis Generator. * This method returns the value of the database column config_item_history.create_time * * @return the value of config_item_history.create_time * * @mbggenerated */ public Date getCreateTime() { return createTime; } /** * This method was generated by MyBatis Generator. * This method sets the value of the database column config_item_history.create_time * * @param createTime the value for config_item_history.create_time * * @mbggenerated */ public void setCreateTime(Date createTime) { this.createTime = createTime; } /** * This method was generated by MyBatis Generator. 
* This method returns the value of the database column config_item_history.config_value * * @return the value of config_item_history.config_value * * @mbggenerated */ public String getConfigValue() { return configValue; } /** * This method was generated by MyBatis Generator. * This method sets the value of the database column config_item_history.config_value * * @param configValue the value for config_item_history.config_value * * @mbggenerated */ public void setConfigValue(String configValue) { this.configValue = configValue == null ? null : configValue.trim(); } }
/*
 * Copyright Terracotta, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.ehcache.spi;

import org.ehcache.spi.service.Service;
import org.ehcache.spi.service.ServiceConfiguration;
import org.ehcache.spi.service.ServiceDependencies;
import org.ehcache.spi.service.ServiceFactory;
import org.ehcache.spi.service.SupplementaryService;
import org.ehcache.util.ClassLoading;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Deque;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.ServiceLoader;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

/**
 * Registry and lifecycle coordinator for {@link Service} instances.
 *
 * <p>Services are indexed both by their concrete class and by every
 * {@code Service} sub-interface they implement, can be discovered on demand
 * via {@link ServiceFactory} implementations found through
 * {@link ClassLoading#libraryServiceLoaderFor}, and are started/stopped as a
 * group by {@link #startAllServices()} / {@link #stopAllServices()}.
 *
 * @author Alex Snaps
 */
public final class ServiceLocator implements ServiceProvider {

  private static final Logger LOGGER = LoggerFactory.getLogger(ServiceLocator.class);

  // Maps each registered service's concrete class AND (for non-supplementary
  // services) every Service sub-interface it implements to the instance.
  private final ConcurrentMap<Class<? extends Service>, Service> services = new ConcurrentHashMap<Class<? extends Service>, Service>();

  @SuppressWarnings("rawtypes")
  private final ServiceLoader<ServiceFactory> serviceFactory = ClassLoading.libraryServiceLoaderFor(ServiceFactory.class);

  // Read lock is taken per registration, write lock for the bulk start/stop
  // transitions, so individual registrations cannot interleave with a bulk
  // lifecycle change (many registrations may proceed concurrently).
  private final ReadWriteLock runningLock = new ReentrantReadWriteLock();

  private final AtomicBoolean running = new AtomicBoolean(false);

  /**
   * Creates a locator pre-populated with the given services.
   *
   * @param services services to register up front, in order
   */
  public ServiceLocator(Service... services) {
    for (Service service : services) {
      addService(service);
    }
  }

  /**
   * Creates and registers a service for {@code serviceClass} using the first
   * discovered {@link ServiceFactory} whose service type is compatible.
   *
   * @return the newly created service, or {@code null} if no factory matched
   */
  private <T extends Service> T discoverService(Class<T> serviceClass, ServiceConfiguration<T> config) {
    for (ServiceFactory<T> factory : ServiceLocator.<T> getServiceFactories(serviceFactory)) {
      if (serviceClass.isAssignableFrom(factory.getServiceType())) {
        T service = factory.create(config);
        // true: a discovered service must register cleanly or addService throws.
        addService(service, true);
        return service;
      }
    }
    return null;
  }

  // Snapshots the raw ServiceLoader into a typed list; the unchecked casts are
  // confined here.
  @SuppressWarnings("unchecked")
  private static <T extends Service> Iterable<ServiceFactory<T>> getServiceFactories(@SuppressWarnings("rawtypes") ServiceLoader<ServiceFactory> serviceFactory) {
    List<ServiceFactory<T>> list = new ArrayList<ServiceFactory<T>>();
    for (ServiceFactory<?> factory : serviceFactory) {
      list.add((ServiceFactory<T>)factory);
    }
    return list;
  }

  /** Registers a service; duplicate interface registrations are only logged. */
  public void addService(final Service service) {
    addService(service, false);
  }

  /**
   * Registers {@code service} under its concrete class and, unless annotated
   * {@link SupplementaryService}, under every Service sub-interface it
   * implements. Also resolves its {@link ServiceDependencies} and, if the
   * locator is already running, starts the service immediately.
   *
   * @param expectsAbstractRegistration when true, failing to claim at least
   *        one interface slot is an error instead of a debug log
   * @throws IllegalStateException on duplicate concrete-class registration, or
   *         on interface clash when {@code expectsAbstractRegistration} is set
   */
  void addService(final Service service, final boolean expectsAbstractRegistration) {
    final Lock lock = runningLock.readLock();
    lock.lock();
    try {
      // Collect every Service sub-interface (excluding Service itself) from
      // the whole class/interface hierarchy.
      Set<Class<? extends Service>> serviceClazzes = new HashSet<Class<? extends Service>>();
      for (Class<?> i : getAllInterfaces(service.getClass())) {
        if (Service.class != i && Service.class.isAssignableFrom(i)) {
          @SuppressWarnings("unchecked")
          Class<? extends Service> serviceClass = (Class<? extends Service>) i;
          serviceClazzes.add(serviceClass);
        }
      }
      if (services.putIfAbsent(service.getClass(), service) != null) {
        throw new IllegalStateException("Registration of duplicate service " + service.getClass());
      }
      if (!service.getClass().isAnnotationPresent(SupplementaryService.class)) {
        // Try to claim each interface slot; "registered" records whether we
        // won at least one. Supplementary services skip interface slots
        // entirely and are only reachable by concrete class.
        boolean registered = false;
        for (Class<? extends Service> serviceClazz : serviceClazzes) {
          if (services.putIfAbsent(serviceClazz, service) == null && !registered) {
            registered = true;
          }
        }
        if (!registered) {
          // Every interface was already taken: build a diagnostic listing the
          // current owner of each slot.
          final StringBuilder message = new StringBuilder("Duplicate service implementation found for ").append(serviceClazzes)
              .append(" by ")
              .append(service.getClass());
          for (Class<? extends Service> serviceClass : serviceClazzes) {
            final Service declaredService = services.get(serviceClass);
            if (declaredService != null) {
              message
                  .append("\n\t\t- ")
                  .append(serviceClass)
                  .append(" already has ")
                  .append(declaredService.getClass());
            }
          }
          if (expectsAbstractRegistration) {
            throw new IllegalStateException(message.toString());
          }
          LOGGER.debug(message.toString());
        }
      }
      // May recursively discover and register dependency services.
      loadDependenciesOf(service.getClass());
      if (running.get()) {
        // Locator already started: bring the late arrival up immediately.
        service.start(this);
      }
    } finally {
      lock.unlock();
    }
  }

  // Recursively gathers all interfaces implemented anywhere in the class
  // hierarchy. May contain duplicates (it's a plain list); callers de-dup
  // through a HashSet.
  private Collection<Class<?>> getAllInterfaces(final Class<?> clazz) {
    ArrayList<Class<?>> interfaces = new ArrayList<Class<?>>();
    for(Class<?> c = clazz; c != null; c = c.getSuperclass()) {
      for (Class<?> i : c.getInterfaces()) {
        interfaces.add(i);
        interfaces.addAll(getAllInterfaces(i));
      }
    }
    return interfaces;
  }

  /** Looks up the service for {@code config}, discovering one if absent. */
  public <T extends Service> T getOrCreateServiceFor(ServiceConfiguration<T> config) {
    return findService(config.getServiceType(), config, true);
  }

  /** Pure lookup: returns the registered service or {@code null}; never creates. */
  @Override
  public <T extends Service> T getService(Class<T> serviceType) {
    return findService(serviceType, null, false);
  }

  private <T extends Service> T findService(Class<T> serviceType, ServiceConfiguration<T> config, boolean shouldCreate) {
    T service = serviceType.cast(services.get(serviceType));
    if (service == null && shouldCreate) {
      return discoverService(serviceType, config);
    } else {
      return service;
    }
  }

  /**
   * Filters {@code instances} down to the non-null ones assignable to
   * {@code clazz}.
   *
   * @return an unmodifiable collection of the matches (possibly empty)
   */
  public static <T> Collection<T> findAmongst(Class<T> clazz, Object ... instances) {
    Collection<T> matches = new ArrayList<T>();
    for (Object instance : instances) {
      if (instance != null && clazz.isAssignableFrom(instance.getClass())) {
        matches.add(clazz.cast(instance));
      }
    }
    return Collections.unmodifiableCollection(matches);
  }

  /**
   * Like {@link #findAmongst} but expects at most one match.
   *
   * @return the single match, or {@code null} if there is none
   * @throws IllegalArgumentException if more than one instance matches
   */
  public static <T> T findSingletonAmongst(Class<T> clazz, Object ... instances) {
    final Collection<T> matches = findAmongst(clazz, instances);
    if (matches.isEmpty()) {
      return null;
    } else if (matches.size() == 1) {
      return matches.iterator().next();
    } else {
      // NOTE(review): thrown without a message; callers get no hint about
      // which type had multiple matches.
      throw new IllegalArgumentException();
    }
  }

  /**
   * Starts every registered service exactly once, under the write lock. On
   * any failure, already-started services are stopped in reverse start order
   * and the original exception is rethrown.
   *
   * @throws IllegalStateException if already started
   */
  public void startAllServices() throws Exception {
    Deque<Service> started = new ArrayDeque<Service>();
    final Lock lock = runningLock.writeLock();
    lock.lock();
    try {
      if (!running.compareAndSet(false, true)) {
        throw new IllegalStateException("Already started!");
      }
      for (Service service : services.values()) {
        // values() may yield the same service under several keys; the
        // contains() check makes each service start only once.
        if (!started.contains(service)) {
          service.start(this);
          started.push(service);
        }
      }
      LOGGER.info("All Services successfully started.");
    } catch (Exception e) {
      // Roll back: stop in LIFO order whatever was started before the failure.
      while(!started.isEmpty()) {
        Service toBeStopped = started.pop();
        try {
          toBeStopped.stop();
        } catch (Exception e1) {
          LOGGER.error("Stopping Service failed due to ", e1);
        }
      }
      throw e;
    } finally {
      lock.unlock();
    }
  }

  /**
   * Stops every registered service exactly once (identity-based de-dup),
   * continuing past failures; the first failure is rethrown after all
   * services have been given a chance to stop, later ones are only logged.
   *
   * @throws IllegalStateException if already stopped
   */
  public void stopAllServices() throws Exception {
    Exception firstException = null;
    Lock lock = runningLock.writeLock();
    lock.lock();
    try {
      if(!running.compareAndSet(true, false)) {
        throw new IllegalStateException("Already stopped!");
      }
      // Identity set: a service registered under multiple keys is stopped once.
      Set<Service> stoppedServices = Collections.newSetFromMap(new IdentityHashMap<Service, Boolean>());
      for (Service service : services.values()) {
        if (stoppedServices.contains(service)) {
          continue;
        }
        try {
          service.stop();
        } catch (Exception e) {
          if (firstException == null) {
            firstException = e;
          } else {
            LOGGER.error("Stopping Service failed due to ", e);
          }
        }
        stoppedServices.add(service);
      }
    } finally {
      lock.unlock();
    }
    // Rethrow outside the lock so the unlock always happens first.
    if(firstException != null) {
      throw firstException;
    }
  }

  /**
   * Resolves the {@link ServiceDependencies} declared on {@code clazz},
   * discovering (and registering) each dependency if it is not yet present.
   *
   * @throws IllegalStateException if a declared dependency cannot be resolved
   */
  public void loadDependenciesOf(Class<?> clazz) {
    ServiceDependencies annotation = clazz.getAnnotation(ServiceDependencies.class);
    if (annotation != null) {
      for (Class aClass : annotation.value()) {
        if (findService(aClass, null, true) == null) {
          throw new IllegalStateException("Unable to resolve dependent service: " + aClass.getSimpleName());
        }
      }
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.accumulo.core.clientImpl; import java.io.IOException; import java.security.SecureRandom; import java.util.ArrayList; import java.util.Collections; import java.util.EnumMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Random; import java.util.Set; import java.util.SortedMap; import java.util.SortedSet; import java.util.concurrent.TimeUnit; import org.apache.accumulo.core.Constants; import org.apache.accumulo.core.client.AccumuloException; import org.apache.accumulo.core.client.AccumuloSecurityException; import org.apache.accumulo.core.client.SampleNotPresentException; import org.apache.accumulo.core.client.TableDeletedException; import org.apache.accumulo.core.client.TableNotFoundException; import org.apache.accumulo.core.client.TableOfflineException; import org.apache.accumulo.core.client.sample.SamplerConfiguration; import org.apache.accumulo.core.clientImpl.TabletLocator.TabletLocation; import org.apache.accumulo.core.clientImpl.thrift.ThriftSecurityException; import org.apache.accumulo.core.conf.Property; import org.apache.accumulo.core.data.Column; import org.apache.accumulo.core.data.Key; import 
org.apache.accumulo.core.data.KeyValue; import org.apache.accumulo.core.data.PartialKey; import org.apache.accumulo.core.data.Range; import org.apache.accumulo.core.data.TableId; import org.apache.accumulo.core.data.Value; import org.apache.accumulo.core.dataImpl.KeyExtent; import org.apache.accumulo.core.dataImpl.thrift.InitialScan; import org.apache.accumulo.core.dataImpl.thrift.IterInfo; import org.apache.accumulo.core.dataImpl.thrift.ScanResult; import org.apache.accumulo.core.dataImpl.thrift.TKeyValue; import org.apache.accumulo.core.master.state.tables.TableState; import org.apache.accumulo.core.rpc.ThriftUtil; import org.apache.accumulo.core.sample.impl.SamplerConfigurationImpl; import org.apache.accumulo.core.security.Authorizations; import org.apache.accumulo.core.tabletserver.thrift.NoSuchScanIDException; import org.apache.accumulo.core.tabletserver.thrift.NotServingTabletException; import org.apache.accumulo.core.tabletserver.thrift.TSampleNotPresentException; import org.apache.accumulo.core.tabletserver.thrift.TabletClientService; import org.apache.accumulo.core.tabletserver.thrift.TooManyFilesException; import org.apache.accumulo.core.trace.TraceUtil; import org.apache.accumulo.core.trace.thrift.TInfo; import org.apache.accumulo.core.util.HostAndPort; import org.apache.accumulo.core.util.OpTimer; import org.apache.hadoop.io.Text; import org.apache.htrace.Trace; import org.apache.htrace.TraceScope; import org.apache.thrift.TApplicationException; import org.apache.thrift.TException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class ThriftScanner { private static final Logger log = LoggerFactory.getLogger(ThriftScanner.class); public static final Map<TabletType,Set<String>> serversWaitedForWrites = new EnumMap<>(TabletType.class); private static Random secureRandom = new SecureRandom(); static { for (TabletType ttype : TabletType.values()) { serversWaitedForWrites.put(ttype, Collections.synchronizedSet(new HashSet<>())); } } public 
static boolean getBatchFromServer(ClientContext context, Range range, KeyExtent extent, String server, SortedMap<Key,Value> results, SortedSet<Column> fetchedColumns, List<IterInfo> serverSideIteratorList, Map<String,Map<String,String>> serverSideIteratorOptions, int size, Authorizations authorizations, long batchTimeOut, String classLoaderContext) throws AccumuloException, AccumuloSecurityException { if (server == null) throw new AccumuloException(new IOException()); final HostAndPort parsedServer = HostAndPort.fromString(server); try { TInfo tinfo = TraceUtil.traceInfo(); TabletClientService.Client client = ThriftUtil.getTServerClient(parsedServer, context); try { // not reading whole rows (or stopping on row boundaries) so there is no need to enable // isolation below ScanState scanState = new ScanState(context, extent.getTableId(), authorizations, range, fetchedColumns, size, serverSideIteratorList, serverSideIteratorOptions, false, Constants.SCANNER_DEFAULT_READAHEAD_THRESHOLD, null, batchTimeOut, classLoaderContext, null); TabletType ttype = TabletType.type(extent); boolean waitForWrites = !serversWaitedForWrites.get(ttype).contains(server); InitialScan isr = client.startScan(tinfo, scanState.context.rpcCreds(), extent.toThrift(), scanState.range.toThrift(), Translator.translate(scanState.columns, Translators.CT), scanState.size, scanState.serverSideIteratorList, scanState.serverSideIteratorOptions, scanState.authorizations.getAuthorizationsBB(), waitForWrites, scanState.isolated, scanState.readaheadThreshold, null, scanState.batchTimeOut, classLoaderContext, scanState.executionHints); if (waitForWrites) serversWaitedForWrites.get(ttype).add(server); Key.decompress(isr.result.results); for (TKeyValue kv : isr.result.results) results.put(new Key(kv.key), new Value(kv.value)); client.closeScan(tinfo, isr.scanID); return isr.result.more; } finally { ThriftUtil.returnClient(client); } } catch (TApplicationException tae) { throw new AccumuloServerException(server, 
tae); } catch (TooManyFilesException e) { log.debug("Tablet ({}) has too many files {} : {}", extent, server, e.getMessage()); } catch (ThriftSecurityException e) { log.warn("Security Violation in scan request to {}: {}", server, e.getMessage()); throw new AccumuloSecurityException(e.user, e.code, e); } catch (TException e) { log.debug("Error getting transport to {}: {}", server, e.getMessage()); } throw new AccumuloException("getBatchFromServer: failed"); } public static class ScanState { boolean isolated; TableId tableId; Text startRow; boolean skipStartRow; long readaheadThreshold; long batchTimeOut; Range range; int size; ClientContext context; Authorizations authorizations; List<Column> columns; TabletLocation prevLoc; Long scanID; String classLoaderContext; boolean finished = false; List<IterInfo> serverSideIteratorList; Map<String,Map<String,String>> serverSideIteratorOptions; SamplerConfiguration samplerConfig; Map<String,String> executionHints; public ScanState(ClientContext context, TableId tableId, Authorizations authorizations, Range range, SortedSet<Column> fetchedColumns, int size, List<IterInfo> serverSideIteratorList, Map<String,Map<String,String>> serverSideIteratorOptions, boolean isolated, long readaheadThreshold, SamplerConfiguration samplerConfig, long batchTimeOut, String classLoaderContext, Map<String,String> executionHints) { this.context = context; this.authorizations = authorizations; this.classLoaderContext = classLoaderContext; columns = new ArrayList<>(fetchedColumns.size()); for (Column column : fetchedColumns) { columns.add(column); } this.tableId = tableId; this.range = range; Key startKey = range.getStartKey(); if (startKey == null) { startKey = new Key(); } this.startRow = startKey.getRow(); this.skipStartRow = false; this.size = size; this.serverSideIteratorList = serverSideIteratorList; this.serverSideIteratorOptions = serverSideIteratorOptions; this.isolated = isolated; this.readaheadThreshold = readaheadThreshold; 
this.samplerConfig = samplerConfig; this.batchTimeOut = batchTimeOut; if (executionHints == null || executionHints.size() == 0) this.executionHints = null; // avoid thrift serialization for empty map else this.executionHints = executionHints; } } public static class ScanTimedOutException extends IOException { private static final long serialVersionUID = 1L; } static long pause(long millis, long maxSleep) throws InterruptedException { Thread.sleep(millis); // wait 2 * last time, with +-10% random jitter return (long) (Math.min(millis * 2, maxSleep) * (.9 + secureRandom.nextDouble() / 5)); } public static List<KeyValue> scan(ClientContext context, ScanState scanState, long timeOut) throws ScanTimedOutException, AccumuloException, AccumuloSecurityException, TableNotFoundException { TabletLocation loc = null; long startTime = System.currentTimeMillis(); String lastError = null; String error = null; int tooManyFilesCount = 0; long sleepMillis = 100; final long maxSleepTime = context.getConfiguration().getTimeInMillis(Property.GENERAL_MAX_SCANNER_RETRY_PERIOD); List<KeyValue> results = null; try (TraceScope span = Trace.startSpan("scan")) { while (results == null && !scanState.finished) { if (Thread.currentThread().isInterrupted()) { throw new AccumuloException("Thread interrupted"); } if ((System.currentTimeMillis() - startTime) / 1000.0 > timeOut) throw new ScanTimedOutException(); while (loc == null) { long currentTime = System.currentTimeMillis(); if ((currentTime - startTime) / 1000.0 > timeOut) throw new ScanTimedOutException(); try (TraceScope locateSpan = Trace.startSpan("scan:locateTablet")) { loc = TabletLocator.getLocator(context, scanState.tableId).locateTablet(context, scanState.startRow, scanState.skipStartRow, false); if (loc == null) { if (!Tables.exists(context, scanState.tableId)) throw new TableDeletedException(scanState.tableId.canonical()); else if (Tables.getTableState(context, scanState.tableId) == TableState.OFFLINE) throw new 
TableOfflineException( Tables.getTableOfflineMsg(context, scanState.tableId)); error = "Failed to locate tablet for table : " + scanState.tableId + " row : " + scanState.startRow; if (!error.equals(lastError)) log.debug("{}", error); else if (log.isTraceEnabled()) log.trace("{}", error); lastError = error; sleepMillis = pause(sleepMillis, maxSleepTime); } else { // when a tablet splits we do want to continue scanning the low child // of the split if we are already passed it Range dataRange = loc.tablet_extent.toDataRange(); if (scanState.range.getStartKey() != null && dataRange.afterEndKey(scanState.range.getStartKey())) { // go to the next tablet scanState.startRow = loc.tablet_extent.getEndRow(); scanState.skipStartRow = true; loc = null; } else if (scanState.range.getEndKey() != null && dataRange.beforeStartKey(scanState.range.getEndKey())) { // should not happen throw new RuntimeException("Unexpected tablet, extent : " + loc.tablet_extent + " range : " + scanState.range + " startRow : " + scanState.startRow); } } } catch (AccumuloServerException e) { log.debug("Scan failed, server side exception : {}", e.getMessage()); throw e; } catch (AccumuloException e) { error = "exception from tablet loc " + e.getMessage(); if (!error.equals(lastError)) log.debug("{}", error); else if (log.isTraceEnabled()) log.trace("{}", error); lastError = error; sleepMillis = pause(sleepMillis, maxSleepTime); } } try (TraceScope scanLocation = Trace.startSpan("scan:location")) { if (scanLocation.getSpan() != null) { scanLocation.getSpan().addKVAnnotation("tserver", loc.tablet_location); } results = scan(loc, scanState, context); } catch (AccumuloSecurityException e) { Tables.clearCache(context); if (!Tables.exists(context, scanState.tableId)) throw new TableDeletedException(scanState.tableId.canonical()); e.setTableInfo(Tables.getPrintableTableInfoFromId(context, scanState.tableId)); throw e; } catch (TApplicationException tae) { throw new AccumuloServerException(loc.tablet_location, 
tae); } catch (TSampleNotPresentException tsnpe) { String message = "Table " + Tables.getPrintableTableInfoFromId(context, scanState.tableId) + " does not have sampling configured or built"; throw new SampleNotPresentException(message, tsnpe); } catch (NotServingTabletException e) { error = "Scan failed, not serving tablet " + loc; if (!error.equals(lastError)) log.debug("{}", error); else if (log.isTraceEnabled()) log.trace("{}", error); lastError = error; TabletLocator.getLocator(context, scanState.tableId).invalidateCache(loc.tablet_extent); loc = null; // no need to try the current scan id somewhere else scanState.scanID = null; if (scanState.isolated) throw new IsolationException(); sleepMillis = pause(sleepMillis, maxSleepTime); } catch (NoSuchScanIDException e) { error = "Scan failed, no such scan id " + scanState.scanID + " " + loc; if (!error.equals(lastError)) log.debug("{}", error); else if (log.isTraceEnabled()) log.trace("{}", error); lastError = error; if (scanState.isolated) throw new IsolationException(); scanState.scanID = null; } catch (TooManyFilesException e) { error = "Tablet has too many files " + loc + " retrying..."; if (!error.equals(lastError)) { log.debug("{}", error); tooManyFilesCount = 0; } else { tooManyFilesCount++; if (tooManyFilesCount == 300) log.warn("{}", error); else if (log.isTraceEnabled()) log.trace("{}", error); } lastError = error; // not sure what state the scan session on the server side is // in after this occurs, so lets be cautious and start a new // scan session scanState.scanID = null; if (scanState.isolated) throw new IsolationException(); sleepMillis = pause(sleepMillis, maxSleepTime); } catch (TException e) { TabletLocator.getLocator(context, scanState.tableId).invalidateCache(context, loc.tablet_location); error = "Scan failed, thrift error " + e.getClass().getName() + " " + e.getMessage() + " " + loc; if (!error.equals(lastError)) log.debug("{}", error); else if (log.isTraceEnabled()) log.trace("{}", error); 
lastError = error; loc = null; // do not want to continue using the same scan id, if a timeout occurred could cause a // batch to be skipped // because a thread on the server side may still be processing the timed out continue scan scanState.scanID = null; if (scanState.isolated) throw new IsolationException(); sleepMillis = pause(sleepMillis, maxSleepTime); } } if (results != null && results.size() == 0 && scanState.finished) { results = null; } return results; } catch (InterruptedException ex) { throw new AccumuloException(ex); } } private static List<KeyValue> scan(TabletLocation loc, ScanState scanState, ClientContext context) throws AccumuloSecurityException, NotServingTabletException, TException, NoSuchScanIDException, TooManyFilesException, TSampleNotPresentException { if (scanState.finished) return null; OpTimer timer = null; final TInfo tinfo = TraceUtil.traceInfo(); final HostAndPort parsedLocation = HostAndPort.fromString(loc.tablet_location); TabletClientService.Client client = ThriftUtil.getTServerClient(parsedLocation, context); String old = Thread.currentThread().getName(); try { ScanResult sr; if (scanState.prevLoc != null && !scanState.prevLoc.equals(loc)) scanState.scanID = null; scanState.prevLoc = loc; if (scanState.scanID == null) { Thread.currentThread().setName("Starting scan tserver=" + loc.tablet_location + " tableId=" + loc.tablet_extent.getTableId()); if (log.isTraceEnabled()) { String msg = "Starting scan tserver=" + loc.tablet_location + " tablet=" + loc.tablet_extent + " range=" + scanState.range + " ssil=" + scanState.serverSideIteratorList + " ssio=" + scanState.serverSideIteratorOptions + " context=" + scanState.classLoaderContext; log.trace("tid={} {}", Thread.currentThread().getId(), msg); timer = new OpTimer().start(); } TabletType ttype = TabletType.type(loc.tablet_extent); boolean waitForWrites = !serversWaitedForWrites.get(ttype).contains(loc.tablet_location); InitialScan is = client.startScan(tinfo, 
scanState.context.rpcCreds(), loc.tablet_extent.toThrift(), scanState.range.toThrift(), Translator.translate(scanState.columns, Translators.CT), scanState.size, scanState.serverSideIteratorList, scanState.serverSideIteratorOptions, scanState.authorizations.getAuthorizationsBB(), waitForWrites, scanState.isolated, scanState.readaheadThreshold, SamplerConfigurationImpl.toThrift(scanState.samplerConfig), scanState.batchTimeOut, scanState.classLoaderContext, scanState.executionHints); if (waitForWrites) serversWaitedForWrites.get(ttype).add(loc.tablet_location); sr = is.result; if (sr.more) scanState.scanID = is.scanID; else client.closeScan(tinfo, is.scanID); } else { // log.debug("Calling continue scan : "+scanState.range+" loc = "+loc); String msg = "Continuing scan tserver=" + loc.tablet_location + " scanid=" + scanState.scanID; Thread.currentThread().setName(msg); if (log.isTraceEnabled()) { log.trace("tid={} {}", Thread.currentThread().getId(), msg); timer = new OpTimer().start(); } sr = client.continueScan(tinfo, scanState.scanID); if (!sr.more) { client.closeScan(tinfo, scanState.scanID); scanState.scanID = null; } } if (!sr.more) { // log.debug("No more : tab end row = "+loc.tablet_extent.getEndRow()+" range = // "+scanState.range); if (loc.tablet_extent.getEndRow() == null) { scanState.finished = true; if (timer != null) { timer.stop(); log.trace("tid={} Completely finished scan in {} #results={}", Thread.currentThread().getId(), String.format("%.3f secs", timer.scale(TimeUnit.SECONDS)), sr.results.size()); } } else if (scanState.range.getEndKey() == null || !scanState.range .afterEndKey(new Key(loc.tablet_extent.getEndRow()).followingKey(PartialKey.ROW))) { scanState.startRow = loc.tablet_extent.getEndRow(); scanState.skipStartRow = true; if (timer != null) { timer.stop(); log.trace("tid={} Finished scanning tablet in {} #results={}", Thread.currentThread().getId(), String.format("%.3f secs", timer.scale(TimeUnit.SECONDS)), sr.results.size()); } } else { 
scanState.finished = true; if (timer != null) { timer.stop(); log.trace("tid={} Completely finished in {} #results={}", Thread.currentThread().getId(), String.format("%.3f secs", timer.scale(TimeUnit.SECONDS)), sr.results.size()); } } } else { if (timer != null) { timer.stop(); log.trace("tid={} Finished scan in {} #results={} scanid={}", Thread.currentThread().getId(), String.format("%.3f secs", timer.scale(TimeUnit.SECONDS)), sr.results.size(), scanState.scanID); } } Key.decompress(sr.results); if (sr.results.size() > 0 && !scanState.finished) scanState.range = new Range(new Key(sr.results.get(sr.results.size() - 1).key), false, scanState.range.getEndKey(), scanState.range.isEndKeyInclusive()); List<KeyValue> results = new ArrayList<>(sr.results.size()); for (TKeyValue tkv : sr.results) results.add(new KeyValue(new Key(tkv.key), tkv.value)); return results; } catch (ThriftSecurityException e) { throw new AccumuloSecurityException(e.user, e.code, e); } finally { ThriftUtil.returnClient(client); Thread.currentThread().setName(old); } } static void close(ScanState scanState) { if (!scanState.finished && scanState.scanID != null && scanState.prevLoc != null) { TInfo tinfo = TraceUtil.traceInfo(); log.debug("Closing active scan {} {}", scanState.prevLoc, scanState.scanID); HostAndPort parsedLocation = HostAndPort.fromString(scanState.prevLoc.tablet_location); TabletClientService.Client client = null; try { client = ThriftUtil.getTServerClient(parsedLocation, scanState.context); client.closeScan(tinfo, scanState.scanID); } catch (TException e) { // ignore this is a best effort log.debug("Failed to close active scan " + scanState.prevLoc + " " + scanState.scanID, e); } finally { if (client != null) ThriftUtil.returnClient(client); } } } }
/* * ice4j, the OpenSource Java Solution for NAT and Firewall Traversal. * * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.ice4j.attribute; import static org.junit.jupiter.api.Assertions.*; import org.ice4j.*; import org.junit.jupiter.api.*; /** * Class to test the RequestedAddressFamilyAttribute class. * * @author Aakash Garg * */ public class RequestedAddressFamilyAttributeTest { private RequestedAddressFamilyAttribute requestedAddressFamilyAttribute = null; private MsgFixture msgFixture; @BeforeEach public void setUp() throws Exception { this.requestedAddressFamilyAttribute = new RequestedAddressFamilyAttribute(); this.msgFixture = new MsgFixture(); } @AfterEach public void tearDown() throws Exception { this.requestedAddressFamilyAttribute = null; this.msgFixture = null; } /** * Tests whether data length is properly calculated. 
*/ @Test public void testGetDataLength() { char expectedReturn = 1; this.requestedAddressFamilyAttribute .setFamily(MsgFixture.REQUESTED_ADDRESS_FAMILY_ATTRIBUTE_V4); char actualReturn = this.requestedAddressFamilyAttribute.getDataLength(); assertEquals(expectedReturn, actualReturn, "Datalength is not properly calculated"); expectedReturn = 1; this.requestedAddressFamilyAttribute .setFamily(MsgFixture.REQUESTED_ADDRESS_FAMILY_ATTRIBUTE_V6); actualReturn = this.requestedAddressFamilyAttribute.getDataLength(); assertEquals(expectedReturn, actualReturn, "Datalength is not properly calculated"); } /** * Tests getting the name. */ @Test public void testGetName() { assertEquals("REQUESTED-ADDRESS-FAMILY", requestedAddressFamilyAttribute.getName()); } /** * Tests the equals method against a null, a different and an identical * object. */ @Test public void testEqualsObject() { // null test assertNotEquals(requestedAddressFamilyAttribute, null); // difference test RequestedAddressFamilyAttribute target; target = new RequestedAddressFamilyAttribute(); char family = MsgFixture.REQUESTED_ADDRESS_FAMILY_ATTRIBUTE_V6; target.setFamily(family); requestedAddressFamilyAttribute .setFamily(MsgFixture.REQUESTED_ADDRESS_FAMILY_ATTRIBUTE_V4); assertNotEquals(requestedAddressFamilyAttribute, target); // equality test target.setFamily(MsgFixture.REQUESTED_ADDRESS_FAMILY_ATTRIBUTE_V4); assertEquals(requestedAddressFamilyAttribute, target); // ipv6 equality test target.setFamily(MsgFixture.REQUESTED_ADDRESS_FAMILY_ATTRIBUTE_V6); requestedAddressFamilyAttribute .setFamily(MsgFixture.REQUESTED_ADDRESS_FAMILY_ATTRIBUTE_V6); assertEquals(requestedAddressFamilyAttribute, target); } /** * Test whether sample binary arrays are correctly decoded. * * @throws StunException if something goes wrong while decoding * Attribute Body. 
*/ @Test public void testDecodeAttributeBodyV4() throws StunException { byte[] attributeValue = msgFixture.requestedAddressFamilyV4; char offset = Attribute.HEADER_LENGTH; char length = (char) (attributeValue.length - offset); requestedAddressFamilyAttribute.decodeAttributeBody( attributeValue, offset, length); assertEquals( MsgFixture.REQUESTED_ADDRESS_FAMILY_ATTRIBUTE_V4, requestedAddressFamilyAttribute.getFamily(), "RequestedAddressFamilyAttribute.decode() did not properly decode the family field." ); } /** * Test whether sample binary arrays are correctly decoded. * * @throws StunException if something goes wrong while decoding * Attribute Body. */ @Test public void testDecodeAttributeBodyV6() throws StunException { byte[] attributeValue = msgFixture.requestedAddressFamilyV6; char offset = Attribute.HEADER_LENGTH; char length = (char) (attributeValue.length - offset); requestedAddressFamilyAttribute.decodeAttributeBody( attributeValue, offset, length); assertEquals( MsgFixture.REQUESTED_ADDRESS_FAMILY_ATTRIBUTE_V6, requestedAddressFamilyAttribute.getFamily(), "RequestedAddressFamilyAttribute.decode() did not properly decode." ); } /** * Test whether attributes are properly encoded. */ @Test public void testEncodeV4() { byte[] expectedReturn = msgFixture.requestedAddressFamilyV4; requestedAddressFamilyAttribute .setFamily(MsgFixture.REQUESTED_ADDRESS_FAMILY_ATTRIBUTE_V4); byte[] actualReturn = requestedAddressFamilyAttribute.encode(); assertArrayEquals( expectedReturn, actualReturn, "RequestedAddressFamilyAttribute.encode() did not properly encode a sample attribute for IPv4 family" ); } /** * Test whether attributes are properly encoded. 
*/ @Test public void testEncodeV6() { byte[] expectedReturn = msgFixture.requestedAddressFamilyV6; requestedAddressFamilyAttribute .setFamily(MsgFixture.REQUESTED_ADDRESS_FAMILY_ATTRIBUTE_V6); byte[] actualReturn = requestedAddressFamilyAttribute.encode(); assertArrayEquals( expectedReturn, actualReturn, "RequestedAddressFamilyAttribute.encode() did not properly encode a sample attribute for IPv6 family" ); } /** * Tests that the address family is always 0x01 or 0x02. */ @Test public void testGetFamily() { char expectedReturn = 0x01; this.requestedAddressFamilyAttribute .setFamily(MsgFixture.REQUESTED_ADDRESS_FAMILY_ATTRIBUTE_V4); char actualReturn = this.requestedAddressFamilyAttribute.getFamily(); assertEquals(expectedReturn, actualReturn, "Family is not properly calculated"); expectedReturn = 0x02; this.requestedAddressFamilyAttribute .setFamily(MsgFixture.REQUESTED_ADDRESS_FAMILY_ATTRIBUTE_V6); actualReturn = this.requestedAddressFamilyAttribute.getFamily(); assertEquals(expectedReturn, actualReturn, "Family is not properly calculated"); } }
/*
 * ARX: Powerful Data Anonymization
 * Copyright 2012 - 2017 Fabian Prasser, Florian Kohlmayer and contributors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.deidentifier.arx.gui.view.impl.common.datatable;

import java.util.ArrayList;
import java.util.List;

import org.deidentifier.arx.DataHandle;
import org.deidentifier.arx.RowSet;
import org.deidentifier.arx.gui.Controller;
import org.eclipse.nebula.widgets.nattable.NatTable;
import org.eclipse.nebula.widgets.nattable.layer.ILayerListener;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.graphics.Image;

/**
 * A context for the data view. It provides all necessary data to be displayed.
 * @author Fabian Prasser
 */
public class DataTableContext {

    /** The font used for rendering the table, or null for the default. */
    private Font                 font          = null;

    /** The header images. */
    private List<Image>          images        = new ArrayList<Image>();

    /** The registered layer listeners. */
    private List<ILayerListener> listeners     = new ArrayList<ILayerListener>();

    /** The research subset, or null if none. */
    private RowSet               rows          = null;

    /** Information about the equivalence classes, or null if none. */
    private int[]                groups        = null;

    /** The data handle, or null if none. */
    private DataHandle           handle        = null;

    /** The controller. */
    private Controller           controller    = null;

    /** The selected column index, or -1 if nothing is selected. */
    private int                  selectedIndex = -1;

    /** The underlying NatTable widget, or null if not yet attached. */
    private NatTable             table         = null;

    /**
     * Creates a new instance.
     *
     * @param controller the controller backing this context
     */
    public DataTableContext(Controller controller) {
        this.controller = controller;
    }

    /**
     * Returns the controller.
     *
     * @return the controller
     */
    public Controller getController() {
        return this.controller;
    }

    /**
     * Returns the font.
     *
     * @return the font, or null if none has been set
     */
    public Font getFont() {
        return this.font;
    }

    /**
     * Returns information about the equivalence classes.
     *
     * @return the group array, or null if none has been set
     */
    public int[] getGroups() {
        return this.groups;
    }

    /**
     * Returns the data handle, if any.
     *
     * @return the data handle, or null
     */
    public DataHandle getHandle() {
        return this.handle;
    }

    /**
     * Returns the header images.
     *
     * @return the header images
     */
    public List<Image> getImages() {
        return this.images;
    }

    /**
     * Returns all layer listeners.
     *
     * @return the layer listeners
     */
    public List<ILayerListener> getListeners() {
        return this.listeners;
    }

    /**
     * Returns the research subset.
     *
     * @return the research subset, or null
     */
    public RowSet getRows() {
        return this.rows;
    }

    /**
     * Returns the selected index.
     *
     * @return the selected index, or -1 if nothing is selected
     */
    public int getSelectedIndex() {
        return this.selectedIndex;
    }

    /**
     * Returns the underlying table.
     *
     * @return the table, or null
     */
    public NatTable getTable() {
        return this.table;
    }

    /**
     * Returns whether a column is expanded. Not implemented yet: always false.
     *
     * @return false
     */
    public boolean isColumnExpanded() {
        // Stub: column expansion is not implemented.
        return false;
    }

    /**
     * Returns whether a row is expanded. Not implemented yet: always false.
     *
     * @return false
     */
    public boolean isRowExpanded() {
        // Stub: row expansion is not implemented.
        return false;
    }

    /**
     * Resets the context: clears handle, subset, groups and header images.
     * The controller, font, selection and table reference are kept.
     */
    public void reset() {
        this.handle = null;
        this.rows = null;
        this.groups = null;
        this.images.clear();
    }

    /**
     * Sets the font.
     *
     * @param font the font to use
     */
    public void setFont(Font font) {
        this.font = font;
    }

    /**
     * Sets the groups.
     *
     * @param groups information about the equivalence classes
     */
    public void setGroups(int[] groups) {
        this.groups = groups;
    }

    /**
     * Sets a data handle.
     *
     * @param handle the data handle
     */
    public void setHandle(DataHandle handle) {
        this.handle = handle;
    }

    /**
     * Sets the header images.
     *
     * @param images the header images
     */
    public void setImages(List<Image> images) {
        this.images = images;
    }

    /**
     * Sets the layer listeners.
     *
     * @param listeners the layer listeners
     */
    public void setListeners(List<ILayerListener> listeners) {
        this.listeners = listeners;
    }

    /**
     * Sets the research subset.
     *
     * @param rows the research subset
     */
    public void setRows(RowSet rows) {
        this.rows = rows;
    }

    /**
     * Sets the selected column index.
     *
     * @param index the selected index, or -1 for none
     */
    public void setSelectedIndex(int index) {
        this.selectedIndex = index;
    }

    /**
     * Sets the underlying table.
     *
     * @param table the table
     */
    public void setTable(NatTable table) {
        this.table = table;
    }
}
/*******************************************************************************
 * Copyright (c) 2000, 2008 IBM Corporation and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *     IBM Corporation - initial API and implementation
 *******************************************************************************/
package org.eclipse.ui.texteditor.spelling;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.eclipse.core.runtime.Assert;

import org.eclipse.jface.text.BadLocationException;
import org.eclipse.jface.text.IDocument;
import org.eclipse.jface.text.Position;
import org.eclipse.jface.text.contentassist.ICompletionProposal;
import org.eclipse.jface.text.quickassist.IQuickAssistInvocationContext;
import org.eclipse.jface.text.source.Annotation;
import org.eclipse.jface.text.source.IAnnotationModel;
import org.eclipse.jface.text.source.IAnnotationModelExtension;
import org.eclipse.jface.text.source.ISourceViewer;

import org.eclipse.ui.IEditorInput;
import org.eclipse.ui.texteditor.IDocumentProvider;
import org.eclipse.ui.texteditor.ITextEditor;

/**
 * A spelling problem as reported by the {@link SpellingService} service to the
 * {@link ISpellingProblemCollector}.
 * <p>
 * This class is intended to be subclassed by clients.
 * </p>
 *
 * @see SpellingService
 * @see ISpellingProblemCollector
 * @since 3.1
 */
public abstract class SpellingProblem {

	/**
	 * Removes all spelling problems that are reported
	 * for the given <code>word</code> in the active editor.
	 * <p>
	 * <em>This a workaround to fix bug https://bugs.eclipse.org/bugs/show_bug.cgi?id=134338
	 * for 3.2 at the time where spelling still resides in JDT Text.
	 * Once we move the spell check engine along with its quick fixes
	 * down to Platform Text we need to provide the proposals with
	 * a way to access the annotation model.</em>
	 * </p>
	 *
	 * @param editor the text editor, if <code>null</code> this method does nothing
	 * @param word the word for which to remove the problems or <code>null</code> to remove all
	 * @since 3.3
	 * @deprecated As of 3.4, replaced by {@link #removeAll(ISourceViewer, String)}
	 */
	public static void removeAllInActiveEditor(ITextEditor editor, String word) {
		if (editor == null)
			return;

		IDocumentProvider documentProvider= editor.getDocumentProvider();
		if (documentProvider == null)
			return;

		IEditorInput editorInput= editor.getEditorInput();
		if (editorInput == null)
			return;

		IAnnotationModel model= documentProvider.getAnnotationModel(editorInput);
		if (model == null)
			return;

		IDocument document= documentProvider.getDocument(editorInput);
		if (document == null)
			return;

		removeAll(model, document, word);
	}

	/**
	 * Removes all spelling problems that are reported
	 * for the given <code>word</code> in the active editor.
	 *
	 * @param sourceViewer the source viewer
	 * @param word the word for which to remove the problems or <code>null</code> to remove all
	 * @since 3.4
	 */
	public static void removeAll(ISourceViewer sourceViewer, String word) {
		Assert.isNotNull(sourceViewer);

		IAnnotationModel model= sourceViewer.getAnnotationModel();
		if (model == null)
			return;

		IDocument document= sourceViewer.getDocument();
		if (document == null)
			return;

		removeAll(model, document, word);
	}

	/**
	 * Removes all spelling annotations from the given model whose annotated
	 * text equals <code>word</code>, or all spelling annotations if
	 * <code>word</code> is <code>null</code>. Shared implementation of
	 * {@link #removeAllInActiveEditor(ITextEditor, String)} and
	 * {@link #removeAll(ISourceViewer, String)}.
	 *
	 * @param model the annotation model, must not be <code>null</code>
	 * @param document the document the annotations refer to, must not be <code>null</code>
	 * @param word the word for which to remove the problems or <code>null</code> to remove all
	 */
	private static void removeAll(IAnnotationModel model, IDocument document, String word) {
		// Batch removal avoids one model-changed notification per annotation.
		boolean supportsBatchReplace= (model instanceof IAnnotationModelExtension);
		List toBeRemovedAnnotations= new ArrayList();
		Iterator iter= model.getAnnotationIterator();
		while (iter.hasNext()) {
			Annotation annotation= (Annotation) iter.next();
			if (SpellingAnnotation.TYPE.equals(annotation.getType())) {
				boolean doRemove= word == null;
				if (word != null) {
					// getPosition may return null if the annotation was
					// removed from the model concurrently - skip it then.
					Position pos= model.getPosition(annotation);
					if (pos == null)
						continue;
					String annotationWord;
					try {
						annotationWord= document.get(pos.getOffset(), pos.getLength());
					} catch (BadLocationException e) {
						// position no longer valid for the document - skip
						continue;
					}
					doRemove= word.equals(annotationWord);
				}
				if (doRemove) {
					if (supportsBatchReplace)
						toBeRemovedAnnotations.add(annotation);
					else
						model.removeAnnotation(annotation);
				}
			}
		}

		if (supportsBatchReplace && !toBeRemovedAnnotations.isEmpty()) {
			Annotation[] annotationArray= (Annotation[]) toBeRemovedAnnotations.toArray(new Annotation[toBeRemovedAnnotations.size()]);
			((IAnnotationModelExtension) model).replaceAnnotations(annotationArray, null);
		}
	}

	/**
	 * Returns the offset of the incorrectly spelled region.
	 *
	 * @return the offset of the incorrectly spelled region
	 */
	public abstract int getOffset();

	/**
	 * Returns the length of the incorrectly spelled region.
	 *
	 * @return the length of the incorrectly spelled region
	 */
	public abstract int getLength();

	/**
	 * Returns a localized, human-readable message string which describes the spelling problem.
	 *
	 * @return a localized, human-readable message string which describes the spelling problem
	 */
	public abstract String getMessage();

	/**
	 * Returns the proposals for the incorrectly spelled region.
	 *
	 * @return the proposals for the incorrectly spelled region
	 */
	public abstract ICompletionProposal[] getProposals();

	/**
	 * Returns the proposals for the incorrectly spelled region.
	 *
	 * @param context the invocation context or <code>null</code> if none
	 * @return the proposals for the incorrectly spelled region
	 * @since 3.4
	 */
	public ICompletionProposal[] getProposals(IQuickAssistInvocationContext context) {
		return getProposals();
	}
}
package android.support.v7.app; import android.content.Context; import android.content.DialogInterface.OnClickListener; import android.content.DialogInterface.OnKeyListener; import android.content.pm.PackageInfo; import android.content.pm.PackageManager; import android.content.pm.PackageManager.NameNotFoundException; import android.content.res.Resources; import android.graphics.drawable.Drawable; import android.os.Build; import android.os.Build.VERSION; import android.os.Environment; import android.telephony.TelephonyManager; import android.text.TextUtils; import android.util.DisplayMetrics; import android.view.ContextThemeWrapper; import android.view.View; import android.widget.ListAdapter; import com.xiaomi.a.a.a.b; import com.xiaomi.a.a.d.a; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import java.security.InvalidParameterException; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.TreeMap; import javax.crypto.Cipher; import javax.crypto.KeyGenerator; import javax.crypto.SecretKey; import javax.crypto.spec.IvParameterSpec; import javax.crypto.spec.SecretKeySpec; public class k { private final f a; private int b; public k(Context paramContext) { this(paramContext, j.a(paramContext, 0)); } private k(Context paramContext, int paramInt) { this.a = new f(new ContextThemeWrapper(paramContext, j.a(paramContext, paramInt))); this.b = paramInt; } public static int a(Context paramContext) { try { int i = paramContext.getPackageManager().getPackageInfo(paramContext.getPackageName(), 0).versionCode; return i; } catch (PackageManager.NameNotFoundException localNameNotFoundException) { localNameNotFoundException.printStackTrace(); return 0; } catch (Exception localException) { localException.printStackTrace(); } 
return 0; } public static String a(String paramString1, String paramString2) { try { byte[] arrayOfByte2 = a(paramString1.getBytes()); byte[] arrayOfByte3 = paramString2.getBytes(); SecretKeySpec localSecretKeySpec = new SecretKeySpec(arrayOfByte2, "AES"); Cipher localCipher = Cipher.getInstance("AES/CBC/PKCS5Padding"); localCipher.init(1, localSecretKeySpec, new IvParameterSpec(new byte[localCipher.getBlockSize()])); byte[] arrayOfByte4 = localCipher.doFinal(arrayOfByte3); arrayOfByte1 = arrayOfByte4; String str = null; if (arrayOfByte1 != null) str = b(arrayOfByte1); return str; } catch (Exception localException) { while (true) { localException.printStackTrace(); byte[] arrayOfByte1 = null; } } } public static String a(String paramString1, String paramString2, Map<String, String> paramMap, String paramString3) { int i = 1; if (TextUtils.isEmpty(paramString3)) throw new InvalidParameterException("security is not nullable"); ArrayList localArrayList = new ArrayList(); if (paramString1 != null) localArrayList.add(paramString1.toUpperCase()); if (!paramMap.isEmpty()) { Iterator localIterator2 = new TreeMap(paramMap).entrySet().iterator(); while (localIterator2.hasNext()) { Map.Entry localEntry = (Map.Entry)localIterator2.next(); Object[] arrayOfObject = new Object[2]; arrayOfObject[0] = localEntry.getKey(); arrayOfObject[i] = localEntry.getValue(); localArrayList.add(String.format("%s=%s", arrayOfObject)); } } localArrayList.add(paramString3); StringBuilder localStringBuilder = new StringBuilder(); Iterator localIterator1 = localArrayList.iterator(); while (localIterator1.hasNext()) { String str = (String)localIterator1.next(); if (i == 0) localStringBuilder.append('&'); localStringBuilder.append(str); i = 0; } return c(localStringBuilder.toString()); } public static boolean a(String paramString) { return (paramString == null) || (paramString.length() <= 0); } private static byte[] a(byte[] paramArrayOfByte) { KeyGenerator localKeyGenerator = 
KeyGenerator.getInstance("AES"); SecureRandom localSecureRandom = SecureRandom.getInstance("SHA1PRNG", "Crypto"); localSecureRandom.setSeed(paramArrayOfByte); localKeyGenerator.init(128, localSecureRandom); return localKeyGenerator.generateKey().getEncoded(); } public static int b(String paramString) { int i = 0; if (paramString.length() > 0) { char[] arrayOfChar = paramString.toCharArray(); j = 0; while (i < arrayOfChar.length) { j = j * 31 + arrayOfChar[i]; i++; } } int j = 0; return j; } public static String b(Context paramContext) { try { String str = paramContext.getPackageManager().getPackageInfo(paramContext.getPackageName(), 0).versionName; return str; } catch (PackageManager.NameNotFoundException localNameNotFoundException) { localNameNotFoundException.printStackTrace(); return ""; } catch (Exception localException) { while (true) localException.printStackTrace(); } } public static String b(String paramString1, String paramString2) { try { byte[] arrayOfByte1 = a(paramString1.getBytes()); int i = paramString2.length() / 2; byte[] arrayOfByte2 = new byte[i]; for (int j = 0; j < i; j++) arrayOfByte2[j] = Integer.valueOf(paramString2.substring(j * 2, 2 + j * 2), 16).byteValue(); SecretKeySpec localSecretKeySpec = new SecretKeySpec(arrayOfByte1, "AES"); Cipher localCipher = Cipher.getInstance("AES/CBC/PKCS5Padding"); localCipher.init(2, localSecretKeySpec, new IvParameterSpec(new byte[localCipher.getBlockSize()])); String str = new String(localCipher.doFinal(arrayOfByte2)); return str; } catch (Exception localException) { } return null; } private static String b(byte[] paramArrayOfByte) { if (paramArrayOfByte == null) return ""; StringBuffer localStringBuffer = new StringBuffer(2 * paramArrayOfByte.length); for (int i = 0; i < paramArrayOfByte.length; i++) { int j = paramArrayOfByte[i]; localStringBuffer.append("0123456789ABCDEF".charAt(0xF & j >> 4)).append("0123456789ABCDEF".charAt(j & 0xF)); } return localStringBuffer.toString(); } public static String 
c(Context paramContext) { return paramContext.getResources().getDisplayMetrics().widthPixels + "*" + paramContext.getResources().getDisplayMetrics().heightPixels; } public static String c(String paramString) { try { String str = String.valueOf(a.a(MessageDigest.getInstance("SHA1").digest(paramString.getBytes("UTF-8")))); return str; } catch (NoSuchAlgorithmException localNoSuchAlgorithmException) { b.a("CloudCoder.hash4SHA1 ", localNoSuchAlgorithmException); throw new IllegalStateException("failed to SHA1"); } catch (UnsupportedEncodingException localUnsupportedEncodingException) { while (true) b.a("CloudCoder.hash4SHA1 ", localUnsupportedEncodingException); } catch (Exception localException) { while (true) b.a("CloudCoder.hash4SHA1 ", localException); } } public static String d(Context paramContext) { try { TelephonyManager localTelephonyManager = (TelephonyManager)paramContext.getSystemService("phone"); if (localTelephonyManager == null) return ""; String str = localTelephonyManager.getDeviceId(); return str; } catch (Exception localException) { localException.printStackTrace(); } return ""; } public static boolean d() { return Environment.getExternalStorageState().equals("mounted"); } public static String e() { return Environment.getExternalStorageDirectory() + "/hpay100/"; } public static String e(Context paramContext) { try { TelephonyManager localTelephonyManager = (TelephonyManager)paramContext.getSystemService("phone"); if (localTelephonyManager == null) return ""; String str1 = localTelephonyManager.getSubscriberId(); if (str1 == null) return ""; String str2 = str1.trim(); return str2; } catch (Exception localException) { localException.printStackTrace(); } return ""; } public static int f() { try { int i = Build.VERSION.SDK_INT; return i; } catch (NumberFormatException localNumberFormatException) { localNumberFormatException.printStackTrace(); } return 0; } public static String f(Context paramContext) { try { TelephonyManager localTelephonyManager = 
(TelephonyManager)paramContext.getSystemService("phone"); if (localTelephonyManager == null) return ""; String str1 = localTelephonyManager.getSimSerialNumber(); if (str1 == null) return ""; String str2 = str1.trim(); return str2; } catch (Exception localException) { localException.printStackTrace(); } return ""; } public static String g() { try { String str = URLEncoder.encode(Build.MODEL, "utf-8"); return str; } catch (UnsupportedEncodingException localUnsupportedEncodingException) { localUnsupportedEncodingException.printStackTrace(); } return ""; } // ERROR // public static String g(Context paramContext) { // Byte code: // 0: iconst_1 // 1: anewarray 65 java/lang/String // 4: dup // 5: iconst_0 // 6: ldc_w 400 // 9: aastore // 10: astore_1 // 11: aload_0 // 12: invokevirtual 404 android/content/Context:getContentResolver ()Landroid/content/ContentResolver; // 15: ldc_w 406 // 18: invokestatic 412 android/net/Uri:parse (Ljava/lang/String;)Landroid/net/Uri; // 21: aload_1 // 22: aconst_null // 23: aconst_null // 24: ldc_w 414 // 27: invokevirtual 420 android/content/ContentResolver:query (Landroid/net/Uri;[Ljava/lang/String;Ljava/lang/String;[Ljava/lang/String;Ljava/lang/String;)Landroid/database/Cursor; // 30: astore 8 // 32: aload 8 // 34: astore_3 // 35: aload_3 // 36: ifnull +221 -> 257 // 39: aload_3 // 40: invokeinterface 425 1 0 // 45: ifeq +212 -> 257 // 48: aload_3 // 49: ldc_w 400 // 52: invokeinterface 428 2 0 // 57: istore 10 // 59: iconst_0 // 60: istore 11 // 62: aload_3 // 63: iload 10 // 65: invokeinterface 431 2 0 // 70: astore 6 // 72: aload 6 // 74: ifnull +13 -> 87 // 77: ldc 238 // 79: aload 6 // 81: invokevirtual 434 java/lang/String:equalsIgnoreCase (Ljava/lang/String;)Z // 84: ifeq +67 -> 151 // 87: ldc 238 // 89: astore 6 // 91: iinc 11 1 // 94: aload_3 // 95: invokeinterface 437 1 0 // 100: istore 12 // 102: iload 12 // 104: ifeq +10 -> 114 // 107: iload 11 // 109: bipush 50 // 111: if_icmplt -49 -> 62 // 114: aload_3 // 115: ifnull +9 
-> 124 // 118: aload_3 // 119: invokeinterface 440 1 0 // 124: ldc_w 442 // 127: new 177 java/lang/StringBuilder // 130: dup // 131: ldc_w 444 // 134: invokespecial 287 java/lang/StringBuilder:<init> (Ljava/lang/String;)V // 137: aload 6 // 139: invokevirtual 186 java/lang/StringBuilder:append (Ljava/lang/String;)Ljava/lang/StringBuilder; // 142: invokevirtual 189 java/lang/StringBuilder:toString ()Ljava/lang/String; // 145: invokestatic 449 com/arcsoft/hpay100/a/a:b (Ljava/lang/String;Ljava/lang/String;)V // 148: aload 6 // 150: areturn // 151: aload 6 // 153: ldc_w 451 // 156: invokevirtual 454 java/lang/String:contains (Ljava/lang/CharSequence;)Z // 159: ifeq -45 -> 114 // 162: aload 6 // 164: iconst_3 // 165: invokevirtual 456 java/lang/String:substring (I)Ljava/lang/String; // 168: astore 13 // 170: aload 13 // 172: astore 6 // 174: goto -60 -> 114 // 177: astore 5 // 179: aconst_null // 180: astore_3 // 181: aload 5 // 183: invokevirtual 62 java/lang/Exception:printStackTrace ()V // 186: ldc 238 // 188: astore 6 // 190: aload_3 // 191: ifnull -67 -> 124 // 194: aload_3 // 195: invokeinterface 440 1 0 // 200: goto -76 -> 124 // 203: astore 7 // 205: aload 7 // 207: invokevirtual 62 java/lang/Exception:printStackTrace ()V // 210: goto -86 -> 124 // 213: astore_2 // 214: aconst_null // 215: astore_3 // 216: aload_3 // 217: ifnull +9 -> 226 // 220: aload_3 // 221: invokeinterface 440 1 0 // 226: aload_2 // 227: athrow // 228: astore 4 // 230: aload 4 // 232: invokevirtual 62 java/lang/Exception:printStackTrace ()V // 235: goto -9 -> 226 // 238: astore 9 // 240: aload 9 // 242: invokevirtual 62 java/lang/Exception:printStackTrace ()V // 245: goto -121 -> 124 // 248: astore_2 // 249: goto -33 -> 216 // 252: astore 5 // 254: goto -73 -> 181 // 257: ldc 238 // 259: astore 6 // 261: goto -147 -> 114 // // Exception table: // from to target type // 0 32 177 java/lang/Exception // 194 200 203 java/lang/Exception // 0 32 213 finally // 220 226 228 java/lang/Exception // 
118 124 238 java/lang/Exception // 39 59 248 finally // 62 72 248 finally // 77 87 248 finally // 94 102 248 finally // 151 170 248 finally // 181 186 248 finally // 39 59 252 java/lang/Exception // 62 72 252 java/lang/Exception // 77 87 252 java/lang/Exception // 94 102 252 java/lang/Exception // 151 170 252 java/lang/Exception } public static String h() { try { String str = URLEncoder.encode(Build.VERSION.RELEASE, "utf-8"); return str; } catch (UnsupportedEncodingException localUnsupportedEncodingException) { localUnsupportedEncodingException.printStackTrace(); } return ""; } public static String h(Context paramContext) { try { TelephonyManager localTelephonyManager = (TelephonyManager)paramContext.getSystemService("phone"); String str; if (localTelephonyManager == null) str = ""; do { return str; str = localTelephonyManager.getLine1Number(); if (str != null) continue; str = ""; } while ((TextUtils.isEmpty(str)) || (!str.startsWith("0000"))); return ""; } catch (Exception localException) { localException.printStackTrace(); } return ""; } public static String i() { try { String str = URLEncoder.encode(Build.BRAND, "utf-8"); return str; } catch (UnsupportedEncodingException localUnsupportedEncodingException) { localUnsupportedEncodingException.printStackTrace(); } return ""; } public static String j() { while (true) { try { localObject = Build.MANUFACTURER; if ((localObject != null) && (!((String)localObject).equalsIgnoreCase("unknown"))) { if (TextUtils.isEmpty((CharSequence)localObject)) continue; String str = URLEncoder.encode((String)localObject, "utf-8"); localObject = str; return localObject; } } catch (Exception localException) { localException.printStackTrace(); return ""; } Object localObject = ""; } } public final Context a() { return this.a.a; } public final k a(int paramInt, DialogInterface.OnClickListener paramOnClickListener) { this.a.g = this.a.a.getText(2131034424); this.a.h = paramOnClickListener; return this; } public final k 
a(DialogInterface.OnKeyListener paramOnKeyListener) { this.a.l = paramOnKeyListener; return this; } public final k a(Drawable paramDrawable) { this.a.c = paramDrawable; return this; } public final k a(View paramView) { this.a.e = paramView; return this; } public final k a(ListAdapter paramListAdapter, DialogInterface.OnClickListener paramOnClickListener) { this.a.n = paramListAdapter; this.a.o = paramOnClickListener; return this; } public final k a(CharSequence paramCharSequence) { this.a.d = paramCharSequence; return this; } public final k a(CharSequence paramCharSequence, DialogInterface.OnClickListener paramOnClickListener) { this.a.g = paramCharSequence; this.a.h = null; return this; } public final k a(CharSequence[] paramArrayOfCharSequence, int paramInt, DialogInterface.OnClickListener paramOnClickListener) { this.a.m = paramArrayOfCharSequence; this.a.o = paramOnClickListener; this.a.q = paramInt; this.a.p = true; return this; } public final j b() { j localj = new j(this.a.a, this.b); this.a.a(j.a(localj)); localj.setCancelable(this.a.k); if (this.a.k) localj.setCanceledOnTouchOutside(true); localj.setOnCancelListener(null); localj.setOnDismissListener(null); if (this.a.l != null) localj.setOnKeyListener(this.a.l); return localj; } public final k b(int paramInt, DialogInterface.OnClickListener paramOnClickListener) { this.a.i = this.a.a.getText(2131034129); this.a.j = null; return this; } public final k b(CharSequence paramCharSequence) { this.a.f = paramCharSequence; return this; } public final k b(CharSequence paramCharSequence, DialogInterface.OnClickListener paramOnClickListener) { this.a.i = paramCharSequence; this.a.j = null; return this; } public final j c() { j localj = b(); localj.show(); return localj; } } /* Location: E:\Progs\Dev\Android\Decompile\apktool\zssq\zssq-dex2jar.jar * Qualified Name: android.support.v7.app.k * JD-Core Version: 0.6.0 */
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.bwcompat; import org.apache.lucene.search.Explanation; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; import org.elasticsearch.Version; import org.elasticsearch.VersionTests; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; import org.elasticsearch.action.admin.indices.segments.IndexSegments; import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse; import org.elasticsearch.action.admin.indices.segments.ShardSegments; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.common.bytes.BytesArray; import 
org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.gateway.MetaDataStateFormat; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.Segment; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.node.Node; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHitField; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.OldIndexUtils; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import org.hamcrest.Matchers; import org.junit.AfterClass; import org.junit.Before; import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.SortedSet; import java.util.TreeSet; import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery; import static org.elasticsearch.test.OldIndexUtils.assertUpgradeWorks; import static 
org.elasticsearch.test.OldIndexUtils.getIndexDir; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.greaterThanOrEqualTo; // needs at least 2 nodes since it bumps replicas to 1 @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) @LuceneTestCase.SuppressFileSystems("ExtrasFS") public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { // TODO: test for proper exception on unsupported indexes (maybe via separate test?) // We have a 0.20.6.zip etc for this. @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return Arrays.asList(InternalSettingsPlugin.class); } List<String> indexes; List<String> unsupportedIndexes; static String singleDataPathNodeName; static String multiDataPathNodeName; static Path singleDataPath; static Path[] multiDataPath; @Before public void initIndexesList() throws Exception { indexes = OldIndexUtils.loadDataFilesList("index", getBwcIndicesPath()); unsupportedIndexes = OldIndexUtils.loadDataFilesList("unsupported", getBwcIndicesPath()); } @AfterClass public static void tearDownStatics() { singleDataPathNodeName = null; multiDataPathNodeName = null; singleDataPath = null; multiDataPath = null; } @Override public Settings nodeSettings(int ord) { return OldIndexUtils.getSettings(); } void setupCluster() throws Exception { List<String> replicas = internalCluster().startNodes(1); // for replicas Path baseTempDir = createTempDir(); // start single data path node Settings.Builder nodeSettings = Settings.builder() .put(Environment.PATH_DATA_SETTING.getKey(), baseTempDir.resolve("single-path").toAbsolutePath()) .put(Node.NODE_MASTER_SETTING.getKey(), false); // workaround for dangling index loading issue when node is master singleDataPathNodeName = internalCluster().startNode(nodeSettings); // start multi data path node nodeSettings = 
Settings.builder() .put(Environment.PATH_DATA_SETTING.getKey(), baseTempDir.resolve("multi-path1").toAbsolutePath() + "," + baseTempDir .resolve("multi-path2").toAbsolutePath()) .put(Node.NODE_MASTER_SETTING.getKey(), false); // workaround for dangling index loading issue when node is master multiDataPathNodeName = internalCluster().startNode(nodeSettings); // find single data path dir Path[] nodePaths = internalCluster().getInstance(NodeEnvironment.class, singleDataPathNodeName).nodeDataPaths(); assertEquals(1, nodePaths.length); singleDataPath = nodePaths[0].resolve(NodeEnvironment.INDICES_FOLDER); assertFalse(Files.exists(singleDataPath)); Files.createDirectories(singleDataPath); logger.info("--> Single data path: {}", singleDataPath); // find multi data path dirs nodePaths = internalCluster().getInstance(NodeEnvironment.class, multiDataPathNodeName).nodeDataPaths(); assertEquals(2, nodePaths.length); multiDataPath = new Path[]{nodePaths[0].resolve(NodeEnvironment.INDICES_FOLDER), nodePaths[1].resolve(NodeEnvironment.INDICES_FOLDER)}; assertFalse(Files.exists(multiDataPath[0])); assertFalse(Files.exists(multiDataPath[1])); Files.createDirectories(multiDataPath[0]); Files.createDirectories(multiDataPath[1]); logger.info("--> Multi data paths: {}, {}", multiDataPath[0], multiDataPath[1]); } void upgradeIndexFolder() throws Exception { OldIndexUtils.upgradeIndexFolder(internalCluster(), singleDataPathNodeName); OldIndexUtils.upgradeIndexFolder(internalCluster(), multiDataPathNodeName); } void importIndex(String indexName) throws IOException { // force reloading dangling indices with a cluster state republish client().admin().cluster().prepareReroute().get(); ensureGreen(indexName); } void unloadIndex(String indexName) throws Exception { assertAcked(client().admin().indices().prepareDelete(indexName).get()); } public void testAllVersionsTested() throws Exception { SortedSet<String> expectedVersions = new TreeSet<>(); for (Version v : 
VersionUtils.allReleasedVersions()) { if (VersionUtils.isSnapshot(v)) continue; // snapshots are unreleased, so there is no backcompat yet if (v.isRelease() == false) continue; // no guarantees for prereleases if (v.before(Version.CURRENT.minimumIndexCompatibilityVersion())) continue; // we can only support one major version backward if (v.equals(Version.CURRENT)) continue; // the current version is always compatible with itself expectedVersions.add("index-" + v.toString() + ".zip"); } for (String index : indexes) { if (expectedVersions.remove(index) == false) { logger.warn("Old indexes tests contain extra index: {}", index); } } if (expectedVersions.isEmpty() == false) { StringBuilder msg = new StringBuilder("Old index tests are missing indexes:"); for (String expected : expectedVersions) { msg.append("\n" + expected); } fail(msg.toString()); } } public void testOldIndexes() throws Exception { setupCluster(); Collections.shuffle(indexes, random()); for (String index : indexes) { long startTime = System.currentTimeMillis(); logger.info("--> Testing old index {}", index); assertOldIndexWorks(index); logger.info("--> Done testing {}, took {} seconds", index, (System.currentTimeMillis() - startTime) / 1000.0); } } void assertOldIndexWorks(String index) throws Exception { Version version = OldIndexUtils.extractVersion(index); Path[] paths; if (randomBoolean()) { logger.info("--> injecting index [{}] into single data path", index); paths = new Path[]{singleDataPath}; } else { logger.info("--> injecting index [{}] into multi data path", index); paths = multiDataPath; } String indexName = index.replace(".zip", "").toLowerCase(Locale.ROOT).replace("unsupported-", "index-"); OldIndexUtils.loadIndex(indexName, index, createTempDir(), getBwcIndicesPath(), logger, paths); // we explicitly upgrade the index folders as these indices // are imported as dangling indices and not available on // node startup upgradeIndexFolder(); importIndex(indexName); assertIndexSanity(indexName, 
version); assertBasicSearchWorks(indexName); assertAllSearchWorks(indexName); assertBasicAggregationWorks(indexName); assertRealtimeGetWorks(indexName); assertNewReplicasWork(indexName); assertUpgradeWorks(client(), indexName, version); assertPositionIncrementGapDefaults(indexName, version); assertAliasWithBadName(indexName, version); assertStoredBinaryFields(indexName, version); unloadIndex(indexName); } void assertIndexSanity(String indexName, Version indexCreated) { GetIndexResponse getIndexResponse = client().admin().indices().prepareGetIndex().addIndices(indexName).get(); assertEquals(1, getIndexResponse.indices().length); assertEquals(indexName, getIndexResponse.indices()[0]); Version actualVersionCreated = Version.indexCreated(getIndexResponse.getSettings().get(indexName)); assertEquals(indexCreated, actualVersionCreated); ensureYellow(indexName); RecoveryResponse recoveryResponse = client().admin().indices().prepareRecoveries(indexName) .setDetailed(true).setActiveOnly(false).get(); boolean foundTranslog = false; for (List<RecoveryState> states : recoveryResponse.shardRecoveryStates().values()) { for (RecoveryState state : states) { if (state.getStage() == RecoveryState.Stage.DONE && state.getPrimary() && state.getRecoverySource().getType() == RecoverySource.Type.EXISTING_STORE) { assertFalse("more than one primary recoverd?", foundTranslog); assertNotEquals(0, state.getTranslog().recoveredOperations()); foundTranslog = true; } } } assertTrue("expected translog but nothing was recovered", foundTranslog); IndicesSegmentResponse segmentsResponse = client().admin().indices().prepareSegments(indexName).get(); IndexSegments segments = segmentsResponse.getIndices().get(indexName); int numCurrent = 0; int numBWC = 0; for (IndexShardSegments indexShardSegments : segments) { for (ShardSegments shardSegments : indexShardSegments) { for (Segment segment : shardSegments) { if (indexCreated.luceneVersion.equals(segment.version)) { numBWC++; if 
(Version.CURRENT.luceneVersion.equals(segment.version)) { numCurrent++; } } else if (Version.CURRENT.luceneVersion.equals(segment.version)) { numCurrent++; } else { fail("unexpected version " + segment.version); } } } } assertNotEquals("expected at least 1 current segment after translog recovery", 0, numCurrent); assertNotEquals("expected at least 1 old segment", 0, numBWC); SearchResponse test = client().prepareSearch(indexName).get(); assertThat(test.getHits().getTotalHits(), greaterThanOrEqualTo(1L)); } void assertBasicSearchWorks(String indexName) { logger.info("--> testing basic search"); SearchRequestBuilder searchReq = client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery()); SearchResponse searchRsp = searchReq.get(); ElasticsearchAssertions.assertNoFailures(searchRsp); long numDocs = searchRsp.getHits().getTotalHits(); logger.info("Found {} in old index", numDocs); logger.info("--> testing basic search with sort"); searchReq.addSort("long_sort", SortOrder.ASC); ElasticsearchAssertions.assertNoFailures(searchReq.get()); logger.info("--> testing exists filter"); searchReq = client().prepareSearch(indexName).setQuery(QueryBuilders.existsQuery("string")); searchRsp = searchReq.get(); ElasticsearchAssertions.assertNoFailures(searchRsp); assertEquals(numDocs, searchRsp.getHits().getTotalHits()); GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings(indexName).get(); Version versionCreated = Version.fromId(Integer.parseInt(getSettingsResponse.getSetting(indexName, "index.version.created"))); if (versionCreated.onOrAfter(Version.V_2_4_0)) { searchReq = client().prepareSearch(indexName).setQuery(QueryBuilders.existsQuery("field.with.dots")); searchRsp = searchReq.get(); ElasticsearchAssertions.assertNoFailures(searchRsp); assertEquals(numDocs, searchRsp.getHits().getTotalHits()); } } boolean findPayloadBoostInExplanation(Explanation expl) { if (expl.getDescription().startsWith("payloadBoost=") && expl.getValue() != 
1f) { return true; } else { boolean found = false; for (Explanation sub : expl.getDetails()) { found |= findPayloadBoostInExplanation(sub); } return found; } } void assertAllSearchWorks(String indexName) { logger.info("--> testing _all search"); SearchResponse searchRsp = client().prepareSearch(indexName).get(); ElasticsearchAssertions.assertNoFailures(searchRsp); assertThat(searchRsp.getHits().getTotalHits(), greaterThanOrEqualTo(1L)); SearchHit bestHit = searchRsp.getHits().getAt(0); // Make sure there are payloads and they are taken into account for the score // the 'string' field has a boost of 4 in the mappings so it should get a payload boost String stringValue = (String) bestHit.sourceAsMap().get("string"); assertNotNull(stringValue); Explanation explanation = client().prepareExplain(indexName, bestHit.getType(), bestHit.getId()) .setQuery(QueryBuilders.matchQuery("_all", stringValue)).get().getExplanation(); assertTrue("Could not find payload boost in explanation\n" + explanation, findPayloadBoostInExplanation(explanation)); // Make sure the query can run on the whole index searchRsp = client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("_all", stringValue)).setExplain(true).get(); ElasticsearchAssertions.assertNoFailures(searchRsp); assertThat(searchRsp.getHits().getTotalHits(), greaterThanOrEqualTo(1L)); } void assertBasicAggregationWorks(String indexName) { // histogram on a long SearchResponse searchRsp = client().prepareSearch(indexName).addAggregation(AggregationBuilders.histogram("histo").field ("long_sort").interval(10)).get(); ElasticsearchAssertions.assertSearchResponse(searchRsp); Histogram histo = searchRsp.getAggregations().get("histo"); assertNotNull(histo); long totalCount = 0; for (Histogram.Bucket bucket : histo.getBuckets()) { totalCount += bucket.getDocCount(); } assertEquals(totalCount, searchRsp.getHits().getTotalHits()); // terms on a boolean searchRsp = 
client().prepareSearch(indexName).addAggregation(AggregationBuilders.terms("bool_terms").field("bool")).get(); Terms terms = searchRsp.getAggregations().get("bool_terms"); totalCount = 0; for (Terms.Bucket bucket : terms.getBuckets()) { totalCount += bucket.getDocCount(); } assertEquals(totalCount, searchRsp.getHits().getTotalHits()); } void assertRealtimeGetWorks(String indexName) { assertAcked(client().admin().indices().prepareUpdateSettings(indexName).setSettings(Settings.builder() .put("refresh_interval", -1) .build())); SearchRequestBuilder searchReq = client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery()); SearchHit hit = searchReq.get().getHits().getAt(0); String docId = hit.getId(); // foo is new, it is not a field in the generated index client().prepareUpdate(indexName, "doc", docId).setDoc("foo", "bar").get(); GetResponse getRsp = client().prepareGet(indexName, "doc", docId).get(); Map<String, Object> source = getRsp.getSourceAsMap(); assertThat(source, Matchers.hasKey("foo")); assertAcked(client().admin().indices().prepareUpdateSettings(indexName).setSettings(Settings.builder() .put("refresh_interval", IndexSettings.DEFAULT_REFRESH_INTERVAL) .build())); } void assertNewReplicasWork(String indexName) throws Exception { final int numReplicas = 1; final long startTime = System.currentTimeMillis(); logger.debug("--> creating [{}] replicas for index [{}]", numReplicas, indexName); assertAcked(client().admin().indices().prepareUpdateSettings(indexName).setSettings(Settings.builder() .put("number_of_replicas", numReplicas) ).execute().actionGet()); ensureGreen(TimeValue.timeValueMinutes(2), indexName); logger.debug("--> index [{}] is green, took [{}]", indexName, TimeValue.timeValueMillis(System.currentTimeMillis() - startTime)); logger.debug("--> recovery status:\n{}", XContentHelper.toString(client().admin().indices().prepareRecoveries(indexName).get())); // TODO: do something with the replicas! query? index? 
} void assertPositionIncrementGapDefaults(String indexName, Version version) throws Exception { client().prepareIndex(indexName, "doc", "position_gap_test").setSource("string", Arrays.asList("one", "two three")) .setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); // Baseline - phrase query finds matches in the same field value assertHitCount(client().prepareSearch(indexName).setQuery(matchPhraseQuery("string", "two three")).get(), 1); // No match across gaps when slop < position gap assertHitCount( client().prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(99)).get(), 0); // Match across gaps when slop >= position gap assertHitCount(client().prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(100)).get(), 1); assertHitCount(client().prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(101)).get(), 1); // No match across gap using default slop with default positionIncrementGap assertHitCount(client().prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two")).get(), 0); // Nor with small-ish values assertHitCount(client().prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(5)).get(), 0); assertHitCount(client().prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(50)).get(), 0); // But huge-ish values still match assertHitCount(client().prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(500)).get(), 1); } private static final Version VERSION_5_1_0_UNRELEASED = Version.fromString("5.1.0"); public void testUnreleasedVersion() { VersionTests.assertUnknownVersion(VERSION_5_1_0_UNRELEASED); } /** * Search on an alias that contains illegal characters that would prevent it from being created after 5.1.0. It should still be * search-able though. 
*/ void assertAliasWithBadName(String indexName, Version version) throws Exception { if (version.onOrAfter(VERSION_5_1_0_UNRELEASED)) { return; } // We can read from the alias just like we can read from the index. String aliasName = "#" + indexName; long totalDocs = client().prepareSearch(indexName).setSize(0).get().getHits().totalHits(); assertHitCount(client().prepareSearch(aliasName).setSize(0).get(), totalDocs); assertThat(totalDocs, greaterThanOrEqualTo(2000L)); // We can remove the alias. assertAcked(client().admin().indices().prepareAliases().removeAlias(indexName, aliasName).get()); assertFalse(client().admin().indices().prepareAliasesExist(aliasName).get().exists()); } /** * Make sure we can load stored binary fields. */ void assertStoredBinaryFields(String indexName, Version version) throws Exception { SearchRequestBuilder builder = client().prepareSearch(indexName); builder.setQuery(QueryBuilders.matchAllQuery()); builder.setSize(100); builder.addStoredField("binary"); SearchHits hits = builder.get().getHits(); assertEquals(100, hits.hits().length); for(SearchHit hit : hits) { SearchHitField field = hit.field("binary"); assertNotNull(field); Object value = field.value(); assertTrue(value instanceof BytesArray); assertEquals(16, ((BytesArray) value).length()); } } private Path getNodeDir(String indexFile) throws IOException { Path unzipDir = createTempDir(); Path unzipDataDir = unzipDir.resolve("data"); // decompress the index Path backwardsIndex = getBwcIndicesPath().resolve(indexFile); try (InputStream stream = Files.newInputStream(backwardsIndex)) { TestUtil.unzip(stream, unzipDir); } // check it is unique assertTrue(Files.exists(unzipDataDir)); Path[] list = FileSystemUtils.files(unzipDataDir); if (list.length != 1) { throw new IllegalStateException("Backwards index must contain exactly one cluster"); } int zipIndex = indexFile.indexOf(".zip"); final Version version = Version.fromString(indexFile.substring("index-".length(), zipIndex)); if 
(version.before(Version.V_5_0_0_alpha1)) { // the bwc scripts packs the indices under this path return list[0].resolve("nodes/0/"); } else { // after 5.0.0, data folders do not include the cluster name return list[0].resolve("0"); } } public void testOldClusterStates() throws Exception { // dangling indices do not load the global state, only the per-index states // so we make sure we can read them separately MetaDataStateFormat<MetaData> globalFormat = new MetaDataStateFormat<MetaData>(XContentType.JSON, "global-") { @Override public void toXContent(XContentBuilder builder, MetaData state) throws IOException { throw new UnsupportedOperationException(); } @Override public MetaData fromXContent(XContentParser parser) throws IOException { return MetaData.Builder.fromXContent(parser); } }; MetaDataStateFormat<IndexMetaData> indexFormat = new MetaDataStateFormat<IndexMetaData>(XContentType.JSON, "state-") { @Override public void toXContent(XContentBuilder builder, IndexMetaData state) throws IOException { throw new UnsupportedOperationException(); } @Override public IndexMetaData fromXContent(XContentParser parser) throws IOException { return IndexMetaData.Builder.fromXContent(parser); } }; Collections.shuffle(indexes, random()); for (String indexFile : indexes) { String indexName = indexFile.replace(".zip", "").toLowerCase(Locale.ROOT).replace("unsupported-", "index-"); Path nodeDir = getNodeDir(indexFile); logger.info("Parsing cluster state files from index [{}]", indexName); final MetaData metaData = globalFormat.loadLatestState(logger, xContentRegistry(), nodeDir); assertNotNull(metaData); final Version version = Version.fromString(indexName.substring("index-".length())); final Path dataDir; if (version.before(Version.V_5_0_0_alpha1)) { dataDir = nodeDir.getParent().getParent(); } else { dataDir = nodeDir.getParent(); } final Path indexDir = getIndexDir(logger, indexName, indexFile, dataDir); assertNotNull(indexFormat.loadLatestState(logger, xContentRegistry(), 
indexDir)); } } }
/** * */ package com.jarp.tutorials.bigranchprohects.ch12; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.Date; import java.util.UUID; import android.app.Activity; import android.content.Intent; import android.os.Bundle; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.text.Editable; import android.text.TextWatcher; import android.view.LayoutInflater; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewGroup; import android.widget.Button; import android.widget.CheckBox; import android.widget.CompoundButton; import android.widget.CompoundButton.OnCheckedChangeListener; import android.widget.EditText; import com.jarp.tutorials.bigranchprohects.R; /** * @author JARP * */ public class CrimeFragment extends Fragment { private Crime mCrime; private CheckBox mSolvedCheckbox; private Button mDateButton; private EditText mTitleField; private Button mTimeButton; public static final String EXTRA_CRIME_ID ="EXTRA_CRIME_ID"; public static final String TAG_DIALOG_DATE = "FRAGMENT_PICK"; public static final String TAG_DIALOG_TIME = "FRAGMENT_DATE_PICK"; private static final String FORMAT_DATE = "yyyy-MM-dd"; private static final String FORMAT_TIME = "HH:mm-ss"; private static SimpleDateFormat dateFormat = new SimpleDateFormat(FORMAT_DATE); private static SimpleDateFormat timeFormat = new SimpleDateFormat(FORMAT_TIME); private static final int REQUEST_DATE = 0; private static final int REQUEST_TIME = 1; @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { if(resultCode!=Activity.RESULT_OK) return; if(requestCode==REQUEST_DATE) { Date date = (Date) data.getSerializableExtra(DatePickerFragment.EXTRA_DATE); mCrime.setDate(date); updateDate(); } else if(requestCode==REQUEST_TIME) { Date date = (Date) data.getSerializableExtra(TimePickerFragment.EXTRA_TIME); mCrime.setDate(date); updateTime(); } } public static CrimeFragment 
newInstance (UUID uuid) {
    // Attach the crime id as a fragment argument so it survives re-creation.
    Bundle args = new Bundle();
    args.putSerializable(EXTRA_CRIME_ID, uuid);
    CrimeFragment fragment = new CrimeFragment();
    fragment.setArguments(args);
    return fragment;
}

@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Look up the model object from the singleton store using the id
    // passed via newInstance().
    UUID uuid = (UUID) getArguments().getSerializable(EXTRA_CRIME_ID);
    mCrime = CrimeLab.get(getActivity()).getCrime(uuid);
}

/**
 * Inflates the crime detail layout and wires the title field, date button,
 * time button and solved checkbox to {@code mCrime}.
 */
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    //View v = inflater.inflate (R.layout.)
    View v = inflater. inflate(R.layout.ch12_fragment_crime,container,false);

    // Title: keep the model in sync on every keystroke.
    mTitleField = (EditText)v.findViewById(R.id.crime_title);
    mTitleField.setText(mCrime.getmTitle());
    mTitleField.addTextChangedListener( new TextWatcher() {
        @Override
        public void onTextChanged(CharSequence c, int start, int before, int count) {
            mCrime.setmTitle(c.toString());
        }

        @Override
        public void beforeTextChanged(CharSequence arg0, int arg1, int arg2, int arg3) {
            // TODO Auto-generated method stub
        }

        @Override
        public void afterTextChanged(Editable arg0) {
            // TODO Auto-generated method stub
        }
    });

    // Date button: shows a DatePickerFragment dialog; the dialog reports
    // back via the target-fragment mechanism with REQUEST_DATE.
    mDateButton = (Button) v.findViewById(R.id.crime_date);
    updateDate();
    //mDateButton.setEnabled(false);
    mDateButton.setOnClickListener(new OnClickListener() {
        @Override
        public void onClick(View arg0) {
            FragmentManager fm = getActivity().getSupportFragmentManager();
            DatePickerFragment dateFragment = DatePickerFragment.newInstance(mCrime.getDate());
            dateFragment.setTargetFragment(CrimeFragment.this, REQUEST_DATE);
            dateFragment.show(fm, TAG_DIALOG_DATE);
        }
    });

    // Time button: same pattern as the date button, with REQUEST_TIME.
    mTimeButton = (Button) v.findViewById(R.id.crime_time);
    mTimeButton.setOnClickListener(new OnClickListener() {
        @Override
        public void onClick(View v) {
            FragmentManager fm = getActivity().getSupportFragmentManager();
            // NOTE(review): "newInstace" matches the factory name declared on
            // TimePickerFragment — fix the typo there before renaming this call.
            TimePickerFragment timeDialog = TimePickerFragment.newInstace(mCrime.getDate());
            timeDialog.setTargetFragment(CrimeFragment.this, REQUEST_TIME);
            timeDialog.show(fm,TAG_DIALOG_TIME);
        }
    });
    updateTime();

    // Solved checkbox mirrors the model's solved flag.
    mSolvedCheckbox = (CheckBox)v.findViewById(R.id.crime_solved);
    mSolvedCheckbox.setChecked(mCrime.isSolved());
    mSolvedCheckbox.setOnCheckedChangeListener(new OnCheckedChangeListener() {
        @Override
        public void onCheckedChanged(CompoundButton arg0, boolean isChecked) {
            // TODO Auto-generated method stub
            mCrime.setSolved(isChecked);
        }
    });
    return v;
}

/** Refreshes the time button label from the crime's date. */
public void updateTime() {
    mTimeButton.setText(timeFormat.format(mCrime.getDate()));
}

/** Refreshes the date button label from the crime's date. */
public void updateDate() {
    mDateButton.setText(dateFormat.format (mCrime.getDate()));
}
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.jsmpp.session;

import java.io.IOException;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;

import org.jsmpp.InvalidResponseException;
import org.jsmpp.PDUException;
import org.jsmpp.PDUSender;
import org.jsmpp.SMPPConstant;
import org.jsmpp.bean.Command;
import org.jsmpp.bean.DataCoding;
import org.jsmpp.bean.DataSm;
import org.jsmpp.bean.DataSmResp;
import org.jsmpp.bean.ESMClass;
import org.jsmpp.bean.NumberingPlanIndicator;
import org.jsmpp.bean.OptionalParameter;
import org.jsmpp.bean.RegisteredDelivery;
import org.jsmpp.bean.TypeOfNumber;
import org.jsmpp.extra.NegativeResponseException;
import org.jsmpp.extra.PendingResponse;
import org.jsmpp.extra.ProcessRequestException;
import org.jsmpp.extra.ResponseTimeoutException;
import org.jsmpp.extra.SessionState;
import org.jsmpp.session.connection.Connection;
import org.jsmpp.util.IntUtil;
import org.jsmpp.util.Sequence;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Base implementation shared by SMPP session types. The responsibilities
 * visible here are: pairing outgoing request PDUs with their responses via a
 * map of {@link PendingResponse} keyed by sequence_number, sending PDUs
 * through a {@link PDUSender}, managing the enquire_link and transaction
 * timers, and running a background {@link EnquireLinkSender} thread as a
 * keep-alive.
 *
 * @author uudashr
 *
 */
public abstract class AbstractSession implements Session {
    private static final Logger logger = LoggerFactory.getLogger(AbstractSession.class);
    // Used only to generate random session ids; not security sensitive.
    private static final Random random = new Random();

    // Outstanding requests awaiting a response, keyed by sequence_number.
    // Concurrent: written by senders, completed/removed by the reader side.
    private final Map<Integer, PendingResponse<Command>> pendingResponse =
            new ConcurrentHashMap<Integer, PendingResponse<Command>>();
    // Generator for PDU sequence_numbers, starting at 1.
    private final Sequence sequence = new Sequence(1);
    private final PDUSender pduSender;
    // Number of threads reading/processing PDUs in parallel (see setter).
    private int pduProcessorDegree = 3;

    private String sessionId = generateSessionId();
    private int enquireLinkTimer = 5000;   // millis between keep-alives
    private long transactionTimer = 2000;  // millis to wait for a response

    protected EnquireLinkSender enquireLinkSender;

    public AbstractSession(PDUSender pduSender) {
        this.pduSender = pduSender;
    }

    protected abstract AbstractSessionContext sessionContext();

    protected abstract Connection connection();

    protected abstract GenericMessageReceiverListener messageReceiverListener();

    protected PDUSender pduSender() {
        return pduSender;
    }

    protected Sequence sequence() {
        return sequence;
    }

    protected PendingResponse<Command> removePendingResponse(int sequenceNumber) {
        return pendingResponse.remove(sequenceNumber);
    }

    public String getSessionId() {
        return sessionId;
    }

    /**
     * Sets the enquire_link interval. If the session is already bound the
     * value is also applied immediately as the connection's socket timeout.
     */
    public void setEnquireLinkTimer(int enquireLinkTimer) {
        if (sessionContext().getSessionState().isBound()) {
            try {
                connection().setSoTimeout(enquireLinkTimer);
            } catch (IOException e) {
                logger.error("Failed setting so_timeout for session timer", e);
            }
        }
        this.enquireLinkTimer = enquireLinkTimer;
    }

    public int getEnquireLinkTimer() {
        return enquireLinkTimer;
    }

    public void setTransactionTimer(long transactionTimer) {
        this.transactionTimer = transactionTimer;
    }

    public long getTransactionTimer() {
        return transactionTimer;
    }

    public SessionState getSessionState() {
        return sessionContext().getSessionState();
    }

    /**
     * Returns whether the PDU reader should keep reading: true while bound,
     * OPEN or OUTBOUND.
     */
    protected synchronized boolean isReadPdu() {
        SessionState sessionState = getSessionState();
        return sessionState.isBound() || sessionState.equals(SessionState.OPEN)
                || sessionState.equals(SessionState.OUTBOUND);
    }

    public void addSessionStateListener(SessionStateListener listener) {
        if (listener != null) {
            sessionContext().addSessionStateListener(listener);
        }
    }

    public void removeSessionStateListener(SessionStateListener listener) {
        sessionContext().removeSessionStateListener(listener);
    }

    public long getLastActivityTimestamp() {
        return sessionContext().getLastActivityTimestamp();
    }

    /**
     * Set total thread can read PDU and process it in parallel. It's defaulted to
     * 3.
     *
     * @param pduProcessorDegree is the total thread can handle read and process
     *        PDU in parallel.
     * @throws IllegalStateException if the PDU Reader has been started.
     */
    public void setPduProcessorDegree(int pduProcessorDegree) throws IllegalStateException {
        // Only configurable before the session leaves CLOSED (i.e. before the
        // PDU dispatcher thread is created).
        if (!getSessionState().equals(SessionState.CLOSED)) {
            throw new IllegalStateException(
                    "Cannot set PDU processor degree since the PDU dispatcher thread already created");
        }
        this.pduProcessorDegree = pduProcessorDegree;
    }

    /**
     * Get the total of thread that can handle read and process PDU in parallel.
     *
     * @return the total of thread that can handle read and process PDU in
     *         parallel.
     */
    public int getPduProcessorDegree() {
        return pduProcessorDegree;
    }

    /**
     * Send the data_sm command.
     *
     * @param serviceType is the service_type parameter.
     * @param sourceAddrTon is the source_addr_ton parameter.
     * @param sourceAddrNpi is the source_addr_npi parameter.
     * @param sourceAddr is the source_addr parameter.
     * @param destAddrTon is the dest_addr_ton parameter.
     * @param destAddrNpi is the dest_addr_npi parameter.
     * @param destinationAddr is the destination_addr parameter.
     * @param esmClass is the esm_class parameter.
     * @param registeredDelivery is the registered_delivery parameter.
     * @param dataCoding is the data_coding parameter.
     * @param optionalParameters is the optional parameters.
     * @return the result of data_sm (data_sm_resp).
     * @throws PDUException if there is an invalid PDU parameter found.
     * @throws ResponseTimeoutException if the response take time too long.
     * @throws InvalidResponseException if the response is invalid.
     * @throws NegativeResponseException if the response return NON-OK command_status.
     * @throws IOException if there is an IO error found.
     */
    public DataSmResult dataShortMessage(String serviceType,
            TypeOfNumber sourceAddrTon, NumberingPlanIndicator sourceAddrNpi,
            String sourceAddr, TypeOfNumber destAddrTon,
            NumberingPlanIndicator destAddrNpi, String destinationAddr,
            ESMClass esmClass, RegisteredDelivery registeredDelivery,
            DataCoding dataCoding, OptionalParameter... optionalParameters)
            throws PDUException, ResponseTimeoutException,
            InvalidResponseException, NegativeResponseException, IOException {
        DataSmCommandTask task = new DataSmCommandTask(pduSender,
                serviceType, sourceAddrTon, sourceAddrNpi, sourceAddr,
                destAddrTon, destAddrNpi, destinationAddr, esmClass,
                registeredDelivery, dataCoding, optionalParameters);

        // Synchronous send-and-wait bounded by the transaction timer.
        DataSmResp resp = (DataSmResp)executeSendCommand(task, getTransactionTimer());
        return new DataSmResult(resp.getMessageId(), resp.getOptionalParameters());
    }

    /**
     * Closes the connection, stops the enquire-link thread (unless close() is
     * being called from that thread itself), then closes the session context.
     */
    public void close() {
        logger.debug("Close session {} in state {}", sessionId, getSessionState());
        SessionContext ctx = sessionContext();
        // NOTE(review): sessionState is a snapshot taken before the connection
        // is closed; the final ctx.close() is skipped only if the session was
        // already CLOSED when close() began.
        SessionState sessionState = ctx.getSessionState();
        if (!sessionState.equals(SessionState.CLOSED)) {
            try {
                connection().close();
            } catch (IOException e) {
                logger.warn("Failed to close connection:", e);
            }
        }

        // Make sure the enquireLinkThread doesn't wait for itself
        if (Thread.currentThread() != enquireLinkSender) {
            if (enquireLinkSender != null && enquireLinkSender.isAlive()) {
                logger.debug("Stop enquireLinkSender for session {}", sessionId);
                try {
                    enquireLinkSender.interrupt();
                    enquireLinkSender.join();
                } catch (InterruptedException e) {
                    // preserve the interrupt status for callers further up
                    Thread.currentThread().interrupt();
                    logger.warn("Interrupted while waiting for enquireLinkSender thread to exit");
                }
            }
        }

        if (!sessionState.equals(SessionState.CLOSED)) {
            logger.debug("Close session context {} in state {}", sessionId, sessionState);
            ctx.close();
        }
    }

    /**
     * Validate the response, the command_status should be 0 otherwise will
     * throw {@link NegativeResponseException}.
     *
     * @param response is the response.
     * @throws NegativeResponseException if the command_status value is not zero.
     */
    private static void validateResponse(final Command response) throws NegativeResponseException {
        if (response.getCommandStatus() != SMPPConstant.STAT_ESME_ROK) {
            throw new NegativeResponseException(response.getCommandStatus());
        }
    }

    /**
     * Dispatches an incoming data_sm to the registered listener; without a
     * listener the request is rejected with STAT_ESME_RX_R_APPN.
     */
    protected DataSmResult fireAcceptDataSm(final DataSm dataSm) throws ProcessRequestException {
        GenericMessageReceiverListener messageReceiverListener = messageReceiverListener();
        if (messageReceiverListener != null) {
            return messageReceiverListener.onAcceptDataSm(dataSm, this);
        } else {
            throw new ProcessRequestException(
                    "MessageReceiverListener hasn't been set yet",
                    SMPPConstant.STAT_ESME_RX_R_APPN);
        }
    }

    /**
     * Execute send command command task.
     *
     * @param task is the task.
     * @param timeout is the timeout in millisecond.
     * @return the command response.
     * @throws PDUException if there is invalid PDU parameter found.
     * @throws ResponseTimeoutException if the response has reach it timeout.
     * @throws InvalidResponseException if invalid response found.
     * @throws NegativeResponseException if the negative response found.
     * @throws IOException if there is an IO error found.
     */
    protected Command executeSendCommand(SendCommandTask task, long timeout)
            throws PDUException, ResponseTimeoutException,
            InvalidResponseException, NegativeResponseException, IOException {
        int seqNum = sequence.nextValue();
        PendingResponse<Command> pendingResp = new PendingResponse<Command>(timeout);
        // Register before sending so the reader can complete it on arrival.
        pendingResponse.put(seqNum, pendingResp);
        try {
            task.executeTask(connection().getOutputStream(), seqNum);
        } catch (IOException e) {
            logger.error("Failed sending {} command", task.getCommandName(), e);

            // enquire_link send failures are tolerated: keep the pending entry
            // and wait, in case the connection recovers. Any other command's
            // failure closes the session and propagates.
            if("enquire_link".equals(task.getCommandName())) {
                logger.info("Ignore failure of sending enquire_link, wait to see if connection is restored");
            } else {
                pendingResponse.remove(seqNum);
                close();
                throw e;
            }
        }

        try {
            pendingResp.waitDone();
            logger.debug("{} response with sequence_number {} received for session {}",
                    task.getCommandName(), seqNum, sessionId);
        } catch (ResponseTimeoutException e) {
            pendingResponse.remove(seqNum);
            throw new ResponseTimeoutException("No response after waiting for "
                    + timeout + " millis when executing "
                    + task.getCommandName() + " with session " + sessionId
                    + " and sequence_number " + seqNum, e);
        } catch (InvalidResponseException e) {
            pendingResponse.remove(seqNum);
            throw e;
        }

        Command resp = pendingResp.getResponse();
        validateResponse(resp);
        return resp;
    }

    /**
     * Execute send command command task.
     *
     * @param task is the task.
     * @throws PDUException if there is invalid PDU parameter found.
     * @throws IOException if there is an IO error found.
     */
    protected void executeSendCommandWithNoResponse(SendCommandTask task)
            throws PDUException, IOException {
        int seqNum = sequence.nextValue();
        try {
            task.executeTask(connection().getOutputStream(), seqNum);
        } catch (IOException e) {
            logger.error("Failed sending {} command", task.getCommandName(), e);
            close();
            throw e;
        }
    }

    // Random hex id; synchronized around the shared Random instance.
    private synchronized static final String generateSessionId() {
        return IntUtil.toHexString(random.nextInt());
    }

    /**
     * Ensure we have proper link.
     *
     * @throws ResponseTimeoutException if there is no valid response after defined millisecond.
     * @throws InvalidResponseException if there is invalid response found.
     * @throws IOException if there is an IO error found.
     */
    protected void sendEnquireLink()
            throws ResponseTimeoutException, InvalidResponseException, IOException {
        EnquireLinkCommandTask task = new EnquireLinkCommandTask(pduSender);
        try {
            executeSendCommand(task, getTransactionTimer());
        } catch (PDUException e) {
            // should never happen, since it doesn't have any String parameter.
            logger.warn("PDU String should be always valid", e);
        } catch (NegativeResponseException e) {
            // the command_status of the response should be always 0
            logger.warn("command_status of response should be always 0", e);
        }
    }

    /** Sends an outbind PDU; no response is awaited. */
    public void sendOutbind(String systemId, String password) throws IOException {
        if (sessionContext().getSessionState().equals(SessionState.CLOSED)) {
            throw new IOException("Session " + sessionId + " is closed");
        }

        OutbindCommandTask task = new OutbindCommandTask(pduSender, systemId, password);
        try {
            executeSendCommandWithNoResponse(task);
        } catch (PDUException e) {
            // exception should be never caught since we didn't send any string parameter.
            logger.warn("PDU String should be always valid", e);
        }
    }

    /** Sends unbind and waits for unbind_resp (non-OK status is only logged). */
    public void unbind() throws ResponseTimeoutException, InvalidResponseException, IOException {
        if (sessionContext().getSessionState().equals(SessionState.CLOSED)) {
            throw new IOException("Session " + sessionId + " is closed");
        }

        UnbindCommandTask task = new UnbindCommandTask(pduSender);
        try {
            executeSendCommand(task, transactionTimer);
        } catch (PDUException e) {
            // exception should be never caught since we didn't send any string parameter.
            logger.warn("PDU String should be always valid", e);
        } catch (NegativeResponseException e) {
            // ignore the negative response
            logger.warn("Receive non-ok command_status ({}) for unbind_resp", e.getCommandStatus());
        }
    }

    /**
     * Best-effort graceful shutdown: unbind first (errors only logged), then
     * always close the session.
     */
    public void unbindAndClose() {
        logger.debug("Unbind and close session {}", sessionId);
        if (sessionContext().getSessionState().isBound()) {
            try {
                unbind();
            } catch (ResponseTimeoutException e) {
                logger.error("Timeout waiting unbind response", e);
            } catch (InvalidResponseException e) {
                logger.error("Receive invalid unbind response", e);
            } catch (IOException e) {
                logger.error("IO error found", e);
            }
        }
        close();
    }

    /**
     * Ensure the session is receivable. If the session not receivable then an
     * exception thrown.
     *
     * @param activityName is the activity name.
     * @throws IOException if the session not receivable.
     */
    protected void ensureReceivable(String activityName) throws IOException {
        // TODO uudashr: do we have to use another exception for this checking?
        SessionState currentState = getSessionState();
        if (!currentState.isReceivable()) {
            throw new IOException("Cannot " + activityName + " while session "
                    + sessionId + " in state " + currentState);
        }
    }

    /**
     * Ensure the session is transmittable. If the session not transmittable
     * then an exception thrown.
     *
     * @param activityName is the activity name.
     * @throws IOException if the session not transmittable.
     */
    protected void ensureTransmittable(String activityName) throws IOException {
        ensureTransmittable(activityName, false);
    }

    /**
     * Ensure the session is transmittable. If the session not transmittable
     * then an exception thrown.
     *
     * @param activityName is the activity name.
     * @param only set to <tt>true</tt> if you want to ensure transmittable only
     *        (transceive will not pass), otherwise set to <tt>false</tt>.
     * @throws IOException if the session not transmittable (by considering the
     *         <code>only</code> parameter).
     */
    protected void ensureTransmittable(String activityName, boolean only) throws IOException {
        // TODO uudashr: do we have to use another exception for this checking?
        SessionState currentState = getSessionState();
        if (!currentState.isTransmittable() || (only && currentState.isReceivable())) {
            throw new IOException("Cannot " + activityName + " while session "
                    + sessionId + " in state " + currentState);
        }
    }

    /**
     * Keep-alive thread. It parks on {@code sendingEnquireLink} (with a 500ms
     * poll) until {@link #enquireLink()} signals it, then sends one
     * enquire_link. Failures close, or unbind-and-close, the session.
     */
    protected class EnquireLinkSender extends Thread {
        // true = a send has been requested; also used as the wait/notify monitor.
        private final AtomicBoolean sendingEnquireLink = new AtomicBoolean(false);

        public EnquireLinkSender() {
            super("EnquireLinkSender: " + AbstractSession.this);
        }

        @Override
        public void run() {
            logger.debug("Starting EnquireLinkSender for session {}", sessionId);
            while (isReadPdu()) {
                // Wait until the flag flips to true (consumed by the CAS),
                // an interrupt arrives, or the session stops reading PDUs.
                while (!sendingEnquireLink.compareAndSet(true, false)
                        && !Thread.currentThread().isInterrupted()
                        && isReadPdu()) {
                    synchronized (sendingEnquireLink) {
                        try {
                            sendingEnquireLink.wait(500);
                        } catch (InterruptedException e) {
                            Thread.currentThread().interrupt();
                            break;
                        }
                    }
                }
                if (Thread.currentThread().isInterrupted() || !isReadPdu()) {
                    break;
                }
                try {
                    sendEnquireLink();
                } catch (ResponseTimeoutException e) {
                    logger.error("Response timeout on enquireLink", e);
                    close();
                } catch (InvalidResponseException e) {
                    logger.error("Invalid response on enquireLink", e);
                    // lets unbind gracefully
                    unbindAndClose();
                } catch (IOException e) {
                    logger.error("I/O exception on enquireLink", e);
                    close();
                }
            }
            logger.debug("EnquireLinkSender stopped for session {}", sessionId);
        }

        /**
         * This method will send enquire link asynchronously.
         */
        public void enquireLink() {
            if (sendingEnquireLink.compareAndSet(false, true)) {
                synchronized (sendingEnquireLink) {
                    sendingEnquireLink.notify();
                }
            } else {
                logger.debug("Not sending enquire link notify");
            }
        }
    }
}
/*
 * Copyright 2013 University of Chicago and Argonne National Laboratory
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License
 */
package exm.stc.frontend.tree;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import exm.stc.ast.SwiftAST;
import exm.stc.ast.antlr.ExMParser;
import exm.stc.common.exceptions.DoubleDefineException;
import exm.stc.common.exceptions.InvalidSyntaxException;
import exm.stc.common.exceptions.TypeMismatchException;
import exm.stc.common.exceptions.UndefinedTypeException;
import exm.stc.common.exceptions.UserException;
import exm.stc.common.lang.Arg;
import exm.stc.common.lang.DefaultVals;
import exm.stc.common.lang.Types;
import exm.stc.common.lang.Types.FunctionType;
import exm.stc.common.lang.Types.Type;
import exm.stc.common.lang.Types.TypeVariable;
import exm.stc.common.lang.Types.UnionType;
import exm.stc.common.lang.Var;
import exm.stc.common.lang.Var.Alloc;
import exm.stc.common.lang.Var.DefType;
import exm.stc.common.lang.Var.VarProvenance;
import exm.stc.frontend.Context;
import exm.stc.frontend.ExprWalker;
import exm.stc.frontend.LocalContext;
import exm.stc.frontend.VarCreator;

/**
 * Immutable description of a declared function: its {@link FunctionType},
 * the names of its input and output arguments, and any default values for
 * trailing input arguments. Built from the parser's FORMAL_ARGUMENT_LIST
 * subtrees via {@link #fromAST}.
 */
public class FunctionDecl {
  private final FunctionType ftype;
  private final ArrayList<String> inNames;
  private final ArrayList<String> outNames;
  // Default values for input args (vector form; entries may be null).
  private final DefaultVals<Var> defaultVals;

  private FunctionDecl(FunctionType ftype, ArrayList<String> inNames,
          ArrayList<String> outNames, DefaultVals<Var> defaultVals) {
    super();
    this.ftype = ftype;
    this.inNames = inNames;
    this.outNames = outNames;
    this.defaultVals = defaultVals;
  }

  public FunctionType getFunctionType() {
    return ftype;
  }

  public List<String> getInNames() {
    // Unmodifiable view: callers must not mutate the declaration.
    return Collections.unmodifiableList(inNames);
  }

  public DefaultVals<Var> defaultVals() {
    return defaultVals;
  }

  public List<String> getOutNames() {
    return Collections.unmodifiableList(outNames);
  }

  /** Per-argument parse result: name, (possibly union) type, optional default, varargs flag. */
  private static class ArgDecl {
    final String name;
    final Type type;
    /** Default value if any (null otherwise) */
    final Var defaultVal;
    final boolean varargs;

    private ArgDecl(String name, Type type, Var defaultVal, boolean varargs) {
      this.name = name;
      this.type = type;
      this.defaultVal = defaultVal;
      this.varargs = varargs;
    }
  }

  /**
   * Builds a FunctionDecl from the input and output FORMAL_ARGUMENT_LIST
   * subtrees, validating default-value ordering, varargs placement, and
   * duplicate argument names.
   *
   * @param typeParams type-variable names to bind while resolving arg types
   * @throws UserException on any declaration error (reported with context)
   */
  public static FunctionDecl fromAST(Context context, VarCreator varCreator,
      ExprWalker exprWalker, String function, SwiftAST inArgTree,
      SwiftAST outArgTree, Set<String> typeParams) throws UserException {
    // Child context in which the function's type parameters are visible.
    LocalContext typeVarContext = LocalContext.fnContext(context, function);
    for (String typeParam: typeParams) {
      typeVarContext.defineType(typeParam, new TypeVariable(typeParam));
    }

    assert(inArgTree.getType() == ExMParser.FORMAL_ARGUMENT_LIST);
    assert(outArgTree.getType() == ExMParser.FORMAL_ARGUMENT_LIST);
    ArrayList<String> inNames = new ArrayList<String>();
    ArrayList<Type> inArgTypes = new ArrayList<Type>();
    ArrayList<Var> defaultVector = new ArrayList<Var>();
    boolean hasDefault = false;
    boolean varArgs = false;
    for (int i = 0; i < inArgTree.getChildCount(); i++) {
      ArgDecl argInfo = extractArgInfo(typeVarContext, varCreator, exprWalker,
                                       inArgTree.child(i));
      inNames.add(argInfo.name);
      inArgTypes.add(argInfo.type);
      defaultVector.add(argInfo.defaultVal);
      hasDefault = hasDefault || argInfo.defaultVal != null;
      // Once one argument has a default, all following ones must too.
      if (argInfo.defaultVal == null && hasDefault) {
        throw new TypeMismatchException(context, "argument " + argInfo.name +
            " in definition of function " + function + " comes after" +
            " a previous argument with a default value, but does not" +
            " specify a default");
      }
      if (argInfo.varargs) {
        // "..." is only valid as the last input arg, and not with defaults.
        if (i != inArgTree.getChildCount() - 1) {
          throw new TypeMismatchException(context, "variable argument marker " +
              "... must be in final position of input argument list");
        }
        if (hasDefault) {
          throw new TypeMismatchException(context, "Cannot provide default " +
              "values for argument in definition of function " + function +
              " with variable-length argument list");
        }
        varArgs = true;
      }
    }
    assert(inNames.size() == inArgTypes.size());

    ArrayList<String> outNames = new ArrayList<String>();
    ArrayList<Type> outArgTypes = new ArrayList<Type>();
    for (int i = 0; i < outArgTree.getChildCount(); i++) {
      ArgDecl argInfo = extractArgInfo(typeVarContext, varCreator, exprWalker,
                                       outArgTree.child(i));
      // Outputs cannot be varargs and cannot have union types.
      if (argInfo.varargs) {
        throw new TypeMismatchException(context, "cannot have variable" +
            " argument specifier ... in output list");
      } else if (Types.isUnion(argInfo.type)) {
        throw new TypeMismatchException(context, "Cannot have" +
            " union function output type: " + argInfo.type.typeName());
      } else {
        outArgTypes.add(argInfo.type);
        outNames.add(argInfo.name);
      }
    }
    assert(outNames.size() == outArgTypes.size());
    checkDuplicateArgs(context, function, inNames, outNames);

    FunctionType ftype;
    ftype = new FunctionType(inArgTypes, outArgTypes, varArgs, typeParams);
    DefaultVals<Var> defaultVals = DefaultVals.fromDefaultValVector(defaultVector);
    return new FunctionDecl(ftype, inNames, outNames, defaultVals);
  }

  /**
   * Parses a single DECLARATION subtree into an {@link ArgDecl}: resolves
   * alternative base types into a union, detects the varargs marker, and
   * materialises any default value as a global constant.
   */
  private static ArgDecl extractArgInfo(Context context, VarCreator varCreator,
      ExprWalker exprWalker, SwiftAST arg) throws UserException {
    assert(arg.getType() == ExMParser.DECLARATION);
    assert(arg.getChildCount() == 2 || arg.getChildCount() == 3);
    SwiftAST baseTypes = arg.child(0);
    SwiftAST restDecl = arg.child(1);
    assert(restDecl.getType() == ExMParser.DECLARE_VARIABLE_REST);

    // Handle alternative types
    List<Type> altPrefixes = TypeTree.extractMultiType(context, baseTypes);
    assert(altPrefixes.size() > 0);

    ArrayList<Type> alts = new ArrayList<Type>(altPrefixes.size());
    String argName = null;
    for (Type altPrefix: altPrefixes) {
      // Construct var in order to apply array markers and get full type
      Var v = fromFormalArgTree(context, altPrefix, restDecl, DefType.INARG);
      // argName ends up as the name from the last alternative; all
      // alternatives share the same restDecl, hence the same name.
      argName = v.name();
      alts.add(v.type());
    }
    Type argType = UnionType.makeUnion(alts);

    int nextArg = 2;
    boolean thisVarArgs = false;
    if (arg.getChildCount() > nextArg &&
        arg.child(nextArg).getType() == ExMParser.VARARGS) {
      nextArg++;
      thisVarArgs = true;
    }

    Var defaultVar = null;
    if (arg.getChildCount() > nextArg) {
      // TODO: the FunctionDecl is created twice, so two globals are created
      SwiftAST defaultValT = arg.child(nextArg++);
      Arg defaultVal = exprWalker.valueOfConstExpr(context, argType,
                                                   defaultValT, argName);
      String constNamePrefix = Var.generateGlobalConstName(defaultVal);
      defaultVar = context.createGlobalConst(constNamePrefix, argType, true);
      varCreator.assignGlobalConst(context, defaultVar, defaultVal);
    }
    return new ArgDecl(argName, argType, defaultVar, thisVarArgs);
  }

  /**
   * Take a DECLARE_VARIABLE_REST subtree of the AST and return the appropriate declared
   * variable. Doesn't check to see if variable already defined
   * @param context the current context, for info to add to error message
   * @param baseType the type preceding the declaration
   * @param tree a parse tree with the root a DECLARE_MULTI or DECLARE_SINGLE
   *                subtree
   * @return
   * @throws UndefinedTypeException
   * @throws InvalidSyntaxException
   */
  public static Var fromFormalArgTree(
      Context context, Type baseType, SwiftAST tree, DefType deftype)
  throws UserException {
    assert(tree.getType() == ExMParser.DECLARE_VARIABLE_REST);
    assert(tree.getChildCount() >= 1);
    SwiftAST nameTree = tree.child(0);
    assert(nameTree.getType() == ExMParser.ID);
    String varName = nameTree.getText();

    // Process array markers to get final type
    List<SwiftAST> arrMarkers = tree.children(1);
    for (SwiftAST subtree: arrMarkers) {
      // Avoid internal errors if a mapping is applied in this context
      if (subtree.getType() == ExMParser.MAPPING) {
        throw new InvalidSyntaxException(context, "Cannot map function argument");
      }
    }
    Type varType = TypeTree.applyArrayMarkers(context, arrMarkers, baseType);
    return new Var(varType, varName, Alloc.STACK, deftype,
                   VarProvenance.userVar(context.getSourceLoc()));
  }

  /**
   * Verifies that no argument name appears twice across the given name
   * collections (inputs and outputs are checked together).
   *
   * @throws DoubleDefineException on the first duplicate found
   */
  @SafeVarargs
  private static void checkDuplicateArgs(Context context, String functionName,
      Collection<String> ...names) throws DoubleDefineException {
    Set<String> usedNames = new HashSet<String>();
    for (Collection<String> names2: names) {
      for (String name: names2) {
        boolean added = usedNames.add(name);
        if (!added) {
          throw new DoubleDefineException(context,
              "Duplicate argument name " + name + " in function " + functionName);
        }
      }
    }
  }

  /** Materialises the input arguments as stack-allocated INARG variables. */
  public List<Var> getInVars(Context context) {
    ArrayList<Var> inVars = new ArrayList<Var>(inNames.size());
    for (int i = 0; i < inNames.size(); i++) {
      Type t = ftype.getInputs().get(i);
      inVars.add(new Var(t, inNames.get(i), Alloc.STACK, DefType.INARG,
                         VarProvenance.userVar(context.getSourceLoc())));
    }
    return inVars;
  }

  /** Materialises the output arguments as stack-allocated OUTARG variables. */
  public List<Var> getOutVars(Context context) {
    ArrayList<Var> outVars = new ArrayList<Var>(outNames.size());
    for (int i = 0; i < outNames.size(); i++) {
      Type outType = ftype.getOutputs().get(i);
      outVars.add(new Var(outType, outNames.get(i), Alloc.STACK, DefType.OUTARG,
                          VarProvenance.userVar(context.getSourceLoc())));
    }
    return outVars;
  }
}