gt
stringclasses
1 value
context
stringlengths
2.05k
161k
package app.mosn.zdepthshadowlayout; import android.animation.PropertyValuesHolder; import android.animation.ValueAnimator; import android.content.Context; import android.graphics.Canvas; import android.os.Build; import android.util.AttributeSet; import android.view.View; import android.view.animation.LinearInterpolator; import app.mosn.zdepthshadowlayout.shadow.Shadow; import app.mosn.zdepthshadowlayout.shadow.ShadowOval; import app.mosn.zdepthshadowlayout.shadow.ShadowRect; public class ShadowView extends View { protected static final String TAG = "ShadowView"; protected static final String ANIM_PROPERTY_ALPHA_TOP_SHADOW = "alphaTopShadow"; protected static final String ANIM_PROPERTY_ALPHA_BOTTOM_SHADOW = "alphaBottomShadow"; protected static final String ANIM_PROPERTY_OFFSET_TOP_SHADOW = "offsetTopShadow"; protected static final String ANIM_PROPERTY_OFFSET_BOTTOM_SHADOW = "offsetBottomShadow"; protected static final String ANIM_PROPERTY_BLUR_TOP_SHADOW = "blurTopShadow"; protected static final String ANIM_PROPERTY_BLUR_BOTTOM_SHADOW = "blurBottomShadow"; protected Shadow mShadow; protected ZDepthParam mZDepthParam; protected int mZDepthPaddingLeft; protected int mZDepthPaddingTop; protected int mZDepthPaddingRight; protected int mZDepthPaddingBottom; protected long mZDepthAnimDuration; protected boolean mZDepthDoAnimation; protected ShadowView(Context context) { this(context, null); init(); } protected ShadowView(Context context, AttributeSet attrs) { this(context, attrs, 0); init(); } protected ShadowView(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); init(); } protected void init() { setWillNotDraw(false); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) { setLayerType(View.LAYER_TYPE_SOFTWARE, null); } } protected void setZDepthDoAnimation(boolean doAnimation) { mZDepthDoAnimation = doAnimation; } protected void setZDepthAnimDuration(long duration) { mZDepthAnimDuration = duration; } protected void 
setZDepthPaddingLeft(int zDepthPaddingLeftValue) { ZDepth zDepth = getZDepthWithAttributeValue(zDepthPaddingLeftValue); mZDepthPaddingLeft = measureZDepthPadding(zDepth); } protected void setZDepthPaddingTop(int zDepthPaddingTopValue) { ZDepth zDepth = getZDepthWithAttributeValue(zDepthPaddingTopValue); mZDepthPaddingTop = measureZDepthPadding(zDepth); } protected void setZDepthPaddingRight(int zDepthPaddingRightValue) { ZDepth zDepth = getZDepthWithAttributeValue(zDepthPaddingRightValue); mZDepthPaddingRight = measureZDepthPadding(zDepth); } protected void setZDepthPaddingBottom(int zDepthPaddingBottomValue) { ZDepth zDepth = getZDepthWithAttributeValue(zDepthPaddingBottomValue); mZDepthPaddingBottom = measureZDepthPadding(zDepth); } protected int measureZDepthPadding(ZDepth zDepth) { float maxAboveBlurRadius = zDepth.getBlurTopShadowPx(getContext()); float maxAboveOffset = zDepth.getOffsetYTopShadowPx(getContext()); float maxBelowBlurRadius = zDepth.getBlurBottomShadowPx(getContext()); float maxBelowOffset = zDepth.getOffsetYBottomShadowPx(getContext()); float maxAboveSize = maxAboveBlurRadius + maxAboveOffset; float maxBelowSize = maxBelowBlurRadius + maxBelowOffset; return (int) Math.max(maxAboveSize, maxBelowSize); } public int getZDepthPaddingLeft() { return mZDepthPaddingLeft; } public int getZDepthPaddingTop() { return mZDepthPaddingTop; } public int getZDepthPaddingRight() { return mZDepthPaddingRight; } public int getZDepthPaddingBottom() { return mZDepthPaddingBottom; } protected void setShape(int shape) { switch (shape) { case ZDepthShadowLayout.SHAPE_RECT: mShadow = new ShadowRect(); break; case ZDepthShadowLayout.SHAPE_OVAL: mShadow = new ShadowOval(); break; default: throw new IllegalArgumentException("unknown shape value."); } } protected void setZDepth(int zDepthValue) { ZDepth zDepth = getZDepthWithAttributeValue(zDepthValue); setZDepth(zDepth); } protected void setZDepth(ZDepth zDepth) { mZDepthParam = new ZDepthParam(); 
mZDepthParam.initZDepth(getContext(), zDepth); } private ZDepth getZDepthWithAttributeValue(int zDepthValue) { switch (zDepthValue) { case 0: return ZDepth.Depth0; case 1: return ZDepth.Depth1; case 2: return ZDepth.Depth2; case 3: return ZDepth.Depth3; case 4: return ZDepth.Depth4; case 5: return ZDepth.Depth5; default: throw new IllegalArgumentException("unknown zDepth value."); } } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { int wSize = MeasureSpec.getSize(widthMeasureSpec); int hSize = MeasureSpec.getSize(heightMeasureSpec); int wMode = MeasureSpec.getMode(widthMeasureSpec); int hMode = MeasureSpec.getMode(heightMeasureSpec); switch (wMode) { case MeasureSpec.EXACTLY: // NOP break; case MeasureSpec.AT_MOST: case MeasureSpec.UNSPECIFIED: wSize = 0; break; } switch (hMode) { case MeasureSpec.EXACTLY: // NOP break; case MeasureSpec.AT_MOST: case MeasureSpec.UNSPECIFIED: hSize = 0; break; } super.onMeasure( MeasureSpec.makeMeasureSpec(wSize, wMode), MeasureSpec.makeMeasureSpec(hSize, hMode)); } @Override protected void onLayout(boolean changed, int left, int top, int right, int bottom) { int parentWidth = (right - left); int parentHeight = (bottom - top); mShadow.setParameter(mZDepthParam, mZDepthPaddingLeft, mZDepthPaddingTop, parentWidth - mZDepthPaddingRight, parentHeight - mZDepthPaddingBottom); } @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas); mShadow.onDraw(canvas); } protected void changeZDepth(ZDepth zDepth) { int newAlphaTopShadow = zDepth.getAlphaTopShadow(); int newAlphaBottomShadow = zDepth.getAlphaBottomShadow(); float newOffsetYTopShadow = zDepth.getOffsetYTopShadowPx(getContext()); float newOffsetYBottomShadow = zDepth.getOffsetYBottomShadowPx(getContext()); float newBlurTopShadow = zDepth.getBlurTopShadowPx(getContext()); float newBlurBottomShadow = zDepth.getBlurBottomShadowPx(getContext()); if (!mZDepthDoAnimation) { mZDepthParam.mAlphaTopShadow = newAlphaTopShadow; 
mZDepthParam.mAlphaBottomShadow = newAlphaBottomShadow; mZDepthParam.mOffsetYTopShadowPx = newOffsetYTopShadow; mZDepthParam.mOffsetYBottomShadowPx = newOffsetYBottomShadow; mZDepthParam.mBlurTopShadowPx = newBlurTopShadow; mZDepthParam.mBlurBottomShadowPx = newBlurBottomShadow; mShadow.setParameter(mZDepthParam, mZDepthPaddingLeft, mZDepthPaddingTop, getWidth() - mZDepthPaddingRight, getHeight() - mZDepthPaddingBottom); invalidate(); return; } int nowAlphaTopShadow = mZDepthParam.mAlphaTopShadow; int nowAlphaBottomShadow = mZDepthParam.mAlphaBottomShadow; float nowOffsetYTopShadow = mZDepthParam.mOffsetYTopShadowPx; float nowOffsetYBottomShadow = mZDepthParam.mOffsetYBottomShadowPx; float nowBlurTopShadow = mZDepthParam.mBlurTopShadowPx; float nowBlurBottomShadow = mZDepthParam.mBlurBottomShadowPx; PropertyValuesHolder alphaTopShadowHolder = PropertyValuesHolder.ofInt(ANIM_PROPERTY_ALPHA_TOP_SHADOW, nowAlphaTopShadow, newAlphaTopShadow); PropertyValuesHolder alphaBottomShadowHolder = PropertyValuesHolder.ofInt(ANIM_PROPERTY_ALPHA_BOTTOM_SHADOW, nowAlphaBottomShadow, newAlphaBottomShadow); PropertyValuesHolder offsetTopShadowHolder = PropertyValuesHolder.ofFloat(ANIM_PROPERTY_OFFSET_TOP_SHADOW, nowOffsetYTopShadow, newOffsetYTopShadow); PropertyValuesHolder offsetBottomShadowHolder = PropertyValuesHolder.ofFloat(ANIM_PROPERTY_OFFSET_BOTTOM_SHADOW, nowOffsetYBottomShadow, newOffsetYBottomShadow); PropertyValuesHolder blurTopShadowHolder = PropertyValuesHolder.ofFloat(ANIM_PROPERTY_BLUR_TOP_SHADOW, nowBlurTopShadow, newBlurTopShadow); PropertyValuesHolder blurBottomShadowHolder = PropertyValuesHolder.ofFloat(ANIM_PROPERTY_BLUR_BOTTOM_SHADOW, nowBlurBottomShadow, newBlurBottomShadow); ValueAnimator anim = ValueAnimator .ofPropertyValuesHolder( alphaTopShadowHolder, alphaBottomShadowHolder, offsetTopShadowHolder, offsetBottomShadowHolder, blurTopShadowHolder, blurBottomShadowHolder); anim.setDuration(mZDepthAnimDuration); anim.setInterpolator(new LinearInterpolator()); 
anim.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() { @Override public void onAnimationUpdate(ValueAnimator animation) { int alphaTopShadow = (Integer) animation.getAnimatedValue(ANIM_PROPERTY_ALPHA_TOP_SHADOW); int alphaBottomShadow = (Integer) animation.getAnimatedValue(ANIM_PROPERTY_ALPHA_BOTTOM_SHADOW); float offsetTopShadow = (Float) animation.getAnimatedValue(ANIM_PROPERTY_OFFSET_TOP_SHADOW); float offsetBottomShadow = (Float) animation.getAnimatedValue(ANIM_PROPERTY_OFFSET_BOTTOM_SHADOW); float blurTopShadow = (Float) animation.getAnimatedValue(ANIM_PROPERTY_BLUR_TOP_SHADOW); float blurBottomShadow = (Float) animation.getAnimatedValue(ANIM_PROPERTY_BLUR_BOTTOM_SHADOW); mZDepthParam.mAlphaTopShadow = alphaTopShadow; mZDepthParam.mAlphaBottomShadow = alphaBottomShadow; mZDepthParam.mOffsetYTopShadowPx = offsetTopShadow; mZDepthParam.mOffsetYBottomShadowPx = offsetBottomShadow; mZDepthParam.mBlurTopShadowPx = blurTopShadow; mZDepthParam.mBlurBottomShadowPx = blurBottomShadow; mShadow.setParameter(mZDepthParam, mZDepthPaddingLeft, mZDepthPaddingTop, getWidth() - mZDepthPaddingRight, getHeight() - mZDepthPaddingBottom); invalidate(); } }); anim.start(); } }
/**
 * Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.analytics.financial.instrument.annuity;

import java.util.ArrayList;
import java.util.List;

import org.threeten.bp.LocalDate;
import org.threeten.bp.Period;
import org.threeten.bp.ZonedDateTime;

import com.opengamma.analytics.financial.instrument.index.GeneratorSwapFixedON;
import com.opengamma.analytics.financial.instrument.index.GeneratorSwapIborON;
import com.opengamma.analytics.financial.instrument.index.IndexON;
import com.opengamma.analytics.financial.instrument.payment.CouponONSpreadDefinition;
import com.opengamma.analytics.financial.interestrate.annuity.derivative.Annuity;
import com.opengamma.analytics.financial.interestrate.payments.derivative.Coupon;
import com.opengamma.analytics.financial.schedule.ScheduleCalculator;
import com.opengamma.financial.convention.businessday.BusinessDayConvention;
import com.opengamma.financial.convention.calendar.Calendar;
import com.opengamma.timeseries.DoubleTimeSeries;
import com.opengamma.util.ArgumentChecker;

/**
 * A wrapper class for an annuity containing overnight coupons with a spread.
 * <p>
 * The public {@code from(...)} overloads build the coupon payment schedule (via
 * {@link ScheduleCalculator}) from either a total tenor or an explicit end date, then
 * delegate to the private array-based factories which create one
 * {@link CouponONSpreadDefinition} per schedule period.
 */
public class AnnuityCouponONSpreadDefinition extends AnnuityCouponDefinition<CouponONSpreadDefinition> {

  /**
   * Constructor from a list of overnight coupons.
   * @param payments The coupons.
   * @param calendar The holiday calendar
   */
  public AnnuityCouponONSpreadDefinition(final CouponONSpreadDefinition[] payments, final Calendar calendar) {
    super(payments, calendar);
  }

  /**
   * Builds an annuity of overnight coupons from financial details.
   * @param settlementDate The annuity settlement or first fixing date, not null.
   * @param tenorAnnuity The total tenor of the annuity, not null.
   * @param notional The annuity notional.
   * @param generator The overnight generator, not null.
   * @param isPayer The flag indicating if the annuity is paying (true) or receiving (false).
   * @param spread The spread
   * @return The annuity.
   */
  public static AnnuityCouponONSpreadDefinition from(final ZonedDateTime settlementDate, final Period tenorAnnuity, final double notional, final GeneratorSwapFixedON generator,
      final boolean isPayer, final double spread) {
    ArgumentChecker.notNull(settlementDate, "settlement date");
    ArgumentChecker.notNull(tenorAnnuity, "tenor annuity");
    ArgumentChecker.notNull(generator, "generator");
    // Schedule conventions (stub, direction, business days, EOM) all come from the generator.
    final ZonedDateTime[] endFixingPeriodDate = ScheduleCalculator.getAdjustedDateSchedule(settlementDate, tenorAnnuity, generator.getLegsPeriod(), generator.isStubShort(),
        generator.isFromEnd(), generator.getBusinessDayConvention(), generator.getOvernightCalendar(), generator.isEndOfMonth());
    return AnnuityCouponONSpreadDefinition.from(settlementDate, endFixingPeriodDate, notional, generator, isPayer, spread);
  }

  /**
   * Builds an annuity of overnight coupons from financial details.
   * @param settlementDate The annuity settlement or first fixing date, not null.
   * @param tenorAnnuity The total tenor of the annuity, not null.
   * @param notional The annuity notional.
   * @param generator The Ibor/ON generator, not null.
   * @param isPayer The flag indicating if the annuity is paying (true) or receiving (false).
   * @param spread The spread
   * @return The annuity.
   */
  public static AnnuityCouponONSpreadDefinition from(final ZonedDateTime settlementDate, final Period tenorAnnuity, final double notional, final GeneratorSwapIborON generator,
      final boolean isPayer, final double spread) {
    ArgumentChecker.notNull(settlementDate, "settlement date");
    ArgumentChecker.notNull(tenorAnnuity, "tenor annuity");
    ArgumentChecker.notNull(generator, "generator");
    // For an Ibor/ON generator the coupon period is the Ibor index tenor.
    final ZonedDateTime[] endFixingPeriodDate = ScheduleCalculator.getAdjustedDateSchedule(settlementDate, tenorAnnuity, generator.getIndexIbor().getTenor(), generator.isStubShort(),
        generator.isFromEnd(), generator.getBusinessDayConvention(), generator.getOvernightCalendar(), generator.isEndOfMonth());
    return AnnuityCouponONSpreadDefinition.from(settlementDate, endFixingPeriodDate, notional, generator, isPayer, spread);
  }

  /**
   * Builds an annuity of overnight coupons from financial details.
   * @param settlementDate The annuity settlement or first fixing date, not null.
   * @param endFixingPeriodDate The end date of the overnight accrual period. Also called the maturity date of the annuity even if the actual payment can take place one or two days later. Not null.
   * @param notional The annuity notional.
   * @param generator The overnight generator, not null.
   * @param isPayer The flag indicating if the annuity is paying (true) or receiving (false).
   * @param spread The spread
   * @return The annuity.
   */
  public static AnnuityCouponONSpreadDefinition from(final ZonedDateTime settlementDate, final ZonedDateTime endFixingPeriodDate, final double notional, final GeneratorSwapFixedON generator,
      final boolean isPayer, final double spread) {
    ArgumentChecker.notNull(settlementDate, "settlement date");
    ArgumentChecker.notNull(endFixingPeriodDate, "End fixing period date");
    ArgumentChecker.notNull(generator, "generator");
    final ZonedDateTime[] endFixingPeriodDates = ScheduleCalculator.getAdjustedDateSchedule(settlementDate, endFixingPeriodDate, generator.getLegsPeriod(), generator.isStubShort(),
        generator.isFromEnd(), generator.getBusinessDayConvention(), generator.getOvernightCalendar(), generator.isEndOfMonth());
    return AnnuityCouponONSpreadDefinition.from(settlementDate, endFixingPeriodDates, notional, generator, isPayer, spread);
  }

  /**
   * Builds an annuity of overnight coupons from financial details.
   * @param settlementDate The annuity settlement or first fixing date, not null.
   * @param endFixingPeriodDate The end date of the overnight accrual period. Also called the maturity date of the annuity even if the actual payment can take place one or two days later. Not null.
   * @param notional The annuity notional.
   * @param isPayer The flag indicating if the annuity is paying (true) or receiving (false).
   * @param indexON The overnight index.
   * @param paymentLag The payment lag.
   * @param indexCalendar The calendar for the overnight index.
   * @param businessDayConvention The business day convention.
   * @param paymentPeriod The payment period.
   * @param isEOM Is EOM.
   * @param spread The spread
   * @return The annuity.
   */
  public static AnnuityCouponONSpreadDefinition from(final ZonedDateTime settlementDate, final ZonedDateTime endFixingPeriodDate, final double notional, final boolean isPayer,
      final IndexON indexON, final int paymentLag, final Calendar indexCalendar, final BusinessDayConvention businessDayConvention, final Period paymentPeriod, final boolean isEOM,
      final double spread) {
    ArgumentChecker.notNull(settlementDate, "settlement date");
    ArgumentChecker.notNull(endFixingPeriodDate, "End fixing period date");
    ArgumentChecker.notNull(indexON, "overnight index");
    ArgumentChecker.notNull(indexCalendar, "index calendar");
    ArgumentChecker.notNull(businessDayConvention, "business day convention");
    ArgumentChecker.notNull(paymentPeriod, "payment period");
    // NOTE(review): stub-short=true and from-end=false are fixed here, unlike the
    // generator-based overloads which take them from the generator.
    final ZonedDateTime[] endFixingPeriodDates = ScheduleCalculator.getAdjustedDateSchedule(settlementDate, endFixingPeriodDate, paymentPeriod, true,
        false, businessDayConvention, indexCalendar, isEOM); //TODO get rid of hard-codings
    // Payer legs carry a negative notional.
    final double sign = isPayer ? -1.0 : 1.0;
    final double notionalSigned = sign * notional;
    // First coupon accrues from settlement; each subsequent coupon accrues from the
    // previous period's end date.
    final CouponONSpreadDefinition[] coupons = new CouponONSpreadDefinition[endFixingPeriodDates.length];
    coupons[0] = CouponONSpreadDefinition.from(indexON, settlementDate, endFixingPeriodDates[0], notionalSigned, paymentLag, indexCalendar, spread);
    for (int loopcpn = 1; loopcpn < endFixingPeriodDates.length; loopcpn++) {
      coupons[loopcpn] = CouponONSpreadDefinition.from(indexON, endFixingPeriodDates[loopcpn - 1], endFixingPeriodDates[loopcpn], notionalSigned, paymentLag, indexCalendar, spread);
    }
    return new AnnuityCouponONSpreadDefinition(coupons, indexCalendar);
  }

  /**
   * Builds an annuity of overnight coupons from financial details.
   * @param settlementDate The annuity settlement or first fixing date, not null.
   * @param endFixingPeriodDate The end date of the overnight accrual period. Also called the maturity date of the annuity even if the actual payment can take place one or two days later. Not null.
   * @param notional The annuity notional.
   * @param generator The Ibor/ON generator, not null.
   * @param isPayer The flag indicating if the annuity is paying (true) or receiving (false).
   * @param spread The spread
   * @return The annuity.
   */
  public static AnnuityCouponONSpreadDefinition from(final ZonedDateTime settlementDate, final ZonedDateTime endFixingPeriodDate, final double notional, final GeneratorSwapIborON generator,
      final boolean isPayer, final double spread) {
    ArgumentChecker.notNull(settlementDate, "settlement date");
    ArgumentChecker.notNull(endFixingPeriodDate, "End fixing period date");
    ArgumentChecker.notNull(generator, "generator");
    final ZonedDateTime[] endFixingPeriodDates = ScheduleCalculator.getAdjustedDateSchedule(settlementDate, endFixingPeriodDate, generator.getIndexIbor().getTenor(), generator.isStubShort(),
        generator.isFromEnd(), generator.getBusinessDayConvention(), generator.getOvernightCalendar(), generator.isEndOfMonth());
    return AnnuityCouponONSpreadDefinition.from(settlementDate, endFixingPeriodDates, notional, generator, isPayer, spread);
  }

  /**
   * Creates an overnight annuity with spread.
   * @param settlementDate The settlement date
   * @param endFixingPeriodDate The end fixing period dates
   * @param notional The notional
   * @param generator A fixed / overnight swap generator
   * @param isPayer True if the annuity is paid
   * @param spread The spread
   * @return An overnight annuity with spread
   */
  private static AnnuityCouponONSpreadDefinition from(final ZonedDateTime settlementDate, final ZonedDateTime[] endFixingPeriodDate, final double notional, final GeneratorSwapFixedON generator,
      final boolean isPayer, final double spread) {
    // Payer legs carry a negative notional.
    final double sign = isPayer ? -1.0 : 1.0;
    final double notionalSigned = sign * notional;
    final CouponONSpreadDefinition[] coupons = new CouponONSpreadDefinition[endFixingPeriodDate.length];
    coupons[0] = CouponONSpreadDefinition.from(generator.getIndex(), settlementDate, endFixingPeriodDate[0], notionalSigned, generator.getPaymentLag(), generator.getOvernightCalendar(), spread);
    for (int loopcpn = 1; loopcpn < endFixingPeriodDate.length; loopcpn++) {
      coupons[loopcpn] = CouponONSpreadDefinition.from(generator.getIndex(), endFixingPeriodDate[loopcpn - 1], endFixingPeriodDate[loopcpn], notionalSigned, generator.getPaymentLag(),
          generator.getOvernightCalendar(), spread);
    }
    return new AnnuityCouponONSpreadDefinition(coupons, generator.getOvernightCalendar());
  }

  /**
   * Creates an overnight annuity with spread.
   * @param settlementDate The settlement date
   * @param endFixingPeriodDate The end fixing period dates
   * @param notional The notional
   * @param generator A ibor / overnight swap generator
   * @param isPayer True if the annuity is paid
   * @param spread The spread
   * @return An overnight annuity with spread
   */
  private static AnnuityCouponONSpreadDefinition from(final ZonedDateTime settlementDate, final ZonedDateTime[] endFixingPeriodDate, final double notional, final GeneratorSwapIborON generator,
      final boolean isPayer, final double spread) {
    // Payer legs carry a negative notional.
    final double sign = isPayer ? -1.0 : 1.0;
    final double notionalSigned = sign * notional;
    final CouponONSpreadDefinition[] coupons = new CouponONSpreadDefinition[endFixingPeriodDate.length];
    coupons[0] = CouponONSpreadDefinition.from(generator.getIndexON(), settlementDate, endFixingPeriodDate[0], notionalSigned, generator.getPaymentLag(), generator.getOvernightCalendar(), spread);
    for (int loopcpn = 1; loopcpn < endFixingPeriodDate.length; loopcpn++) {
      coupons[loopcpn] = CouponONSpreadDefinition.from(generator.getIndexON(), endFixingPeriodDate[loopcpn - 1], endFixingPeriodDate[loopcpn], notionalSigned, generator.getPaymentLag(),
          generator.getOvernightCalendar(), spread);
    }
    return new AnnuityCouponONSpreadDefinition(coupons, generator.getOvernightCalendar());
  }

  @Override
  public Annuity<? extends Coupon> toDerivative(final ZonedDateTime valZdt, final DoubleTimeSeries<ZonedDateTime> indexFixingTS) {
    ArgumentChecker.notNull(valZdt, "date");
    ArgumentChecker.notNull(indexFixingTS, "index fixing time series");
    final List<Coupon> resultList = new ArrayList<>();
    final CouponONSpreadDefinition[] payments = getPayments();
    // Compare dates in the payment time zone so "already paid" is decided consistently.
    final ZonedDateTime valZdtInPaymentZone = valZdt.withZoneSameInstant(payments[0].getPaymentDate().getZone());
    final LocalDate valDate = valZdtInPaymentZone.toLocalDate();
    // Keep only coupons whose payment date is on or after the valuation date.
    for (int loopcoupon = 0; loopcoupon < payments.length; loopcoupon++) {
      if (!valDate.isAfter(payments[loopcoupon].getPaymentDate().toLocalDate())) {
        resultList.add(payments[loopcoupon].toDerivative(valZdt, indexFixingTS));
      }
    }
    return new Annuity<>(resultList.toArray(new Coupon[resultList.size()]));
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import java.io.IOException;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicReference;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
// Replaced deprecated junit.framework.Assert with org.junit.Assert (same static methods).
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * Verifies that scanners observe data added via bulk load, both for HFiles written by
 * LoadIncrementalHFiles and for "native" HFiles carrying their own sequence ids, and
 * that bulk loads concurrent with an open scanner become visible on next().
 */
@Category({RegionServerTests.class, MediumTests.class})
public class TestScannerWithBulkload {
  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    TEST_UTIL.startMiniCluster(1);
  }

  /** Creates a table with a single family 'col' keeping up to 3 versions. */
  private static void createTable(Admin admin, TableName tableName) throws IOException {
    HTableDescriptor desc = new HTableDescriptor(tableName);
    HColumnDescriptor hcd = new HColumnDescriptor("col");
    hcd.setMaxVersions(3);
    desc.addFamily(hcd);
    admin.createTable(desc);
  }

  @Test
  public void testBulkLoad() throws Exception {
    TableName tableName = TableName.valueOf("testBulkLoad");
    long l = System.currentTimeMillis();
    HBaseAdmin admin = TEST_UTIL.getHBaseAdmin();
    createTable(admin, tableName);
    Scan scan = createScan();
    final Table table = init(admin, l, scan, tableName);
    // use bulkload
    final Path hfilePath = writeToHFile(l, "/temp/testBulkLoad/", "/temp/testBulkLoad/col/file",
      false);
    Configuration conf = TEST_UTIL.getConfiguration();
    conf.setBoolean("hbase.mapreduce.bulkload.assign.sequenceNumbers", true);
    final LoadIncrementalHFiles bulkload = new LoadIncrementalHFiles(conf);
    try (RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName)) {
      bulkload.doBulkLoad(hfilePath, admin, table, locator);
    }
    ResultScanner scanner = table.getScanner(scan);
    Result result = scanner.next();
    result = scanAfterBulkLoad(scanner, result, "version2");
    Put put0 = new Put(Bytes.toBytes("row1"));
    put0.add(new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("col"), Bytes.toBytes("q"), l,
      Bytes.toBytes("version3")));
    table.put(put0);
    admin.flush(tableName);
    scanner = table.getScanner(scan);
    result = scanner.next();
    // Reuse the shared verification helper instead of a copy-pasted scan loop.
    scanAfterBulkLoad(scanner, result, "version3");
    scanner.close();
    table.close();
  }

  /**
   * Drains the scanner, asserting every 'row1,col:q' cell seen has the expected value.
   * Returns the terminal null result.
   */
  private Result scanAfterBulkLoad(ResultScanner scanner, Result result, String expctedVal)
      throws IOException {
    while (result != null) {
      List<Cell> cells = result.getColumnCells(Bytes.toBytes("col"), Bytes.toBytes("q"));
      for (Cell _c : cells) {
        if (Bytes.toString(_c.getRowArray(), _c.getRowOffset(), _c.getRowLength())
            .equals("row1")) {
          System.out
              .println(Bytes.toString(_c.getRowArray(), _c.getRowOffset(), _c.getRowLength()));
          System.out.println(Bytes.toString(_c.getQualifierArray(), _c.getQualifierOffset(),
            _c.getQualifierLength()));
          System.out.println(
            Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength()));
          Assert.assertEquals(expctedVal,
            Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength()));
        }
      }
      result = scanner.next();
    }
    return result;
  }

  // If nativeHFile is true, we will set cell seq id and MAX_SEQ_ID_KEY in the file.
  // Else, we will set BULKLOAD_TIME_KEY.
  private Path writeToHFile(long l, String hFilePath, String pathStr, boolean nativeHFile)
      throws IOException {
    FileSystem fs = FileSystem.get(TEST_UTIL.getConfiguration());
    final Path hfilePath = new Path(hFilePath);
    fs.mkdirs(hfilePath);
    Path path = new Path(pathStr);
    HFile.WriterFactory wf = HFile.getWriterFactoryNoCache(TEST_UTIL.getConfiguration());
    Assert.assertNotNull(wf);
    HFileContext context = new HFileContext();
    HFile.Writer writer = wf.withPath(fs, path).withFileContext(context).create();
    KeyValue kv = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("col"), Bytes.toBytes("q"), l,
      Bytes.toBytes("version2"));
    // Set cell seq id to test bulk load native hfiles.
    if (nativeHFile) {
      // Set a big seq id. Scan should not look at this seq id in a bulk loaded file.
      // Scan should only look at the seq id appended at the bulk load time, and not skip
      // this kv.
      kv.setSequenceId(9999999);
    }
    writer.append(kv);
    if (nativeHFile) {
      // Set a big MAX_SEQ_ID_KEY. Scan should not look at this seq id in a bulk loaded file.
      // Scan should only look at the seq id appended at the bulk load time, and not skip its
      // kv.
      // Deprecated 'new Long(...)' boxing replaced with a plain long literal.
      writer.appendFileInfo(StoreFile.MAX_SEQ_ID_KEY, Bytes.toBytes(9999999L));
    } else {
      writer.appendFileInfo(StoreFile.BULKLOAD_TIME_KEY,
        Bytes.toBytes(System.currentTimeMillis()));
    }
    writer.close();
    return hfilePath;
  }

  /**
   * Seeds the table with 'version0'/'version1' for row1 and 'version0' for row2, flushing
   * after each put and compacting, then asserts the pre-bulk-load state.
   */
  private Table init(HBaseAdmin admin, long l, Scan scan, TableName tableName) throws Exception {
    Table table = TEST_UTIL.getConnection().getTable(tableName);
    Put put0 = new Put(Bytes.toBytes("row1"));
    put0.add(new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("col"), Bytes.toBytes("q"), l,
      Bytes.toBytes("version0")));
    table.put(put0);
    admin.flush(tableName);
    Put put1 = new Put(Bytes.toBytes("row2"));
    put1.add(new KeyValue(Bytes.toBytes("row2"), Bytes.toBytes("col"), Bytes.toBytes("q"), l,
      Bytes.toBytes("version0")));
    table.put(put1);
    admin.flush(tableName);
    put0 = new Put(Bytes.toBytes("row1"));
    put0.add(new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("col"), Bytes.toBytes("q"), l,
      Bytes.toBytes("version1")));
    table.put(put0);
    admin.flush(tableName);
    admin.compact(tableName);

    ResultScanner scanner = table.getScanner(scan);
    Result result = scanner.next();
    List<Cell> cells = result.getColumnCells(Bytes.toBytes("col"), Bytes.toBytes("q"));
    Assert.assertEquals(1, cells.size());
    Cell _c = cells.get(0);
    Assert.assertEquals("version1",
      Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength()));
    scanner.close();
    return table;
  }

  @Test
  public void testBulkLoadWithParallelScan() throws Exception {
    final TableName tableName = TableName.valueOf("testBulkLoadWithParallelScan");
    final long l = System.currentTimeMillis();
    final HBaseAdmin admin = TEST_UTIL.getHBaseAdmin();
    createTable(admin, tableName);
    Scan scan = createScan();
    final Table table = init(admin, l, scan, tableName);
    // use bulkload
    final Path hfilePath = writeToHFile(l, "/temp/testBulkLoadWithParallelScan/",
      "/temp/testBulkLoadWithParallelScan/col/file", false);
    Configuration conf = TEST_UTIL.getConfiguration();
    conf.setBoolean("hbase.mapreduce.bulkload.assign.sequenceNumbers", true);
    final LoadIncrementalHFiles bulkload = new LoadIncrementalHFiles(conf);
    ResultScanner scanner = table.getScanner(scan);
    // Create a scanner and then do bulk load.
    // BUGFIX: the original thread swallowed exceptions in empty catch blocks AND only
    // counted down the latch on success, so any failure hung the test at latch.await().
    // The latch is now released in finally and the failure is captured and asserted.
    final CountDownLatch latch = new CountDownLatch(1);
    final AtomicReference<Throwable> loadFailure = new AtomicReference<>();
    new Thread() {
      @Override
      public void run() {
        try {
          Put put1 = new Put(Bytes.toBytes("row5"));
          put1.add(new KeyValue(Bytes.toBytes("row5"), Bytes.toBytes("col"), Bytes.toBytes("q"), l,
            Bytes.toBytes("version0")));
          table.put(put1);
          try (RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName)) {
            bulkload.doBulkLoad(hfilePath, admin, table, locator);
          }
        } catch (Throwable t) {
          loadFailure.set(t);
        } finally {
          latch.countDown();
        }
      }
    }.start();
    latch.await();
    Assert.assertNull("bulk load thread failed: " + loadFailure.get(), loadFailure.get());
    // By the time we do next() the bulk loaded files are also added to the kv
    // scanner
    Result result = scanner.next();
    scanAfterBulkLoad(scanner, result, "version1");
    scanner.close();
    table.close();
  }

  @Test
  public void testBulkLoadNativeHFile() throws Exception {
    TableName tableName = TableName.valueOf("testBulkLoadNativeHFile");
    long l = System.currentTimeMillis();
    HBaseAdmin admin = TEST_UTIL.getHBaseAdmin();
    createTable(admin, tableName);
    Scan scan = createScan();
    final Table table = init(admin, l, scan, tableName);
    // use bulkload
    final Path hfilePath = writeToHFile(l, "/temp/testBulkLoadNativeHFile/",
      "/temp/testBulkLoadNativeHFile/col/file", true);
    Configuration conf = TEST_UTIL.getConfiguration();
    conf.setBoolean("hbase.mapreduce.bulkload.assign.sequenceNumbers", true);
    final LoadIncrementalHFiles bulkload = new LoadIncrementalHFiles(conf);
    try (RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName)) {
      bulkload.doBulkLoad(hfilePath, admin, table, locator);
    }
    ResultScanner scanner = table.getScanner(scan);
    Result result = scanner.next();
    // We had 'version0', 'version1' for 'row1,col:q' in the table.
    // Bulk load added 'version2' scanner should be able to see 'version2'
    result = scanAfterBulkLoad(scanner, result, "version2");
    Put put0 = new Put(Bytes.toBytes("row1"));
    put0.add(new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("col"), Bytes.toBytes("q"), l,
      Bytes.toBytes("version3")));
    table.put(put0);
    admin.flush(tableName);
    scanner = table.getScanner(scan);
    result = scanner.next();
    // Reuse the shared verification helper instead of a copy-pasted scan loop.
    scanAfterBulkLoad(scanner, result, "version3");
    scanner.close();
    table.close();
  }

  /** A scan that returns up to 3 versions, matching the table's family setting. */
  private Scan createScan() {
    Scan scan = new Scan();
    scan.setMaxVersions(3);
    return scan;
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.aggregations.metrics; import org.apache.lucene.document.Document; import org.apache.lucene.document.DoublePoint; import org.apache.lucene.document.Field; import org.apache.lucene.document.FloatPoint; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.StringField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.MultiReader; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.PointValues; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.util.Bits; import 
org.apache.lucene.util.FutureArrays; import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptEngine; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.BucketCollector; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.MultiBucketCollector; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregator; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator; import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.lookup.LeafDocLookup; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.function.BiFunction; import java.util.function.Consumer; 
import java.util.function.Function;
import java.util.function.Supplier;

import static java.util.Collections.singleton;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.hamcrest.Matchers.equalTo;

public class MaxAggregatorTests extends AggregatorTestCase {

    // NOTE(review): these are per-instance constants; `private static final` (and
    // UPPER_SNAKE_CASE already suggests that) would match the public constants below.
    private final String SCRIPT_NAME = "script_name";
    private final long SCRIPT_VALUE = 19L;

    /** Script to take a field name in params and sum the values of the field. */
    public static final String SUM_FIELD_PARAMS_SCRIPT = "sum_field_params";

    /** Script to sum the values of a field named {@code values}. */
    public static final String SUM_VALUES_FIELD_SCRIPT = "sum_values_field";

    /** Script to return the value of a field named {@code value}. */
    public static final String VALUE_FIELD_SCRIPT = "value_field";

    /** Script to return the {@code _value} provided by aggs framework. */
    public static final String VALUE_SCRIPT = "_value";

    /**
     * Registers the mock scripts used by the tests below. Each "sum" script adds the
     * optional {@code inc} param to every value of the named doc-values field.
     */
    @Override
    protected ScriptService getMockScriptService() {
        Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();
        // Reads the optional integer param "inc"; defaults to 0 when absent.
        Function<Map<String, Object>, Integer> getInc = vars -> {
            if (vars == null || vars.containsKey("inc") == false) {
                return 0;
            } else {
                return ((Number) vars.get("inc")).intValue();
            }
        };
        // Returns the list of (value + inc) for the given doc-values field.
        BiFunction<Map<String, Object>, String, Object> sum = (vars, fieldname) -> {
            int inc = getInc.apply(vars);
            LeafDocLookup docLookup = (LeafDocLookup) vars.get("doc");
            List<Long> values = new ArrayList<>();
            for (Object v : docLookup.get(fieldname)) {
                values.add(((Number) v).longValue() + inc);
            }
            return values;
        };
        scripts.put(SCRIPT_NAME, script -> SCRIPT_VALUE);
        scripts.put(SUM_FIELD_PARAMS_SCRIPT, vars -> {
            String fieldname = (String) vars.get("field");
            return sum.apply(vars, fieldname);
        });
        scripts.put(SUM_VALUES_FIELD_SCRIPT, vars -> sum.apply(vars, "values"));
        scripts.put(VALUE_FIELD_SCRIPT, vars -> sum.apply(vars, "value"));
        scripts.put(VALUE_SCRIPT, vars -> {
            int inc = getInc.apply(vars);
            return ((Number) vars.get("_value")).doubleValue() + inc;
        });

        MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, scripts, Collections.emptyMap());
        Map<String, ScriptEngine> engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine);
        return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS);
    }

    /** An empty index yields the max identity (-Infinity) and "no value". */
    public void testNoDocs() throws IOException {
        testCase(new MatchAllDocsQuery(), iw -> {
            // Intentionally not writing any docs
        }, max -> {
            assertEquals(Double.NEGATIVE_INFINITY, max.getValue(), 0);
            assertFalse(AggregationInspectionHelper.hasValue(max));
        });
    }

    /** Docs exist but none carry the aggregated field. */
    public void testNoMatchingField() throws IOException {
        testCase(new MatchAllDocsQuery(), iw -> {
            iw.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 7)));
            iw.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 1)));
        }, max -> {
            assertEquals(Double.NEGATIVE_INFINITY, max.getValue(), 0);
            assertFalse(AggregationInspectionHelper.hasValue(max));
        });
    }

    public void testSomeMatchesSortedNumericDocValues() throws IOException {
        testCase(new DocValuesFieldExistsQuery("number"), iw -> {
            iw.addDocument(singleton(new SortedNumericDocValuesField("number", 7)));
            iw.addDocument(singleton(new SortedNumericDocValuesField("number", 1)));
        }, max -> {
            assertEquals(7, max.getValue(), 0);
            assertTrue(AggregationInspectionHelper.hasValue(max));
        });
    }

    public void testSomeMatchesNumericDocValues() throws IOException {
        testCase(new DocValuesFieldExistsQuery("number"), iw -> {
            iw.addDocument(singleton(new NumericDocValuesField("number", 7)));
            iw.addDocument(singleton(new NumericDocValuesField("number", 1)));
        }, max -> {
            assertEquals(7, max.getValue(), 0);
            assertTrue(AggregationInspectionHelper.hasValue(max));
        });
    }

    /** Range query [0,5] excludes the doc with 7, so the max of the remainder is 1. */
    public void testQueryFiltering() throws IOException {
        testCase(IntPoint.newRangeQuery("number", 0, 5), iw -> {
            iw.addDocument(Arrays.asList(new IntPoint("number", 7), new SortedNumericDocValuesField("number", 7)));
            iw.addDocument(Arrays.asList(new IntPoint("number", 1), new SortedNumericDocValuesField("number", 1)));
        }, max -> {
            assertEquals(1, max.getValue(), 0);
            assertTrue(AggregationInspectionHelper.hasValue(max));
        });
    }

    /** Query matches nothing, so the result falls back to -Infinity. */
    public void testQueryFiltersAll() throws IOException {
        testCase(IntPoint.newRangeQuery("number", -1, 0), iw -> {
            iw.addDocument(Arrays.asList(new IntPoint("number", 7), new SortedNumericDocValuesField("number", 7)));
            iw.addDocument(Arrays.asList(new IntPoint("number", 1), new SortedNumericDocValuesField("number", 1)));
        }, max -> {
            assertEquals(Double.NEGATIVE_INFINITY, max.getValue(), 0);
            assertFalse(AggregationInspectionHelper.hasValue(max));
        });
    }

    /**
     * A null MappedFieldType means the field is unmapped, so the aggregation is empty.
     * NOTE(review): assertEquals args are (actual, expected) here — swapped vs. JUnit
     * convention; harmless but inconsistent with the other tests.
     */
    public void testUnmappedField() throws IOException {
        MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("_name").field("number");
        testCase(aggregationBuilder, new DocValuesFieldExistsQuery("number"), iw -> {
            iw.addDocument(singleton(new NumericDocValuesField("number", 7)));
            iw.addDocument(singleton(new NumericDocValuesField("number", 1)));
        }, max -> {
            assertEquals(max.getValue(), Double.NEGATIVE_INFINITY, 0);
            assertFalse(AggregationInspectionHelper.hasValue(max));
        }, null);
    }

    /** Unmapped field but with missing(19L): the missing value becomes the max. */
    public void testUnmappedWithMissingField() throws IOException {
        MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("_name").field("number").missing(19L);
        testCase(aggregationBuilder, new DocValuesFieldExistsQuery("number"), iw -> {
            iw.addDocument(singleton(new NumericDocValuesField("number", 7)));
            iw.addDocument(singleton(new NumericDocValuesField("number", 1)));
        }, max -> {
            assertEquals(max.getValue(), 19.0, 0);
            assertTrue(AggregationInspectionHelper.hasValue(max));
        }, null);
    }

    /** A script on the builder overrides doc values entirely. */
    public void testScript() throws IOException {
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER);
        fieldType.setName("number");

        MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("_name")
            .field("number")
            .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, SCRIPT_NAME, Collections.emptyMap()));

        testCase(aggregationBuilder, new DocValuesFieldExistsQuery("number"), iw -> {
            iw.addDocument(singleton(new NumericDocValuesField("number", 7)));
            iw.addDocument(singleton(new NumericDocValuesField("number", 1)));
        }, max -> {
            assertEquals(max.getValue(), SCRIPT_VALUE, 0); // Note this is the script value (19L), not the doc values above
            assertTrue(AggregationInspectionHelper.hasValue(max));
        }, fieldType);
    }

    /** Convenience overload: integer field "number" with a default max builder. */
    private void testCase(Query query,
                          CheckedConsumer<RandomIndexWriter, IOException> buildIndex,
                          Consumer<InternalMax> verify) throws IOException {
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER);
        fieldType.setName("number");
        MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("_name").field("number");
        testCase(aggregationBuilder, query, buildIndex, verify, fieldType);
    }

    /**
     * Core harness: builds an index via {@code buildIndex}, runs the max aggregation
     * under {@code query}, and hands the resulting InternalMax to {@code verify}.
     */
    private void testCase(MaxAggregationBuilder aggregationBuilder, Query query,
                          CheckedConsumer<RandomIndexWriter, IOException> buildIndex,
                          Consumer<InternalMax> verify,
                          MappedFieldType fieldType) throws IOException {
        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
        buildIndex.accept(indexWriter);
        indexWriter.close();

        IndexReader indexReader = DirectoryReader.open(directory);
        IndexSearcher indexSearcher = newSearcher(indexReader, true, true);

        MaxAggregator aggregator = createAggregator(query, aggregationBuilder, indexSearcher, createIndexSettings(), fieldType);
        aggregator.preCollection();
        indexSearcher.search(query, aggregator);
        aggregator.postCollection();
        verify.accept((InternalMax) aggregator.buildAggregation(0L));

        indexReader.close();
        directory.close();
    }

    /** Exercises the BKD-based max shortcut for every numeric point type. */
    public void testMaxShortcutRandom() throws Exception {
        testMaxShortcutCase(
            () -> randomLongBetween(Integer.MIN_VALUE, Integer.MAX_VALUE),
            (n) -> new LongPoint("number", n.longValue()),
            (v) -> LongPoint.decodeDimension(v, 0));

        testMaxShortcutCase(
            () -> randomInt(),
            (n) -> new IntPoint("number", n.intValue()),
            (v) -> IntPoint.decodeDimension(v, 0));

        testMaxShortcutCase(
            () -> randomFloat(),
            (n) -> new FloatPoint("number", n.floatValue()),
            (v) -> FloatPoint.decodeDimension(v, 0));

        testMaxShortcutCase(
            () -> randomDouble(),
            (n) -> new DoublePoint("number", n.doubleValue()),
            (v) -> DoublePoint.decodeDimension(v, 0));
    }

    /**
     * Verifies MaxAggregator.findLeafMaxValue (the point-values shortcut): index random
     * values (with duplicates and value-less docs), then repeatedly delete the current
     * max and check the shortcut reports the next one, or detects all-deleted leaves.
     * NoMergePolicy keeps deletes from being merged away so the shortcut must cope
     * with live-docs filtering.
     */
    private void testMaxShortcutCase(Supplier<Number> randomNumber,
                                     Function<Number, Field> pointFieldFunc,
                                     Function<byte[], Number> pointConvertFunc) throws IOException {
        Directory directory = newDirectory();
        IndexWriterConfig config = newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE);
        IndexWriter indexWriter = new IndexWriter(directory, config);
        List<Document> documents = new ArrayList<>();
        List<Tuple<Integer, Number>> values = new ArrayList<>();
        int numValues = atLeast(50);
        int docID = 0;
        for (int i = 0; i < numValues; i++) {
            int numDup = randomIntBetween(1, 3);
            for (int j = 0; j < numDup; j++) {
                Document document = new Document();
                Number nextValue = randomNumber.get();
                values.add(new Tuple<>(docID, nextValue));
                document.add(new StringField("id", Integer.toString(docID), Field.Store.NO));
                document.add(pointFieldFunc.apply(nextValue));
                documents.add(document);
                docID ++;
            }
        }
        // insert some documents without a value for the metric field.
        for (int i = 0; i < 3; i++) {
            Document document = new Document();
            documents.add(document);
        }
        indexWriter.addDocuments(documents);
        // Ascending sort so values.get(values.size()-1) is always the current max.
        Collections.sort(values, Comparator.comparingDouble(t -> t.v2().doubleValue()));
        try (IndexReader reader = DirectoryReader.open(indexWriter)) {
            LeafReaderContext ctx = reader.leaves().get(0);
            Number res = MaxAggregator.findLeafMaxValue(ctx.reader(), "number" , pointConvertFunc);
            assertThat(res, equalTo(values.get(values.size()-1).v2()));
        }
        // Delete from the top down; after each delete the shortcut must yield the next
        // max, or null when the max leaf only contains deleted docs (checked below).
        for (int i = values.size()-1; i > 0; i--) {
            indexWriter.deleteDocuments(new Term("id", values.get(i).v1().toString()));
            try (IndexReader reader = DirectoryReader.open(indexWriter)) {
                LeafReaderContext ctx = reader.leaves().get(0);
                Number res = MaxAggregator.findLeafMaxValue(ctx.reader(), "number" , pointConvertFunc);
                if (res != null) {
                    assertThat(res, equalTo(values.get(i - 1).v2()));
                } else {
                    assertAllDeleted(ctx.reader().getLiveDocs(), ctx.reader().getPointValues("number"));
                }
            }
        }
        indexWriter.deleteDocuments(new Term("id", values.get(0).v1().toString()));
        try (IndexReader reader = DirectoryReader.open(indexWriter)) {
            LeafReaderContext ctx = reader.leaves().get(0);
            Number res = MaxAggregator.findLeafMaxValue(ctx.reader(), "number" , pointConvertFunc);
            assertThat(res, equalTo(null));
        }
        indexWriter.close();
        directory.close();
    }

    // checks that documents inside the max leaves are all deleted
    private void assertAllDeleted(Bits liveDocs, PointValues values) throws IOException {
        final byte[] maxValue = values.getMaxPackedValue();
        int numBytes = values.getBytesPerDimension();
        final boolean[] seen = new boolean[1];
        values.intersect(new PointValues.IntersectVisitor() {
            @Override
            public void visit(int docID) {
                // CELL_INSIDE_QUERY is never returned below, so this must not be called.
                throw new AssertionError();
            }

            @Override
            public void visit(int docID, byte[] packedValue) {
                // Every doc in a max-value cell must be deleted for the shortcut's null.
                assertFalse(liveDocs.get(docID));
                seen[0] = true;
            }

            @Override
            public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
                // Only descend into cells whose upper bound equals the global max.
                if (FutureArrays.equals(maxPackedValue, 0, numBytes, maxValue, 0, numBytes)) {
                    return PointValues.Relation.CELL_CROSSES_QUERY;
                }
                return PointValues.Relation.CELL_OUTSIDE_QUERY;
            }
        });
        assertTrue(seen[0]);
    }

    public void testSingleValuedField() throws IOException {
        testCase(
            new MatchAllDocsQuery(),
            iw -> {
                final int numDocs = 10;
                for (int i = 0; i < numDocs; i++) {
                    iw.addDocument(singleton(new NumericDocValuesField("number", i + 1)));
                }
            },
            max -> {
                assertEquals(10, max.getValue(), 0);
                assertTrue(AggregationInspectionHelper.hasValue(max));
            });
    }

    /** Checks the custom "0000.0" format string is applied to getValueAsString(). */
    public void testSingleValuedFieldWithFormatter() throws IOException {
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER);
        fieldType.setName("value");

        MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("_name")
            .format("0000.0")
            .field("value");

        testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> {
            final int numDocs = 10;
            for (int i = 0; i < numDocs; i++) {
                iw.addDocument(singleton(new NumericDocValuesField("value", i + 1)));
            }
        }, max -> {
            assertEquals(10.0, max.getValue(), 0);
            assertTrue(AggregationInspectionHelper.hasValue(max));
            assertEquals("0010.0", max.getValueAsString());
        }, fieldType);
    }

    /** Runs max under a global agg and checks getProperty() path resolution. */
    public void testSingleValuedFieldGetProperty() throws IOException {
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER);
        fieldType.setName("value");
        fieldType.setHasDocValues(true);

        AggregationBuilder aggregationBuilder = AggregationBuilders.global("global")
            .subAggregation(AggregationBuilders.max("max").field("value"));

        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
        final int numDocs = 10;
        for (int i = 0; i < numDocs; i++) {
            indexWriter.addDocument(singleton(new NumericDocValuesField("value", i + 1)));
        }
        indexWriter.close();

        IndexReader indexReader = DirectoryReader.open(directory);
        IndexSearcher indexSearcher = newSearcher(indexReader, true, true);

        GlobalAggregator aggregator =
            createAggregator(aggregationBuilder, indexSearcher, fieldType);
        aggregator.preCollection();
        indexSearcher.search(new MatchAllDocsQuery(), aggregator);
        aggregator.postCollection();

        Global global = (Global) aggregator.buildAggregation(0L);
        assertNotNull(global);
        assertEquals("global", global.getName());
        assertEquals(10L, global.getDocCount());
        assertNotNull(global.getAggregations());
        assertEquals(1, global.getAggregations().asMap().size());

        Max max = global.getAggregations().get("max");
        assertNotNull(max);
        assertEquals("max", max.getName());
        assertEquals(10.0, max.getValue(), 0);
        // getProperty must resolve both the sub-agg object and its "value" leaf.
        assertEquals(max, ((InternalAggregation) global).getProperty("max"));
        assertEquals(10.0, (double) ((InternalAggregation)global).getProperty("max.value"), 0);
        assertEquals(10.0, (double) ((InternalAggregation)max).getProperty("value"), 0);

        indexReader.close();
        directory.close();
    }

    /** One mapped index plus one empty/unmapped index combined in a MultiReader. */
    public void testSingleValuedFieldPartiallyUnmapped() throws IOException {
        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
        final int numDocs = 10;
        for (int i = 0; i < numDocs; i++) {
            indexWriter.addDocument(singleton(new NumericDocValuesField("value", i + 1)));
        }
        indexWriter.close();

        Directory unmappedDirectory = newDirectory();
        RandomIndexWriter unmappedIndexWriter = new RandomIndexWriter(random(), unmappedDirectory);
        unmappedIndexWriter.close();

        IndexReader indexReader = DirectoryReader.open(directory);
        IndexReader unamappedIndexReader = DirectoryReader.open(unmappedDirectory);
        MultiReader multiReader = new MultiReader(indexReader, unamappedIndexReader);
        IndexSearcher indexSearcher = newSearcher(multiReader, true, true);

        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER);
        fieldType.setName("value");
        AggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max").field("value");

        MaxAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType);
        aggregator.preCollection();
        indexSearcher.search(new MatchAllDocsQuery(), aggregator);
        aggregator.postCollection();

        InternalMax max = (InternalMax) aggregator.buildAggregation(0L);
        assertEquals(10.0, max.getValue(), 0);
        assertEquals("max", max.getName());
        assertTrue(AggregationInspectionHelper.hasValue(max));

        multiReader.close();
        directory.close();
        unmappedDirectory.close();
    }

    /** VALUE_SCRIPT with no params: inc defaults to 0, so the max is unchanged. */
    public void testSingleValuedFieldWithValueScript() throws IOException {
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER);
        fieldType.setName("value");

        MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max")
            .field("value")
            .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap()));

        testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> {
            final int numDocs = 10;
            for (int i = 0; i < numDocs; i++) {
                iw.addDocument(singleton(new NumericDocValuesField("value", i + 1)));
            }
        }, max -> {
            assertTrue(AggregationInspectionHelper.hasValue(max));
            assertEquals(10.0, max.getValue(), 0);
            assertEquals("max", max.getName());
        }, fieldType);
    }

    /** VALUE_SCRIPT with inc=1: every value is shifted up, so max is 10+1. */
    public void testSingleValuedFieldWithValueScriptWithParams() throws IOException {
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER);
        fieldType.setName("value");

        Map<String, Object> params = Collections.singletonMap("inc", 1);
        MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max")
            .field("value")
            .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, params));

        testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> {
            final int numDocs = 10;
            for (int i = 0; i < numDocs; i++) {
                iw.addDocument(singleton(new NumericDocValuesField("value", i + 1)));
            }
        }, max -> {
            assertEquals(11.0, max.getValue(), 0);
            assertEquals("max", max.getName());
            assertTrue(AggregationInspectionHelper.hasValue(max));
        }, fieldType);
    }

    /** Two values per doc (i+2, i+3); the overall max is 9+3 = 12. */
    public void testMultiValuedField() throws IOException {
        testCase(new MatchAllDocsQuery(), iw -> {
            final int numDocs = 10;
            for (int i = 0; i < numDocs; i++) {
                Document document = new Document();
                document.add(new SortedNumericDocValuesField("number", i + 2));
                document.add(new SortedNumericDocValuesField("number", i + 3));
                iw.addDocument(document);
            }
        }, max -> {
            assertEquals(12.0, max.getValue(), 0);
            assertTrue(AggregationInspectionHelper.hasValue(max));
        });
    }

    public void testMultiValuedFieldWithValueScript() throws IOException {
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER);
        fieldType.setName("values");

        MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max")
            .field("values")
            .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap()));

        testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> {
            final int numDocs = 10;
            for (int i = 0; i < numDocs; i++) {
                Document document = new Document();
                document.add(new SortedNumericDocValuesField("values", i + 2));
                document.add(new SortedNumericDocValuesField("values", i + 3));
                iw.addDocument(document);
            }
        }, max -> {
            assertEquals(12.0, max.getValue(), 0);
            assertTrue(AggregationInspectionHelper.hasValue(max));
        }, fieldType);
    }

    /** Multi-valued field plus inc=1: max becomes 12+1 = 13. */
    public void testMultiValuedFieldWithValueScriptWithParams() throws IOException {
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER);
        fieldType.setName("values");

        Map<String, Object> params = Collections.singletonMap("inc", 1);
        MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max")
            .field("values")
            .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, params));

        testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> {
            final int numDocs = 10;
            for (int i = 0; i < numDocs; i++) {
                Document document = new Document();
                document.add(new SortedNumericDocValuesField("values", i + 2));
                document.add(new SortedNumericDocValuesField("values", i + 3));
                iw.addDocument(document);
            }
        }, max -> {
            assertEquals(13.0, max.getValue(), 0);
            assertTrue(AggregationInspectionHelper.hasValue(max));
        }, fieldType);
    }

    /** Script-only aggregation (no .field()) reading field "value" through the script. */
    public void testScriptSingleValued() throws IOException {
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER);
        fieldType.setName("value");

        MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max")
            .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_FIELD_SCRIPT, Collections.emptyMap()));

        testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> {
            final int numDocs = 10;
            for (int i = 0; i < numDocs; i++) {
                iw.addDocument(singleton(new NumericDocValuesField("value", i + 1)));
            }
        }, max -> {
            assertEquals(10.0, max.getValue(), 0);
            assertTrue(AggregationInspectionHelper.hasValue(max));
        }, fieldType);
    }

    /** Script with field+inc params: max of (value + 1) over 1..10 is 11. */
    public void testScriptSingleValuedWithParams() throws IOException {
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER);
        fieldType.setName("value");

        Map<String, Object> params = new HashMap<>();
        params.put("inc", 1);
        params.put("field", "value");

        MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max")
            .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, SUM_FIELD_PARAMS_SCRIPT, params));

        testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> {
            final int numDocs = 10;
            for (int i = 0; i < numDocs; i++) {
                iw.addDocument(singleton(new NumericDocValuesField("value", i + 1)));
            }
        }, max -> {
            assertEquals(11.0, max.getValue(), 0);
            assertTrue(AggregationInspectionHelper.hasValue(max));
        }, fieldType);
    }

    public void testScriptMultiValued() throws IOException {
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER);
        fieldType.setName("values");

        MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max")
            .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, SUM_VALUES_FIELD_SCRIPT, Collections.emptyMap()));

        testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> {
            final int numDocs = 10;
            for (int i = 0; i < numDocs; i++) {
                Document document = new Document();
                document.add(new SortedNumericDocValuesField("values", i + 2));
                document.add(new SortedNumericDocValuesField("values", i + 3));
                iw.addDocument(document);
            }
        }, max -> {
            assertEquals(12.0, max.getValue(), 0);
            assertTrue(AggregationInspectionHelper.hasValue(max));
        }, fieldType);
    }

    public void testScriptMultiValuedWithParams() throws IOException {
        Map<String, Object> params = new HashMap<>();
        params.put("inc", 1);
        params.put("field", "values");

        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER);
        fieldType.setName("values");

        MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max")
            .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, SUM_FIELD_PARAMS_SCRIPT, params));

        testCase(aggregationBuilder, new MatchAllDocsQuery(), iw -> {
            final int numDocs = 10;
            for (int i = 0; i < numDocs; i++) {
                Document document = new Document();
                document.add(new SortedNumericDocValuesField("values", i + 2));
                document.add(new SortedNumericDocValuesField("values", i + 3));
                iw.addDocument(document);
            }
        }, max -> {
            assertEquals(13.0, max.getValue(), 0);
            assertTrue(AggregationInspectionHelper.hasValue(max));
        }, fieldType);
    }

    /** Global agg over an empty index: doc count 0 and max at its -Infinity identity. */
    public void testEmptyAggregation() throws Exception {
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER);
        fieldType.setName("value");
        fieldType.setHasDocValues(true);

        AggregationBuilder aggregationBuilder = AggregationBuilders.global("global")
            .subAggregation(AggregationBuilders.max("max").field("value"));

        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
        // Do not add any documents
        indexWriter.close();

        IndexReader indexReader = DirectoryReader.open(directory);
        IndexSearcher indexSearcher =
            newSearcher(indexReader, true, true);

        GlobalAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType);
        aggregator.preCollection();
        indexSearcher.search(new MatchAllDocsQuery(), aggregator);
        aggregator.postCollection();

        Global global = (Global) aggregator.buildAggregation(0L);
        assertNotNull(global);
        assertEquals("global", global.getName());
        assertEquals(0L, global.getDocCount());
        assertNotNull(global.getAggregations());
        assertEquals(1, global.getAggregations().asMap().size());

        Max max = global.getAggregations().get("max");
        assertNotNull(max);
        assertEquals("max", max.getName());
        assertEquals(Double.NEGATIVE_INFINITY, max.getValue(), 0);

        indexReader.close();
        directory.close();
    }

    /**
     * Orders terms buckets by a sub-max inside a filter that matches nothing; every
     * bucket's filter has docCount 0 and its max stays at -Infinity, exercising the
     * "order by empty aggregation" path.
     */
    public void testOrderByEmptyAggregation() throws IOException {
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER);
        fieldType.setName("value");
        fieldType.setHasDocValues(true);

        // termQuery("value", 100) matches no indexed doc (values are 1..10).
        TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("terms", ValueType.NUMERIC)
            .field("value")
            .order(BucketOrder.compound(BucketOrder.aggregation("filter>max", true)))
            .subAggregation(AggregationBuilders.filter("filter", termQuery("value", 100))
                .subAggregation(AggregationBuilders.max("max").field("value")));

        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
        final int numDocs = 10;
        for (int i = 0; i < numDocs; i++) {
            indexWriter.addDocument(singleton(new NumericDocValuesField("value", i + 1)));
        }
        indexWriter.close();

        IndexReader indexReader = DirectoryReader.open(directory);
        IndexSearcher indexSearcher = newSearcher(indexReader, true, true);

        TermsAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType);
        aggregator.preCollection();
        indexSearcher.search(new MatchAllDocsQuery(), aggregator);
        aggregator.postCollection();

        Terms terms = (Terms) aggregator.buildAggregation(0L);
        assertNotNull(terms);
        List<? extends Terms.Bucket> buckets = terms.getBuckets();
        assertNotNull(buckets);
        assertEquals(10, buckets.size());
        for (int i = 0; i < 10; i++) {
            Terms.Bucket bucket = buckets.get(i);
            assertNotNull(bucket);
            assertEquals((long) i + 1, bucket.getKeyAsNumber());
            assertEquals(1L, bucket.getDocCount());
            Filter filter = bucket.getAggregations().get("filter");
            assertNotNull(filter);
            assertEquals(0L, filter.getDocCount());
            Max max = filter.getAggregations().get("max");
            assertNotNull(max);
            assertEquals(Double.NEGATIVE_INFINITY, max.getValue(), 0);
        }

        indexReader.close();
        directory.close();
    }

    /**
     * Runs max alongside value_count under one MultiBucketCollector: max's
     * early-termination/shortcut must not starve the sibling collector — count still
     * sees all 20 values.
     */
    public void testEarlyTermination() throws Exception {
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER);
        fieldType.setName("values");

        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
        final int numDocs = 10;
        for (int i = 0; i < numDocs; i++) {
            Document document = new Document();
            document.add(new SortedNumericDocValuesField("values", i + 2));
            document.add(new SortedNumericDocValuesField("values", i + 3));
            indexWriter.addDocument(document);
        }
        indexWriter.close();

        IndexReader indexReader = DirectoryReader.open(directory);
        IndexSearcher indexSearcher = newSearcher(indexReader, true, true);

        MaxAggregationBuilder maxAggregationBuilder = new MaxAggregationBuilder("max")
            .field("values");
        ValueCountAggregationBuilder countAggregationBuilder = new ValueCountAggregationBuilder("count", null)
            .field("values");

        MaxAggregator maxAggregator = createAggregator(maxAggregationBuilder, indexSearcher, fieldType);
        ValueCountAggregator countAggregator = createAggregator(countAggregationBuilder, indexSearcher, fieldType);

        BucketCollector bucketCollector = MultiBucketCollector.wrap(maxAggregator, countAggregator);
        bucketCollector.preCollection();
        indexSearcher.search(new MatchAllDocsQuery(), bucketCollector);
        bucketCollector.postCollection();

        InternalMax max = (InternalMax) maxAggregator.buildAggregation(0L);
        assertNotNull(max);
        assertEquals(12.0, max.getValue(), 0);
        assertEquals("max", max.getName());

        InternalValueCount count = (InternalValueCount) countAggregator.buildAggregation(0L);
        assertNotNull(count);
        assertEquals(20L, count.getValue());
        assertEquals("count", count.getName());

        indexReader.close();
        directory.close();
    }

    /**
     * Same early-termination concern but with a terms agg (both collection modes) whose
     * sub-max targets an unmapped field "invalid" — every bucket's sub-max must stay
     * -Infinity while top-level max/count remain correct.
     */
    public void testNestedEarlyTermination() throws Exception {
        MappedFieldType multiValuesfieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER);
        multiValuesfieldType.setName("values");

        MappedFieldType singleValueFieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER);
        singleValueFieldType.setName("value");

        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
        final int numDocs = 10;
        for (int i = 0; i < numDocs; i++) {
            Document document = new Document();
            document.add(new NumericDocValuesField("value", i + 1));
            document.add(new SortedNumericDocValuesField("values", i + 2));
            document.add(new SortedNumericDocValuesField("values", i + 3));
            indexWriter.addDocument(document);
        }
        indexWriter.close();

        IndexReader indexReader = DirectoryReader.open(directory);
        IndexSearcher indexSearcher = newSearcher(indexReader, true, true);

        for (Aggregator.SubAggCollectionMode collectionMode : Aggregator.SubAggCollectionMode.values()) {
            MaxAggregationBuilder maxAggregationBuilder = new MaxAggregationBuilder("max")
                .field("values");
            ValueCountAggregationBuilder countAggregationBuilder = new ValueCountAggregationBuilder("count", null)
                .field("values");
            TermsAggregationBuilder termsAggregationBuilder = new TermsAggregationBuilder("terms", ValueType.NUMERIC)
                .field("value").collectMode(collectionMode)
                .subAggregation(new MaxAggregationBuilder("sub_max").field("invalid"));

            MaxAggregator maxAggregator = createAggregator(maxAggregationBuilder, indexSearcher, multiValuesfieldType);
            ValueCountAggregator countAggregator =
                createAggregator(countAggregationBuilder, indexSearcher, multiValuesfieldType);
            TermsAggregator termsAggregator = createAggregator(termsAggregationBuilder, indexSearcher, singleValueFieldType);

            BucketCollector bucketCollector = MultiBucketCollector.wrap(maxAggregator, countAggregator, termsAggregator);
            bucketCollector.preCollection();
            indexSearcher.search(new MatchAllDocsQuery(), bucketCollector);
            bucketCollector.postCollection();

            InternalMax max = (InternalMax) maxAggregator.buildAggregation(0L);
            assertNotNull(max);
            assertEquals(12.0, max.getValue(), 0);
            assertEquals("max", max.getName());

            InternalValueCount count = (InternalValueCount) countAggregator.buildAggregation(0L);
            assertNotNull(count);
            assertEquals(20L, count.getValue());
            assertEquals("count", count.getName());

            Terms terms = (Terms) termsAggregator.buildAggregation(0L);
            assertNotNull(terms);
            List<? extends Terms.Bucket> buckets = terms.getBuckets();
            assertNotNull(buckets);
            assertEquals(10, buckets.size());
            for (Terms.Bucket b : buckets) {
                InternalMax subMax = b.getAggregations().get("sub_max");
                assertEquals(Double.NEGATIVE_INFINITY, subMax.getValue(), 0);
            }
        }

        indexReader.close();
        directory.close();
    }

    /**
     * Make sure that an aggregation not using a script does get cached.
*/ public void testCacheAggregation() throws IOException { Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); final int numDocs = 10; for (int i = 0; i < numDocs; i++) { indexWriter.addDocument(singleton(new NumericDocValuesField("value", i + 1))); } indexWriter.close(); Directory unmappedDirectory = newDirectory(); RandomIndexWriter unmappedIndexWriter = new RandomIndexWriter(random(), unmappedDirectory); unmappedIndexWriter.close(); IndexReader indexReader = DirectoryReader.open(directory); IndexReader unamappedIndexReader = DirectoryReader.open(unmappedDirectory); MultiReader multiReader = new MultiReader(indexReader, unamappedIndexReader); IndexSearcher indexSearcher = newSearcher(multiReader, true, true); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER); fieldType.setName("value"); MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max") .field("value"); MaxAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); aggregator.postCollection(); InternalMax max = (InternalMax) aggregator.buildAggregation(0L); assertEquals(10.0, max.getValue(), 0); assertEquals("max", max.getName()); assertTrue(AggregationInspectionHelper.hasValue(max)); // Test that an aggregation not using a script does get cached assertTrue(aggregator.context().getQueryShardContext().isCacheable()); multiReader.close(); directory.close(); unmappedDirectory.close(); } /** * Make sure that an aggregation using a script does not get cached. 
 */
    public void testDontCacheScripts() throws IOException {
        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
        final int numDocs = 10;
        // Values 1..10 on field "value".
        for (int i = 0; i < numDocs; i++) {
            indexWriter.addDocument(singleton(new NumericDocValuesField("value", i + 1)));
        }
        indexWriter.close();
        // An extra empty directory simulates a shard with no mapping for the field.
        Directory unmappedDirectory = newDirectory();
        RandomIndexWriter unmappedIndexWriter = new RandomIndexWriter(random(), unmappedDirectory);
        unmappedIndexWriter.close();
        IndexReader indexReader = DirectoryReader.open(directory);
        IndexReader unamappedIndexReader = DirectoryReader.open(unmappedDirectory);
        MultiReader multiReader = new MultiReader(indexReader, unamappedIndexReader);
        IndexSearcher indexSearcher = newSearcher(multiReader, true, true);
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER);
        fieldType.setName("value");
        // Same max aggregation as the cached case, but with an inline mock script attached.
        MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("max")
            .field("value")
            .script(new Script(ScriptType.INLINE, MockScriptEngine.NAME, VALUE_SCRIPT, Collections.emptyMap()));
        MaxAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType);
        aggregator.preCollection();
        indexSearcher.search(new MatchAllDocsQuery(), aggregator);
        aggregator.postCollection();
        InternalMax max = (InternalMax) aggregator.buildAggregation(0L);
        assertEquals(10.0, max.getValue(), 0);
        assertEquals("max", max.getName());
        assertTrue(AggregationInspectionHelper.hasValue(max));
        // Test that an aggregation using a script does not get cached
        assertFalse(aggregator.context().getQueryShardContext().isCacheable());
        // Closing the MultiReader closes the wrapped readers too.
        multiReader.close();
        directory.close();
        unmappedDirectory.close();
    }
}
package cz.metacentrum.perun.webgui.tabs.vostabs; import com.google.gwt.core.client.JavaScriptObject; import com.google.gwt.resources.client.ImageResource; import com.google.gwt.user.cellview.client.Column; import com.google.gwt.user.cellview.client.ColumnSortEvent; import com.google.gwt.user.cellview.client.RowStyles; import com.google.gwt.user.cellview.client.TextColumn; import com.google.gwt.user.client.ui.*; import com.google.gwt.view.client.ListDataProvider; import cz.metacentrum.perun.webgui.client.PerunWebSession; import cz.metacentrum.perun.webgui.client.UiElements; import cz.metacentrum.perun.webgui.client.mainmenu.MainMenu; import cz.metacentrum.perun.webgui.client.resources.*; import cz.metacentrum.perun.webgui.json.GetEntityById; import cz.metacentrum.perun.webgui.json.JsonCallbackEvents; import cz.metacentrum.perun.webgui.json.JsonUtils; import cz.metacentrum.perun.webgui.json.propagationStatsReader.GetAllResourcesState; import cz.metacentrum.perun.webgui.model.*; import cz.metacentrum.perun.webgui.tabs.TabItem; import cz.metacentrum.perun.webgui.tabs.TabItemWithUrl; import cz.metacentrum.perun.webgui.tabs.UrlMapper; import cz.metacentrum.perun.webgui.tabs.VosTabs; import cz.metacentrum.perun.webgui.widgets.AjaxLoaderImage; import cz.metacentrum.perun.webgui.widgets.CustomButton; import cz.metacentrum.perun.webgui.widgets.PerunTable; import java.util.ArrayList; import java.util.Map; /** * Tab with propagation status of all facilities related to VO. 
 *
 * @author Pavel Zlamal <256627@mail.muni.cz>
 */
public class VoResourcesPropagationsTabItem implements TabItem, TabItemWithUrl {

	/** Perun web session */
	private PerunWebSession session = PerunWebSession.getInstance();

	/** Content widget - should be simple panel */
	private SimplePanel contentWidget = new SimplePanel();

	/** Title widget */
	private Label titleWidget = new Label("All VO's resources state");

	// VO this tab displays; loaded asynchronously when only the id is known.
	private VirtualOrganization vo;
	private int voId;
	// Row index into the propagation FlexTable; reset on every draw().
	private int mainrow = 0;
	// Per-draw summary counters shown in the legend row.
	private int okCounter = 0;
	private int errorCounter = 0;
	private int notDeterminedCounter = 0;
	// NOTE(review): misspelled ("procesing") and only referenced from
	// commented-out code below — candidate for removal.
	private int procesingCounter = 0;

	/**
	 * Creates a tab instance; the VO itself is fetched asynchronously,
	 * so isPrepared() stays false until the callback fills it in.
	 * @param voId
	 */
	public VoResourcesPropagationsTabItem(int voId){
		this.voId = voId;
		JsonCallbackEvents events = new JsonCallbackEvents(){
			public void onFinished(JavaScriptObject jso) {
				vo = jso.cast();
			}
		};
		new GetEntityById(PerunEntity.VIRTUAL_ORGANIZATION, voId, events).retrieveData();
	}

	/**
	 * Creates a tab instance from an already-loaded VO.
	 * @param vo
	 */
	public VoResourcesPropagationsTabItem(VirtualOrganization vo){
		this.voId = vo.getId();
		this.vo = vo;
	}

	// Tab is ready to draw once the VO object has arrived.
	public boolean isPrepared(){
		return (vo != null);
	}

	@Override
	public boolean isRefreshParentOnClose() {
		return false;
	}

	@Override
	public void onClose() {

	}

	/**
	 * Builds the tab UI: a legend/refresh header and, per resource of the VO,
	 * a table of propagation Tasks colored by status. Data is loaded
	 * asynchronously via GetAllResourcesState.
	 */
	public Widget draw() {
		// Reset layout position and summary counters for this (re)draw.
		mainrow = 0;
		okCounter = 0;
		errorCounter = 0;
		notDeterminedCounter = 0;
		procesingCounter = 0;

		titleWidget.setText(Utils.getStrippedStringWithEllipsis(vo.getName())+": resources state");

		// NOTE(review): 'tab' is never used below — candidate for removal.
		final TabItem tab = this;

		VerticalPanel mainTab = new VerticalPanel();
		mainTab.setWidth("100%");

		// MAIN PANEL
		final ScrollPanel firstTabPanel = new ScrollPanel();
		firstTabPanel.setSize("100%", "100%");
		firstTabPanel.setStyleName("perun-tableScrollPanel");

		// Legend row: refresh button + color key; counts are filled in onFinished.
		final FlexTable help = new FlexTable();
		help.setCellPadding(4);
		help.setWidth("100%");

		final CustomButton cb = UiElements.getRefreshButton(this);
		help.setWidget(0, 0, cb);
		help.getFlexCellFormatter().setWidth(0, 0, "80px");

		help.setHTML(0, 1, "<strong>Color&nbsp;notation:</strong>");
		help.getFlexCellFormatter().setWidth(0, 1, "100px");
		help.setHTML(0, 2, "<strong>OK</strong>");
		help.getFlexCellFormatter().setHorizontalAlignment(0, 2, HasHorizontalAlignment.ALIGN_CENTER);
		help.getFlexCellFormatter().setWidth(0, 2, "50px");
		help.getFlexCellFormatter().setStyleName(0, 2, "green");
		help.setHTML(0, 3, "<strong>Error</strong>");
		help.getFlexCellFormatter().setWidth(0, 3, "50px");
		help.getFlexCellFormatter().setStyleName(0, 3, "red");
		help.getFlexCellFormatter().setHorizontalAlignment(0, 3, HasHorizontalAlignment.ALIGN_CENTER);
		help.setHTML(0, 4, "<strong>Not&nbsp;determined</strong>");
		help.getFlexCellFormatter().setWidth(0, 4, "50px");
		help.getFlexCellFormatter().setHorizontalAlignment(0, 4, HasHorizontalAlignment.ALIGN_CENTER);
		help.getFlexCellFormatter().setStyleName(0, 4, "notdetermined");
		/*
		help.setHTML(0, 5, "<strong>Processing</strong>");
		help.getFlexCellFormatter().setWidth(0, 5, "50px");
		help.getFlexCellFormatter().setStyleName(0, 5, "yellow");
		help.getFlexCellFormatter().setHorizontalAlignment(0, 5, HasHorizontalAlignment.ALIGN_CENTER);
		*/
		help.setHTML(0, 5, "&nbsp;");
		help.getFlexCellFormatter().setWidth(0, 6, "50%");

		mainTab.add(help);
		mainTab.add(new HTML("<hr size=\"2\" />"));
		mainTab.add(firstTabPanel);

		// One FlexTable holds a header row + task table per resource.
		final FlexTable content = new FlexTable();
		content.setWidth("100%");
		content.setBorderWidth(0);
		firstTabPanel.add(content);
		content.setStyleName("propagationTable", true);

		final AjaxLoaderImage im = new AjaxLoaderImage();
		content.setWidget(0, 0, im);
		content.getFlexCellFormatter().setHorizontalAlignment(0, 0, HasHorizontalAlignment.ALIGN_CENTER);

		final GetAllResourcesState callback = new GetAllResourcesState(voId, new JsonCallbackEvents(){
			public void onLoadingStart(){
				im.loadingStart();
				cb.setProcessing(true);
			}
			public void onError(PerunError error){
				im.loadingError(error);
				cb.setProcessing(false);
			}
			public void onFinished(JavaScriptObject jso) {
				im.loadingFinished();
				cb.setProcessing(false);
				content.clear();
				content.getFlexCellFormatter().setHorizontalAlignment(0, 0, HasHorizontalAlignment.ALIGN_LEFT);
				ArrayList<ResourceState> list = JsonUtils.jsoAsList(jso);
				if (list != null && !list.isEmpty()){
					list = new TableSorter<ResourceState>().sortByResourceName(list);
					// PROCESS CLUSTERS (with more than one destinations)
					for (final ResourceState state : list) {
						// Header row: icon + resource name.
						content.setHTML(mainrow, 0, new Image(LargeIcons.INSTANCE.serverGroupIcon())+"<span class=\"now-managing\" style=\"display: inline-block; position: relative; top: -8px;\">" + state.getResource().getName() + "</span>");
						ArrayList<Task> tasks = new TableSorter<Task>().sortByService(JsonUtils.<Task>jsoAsList(state.getTasks()));
						// No tasks => propagation state can't be determined.
						// NOTE(review): if 'tasks' were actually null, the for-loop
						// below would NPE — presumably jsoAsList never returns null;
						// TODO confirm.
						if (tasks == null || tasks.isEmpty()) notDeterminedCounter++;
						// One ERROR task marks the whole resource as failing.
						boolean allOk = true;
						for (Task tsk :tasks) {
							if (tsk.getStatus().equalsIgnoreCase("ERROR")) {
								errorCounter++;
								allOk = false;
								break;
							}
						}
						if (allOk && tasks != null && !tasks.isEmpty()) okCounter++;
						ListDataProvider<Task> dataProvider = new ListDataProvider<Task>();
						PerunTable<Task> table;
						// Table data provider.
						dataProvider = new ListDataProvider<Task>(tasks);
						// Cell table
						table = new PerunTable<Task>(tasks);
						table.removeRowCountChangeHandler();
						// Connect the table to the data provider.
						dataProvider.addDataDisplay(table);
						// Sorting
						ColumnSortEvent.ListHandler<Task> columnSortHandler = new ColumnSortEvent.ListHandler<Task>(dataProvider.getList());
						table.addColumnSortHandler(columnSortHandler);
						// set empty content & loader
						AjaxLoaderImage loaderImage = new AjaxLoaderImage();
						loaderImage.setEmptyResultMessage("No service configuration was propagated to this resource.");
						table.setEmptyTableWidget(loaderImage);
						loaderImage.loadingFinished();
						table.addIdColumn("Task Id");
						// Service column
						Column<Task, String> serviceColumn = JsonUtils.addColumn(
								new JsonUtils.GetValue<Task, String>() {
									public String getValue(Task task) {
										return String.valueOf(task.getService().getName());
									}
								}, null);
						// status column
						Column<Task, String> statusColumn = JsonUtils.addColumn(
								new JsonUtils.GetValue<Task, String>() {
									public String getValue(Task task) {
										return String.valueOf(task.getStatus());
									}
								}, null);
						// start COLUMN
						TextColumn<Task> startTimeColumn = new TextColumn<Task>() {
							public String getValue(Task result) {
								return result.getStartTime();
							}
						};
						// end COLUMN
						TextColumn<Task> endTimeColumn = new TextColumn<Task>() {
							public String getValue(Task result) {
								return result.getEndTime();
							}
						};
						// schedule COLUMN
						TextColumn<Task> scheduleColumn = new TextColumn<Task>() {
							public String getValue(Task result) {
								return result.getSchedule();
							}
						};
						// Add the columns.
						table.addColumn(serviceColumn, "Service");
						table.addColumn(statusColumn, "Status");
						table.addColumn(scheduleColumn, "Scheduled");
						table.addColumn(startTimeColumn, "Started");
						table.addColumn(endTimeColumn, "Ended");
						// set row styles based on task state
						table.setRowStyles(new RowStyles<Task>(){
							public String getStyleNames(Task row, int rowIndex) {
								if (row.getStatus().equalsIgnoreCase("NONE")) {
									return "rowdarkgreen";
								} else if (row.getStatus().equalsIgnoreCase("DONE")){
									return "rowgreen";
								} else if (row.getStatus().equalsIgnoreCase("PROCESSING")){
									return "rowyellow";
								} else if (row.getStatus().equalsIgnoreCase("ERROR")){
									return "rowred";
								}
								return "";
							}
						});
						table.setWidth("100%");
						content.setWidget(mainrow+1, 0, table);
						content.getFlexCellFormatter().setStyleName(mainrow + 1, 0, "propagationTablePadding");
						// Two rows consumed per resource: header + task table.
						mainrow++;
						mainrow++;
					}
				}
				// set counters
				help.setHTML(0, 2, "<strong>Ok&nbsp;("+okCounter+")</strong>");
				help.setHTML(0, 3, "<strong>Error&nbsp;("+errorCounter+")</strong>");
				help.setHTML(0, 4, "<strong>Not&nbsp;determined&nbsp;("+notDeterminedCounter+")</strong>");
				//help.setHTML(0, 5, "<strong>Processing&nbsp;(" + procesingCounter + ")</strong>");
			}
		});

		// get for all facilities for VO
		callback.retrieveData();

		// resize perun table to correct size on screen
		session.getUiElements().resizePerunTable(firstTabPanel, 400, this);

		this.contentWidget.setWidget(mainTab);

		return getWidget();
	}

	public Widget getWidget() {
		return this.contentWidget;
	}

	public Widget getTitle() {
		return this.titleWidget;
	}

	public ImageResource getIcon() {
		return SmallIcons.INSTANCE.arrowRightIcon();
	}

	// Identity is the VO id only (see equals below).
	@Override
	public int hashCode() {
		final int prime = 1327;
		int result = 1;
		result = prime * result + voId;
		return result;
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj) return true;
		if (obj == null) return false;
		if (getClass() != obj.getClass()) return false;
		VoResourcesPropagationsTabItem other = (VoResourcesPropagationsTabItem) obj;
		if (voId != other.voId) return false;
		return true;
	}

	public boolean multipleInstancesEnabled() {
		return false;
	}

	public void open() {
		session.getUiElements().getMenu().openMenu(MainMenu.VO_ADMIN);
		session.getUiElements().getBreadcrumbs().setLocation(vo, "Resources state", getUrlWithParameters());
		// Prefer the full VO object when it has already been loaded.
		if(vo != null){
			session.setActiveVo(vo);
			return;
		}
		session.setActiveVoId(voId);
	}

	// VO admins and VO observers may view propagation state.
	public boolean isAuthorized() {
		if (session.isVoAdmin(voId) || session.isVoObserver(voId)) {
			return true;
		} else {
			return false;
		}
	}

	public final static String URL = "propags";

	public String getUrl() {
		return URL;
	}

	public String getUrlWithParameters() {
		return VosTabs.URL + UrlMapper.TAB_NAME_SEPARATOR + getUrl() + "?vo="+voId;
	}

	/** Recreates the tab from URL parameters ("vo" = VO id). */
	static public VoResourcesPropagationsTabItem load(Map<String, String> parameters) {
		int voId = Integer.parseInt(parameters.get("vo"));
		return new VoResourcesPropagationsTabItem(voId);
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.giraph.comm.netty; import org.apache.commons.net.util.Base64; import org.apache.hadoop.classification.InterfaceStability; /*if[HADOOP_1_SECURITY] else[HADOOP_1_SECURITY]*/ import org.apache.hadoop.ipc.StandbyException; /*end[HADOOP_1_SECURITY]*/ import org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier; import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager; import org.apache.hadoop.security.SaslRpcServer; import org.apache.log4j.Logger; import javax.security.auth.callback.Callback; import javax.security.auth.callback.CallbackHandler; import javax.security.auth.callback.NameCallback; import javax.security.auth.callback.PasswordCallback; import javax.security.auth.callback.UnsupportedCallbackException; import javax.security.sasl.AuthorizeCallback; import javax.security.sasl.RealmCallback; import javax.security.sasl.Sasl; import javax.security.sasl.SaslException; import javax.security.sasl.SaslServer; import java.io.IOException; import java.nio.charset.Charset; import org.apache.hadoop.security.SaslRpcServer.AuthMethod; /** * Encapsulates SASL server logic for Giraph BSP worker servers. 
 */
public class SaslNettyServer extends SaslRpcServer {
  /** Logger */
  public static final Logger LOG = Logger.getLogger(SaslNettyServer.class);

  /**
   * Actual SASL work done by this object from javax.security.sasl.
   * Initialized below in constructor.
   */
  private SaslServer saslServer;

  /**
   * Constructor
   *
   * @param secretManager supplied by SaslServerHandler.
   */
  public SaslNettyServer(JobTokenSecretManager secretManager)
    throws IOException {
    this(secretManager, AuthMethod.SIMPLE);
  }

  /**
   * Constructor. Creates a DIGEST-MD5 SaslServer backed by the job-token
   * secret manager regardless of the authMethod argument (authMethod is only
   * forwarded to the superclass).
   *
   * @param secretManager supplied by SaslServerHandler.
   * @param authMethod Authentication method
   */
  public SaslNettyServer(JobTokenSecretManager secretManager,
    AuthMethod authMethod) throws IOException {
/*if[HADOOP_1_SECRET_MANAGER]
else[HADOOP_1_SECRET_MANAGER]*/
    super(authMethod);
/*end[HADOOP_1_SECRET_MANAGER]*/
    if (LOG.isDebugEnabled()) {
      LOG.debug("SaslNettyServer: Secret manager is: " + secretManager +
        " with authmethod " + authMethod);
    }
/*if[HADOOP_1_SECRET_MANAGER]
else[HADOOP_1_SECRET_MANAGER]*/
    try {
      secretManager.checkAvailableForRead();
    } catch (StandbyException e) {
      // Logged only; construction proceeds and createSaslServer may still work.
      LOG.error("SaslNettyServer: Could not read secret manager: " + e);
    }
/*end[HADOOP_1_SECRET_MANAGER]*/
    try {
      SaslDigestCallbackHandler ch =
          new SaslNettyServer.SaslDigestCallbackHandler(secretManager);
      saslServer = Sasl.createSaslServer(SaslNettyServer.AuthMethod.DIGEST
        .getMechanismName(), null, SaslRpcServer.SASL_DEFAULT_REALM,
        SaslRpcServer.SASL_PROPS, ch);
    } catch (SaslException e) {
      // NOTE(review): swallowed — saslServer stays null and later calls
      // (isComplete/response) would NPE; TODO confirm intended.
      LOG.error("SaslNettyServer: Could not create SaslServer: " + e);
    }
  }

  /** @return true once the SASL exchange has completed successfully. */
  public boolean isComplete() {
    return saslServer.isComplete();
  }

  /** @return the authorization id negotiated by the SASL exchange. */
  public String getUserName() {
    return saslServer.getAuthorizationID();
  }

  /**
   * Used by SaslTokenMessage::processToken() to respond to server SASL tokens.
   *
   * @param token Server's SASL token
   * @return token to send back to the server, or null if evaluation failed.
   */
  public byte[] response(byte[] token) {
    try {
      if (LOG.isDebugEnabled()) {
        LOG.debug("response: Responding to input token of length: " +
          token.length);
      }
      byte[] retval = saslServer.evaluateResponse(token);
      if (LOG.isDebugEnabled()) {
        LOG.debug("response: Response token length: " + retval.length);
      }
      return retval;
    } catch (SaslException e) {
      LOG.error("response: Failed to evaluate client token of length: " +
        token.length + " : " + e);
      return null;
    }
  }

  /**
   * Encode a byte[] identifier as a Base64-encoded string.
   *
   * @param identifier identifier to encode
   * @return Base64-encoded string
   */
  static String encodeIdentifier(byte[] identifier) {
    return new String(Base64.encodeBase64(identifier),
        Charset.defaultCharset());
  }

  /**
   * Encode a password as a base64-encoded char[] array.
   * @param password as a byte array.
   * @return password as a char array.
   */
  static char[] encodePassword(byte[] password) {
    return new String(Base64.encodeBase64(password),
        Charset.defaultCharset()).toCharArray();
  }

  /** CallbackHandler for SASL DIGEST-MD5 mechanism */
  @InterfaceStability.Evolving
  public static class SaslDigestCallbackHandler implements CallbackHandler {
    /** Used to authenticate the clients */
    private JobTokenSecretManager secretManager;

    /**
     * Constructor
     *
     * @param secretManager used to authenticate clients
     */
    public SaslDigestCallbackHandler(
        JobTokenSecretManager secretManager) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("SaslDigestCallback: Creating SaslDigestCallback handler " +
          "with secret manager: " + secretManager);
      }
      this.secretManager = secretManager;
    }

    /**
     * {@inheritDoc}
     *
     * Supplies the client's password (derived from the job token) for the
     * PasswordCallback and authorizes the client when its authentication and
     * authorization ids match.
     */
    @Override
    public void handle(Callback[] callbacks) throws IOException,
        UnsupportedCallbackException {
      NameCallback nc = null;
      PasswordCallback pc = null;
      AuthorizeCallback ac = null;
      for (Callback callback : callbacks) {
        if (callback instanceof AuthorizeCallback) {
          ac = (AuthorizeCallback) callback;
        } else if (callback instanceof NameCallback) {
          nc = (NameCallback) callback;
        } else if (callback instanceof PasswordCallback) {
          pc = (PasswordCallback) callback;
        } else if (callback instanceof RealmCallback) {
          continue; // realm is ignored
        } else {
          throw new UnsupportedCallbackException(callback,
              "handle: Unrecognized SASL DIGEST-MD5 Callback");
        }
      }
      if (pc != null) {
        // Password is the Base64-encoded secret retrieved for the client's
        // job-token identifier.
        JobTokenIdentifier tokenIdentifier =
            getIdentifier(nc.getDefaultName(), secretManager);
        char[] password =
            encodePassword(secretManager.retrievePassword(tokenIdentifier));
        if (LOG.isDebugEnabled()) {
          LOG.debug("handle: SASL server DIGEST-MD5 callback: setting " +
            "password for client: " + tokenIdentifier.getUser());
        }
        pc.setPassword(password);
      }
      if (ac != null) {
        String authid = ac.getAuthenticationID();
        String authzid = ac.getAuthorizationID();
        if (authid.equals(authzid)) {
          ac.setAuthorized(true);
        } else {
          ac.setAuthorized(false);
        }
        if (ac.isAuthorized()) {
          if (LOG.isDebugEnabled()) {
            String username =
                getIdentifier(authzid, secretManager).getUser().getUserName();
            // NOTE(review): this inner isDebugEnabled() check is redundant —
            // the enclosing block already guards it.
            if (LOG.isDebugEnabled()) {
              LOG.debug("handle: SASL server DIGEST-MD5 callback: setting " +
                "canonicalized client ID: " + username);
            }
          }
          ac.setAuthorizedID(authzid);
        }
      }
    }
  }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.appmesh.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * An object that represents a listener's Transport Layer Security (TLS) certificate.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/appmesh-2019-01-25/ListenerTlsCertificate" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListenerTlsCertificate implements Serializable, Cloneable, StructuredPojo {

    /** A reference to an object that represents an AWS Certificate Manager (ACM) certificate. */
    private ListenerTlsAcmCertificate acm;

    /** A reference to an object that represents a local file certificate. */
    private ListenerTlsFileCertificate file;

    /** A reference to an object that represents a listener's Secret Discovery Service certificate. */
    private ListenerTlsSdsCertificate sds;

    /**
     * Sets the AWS Certificate Manager (ACM) certificate reference.
     *
     * @param acm
     *        A reference to an object that represents an AWS Certificate Manager (ACM) certificate.
     */
    public void setAcm(ListenerTlsAcmCertificate acm) {
        this.acm = acm;
    }

    /**
     * @return A reference to an object that represents an AWS Certificate Manager (ACM) certificate.
     */
    public ListenerTlsAcmCertificate getAcm() {
        return this.acm;
    }

    /**
     * Fluent setter for the ACM certificate reference.
     *
     * @param acm
     *        A reference to an object that represents an AWS Certificate Manager (ACM) certificate.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListenerTlsCertificate withAcm(ListenerTlsAcmCertificate acm) {
        setAcm(acm);
        return this;
    }

    /**
     * Sets the local file certificate reference.
     *
     * @param file
     *        A reference to an object that represents a local file certificate.
     */
    public void setFile(ListenerTlsFileCertificate file) {
        this.file = file;
    }

    /**
     * @return A reference to an object that represents a local file certificate.
     */
    public ListenerTlsFileCertificate getFile() {
        return this.file;
    }

    /**
     * Fluent setter for the local file certificate reference.
     *
     * @param file
     *        A reference to an object that represents a local file certificate.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListenerTlsCertificate withFile(ListenerTlsFileCertificate file) {
        setFile(file);
        return this;
    }

    /**
     * Sets the Secret Discovery Service certificate reference.
     *
     * @param sds
     *        A reference to an object that represents a listener's Secret Discovery Service certificate.
     */
    public void setSds(ListenerTlsSdsCertificate sds) {
        this.sds = sds;
    }

    /**
     * @return A reference to an object that represents a listener's Secret Discovery Service certificate.
     */
    public ListenerTlsSdsCertificate getSds() {
        return this.sds;
    }

    /**
     * Fluent setter for the Secret Discovery Service certificate reference.
     *
     * @param sds
     *        A reference to an object that represents a listener's Secret Discovery Service certificate.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListenerTlsCertificate withSds(ListenerTlsSdsCertificate sds) {
        setSds(sds);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Each non-null member is rendered as "Name: value"; members other than
        // the last are followed by a comma (matches generated-SDK formatting).
        StringBuilder sb = new StringBuilder("{");
        if (getAcm() != null) {
            sb.append("Acm: ").append(getAcm()).append(",");
        }
        if (getFile() != null) {
            sb.append("File: ").append(getFile()).append(",");
        }
        if (getSds() != null) {
            sb.append("Sds: ").append(getSds());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ListenerTlsCertificate)) {
            return false;
        }
        ListenerTlsCertificate that = (ListenerTlsCertificate) obj;
        // Members are equal when both are null or both are non-null and equal.
        if ((this.getAcm() == null) != (that.getAcm() == null)) {
            return false;
        }
        if (this.getAcm() != null && !this.getAcm().equals(that.getAcm())) {
            return false;
        }
        if ((this.getFile() == null) != (that.getFile() == null)) {
            return false;
        }
        if (this.getFile() != null && !this.getFile().equals(that.getFile())) {
            return false;
        }
        if ((this.getSds() == null) != (that.getSds() == null)) {
            return false;
        }
        if (this.getSds() != null && !this.getSds().equals(that.getSds())) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + (getAcm() == null ? 0 : getAcm().hashCode());
        hashCode = prime * hashCode + (getFile() == null ? 0 : getFile().hashCode());
        hashCode = prime * hashCode + (getSds() == null ? 0 : getSds().hashCode());
        return hashCode;
    }

    @Override
    public ListenerTlsCertificate clone() {
        try {
            return (ListenerTlsCertificate) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.appmesh.model.transform.ListenerTlsCertificateMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
/**
 * Copyright (c) 2007-2014 Kaazing Corporation. All rights reserved.
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.kaazing.gateway.transport.http.bridge.filter;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;

import java.net.URI;
import java.security.Principal;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;

import javax.security.auth.Subject;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;

import org.apache.mina.core.filterchain.IoFilter.NextFilter;
import org.apache.mina.core.session.IoSession;
import org.apache.mina.core.write.WriteRequest;
import org.hamcrest.BaseMatcher;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.jmock.Expectations;
import org.jmock.Mockery;
import org.jmock.api.Invocation;
import org.jmock.lib.action.CustomAction;
import org.jmock.lib.action.VoidAction;
import org.jmock.lib.concurrent.Synchroniser;
import org.jmock.lib.legacy.ClassImposteriser;
import org.junit.Test;
import org.kaazing.gateway.resource.address.ResourceAddress;
import org.kaazing.gateway.resource.address.http.HttpResourceAddress;
import org.kaazing.gateway.resource.address.tcp.TcpResourceAddressFactorySpi;
import org.kaazing.gateway.security.LoginContextFactory;
import org.kaazing.gateway.security.TypedCallbackHandlerMap;
import org.kaazing.gateway.security.auth.DefaultLoginResult;
import org.kaazing.gateway.security.auth.context.DefaultLoginContextFactory;
import org.kaazing.gateway.security.auth.context.ResultAwareLoginContext;
import org.kaazing.gateway.server.spi.security.LoginResult;
import org.kaazing.gateway.transport.http.HttpMethod;
import org.kaazing.gateway.transport.http.HttpStatus;
import org.kaazing.gateway.transport.http.HttpVersion;
import org.kaazing.gateway.transport.http.bridge.HttpRequestMessage;
import org.kaazing.gateway.transport.http.bridge.HttpResponseMessage;
import org.kaazing.gateway.util.scheduler.SchedulerProvider;
import org.kaazing.mina.core.session.IoSessionEx;
import org.kaazing.mina.core.write.DefaultWriteRequestEx;
import org.slf4j.LoggerFactory;

/**
 * Unit tests for {@code HttpSubjectSecurityFilter}: verifies that the filter passes
 * messages through when no security realm/roles apply or when login succeeds, and
 * that it terminates the filter chain with a 403 response when login fails.
 *
 * All collaborators (session, next filter, login context) are jmock mocks; the
 * asynchronous login-task completion is observed via {@link LoginContextTaskDoneAction}
 * hooked onto {@code session.resumeRead()}.
 */
public class HttpSubjectSecurityFilterTest {

    /**
     * Stand-in for the session's I/O executor: runs each login-context task on a
     * fresh named thread so the asynchronous path of the filter is exercised.
     */
    private static final Executor HTTP_SUBJECT_SECURITY_FILTER_TEST_EXECUTOR = new Executor() {
        @Override
        public void execute(Runnable task) {
            new Thread(task, "HttpSubjectSecurityFilterTest").start();
        }

        @Override
        public String toString() {
            return "HttpSubjectSecurityFilterTestExecutor";
        }
    };

    public HttpSubjectSecurityFilterTest() {
    }

    /** Non-request messages (e.g. responses) must be forwarded untouched. */
    @Test
    public void testNonHttpRequestMessage() throws Exception {
        Mockery context = new Mockery() {
            {
                setImposteriser(ClassImposteriser.INSTANCE);
            }
        };
        context.setThreadingPolicy(new Synchroniser());

        final NextFilter nextFilter = context.mock(NextFilter.class);
        final IoSession session = context.mock(IoSession.class);
        final HttpResponseMessage message = new HttpResponseMessage();

        context.checking(new Expectations() {
            {
                oneOf(nextFilter).messageReceived(session, message);
            }
        });

        HttpSubjectSecurityFilter filter = new HttpSubjectSecurityFilter();
        filter.securityMessageReceived(nextFilter, session, message);
        context.assertIsSatisfied();
    }

    /** GET without an Authorization header and no realm configured passes through. */
    @Test
    public void testGetWithoutAuthorization() throws Exception {
        Mockery context = new Mockery() {
            {
                setImposteriser(ClassImposteriser.INSTANCE);
            }
        };
        context.setThreadingPolicy(new Synchroniser());

        final NextFilter nextFilter = context.mock(NextFilter.class);
        final IoSession session = context.mock(IoSession.class);

        final HttpRequestMessage message = new HttpRequestMessage();
        message.setMethod(HttpMethod.GET);
        final URI serviceURI = URI.create("http://localhost:8000/echo");
        message.setRequestURI(serviceURI);
        message.addHeader("Host", "localhost:8000");
        message.setLoginContext(HttpLoginSecurityFilter.LOGIN_CONTEXT_OK);

        final ResourceAddress address = context.mock(ResourceAddress.class);
        message.setLocalAddress(address);

        context.checking(new Expectations() {
            {
                allowing(address).getOption(HttpResourceAddress.REALM_NAME);
                will(returnValue(null));
                allowing(address).getOption(HttpResourceAddress.REALM_AUTHORIZATION_MODE);
                will(returnValue(null));
                allowing(address).getOption(HttpResourceAddress.REALM_CHALLENGE_SCHEME);
                will(returnValue(null));
                allowing(address).getOption(HttpResourceAddress.REQUIRED_ROLES);
                will(returnValue(null));
                oneOf(nextFilter).messageReceived(session, message);
            }
        });

        HttpSubjectSecurityFilter filter = new HttpSubjectSecurityFilter();
        filter.securityMessageReceived(nextFilter, session, message);
        context.assertIsSatisfied();
    }

    /**
     * A garbage Authorization value with no realm configured is ignored; the
     * request passes through with the default "OK" login context set.
     */
    @Test
    public void testGetWithFakeAuthorization() throws Exception {
        Mockery context = new Mockery() {
            {
                setImposteriser(ClassImposteriser.INSTANCE);
            }
        };
        context.setThreadingPolicy(new Synchroniser());

        final NextFilter nextFilter = context.mock(NextFilter.class);
        final IoSession session = context.mock(IoSession.class);

        final HttpRequestMessage message = new HttpRequestMessage();
        final URI serviceURI = URI.create("http://localhost:8000/echo");
        message.setMethod(HttpMethod.GET);
        message.setRequestURI(serviceURI);
        message.addHeader("Host", "localhost:8000");
        message.setHeader("Authorization", "gobbledegook");

        final ResourceAddress address = context.mock(ResourceAddress.class);
        message.setLocalAddress(address);

        context.checking(new Expectations() {
            {
                allowing(address).getOption(HttpResourceAddress.REALM_NAME);
                will(returnValue(null));
                allowing(address).getOption(HttpResourceAddress.REALM_AUTHORIZATION_MODE);
                will(returnValue(null));
                allowing(address).getOption(HttpResourceAddress.REALM_CHALLENGE_SCHEME);
                will(returnValue(null));
                allowing(address).getOption(HttpResourceAddress.REQUIRED_ROLES);
                will(returnValue(null));
                oneOf(nextFilter).messageReceived(session, message);
            }
        });

        HttpSubjectSecurityFilter filter = new HttpSubjectSecurityFilter();
        filter.securityMessageReceived(nextFilter, session, message);
        assertEquals(HttpLoginSecurityFilter.LOGIN_CONTEXT_OK, message.getLoginContext());
        context.assertIsSatisfied();
    }

    public static final String BASE_URI = "http://localhost:8000/dirservice";

    /** Principal carrying the role name the tests mark as authorized. */
    public static final Principal AUTHORIZED_PRINCIPAL = new Principal() {
        @Override
        public String getName() {
            return "AUTHORIZED";
        }
    };

    @Test
    public void filterShouldPassThroughIfMessageIsNotHttpRequestMessage() throws Exception {
        Mockery context = new Mockery() {
            {
                setImposteriser(ClassImposteriser.INSTANCE);
            }
        };
        context.setThreadingPolicy(new Synchroniser());

        final NextFilter nextFilter = context.mock(NextFilter.class);
        final IoSession session = context.mock(IoSession.class);
        final HttpResponseMessage message = new HttpResponseMessage();

        context.checking(new Expectations() {
            {
                oneOf(nextFilter).messageReceived(session, message);
            }
        });

        HttpSubjectSecurityFilter filter = new HttpSubjectSecurityFilter();
        filter.securityMessageReceived(nextFilter, session, message);
        context.assertIsSatisfied();
    }

    /** An empty required-roles array means no authentication is needed. */
    @Test
    public void filterShouldPassRequestThroughIfNoRolesAreRequired() throws Exception {
        Mockery context = new Mockery() {
            {
                setImposteriser(ClassImposteriser.INSTANCE);
            }
        };
        context.setThreadingPolicy(new Synchroniser());

        final NextFilter nextFilter = context.mock(NextFilter.class);
        final IoSession session = context.mock(IoSession.class);
        final ResourceAddress address = context.mock(ResourceAddress.class);

        final HttpRequestMessage message = new HttpRequestMessage();
        final URI serviceURI = URI.create("ws://localhost:8001/echo");
        message.setRequestURI(serviceURI);
        message.addHeader("Host", "localhost:8000");
        message.addHeader("Connection", "upgrade");
        message.setLocalAddress(address);

        context.checking(new Expectations() {
            {
                allowing(address).getOption(HttpResourceAddress.REALM_NAME);
                will(returnValue(null));
                allowing(address).getOption(HttpResourceAddress.REALM_CHALLENGE_SCHEME);
                will(returnValue(null));
                allowing(address).getOption(HttpResourceAddress.REALM_AUTHORIZATION_MODE);
                will(returnValue(null));
                allowing(address).getOption(HttpResourceAddress.REQUIRED_ROLES);
                will(returnValue(new String[]{}));
                oneOf(nextFilter).messageReceived(session, message);
                never(session).setAttribute(with(any(String.class)), with(any(Subject.class)));
            }
        });

        HttpSubjectSecurityFilter filter = new HttpSubjectSecurityFilter(LoggerFactory.getLogger("security"));
        filter.securityMessageReceived(nextFilter, session, message);
        assertEquals(HttpLoginSecurityFilter.LOGIN_CONTEXT_OK, message.getLoginContext());
        context.assertIsSatisfied();
    }

    /** Roles required but no realm configured: request is passed through. */
    @Test
    public void filterShouldPassThroughIfNoServiceSecurityRealmIsConfigured() throws Exception {
        Mockery context = new Mockery() {
            {
                setImposteriser(ClassImposteriser.INSTANCE);
            }
        };
        context.setThreadingPolicy(new Synchroniser());

        final NextFilter nextFilter = context.mock(NextFilter.class);
        final IoSessionEx session = context.mock(IoSessionEx.class);

        final HttpRequestMessage message = new HttpRequestMessage();
        message.setMethod(HttpMethod.GET);
        message.setVersion(HttpVersion.HTTP_1_1);
        message.setRequestURI(URI.create(BASE_URI));
        message.addHeader("Connection", "Upgrade");
        message.addHeader("Upgrade", "WebSocket");
        message.addHeader("Host", "localhost:8000");
        message.addHeader("Authorization", "gobbledegook");

        final ResourceAddress address = context.mock(ResourceAddress.class);
        message.setLocalAddress(address);

        context.checking(new Expectations() {
            {
                allowing(address).getOption(HttpResourceAddress.REALM_NAME);
                will(returnValue(null));
                allowing(address).getOption(HttpResourceAddress.REALM_CHALLENGE_SCHEME);
                will(returnValue(null));
                allowing(address).getOption(HttpResourceAddress.REALM_AUTHORIZATION_MODE);
                will(returnValue(null));

                // alreadyLoggedIn == false
                allowing(address).getOption(HttpResourceAddress.REQUIRED_ROLES);
                will(returnValue(new String[]{"ADMINISTRATOR"}));
                oneOf(session).getSubject();

                // pass through
                oneOf(nextFilter).messageReceived(session, message);
                never(session).setAttribute(with(any(String.class)), with(any(Subject.class)));
            }
        });

        HttpSubjectSecurityFilter filter = new HttpSubjectSecurityFilter();
        filter.securityMessageReceived(nextFilter, session, message);

        // KG-3232, KG-3267: we should never leave the login context unset
        assertEquals(HttpLoginSecurityFilter.LOGIN_CONTEXT_OK, message.getLoginContext());
        context.assertIsSatisfied();
    }

    /** Successful login: subject attached, request forwarded, read resumed. */
    @Test
    public void filterShouldPassThroughWhenLoginSucceeds() throws Exception {
        Mockery context = new Mockery() {
            {
                setImposteriser(ClassImposteriser.INSTANCE);
                setThreadingPolicy(new Synchroniser());
            }
        };
        context.setThreadingPolicy(new Synchroniser());

        final NextFilter nextFilter = context.mock(NextFilter.class);
        final IoSessionEx session = context.mock(IoSessionEx.class);
        final ResourceAddress address = context.mock(ResourceAddress.class);

        TcpResourceAddressFactorySpi factory = new TcpResourceAddressFactorySpi();
        URI addressURI = URI.create("tcp://localhost:2020");
        ResourceAddress tcpResourceAddress = factory.newResourceAddress(addressURI);

        final HttpRequestMessage message = new HttpRequestMessage();
        message.setMethod(HttpMethod.GET);
        message.setVersion(HttpVersion.HTTP_1_1);
        message.setRequestURI(URI.create(BASE_URI));
        message.addHeader("Connection", "Upgrade");
        message.addHeader("Upgrade", "WebSocket");
        message.addHeader("Host", "localhost:8000");
        message.addHeader("Authorization", "Token gobbledegook");
        message.setLocalAddress(address);

        final ResultAwareLoginContext loginContext = context.mock(ResultAwareLoginContext.class);
        final LoginContextFactory loginContextFactory = context.mock(DefaultLoginContextFactory.class);
        final DefaultLoginResult loginResult = context.mock(DefaultLoginResult.class);
        final Set<Principal> principals = new HashSet<>();
        principals.add(AUTHORIZED_PRINCIPAL);
        final Subject subject = new Subject(false, principals, Collections.emptySet(), Collections.emptySet());

        final HttpSubjectSecurityFilter filter = new HttpSubjectSecurityFilter();
        filter.setSchedulerProvider(new SchedulerProvider());
        final CountDownLatch latch = new CountDownLatch(1);

        context.checking(new Expectations() {
            {
                oneOf(session).getRemoteAddress();
                will(returnValue(tcpResourceAddress));
                oneOf(session).getSubject();
                will(returnValue(null));
                allowing(address).getOption(HttpResourceAddress.REALM_NAME);
                will(returnValue("demo"));
                allowing(address).getOption(HttpResourceAddress.REQUIRED_ROLES);
                will(returnValue(new String[]{"AUTHORIZED"}));
                allowing(address).getOption(HttpResourceAddress.REALM_CHALLENGE_SCHEME);
                will(returnValue("Application Token"));
                allowing(address).getOption(HttpResourceAddress.REALM_AUTHORIZATION_MODE);
                will(returnValue("challenge"));

                // not already logged in
                oneOf(session).getSubject();

                // login() method itself
                allowing(address).getOption(HttpResourceAddress.REALM_AUTHENTICATION_HEADER_NAMES);
                will(returnValue(null));
                allowing(address).getOption(HttpResourceAddress.REALM_AUTHENTICATION_PARAMETER_NAMES);
                will(returnValue(null));
                allowing(address).getOption(HttpResourceAddress.REALM_AUTHENTICATION_COOKIE_NAMES);
                will(returnValue(null));
                oneOf(address).getOption(HttpResourceAddress.LOGIN_CONTEXT_FACTORY);
                will(returnValue(loginContextFactory));
                oneOf(loginContextFactory).createLoginContext(with(aNonNull(TypedCallbackHandlerMap.class)));
                will(returnValue(loginContext));
                oneOf(session).suspendRead();
                oneOf(loginContext).login();
                will(VoidAction.INSTANCE);
                oneOf(loginContext).getLoginResult();
                will(returnValue(loginResult));
                oneOf(loginResult).getType();
                will(returnValue(LoginResult.Type.SUCCESS));
                oneOf(loginResult).hasLoginAuthorizationAttachment();
                will(returnValue(false));
                atMost(2).of(loginContext).getSubject();
                will(returnValue(subject));
                oneOf(nextFilter).messageReceived(session, message);
                oneOf(session).getIoExecutor();
                will(returnValue(HTTP_SUBJECT_SECURITY_FILTER_TEST_EXECUTOR));
                oneOf(session).resumeRead();
                will(new LoginContextTaskDoneAction(latch, "login context task done"));
            }
        });

        filter.messageReceived(nextFilter, session, message);
        latch.await(2000, TimeUnit.MILLISECONDS);
        assertNotNull(message.getLoginContext());
        context.assertIsSatisfied();
    }

    /** Same as the success case but with an RFC 7239 Forwarded header present. */
    @Test
    public void filterShouldPassThroughWhenLoginSucceedsWithForwardedHeader() throws Exception {
        Mockery context = new Mockery() {
            {
                setImposteriser(ClassImposteriser.INSTANCE);
                setThreadingPolicy(new Synchroniser());
            }
        };
        context.setThreadingPolicy(new Synchroniser());

        final NextFilter nextFilter = context.mock(NextFilter.class);
        final IoSessionEx session = context.mock(IoSessionEx.class);
        final ResourceAddress address = context.mock(ResourceAddress.class);

        final HttpRequestMessage message = new HttpRequestMessage();
        message.setMethod(HttpMethod.GET);
        message.setVersion(HttpVersion.HTTP_1_1);
        message.setRequestURI(URI.create(BASE_URI));
        message.addHeader("Connection", "Upgrade");
        message.addHeader("Upgrade", "WebSocket");
        message.addHeader("Host", "localhost:8000");
        message.addHeader("Forwarded", "for=\"[0:0:0:0:0:0:0:1]:8000\";by=198.51.100.17;proto=http");
        message.addHeader("Authorization", "Token gobbledegook");
        message.setLocalAddress(address);

        final ResultAwareLoginContext loginContext = context.mock(ResultAwareLoginContext.class);
        final LoginContextFactory loginContextFactory = context.mock(DefaultLoginContextFactory.class);
        final DefaultLoginResult loginResult = context.mock(DefaultLoginResult.class);
        final Set<Principal> principals = new HashSet<>();
        principals.add(AUTHORIZED_PRINCIPAL);
        final Subject subject = new Subject(false, principals, Collections.emptySet(), Collections.emptySet());

        final HttpSubjectSecurityFilter filter = new HttpSubjectSecurityFilter();
        filter.setSchedulerProvider(new SchedulerProvider());
        final CountDownLatch latch = new CountDownLatch(1);

        context.checking(new Expectations() {
            {
                oneOf(session).getSubject();
                will(returnValue(null));
                allowing(address).getOption(HttpResourceAddress.REALM_NAME);
                will(returnValue("demo"));
                allowing(address).getOption(HttpResourceAddress.REQUIRED_ROLES);
                will(returnValue(new String[]{"AUTHORIZED"}));
                allowing(address).getOption(HttpResourceAddress.REALM_CHALLENGE_SCHEME);
                will(returnValue("Application Token"));
                allowing(address).getOption(HttpResourceAddress.REALM_AUTHORIZATION_MODE);
                will(returnValue("challenge"));

                // not already logged in
                oneOf(session).getSubject();

                // login() method itself
                allowing(address).getOption(HttpResourceAddress.REALM_AUTHENTICATION_HEADER_NAMES);
                will(returnValue(null));
                allowing(address).getOption(HttpResourceAddress.REALM_AUTHENTICATION_PARAMETER_NAMES);
                will(returnValue(null));
                allowing(address).getOption(HttpResourceAddress.REALM_AUTHENTICATION_COOKIE_NAMES);
                will(returnValue(null));
                oneOf(address).getOption(HttpResourceAddress.LOGIN_CONTEXT_FACTORY);
                will(returnValue(loginContextFactory));
                oneOf(loginContextFactory).createLoginContext(with(aNonNull(TypedCallbackHandlerMap.class)));
                will(returnValue(loginContext));
                oneOf(session).suspendRead();
                oneOf(loginContext).login();
                will(VoidAction.INSTANCE);
                oneOf(loginContext).getLoginResult();
                will(returnValue(loginResult));
                oneOf(loginResult).getType();
                will(returnValue(LoginResult.Type.SUCCESS));
                oneOf(loginResult).hasLoginAuthorizationAttachment();
                will(returnValue(false));
                atMost(2).of(loginContext).getSubject();
                will(returnValue(subject));
                oneOf(nextFilter).messageReceived(session, message);
                oneOf(session).getIoExecutor();
                will(returnValue(HTTP_SUBJECT_SECURITY_FILTER_TEST_EXECUTOR));
                oneOf(session).resumeRead();
                will(new LoginContextTaskDoneAction(latch, "login context task done"));
            }
        });

        filter.messageReceived(nextFilter, session, message);
        // Fix: was 200000 ms (200 s) — a typo; all sibling tests bound the wait at 2000 ms.
        latch.await(2000, TimeUnit.MILLISECONDS);
        assertNotNull(message.getLoginContext());
        context.assertIsSatisfied();
    }

    /** login() throws: filter must write a 403 and never forward the request. */
    @Test
    public void filterShouldEndChainWhenLoginFailsHard() throws Exception {
        Mockery context = new Mockery() {
            {
                setImposteriser(ClassImposteriser.INSTANCE);
                setThreadingPolicy(new Synchroniser());
            }
        };
        context.setThreadingPolicy(new Synchroniser());

        final NextFilter nextFilter = context.mock(NextFilter.class);
        final IoSessionEx session = context.mock(IoSessionEx.class);
        final ResourceAddress address = context.mock(ResourceAddress.class);

        final HttpRequestMessage message = new HttpRequestMessage();
        message.setMethod(HttpMethod.GET);
        message.setVersion(HttpVersion.HTTP_1_1);
        message.setRequestURI(URI.create(BASE_URI));
        message.addHeader("Connection", "Upgrade");
        message.addHeader("Upgrade", "WebSocket");
        message.addHeader("Host", "localhost:8000");
        message.addHeader("Forwarded", "for=127.0.0.1");
        message.addHeader("Authorization", "Token gobbledegook");
        message.setLocalAddress(address);

        final ResultAwareLoginContext loginContext = context.mock(ResultAwareLoginContext.class);
        final LoginContextFactory loginContextFactory = context.mock(DefaultLoginContextFactory.class);
        final CountDownLatch latch = new CountDownLatch(1);

        context.checking(new Expectations() {
            {
                allowing(address).getOption(HttpResourceAddress.REALM_NAME);
                will(returnValue("demo"));
                allowing(address).getOption(HttpResourceAddress.REALM_CHALLENGE_SCHEME);
                will(returnValue("Application Token"));
                allowing(address).getOption(HttpResourceAddress.REQUIRED_ROLES);
                will(returnValue(new String[]{"AUTHORIZED"}));
                allowing(address).getOption(HttpResourceAddress.REALM_AUTHORIZATION_MODE);
                will(returnValue("challenge"));

                // not already logged in
                oneOf(session).getSubject();
                will(returnValue(null));

                // login() method itself
                oneOf(address).getOption(HttpResourceAddress.REALM_AUTHENTICATION_HEADER_NAMES);
                will(returnValue(null));
                oneOf(address).getOption(HttpResourceAddress.REALM_AUTHENTICATION_PARAMETER_NAMES);
                will(returnValue(null));
                oneOf(address).getOption(HttpResourceAddress.REALM_AUTHENTICATION_COOKIE_NAMES);
                will(returnValue(null));
                oneOf(address).getOption(HttpResourceAddress.LOGIN_CONTEXT_FACTORY);
                will(returnValue(loginContextFactory));
                oneOf(loginContextFactory).createLoginContext(with(aNonNull(TypedCallbackHandlerMap.class)));
                will(returnValue(loginContext));
                oneOf(session).suspendRead();
                oneOf(loginContext).login();
                will(throwException(new LoginException()));
                oneOf(nextFilter).filterWrite(with(same(session)),
                        with(writeRequest(withStatus(HttpStatus.CLIENT_FORBIDDEN))));
                will(VoidAction.INSTANCE);
                never(nextFilter).messageReceived(session, message);
                never(session).setAttribute(with(any(String.class)), with(any(Subject.class)));
                oneOf(session).getIoExecutor();
                will(returnValue(HTTP_SUBJECT_SECURITY_FILTER_TEST_EXECUTOR));
                oneOf(session).resumeRead();
                will(new LoginContextTaskDoneAction(latch, "login context task done"));
            }
        });

        HttpSubjectSecurityFilter filter = new HttpSubjectSecurityFilter();
        filter.setSchedulerProvider(new SchedulerProvider());
        filter.securityMessageReceived(nextFilter, session, message);
        latch.await(2000, TimeUnit.MILLISECONDS);
        context.assertIsSatisfied();
    }

    /** login() yields a CHALLENGE result with no challenge data: 403, no forward. */
    @Test
    public void filterShouldEndChainWhenLoginFailsSoft() throws Exception {
        Mockery context = new Mockery() {
            {
                setImposteriser(ClassImposteriser.INSTANCE);
                setThreadingPolicy(new Synchroniser());
            }
        };
        context.setThreadingPolicy(new Synchroniser());

        final NextFilter nextFilter = context.mock(NextFilter.class);
        final IoSessionEx session = context.mock(IoSessionEx.class);
        final ResourceAddress address = context.mock(ResourceAddress.class);

        final HttpRequestMessage message = new HttpRequestMessage();
        message.setMethod(HttpMethod.GET);
        message.setVersion(HttpVersion.HTTP_1_1);
        message.setRequestURI(URI.create(BASE_URI));
        message.addHeader("Connection", "Upgrade");
        message.addHeader("Upgrade", "WebSocket");
        message.addHeader("Host", "localhost:8000");
        message.addHeader("Forwarded", "for=127.0.0.1");
        message.addHeader("Authorization", "Token gobbledegook");
        message.setLocalAddress(address);

        final ResultAwareLoginContext loginContext = context.mock(ResultAwareLoginContext.class);
        final DefaultLoginResult loginResult = context.mock(DefaultLoginResult.class);
        final LoginContextFactory loginContextFactory = context.mock(DefaultLoginContextFactory.class);
        final Set<Principal> principals = new HashSet<>();
        principals.add(AUTHORIZED_PRINCIPAL);
        final Subject subject = new Subject(false, principals, Collections.emptySet(), Collections.emptySet());
        final Matcher<WriteRequest> writeRequestMatcher = writeRequest(withStatus(HttpStatus.CLIENT_FORBIDDEN));
        final CountDownLatch latch = new CountDownLatch(1);

        context.checking(new Expectations() {
            {
                allowing(address).getOption(HttpResourceAddress.REALM_NAME);
                will(returnValue("demo"));
                allowing(address).getOption(HttpResourceAddress.REQUIRED_ROLES);
                will(returnValue(new String[]{"AUTHORIZED"}));
                allowing(address).getOption(HttpResourceAddress.REALM_AUTHORIZATION_MODE);
                will(returnValue("challenge"));
                allowing(address).getOption(HttpResourceAddress.REALM_CHALLENGE_SCHEME);
                will(returnValue("Application Token"));

                // not already logged in
                oneOf(session).getSubject();
                will(returnValue(null));

                // login() method itself
                oneOf(address).getOption(HttpResourceAddress.REALM_AUTHENTICATION_HEADER_NAMES);
                will(returnValue(null));
                oneOf(address).getOption(HttpResourceAddress.REALM_AUTHENTICATION_PARAMETER_NAMES);
                will(returnValue(null));
                oneOf(address).getOption(HttpResourceAddress.REALM_AUTHENTICATION_COOKIE_NAMES);
                will(returnValue(null));
                oneOf(address).getOption(HttpResourceAddress.LOGIN_CONTEXT_FACTORY);
                will(returnValue(loginContextFactory));
                oneOf(loginContextFactory).createLoginContext(with(aNonNull(TypedCallbackHandlerMap.class)));
                will(returnValue(loginContext));
                oneOf(session).suspendRead();
                oneOf(loginContext).login();
                oneOf(loginContext).getLoginResult();
                will(returnValue(loginResult));
                oneOf(loginResult).getType();
                will(returnValue(LoginResult.Type.CHALLENGE));
                oneOf(loginContext).getSubject();
                will(returnValue(subject));
                oneOf(loginResult).getLoginChallengeData();
                will(returnValue(null));
                oneOf(nextFilter).filterWrite(with(same(session)), with(writeRequestMatcher));
                will(VoidAction.INSTANCE);
                never(nextFilter).messageReceived(session, message);
                never(session).setAttribute(with(any(String.class)), with(any(Subject.class)));
                oneOf(session).getIoExecutor();
                will(returnValue(HTTP_SUBJECT_SECURITY_FILTER_TEST_EXECUTOR));
                oneOf(session).resumeRead();
                will(new LoginContextTaskDoneAction(latch, "login context task done"));
            }
        });

        HttpSubjectSecurityFilter filter = new HttpSubjectSecurityFilter();
        filter.setSchedulerProvider(new SchedulerProvider());
        filter.securityMessageReceived(nextFilter, session, message);
        latch.await(2000, TimeUnit.MILLISECONDS);
        context.assertIsSatisfied();
    }

    /** Builds a write request carrying an HTTP/1.1 response with the given status. */
    private WriteRequest withStatus(final HttpStatus httpStatus) {
        return new DefaultWriteRequestEx(httpResponseWith(httpStatus));
    }

    private HttpResponseMessage httpResponseWith(final HttpStatus httpStatus) {
        final HttpResponseMessage httpResponse = new HttpResponseMessage();
        httpResponse.setStatus(httpStatus);
        httpResponse.setVersion(HttpVersion.HTTP_1_1);
        return httpResponse;
    }

    /** Matches any {@link WriteRequest} whose message equals the expected one. */
    private Matcher<WriteRequest> writeRequest(final WriteRequest writeRequest) {
        return new BaseMatcher<WriteRequest>() {
            @Override
            public boolean matches(Object o) {
                // instanceof already rejects null, so no separate null check is needed
                return o instanceof WriteRequest
                        && writeRequest.getMessage().equals(((WriteRequest) o).getMessage());
            }

            @Override
            public void describeTo(Description description) {
                description.appendText(writeRequest.toString());
            }
        };
    }

    /**
     * jmock action that counts down a latch when invoked, letting a test wait for
     * the asynchronous login-context task to finish (hooked onto resumeRead()).
     */
    private static final class LoginContextTaskDoneAction extends CustomAction {
        private final CountDownLatch latch;

        LoginContextTaskDoneAction(CountDownLatch latch, String description) {
            super(description);
            this.latch = latch;
        }

        @Override
        public Object invoke(Invocation invocation) throws Throwable {
            latch.countDown();
            return null;
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill.exec.server.rest;

import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.util.ValueVectorElementFormatter;
import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
import org.apache.drill.shaded.guava.com.google.common.collect.Maps;
import org.apache.drill.shaded.guava.com.google.common.collect.Sets;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.DrillBuf;
import io.netty.channel.ChannelFuture;
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.common.types.TypeProtos;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.physical.impl.materialize.QueryWritableBatch;
import org.apache.drill.exec.proto.GeneralRPCProtos.Ack;
import org.apache.drill.exec.record.RecordBatchLoader;
import org.apache.drill.exec.record.VectorWrapper;
import org.apache.drill.exec.rpc.AbstractDisposableUserClientConnection;
import org.apache.drill.exec.rpc.Acks;
import org.apache.drill.exec.rpc.ConnectionThrottle;
import org.apache.drill.exec.rpc.RpcOutcomeListener;
import org.apache.drill.exec.rpc.user.UserSession;
import org.apache.drill.exec.vector.ValueVector.Accessor;
import org.apache.drill.exec.record.MaterializedField;

import java.net.SocketAddress;
import java.util.List;
import java.util.Map;
import java.util.ArrayList;
import java.util.Set;

/**
 * WebUserConnectionWrapper which represents the UserClientConnection between WebServer and Foreman, for the WebUser
 * submitting the query. It provides access to the UserSession executing the query. There is no actual physical
 * channel corresponding to this connection wrapper.
 *
 * It returns a close future with no actual underlying {@link io.netty.channel.Channel} associated with it but do have an
 * EventExecutor out of BitServer EventLoopGroup. Since there is no actual connection established using this class,
 * hence the close event will never be fired by underlying layer and close future is set only when the
 * {@link WebSessionResources} are closed.
 */
public class WebUserConnection extends AbstractDisposableUserClientConnection implements ConnectionThrottle {

  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(WebUserConnection.class);

  // Session-scoped resources (allocator, UserSession, close future) backing this connection.
  protected WebSessionResources webSessionResources;

  // Decoded query results: one String-valued map per row, keyed by column name.
  public final List<Map<String, String>> results = Lists.newArrayList();

  // Column names in insertion (schema) order; linked set keeps ordering while de-duplicating.
  public final Set<String> columns = Sets.newLinkedHashSet();

  // One type descriptor per column, e.g. "VARCHAR(10)" or "VARDECIMAL(38, 2)", parallel to columns.
  public final List<String> metadata = new ArrayList<>();

  // Max number of rows the web query may return; 0 means no limit was set (default int value).
  private int autoLimitRowCount;

  WebUserConnection(WebSessionResources webSessionResources) {
    this.webSessionResources = webSessionResources;
  }

  @Override
  public UserSession getSession() {
    return webSessionResources.getSession();
  }

  /**
   * Consolidates all data buffers of the incoming batch into one DrillBuf, loads it into a
   * {@link RecordBatchLoader}, records column names/types into {@link #columns}/{@link #metadata},
   * and converts every row into a String-valued map appended to {@link #results}.
   * The listener is acked with {@code Acks.OK} on every path, including failures (any exception
   * is captured in {@code error} as a DrillPBError instead of being propagated).
   *
   * @param listener listener notified once the batch has been consumed
   * @param result   the batch to decode; its buffers are released by this method
   */
  @Override
  public void sendData(RpcOutcomeListener<Ack> listener, QueryWritableBatch result) {
    // There can be overflow here but DrillBuf doesn't support allocating with
    // bytes in long. Hence we are just preserving the earlier behavior and logging debug log for the case.
    final int dataByteCount = (int) result.getByteCount();

    // Negative value indicates the long byte count overflowed int: skip decoding, ack and bail.
    if (dataByteCount < 0) {
      if (logger.isDebugEnabled()) {
        logger.debug("There is BufferOverflow in dataByteCount: {}", dataByteCount);
      }
      listener.success(Acks.OK, null);
      return;
    }

    // Create a ByteBuf with all the data in it.
    final int rows = result.getHeader().getRowCount();
    final BufferAllocator allocator = webSessionResources.getAllocator();
    final DrillBuf bufferWithData = allocator.buffer(dataByteCount);
    try {
      // Copy each incoming buffer into the consolidated buffer and release the source
      // immediately — ownership of the input buffers transfers to this method.
      final ByteBuf[] resultDataBuffers = result.getBuffers();

      for (final ByteBuf buffer : resultDataBuffers) {
        bufferWithData.writeBytes(buffer);
        buffer.release();
      }

      final RecordBatchLoader loader = new RecordBatchLoader(allocator);
      try {
        loader.load(result.getHeader().getDef(), bufferWithData);
        // TODO:  Clean:  DRILL-2933:  That load(...) no longer throws
        // SchemaChangeException, so check/clean catch clause below.
        for (int i = 0; i < loader.getSchema().getFieldCount(); ++i) {
          //DRILL-6847:  This section adds query metadata to the REST results
          MaterializedField col = loader.getSchema().getColumn(i);
          columns.add(col.getName());
          StringBuilder dataType = new StringBuilder(col.getType().getMinorType().name());

          //For DECIMAL type
          if (col.getType().hasPrecision()) {
            dataType.append("(");
            dataType.append(col.getType().getPrecision());

            if (col.getType().hasScale()) {
              dataType.append(", ");
              dataType.append(col.getType().getScale());
            }

            dataType.append(")");
          } else if (col.getType().hasWidth()) {
            //Case for VARCHAR columns with specified width
            dataType.append("(");
            dataType.append(col.getType().getWidth());
            dataType.append(")");
          }
          metadata.add(dataType.toString());
        }
        // Formatter applies session options (e.g. date/time formats) when rendering values.
        ValueVectorElementFormatter formatter =
            new ValueVectorElementFormatter(webSessionResources.getSession().getOptions());
        for (int i = 0; i < rows; ++i) {
          final Map<String, String> record = Maps.newHashMap();
          for (VectorWrapper<?> vw : loader) {
            final String field = vw.getValueVector().getMetadata().getNamePart().getName();
            final TypeProtos.MinorType fieldMinorType =
                vw.getValueVector().getMetadata().getMajorType().getMinorType();
            final Accessor accessor = vw.getValueVector().getAccessor();
            // Guard against vectors shorter than the reported row count; missing values map to null.
            final Object value = i < accessor.getValueCount() ? accessor.getObject(i) : null;
            final String display = value == null ? null : formatter.format(value, fieldMinorType);
            record.put(field, display);
          }
          results.add(record);
        }
      } finally {
        loader.clear();
      }
    } catch (Exception e) {
      boolean verbose = webSessionResources.getSession().getOptions()
          .getBoolean(ExecConstants.ENABLE_VERBOSE_ERRORS_KEY);
      // Wrapping the exception into UserException and then into DrillPBError.
      // It will be thrown as exception in QueryWrapper class.
      // It's verbosity depends on system option "exec.errors.verbose".
      error = UserException.systemError(e).build(logger).getOrCreatePBError(verbose);
    } finally {
      // Notify the listener with ACK.OK both in error/success case because data was sent
      // successfully from the Drillbit; the consolidated buffer is always released here.
      bufferWithData.release();
      listener.success(Acks.OK, null);
    }
  }

  @Override
  public ChannelFuture getChannelClosureFuture() {
    // Completed only when WebSessionResources are closed — no real Channel exists (see class doc).
    return webSessionResources.getCloseFuture();
  }

  @Override
  public SocketAddress getRemoteAddress() {
    return webSessionResources.getRemoteAddress();
  }

  @Override
  public void setAutoRead(boolean enableAutoRead) {
    // no-op
  }

  /**
   * For authenticated WebUser no cleanup of {@link WebSessionResources} is done since it's re-used
   * for all the queries until lifetime of the web session.
   */
  public void cleanupSession() {
    // no-op
  }

  /** Connection variant for anonymous users whose session resources are per-query. */
  public static class AnonWebUserConnection extends WebUserConnection {

    AnonWebUserConnection(WebSessionResources webSessionResources) {
      super(webSessionResources);
    }

    /**
     * For anonymous WebUser after each query request is completed the {@link WebSessionResources}
     * is cleaned up.
     */
    @Override
    public void cleanupSession() {
      webSessionResources.close();
    }
  }

  /**
   * Sets an autolimit on the size of records to be sent back on the connection
   * @param autoLimitRowCount Max number of records to be sent back to WebServer
   */
  void setAutoLimitRowCount(int autoLimitRowCount) {
    this.autoLimitRowCount = autoLimitRowCount;
  }

  /**
   * Gets the max size of records to be sent back by the query
   * @return Max number of records to be sent back to WebServer
   */
  public int getAutoLimitRowCount() {
    return this.autoLimitRowCount;
  }
}
package com.iappsam.entities; import java.util.ArrayList; import java.util.List; import com.iappsam.Account; import com.iappsam.Building; import com.iappsam.Contact; import com.iappsam.DivisionOffice; import com.iappsam.Employee; import com.iappsam.Item; import com.iappsam.ItemCategory; import com.iappsam.ItemCondition; import com.iappsam.ItemStatus; import com.iappsam.Person; import com.iappsam.Signatory; import com.iappsam.Supplier; import com.iappsam.Unit; import com.iappsam.forms.APP; import com.iappsam.forms.IE; import com.iappsam.forms.IIRUP; import com.iappsam.forms.ModeOfProcurement; import com.iappsam.forms.PO; import com.iappsam.forms.PR; import com.iappsam.managers.APPManager; import com.iappsam.managers.AccountManager; import com.iappsam.managers.ContactManager; import com.iappsam.managers.DivisionOfficeManager; import com.iappsam.managers.IEManager; import com.iappsam.managers.IIRUPManager; import com.iappsam.managers.ItemManager; import com.iappsam.managers.POManager; import com.iappsam.managers.PRManager; import com.iappsam.managers.PersonManager; import com.iappsam.managers.SupplierManager; import com.iappsam.managers.exceptions.DuplicateEntryException; import com.iappsam.managers.exceptions.TransactionException; import com.iappsam.managers.sessions.APPManagerSession; import com.iappsam.managers.sessions.AccountManagerSession; import com.iappsam.managers.sessions.ContactManagerSession; import com.iappsam.managers.sessions.DivisionOfficeManagerSession; import com.iappsam.managers.sessions.IEManagerSession; import com.iappsam.managers.sessions.IIRUPManagerSession; import com.iappsam.managers.sessions.ItemManagerSession; import com.iappsam.managers.sessions.POManagerSession; import com.iappsam.managers.sessions.PRManagerSession; import com.iappsam.managers.sessions.PersonManagerSession; import com.iappsam.managers.sessions.SupplierManagerSession; import com.iappsam.util.HibernateUtil; public class EntityRemover { private static IIRUPManager iirupm = 
new IIRUPManagerSession(); private static PersonManager pm = new PersonManagerSession(); private static ItemManager im = new ItemManagerSession(); private static APPManager appm = new APPManagerSession(); private static DivisionOfficeManager dom = new DivisionOfficeManagerSession(); private static ContactManager cm = new ContactManagerSession(); private static AccountManager am = new AccountManagerSession(); private static SupplierManager sm = new SupplierManagerSession(); private static POManager pom = new POManagerSession(); private static PRManager prm = new PRManagerSession(); private static IEManager iem = new IEManagerSession(); public static void removeAll() throws TransactionException { removeIEs(); removePOs(); removePRs(); removeAPPs(); removeIIRUPs(); removeItems(); removeItemProperties(); removeModeOfProcurements(); removeSignatories(); removeSuppliers(); removeEmployees(); removeAccounts(); removePersons(); removeContacts(); removeDivisionOffices(); removeBuilidings(); } private static void removeBuilidings() throws TransactionException { List<Building> buildings = dom.getAllBuildings(); for (Building building : buildings) dom.removeBuilding(building); } private static void removeIEs() throws TransactionException { List<IE> ies = iem.getAllIE(); for (IE ie : ies) iem.removeIE(ie); } public static void removePRs() throws TransactionException { List<PR> prs = prm.getAllPR(); for (PR pr : prs) prm.removePR(pr); } public static void removeModeOfProcurements() throws TransactionException { List<ModeOfProcurement> mops = pom.getAllModeOfProcurement(); for (ModeOfProcurement mop : mops) pom.removeModeOfProcurement(mop); } public static void removePOs() throws TransactionException { List<PO> pos = pom.getAllPO(); for (PO po : pos) pom.removePurchaseOrder(po); } public static void removeSuppliers() throws TransactionException { List<Supplier> suppliers = sm.getAllSuppliers(); for (Supplier s : suppliers) sm.removeSupplier(s); } public static void 
removeAccounts() throws TransactionException { List<Account> accounts = am.getAllAccounts(); for (Account i : accounts) am.removeAccount(i); } public static void removeContacts() throws TransactionException { List<Contact> contacts = cm.getAllContacts(); for (Contact i : contacts) cm.removeContact(i); } public static void removeDivisionOffices() throws TransactionException { List<DivisionOffice> offices = dom.getAllDivisionOffice(); for (DivisionOffice i : offices) dom.removeDivisionOffice(i); } public static void removeAPPs() throws TransactionException { List<APP> persons = appm.getAllAPP(); for (APP i : persons) appm.removeAPP(i); } public static void removePersons() throws TransactionException { List<Person> persons = pm.getAllPersons(); for (Person i : persons) pm.removePerson(i); } public static void removeSignatories() throws TransactionException { List<Signatory> signatories = pm.getAllSignatories(); for (Signatory i : signatories) pm.removeSignatory(i); } public static void removeItemProperties() throws TransactionException { removeItemConditions(); removeCategories(); removeUnits(); removeItemStatuses(); } public static void removeItemStatuses() throws TransactionException { List<ItemStatus> statuses = im.getAllItemStatus(); for (ItemStatus i : statuses) im.removeItemStatus(i); } public static void removeUnits() throws TransactionException { List<Unit> units = im.getAllUnits(); for (Unit i : units) im.removeUnit(i); } public static void removeCategories() throws TransactionException { List<ItemCategory> conditions = im.getAllItemCategory(); for (ItemCategory i : conditions) im.removeItemCategory(i); } public static void removeItemConditions() throws TransactionException { List<ItemCondition> conditions = im.getAllItemCondition(); for (ItemCondition i : conditions) im.removeItemCondition(i); } public static void removeItems() throws TransactionException { List<Item> items = im.getAllItems(); for (Item i : items) im.removeItem(i); } public static void 
removeIIRUPs() throws TransactionException { List<IIRUP> iirups = iirupm.getAllIIRUP(); for (IIRUP iirup : iirups) iirupm.removeIIRUP(iirup); } public static void removeEmployees() throws TransactionException { List<Employee> employees = pm.getAllEmployee(); for (Employee e : employees) pm.removeEmployee(e); } public static void reset() throws TransactionException, DuplicateEntryException { removeAll(); } }
package org.zstack.compute.allocator;

import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Configurable;
import org.zstack.core.componentloader.PluginRegistry;
import org.zstack.core.errorcode.ErrorFacade;
import org.zstack.header.allocator.*;
import org.zstack.header.core.ReturnValueCompletion;
import org.zstack.header.errorcode.ErrorCode;
import org.zstack.header.errorcode.OperationFailureException;
import org.zstack.header.host.HostInventory;
import org.zstack.header.host.HostVO;
import org.zstack.header.vm.VmInstanceInventory;
import org.zstack.utils.DebugUtils;
import org.zstack.utils.Utils;
import org.zstack.utils.gson.JSONObjectUtil;
import org.zstack.utils.logging.CLogger;

import java.util.*;

/**
 * Runs a sequence of {@link AbstractHostAllocatorFlow}s to pick a host for a VM.
 * Each flow narrows the candidate host list ({@code result}); after the last
 * flow, {@link #done()} reserves capacity on a candidate and reports it.
 * When pagination is enabled, intermediate failures restart the whole chain
 * ({@link #startOver()}) and accumulate in {@code seriesErrorWhenPagination}.
 */
@Configurable(preConstruction = true, autowire = Autowire.BY_TYPE)
public class HostAllocatorChain implements HostAllocatorTrigger, HostAllocatorStrategy {
    private static final CLogger logger = Utils.getLogger(HostAllocatorChain.class);

    private HostAllocatorSpec allocationSpec;
    private String name;
    private List<AbstractHostAllocatorFlow> flows;

    // Iterator over `flows`; reset by startOver() when pagination retries.
    private Iterator<AbstractHostAllocatorFlow> it;

    private ErrorCode errorCode;
    // Candidate hosts surviving the flows so far; null means failure/not started.
    private List<HostVO> result = null;
    private boolean isDryRun;
    private ReturnValueCompletion<HostInventory> completion;
    private ReturnValueCompletion<List<HostInventory>> dryRunCompletion;
    // The flow currently (or most recently) executing; used for log context.
    private AbstractHostAllocatorFlow lastFlow;
    // Non-null only when pagination is enabled (see start()).
    private HostAllocationPaginationInfo paginationInfo;
    // Errors collected across start-over rounds while paginating.
    private Set<String> seriesErrorWhenPagination = new HashSet<String>();

    @Autowired
    private ErrorFacade errf;
    @Autowired
    private PluginRegistry pluginRgty;
    @Autowired
    private HostCapacityOverProvisioningManager ratioMgr;

    public HostAllocatorSpec getAllocationSpec() {
        return allocationSpec;
    }

    public void setAllocationSpec(HostAllocatorSpec allocationSpec) {
        this.allocationSpec = allocationSpec;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public List<AbstractHostAllocatorFlow> getFlows() {
        return flows;
    }

    public void setFlows(List<AbstractHostAllocatorFlow> flows) {
        this.flows = flows;
    }

    /**
     * Atomically subtracts the VM's cpu/memory from the host's available
     * capacity. Memory is scaled by the over-provisioning ratio. Throws
     * {@link UnableToReserveHostCapacityException} if either would go negative.
     */
    void reserveCapacity(final String hostUuid, final long cpu, final long memory) {
        HostCapacityUpdater updater = new HostCapacityUpdater(hostUuid);
        updater.run(new HostCapacityUpdaterRunnable() {
            @Override
            public HostCapacityVO call(HostCapacityVO cap) {
                long availCpu = cap.getAvailableCpu() - cpu;
                if (availCpu < 0) {
                    throw new UnableToReserveHostCapacityException(
                            String.format("no enough CPU[%s] on the host[uuid:%s]", cpu, hostUuid));
                }

                cap.setAvailableCpu(availCpu);

                long availMemory = cap.getAvailableMemory() - ratioMgr.calculateMemoryByRatio(hostUuid, memory);
                if (availMemory < 0) {
                    throw new UnableToReserveHostCapacityException(
                            String.format("no enough memory[%s] on the host[uuid:%s]", memory, hostUuid));
                }

                cap.setAvailableMemory(availMemory);

                return cap;
            }
        });
    }

    // Shuffles candidates so reservation attempts spread load across hosts.
    protected void marshalResult() {
        Collections.shuffle(result);
    }

    /**
     * Terminal step after the last flow (or after a failure). For a dry run it
     * reports the candidate list; otherwise it tries to reserve capacity on
     * each candidate in (shuffled) order, succeeding on the first that fits.
     * With pagination enabled, exhausting all candidates triggers a start-over
     * instead of failing outright.
     */
    private void done() {
        if (result == null) {
            if (isDryRun) {
                // NO_AVAILABLE_HOST in a dry run is reported as an empty list,
                // not an error, because callers only want the candidate set.
                if (HostAllocatorError.NO_AVAILABLE_HOST.toString().equals(errorCode.getCode())) {
                    dryRunCompletion.success(new ArrayList<HostInventory>());
                } else {
                    dryRunCompletion.fail(errorCode);
                }
            } else {
                completion.fail(errorCode);
            }
            return;
        }

        // in case a wrong flow returns an empty result set
        if (result.isEmpty()) {
            if (isDryRun) {
                dryRunCompletion.fail(errf.instantiateErrorCode(HostAllocatorError.NO_AVAILABLE_HOST,
                        "host allocation flow doesn't indicate any details"));
            } else {
                completion.fail(errf.instantiateErrorCode(HostAllocatorError.NO_AVAILABLE_HOST,
                        "host allocation flow doesn't indicate any details"));
            }
            return;
        }

        if (isDryRun) {
            dryRunCompletion.success(HostInventory.valueOf(result));
            return;
        }

        marshalResult();

        try {
            for (HostVO h : result) {
                try {
                    reserveCapacity(h.getUuid(), allocationSpec.getCpuCapacity(), allocationSpec.getMemoryCapacity());
                    logger.debug(String.format("[Host Allocation]: successfully reserved cpu[%s], memory[%s bytes] on host[uuid:%s] for vm[uuid:%s]",
                            allocationSpec.getCpuCapacity(), allocationSpec.getMemoryCapacity(), h.getUuid(), allocationSpec.getVmInstance().getUuid()));
                    completion.success(HostInventory.valueOf(h));
                    return;
                } catch (UnableToReserveHostCapacityException e) {
                    // This host filled up between flow evaluation and now; try the next.
                    logger.debug(String.format("[Host Allocation]: %s on host[uuid:%s]. try next one", e.getMessage(), h.getUuid()));
                }
            }

            if (paginationInfo != null) {
                logger.debug("[Host Allocation]: unable to reserve cpu/memory on all candidate hosts; because of pagination is enabled, allocation will start over");
                seriesErrorWhenPagination.add(String.format("{unable to reserve cpu[%s], memory[%s bytes] on all candidate hosts}",
                        allocationSpec.getCpuCapacity(), allocationSpec.getMemoryCapacity()));
                startOver();
            } else {
                completion.fail(errf.instantiateErrorCode(HostAllocatorError.NO_AVAILABLE_HOST,
                        "reservation on cpu/memory failed on all candidates host"));
            }
        } catch (Throwable t) {
            logger.debug(t.getClass().getName(), t);
            completion.fail(errf.throwableToInternalError(t));
        }
    }

    // Restarts the chain from the first flow with a fresh candidate set
    // (used only when pagination is enabled).
    private void startOver() {
        it = flows.iterator();
        result = null;
        runFlow(it.next());
    }

    /**
     * Wires the chain state into {@code flow} and executes it. A pagination
     * "intermediate error" restarts the chain; any other OperationFailure
     * fails the allocation; unexpected throwables become internal errors.
     */
    private void runFlow(AbstractHostAllocatorFlow flow) {
        try {
            lastFlow = flow;
            flow.setCandidates(result);
            flow.setSpec(allocationSpec);
            flow.setTrigger(this);
            flow.setPaginationInfo(paginationInfo);
            flow.allocate();
        } catch (OperationFailureException ofe) {
            if (ofe.getErrorCode().getCode().equals(HostAllocatorConstant.PAGINATION_INTERMEDIATE_ERROR.getCode())) {
                logger.debug(String.format("[Host Allocation]: intermediate failure; " +
                        "because of pagination, will start over allocation again; " +
                        "current pagination info %s; failure details: %s",
                        JSONObjectUtil.toJsonString(paginationInfo), ofe.getErrorCode().getDetails()));
                seriesErrorWhenPagination.add(String.format("{%s}", ofe.getErrorCode().getDetails()));
                startOver();
            } else {
                fail(ofe.getErrorCode());
            }
        } catch (Throwable t) {
            logger.warn("unhandled throwable", t);
            completion.fail(errf.throwableToInternalError(t));
        }
    }

    /**
     * Common entry for both allocate and dryRun: runs pre-start extensions,
     * configures pagination from global config, and kicks off the first flow.
     */
    private void start() {
        for (HostAllocatorPreStartExtensionPoint processor : pluginRgty.getExtensionList(HostAllocatorPreStartExtensionPoint.class)) {
            processor.beforeHostAllocatorStart(allocationSpec, flows);
        }

        if (HostAllocatorGlobalConfig.USE_PAGINATION.value(Boolean.class)) {
            paginationInfo = new HostAllocationPaginationInfo();
            paginationInfo.setLimit(HostAllocatorGlobalConfig.PAGINATION_LIMIT.value(Integer.class));
        }
        it = flows.iterator();
        DebugUtils.Assert(it.hasNext(), "can not run an empty host allocation chain");
        runFlow(it.next());
    }

    private void allocate(ReturnValueCompletion<HostInventory> completion) {
        isDryRun = false;
        this.completion = completion;
        start();
    }

    private void dryRun(ReturnValueCompletion<List<HostInventory>> completion) {
        isDryRun = true;
        this.dryRunCompletion = completion;
        start();
    }

    /**
     * Called by a flow to pass its surviving candidates to the next flow.
     * Requires a non-null, non-empty list; calls done() after the last flow.
     */
    @Override
    public void next(List<HostVO> candidates) {
        DebugUtils.Assert(candidates != null, "cannot pass null to next() method");
        DebugUtils.Assert(!candidates.isEmpty(), "cannot pass empty candidates to next() method");
        result = candidates;
        VmInstanceInventory vm = allocationSpec.getVmInstance();
        logger.debug(String.format("[Host Allocation]: flow[%s] successfully found %s candidate hosts for vm[uuid:%s, name:%s]",
                lastFlow.getClass().getName(), result.size(), vm.getUuid(), vm.getName()));

        if (logger.isTraceEnabled()) {
            StringBuilder sb = new StringBuilder("[Host Allocation Details]:");
            for (HostVO vo : result) {
                sb.append(String.format("\ncandidate host[name:%s, uuid:%s, zoneUuid:%s, clusterUuid:%s, hypervisorType:%s]",
                        vo.getName(), vo.getUuid(), vo.getZoneUuid(), vo.getClusterUuid(), vo.getHypervisorType()));
            }
            logger.trace(sb.toString());
        }

        if (it.hasNext()) {
            runFlow(it.next());
            return;
        }

        done();
    }

    /** Called by a flow to pass through without changing the candidate set. */
    @Override
    public void skip() {
        logger.debug(String.format("[Host Allocation]: flow[%s] asks to skip itself, we are running to the next flow", lastFlow.getClass()));
        if (it.hasNext()) {
            runFlow(it.next());
            return;
        }

        done();
    }

    @Override
    public int indexOfFlow(AbstractHostAllocatorFlow flow) {
        return flows.indexOf(flow);
    }

    /**
     * Records the failure and finishes via done(). When pagination accumulated
     * earlier errors, those are folded into a combined NO_AVAILABLE_HOST error.
     */
    private void fail(ErrorCode errorCode) {
        result = null;
        if (seriesErrorWhenPagination.isEmpty()) {
            logger.debug(String.format("[Host Allocation] flow[%s] failed to allocate host; %s",
                    lastFlow.getClass().getName(), errorCode.getDetails()));
            this.errorCode = errorCode;
        } else {
            String err = String.format("unable to allocate hosts; due to pagination is enabled, " +
                    "there might be several allocation failures happened before;" +
                    " the error list is %s", seriesErrorWhenPagination);
            logger.debug(err);
            this.errorCode = errf.instantiateErrorCode(HostAllocatorError.NO_AVAILABLE_HOST, err);
        }
        done();
    }

    @Override
    public void allocate(HostAllocatorSpec spec, ReturnValueCompletion<HostInventory> completion) {
        this.allocationSpec = spec;
        allocate(completion);
    }

    @Override
    public void dryRun(HostAllocatorSpec spec, ReturnValueCompletion<List<HostInventory>> completion) {
        this.allocationSpec = spec;
        dryRun(completion);
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer;

import java.util.LinkedList;
import java.util.Queue;
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerResourceFailedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerResourceLocalizedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizerResourceRequestEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceFailedLocalizationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceLocalizedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceRecoveredEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceReleaseEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceRequestEvent;
import org.apache.hadoop.yarn.state.InvalidStateTransitionException;
import org.apache.hadoop.yarn.state.SingleArcTransition;
import org.apache.hadoop.yarn.state.StateMachine;
import org.apache.hadoop.yarn.state.StateMachineFactory;

/**
 * Datum representing a localized resource. Holds the statemachine of a
 * resource. State of the resource is one of {@link ResourceState}.
 *
 */
public class LocalizedResource implements EventHandler<ResourceEvent> {

  private static final Log LOG = LogFactory.getLog(LocalizedResource.class);

  // Path on local disk once localized; volatile so readers outside the
  // write lock see the latest value.
  volatile Path localPath;
  // Size in bytes once localized; -1 until then.
  volatile long size = -1;
  final LocalResourceRequest rsrc;
  final Dispatcher dispatcher;
  final StateMachine<ResourceState,ResourceEventType,ResourceEvent>
    stateMachine;
  // Binary semaphore guarding exclusive localization of this resource
  // (see tryAcquire()/unlock()).
  final Semaphore sem = new Semaphore(1);
  final Queue<ContainerId> ref; // Queue of containers using this localized
                                // resource
  private final Lock readLock;
  private final Lock writeLock;
  // Last access time (nanoTime ticks); updated on successful release.
  final AtomicLong timestamp = new AtomicLong(currentTime());

  private static final StateMachineFactory<LocalizedResource,ResourceState,
      ResourceEventType,ResourceEvent> stateMachineFactory =
        new StateMachineFactory<LocalizedResource,ResourceState,
          ResourceEventType,ResourceEvent>(ResourceState.INIT)

    // From INIT (ref == 0, awaiting req)
    .addTransition(ResourceState.INIT, ResourceState.DOWNLOADING,
        ResourceEventType.REQUEST, new FetchResourceTransition())
    .addTransition(ResourceState.INIT, ResourceState.LOCALIZED,
        ResourceEventType.RECOVERED, new RecoveredTransition())

    // From DOWNLOADING (ref > 0, may be localizing)
    .addTransition(ResourceState.DOWNLOADING, ResourceState.DOWNLOADING,
        ResourceEventType.REQUEST, new FetchResourceTransition()) // TODO: Duplicate addition!!
    .addTransition(ResourceState.DOWNLOADING, ResourceState.LOCALIZED,
        ResourceEventType.LOCALIZED, new FetchSuccessTransition())
    .addTransition(ResourceState.DOWNLOADING,ResourceState.DOWNLOADING,
        ResourceEventType.RELEASE, new ReleaseTransition())
    .addTransition(ResourceState.DOWNLOADING, ResourceState.FAILED,
        ResourceEventType.LOCALIZATION_FAILED, new FetchFailedTransition())

    // From LOCALIZED (ref >= 0, on disk)
    .addTransition(ResourceState.LOCALIZED, ResourceState.LOCALIZED,
        ResourceEventType.REQUEST, new LocalizedResourceTransition())
    .addTransition(ResourceState.LOCALIZED, ResourceState.LOCALIZED,
        ResourceEventType.RELEASE, new ReleaseTransition())
    .installTopology();

  public LocalizedResource(LocalResourceRequest rsrc, Dispatcher dispatcher) {
    this.rsrc = rsrc;
    this.dispatcher = dispatcher;
    this.ref = new LinkedList<ContainerId>();

    ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
    this.readLock = readWriteLock.readLock();
    this.writeLock = readWriteLock.writeLock();

    this.stateMachine = stateMachineFactory.make(this);
  }

  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("{ ").append(rsrc.toString()).append(",")
      .append(getState() == ResourceState.LOCALIZED
          ? getLocalPath() + "," + getSize()
          : "pending").append(",[");
    try {
      this.readLock.lock();
      for (ContainerId c : ref) {
        sb.append("(").append(c.toString()).append(")");
      }
      sb.append("],").append(getTimestamp()).append(",").append(getState())
        .append("}");
      return sb.toString();
    } finally {
      this.readLock.unlock();
    }
  }

  // Drops `container` from the reference queue; refreshes the access
  // timestamp only when the container was actually present.
  private void release(ContainerId container) {
    if (ref.remove(container)) {
      // updating the timestamp only in case of success.
      timestamp.set(currentTime());
    } else {
      LOG.info("Container " + container
          + " doesn't exist in the container list of the Resource " + this
          + " to which it sent RELEASE event");
    }
  }

  // Monotonic clock source for the access timestamp (not wall-clock time).
  private long currentTime() {
    return System.nanoTime();
  }

  public ResourceState getState() {
    this.readLock.lock();
    try {
      return stateMachine.getCurrentState();
    } finally {
      this.readLock.unlock();
    }
  }

  public LocalResourceRequest getRequest() {
    return rsrc;
  }

  public Path getLocalPath() {
    return localPath;
  }

  public void setLocalPath(Path localPath) {
    // Scheme/authority are stripped so the stored path is purely local.
    this.localPath = Path.getPathWithoutSchemeAndAuthority(localPath);
  }

  public long getTimestamp() {
    return timestamp.get();
  }

  public long getSize() {
    return size;
  }

  public int getRefCount() {
    return ref.size();
  }

  // Non-blocking attempt to claim exclusive localization rights.
  public boolean tryAcquire() {
    return sem.tryAcquire();
  }

  public void unlock() {
    sem.release();
  }

  /**
   * Feeds the event into the state machine under the write lock. An invalid
   * transition is logged and ignored rather than propagated.
   */
  @Override
  public void handle(ResourceEvent event) {
    try {
      this.writeLock.lock();

      Path resourcePath = event.getLocalResourceRequest().getPath();
      LOG.debug("Processing " + resourcePath + " of type " + event.getType());

      ResourceState oldState = this.stateMachine.getCurrentState();
      ResourceState newState = null;
      try {
        newState = this.stateMachine.doTransition(event.getType(), event);
      } catch (InvalidStateTransitionException e) {
        LOG.warn("Can't handle this event at current state", e);
      }
      if (oldState != newState) {
        if (LOG.isDebugEnabled()) {
          LOG.debug("Resource " + resourcePath + (localPath != null ?
              "(->" + localPath + ")": "") + " transitioned from " + oldState
              + " to " + newState);
        }
      }
    } finally {
      this.writeLock.unlock();
    }
  }

  static abstract class ResourceTransition implements
      SingleArcTransition<LocalizedResource,ResourceEvent> {
    // typedef
  }

  /**
   * Transition from INIT to DOWNLOADING.
   * Sends a {@link LocalizerResourceRequestEvent} to the
   * {@link ResourceLocalizationService}.
   */
  @SuppressWarnings("unchecked") // dispatcher not typed
  private static class FetchResourceTransition extends ResourceTransition {
    @Override
    public void transition(LocalizedResource rsrc, ResourceEvent event) {
      ResourceRequestEvent req = (ResourceRequestEvent) event;
      LocalizerContext ctxt = req.getContext();
      ContainerId container = ctxt.getContainerId();
      rsrc.ref.add(container);
      rsrc.dispatcher.getEventHandler().handle(
          new LocalizerResourceRequestEvent(rsrc, req.getVisibility(), ctxt,
              req.getLocalResourceRequest().getPattern()));
    }
  }

  /**
   * Resource localized, notify waiting containers.
   */
  @SuppressWarnings("unchecked") // dispatcher not typed
  private static class FetchSuccessTransition extends ResourceTransition {
    @Override
    public void transition(LocalizedResource rsrc, ResourceEvent event) {
      ResourceLocalizedEvent locEvent = (ResourceLocalizedEvent) event;
      rsrc.localPath =
          Path.getPathWithoutSchemeAndAuthority(locEvent.getLocation());
      rsrc.size = locEvent.getSize();
      for (ContainerId container : rsrc.ref) {
        rsrc.dispatcher.getEventHandler().handle(
            new ContainerResourceLocalizedEvent(
                container, rsrc.rsrc, rsrc.localPath));
      }
    }
  }

  /**
   * Resource localization failed, notify waiting containers.
   */
  @SuppressWarnings("unchecked")
  private static class FetchFailedTransition extends ResourceTransition {
    @Override
    public void transition(LocalizedResource rsrc, ResourceEvent event) {
      ResourceFailedLocalizationEvent failedEvent =
          (ResourceFailedLocalizationEvent) event;
      Queue<ContainerId> containers = rsrc.ref;
      for (ContainerId container : containers) {
        rsrc.dispatcher.getEventHandler().handle(
            new ContainerResourceFailedEvent(container, failedEvent
                .getLocalResourceRequest(), failedEvent.getDiagnosticMessage()));
      }
    }
  }

  /**
   * Resource already localized, notify immediately.
   */
  @SuppressWarnings("unchecked") // dispatcher not typed
  private static class LocalizedResourceTransition
      extends ResourceTransition {
    @Override
    public void transition(LocalizedResource rsrc, ResourceEvent event) {
      // notify waiting containers
      ResourceRequestEvent reqEvent = (ResourceRequestEvent) event;
      ContainerId container = reqEvent.getContext().getContainerId();
      rsrc.ref.add(container);
      rsrc.dispatcher.getEventHandler().handle(
          new ContainerResourceLocalizedEvent(
              container, rsrc.rsrc, rsrc.localPath));
    }
  }

  /**
   * Decrement resource count, update timestamp.
   */
  private static class ReleaseTransition extends ResourceTransition {
    @Override
    public void transition(LocalizedResource rsrc, ResourceEvent event) {
      // Note: assumes that localizing container must succeed or fail
      ResourceReleaseEvent relEvent = (ResourceReleaseEvent) event;
      rsrc.release(relEvent.getContainer());
    }
  }

  // Restores state recovered from NM restart: path/size come from the
  // recovery record, moving the resource straight to LOCALIZED.
  private static class RecoveredTransition extends ResourceTransition {
    @Override
    public void transition(LocalizedResource rsrc, ResourceEvent event) {
      ResourceRecoveredEvent recoveredEvent = (ResourceRecoveredEvent) event;
      rsrc.localPath = recoveredEvent.getLocalPath();
      rsrc.size = recoveredEvent.getSize();
    }
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.mapper; import com.google.common.collect.Iterators; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; import static org.hamcrest.Matchers.containsString; public class FieldTypeLookupTests extends ESTestCase { public void testEmpty() { FieldTypeLookup lookup = new FieldTypeLookup(); assertNull(lookup.get("foo")); assertNull(lookup.getByIndexName("foo")); assertEquals(Collections.emptySet(), lookup.getTypes("foo")); assertEquals(Collections.emptySet(), lookup.getTypesByIndexName("foo")); Collection<String> names = lookup.simpleMatchToFullName("foo"); assertNotNull(names); assertTrue(names.isEmpty()); names = lookup.simpleMatchToIndexNames("foo"); assertNotNull(names); assertTrue(names.isEmpty()); Iterator<MappedFieldType> itr = lookup.iterator(); assertNotNull(itr); assertFalse(itr.hasNext()); } public void testDefaultMapping() { FieldTypeLookup lookup = new FieldTypeLookup(); try { 
lookup.copyAndAddAll(MapperService.DEFAULT_MAPPING, Collections.<FieldMapper>emptyList(), randomBoolean()); fail(); } catch (IllegalArgumentException expected) { assertEquals("Default mappings should not be added to the lookup", expected.getMessage()); } } public void testAddNewField() { FieldTypeLookup lookup = new FieldTypeLookup(); FakeFieldMapper f = new FakeFieldMapper("foo", "bar"); FieldTypeLookup lookup2 = lookup.copyAndAddAll("type", newList(f), randomBoolean()); assertNull(lookup.get("foo")); assertNull(lookup.get("bar")); assertNull(lookup.getByIndexName("foo")); assertNull(lookup.getByIndexName("bar")); assertEquals(f.fieldType(), lookup2.get("foo")); assertNull(lookup.get("bar")); assertEquals(f.fieldType(), lookup2.getByIndexName("bar")); assertNull(lookup.getByIndexName("foo")); assertEquals(Collections.emptySet(), lookup.getTypes("foo")); assertEquals(Collections.emptySet(), lookup.getTypesByIndexName("foo")); assertEquals(Collections.emptySet(), lookup.getTypes("bar")); assertEquals(Collections.emptySet(), lookup.getTypesByIndexName("bar")); assertEquals(Collections.singleton("type"), lookup2.getTypes("foo")); assertEquals(Collections.emptySet(), lookup2.getTypesByIndexName("foo")); assertEquals(Collections.emptySet(), lookup2.getTypes("bar")); assertEquals(Collections.singleton("type"), lookup2.getTypesByIndexName("bar")); assertEquals(1, Iterators.size(lookup2.iterator())); } public void testAddExistingField() { FakeFieldMapper f = new FakeFieldMapper("foo", "foo"); FakeFieldMapper f2 = new FakeFieldMapper("foo", "foo"); FieldTypeLookup lookup = new FieldTypeLookup(); lookup = lookup.copyAndAddAll("type1", newList(f), randomBoolean()); FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2), randomBoolean()); assertSame(f2.fieldType(), lookup2.get("foo")); assertSame(f2.fieldType(), lookup2.getByIndexName("foo")); assertEquals(1, Iterators.size(lookup2.iterator())); } public void testAddExistingIndexName() { FakeFieldMapper f = new 
FakeFieldMapper("foo", "foo"); FakeFieldMapper f2 = new FakeFieldMapper("bar", "foo"); FieldTypeLookup lookup = new FieldTypeLookup(); lookup = lookup.copyAndAddAll("type1", newList(f), randomBoolean()); FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2), randomBoolean()); assertSame(f.fieldType(), lookup2.get("foo")); assertSame(f2.fieldType(), lookup2.get("bar")); assertSame(f2.fieldType(), lookup2.getByIndexName("foo")); assertEquals(2, Iterators.size(lookup2.iterator())); } public void testAddExistingFullName() { FakeFieldMapper f = new FakeFieldMapper("foo", "foo"); FakeFieldMapper f2 = new FakeFieldMapper("foo", "bar"); FieldTypeLookup lookup = new FieldTypeLookup(); lookup = lookup.copyAndAddAll("type1", newList(f), randomBoolean()); try { lookup.copyAndAddAll("type2", newList(f2), randomBoolean()); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("mapper [foo] has different [index_name]")); } } public void testAddExistingBridgeName() { FakeFieldMapper f = new FakeFieldMapper("foo", "foo"); FakeFieldMapper f2 = new FakeFieldMapper("bar", "bar"); FieldTypeLookup lookup = new FieldTypeLookup(); lookup = lookup.copyAndAddAll("type1", newList(f, f2), randomBoolean()); try { FakeFieldMapper f3 = new FakeFieldMapper("foo", "bar"); lookup.copyAndAddAll("type2", newList(f3), randomBoolean()); } catch (IllegalStateException e) { assertTrue(e.getMessage().contains("insane mappings")); } try { FakeFieldMapper f3 = new FakeFieldMapper("bar", "foo"); lookup.copyAndAddAll("type2", newList(f3), randomBoolean()); } catch (IllegalStateException e) { assertTrue(e.getMessage().contains("insane mappings")); } } public void testCheckCompatibilityMismatchedTypes() { FieldMapper f1 = new FakeFieldMapper("foo", "bar"); FieldTypeLookup lookup = new FieldTypeLookup(); lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean()); MappedFieldType ft2 = FakeFieldMapper.makeOtherFieldType("foo", "foo"); FieldMapper f2 = new 
FakeFieldMapper("foo", ft2); try { lookup.copyAndAddAll("type2", newList(f2), false); fail("expected type mismatch"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("cannot be changed from type [faketype] to [otherfaketype]")); } // fails even if updateAllTypes == true try { lookup.copyAndAddAll("type2", newList(f2), true); fail("expected type mismatch"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("cannot be changed from type [faketype] to [otherfaketype]")); } } public void testCheckCompatibilityConflict() { FieldMapper f1 = new FakeFieldMapper("foo", "bar"); FieldTypeLookup lookup = new FieldTypeLookup(); lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean()); MappedFieldType ft2 = FakeFieldMapper.makeFieldType("foo", "bar"); ft2.setBoost(2.0f); FieldMapper f2 = new FakeFieldMapper("foo", ft2); try { // different type lookup.copyAndAddAll("type2", newList(f2), false); fail("expected conflict"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("to update [boost] across all types")); } lookup.copyAndAddAll("type", newList(f2), false); // boost is updateable, so ok since we are implicitly updating all types lookup.copyAndAddAll("type2", newList(f2), true); // boost is updateable, so ok if forcing // now with a non changeable setting MappedFieldType ft3 = FakeFieldMapper.makeFieldType("foo", "bar"); ft3.setStored(true); FieldMapper f3 = new FakeFieldMapper("foo", ft3); try { lookup.copyAndAddAll("type2", newList(f3), false); fail("expected conflict"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("has different [store] values")); } // even with updateAllTypes == true, incompatible try { lookup.copyAndAddAll("type2", newList(f3), true); fail("expected conflict"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("has different [store] values")); } } public void testSimpleMatchIndexNames() { FakeFieldMapper f1 = new 
FakeFieldMapper("foo", "baz"); FakeFieldMapper f2 = new FakeFieldMapper("bar", "boo"); FieldTypeLookup lookup = new FieldTypeLookup(); lookup = lookup.copyAndAddAll("type", newList(f1, f2), randomBoolean()); Collection<String> names = lookup.simpleMatchToIndexNames("b*"); assertTrue(names.contains("baz")); assertTrue(names.contains("boo")); } public void testSimpleMatchFullNames() { FakeFieldMapper f1 = new FakeFieldMapper("foo", "baz"); FakeFieldMapper f2 = new FakeFieldMapper("bar", "boo"); FieldTypeLookup lookup = new FieldTypeLookup(); lookup = lookup.copyAndAddAll("type", newList(f1, f2), randomBoolean()); Collection<String> names = lookup.simpleMatchToFullName("b*"); assertTrue(names.contains("foo")); assertTrue(names.contains("bar")); } public void testIteratorImmutable() { FakeFieldMapper f1 = new FakeFieldMapper("foo", "bar"); FieldTypeLookup lookup = new FieldTypeLookup(); lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean()); try { Iterator<MappedFieldType> itr = lookup.iterator(); assertTrue(itr.hasNext()); assertEquals(f1.fieldType(), itr.next()); itr.remove(); fail("remove should have failed"); } catch (UnsupportedOperationException e) { // expected } } static List<FieldMapper> newList(FieldMapper... mapper) { return Arrays.asList(mapper); } // this sucks how much must be overridden just do get a dummy field mapper... 
static class FakeFieldMapper extends FieldMapper { static Settings dummySettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); public FakeFieldMapper(String fullName, String indexName) { super(fullName, makeFieldType(fullName, indexName), makeFieldType(fullName, indexName), dummySettings, null, null); } public FakeFieldMapper(String fullName, MappedFieldType fieldType) { super(fullName, fieldType, fieldType, dummySettings, null, null); } static MappedFieldType makeFieldType(String fullName, String indexName) { FakeFieldType fieldType = new FakeFieldType(); fieldType.setNames(new MappedFieldType.Names(indexName, indexName, fullName)); return fieldType; } static MappedFieldType makeOtherFieldType(String fullName, String indexName) { OtherFakeFieldType fieldType = new OtherFakeFieldType(); fieldType.setNames(new MappedFieldType.Names(indexName, indexName, fullName)); return fieldType; } static class FakeFieldType extends MappedFieldType { public FakeFieldType() {} protected FakeFieldType(FakeFieldType ref) { super(ref); } @Override public MappedFieldType clone() { return new FakeFieldType(this); } @Override public String typeName() { return "faketype"; } } static class OtherFakeFieldType extends MappedFieldType { public OtherFakeFieldType() {} protected OtherFakeFieldType(OtherFakeFieldType ref) { super(ref); } @Override public MappedFieldType clone() { return new OtherFakeFieldType(this); } @Override public String typeName() { return "otherfaketype"; } } @Override protected String contentType() { return null; } @Override protected void parseCreateField(ParseContext context, List list) throws IOException {} } }
/******************************************************************************* * Copyright 2015 EMBL - European Bioinformatics Institute * * Licensed under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific * language governing permissions and limitations under the * License. *******************************************************************************/ package uk.ac.ebi.phenotype.chart; import org.mousephenotype.cda.enumerations.SexType; import org.mousephenotype.cda.enumerations.ZygosityType; import org.springframework.boot.configurationprocessor.json.JSONException; import org.springframework.boot.configurationprocessor.json.JSONObject; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * class to keep static variables for New Design colors * @author jwarren * */ public class ChartColors { public static final List<String> highDifferenceColors=java.util.Arrays.asList("239, 123, 11" , "9, 120, 161", "119, 119, 119", "238, 238, 180","36, 139, 75", "191, 75, 50", "255, 201, 67", "191, 151, 50", "239, 123, 11" , "247, 157, 70", "247, 181, 117", "191, 75, 50", "151, 51, 51", "144, 195, 212"); //HEX #EF7B0B //rgb(239, 123, 11) // NOTE: maleRgb and femaleRgb MUST be of the same length or an IndexOutOfBoundsException will occur. 
public static final List<String>maleRgb=java.util.Arrays.asList( "9, 120, 161" , "61, 167, 208" , "100, 178, 208" , "3, 77, 105" , "36, 139, 75" , "1, 121, 46" , "51, 51, 51" , "191, 151, 50"); //rgb(239, 123, 11) //rgb(247, 157, 70) //rgb(247, 181, 117) //rgb(191, 75, 50) //rgb(166, 30, 1) public static final List<String>femaleRgb=java.util.Arrays.asList( "239, 123, 11" , "247, 157, 70" , "247, 181, 117" , "191, 75, 50" , "166, 30, 1" , "255, 201, 67" , "144, 195, 212" , "119, 119, 119"); public static final Double alphaOpaque = 1.0; public static final Double alphaTranslucid70 = 0.7; public static final Double alphaTranslucid50 = 0.5; public static final Double alphaTranslucid20 = 0.2; private static String wtColor="239, 123, 11"; private static String mutantColor="9, 120, 161"; /** * get a string to represent rgba for highcharts for either sex and choose your alpha (opacity) 0.0-1.0 * @param sexType * @param index * @param alpha * @return */ public static String getRgbaString(SexType sexType, int index, Double alpha) { String defaultColor="\'rgba(9, 120, 161, 0.5)\'"; if(index>=maleRgb.size()) { System.err.println("no color found returning default"); index=index % maleRgb.size(); return defaultColor; } if(sexType.equals(SexType.male)) { return "\'rgba("+maleRgb.get(index)+"," +alpha+")\'"; } if(sexType.equals(SexType.female)) { return "\'rgba("+femaleRgb.get(index)+"," +alpha+")\'"; } System.err.println("no color found returning default"); return defaultColor; } public static String getDefaultControlColor (Double alpha){ return "\'rgba("+femaleRgb.get(3)+"," +alpha+")\'"; } public static List<String> getFemaleMaleColorsRgba(Double alpha) { List<String> colorStrings=new ArrayList<String>(); for(int i=0; i<ChartColors.maleRgb.size(); i++) { colorStrings.add(getRgbaString(SexType.female, i, alphaTranslucid70)); colorStrings.add(getRgbaString(SexType.male, i, alphaTranslucid70)); } return colorStrings; } public static List<String> getMaleColorsRgba(Double alpha) { 
List<String> colorStrings = new ArrayList<String>(); for (String colorString : ChartColors.maleRgb) { colorStrings.add("\'rgba(" + colorString + "," + alpha + ")\'"); } return colorStrings; } public static List<String> getHighDifferenceColorsRgba(Double alpha) { List<String> colorStrings=new ArrayList<String>(); for(String colorString:ChartColors.highDifferenceColors) { colorStrings.add("\'rgba("+colorString+"," +alpha+")\'"); } return colorStrings; } public static String getMutantColor(Double alpha) { return "\'rgba("+mutantColor+"," +alpha+")\'"; } public static String getWTColor(Double alpha) { return "\'rgba("+wtColor+"," +alpha+")\'"; } /** * convenience method that uses default scatter alpha for getMarkerString * @param sex * @param zygosityType * @return */ public static String getMarkerString(SexType sex, ZygosityType zygosityType) { return getMarkerString(sex, zygosityType, null); } /** * Get a marker string for use in highcharts to display the data with consitent colors and symbols based on these parameters * @param sex * @param zygosityType if null then its WT * @return */ public static String getMarkerString(SexType sex, ZygosityType zygosityType, Double alpha) { Double alphaMutants = alpha; Double alphaControl = alpha; Double alphaControlLine = alpha; if (alpha == null) { alphaMutants = ChartColors.alphaTranslucid70; alphaControl = ChartColors.alphaTranslucid20; alphaControlLine = ChartColors.alphaTranslucid50; } String symbol="circle"; String lineColor=ChartColors.getMutantColor(alphaMutants); String fillColor=ChartColors.getMutantColor(alphaMutants); if (zygosityType == null) {// then its WT fillColor = ChartColors.getWTColor(alphaControl); lineColor = ChartColors.getWTColor(alphaControlLine); } if (sex.equals(SexType.male)) { symbol="triangle"; } String marker = "marker:{" + " symbol: '" + symbol + "', " + " fillColor: " + fillColor + "," + " lineWidth: 1," + " radius: 3," + " lineColor: " + lineColor + " " + "}"; return marker; } public static 
JSONObject getMarkerJSONObject(SexType sex, ZygosityType zygosityType) throws JSONException { String markerString=getMarkerString(sex, zygosityType).replace("marker:", ""); return new JSONObject(markerString); } /** * get a deefault list of colors for WT and zygosities, color RGB String e.g. Homozygous, "239, 123, 11" * @return */ public static Map<String,String> getZygosityColorMap(){ //"239, 123, 11" , "9, 120, 161", "119, 119, 119", Map<String,String> zygColorMap=new LinkedHashMap<>(); zygColorMap.put("WT", ChartColors.getWTColor(alphaOpaque)); zygColorMap.put(ZygosityType.homozygote.name(),getHighDifferenceColorsRgba(alphaOpaque).get(1) ); zygColorMap.put(ZygosityType.heterozygote.name(),getHighDifferenceColorsRgba(alphaOpaque).get(2)); zygColorMap.put(ZygosityType.hemizygote.name(),getHighDifferenceColorsRgba(alphaOpaque).get(3) ); return zygColorMap; } }
/*
 * Copyright 2018 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.cloud.dialogflow.v2beta1;

import static com.google.cloud.dialogflow.v2beta1.ContextsClient.ListContextsPagedResponse;

import com.google.api.core.ApiFunction;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.ClientSettings;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.cloud.dialogflow.v2beta1.stub.ContextsStubSettings;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.util.List;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS
// NOTE(review): generated by gapic-generator — do not hand-edit; regenerate instead.
// Every accessor below simply delegates to the underlying ContextsStubSettings.
/**
 * Settings class to configure an instance of {@link ContextsClient}.
 *
 * <p>The default instance has everything set to sensible defaults:
 *
 * <ul>
 *   <li>The default service address (dialogflow.googleapis.com) and default port (443) are used.
 *   <li>Credentials are acquired automatically through Application Default Credentials.
 *   <li>Retries are configured for idempotent methods but not for non-idempotent methods.
 * </ul>
 *
 * <p>The builder of this class is recursive, so contained classes are themselves builders. When
 * build() is called, the tree of builders is called to create the complete settings object. For
 * example, to set the total timeout of getContext to 30 seconds:
 *
 * <pre>
 * <code>
 * ContextsSettings.Builder contextsSettingsBuilder =
 *     ContextsSettings.newBuilder();
 * contextsSettingsBuilder.getContextSettings().getRetrySettings().toBuilder()
 *     .setTotalTimeout(Duration.ofSeconds(30));
 * ContextsSettings contextsSettings = contextsSettingsBuilder.build();
 * </code>
 * </pre>
 */
@Generated("by gapic-generator")
@BetaApi
public class ContextsSettings extends ClientSettings<ContextsSettings> {
  /** Returns the object with the settings used for calls to listContexts. */
  public PagedCallSettings<ListContextsRequest, ListContextsResponse, ListContextsPagedResponse>
      listContextsSettings() {
    return ((ContextsStubSettings) getStubSettings()).listContextsSettings();
  }

  /** Returns the object with the settings used for calls to getContext. */
  public UnaryCallSettings<GetContextRequest, Context> getContextSettings() {
    return ((ContextsStubSettings) getStubSettings()).getContextSettings();
  }

  /** Returns the object with the settings used for calls to createContext. */
  public UnaryCallSettings<CreateContextRequest, Context> createContextSettings() {
    return ((ContextsStubSettings) getStubSettings()).createContextSettings();
  }

  /** Returns the object with the settings used for calls to updateContext. */
  public UnaryCallSettings<UpdateContextRequest, Context> updateContextSettings() {
    return ((ContextsStubSettings) getStubSettings()).updateContextSettings();
  }

  /** Returns the object with the settings used for calls to deleteContext. */
  public UnaryCallSettings<DeleteContextRequest, Empty> deleteContextSettings() {
    return ((ContextsStubSettings) getStubSettings()).deleteContextSettings();
  }

  /** Returns the object with the settings used for calls to deleteAllContexts. */
  public UnaryCallSettings<DeleteAllContextsRequest, Empty> deleteAllContextsSettings() {
    return ((ContextsStubSettings) getStubSettings()).deleteAllContextsSettings();
  }

  /** Wraps an already-built stub settings object in a ContextsSettings. */
  public static final ContextsSettings create(ContextsStubSettings stub) throws IOException {
    return new ContextsSettings.Builder(stub.toBuilder()).build();
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return ContextsStubSettings.defaultExecutorProviderBuilder();
  }

  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return ContextsStubSettings.getDefaultEndpoint();
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return ContextsStubSettings.getDefaultServiceScopes();
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return ContextsStubSettings.defaultCredentialsProviderBuilder();
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return ContextsStubSettings.defaultGrpcTransportProviderBuilder();
  }

  public static TransportChannelProvider defaultTransportChannelProvider() {
    return ContextsStubSettings.defaultTransportChannelProvider();
  }

  @BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ContextsStubSettings.defaultApiClientHeaderProviderBuilder();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  protected ContextsSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
  }

  /** Builder for ContextsSettings. */
  public static class Builder extends ClientSettings.Builder<ContextsSettings, Builder> {
    protected Builder() throws IOException {
      this((ClientContext) null);
    }

    protected Builder(ClientContext clientContext) {
      super(ContextsStubSettings.newBuilder(clientContext));
    }

    private static Builder createDefault() {
      return new Builder(ContextsStubSettings.newBuilder());
    }

    protected Builder(ContextsSettings settings) {
      super(settings.getStubSettings().toBuilder());
    }

    protected Builder(ContextsStubSettings.Builder stubSettings) {
      super(stubSettings);
    }

    public ContextsStubSettings.Builder getStubSettingsBuilder() {
      return ((ContextsStubSettings.Builder) getStubSettings());
    }

    // NEXT_MAJOR_VER: remove 'throws Exception'
    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) throws Exception {
      super.applyToAllUnaryMethods(
          getStubSettingsBuilder().unaryMethodSettingsBuilders(), settingsUpdater);
      return this;
    }

    /** Returns the builder for the settings used for calls to listContexts. */
    public PagedCallSettings.Builder<
            ListContextsRequest, ListContextsResponse, ListContextsPagedResponse>
        listContextsSettings() {
      return getStubSettingsBuilder().listContextsSettings();
    }

    /** Returns the builder for the settings used for calls to getContext. */
    public UnaryCallSettings.Builder<GetContextRequest, Context> getContextSettings() {
      return getStubSettingsBuilder().getContextSettings();
    }

    /** Returns the builder for the settings used for calls to createContext. */
    public UnaryCallSettings.Builder<CreateContextRequest, Context> createContextSettings() {
      return getStubSettingsBuilder().createContextSettings();
    }

    /** Returns the builder for the settings used for calls to updateContext. */
    public UnaryCallSettings.Builder<UpdateContextRequest, Context> updateContextSettings() {
      return getStubSettingsBuilder().updateContextSettings();
    }

    /** Returns the builder for the settings used for calls to deleteContext. */
    public UnaryCallSettings.Builder<DeleteContextRequest, Empty> deleteContextSettings() {
      return getStubSettingsBuilder().deleteContextSettings();
    }

    /** Returns the builder for the settings used for calls to deleteAllContexts. */
    public UnaryCallSettings.Builder<DeleteAllContextsRequest, Empty> deleteAllContextsSettings() {
      return getStubSettingsBuilder().deleteAllContextsSettings();
    }

    @Override
    public ContextsSettings build() throws IOException {
      return new ContextsSettings(this);
    }
  }
}
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/logging/v2/logging_metrics.proto package com.google.logging.v2; /** * <pre> * Result returned from ListLogMetrics. * </pre> * * Protobuf type {@code google.logging.v2.ListLogMetricsResponse} */ public final class ListLogMetricsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.logging.v2.ListLogMetricsResponse) ListLogMetricsResponseOrBuilder { // Use ListLogMetricsResponse.newBuilder() to construct. private ListLogMetricsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListLogMetricsResponse() { metrics_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private ListLogMetricsResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!input.skipField(tag)) { done = true; } break; } case 10: { if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { metrics_ = new java.util.ArrayList<com.google.logging.v2.LogMetric>(); mutable_bitField0_ |= 0x00000001; } metrics_.add( input.readMessage(com.google.logging.v2.LogMetric.parser(), extensionRegistry)); break; } case 18: { java.lang.String s = input.readStringRequireUtf8(); nextPageToken_ = s; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) == 
0x00000001)) { metrics_ = java.util.Collections.unmodifiableList(metrics_); } makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.logging.v2.LoggingMetricsProto.internal_static_google_logging_v2_ListLogMetricsResponse_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.logging.v2.LoggingMetricsProto.internal_static_google_logging_v2_ListLogMetricsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.logging.v2.ListLogMetricsResponse.class, com.google.logging.v2.ListLogMetricsResponse.Builder.class); } private int bitField0_; public static final int METRICS_FIELD_NUMBER = 1; private java.util.List<com.google.logging.v2.LogMetric> metrics_; /** * <pre> * A list of logs-based metrics. * </pre> * * <code>repeated .google.logging.v2.LogMetric metrics = 1;</code> */ public java.util.List<com.google.logging.v2.LogMetric> getMetricsList() { return metrics_; } /** * <pre> * A list of logs-based metrics. * </pre> * * <code>repeated .google.logging.v2.LogMetric metrics = 1;</code> */ public java.util.List<? extends com.google.logging.v2.LogMetricOrBuilder> getMetricsOrBuilderList() { return metrics_; } /** * <pre> * A list of logs-based metrics. * </pre> * * <code>repeated .google.logging.v2.LogMetric metrics = 1;</code> */ public int getMetricsCount() { return metrics_.size(); } /** * <pre> * A list of logs-based metrics. * </pre> * * <code>repeated .google.logging.v2.LogMetric metrics = 1;</code> */ public com.google.logging.v2.LogMetric getMetrics(int index) { return metrics_.get(index); } /** * <pre> * A list of logs-based metrics. 
* </pre> * * <code>repeated .google.logging.v2.LogMetric metrics = 1;</code> */ public com.google.logging.v2.LogMetricOrBuilder getMetricsOrBuilder( int index) { return metrics_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; private volatile java.lang.Object nextPageToken_; /** * <pre> * If there might be more results than appear in this response, then * `nextPageToken` is included. To get the next set of results, call this * method again using the value of `nextPageToken` as `pageToken`. * </pre> * * <code>string next_page_token = 2;</code> */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * <pre> * If there might be more results than appear in this response, then * `nextPageToken` is included. To get the next set of results, call this * method again using the value of `nextPageToken` as `pageToken`. 
* </pre> * * <code>string next_page_token = 2;</code> */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < metrics_.size(); i++) { output.writeMessage(1, metrics_.get(i)); } if (!getNextPageTokenBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < metrics_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, metrics_.get(i)); } if (!getNextPageTokenBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.logging.v2.ListLogMetricsResponse)) { return super.equals(obj); } com.google.logging.v2.ListLogMetricsResponse other = (com.google.logging.v2.ListLogMetricsResponse) obj; boolean result = true; result = result && getMetricsList() .equals(other.getMetricsList()); result = result && getNextPageToken() .equals(other.getNextPageToken()); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = 
(19 * hash) + getDescriptor().hashCode(); if (getMetricsCount() > 0) { hash = (37 * hash) + METRICS_FIELD_NUMBER; hash = (53 * hash) + getMetricsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.logging.v2.ListLogMetricsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.logging.v2.ListLogMetricsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.logging.v2.ListLogMetricsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.logging.v2.ListLogMetricsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.logging.v2.ListLogMetricsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.logging.v2.ListLogMetricsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.logging.v2.ListLogMetricsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.logging.v2.ListLogMetricsResponse 
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.logging.v2.ListLogMetricsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.logging.v2.ListLogMetricsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.logging.v2.ListLogMetricsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.logging.v2.ListLogMetricsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.logging.v2.ListLogMetricsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Result returned from ListLogMetrics. 
* </pre> * * Protobuf type {@code google.logging.v2.ListLogMetricsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.logging.v2.ListLogMetricsResponse) com.google.logging.v2.ListLogMetricsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.logging.v2.LoggingMetricsProto.internal_static_google_logging_v2_ListLogMetricsResponse_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.logging.v2.LoggingMetricsProto.internal_static_google_logging_v2_ListLogMetricsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.logging.v2.ListLogMetricsResponse.class, com.google.logging.v2.ListLogMetricsResponse.Builder.class); } // Construct using com.google.logging.v2.ListLogMetricsResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getMetricsFieldBuilder(); } } public Builder clear() { super.clear(); if (metricsBuilder_ == null) { metrics_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { metricsBuilder_.clear(); } nextPageToken_ = ""; return this; } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.logging.v2.LoggingMetricsProto.internal_static_google_logging_v2_ListLogMetricsResponse_descriptor; } public com.google.logging.v2.ListLogMetricsResponse getDefaultInstanceForType() { return com.google.logging.v2.ListLogMetricsResponse.getDefaultInstance(); } public com.google.logging.v2.ListLogMetricsResponse build() { 
// ---------------------------------------------------------------------------
// NOTE(review): protoc-generated code (interior of ListLogMetricsResponse and
// its Builder).  The comments below were added for readability only; the
// logic must stay in lockstep with the protobuf code generator and should
// never be hand-edited.
// ---------------------------------------------------------------------------

      // Tail of build(): validate the assembled message.  The throw is
      // unreachable in practice because isInitialized() below always
      // returns true (proto3 message with no required fields).
      com.google.logging.v2.ListLogMetricsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    /**
     * Builds the message from the builder's current state without the
     * initialization check performed by {@code build()}.
     */
    public com.google.logging.v2.ListLogMetricsResponse buildPartial() {
      com.google.logging.v2.ListLogMetricsResponse result = new com.google.logging.v2.ListLogMetricsResponse(this);
      int from_bitField0_ = bitField0_;  // generated bookkeeping; unused for this message
      int to_bitField0_ = 0;
      if (metricsBuilder_ == null) {
        // Plain-list mode: freeze the list so the built message is immutable,
        // then clear the "mutable" bit so this builder copies-on-write later.
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          metrics_ = java.util.Collections.unmodifiableList(metrics_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.metrics_ = metrics_;
      } else {
        // Field-builder mode: delegate list construction to the nested builder.
        result.metrics_ = metricsBuilder_.build();
      }
      result.nextPageToken_ = nextPageToken_;
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    // Boilerplate overrides that only narrow the return type to Builder.
    public Builder clone() {
      return (Builder) super.clone();
    }
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }

    /** Dispatches to the type-specific mergeFrom when possible. */
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.logging.v2.ListLogMetricsResponse) {
        return mergeFrom((com.google.logging.v2.ListLogMetricsResponse)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    /**
     * Merges {@code other} into this builder: appends other's metrics and
     * overwrites nextPageToken when other's is non-empty.
     */
    public Builder mergeFrom(com.google.logging.v2.ListLogMetricsResponse other) {
      if (other == com.google.logging.v2.ListLogMetricsResponse.getDefaultInstance()) return this;
      if (metricsBuilder_ == null) {
        // Plain-list mode: share other's (immutable) list when ours is empty,
        // otherwise copy-on-write and append.
        if (!other.metrics_.isEmpty()) {
          if (metrics_.isEmpty()) {
            metrics_ = other.metrics_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureMetricsIsMutable();
            metrics_.addAll(other.metrics_);
          }
          onChanged();
        }
      } else {
        // Field-builder mode: if our builder is empty, discard it and adopt
        // other's list directly (re-creating the builder only when the
        // runtime forces field builders); otherwise append via the builder.
        if (!other.metrics_.isEmpty()) {
          if (metricsBuilder_.isEmpty()) {
            metricsBuilder_.dispose();
            metricsBuilder_ = null;
            metrics_ = other.metrics_;
            bitField0_ = (bitField0_ & ~0x00000001);
            metricsBuilder_ =
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                 getMetricsFieldBuilder() : null;
          } else {
            metricsBuilder_.addAllMessages(other.metrics_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        onChanged();
      }
      onChanged();
      return this;
    }

    // Proto3 message with no required fields: always initialized.
    public final boolean isInitialized() {
      return true;
    }

    /**
     * Parses from a stream and merges the result into this builder.  On a
     * parse failure any partially-parsed message is still merged (finally
     * block) before the error is rethrown as its underlying IOException.
     */
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.logging.v2.ListLogMetricsResponse parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.logging.v2.ListLogMetricsResponse) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    // Bit 0x00000001 tracks whether metrics_ is currently a mutable list.
    private int bitField0_;

    private java.util.List<com.google.logging.v2.LogMetric> metrics_ =
      java.util.Collections.emptyList();

    /** Copy-on-write: replace the (possibly shared/immutable) list with a mutable copy. */
    private void ensureMetricsIsMutable() {
      if (!((bitField0_ & 0x00000001) == 0x00000001)) {
        metrics_ = new java.util.ArrayList<com.google.logging.v2.LogMetric>(metrics_);
        bitField0_ |= 0x00000001;
       }
    }

    // Lazily created; once non-null, metrics_ is owned by this builder.
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.logging.v2.LogMetric, com.google.logging.v2.LogMetric.Builder, com.google.logging.v2.LogMetricOrBuilder> metricsBuilder_;

    /** A list of logs-based metrics. <code>repeated .google.logging.v2.LogMetric metrics = 1;</code> */
    public java.util.List<com.google.logging.v2.LogMetric> getMetricsList() {
      if (metricsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(metrics_);
      } else {
        return metricsBuilder_.getMessageList();
      }
    }
    /** A list of logs-based metrics. <code>repeated .google.logging.v2.LogMetric metrics = 1;</code> */
    public int getMetricsCount() {
      if (metricsBuilder_ == null) {
        return metrics_.size();
      } else {
        return metricsBuilder_.getCount();
      }
    }
    /** A list of logs-based metrics. <code>repeated .google.logging.v2.LogMetric metrics = 1;</code> */
    public com.google.logging.v2.LogMetric getMetrics(int index) {
      if (metricsBuilder_ == null) {
        return metrics_.get(index);
      } else {
        return metricsBuilder_.getMessage(index);
      }
    }
    /** A list of logs-based metrics. <code>repeated .google.logging.v2.LogMetric metrics = 1;</code> */
    public Builder setMetrics(
        int index, com.google.logging.v2.LogMetric value) {
      if (metricsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureMetricsIsMutable();
        metrics_.set(index, value);
        onChanged();
      } else {
        metricsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /** A list of logs-based metrics. <code>repeated .google.logging.v2.LogMetric metrics = 1;</code> */
    public Builder setMetrics(
        int index, com.google.logging.v2.LogMetric.Builder builderForValue) {
      if (metricsBuilder_ == null) {
        ensureMetricsIsMutable();
        metrics_.set(index, builderForValue.build());
        onChanged();
      } else {
        metricsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /** A list of logs-based metrics. <code>repeated .google.logging.v2.LogMetric metrics = 1;</code> */
    public Builder addMetrics(com.google.logging.v2.LogMetric value) {
      if (metricsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureMetricsIsMutable();
        metrics_.add(value);
        onChanged();
      } else {
        metricsBuilder_.addMessage(value);
      }
      return this;
    }
    /** A list of logs-based metrics. <code>repeated .google.logging.v2.LogMetric metrics = 1;</code> */
    public Builder addMetrics(
        int index, com.google.logging.v2.LogMetric value) {
      if (metricsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureMetricsIsMutable();
        metrics_.add(index, value);
        onChanged();
      } else {
        metricsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /** A list of logs-based metrics. <code>repeated .google.logging.v2.LogMetric metrics = 1;</code> */
    public Builder addMetrics(
        com.google.logging.v2.LogMetric.Builder builderForValue) {
      if (metricsBuilder_ == null) {
        ensureMetricsIsMutable();
        metrics_.add(builderForValue.build());
        onChanged();
      } else {
        metricsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /** A list of logs-based metrics. <code>repeated .google.logging.v2.LogMetric metrics = 1;</code> */
    public Builder addMetrics(
        int index, com.google.logging.v2.LogMetric.Builder builderForValue) {
      if (metricsBuilder_ == null) {
        ensureMetricsIsMutable();
        metrics_.add(index, builderForValue.build());
        onChanged();
      } else {
        metricsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /** A list of logs-based metrics. <code>repeated .google.logging.v2.LogMetric metrics = 1;</code> */
    public Builder addAllMetrics(
        java.lang.Iterable<? extends com.google.logging.v2.LogMetric> values) {
      if (metricsBuilder_ == null) {
        ensureMetricsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(
            values, metrics_);
        onChanged();
      } else {
        metricsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /** A list of logs-based metrics. <code>repeated .google.logging.v2.LogMetric metrics = 1;</code> */
    public Builder clearMetrics() {
      if (metricsBuilder_ == null) {
        metrics_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        metricsBuilder_.clear();
      }
      return this;
    }
    /** A list of logs-based metrics. <code>repeated .google.logging.v2.LogMetric metrics = 1;</code> */
    public Builder removeMetrics(int index) {
      if (metricsBuilder_ == null) {
        ensureMetricsIsMutable();
        metrics_.remove(index);
        onChanged();
      } else {
        metricsBuilder_.remove(index);
      }
      return this;
    }
    /** A list of logs-based metrics. <code>repeated .google.logging.v2.LogMetric metrics = 1;</code> */
    public com.google.logging.v2.LogMetric.Builder getMetricsBuilder(
        int index) {
      return getMetricsFieldBuilder().getBuilder(index);
    }
    /** A list of logs-based metrics. <code>repeated .google.logging.v2.LogMetric metrics = 1;</code> */
    public com.google.logging.v2.LogMetricOrBuilder getMetricsOrBuilder(
        int index) {
      if (metricsBuilder_ == null) {
        return metrics_.get(index);
      } else {
        return metricsBuilder_.getMessageOrBuilder(index);
      }
    }
    /** A list of logs-based metrics. <code>repeated .google.logging.v2.LogMetric metrics = 1;</code> */
    public java.util.List<? extends com.google.logging.v2.LogMetricOrBuilder>
         getMetricsOrBuilderList() {
      if (metricsBuilder_ != null) {
        return metricsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(metrics_);
      }
    }
    /** A list of logs-based metrics. <code>repeated .google.logging.v2.LogMetric metrics = 1;</code> */
    public com.google.logging.v2.LogMetric.Builder addMetricsBuilder() {
      return getMetricsFieldBuilder().addBuilder(
          com.google.logging.v2.LogMetric.getDefaultInstance());
    }
    /** A list of logs-based metrics. <code>repeated .google.logging.v2.LogMetric metrics = 1;</code> */
    public com.google.logging.v2.LogMetric.Builder addMetricsBuilder(
        int index) {
      return getMetricsFieldBuilder().addBuilder(
          index, com.google.logging.v2.LogMetric.getDefaultInstance());
    }
    /** A list of logs-based metrics. <code>repeated .google.logging.v2.LogMetric metrics = 1;</code> */
    public java.util.List<com.google.logging.v2.LogMetric.Builder>
         getMetricsBuilderList() {
      return getMetricsFieldBuilder().getBuilderList();
    }
    // Lazily creates the field builder; after creation it owns the list and
    // metrics_ is nulled out so all access goes through the builder.
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.logging.v2.LogMetric, com.google.logging.v2.LogMetric.Builder, com.google.logging.v2.LogMetricOrBuilder>
        getMetricsFieldBuilder() {
      if (metricsBuilder_ == null) {
        metricsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.logging.v2.LogMetric, com.google.logging.v2.LogMetric.Builder, com.google.logging.v2.LogMetricOrBuilder>(
                metrics_,
                ((bitField0_ & 0x00000001) == 0x00000001),
                getParentForChildren(),
                isClean());
        metrics_ = null;
      }
      return metricsBuilder_;
    }

    // Holds either a String or a ByteString; lazily converted and cached in
    // whichever representation was last requested.
    private java.lang.Object nextPageToken_ = "";
    /**
     * If there might be more results than appear in this response, then
     * `nextPageToken` is included. To get the next set of results, call this
     * method again using the value of `nextPageToken` as `pageToken`.
     * <code>string next_page_token = 2;</code>
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /** Byte-form accessor for next_page_token; see {@link #getNextPageToken()}. */
    public com.google.protobuf.ByteString
        getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /** Sets next_page_token; see {@link #getNextPageToken()}. */
    public Builder setNextPageToken(
        java.lang.String value) {
      if (value == null) {
    throw new NullPointerException();
  }
      nextPageToken_ = value;
      onChanged();
      return this;
    }
    /** Resets next_page_token to its default (empty string). */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      onChanged();
      return this;
    }
    /** Sets next_page_token from raw bytes (must be valid UTF-8). */
    public Builder setNextPageTokenBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      onChanged();
      return this;
    }

    // Proto3: unknown fields are discarded by this (older) generated runtime.
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }

    // @@protoc_insertion_point(builder_scope:google.logging.v2.ListLogMetricsResponse)
  }

  // @@protoc_insertion_point(class_scope:google.logging.v2.ListLogMetricsResponse)
  private static final com.google.logging.v2.ListLogMetricsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.logging.v2.ListLogMetricsResponse();
  }

  public static com.google.logging.v2.ListLogMetricsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<ListLogMetricsResponse>
      PARSER = new com.google.protobuf.AbstractParser<ListLogMetricsResponse>() {
    public ListLogMetricsResponse parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
        return new ListLogMetricsResponse(input, extensionRegistry);
    }
  };

  public static com.google.protobuf.Parser<ListLogMetricsResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListLogMetricsResponse> getParserForType() {
    return PARSER;
  }

  public com.google.logging.v2.ListLogMetricsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.model;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;

import org.apache.camel.CamelContext;
import org.apache.camel.LoggingLevel;
import org.apache.camel.processor.RedeliveryPolicy;
import org.apache.camel.spi.Metadata;
import org.apache.camel.util.CamelContextHelper;
import org.apache.camel.util.ObjectHelper;

/**
 * To configure re-delivery for error handling.
 * <p>
 * Most options are declared as {@code String} (rather than int/long/boolean)
 * so they can carry property placeholders; they are resolved against the
 * {@link CamelContext} when
 * {@link #createRedeliveryPolicy(CamelContext, RedeliveryPolicy)} is invoked.
 *
 * @version
 */
@Metadata(label = "configuration")
@XmlRootElement(name = "redeliveryPolicy")
@XmlAccessorType(XmlAccessType.FIELD)
public class RedeliveryPolicyDefinition {
    @XmlAttribute
    private String maximumRedeliveries;
    @XmlAttribute
    private String redeliveryDelay;
    @XmlAttribute
    private String asyncDelayedRedelivery;
    @XmlAttribute
    private String backOffMultiplier;
    @XmlAttribute
    private String useExponentialBackOff;
    @XmlAttribute
    private String collisionAvoidanceFactor;
    @XmlAttribute
    private String useCollisionAvoidance;
    @XmlAttribute
    private String maximumRedeliveryDelay;
    @XmlAttribute
    private LoggingLevel retriesExhaustedLogLevel;
    @XmlAttribute
    private LoggingLevel retryAttemptedLogLevel;
    @XmlAttribute
    private String logRetryAttempted;
    @XmlAttribute
    private String logStackTrace;
    @XmlAttribute
    private String logRetryStackTrace;
    @XmlAttribute
    private String logHandled;
    @XmlAttribute
    private String logNewException;
    @XmlAttribute
    private String logContinued;
    @XmlAttribute
    private String logExhausted;
    @XmlAttribute
    private String logExhaustedMessageHistory;
    @XmlAttribute
    private String disableRedelivery;
    @XmlAttribute
    private String delayPattern;
    @XmlAttribute
    private String allowRedeliveryWhileStopping;
    @XmlAttribute
    private String exchangeFormatterRef;

    /**
     * Creates the runtime {@link RedeliveryPolicy}, starting from a copy of
     * the given parent policy (or a fresh default policy when {@code
     * parentPolicy} is null) and overlaying every option that was set on this
     * definition.  String options are resolved (property placeholders
     * included) via {@link CamelContextHelper}.
     *
     * @param context      camel context used to resolve placeholder values
     * @param parentPolicy optional policy whose settings act as defaults
     * @return the assembled redelivery policy
     */
    public RedeliveryPolicy createRedeliveryPolicy(CamelContext context, RedeliveryPolicy parentPolicy) {
        RedeliveryPolicy answer;
        if (parentPolicy != null) {
            // inherit the parent's settings without mutating the parent
            answer = parentPolicy.copy();
        } else {
            answer = new RedeliveryPolicy();
        }
        try {
            // copy across the properties - if they are set
            if (maximumRedeliveries != null) {
                answer.setMaximumRedeliveries(CamelContextHelper.parseInteger(context, maximumRedeliveries));
            }
            if (redeliveryDelay != null) {
                answer.setRedeliveryDelay(CamelContextHelper.parseLong(context, redeliveryDelay));
            }
            if (asyncDelayedRedelivery != null) {
                if (CamelContextHelper.parseBoolean(context, asyncDelayedRedelivery)) {
                    answer.asyncDelayedRedelivery();
                }
            }
            if (retriesExhaustedLogLevel != null) {
                answer.setRetriesExhaustedLogLevel(retriesExhaustedLogLevel);
            }
            if (retryAttemptedLogLevel != null) {
                answer.setRetryAttemptedLogLevel(retryAttemptedLogLevel);
            }
            if (backOffMultiplier != null) {
                answer.setBackOffMultiplier(CamelContextHelper.parseDouble(context, backOffMultiplier));
            }
            if (useExponentialBackOff != null) {
                answer.setUseExponentialBackOff(CamelContextHelper.parseBoolean(context, useExponentialBackOff));
            }
            if (collisionAvoidanceFactor != null) {
                answer.setCollisionAvoidanceFactor(CamelContextHelper.parseDouble(context, collisionAvoidanceFactor));
            }
            if (useCollisionAvoidance != null) {
                answer.setUseCollisionAvoidance(CamelContextHelper.parseBoolean(context, useCollisionAvoidance));
            }
            if (maximumRedeliveryDelay != null) {
                answer.setMaximumRedeliveryDelay(CamelContextHelper.parseLong(context, maximumRedeliveryDelay));
            }
            if (logStackTrace != null) {
                answer.setLogStackTrace(CamelContextHelper.parseBoolean(context, logStackTrace));
            }
            if (logRetryStackTrace != null) {
                answer.setLogRetryStackTrace(CamelContextHelper.parseBoolean(context, logRetryStackTrace));
            }
            if (logHandled != null) {
                answer.setLogHandled(CamelContextHelper.parseBoolean(context, logHandled));
            }
            if (logNewException != null) {
                answer.setLogNewException(CamelContextHelper.parseBoolean(context, logNewException));
            }
            if (logContinued != null) {
                answer.setLogContinued(CamelContextHelper.parseBoolean(context, logContinued));
            }
            if (logRetryAttempted != null) {
                answer.setLogRetryAttempted(CamelContextHelper.parseBoolean(context, logRetryAttempted));
            }
            if (logExhausted != null) {
                answer.setLogExhausted(CamelContextHelper.parseBoolean(context, logExhausted));
            }
            if (logExhaustedMessageHistory != null) {
                answer.setLogExhaustedMessageHistory(CamelContextHelper.parseBoolean(context, logExhaustedMessageHistory));
            }
            if (disableRedelivery != null) {
                // disabling is modeled as "at most zero redeliveries"
                if (CamelContextHelper.parseBoolean(context, disableRedelivery)) {
                    answer.setMaximumRedeliveries(0);
                }
            }
            if (delayPattern != null) {
                answer.setDelayPattern(delayPattern);
            }
            if (allowRedeliveryWhileStopping != null) {
                answer.setAllowRedeliveryWhileStopping(CamelContextHelper.parseBoolean(context, allowRedeliveryWhileStopping));
            }
            if (exchangeFormatterRef != null) {
                answer.setExchangeFormatterRef(exchangeFormatterRef);
            }
        } catch (Exception e) {
            // placeholder resolution/parsing errors surface as runtime exceptions
            throw ObjectHelper.wrapRuntimeCamelException(e);
        }

        return answer;
    }

    @Override
    public String toString() {
        return "RedeliveryPolicy[maximumRedeliveries: " + maximumRedeliveries + "]";
    }

    // Fluent API
    //-------------------------------------------------------------------------

    /**
     * Allow asynchronous delayed redelivery.
     *
     * @return the builder
     */
    public RedeliveryPolicyDefinition asyncDelayedRedelivery() {
        setAsyncDelayedRedelivery("true");
        return this;
    }

    /**
     * Controls whether to allow redelivery while stopping/shutting down a route that uses error handling.
     *
     * @param allowRedeliveryWhileStopping <tt>true</tt> to allow redelivery, <tt>false</tt> to reject redeliveries
     * @return the builder
     */
    public RedeliveryPolicyDefinition allowRedeliveryWhileStopping(boolean allowRedeliveryWhileStopping) {
        return allowRedeliveryWhileStopping(Boolean.toString(allowRedeliveryWhileStopping));
    }

    /**
     * Controls whether to allow redelivery while stopping/shutting down a route that uses error handling
     * (supports property placeholders).
     *
     * @param allowRedeliveryWhileStopping <tt>true</tt> to allow redelivery, <tt>false</tt> to reject redeliveries
     * @return the builder
     */
    public RedeliveryPolicyDefinition allowRedeliveryWhileStopping(String allowRedeliveryWhileStopping) {
        setAllowRedeliveryWhileStopping(allowRedeliveryWhileStopping);
        return this;
    }

    /**
     * Sets the back off multiplier.
     *
     * @param backOffMultiplier the back off multiplier
     * @return the builder
     */
    public RedeliveryPolicyDefinition backOffMultiplier(double backOffMultiplier) {
        return backOffMultiplier(Double.toString(backOffMultiplier));
    }

    /**
     * Sets the back off multiplier (supports property placeholders).
     *
     * @param backOffMultiplier the back off multiplier
     * @return the builder
     */
    public RedeliveryPolicyDefinition backOffMultiplier(String backOffMultiplier) {
        setBackOffMultiplier(backOffMultiplier);
        return this;
    }

    /**
     * Sets the collision avoidance percentage (converted to a factor by
     * multiplying with 0.01).
     *
     * @param collisionAvoidancePercent the percentage
     * @return the builder
     */
    public RedeliveryPolicyDefinition collisionAvoidancePercent(double collisionAvoidancePercent) {
        setCollisionAvoidanceFactor(Double.toString(collisionAvoidancePercent * 0.01d));
        return this;
    }

    /**
     * Sets the collision avoidance factor.
     *
     * @param collisionAvoidanceFactor the factor
     * @return the builder
     */
    public RedeliveryPolicyDefinition collisionAvoidanceFactor(double collisionAvoidanceFactor) {
        return collisionAvoidanceFactor(Double.toString(collisionAvoidanceFactor));
    }

    /**
     * Sets the collision avoidance factor (supports property placeholders).
     *
     * @param collisionAvoidanceFactor the factor
     * @return the builder
     */
    public RedeliveryPolicyDefinition collisionAvoidanceFactor(String collisionAvoidanceFactor) {
        setCollisionAvoidanceFactor(collisionAvoidanceFactor);
        return this;
    }

    /**
     * Sets the initial redelivery delay.
     *
     * @param delay delay in millis
     * @return the builder
     */
    public RedeliveryPolicyDefinition redeliveryDelay(long delay) {
        return redeliveryDelay(Long.toString(delay));
    }

    /**
     * Sets the initial redelivery delay (supports property placeholders).
     *
     * @param delay delay in millis
     * @return the builder
     */
    public RedeliveryPolicyDefinition redeliveryDelay(String delay) {
        setRedeliveryDelay(delay);
        return this;
    }

    /**
     * Sets the logging level to use when retries has exhausted.
     *
     * @param retriesExhaustedLogLevel the logging level
     * @return the builder
     */
    public RedeliveryPolicyDefinition retriesExhaustedLogLevel(LoggingLevel retriesExhaustedLogLevel) {
        setRetriesExhaustedLogLevel(retriesExhaustedLogLevel);
        return this;
    }

    /**
     * Sets the logging level to use for logging retry attempts.
     *
     * @param retryAttemptedLogLevel the logging level
     * @return the builder
     */
    public RedeliveryPolicyDefinition retryAttemptedLogLevel(LoggingLevel retryAttemptedLogLevel) {
        setRetryAttemptedLogLevel(retryAttemptedLogLevel);
        return this;
    }

    /**
     * Sets whether stack traces should be logged.
     * Can be used to include or reduce verbose.
     *
     * @param logStackTrace whether stack traces should be logged or not
     * @return the builder
     */
    public RedeliveryPolicyDefinition logStackTrace(boolean logStackTrace) {
        return logStackTrace(Boolean.toString(logStackTrace));
    }

    /**
     * Sets whether stack traces should be logged (supports property placeholders).
     * Can be used to include or reduce verbose.
     *
     * @param logStackTrace whether stack traces should be logged or not
     * @return the builder
     */
    public RedeliveryPolicyDefinition logStackTrace(String logStackTrace) {
        setLogStackTrace(logStackTrace);
        return this;
    }

    /**
     * Sets whether stack traces should be logged when an retry attempt failed.
     * Can be used to include or reduce verbose.
     *
     * @param logRetryStackTrace whether stack traces should be logged or not
     * @return the builder
     */
    public RedeliveryPolicyDefinition logRetryStackTrace(boolean logRetryStackTrace) {
        return logRetryStackTrace(Boolean.toString(logRetryStackTrace));
    }

    /**
     * Sets whether stack traces should be logged when an retry attempt failed (supports property placeholders).
     * Can be used to include or reduce verbose.
     *
     * @param logRetryStackTrace whether stack traces should be logged or not
     * @return the builder
     */
    public RedeliveryPolicyDefinition logRetryStackTrace(String logRetryStackTrace) {
        setLogRetryStackTrace(logRetryStackTrace);
        return this;
    }

    /**
     * Sets whether retry attempts should be logged or not.
     * Can be used to include or reduce verbose.
     *
     * @param logRetryAttempted whether retry attempts should be logged or not
     * @return the builder
     */
    public RedeliveryPolicyDefinition logRetryAttempted(boolean logRetryAttempted) {
        return logRetryAttempted(Boolean.toString(logRetryAttempted));
    }

    /**
     * Sets whether retry attempts should be logged or not (supports property placeholders).
     * Can be used to include or reduce verbose.
     *
     * @param logRetryAttempted whether retry attempts should be logged or not
     * @return the builder
     */
    public RedeliveryPolicyDefinition logRetryAttempted(String logRetryAttempted) {
        setLogRetryAttempted(logRetryAttempted);
        return this;
    }

    /**
     * Sets whether handled exceptions should be logged or not.
     * Can be used to include or reduce verbose.
     *
     * @param logHandled whether handled exceptions should be logged or not
     * @return the builder
     */
    public RedeliveryPolicyDefinition logHandled(boolean logHandled) {
        return logHandled(Boolean.toString(logHandled));
    }

    /**
     * Sets whether handled exceptions should be logged or not (supports property placeholders).
     * Can be used to include or reduce verbose.
     *
     * @param logHandled whether handled exceptions should be logged or not
     * @return the builder
     */
    public RedeliveryPolicyDefinition logHandled(String logHandled) {
        setLogHandled(logHandled);
        return this;
    }

    /**
     * Sets whether new exceptions should be logged or not.
     * Can be used to include or reduce verbose.
     * <p/>
     * A new exception is an exception that was thrown while handling a previous exception.
     *
     * @param logNewException whether new exceptions should be logged or not
     * @return the builder
     */
    public RedeliveryPolicyDefinition logNewException(boolean logNewException) {
        return logNewException(Boolean.toString(logNewException));
    }

    /**
     * Sets whether new exceptions should be logged or not (supports property placeholders).
     * Can be used to include or reduce verbose.
     * <p/>
     * A new exception is an exception that was thrown while handling a previous exception.
     *
     * @param logNewException whether new exceptions should be logged or not
     * @return the builder
     */
    public RedeliveryPolicyDefinition logNewException(String logNewException) {
        setLogNewException(logNewException);
        return this;
    }

    /**
     * Sets whether continued exceptions should be logged or not.
     * Can be used to include or reduce verbose.
     *
     * @param logContinued whether continued exceptions should be logged or not
     * @return the builder
     */
    public RedeliveryPolicyDefinition logContinued(boolean logContinued) {
        return logContinued(Boolean.toString(logContinued));
    }

    /**
     * Sets whether continued exceptions should be logged or not (supports property placeholders).
     * Can be used to include or reduce verbose.
     *
     * @param logContinued whether continued exceptions should be logged or not
     * @return the builder
     */
    public RedeliveryPolicyDefinition logContinued(String logContinued) {
        setLogContinued(logContinued);
        return this;
    }

    /**
     * Sets whether exhausted exceptions should be logged or not.
     * Can be used to include or reduce verbose.
     *
     * @param logExhausted whether exhausted exceptions should be logged or not
     * @return the builder
     */
    public RedeliveryPolicyDefinition logExhausted(boolean logExhausted) {
        return logExhausted(Boolean.toString(logExhausted));
    }

    /**
     * Sets whether exhausted exceptions should be logged or not (supports property placeholders).
     * Can be used to include or reduce verbose.
     *
     * @param logExhausted whether exhausted exceptions should be logged or not
     * @return the builder
     */
    public RedeliveryPolicyDefinition logExhausted(String logExhausted) {
        setLogExhausted(logExhausted);
        return this;
    }

    /**
     * Sets whether exhausted exceptions should be logged including message history or not.
     * Can be used to include or reduce verbose.
     *
     * @param logExhaustedMessageHistory whether exhausted exceptions should be logged with message history
     * @return the builder
     */
    public RedeliveryPolicyDefinition logExhaustedMessageHistory(boolean logExhaustedMessageHistory) {
        setLogExhaustedMessageHistory(Boolean.toString(logExhaustedMessageHistory));
        return this;
    }

    /**
     * Sets whether exhausted exceptions should be logged including message history or not (supports property placeholders).
     * Can be used to include or reduce verbose.
     *
     * @param logExhaustedMessageHistory whether exhausted exceptions should be logged with message history
     * @return the builder
     */
    public RedeliveryPolicyDefinition logExhaustedMessageHistory(String logExhaustedMessageHistory) {
        setLogExhaustedMessageHistory(logExhaustedMessageHistory);
        return this;
    }

    /**
     * Sets the maximum redeliveries
     * <ul>
     *   <li>x = redeliver at most x times</li>
     *   <li>0 = no redeliveries</li>
     *   <li>-1 = redeliver forever</li>
     * </ul>
     *
     * @param maximumRedeliveries the value
     * @return the builder
     */
    public RedeliveryPolicyDefinition maximumRedeliveries(int maximumRedeliveries) {
        return maximumRedeliveries(Integer.toString(maximumRedeliveries));
    }

    /**
     * Sets the maximum redeliveries (supports property placeholders)
     * <ul>
     *   <li>x = redeliver at most x times</li>
     *   <li>0 = no redeliveries</li>
     *   <li>-1 = redeliver forever</li>
     * </ul>
     *
     * @param maximumRedeliveries the value
     * @return the builder
     */
    public RedeliveryPolicyDefinition maximumRedeliveries(String maximumRedeliveries) {
        setMaximumRedeliveries(maximumRedeliveries);
        return this;
    }

    /**
     * Turn on collision avoidance.
     *
     * @return the builder
     */
    public RedeliveryPolicyDefinition useCollisionAvoidance() {
        setUseCollisionAvoidance("true");
        return this;
    }

    /**
     * Turn on exponential back off.
     *
     * @return the builder
     */
    public RedeliveryPolicyDefinition useExponentialBackOff() {
        setUseExponentialBackOff("true");
        return this;
    }

    /**
     * Sets the maximum delay between redelivery.
     *
     * @param maximumRedeliveryDelay the delay in millis
     * @return the builder
     */
    public RedeliveryPolicyDefinition maximumRedeliveryDelay(long maximumRedeliveryDelay) {
        return maximumRedeliveryDelay(Long.toString(maximumRedeliveryDelay));
    }

    /**
     * Sets the maximum delay between redelivery (supports property placeholders).
     *
     * @param maximumRedeliveryDelay the delay in millis
     * @return the builder
     */
    public RedeliveryPolicyDefinition maximumRedeliveryDelay(String maximumRedeliveryDelay) {
        setMaximumRedeliveryDelay(maximumRedeliveryDelay);
        return this;
    }

    /**
     * Sets the delay pattern with delay intervals.
     *
     * @param delayPattern the delay pattern
     * @return the builder
     */
    public RedeliveryPolicyDefinition delayPattern(String delayPattern) {
        setDelayPattern(delayPattern);
        return this;
    }

    /**
     * Sets the reference of the instance of {@link org.apache.camel.spi.ExchangeFormatter}
     * to generate the log message from exchange.
     *
     * @param exchangeFormatterRef name of the instance of {@link org.apache.camel.spi.ExchangeFormatter}
     * @return the builder
     */
    public RedeliveryPolicyDefinition exchangeFormatterRef(String exchangeFormatterRef) {
        setExchangeFormatterRef(exchangeFormatterRef);
        return this;
    }

    // Properties
    //-------------------------------------------------------------------------

    public String getMaximumRedeliveries() {
        return maximumRedeliveries;
    }

    public void setMaximumRedeliveries(String maximumRedeliveries) {
        this.maximumRedeliveries = maximumRedeliveries;
    }

    public String getRedeliveryDelay() {
        return redeliveryDelay;
    }

    public void setRedeliveryDelay(String redeliveryDelay) {
        this.redeliveryDelay = redeliveryDelay;
    }

    public String getAsyncDelayedRedelivery() {
        return asyncDelayedRedelivery;
    }

    /**
     * Resolves the asyncDelayedRedelivery option (placeholders included)
     * against the given context; returns false when the option is unset.
     */
    public boolean isAsyncDelayedRedelivery(CamelContext context) {
        if (getAsyncDelayedRedelivery() == null) {
            return false;
        }

        try {
            return CamelContextHelper.parseBoolean(context, getAsyncDelayedRedelivery());
        } catch (Exception e) {
            throw ObjectHelper.wrapRuntimeCamelException(e);
        }
    }

    public void setAsyncDelayedRedelivery(String asyncDelayedRedelivery) {
        this.asyncDelayedRedelivery = asyncDelayedRedelivery;
    }

    public String getBackOffMultiplier() {
        return backOffMultiplier;
    }

    public void setBackOffMultiplier(String backOffMultiplier) {
        this.backOffMultiplier = backOffMultiplier;
    }

    public String getUseExponentialBackOff() {
        return useExponentialBackOff;
    }

    public void setUseExponentialBackOff(String useExponentialBackOff) {
        this.useExponentialBackOff = useExponentialBackOff;
    }

    public String getCollisionAvoidanceFactor() {
        return collisionAvoidanceFactor;
    }

    public void setCollisionAvoidanceFactor(String collisionAvoidanceFactor) {
        this.collisionAvoidanceFactor = collisionAvoidanceFactor;
    }

    public String getUseCollisionAvoidance() {
        return useCollisionAvoidance;
    }

    public void setUseCollisionAvoidance(String useCollisionAvoidance) {
        this.useCollisionAvoidance = useCollisionAvoidance;
    }

    public String getMaximumRedeliveryDelay() {
        return maximumRedeliveryDelay;
    }

    public void setMaximumRedeliveryDelay(String maximumRedeliveryDelay) {
        this.maximumRedeliveryDelay = maximumRedeliveryDelay;
    }

    public LoggingLevel getRetriesExhaustedLogLevel() {
        return retriesExhaustedLogLevel;
    }

    public void setRetriesExhaustedLogLevel(LoggingLevel retriesExhaustedLogLevel) {
        this.retriesExhaustedLogLevel = retriesExhaustedLogLevel;
    }

    public LoggingLevel getRetryAttemptedLogLevel() {
        return retryAttemptedLogLevel;
    }

    public void setRetryAttemptedLogLevel(LoggingLevel retryAttemptedLogLevel) {
        this.retryAttemptedLogLevel = retryAttemptedLogLevel;
    }

    public String getLogRetryAttempted() {
        return logRetryAttempted;
    }

    public void setLogRetryAttempted(String logRetryAttempted) {
        this.logRetryAttempted = logRetryAttempted;
    }

    public String getLogStackTrace() {
        return logStackTrace;
    }

    public void setLogStackTrace(String logStackTrace) {
        this.logStackTrace = logStackTrace;
    }

    public String getLogRetryStackTrace() {
        return logRetryStackTrace;
    }

    public void setLogRetryStackTrace(String logRetryStackTrace) {
        this.logRetryStackTrace = logRetryStackTrace;
    }

    public String getLogHandled() {
        return logHandled;
    }

    public void setLogHandled(String logHandled) {
        this.logHandled = logHandled;
    }

    public String getLogNewException() {
        return logNewException;
    }

    public void setLogNewException(String logNewException) {
        this.logNewException = logNewException;
    }

    public String getLogContinued() {
        return logContinued;
    }

    public void setLogContinued(String logContinued) {
        this.logContinued = logContinued;
    }

    public String getLogExhausted() {
        return logExhausted;
    }

    public void setLogExhausted(String logExhausted) {
        this.logExhausted = logExhausted;
    }

    public String getLogExhaustedMessageHistory() {
        return logExhaustedMessageHistory;
    }

    public void setLogExhaustedMessageHistory(String logExhaustedMessageHistory) {
        this.logExhaustedMessageHistory = logExhaustedMessageHistory;
    }

    public String getDisableRedelivery() {
        return disableRedelivery;
    }

    /**
     * Disables redelivery (same as setting maximum redeliveries to 0)
     */
    public void setDisableRedelivery(String disableRedelivery) {
        this.disableRedelivery = disableRedelivery;
    }

    public String getDelayPattern() {
        return delayPattern;
    }

    public void setDelayPattern(String delayPattern) {
        this.delayPattern = delayPattern;
    }

    public String getAllowRedeliveryWhileStopping() {
        return allowRedeliveryWhileStopping;
    }

    public void setAllowRedeliveryWhileStopping(String allowRedeliveryWhileStopping) {
        this.allowRedeliveryWhileStopping = allowRedeliveryWhileStopping;
    }

    public String getExchangeFormatterRef() {
        return exchangeFormatterRef;
    }

    public void setExchangeFormatterRef(String exchangeFormatterRef) {
        this.exchangeFormatterRef = exchangeFormatterRef;
    }
}
/**
 * $URL$
 * $Id$
 *
 * Copyright (c) 2006-2009 The Sakai Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *             http://www.opensource.org/licenses/ECL-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sakaiproject.sitestats.impl.event;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.sakaiproject.entity.api.ResourceProperties;
import org.sakaiproject.entitybroker.entityprovider.EntityProviderManager;
import org.sakaiproject.entitybroker.entityprovider.capabilities.Statisticable;
import org.sakaiproject.entitybroker.entityprovider.extension.EntityProviderListener;
import org.sakaiproject.memory.api.Cache;
import org.sakaiproject.memory.api.MemoryService;
import org.sakaiproject.sitestats.api.event.EventInfo;
import org.sakaiproject.sitestats.api.event.EventRegistry;
import org.sakaiproject.sitestats.api.event.EventRegistryService;
import org.sakaiproject.sitestats.api.event.ToolInfo;
import org.sakaiproject.tool.api.SessionManager;
import org.sakaiproject.user.api.Preferences;
import org.sakaiproject.user.api.PreferencesService;
import org.sakaiproject.util.ResourceLoader;

import java.util.*;

/**
 * Event registry backed by EntityBroker: tools declare their statistics events by
 * registering an entity provider with the {@link Statisticable} capability, and this
 * class collects those registrations into SiteStats' {@link EventRegistry} model.
 * Localized event names are resolved lazily from the providers and cached per
 * (event id, locale) pair.
 */
public class EntityBrokerEventRegistry extends Observable implements EventRegistry, EntityProviderListener<Statisticable> {
    private static final Logger LOG = LoggerFactory.getLogger(EntityBrokerEventRegistry.class);
    private static final String CACHENAME = EntityBrokerEventRegistry.class.getName();

    /** Event Registry members */
    private List<ToolInfo> eventRegistry = new ArrayList<ToolInfo>();
    // Maps an event id to the EntityBroker prefix of the provider that declared it,
    // so the owning provider can be found again in getEventName().
    private Map<String, String> eventIdToEPPrefix = new HashMap<String, String>();

    /** Caching: EventLocaleKey.toString() -> localized event name */
    private Cache eventNamesCache = null;

    /** Sakai Services */
    private SessionManager M_sm;
    private PreferencesService M_ps;
    private EntityProviderManager M_epm;
    private MemoryService M_ms;

    // ################################################################
    // Spring methods
    // ################################################################
    public void setSessionManager(SessionManager sessionManager) {
        this.M_sm = sessionManager;
    }

    public void setPreferencesService(PreferencesService preferencesService) {
        this.M_ps = preferencesService;
    }

    public void setEntityProviderManager(EntityProviderManager entityProviderManager) {
        this.M_epm = entityProviderManager;
    }

    public void setMemoryService(MemoryService memoryService) {
        this.M_ms = memoryService;
    }

    public void init() {
        LOG.info("init()");
        // configure cache
        eventNamesCache = M_ms.newCache(CACHENAME);
        // register EntityBrokerListener
        // NOTE(review): second argument presumably means "also notify about providers
        // registered before this listener" — confirm against EntityProviderManager API.
        M_epm.registerListener(this, true);
    }

    // ################################################################
    // Event Registry methods
    // ################################################################
    /* (non-Javadoc)
     * @see org.sakaiproject.sitestats.api.event.EventRegistry#getEventRegistry()
     */
    public List<ToolInfo> getEventRegistry() {
        LOG.debug("getEventRegistry(): #tools implementing Statisticable = " + eventRegistry.size());
        return eventRegistry;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.sitestats.api.event.EventRegistry#isEventRegistryExpired()
     */
    public boolean isEventRegistryExpired() {
        // Observable's changed flag is set whenever a new provider registers.
        return hasChanged();
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.sitestats.api.event.EventRegistry#getEventName(java.lang.String)
     */
    public String getEventName(String eventId) {
        Locale currentUserLocale = getCurrentUserLocale();
        EventLocaleKey key = new EventLocaleKey(eventId, currentUserLocale.toString());
        if (eventNamesCache.containsKey(key.toString())) {
            return (String) eventNamesCache.get(key.toString());
        }
        String eventName = null;
        try {
            String prefix = eventIdToEPPrefix.get(eventId);
            Statisticable s = M_epm.getProviderByPrefixAndCapability(prefix, Statisticable.class);
            Map<String, String> eventIdNamesMap = s.getEventNames(currentUserLocale);
            if (eventIdNamesMap != null) {
                // Cache every name the provider returned for this locale, not just the
                // requested one, so later lookups don't hit the provider again.
                for (String thisEventId : eventIdNamesMap.keySet()) {
                    EventLocaleKey thisCacheKey = new EventLocaleKey(thisEventId, currentUserLocale.toString());
                    String thisEventName = eventIdNamesMap.get(thisEventId);
                    eventNamesCache.put(thisCacheKey.toString(), thisEventName);
                    if (thisEventId.equals(eventId)) {
                        eventName = thisEventName;
                    }
                }
                LOG.debug("Cached event names for EB prefix '" + prefix + "', locale: " + currentUserLocale);
            }
        } catch (Exception e) {
            // Name resolution is best-effort (e.g. unknown event id -> null prefix -> NPE);
            // keep returning null as before, but log the cause instead of swallowing it.
            LOG.debug("Unable to resolve localized name for event '" + eventId + "'", e);
            eventName = null;
        }
        return eventName;
    }

    // ################################################################
    // EntityProviderListener methods
    // ################################################################
    public Class<Statisticable> getCapabilityFilter() {
        return Statisticable.class;
    }

    public String getPrefixFilter() {
        // No prefix filter: listen for every Statisticable provider.
        return null;
    }

    public void run(Statisticable provider) {
        LOG.info("Statisticable capability registered with prefix: " + provider.getEntityPrefix());
        processStatisticableProvider(provider);
    }

    /**
     * Converts one Statisticable provider registration into a {@link ToolInfo} entry
     * (with its events) and marks the registry as expired so observers refresh.
     */
    private void processStatisticableProvider(Statisticable provider) {
        String entityPrefix = provider.getEntityPrefix();
        String entityToolId = provider.getAssociatedToolId();
        String[] entityEventIds = provider.getEventKeys();

        // Build tool for Event Registry (List<ToolInfo>)
        ToolInfo tool = new ToolInfo(entityToolId);
        tool.setSelected(true);
        for (String eventId : entityEventIds) {
            EventInfo event = new EventInfo(eventId);
            event.setSelected(true);
            // Add to eventID -> entityProvider_prefix mapping
            eventIdToEPPrefix.put(eventId, entityPrefix);
            tool.addEvent(event);
        }
        eventRegistry.add(tool);

        // Set expired flag on EventRegistry to true
        setChanged();
        notifyObservers(EventRegistryService.NOTIF_EVENT_REGISTRY_EXPIRED);
    }

    // ################################################################
    // Utility Methods
    // ################################################################
    /**
     * Return current user locale.
     * @return user's Locale object (falls back to {@link Locale#getDefault()})
     */
    private Locale getCurrentUserLocale() {
        Locale loc = null;
        try {
            // check if locale is requested for specific user
            String userId = M_sm.getCurrentSessionUserId();
            if (userId != null) {
                Preferences prefs = M_ps.getPreferences(userId);
                ResourceProperties locProps = prefs.getProperties(ResourceLoader.APPLICATION_ID);
                String localeString = locProps.getProperty(ResourceLoader.LOCALE_KEY);
                // Parse user locale preference if set
                if (localeString != null) {
                    String[] locValues = localeString.split("_");
                    if (locValues.length > 1) {
                        // language, country
                        loc = new Locale(locValues[0], locValues[1]);
                    } else if (locValues.length == 1) {
                        // language
                        loc = new Locale(locValues[0]);
                    }
                }
                if (loc == null) {
                    loc = Locale.getDefault();
                }
            } else {
                // NOTE(review): userId is null here, so the attribute key is effectively
                // LOCALE_KEY + "null" — presumably matching how ResourceLoader stores the
                // anonymous session locale; confirm before changing.
                loc = (Locale) M_sm.getCurrentSession().getAttribute(ResourceLoader.LOCALE_KEY + M_sm.getCurrentSessionUserId());
            }
        } catch (NullPointerException e) {
            // Missing preferences/session anywhere above simply means "use the default".
            loc = Locale.getDefault();
        }
        return loc;
    }

    // ################################################################
    // Utility Classes
    // ################################################################
    /** Composite cache key: (event id, locale string). */
    public static class EventLocaleKey {
        String eventId = "";
        String locale = "";

        public EventLocaleKey(String eventId, String locale) {
            this.eventId = eventId;
            this.locale = locale;
        }

        public String getEventId() {
            return eventId;
        }

        public void setEventId(String eventId) {
            this.eventId = eventId;
        }

        public String getLocale() {
            return locale;
        }

        public void setLocale(String locale) {
            this.locale = locale;
        }

        @Override
        public String toString() {
            StringBuilder buff = new StringBuilder();
            buff.append("[");
            buff.append(getEventId());
            buff.append(", ");
            buff.append(getLocale());
            buff.append("]");
            return buff.toString();
        }

        @Override
        public int hashCode() {
            return getEventId().hashCode() + getLocale().hashCode();
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null || !(obj instanceof EventLocaleKey)) {
                return false;
            }
            EventLocaleKey o = (EventLocaleKey) obj;
            // Bug fix: the original compared o.getLocale() with itself, so the locale
            // part of the comparison was always true and keys for the same event in
            // different locales were considered equal.
            return o.getEventId().equals(getEventId()) && o.getLocale().equals(getLocale());
        }
    }
}
package gerald1248.hollows;

import android.app.Activity;
import android.content.BroadcastReceiver;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.content.res.AssetManager;
import android.content.res.Resources;
import android.graphics.Color;
import android.graphics.Typeface;
import android.os.Bundle;
import android.util.DisplayMetrics;
import android.view.Window;
import android.view.WindowManager;

import java.io.IOException;

/**
 * Entry activity: owns the game {@code Panel}, the looping background audio player
 * and the persisted progress/settings (level index, highest level reached, redshift
 * mode, audio on/off).
 */
public class MainActivity extends Activity {
    private LoopMediaPlayer loopMediaPlayer = null;
    private Panel panel = null;
    private int levelIndex = 0;
    private int highestLevelIndex = 0;
    // Highest level reached outside redshift mode; while redshift is active,
    // highestLevelIndex is replaced by a 9999 "all unlocked" sentinel and the
    // real progress is parked here (see toggleRedshift()).
    private int nonRedshiftHighestLevelIndex = 0;
    private boolean playAudio = false;
    private boolean redshift = false;
    private Typeface typeface = null;
    BroadcastReceiver receiver = null;
    private int masterColor = Color.WHITE;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Full-screen, no title bar.
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
        this.requestWindowFeature(Window.FEATURE_NO_TITLE);

        //filters
        IntentFilter filter = new IntentFilter(Intent.ACTION_SCREEN_ON);
        filter.addAction(Intent.ACTION_SCREEN_OFF);
        receiver = new ScreenReceiver();
        registerReceiver(receiver, filter);

        DisplayMetrics dm = new DisplayMetrics();
        getWindowManager().getDefaultDisplay().getMetrics(dm);
        Constants.SCREEN_WIDTH = dm.widthPixels;
        Constants.SCREEN_HEIGHT = dm.heightPixels;

        //level count
        Resources resources = MainActivity.this.getResources();
        Constants.MAX_LEVEL = resources.getStringArray(R.array.levels).length;

        //preferences
        readPreferences();

        //set panel font
        AssetManager am = this.getApplicationContext().getAssets();
        typeface = Typeface.createFromAsset(am, "fonts/PressStart2P.ttf");

        //media player: created paused; onResume() starts it if audio is enabled
        loopMediaPlayer = LoopMediaPlayer.create(MainActivity.this, getAudioResource(levelIndex));
        loopMediaPlayer.pause();

        try {
            panel = new Panel(this, levelIndex, typeface);
        } catch (IOException | InstantiationException | IllegalAccessException e) {
            e.printStackTrace();
        }
        setContentView(panel);
    }

    @Override
    protected void onResume() {
        super.onResume();
        readPreferences();
        if (playAudio) {
            loopMediaPlayer.start();
        }
        panel.showPauseScreen();
    }

    @Override
    protected void onPause() {
        super.onPause();
        writePreferences();
        panel.setRunning(false);
        panel.clearMultitouchState();

        //double lock: onPause and onStop
        if (loopMediaPlayer.isPlaying()) {
            loopMediaPlayer.pause();
        }
    }

    @Override
    protected void onStop() {
        super.onStop();

        //double lock: onPause and onStop
        if (loopMediaPlayer.isPlaying()) {
            loopMediaPlayer.pause();
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        loopMediaPlayer.release();
        panel = null;
        unregisterReceiver(receiver);
    }

    /** Restores progress and settings from the activity's private preferences. */
    private void readPreferences() {
        SharedPreferences preferences = getPreferences(MODE_PRIVATE);
        if (preferences == null) {
            return;
        }
        levelIndex = preferences.getInt("levelIndex", 0);
        highestLevelIndex = preferences.getInt("highestLevelIndex", 0);
        nonRedshiftHighestLevelIndex = preferences.getInt("nonRedshiftHighestLevelIndex", 0);
        redshift = preferences.getBoolean("redshift", false);
        playAudio = preferences.getBoolean("playAudio", false);
        if (redshift) {
            masterColor = Color.RED;
        }
    }

    /** Persists progress and settings; called from onPause(). */
    private void writePreferences() {
        SharedPreferences preferences = getPreferences(MODE_PRIVATE);
        SharedPreferences.Editor editor = preferences.edit();
        editor.putInt("levelIndex", levelIndex);
        editor.putInt("highestLevelIndex", highestLevelIndex);
        // Bug fix: this key was previously written with highestLevelIndex, which
        // clobbered the saved non-redshift progress (notably with the 9999 sentinel
        // while redshift mode was active).
        editor.putInt("nonRedshiftHighestLevelIndex", nonRedshiftHighestLevelIndex);
        editor.putBoolean("redshift", redshift);
        editor.putBoolean("playAudio", playAudio);
        // commit() (synchronous) is kept deliberately: onPause() may be the last
        // chance to persist before the process is killed.
        editor.commit();
    }

    /** Cycles through the available audio tracks, one per level (wrapping). */
    private int getAudioResource(int levelIndex) {
        int[] resources = {
                R.raw.synth_i,
                R.raw.synth_ii,
                R.raw.synth_iii,
                R.raw.synth_iv,
                R.raw.synth_v
        };
        int i = levelIndex % resources.length;
        return resources[i];
    }

    public void toggleAudio() {
        if (loopMediaPlayer.isPlaying()) {
            loopMediaPlayer.pause();
            playAudio = false;
        } else {
            loopMediaPlayer.start();
            playAudio = true;
        }
    }

    public void toggleRedshift() {
        redshift = !redshift;
        masterColor = (redshift) ? Color.RED : Color.WHITE;
        if (redshift) {
            // Park real progress and unlock everything (9999 sentinel) in redshift mode.
            nonRedshiftHighestLevelIndex = highestLevelIndex;
            highestLevelIndex = 9999;
        } else {
            highestLevelIndex = nonRedshiftHighestLevelIndex;
        }
    }

    public void setLevelIndex(int i) {
        levelIndex = i;
        if (i > highestLevelIndex) {
            highestLevelIndex = i;
        }
        if (loopMediaPlayer != null) {
            loopMediaPlayer.setResourceId(getAudioResource(i));
        }
    }

    public int getHighestLevelIndex() {
        return highestLevelIndex;
    }

    public boolean getPlayAudio() {
        return playAudio;
    }

    public boolean getRedshift() {
        return redshift;
    }

    public int getMasterColor() {
        return masterColor;
    }
}
package net.rystuff.mcmoddeobf;

import argo.jdom.JsonNode;
import argo.jdom.JsonRootNode;
import net.lingala.zip4j.core.ZipFile;
import net.lingala.zip4j.exception.ZipException;
import net.lingala.zip4j.model.ZipParameters;
import net.lingala.zip4j.util.Zip4jConstants;
import net.rystuff.mcmoddeobf.gui.GuiMain;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.filefilter.TrueFileFilter;

import java.io.*;
import java.net.URL;
import java.util.List;

/**
 * Static helpers for the MCModDeobf workflow: temp-directory setup, mapping-CSV
 * downloads, decompilation of the input archive, CSV-driven renaming of the
 * decompiled sources, and zipping the result.
 */
public class Util {
    // System Temp directory
    public static String tempDir = System.getProperty("java.io.tmpdir");
    // MCModDeobf base temp directory
    public static String baseDir = tempDir + "MCModDeobf";
    public static File baseDirFile = new File(baseDir);
    // Decompile Temp directory
    public static String decompString = baseDir + File.separator + "decomp";
    public static File decompFile = new File(decompString);
    // Deobf Temp directory
    public static String deobfString = baseDir + File.separator + "deobf";
    public static File deobfFile = new File(deobfString);
    // Path to Decompiler
    public static String decompilerString = baseDir + File.separator + "decompiler.jar";
    public static File decompilerFile = new File(decompilerString);
    // Output zip
    public static File outputZip;
    // Output zip ZipFile
    public static ZipFile outputZipFile;
    // Input file
    public static File inputZip;

    // The three mapping files expected per Minecraft version.
    private static final String[] CSV_NAMES = {"fields.csv", "methods.csv", "params.csv"};

    /** Reads the supported Minecraft versions from the "mcVersions" config array. */
    public static String[] getMCVersions(JsonRootNode config) {
        List<JsonNode> versionNodes = config.getArrayNode("mcVersions");
        String[] versions = new String[versionNodes.size()];
        for (int i = 0; i < versionNodes.size(); i++) {
            versions[i] = versionNodes.get(i).getStringValue();
            System.out.println(versions[i]);
        }
        return versions;
    }

    /** Gets the decompiler download link from the config. */
    public static String decompilerDownload(JsonRootNode config) {
        return config.getStringValue("decompiler");
    }

    /**
     * Initialization: wipes stale temp directories, recreates them, and downloads
     * any missing mapping CSVs for each configured Minecraft version.
     */
    public static void init() {
        // Prints out MCModDeobf temp directory location
        System.out.println("MCModDeobf temp directory location: " + baseDir);

        // Remove leftovers from a previous run.
        deleteDirectoryIfPresent(decompFile);
        deleteDirectoryIfPresent(deobfFile);

        // Creates temp directories (mkdirs also covers a missing baseDir)
        if (!baseDirFile.exists()) {
            baseDirFile.mkdir();
        }
        decompFile.mkdirs();
        deobfFile.mkdirs();

        // Download any mapping CSVs we don't have yet.
        List<JsonNode> versionNodes = MCModDeobf.config.getArrayNode("mcVersions");
        for (JsonNode versionNode : versionNodes) {
            String version = versionNode.getStringValue();
            for (String csv : CSV_NAMES) {
                File dest = new File(baseDir + File.separator + version + File.separator + csv);
                if (!dest.exists()) {
                    download("http://rystuff.net/data/MCModDeobf/" + version + "/" + csv, dest.toString());
                }
            }
        }
    }

    /** Deletes a directory tree if it exists; failures are logged, not fatal. */
    private static void deleteDirectoryIfPresent(File dir) {
        if (dir.exists() && dir.isDirectory()) {
            try {
                FileUtils.deleteDirectory(dir);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Downloads a URL to a destination path.
     * @return true on success, false on any failure (logged)
     */
    public static boolean download(String url, String dest) {
        try {
            System.out.println("Downloading " + url + " to " + dest);
            FileUtils.copyURLToFile(new URL(url), new File(dest));
            System.out.println("Downloaded " + url + " to " + dest);
            return true;
        } catch (Exception e) {
            System.out.println(e);
            return false;
        }
    }

    /** Runs the decompiler on {@link #inputZip} in a background thread. */
    public static void decompile() {
        new Thread() {
            public void run() {
                System.out.println("Decompiling");
                try {
                    // ProcessBuilder avoids Runtime.exec(String)'s whitespace splitting
                    // (paths containing spaces broke the old command line), and merging
                    // stderr into stdout prevents the child from blocking on a full
                    // stderr pipe while we only drain stdout.
                    ProcessBuilder pb = new ProcessBuilder(
                            "java", "-jar", decompilerString,
                            "-jar", inputZip.toString(),
                            "-o", decompString);
                    pb.redirectErrorStream(true);
                    Process p = pb.start();
                    try (BufferedReader input = new BufferedReader(new InputStreamReader(p.getInputStream()))) {
                        String line;
                        while ((line = input.readLine()) != null) {
                            System.out.println(line);
                        }
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
                System.out.println("Done!");
            }
        }.start();
    }

    /**
     * Copies the decompiled sources to the deobf directory, then applies the
     * fields/methods/params CSV renames to every file in it.
     */
    public static void deobf() {
        try {
            FileUtils.copyDirectory(decompFile, deobfFile);
        } catch (IOException e) {
            e.printStackTrace();
        }
        System.out.println("Deobfuscating");

        // Gets all files to deobfuscate
        List<File> files = (List<File>) FileUtils.listFiles(deobfFile, TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE);
        String versionDir = baseDir + File.separator + GuiMain.mcVersion + File.separator;
        for (File file : files) {
            try {
                System.out.println("Deobfuscating " + file.getCanonicalPath());
            } catch (IOException e) {
                e.printStackTrace();
            }
            for (String csv : CSV_NAMES) {
                applyCsvMappings(file, versionDir + csv);
            }
        }
        System.out.println("Deobfuscated!");
    }

    /**
     * Applies one "obfuscated,deobfuscated" CSV mapping file to a source file.
     * The file content is read once, all replacements applied in memory, and
     * written back once (the old code re-read and re-wrote per CSV line).
     */
    private static void applyCsvMappings(File file, String csvFile) {
        // try-with-resources: the old code leaked the BufferedReader.
        try (BufferedReader br = new BufferedReader(new FileReader(csvFile))) {
            String content = FileUtils.readFileToString(file);
            boolean changed = false;
            String line;
            while ((line = br.readLine()) != null) {
                String[] split = line.split(",");
                // Guard against malformed lines with no second column.
                if (split.length >= 2 && content.contains(split[0])) {
                    // replace() is literal; the old replaceAll() treated CSV values
                    // as regex pattern/replacement, which misbehaves on metacharacters.
                    content = content.replace(split[0], split[1]);
                    changed = true;
                }
            }
            if (changed) {
                FileUtils.writeStringToFile(file, content);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Zips the deobf directory into {@link #outputZip} (appending ".zip" if needed). */
    public static void Zip() {
        try {
            // endsWith, not contains: a path merely containing ".zip"/".jar" in a
            // directory name must not be mistaken for an archive extension.
            String lower = outputZip.toString().toLowerCase();
            if (lower.endsWith(".zip") || lower.endsWith(".jar")) {
                outputZipFile = new ZipFile(outputZip);
            } else {
                outputZipFile = new ZipFile(outputZip + ".zip");
            }
            ZipParameters parameters = new ZipParameters();
            parameters.setCompressionMethod(Zip4jConstants.COMP_DEFLATE);
            parameters.setCompressionLevel(Zip4jConstants.DEFLATE_LEVEL_NORMAL);
            parameters.setIncludeRootFolder(false);
            outputZipFile.createZipFileFromFolder(deobfFile + File.separator, parameters, true, 10485760);
        } catch (ZipException e) {
            e.printStackTrace();
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs; import java.io.*; import java.net.*; import java.util.ArrayList; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.*; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.FSConstants; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.DirectoryListing; import org.apache.hadoop.hdfs.protocol.FSConstants.DatanodeReportType; import org.apache.hadoop.hdfs.protocol.FSConstants.UpgradeAction; import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier; import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport; import org.apache.hadoop.hdfs.server.namenode.NameNode; import org.apache.hadoop.hdfs.DFSClient.DFSOutputStream; import org.apache.hadoop.io.Text; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.SecretManager.InvalidToken; import org.apache.hadoop.util.Progressable; 
/**************************************************************** * Implementation of the abstract FileSystem for the DFS system. * This object is the way end-user code interacts with a Hadoop * DistributedFileSystem. * *****************************************************************/ public class DistributedFileSystem extends FileSystem { private Path workingDir; private URI uri; DFSClient dfs; private boolean verifyChecksum = true; static{ Configuration.addDefaultResource("hdfs-default.xml"); Configuration.addDefaultResource("hdfs-site.xml"); } public DistributedFileSystem() { } /** @deprecated */ public DistributedFileSystem(InetSocketAddress namenode, Configuration conf) throws IOException { initialize(NameNode.getUri(namenode), conf); } /** @deprecated */ public String getName() { return uri.getAuthority(); } private InetSocketAddress namenode=null; private Configuration conf=null; public URI getUri() { return uri; } public void initialize(URI uri, Configuration conf) throws IOException { this.conf=conf; super.initialize(uri, conf); setConf(conf); String host = uri.getHost(); if (host == null) { throw new IOException("Incomplete HDFS URI, no host: "+ uri); } this.namenode = NameNode.getAddress(uri.getAuthority()); this.dfs = new DFSClient(namenode, conf, statistics); this.uri = URI.create(uri.getScheme()+"://"+uri.getAuthority()); this.workingDir = getHomeDirectory(); } public Path getWorkingDirectory() { return workingDir; } public long getDefaultBlockSize() { return dfs.getDefaultBlockSize(); } public short getDefaultReplication() { return dfs.getDefaultReplication(); } private Path makeAbsolute(Path f) { if (f.isAbsolute()) { return f; } else { return new Path(workingDir, f); } } public void setWorkingDirectory(Path dir) { String result = makeAbsolute(dir).toUri().getPath(); if (!DFSUtil.isValidName(result)) { throw new IllegalArgumentException("Invalid DFS directory name " + result); } workingDir = makeAbsolute(dir); } /** {@inheritDoc} */ public Path 
getHomeDirectory() { return new Path("/user/" + dfs.ugi.getShortUserName()).makeQualified(this); } private String getPathName(Path file) { checkPath(file); String result = makeAbsolute(file).toUri().getPath(); if (!DFSUtil.isValidName(result)) { throw new IllegalArgumentException("Pathname " + result + " from " + file+" is not a valid DFS filename."); } return result; } public BlockLocation[] getFileBlockLocations(FileStatus file, long start, long len) throws IOException { if (file == null) { return null; } statistics.incrementReadOps(1); return dfs.getBlockLocations(getPathName(file.getPath()), start, len); } public void setVerifyChecksum(boolean verifyChecksum) { this.verifyChecksum = verifyChecksum; } public FSDataInputStream open(Path f, int bufferSize) throws IOException { statistics.incrementReadOps(1); return new DFSClient.DFSDataInputStream( dfs.open(getPathName(f), bufferSize, verifyChecksum, statistics)); } /** * Start the lease recovery of a file * * @param f a file * @return true if the file is already closed * @throws IOException if an error occurs */ public boolean recoverLease(Path f) throws IOException { return dfs.recoverLease(getPathName(f)); } /** This optional operation is not yet supported. */ public FSDataOutputStream append(Path f, int bufferSize, Progressable progress) throws IOException { statistics.incrementWriteOps(1); return dfs.append(getPathName(f), bufferSize, progress, statistics); } public FSDataOutputStream create(Path f, FsPermission permission, boolean overwrite, int bufferSize, short replication, long blockSize, Progressable progress) throws IOException { statistics.incrementWriteOps(1); return new FSDataOutputStream (dfs.create(getPathName(f), permission, overwrite, true, replication, blockSize, progress, bufferSize), statistics); } /** * Same as create(), except fails if parent directory doesn't already exist. 
* @see #create(Path, FsPermission, boolean, int, short, long, Progressable) */ @Override public FSDataOutputStream createNonRecursive(Path f, FsPermission permission, boolean overwrite, int bufferSize, short replication, long blockSize, Progressable progress) throws IOException { return new FSDataOutputStream (dfs.create(getPathName(f), permission, overwrite, false, replication, blockSize, progress, bufferSize), statistics); } public boolean setReplication(Path src, short replication ) throws IOException { statistics.incrementWriteOps(1); return dfs.setReplication(getPathName(src), replication); } /** * Rename files/dirs */ public boolean rename(Path src, Path dst) throws IOException { statistics.incrementWriteOps(1); return dfs.rename(getPathName(src), getPathName(dst)); } /** * Get rid of Path f, whether a true file or dir. */ @Deprecated public boolean delete(Path f) throws IOException { statistics.incrementWriteOps(1); return dfs.delete(getPathName(f)); } /** * requires a boolean check to delete a non * empty directory recursively. 
*/ public boolean delete(Path f, boolean recursive) throws IOException { statistics.incrementWriteOps(1); return dfs.delete(getPathName(f), recursive); } /** {@inheritDoc} */ public ContentSummary getContentSummary(Path f) throws IOException { statistics.incrementReadOps(1); return dfs.getContentSummary(getPathName(f)); } /** Set a directory's quotas * @see org.apache.hadoop.hdfs.protocol.ClientProtocol#setQuota(String, long, long) */ public void setQuota(Path src, long namespaceQuota, long diskspaceQuota) throws IOException { dfs.setQuota(getPathName(src), namespaceQuota, diskspaceQuota); } private FileStatus makeQualified(HdfsFileStatus f, Path parent) { return new FileStatus(f.getLen(), f.isDir(), f.getReplication(), f.getBlockSize(), f.getModificationTime(), f.getAccessTime(), f.getPermission(), f.getOwner(), f.getGroup(), f.getFullPath(parent).makeQualified(this)); // fully-qualify path } /** * List all the entries of a directory * * Note that this operation is not atomic for a large directory. * The entries of a directory may be fetched from NameNode multiple times. * It only guarantees that each name occurs once if a directory * undergoes changes between the calls. 
*/
  /**
   * Lists the entries of the directory (or the single file) at {@code p}.
   *
   * <p>Small directories are returned from a single RPC; large directories are
   * fetched from the namenode in batches, resuming each batch from the last
   * name returned by the previous one.
   *
   * @param p path to list
   * @return the statuses of the directory entries, or {@code null} if the
   *         directory does not exist (or is deleted mid-iteration)
   * @throws IOException on communication failure with the namenode
   */
  @Override
  public FileStatus[] listStatus(Path p) throws IOException {
    String src = getPathName(p);

    // fetch the first batch of entries in the directory
    DirectoryListing thisListing = null;
    try {
      thisListing = dfs.listPaths(src, HdfsFileStatus.EMPTY_NAME);
    } catch (IOException e) {
      // NOTE(review): on failure a fresh DFSClient is created and the call is
      // retried exactly once; the original exception is discarded. Presumably
      // this works around a stale client connection -- confirm this silent
      // retry is intentional.
      this.dfs = new DFSClient(namenode, conf, statistics);
      thisListing = dfs.listPaths(src, HdfsFileStatus.EMPTY_NAME);
    }
    if (thisListing == null) { // the directory does not exist
      return null;
    }

    HdfsFileStatus[] partialListing = thisListing.getPartialListing();
    if (!thisListing.hasMore()) { // got all entries of the directory
      FileStatus[] stats = new FileStatus[partialListing.length];
      for (int i = 0; i < partialListing.length; i++) {
        stats[i] = makeQualified(partialListing[i], p);
      }
      statistics.incrementReadOps(1);
      return stats;
    }

    // The directory is too big to list in one RPC; it needs to fetch more.
    // Estimate the total number of entries so the result list can be pre-sized.
    int totalNumEntries =
        partialListing.length + thisListing.getRemainingEntries();
    ArrayList<FileStatus> listing = new ArrayList<FileStatus>(totalNumEntries);

    // add the first batch of entries to the array list
    for (HdfsFileStatus fileStatus : partialListing) {
      listing.add(makeQualified(fileStatus, p));
    }
    statistics.incrementLargeReadOps(1);

    // now fetch more entries, each batch starting after the last name seen
    do {
      thisListing = dfs.listPaths(src, thisListing.getLastName());

      if (thisListing == null) {
        return null; // the directory is deleted
      }

      partialListing = thisListing.getPartialListing();
      for (HdfsFileStatus fileStatus : partialListing) {
        listing.add(makeQualified(fileStatus, p));
      }
      statistics.incrementLargeReadOps(1);
    } while (thisListing.hasMore());

    return listing.toArray(new FileStatus[listing.size()]);
  }

  /**
   * Creates the directory {@code f} (and any missing parents) with the given
   * permission.
   */
  public boolean mkdirs(Path f, FsPermission permission) throws IOException {
    statistics.incrementWriteOps(1);
    return dfs.mkdirs(getPathName(f), permission);
  }

  /** {@inheritDoc} */
  public void close() throws IOException {
    try {
      // Delete paths registered for delete-on-exit before shutting down the
      // client; super.close() must run even if these fail.
      super.processDeleteOnExit();
      dfs.close();
    } finally {
      super.close();
    }
  }

  public String toString() {
    return "DFS[" + dfs + "]";
  }

  /** Returns the underlying {@link DFSClient} used by this filesystem. */
  public DFSClient getClient() {
    return dfs;
  }

  /** Immutable snapshot of cluster capacity / usage, all values in bytes. */
  public static class DiskStatus {
    private long capacity;
    private long dfsUsed;
    private long remaining;

    public DiskStatus(long capacity, long dfsUsed, long remaining) {
      this.capacity = capacity;
      this.dfsUsed = dfsUsed;
      this.remaining = remaining;
    }

    public long getCapacity() {
      return capacity;
    }

    public long getDfsUsed() {
      return dfsUsed;
    }

    public long getRemaining() {
      return remaining;
    }
  }

  /** Return the disk usage of the filesystem, including total capacity,
   * used space, and remaining space */
  public DiskStatus getDiskStatus() throws IOException {
    return dfs.getDiskStatus();
  }

  /** Return the total raw capacity of the filesystem, disregarding
   * replication .*/
  public long getRawCapacity() throws IOException{
    return dfs.totalRawCapacity();
  }

  /** Return the total raw used space in the filesystem, disregarding
   * replication .*/
  public long getRawUsed() throws IOException{
    return dfs.totalRawUsed();
  }

  /**
   * Returns count of blocks with no good replicas left. Normally should be
   * zero.
   *
   * @throws IOException
   */
  public long getMissingBlocksCount() throws IOException {
    return dfs.getMissingBlocksCount();
  }

  /**
   * Returns count of blocks with one of more replica missing.
   *
   * @throws IOException
   */
  public long getUnderReplicatedBlocksCount() throws IOException {
    return dfs.getUnderReplicatedBlocksCount();
  }

  /**
   * Returns count of blocks with at least one replica marked corrupt.
   *
   * @throws IOException
   */
  public long getCorruptBlocksCount() throws IOException {
    return dfs.getCorruptBlocksCount();
  }

  /** Return statistics for each datanode. */
  public DatanodeInfo[] getDataNodeStats() throws IOException {
    return dfs.datanodeReport(DatanodeReportType.ALL);
  }

  /**
   * Enter, leave or get safe mode.
   *
   * @see org.apache.hadoop.hdfs.protocol.ClientProtocol#setSafeMode(
   *    FSConstants.SafeModeAction)
   */
  public boolean setSafeMode(FSConstants.SafeModeAction action)
  throws IOException {
    return dfs.setSafeMode(action);
  }

  /**
   * Save namespace image.
   *
   * @see org.apache.hadoop.hdfs.protocol.ClientProtocol#saveNamespace()
   */
  public void saveNamespace() throws AccessControlException, IOException {
    dfs.saveNamespace();
  }

  /**
   * Refreshes the list of hosts and excluded hosts from the configured
   * files.
   */
  public void refreshNodes() throws IOException {
    dfs.refreshNodes();
  }

  /**
   * Finalize previously upgraded files system state.
   * @throws IOException
   */
  public void finalizeUpgrade() throws IOException {
    dfs.finalizeUpgrade();
  }

  /** Reports (and optionally drives) the progress of a distributed upgrade. */
  public UpgradeStatusReport distributedUpgradeProgress(UpgradeAction action
  ) throws IOException {
    return dfs.distributedUpgradeProgress(action);
  }

  /*
   * Requests the namenode to dump data structures into specified
   * file.
   */
  public void metaSave(String pathname) throws IOException {
    dfs.metaSave(pathname);
  }

  /**
   * We need to find the blocks that didn't match. Likely only one
   * is corrupt but we will report both to the namenode. In the future,
   * we can consider figuring out exactly which block is corrupt.
   *
   * @return {@code true} if both the data and checksum blocks were reported,
   *         {@code false} if either current block could not be determined
   */
  public boolean reportChecksumFailure(Path f,
    FSDataInputStream in, long inPos,
    FSDataInputStream sums, long sumsPos) {

    LocatedBlock lblocks[] = new LocatedBlock[2];

    // Find block in data stream.
    DFSClient.DFSDataInputStream dfsIn = (DFSClient.DFSDataInputStream) in;
    Block dataBlock = dfsIn.getCurrentBlock();
    if (dataBlock == null) {
      LOG.error("Error: Current block in data stream is null! ");
      return false;
    }
    DatanodeInfo[] dataNode = {dfsIn.getCurrentDatanode()};
    lblocks[0] = new LocatedBlock(dataBlock, dataNode);
    LOG.info("Found checksum error in data stream at block="
        + dataBlock + " on datanode="
        + dataNode[0].getName());

    // Find block in checksum stream
    DFSClient.DFSDataInputStream dfsSums = (DFSClient.DFSDataInputStream) sums;
    Block sumsBlock = dfsSums.getCurrentBlock();
    if (sumsBlock == null) {
      LOG.error("Error: Current block in checksum stream is null! ");
      return false;
    }
    DatanodeInfo[] sumsNode = {dfsSums.getCurrentDatanode()};
    lblocks[1] = new LocatedBlock(sumsBlock, sumsNode);
    LOG.info("Found checksum error in checksum stream at block="
        + sumsBlock + " on datanode="
        + sumsNode[0].getName());

    // Ask client to delete blocks.
    dfs.reportChecksumFailure(f.toString(), lblocks);

    return true;
  }

  /**
   * Returns the stat information about the file.
   * @throws FileNotFoundException if the file does not exist.
   */
  public FileStatus getFileStatus(Path f) throws IOException {
    statistics.incrementReadOps(1);
    HdfsFileStatus fi = dfs.getFileInfo(getPathName(f));
    if (fi != null) {
      return makeQualified(fi, f);
    } else {
      throw new FileNotFoundException("File does not exist: " + f);
    }
  }

  /** {@inheritDoc} */
  public MD5MD5CRC32FileChecksum getFileChecksum(Path f) throws IOException {
    statistics.incrementReadOps(1);
    return dfs.getFileChecksum(getPathName(f));
  }

  /** {@inheritDoc }*/
  public void setPermission(Path p, FsPermission permission
      ) throws IOException {
    statistics.incrementWriteOps(1);
    dfs.setPermission(getPathName(p), permission);
  }

  /** {@inheritDoc }*/
  public void setOwner(Path p, String username, String groupname
      ) throws IOException {
    // At least one of owner/group must be supplied for the call to be useful.
    if (username == null && groupname == null) {
      throw new IOException("username == null && groupname == null");
    }
    statistics.incrementWriteOps(1);
    dfs.setOwner(getPathName(p), username, groupname);
  }

  /** {@inheritDoc }*/
  public void setTimes(Path p, long mtime, long atime
      ) throws IOException {
    statistics.incrementWriteOps(1);
    dfs.setTimes(getPathName(p), mtime, atime);
  }

  @Override
  protected int getDefaultPort() {
    return NameNode.DEFAULT_PORT;
  }

  @Override
  public Token<DelegationTokenIdentifier> getDelegationToken(String renewer
      ) throws IOException {
    Token<DelegationTokenIdentifier> result =
      dfs.getDelegationToken(renewer == null ? null : new Text(renewer));
    return result;
  }

  /**
   * Delegation Token Operations
   * These are DFS only operations.
   */

  /**
   * Get a valid Delegation Token.
   *
   * @param renewer Name of the designated renewer for the token
   * @return Token<DelegationTokenIdentifier>
   * @throws IOException
   * @Deprecated use {@link #getDelegationToken(String)}
   */
  @Deprecated
  public Token<DelegationTokenIdentifier> getDelegationToken(Text renewer)
      throws IOException {
    return getDelegationToken(renewer.toString());
  }

  /**
   * Renew an existing delegation token.
   *
   * @param token delegation token obtained earlier
   * @return the new expiration time
   * @throws IOException
   * @deprecated Use Token.renew instead.
   */
  public long renewDelegationToken(Token<DelegationTokenIdentifier> token)
      throws InvalidToken, IOException {
    try {
      return token.renew(getConf());
    } catch (InterruptedException ie) {
      throw new RuntimeException("Caught interrupted", ie);
    }
  }

  /**
   * Cancel an existing delegation token.
   *
   * @param token delegation token
   * @throws IOException
   * @deprecated Use Token.cancel instead.
   */
  public void cancelDelegationToken(Token<DelegationTokenIdentifier> token)
      throws IOException {
    try {
      token.cancel(getConf());
    } catch (InterruptedException ie) {
      throw new RuntimeException("Caught interrupted", ie);
    }
  }

  /**
   * Requests the namenode to tell all datanodes to use a new, non-persistent
   * bandwidth value for dfs.balance.bandwidthPerSec.
   * The bandwidth parameter is the max number of bytes per second of network
   * bandwidth to be used by a datanode during balancing.
   *
   * @param bandwidth Balancer bandwidth in bytes per second for all datanodes.
   * @throws IOException
   */
  public void setBalancerBandwidth(long bandwidth) throws IOException {
    dfs.setBalancerBandwidth(bandwidth);
  }
}
/* * Copyright (C) 2016 The Flogger Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.flogger.backend.system; import static com.google.common.flogger.util.StaticMethodCaller.getInstanceFromSystemProperty; import com.google.common.flogger.backend.LoggerBackend; import com.google.common.flogger.backend.Platform; import com.google.common.flogger.context.ContextDataProvider; import java.util.ArrayList; import java.util.List; import java.util.ServiceLoader; import org.checkerframework.checker.nullness.compatqual.NullableDecl; /** * The default fluent logger platform for a server-side Java environment. * * <p>This class allows configuration via a number of service types. A single instance of each * service type may be provided, either via the classpath using <i>service providers</i> (see {@link * ServiceLoader}) or by system property. For most users, configuring one of these should just * require including the appropriate dependency. * * <p>If set, the system property for each service type takes precedence over any implementations * that may be found on the classpath. The value of the system property is expected to be of one of * two forms: * * <ul> * <li><b>A fully-qualified class name:</b> In this case, the platform will attempt to get an * instance of that class by invoking the public no-arg constructor. If the class defines a * public static no-arg {@code getInstance} method, the platform will call that instead. 
* <b>Note:</b> Support for {@code getInstance} is only provided to facilitate transition * from older service implementations that include a {@code getInstance} method and will * likely be removed in the future. * <li><b>A fully-qualified class name followed by "#" and the name of a static method:</b> In * this case, the platform will attempt to get an instance of that class by invoking either * the named no-arg static method or the public no-arg constructor. <b>Note:</b> This option * exists only for compatibility with previous Flogger behavior and may be removed in the * future; service implementations should prefer providing a no-arg public constructor rather * than a static method and system properties should prefer only including the class name. * </ul> * * <p>The services used by this platform are the following: * * <table> * <tr> * <th>Service Type</th> * <th>System Property</th> * <th>Default</th> * </tr> * <tr> * <td>{@link BackendFactory}</td> * <td>{@code flogger.backend_factory}</td> * <td>{@link SimpleBackendFactory}, a {@code java.util.logging} backend</td> * </tr> * <tr> * <td>{@link ContextDataProvider}</td> * <td>{@code flogger.logging_context}</td> * <td>A no-op {@code ContextDataProvider}</td> * </tr> * <tr> * <td>{@link Clock}</td> * <td>{@code flogger.clock}</td> * <td>{@link SystemClock}, a millisecond-precision clock</td> * </tr> * </table> */ // Non-final for testing. public class DefaultPlatform extends Platform { // System property names for properties expected to define "getters" for platform attributes. 
private static final String BACKEND_FACTORY = "flogger.backend_factory"; private static final String CONTEXT_DATA_PROVIDER = "flogger.logging_context"; private static final String CLOCK = "flogger.clock"; private final BackendFactory backendFactory; private final ContextDataProvider context; private final Clock clock; private final LogCallerFinder callerFinder; public DefaultPlatform() { // To avoid eagerly loading the default implementations of each service when they might not // be required, we return null from the loadService() method rather than accepting a default // instance. This avoids a bunch of potentially unnecessary static initialization. BackendFactory backendFactory = loadService(BackendFactory.class, BACKEND_FACTORY); this.backendFactory = backendFactory != null ? backendFactory : SimpleBackendFactory.getInstance(); ContextDataProvider contextDataProvider = loadService(ContextDataProvider.class, CONTEXT_DATA_PROVIDER); this.context = contextDataProvider != null ? contextDataProvider : ContextDataProvider.getNoOpProvider(); Clock clock = loadService(Clock.class, CLOCK); this.clock = clock != null ? clock : SystemClock.getInstance(); this.callerFinder = StackBasedCallerFinder.getInstance(); } /** * Attempts to load an implementation of the given {@code serviceType}: * * <ol> * <li>First looks for an implementation specified by the value of the given {@code * systemProperty}, if that system property is set correctly. If the property is set but * can't be used to get an instance of the service type, prints an error and returns {@code * null}. * <li>Then attempts to load an implementation from the classpath via {@code ServiceLoader}, if * there is exactly one. If there is more than one, prints an error and returns {@code * null}. * <li>If neither is present, returns {@code null}. 
* </ol> */ @NullableDecl private static <S> S loadService(Class<S> serviceType, String systemProperty) { // TODO(cgdecker): Throw an exception if configuration is present but invalid? // - If the system property is set but using it to get the service fails. // - If the system property is not set and more than one service is loaded by ServiceLoader // If no configuration is present, falling back to the default makes sense, but when invalid // configuration is present it may be best to attempt to fail fast. S service = getInstanceFromSystemProperty(systemProperty, serviceType); if (service != null) { // Service was loaded successfully via an explicitly overridden system property. return service; } List<S> loadedServices = new ArrayList<S>(); for (S loaded : ServiceLoader.load(serviceType)) { loadedServices.add(loaded); } switch (loadedServices.size()) { case 0: // Normal use of default service when nothing else exists. return null; case 1: // A single service implementation was found and loaded automatically. return loadedServices.get(0); default: System.err.printf( "Multiple implementations of service %s found on the classpath: %s%n" + "Ensure only the service implementation you want to use is included on the " + "classpath or else specify the service class at startup with the '%s' system " + "property. 
The default implementation will be used instead.%n", serviceType.getName(), loadedServices, systemProperty); return null; } } // Visible for testing DefaultPlatform( BackendFactory factory, ContextDataProvider context, Clock clock, LogCallerFinder callerFinder) { this.backendFactory = factory; this.context = context; this.clock = clock; this.callerFinder = callerFinder; } @Override protected LogCallerFinder getCallerFinderImpl() { return callerFinder; } @Override protected LoggerBackend getBackendImpl(String className) { return backendFactory.create(className); } @Override protected ContextDataProvider getContextDataProviderImpl() { return context; } @Override protected long getCurrentTimeNanosImpl() { return clock.getCurrentTimeNanos(); } @Override protected String getConfigInfoImpl() { return "Platform: " + getClass().getName() + "\n" + "BackendFactory: " + backendFactory + "\n" + "Clock: " + clock + "\n" + "ContextDataProvider: " + context + "\n" + "LogCallerFinder: " + callerFinder + "\n"; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.rules.handlers.script; import org.apache.nifi.annotation.behavior.Restricted; import org.apache.nifi.annotation.behavior.Restriction; import org.apache.nifi.annotation.documentation.CapabilityDescription; import org.apache.nifi.annotation.documentation.Tags; import org.apache.nifi.annotation.lifecycle.OnEnabled; import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.components.RequiredPermission; import org.apache.nifi.components.ValidationResult; import org.apache.nifi.context.PropertyContext; import org.apache.nifi.controller.ConfigurationContext; import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.rules.Action; import org.apache.nifi.rules.ActionHandler; import org.apache.nifi.rules.PropertyContextActionHandler; import org.apache.nifi.script.AbstractScriptedControllerService; import org.apache.nifi.script.ScriptingComponentHelper; import javax.script.Invocable; import javax.script.ScriptContext; import javax.script.ScriptEngine; import javax.script.ScriptException; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; 
import java.util.Map; import java.util.concurrent.atomic.AtomicReference; @Tags({"rules", "rules engine", "action", "action handler", "script", "invoke", "groovy", "python", "jython", "jruby", "ruby", "javascript", "js", "lua", "luaj"}) @CapabilityDescription("Allows the user to provide a scripted ActionHandler for custom firing of rules depending on the supplied facts. The script must set a variable 'actionHandler' to an " + "implementation of ActionHandler.") @Restricted( restrictions = { @Restriction( requiredPermission = RequiredPermission.EXECUTE_CODE, explanation = "Provides operator the ability to execute arbitrary code assuming all permissions that NiFi has.") } ) public class ScriptedActionHandler extends AbstractScriptedControllerService implements PropertyContextActionHandler { protected final AtomicReference<ActionHandler> actionHandler = new AtomicReference<>(); /** * Returns a list of property descriptors supported by this processor. The list always includes properties such as * script engine name, script file name, script body name, script arguments, and an external module path. If the * scripted processor also defines supported properties, those are added to the list as well. * * @return a List of PropertyDescriptor objects supported by this processor */ @Override protected List<PropertyDescriptor> getSupportedPropertyDescriptors() { synchronized (scriptingComponentHelper.isInitialized) { if (!scriptingComponentHelper.isInitialized.get()) { scriptingComponentHelper.createResources(); } } return Collections.unmodifiableList(scriptingComponentHelper.getDescriptors()); } public void setup() { if (scriptNeedsReload.get() || actionHandler.get() == null) { if (ScriptingComponentHelper.isFile(scriptingComponentHelper.getScriptPath())) { scriptNeedsReload.set(reloadScriptFile(scriptingComponentHelper.getScriptPath())); } else { scriptNeedsReload.set(reloadScriptBody(scriptingComponentHelper.getScriptBody())); } } } /** * Reloads the script ActionHandler. 
This must be called within the lock. * * @param scriptBody An input stream associated with the script content * @return Whether the script was successfully reloaded */ protected boolean reloadScript(final String scriptBody) { // note we are starting here with a fresh listing of validation // results since we are (re)loading a new/updated script. any // existing validation results are not relevant final Collection<ValidationResult> results = new HashSet<>(); try { // Create a single script engine, the Processor object is reused by each task if (scriptRunner == null) { scriptingComponentHelper.setupScriptRunners(1, scriptBody, getLogger()); scriptRunner = scriptingComponentHelper.scriptRunnerQ.poll(); } if (scriptRunner == null) { throw new ProcessException("No script runner available!"); } // get the engine and ensure its invocable ScriptEngine scriptEngine = scriptRunner.getScriptEngine(); if (scriptEngine instanceof Invocable) { final Invocable invocable = (Invocable) scriptEngine; // evaluate the script scriptRunner.run(scriptEngine.getBindings(ScriptContext.ENGINE_SCOPE)); // get configured processor from the script (if it exists) final Object obj = scriptRunner.getScriptEngine().get("actionHandler"); if (obj != null) { final ComponentLog logger = getLogger(); try { // set the logger if the processor wants it invocable.invokeMethod(obj, "setLogger", logger); } catch (final NoSuchMethodException nsme) { if (logger.isDebugEnabled()) { logger.debug("Configured script ActionHandler does not contain a setLogger method."); } } if (configurationContext != null) { try { // set the logger if the processor wants it invocable.invokeMethod(obj, "setConfigurationContext", configurationContext); } catch (final NoSuchMethodException nsme) { if (logger.isDebugEnabled()) { logger.debug("Configured script ActionHandler does not contain a setConfigurationContext method."); } } } // record the processor for use later final ActionHandler scriptedReader = invocable.getInterface(obj, 
ActionHandler.class); actionHandler.set(scriptedReader); } else { throw new ScriptException("No RecordReader was defined by the script."); } } } catch (final Exception ex) { final ComponentLog logger = getLogger(); final String message = "Unable to load script: " + ex.getLocalizedMessage(); logger.error(message, ex); results.add(new ValidationResult.Builder() .subject("ScriptValidation") .valid(false) .explanation("Unable to load script due to " + ex.getLocalizedMessage()) .input(scriptingComponentHelper.getScriptPath()) .build()); } // store the updated validation results validationResults.set(results); // return whether there was any issues loading the configured script return results.isEmpty(); } @Override @OnEnabled public void onEnabled(final ConfigurationContext context) { synchronized (scriptingComponentHelper.isInitialized) { if (!scriptingComponentHelper.isInitialized.get()) { scriptingComponentHelper.createResources(); } } super.onEnabled(context); // Call an non-interface method onEnabled(context), to allow a scripted ActionHandler the chance to set up as necessary if (scriptRunner != null) { final ScriptEngine scriptEngine = scriptRunner.getScriptEngine(); final Invocable invocable = (Invocable) scriptEngine; if (configurationContext != null) { try { // Get the actual object from the script engine, versus the proxy stored in ActionHandler. 
The object may have additional methods, // where ActionHandler is a proxied interface final Object obj = scriptRunner.getScriptEngine().get("actionHandler"); if (obj != null) { try { invocable.invokeMethod(obj, "onEnabled", context); } catch (final NoSuchMethodException nsme) { if (getLogger().isDebugEnabled()) { getLogger().debug("Configured script ActionHandler does not contain an onEnabled() method."); } } } else { throw new ScriptException("No ActionHandler was defined by the script."); } } catch (ScriptException se) { throw new ProcessException("Error executing onEnabled(context) method", se); } } } else { throw new ProcessException("Error creating ScriptRunner"); } } public void execute(PropertyContext context, Action action, Map<String, Object> facts) { // Attempt to call a non-ActionHandler interface method (i.e. execute(context, action, facts) from PropertyContextActionHandler) if (scriptRunner != null) { final ScriptEngine scriptEngine = scriptRunner.getScriptEngine(); final Invocable invocable = (Invocable) scriptEngine; try { // Get the actual object from the script engine, versus the proxy stored in ActionHandler. 
The object may have additional methods, // where ActionHandler is a proxied interface final Object obj = scriptRunner.getScriptEngine().get("actionHandler"); if (obj != null) { try { invocable.invokeMethod(obj, "execute", context, action, facts); } catch (final NoSuchMethodException nsme) { if (getLogger().isDebugEnabled()) { getLogger().debug("Configured script ActionHandler is not a PropertyContextActionHandler and has no execute(context, action, facts) method, falling back to" + "execute(action, facts)."); } execute(action, facts); } } else { throw new ScriptException("No ActionHandler was defined by the script."); } } catch (ScriptException se) { throw new ProcessException("Error executing onEnabled(context) method: " + se.getMessage(), se); } } else { throw new ProcessException("Error creating ScriptRunner"); } } @Override public void execute(Action action, Map<String, Object> facts) { if (actionHandler.get() != null) { actionHandler.get().execute(action, facts); } } }
/* * The MIT License * * Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi, Geoff Cummings * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package hudson.util; import com.google.common.base.Predicate; import com.google.common.collect.Iterables; import com.google.common.collect.Iterators; import hudson.model.AbstractBuild; import hudson.model.Item; import hudson.model.Job; import hudson.model.Node; import hudson.model.Result; import hudson.model.Run; import hudson.model.TopLevelItem; import hudson.model.View; import hudson.util.Iterators.CountingPredicate; import java.util.*; /** * {@link List} of {@link Run}s, sorted in the descending date order. 
 *
 * @author Kohsuke Kawaguchi
 */
public class RunList<R extends Run> extends AbstractList<R> {

    // Underlying (lazy) sequence of runs; the filter/limit methods re-wrap this in place.
    private Iterable<R> base;

    // Cached by size(): the last element seen during the full iteration.
    private R first;
    // Cached element count; null until size() has walked the whole iterable.
    private Integer size;

    public RunList() {
        base = Collections.emptyList();
    }

    public RunList(Job j) {
        base = j.getBuilds();
    }

    public RunList(View view) {// this is a type unsafe operation
        Set<Job> jobs = new HashSet<Job>();
        for (TopLevelItem item : view.getItems())
            jobs.addAll(item.getAllJobs());

        List<Iterable<R>> runs = new ArrayList<Iterable<R>>();
        for (Job job : jobs) {
            runs.add(job.getBuilds());
        }
        this.base = combine(runs);
    }

    public RunList(Collection<? extends Job> jobs) {
        List<Iterable<R>> src = new ArrayList<Iterable<R>>();
        for (Job j : jobs)
            src.add(j.getBuilds());
        this.base = combine(src);
    }

    // Merges several per-job build sequences into a single sequence in
    // descending timestamp order (assumes each input is already so ordered).
    private Iterable<R> combine(Iterable<Iterable<R>> jobs) {
        return Iterables.mergeSorted(jobs, new Comparator<R>() {
            public int compare(R o1, R o2) {
                // Descending order: newer (larger timestamp) runs come first.
                long lhs = o1.getTimeInMillis();
                long rhs = o2.getTimeInMillis();
                if (lhs > rhs) return -1;
                if (lhs < rhs) return 1;
                return 0;
            }
        });
    }

    private RunList(Iterable<R> c) {
        base = c;
    }

    @Override
    public Iterator<R> iterator() {
        return base.iterator();
    }

    /**
     * @deprecated as of 1.485
     *      {@link RunList}, despite its name, should be really used as {@link Iterable}, not as {@link List}.
     */
    @Override
    public int size() {
        if (size==null) {
            int sz=0;
            // Walk the whole iterable once, caching both the count and the last
            // element seen (which iterates oldest-last; see getFirstBuild()).
            for (R r : this) {
                first = r;
                sz++;
            }
            size = sz;
        }
        return size;
    }

    /**
     * @deprecated as of 1.485
     *      {@link RunList}, despite its name, should be really used as {@link Iterable}, not as {@link List}.
     */
    @Override
    public R get(int index) {
        return Iterators.get(iterator(),index);
    }

    /**
     * {@link AbstractList#subList(int, int)} isn't very efficient on our {@link Iterable} based implementation.
     * In fact the range check alone would require us to iterate all the elements,
     * so we'd be better off just copying into ArrayList.
     */
    @Override
    public List<R> subList(int fromIndex, int toIndex) {
        List<R> r = new ArrayList<R>();
        Iterator<R> itr = iterator();
        Iterators.skip(itr,fromIndex);
        for (int i=toIndex-fromIndex; i>0; i--) {
            r.add(itr.next());
        }
        return r;
    }

    @Override
    public int indexOf(Object o) {
        int index=0;
        for (R r : this) {
            if (r.equals(o))
                return index;
            index++;
        }
        return -1;
    }

    @Override
    public int lastIndexOf(Object o) {
        int a = -1;
        int index=0;
        for (R r : this) {
            if (r.equals(o))
                a = index;
            index++;
        }
        return a;
    }

    @Override
    public boolean isEmpty() {
        return !iterator().hasNext();
    }

    // Despite the name this returns the LAST element of the (newest-first)
    // iteration, i.e. the chronologically first build cached by size().
    public R getFirstBuild() {
        size();
        return first;
    }

    // First element of the newest-first iteration, i.e. the most recent build.
    public R getLastBuild() {
        Iterator<R> itr = iterator();
        return itr.hasNext() ? itr.next() : null;
    }

    public static <R extends Run> RunList<R> fromRuns(Collection<? extends R> runs) {
        return new RunList<R>((Iterable)runs);
    }

    /**
     * Returns elements that satisfy the given predicate.
     */
    // for compatibility reasons, this method doesn't create a new list but updates the current one
    private RunList<R> filter(Predicate<R> predicate) {
        // Invalidate the caches; the wrapped iterable will yield different elements.
        size = null;
        first = null;
        base = Iterables.filter(base,predicate);
        return this;
    }

    /**
     * Returns the first streak of the elements that satisfy the given predicate.
     *
     * For example, {@code filter([1,2,3,4],odd)==[1,3]} but {@code limit([1,2,3,4],odd)==[1]}.
     */
    private RunList<R> limit(final CountingPredicate<R> predicate) {
        size = null;
        first = null;
        final Iterable<R> nested = base;
        base = new Iterable<R>() {
            public Iterator<R> iterator() {
                return hudson.util.Iterators.limit(nested.iterator(),predicate);
            }

            @Override
            public String toString() {
                return Iterables.toString(this);
            }
        };
        return this;
    }

    /**
     * Return only the most recent builds.
     * <em>Warning:</em> this method mutates the original list and then returns it.
     *
     * @param n a count
     * @return the n most recent builds
     * @since 1.507
     */
    public RunList<R> limit(final int n) {
        return limit(new CountingPredicate<R>() {
            public boolean apply(int index, R input) {
                return index<n;
            }
        });
    }

    /**
     * Filter the list to non-successful builds only.
     * <em>Warning:</em> this method mutates the original list and then returns it.
     */
    public RunList<R> failureOnly() {
        return filter(new Predicate<R>() {
            public boolean apply(R r) {
                return r.getResult()!=Result.SUCCESS;
            }
        });
    }

    /**
     * Filter the list to builds above threshold.
     * <em>Warning:</em> this method mutates the original list and then returns it.
     * @since 1.517
     */
    public RunList<R> overThresholdOnly(final Result threshold) {
        return filter(new Predicate<R>() {
            public boolean apply(R r) {
                return (r.getResult() != null && r.getResult().isBetterOrEqualTo(threshold));
            }
        });
    }

    /**
     * Filter the list to builds on a single node only
     * <em>Warning:</em> this method mutates the original list and then returns it.
     */
    public RunList<R> node(final Node node) {
        return filter(new Predicate<R>() {
            public boolean apply(R r) {
                return (r instanceof AbstractBuild) && ((AbstractBuild)r).getBuiltOn()==node;
            }
        });
    }

    /**
     * Filter the list to regression builds only.
     * <em>Warning:</em> this method mutates the original list and then returns it.
     */
    public RunList<R> regressionOnly() {
        return filter(new Predicate<R>() {
            public boolean apply(R r) {
                return r.getBuildStatusSummary().isWorse;
            }
        });
    }

    /**
     * Filter the list by timestamp: keeps builds with
     * {@code start <= timestamp < end}.
     * <em>Warning:</em> this method mutates the original list and then returns it.
     */
    public RunList<R> byTimestamp(final long start, final long end) {
        // The list iterates newest-first, so once a build is older than 'end'
        // every later one also is: limit() cuts the streak, filter() trims the start.
        return
        limit(new CountingPredicate<R>() {
            public boolean apply(int index,R r) {
                return r.getTimeInMillis()<end;
            }
        }).filter(new Predicate<R>() {
            public boolean apply(R r) {
                return start<=r.getTimeInMillis();
            }
        });
    }

    /**
     * Reduce the size of the list by only leaving relatively new ones.
     * This also removes on-going builds, as RSS cannot be used to publish information
     * if it changes.
     * <em>Warning:</em> this method mutates the original list and then returns it.
     */
    public RunList<R> newBuilds() {
        GregorianCalendar cal = new GregorianCalendar();
        cal.add(Calendar.DAY_OF_YEAR, -7);
        final long t = cal.getTimeInMillis();

        // can't publish on-going builds
        return filter(new Predicate<R>() {
            public boolean apply(R r) {
                return !r.isBuilding();
            }
        })
        // put at least 10 builds, but otherwise ignore old builds
        .limit(new CountingPredicate<R>() {
            public boolean apply(int index, R r) {
                return index < 10 || r.getTimeInMillis() >= t;
            }
        });
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.planner.plan.nodes.exec.stream; import org.apache.flink.annotation.VisibleForTesting; import org.apache.flink.api.common.typeutils.TypeSerializer; import org.apache.flink.api.dag.Transformation; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.cep.EventComparator; import org.apache.flink.cep.nfa.aftermatch.AfterMatchSkipStrategy; import org.apache.flink.cep.nfa.compiler.NFACompiler; import org.apache.flink.cep.operator.CepOperator; import org.apache.flink.cep.pattern.Pattern; import org.apache.flink.cep.pattern.Quantifier; import org.apache.flink.cep.pattern.conditions.BooleanConditions; import org.apache.flink.cep.pattern.conditions.IterativeCondition; import org.apache.flink.streaming.api.transformations.OneInputTransformation; import org.apache.flink.streaming.api.windowing.time.Time; import org.apache.flink.table.api.TableConfig; import org.apache.flink.table.api.TableException; import org.apache.flink.table.data.RowData; import org.apache.flink.table.planner.codegen.CodeGenUtils; import org.apache.flink.table.planner.codegen.CodeGeneratorContext; import org.apache.flink.table.planner.codegen.MatchCodeGenerator; import 
org.apache.flink.table.planner.codegen.sort.ComparatorCodeGenerator; import org.apache.flink.table.planner.delegation.PlannerBase; import org.apache.flink.table.planner.plan.nodes.exec.ExecEdge; import org.apache.flink.table.planner.plan.nodes.exec.ExecNode; import org.apache.flink.table.planner.plan.nodes.exec.ExecNodeBase; import org.apache.flink.table.planner.plan.nodes.exec.InputProperty; import org.apache.flink.table.planner.plan.nodes.exec.MultipleTransformationTranslator; import org.apache.flink.table.planner.plan.nodes.exec.spec.MatchSpec; import org.apache.flink.table.planner.plan.nodes.exec.spec.SortSpec; import org.apache.flink.table.planner.plan.nodes.exec.utils.ExecNodeUtil; import org.apache.flink.table.planner.plan.utils.KeySelectorUtil; import org.apache.flink.table.planner.plan.utils.RexDefaultVisitor; import org.apache.flink.table.planner.utils.JavaScalaConversionUtil; import org.apache.flink.table.runtime.generated.GeneratedRecordComparator; import org.apache.flink.table.runtime.keyselector.RowDataKeySelector; import org.apache.flink.table.runtime.operators.match.PatternProcessFunctionRunner; import org.apache.flink.table.runtime.operators.match.RowDataEventComparator; import org.apache.flink.table.runtime.operators.sink.StreamRecordTimestampInserter; import org.apache.flink.table.runtime.typeutils.InternalTypeInfo; import org.apache.flink.table.runtime.typeutils.TypeCheckUtils; import org.apache.flink.table.types.logical.LogicalType; import org.apache.flink.table.types.logical.RowType; import org.apache.flink.util.MathUtils; import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonCreator; import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonIgnoreProperties; import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonProperty; import org.apache.calcite.rex.RexCall; import org.apache.calcite.rex.RexLiteral; import org.apache.calcite.rex.RexNode; import 
org.apache.calcite.sql.SqlMatchRecognize;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.tools.RelBuilder;

import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Optional;

import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.getPrecision;
import static org.apache.flink.util.Preconditions.checkArgument;
import static org.apache.flink.util.Preconditions.checkNotNull;

/**
 * Stream {@link ExecNode} which matches along with MATCH_RECOGNIZE.
 *
 * <p>Translates a SQL MATCH_RECOGNIZE clause into a Flink CEP {@link CepOperator}: the
 * row-pattern is converted to a CEP {@link Pattern} and the MEASURES/DEFINE expressions are
 * code-generated into an {@link IterativeCondition}/{@link PatternProcessFunctionRunner}.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class StreamExecMatch extends ExecNodeBase<RowData>
        implements StreamExecNode<RowData>, MultipleTransformationTranslator<RowData> {

    public static final String FIELD_NAME_MATCH_SPEC = "matchSpec";

    /** Specification of the MATCH_RECOGNIZE clause (pattern, defines, measures, after-match). */
    @JsonProperty(FIELD_NAME_MATCH_SPEC)
    private final MatchSpec matchSpec;

    public StreamExecMatch(
            MatchSpec matchSpec,
            InputProperty inputProperty,
            RowType outputType,
            String description) {
        this(
                matchSpec,
                getNewNodeId(),
                Collections.singletonList(inputProperty),
                outputType,
                description);
    }

    @JsonCreator
    public StreamExecMatch(
            @JsonProperty(FIELD_NAME_MATCH_SPEC) MatchSpec matchSpec,
            @JsonProperty(FIELD_NAME_ID) int id,
            @JsonProperty(FIELD_NAME_INPUT_PROPERTIES) List<InputProperty> inputProperties,
            @JsonProperty(FIELD_NAME_OUTPUT_TYPE) RowType outputType,
            @JsonProperty(FIELD_NAME_DESCRIPTION) String description) {
        super(id, inputProperties, outputType, description);
        // MATCH_RECOGNIZE is a single-input operator.
        checkArgument(inputProperties.size() == 1);
        this.matchSpec = checkNotNull(matchSpec);
    }

    @SuppressWarnings("unchecked")
    @Override
    protected Transformation<RowData> translateToPlanInternal(PlannerBase planner) {
        final ExecEdge inputEdge = getInputEdges().get(0);
        final Transformation<RowData> inputTransform =
                (Transformation<RowData>) inputEdge.translateToPlan(planner);
        final RowType inputRowType = (RowType) inputEdge.getOutputType();
        checkOrderKeys(inputRowType);
        final TableConfig config = planner.getTableConfig();
        final EventComparator<RowData> eventComparator =
                createEventComparator(config, inputRowType);
        // For rowtime ordering the event timestamp must be present in the StreamRecord.
        final Transformation<RowData> timestampedInputTransform =
                translateOrder(inputTransform, inputRowType);

        final Tuple2<Pattern<RowData, RowData>, List<String>> cepPatternAndNames =
                translatePattern(matchSpec, config, planner.getRelBuilder(), inputRowType);
        final Pattern<RowData, RowData> cepPattern = cepPatternAndNames.f0;

        // TODO remove this once it is supported in CEP library
        if (NFACompiler.canProduceEmptyMatches(cepPattern)) {
            throw new TableException(
                    "Patterns that can produce empty matches are not supported. There must be at least one non-optional state.");
        }

        // TODO remove this once it is supported in CEP library
        if (cepPattern.getQuantifier().hasProperty(Quantifier.QuantifierProperty.GREEDY)) {
            throw new TableException(
                    "Greedy quantifiers are not allowed as the last element of a Pattern yet. "
                            + "Finish your pattern with either a simple variable or reluctant quantifier.");
        }

        if (matchSpec.isAllRows()) {
            throw new TableException("All rows per match mode is not supported yet.");
        }

        final int[] partitionKeys = matchSpec.getPartition().getFieldIndices();
        // checkOrderKeys() guarantees the first order key is a time attribute.
        final SortSpec.SortFieldSpec timeOrderField = matchSpec.getOrderKeys().getFieldSpec(0);
        final LogicalType timeOrderFieldType =
                inputRowType.getTypeAt(timeOrderField.getFieldIndex());
        final boolean isProctime = TypeCheckUtils.isProcTime(timeOrderFieldType);
        final InternalTypeInfo<RowData> inputTypeInfo =
                (InternalTypeInfo<RowData>) inputTransform.getOutputType();
        final TypeSerializer<RowData> inputSerializer =
                inputTypeInfo.createSerializer(planner.getExecEnv().getConfig());
        final NFACompiler.NFAFactory<RowData> nfaFactory =
                NFACompiler.compileFactory(cepPattern, false);
        final MatchCodeGenerator generator =
                new MatchCodeGenerator(
                        new CodeGeneratorContext(config),
                        planner.getRelBuilder(),
                        false, // nullableInput
                        JavaScalaConversionUtil.toScala(cepPatternAndNames.f1),
                        JavaScalaConversionUtil.toScala(Optional.empty()),
                        CodeGenUtils.DEFAULT_COLLECTOR_TERM());
        // The input row type must be bound before generating the measures expression.
        generator.bindInput(
                inputRowType,
                CodeGenUtils.DEFAULT_INPUT1_TERM(),
                JavaScalaConversionUtil.toScala(Optional.empty()));
        final PatternProcessFunctionRunner patternProcessFunction =
                generator.generateOneRowPerMatchExpression(
                        (RowType) getOutputType(), partitionKeys, matchSpec.getMeasures());
        final CepOperator<RowData, RowData, RowData> operator =
                new CepOperator<>(
                        inputSerializer,
                        isProctime,
                        nfaFactory,
                        eventComparator,
                        cepPattern.getAfterMatchSkipStrategy(),
                        patternProcessFunction,
                        null);
        final OneInputTransformation<RowData, RowData> transform =
                ExecNodeUtil.createOneInputTransformation(
                        timestampedInputTransform,
                        getOperatorName(config),
                        getOperatorDescription(config),
                        operator,
                        InternalTypeInfo.of(getOutputType()),
                        timestampedInputTransform.getParallelism());
        // Key the CEP state by the PARTITION BY columns.
        final RowDataKeySelector selector =
                KeySelectorUtil.getRowDataSelector(partitionKeys, inputTypeInfo);
        transform.setStateKeySelector(selector);
        transform.setStateKeyType(selector.getProducedType());

        if (inputsContainSingleton()) {
            transform.setParallelism(1);
            transform.setMaxParallelism(1);
        }
        return transform;
    }

    /**
     * Validates the ORDER BY clause: the primary order key must be a time attribute (rowtime or
     * proctime) and must be ascending.
     *
     * @throws TableException if the order keys violate these constraints
     */
    private void checkOrderKeys(RowType inputRowType) {
        SortSpec orderKeys = matchSpec.getOrderKeys();
        if (orderKeys.getFieldSize() == 0) {
            throw new TableException("You must specify either rowtime or proctime for order by.");
        }

        SortSpec.SortFieldSpec timeOrderField = orderKeys.getFieldSpec(0);
        int timeOrderFieldIdx = timeOrderField.getFieldIndex();
        LogicalType timeOrderFieldType = inputRowType.getTypeAt(timeOrderFieldIdx);
        // need to identify time between others order fields. Time needs to be first sort element
        if (!TypeCheckUtils.isRowTime(timeOrderFieldType)
                && !TypeCheckUtils.isProcTime(timeOrderFieldType)) {
            throw new TableException(
                    "You must specify either rowtime or proctime for order by as the first one.");
        }

        // time ordering needs to be ascending
        if (!orderKeys.getAscendingOrders()[0]) {
            throw new TableException(
                    "Primary sort order of a streaming table must be ascending on time.");
        }
    }

    /**
     * Creates a comparator for secondary order keys, or returns {@code null} when ordering is by
     * time only (the CEP operator then relies solely on timestamps).
     */
    private EventComparator<RowData> createEventComparator(
            TableConfig config, RowType inputRowType) {
        SortSpec orderKeys = matchSpec.getOrderKeys();
        if (orderKeys.getFieldIndices().length > 1) {
            GeneratedRecordComparator rowComparator =
                    ComparatorCodeGenerator.gen(config, "RowDataComparator", inputRowType, orderKeys);
            return new RowDataEventComparator(rowComparator);
        } else {
            return null;
        }
    }

    /**
     * For rowtime ordering, inserts an operator that copies the rowtime column into the
     * {@code StreamRecord} timestamp; for proctime the input is returned unchanged.
     */
    private Transformation<RowData> translateOrder(
            Transformation<RowData> inputTransform, RowType inputRowType) {
        SortSpec.SortFieldSpec timeOrderField = matchSpec.getOrderKeys().getFieldSpec(0);
        int timeOrderFieldIdx = timeOrderField.getFieldIndex();
        LogicalType timeOrderFieldType = inputRowType.getTypeAt(timeOrderFieldIdx);
        if (TypeCheckUtils.isRowTime(timeOrderFieldType)) {
            // copy the rowtime field into the StreamRecord timestamp field
            int precision = getPrecision(timeOrderFieldType);
            Transformation<RowData> transform =
                    ExecNodeUtil.createOneInputTransformation(
                            inputTransform,
                            "StreamRecordTimestampInserter",
                            String.format(
                                    "StreamRecordTimestampInserter(rowtime field: %s)",
                                    timeOrderFieldIdx),
                            new StreamRecordTimestampInserter(timeOrderFieldIdx, precision),
                            inputTransform.getOutputType(),
                            inputTransform.getParallelism());
            if (inputsContainSingleton()) {
                transform.setParallelism(1);
                transform.setMaxParallelism(1);
            }
            return transform;
        } else {
            return inputTransform;
        }
    }

    /**
     * Translates the row-pattern of the MATCH_RECOGNIZE clause into a CEP {@link Pattern},
     * applying the WITHIN interval if present.
     *
     * @return the compiled pattern together with the pattern variable names in definition order
     */
    @VisibleForTesting
    public static Tuple2<Pattern<RowData, RowData>, List<String>> translatePattern(
            MatchSpec matchSpec, TableConfig config, RelBuilder relBuilder, RowType inputRowType) {
        final PatternVisitor patternVisitor =
                new PatternVisitor(config, relBuilder, inputRowType, matchSpec);

        final Pattern<RowData, RowData> cepPattern;
        if (matchSpec.getInterval().isPresent()) {
            Time interval = translateTimeBound(matchSpec.getInterval().get());
            cepPattern = matchSpec.getPattern().accept(patternVisitor).within(interval);
        } else {
            cepPattern = matchSpec.getPattern().accept(patternVisitor);
        }
        return new Tuple2<>(cepPattern, new ArrayList<>(patternVisitor.names));
    }

    /** Converts a constant WITHIN interval to a millisecond {@link Time} bound. */
    private static Time translateTimeBound(RexNode interval) {
        if (interval instanceof RexLiteral) {
            final RexLiteral l = (RexLiteral) interval;
            if (l.getTypeName().getFamily() == SqlTypeFamily.INTERVAL_DAY_TIME) {
                return Time.milliseconds(l.getValueAs(Long.class));
            }
        }
        throw new TableException(
                "Only constant intervals with millisecond resolution are supported as time constraints of patterns.");
    }

    /** The visitor to traverse the pattern RexNode. */
    private static class PatternVisitor extends RexDefaultVisitor<Pattern<RowData, RowData>> {
        private final TableConfig config;
        private final RelBuilder relBuilder;
        private final RowType inputRowType;
        private final MatchSpec matchSpec;
        // Pattern variable names in encounter order; also used for uniqueness checks.
        private final LinkedHashSet<String> names;
        // The pattern built so far; mutated as the RexNode tree is traversed.
        private Pattern<RowData, RowData> pattern;

        public PatternVisitor(
                TableConfig config, RelBuilder relBuilder, RowType inputRowType, MatchSpec matchSpec) {
            this.config = config;
            this.relBuilder = relBuilder;
            this.inputRowType = inputRowType;
            this.matchSpec = matchSpec;
            this.names = new LinkedHashSet<>();
        }

        /**
         * A literal is a pattern variable; attaches its DEFINE condition (or an always-true
         * condition when the variable has no definition).
         */
        @Override
        public Pattern<RowData, RowData> visitLiteral(RexLiteral literal) {
            String patternName = literal.getValueAs(String.class);
            pattern = translateSingleVariable(pattern, patternName);

            RexNode patternDefinition = matchSpec.getPatternDefinitions().get(patternName);
            if (patternDefinition != null) {
                MatchCodeGenerator generator =
                        new MatchCodeGenerator(
                                new CodeGeneratorContext(config),
                                relBuilder,
                                false, // nullableInput
                                JavaScalaConversionUtil.toScala(new ArrayList<>(names)),
                                JavaScalaConversionUtil.toScala(Optional.of(patternName)),
                                CodeGenUtils.DEFAULT_COLLECTOR_TERM());
                generator.bindInput(
                        inputRowType,
                        CodeGenUtils.DEFAULT_INPUT1_TERM(),
                        JavaScalaConversionUtil.toScala(Optional.empty()));
                IterativeCondition<RowData> condition =
                        generator.generateIterativeCondition(patternDefinition);
                return pattern.where(condition);
            } else {
                return pattern.where(BooleanConditions.trueFunction());
            }
        }

        /**
         * Handles pattern combinators (concatenation and quantifiers); unsupported combinators
         * (alternation, permutation, exclusion) are rejected.
         */
        @Override
        public Pattern<RowData, RowData> visitCall(RexCall call) {
            SqlOperator operator = call.getOperator();
            if (operator == SqlStdOperatorTable.PATTERN_CONCAT) {
                pattern = call.operands.get(0).accept(this);
                pattern = call.operands.get(1).accept(this);
                return pattern;
            } else if (operator == SqlStdOperatorTable.PATTERN_QUANTIFIER) {
                final RexLiteral name;
                if (call.operands.get(0) instanceof RexLiteral) {
                    name = (RexLiteral) call.operands.get(0);
                } else {
                    throw new TableException(
                            String.format(
                                    "Expression not supported: %s Group patterns are not supported yet.",
                                    call.operands.get(0)));
                }

                pattern = name.accept(this);
                // Operands 1/2 are the min/max repetitions (-1 = unbounded).
                int startNum =
                        MathUtils.checkedDownCast(
                                ((RexLiteral) call.operands.get(1)).getValueAs(Long.class));
                int endNum =
                        MathUtils.checkedDownCast(
                                ((RexLiteral) call.operands.get(2)).getValueAs(Long.class));
                // Operand 3 is the reluctant flag, hence the negation.
                boolean isGreedy = !((RexLiteral) call.operands.get(3)).getValueAs(Boolean.class);

                return applyQuantifier(pattern, startNum, endNum, isGreedy);
            } else if (operator == SqlStdOperatorTable.PATTERN_ALTER) {
                throw new TableException(
                        String.format(
                                "Expression not supported: %s. Currently, CEP doesn't support branching patterns.",
                                call));
            } else if (operator == SqlStdOperatorTable.PATTERN_PERMUTE) {
                throw new TableException(
                        String.format(
                                "Expression not supported: %s. Currently, CEP doesn't support PERMUTE patterns.",
                                call));
            } else if (operator == SqlStdOperatorTable.PATTERN_EXCLUDE) {
                throw new TableException(
                        String.format(
                                "Expression not supported: %s. Currently, CEP doesn't support '{-' '-}' patterns.",
                                call));
            } else {
                throw new TableException("This should not happen.");
            }
        }

        @Override
        public Pattern<RowData, RowData> visitNode(RexNode rexNode) {
            throw new TableException(
                    String.format("Unsupported expression within Pattern: [%s]", rexNode));
        }

        /**
         * Appends a single pattern variable to the pattern built so far (or starts a new pattern
         * with the configured skip strategy), enforcing variable-name uniqueness.
         */
        private Pattern<RowData, RowData> translateSingleVariable(
                Pattern<RowData, RowData> previousPattern, String patternName) {
            if (names.contains(patternName)) {
                throw new TableException(
                        "Pattern variables must be unique. That might change in the future.");
            } else {
                names.add(patternName);
            }

            if (previousPattern != null) {
                return previousPattern.next(patternName);
            } else {
                return Pattern.begin(patternName, translateSkipStrategy());
            }
        }

        /** Maps the AFTER MATCH clause of the query to a CEP {@link AfterMatchSkipStrategy}. */
        private AfterMatchSkipStrategy translateSkipStrategy() {
            switch (matchSpec.getAfter().getKind()) {
                case LITERAL:
                    SqlMatchRecognize.AfterOption afterOption =
                            ((RexLiteral) matchSpec.getAfter())
                                    .getValueAs(SqlMatchRecognize.AfterOption.class);
                    switch (afterOption) {
                        case SKIP_PAST_LAST_ROW:
                            return AfterMatchSkipStrategy.skipPastLastEvent();
                        case SKIP_TO_NEXT_ROW:
                            return AfterMatchSkipStrategy.skipToNext();
                        default:
                            throw new TableException("This should not happen.");
                    }
                case SKIP_TO_FIRST:
                    return AfterMatchSkipStrategy.skipToFirst(getPatternTarget())
                            .throwExceptionOnMiss();
                case SKIP_TO_LAST:
                    return AfterMatchSkipStrategy.skipToLast(getPatternTarget())
                            .throwExceptionOnMiss();
                default:
                    throw new TableException(
                            String.format(
                                    "Corrupted query tree. Unexpected %s for after match strategy.",
                                    matchSpec.getAfter()));
            }
        }

        /** Extracts the target pattern variable of a SKIP TO FIRST/LAST clause. */
        private String getPatternTarget() {
            return ((RexLiteral) ((RexCall) matchSpec.getAfter()).getOperands().get(0))
                    .getValueAs(String.class);
        }

        /**
         * Applies a repetition quantifier ({@code *}, {@code +}, {@code ?}, {@code {n,m}}) to the
         * pattern; {@code endNum == -1} means unbounded.
         */
        private Pattern<RowData, RowData> applyQuantifier(
                Pattern<RowData, RowData> pattern, int startNum, int endNum, boolean greedy) {
            boolean isOptional = startNum == 0 && endNum == 1;

            final Pattern<RowData, RowData> newPattern;
            if (startNum == 0 && endNum == -1) { // zero or more
                newPattern = pattern.oneOrMore().optional().consecutive();
            } else if (startNum == 1 && endNum == -1) { // one or more
                newPattern = pattern.oneOrMore().consecutive();
            } else if (isOptional) { // optional
                newPattern = pattern.optional();
            } else if (endNum != -1) { // times
                newPattern = pattern.times(startNum, endNum).consecutive();
            } else { // times or more
                newPattern = pattern.timesOrMore(startNum).consecutive();
            }

            // Greediness is a no-op for fixed-size or optional-only quantifiers.
            if (greedy && (isOptional || startNum == endNum)) {
                return newPattern;
            } else if (greedy) {
                return newPattern.greedy();
            } else if (isOptional) {
                throw new TableException("Reluctant optional variables are not supported yet.");
            } else {
                return newPattern;
            }
        }
    }
}
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.rules.java; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.analysis.TransitiveInfoProvider; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder; import com.google.devtools.build.lib.collect.nestedset.Order; import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable; import com.google.devtools.build.lib.vfs.PathFragment; import java.util.Collection; import java.util.Map; /** * A Provider describing the java sources directly belonging to a java rule. 
*/ @Immutable public final class JavaSourceInfoProvider implements TransitiveInfoProvider { private final Collection<Artifact> sourceFiles; private final Collection<Artifact> sourceJars; private final Collection<Artifact> jarFiles; private final Collection<Artifact> sourceJarsForJarFiles; private final Map<PathFragment, Artifact> resources; private final Collection<String> processorNames; private final NestedSet<Artifact> processorPath; private JavaSourceInfoProvider( Collection<Artifact> sourceFiles, Collection<Artifact> sourceJars, Collection<Artifact> jarFiles, Collection<Artifact> sourceJarsForJarFiles, Map<PathFragment, Artifact> resources, Collection<String> processorNames, NestedSet<Artifact> processorPath) { this.sourceFiles = sourceFiles; this.sourceJars = sourceJars; this.jarFiles = jarFiles; this.sourceJarsForJarFiles = sourceJarsForJarFiles; this.resources = resources; this.processorNames = processorNames; this.processorPath = processorPath; } /** Gets the original Java source files provided as inputs to this rule. */ public Collection<Artifact> getSourceFiles() { return sourceFiles; } /** * Gets the original source jars provided as inputs to this rule. * * <p>These should contain Java source files, but can contain other files as well. */ public Collection<Artifact> getSourceJars() { return sourceJars; } /** * Gets the original pre-built jars provided as inputs to this rule. * * <p>These should be used where .class files are needed or wanted in place of recompiling the * sources from {@link #getSourceJarsForJarFiles()}, as this is the source of truth used by the * normal Java machinery. */ public Collection<Artifact> getJarFiles() { return jarFiles; } /** * Gets the source jars containing the sources of the jars contained in {@link #getJarFiles}. * * <p>These should be used in place of {@link #getJarFiles()} if and only if source is required. 
*/ public Collection<Artifact> getSourceJarsForJarFiles() { return sourceJarsForJarFiles; } /** * Gets the Java resources which were included in this rule's output. * * <p>Each key in the map (a path within the jar) should correspond to the artifact which belongs * at that path. The path fragment should be some suffix of the artifact's exec path. */ public Map<PathFragment, Artifact> getResources() { return resources; } /** Gets the names of the annotation processors which operate on this rule's sources. */ public Collection<String> getProcessorNames() { return processorNames; } /** Gets the classpath for the annotation processors which operate on this rule's sources. */ public NestedSet<Artifact> getProcessorPath() { return processorPath; } /** * Constructs a JavaSourceInfoProvider using the sources in the given JavaTargetAttributes. * * @param attributes the object from which to draw the sources * @param semantics semantics used to find the path for a resource within the jar */ public static JavaSourceInfoProvider fromJavaTargetAttributes( JavaTargetAttributes attributes, JavaSemantics semantics) { return new Builder() .setSourceFiles(attributes.getSourceFiles()) .setSourceJars(attributes.getSourceJars()) .setResources(attributes.getResources()) .setProcessorNames(attributes.getProcessorNames()) .setProcessorPath(attributes.getProcessorPath()) .build(); } /** Builder class for constructing JavaSourceInfoProviders. 
*/ public static final class Builder { private Collection<Artifact> sourceFiles = ImmutableList.<Artifact>of(); private Collection<Artifact> sourceJars = ImmutableList.<Artifact>of(); private Collection<Artifact> jarFiles = ImmutableList.<Artifact>of(); private Collection<Artifact> sourceJarsForJarFiles = ImmutableList.<Artifact>of(); private Map<PathFragment, Artifact> resources = ImmutableMap.<PathFragment, Artifact>of(); private Collection<String> processorNames = ImmutableList.<String>of(); private NestedSet<Artifact> processorPath = NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER); /** Sets the source files included as part of the sources of this rule. */ public Builder setSourceFiles(Collection<Artifact> sourceFiles) { this.sourceFiles = Preconditions.checkNotNull(sourceFiles); return this; } /** Sets the source jars included as part of the sources of this rule. */ public Builder setSourceJars(Collection<Artifact> sourceJars) { this.sourceJars = Preconditions.checkNotNull(sourceJars); return this; } /** * Sets the pre-built jar files included as part of the sources of this rule. */ public Builder setJarFiles(Collection<Artifact> jarFiles) { this.jarFiles = Preconditions.checkNotNull(jarFiles); return this; } /** * Sets the source jars corresponding to the jar files included in this rule. * * <p>Used by, e.g., the srcjars attribute of {@link JavaImport}. */ public Builder setSourceJarsForJarFiles(Collection<Artifact> sourceJarsForJarFiles) { this.sourceJarsForJarFiles = Preconditions.checkNotNull(sourceJarsForJarFiles); return this; } /** * Sets the resources included in this rule. * * <p>Each key in the map (a path within the jar) should correspond to the artifact which * belongs at that path. The path fragment should be some tail of the artifact's exec path. 
*/ public Builder setResources(Map<PathFragment, Artifact> resources) { this.resources = Preconditions.checkNotNull(resources); return this; } /** Sets the names of the annotation processors used by this rule. */ public Builder setProcessorNames(Collection<String> processorNames) { this.processorNames = Preconditions.checkNotNull(processorNames); return this; } /** Sets the classpath used by this rule for annotation processing. */ public Builder setProcessorPath(NestedSet<Artifact> processorPath) { Preconditions.checkNotNull(processorPath); this.processorPath = processorPath; return this; } /** Constructs the JavaSourceInfoProvider from the provided Java sources. */ public JavaSourceInfoProvider build() { return new JavaSourceInfoProvider( sourceFiles, sourceJars, jarFiles, sourceJarsForJarFiles, resources, processorNames, processorPath); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.data; import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.core.memory.MemorySegment; import org.apache.flink.table.types.logical.DistinctType; import org.apache.flink.table.types.logical.LogicalType; import org.apache.flink.table.types.logical.RowType; import org.apache.flink.table.types.logical.StructuredType; import org.apache.flink.types.RowKind; import javax.annotation.Nullable; import java.io.Serializable; import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.getFieldCount; import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.getPrecision; import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.getScale; /** * Base interface for an internal data structure representing data of {@link RowType} and other * (possibly nested) structured types such as {@link StructuredType} in the table ecosystem. * * <p>All top-level records that are travelling through Table API or SQL pipelines during runtime * are instances of this interface. Each {@link RowData} contains a {@link RowKind} which represents * the kind of change that a row describes in a changelog. 
The {@link RowKind} is just metadata * information of row and thus not part of the table's schema, i.e., not a dedicated field. * * <p>Note: All fields of this data structure must be internal data structures. * * <p>The {@link RowData} interface has different implementations which are designed for different * scenarios: * * <ul> * <li>The binary-oriented implementation {@code BinaryRowData} is backed by references to {@link * MemorySegment} instead of using Java objects to reduce the serialization/deserialization * overhead. * <li>The object-oriented implementation {@link GenericRowData} is backed by an array of Java * {@link Object} which is easy to construct and efficient to update. * </ul> * * <p>{@link GenericRowData} is intended for public use and has stable behavior. It is recommended * to construct instances of {@link RowData} with this class if internal data structures are * required. * * <p>The mappings from Flink's Table API and SQL data types to the internal data structures are * listed in the following table: * * <pre> * +--------------------------------+-----------------------------------------+ * | SQL Data Types | Internal Data Structures | * +--------------------------------+-----------------------------------------+ * | BOOLEAN | boolean | * +--------------------------------+-----------------------------------------+ * | CHAR / VARCHAR / STRING | {@link StringData} | * +--------------------------------+-----------------------------------------+ * | BINARY / VARBINARY / BYTES | byte[] | * +--------------------------------+-----------------------------------------+ * | DECIMAL | {@link DecimalData} | * +--------------------------------+-----------------------------------------+ * | TINYINT | byte | * +--------------------------------+-----------------------------------------+ * | SMALLINT | short | * +--------------------------------+-----------------------------------------+ * | INT | int | * 
+--------------------------------+-----------------------------------------+ * | BIGINT | long | * +--------------------------------+-----------------------------------------+ * | FLOAT | float | * +--------------------------------+-----------------------------------------+ * | DOUBLE | double | * +--------------------------------+-----------------------------------------+ * | DATE | int (number of days since epoch) | * +--------------------------------+-----------------------------------------+ * | TIME | int (number of milliseconds of the day) | * +--------------------------------+-----------------------------------------+ * | TIMESTAMP | {@link TimestampData} | * +--------------------------------+-----------------------------------------+ * | TIMESTAMP WITH LOCAL TIME ZONE | {@link TimestampData} | * +--------------------------------+-----------------------------------------+ * | INTERVAL YEAR TO MONTH | int (number of months) | * +--------------------------------+-----------------------------------------+ * | INTERVAL DAY TO MONTH | long (number of milliseconds) | * +--------------------------------+-----------------------------------------+ * | ROW / structured types | {@link RowData} | * +--------------------------------+-----------------------------------------+ * | ARRAY | {@link ArrayData} | * +--------------------------------+-----------------------------------------+ * | MAP / MULTISET | {@link MapData} | * +--------------------------------+-----------------------------------------+ * | RAW | {@link RawValueData} | * +--------------------------------+-----------------------------------------+ * </pre> * * <p>Nullability is always handled by the container data structure. */ @PublicEvolving public interface RowData { /** * Returns the number of fields in this row. * * <p>The number does not include {@link RowKind}. It is kept separately. */ int getArity(); /** * Returns the kind of change that this row describes in a changelog. 
 *
 * @see RowKind
 */
RowKind getRowKind();

/**
 * Sets the kind of change that this row describes in a changelog.
 *
 * @see RowKind
 */
void setRowKind(RowKind kind);

// ------------------------------------------------------------------------------------------
// Read-only accessor methods
// ------------------------------------------------------------------------------------------

/** Returns true if the field is null at the given position. */
boolean isNullAt(int pos);

/** Returns the boolean value at the given position. */
boolean getBoolean(int pos);

/** Returns the byte value at the given position. */
byte getByte(int pos);

/** Returns the short value at the given position. */
short getShort(int pos);

/** Returns the integer value at the given position. */
int getInt(int pos);

/** Returns the long value at the given position. */
long getLong(int pos);

/** Returns the float value at the given position. */
float getFloat(int pos);

/** Returns the double value at the given position. */
double getDouble(int pos);

/** Returns the string value at the given position. */
StringData getString(int pos);

/**
 * Returns the decimal value at the given position.
 *
 * <p>The precision and scale are required to determine whether the decimal value was stored in
 * a compact representation (see {@link DecimalData}).
 */
DecimalData getDecimal(int pos, int precision, int scale);

/**
 * Returns the timestamp value at the given position.
 *
 * <p>The precision is required to determine whether the timestamp value was stored in a compact
 * representation (see {@link TimestampData}).
 */
TimestampData getTimestamp(int pos, int precision);

/** Returns the raw value at the given position. */
<T> RawValueData<T> getRawValue(int pos);

/** Returns the binary value at the given position. */
byte[] getBinary(int pos);

/** Returns the array value at the given position. */
ArrayData getArray(int pos);

/** Returns the map value at the given position. */
MapData getMap(int pos);

/**
 * Returns the row value at the given position.
 *
 * <p>The number of fields is required to correctly extract the row.
 */
RowData getRow(int pos, int numFields);

// ------------------------------------------------------------------------------------------
// Access Utilities
// ------------------------------------------------------------------------------------------

/**
 * Creates an accessor for getting elements in an internal row data structure at the given
 * position.
 *
 * <p>The returned getter resolves the internal representation for the given logical type once,
 * so repeated per-row access avoids re-dispatching on the type. If the field type is nullable,
 * the getter returns {@code null} for null fields; otherwise the null check is skipped.
 *
 * @param fieldType the element type of the row
 * @param fieldPos the position of the element within the row
 */
static FieldGetter createFieldGetter(LogicalType fieldType, int fieldPos) {
    final FieldGetter fieldGetter;
    // ordered by type root definition
    switch (fieldType.getTypeRoot()) {
        case CHAR:
        case VARCHAR:
            fieldGetter = row -> row.getString(fieldPos);
            break;
        case BOOLEAN:
            fieldGetter = row -> row.getBoolean(fieldPos);
            break;
        case BINARY:
        case VARBINARY:
            fieldGetter = row -> row.getBinary(fieldPos);
            break;
        case DECIMAL:
            // Capture precision/scale now so the lambda stays allocation-free per access.
            final int decimalPrecision = getPrecision(fieldType);
            final int decimalScale = getScale(fieldType);
            fieldGetter = row -> row.getDecimal(fieldPos, decimalPrecision, decimalScale);
            break;
        case TINYINT:
            fieldGetter = row -> row.getByte(fieldPos);
            break;
        case SMALLINT:
            fieldGetter = row -> row.getShort(fieldPos);
            break;
        case INTEGER:
        case DATE:
        case TIME_WITHOUT_TIME_ZONE:
        case INTERVAL_YEAR_MONTH:
            // All four types are internally represented as int (see class javadoc table).
            fieldGetter = row -> row.getInt(fieldPos);
            break;
        case BIGINT:
        case INTERVAL_DAY_TIME:
            fieldGetter = row -> row.getLong(fieldPos);
            break;
        case FLOAT:
            fieldGetter = row -> row.getFloat(fieldPos);
            break;
        case DOUBLE:
            fieldGetter = row -> row.getDouble(fieldPos);
            break;
        case TIMESTAMP_WITHOUT_TIME_ZONE:
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            final int timestampPrecision = getPrecision(fieldType);
            fieldGetter = row -> row.getTimestamp(fieldPos, timestampPrecision);
            break;
        case TIMESTAMP_WITH_TIME_ZONE:
            throw new UnsupportedOperationException();
        case ARRAY:
            fieldGetter = row -> row.getArray(fieldPos);
            break;
        case MULTISET:
        case MAP:
            fieldGetter = row -> row.getMap(fieldPos);
            break;
        case ROW:
        case STRUCTURED_TYPE:
            final int rowFieldCount = getFieldCount(fieldType);
            fieldGetter = row -> row.getRow(fieldPos, rowFieldCount);
            break;
        case DISTINCT_TYPE:
            // A distinct type is accessed via its underlying source type.
            fieldGetter = createFieldGetter(((DistinctType) fieldType).getSourceType(), fieldPos);
            break;
        case RAW:
            fieldGetter = row -> row.getRawValue(fieldPos);
            break;
        case NULL:
        case SYMBOL:
        case UNRESOLVED:
        default:
            throw new IllegalArgumentException();
    }
    if (!fieldType.isNullable()) {
        return fieldGetter;
    }
    // Wrap with a null check only for nullable types.
    return row -> {
        if (row.isNullAt(fieldPos)) {
            return null;
        }
        return fieldGetter.getFieldOrNull(row);
    };
}

/**
 * Accessor for getting the field of a row during runtime.
 *
 * @see #createFieldGetter(LogicalType, int)
 */
interface FieldGetter extends Serializable {
    @Nullable
    Object getFieldOrNull(RowData row);
}
}
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.activiti.engine.test.bpmn.mail;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.activation.DataHandler;
import javax.mail.MessagingException;
import javax.mail.internet.MimeMessage;
import javax.mail.internet.MimeMultipart;

import org.activiti.engine.impl.util.CollectionUtil;
import org.flowable.engine.common.api.FlowableException;
import org.flowable.engine.impl.history.HistoryLevel;
import org.flowable.engine.test.Deployment;
import org.subethamail.wiser.WiserMessage;

/**
 * Tests for the BPMN mail send task.
 *
 * <p>Each test deploys a process definition (via {@link Deployment}), starts it, and then
 * inspects the messages captured by the {@code wiser} in-memory SMTP server inherited from
 * {@code EmailTestCase}. The exact mail content asserted here is defined by the per-test
 * deployment XML resources, which are not part of this file.
 *
 * @author Joram Barrez
 * @author Falko Menge
 */
public class EmailSendTaskTest extends EmailTestCase {

    @Deployment
    public void testSimpleTextMail() throws Exception {
        runtimeService.startProcessInstanceByKey("simpleTextOnly");

        List<WiserMessage> messages = wiser.getMessages();
        assertEquals(1, messages.size());

        WiserMessage message = messages.get(0);
        assertEmailSend(message, false, "Hello Kermit!", "This a text only e-mail.", "flowable@localhost",
                Collections.singletonList("kermit@activiti.org"), null);
    }

    @Deployment
    public void testSimpleTextMailMultipleRecipients() {
        runtimeService.startProcessInstanceByKey("simpleTextOnlyMultipleRecipients");

        // 3 recipients == 3 emails in wiser with different receivers
        List<WiserMessage> messages = wiser.getMessages();
        assertEquals(3, messages.size());

        // sort recipients for easy assertion
        List<String> recipients = new ArrayList<String>();
        for (WiserMessage message : messages) {
            recipients.add(message.getEnvelopeReceiver());
        }
        Collections.sort(recipients);

        assertEquals("fozzie@activiti.org", recipients.get(0));
        assertEquals("kermit@activiti.org", recipients.get(1));
        assertEquals("mispiggy@activiti.org", recipients.get(2));
    }

    @Deployment
    public void testTextMailExpressions() throws Exception {
        String sender = "mispiggy@activiti.org";
        String recipient = "fozziebear@activiti.org";
        String recipientName = "Mr. Fozzie";
        String subject = "Fozzie, you should see this!";

        // these variables are resolved by expressions inside the mail task definition
        Map<String, Object> vars = new HashMap<String, Object>();
        vars.put("sender", sender);
        vars.put("recipient", recipient);
        vars.put("recipientName", recipientName);
        vars.put("subject", subject);

        runtimeService.startProcessInstanceByKey("textMailExpressions", vars);

        List<WiserMessage> messages = wiser.getMessages();
        assertEquals(1, messages.size());

        WiserMessage message = messages.get(0);
        assertEmailSend(message, false, subject, "Hello " + recipientName + ", this is an e-mail", sender,
                Collections.singletonList(recipient), null);
    }

    @Deployment
    public void testCcAndBcc() throws Exception {
        runtimeService.startProcessInstanceByKey("ccAndBcc");

        List<WiserMessage> messages = wiser.getMessages();
        assertEmailSend(messages.get(0), false, "Hello world", "This is the content", "flowable@localhost",
                Collections.singletonList("kermit@activiti.org"), Collections.singletonList("fozzie@activiti.org"));

        // Bcc is not stored in the header (obviously)
        // so the only way to verify the bcc, is that there are three messages send.
        assertEquals(3, messages.size());
    }

    @Deployment
    public void testHtmlMail() throws Exception {
        runtimeService.startProcessInstanceByKey("htmlMail", CollectionUtil.singletonMap("gender", "male"));

        List<WiserMessage> messages = wiser.getMessages();
        assertEquals(1, messages.size());
        assertEmailSend(messages.get(0), true, "Test", "Mr. <b>Kermit</b>", "flowable@localhost",
                Collections.singletonList("kermit@activiti.org"), null);
    }

    @Deployment
    public void testTextMailWithFileAttachment() throws Exception {
        HashMap<String, Object> vars = new HashMap<String, Object>();
        vars.put("attachmentsBean", new AttachmentsBean());
        runtimeService.startProcessInstanceByKey("textMailWithFileAttachment", vars);

        List<WiserMessage> messages = wiser.getMessages();
        assertEquals(1, messages.size());
        WiserMessage message = messages.get(0);
        MimeMultipart mm = (MimeMultipart) message.getMimeMessage().getContent();
        // body part 0 is the mail text, part 1 the single attachment
        assertEquals(2, mm.getCount());
        String attachmentFileName = mm.getBodyPart(1).getDataHandler().getName();
        assertEquals(new AttachmentsBean().getFile().getName(), attachmentFileName);
    }

    @Deployment
    public void testTextMailWithFileAttachments() throws Exception {
        HashMap<String, Object> vars = new HashMap<String, Object>();
        vars.put("attachmentsBean", new AttachmentsBean());
        runtimeService.startProcessInstanceByKey("textMailWithFileAttachments", vars);

        List<WiserMessage> messages = wiser.getMessages();
        assertEquals(1, messages.size());
        WiserMessage message = messages.get(0);
        MimeMultipart mm = (MimeMultipart) message.getMimeMessage().getContent();
        File[] files = new AttachmentsBean().getFiles();
        // one body part for the text plus one per attachment, in declaration order
        assertEquals(1 + files.length, mm.getCount());
        for (int i = 0; i < files.length; i++) {
            String attachmentFileName = mm.getBodyPart(1 + i).getDataHandler().getName();
            assertEquals(files[i].getName(), attachmentFileName);
        }
    }

    @Deployment
    public void testTextMailWithFileAttachmentsByPath() throws Exception {
        HashMap<String, Object> vars = new HashMap<String, Object>();
        vars.put("attachmentsBean", new AttachmentsBean());
        runtimeService.startProcessInstanceByKey("textMailWithFileAttachmentsByPath", vars);

        List<WiserMessage> messages = wiser.getMessages();
        assertEquals(1, messages.size());
        WiserMessage message = messages.get(0);
        MimeMultipart mm = (MimeMultipart) message.getMimeMessage().getContent();
        File[] files = new AttachmentsBean().getFiles();
        assertEquals(1 + files.length, mm.getCount());
        for (int i = 0; i < files.length; i++) {
            String attachmentFileName = mm.getBodyPart(1 + i).getDataHandler().getName();
            assertEquals(files[i].getName(), attachmentFileName);
        }
    }

    @Deployment
    public void testTextMailWithDataSourceAttachment() throws Exception {
        String fileName = "file-name-to-be-displayed";
        String fileContent = "This is the file content";
        HashMap<String, Object> vars = new HashMap<String, Object>();
        vars.put("attachmentsBean", new AttachmentsBean());
        vars.put("fileContent", fileContent);
        vars.put("fileName", fileName);
        runtimeService.startProcessInstanceByKey("textMailWithDataSourceAttachment", vars);

        List<WiserMessage> messages = wiser.getMessages();
        assertEquals(1, messages.size());
        WiserMessage message = messages.get(0);
        MimeMultipart mm = (MimeMultipart) message.getMimeMessage().getContent();
        assertEquals(2, mm.getCount());
        String attachmentFileName = mm.getBodyPart(1).getDataHandler().getName();
        assertEquals(fileName, attachmentFileName);
    }

    @Deployment
    public void testTextMailWithNotExistingFileAttachment() throws Exception {
        HashMap<String, Object> vars = new HashMap<String, Object>();
        vars.put("attachmentsBean", new AttachmentsBean());
        runtimeService.startProcessInstanceByKey("textMailWithNotExistingFileAttachment", vars);

        List<WiserMessage> messages = wiser.getMessages();
        assertEquals(1, messages.size());
        WiserMessage message = messages.get(0);
        // the missing attachment is skipped, so a plain (non-multipart) mail is sent
        assertFalse(message.getMimeMessage().getContent() instanceof MimeMultipart);
    }

    @Deployment
    public void testHtmlMailWithFileAttachment() throws Exception {
        HashMap<String, Object> vars = new HashMap<String, Object>();
        vars.put("attachmentsBean", new AttachmentsBean());
        vars.put("gender", "male");
        runtimeService.startProcessInstanceByKey("htmlMailWithFileAttachment", vars);

        List<WiserMessage> messages = wiser.getMessages();
        assertEquals(1, messages.size());
        WiserMessage message = messages.get(0);
        MimeMultipart mm = (MimeMultipart) message.getMimeMessage().getContent();
        assertEquals(2, mm.getCount());
        String attachmentFileName = mm.getBodyPart(1).getDataHandler().getName();
        assertEquals(new AttachmentsBean().getFile().getName(), attachmentFileName);
    }

    @Deployment
    public void testInvalidAddress() throws Exception {
        try {
            runtimeService.startProcessInstanceByKey("invalidAddress").getId();
            fail("An Invalid email address should not execute");
        } catch (FlowableException e) {
            // fine
        } catch (Exception e) {
            fail("Only a FlowableException is expected here but not: " + e);
        }
    }

    @Deployment
    public void testInvalidAddressWithoutException() throws Exception {
        String piId = runtimeService.startProcessInstanceByKey("invalidAddressWithoutException").getId();
        // with ignoreException enabled, the failure is recorded in the default "emailError" variable
        if (processEngineConfiguration.getHistoryLevel().isAtLeast(HistoryLevel.ACTIVITY)) {
            assertNotNull(historyService.createHistoricVariableInstanceQuery().processInstanceId(piId).variableName("emailError").singleResult());
        }
    }

    @Deployment
    public void testInvalidAddressWithoutExceptionVariableName() throws Exception {
        // NOTE(review): same process key as the previous test, but asserts the DEFAULT
        // "emailError" variable is absent — presumably this test's deployment resource
        // configures a custom error-variable name; confirm against the deployment XML.
        String piId = runtimeService.startProcessInstanceByKey("invalidAddressWithoutException").getId();
        if (processEngineConfiguration.getHistoryLevel().isAtLeast(HistoryLevel.ACTIVITY)) {
            assertNull(historyService.createHistoricVariableInstanceQuery().processInstanceId(piId).variableName("emailError").singleResult());
        }
    }

    // Helper

    /**
     * Asserts subject, from, to, (optional) cc headers and body content of a captured mail.
     * Bcc cannot be asserted here as it never appears in the headers.
     */
    private void assertEmailSend(WiserMessage emailMessage, boolean htmlMail, String subject, String message,
            String from, List<String> to, List<String> cc) throws IOException {
        try {
            MimeMessage mimeMessage = emailMessage.getMimeMessage();

            if (htmlMail) {
                assertTrue(mimeMessage.getContentType().contains("multipart/mixed"));
            } else {
                assertTrue(mimeMessage.getContentType().contains("text/plain"));
            }

            assertEquals(subject, mimeMessage.getHeader("Subject", null));
            assertEquals(from, mimeMessage.getHeader("From", null));
            assertTrue(getMessage(mimeMessage).contains(message));

            for (String t : to) {
                assertTrue(mimeMessage.getHeader("To", null).contains(t));
            }

            if (cc != null) {
                for (String c : cc) {
                    assertTrue(mimeMessage.getHeader("Cc", null).contains(c));
                }
            }

        } catch (MessagingException e) {
            fail(e.getMessage());
        }
    }

    /** Renders the full mail content (all parts) to a String via the message's DataHandler. */
    protected String getMessage(MimeMessage mimeMessage) throws MessagingException, IOException {
        DataHandler dataHandler = mimeMessage.getDataHandler();
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        dataHandler.writeTo(baos);
        baos.flush();
        return baos.toString();
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.jms;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

import javax.jms.ConnectionFactory;

import org.apache.activemq.camel.component.ActiveMQComponent;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.ExchangeTimedOutException;
import org.apache.camel.Message;
import org.apache.camel.Processor;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;

import static org.apache.camel.component.jms.JmsComponent.jmsComponentAutoAcknowledge;

/**
 * Concurrent request/reply tests over JMS, exercising both correlation strategies
 * (JMSMessageID-based and JMSCorrelationID-based) across single-node, multi-node,
 * persistent-replyTo and multi-component topologies.
 *
 * <p>Each test method's name is used as a key into the static {@code contextBuilders} and
 * {@code routeBuilders} maps (see {@link #createCamelContext()} and
 * {@link #createRouteBuilder()}), so adding a test requires registering both entries in
 * {@link #init()}.
 *
 * @version
 */
public class JmsRouteRequestReplyTest extends CamelTestSupport {

    protected static final String REPLY_TO_DESTINATION_SELECTOR_NAME = "camelProducer";
    protected static String componentName = "amq";
    protected static String componentName1 = "amq1";
    protected static String endpointUriA = componentName + ":queue:test.a";
    protected static String endpointUriB = componentName + ":queue:test.b";
    protected static String endpointUriB1 = componentName1 + ":queue:test.b";
    // note that the replyTo both A and B endpoints share the persistent replyTo queue,
    // which is one more way to verify that reply listeners of A and B endpoints don't steal each other messages
    protected static String endpointReplyToUriA = componentName + ":queue:test.a?replyTo=queue:test.a.reply";
    protected static String endpointReplyToUriB = componentName + ":queue:test.b?replyTo=queue:test.a.reply";

    protected static String request = "Hello World";
    protected static String expectedReply = "Re: " + request;

    // concurrency knobs; some tests temporarily lower them and restore in a finally block
    protected static int maxTasks = 20;
    protected static int maxServerTasks = 1;
    protected static int maxCalls = 5;

    // guards one-time population of the static builder maps across all test methods
    protected static AtomicBoolean inited = new AtomicBoolean(false);

    protected static Map<String, ContextBuilder> contextBuilders = new HashMap<String, ContextBuilder>();
    protected static Map<String, RouteBuilder> routeBuilders = new HashMap<String, RouteBuilder>();

    /** Per-test hook that configures the JMS component(s) on a fresh CamelContext. */
    private interface ContextBuilder {
        CamelContext buildContext(CamelContext context) throws Exception;
    }

    /** Consumes requests without ever replying — used by the timeout tests. */
    public static class SingleNodeDeadEndRouteBuilder extends RouteBuilder {
        public void configure() throws Exception {
            from(endpointUriA).process(new Processor() {
                public void process(Exchange e) {
                    // do nothing
                }
            });
        }
    };

    /** Replies directly on endpoint A with the expected reply plus the caller's call id. */
    public static class SingleNodeRouteBuilder extends RouteBuilder {
        public void configure() throws Exception {
            from(endpointUriA).process(new Processor() {
                public void process(Exchange e) {
                    String request = e.getIn().getBody(String.class);
                    // echo back everything from the '-' on, i.e. the "-<callId>" suffix
                    e.getOut().setBody(expectedReply + request.substring(request.indexOf('-')));
                }
            });
        }
    };

    /** Forwards A to B; B produces the reply. */
    public static class MultiNodeRouteBuilder extends RouteBuilder {
        public void configure() throws Exception {
            from(endpointUriA).to(endpointUriB);
            from(endpointUriB).process(new Processor() {
                public void process(Exchange e) {
                    String request = e.getIn().getBody(String.class);
                    e.getOut().setBody(expectedReply + request.substring(request.indexOf('-')));
                }
            });
        }
    };

    /**
     * Forwards A to B over a persistent replyTo queue; the replyTo selector header must be
     * copied from the incoming to the outgoing message so the reply can be routed back.
     */
    public static class MultiNodeReplyToRouteBuilder extends RouteBuilder {
        public void configure() throws Exception {
            from(endpointUriA).to(endpointReplyToUriB);
            from(endpointUriB).process(new Processor() {
                public void process(Exchange e) {
                    Message in = e.getIn();
                    Message out = e.getOut();

                    String selectorValue = in.getHeader(REPLY_TO_DESTINATION_SELECTOR_NAME, String.class);
                    String request = in.getBody(String.class);

                    // propagate the selector so the reply listener can match the reply
                    out.setHeader(REPLY_TO_DESTINATION_SELECTOR_NAME, selectorValue);
                    out.setBody(expectedReply + request.substring(request.indexOf('-')));
                }
            });
        }
    };

    /** Forwards A (component "amq") to B1 on a second, differently-configured component. */
    public static class MultiNodeDiffCompRouteBuilder extends RouteBuilder {
        public void configure() throws Exception {
            from(endpointUriA).to(endpointUriB1);
            from(endpointUriB1).process(new Processor() {
                public void process(Exchange e) {
                    String request = e.getIn().getBody(String.class);
                    e.getOut().setBody(expectedReply + request.substring(request.indexOf('-')));
                }
            });
        }
    };

    /** Context builder using JMSMessageID as the correlation id. */
    public static class ContextBuilderMessageID implements ContextBuilder {
        public CamelContext buildContext(CamelContext context) throws Exception {
            ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory();
            JmsComponent jmsComponent = jmsComponentAutoAcknowledge(connectionFactory);
            jmsComponent.setUseMessageIDAsCorrelationID(true);
            jmsComponent.setConcurrentConsumers(maxServerTasks);
            context.addComponent(componentName, jmsComponent);
            return context;
        }
    };

    /**
     * One-time registration of test-name -> context builder and test-name -> route builder.
     * Runs at most once per JVM (guarded by {@code inited}).
     */
    protected static void init() {
        if (inited.compareAndSet(false, true)) {

            ContextBuilder contextBuilderMessageID = new ContextBuilderMessageID();

            ContextBuilder contextBuilderCorrelationID = new ContextBuilder() {
                public CamelContext buildContext(CamelContext context) throws Exception {
                    ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory();
                    ActiveMQComponent jmsComponent = ActiveMQComponent.activeMQComponent();
                    jmsComponent.setConnectionFactory(connectionFactory);
                    jmsComponent.setUseMessageIDAsCorrelationID(false);
                    jmsComponent.setConcurrentConsumers(maxServerTasks);
                    context.addComponent(componentName, jmsComponent);
                    return context;
                }
            };

            ContextBuilder contextBuilderMessageIDNamedReplyToSelector = new ContextBuilder() {
                public CamelContext buildContext(CamelContext context) throws Exception {
                    ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory();
                    ActiveMQComponent jmsComponent = ActiveMQComponent.activeMQComponent();
                    jmsComponent.setConnectionFactory(connectionFactory);
                    jmsComponent.setUseMessageIDAsCorrelationID(true);
                    jmsComponent.setConcurrentConsumers(maxServerTasks);
                    jmsComponent.getConfiguration().setReplyToDestinationSelectorName(REPLY_TO_DESTINATION_SELECTOR_NAME);
                    context.addComponent(componentName, jmsComponent);
                    return context;
                }
            };

            ContextBuilder contextBuilderCorrelationIDNamedReplyToSelector = new ContextBuilder() {
                public CamelContext buildContext(CamelContext context) throws Exception {
                    ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory();
                    ActiveMQComponent jmsComponent = ActiveMQComponent.activeMQComponent();
                    jmsComponent.setConnectionFactory(connectionFactory);
                    jmsComponent.setUseMessageIDAsCorrelationID(false);
                    jmsComponent.setConcurrentConsumers(maxServerTasks);
                    jmsComponent.getConfiguration().setReplyToDestinationSelectorName(REPLY_TO_DESTINATION_SELECTOR_NAME);
                    context.addComponent(componentName, jmsComponent);
                    return context;
                }
            };

            ContextBuilder contextBuilderCorrelationIDDiffComp = new ContextBuilder() {
                public CamelContext buildContext(CamelContext context) throws Exception {
                    ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory();
                    ActiveMQComponent jmsComponent = ActiveMQComponent.activeMQComponent();
                    jmsComponent.setConnectionFactory(connectionFactory);
                    jmsComponent.setConcurrentConsumers(maxServerTasks);
                    context.addComponent(componentName, jmsComponent);

                    ActiveMQComponent jmsComponent1 = ActiveMQComponent.activeMQComponent();
                    jmsComponent1.setConnectionFactory(connectionFactory);
                    jmsComponent1.setUseMessageIDAsCorrelationID(false);
                    jmsComponent1.setConcurrentConsumers(maxServerTasks);
                    context.addComponent(componentName1, jmsComponent1);
                    return context;
                }
            };

            ContextBuilder contextBuilderMessageIDDiffComp = new ContextBuilder() {
                public CamelContext buildContext(CamelContext context) throws Exception {
                    ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory();
                    ActiveMQComponent jmsComponent = ActiveMQComponent.activeMQComponent();
                    jmsComponent.setConnectionFactory(connectionFactory);
                    jmsComponent.setUseMessageIDAsCorrelationID(true);
                    jmsComponent.setConcurrentConsumers(maxServerTasks);
                    context.addComponent(componentName, jmsComponent);

                    ActiveMQComponent jmsComponent1 = ActiveMQComponent.activeMQComponent();
                    jmsComponent1.setConnectionFactory(connectionFactory);
                    jmsComponent1.setUseMessageIDAsCorrelationID(true);
                    jmsComponent1.setConcurrentConsumers(maxServerTasks);
                    context.addComponent(componentName1, jmsComponent1);
                    return context;
                }
            };

            contextBuilders.put("testUseMessageIDAsCorrelationID", contextBuilderMessageID);
            contextBuilders.put("testUseCorrelationID", contextBuilderCorrelationID);
            contextBuilders.put("testUseMessageIDAsCorrelationIDMultiNode", contextBuilderMessageID);
            contextBuilders.put("testUseCorrelationIDMultiNode", contextBuilderCorrelationID);
            contextBuilders.put("testUseMessageIDAsCorrelationIDPersistReplyToMultiNode", contextBuilderMessageID);
            contextBuilders.put("testUseCorrelationIDPersistReplyToMultiNode", contextBuilderCorrelationID);
            contextBuilders.put("testUseMessageIDAsCorrelationIDPersistMultiReplyToMultiNode", contextBuilderMessageID);
            // contextBuilders.put("testUseCorrelationIDPersistMultiReplyToMultiNode", contextBuilderCorrelationID);
            contextBuilders.put("testUseMessageIDAsCorrelationIDPersistMultiReplyToWithNamedSelectorMultiNode", contextBuilderMessageIDNamedReplyToSelector);
            contextBuilders.put("testUseCorrelationIDPersistMultiReplyToWithNamedSelectorMultiNode", contextBuilderCorrelationIDNamedReplyToSelector);
            contextBuilders.put("testUseCorrelationIDMultiNodeDiffComponents", contextBuilderCorrelationIDDiffComp);
            contextBuilders.put("testUseMessageIDAsCorrelationIDMultiNodeDiffComponents", contextBuilderMessageIDDiffComp);
            contextBuilders.put("testUseMessageIDAsCorrelationIDTimeout", contextBuilderMessageID);
            // NOTE(review): this correlation-ID timeout test is mapped to the message-ID builder.
            // Presumably harmless here (no reply is ever produced, so the correlation mode does
            // not matter for a timeout), but it looks like a copy-paste slip — confirm whether
            // contextBuilderCorrelationID was intended.
            contextBuilders.put("testUseCorrelationIDTimeout", contextBuilderMessageID);

            routeBuilders.put("testUseMessageIDAsCorrelationID", new SingleNodeRouteBuilder());
            // NOTE(review): the two ReplyToTempDestination* entries below have no matching
            // contextBuilders entry and no @Test method visible in this file chunk.
            routeBuilders.put("testUseMessageIDAsCorrelationIDReplyToTempDestinationPerComponent", new SingleNodeRouteBuilder());
            routeBuilders.put("testUseMessageIDAsCorrelationIDReplyToTempDestinationPerProducer", new SingleNodeRouteBuilder());
            routeBuilders.put("testUseCorrelationID", new SingleNodeRouteBuilder());
            routeBuilders.put("testUseMessageIDAsCorrelationIDMultiNode", new MultiNodeRouteBuilder());
            routeBuilders.put("testUseCorrelationIDMultiNode", new MultiNodeRouteBuilder());
            routeBuilders.put("testUseMessageIDAsCorrelationIDPersistReplyToMultiNode", new MultiNodeRouteBuilder());
            routeBuilders.put("testUseCorrelationIDPersistReplyToMultiNode", new MultiNodeRouteBuilder());
            routeBuilders.put("testUseMessageIDAsCorrelationIDPersistMultiReplyToMultiNode", new MultiNodeReplyToRouteBuilder());
            // routeBuilders.put("testUseCorrelationIDPersistMultiReplyToMultiNode", new MultiNodeReplyToRouteBuilder());
            routeBuilders.put("testUseMessageIDAsCorrelationIDPersistMultiReplyToWithNamedSelectorMultiNode", new MultiNodeReplyToRouteBuilder());
            routeBuilders.put("testUseCorrelationIDPersistMultiReplyToWithNamedSelectorMultiNode", new MultiNodeReplyToRouteBuilder());
            routeBuilders.put("testUseCorrelationIDMultiNodeDiffComponents", new MultiNodeDiffCompRouteBuilder());
            routeBuilders.put("testUseMessageIDAsCorrelationIDMultiNodeDiffComponents", new MultiNodeDiffCompRouteBuilder());
            routeBuilders.put("testUseMessageIDAsCorrelationIDTimeout", new SingleNodeDeadEndRouteBuilder());
            routeBuilders.put("testUseCorrelationIDTimeout", new SingleNodeDeadEndRouteBuilder());
        }
    }

    /**
     * One worker in the request/reply stress run: issues {@code maxCalls} sequential
     * request/reply exchanges and records the first mismatching reply, if any.
     */
    public class Task implements Callable<Task> {
        private AtomicInteger counter;
        private String fromUri;
        private volatile boolean ok = true;
        private volatile String message = "";

        public Task(AtomicInteger counter, String fromUri) {
            this.counter = counter;
            this.fromUri = fromUri;
        }

        public Task call() throws Exception {
            for (int i = 0; i < maxCalls; i++) {
                // unique call id across all tasks, appended to the request so each reply is distinguishable
                int callId = counter.incrementAndGet();
                Object reply = "";
                try {
                    reply = template.requestBody(fromUri, request + "-" + callId);
                } catch (RuntimeCamelException e) {
                    // expected in some cases
                }
                if (!reply.equals(expectedReply + "-" + callId)) {
                    ok = false;
                    message = "Unexpected reply. Expected: '" + expectedReply + "-" + callId + "'; Received: '" + reply + "'";
                }
            }
            return this;
        }

        public void assertSuccess() {
            assertTrue(message, ok);
        }
    }

    @Before
    public void setUp() throws Exception {
        // populate the static builder maps before the superclass creates context and routes
        init();
        super.setUp();
    }

    @Test
    public void testUseMessageIDAsCorrelationID() throws Exception {
        runRequestReplyThreaded(endpointUriA);
    }

    @Test
    public void testUseCorrelationID() throws Exception {
        runRequestReplyThreaded(endpointUriA);
    }

    @Test
    public void testUseMessageIDAsCorrelationIDMultiNode() throws Exception {
        runRequestReplyThreaded(endpointUriA);
    }

    @Test
    public void testUseCorrelationIDMultiNode() throws Exception {
        runRequestReplyThreaded(endpointUriA);
    }

    @Test
    public void testUseMessageIDAsCorrelationIDPersistReplyToMultiNode() throws Exception {
        runRequestReplyThreaded(endpointReplyToUriA);
    }

    @Test
    public void testUseCorrelationIDPersistReplyToMultiNode() throws Exception {
        runRequestReplyThreaded(endpointUriA);
    }

    // (1)
    // note this is an inefficient way of correlating replies to a persistent queue
    // a consumer will have to be created for each reply message
    // see testUseMessageIDAsCorrelationIDPersistMultiReplyToWithNamedSelectorMultiNode
    // or testCorrelationIDPersistMultiReplyToWithNamedSelectorMultiNode
    // for a faster way to do this. Note however that in this case the message copy has to occur
    // between consumer -> producer as the selector value needs to be propagated to the ultimate
    // destination, which in turn will copy this value back into the reply message
    @Test
    public void testUseMessageIDAsCorrelationIDPersistMultiReplyToMultiNode() throws Exception {
        // scale down the load for this slow correlation strategy, restoring afterwards
        int oldMaxTasks = maxTasks;
        int oldMaxServerTasks = maxServerTasks;
        int oldMaxCalls = maxCalls;

        maxTasks = 10;
        maxServerTasks = 1;
        maxCalls = 2;

        try {
            runRequestReplyThreaded(endpointUriA);
        } finally {
            maxTasks = oldMaxTasks;
            maxServerTasks = oldMaxServerTasks;
            maxCalls = oldMaxCalls;
        }
    }

    // see (1)
    @Test
    @Ignore
    public void testUseCorrelationIDPersistMultiReplyToMultiNode() throws Exception {
        int oldMaxTasks = maxTasks;
        int oldMaxServerTasks = maxServerTasks;
        int oldMaxCalls = maxCalls;

        maxTasks = 10;
        maxServerTasks = 1;
        maxCalls = 2;

        try {
            runRequestReplyThreaded(endpointUriA);
        } finally {
            maxTasks = oldMaxTasks;
            maxServerTasks = oldMaxServerTasks;
            maxCalls = oldMaxCalls;
        }
    }

    @Test
    public void testUseMessageIDAsCorrelationIDPersistMultiReplyToWithNamedSelectorMultiNode() throws Exception {
        runRequestReplyThreaded(endpointUriA);
    }

    @Test
    public void testUseCorrelationIDPersistMultiReplyToWithNamedSelectorMultiNode() throws Exception {
        runRequestReplyThreaded(endpointUriA);
    }

    @Test
    public void testUseCorrelationIDTimeout() throws Exception {
        // route is a dead end (SingleNodeDeadEndRouteBuilder), so the request must time out
        JmsComponent c = (JmsComponent)context.getComponent(componentName);
        c.getConfiguration().setRequestTimeout(1000);

        Object reply = "";
        try {
            reply = template.requestBody(endpointUriA, request);
            fail("Should have thrown exception");
        } catch (RuntimeCamelException e) {
            assertIsInstanceOf(ExchangeTimedOutException.class, e.getCause());
        }
        assertEquals("", reply);
    }

    @Test
    public void testUseMessageIDAsCorrelationIDTimeout() throws Exception {
        JmsComponent c = (JmsComponent)context.getComponent(componentName);
        c.getConfiguration().setRequestTimeout(1000);

        Object reply = "";
        try {
            reply = template.requestBody(endpointUriA, request);
            fail("Should have thrown exception");
        } catch (RuntimeCamelException e) {
            assertIsInstanceOf(ExchangeTimedOutException.class, e.getCause());
        }
        assertEquals("", reply);
    }

    @Test
    public void testUseCorrelationIDMultiNodeDiffComponents() throws Exception {
        runRequestReplyThreaded(endpointUriA);
    }

    @Test
    public void testUseMessageIDAsCorrelationIDMultiNodeDiffComponents() throws Exception {
        runRequestReplyThreaded(endpointUriA);
    }

    /**
     * Runs {@code maxTasks} concurrent {@link Task} workers against the given endpoint and
     * asserts each completed successfully within 60 seconds.
     */
    protected void runRequestReplyThreaded(String fromUri) throws Exception {
        // start template
        template.start();

        ExecutorService executor = context.getExecutorServiceManager().newFixedThreadPool(this, "Task", maxTasks);
        CompletionService<Task> completionService = new ExecutorCompletionService<Task>(executor);

        final AtomicInteger counter = new AtomicInteger(-1);
        for (int i = 0; i < maxTasks; i++) {
            Task task = new Task(counter, fromUri);
            completionService.submit(task);
        }

        for (int i = 0; i < maxTasks; i++) {
            Future<Task> future = completionService.take();
            Task task = future.get(60, TimeUnit.SECONDS);
            assertNotNull("Should complete the task", task);
            task.assertSuccess();
        }

        context.getExecutorServiceManager().shutdownNow(executor);
    }

    protected CamelContext createCamelContext() throws Exception {
        CamelContext camelContext = super.createCamelContext();
        // look up the per-test context configuration by the current test method name
        return contextBuilders.get(testName.getMethodName()).buildContext(camelContext);
    }

    protected RouteBuilder createRouteBuilder() throws Exception {
        // look up the per-test route by the current test method name
        return routeBuilders.get(testName.getMethodName());
    }
}
/* * #%L * Fabric8 :: SPI * %% * Copyright (C) 2014 Red Hat * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package io.fabric8.spi.process; import io.fabric8.api.ContainerAttributes; import io.fabric8.api.process.ProcessOptions; import io.fabric8.spi.AgentIdentity; import io.fabric8.spi.AgentRegistration; import io.fabric8.spi.AgentTopology; import io.fabric8.spi.process.ManagedProcess.State; import io.fabric8.spi.utils.HostUtils; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.net.InetAddress; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicReference; import java.util.zip.GZIPInputStream; import javax.management.InstanceNotFoundException; import javax.management.MBeanServer; import javax.management.Notification; import javax.management.NotificationListener; import org.apache.commons.compress.archivers.ArchiveEntry; import org.apache.commons.compress.archivers.ArchiveInputStream; import org.apache.commons.compress.archivers.ArchiveStreamFactory; import org.apache.commons.compress.archivers.tar.TarArchiveInputStream; import org.jboss.gravia.repository.DefaultMavenDelegateRepository; import org.jboss.gravia.repository.MavenDelegateRepository; import org.jboss.gravia.resource.MavenCoordinates; import org.jboss.gravia.resource.Resource; import org.jboss.gravia.resource.ResourceContent; import 
org.jboss.gravia.runtime.LifecycleException; import org.jboss.gravia.runtime.spi.PropertiesProvider; import org.jboss.gravia.utils.IOUtils; import org.jboss.gravia.utils.IllegalArgumentAssertion; import org.jboss.gravia.utils.IllegalStateAssertion; /** * The managed root container * * @author thomas.diesler@jboss.com * @since 26-Feb-2014 */ public abstract class AbstractProcessHandler implements ProcessHandler { private final MavenDelegateRepository mavenRepository; private final MBeanServer mbeanServer; private final AgentRegistration localAgent; private MutableManagedProcess managedProcess; private Process javaProcess; protected AbstractProcessHandler(MBeanServer mbeanServer, AgentRegistration localAgent, PropertiesProvider propsProvider) { IllegalArgumentAssertion.assertNotNull(mbeanServer, "mbeanServer"); IllegalArgumentAssertion.assertNotNull(localAgent, "localAgent"); IllegalArgumentAssertion.assertNotNull(propsProvider, "propsProvider"); this.mavenRepository = new DefaultMavenDelegateRepository(propsProvider); this.mbeanServer = mbeanServer; this.localAgent = localAgent; } protected MBeanServer getMBeanServer() { return mbeanServer; } protected AgentRegistration getAgentRegistration() { return localAgent; } protected Process getJavaProcess() { return javaProcess; } @Override public final ManagedProcess create(ProcessOptions options, ProcessIdentity identity) { File targetDir = options.getTargetPath().toAbsolutePath().toFile(); IllegalStateAssertion.assertTrue(targetDir.isDirectory() || targetDir.mkdirs(), "Cannot create target dir: " + targetDir); File homeDir = null; for (MavenCoordinates artefact : options.getMavenCoordinates()) { Resource resource = mavenRepository.findMavenResource(artefact); IllegalStateAssertion.assertNotNull(resource, "Cannot find maven resource: " + artefact); ResourceContent content = resource.adapt(ResourceContent.class); IllegalStateAssertion.assertNotNull(content, "Cannot obtain resource content for: " + artefact); try { 
ArchiveInputStream ais; if ("tar.gz".equals(artefact.getType())) { InputStream inputStream = content.getContent(); ais = new TarArchiveInputStream(new GZIPInputStream(inputStream)); } else { InputStream inputStream = content.getContent(); ais = new ArchiveStreamFactory().createArchiveInputStream(artefact.getType(), inputStream); } ArchiveEntry entry = null; boolean needContainerHome = homeDir == null; while ((entry = ais.getNextEntry()) != null) { File targetFile; if (needContainerHome) { targetFile = new File(targetDir, entry.getName()); } else { targetFile = new File(homeDir, entry.getName()); } if (!entry.isDirectory()) { File parentDir = targetFile.getParentFile(); IllegalStateAssertion.assertTrue(parentDir.exists() || parentDir.mkdirs(), "Cannot create target directory: " + parentDir); FileOutputStream fos = new FileOutputStream(targetFile); IOUtils.copyStream(ais, fos); fos.close(); if (needContainerHome && homeDir == null) { File currentDir = parentDir; while (!currentDir.getParentFile().equals(targetDir)) { currentDir = currentDir.getParentFile(); } homeDir = currentDir; } } } ais.close(); } catch (RuntimeException rte) { throw rte; } catch (Exception ex) { throw new IllegalStateException("Cannot extract artefact: " + artefact, ex); } } managedProcess = new DefaultManagedProcess(identity, options, homeDir.toPath(), State.CREATED); managedProcess.addAttribute(ContainerAttributes.ATTRIBUTE_KEY_REMOTE_AGENT_URL, localAgent.getJolokiaEndpoint()); try { doConfigure(managedProcess); } catch (Exception ex) { throw new LifecycleException("Cannot configure container", ex); } return new ImmutableManagedProcess(managedProcess); } @Override public final Future<ManagedProcess> start() { State state = managedProcess.getState(); assertNotDestroyed(state); // Setup a call back notification for Agent registration final AtomicReference<CountDownLatch> latchRef = new AtomicReference<>(); try { mbeanServer.addNotificationListener(AgentTopology.OBJECT_NAME, new 
NotificationListener() { @Override public void handleNotification(Notification notification, Object handback) { String eventType = notification.getType(); if (AgentTopology.NOTIFICATION_TYPE_AGENT_REGISTRATION.equals(eventType)) { AgentRegistration agentReg = (AgentRegistration) notification.getSource(); String agentName = agentReg.getIdentity().getName(); String procName = (String) handback; if (agentName.equals(procName)) { try { mbeanServer.removeNotificationListener(AgentTopology.OBJECT_NAME, this); } catch (Exception ex) { // ignore } latchRef.get().countDown(); } } } }, null, managedProcess.getIdentity().getName()); } catch (InstanceNotFoundException ex) { throw new IllegalStateException(ex); } try { if (state == State.CREATED || state == State.STOPPED) { latchRef.set(new CountDownLatch(1)); doStart(managedProcess); managedProcess.setState(State.STARTED); } else { latchRef.set(new CountDownLatch(0)); } } catch (Exception ex) { throw new LifecycleException("Cannot start container", ex); } return new ProcessFuture(managedProcess, latchRef.get()); } @Override public final Future<ManagedProcess> stop() { State state = managedProcess.getState(); assertNotDestroyed(state); // Setup a call back notification for Agent registration final AtomicReference<CountDownLatch> latchRef = new AtomicReference<>(); try { mbeanServer.addNotificationListener(AgentTopology.OBJECT_NAME, new NotificationListener() { @Override public void handleNotification(Notification notification, Object handback) { String eventType = notification.getType(); if (AgentTopology.NOTIFICATION_TYPE_AGENT_DEREGISTRATION.equals(eventType)) { AgentIdentity agentId = (AgentIdentity) notification.getSource(); String agentName = agentId.getName(); String procName = (String) handback; if (agentName.equals(procName)) { try { mbeanServer.removeNotificationListener(AgentTopology.OBJECT_NAME, this); } catch (Exception ex) { // ignore } latchRef.get().countDown(); } } } }, null, 
managedProcess.getIdentity().getName()); } catch (InstanceNotFoundException ex) { throw new IllegalStateException(ex); } // Setup a shutdown monitor thread Thread shutdownMonitor = new Thread("ShutdownMonitor") { @Override public void run() { Process process = getJavaProcess(); if (process != null) { try { process.waitFor(); } catch (InterruptedException ex) { // ignore } } latchRef.get().countDown(); } }; shutdownMonitor.start(); try { if (state == State.STARTED) { latchRef.set(new CountDownLatch(2)); doStop(managedProcess); managedProcess.setState(State.STOPPED); } else { latchRef.set(new CountDownLatch(0)); } } catch (Exception ex) { throw new LifecycleException("Cannot stop container", ex); } return new ProcessFuture(managedProcess, latchRef.get()); } @Override public final ManagedProcess destroy() { State state = managedProcess.getState(); assertNotDestroyed(state); if (state == State.STARTED) { try { stop(); } catch (Exception ex) { // ignore } } try { doDestroy(managedProcess); } catch (Exception ex) { throw new LifecycleException("Cannot destroy container", ex); } finally { managedProcess.setState(State.DESTROYED); } return new ImmutableManagedProcess(managedProcess); } private void assertNotDestroyed(State state) { IllegalStateAssertion.assertFalse(state == State.DESTROYED, "Cannot start container in state: " + state); } protected void doConfigure(MutableManagedProcess process) throws Exception { } protected void doStart(MutableManagedProcess process) throws Exception { } protected void doStop(MutableManagedProcess process) throws Exception { } protected void doDestroy(MutableManagedProcess process) throws Exception { } protected final int nextAvailablePort(int portValue) { return nextAvailablePort(portValue, null); } protected int nextAvailablePort(int portValue, InetAddress bindAddr) { return HostUtils.nextAvailablePort(portValue, bindAddr); } protected void startProcess(ProcessBuilder processBuilder, ProcessOptions options) throws IOException { 
javaProcess = processBuilder.start(); new Thread(new ConsoleConsumer(javaProcess, options)).start(); } protected void destroyProcess(boolean waitFor) { if (javaProcess != null) { javaProcess.destroy(); if (waitFor) { try { javaProcess.waitFor(); } catch (InterruptedException ex) { // ignore } } } } /** * Runnable that consumes the output of the process. * If nothing consumes the output the container may hang on some platforms */ public static class ConsoleConsumer implements Runnable { private final Process process; private final ProcessOptions options; public ConsoleConsumer(Process process, ProcessOptions options) { this.process = process; this.options = options; } @Override public void run() { final InputStream stream = process.getInputStream(); try { byte[] buf = new byte[32]; int num; // Do not try reading a line cos it considers '\r' end of line while ((num = stream.read(buf)) != -1) { if (options.isOutputToConsole()) System.out.write(buf, 0, num); } } catch (IOException e) { } } } }
/**
 * OLAT - Online Learning and Training<br>
 * http://www.olat.org
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); <br>
 * you may not use this file except in compliance with the License.<br>
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing,<br>
 * software distributed under the License is distributed on an "AS IS" BASIS, <br>
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
 * See the License for the specific language governing permissions and <br>
 * limitations under the License.
 * <p>
 * Copyright (c) frentix GmbH<br>
 * http://www.frentix.com<br>
 * <p>
 */
package org.olat.portfolio.ui.structel.edit;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.json.JSONException;
import org.olat.core.CoreSpringFactory;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.Component;
import org.olat.core.gui.components.link.Link;
import org.olat.core.gui.components.link.LinkFactory;
import org.olat.core.gui.components.velocity.VelocityContainer;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.gui.control.controller.BasicController;
import org.olat.core.gui.control.generic.ajax.tree.AjaxTreeModel;
import org.olat.core.gui.control.generic.ajax.tree.AjaxTreeNode;
import org.olat.core.gui.control.generic.ajax.tree.MoveTreeNodeEvent;
import org.olat.core.gui.control.generic.ajax.tree.TreeController;
import org.olat.core.gui.control.generic.ajax.tree.TreeNodeClickedEvent;
import org.olat.core.logging.OLATRuntimeException;
import org.olat.core.util.filter.FilterFactory;
import org.olat.portfolio.EPSecurityCallback;
import org.olat.portfolio.manager.EPFrontendManager;
import org.olat.portfolio.manager.EPStructureManager;
import org.olat.portfolio.model.artefacts.AbstractArtefact;
import org.olat.portfolio.model.structel.EPAbstractMap;
import org.olat.portfolio.model.structel.EPPage;
import org.olat.portfolio.model.structel.EPStructureElement;
import org.olat.portfolio.model.structel.PortfolioStructure;
import org.olat.portfolio.ui.structel.EPAddElementsController;
import org.olat.portfolio.ui.structel.EPArtefactClicked;
import org.olat.portfolio.ui.structel.EPStructureChangeEvent;

/**
 * Description:<br>
 * Controller shows a TOC (table of content) of the given PortfolioStructure elements can be moved around by d&d
 * <P>
 * Initial Date: 13.09.2010 <br>
 *
 * @author Roman Haag, roman.haag@frentix.com, http://www.frentix.com
 */
public class EPTOCController extends BasicController {

    protected static final String ARTEFACT_NODE_CLICKED = "artefactNodeClicked";
    private static final String DELETE_LINK_CMD = "delete";
    // tree node id prefix distinguishing artefact nodes from structure nodes
    private static final String ARTEFACT_NODE_IDENTIFIER = "art";
    private static final String ROOT_NODE_IDENTIFIER = "root";
    protected final EPFrontendManager ePFMgr;
    protected final EPStructureManager eSTMgr;
    protected PortfolioStructure rootNode;
    protected final EPSecurityCallback secCallback;
    private final TreeController treeCtr;
    private final VelocityContainer tocV;
    // last clicked structure node (null when an artefact or root was clicked last)
    private PortfolioStructure structureClicked;
    // last clicked artefact node id (null when a structure node was clicked last)
    private String artefactNodeClicked;
    // caches: element key -> tree path, and artefact tree path -> owning structure
    protected final Map<Long, String> idToPath = new HashMap<Long, String>();
    protected final Map<String, PortfolioStructure> pathToStructure = new HashMap<String, PortfolioStructure>();
    private EPAddElementsController addElCtrl;
    private final Link delButton;

    /**
     * Build the TOC tree for the given root map and pre-select an element.
     *
     * @param ureq user request
     * @param wControl window control
     * @param selectedEl element to pre-select, may be null -> the root map is selected
     * @param rootNode root map of the portfolio structure
     * @param secCallback decides which add/delete actions are offered
     */
    public EPTOCController(final UserRequest ureq, final WindowControl wControl, final PortfolioStructure selectedEl, final PortfolioStructure rootNode,
            final EPSecurityCallback secCallback) {
        super(ureq, wControl);
        this.secCallback = secCallback;
        tocV = createVelocityContainer("toc");
        ePFMgr = (EPFrontendManager) CoreSpringFactory.getBean("epFrontendManager");
        eSTMgr = (EPStructureManager) CoreSpringFactory.getBean("epStructureManager");
        this.rootNode = rootNode;
        final AjaxTreeModel treeModel = buildTreeModel();
        treeCtr = new TreeController(ureq, getWindowControl(), translate("toc.root"), treeModel, null);
        treeCtr.setTreeSorting(false, false, false);
        listenTo(treeCtr);
        tocV.put("tocTree", treeCtr.getInitialComponent());
        delButton = LinkFactory.createCustomLink("deleteButton", DELETE_LINK_CMD, "deleteButton", Link.LINK_CUSTOM_CSS, tocV, this);
        delButton.setCustomEnabledLinkCSS("b_with_small_icon_left b_delete_icon");
        tocV.put("deleteButton", delButton);
        if (selectedEl == null) {
            treeCtr.selectPath("/" + ROOT_NODE_IDENTIFIER + "/" + rootNode.getKey()); // select map
            refreshAddElements(ureq, rootNode);
        } else {
            final String pagePath = calculatePathByDeepestNode(selectedEl);
            treeCtr.selectPath("/" + ROOT_NODE_IDENTIFIER + "/" + rootNode.getKey() + pagePath);
            structureClicked = selectedEl;
            refreshAddElements(ureq, selectedEl);
        }
        putInitialPanel(tocV);
    }

    /**
     * Walk from the given element up to its root map and build the "/key/key/..." tree path
     * (excluding the root map segment itself, which the caller prepends).
     */
    private String calculatePathByDeepestNode(final PortfolioStructure pStruct) {
        final StringBuffer path = new StringBuffer();
        PortfolioStructure ps = pStruct;
        while (ps.getRootMap() != null) {
            path.insert(0, "/" + ps.getKey().toString());
            ps = ps.getRoot();
        }
        return path.toString();
    }

    /** Reload the whole tree below (and including) the given root map. */
    protected void refreshTree(final PortfolioStructure root) {
        this.rootNode = root;
        treeCtr.reloadPath("/" + ROOT_NODE_IDENTIFIER + "/" + rootNode.getKey());
    }

    /**
     * refreshing the add elements link to actual structure
     *
     * @param ureq
     * @param struct maybe null -> hiding the add-button
     */
    private void refreshAddElements(final UserRequest ureq, final PortfolioStructure struct) {
        tocV.remove(tocV.getComponent("addElement"));
        removeAsListenerAndDispose(addElCtrl);
        if (struct != null) {
            addElCtrl = new EPAddElementsController(ureq, getWindowControl(), struct);
            if (struct instanceof EPPage) {
                // a page may get sub-structures and artefacts
                if (secCallback.canAddStructure()) {
                    addElCtrl.setShowLink(EPAddElementsController.ADD_STRUCTUREELEMENT);
                }
                if (secCallback.canAddArtefact()) {
                    addElCtrl.setShowLink(EPAddElementsController.ADD_ARTEFACT);
                }
            } else if (struct instanceof EPAbstractMap) {
                // a map may only get pages
                if (secCallback.canAddPage()) {
                    addElCtrl.setShowLink(EPAddElementsController.ADD_PAGE);
                }
            } else {
                // its a structure element
                if (secCallback.canAddArtefact()) {
                    addElCtrl.setShowLink(EPAddElementsController.ADD_ARTEFACT);
                }
            }
            listenTo(addElCtrl);
            tocV.put("addElement", addElCtrl.getInitialComponent());
        }
    }

    /**
     * Build the lazy AJAX tree model. Children are loaded per node: structure children first,
     * then the artefacts linked to the expanded structure. As a side effect the idToPath and
     * pathToStructure caches are populated while nodes are expanded.
     */
    private AjaxTreeModel buildTreeModel() {
        idToPath.put(rootNode.getKey(), "/" + ROOT_NODE_IDENTIFIER);
        final AjaxTreeModel model = new AjaxTreeModel(ROOT_NODE_IDENTIFIER) {

            @Override
            public List<AjaxTreeNode> getChildrenFor(final String nodeId) {
                final List<AjaxTreeNode> children = new ArrayList<AjaxTreeNode>();
                AjaxTreeNode child;
                boolean isRoot = false;
                PortfolioStructure selStruct = null;
                try {
                    List<PortfolioStructure> structs = new ArrayList<PortfolioStructure>();
                    if (nodeId.equals(ROOT_NODE_IDENTIFIER)) {
                        structs.add(rootNode);
                        isRoot = true;
                    } else if (!nodeId.startsWith(ARTEFACT_NODE_IDENTIFIER)) {
                        selStruct = ePFMgr.loadPortfolioStructureByKey(new Long(nodeId));
                        structs = ePFMgr.loadStructureChildren(selStruct);
                    } else {
                        // its an artefact -> no childs anymore
                        return null;
                    }
                    if (structs != null && structs.size() != 0) {
                        for (final PortfolioStructure portfolioStructure : structs) {
                            final String childNodeId = String.valueOf(portfolioStructure.getKey());
                            child = new AjaxTreeNode(childNodeId, portfolioStructure.getTitle());
                            final boolean hasStructureChild = eSTMgr.countStructureChildren(portfolioStructure) > 0;
                            final boolean hasArtefacts = eSTMgr.countArtefacts(portfolioStructure) > 0;
                            final boolean hasChilds = hasStructureChild || hasArtefacts;
                            // TODO: epf: RH: seems to be a bug, nothing can be dropped on a leaf, why that??
                            // child.put(AjaxTreeNode.CONF_LEAF, !hasChilds);
                            child.put(AjaxTreeNode.CONF_IS_TYPE_LEAF, !hasChilds);
                            child.put(AjaxTreeNode.CONF_ALLOWDRAG, !isRoot);
                            final boolean isOpen = hasStructureChild;
                            // boolean isOpen =(((EPStructureElement) portfolioStructure).getChildren().size() != 0);
                            child.put(AjaxTreeNode.CONF_EXPANDED, isOpen);
                            child.put(AjaxTreeNode.CONF_ALLOWDROP, !isRoot);
                            child.put(AjaxTreeNode.CONF_ICON_CSS_CLASS, portfolioStructure.getIcon());
                            final String description = FilterFactory.getHtmlTagAndDescapingFilter().filter(portfolioStructure.getDescription());
                            child.put(AjaxTreeNode.CONF_QTIP, description);
                            children.add(child);
                            String path;
                            if (isRoot) {
                                path = "/" + ROOT_NODE_IDENTIFIER;
                            } else {
                                path = idToPath.get(selStruct.getKey());
                            }
                            idToPath.put(portfolioStructure.getKey(), path + "/" + childNodeId);
                        }
                    }
                    if (selStruct != null && ePFMgr.countArtefactsRecursively(selStruct) != 0) {
                        final List<AbstractArtefact> artList = ePFMgr.getArtefacts(selStruct);
                        for (final AbstractArtefact abstractArtefact : artList) {
                            // include struct also, to still be unique if an artefact is linked multiple times
                            final String childNodeId = ARTEFACT_NODE_IDENTIFIER + String.valueOf(selStruct.getKey()) + "_" + String.valueOf(abstractArtefact.getKey());
                            child = new AjaxTreeNode(childNodeId, abstractArtefact.getTitle());
                            child.put(AjaxTreeNode.CONF_LEAF, true);
                            child.put(AjaxTreeNode.CONF_IS_TYPE_LEAF, true);
                            child.put(AjaxTreeNode.CONF_ALLOWDRAG, true);
                            child.put(AjaxTreeNode.CONF_EXPANDED, false);
                            child.put(AjaxTreeNode.CONF_ALLOWDROP, false);
                            child.put(AjaxTreeNode.CONF_ICON_CSS_CLASS, abstractArtefact.getIcon());
                            final String description = FilterFactory.getHtmlTagAndDescapingFilter().filter(abstractArtefact.getDescription());
                            child.put(AjaxTreeNode.CONF_QTIP, description);
                            children.add(child);
                            final String path = idToPath.get(selStruct.getKey());
                            final String artefactPath = path + "/" + childNodeId;
                            idToPath.put(abstractArtefact.getKey(), artefactPath);
                            pathToStructure.put(artefactPath, selStruct);
                        }
                    }
                } catch (final JSONException e) {
                    throw new OLATRuntimeException("Error while creating tree model for map/page/structure selection", e);
                }
                return children;
            }
        };
        model.setCustomRootIconCssClass("o_st_icon");
        return model;
    }

    /** Reload and re-select the tree node of the given structure (no-op if not yet cached). */
    public void update(final PortfolioStructure structure) {
        final String path = idToPath.get(structure.getKey());
        if (path != null) {
            treeCtr.reloadPath(path);
            treeCtr.selectPath(path);
        }
    }

    /**
     * Handles the delete button: removes the last clicked artefact from its parent structure,
     * or removes the last clicked structure element recursively (the last page of a map is
     * protected from deletion).
     *
     * @see org.olat.core.gui.control.DefaultController#event(org.olat.core.gui.UserRequest, org.olat.core.gui.components.Component, org.olat.core.gui.control.Event)
     */
    @SuppressWarnings("unused")
    @Override
    protected void event(final UserRequest ureq, final Component source, final Event event) {
        if (source instanceof Link) {
            final Link link = (Link) source;
            if (link.getCommand().equals(DELETE_LINK_CMD)) {
                if (artefactNodeClicked != null) {
                    final AbstractArtefact artefact = ePFMgr.loadArtefactByKey(new Long(getArtefactIdFromNodeId(artefactNodeClicked)));
                    final PortfolioStructure parentStruct = ePFMgr.loadPortfolioStructureByKey(new Long(getArtefactParentStructIdFromNodeId(artefactNodeClicked)));
                    ePFMgr.removeArtefactFromStructure(artefact, parentStruct);
                    // refresh the view
                    fireEvent(ureq, Event.CHANGED_EVENT);
                } else if (structureClicked != null) {
                    if ((structureClicked instanceof EPPage || structureClicked instanceof EPStructureElement) && !(structureClicked instanceof EPAbstractMap)) {
                        PortfolioStructure ps = structureClicked;
                        // climb up to the root map to count its pages
                        while (ePFMgr.loadStructureParent(ps) != null) {
                            ps = ePFMgr.loadStructureParent(ps);
                        }
                        final int childPages = ePFMgr.countStructureChildren(ps);
                        if (childPages > 1) {
                            eSTMgr.removeStructureRecursively(structureClicked);
                            // refresh the view
                            fireEvent(ureq, Event.CHANGED_EVENT);
                        } else {
                            showError("last.page.not.deletable");
                        }
                    } else {
                        showInfo("element.not.deletable");
                    }
                }
            }
        }
    }

    /**
     * Handles tree clicks (select element / artefact / root) and drag&drop moves of
     * artefacts and structure elements; child-controller change events are forwarded.
     *
     * @see org.olat.core.gui.control.DefaultController#event(org.olat.core.gui.UserRequest, org.olat.core.gui.control.Controller, org.olat.core.gui.control.Event)
     */
    @Override
    protected void event(final UserRequest ureq, final Controller source, final Event event) {
        if (event instanceof TreeNodeClickedEvent) {
            resetClickedNodes();
            final TreeNodeClickedEvent treeEv = (TreeNodeClickedEvent) event;
            final String nodeClicked = treeEv.getNodeId();
            final boolean isArtefactNode = nodeClicked.startsWith(ARTEFACT_NODE_IDENTIFIER);
            if (!nodeClicked.equals(ROOT_NODE_IDENTIFIER) && !isArtefactNode) {
                structureClicked = ePFMgr.loadPortfolioStructureByKey(new Long(nodeClicked));
                refreshAddElements(ureq, structureClicked);
                delButton.setVisible(true);
                // send event to load this page
                fireEvent(ureq, new EPStructureChangeEvent(EPStructureChangeEvent.SELECTED, structureClicked));
                // needed because refreshAddElements set flc dirty, therefore selected node gets lost
                final String path = idToPath.get(structureClicked.getKey());
                treeCtr.selectPath(path);
            } else if (isArtefactNode) {
                artefactNodeClicked = nodeClicked;
                refreshAddElements(ureq, null);
                delButton.setVisible(true);
                final String artIdent = getArtefactIdFromNodeId(nodeClicked);
                final String path = idToPath.get(new Long(artIdent));
                final PortfolioStructure structure = pathToStructure.get(path);
                fireEvent(ureq, new EPArtefactClicked(ARTEFACT_NODE_CLICKED, structure));
                // needed because refreshAddElements set flc dirty, therefore selected node gets lost
                treeCtr.selectPath(path);
            } else {
                // root tree node clicked, no add/delete link
                delButton.setVisible(false);
                refreshAddElements(ureq, null);
                fireEvent(ureq, new Event(ARTEFACT_NODE_CLICKED));
            }
        } else if (event instanceof MoveTreeNodeEvent) {
            resetClickedNodes();
            final MoveTreeNodeEvent moveEvent = (MoveTreeNodeEvent) event;
            final String movedNode = moveEvent.getNodeId();
            final String oldParent = moveEvent.getOldParentNodeId();
            final String newParent = moveEvent.getNewParentNodeId();
            final boolean isArtefactNode = movedNode.startsWith(ARTEFACT_NODE_IDENTIFIER);
            if (isArtefactNode) {
                final String nodeId = getArtefactIdFromNodeId(movedNode);
                if (checkNewArtefactTarget(nodeId, newParent)) {
                    if (moveArtefactToNewParent(nodeId, oldParent, newParent)) {
                        // NOTE(review): guarded by isLogDebugEnabled() but logs at info level — confirm intent
                        if (isLogDebugEnabled()) {
                            logInfo("moved artefact " + nodeId + " from structure " + oldParent + " to " + newParent, null);
                        }
                        moveEvent.setResult(true, null, null);
                        // refresh the view
                        final EPMoveEvent movedEvent = new EPMoveEvent(newParent, nodeId);
                        fireEvent(ureq, movedEvent);
                    } else {
                        moveEvent.setResult(false, translate("move.error.title"), translate("move.artefact.error.move"));
                    }
                } else {
                    moveEvent.setResult(false, translate("move.error.title"), translate("move.artefact.error.target"));
                }
            } else {
                if (checkNewStructureTarget(movedNode, oldParent, newParent)) {
                    if (moveStructureToNewParent(movedNode, oldParent, newParent)) {
                        // NOTE(review): guarded by isLogDebugEnabled() but logs at info level — confirm intent
                        if (isLogDebugEnabled()) {
                            logInfo("moved structure " + movedNode + " from structure " + oldParent + " to " + newParent, null);
                        }
                        moveEvent.setResult(true, null, null);
                        // refresh the view
                        final EPMoveEvent movedEvent = new EPMoveEvent(newParent, movedNode);
                        fireEvent(ureq, movedEvent);
                    } else {
                        moveEvent.setResult(false, translate("move.error.title"), translate("move.struct.error.move"));
                    }
                } else {
                    moveEvent.setResult(false, translate("move.error.title"), translate("move.struct.error.target"));
                }
            }
        } else if (source == addElCtrl) {
            // refresh the view, this is a EPStructureChangeEvent
            fireEvent(ureq, event);
        }
    }

    // reset previously chosen nodes; the references are only kept to be able to delete a node
    private void resetClickedNodes() {
        structureClicked = null;
        artefactNodeClicked = null;
    }

    /** Extract the artefact key from a node id of the form "art&lt;structKey&gt;_&lt;artefactKey&gt;". */
    private String getArtefactIdFromNodeId(final String nodeId) {
        String artId = nodeId.substring(ARTEFACT_NODE_IDENTIFIER.length());
        if (artId.contains("_")) {
            artId = artId.substring(artId.indexOf("_") + 1);
        }
        return artId;
    }

    /** Extract the parent structure key from a node id of the form "art&lt;structKey&gt;_&lt;artefactKey&gt;". */
    private String getArtefactParentStructIdFromNodeId(final String nodeId) {
        String structId = nodeId.substring(ARTEFACT_NODE_IDENTIFIER.length());
        if (structId.contains("_")) {
            structId = structId.substring(0, structId.indexOf("_"));
        }
        return structId;
    }

    /**
     * check if an artefact might be moved to this new parent node artefact might be moved to pages or structureElements, but not on maps
     *
     * @param artefactId
     * @param structureId
     * @return
     */
    private boolean checkNewArtefactTarget(final String artefactId, final String structureId) {
        // artefact cannot be moved directly under root
        if (ROOT_NODE_IDENTIFIER.equals(structureId)) {
            return false;
        }
        PortfolioStructure newParStruct;
        AbstractArtefact artefact;
        try {
            artefact = ePFMgr.loadArtefactByKey(new Long(artefactId));
            newParStruct = ePFMgr.loadPortfolioStructureByKey(new Long(structureId));
        } catch (final Exception e) {
            logWarn("could not check for valid artefact target", e);
            return false;
        }
        final boolean sameTarget = ePFMgr.isArtefactInStructure(artefact, newParStruct);
        if (sameTarget) {
            return false;
        }
        if (newParStruct instanceof EPAbstractMap) {
            return false;
        }
        return true;
    }

    // really do the move!
    private boolean moveArtefactToNewParent(final String artefactId, final String oldParentId, final String newParentId) {
        PortfolioStructure newParStruct;
        PortfolioStructure oldParStruct;
        AbstractArtefact artefact;
        try {
            artefact = ePFMgr.loadArtefactByKey(new Long(artefactId));
            oldParStruct = ePFMgr.loadPortfolioStructureByKey(new Long(oldParentId));
            newParStruct = ePFMgr.loadPortfolioStructureByKey(new Long(newParentId));
        } catch (final Exception e) {
            logError("could not load artefact, old and new parent", e);
            return false;
        }
        return ePFMgr.moveArtefactFromStructToStruct(artefact, oldParStruct, newParStruct);
    }

    /**
     * check if a structure (page/structEl/map may be dropped here! its only allowed to move: - StructureElement from page -> page - change the order of pages - change
     * the order of structures
     *
     * @param subjectStructId
     * @param oldParStructId
     * @param newParStructId
     * @return
     */
    private boolean checkNewStructureTarget(final String subjectStructId, final String oldParStructId, final String newParStructId) {
        PortfolioStructure structToBeMvd;
        PortfolioStructure oldParStruct;
        PortfolioStructure newParStruct;
        if (newParStructId.equals(ROOT_NODE_IDENTIFIER)) {
            return false;
        }
        try {
            structToBeMvd = ePFMgr.loadPortfolioStructureByKey(new Long(subjectStructId));
            oldParStruct = ePFMgr.loadPortfolioStructureByKey(new Long(oldParStructId));
            newParStruct = ePFMgr.loadPortfolioStructureByKey(new Long(newParStructId));
        } catch (final Exception e) {
            logError("could not check for valid structure target", e);
            return false;
        }
        if (newParStruct instanceof EPAbstractMap) {
            return false;
        }
        if (oldParStruct.getKey().equals(newParStruct.getKey())) {
            return false;
        }
        if (structToBeMvd instanceof EPPage && newParStruct instanceof EPPage) {
            return false;
        }
        if (structToBeMvd instanceof EPStructureElement && !(newParStruct instanceof EPPage)) {
            return false;
        }
        // how to allow changing of order??
        // TODO: epf: RH: allow move, it seems this needs to fix in js on gui
        // if (structToBeMvd instanceof EPPage && (newParStruct instanceof EPPage || newParStruct instanceof ) return false;
        // if (structToBeMvd instanceof EPStructureElement) return true;
        return true;
    }

    // really do the move
    private boolean moveStructureToNewParent(final String subjectStructId, final String oldParStructId, final String newParStructId) {
        PortfolioStructure structToBeMvd;
        PortfolioStructure oldParStruct;
        PortfolioStructure newParStruct;
        try {
            structToBeMvd = ePFMgr.loadPortfolioStructureByKey(new Long(subjectStructId));
            oldParStruct = ePFMgr.loadPortfolioStructureByKey(new Long(oldParStructId));
            newParStruct = ePFMgr.loadPortfolioStructureByKey(new Long(newParStructId));
        } catch (final Exception e) {
            logError("could not load: structure to be moved, old or new structure while trying to move", e);
            return false;
        }
        return ePFMgr.moveStructureToNewParentStructure(structToBeMvd, oldParStruct, newParStruct);
    }

    /**
     * @see org.olat.core.gui.control.DefaultController#doDispose()
     */
    @Override
    protected void doDispose() {
        // nothing to dispose; child controllers are cleaned up via listenTo() registration
    }

}
package org.ovirt.engine.core.bll.storage.export;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang.StringUtils;
import org.ovirt.engine.core.bll.Backend;
import org.ovirt.engine.core.bll.DisableInPrepareMode;
import org.ovirt.engine.core.bll.LockMessagesMatchUtil;
import org.ovirt.engine.core.bll.NonTransactiveCommandAttribute;
import org.ovirt.engine.core.bll.VmHandler;
import org.ovirt.engine.core.bll.VmTemplateHandler;
import org.ovirt.engine.core.bll.context.EngineContext;
import org.ovirt.engine.core.bll.memory.MemoryUtils;
import org.ovirt.engine.core.bll.snapshots.SnapshotsValidator;
import org.ovirt.engine.core.bll.storage.disk.image.ImagesHandler;
import org.ovirt.engine.core.bll.storage.ovfstore.OvfUpdateProcessHelper;
import org.ovirt.engine.core.bll.utils.ClusterUtils;
import org.ovirt.engine.core.bll.utils.VmDeviceUtils;
import org.ovirt.engine.core.bll.validator.VmValidator;
import org.ovirt.engine.core.bll.validator.storage.DiskImagesValidator;
import org.ovirt.engine.core.bll.validator.storage.MultipleStorageDomainsValidator;
import org.ovirt.engine.core.bll.validator.storage.StorageDomainValidator;
import org.ovirt.engine.core.bll.validator.storage.StoragePoolValidator;
import org.ovirt.engine.core.common.AuditLogType;
import org.ovirt.engine.core.common.VdcObjectType;
import org.ovirt.engine.core.common.action.LockProperties;
import org.ovirt.engine.core.common.action.LockProperties.Scope;
import org.ovirt.engine.core.common.action.MoveOrCopyImageGroupParameters;
import org.ovirt.engine.core.common.action.MoveVmParameters;
import org.ovirt.engine.core.common.action.VdcActionType;
import org.ovirt.engine.core.common.action.VdcReturnValueBase;
import org.ovirt.engine.core.common.asynctasks.EntityInfo;
import org.ovirt.engine.core.common.businessentities.Snapshot;
import org.ovirt.engine.core.common.businessentities.Snapshot.SnapshotType;
import org.ovirt.engine.core.common.businessentities.StorageDomainStatic;
import org.ovirt.engine.core.common.businessentities.StorageDomainType;
import org.ovirt.engine.core.common.businessentities.StoragePoolIsoMapId;
import org.ovirt.engine.core.common.businessentities.VM;
import org.ovirt.engine.core.common.businessentities.VmTemplate;
import org.ovirt.engine.core.common.businessentities.network.VmNetworkInterface;
import org.ovirt.engine.core.common.businessentities.storage.CopyVolumeType;
import org.ovirt.engine.core.common.businessentities.storage.Disk;
import org.ovirt.engine.core.common.businessentities.storage.DiskImage;
import org.ovirt.engine.core.common.businessentities.storage.VolumeFormat;
import org.ovirt.engine.core.common.businessentities.storage.VolumeType;
import org.ovirt.engine.core.common.errors.EngineException;
import org.ovirt.engine.core.common.errors.EngineMessage;
import org.ovirt.engine.core.common.locks.LockingGroup;
import org.ovirt.engine.core.common.queries.GetAllFromExportDomainQueryParameters;
import org.ovirt.engine.core.common.queries.VdcQueryReturnValue;
import org.ovirt.engine.core.common.queries.VdcQueryType;
import org.ovirt.engine.core.common.utils.Pair;
import org.ovirt.engine.core.common.vdscommands.GetImageInfoVDSCommandParameters;
import org.ovirt.engine.core.common.vdscommands.UpdateVMVDSCommandParameters;
import org.ovirt.engine.core.common.vdscommands.VDSCommandType;
import org.ovirt.engine.core.common.vdscommands.VDSReturnValue;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.core.compat.KeyValuePairCompat;
import org.ovirt.engine.core.utils.GuidUtils;
import org.ovirt.engine.core.utils.ovf.OvfManager;
import org.ovirt.engine.core.utils.transaction.TransactionSupport;

/**
 * Exports a VM to an export (ImportExport) storage domain: the VM's disk images and the
 * memory volumes of its snapshots are copied to the target domain, and the VM's OVF
 * representation is written there. In "copy collapse" mode the disk chains are collapsed
 * into their active state and only the active snapshot's OVF is exported.
 */
@DisableInPrepareMode
@NonTransactiveCommandAttribute(forceCompensation = true)
public class ExportVmCommand<T extends MoveVmParameters> extends MoveOrCopyTemplateCommand<T> {

    // Lazily-built cache of the VM's image disks, filled by getDisksBasedOnImage().
    private List<DiskImage> disksImages;
    // Snapshots whose memory volumes must also be copied; computed in validate().
    private Collection<Snapshot> snapshotsWithMemory;

    /**
     * Constructor for command creation when compensation is applied on startup
     */
    public ExportVmCommand(Guid commandId) {
        super(commandId);
    }

    public ExportVmCommand(T parameters) {
        super(parameters);
        setVmId(parameters.getContainerId());
        parameters.setEntityInfo(new EntityInfo(VdcObjectType.VM, getVmId()));
    }

    @Override
    protected LockProperties applyLockProperties(LockProperties lockProperties) {
        // lock is held only for the execution phase (see getExclusiveLocks())
        return lockProperties.withScope(Scope.Execution);
    }

    /**
     * Validates the export: VM exists and is down, the target domain is an existing,
     * active export domain in the VM's storage pool, disks are legal and unlocked,
     * image formats are exportable, and the target has enough free space for the
     * disks plus any snapshot memory volumes.
     */
    @Override
    protected boolean validate() {
        if (getVm() == null) {
            return failValidation(EngineMessage.ACTION_TYPE_FAILED_VM_NOT_FOUND);
        }
        setDescription(getVmName());
        setStoragePoolId(getVm().getStoragePoolId());

        // check that target domain exists
        StorageDomainValidator targetstorageDomainValidator = new StorageDomainValidator(getStorageDomain());
        if (!validate(targetstorageDomainValidator.isDomainExistAndActive())) {
            return false;
        }

        // load the disks of vm from database
        VmHandler.updateDisksFromDb(getVm());
        List<DiskImage> disksForExport = getDisksBasedOnImage();
        DiskImagesValidator diskImagesValidator = new DiskImagesValidator(disksForExport);
        if (!validate(diskImagesValidator.diskImagesNotIllegal())
                || !validate(diskImagesValidator.diskImagesNotLocked())) {
            return false;
        }

        // update vm snapshots for storage free space check
        ImagesHandler.fillImagesBySnapshots(getVm());

        // check that the target and source domain are in the same storage_pool
        if (getDbFacade().getStoragePoolIsoMapDao()
                .get(new StoragePoolIsoMapId(getStorageDomain().getId(), getVm().getStoragePoolId())) == null) {
            addValidationMessage(EngineMessage.ACTION_TYPE_FAILED_STORAGE_POOL_NOT_MATCH);
            return false;
        }

        // check if template exists only if asked for
        if (getParameters().getTemplateMustExists()) {
            if (!checkTemplateInStorageDomain(getVm().getStoragePoolId(), getParameters().getStorageDomainId(),
                    getVm().getVmtGuid(), getContext().getEngineContext())) {
                return failValidation(EngineMessage.ACTION_TYPE_FAILED_TEMPLATE_NOT_FOUND_ON_EXPORT_DOMAIN,
                        String.format("$TemplateName %1$s", getVm().getVmtName()));
            }
        }

        // check that the images requested format are valid (COW+Sparse)
        if (!ImagesHandler.checkImagesConfiguration(getParameters().getStorageDomainId(),
                disksForExport,
                getReturnValue().getValidationMessages())) {
            return false;
        }

        Map<Guid, ? extends Disk> images = getVm().getDiskMap();
        if (getParameters().getCopyCollapse()) {
            for (DiskImage img : disksForExport) {
                if (images.containsKey(img.getId())) {
                    // check that no RAW format exists (we are in collapse mode)
                    if (((DiskImage) images.get(img.getId())).getVolumeFormat() == VolumeFormat.RAW
                            && img.getVolumeFormat() != VolumeFormat.RAW) {
                        addValidationMessage(EngineMessage.VM_CANNOT_EXPORT_RAW_FORMAT);
                        return false;
                    }
                }
            }
        }

        // check destination storage is Export domain
        if (getStorageDomain().getStorageDomainType() != StorageDomainType.ImportExport) {
            return failValidation(EngineMessage.ACTION_TYPE_FAILED_SPECIFY_DOMAIN_IS_NOT_EXPORT_DOMAIN,
                    String.format("$storageDomainName %1$s", getStorageDomainName()));
        }

        // get the snapshots that are going to be exported and have memory
        snapshotsWithMemory = getSnapshotsToBeExportedWithMemory();

        // check destination storage have free space
        if (!handleDestStorageDomain(disksForExport)) {
            return false;
        }

        SnapshotsValidator snapshotValidator = new SnapshotsValidator();
        if (!(checkVmInStorageDomain()
                && validate(new StoragePoolValidator(getStoragePool()).isUp())
                && validate(snapshotValidator.vmNotDuringSnapshot(getVmId()))
                && validate(snapshotValidator.vmNotInPreview(getVmId()))
                && validate(new VmValidator(getVm()).vmDown())
                && validate(new MultipleStorageDomainsValidator(getVm().getStoragePoolId(),
                        ImagesHandler.getAllStorageIdsForImageIds(disksForExport)).allDomainsExistAndActive()))) {
            return false;
        }

        return true;
    }

    // Validates free space on the destination domain for the disks plus memory volumes.
    private boolean handleDestStorageDomain(List<DiskImage> disksList) {
        ensureDomainMap(disksList, getStorageDomainId());
        List<DiskImage> dummiesDisksList = createDiskDummiesForSpaceValidations(disksList);
        dummiesDisksList.addAll(getMemoryVolumes());
        return validateSpaceRequirements(dummiesDisksList);
    }

    // Builds dummy disk images representing the memory dump + metadata volumes of all
    // snapshots-with-memory, sized from the VM's total memory, for space validation only.
    private List<DiskImage> getMemoryVolumes() {
        int numOfSnapshots = snapshotsWithMemory.size();
        long memorySize = numOfSnapshots * getVm().getTotalMemorySizeInBytes();
        long metadataSize = numOfSnapshots * MemoryUtils.METADATA_SIZE_IN_BYTES;
        List<DiskImage> memoryDisksList = MemoryUtils.createDiskDummies(memorySize, metadataSize);

        //Set target domain in memory disks
        ArrayList<Guid> sdId = new ArrayList<>(Collections.singletonList(getStorageDomainId()));
        for (DiskImage diskImage : memoryDisksList) {
            diskImage.setStorageIds(sdId);
        }
        return memoryDisksList;
    }

    // In copy-collapse mode only the ACTIVE snapshot's memory (if any) is exported;
    // otherwise all snapshots with a non-empty memory volume are returned, de-duplicated
    // by memory-volume string (snapshots sharing a volume are exported once).
    private Collection<Snapshot> getSnapshotsToBeExportedWithMemory() {
        if (getParameters().getCopyCollapse()) {
            Snapshot activeSnapshot = getSnapshotDao().get(getVmId(), SnapshotType.ACTIVE);
            return !activeSnapshot.getMemoryVolume().isEmpty() ?
                    Collections.<Snapshot>singleton(activeSnapshot) : Collections.<Snapshot>emptyList();
        } else {
            Map<String, Snapshot> memory2snapshot = new HashMap<>();
            for (Snapshot snapshot : getSnapshotDao().getAll(getVmId())) {
                memory2snapshot.put(snapshot.getMemoryVolume(), snapshot);
            }
            memory2snapshot.remove(StringUtils.EMPTY);
            return memory2snapshot.values();
        }
    }

    @Override
    protected void setActionMessageParameters() {
        addValidationMessage(EngineMessage.VAR__ACTION__EXPORT);
        addValidationMessage(EngineMessage.VAR__TYPE__VM);
    }

    /**
     * Locks the VM, then kicks off the image-group copies in a new transaction.
     * If there is nothing to copy (no snappable disks and no memory volumes),
     * the command is ended immediately.
     */
    @Override
    protected void executeCommand() {
        VmHandler.lockVm(getVm().getDynamicData(), getCompensationContext());
        freeLock();

        // update vm init
        VmHandler.updateVmInitFromDB(getVm().getStaticData(), true);

        // Means that there are no asynchronous tasks to execute - so we can end the command
        // immediately after the execution of the previous steps
        if (!hasSnappableDisks() && snapshotsWithMemory.isEmpty()) {
            endSuccessfully();
        } else {
            TransactionSupport.executeInNewTransaction(() -> {
                moveOrCopyAllImageGroups();
                return null;
            });
            if (!getReturnValue().getVdsmTaskIdList().isEmpty()) {
                setSucceeded(true);
            }
        }
    }

    private boolean hasSnappableDisks() {
        return !getDisksBasedOnImage().isEmpty();
    }

    /**
     * Writes the copy-collapsed VM's OVF to the export domain via the SPM.
     * Disk entries are rebased onto the blank template and refreshed with actual
     * sizes queried from VDSM before the OVF is generated.
     * NOTE(review): mutates both the passed {@code vm} and {@code getVm()}
     * (vmt guid/name) as a side effect.
     */
    private boolean updateCopyVmInSpm(Guid storagePoolId, VM vm, Guid storageDomainId) {
        HashMap<Guid, KeyValuePairCompat<String, List<Guid>>> vmsAndMetaDictionary = new HashMap<>();
        OvfManager ovfManager = new OvfManager();
        ArrayList<DiskImage> AllVmImages = new ArrayList<>();
        List<VmNetworkInterface> interfaces = vm.getInterfaces();
        if (interfaces != null) {
            // TODO remove this when the API changes
            interfaces.clear();
            interfaces.addAll(getDbFacade().getVmNetworkInterfaceDao().getAllForVm(vm.getId()));
        }
        List<Guid> imageGroupIds = new ArrayList<>();
        for (Disk disk : getDisksBasedOnImage()) {
            DiskImage diskImage = (DiskImage) disk;
            // detach the image from its template: collapsed export has no template parent
            diskImage.setParentId(VmTemplateHandler.BLANK_VM_TEMPLATE_ID);
            diskImage.setImageTemplateId(VmTemplateHandler.BLANK_VM_TEMPLATE_ID);
            diskImage.setStorageIds(new ArrayList<>(Arrays.asList(storageDomainId)));
            DiskImage diskForVolumeInfo = getDiskForVolumeInfo(diskImage);
            diskImage.setvolumeFormat(diskForVolumeInfo.getVolumeFormat());
            diskImage.setVolumeType(diskForVolumeInfo.getVolumeType());
            // refresh the actual size from VDSM so the OVF carries real numbers;
            // a failed query is tolerated and the stale size is kept
            VDSReturnValue vdsReturnValue = runVdsCommand(
                    VDSCommandType.GetImageInfo,
                    new GetImageInfoVDSCommandParameters(storagePoolId, storageDomainId, diskImage
                            .getId(), diskImage.getImageId()));
            if (vdsReturnValue != null && vdsReturnValue.getSucceeded()) {
                DiskImage fromVdsm = (DiskImage) vdsReturnValue.getReturnValue();
                diskImage.setActualSizeInBytes(fromVdsm.getActualSizeInBytes());
            }
            AllVmImages.add(diskImage);
            imageGroupIds.add(disk.getId());
        }
        if (StringUtils.isEmpty(vm.getVmtName())) {
            VmTemplate t = getDbFacade().getVmTemplateDao().get(vm.getVmtGuid());
            vm.setVmtName(t.getName());
        }
        getVm().setVmtGuid(VmTemplateHandler.BLANK_VM_TEMPLATE_ID);
        String vmMeta = ovfManager.exportVm(vm, AllVmImages, ClusterUtils.getCompatibilityVersion(vm));

        vmsAndMetaDictionary.put(vm.getId(), new KeyValuePairCompat<>(vmMeta, imageGroupIds));
        UpdateVMVDSCommandParameters tempVar = new UpdateVMVDSCommandParameters(storagePoolId, vmsAndMetaDictionary);
        tempVar.setStorageDomainId(storageDomainId);
        return runVdsCommand(VDSCommandType.UpdateVM, tempVar)
                .getSucceeded();
    }

    @Override
    protected void moveOrCopyAllImageGroups() {
        // Disks
        moveOrCopyAllImageGroups(getVm().getId(), getDisksBasedOnImage());
        // Memory volumes
        copyAllMemoryImages(getVm().getId());
    }

    // Cached filter of the VM's disk map down to its image disks.
    private List<DiskImage> getDisksBasedOnImage() {
        if (disksImages == null) {
            disksImages = ImagesHandler.filterImageDisks(getVm().getDiskMap().values(), true, false, true);
        }
        return disksImages;
    }

    /**
     * Copies the memory dump image and the memory configuration image of every
     * snapshot-with-memory to the export domain. The snapshot's memory-volume string
     * decodes to a GUID list: index 0 is the source storage domain, 2/3 are the dump
     * image/volume ids and 4/5 the configuration image/volume ids (per the parameter
     * names of the builder methods below).
     */
    private void copyAllMemoryImages(Guid containerID) {
        for (Snapshot snapshot : snapshotsWithMemory) {
            List<Guid> guids = GuidUtils.getGuidListFromString(snapshot.getMemoryVolume());

            // copy the memory dump image
            VdcReturnValueBase vdcRetValue = runInternalActionWithTasksContext(
                    VdcActionType.CopyImageGroup,
                    buildMoveOrCopyImageGroupParametersForMemoryDumpImage(
                            containerID, guids.get(0), guids.get(2), guids.get(3)));
            if (!vdcRetValue.getSucceeded()) {
                throw new EngineException(vdcRetValue.getFault().getError(), "Failed during ExportVmCommand");
            }
            getReturnValue().getVdsmTaskIdList().addAll(vdcRetValue.getInternalVdsmTaskIdList());

            // copy the memory configuration (of the VM) image
            vdcRetValue = runInternalActionWithTasksContext(
                    VdcActionType.CopyImageGroup,
                    buildMoveOrCopyImageGroupParametersForMemoryConfImage(
                            containerID, guids.get(0), guids.get(4), guids.get(5)));
            if (!vdcRetValue.getSucceeded()) {
                throw new EngineException(vdcRetValue.getFault().getError(), "Failed during ExportVmCommand");
            }
            getReturnValue().getVdsmTaskIdList().addAll(vdcRetValue.getInternalVdsmTaskIdList());
        }
    }

    // Builds CopyImageGroup parameters for a snapshot's memory dump volume.
    private MoveOrCopyImageGroupParameters buildMoveOrCopyImageGroupParametersForMemoryDumpImage(
            Guid containerID, Guid storageDomainId, Guid imageId, Guid volumeId) {
        MoveOrCopyImageGroupParameters params = new MoveOrCopyImageGroupParameters(containerID, imageId,
                volumeId, getParameters().getStorageDomainId(), getMoveOrCopyImageOperation());
        params.setParentCommand(getActionType());
        params.setCopyVolumeType(CopyVolumeType.LeafVol);
        params.setForceOverride(getParameters().getForceOverride());
        params.setSourceDomainId(storageDomainId);
        params.setEntityInfo(getParameters().getEntityInfo());
        params.setParentParameters(getParameters());

        StorageDomainStatic sourceDomain = getStorageDomainStaticDao().get(storageDomainId);

        // if the data domain is a block based storage, the memory volume type is preallocated
        // so we need to use copy collapse in order to convert it to be sparsed in the export domain
        if (sourceDomain.getStorageType().isBlockDomain()) {
            params.setUseCopyCollapse(true);
            params.setVolumeType(VolumeType.Sparse);
            params.setVolumeFormat(VolumeFormat.RAW);
        }

        return params;
    }

    // Builds CopyImageGroup parameters for a snapshot's memory configuration volume.
    private MoveOrCopyImageGroupParameters buildMoveOrCopyImageGroupParametersForMemoryConfImage(
            Guid containerID, Guid storageDomainId, Guid imageId, Guid volumeId) {
        MoveOrCopyImageGroupParameters params = new MoveOrCopyImageGroupParameters(containerID, imageId,
                volumeId, getParameters().getStorageDomainId(), getMoveOrCopyImageOperation());
        params.setParentCommand(getActionType());
        // This volume is always of type 'sparse' and format 'cow' so no need to convert,
        // and there're no snapshots for it so no reason to use copy collapse
        params.setUseCopyCollapse(false);
        params.setEntityInfo(getParameters().getEntityInfo());
        params.setCopyVolumeType(CopyVolumeType.LeafVol);
        params.setForceOverride(getParameters().getForceOverride());
        params.setParentParameters(getParameters());
        params.setSourceDomainId(storageDomainId);
        return params;
    }

    // Launches an async CopyImageGroup per disk; any failure aborts the whole export.
    @Override
    protected void moveOrCopyAllImageGroups(Guid containerID, Iterable<DiskImage> disks) {
        for (DiskImage disk : disks) {
            VdcReturnValueBase vdcRetValue = runInternalActionWithTasksContext(
                    VdcActionType.CopyImageGroup,
                    buildMoveOrCopyImageGroupParametersForDisk(containerID, disk));
            if (!vdcRetValue.getSucceeded()) {
                throw new EngineException(vdcRetValue.getFault().getError(), "Failed during ExportVmCommand");
            }
            getReturnValue().getVdsmTaskIdList().addAll(vdcRetValue.getInternalVdsmTaskIdList());
        }
    }

    // Builds CopyImageGroup parameters for a regular VM disk.
    private MoveOrCopyImageGroupParameters buildMoveOrCopyImageGroupParametersForDisk(Guid containerID, DiskImage disk) {
        MoveOrCopyImageGroupParameters params = new MoveOrCopyImageGroupParameters(containerID, disk.getId(),
                disk.getImageId(), getParameters().getStorageDomainId(), getMoveOrCopyImageOperation());
        params.setParentCommand(getActionType());
        params.setEntityInfo(getParameters().getEntityInfo());
        params.setUseCopyCollapse(getParameters().getCopyCollapse());
        DiskImage diskForVolumeInfo = getDiskForVolumeInfo(disk);
        params.setVolumeFormat(diskForVolumeInfo.getVolumeFormat());
        params.setVolumeType(diskForVolumeInfo.getVolumeType());
        params.setCopyVolumeType(CopyVolumeType.LeafVol);
        params.setForceOverride(getParameters().getForceOverride());
        params.setParentParameters(getParameters());
        return params;
    }

    /**
     * Return the correct disk to get the volume info (type & allocation) from. For copy collapse it's the ancestral
     * disk of the given disk, and otherwise it's the disk itself.
     *
     * @param disk
     *            The disk for which to get the disk with the info.
     * @return The disk with the correct volume info.
     */
    private DiskImage getDiskForVolumeInfo(DiskImage disk) {
        if (getParameters().getCopyCollapse()) {
            return getDiskImageDao().getAncestor(disk.getImageId());
        } else {
            return disk;
        }
    }

    /**
     * Check that vm is in export domain. Fails validation if a VM with the same id
     * already exists there (unless force-override is set) or a different VM with the
     * same name exists there.
     */
    protected boolean checkVmInStorageDomain() {
        boolean retVal = true;
        GetAllFromExportDomainQueryParameters tempVar = new GetAllFromExportDomainQueryParameters(getVm()
                .getStoragePoolId(), getParameters().getStorageDomainId());
        VdcQueryReturnValue qretVal = runInternalQuery(VdcQueryType.GetVmsFromExportDomain, tempVar);

        if (qretVal.getSucceeded()) {
            ArrayList<VM> vms = qretVal.getReturnValue();
            for (VM vm : vms) {
                if (vm.getId().equals(getVm().getId())) {
                    if (!getParameters().getForceOverride()) {
                        addValidationMessage(EngineMessage.ACTION_TYPE_FAILED_VM_GUID_ALREADY_EXIST);
                        retVal = false;
                        break;
                    }
                } else if (vm.getName().equals(getVm().getName())) {
                    addValidationMessage(EngineMessage.ACTION_TYPE_FAILED_NAME_ALREADY_USED);
                    retVal = false;
                    break;
                }
            }
        }
        return retVal;
    }

    /**
     * Checks whether the given template exists on the export domain. The blank
     * template id is always considered present.
     */
    public static boolean checkTemplateInStorageDomain(Guid storagePoolId, Guid storageDomainId, final Guid tmplId,
            EngineContext engineContext) {
        boolean retVal = false;
        GetAllFromExportDomainQueryParameters tempVar = new GetAllFromExportDomainQueryParameters(storagePoolId,
                storageDomainId);
        VdcQueryReturnValue qretVal = Backend.getInstance().runInternalQuery(VdcQueryType.GetTemplatesFromExportDomain,
                tempVar, engineContext);

        if (qretVal.getSucceeded()) {
            if (!VmTemplateHandler.BLANK_VM_TEMPLATE_ID.equals(tmplId)) {
                Map<VmTemplate, List<DiskImage>> templates = qretVal.getReturnValue();
                retVal = templates.keySet().stream().anyMatch(vmTemplate -> vmTemplate.getId().equals(tmplId));
            } else {
                retVal = true;
            }
        }
        return retVal;
    }

    @Override
    public AuditLogType getAuditLogTypeValue() {
        switch (getActionState()) {
        case EXECUTE:
            return getSucceeded() ? AuditLogType.IMPORTEXPORT_STARTING_EXPORT_VM
                    : AuditLogType.IMPORTEXPORT_EXPORT_VM_FAILED;

        case END_SUCCESS:
            return getSucceeded() ? AuditLogType.IMPORTEXPORT_EXPORT_VM
                    : AuditLogType.IMPORTEXPORT_EXPORT_VM_FAILED;
        case END_FAILURE:
            return AuditLogType.IMPORTEXPORT_EXPORT_VM_FAILED;
        }
        // other action states fall through to the base implementation
        return super.getAuditLogTypeValue();
    }

    // Writes the (non-collapsed) VM's OVF metadata to the export domain via the SPM.
    protected boolean updateVmInSpm() {
        OvfUpdateProcessHelper ovfHelper = new OvfUpdateProcessHelper();
        Map<Guid, KeyValuePairCompat<String, List<Guid>>> metaDictionary = new HashMap<>();
        ovfHelper.loadVmData(getVm());
        ovfHelper.buildMetadataDictionaryForVm(getVm(), metaDictionary, ovfHelper.getVmImagesFromDb(getVm()));
        return ovfHelper.executeUpdateVmInSpmCommand(getVm().getStoragePoolId(),
                metaDictionary,
                getParameters().getStorageDomainId());
    }

    /**
     * Ends the copy tasks, writes the final OVF (collapsed or with full snapshot
     * configuration) and unlocks the VM.
     */
    @Override
    protected void endSuccessfully() {
        endActionOnAllImageGroups();
        VM vm = getVm();
        populateVmData(vm);

        if (getParameters().getCopyCollapse()) {
            endCopyCollapseOperations(vm);
        } else {
            updateSnapshotOvf(vm);
        }

        VmHandler.unLockVm(vm);
        setSucceeded(true);
    }

    // Reloads disks, vm-init and devices from the DB before OVF generation.
    private void populateVmData(VM vm) {
        VmHandler.updateDisksFromDb(vm);
        VmHandler.updateVmInitFromDB(vm.getStaticData(), true);
        VmDeviceUtils.setVmDevices(vm.getStaticData());
    }

    // Copy-collapse epilogue: detach from template, keep only the active snapshot,
    // and write the OVF. An OVF write failure is logged/audited but does not fail
    // the command (best-effort).
    private void endCopyCollapseOperations(VM vm) {
        vm.setVmtGuid(VmTemplateHandler.BLANK_VM_TEMPLATE_ID);
        vm.setVmtName(null);
        Snapshot activeSnapshot = getDbFacade().getSnapshotDao().get(
                getDbFacade().getSnapshotDao().getId(vm.getId(), SnapshotType.ACTIVE));
        vm.setSnapshots(Arrays.asList(activeSnapshot));
        try {
            updateCopyVmInSpm(getVm().getStoragePoolId(), vm, getParameters()
                    .getStorageDomainId());
        } catch (EngineException e) {
            log.error("Updating VM OVF in export domain failed.", e);
            auditLogDirector.log(this, AuditLogType.IMPORTEXPORT_IMPORT_VM_FAILED_UPDATING_OVF);
        }
    }

    // Non-collapse epilogue: export the OVF with the full snapshot configuration.
    private void updateSnapshotOvf(VM vm) {
        vm.setSnapshots(getDbFacade().getSnapshotDao().getAllWithConfiguration(getVm().getId()));
        updateVmInSpm();
    }

    @Override
    protected Map<String, Pair<String, String>> getExclusiveLocks() {
        // exclusive lock on the VM for the duration of the execution phase
        return Collections.singletonMap(getVmId().toString(),
                LockMessagesMatchUtil.makeLockingPair(LockingGroup.VM, EngineMessage.ACTION_TYPE_FAILED_OBJECT_LOCKED));
    }

    @Override
    protected void endWithFailure() {
        endActionOnAllImageGroups();
        VM vm = getVm();
        VmHandler.unLockVm(vm);
        // the command itself "succeeded" in cleaning up, even though the export failed
        setSucceeded(true);
    }

    @Override
    public Map<String, String> getJobMessageProperties() {
        if (jobProperties == null) {
            jobProperties = super.getJobMessageProperties();
            jobProperties.put(VdcObjectType.VM.name().toLowerCase(), (getVmName() == null) ? "" : getVmName());
        }
        return jobProperties;
    }
}
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator 0.14.0.0 * Changes may cause incorrect behavior and will be lost if the code is * regenerated. */ package fixtures.bodycomplex; import com.microsoft.rest.ServiceCallback; import com.microsoft.rest.ServiceResponse; import com.squareup.okhttp.ResponseBody; import fixtures.bodycomplex.models.BooleanWrapper; import fixtures.bodycomplex.models.ByteWrapper; import fixtures.bodycomplex.models.Datetimerfc1123Wrapper; import fixtures.bodycomplex.models.DatetimeWrapper; import fixtures.bodycomplex.models.DateWrapper; import fixtures.bodycomplex.models.DoubleWrapper; import fixtures.bodycomplex.models.DurationWrapper; import fixtures.bodycomplex.models.ErrorException; import fixtures.bodycomplex.models.FloatWrapper; import fixtures.bodycomplex.models.IntWrapper; import fixtures.bodycomplex.models.LongWrapper; import fixtures.bodycomplex.models.StringWrapper; import java.io.IOException; import retrofit.Call; import retrofit.http.Body; import retrofit.http.GET; import retrofit.http.PUT; /** * An instance of this class provides access to all the operations defined * in Primitive. */ public interface Primitive { /** * The interface defining all the services for Primitive to be * used by Retrofit to perform actually REST calls. 
*/ interface PrimitiveService { @GET("/complex/primitive/integer") Call<ResponseBody> getInt(); @PUT("/complex/primitive/integer") Call<ResponseBody> putInt(@Body IntWrapper complexBody); @GET("/complex/primitive/long") Call<ResponseBody> getLong(); @PUT("/complex/primitive/long") Call<ResponseBody> putLong(@Body LongWrapper complexBody); @GET("/complex/primitive/float") Call<ResponseBody> getFloat(); @PUT("/complex/primitive/float") Call<ResponseBody> putFloat(@Body FloatWrapper complexBody); @GET("/complex/primitive/double") Call<ResponseBody> getDouble(); @PUT("/complex/primitive/double") Call<ResponseBody> putDouble(@Body DoubleWrapper complexBody); @GET("/complex/primitive/bool") Call<ResponseBody> getBool(); @PUT("/complex/primitive/bool") Call<ResponseBody> putBool(@Body BooleanWrapper complexBody); @GET("/complex/primitive/string") Call<ResponseBody> getString(); @PUT("/complex/primitive/string") Call<ResponseBody> putString(@Body StringWrapper complexBody); @GET("/complex/primitive/date") Call<ResponseBody> getDate(); @PUT("/complex/primitive/date") Call<ResponseBody> putDate(@Body DateWrapper complexBody); @GET("/complex/primitive/datetime") Call<ResponseBody> getDateTime(); @PUT("/complex/primitive/datetime") Call<ResponseBody> putDateTime(@Body DatetimeWrapper complexBody); @GET("/complex/primitive/datetimerfc1123") Call<ResponseBody> getDateTimeRfc1123(); @PUT("/complex/primitive/datetimerfc1123") Call<ResponseBody> putDateTimeRfc1123(@Body Datetimerfc1123Wrapper complexBody); @GET("/complex/primitive/duration") Call<ResponseBody> getDuration(); @PUT("/complex/primitive/duration") Call<ResponseBody> putDuration(@Body DurationWrapper complexBody); @GET("/complex/primitive/byte") Call<ResponseBody> getByte(); @PUT("/complex/primitive/byte") Call<ResponseBody> putByte(@Body ByteWrapper complexBody); } /** * Get complex types with integer properties. 
* * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @return the IntWrapper object wrapped in {@link ServiceResponse} if successful. */ ServiceResponse<IntWrapper> getInt() throws ErrorException, IOException; /** * Get complex types with integer properties. * * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @return the {@link Call} object */ Call<ResponseBody> getIntAsync(final ServiceCallback<IntWrapper> serviceCallback); /** * Put complex types with integer properties. * * @param complexBody Please put -1 and 2 * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @throws IllegalArgumentException exception thrown from invalid parameters * @return the {@link ServiceResponse} object if successful. */ ServiceResponse<Void> putInt(IntWrapper complexBody) throws ErrorException, IOException, IllegalArgumentException; /** * Put complex types with integer properties. * * @param complexBody Please put -1 and 2 * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @return the {@link Call} object */ Call<ResponseBody> putIntAsync(IntWrapper complexBody, final ServiceCallback<Void> serviceCallback); /** * Get complex types with long properties. * * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @return the LongWrapper object wrapped in {@link ServiceResponse} if successful. */ ServiceResponse<LongWrapper> getLong() throws ErrorException, IOException; /** * Get complex types with long properties. * * @param serviceCallback the async ServiceCallback to handle successful and failed responses. 
* @return the {@link Call} object */ Call<ResponseBody> getLongAsync(final ServiceCallback<LongWrapper> serviceCallback); /** * Put complex types with long properties. * * @param complexBody Please put 1099511627775 and -999511627788 * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @throws IllegalArgumentException exception thrown from invalid parameters * @return the {@link ServiceResponse} object if successful. */ ServiceResponse<Void> putLong(LongWrapper complexBody) throws ErrorException, IOException, IllegalArgumentException; /** * Put complex types with long properties. * * @param complexBody Please put 1099511627775 and -999511627788 * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @return the {@link Call} object */ Call<ResponseBody> putLongAsync(LongWrapper complexBody, final ServiceCallback<Void> serviceCallback); /** * Get complex types with float properties. * * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @return the FloatWrapper object wrapped in {@link ServiceResponse} if successful. */ ServiceResponse<FloatWrapper> getFloat() throws ErrorException, IOException; /** * Get complex types with float properties. * * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @return the {@link Call} object */ Call<ResponseBody> getFloatAsync(final ServiceCallback<FloatWrapper> serviceCallback); /** * Put complex types with float properties. * * @param complexBody Please put 1.05 and -0.003 * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @throws IllegalArgumentException exception thrown from invalid parameters * @return the {@link ServiceResponse} object if successful. 
*/ ServiceResponse<Void> putFloat(FloatWrapper complexBody) throws ErrorException, IOException, IllegalArgumentException; /** * Put complex types with float properties. * * @param complexBody Please put 1.05 and -0.003 * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @return the {@link Call} object */ Call<ResponseBody> putFloatAsync(FloatWrapper complexBody, final ServiceCallback<Void> serviceCallback); /** * Get complex types with double properties. * * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @return the DoubleWrapper object wrapped in {@link ServiceResponse} if successful. */ ServiceResponse<DoubleWrapper> getDouble() throws ErrorException, IOException; /** * Get complex types with double properties. * * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @return the {@link Call} object */ Call<ResponseBody> getDoubleAsync(final ServiceCallback<DoubleWrapper> serviceCallback); /** * Put complex types with double properties. * * @param complexBody Please put 3e-100 and -0.000000000000000000000000000000000000000000000000000000005 * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @throws IllegalArgumentException exception thrown from invalid parameters * @return the {@link ServiceResponse} object if successful. */ ServiceResponse<Void> putDouble(DoubleWrapper complexBody) throws ErrorException, IOException, IllegalArgumentException; /** * Put complex types with double properties. * * @param complexBody Please put 3e-100 and -0.000000000000000000000000000000000000000000000000000000005 * @param serviceCallback the async ServiceCallback to handle successful and failed responses. 
* @return the {@link Call} object */ Call<ResponseBody> putDoubleAsync(DoubleWrapper complexBody, final ServiceCallback<Void> serviceCallback); /** * Get complex types with bool properties. * * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @return the BooleanWrapper object wrapped in {@link ServiceResponse} if successful. */ ServiceResponse<BooleanWrapper> getBool() throws ErrorException, IOException; /** * Get complex types with bool properties. * * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @return the {@link Call} object */ Call<ResponseBody> getBoolAsync(final ServiceCallback<BooleanWrapper> serviceCallback); /** * Put complex types with bool properties. * * @param complexBody Please put true and false * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @throws IllegalArgumentException exception thrown from invalid parameters * @return the {@link ServiceResponse} object if successful. */ ServiceResponse<Void> putBool(BooleanWrapper complexBody) throws ErrorException, IOException, IllegalArgumentException; /** * Put complex types with bool properties. * * @param complexBody Please put true and false * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @return the {@link Call} object */ Call<ResponseBody> putBoolAsync(BooleanWrapper complexBody, final ServiceCallback<Void> serviceCallback); /** * Get complex types with string properties. * * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @return the StringWrapper object wrapped in {@link ServiceResponse} if successful. */ ServiceResponse<StringWrapper> getString() throws ErrorException, IOException; /** * Get complex types with string properties. 
* * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @return the {@link Call} object */ Call<ResponseBody> getStringAsync(final ServiceCallback<StringWrapper> serviceCallback); /** * Put complex types with string properties. * * @param complexBody Please put 'goodrequest', '', and null * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @throws IllegalArgumentException exception thrown from invalid parameters * @return the {@link ServiceResponse} object if successful. */ ServiceResponse<Void> putString(StringWrapper complexBody) throws ErrorException, IOException, IllegalArgumentException; /** * Put complex types with string properties. * * @param complexBody Please put 'goodrequest', '', and null * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @return the {@link Call} object */ Call<ResponseBody> putStringAsync(StringWrapper complexBody, final ServiceCallback<Void> serviceCallback); /** * Get complex types with date properties. * * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @return the DateWrapper object wrapped in {@link ServiceResponse} if successful. */ ServiceResponse<DateWrapper> getDate() throws ErrorException, IOException; /** * Get complex types with date properties. * * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @return the {@link Call} object */ Call<ResponseBody> getDateAsync(final ServiceCallback<DateWrapper> serviceCallback); /** * Put complex types with date properties. 
* * @param complexBody Please put '0001-01-01' and '2016-02-29' * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @throws IllegalArgumentException exception thrown from invalid parameters * @return the {@link ServiceResponse} object if successful. */ ServiceResponse<Void> putDate(DateWrapper complexBody) throws ErrorException, IOException, IllegalArgumentException; /** * Put complex types with date properties. * * @param complexBody Please put '0001-01-01' and '2016-02-29' * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @return the {@link Call} object */ Call<ResponseBody> putDateAsync(DateWrapper complexBody, final ServiceCallback<Void> serviceCallback); /** * Get complex types with datetime properties. * * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @return the DatetimeWrapper object wrapped in {@link ServiceResponse} if successful. */ ServiceResponse<DatetimeWrapper> getDateTime() throws ErrorException, IOException; /** * Get complex types with datetime properties. * * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @return the {@link Call} object */ Call<ResponseBody> getDateTimeAsync(final ServiceCallback<DatetimeWrapper> serviceCallback); /** * Put complex types with datetime properties. * * @param complexBody Please put '0001-01-01T12:00:00-04:00' and '2015-05-18T11:38:00-08:00' * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @throws IllegalArgumentException exception thrown from invalid parameters * @return the {@link ServiceResponse} object if successful. 
*/ ServiceResponse<Void> putDateTime(DatetimeWrapper complexBody) throws ErrorException, IOException, IllegalArgumentException; /** * Put complex types with datetime properties. * * @param complexBody Please put '0001-01-01T12:00:00-04:00' and '2015-05-18T11:38:00-08:00' * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @return the {@link Call} object */ Call<ResponseBody> putDateTimeAsync(DatetimeWrapper complexBody, final ServiceCallback<Void> serviceCallback); /** * Get complex types with datetimeRfc1123 properties. * * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @return the Datetimerfc1123Wrapper object wrapped in {@link ServiceResponse} if successful. */ ServiceResponse<Datetimerfc1123Wrapper> getDateTimeRfc1123() throws ErrorException, IOException; /** * Get complex types with datetimeRfc1123 properties. * * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @return the {@link Call} object */ Call<ResponseBody> getDateTimeRfc1123Async(final ServiceCallback<Datetimerfc1123Wrapper> serviceCallback); /** * Put complex types with datetimeRfc1123 properties. * * @param complexBody Please put 'Mon, 01 Jan 0001 12:00:00 GMT' and 'Mon, 18 May 2015 11:38:00 GMT' * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @throws IllegalArgumentException exception thrown from invalid parameters * @return the {@link ServiceResponse} object if successful. */ ServiceResponse<Void> putDateTimeRfc1123(Datetimerfc1123Wrapper complexBody) throws ErrorException, IOException, IllegalArgumentException; /** * Put complex types with datetimeRfc1123 properties. 
* * @param complexBody Please put 'Mon, 01 Jan 0001 12:00:00 GMT' and 'Mon, 18 May 2015 11:38:00 GMT' * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @return the {@link Call} object */ Call<ResponseBody> putDateTimeRfc1123Async(Datetimerfc1123Wrapper complexBody, final ServiceCallback<Void> serviceCallback); /** * Get complex types with duration properties. * * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @return the DurationWrapper object wrapped in {@link ServiceResponse} if successful. */ ServiceResponse<DurationWrapper> getDuration() throws ErrorException, IOException; /** * Get complex types with duration properties. * * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @return the {@link Call} object */ Call<ResponseBody> getDurationAsync(final ServiceCallback<DurationWrapper> serviceCallback); /** * Put complex types with duration properties. * * @param complexBody Please put 'P123DT22H14M12.011S' * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @throws IllegalArgumentException exception thrown from invalid parameters * @return the {@link ServiceResponse} object if successful. */ ServiceResponse<Void> putDuration(DurationWrapper complexBody) throws ErrorException, IOException, IllegalArgumentException; /** * Put complex types with duration properties. * * @param complexBody Please put 'P123DT22H14M12.011S' * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @return the {@link Call} object */ Call<ResponseBody> putDurationAsync(DurationWrapper complexBody, final ServiceCallback<Void> serviceCallback); /** * Get complex types with byte properties. 
* * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @return the ByteWrapper object wrapped in {@link ServiceResponse} if successful. */ ServiceResponse<ByteWrapper> getByte() throws ErrorException, IOException; /** * Get complex types with byte properties. * * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @return the {@link Call} object */ Call<ResponseBody> getByteAsync(final ServiceCallback<ByteWrapper> serviceCallback); /** * Put complex types with byte properties. * * @param complexBody Please put non-ascii byte string hex(FF FE FD FC 00 FA F9 F8 F7 F6) * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @throws IllegalArgumentException exception thrown from invalid parameters * @return the {@link ServiceResponse} object if successful. */ ServiceResponse<Void> putByte(ByteWrapper complexBody) throws ErrorException, IOException, IllegalArgumentException; /** * Put complex types with byte properties. * * @param complexBody Please put non-ascii byte string hex(FF FE FD FC 00 FA F9 F8 F7 F6) * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @return the {@link Call} object */ Call<ResponseBody> putByteAsync(ByteWrapper complexBody, final ServiceCallback<Void> serviceCallback); }
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.settings.dashboard; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.content.pm.PackageManager.NameNotFoundException; import android.content.res.Resources; import android.graphics.drawable.Drawable; import android.os.Bundle; import android.os.Handler; import android.os.Message; import android.telephony.SubscriptionManager; import android.text.TextUtils; import android.util.Log; import android.util.TypedValue; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ImageView; import android.widget.TextView; import com.android.internal.logging.MetricsLogger; import com.android.settings.HelpUtils; import com.android.settings.InstrumentedFragment; import com.android.settings.R; import com.android.settings.SettingsActivity; import com.mediatek.settings.UtilsExt; import com.mediatek.settings.ext.ISettingsMiscExt; import java.util.List; /* BIRD_A200_CUSTOM add by qinzhifeng 20160415 begin*/ import com.bird.settings.BirdFeatureOption; import com.bird.settings.ArrangeShowSettingsDialog; import android.provider.Settings; import android.database.ContentObserver; /* BIRD_A200_CUSTOM add by qinzhifeng 20160415 end*/ import 
com.bird.settings.BirdFeatureOption; // add by liuzhiling 20160707
import android.widget.LinearLayout; // add by liuzhiling 20160707

/**
 * Fragment that renders the top-level Settings dashboard: a vertical list of
 * categories, each holding a set of {@link DashboardTileView} tiles.
 *
 * Vendor additions kept intact:
 * - BIRD_TWO_TAB_SETTINGS: a second "common settings" container is populated
 *   in parallel with the main dashboard (liuzhiling 20160707).
 * - BIRD_A200_CUSTOM: rebuilds when the SETTINGS_ARRANGE_TYPE system setting
 *   changes (qinzhifeng 20160415).
 *
 * The UI is rebuilt on resume and whenever a package is added/removed/changed,
 * always via {@link #sendRebuildUI()} so rebuilds are coalesced on the handler.
 */
public class DashboardSummary extends InstrumentedFragment {
    private static final String LOG_TAG = "DashboardSummary";

    private LayoutInflater mLayoutInflater;
    // Main dashboard container, inflated from R.layout.dashboard.
    private ViewGroup mDashboard;
    // Secondary container owned by SettingsActivity; only used when
    // BIRD_TWO_TAB_SETTINGS is enabled. add by liuzhiling 20160707
    private LinearLayout mCommonDashboard;

    private static final int MSG_REBUILD_UI = 1;

    // MediaTek plugin hook for tile customization and SIM/UIM wording.
    private ISettingsMiscExt mExt;

    // Coalesces rebuild requests posted by sendRebuildUI().
    private Handler mHandler = new Handler() {
        @Override
        public void handleMessage(Message msg) {
            switch (msg.what) {
                case MSG_REBUILD_UI: {
                    final Context context = getActivity();
                    rebuildUI(context);
                    // add by liuzhiling 20160707
                    if (BirdFeatureOption.BIRD_TWO_TAB_SETTINGS) {
                        rebuildCommonSettingsUI(context);
                    }
                    // add by liuzhiling 20160707 end
                } break;
            }
        }
    };

    /** Rebuilds the dashboard when any package is installed/removed/changed. */
    private class HomePackageReceiver extends BroadcastReceiver {
        @Override
        public void onReceive(Context context, Intent intent) {
            rebuildUI(context);
            // add by liuzhiling 20160707
            if (BirdFeatureOption.BIRD_TWO_TAB_SETTINGS) {
                rebuildCommonSettingsUI(context);
            }
            // add by liuzhiling 20160707 end
        }
    }
    private HomePackageReceiver mHomePackageReceiver = new HomePackageReceiver();

    @Override
    protected int getMetricsCategory() {
        return MetricsLogger.DASHBOARD_SUMMARY;
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setHasOptionsMenu(true);
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        super.onCreateOptionsMenu(menu, inflater);
        HelpUtils.prepareHelpMenuItem(getActivity(), menu, R.string.help_uri_dashboard,
                getClass().getName());
    }

    @Override
    public void onResume() {
        super.onResume();
        sendRebuildUI();
        final IntentFilter filter = new IntentFilter(Intent.ACTION_PACKAGE_ADDED);
        filter.addAction(Intent.ACTION_PACKAGE_REMOVED);
        filter.addAction(Intent.ACTION_PACKAGE_CHANGED);
        filter.addAction(Intent.ACTION_PACKAGE_REPLACED);
        filter.addDataScheme("package");
        getActivity().registerReceiver(mHomePackageReceiver, filter);
        /* BIRD_A200_CUSTOM add by qinzhifeng 20160415 begin */
        if (BirdFeatureOption.BIRD_A200_CUSTOM) {
            getActivity().getContentResolver().registerContentObserver(
                    Settings.System.getUriFor(ArrangeShowSettingsDialog.SETTINGS_ARRANGE_TYPE),
                    false, mArrangeTypeObserver);
        }
        /* BIRD_A200_CUSTOM add by qinzhifeng 20160415 end */
    }

    @Override
    public void onPause() {
        super.onPause();
        // Symmetric with onResume(): drop the package receiver and, when
        // enabled, the arrange-type observer.
        getActivity().unregisterReceiver(mHomePackageReceiver);
        /* BIRD_A200_CUSTOM add by qinzhifeng 20160415 begin */
        if (BirdFeatureOption.BIRD_A200_CUSTOM) {
            getActivity().getContentResolver().unregisterContentObserver(mArrangeTypeObserver);
        }
        /* BIRD_A200_CUSTOM add by qinzhifeng 20160415 end */
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        mExt = UtilsExt.getMiscPlugin(this.getActivity());
        mLayoutInflater = inflater;
        final View rootView = inflater.inflate(R.layout.dashboard, container, false);
        mDashboard = (ViewGroup) rootView.findViewById(R.id.dashboard_container);
        // add by liuzhiling 20160707
        if (BirdFeatureOption.BIRD_TWO_TAB_SETTINGS) {
            Context context = getActivity();
            mCommonDashboard = ((SettingsActivity) context).getCommonSettings();
        }
        // add by liuzhiling 20160707 end
        return rootView;
    }

    /** Rebuilds the main dashboard container. */
    private void rebuildUI(Context context) {
        rebuildDashboard(context, mDashboard, false);
    }

    // add by liuzhiling 20160707
    /** Rebuilds the secondary "common settings" container (BIRD_TWO_TAB_SETTINGS). */
    private void rebuildCommonSettingsUI(Context context) {
        rebuildDashboard(context, mCommonDashboard, true);
    }
    // add by liuzhiling 20160707 end

    /**
     * Shared implementation of rebuildUI()/rebuildCommonSettingsUI(): the two
     * originals were identical except for the target container, the category
     * source and the log label, so they are deduplicated here.
     *
     * @param context        activity context used for inflation and tile views
     * @param container      view group to clear and repopulate
     * @param commonSettings true = use getDashboardCategories2 and the
     *                       "common settings" log label; false = main dashboard
     */
    private void rebuildDashboard(Context context, ViewGroup container, boolean commonSettings) {
        if (!isAdded()) {
            Log.w(LOG_TAG, "Cannot build the DashboardSummary UI yet as the Fragment is not added");
            return;
        }
        long start = System.currentTimeMillis();
        final Resources res = getResources();

        container.removeAllViews();

        List<DashboardCategory> categories = commonSettings
                ? ((SettingsActivity) context).getDashboardCategories2(true)
                : ((SettingsActivity) context).getDashboardCategories(true);

        final int count = categories.size();
        for (int n = 0; n < count; n++) {
            DashboardCategory category = categories.get(n);
            View categoryView = mLayoutInflater.inflate(R.layout.dashboard_category, container,
                    false);
            TextView categoryLabel = (TextView) categoryView.findViewById(R.id.category_title);
            categoryLabel.setText(category.getTitle(res));
            ViewGroup categoryContent =
                    (ViewGroup) categoryView.findViewById(R.id.category_content);
            final int tilesCount = category.getTilesCount();
            for (int i = 0; i < tilesCount; i++) {
                DashboardTile tile = category.getTile(i);
                DashboardTileView tileView = new DashboardTileView(context);
                updateTileView(context, res, tile, tileView.getImageView(),
                        tileView.getTitleTextView(), tileView.getStatusTextView());
                tileView.setTile(tile);
                categoryContent.addView(tileView);
            }
            // Add the category
            container.addView(categoryView);
        }
        long delta = System.currentTimeMillis() - start;
        // Log labels preserved exactly from the original pair of methods.
        Log.d(LOG_TAG, (commonSettings ? "rebuildUI common settings" : "rebuildUI")
                + " took: " + delta + " ms");
    }

    /**
     * Fills one tile's icon, title and optional status line.
     * Icons from other packages are tinted with the theme accent color;
     * missing icons clear both image and background so the slot stays blank.
     */
    private void updateTileView(Context context, Resources res, DashboardTile tile,
            ImageView tileIcon, TextView tileTextView, TextView statusTextView) {
        if (!TextUtils.isEmpty(tile.iconPkg)) {
            try {
                Drawable drawable = context.getPackageManager()
                        .getResourcesForApplication(tile.iconPkg).getDrawable(tile.iconRes, null);
                if (!tile.iconPkg.equals(context.getPackageName()) && drawable != null) {
                    // If this drawable is coming from outside Settings, tint it to match the color.
                    TypedValue tintColor = new TypedValue();
                    context.getTheme().resolveAttribute(com.android.internal.R.attr.colorAccent,
                            tintColor, true);
                    drawable.setTint(tintColor.data);
                }
                tileIcon.setImageDrawable(drawable);
            } catch (NameNotFoundException | Resources.NotFoundException e) {
                tileIcon.setImageDrawable(null);
                tileIcon.setBackground(null);
            }
        } else if (tile.iconRes > 0) {
            tileIcon.setImageResource(tile.iconRes);
        } else {
            tileIcon.setImageDrawable(null);
            tileIcon.setBackground(null);
            mExt.customizeDashboardTile(tile, tileIcon);
        }

        /// M: feature replace sim to uim
        tileTextView.setText(mExt.customizeSimDisplayString(
                tile.getTitle(res).toString(), SubscriptionManager.INVALID_SUBSCRIPTION_ID));

        CharSequence summary = tile.getSummary(res);
        if (!TextUtils.isEmpty(summary)) {
            statusTextView.setVisibility(View.VISIBLE);
            statusTextView.setText(summary);
        } else {
            statusTextView.setVisibility(View.GONE);
        }
    }

    /** Posts a rebuild request unless one is already queued. */
    private void sendRebuildUI() {
        if (!mHandler.hasMessages(MSG_REBUILD_UI)) {
            mHandler.sendEmptyMessage(MSG_REBUILD_UI);
        }
    }

    /* BIRD_A200_CUSTOM add by qinzhifeng 20160415 begin */
    // Rebuilds the dashboard whenever the arrange-type system setting changes.
    private ContentObserver mArrangeTypeObserver = new ContentObserver(new Handler()) {
        @Override
        public void onChange(boolean selfChange) {
            sendRebuildUI();
        }
    };
    /* BIRD_A200_CUSTOM add by qinzhifeng 20160415 end */
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.flexunit.ant.report;

import java.io.File;
import java.io.FileOutputStream;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Locale;

import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.util.DateUtils;
import org.dom4j.Document;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;
import org.dom4j.io.OutputFormat;
import org.dom4j.io.XMLWriter;
import org.flexunit.ant.LoggingUtil;

/**
 * Accumulates FlexUnit test results for a single {@link Suite} into a JUnit-style
 * XML report document ("TEST-&lt;suite&gt;.xml") and tracks failure/error/skip
 * counts on the suite as tests are added.
 */
public class Report
{
   private static final String FAILURE = "failure";
   private static final String ERROR = "error";
   private static final String IGNORE = "ignore";
   private static final String TEST_SUITE = "testsuite";
   private static final String NAME_ATTRIBUTE_LABEL = "name";
   private static final String FAILURE_ATTRIBUTE_LABEL = "failures";
   private static final String ERROR_ATTRIBUTE_LABEL = "errors";
   private static final String IGNORE_ATTRIBUTE_LABEL = "skipped";
   private static final String TIME_ATTRIBUTE_LABEL = "time";
   private static final String TESTS_ATTRIBUTE_LABEL = "tests";
   private static final String HOSTNAME_ATTRIBUTE_LABEL = "hostname";
   private static final String TIMESTAMP_ATTRIBUTE_LABEL = "timestamp";
   private static final String FILENAME_PREFIX = "TEST-";
   private static final String FILENAME_EXTENSION = ".xml";

   // Exception messages
   private static final String FAILED_TEST = "FlexUnit test {0} in suite {1} failed.";
   private static final String ERRORED_TEST = "FlexUnit test {0} in suite {1} had errors.";
   private static final String IGNORED_TEST = "FlexUnit test {0} in suite {1} was ignored.";
   private static final String TEST_INFO = "Suite: {0}\nTests run: {1}, Failures: {2}, Errors: {3}, Skipped: {4}, Time elapsed: {5} sec";
   private static final String ERROR_SAVING_REPORT = "Error saving report.";

   // XML attribute labels
   private static final String CLASSNAME_ATTRIBUTE = "classname";
   private static final String NAME_ATTRIBUTE = "name";
   private static final String STATUS_ATTRIBUTE = "status";
   private static final String TIME_ATTRIBUTE = "time";

   protected Suite suite;
   private Document document;
   // Test-method names already counted, to avoid double-counting re-runs.
   private List<String> recordedRuns;

   /**
    * Creates a report for the given suite and seeds the XML document with the
    * suite's current (usually zero) counters.
    */
   public Report(Suite suite)
   {
      this.recordedRuns = new ArrayList<String>();
      this.suite = suite;

      // Create a new XML document
      document = DocumentHelper.createDocument();

      // Add the test suite attributes to the document
      document
         .addElement(TEST_SUITE)
         .addAttribute(NAME_ATTRIBUTE_LABEL, suite.getName())
         .addAttribute(TESTS_ATTRIBUTE_LABEL, String.valueOf(suite.getTests()))
         .addAttribute(FAILURE_ATTRIBUTE_LABEL, String.valueOf(suite.getFailures()))
         .addAttribute(ERROR_ATTRIBUTE_LABEL, String.valueOf(suite.getErrors()))
         .addAttribute(IGNORE_ATTRIBUTE_LABEL, String.valueOf(suite.getSkips()))
         .addAttribute(TIME_ATTRIBUTE_LABEL, String.valueOf(suite.getTime()));
   }

   /**
    * Adds the test to the suite report given an XML test document.
    * Updates the suite's test count/time, rewrites the raw millisecond time
    * into seconds, and substitutes the classname when the method name is the
    * literal string "null".
    */
   public void addTest(Document test)
   {
      Element root = test.getRootElement();

      // Add to the number of tests in this suite if not seen and not null.
      // FIX: attributeValue() may return null when the attribute is absent;
      // guard before dereferencing instead of risking an NPE.
      String testMethod = root.attributeValue(NAME_ATTRIBUTE);
      if (testMethod != null && !recordedRuns.contains(testMethod)
            && !testMethod.equals("null"))
      {
         recordedRuns.add(testMethod);
         suite.addTest();
      }

      // add test time to total time
      long time = Long.parseLong(root.attributeValue(TIME_ATTRIBUTE));
      suite.addTime(time);
      root.attribute(TIME_ATTRIBUTE).setText(formatTime(time));

      // If the test method name is null, then make it the classname
      // (constant-first equals is null-safe).
      if ("null".equals(root.attributeValue(NAME_ATTRIBUTE)))
      {
         root.attribute(NAME_ATTRIBUTE).setText(root.attributeValue(CLASSNAME_ATTRIBUTE));
      }

      // Add the test to the report document
      document.getRootElement().add(root);

      // Check for special status adjustments to make to suite
      checkForStatus(test);

      // remove status attribute since it's only used by the report
      root.remove(root.attribute(STATUS_ATTRIBUTE));
   }

   /**
    * Formats a millisecond duration as seconds with three decimals.
    * FIX: pinned to Locale.US so the decimal separator is always '.', keeping
    * the XML "time" attribute parseable regardless of the platform locale;
    * also drops the deprecated new Double(...) boxing.
    */
   private String formatTime(long time)
   {
      return String.format(Locale.US, "%.3f", time / 1000.0);
   }

   /**
    * Updates counts for failed, error, and ignore on suite as well as logs what
    * failed if told to use logging.
    *
    * @param test Test XML document
    */
   private void checkForStatus(Document test)
   {
      // Get the root element and pull the test name and status
      final Element root = test.getRootElement();
      final String name = root.attributeValue(NAME_ATTRIBUTE);
      final String status = root.attributeValue(STATUS_ATTRIBUTE);

      String format = null;

      if (FAILURE.equals(status))
      {
         format = FAILED_TEST;
         suite.addFailure();
      }
      else if (ERROR.equals(status))
      {
         format = ERRORED_TEST;
         suite.addError();
      }
      else if (IGNORE.equals(status))
      {
         format = IGNORED_TEST;
         suite.addSkip();
      }

      // Creates the fail message for use with verbose
      if (format != null)
      {
         final String message = MessageFormat.format(format, name, suite);
         LoggingUtil.log(message);
      }
   }

   /**
    * Determines if any failures (errors or failures) have occurred in this
    * report.
    */
   public boolean hasFailures()
   {
      return (suite.getErrors() > 0 || suite.getFailures() > 0);
   }

   /**
    * Write the report XML document out to file.
    *
    * @param reportDir Directory to hold report file.
    * @throws BuildException if the report cannot be written (cause preserved).
    */
   public void save(File reportDir) throws BuildException
   {
      try
      {
         // Open the file matching the parameter suite
         // (file name uses the suite's toString — presumably its name; verify).
         final File file = new File(reportDir, FILENAME_PREFIX + suite + FILENAME_EXTENSION);

         // Retrieve the root element and adjust the failures and test attributes
         Element root = document.getRootElement();
         root.addAttribute(FAILURE_ATTRIBUTE_LABEL, String.valueOf(suite.getFailures()));
         root.addAttribute(ERROR_ATTRIBUTE_LABEL, String.valueOf(suite.getErrors()));
         root.addAttribute(TESTS_ATTRIBUTE_LABEL, String.valueOf(suite.getTests()));
         root.addAttribute(IGNORE_ATTRIBUTE_LABEL, String.valueOf(suite.getSkips()));
         root.addAttribute(TIME_ATTRIBUTE_LABEL, String.valueOf(formatTime(suite.getTime())));
         root.addAttribute(HOSTNAME_ATTRIBUTE_LABEL, getHostname());

         final String timestamp = DateUtils.format(new Date(), DateUtils.ISO8601_DATETIME_PATTERN);
         root.addAttribute(TIMESTAMP_ATTRIBUTE_LABEL, timestamp);

         // Write the updated suite.
         // FIX: close the writer (and the underlying FileOutputStream) even
         // when write() throws — previously a failed write leaked the stream.
         final OutputFormat format = OutputFormat.createPrettyPrint();
         final XMLWriter writer = new XMLWriter(new FileOutputStream(file), format);
         try
         {
            writer.write(document);
         }
         finally
         {
            writer.close();
         }
      }
      catch (Exception e)
      {
         throw new BuildException(ERROR_SAVING_REPORT, e);
      }
   }

   /** Local host name for the "hostname" attribute; "localhost" on failure. */
   private String getHostname()
   {
      try
      {
         return InetAddress.getLocalHost().getHostName();
      }
      catch (UnknownHostException e)
      {
         return "localhost";
      }
   }

   /**
    * One-line human-readable summary of the suite's counters.
    * Deprecated new String()/new Integer() boxing replaced with autoboxing;
    * output is unchanged.
    */
   public String getSummary()
   {
      String summary = "";

      try
      {
         summary = MessageFormat.format(TEST_INFO,
               suite.getName(),
               suite.getTests(),
               suite.getFailures(),
               suite.getErrors(),
               suite.getSkips(),
               formatTime(suite.getTime()));
      }
      catch (Exception e)
      {
         // best-effort: a formatting problem must not break the build output
      }

      return summary;
   }
}
package io.kvh.media.sound;

import android.util.Log;

import io.kvh.media.KCacheUtils;

import java.io.BufferedOutputStream;
import java.io.Closeable;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;

/**
 * Created by changbinhe on 14/11/22.
 *
 * Consumes AMR frames from a producer, buffers them in a queue, and writes
 * them on a background thread into rotating ".amr" slice files under the app
 * cache directory. Each completed slice is handed to the registered
 * {@link Supporter.FileConsumer} when {@link #nextSlice()} rotates the file.
 */
public class Filer implements Runnable, Supporter.AmrConsumer, Supporter.OnOffSwitcher {
    private static final String TAG = "Filer";
    private static final boolean DEBUG = false;

    // Standard AMR file magic: "#!AMR\n".
    final private static byte[] header = new byte[]{0x23, 0x21, 0x41, 0x4D, 0x52, 0x0A};

    // Pending frames, fed by onAmrFeed() and drained by the writer thread.
    private List<byte[]> amrFrames;
    // Stream/file of the slice currently being written; null when stopped.
    private DataOutputStream sliceStream;
    private File sliceFile;
    private Supporter.FileConsumer fileConsumer;

    private boolean isRunning;
    private Thread runningThread;
    // Monitor the writer thread parks on while the queue is empty.
    final private Object waitingObject;

    public Filer() {
        amrFrames = Collections.synchronizedList(new LinkedList<byte[]>());
        waitingObject = new Object();
    }

    /** Copies one encoded frame into the queue and wakes the writer thread. */
    @Override
    public void onAmrFeed(byte[] buffer, int length) {
        byte[] tempData = new byte[length];
        System.arraycopy(buffer, 0, tempData, 0, length);
        amrFrames.add(tempData);
        if (DEBUG) Log.i(TAG, "add one amr frame, try to notify");
        synchronized (waitingObject) {
            waitingObject.notify();
        }
    }

    /** Finishes the current slice (handing it to the consumer) and opens a new one. */
    public void nextSlice() {
        if (DEBUG) Log.i(TAG, "next slice");
        makeSlice();
    }

    /**
     * Flushes and publishes the current slice, then creates a fresh slice file
     * with an AMR header. On success the old stream is closed and replaced;
     * on failure the old stream is kept so writing can continue.
     */
    synchronized private void makeSlice() {
        // flush the slice in progress and hand it to the consumer
        if (sliceStream != null) {
            try {
                sliceStream.flush();
                // send to file consumer
                fileConsumer.onFileFeed(sliceFile);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        File folder = new File(KCacheUtils.getCacheDirectory() + "/record");
        // FIX: mkdirs() result was silently ignored; at least log the failure.
        if (!folder.exists() && !folder.mkdirs()) {
            Log.i(TAG, "failed to create record folder: " + folder.getAbsolutePath());
        }

        File file = new File(folder.getAbsolutePath(), UUID.randomUUID().toString() + ".amr");
        if (file.exists()) {
            file.delete();
        }
        try {
            file.createNewFile();
            Log.i(TAG, "new slice file at:" + file.getAbsolutePath());
        } catch (IOException e) {
            e.printStackTrace();
        }

        DataOutputStream dos = null;
        try {
            OutputStream os = new FileOutputStream(file);
            dos = new DataOutputStream(new BufferedOutputStream(os));
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }

        if (file.exists() && dos != null) {
            // FIX: the previous stream was flushed but never closed, leaking
            // one file descriptor per slice. Close it only once the
            // replacement is ready, so a failed rotation keeps the old
            // stream usable (same fallback behavior as before).
            closeQuietly(sliceStream);
            sliceFile = file;
            sliceStream = dos;
            try {
                sliceStream.write(header);
            } catch (IOException e) {
                e.printStackTrace();
            }
            if (DEBUG) Log.i(TAG, "file create succeed, try to notify");
            synchronized (waitingObject) {
                waitingObject.notify();
            }
            return;
        }

        // Rotation failed: don't leak a stream opened for a vanished file.
        closeQuietly(dos);
        Log.i(TAG, "file create failed");
    }

    /** Starts the background writer thread; creates the first slice if needed. */
    @Override
    public void start() {
        if (DEBUG) Log.i(TAG, "try to start");
        if (isRunning) {
            Log.i(TAG, "already started");
            return;
        }
        if (DEBUG) Log.i(TAG, "start succeed");
        isRunning = true;
        if (sliceStream == null) makeSlice();
        // start
        runningThread = new Thread(this);
        runningThread.start();
    }

    /**
     * Stops the writer thread, drains any queued frames into the current
     * slice, flushes, and releases the stream.
     */
    @Override
    public void stop() {
        if (DEBUG) Log.i(TAG, "stop, clean up");
        if (!isRunning) {
            Log.i(TAG, "not running");
            return;
        }
        isRunning = false;
        runningThread.interrupt();
        runningThread = null;

        // finish all writing
        if (sliceStream != null) {
            while (amrFrames.size() > 0) {
                byte[] buffer = amrFrames.remove(0);
                try {
                    sliceStream.write(buffer, 0, buffer.length);
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            try {
                sliceStream.flush();
            } catch (IOException e) {
                e.printStackTrace();
            }
            // FIX: the stream was previously abandoned without close(),
            // leaking the underlying FileOutputStream's descriptor.
            closeQuietly(sliceStream);
        }
        sliceStream = null;
        sliceFile = null;
    }

    /**
     * Writer loop: waits on the monitor while there is nothing to write, then
     * appends one queued frame at a time to the current slice. A frame is only
     * dequeued after a successful write, so a failed write retries it.
     */
    @Override
    public void run() {
        while (isRunning) {
            synchronized (waitingObject) {
                if (amrFrames.size() == 0 || sliceStream == null) {
                    if (DEBUG) Log.i(TAG, "waiting :" + amrFrames.size() + sliceStream);
                    try {
                        waitingObject.wait();
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                } else {
                    // try to write
                    byte[] buffer = amrFrames.get(0);
                    try {
                        if (DEBUG) Log.i(TAG, "writing :" + buffer.length);
                        sliceStream.write(buffer, 0, buffer.length);
                        amrFrames.remove(0);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
        }
    }

    /** Best-effort close helper; logs (via stack trace) instead of throwing. */
    private static void closeQuietly(Closeable closeable) {
        if (closeable == null) {
            return;
        }
        try {
            closeable.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public void setFileConsumer(Supporter.FileConsumer fileConsumer) {
        this.fileConsumer = fileConsumer;
    }
}
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution

// Automatically generated by LOXI from template of_class.java
// Do not modify

package org.projectfloodlight.openflow.protocol.ver13;

import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import com.google.common.collect.ImmutableSet;
import java.util.List;
import com.google.common.collect.ImmutableList;
import org.jboss.netty.buffer.ChannelBuffer;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;

/**
 * OpenFlow 1.3 wire representation of a Big Switch Networks experimenter
 * stats reply carrying per-port counters (experimenter 0x5c16c7, subtype 8).
 * Immutable value object; use {@link #createBuilder()} or {@link Builder} to
 * construct instances, {@code READER}/{@code WRITER} for (de)serialization.
 */
class OFBsnPortCounterStatsReplyVer13 implements OFBsnPortCounterStatsReply {
    private static final Logger logger = LoggerFactory.getLogger(OFBsnPortCounterStatsReplyVer13.class);
    // version: 1.3
    final static byte WIRE_VERSION = 4;
    // Fixed header size in bytes; messages shorter than this are malformed.
    final static int MINIMUM_LENGTH = 24;

    private final static long DEFAULT_XID = 0x0L;
    private final static Set<OFStatsReplyFlags> DEFAULT_FLAGS = ImmutableSet.<OFStatsReplyFlags>of();
    private final static List<OFBsnPortCounterStatsEntry> DEFAULT_ENTRIES = ImmutableList.<OFBsnPortCounterStatsEntry>of();

    // OF message fields
    private final long xid;
    private final Set<OFStatsReplyFlags> flags;
    private final List<OFBsnPortCounterStatsEntry> entries;
//
    // Immutable default instance
    final static OFBsnPortCounterStatsReplyVer13 DEFAULT = new OFBsnPortCounterStatsReplyVer13(
        DEFAULT_XID, DEFAULT_FLAGS, DEFAULT_ENTRIES
    );

    // package private constructor - used by readers, builders, and factory
    OFBsnPortCounterStatsReplyVer13(long xid, Set<OFStatsReplyFlags> flags, List<OFBsnPortCounterStatsEntry> entries) {
        if(flags == null) {
            throw new NullPointerException("OFBsnPortCounterStatsReplyVer13: property flags cannot be null");
        }
        if(entries == null) {
            throw new NullPointerException("OFBsnPortCounterStatsReplyVer13: property entries cannot be null");
        }
        this.xid = xid;
        this.flags = flags;
        this.entries = entries;
    }

    // Accessors for OF message fields
    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_13;
    }

    @Override
    public OFType getType() {
        return OFType.STATS_REPLY;
    }

    @Override
    public long getXid() {
        return xid;
    }

    @Override
    public OFStatsType getStatsType() {
        return OFStatsType.EXPERIMENTER;
    }

    @Override
    public Set<OFStatsReplyFlags> getFlags() {
        return flags;
    }

    @Override
    public long getExperimenter() {
        // BSN experimenter ID, constant for this message class.
        return 0x5c16c7L;
    }

    @Override
    public long getSubtype() {
        return 0x8L;
    }

    @Override
    public List<OFBsnPortCounterStatsEntry> getEntries() {
        return entries;
    }

    /** Returns a builder pre-populated with this message's field values. */
    public OFBsnPortCounterStatsReply.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    // Builder that falls back to a parent message's values for unset fields.
    static class BuilderWithParent implements OFBsnPortCounterStatsReply.Builder {
        final OFBsnPortCounterStatsReplyVer13 parentMessage;

        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean flagsSet;
        private Set<OFStatsReplyFlags> flags;
        private boolean entriesSet;
        private List<OFBsnPortCounterStatsEntry> entries;

        BuilderWithParent(OFBsnPortCounterStatsReplyVer13 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_13;
        }

        @Override
        public OFType getType() {
            return OFType.STATS_REPLY;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFBsnPortCounterStatsReply.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public OFStatsType getStatsType() {
            return OFStatsType.EXPERIMENTER;
        }

        @Override
        public Set<OFStatsReplyFlags> getFlags() {
            return flags;
        }

        @Override
        public OFBsnPortCounterStatsReply.Builder setFlags(Set<OFStatsReplyFlags> flags) {
            this.flags = flags;
            this.flagsSet = true;
            return this;
        }

        @Override
        public long getExperimenter() {
            return 0x5c16c7L;
        }

        @Override
        public long getSubtype() {
            return 0x8L;
        }

        @Override
        public List<OFBsnPortCounterStatsEntry> getEntries() {
            return entries;
        }

        @Override
        public OFBsnPortCounterStatsReply.Builder setEntries(List<OFBsnPortCounterStatsEntry> entries) {
            this.entries = entries;
            this.entriesSet = true;
            return this;
        }

        @Override
        public OFBsnPortCounterStatsReply build() {
            // Each field: explicit value if set, otherwise inherited from parent.
            long xid = this.xidSet ? this.xid : parentMessage.xid;
            Set<OFStatsReplyFlags> flags = this.flagsSet ? this.flags : parentMessage.flags;
            if(flags == null)
                throw new NullPointerException("Property flags must not be null");
            List<OFBsnPortCounterStatsEntry> entries = this.entriesSet ? this.entries : parentMessage.entries;
            if(entries == null)
                throw new NullPointerException("Property entries must not be null");
//
            return new OFBsnPortCounterStatsReplyVer13(
                xid,
                flags,
                entries
            );
        }
    }

    // Builder that falls back to class-level defaults for unset fields.
    static class Builder implements OFBsnPortCounterStatsReply.Builder {
        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean flagsSet;
        private Set<OFStatsReplyFlags> flags;
        private boolean entriesSet;
        private List<OFBsnPortCounterStatsEntry> entries;

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_13;
        }

        @Override
        public OFType getType() {
            return OFType.STATS_REPLY;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFBsnPortCounterStatsReply.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public OFStatsType getStatsType() {
            return OFStatsType.EXPERIMENTER;
        }

        @Override
        public Set<OFStatsReplyFlags> getFlags() {
            return flags;
        }

        @Override
        public OFBsnPortCounterStatsReply.Builder setFlags(Set<OFStatsReplyFlags> flags) {
            this.flags = flags;
            this.flagsSet = true;
            return this;
        }

        @Override
        public long getExperimenter() {
            return 0x5c16c7L;
        }

        @Override
        public long getSubtype() {
            return 0x8L;
        }

        @Override
        public List<OFBsnPortCounterStatsEntry> getEntries() {
            return entries;
        }

        @Override
        public OFBsnPortCounterStatsReply.Builder setEntries(List<OFBsnPortCounterStatsEntry> entries) {
            this.entries = entries;
            this.entriesSet = true;
            return this;
        }
//
        @Override
        public OFBsnPortCounterStatsReply build() {
            long xid = this.xidSet ? this.xid : DEFAULT_XID;
            Set<OFStatsReplyFlags> flags = this.flagsSet ? this.flags : DEFAULT_FLAGS;
            if(flags == null)
                throw new NullPointerException("Property flags must not be null");
            List<OFBsnPortCounterStatsEntry> entries = this.entriesSet ? this.entries : DEFAULT_ENTRIES;
            if(entries == null)
                throw new NullPointerException("Property entries must not be null");

            return new OFBsnPortCounterStatsReplyVer13(
                xid,
                flags,
                entries
            );
        }
    }

    final static Reader READER = new Reader();

    // Deserializer: validates the fixed header fields and reads the entry list.
    static class Reader implements OFMessageReader<OFBsnPortCounterStatsReply> {
        @Override
        public OFBsnPortCounterStatsReply readFrom(ChannelBuffer bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property version == 4
            byte version = bb.readByte();
            if(version != (byte) 0x4)
                throw new OFParseError("Wrong version: Expected=OFVersion.OF_13(4), got="+version);
            // fixed value property type == 19
            byte type = bb.readByte();
            if(type != (byte) 0x13)
                throw new OFParseError("Wrong type: Expected=OFType.STATS_REPLY(19), got="+type);
            int length = U16.f(bb.readShort());
            if(length < MINIMUM_LENGTH)
                throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
            // Returns null (after rewinding) when the message is not fully
            // buffered yet, so the caller can retry once more bytes arrive.
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            long xid = U32.f(bb.readInt());
            // fixed value property statsType == 65535
            short statsType = bb.readShort();
            if(statsType != (short) 0xffff)
                throw new OFParseError("Wrong statsType: Expected=OFStatsType.EXPERIMENTER(65535), got="+statsType);
            Set<OFStatsReplyFlags> flags = OFStatsReplyFlagsSerializerVer13.readFrom(bb);
            // pad: 4 bytes
            bb.skipBytes(4);
            // fixed value property experimenter == 0x5c16c7L
            int experimenter = bb.readInt();
            if(experimenter != 0x5c16c7)
                throw new OFParseError("Wrong experimenter: Expected=0x5c16c7L(0x5c16c7L), got="+experimenter);
            // fixed value property subtype == 0x8L
            int subtype = bb.readInt();
            if(subtype != 0x8)
                throw new OFParseError("Wrong subtype: Expected=0x8L(0x8L), got="+subtype);
            // Entry list occupies whatever remains of the declared length.
            List<OFBsnPortCounterStatsEntry> entries = ChannelUtils.readList(bb, length - (bb.readerIndex() - start), OFBsnPortCounterStatsEntryVer13.READER);

            OFBsnPortCounterStatsReplyVer13 bsnPortCounterStatsReplyVer13 = new OFBsnPortCounterStatsReplyVer13(
                xid,
                flags,
                entries
            );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", bsnPortCounterStatsReplyVer13);
            return bsnPortCounterStatsReplyVer13;
        }
    }

    /** Feeds this message's identity-relevant fields into a Guava hash sink. */
    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFBsnPortCounterStatsReplyVer13Funnel FUNNEL = new OFBsnPortCounterStatsReplyVer13Funnel();

    static class OFBsnPortCounterStatsReplyVer13Funnel implements Funnel<OFBsnPortCounterStatsReplyVer13> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFBsnPortCounterStatsReplyVer13 message, PrimitiveSink sink) {
            // fixed value property version = 4
            sink.putByte((byte) 0x4);
            // fixed value property type = 19
            sink.putByte((byte) 0x13);
            // FIXME: skip funnel of length
            sink.putLong(message.xid);
            // fixed value property statsType = 65535
            sink.putShort((short) 0xffff);
            OFStatsReplyFlagsSerializerVer13.putTo(message.flags, sink);
            // skip pad (4 bytes)
            // fixed value property experimenter = 0x5c16c7L
            sink.putInt(0x5c16c7);
            // fixed value property subtype = 0x8L
            sink.putInt(0x8);
            FunnelUtils.putList(message.entries, sink);
        }
    }

    /** Serializes this message to the buffer via {@code WRITER}. */
    public void writeTo(ChannelBuffer bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    // Serializer: writes the fixed header, the entries, then back-patches the
    // 16-bit length field once the total size is known.
    static class Writer implements OFMessageWriter<OFBsnPortCounterStatsReplyVer13> {
        @Override
        public void write(ChannelBuffer bb, OFBsnPortCounterStatsReplyVer13 message) {
            int startIndex = bb.writerIndex();
            // fixed value property version = 4
            bb.writeByte((byte) 0x4);
            // fixed value property type = 19
            bb.writeByte((byte) 0x13);
            // length is length of variable message, will be updated at the end
            int lengthIndex = bb.writerIndex();
            bb.writeShort(U16.t(0));
            bb.writeInt(U32.t(message.xid));
            // fixed value property statsType = 65535
            bb.writeShort((short) 0xffff);
            OFStatsReplyFlagsSerializerVer13.writeTo(bb, message.flags);
            // pad: 4 bytes
            bb.writeZero(4);
            // fixed value property experimenter = 0x5c16c7L
            bb.writeInt(0x5c16c7);
            // fixed value property subtype = 0x8L
            bb.writeInt(0x8);
            ChannelUtils.writeList(bb, message.entries);

            // update length field
            int length = bb.writerIndex() - startIndex;
            bb.setShort(lengthIndex, length);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFBsnPortCounterStatsReplyVer13(");
        b.append("xid=").append(xid);
        b.append(", ");
        b.append("flags=").append(flags);
        b.append(", ");
        b.append("entries=").append(entries);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFBsnPortCounterStatsReplyVer13 other = (OFBsnPortCounterStatsReplyVer13) obj;

        if( xid != other.xid)
            return false;
        if (flags == null) {
            if (other.flags != null)
                return false;
        } else if (!flags.equals(other.flags))
            return false;
        if (entries == null) {
            if (other.entries != null)
                return false;
        } else if (!entries.equals(other.entries))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        // NOTE(review): the first term omits "result +" (prime * hash instead
        // of prime * result + hash). This matches the LoxiGen template's output
        // and still satisfies the equals/hashCode contract, but differs from
        // the conventional recipe — do not "fix" by hand; regenerate instead.
        result = prime * (int) (xid ^ (xid >>> 32));
        result = prime * result + ((flags == null) ? 0 : flags.hashCode());
        result = prime * result + ((entries == null) ? 0 : entries.hashCode());
        return result;
    }
}
/* * Copyright 2012-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.rules; import com.facebook.buck.android.AndroidAarDescription; import com.facebook.buck.android.AndroidBinaryDescription; import com.facebook.buck.android.AndroidBuckConfig; import com.facebook.buck.android.AndroidBuildConfigDescription; import com.facebook.buck.android.AndroidDirectoryResolver; import com.facebook.buck.android.AndroidInstrumentationApkDescription; import com.facebook.buck.android.AndroidInstrumentationTestDescription; import com.facebook.buck.android.AndroidLibraryDescription; import com.facebook.buck.android.AndroidManifestDescription; import com.facebook.buck.android.AndroidPrebuiltAarDescription; import com.facebook.buck.android.AndroidResourceDescription; import com.facebook.buck.android.ApkGenruleDescription; import com.facebook.buck.android.GenAidlDescription; import com.facebook.buck.android.NdkCxxPlatform; import com.facebook.buck.android.NdkCxxPlatformCompiler; import com.facebook.buck.android.NdkCxxPlatforms; import com.facebook.buck.android.NdkLibraryDescription; import com.facebook.buck.android.PrebuiltNativeLibraryDescription; import com.facebook.buck.android.ProGuardConfig; import com.facebook.buck.android.RobolectricTestDescription; import com.facebook.buck.android.SmartDexingStep; import com.facebook.buck.apple.AppleAssetCatalogDescription; import com.facebook.buck.apple.AppleBinaryDescription; import 
com.facebook.buck.apple.AppleBundleDescription; import com.facebook.buck.apple.AppleConfig; import com.facebook.buck.apple.AppleCxxPlatform; import com.facebook.buck.apple.AppleCxxPlatforms; import com.facebook.buck.apple.AppleLibraryDescription; import com.facebook.buck.apple.ApplePackageDescription; import com.facebook.buck.apple.AppleResourceDescription; import com.facebook.buck.apple.AppleSdk; import com.facebook.buck.apple.AppleSdkDiscovery; import com.facebook.buck.apple.AppleSdkPaths; import com.facebook.buck.apple.AppleTestDescription; import com.facebook.buck.apple.AppleToolchain; import com.facebook.buck.apple.AppleToolchainDiscovery; import com.facebook.buck.apple.CodeSignIdentityStore; import com.facebook.buck.apple.CoreDataModelDescription; import com.facebook.buck.apple.ProvisioningProfileStore; import com.facebook.buck.apple.XcodePostbuildScriptDescription; import com.facebook.buck.apple.XcodePrebuildScriptDescription; import com.facebook.buck.apple.XcodeWorkspaceConfigDescription; import com.facebook.buck.cli.BuckConfig; import com.facebook.buck.cli.DownloadConfig; import com.facebook.buck.cxx.CxxBinaryDescription; import com.facebook.buck.cxx.CxxBuckConfig; import com.facebook.buck.cxx.CxxLibraryDescription; import com.facebook.buck.cxx.CxxPlatform; import com.facebook.buck.cxx.CxxPlatforms; import com.facebook.buck.cxx.CxxTestDescription; import com.facebook.buck.cxx.DefaultCxxPlatforms; import com.facebook.buck.cxx.InferBuckConfig; import com.facebook.buck.cxx.PrebuiltCxxLibraryDescription; import com.facebook.buck.d.DBinaryDescription; import com.facebook.buck.d.DBuckConfig; import com.facebook.buck.d.DLibraryDescription; import com.facebook.buck.d.DTestDescription; import com.facebook.buck.dotnet.CSharpLibraryDescription; import com.facebook.buck.dotnet.PrebuiltDotNetLibraryDescription; import com.facebook.buck.file.Downloader; import com.facebook.buck.file.ExplodingDownloader; import com.facebook.buck.file.RemoteFileDescription; import 
com.facebook.buck.file.StackedDownloader; import com.facebook.buck.go.GoBinaryDescription; import com.facebook.buck.go.GoBuckConfig; import com.facebook.buck.go.GoLibraryDescription; import com.facebook.buck.go.GoTestDescription; import com.facebook.buck.gwt.GwtBinaryDescription; import com.facebook.buck.halide.HalideBuckConfig; import com.facebook.buck.halide.HalideLibraryDescription; import com.facebook.buck.haskell.HaskellBinaryDescription; import com.facebook.buck.haskell.HaskellBuckConfig; import com.facebook.buck.haskell.HaskellLibraryDescription; import com.facebook.buck.haskell.PrebuiltHaskellLibraryDescription; import com.facebook.buck.io.ExecutableFinder; import com.facebook.buck.io.ProjectFilesystem; import com.facebook.buck.js.AndroidReactNativeLibraryDescription; import com.facebook.buck.js.IosReactNativeLibraryDescription; import com.facebook.buck.js.ReactNativeBuckConfig; import com.facebook.buck.jvm.groovy.GroovyBuckConfig; import com.facebook.buck.jvm.groovy.GroovyLibraryDescription; import com.facebook.buck.jvm.groovy.GroovyTestDescription; import com.facebook.buck.jvm.java.JavaBinaryDescription; import com.facebook.buck.jvm.java.JavaBuckConfig; import com.facebook.buck.jvm.java.JavaLibraryDescription; import com.facebook.buck.jvm.java.JavaOptions; import com.facebook.buck.jvm.java.JavaTestDescription; import com.facebook.buck.jvm.java.JavacOptions; import com.facebook.buck.jvm.java.KeystoreDescription; import com.facebook.buck.jvm.java.PrebuiltJarDescription; import com.facebook.buck.jvm.scala.ScalaBuckConfig; import com.facebook.buck.jvm.scala.ScalaLibraryDescription; import com.facebook.buck.jvm.scala.ScalaTestDescription; import com.facebook.buck.log.CommandThreadFactory; import com.facebook.buck.log.Logger; import com.facebook.buck.lua.CxxLuaExtensionDescription; import com.facebook.buck.lua.LuaBinaryDescription; import com.facebook.buck.lua.LuaBuckConfig; import com.facebook.buck.lua.LuaConfig; import 
com.facebook.buck.lua.LuaLibraryDescription; import com.facebook.buck.model.Flavor; import com.facebook.buck.model.FlavorDomain; import com.facebook.buck.model.ImmutableFlavor; import com.facebook.buck.ocaml.OCamlBinaryDescription; import com.facebook.buck.ocaml.OCamlBuckConfig; import com.facebook.buck.ocaml.OCamlLibraryDescription; import com.facebook.buck.ocaml.PrebuiltOCamlLibraryDescription; import com.facebook.buck.python.CxxPythonExtensionDescription; import com.facebook.buck.python.PrebuiltPythonLibraryDescription; import com.facebook.buck.python.PythonBinaryDescription; import com.facebook.buck.python.PythonBuckConfig; import com.facebook.buck.python.PythonLibraryDescription; import com.facebook.buck.python.PythonPlatform; import com.facebook.buck.python.PythonTestDescription; import com.facebook.buck.rust.RustBinaryDescription; import com.facebook.buck.rust.RustBuckConfig; import com.facebook.buck.rust.RustLibraryDescription; import com.facebook.buck.shell.ExportFileDescription; import com.facebook.buck.shell.GenruleDescription; import com.facebook.buck.shell.ShBinaryDescription; import com.facebook.buck.shell.ShTestDescription; import com.facebook.buck.swift.SwiftLibraryDescription; import com.facebook.buck.shell.WorkerToolDescription; import com.facebook.buck.thrift.ThriftBuckConfig; import com.facebook.buck.thrift.ThriftCxxEnhancer; import com.facebook.buck.thrift.ThriftJavaEnhancer; import com.facebook.buck.thrift.ThriftLibraryDescription; import com.facebook.buck.thrift.ThriftPythonEnhancer; import com.facebook.buck.util.HumanReadableException; import com.facebook.buck.util.ProcessExecutor; import com.facebook.buck.util.environment.Platform; import com.facebook.buck.zip.ZipDescription; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.base.Supplier; import com.google.common.collect.ImmutableList; import 
com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Maps; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.Map; import java.util.concurrent.Executors; import javax.annotation.Nullable; /** * A registry of all the build rules types understood by Buck. */ public class KnownBuildRuleTypes { private static final Logger LOG = Logger.get(KnownBuildRuleTypes.class); private final ImmutableMap<BuildRuleType, Description<?>> descriptions; private final ImmutableMap<String, BuildRuleType> types; private final FlavorDomain<CxxPlatform> cxxPlatforms; private final CxxPlatform defaultCxxPlatforms; private KnownBuildRuleTypes( Map<BuildRuleType, Description<?>> descriptions, Map<String, BuildRuleType> types, FlavorDomain<CxxPlatform> cxxPlatforms, CxxPlatform defaultCxxPlatforms) { this.descriptions = ImmutableMap.copyOf(descriptions); this.types = ImmutableMap.copyOf(types); this.cxxPlatforms = cxxPlatforms; this.defaultCxxPlatforms = defaultCxxPlatforms; } public BuildRuleType getBuildRuleType(String named) { BuildRuleType type = types.get(named); if (type == null) { throw new HumanReadableException("Unable to find build rule type: " + named); } return type; } public Description<?> getDescription(BuildRuleType buildRuleType) { Description<?> description = descriptions.get(buildRuleType); if (description == null) { throw new HumanReadableException( "Unable to find description for build rule type: " + buildRuleType); } return description; } public ImmutableSet<Description<?>> getAllDescriptions() { return ImmutableSet.copyOf(descriptions.values()); } public FlavorDomain<CxxPlatform> getCxxPlatforms() { return cxxPlatforms; } public CxxPlatform getDefaultCxxPlatforms() { return defaultCxxPlatforms; } public static Builder builder() { return 
new Builder(); } public static KnownBuildRuleTypes createInstance( BuckConfig config, ProcessExecutor processExecutor, AndroidDirectoryResolver androidDirectoryResolver, Optional<Path> testTempDirOverride) throws InterruptedException, IOException { return createBuilder( config, processExecutor, androidDirectoryResolver, testTempDirOverride).build(); } private static ImmutableList<AppleCxxPlatform> buildAppleCxxPlatforms( Supplier<Optional<Path>> appleDeveloperDirectorySupplier, ImmutableList<Path> extraToolchainPaths, ImmutableList<Path> extraPlatformPaths, BuckConfig buckConfig, AppleConfig appleConfig, ProcessExecutor processExecutor) throws IOException { Optional<Path> appleDeveloperDirectory = appleDeveloperDirectorySupplier.get(); if (appleDeveloperDirectory.isPresent() && !Files.isDirectory(appleDeveloperDirectory.get())) { LOG.error( "Developer directory is set to %s, but is not a directory", appleDeveloperDirectory.get()); return ImmutableList.of(); } ImmutableList.Builder<AppleCxxPlatform> appleCxxPlatformsBuilder = ImmutableList.builder(); ImmutableMap<String, AppleToolchain> toolchains = AppleToolchainDiscovery.discoverAppleToolchains( appleDeveloperDirectory, extraToolchainPaths); ImmutableMap<AppleSdk, AppleSdkPaths> sdkPaths = AppleSdkDiscovery.discoverAppleSdkPaths( appleDeveloperDirectory, extraPlatformPaths, toolchains); for (Map.Entry<AppleSdk, AppleSdkPaths> entry : sdkPaths.entrySet()) { AppleSdk sdk = entry.getKey(); AppleSdkPaths appleSdkPaths = entry.getValue(); String targetSdkVersion = appleConfig.getTargetSdkVersion( sdk.getApplePlatform()).or(sdk.getVersion()); LOG.debug("SDK %s using default version %s", sdk, targetSdkVersion); for (String architecture : sdk.getArchitectures()) { AppleCxxPlatform appleCxxPlatform = AppleCxxPlatforms.build( sdk, targetSdkVersion, architecture, appleSdkPaths, buckConfig, appleConfig, Optional.of(processExecutor)); appleCxxPlatformsBuilder.add(appleCxxPlatform); } } return appleCxxPlatformsBuilder.build(); 
} @VisibleForTesting static CxxPlatform getHostCxxPlatformFromConfig( CxxBuckConfig cxxBuckConfig, ImmutableMap<Flavor, CxxPlatform> cxxPlatforms, CxxPlatform defaultCxxPlatform) { Optional<String> hostCxxPlatform = cxxBuckConfig.getHostPlatform(); if (hostCxxPlatform.isPresent()) { ImmutableFlavor hostFlavor = ImmutableFlavor.of(hostCxxPlatform.get()); if (cxxPlatforms.containsKey(hostFlavor)) { return CxxPlatforms.copyPlatformWithFlavorAndConfig( cxxPlatforms.get(hostFlavor), cxxBuckConfig, DefaultCxxPlatforms.FLAVOR); } } return defaultCxxPlatform; } @VisibleForTesting static Builder createBuilder( BuckConfig config, ProcessExecutor processExecutor, AndroidDirectoryResolver androidDirectoryResolver, Optional<Path> testTempDirOverride) throws InterruptedException, IOException { Platform platform = Platform.detect(); AndroidBuckConfig androidConfig = new AndroidBuckConfig(config, platform); Optional<String> ndkVersion = androidConfig.getNdkVersion(); // If a NDK version isn't specified, we've got to reach into the runtime environment to find // out which one we will end up using. if (!ndkVersion.isPresent()) { ndkVersion = androidDirectoryResolver.getNdkVersion(); } AppleConfig appleConfig = new AppleConfig(config); ImmutableList<AppleCxxPlatform> appleCxxPlatforms = buildAppleCxxPlatforms( appleConfig.getAppleDeveloperDirectorySupplier(processExecutor), appleConfig.getExtraToolchainPaths(), appleConfig.getExtraPlatformPaths(), config, appleConfig, processExecutor); FlavorDomain<AppleCxxPlatform> platformFlavorsToAppleCxxPlatforms = FlavorDomain.from("Apple C++ Platform", appleCxxPlatforms); // Setup the NDK C/C++ platforms. 
Optional<Path> ndkRoot = androidDirectoryResolver.findAndroidNdkDir(); ImmutableMap.Builder<NdkCxxPlatforms.TargetCpuType, NdkCxxPlatform> ndkCxxPlatformsBuilder = ImmutableMap.builder(); if (ndkRoot.isPresent()) { NdkCxxPlatformCompiler.Type compilerType = androidConfig.getNdkCompiler().or(NdkCxxPlatforms.DEFAULT_COMPILER_TYPE); String gccVersion = androidConfig.getNdkGccVersion().or(NdkCxxPlatforms.DEFAULT_GCC_VERSION); NdkCxxPlatformCompiler compiler = NdkCxxPlatformCompiler.builder() .setType(compilerType) .setVersion( compilerType == NdkCxxPlatformCompiler.Type.GCC ? gccVersion : androidConfig.getNdkClangVersion().or(NdkCxxPlatforms.DEFAULT_CLANG_VERSION)) .setGccVersion(gccVersion) .build(); ndkCxxPlatformsBuilder.putAll( NdkCxxPlatforms.getPlatforms( new ProjectFilesystem(ndkRoot.get()), compiler, androidConfig.getNdkCxxRuntime().or(NdkCxxPlatforms.DEFAULT_CXX_RUNTIME), androidConfig.getNdkAppPlatform().or(NdkCxxPlatforms.DEFAULT_TARGET_APP_PLATFORM), androidConfig.getNdkCpuAbis().or(NdkCxxPlatforms.DEFAULT_CPU_ABIS), platform)); } ImmutableMap<NdkCxxPlatforms.TargetCpuType, NdkCxxPlatform> ndkCxxPlatforms = ndkCxxPlatformsBuilder.build(); // Construct the C/C++ config wrapping the buck config. CxxBuckConfig cxxBuckConfig = new CxxBuckConfig(config); ImmutableMap.Builder<Flavor, CxxPlatform> cxxPlatformsBuilder = ImmutableMap.builder(); // If an Android NDK is present, add platforms for that. This is mostly useful for // testing our Android NDK support for right now. for (NdkCxxPlatform ndkCxxPlatform : ndkCxxPlatforms.values()) { cxxPlatformsBuilder.put( ndkCxxPlatform.getCxxPlatform().getFlavor(), ndkCxxPlatform.getCxxPlatform()); } for (AppleCxxPlatform appleCxxPlatform : platformFlavorsToAppleCxxPlatforms.getValues()) { cxxPlatformsBuilder.put( appleCxxPlatform.getCxxPlatform().getFlavor(), appleCxxPlatform.getCxxPlatform()); } // Add the host's own C/C++ platform. 
CxxPlatform systemDefaultCxxPlatform = getHostCxxPlatformFromConfig( cxxBuckConfig, cxxPlatformsBuilder.build(), DefaultCxxPlatforms.build(platform, cxxBuckConfig)); cxxPlatformsBuilder.put(systemDefaultCxxPlatform.getFlavor(), systemDefaultCxxPlatform); ImmutableMap<Flavor, CxxPlatform> cxxPlatformsMap = cxxPlatformsBuilder.build(); // Get the default platform from config. CxxPlatform defaultCxxPlatform = CxxPlatforms.getConfigDefaultCxxPlatform( cxxBuckConfig, cxxPlatformsMap, systemDefaultCxxPlatform); // Add platforms for each cxx flavor obtained from the buck config files // from sections of the form cxx#{flavor name} ImmutableSet<Flavor> cxxFlavors = CxxBuckConfig.getCxxFlavors(config); for (Flavor flavor: cxxFlavors) { CxxBuckConfig flavoredCxxBuckConfig = new CxxBuckConfig(config, flavor); CxxPlatform defaultPlatformForFlavor = CxxPlatforms.getConfigDefaultCxxPlatform( flavoredCxxBuckConfig, cxxPlatformsMap, systemDefaultCxxPlatform); cxxPlatformsBuilder.put(flavor, CxxPlatforms.copyPlatformWithFlavorAndConfig( defaultPlatformForFlavor, flavoredCxxBuckConfig, flavor)); } cxxPlatformsMap = cxxPlatformsBuilder.build(); ExecutableFinder executableFinder = new ExecutableFinder(); // Build up the final list of C/C++ platforms. 
FlavorDomain<CxxPlatform> cxxPlatforms = new FlavorDomain<>( "C/C++ platform", cxxPlatformsMap); DBuckConfig dBuckConfig = new DBuckConfig(config); ReactNativeBuckConfig reactNativeBuckConfig = new ReactNativeBuckConfig(config); RustBuckConfig rustBuckConfig = new RustBuckConfig(config); GoBuckConfig goBuckConfig = new GoBuckConfig(config, processExecutor, cxxPlatforms); HalideBuckConfig halideBuckConfig = new HalideBuckConfig(config); ProGuardConfig proGuardConfig = new ProGuardConfig(config); PythonBuckConfig pyConfig = new PythonBuckConfig(config, executableFinder); ImmutableList<PythonPlatform> pythonPlatformsList = pyConfig.getPythonPlatforms(processExecutor); FlavorDomain<PythonPlatform> pythonPlatforms = FlavorDomain.from("Python Platform", pythonPlatformsList); PythonBinaryDescription pythonBinaryDescription = new PythonBinaryDescription( pyConfig, pythonPlatforms, cxxBuckConfig, defaultCxxPlatform, cxxPlatforms); // Look up the timeout to apply to entire test rules. Optional<Long> defaultTestRuleTimeoutMs = config.getLong("test", "rule_timeout"); // Prepare the downloader if we're allowing mid-build downloads Downloader downloader; DownloadConfig downloadConfig = new DownloadConfig(config); if (downloadConfig.isDownloadAtRuntimeOk()) { downloader = StackedDownloader.createFromConfig( config, androidDirectoryResolver.findAndroidSdkDirSafe()); } else { // Or just set one that blows up downloader = new ExplodingDownloader(); } Builder builder = builder(); JavaBuckConfig javaConfig = new JavaBuckConfig(config); JavacOptions defaultJavacOptions = javaConfig.getDefaultJavacOptions(); JavaOptions defaultJavaOptions = javaConfig.getDefaultJavaOptions(); ScalaBuckConfig scalaConfig = new ScalaBuckConfig(config); InferBuckConfig inferBuckConfig = new InferBuckConfig(config); LuaConfig luaConfig = new LuaBuckConfig(config, executableFinder); CxxBinaryDescription cxxBinaryDescription = new CxxBinaryDescription( cxxBuckConfig, inferBuckConfig, defaultCxxPlatform, 
cxxPlatforms); CxxLibraryDescription cxxLibraryDescription = new CxxLibraryDescription( cxxBuckConfig, defaultCxxPlatform, inferBuckConfig, cxxPlatforms); CodeSignIdentityStore codeSignIdentityStore = CodeSignIdentityStore.fromSystem(processExecutor); ProvisioningProfileStore provisioningProfileStore = ProvisioningProfileStore.fromSearchPath(appleConfig.getProvisioningProfileSearchPath()); AppleLibraryDescription appleLibraryDescription = new AppleLibraryDescription( cxxLibraryDescription, platformFlavorsToAppleCxxPlatforms, defaultCxxPlatform, codeSignIdentityStore, provisioningProfileStore, appleConfig.getDefaultDebugInfoFormat()); builder.register(appleLibraryDescription); AppleBinaryDescription appleBinaryDescription = new AppleBinaryDescription( cxxBinaryDescription, platformFlavorsToAppleCxxPlatforms, codeSignIdentityStore, provisioningProfileStore, appleConfig.getDefaultDebugInfoFormat()); builder.register(appleBinaryDescription); SwiftLibraryDescription swiftLibraryDescription = new SwiftLibraryDescription( cxxPlatforms, platformFlavorsToAppleCxxPlatforms, defaultCxxPlatform); builder.register(swiftLibraryDescription); HaskellBuckConfig haskellBuckConfig = new HaskellBuckConfig(config, executableFinder); builder.register(new HaskellLibraryDescription(haskellBuckConfig, cxxBuckConfig, cxxPlatforms)); builder.register( new HaskellBinaryDescription(haskellBuckConfig, cxxPlatforms, defaultCxxPlatform)); builder.register(new PrebuiltHaskellLibraryDescription()); // Create an executor service exclusively for the smart dexing step. 
ListeningExecutorService dxExecutorService = MoreExecutors.listeningDecorator( Executors.newFixedThreadPool( SmartDexingStep.determineOptimalThreadCount(), new CommandThreadFactory("SmartDexing"))); builder.register( new AndroidAarDescription( new AndroidManifestDescription(), cxxBuckConfig, ndkCxxPlatforms)); builder.register( new AndroidBinaryDescription( defaultJavaOptions, defaultJavacOptions, proGuardConfig, ndkCxxPlatforms, dxExecutorService, cxxBuckConfig)); builder.register(new AndroidBuildConfigDescription(defaultJavacOptions)); builder.register( new AndroidInstrumentationApkDescription( proGuardConfig, defaultJavacOptions, ndkCxxPlatforms, dxExecutorService, cxxBuckConfig)); builder.register(new AndroidInstrumentationTestDescription( defaultJavaOptions, defaultTestRuleTimeoutMs)); builder.register(new AndroidLibraryDescription(defaultJavacOptions)); builder.register(new AndroidManifestDescription()); builder.register(new AndroidPrebuiltAarDescription(defaultJavacOptions)); builder.register(new AndroidReactNativeLibraryDescription(reactNativeBuckConfig)); builder.register(new AndroidResourceDescription()); builder.register(new ApkGenruleDescription()); builder.register(new AppleAssetCatalogDescription()); builder.register(new ApplePackageDescription(appleConfig, platformFlavorsToAppleCxxPlatforms)); AppleBundleDescription appleBundleDescription = new AppleBundleDescription( appleBinaryDescription, appleLibraryDescription, cxxPlatforms, platformFlavorsToAppleCxxPlatforms, defaultCxxPlatform, codeSignIdentityStore, provisioningProfileStore, appleConfig.getDefaultDebugInfoFormat()); builder.register(appleBundleDescription); builder.register(new AppleResourceDescription()); builder.register( new AppleTestDescription( appleConfig, appleLibraryDescription, cxxPlatforms, platformFlavorsToAppleCxxPlatforms, defaultCxxPlatform, codeSignIdentityStore, provisioningProfileStore, appleConfig.getAppleDeveloperDirectorySupplierForTests(processExecutor), 
appleConfig.getDefaultDebugInfoFormat())); builder.register(new CoreDataModelDescription()); builder.register(new CSharpLibraryDescription()); builder.register(cxxBinaryDescription); builder.register(cxxLibraryDescription); builder.register(new CxxLuaExtensionDescription(luaConfig, cxxBuckConfig, cxxPlatforms)); builder.register( new CxxPythonExtensionDescription(pythonPlatforms, cxxBuckConfig, cxxPlatforms)); builder.register( new CxxTestDescription( cxxBuckConfig, defaultCxxPlatform, cxxPlatforms, defaultTestRuleTimeoutMs)); builder.register(new DBinaryDescription(dBuckConfig, cxxBuckConfig, defaultCxxPlatform)); builder.register(new DLibraryDescription(dBuckConfig, cxxBuckConfig, defaultCxxPlatform)); builder.register( new DTestDescription( dBuckConfig, cxxBuckConfig, defaultCxxPlatform, defaultTestRuleTimeoutMs)); builder.register(new ExportFileDescription()); builder.register(new GenruleDescription()); builder.register(new GenAidlDescription()); builder.register(new GoBinaryDescription(goBuckConfig)); builder.register(new GoLibraryDescription(goBuckConfig)); builder.register( new GoTestDescription( goBuckConfig, defaultTestRuleTimeoutMs)); GroovyBuckConfig groovyBuckConfig = new GroovyBuckConfig(config); builder.register( new GroovyLibraryDescription( groovyBuckConfig, defaultJavacOptions)); builder.register( new GroovyTestDescription( groovyBuckConfig, defaultJavaOptions, defaultJavacOptions, defaultTestRuleTimeoutMs, testTempDirOverride ) ); builder.register(new GwtBinaryDescription(defaultJavaOptions)); builder.register( new HalideLibraryDescription( cxxBuckConfig, defaultCxxPlatform, cxxPlatforms, halideBuckConfig)); builder.register(new IosReactNativeLibraryDescription(reactNativeBuckConfig)); builder.register(new JavaBinaryDescription( defaultJavaOptions, defaultJavacOptions, defaultCxxPlatform)); builder.register(new JavaLibraryDescription(defaultJavacOptions)); builder.register( new JavaTestDescription( defaultJavaOptions, defaultJavacOptions, 
defaultTestRuleTimeoutMs, defaultCxxPlatform, testTempDirOverride)); builder.register(new KeystoreDescription()); builder.register( new LuaBinaryDescription(luaConfig, cxxBuckConfig, defaultCxxPlatform, cxxPlatforms)); builder.register(new LuaLibraryDescription()); builder.register(new NdkLibraryDescription(ndkVersion, ndkCxxPlatforms)); OCamlBuckConfig ocamlBuckConfig = new OCamlBuckConfig(platform, config); builder.register(new OCamlBinaryDescription(ocamlBuckConfig)); builder.register(new OCamlLibraryDescription(ocamlBuckConfig)); builder.register(new PrebuiltCxxLibraryDescription(cxxBuckConfig, cxxPlatforms)); builder.register(new PrebuiltDotNetLibraryDescription()); builder.register(new PrebuiltJarDescription()); builder.register(new PrebuiltNativeLibraryDescription()); builder.register(new PrebuiltOCamlLibraryDescription()); builder.register(new PrebuiltPythonLibraryDescription()); builder.register(new ProjectConfigDescription()); builder.register(pythonBinaryDescription); builder.register(new PythonLibraryDescription()); builder.register( new PythonTestDescription( pythonBinaryDescription, pyConfig, pythonPlatforms, cxxBuckConfig, defaultCxxPlatform, defaultTestRuleTimeoutMs, cxxPlatforms)); builder.register(new RemoteFileDescription(downloader)); builder.register(new RobolectricTestDescription( defaultJavaOptions, defaultJavacOptions, defaultTestRuleTimeoutMs, defaultCxxPlatform, testTempDirOverride)); builder.register(new RustBinaryDescription(rustBuckConfig)); builder.register(new RustLibraryDescription(rustBuckConfig)); builder.register(new ScalaLibraryDescription(scalaConfig)); builder.register(new ScalaTestDescription( scalaConfig, defaultJavaOptions, defaultTestRuleTimeoutMs, defaultCxxPlatform, testTempDirOverride)); builder.register(new ShBinaryDescription()); builder.register(new ShTestDescription(defaultTestRuleTimeoutMs)); ThriftBuckConfig thriftBuckConfig = new ThriftBuckConfig(config); builder.register( new ThriftLibraryDescription( 
thriftBuckConfig, ImmutableList.of( new ThriftJavaEnhancer(thriftBuckConfig, defaultJavacOptions), new ThriftCxxEnhancer( thriftBuckConfig, cxxLibraryDescription, /* cpp2 */ false), new ThriftCxxEnhancer( thriftBuckConfig, cxxLibraryDescription, /* cpp2 */ true), new ThriftPythonEnhancer(thriftBuckConfig, ThriftPythonEnhancer.Type.NORMAL), new ThriftPythonEnhancer(thriftBuckConfig, ThriftPythonEnhancer.Type.TWISTED), new ThriftPythonEnhancer(thriftBuckConfig, ThriftPythonEnhancer.Type.ASYNCIO)))); builder.register(new WorkerToolDescription()); builder.register(new XcodePostbuildScriptDescription()); builder.register(new XcodePrebuildScriptDescription()); builder.register(new XcodeWorkspaceConfigDescription()); builder.register(new ZipDescription()); builder.setCxxPlatforms(cxxPlatforms); builder.setDefaultCxxPlatform(defaultCxxPlatform); return builder; } public static class Builder { private final Map<BuildRuleType, Description<?>> descriptions; private final Map<String, BuildRuleType> types; @Nullable private FlavorDomain<CxxPlatform> cxxPlatforms; @Nullable private CxxPlatform defaultCxxPlatform; protected Builder() { this.descriptions = Maps.newConcurrentMap(); this.types = Maps.newConcurrentMap(); } public Builder register(Description<?> description) { BuildRuleType type = description.getBuildRuleType(); types.put(type.getName(), type); descriptions.put(type, description); return this; } public Builder setCxxPlatforms(FlavorDomain<CxxPlatform> cxxPlatforms) { this.cxxPlatforms = cxxPlatforms; return this; } public Builder setDefaultCxxPlatform(CxxPlatform defaultCxxPlatform) { this.defaultCxxPlatform = defaultCxxPlatform; return this; } public KnownBuildRuleTypes build() { return new KnownBuildRuleTypes( descriptions, types, Preconditions.checkNotNull(cxxPlatforms), Preconditions.checkNotNull(defaultCxxPlatform)); } } }
/* * Copyright 2017 MovingBlocks * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.terasology.engine.subsystem.lwjgl; import com.google.common.collect.Lists; import com.google.common.collect.Queues; import org.lwjgl.LWJGLException; import org.lwjgl.opengl.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.terasology.assets.AssetFactory; import org.terasology.assets.module.ModuleAssetDataProducer; import org.terasology.assets.module.ModuleAwareAssetTypeManager; import org.terasology.config.Config; import org.terasology.config.RenderingConfig; import org.terasology.context.Context; import org.terasology.engine.GameEngine; import org.terasology.engine.GameThread; import org.terasology.engine.modes.GameState; import org.terasology.engine.subsystem.DisplayDevice; import org.terasology.engine.subsystem.RenderingSubsystemFactory; import org.terasology.rendering.ShaderManager; import org.terasology.rendering.ShaderManagerLwjgl; import org.terasology.rendering.assets.animation.MeshAnimation; import org.terasology.rendering.assets.animation.MeshAnimationData; import org.terasology.rendering.assets.animation.MeshAnimationImpl; import org.terasology.rendering.assets.atlas.Atlas; import org.terasology.rendering.assets.atlas.AtlasData; import org.terasology.rendering.assets.font.Font; import org.terasology.rendering.assets.font.FontData; import org.terasology.rendering.assets.font.FontImpl; import org.terasology.rendering.assets.material.Material; import 
org.terasology.rendering.assets.material.MaterialData;
import org.terasology.rendering.assets.mesh.Mesh;
import org.terasology.rendering.assets.mesh.MeshData;
import org.terasology.rendering.assets.shader.Shader;
import org.terasology.rendering.assets.shader.ShaderData;
import org.terasology.rendering.assets.skeletalmesh.SkeletalMesh;
import org.terasology.rendering.assets.skeletalmesh.SkeletalMeshData;
import org.terasology.rendering.assets.texture.PNGTextureFormat;
import org.terasology.rendering.assets.texture.Texture;
import org.terasology.rendering.assets.texture.TextureData;
import org.terasology.rendering.assets.texture.TextureUtil;
import org.terasology.rendering.assets.texture.subtexture.Subtexture;
import org.terasology.rendering.assets.texture.subtexture.SubtextureData;
import org.terasology.rendering.nui.internal.CanvasRenderer;
import org.terasology.rendering.nui.internal.LwjglCanvasRenderer;
import org.terasology.rendering.opengl.*;

import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.BlockingDeque;
import java.util.function.Consumer;

import static org.lwjgl.opengl.GL11.*;

/**
 * LWJGL-backed graphics subsystem. Owns the native display window, the OpenGL
 * context, and the registration of all renderable asset types (textures, fonts,
 * shaders, materials, meshes, animations, atlases).
 * <p>
 * Threading model: OpenGL work submitted from other threads is queued through
 * {@link #asynchToDisplayThread(Runnable)} and drained once per frame on the
 * display thread in {@link #postUpdate(GameState, float)}.
 */
public class LwjglGraphics extends BaseLwjglSubsystem {

    private static final Logger logger = LoggerFactory.getLogger(LwjglGraphics.class);

    // GL buffer-handle pool shared by meshes created through this subsystem.
    private GLBufferPool bufferPool = new GLBufferPool(false);

    // Runnables queued from other threads; executed on the display thread once per frame.
    private BlockingDeque<Runnable> displayThreadActions = Queues.newLinkedBlockingDeque();

    private Context context;
    private RenderingConfig config;
    private GameEngine engine;
    private LwjglDisplayDevice lwjglDisplay;

    @Override
    public String getName() {
        return "Graphics";
    }

    /**
     * Stores the engine/context references, reads the rendering configuration and
     * publishes the {@link DisplayDevice} into the root context. The actual window
     * and GL context are created later, in {@link #postInitialise(Context)}.
     *
     * @param gameEngine  the engine, kept so a window-close request can trigger shutdown
     * @param rootContext the root context to read config from and register services into
     */
    @Override
    public void initialise(GameEngine gameEngine, Context rootContext) {
        logger.info("Starting initialization of LWJGL");
        this.engine = gameEngine;
        this.context = rootContext;
        this.config = context.get(Config.class).getRendering();
        lwjglDisplay = new LwjglDisplayDevice(context);
        context.put(DisplayDevice.class, lwjglDisplay);
        logger.info("Initial initialization complete");
    }

    /**
     * Registers the OpenGL-backed implementations for every core renderable asset
     * type, plus the PNG formats for textures and fonts.
     *
     * @param assetTypeManager receives the asset type / format registrations
     */
    @Override
    public void registerCoreAssetTypes(ModuleAwareAssetTypeManager assetTypeManager) {
        // cast lambdas explicitly to avoid inconsistent compiler behavior wrt. type inference
        assetTypeManager.registerCoreAssetType(Font.class,
                (AssetFactory<Font, FontData>) FontImpl::new, "fonts");
        // Textures are loaded from both the "textures" and "fonts" asset folders.
        assetTypeManager.registerCoreAssetType(Texture.class,
                (AssetFactory<Texture, TextureData>) (urn, assetType, data) ->
                        (new OpenGLTexture(urn, assetType, data, this)), "textures", "fonts");
        // For override assets the folder name sits one path segment deeper, hence
        // getName(3) in the override branch vs getName(2) otherwise.
        assetTypeManager.registerCoreFormat(Texture.class,
                new PNGTextureFormat(Texture.FilterMode.NEAREST, path -> {
                    if (path.getName(1).toString().equals(ModuleAssetDataProducer.OVERRIDE_FOLDER)) {
                        return path.getName(3).toString().equals("textures");
                    } else {
                        return path.getName(2).toString().equals("textures");
                    }
                }));
        // Font page textures use linear filtering; regular textures use nearest (above).
        assetTypeManager.registerCoreFormat(Texture.class,
                new PNGTextureFormat(Texture.FilterMode.LINEAR, path -> {
                    if (path.getName(1).toString().equals(ModuleAssetDataProducer.OVERRIDE_FOLDER)) {
                        return path.getName(3).toString().equals("fonts");
                    } else {
                        return path.getName(2).toString().equals("fonts");
                    }
                }));
        assetTypeManager.registerCoreAssetType(Shader.class,
                (AssetFactory<Shader, ShaderData>) GLSLShader::new, "shaders");
        assetTypeManager.registerCoreAssetType(Material.class,
                (AssetFactory<Material, MaterialData>) GLSLMaterial::new, "materials");
        assetTypeManager.registerCoreAssetType(Mesh.class,
                (AssetFactory<Mesh, MeshData>) (urn, assetType, data) ->
                        new OpenGLMesh(urn, assetType, bufferPool, data), "mesh");
        assetTypeManager.registerCoreAssetType(SkeletalMesh.class,
                (AssetFactory<SkeletalMesh, SkeletalMeshData>) (urn, assetType, data) ->
                        new OpenGLSkeletalMesh(urn, assetType, data, bufferPool), "skeletalMesh");
        assetTypeManager.registerCoreAssetType(MeshAnimation.class,
                (AssetFactory<MeshAnimation, MeshAnimationData>) MeshAnimationImpl::new, "animations");
        assetTypeManager.registerCoreAssetType(Atlas.class,
                (AssetFactory<Atlas, AtlasData>) Atlas::new, "atlas");
        assetTypeManager.registerCoreAssetType(Subtexture.class,
                (AssetFactory<Subtexture, SubtextureData>) Subtexture::new);
    }

    /**
     * Creates the window and the OpenGL context, then registers the rendering
     * services that depend on a live GL context.
     */
    @Override
    public void postInitialise(Context rootContext) {
        context.put(RenderingSubsystemFactory.class, new LwjglRenderingSubsystemFactory(bufferPool));
        initDisplay();
        initOpenGL(context);
        context.put(CanvasRenderer.class, new LwjglCanvasRenderer(context));
    }

    /**
     * Per-frame display work: pumps the LWJGL display, drains any actions queued
     * for the display thread, applies the configured frame limit, renders the
     * current state, and requests engine shutdown when the window is closed.
     * The {@code delta} parameter is not used here.
     */
    @Override
    public void postUpdate(GameState currentState, float delta) {
        Display.update();

        if (!displayThreadActions.isEmpty()) {
            // Drain to a snapshot list so actions queued during execution run next frame.
            List<Runnable> actions = Lists.newArrayListWithExpectedSize(displayThreadActions.size());
            displayThreadActions.drainTo(actions);
            actions.forEach(Runnable::run);
        }

        // A frame limit of 0 (or negative) disables throttling.
        int frameLimit = context.get(Config.class).getRendering().getFrameLimit();
        if (frameLimit > 0) {
            Display.sync(frameLimit);
        }
        currentState.render();

        lwjglDisplay.update();

        if (lwjglDisplay.isCloseRequested()) {
            engine.shutdown();
        }
    }

    /**
     * Persists the window position to config — only meaningful when the display
     * exists, is windowed (not fullscreen) and is visible.
     */
    @Override
    public void preShutdown() {
        if (Display.isCreated() && !Display.isFullscreen() && Display.isVisible()) {
            config.setWindowPosX(Display.getX());
            config.setWindowPosY(Display.getY());
        }
    }

    /** Tears down the LWJGL display and its OpenGL context. */
    @Override
    public void shutdown() {
        Display.destroy();
    }

    /**
     * Creates the native window: display mode, title, window icons, then the GL
     * context itself (a debug context when debug mode is enabled, falling back to
     * a normal context on failure) and finally vsync.
     *
     * @throws RuntimeException if the display/context cannot be created
     */
    private void initDisplay() {
        logger.info("Initializing display (if last line in log then likely the game crashed from an issue with your video card)");
        try {
            lwjglDisplay.setDisplayModeSetting(config.getDisplayModeSetting(), false);

            Display.setTitle("Terasology" + " | " + "Alpha");
            try {
                String root = "org/terasology/icons/";
                ClassLoader classLoader = getClass().getClassLoader();

                BufferedImage icon16 = ImageIO.read(classLoader.getResourceAsStream(root + "gooey_sweet_16.png"));
                BufferedImage icon32 = ImageIO.read(classLoader.getResourceAsStream(root + "gooey_sweet_32.png"));
                BufferedImage icon64 = ImageIO.read(classLoader.getResourceAsStream(root + "gooey_sweet_64.png"));
                BufferedImage icon128 = ImageIO.read(classLoader.getResourceAsStream(root + "gooey_sweet_128.png"));

                Display.setIcon(new ByteBuffer[]{
                        TextureUtil.convertToByteBuffer(icon16),
                        TextureUtil.convertToByteBuffer(icon32),
                        TextureUtil.convertToByteBuffer(icon64),
                        TextureUtil.convertToByteBuffer(icon128)
                });
            } catch (IOException | IllegalArgumentException e) {
                // Icons are cosmetic; a missing resource must not abort startup.
                logger.warn("Could not set icon", e);
            }

            if (config.getDebug().isEnabled()) {
                try {
                    ContextAttribs ctxAttribs = new ContextAttribs().withDebug(true);
                    Display.create(config.getPixelFormat(), ctxAttribs);

                    try {
                        GL43.glDebugMessageCallback(new KHRDebugCallback(new DebugCallback()));
                    } catch (IllegalStateException e) {
                        logger.warn("Unable to specify DebugCallback to receive debugging messages from the GL.");
                    }
                } catch (LWJGLException e) {
                    logger.warn("Unable to create an OpenGL debug context. Maybe your graphics card does not support it.", e);
                    Display.create(config.getPixelFormat()); // Create a normal context instead
                }
            } else {
                Display.create(config.getPixelFormat());
            }

            Display.setVSyncEnabled(config.isVSync());
        } catch (LWJGLException e) {
            throw new RuntimeException("Can not initialize graphics device.", e);
        }
    }

    /**
     * Post-context GL setup: verifies required capabilities, sets the viewport to
     * the full window, applies the fixed GL state, and registers the shader manager.
     * Must run after {@link #initDisplay()} has created the context.
     */
    private void initOpenGL(Context currentContext) {
        logger.info("Initializing OpenGL");
        checkOpenGL();
        glViewport(0, 0, Display.getWidth(), Display.getHeight());
        initOpenGLParams();
        currentContext.put(ShaderManager.class, new ShaderManagerLwjgl());
    }

    /**
     * Verifies the GL versions/extensions the engine requires and fails fast with
     * a readable message listing everything that is missing.
     *
     * @throws IllegalStateException if any required capability is unsupported
     */
    private void checkOpenGL() {
        // NOTE: requiredCapabilities and capabilityNames below are parallel arrays
        // and must stay index-aligned.
        boolean[] requiredCapabilities = {
                GLContext.getCapabilities().OpenGL12,
                GLContext.getCapabilities().OpenGL14,
                GLContext.getCapabilities().OpenGL15,
                GLContext.getCapabilities().OpenGL20,
                GLContext.getCapabilities().OpenGL21, // needed as we use GLSL 1.20
                GLContext.getCapabilities().GL_ARB_framebuffer_object, // Extensions eventually included in
                GLContext.getCapabilities().GL_ARB_texture_float, // OpenGl 3.0 according to
                GLContext.getCapabilities().GL_ARB_half_float_pixel}; // http://en.wikipedia.org/wiki/OpenGL#OpenGL_3.0

        String[] capabilityNames = {"OpenGL12",
                "OpenGL14",
                "OpenGL15",
                "OpenGL20",
                "OpenGL21",
                "GL_ARB_framebuffer_object",
                "GL_ARB_texture_float",
                "GL_ARB_half_float_pixel"};

        boolean canRunTheGame = true;
        String missingCapabilitiesMessage = "";

        for (int index = 0; index < requiredCapabilities.length; index++) {
            if (!requiredCapabilities[index]) {
                missingCapabilitiesMessage += " - " + capabilityNames[index] + "\n";
                canRunTheGame = false;
            }
        }

        if (!canRunTheGame) {
            String completeErrorMessage = completeErrorMessage(missingCapabilitiesMessage);
            throw new IllegalStateException(completeErrorMessage);
        }
    }

    /**
     * Builds the user-facing error text for missing GL capabilities, including the
     * GPU vendor/model/driver reported by the driver.
     *
     * @param errorMessage the pre-formatted list of missing capabilities
     * @return the complete multi-line error message
     */
    private String completeErrorMessage(String errorMessage) {
        return "\n" +
                "\nThe following OpenGL versions/extensions are required but are not supported by your GPU driver:\n" +
                "\n" +
                errorMessage +
                "\n" +
                "GPU Information:\n" +
                "\n" +
                " Vendor: " + GL11.glGetString(GL11.GL_VENDOR) + "\n" +
                " Model: " + GL11.glGetString(GL11.GL_RENDERER) + "\n" +
                " Driver: " + GL11.glGetString(GL11.GL_VERSION) + "\n" +
                "\n" +
                "Try updating the driver to the latest version available.\n" +
                "If that fails you might need to use a different GPU (graphics card). Sorry!\n";
    }

    /** Applies the fixed OpenGL state the renderer relies on (culling, depth test). */
    public static void initOpenGLParams() {
        glEnable(GL_CULL_FACE);
        glEnable(GL_DEPTH_TEST);
        glEnable(GL_NORMALIZE);
        glDepthFunc(GL_LEQUAL);
    }

    /**
     * Runs {@code action} immediately when already on the game/display thread;
     * otherwise queues it to run on the display thread in {@link #postUpdate}.
     *
     * @param action the work to execute on the display thread
     */
    public void asynchToDisplayThread(Runnable action) {
        if (GameThread.isCurrentThread()) {
            action.run();
        } else {
            displayThreadActions.add(action);
        }
    }

    /**
     * Generates a GL texture id and uploads a 3D texture on the display thread.
     * {@code idConsumer} is invoked with the new id from the display thread.
     *
     * @param alignedBuffer raw RGBA pixel data; assumed 4-byte aligned (unpack alignment 4) — TODO confirm with callers
     * @param size          edge length of the cubic texture (size x size x size)
     */
    public void createTexture3D(ByteBuffer alignedBuffer, Texture.WrapMode wrapMode, Texture.FilterMode filterMode,
                                int size, Consumer<Integer> idConsumer) {
        asynchToDisplayThread(() -> {
            int id = glGenTextures();
            reloadTexture3D(id, alignedBuffer, wrapMode, filterMode, size);
            idConsumer.accept(id);
        });
    }

    /**
     * (Re)uploads a cubic 3D texture into the GL texture {@code id} on the display
     * thread, applying wrap/filter parameters. Restricted to a single mip level
     * (GL_TEXTURE_MAX_LEVEL is set to 0).
     */
    public void reloadTexture3D(int id, ByteBuffer alignedBuffer, Texture.WrapMode wrapMode, Texture.FilterMode filterMode, int size) {
        asynchToDisplayThread(() -> {
            glBindTexture(GL12.GL_TEXTURE_3D, id);

            glTexParameterf(GL12.GL_TEXTURE_3D, GL_TEXTURE_WRAP_S, LwjglGraphicsUtil.getGLMode(wrapMode));
            glTexParameterf(GL12.GL_TEXTURE_3D, GL_TEXTURE_WRAP_T, LwjglGraphicsUtil.getGLMode(wrapMode));
            glTexParameterf(GL12.GL_TEXTURE_3D, GL12.GL_TEXTURE_WRAP_R, LwjglGraphicsUtil.getGLMode(wrapMode));

            GL11.glTexParameteri(GL12.GL_TEXTURE_3D, GL11.GL_TEXTURE_MIN_FILTER, LwjglGraphicsUtil.getGlMinFilter(filterMode));
            GL11.glTexParameteri(GL12.GL_TEXTURE_3D, GL11.GL_TEXTURE_MAG_FILTER, LwjglGraphicsUtil.getGlMagFilter(filterMode));

            GL11.glPixelStorei(GL11.GL_UNPACK_ALIGNMENT, 4);
            GL11.glTexParameteri(GL12.GL_TEXTURE_3D, GL12.GL_TEXTURE_MAX_LEVEL, 0);

            GL12.glTexImage3D(GL12.GL_TEXTURE_3D, 0, GL11.GL_RGBA, size, size, size, 0, GL11.GL_RGBA, GL11.GL_UNSIGNED_BYTE, alignedBuffer);
        });
    }

    /**
     * Generates a GL texture id and uploads a 2D texture (with optional mipmaps)
     * on the display thread. {@code idConsumer} is invoked with the new id from
     * the display thread.
     *
     * @param buffers one RGBA buffer per mip level, level 0 first; may be empty
     */
    public void createTexture2D(ByteBuffer[] buffers, Texture.WrapMode wrapMode, Texture.FilterMode filterMode, int width, int height, Consumer<Integer> idConsumer) {
        asynchToDisplayThread(() -> {
            int id = glGenTextures();
            reloadTexture2D(id, buffers, wrapMode, filterMode, width, height);
            idConsumer.accept(id);
        });
    }

    /**
     * (Re)uploads a 2D texture into the GL texture {@code id} on the display
     * thread. Each entry of {@code buffers} becomes one mip level with dimensions
     * halved per level ({@code width >> i}, {@code height >> i}); an empty array
     * allocates uninitialized level-0 storage instead (null data pointer).
     */
    public void reloadTexture2D(int id, ByteBuffer[] buffers, Texture.WrapMode wrapMode, Texture.FilterMode filterMode, int width, int height) {
        asynchToDisplayThread(() -> {
            glBindTexture(GL11.GL_TEXTURE_2D, id);

            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, LwjglGraphicsUtil.getGLMode(wrapMode));
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, LwjglGraphicsUtil.getGLMode(wrapMode));
            GL11.glTexParameteri(GL_TEXTURE_2D, GL11.GL_TEXTURE_MIN_FILTER, LwjglGraphicsUtil.getGlMinFilter(filterMode));
            GL11.glTexParameteri(GL_TEXTURE_2D, GL11.GL_TEXTURE_MAG_FILTER, LwjglGraphicsUtil.getGlMagFilter(filterMode));
            GL11.glPixelStorei(GL11.GL_UNPACK_ALIGNMENT, 4);
            // One GL mip level per supplied buffer (an empty array yields max level -1,
            // immediately overridden by the single level-0 allocation below).
            GL11.glTexParameteri(GL11.GL_TEXTURE_2D, GL12.GL_TEXTURE_MAX_LEVEL, buffers.length - 1);
            if (buffers.length > 0) {
                for (int i = 0; i < buffers.length; i++) {
                    GL11.glTexImage2D(GL11.GL_TEXTURE_2D, i, GL11.GL_RGBA, width >> i, height >> i, 0, GL11.GL_RGBA, GL11.GL_UNSIGNED_BYTE, buffers[i]);
                }
            } else {
                GL11.glTexImage2D(GL11.GL_TEXTURE_2D, 0, GL11.GL_RGBA, width, height, 0, GL11.GL_RGBA, GL11.GL_UNSIGNED_BYTE, (ByteBuffer) null);
            }
        });
    }

    /** Deletes the GL texture {@code id} on the display thread. */
    public void disposeTexture(int id) {
        asynchToDisplayThread(() -> glDeleteTextures(id));
    }
}
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.elasticache.model; import java.io.Serializable; import javax.annotation.Generated; /** * <p> * The status of the service update for a specific replication group * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticache-2015-02-02/UpdateAction" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class UpdateAction implements Serializable, Cloneable { /** * <p> * The ID of the replication group * </p> */ private String replicationGroupId; /** * <p> * The unique ID of the service update * </p> */ private String serviceUpdateName; /** * <p> * The date the update is first available * </p> */ private java.util.Date serviceUpdateReleaseDate; /** * <p> * The severity of the service update * </p> */ private String serviceUpdateSeverity; /** * <p> * The status of the service update * </p> */ private String serviceUpdateStatus; /** * <p> * The recommended date to apply the service update to ensure compliance. For information on compliance, see <a * href= * "https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/elasticache-compliance.html#elasticache-compliance-self-service" * >Self-Service Security Updates for Compliance</a>. 
* </p> */ private java.util.Date serviceUpdateRecommendedApplyByDate; /** * <p> * Reflects the nature of the service update * </p> */ private String serviceUpdateType; /** * <p> * The date that the service update is available to a replication group * </p> */ private java.util.Date updateActionAvailableDate; /** * <p> * The status of the update action * </p> */ private String updateActionStatus; /** * <p> * The progress of the service update on the replication group * </p> */ private String nodesUpdated; /** * <p> * The date when the UpdateActionStatus was last modified * </p> */ private java.util.Date updateActionStatusModifiedDate; /** * <p> * If yes, all nodes in the replication group have been updated by the recommended apply-by date. If no, at least * one node in the replication group have not been updated by the recommended apply-by date. If N/A, the replication * group was created after the recommended apply-by date. * </p> */ private String slaMet; /** * <p> * The status of the service update on the node group * </p> */ private com.amazonaws.internal.SdkInternalList<NodeGroupUpdateStatus> nodeGroupUpdateStatus; /** * <p> * The estimated length of time for the update to complete * </p> */ private String estimatedUpdateTime; /** * <p> * The ID of the replication group * </p> * * @param replicationGroupId * The ID of the replication group */ public void setReplicationGroupId(String replicationGroupId) { this.replicationGroupId = replicationGroupId; } /** * <p> * The ID of the replication group * </p> * * @return The ID of the replication group */ public String getReplicationGroupId() { return this.replicationGroupId; } /** * <p> * The ID of the replication group * </p> * * @param replicationGroupId * The ID of the replication group * @return Returns a reference to this object so that method calls can be chained together. 
*/ public UpdateAction withReplicationGroupId(String replicationGroupId) { setReplicationGroupId(replicationGroupId); return this; } /** * <p> * The unique ID of the service update * </p> * * @param serviceUpdateName * The unique ID of the service update */ public void setServiceUpdateName(String serviceUpdateName) { this.serviceUpdateName = serviceUpdateName; } /** * <p> * The unique ID of the service update * </p> * * @return The unique ID of the service update */ public String getServiceUpdateName() { return this.serviceUpdateName; } /** * <p> * The unique ID of the service update * </p> * * @param serviceUpdateName * The unique ID of the service update * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateAction withServiceUpdateName(String serviceUpdateName) { setServiceUpdateName(serviceUpdateName); return this; } /** * <p> * The date the update is first available * </p> * * @param serviceUpdateReleaseDate * The date the update is first available */ public void setServiceUpdateReleaseDate(java.util.Date serviceUpdateReleaseDate) { this.serviceUpdateReleaseDate = serviceUpdateReleaseDate; } /** * <p> * The date the update is first available * </p> * * @return The date the update is first available */ public java.util.Date getServiceUpdateReleaseDate() { return this.serviceUpdateReleaseDate; } /** * <p> * The date the update is first available * </p> * * @param serviceUpdateReleaseDate * The date the update is first available * @return Returns a reference to this object so that method calls can be chained together. 
*/ public UpdateAction withServiceUpdateReleaseDate(java.util.Date serviceUpdateReleaseDate) { setServiceUpdateReleaseDate(serviceUpdateReleaseDate); return this; } /** * <p> * The severity of the service update * </p> * * @param serviceUpdateSeverity * The severity of the service update * @see ServiceUpdateSeverity */ public void setServiceUpdateSeverity(String serviceUpdateSeverity) { this.serviceUpdateSeverity = serviceUpdateSeverity; } /** * <p> * The severity of the service update * </p> * * @return The severity of the service update * @see ServiceUpdateSeverity */ public String getServiceUpdateSeverity() { return this.serviceUpdateSeverity; } /** * <p> * The severity of the service update * </p> * * @param serviceUpdateSeverity * The severity of the service update * @return Returns a reference to this object so that method calls can be chained together. * @see ServiceUpdateSeverity */ public UpdateAction withServiceUpdateSeverity(String serviceUpdateSeverity) { setServiceUpdateSeverity(serviceUpdateSeverity); return this; } /** * <p> * The severity of the service update * </p> * * @param serviceUpdateSeverity * The severity of the service update * @return Returns a reference to this object so that method calls can be chained together. 
 * @see ServiceUpdateSeverity
 */
public UpdateAction withServiceUpdateSeverity(ServiceUpdateSeverity serviceUpdateSeverity) {
    this.serviceUpdateSeverity = serviceUpdateSeverity.toString();
    return this;
}

// NOTE(review): this class looks like AWS-SDK generated model code — presumably regenerated from
// the service model; behavioral changes belong in the generator/model, not here. TODO confirm.

/**
 * Sets the status of the service update.
 *
 * @param serviceUpdateStatus
 *        The status of the service update
 * @see ServiceUpdateStatus
 */
public void setServiceUpdateStatus(String serviceUpdateStatus) {
    this.serviceUpdateStatus = serviceUpdateStatus;
}

/**
 * Returns the status of the service update.
 *
 * @return The status of the service update
 * @see ServiceUpdateStatus
 */
public String getServiceUpdateStatus() {
    return this.serviceUpdateStatus;
}

/**
 * Fluent setter for the status of the service update.
 *
 * @param serviceUpdateStatus
 *        The status of the service update
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see ServiceUpdateStatus
 */
public UpdateAction withServiceUpdateStatus(String serviceUpdateStatus) {
    setServiceUpdateStatus(serviceUpdateStatus);
    return this;
}

/**
 * Fluent setter accepting the {@link ServiceUpdateStatus} enum; stored as its string form.
 *
 * @param serviceUpdateStatus
 *        The status of the service update
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see ServiceUpdateStatus
 */
public UpdateAction withServiceUpdateStatus(ServiceUpdateStatus serviceUpdateStatus) {
    this.serviceUpdateStatus = serviceUpdateStatus.toString();
    return this;
}

/**
 * Sets the recommended date to apply the service update to ensure compliance. See
 * <a href="https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/elasticache-compliance.html#elasticache-compliance-self-service">Self-Service
 * Security Updates for Compliance</a>.
 *
 * @param serviceUpdateRecommendedApplyByDate
 *        The recommended date to apply the service update to ensure compliance.
 */
public void setServiceUpdateRecommendedApplyByDate(java.util.Date serviceUpdateRecommendedApplyByDate) {
    this.serviceUpdateRecommendedApplyByDate = serviceUpdateRecommendedApplyByDate;
}

/**
 * Returns the recommended date to apply the service update to ensure compliance.
 *
 * @return The recommended date to apply the service update to ensure compliance.
 */
public java.util.Date getServiceUpdateRecommendedApplyByDate() {
    return this.serviceUpdateRecommendedApplyByDate;
}

/**
 * Fluent setter for the recommended apply-by date.
 *
 * @param serviceUpdateRecommendedApplyByDate
 *        The recommended date to apply the service update to ensure compliance.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public UpdateAction withServiceUpdateRecommendedApplyByDate(java.util.Date serviceUpdateRecommendedApplyByDate) {
    setServiceUpdateRecommendedApplyByDate(serviceUpdateRecommendedApplyByDate);
    return this;
}

/**
 * Sets the value reflecting the nature of the service update.
 *
 * @param serviceUpdateType
 *        Reflects the nature of the service update
 * @see ServiceUpdateType
 */
public void setServiceUpdateType(String serviceUpdateType) {
    this.serviceUpdateType = serviceUpdateType;
}

/**
 * Returns the value reflecting the nature of the service update.
 *
 * @return Reflects the nature of the service update
 * @see ServiceUpdateType
 */
public String getServiceUpdateType() {
    return this.serviceUpdateType;
}

/**
 * Fluent setter for the nature of the service update.
 *
 * @param serviceUpdateType
 *        Reflects the nature of the service update
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see ServiceUpdateType
 */
public UpdateAction withServiceUpdateType(String serviceUpdateType) {
    setServiceUpdateType(serviceUpdateType);
    return this;
}

/**
 * Fluent setter accepting the {@link ServiceUpdateType} enum; stored as its string form.
 *
 * @param serviceUpdateType
 *        Reflects the nature of the service update
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see ServiceUpdateType
 */
public UpdateAction withServiceUpdateType(ServiceUpdateType serviceUpdateType) {
    this.serviceUpdateType = serviceUpdateType.toString();
    return this;
}

/**
 * Sets the date that the service update is available to a replication group.
 *
 * @param updateActionAvailableDate
 *        The date that the service update is available to a replication group
 */
public void setUpdateActionAvailableDate(java.util.Date updateActionAvailableDate) {
    this.updateActionAvailableDate = updateActionAvailableDate;
}

/**
 * Returns the date that the service update is available to a replication group.
 *
 * @return The date that the service update is available to a replication group
 */
public java.util.Date getUpdateActionAvailableDate() {
    return this.updateActionAvailableDate;
}

/**
 * Fluent setter for the date the service update becomes available.
 *
 * @param updateActionAvailableDate
 *        The date that the service update is available to a replication group
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public UpdateAction withUpdateActionAvailableDate(java.util.Date updateActionAvailableDate) {
    setUpdateActionAvailableDate(updateActionAvailableDate);
    return this;
}

/**
 * Sets the status of the update action.
 *
 * @param updateActionStatus
 *        The status of the update action
 * @see UpdateActionStatus
 */
public void setUpdateActionStatus(String updateActionStatus) {
    this.updateActionStatus = updateActionStatus;
}

/**
 * Returns the status of the update action.
 *
 * @return The status of the update action
 * @see UpdateActionStatus
 */
public String getUpdateActionStatus() {
    return this.updateActionStatus;
}

/**
 * Fluent setter for the status of the update action.
 *
 * @param updateActionStatus
 *        The status of the update action
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see UpdateActionStatus
 */
public UpdateAction withUpdateActionStatus(String updateActionStatus) {
    setUpdateActionStatus(updateActionStatus);
    return this;
}

/**
 * Fluent setter accepting the {@link UpdateActionStatus} enum; stored as its string form.
 *
 * @param updateActionStatus
 *        The status of the update action
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see UpdateActionStatus
 */
public UpdateAction withUpdateActionStatus(UpdateActionStatus updateActionStatus) {
    this.updateActionStatus = updateActionStatus.toString();
    return this;
}

/**
 * Sets the progress of the service update on the replication group.
 *
 * @param nodesUpdated
 *        The progress of the service update on the replication group
 */
public void setNodesUpdated(String nodesUpdated) {
    this.nodesUpdated = nodesUpdated;
}

/**
 * Returns the progress of the service update on the replication group.
 *
 * @return The progress of the service update on the replication group
 */
public String getNodesUpdated() {
    return this.nodesUpdated;
}

/**
 * Fluent setter for the progress of the service update.
 *
 * @param nodesUpdated
 *        The progress of the service update on the replication group
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public UpdateAction withNodesUpdated(String nodesUpdated) {
    setNodesUpdated(nodesUpdated);
    return this;
}

/**
 * Sets the date when the UpdateActionStatus was last modified.
 *
 * @param updateActionStatusModifiedDate
 *        The date when the UpdateActionStatus was last modified
 */
public void setUpdateActionStatusModifiedDate(java.util.Date updateActionStatusModifiedDate) {
    this.updateActionStatusModifiedDate = updateActionStatusModifiedDate;
}

/**
 * Returns the date when the UpdateActionStatus was last modified.
 *
 * @return The date when the UpdateActionStatus was last modified
 */
public java.util.Date getUpdateActionStatusModifiedDate() {
    return this.updateActionStatusModifiedDate;
}

/**
 * Fluent setter for the UpdateActionStatus modification date.
 *
 * @param updateActionStatusModifiedDate
 *        The date when the UpdateActionStatus was last modified
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public UpdateAction withUpdateActionStatusModifiedDate(java.util.Date updateActionStatusModifiedDate) {
    setUpdateActionStatusModifiedDate(updateActionStatusModifiedDate);
    return this;
}

/**
 * Sets whether the SLA was met: yes if all nodes were updated by the recommended apply-by date,
 * no if at least one was not, N/A if the replication group was created after that date.
 *
 * @param slaMet
 *        Whether all nodes in the replication group were updated by the recommended apply-by date.
 * @see SlaMet
 */
public void setSlaMet(String slaMet) {
    this.slaMet = slaMet;
}

/**
 * Returns whether the SLA was met (yes / no / N/A — see {@link #setSlaMet(String)}).
 *
 * @return Whether all nodes in the replication group were updated by the recommended apply-by date.
 * @see SlaMet
 */
public String getSlaMet() {
    return this.slaMet;
}

/**
 * Fluent setter for the SLA-met indicator.
 *
 * @param slaMet
 *        Whether all nodes in the replication group were updated by the recommended apply-by date.
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see SlaMet
 */
public UpdateAction withSlaMet(String slaMet) {
    setSlaMet(slaMet);
    return this;
}

/**
 * Fluent setter accepting the {@link SlaMet} enum; stored as its string form.
 *
 * @param slaMet
 *        Whether all nodes in the replication group were updated by the recommended apply-by date.
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see SlaMet
 */
public UpdateAction withSlaMet(SlaMet slaMet) {
    this.slaMet = slaMet.toString();
    return this;
}

/**
 * Returns the status of the service update on the node group, lazily initializing the backing
 * list so callers never see null.
 *
 * @return The status of the service update on the node group
 */
public java.util.List<NodeGroupUpdateStatus> getNodeGroupUpdateStatus() {
    if (nodeGroupUpdateStatus == null) {
        nodeGroupUpdateStatus = new com.amazonaws.internal.SdkInternalList<NodeGroupUpdateStatus>();
    }
    return nodeGroupUpdateStatus;
}

/**
 * Sets the status of the service update on the node group. A defensive copy of the given
 * collection is stored; null clears the field.
 *
 * @param nodeGroupUpdateStatus
 *        The status of the service update on the node group
 */
public void setNodeGroupUpdateStatus(java.util.Collection<NodeGroupUpdateStatus> nodeGroupUpdateStatus) {
    if (nodeGroupUpdateStatus == null) {
        this.nodeGroupUpdateStatus = null;
        return;
    }
    this.nodeGroupUpdateStatus = new com.amazonaws.internal.SdkInternalList<NodeGroupUpdateStatus>(nodeGroupUpdateStatus);
}

/**
 * Appends the given values to the node-group status list (use
 * {@link #setNodeGroupUpdateStatus(java.util.Collection)} or
 * {@link #withNodeGroupUpdateStatus(java.util.Collection)} to override existing values instead).
 *
 * @param nodeGroupUpdateStatus
 *        The status of the service update on the node group
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public UpdateAction withNodeGroupUpdateStatus(NodeGroupUpdateStatus... nodeGroupUpdateStatus) {
    if (this.nodeGroupUpdateStatus == null) {
        // Presize to the varargs length to avoid growth while appending below.
        setNodeGroupUpdateStatus(new com.amazonaws.internal.SdkInternalList<NodeGroupUpdateStatus>(nodeGroupUpdateStatus.length));
    }
    for (NodeGroupUpdateStatus ele : nodeGroupUpdateStatus) {
        this.nodeGroupUpdateStatus.add(ele);
    }
    return this;
}

/**
 * Replaces the node-group status list with the given collection.
 *
 * @param nodeGroupUpdateStatus
 *        The status of the service update on the node group
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public UpdateAction withNodeGroupUpdateStatus(java.util.Collection<NodeGroupUpdateStatus> nodeGroupUpdateStatus) {
    setNodeGroupUpdateStatus(nodeGroupUpdateStatus);
    return this;
}

/**
 * Sets the estimated length of time for the update to complete.
 *
 * @param estimatedUpdateTime
 *        The estimated length of time for the update to complete
 */
public void setEstimatedUpdateTime(String estimatedUpdateTime) {
    this.estimatedUpdateTime = estimatedUpdateTime;
}

/**
 * Returns the estimated length of time for the update to complete.
 *
 * @return The estimated length of time for the update to complete
 */
public String getEstimatedUpdateTime() {
    return this.estimatedUpdateTime;
}

/**
 * Fluent setter for the estimated update duration.
 *
 * @param estimatedUpdateTime
 *        The estimated length of time for the update to complete
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public UpdateAction withEstimatedUpdateTime(String estimatedUpdateTime) {
    setEstimatedUpdateTime(estimatedUpdateTime);
    return this;
}

/**
 * Returns a string representation of this object. This is useful for testing and debugging.
 * Sensitive data will be redacted from this string using a placeholder value.
 *
 * @return A string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("{");
    if (getReplicationGroupId() != null)
        sb.append("ReplicationGroupId: ").append(getReplicationGroupId()).append(",");
    if (getServiceUpdateName() != null)
        sb.append("ServiceUpdateName: ").append(getServiceUpdateName()).append(",");
    if (getServiceUpdateReleaseDate() != null)
        sb.append("ServiceUpdateReleaseDate: ").append(getServiceUpdateReleaseDate()).append(",");
    if (getServiceUpdateSeverity() != null)
        sb.append("ServiceUpdateSeverity: ").append(getServiceUpdateSeverity()).append(",");
    if (getServiceUpdateStatus() != null)
        sb.append("ServiceUpdateStatus: ").append(getServiceUpdateStatus()).append(",");
    if (getServiceUpdateRecommendedApplyByDate() != null)
        sb.append("ServiceUpdateRecommendedApplyByDate: ").append(getServiceUpdateRecommendedApplyByDate()).append(",");
    if (getServiceUpdateType() != null)
        sb.append("ServiceUpdateType: ").append(getServiceUpdateType()).append(",");
    if (getUpdateActionAvailableDate() != null)
        sb.append("UpdateActionAvailableDate: ").append(getUpdateActionAvailableDate()).append(",");
    if (getUpdateActionStatus() != null)
        sb.append("UpdateActionStatus: ").append(getUpdateActionStatus()).append(",");
    if (getNodesUpdated() != null)
        sb.append("NodesUpdated: ").append(getNodesUpdated()).append(",");
    if (getUpdateActionStatusModifiedDate() != null)
        sb.append("UpdateActionStatusModifiedDate: ").append(getUpdateActionStatusModifiedDate()).append(",");
    if (getSlaMet() != null)
        sb.append("SlaMet: ").append(getSlaMet()).append(",");
    if (getNodeGroupUpdateStatus() != null)
        sb.append("NodeGroupUpdateStatus: ").append(getNodeGroupUpdateStatus()).append(",");
    if (getEstimatedUpdateTime() != null)
        sb.append("EstimatedUpdateTime: ").append(getEstimatedUpdateTime());
    sb.append("}");
    return sb.toString();
}

// Field-by-field equality over all 14 members; null fields compare equal only to null
// (the ^ test rejects mixed null/non-null, the second test compares values).
@Override
public boolean equals(Object obj) {
    if (this == obj)
        return true;
    if (obj == null)
        return false;
    if (obj instanceof UpdateAction == false)
        return false;
    UpdateAction other = (UpdateAction) obj;
    if (other.getReplicationGroupId() == null ^ this.getReplicationGroupId() == null)
        return false;
    if (other.getReplicationGroupId() != null && other.getReplicationGroupId().equals(this.getReplicationGroupId()) == false)
        return false;
    if (other.getServiceUpdateName() == null ^ this.getServiceUpdateName() == null)
        return false;
    if (other.getServiceUpdateName() != null && other.getServiceUpdateName().equals(this.getServiceUpdateName()) == false)
        return false;
    if (other.getServiceUpdateReleaseDate() == null ^ this.getServiceUpdateReleaseDate() == null)
        return false;
    if (other.getServiceUpdateReleaseDate() != null && other.getServiceUpdateReleaseDate().equals(this.getServiceUpdateReleaseDate()) == false)
        return false;
    if (other.getServiceUpdateSeverity() == null ^ this.getServiceUpdateSeverity() == null)
        return false;
    if (other.getServiceUpdateSeverity() != null && other.getServiceUpdateSeverity().equals(this.getServiceUpdateSeverity()) == false)
        return false;
    if (other.getServiceUpdateStatus() == null ^ this.getServiceUpdateStatus() == null)
        return false;
    if (other.getServiceUpdateStatus() != null && other.getServiceUpdateStatus().equals(this.getServiceUpdateStatus()) == false)
        return false;
    if (other.getServiceUpdateRecommendedApplyByDate() == null ^ this.getServiceUpdateRecommendedApplyByDate() == null)
        return false;
    if (other.getServiceUpdateRecommendedApplyByDate() != null
            && other.getServiceUpdateRecommendedApplyByDate().equals(this.getServiceUpdateRecommendedApplyByDate()) == false)
        return false;
    if (other.getServiceUpdateType() == null ^ this.getServiceUpdateType() == null)
        return false;
    if (other.getServiceUpdateType() != null && other.getServiceUpdateType().equals(this.getServiceUpdateType()) == false)
        return false;
    if (other.getUpdateActionAvailableDate() == null ^ this.getUpdateActionAvailableDate() == null)
        return false;
    if (other.getUpdateActionAvailableDate() != null && other.getUpdateActionAvailableDate().equals(this.getUpdateActionAvailableDate()) == false)
        return false;
    if (other.getUpdateActionStatus() == null ^ this.getUpdateActionStatus() == null)
        return false;
    if (other.getUpdateActionStatus() != null && other.getUpdateActionStatus().equals(this.getUpdateActionStatus()) == false)
        return false;
    if (other.getNodesUpdated() == null ^ this.getNodesUpdated() == null)
        return false;
    if (other.getNodesUpdated() != null && other.getNodesUpdated().equals(this.getNodesUpdated()) == false)
        return false;
    if (other.getUpdateActionStatusModifiedDate() == null ^ this.getUpdateActionStatusModifiedDate() == null)
        return false;
    if (other.getUpdateActionStatusModifiedDate() != null
            && other.getUpdateActionStatusModifiedDate().equals(this.getUpdateActionStatusModifiedDate()) == false)
        return false;
    if (other.getSlaMet() == null ^ this.getSlaMet() == null)
        return false;
    if (other.getSlaMet() != null && other.getSlaMet().equals(this.getSlaMet()) == false)
        return false;
    if (other.getNodeGroupUpdateStatus() == null ^ this.getNodeGroupUpdateStatus() == null)
        return false;
    if (other.getNodeGroupUpdateStatus() != null && other.getNodeGroupUpdateStatus().equals(this.getNodeGroupUpdateStatus()) == false)
        return false;
    if (other.getEstimatedUpdateTime() == null ^ this.getEstimatedUpdateTime() == null)
        return false;
    if (other.getEstimatedUpdateTime() != null && other.getEstimatedUpdateTime().equals(this.getEstimatedUpdateTime()) == false)
        return false;
    return true;
}

// Hash over the same 14 fields, in the same order, as equals() — keeps the equals/hashCode
// contract: equal objects produce equal hashes.
@Override
public int hashCode() {
    final int prime = 31;
    int hashCode = 1;
    hashCode = prime * hashCode + ((getReplicationGroupId() == null) ? 0 : getReplicationGroupId().hashCode());
    hashCode = prime * hashCode + ((getServiceUpdateName() == null) ? 0 : getServiceUpdateName().hashCode());
    hashCode = prime * hashCode + ((getServiceUpdateReleaseDate() == null) ? 0 : getServiceUpdateReleaseDate().hashCode());
    hashCode = prime * hashCode + ((getServiceUpdateSeverity() == null) ? 0 : getServiceUpdateSeverity().hashCode());
    hashCode = prime * hashCode + ((getServiceUpdateStatus() == null) ? 0 : getServiceUpdateStatus().hashCode());
    hashCode = prime * hashCode + ((getServiceUpdateRecommendedApplyByDate() == null) ? 0 : getServiceUpdateRecommendedApplyByDate().hashCode());
    hashCode = prime * hashCode + ((getServiceUpdateType() == null) ? 0 : getServiceUpdateType().hashCode());
    hashCode = prime * hashCode + ((getUpdateActionAvailableDate() == null) ? 0 : getUpdateActionAvailableDate().hashCode());
    hashCode = prime * hashCode + ((getUpdateActionStatus() == null) ? 0 : getUpdateActionStatus().hashCode());
    hashCode = prime * hashCode + ((getNodesUpdated() == null) ? 0 : getNodesUpdated().hashCode());
    hashCode = prime * hashCode + ((getUpdateActionStatusModifiedDate() == null) ? 0 : getUpdateActionStatusModifiedDate().hashCode());
    hashCode = prime * hashCode + ((getSlaMet() == null) ? 0 : getSlaMet().hashCode());
    hashCode = prime * hashCode + ((getNodeGroupUpdateStatus() == null) ? 0 : getNodeGroupUpdateStatus().hashCode());
    hashCode = prime * hashCode + ((getEstimatedUpdateTime() == null) ? 0 : getEstimatedUpdateTime().hashCode());
    return hashCode;
}

// Shallow clone via Object.clone(); the class implements Cloneable, so the catch is unreachable
// in practice and rethrown as an IllegalStateException if it ever fires.
@Override
public UpdateAction clone() {
    try {
        return (UpdateAction) super.clone();
    } catch (CloneNotSupportedException e) {
        throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
    }
}
}
/* * Copyright 2016 - 2020 Michael Rapp * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package de.mrapp.android.tabswitcher.layout; import android.content.Context; import android.content.res.ColorStateList; import android.graphics.PorterDuff; import android.graphics.drawable.Drawable; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ImageButton; import androidx.annotation.CallSuper; import androidx.annotation.ColorInt; import androidx.annotation.MenuRes; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.appcompat.widget.Toolbar; import de.mrapp.android.tabswitcher.Animation; import de.mrapp.android.tabswitcher.Layout; import de.mrapp.android.tabswitcher.R; import de.mrapp.android.tabswitcher.Tab; import de.mrapp.android.tabswitcher.TabCloseListener; import de.mrapp.android.tabswitcher.TabSwitcher; import de.mrapp.android.tabswitcher.TabSwitcherDecorator; import de.mrapp.android.tabswitcher.iterator.AbstractItemIterator; import de.mrapp.android.tabswitcher.iterator.ItemIterator; import de.mrapp.android.tabswitcher.model.AbstractItem; import de.mrapp.android.tabswitcher.model.Model; import de.mrapp.android.tabswitcher.model.State; import de.mrapp.android.tabswitcher.model.TabItem; import de.mrapp.android.tabswitcher.model.TabSwitcherModel; import de.mrapp.android.tabswitcher.model.TabSwitcherStyle; import de.mrapp.android.util.logging.LogLevel; import 
de.mrapp.android.util.view.AbstractViewRecycler;
import de.mrapp.android.util.view.AttachedViewRecycler;
import de.mrapp.util.Condition;

/**
 * An abstract base class for all view recycler adapters, which allow to inflate the views, which
 * are used to visualize the tabs of a {@link TabSwitcher}.
 *
 * @author Michael Rapp
 * @since 1.0.0
 */
public abstract class AbstractTabRecyclerAdapter
        extends AbstractViewRecycler.Adapter<AbstractItem, Integer>
        implements Tab.Callback, Model.Listener {

    /**
     * The view type of a tab.
     */
    private static final int TAB_VIEW_TYPE = 0;

    /**
     * The tab switcher, the tabs belong to.
     */
    private final TabSwitcher tabSwitcher;

    /**
     * The model, which belongs to the tab switcher.
     */
    private final TabSwitcherModel model;

    /**
     * The style, which allows to retrieve style attributes of the tab switcher.
     */
    private final TabSwitcherStyle style;

    /**
     * The view recycler, the adapter is bound to. Null until {@link #setViewRecycler} is called.
     */
    private AttachedViewRecycler<AbstractItem, Integer> viewRecycler;

    /**
     * Adapts the title of a tab by copying it into the view holder's title text view.
     *
     * @param tabItem
     *         The tab item, which corresponds to the tab, whose title should be adapted, as an
     *         instance of the class {@link TabItem}. The tab item may not be null
     */
    private void adaptTitle(@NonNull final TabItem tabItem) {
        Tab tab = tabItem.getTab();
        AbstractTabViewHolder viewHolder = tabItem.getViewHolder();
        viewHolder.titleTextView.setText(tab.getTitle());
    }

    /**
     * Adapts the icon of a tab, resolving the drawable via the tab switcher's style.
     *
     * @param tabItem
     *         The tab item, which corresponds to the tab, whose icon should be adapted, as an
     *         instance of the class {@link TabItem}. The tab item may not be null
     */
    private void adaptIcon(@NonNull final TabItem tabItem) {
        Tab tab = tabItem.getTab();
        AbstractTabViewHolder viewHolder = tabItem.getViewHolder();
        Drawable icon = style.getTabIcon(tab);
        viewHolder.iconImageView.setImageDrawable(icon);
    }

    /**
     * Adapts the visibility of a tab's close button. The button is shown, tagged and wired to a
     * click listener only for closeable tabs; otherwise it is hidden and its listener removed.
     *
     * @param tabItem
     *         The tab item, which corresponds to the tab, whose close button should be adapted,
     *         as an instance of the class {@link TabItem}. The tab item may not be null
     */
    private void adaptCloseButtonVisibility(@NonNull final TabItem tabItem) {
        Tab tab = tabItem.getTab();
        AbstractTabViewHolder viewHolder = tabItem.getViewHolder();
        viewHolder.closeButton.setVisibility(tab.isCloseable() ? View.VISIBLE : View.GONE);
        viewHolder.closeButton.setTag(R.id.tag_visibility, tab.isCloseable());
        viewHolder.closeButton.setOnClickListener(
                tab.isCloseable() ? createCloseButtonClickListener(viewHolder.closeButton, tab) :
                        null);
    }

    /**
     * Adapts the icon of a tab's close button, resolving the drawable via the style.
     *
     * @param tabItem
     *         The tab item, which corresponds to the tab, whose close button icon should be
     *         adapted, as an instance of the class {@link TabItem}. The tab item may not be null
     */
    private void adaptCloseButtonIcon(@NonNull final TabItem tabItem) {
        Tab tab = tabItem.getTab();
        Drawable icon = style.getTabCloseButtonIcon(tab);
        AbstractTabViewHolder viewHolder = tabItem.getViewHolder();
        viewHolder.closeButton.setImageDrawable(icon);
    }

    /**
     * Adapts the background color of a tab. Picks the selected-state color from the style's
     * {@link ColorStateList} when the tab is the currently selected one, tints the view's
     * existing background drawable with it, and notifies subclasses via
     * {@link #onAdaptBackgroundColor(int, TabItem)}.
     *
     * @param tabItem
     *         The tab item, which corresponds to the tab, whose background should be adapted, as
     *         an instance of the class {@link TabItem}. The tab item may not be null
     */
    private void adaptBackgroundColor(@NonNull final TabItem tabItem) {
        Tab tab = tabItem.getTab();
        ColorStateList colorStateList = style.getTabBackgroundColor(tab);
        int[] stateSet = model.getSelectedTab() == tab ?
                new int[]{android.R.attr.state_selected} : new int[]{};
        int color = colorStateList.getColorForState(stateSet, colorStateList.getDefaultColor());
        View view = tabItem.getView();
        Drawable background = view.getBackground();
        background.setColorFilter(color, PorterDuff.Mode.MULTIPLY);
        onAdaptBackgroundColor(color, tabItem);
    }

    /**
     * Adapts the text color of a tab's title using the style's color state list.
     *
     * @param tabItem
     *         The tab item, which corresponds to the tab, whose title should be adapted, as an
     *         instance of the class {@link TabItem}. The tab item may not be null
     */
    private void adaptTitleTextColor(@NonNull final TabItem tabItem) {
        Tab tab = tabItem.getTab();
        ColorStateList colorStateList = style.getTabTitleTextColor(tab);
        AbstractTabViewHolder viewHolder = tabItem.getViewHolder();
        viewHolder.titleTextView.setTextColor(colorStateList);
    }

    /**
     * Adapts the visibility of a tab's progress bar. The progress bar and the icon are mutually
     * exclusive: showing one hides the other.
     *
     * @param tabItem
     *         The tab item, which corresponds to the tab, whose progress bar should be adapted,
     *         as an instance of the class {@link TabItem}. The tab item may not be null
     */
    private void adaptProgressBarVisibility(@NonNull final TabItem tabItem) {
        Tab tab = tabItem.getTab();
        AbstractTabViewHolder viewHolder = tabItem.getViewHolder();
        viewHolder.progressBar.setVisibility(tab.isProgressBarShown() ? View.VISIBLE : View.GONE);
        viewHolder.iconImageView.setVisibility(
                tab.isProgressBarShown() ? View.GONE : View.VISIBLE);
    }

    /**
     * Adapts the color of a tab's progress bar using the style.
     *
     * @param tabItem
     *         The tab item, which corresponds to the tab, whose progress bar should be adapted,
     *         as an instance of the class {@link TabItem}. The tab item may not be null
     */
    private void adaptProgressBarColor(@NonNull final TabItem tabItem) {
        Tab tab = tabItem.getTab();
        AbstractTabViewHolder viewHolder = tabItem.getViewHolder();
        int color = style.getTabProgressBarColor(tab);
        viewHolder.progressBar.setColor(color);
    }

    /**
     * Adapts the selection state of a tab's views (the tab view itself, its title and its close
     * button) to whether the tab is the model's currently selected tab.
     *
     * @param tabItem
     *         The tab item, which corresponds to the tab, whose selection state should be
     *         adapted, as an instance of the class {@link TabItem}. The tab item may not be null
     */
    private void adaptSelectionState(@NonNull final TabItem tabItem) {
        boolean selected = model.getSelectedTab() == tabItem.getTab();
        tabItem.getView().setSelected(selected);
        AbstractTabViewHolder viewHolder = tabItem.getViewHolder();
        viewHolder.titleTextView.setSelected(selected);
        viewHolder.closeButton.setSelected(selected);
    }

    /**
     * Adapts the appearance of all currently inflated tabs, depending on whether they are
     * currently selected, or not. Iterates every item and updates both the selection state and
     * the (selection-dependent) background color.
     */
    private void adaptAllSelectionStates() {
        AbstractItemIterator iterator =
                new ItemIterator.Builder(model, getViewRecyclerOrThrowException()).create();
        AbstractItem item;
        while ((item = iterator.next()) != null) {
            if (item.isInflated() && item instanceof TabItem) {
                TabItem tabItem = (TabItem) item;
                adaptSelectionState(tabItem);
                adaptBackgroundColor(tabItem);
            }
        }
    }

    /**
     * Creates and returns a listener, which allows to close a specific tab, when its close button
     * is clicked. The tab is only removed when it is in a closeable state (FLOATING or
     * STACKED_START_ATOP) and no {@link TabCloseListener} vetoes the close; the listener is
     * detached first to prevent double-triggering while the removal is in flight.
     *
     * @param closeButton
     *         The tab's close button as an instance of the class {@link ImageButton}. The button
     *         may not be null
     * @param tab
     *         The tab, which should be closed, as an instance of the class {@link Tab}. The tab
     *         may not be null
     * @return The listener, which has been created, as an instance of the class
     *         {@link View.OnClickListener}. The listener may not be null
     */
    @NonNull
    private View.OnClickListener createCloseButtonClickListener(
            @NonNull final ImageButton closeButton, @NonNull final Tab tab) {
        return new View.OnClickListener() {

            @Override
            public void onClick(final View v) {
                TabItem tabItem = getTabItem(tab);
                if (tabItem != null) {
                    State state = tabItem.getTag().getState();
                    if (state == State.FLOATING || state == State.STACKED_START_ATOP) {
                        if (notifyOnCloseTab(tab)) {
                            closeButton.setOnClickListener(null);
                            tabSwitcher.removeTab(tab);
                        }
                    }
                }
            }

        };
    }

    /**
     * Notifies all listeners, that a tab is about to be closed by clicking its close button.
     * All listeners are consulted; a single false veto makes the result false.
     *
     * @param tab
     *         The tab, which is about to be closed, as an instance of the class {@link Tab}. The
     *         tab may not be null
     * @return True, if the tab should be closed, false otherwise
     */
    private boolean notifyOnCloseTab(@NonNull final Tab tab) {
        boolean result = true;
        for (TabCloseListener listener : model.getTabCloseListeners()) {
            result &= listener.onCloseTab(tabSwitcher, tab);
        }
        return result;
    }

    /**
     * Returns the tab switcher, which contains the tabs.
     *
     * @return The tab switcher, which contains the tabs, as an instance of the class
     *         {@link TabSwitcher}. The tab switcher may not be null
     */
    @NonNull
    protected final TabSwitcher getTabSwitcher() {
        return tabSwitcher;
    }

    /**
     * Returns the model of the tab switcher.
     *
     * @return The model of the tab switcher as an instance of the class
     *         {@link TabSwitcherModel}. The model may not be null
     */
    @NonNull
    protected final TabSwitcherModel getModel() {
        return model;
    }

    /**
     * Returns the style, which allows to retrieve style attributes of the tab switcher.
     *
     * @return The style, which allows to retrieve style attributes of the tab switcher, as an
     *         instance of the class {@link TabSwitcherStyle}. The style may not be null
     */
    @NonNull
    protected final TabSwitcherStyle getStyle() {
        return style;
    }

    /**
     * Returns the tab item, which corresponds to a specific tab.
     *
     * @param tab
     *         The tab, whose tab item should be returned, as an instance of the class
     *         {@link Tab}. The tab may not be null
     * @return The tab item, which corresponds to the given tab, as an instance of the class
     *         {@link TabItem} or null, if no view, which visualizes the tab, is currently
     *         inflated
     */
    @Nullable
    protected final TabItem getTabItem(@NonNull final Tab tab) {
        int index = model.indexOf(tab);
        if (index != -1) {
            TabItem tabItem = TabItem.create(model, getViewRecyclerOrThrowException(), index);
            if (tabItem.isInflated()) {
                return tabItem;
            }
        }
        return null;
    }

    /**
     * Returns the view recycler, the adapter is bound to, or throws an
     * {@link IllegalStateException}, if no view recycler has been set.
     *
     * @return The view recycler, the adapter is bound to, as an instance of the class
     *         AttachedViewRecycler. The view recycler may not be null
     */
    @NonNull
    protected final AttachedViewRecycler<AbstractItem, Integer> getViewRecyclerOrThrowException() {
        Condition.INSTANCE.ensureNotNull(viewRecycler, "No view recycler has been set",
                IllegalStateException.class);
        return viewRecycler;
    }

    /**
     * The method, which is invoked on implementing subclasses, when the background color of a tab
     * has been changed. The default implementation is a no-op.
     *
     * @param color
     *         The color, which has been set, as an {@link Integer} value
     * @param tabItem
     *         The tab item, which corresponds to the tab, whose background color has been
     *         changed, as an instance of the class {@link TabItem}. The tab item may not be null
     */
    protected void onAdaptBackgroundColor(@ColorInt final int color,
                                          @NonNull final TabItem tabItem) {

    }

    /**
     * The method, which is invoked on implementing subclasses in order to inflate the view, which
     * is used to visualize tabs.
     *
     * @param inflater
     *         The layout inflater, which should be used, as an instance of the class
     *         {@link LayoutInflater}. The layout inflater may not be null
     * @param parent
     *         The parent of the view, which should be inflated, as an instance of the class
     *         {@link ViewGroup} or null, if no parent is available
     * @param viewHolder
     *         The view holder, which should hold references to the child views of the view, which
     *         should be inflated, as an instance of the class {@link AbstractTabViewHolder}. The
     *         view holder may not be null
     * @return The view, which has been inflated, as an instance of the class {@link View}. The
     *         view may not be null
     */
    @NonNull
    protected abstract View onInflateTabView(@NonNull final LayoutInflater inflater,
                                             @Nullable final ViewGroup parent,
                                             @NonNull final AbstractTabViewHolder viewHolder);

    /**
     * The method, which is invoked on implementing subclasses in order to adapt the appearance of
     * a view, which is used to visualize a tab.
     *
     * @param view
     *         The view, which is used to visualize the tab, as an instance of the class
     *         {@link View}. The view may not be null
     * @param tabItem
     *         The tab item, which corresponds to the tab, which is visualized by the given view,
     *         as an instance of the class {@link TabItem}. The tab item may not be null
     * @param params
     *         An array, which may contain optional parameters, or an empty array, if no optional
     *         parameters are available
     */
    @SuppressWarnings("unchecked")
    protected abstract void onShowTabView(@NonNull final View view, @NonNull final TabItem tabItem,
                                          @NonNull final Integer... params);

    /**
     * The method, which is invoked on implementing subclasses in order to create the view holder,
     * which should be associated with an inflated view.
     *
     * @return The view holder, which has been created, as an instance of the class
     *         {@link AbstractTabViewHolder}. The view holder may not be null
     */
    @NonNull
    protected abstract AbstractTabViewHolder onCreateTabViewHolder();

    /**
     * The method, which is invoked on implementing subclasses in order to retrieve the layout,
     * which is used by the tab switcher.
     *
     * @return The layout, which is used by the tab switcher, as a value of the enum
     *         {@link Layout}. The layout may not be null
     */
    @NonNull
    protected abstract Layout getLayout();

    /**
     * Creates a new view recycler adapter, which allows to inflate the views, which are used to
     * visualize the tabs of a {@link TabSwitcher}.
     *
     * @param tabSwitcher
     *         The tab switcher as an instance of the class {@link TabSwitcher}. The tab switcher
     *         may not be null
     * @param model
     *         The model, which belongs to the tab switcher, as an instance of the class
     *         {@link TabSwitcherModel}. The model may not be null
     * @param style
     *         The style, which allows to retrieve style attributes of the tab switcher, as an
     *         instance of the class {@link TabSwitcherStyle}. The style may not be null
     */
    public AbstractTabRecyclerAdapter(@NonNull final TabSwitcher tabSwitcher,
                                      @NonNull final TabSwitcherModel model,
                                      @NonNull final TabSwitcherStyle style) {
        Condition.INSTANCE.ensureNotNull(tabSwitcher, "The tab switcher may not be null");
        Condition.INSTANCE.ensureNotNull(model, "The model may not be null");
        Condition.INSTANCE.ensureNotNull(style, "The style may not be null");
        this.tabSwitcher = tabSwitcher;
        this.model = model;
        this.style = style;
        this.viewRecycler = null;
    }

    /**
     * Sets the view recycler, which allows to inflate the views, which are used to visualize
     * tabs.
     *
     * @param viewRecycler
     *         The view recycler, which should be set, as an instance of the class
     *         AttachedViewRecycler.
The view recycler may not be null */ public final void setViewRecycler( @NonNull final AttachedViewRecycler<AbstractItem, Integer> viewRecycler) { Condition.INSTANCE.ensureNotNull(viewRecycler, "The view recycler may not be null"); this.viewRecycler = viewRecycler; } @Override public void onLogLevelChanged(@NonNull final LogLevel logLevel) { } @Override public final void onDecoratorChanged(@NonNull final TabSwitcherDecorator decorator) { } @Override public final void onSwitcherShown() { } @Override public final void onSwitcherHidden() { } @Override public final void onSelectionChanged(final int previousIndex, final int index, @Nullable final Tab selectedTab, final boolean switcherHidden) { adaptAllSelectionStates(); } @Override public final void onTabAdded(final int index, @NonNull final Tab tab, final int previousSelectedTabIndex, final int selectedTabIndex, final boolean selectionChanged, final boolean switcherVisibilityChanged, @NonNull final Animation animation) { if (selectionChanged) { adaptAllSelectionStates(); } } @Override public final void onAllTabsAdded(final int index, @NonNull final Tab[] tabs, final int previousSelectedTabIndex, final int selectedTabIndex, final boolean selectionChanged, @NonNull final Animation animation) { if (selectionChanged) { adaptAllSelectionStates(); } } @Override public final void onTabRemoved(final int index, @NonNull final Tab tab, final int previousSelectedTabIndex, final int selectedTabIndex, final boolean selectionChanged, @NonNull final Animation animation) { if (selectionChanged) { adaptAllSelectionStates(); } } @Override public final void onAllTabsRemoved(@NonNull final Tab[] tabs, @NonNull final Animation animation) { } @Override public void onPaddingChanged(final int left, final int top, final int right, final int bottom) { } @Override public void onApplyPaddingToTabsChanged(final boolean applyPaddingToTabs) { } @Override public final void onTabIconChanged(@Nullable final Drawable icon) { ItemIterator iterator = new 
ItemIterator.Builder(model, getViewRecyclerOrThrowException()).create(); AbstractItem item; while ((item = iterator.next()) != null) { if (item.isInflated() && item instanceof TabItem) { adaptIcon((TabItem) item); } } } @CallSuper @Override public void onTabBackgroundColorChanged(@Nullable final ColorStateList colorStateList) { ItemIterator iterator = new ItemIterator.Builder(model, getViewRecyclerOrThrowException()).create(); AbstractItem item; while ((item = iterator.next()) != null) { if (item.isInflated() && item instanceof TabItem) { adaptBackgroundColor((TabItem) item); } } } @Override public void onTabContentBackgroundColorChanged(@ColorInt final int color) { } @Override public final void onTabTitleColorChanged(@Nullable final ColorStateList colorStateList) { ItemIterator iterator = new ItemIterator.Builder(model, getViewRecyclerOrThrowException()).create(); AbstractItem item; while ((item = iterator.next()) != null) { if (item.isInflated() && item instanceof TabItem) { adaptTitleTextColor((TabItem) item); } } } @Override public final void onTabCloseButtonIconChanged(@Nullable final Drawable icon) { ItemIterator iterator = new ItemIterator.Builder(model, getViewRecyclerOrThrowException()).create(); AbstractItem item; while ((item = iterator.next()) != null) { if (item.isInflated() && item instanceof TabItem) { adaptCloseButtonIcon((TabItem) item); } } } @Override public final void onTabProgressBarColorChanged(@ColorInt final int color) { ItemIterator iterator = new ItemIterator.Builder(model, getViewRecyclerOrThrowException()).create(); AbstractItem item; while ((item = iterator.next()) != null) { if (item.isInflated() && item instanceof TabItem) { adaptProgressBarColor((TabItem) item); } } } @Override public final void onAddTabButtonVisibilityChanged(final boolean visible) { } @Override public void onAddTabButtonColorChanged(@Nullable final ColorStateList colorStateList) { } @Override public final void onToolbarVisibilityChanged(final boolean visible) { } 
@Override
public final void onToolbarTitleChanged(@Nullable final CharSequence title) {
}

@Override
public final void onToolbarNavigationIconChanged(@Nullable final Drawable icon,
        @Nullable final View.OnClickListener listener) {
}

@Override
public final void onToolbarMenuInflated(@MenuRes final int resourceId,
        @Nullable final Toolbar.OnMenuItemClickListener listener) {
}

@Override
public final void onEmptyViewChanged(@Nullable final View view, final long animationDuration) {
}

// --- Per-tab callbacks: look up the inflated item (if any) and adapt just that view. ---

@Override
public final void onTitleChanged(@NonNull final Tab tab) {
    TabItem tabItem = getTabItem(tab);

    if (tabItem != null) {
        adaptTitle(tabItem);
    }
}

@Override
public final void onIconChanged(@NonNull final Tab tab) {
    TabItem tabItem = getTabItem(tab);

    if (tabItem != null) {
        adaptIcon(tabItem);
    }
}

@Override
public final void onCloseableChanged(@NonNull final Tab tab) {
    TabItem tabItem = getTabItem(tab);

    if (tabItem != null) {
        adaptCloseButtonVisibility(tabItem);
    }
}

@Override
public final void onCloseButtonIconChanged(@NonNull final Tab tab) {
    TabItem tabItem = getTabItem(tab);

    if (tabItem != null) {
        adaptCloseButtonIcon(tabItem);
    }
}

@Override
public final void onBackgroundColorChanged(@NonNull final Tab tab) {
    TabItem tabItem = getTabItem(tab);

    if (tabItem != null) {
        adaptBackgroundColor(tabItem);
    }
}

@Override
public void onContentBackgroundColorChanged(@NonNull final Tab tab) {
}

@Override
public final void onTitleTextColorChanged(@NonNull final Tab tab) {
    TabItem tabItem = getTabItem(tab);

    if (tabItem != null) {
        adaptTitleTextColor(tabItem);
    }
}

@Override
public final void onProgressBarVisibilityChanged(@NonNull final Tab tab) {
    TabItem tabItem = getTabItem(tab);

    if (tabItem != null) {
        adaptProgressBarVisibility(tabItem);
    }
}

@Override
public final void onProgressBarColorChanged(@NonNull final Tab tab) {
    TabItem tabItem = getTabItem(tab);

    if (tabItem != null) {
        adaptProgressBarColor(tabItem);
    }
}

@CallSuper
@Override
public int getViewType(@NonNull final AbstractItem item) {
    if (item instanceof TabItem) {
        return TAB_VIEW_TYPE;
    } else {
        throw new IllegalArgumentException("Unknown item type");
    }
}

/**
 * Inflates the view for a tab item, creates its view holder and wires both together via
 * view tags, so that {@link #onShowView} can recover them later.
 */
@SuppressWarnings("unchecked")
@CallSuper
@NonNull
@Override
public View onInflateView(@NonNull final LayoutInflater inflater,
        @Nullable final ViewGroup parent, @NonNull final AbstractItem item, final int viewType,
        @NonNull final Integer... params) {
    if (viewType == TAB_VIEW_TYPE) {
        TabItem tabItem = (TabItem) item;
        AbstractTabViewHolder viewHolder = onCreateTabViewHolder();
        View view = onInflateTabView(inflater, parent, viewHolder);
        viewHolder.titleContainer = view.findViewById(R.id.tab_title_container);
        viewHolder.titleTextView = view.findViewById(R.id.tab_title_text_view);
        viewHolder.iconImageView = view.findViewById(R.id.tab_icon_image_view);
        viewHolder.progressBar = view.findViewById(R.id.tab_progress_bar);
        viewHolder.closeButton = view.findViewById(R.id.close_tab_button);
        // Tag the view with its holder so recycled views can be re-associated.
        view.setTag(R.id.tag_view_holder, viewHolder);
        tabItem.setViewHolder(viewHolder);
        item.setView(view);
        view.setTag(R.id.tag_properties, item.getTag());
        return view;
    } else {
        throw new IllegalArgumentException("Unknown view type");
    }
}

/**
 * Binds a (possibly recycled) view to a tab item: registers this adapter as the tab's
 * callback and re-applies every visual attribute before delegating to the subclass.
 */
@SuppressWarnings("unchecked")
@CallSuper
@Override
public void onShowView(@NonNull final Context context, @NonNull final View view,
        @NonNull final AbstractItem item, final boolean inflated,
        @NonNull final Integer... params) {
    if (item instanceof TabItem) {
        TabItem tabItem = (TabItem) item;
        AbstractTabViewHolder viewHolder =
                (AbstractTabViewHolder) view.getTag(R.id.tag_view_holder);

        // Recycled views still carry their holder in a tag; re-attach it to the item.
        if (!tabItem.isInflated()) {
            tabItem.setView(view);
            tabItem.setViewHolder(viewHolder);
            view.setTag(R.id.tag_properties, tabItem.getTag());
        }

        Tab tab = tabItem.getTab();
        tab.addCallback(this);
        adaptTitle(tabItem);
        adaptIcon(tabItem);
        adaptProgressBarVisibility(tabItem);
        adaptCloseButtonVisibility(tabItem);
        adaptCloseButtonIcon(tabItem);
        adaptBackgroundColor(tabItem);
        adaptTitleTextColor(tabItem);
        adaptSelectionState(tabItem);
        onShowTabView(view, tabItem, params);
    } else {
        throw new IllegalArgumentException("Unknown item type");
    }
}

/**
 * Unbinds a view from its tab item: deregisters this adapter as the tab's callback and
 * clears the properties tag, so the recycled view cannot leak stale state.
 */
@CallSuper
@Override
public void onRemoveView(@NonNull final View view, @NonNull final AbstractItem item) {
    if (item instanceof TabItem) {
        TabItem tabItem = (TabItem) item;
        Tab tab = tabItem.getTab();
        tab.removeCallback(this);
        view.setTag(R.id.tag_properties, null);
    } else {
        throw new IllegalArgumentException("Unknown item type");
    }
}

}
/* * The MIT License (MIT) * * Copyright (c) 2011 Curt Binder * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package info.curtbinder.reefangel.phone; public final class Globals { public static final String PACKAGE = buildPackage(); public static final String PRE10_LOCATIONS = "PreLocations"; public static final String RELAY_BOX_NUMBER = "RelayBoxNumber"; public static final String loggingFile = "ra_log.txt"; public static final String dateConversionFile = "ra_convert_dates.txt"; public static final int logReplace = 0; public static final int logAppend = 1; public static final int errorRetryNone = 0; public static final int memoryReadOnly = -1; public static final int defaultPort = 9; // profile updating public static final int profileAlways = 0; public static final int profileOnlyAway = 1; public static final int profileOnlyHome = 2; public static final int profileHome = 0; public static final int profileAway = 1; // notification conditions public static final int condGreaterThan = 0; public static final int condGreaterThanOrEqualTo = 1; public static final int condEqual = 2; public static final int condLessThan = 3; public static final int condLessThanOrEqualTo = 4; public static final int condNotEqual = 5; // notification parameters public static final int paramT1 = 0; public static final int paramT2 = 1; public static final int paramT3 = 2; public static final int paramPH = 3; public static final int paramPHExpansion = 4; public static final int paramDaylightPWM = 5; public static final int paramActinicPWM = 6; public static final int paramSalinity = 7; public static final int paramORP = 8; public static final int paramWaterLevel = 9; public static final int paramATOHigh = 10; public static final int paramATOLow = 11; public static final int paramPWMExp0 = 12; public static final int paramPWMExp1 = 13; public static final int paramPWMExp2 = 14; public static final int paramPWMExp3 = 15; public static final int paramPWMExp4 = 16; public static final int paramPWMExp5 = 17; public static final int paramAIWhite = 18; public static final int paramAIBlue = 19; public 
static final int paramAIRoyalBlue = 20; public static final int paramVortechMode = 21; public static final int paramVortechSpeed = 22; public static final int paramVortechDuration = 23; public static final int paramRadionWhite = 24; public static final int paramRadionRoyalBlue = 25; public static final int paramRadionRed = 26; public static final int paramRadionGreen = 27; public static final int paramRadionBlue = 28; public static final int paramRadionIntensity = 29; public static final int paramIOCh0 = 30; public static final int paramIOCh1 = 31; public static final int paramIOCh2 = 32; public static final int paramIOCh3 = 33; public static final int paramIOCh4 = 34; public static final int paramIOCh5 = 35; public static final int paramCustom0 = 36; public static final int paramCustom1 = 37; public static final int paramCustom2 = 38; public static final int paramCustom3 = 39; public static final int paramCustom4 = 40; public static final int paramCustom5 = 41; public static final int paramCustom6 = 42; public static final int paramCustom7 = 43; public static final int paramWaterLevel1 = 44; public static final int paramWaterLevel2 = 45; public static final int paramWaterLevel3 = 46; public static final int paramWaterLevel4 = 47; public static final int paramHumidity = 48; public static final int paramSCPWMExp0 = 49; public static final int paramSCPWMExp1 = 50; public static final int paramSCPWMExp2 = 51; public static final int paramSCPWMExp3 = 52; public static final int paramSCPWMExp4 = 53; public static final int paramSCPWMExp5 = 54; public static final int paramSCPWMExp6 = 55; public static final int paramSCPWMExp7 = 56; public static final int paramSCPWMExp8 = 57; public static final int paramSCPWMExp9 = 58; public static final int paramSCPWMExp10 = 59; public static final int paramSCPWMExp11 = 60; public static final int paramSCPWMExp12 = 61; public static final int paramSCPWMExp13 = 62; public static final int paramSCPWMExp14 = 63; public static final int 
paramSCPWMExp15 = 64; public static final int paramDaylight2PWM = 65; public static final int paramActinic2PWM = 66; public static final int paramAlarm = 67; public static final int paramPAR = 68; // override locations public static final int OVERRIDE_DISABLE = 255; public static final int OVERRIDE_MAX_VALUE = 100; public static final int OVERRIDE_DAYLIGHT = 0; public static final int OVERRIDE_ACTINIC = 1; public static final int OVERRIDE_CHANNEL0 = 2; public static final int OVERRIDE_CHANNEL1 = 3; public static final int OVERRIDE_CHANNEL2 = 4; public static final int OVERRIDE_CHANNEL3 = 5; public static final int OVERRIDE_CHANNEL4 = 6; public static final int OVERRIDE_CHANNEL5 = 7; public static final int OVERRIDE_AI_WHITE = 8; public static final int OVERRIDE_AI_BLUE = 9; public static final int OVERRIDE_AI_ROYALBLUE = 10; public static final int OVERRIDE_RF_WHITE = 11; public static final int OVERRIDE_RF_ROYALBLUE = 12; public static final int OVERRIDE_RF_RED = 13; public static final int OVERRIDE_RF_GREEN = 14; public static final int OVERRIDE_RF_BLUE = 15; public static final int OVERRIDE_RF_INTENSITY = 16; public static final int OVERRIDE_DAYLIGHT2 = 17; public static final int OVERRIDE_ACTINIC2 = 18; public static final int OVERRIDE_16CH_CHANNEL0 = 19; public static final int OVERRIDE_16CH_CHANNEL1 = 20; public static final int OVERRIDE_16CH_CHANNEL2 = 21; public static final int OVERRIDE_16CH_CHANNEL3 = 22; public static final int OVERRIDE_16CH_CHANNEL4 = 23; public static final int OVERRIDE_16CH_CHANNEL5 = 24; public static final int OVERRIDE_16CH_CHANNEL6 = 25; public static final int OVERRIDE_16CH_CHANNEL7 = 26; public static final int OVERRIDE_16CH_CHANNEL8 = 27; public static final int OVERRIDE_16CH_CHANNEL9 = 28; public static final int OVERRIDE_16CH_CHANNEL10 = 29; public static final int OVERRIDE_16CH_CHANNEL11 = 30; public static final int OVERRIDE_16CH_CHANNEL12 = 31; public static final int OVERRIDE_16CH_CHANNEL13 = 32; public static final int 
OVERRIDE_16CH_CHANNEL14 = 33; public static final int OVERRIDE_16CH_CHANNEL15 = 34; //public static final int OVERRIDE_CHANNELS = 35; // calibrate locations public static final int CALIBRATE_PH = 0; public static final int CALIBRATE_SALINITY = 1; public static final int CALIBRATE_ORP = 2; public static final int CALIBRATE_PHE = 3; public static final int CALIBRATE_WATERLEVEL = 4; // Controller Indices public static final int T1_INDEX = 0; public static final int T2_INDEX = 1; public static final int T3_INDEX = 2; public static final int PH_INDEX = 3; public static final int DP_INDEX = 4; public static final int AP_INDEX = 5; public static final int DP2_INDEX = 6; public static final int AP2_INDEX = 7; public static final int ATOLO_INDEX = 8; public static final int ATOHI_INDEX = 9; public static final int SALINITY_INDEX = 10; public static final int ORP_INDEX = 11; public static final int PHE_INDEX = 12; public static final int WL_INDEX = 13; public static final int WL1_INDEX = 14; public static final int WL2_INDEX = 15; public static final int WL3_INDEX = 16; public static final int WL4_INDEX = 17; public static final int HUMIDITY_INDEX = 18; public static final int PAR_INDEX = 19; // variable ranges public static final int BYTE_MIN = 0; public static final int BYTE_MAX = 255; public static final int INT_MIN = 0; public static final int INT_MAX = 32767; private static String buildPackage() { String p = Globals.class.getPackage().getName(); if (BuildConfig.DEBUG) { p += ".debug"; } return p; } }
/** * Generated with Acceleo */ package org.wso2.developerstudio.eclipse.gmf.esb.parts.impl; import java.util.ArrayList; // Start of user code for imports import org.eclipse.emf.common.util.Enumerator; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.edit.ui.provider.AdapterFactoryLabelProvider; import org.eclipse.emf.eef.runtime.EEFRuntimePlugin; import org.eclipse.emf.eef.runtime.api.component.IPropertiesEditionComponent; import org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent; import org.eclipse.emf.eef.runtime.api.parts.ISWTPropertiesEditionPart; import org.eclipse.emf.eef.runtime.impl.notify.PropertiesEditionEvent; import org.eclipse.emf.eef.runtime.impl.parts.CompositePropertiesEditionPart; import org.eclipse.emf.eef.runtime.ui.parts.PartComposer; import org.eclipse.emf.eef.runtime.ui.parts.sequence.BindingCompositionSequence; import org.eclipse.emf.eef.runtime.ui.parts.sequence.CompositionSequence; import org.eclipse.emf.eef.runtime.ui.parts.sequence.CompositionStep; import org.eclipse.emf.eef.runtime.ui.utils.EditingUtils; import org.eclipse.emf.eef.runtime.ui.widgets.ButtonsModeEnum; import org.eclipse.emf.eef.runtime.ui.widgets.EMFComboViewer; import org.eclipse.emf.eef.runtime.ui.widgets.EObjectFlatComboViewer; import org.eclipse.emf.eef.runtime.ui.widgets.SWTUtils; import org.eclipse.emf.eef.runtime.ui.widgets.eobjflatcombo.EObjectFlatComboSettings; import org.eclipse.jface.viewers.ArrayContentProvider; import org.eclipse.jface.viewers.ISelectionChangedListener; import org.eclipse.jface.viewers.SelectionChangedEvent; import org.eclipse.jface.viewers.StructuredSelection; import org.eclipse.jface.viewers.ViewerFilter; import org.eclipse.swt.SWT; import org.eclipse.swt.events.FocusAdapter; import org.eclipse.swt.events.FocusEvent; import org.eclipse.swt.events.KeyAdapter; import org.eclipse.swt.events.KeyEvent; import org.eclipse.swt.events.KeyListener; import org.eclipse.swt.events.MouseEvent; import 
org.eclipse.swt.events.MouseListener;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.forms.widgets.FormToolkit;
import org.wso2.developerstudio.eclipse.gmf.esb.RegistryKeyProperty;
import org.wso2.developerstudio.eclipse.gmf.esb.impl.EsbFactoryImpl;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.CloneTargetPropertiesEditionPart;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.EsbViewsRepository;
import org.wso2.developerstudio.eclipse.gmf.esb.presentation.EEFPropertyViewUtil;
import org.wso2.developerstudio.eclipse.gmf.esb.presentation.EEFRegistryKeyPropertyEditorDialog;
import org.wso2.developerstudio.eclipse.gmf.esb.providers.EsbMessages;
import org.wso2.developerstudio.esb.form.editors.article.providers.NamedEntityDescriptor;
// End of user code

/**
 * EEF (SWT) properties edition part for the Clone mediator's Target element. Mostly
 * generated by Acceleo; hand-written additions live between the
 * "Start of user code" / "End of user code" markers and must stay inside them so
 * regeneration preserves them.
 */
public class CloneTargetPropertiesEditionPartImpl extends CompositePropertiesEditionPart
        implements ISWTPropertiesEditionPart, CloneTargetPropertiesEditionPart {

    // Widgets for the generated properties (one per edited feature).
    protected EMFComboViewer sequenceType;
    protected EObjectFlatComboViewer sequence;
    protected EMFComboViewer endpointType;
    protected Text soapAction;
    protected Text toAddress;

    // Start of user code for endpointRegistryKey widgets declarations
    protected RegistryKeyProperty endpointKey;
    protected Text endpointKeyText;
    // End of user code

    // Start of user code for sequenceRegistryKey widgets declarations
    protected RegistryKeyProperty sequenceKey;
    protected Text sequenceKeyText;
    // Per-feature control groups (label, widget, help button) used to toggle
    // visibility of a whole row at once.
    protected Control[] sequenceTypeElements;
    protected Control[] sequenceElements;
    protected Control[] sequenceKeyElements;
    protected Control[] endpointTypeElements;
    protected Control[] endpointKeyElements;
    protected Control[] soapActionElements;
    protected Control[] toAddressElements;
    protected Group propertiesGroup;
    // End of user code

    /**
     * Default constructor
     *
     * @param editionComponent the {@link IPropertiesEditionComponent} that manages this part
     */
    public CloneTargetPropertiesEditionPartImpl(IPropertiesEditionComponent editionComponent) {
        super(editionComponent);
    }

    /**
     * {@inheritDoc}
     *
     * @see org.eclipse.emf.eef.runtime.api.parts.ISWTPropertiesEditionPart#
     *      createFigure(org.eclipse.swt.widgets.Composite)
     */
    public Composite createFigure(final Composite parent) {
        view = new Composite(parent, SWT.NONE);
        GridLayout layout = new GridLayout();
        layout.numColumns = 3;
        view.setLayout(layout);
        createControls(view);
        return view;
    }

    /**
     * {@inheritDoc}
     *
     * @see org.eclipse.emf.eef.runtime.api.parts.ISWTPropertiesEditionPart#
     *      createControls(org.eclipse.swt.widgets.Composite)
     */
    public void createControls(Composite view) {
        // Declare the composition order of the edited features, then let the
        // PartComposer call back into the create*() factory methods below.
        CompositionSequence cloneTargetStep = new BindingCompositionSequence(propertiesEditionComponent);
        CompositionStep propertiesStep = cloneTargetStep.addStep(EsbViewsRepository.CloneTarget.Properties.class);
        propertiesStep.addStep(EsbViewsRepository.CloneTarget.Properties.sequenceType);
        propertiesStep.addStep(EsbViewsRepository.CloneTarget.Properties.sequence);
        propertiesStep.addStep(EsbViewsRepository.CloneTarget.Properties.endpointType);
        propertiesStep.addStep(EsbViewsRepository.CloneTarget.Properties.soapAction);
        propertiesStep.addStep(EsbViewsRepository.CloneTarget.Properties.toAddress);
        propertiesStep.addStep(EsbViewsRepository.CloneTarget.Properties.endpointRegistryKey);
        propertiesStep.addStep(EsbViewsRepository.CloneTarget.Properties.sequenceRegistryKey);

        composer = new PartComposer(cloneTargetStep) {

            @Override
            public Composite addToPart(Composite parent, Object key) {
                if (key == EsbViewsRepository.CloneTarget.Properties.class) {
                    return createPropertiesGroup(parent);
                }
                if (key == EsbViewsRepository.CloneTarget.Properties.sequenceType) {
                    return createSequenceTypeEMFComboViewer(parent);
                }
                if (key == EsbViewsRepository.CloneTarget.Properties.sequence) {
                    return createSequenceFlatComboViewer(parent);
                }
                if (key == EsbViewsRepository.CloneTarget.Properties.endpointType) {
                    return createEndpointTypeEMFComboViewer(parent);
                }
                if (key == EsbViewsRepository.CloneTarget.Properties.soapAction) {
                    return createSoapActionText(parent);
                }
                if (key == EsbViewsRepository.CloneTarget.Properties.toAddress) {
                    return createToAddressText(parent);
                }
                // NOTE(review): the two user-code region labels below appear swapped relative
                // to the keys they handle; the key->widget dispatch itself is consistent.
                // Start of user code for endpointRegistryKey addToPart creation
                if (key == EsbViewsRepository.CloneTarget.Properties.sequenceRegistryKey) {
                    return createSequenceKeyWidget(parent);
                }
                // End of user code
                // Start of user code for sequenceRegistryKey addToPart creation
                if (key == EsbViewsRepository.CloneTarget.Properties.endpointRegistryKey) {
                    return createEndpointKeyWidget(parent);
                }
                // End of user code
                return parent;
            }
        };
        composer.compose(view);
    }

    /**
     * Creates the enclosing "Properties" group spanning all three grid columns.
     *
     * @generated NOT
     */
    protected Composite createPropertiesGroup(Composite parent) {
        propertiesGroup = new Group(parent, SWT.NONE);
        propertiesGroup.setText(EsbMessages.CloneTargetPropertiesEditionPart_PropertiesGroupLabel);
        GridData propertiesGroupData = new GridData(GridData.FILL_HORIZONTAL);
        propertiesGroupData.horizontalSpan = 3;
        propertiesGroup.setLayoutData(propertiesGroupData);
        GridLayout propertiesGroupLayout = new GridLayout();
        propertiesGroupLayout.numColumns = 3;
        propertiesGroup.setLayout(propertiesGroupLayout);
        return propertiesGroup;
    }

    /**
     * Creates the combo choosing the sequence type and wires change notification
     * plus validation.
     *
     * @generated NOT
     */
    protected Composite createSequenceTypeEMFComboViewer(Composite parent) {
        Control sequenceTypeLabel = createDescription(parent, EsbViewsRepository.CloneTarget.Properties.sequenceType, EsbMessages.CloneTargetPropertiesEditionPart_SequenceTypeLabel);
        sequenceType = new EMFComboViewer(parent);
        sequenceType.setContentProvider(new ArrayContentProvider());
        sequenceType.setLabelProvider(new AdapterFactoryLabelProvider(EEFRuntimePlugin.getDefault().getAdapterFactory()));
        GridData sequenceTypeData = new GridData(GridData.FILL_HORIZONTAL);
        sequenceType.getCombo().setLayoutData(sequenceTypeData);
        sequenceType.getCombo().addListener(SWT.MouseVerticalWheel, new Listener() {

            @Override
            public void handleEvent(Event arg0) {
                // Swallow mouse-wheel events so scrolling the form does not change the combo.
                arg0.doit = false;
            }
        });
        sequenceType.addSelectionChangedListener(new ISelectionChangedListener() {

            /**
             * {@inheritDoc}
             *
             * @see org.eclipse.jface.viewers.ISelectionChangedListener#selectionChanged(org.eclipse.jface.viewers.SelectionChangedEvent)
             */
            public void selectionChanged(SelectionChangedEvent event) {
                if (propertiesEditionComponent != null)
                    propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(CloneTargetPropertiesEditionPartImpl.this, EsbViewsRepository.CloneTarget.Properties.sequenceType, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, getSequenceType()));
            }
        });
        sequenceType.setID(EsbViewsRepository.CloneTarget.Properties.sequenceType);
        Control sequenceTypeHelp = SWTUtils.createHelpButton(parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.CloneTarget.Properties.sequenceType, EsbViewsRepository.SWT_KIND), null); //$NON-NLS-1$
        // Start of user code for createSequenceTypeEMFComboViewer
        sequenceTypeElements = new Control[] { sequenceTypeLabel, sequenceType.getCombo(), sequenceTypeHelp };
        sequenceType.addSelectionChangedListener(new ISelectionChangedListener() {

            /**
             * {@inheritDoc}
             *
             * @see org.eclipse.jface.viewers.ISelectionChangedListener#selectionChanged(org.eclipse.jface.viewers.SelectionChangedEvent)
             */
            public void selectionChanged(SelectionChangedEvent event) {
                validate();
            }
        });
        // End of user code
        return parent;
    }

    /**
     * Creates the flat combo viewer used to pick the inline sequence reference.
     *
     * @param parent the parent composite
     * @generated NOT
     */
    protected Composite createSequenceFlatComboViewer(Composite parent) {
        Control sequenceLabel = createDescription(parent, EsbViewsRepository.CloneTarget.Properties.sequence, EsbMessages.CloneTargetPropertiesEditionPart_SequenceLabel);
        sequence = new EObjectFlatComboViewer(parent, !propertiesEditionComponent.isRequired(EsbViewsRepository.CloneTarget.Properties.sequence, EsbViewsRepository.SWT_KIND));
        sequence.setLabelProvider(new AdapterFactoryLabelProvider(adapterFactory));
        sequence.addSelectionChangedListener(new ISelectionChangedListener() {

            public void selectionChanged(SelectionChangedEvent event) {
                propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(CloneTargetPropertiesEditionPartImpl.this, EsbViewsRepository.CloneTarget.Properties.sequence, PropertiesEditionEvent.CHANGE, PropertiesEditionEvent.SET, null, getSequence()));
            }
        });
        GridData sequenceData = new GridData(GridData.FILL_HORIZONTAL);
        sequence.setLayoutData(sequenceData);
        sequence.setID(EsbViewsRepository.CloneTarget.Properties.sequence);
        Control sequenceHelp = SWTUtils.createHelpButton(parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.CloneTarget.Properties.sequence, EsbViewsRepository.SWT_KIND), null); //$NON-NLS-1$
        // Start of user code for createSequenceFlatComboViewer
        sequenceElements = new Control[] {sequenceLabel, sequence, sequenceHelp};
        // End of user code
        return parent;
    }

    /**
     * Creates the combo choosing the endpoint type and wires change notification
     * plus validation.
     *
     * @generated NOT
     */
    protected Composite createEndpointTypeEMFComboViewer(Composite parent) {
        Control endpointTypeLabel = createDescription(parent, EsbViewsRepository.CloneTarget.Properties.endpointType, EsbMessages.CloneTargetPropertiesEditionPart_EndpointTypeLabel);
        endpointType = new EMFComboViewer(parent);
        endpointType.setContentProvider(new ArrayContentProvider());
        endpointType.setLabelProvider(new AdapterFactoryLabelProvider(EEFRuntimePlugin.getDefault().getAdapterFactory()));
        GridData endpointTypeData = new GridData(GridData.FILL_HORIZONTAL);
        endpointType.getCombo().setLayoutData(endpointTypeData);
        endpointType.getCombo().addListener(SWT.MouseVerticalWheel, new Listener() {

            @Override
            public void handleEvent(Event arg0) {
                // Swallow mouse-wheel events so scrolling the form does not change the combo.
                arg0.doit = false;
            }
        });
        endpointType.addSelectionChangedListener(new ISelectionChangedListener() {

            /**
             * {@inheritDoc}
             *
             * @see org.eclipse.jface.viewers.ISelectionChangedListener#selectionChanged(org.eclipse.jface.viewers.SelectionChangedEvent)
             */
            public void selectionChanged(SelectionChangedEvent event) {
                if (propertiesEditionComponent != null)
                    propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(CloneTargetPropertiesEditionPartImpl.this, EsbViewsRepository.CloneTarget.Properties.endpointType, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, getEndpointType()));
            }
        });
        endpointType.setID(EsbViewsRepository.CloneTarget.Properties.endpointType);
        Control endpointTypeHelp = SWTUtils.createHelpButton(parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.CloneTarget.Properties.endpointType, EsbViewsRepository.SWT_KIND), null); //$NON-NLS-1$
        // Start of user code for createEndpointTypeEMFComboViewer
        endpointTypeElements = new Control[] {endpointTypeLabel, endpointType.getCombo(), endpointTypeHelp};
        endpointType.addSelectionChangedListener(new ISelectionChangedListener() {

            /**
             * {@inheritDoc}
             *
             * @see org.eclipse.jface.viewers.ISelectionChangedListener#selectionChanged(org.eclipse.jface.viewers.SelectionChangedEvent)
             */
            public void selectionChanged(SelectionChangedEvent event) {
                validate();
            }
        });
        // End of user code
        return parent;
    }

    /**
     * Creates the SOAP action text field; commits the value on focus loss and on
     * (non-reserved) key release.
     *
     * @generated NOT
     */
    protected Composite createSoapActionText(Composite parent) {
        Control soapActionLabel = createDescription(parent, EsbViewsRepository.CloneTarget.Properties.soapAction, EsbMessages.CloneTargetPropertiesEditionPart_SoapActionLabel);
        soapAction = SWTUtils.createScrollableText(parent, SWT.BORDER);
        GridData soapActionData = new GridData(GridData.FILL_HORIZONTAL);
        soapAction.setLayoutData(soapActionData);
        soapAction.addFocusListener(new FocusAdapter() {

            /**
             * {@inheritDoc}
             *
             * @see org.eclipse.swt.events.FocusAdapter#focusLost(org.eclipse.swt.events.FocusEvent)
             */
            @Override
            @SuppressWarnings("synthetic-access")
            public void focusLost(FocusEvent e) {
                if (propertiesEditionComponent != null)
                    propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(CloneTargetPropertiesEditionPartImpl.this, EsbViewsRepository.CloneTarget.Properties.soapAction, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, soapAction.getText()));
            }
        });
        soapAction.addKeyListener(new KeyAdapter() {

            /**
             * {@inheritDoc}
             *
             * @see org.eclipse.swt.events.KeyAdapter#keyPressed(org.eclipse.swt.events.KeyEvent)
             */
            @Override
            @SuppressWarnings("synthetic-access")
            public void keyPressed(KeyEvent e) {
            }

            @Override
            public void keyReleased(KeyEvent e) {
                // Ignore shortcut combinations (e.g. copy/paste) — only content edits commit.
                if (!EEFPropertyViewUtil.isReservedKeyCombination(e)) {
                    if (propertiesEditionComponent != null)
                        propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(CloneTargetPropertiesEditionPartImpl.this, EsbViewsRepository.CloneTarget.Properties.soapAction, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, soapAction.getText()));
                }
            }
        });
        EditingUtils.setID(soapAction, EsbViewsRepository.CloneTarget.Properties.soapAction);
        EditingUtils.setEEFtype(soapAction, "eef::Text"); //$NON-NLS-1$
        Control soapActionHelp = SWTUtils.createHelpButton(parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.CloneTarget.Properties.soapAction, EsbViewsRepository.SWT_KIND), null); //$NON-NLS-1$
        // Start of user code for createSoapActionText
        soapActionElements = new Control[] {soapActionLabel, soapAction, soapActionHelp};
        // End of user code
        return parent;
    }

    /**
     * Creates the "to address" text field; commits the value on focus loss and on
     * (non-reserved) key release.
     *
     * @generated NOT
     */
    protected Composite createToAddressText(Composite parent) {
        Control toAddressLabel = createDescription(parent, EsbViewsRepository.CloneTarget.Properties.toAddress, EsbMessages.CloneTargetPropertiesEditionPart_ToAddressLabel);
        toAddress = SWTUtils.createScrollableText(parent, SWT.BORDER);
        GridData toAddressData = new GridData(GridData.FILL_HORIZONTAL);
        toAddress.setLayoutData(toAddressData);
        toAddress.addFocusListener(new FocusAdapter() {

            /**
             * {@inheritDoc}
             *
             * @see org.eclipse.swt.events.FocusAdapter#focusLost(org.eclipse.swt.events.FocusEvent)
             */
            @Override
            @SuppressWarnings("synthetic-access")
            public void focusLost(FocusEvent e) {
                if (propertiesEditionComponent != null)
                    propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(CloneTargetPropertiesEditionPartImpl.this, EsbViewsRepository.CloneTarget.Properties.toAddress, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, toAddress.getText()));
            }
        });
        toAddress.addKeyListener(new KeyAdapter() {

            /**
             * {@inheritDoc}
             *
             * @see org.eclipse.swt.events.KeyAdapter#keyPressed(org.eclipse.swt.events.KeyEvent)
             */
            @Override
            @SuppressWarnings("synthetic-access")
            public void keyPressed(KeyEvent e) {
            }

            @Override
            public void keyReleased(KeyEvent e) {
                // Ignore shortcut combinations (e.g. copy/paste) — only content edits commit.
                if (!EEFPropertyViewUtil.isReservedKeyCombination(e)) {
                    if (propertiesEditionComponent != null)
                        propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(CloneTargetPropertiesEditionPartImpl.this, EsbViewsRepository.CloneTarget.Properties.toAddress, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, toAddress.getText()));
                }
            }
        });
        EditingUtils.setID(toAddress, EsbViewsRepository.CloneTarget.Properties.toAddress);
        EditingUtils.setEEFtype(toAddress, "eef::Text"); //$NON-NLS-1$
        Control toAddressHelp = SWTUtils.createHelpButton(parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.CloneTarget.Properties.toAddress, EsbViewsRepository.SWT_KIND), null); //$NON-NLS-1$
        // Start of user code for createToAddressText
        toAddressElements = new Control[] {toAddressLabel, toAddress, toAddressHelp};
        // End of user code
        return parent;
    }

    /**
     * {@inheritDoc}
     *
     * @see org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionListener#firePropertiesChanged(org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent)
     */
    public void firePropertiesChanged(IPropertiesEditionEvent event) {
        // Start of user code for tab synchronization
        // End of user code
    }

    /**
     * {@inheritDoc}
     *
     * @see 
org.wso2.developerstudio.eclipse.gmf.esb.parts.CloneTargetPropertiesEditionPart#getSequenceType() * */ public Enumerator getSequenceType() { Enumerator selection = (Enumerator) ((StructuredSelection) sequenceType.getSelection()).getFirstElement(); return selection; } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloneTargetPropertiesEditionPart#initSequenceType(Object input, Enumerator current) */ public void initSequenceType(Object input, Enumerator current) { sequenceType.setInput(input); sequenceType.modelUpdating(new StructuredSelection(current)); boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.CloneTarget.Properties.sequenceType); if (eefElementEditorReadOnlyState && sequenceType.isEnabled()) { sequenceType.setEnabled(false); sequenceType.setToolTipText(EsbMessages.CloneTarget_ReadOnly); } else if (!eefElementEditorReadOnlyState && !sequenceType.isEnabled()) { sequenceType.setEnabled(true); } } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloneTargetPropertiesEditionPart#setSequenceType(Enumerator newValue) * */ public void setSequenceType(Enumerator newValue) { sequenceType.modelUpdating(new StructuredSelection(newValue)); boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.CloneTarget.Properties.sequenceType); if (eefElementEditorReadOnlyState && sequenceType.isEnabled()) { sequenceType.setEnabled(false); sequenceType.setToolTipText(EsbMessages.CloneTarget_ReadOnly); } else if (!eefElementEditorReadOnlyState && !sequenceType.isEnabled()) { sequenceType.setEnabled(true); } } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloneTargetPropertiesEditionPart#getSequence() * */ public EObject getSequence() { if (sequence.getSelection() instanceof StructuredSelection) { Object firstElement = ((StructuredSelection) sequence.getSelection()).getFirstElement(); if (firstElement instanceof EObject) return (EObject) firstElement; } return null; 
} /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloneTargetPropertiesEditionPart#initSequence(EObjectFlatComboSettings) */ public void initSequence(EObjectFlatComboSettings settings) { sequence.setInput(settings); if (current != null) { sequence.setSelection(new StructuredSelection(settings.getValue())); } boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.CloneTarget.Properties.sequence); if (eefElementEditorReadOnlyState && sequence.isEnabled()) { sequence.setEnabled(false); sequence.setToolTipText(EsbMessages.CloneTarget_ReadOnly); } else if (!eefElementEditorReadOnlyState && !sequence.isEnabled()) { sequence.setEnabled(true); } } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloneTargetPropertiesEditionPart#setSequence(EObject newValue) * */ public void setSequence(EObject newValue) { if (newValue != null) { sequence.setSelection(new StructuredSelection(newValue)); } else { sequence.setSelection(new StructuredSelection()); //$NON-NLS-1$ } boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.CloneTarget.Properties.sequence); if (eefElementEditorReadOnlyState && sequence.isEnabled()) { sequence.setEnabled(false); sequence.setToolTipText(EsbMessages.CloneTarget_ReadOnly); } else if (!eefElementEditorReadOnlyState && !sequence.isEnabled()) { sequence.setEnabled(true); } } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloneTargetPropertiesEditionPart#setSequenceButtonMode(ButtonsModeEnum newValue) */ public void setSequenceButtonMode(ButtonsModeEnum newValue) { sequence.setButtonMode(newValue); } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloneTargetPropertiesEditionPart#addFilterSequence(ViewerFilter filter) * */ public void addFilterToSequence(ViewerFilter filter) { sequence.addFilter(filter); } /** * {@inheritDoc} * * @see 
org.wso2.developerstudio.eclipse.gmf.esb.parts.CloneTargetPropertiesEditionPart#addBusinessFilterSequence(ViewerFilter filter) * */ public void addBusinessFilterToSequence(ViewerFilter filter) { sequence.addBusinessRuleFilter(filter); } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloneTargetPropertiesEditionPart#getEndpointType() * */ public Enumerator getEndpointType() { Enumerator selection = (Enumerator) ((StructuredSelection) endpointType.getSelection()).getFirstElement(); return selection; } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloneTargetPropertiesEditionPart#initEndpointType(Object input, Enumerator current) */ public void initEndpointType(Object input, Enumerator current) { endpointType.setInput(input); endpointType.modelUpdating(new StructuredSelection(current)); boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.CloneTarget.Properties.endpointType); if (eefElementEditorReadOnlyState && endpointType.isEnabled()) { endpointType.setEnabled(false); endpointType.setToolTipText(EsbMessages.CloneTarget_ReadOnly); } else if (!eefElementEditorReadOnlyState && !endpointType.isEnabled()) { endpointType.setEnabled(true); } } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloneTargetPropertiesEditionPart#setEndpointType(Enumerator newValue) * */ public void setEndpointType(Enumerator newValue) { endpointType.modelUpdating(new StructuredSelection(newValue)); boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.CloneTarget.Properties.endpointType); if (eefElementEditorReadOnlyState && endpointType.isEnabled()) { endpointType.setEnabled(false); endpointType.setToolTipText(EsbMessages.CloneTarget_ReadOnly); } else if (!eefElementEditorReadOnlyState && !endpointType.isEnabled()) { endpointType.setEnabled(true); } } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloneTargetPropertiesEditionPart#getSoapAction() * 
*/ public String getSoapAction() { return soapAction.getText(); } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloneTargetPropertiesEditionPart#setSoapAction(String newValue) * */ public void setSoapAction(String newValue) { if (newValue != null) { soapAction.setText(newValue); } else { soapAction.setText(""); //$NON-NLS-1$ } boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.CloneTarget.Properties.soapAction); if (eefElementEditorReadOnlyState && soapAction.isEnabled()) { soapAction.setEnabled(false); soapAction.setToolTipText(EsbMessages.CloneTarget_ReadOnly); } else if (!eefElementEditorReadOnlyState && !soapAction.isEnabled()) { soapAction.setEnabled(true); } } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloneTargetPropertiesEditionPart#getToAddress() * */ public String getToAddress() { return toAddress.getText(); } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.CloneTargetPropertiesEditionPart#setToAddress(String newValue) * */ public void setToAddress(String newValue) { if (newValue != null) { toAddress.setText(newValue); } else { toAddress.setText(""); //$NON-NLS-1$ } boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.CloneTarget.Properties.toAddress); if (eefElementEditorReadOnlyState && toAddress.isEnabled()) { toAddress.setEnabled(false); toAddress.setToolTipText(EsbMessages.CloneTarget_ReadOnly); } else if (!eefElementEditorReadOnlyState && !toAddress.isEnabled()) { toAddress.setEnabled(true); } } // Start of user code for endpointRegistryKey specific getters and setters implementation @Override public RegistryKeyProperty getEndpointKey() { return endpointKey; } @Override public void setEndpointKey(RegistryKeyProperty registryKeyProperty) { if (registryKeyProperty != null) { endpointKeyText.setText(registryKeyProperty.getKeyValue()); endpointKey = registryKeyProperty; } } // End of user code // Start of user code for 
sequenceRegistryKey specific getters and setters implementation @Override public RegistryKeyProperty getSequenceKey() { return sequenceKey; } @Override public void setSequenceKey(RegistryKeyProperty registryKeyProperty) { if (registryKeyProperty != null) { sequenceKeyText.setText(registryKeyProperty.getKeyValue()); sequenceKey = registryKeyProperty; } } // End of user code /** * {@inheritDoc} * * @see org.eclipse.emf.eef.runtime.api.parts.IPropertiesEditionPart#getTitle() * */ public String getTitle() { return EsbMessages.CloneTarget_Part_Title; } // Start of user code additional methods protected Composite createEndpointKeyWidget(Composite parent) { Control endpointKeyLabel = createDescription(parent, EsbViewsRepository.CloneTarget.Properties.endpointRegistryKey, EsbMessages.CloneTargetPropertiesEditionPart_EndpointRegistryKeyLabel); if (endpointKey == null) { endpointKey = EsbFactoryImpl.eINSTANCE.createRegistryKeyProperty(); } String initValueExpression = endpointKey.getKeyValue().isEmpty() ? 
"" : endpointKey.getKeyValue(); endpointKeyText = SWTUtils.createScrollableText(parent, SWT.BORDER | SWT.READ_ONLY); endpointKeyText.setText(initValueExpression); endpointKeyText.setData(FormToolkit.KEY_DRAW_BORDER, FormToolkit.TEXT_BORDER); GridData valueData = new GridData(GridData.FILL_HORIZONTAL); endpointKeyText.setLayoutData(valueData); endpointKeyText.addMouseListener(new MouseListener(){ @Override public void mouseDoubleClick(MouseEvent e) { // TODO Auto-generated method stub } @Override public void mouseDown(MouseEvent e) { EEFRegistryKeyPropertyEditorDialog dialog = new EEFRegistryKeyPropertyEditorDialog(view.getShell(), SWT.NULL, endpointKey, new ArrayList<NamedEntityDescriptor>()); dialog.open(); endpointKeyText.setText(endpointKey.getKeyValue()); propertiesEditionComponent .firePropertiesChanged(new PropertiesEditionEvent(CloneTargetPropertiesEditionPartImpl.this, EsbViewsRepository.CloneTarget.Properties.endpointRegistryKey, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, getEndpointKey())); } @Override public void mouseUp(MouseEvent e) { // TODO Auto-generated method stub } }); endpointKeyText.addKeyListener(new KeyListener() { @Override public void keyReleased(KeyEvent e) { if (!EEFPropertyViewUtil.isReservedKeyCombination(e)) { EEFRegistryKeyPropertyEditorDialog dialog = new EEFRegistryKeyPropertyEditorDialog(view.getShell(), SWT.NULL, endpointKey, new ArrayList<NamedEntityDescriptor>()); dialog.open(); endpointKeyText.setText(endpointKey.getKeyValue()); propertiesEditionComponent .firePropertiesChanged(new PropertiesEditionEvent(CloneTargetPropertiesEditionPartImpl.this, EsbViewsRepository.CloneTarget.Properties.endpointRegistryKey, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, getEndpointKey())); } } @Override public void keyPressed(KeyEvent e) { // TODO Auto-generated method stub } }); EditingUtils.setID(endpointKeyText, EsbViewsRepository.AggregateMediator.OnComplete.sequenceKey); 
EditingUtils.setEEFtype(endpointKeyText, "eef::Text"); Control endpointKeyHelp = SWTUtils.createHelpButton(parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.CloneTarget.Properties.endpointRegistryKey, EsbViewsRepository.FORM_KIND), null); // $NON-NLS-1$ endpointKeyElements = new Control[] { endpointKeyLabel, endpointKeyText, endpointKeyHelp }; return parent; } protected Composite createSequenceKeyWidget(Composite parent) { Control sequenceKeyLabel = createDescription(parent, EsbViewsRepository.CloneTarget.Properties.sequenceRegistryKey, EsbMessages.CloneTargetPropertiesEditionPart_SequenceRegistryKeyLabel); if (sequenceKey == null) { sequenceKey = EsbFactoryImpl.eINSTANCE.createRegistryKeyProperty(); } String initValueExpression = sequenceKey.getKeyValue().isEmpty() ? "" : sequenceKey.getKeyValue(); sequenceKeyText = SWTUtils.createScrollableText(parent, SWT.BORDER | SWT.READ_ONLY); sequenceKeyText.setText(initValueExpression); sequenceKeyText.setData(FormToolkit.KEY_DRAW_BORDER, FormToolkit.TEXT_BORDER); GridData valueData = new GridData(GridData.FILL_HORIZONTAL); sequenceKeyText.setLayoutData(valueData); sequenceKeyText.addMouseListener(new MouseListener(){ @Override public void mouseDoubleClick(MouseEvent e) { // TODO Auto-generated method stub } @Override public void mouseDown(MouseEvent e) { EEFRegistryKeyPropertyEditorDialog dialog = new EEFRegistryKeyPropertyEditorDialog(view.getShell(), SWT.NULL, sequenceKey, new ArrayList<NamedEntityDescriptor>()); dialog.open(); sequenceKeyText.setText(sequenceKey.getKeyValue()); propertiesEditionComponent .firePropertiesChanged(new PropertiesEditionEvent(CloneTargetPropertiesEditionPartImpl.this, EsbViewsRepository.CloneTarget.Properties.sequenceRegistryKey, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, getSequenceKey())); } @Override public void mouseUp(MouseEvent e) { // TODO Auto-generated method stub } }); sequenceKeyText.addKeyListener(new KeyListener() { @Override public void 
keyReleased(KeyEvent e) { if (!EEFPropertyViewUtil.isReservedKeyCombination(e)) { EEFRegistryKeyPropertyEditorDialog dialog = new EEFRegistryKeyPropertyEditorDialog(view.getShell(), SWT.NULL, sequenceKey, new ArrayList<NamedEntityDescriptor>()); dialog.open(); sequenceKeyText.setText(sequenceKey.getKeyValue()); propertiesEditionComponent .firePropertiesChanged(new PropertiesEditionEvent(CloneTargetPropertiesEditionPartImpl.this, EsbViewsRepository.CloneTarget.Properties.sequenceRegistryKey, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, getSequenceKey())); } } @Override public void keyPressed(KeyEvent e) { // TODO Auto-generated method stub } }); EditingUtils.setID(sequenceKeyText, EsbViewsRepository.AggregateMediator.OnComplete.sequenceKey); EditingUtils.setEEFtype(sequenceKeyText, "eef::Text"); Control sequenceKeyHelp = SWTUtils.createHelpButton(parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.CloneTarget.Properties.sequenceRegistryKey, EsbViewsRepository.FORM_KIND), null); // $NON-NLS-1$ sequenceKeyElements = new Control[] { sequenceKeyLabel, sequenceKeyText, sequenceKeyHelp }; return parent; } @Override public void refresh() { super.refresh(); validate(); } public void validate() { EEFPropertyViewUtil eu = new EEFPropertyViewUtil(view); eu.clearElements(new Composite[] {propertiesGroup}); eu.showEntry(sequenceTypeElements, false); if(getSequenceType().getName().equals("REGISTRY_REFERENCE")) { eu.showEntry(sequenceKeyElements, false); } eu.showEntry(endpointTypeElements, false); if(getEndpointType().getName().equals("REGISTRY_REFERENCE")) { eu.showEntry(endpointKeyElements, false); } eu.showEntry(soapActionElements, false); eu.showEntry(toAddressElements, true); } // End of user code }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.pulsar.client.impl;

import java.time.Clock;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import org.apache.commons.lang3.StringUtils;
import org.apache.pulsar.client.api.Authentication;
import org.apache.pulsar.client.api.AuthenticationFactory;
import org.apache.pulsar.client.api.ClientBuilder;
import org.apache.pulsar.client.api.PulsarClient;
import org.apache.pulsar.client.api.PulsarClientException;
import org.apache.pulsar.client.api.PulsarClientException.UnsupportedAuthenticationException;
import org.apache.pulsar.client.api.ServiceUrlProvider;
import org.apache.pulsar.client.impl.conf.ClientConfigurationData;
import org.apache.pulsar.client.impl.conf.ConfigurationDataUtils;

/**
 * Default {@link ClientBuilder} implementation: a fluent builder that accumulates
 * settings into a {@link ClientConfigurationData} instance and materializes a
 * {@link PulsarClientImpl} on {@link #build()}.
 *
 * <p>Not thread-safe: each builder instance is intended for single-threaded use.
 */
public class ClientBuilderImpl implements ClientBuilder {
    // Mutable configuration bag shared with the client created by build().
    ClientConfigurationData conf;

    /** Creates a builder with a fresh, default configuration. */
    public ClientBuilderImpl() {
        this(new ClientConfigurationData());
    }

    /** Creates a builder backed by an existing configuration object. */
    public ClientBuilderImpl(ClientConfigurationData conf) {
        this.conf = conf;
    }

    /**
     * Validates the configuration and creates the client.
     *
     * <p>Exactly one of a service URL or a {@link ServiceUrlProvider} must be
     * configured. When a provider is used, its current URL is copied into the
     * configuration before the client is created, and the provider is
     * initialized with the new client afterwards.
     *
     * @return a new {@link PulsarClient}
     * @throws IllegalArgumentException if neither or both of service URL and
     *         service URL provider are set, or the provider yields a blank URL
     * @throws PulsarClientException if the client cannot be created
     */
    @Override
    public PulsarClient build() throws PulsarClientException {
        if (StringUtils.isBlank(conf.getServiceUrl()) && conf.getServiceUrlProvider() == null) {
            throw new IllegalArgumentException(
                    "service URL or service URL provider needs to be specified on the ClientBuilder object.");
        }
        if (StringUtils.isNotBlank(conf.getServiceUrl()) && conf.getServiceUrlProvider() != null) {
            // Fixed message typo: "chose" -> "choose".
            throw new IllegalArgumentException("Can only choose one way: service URL or service URL provider.");
        }
        if (conf.getServiceUrlProvider() != null) {
            if (StringUtils.isBlank(conf.getServiceUrlProvider().getServiceUrl())) {
                throw new IllegalArgumentException("Cannot get service url from service url provider.");
            } else {
                conf.setServiceUrl(conf.getServiceUrlProvider().getServiceUrl());
            }
        }
        PulsarClient client = new PulsarClientImpl(conf);
        if (conf.getServiceUrlProvider() != null) {
            // Let the provider push future URL updates into the live client.
            conf.getServiceUrlProvider().initialize(client);
        }
        return client;
    }

    /** Returns an independent builder with a deep copy of the current configuration. */
    @Override
    public ClientBuilder clone() {
        return new ClientBuilderImpl(conf.clone());
    }

    /** Overlays the given key/value map onto the current configuration. */
    @Override
    public ClientBuilder loadConf(Map<String, Object> config) {
        conf = ConfigurationDataUtils.loadData(config, conf, ClientConfigurationData.class);
        return this;
    }

    /**
     * Sets the broker/proxy service URL. TLS is auto-enabled (unless already
     * enabled) when the URL uses a secure scheme ({@code pulsar+ssl} / {@code https}).
     *
     * @throws IllegalArgumentException if {@code serviceUrl} is blank
     */
    @Override
    public ClientBuilder serviceUrl(String serviceUrl) {
        if (StringUtils.isBlank(serviceUrl)) {
            throw new IllegalArgumentException("Param serviceUrl must not be blank.");
        }
        conf.setServiceUrl(serviceUrl);
        if (!conf.isUseTls()) {
            enableTls(serviceUrl.startsWith("pulsar+ssl") || serviceUrl.startsWith("https"));
        }
        return this;
    }

    /**
     * Sets a provider that supplies (and can later update) the service URL.
     *
     * @throws IllegalArgumentException if {@code serviceUrlProvider} is null
     */
    @Override
    public ClientBuilder serviceUrlProvider(ServiceUrlProvider serviceUrlProvider) {
        if (serviceUrlProvider == null) {
            throw new IllegalArgumentException("Param serviceUrlProvider must not be null.");
        }
        conf.setServiceUrlProvider(serviceUrlProvider);
        return this;
    }

    /** Sets the authentication provider instance to use. */
    @Override
    public ClientBuilder authentication(Authentication authentication) {
        conf.setAuthentication(authentication);
        return this;
    }

    /**
     * Configures authentication from a plugin class name and an opaque
     * parameter string understood by that plugin.
     */
    @Override
    public ClientBuilder authentication(String authPluginClassName, String authParamsString)
            throws UnsupportedAuthenticationException {
        conf.setAuthentication(AuthenticationFactory.create(authPluginClassName, authParamsString));
        return this;
    }

    /**
     * Configures authentication from a plugin class name and a parameter map.
     */
    @Override
    public ClientBuilder authentication(String authPluginClassName, Map<String, String> authParams)
            throws UnsupportedAuthenticationException {
        conf.setAuthentication(AuthenticationFactory.create(authPluginClassName, authParams));
        return this;
    }

    /** Sets the timeout for client operations, stored internally in milliseconds. */
    @Override
    public ClientBuilder operationTimeout(int operationTimeout, TimeUnit unit) {
        conf.setOperationTimeoutMs(unit.toMillis(operationTimeout));
        return this;
    }

    /** Sets the number of netty I/O threads. */
    @Override
    public ClientBuilder ioThreads(int numIoThreads) {
        conf.setNumIoThreads(numIoThreads);
        return this;
    }

    /** Sets the number of consumer-listener threads. */
    @Override
    public ClientBuilder listenerThreads(int numListenerThreads) {
        conf.setNumListenerThreads(numListenerThreads);
        return this;
    }

    /** Sets the maximum number of connections opened to each broker. */
    @Override
    public ClientBuilder connectionsPerBroker(int connectionsPerBroker) {
        conf.setConnectionsPerBroker(connectionsPerBroker);
        return this;
    }

    /** Enables or disables TCP_NODELAY on broker connections. */
    @Override
    public ClientBuilder enableTcpNoDelay(boolean useTcpNoDelay) {
        conf.setUseTcpNoDelay(useTcpNoDelay);
        return this;
    }

    /** Enables or disables TLS transport encryption. */
    @Override
    public ClientBuilder enableTls(boolean useTls) {
        conf.setUseTls(useTls);
        return this;
    }

    /** Enables or disables TLS hostname verification. */
    @Override
    public ClientBuilder enableTlsHostnameVerification(boolean enableTlsHostnameVerification) {
        conf.setTlsHostnameVerificationEnable(enableTlsHostnameVerification);
        return this;
    }

    /** Sets the path of the trusted TLS certificate file. */
    @Override
    public ClientBuilder tlsTrustCertsFilePath(String tlsTrustCertsFilePath) {
        conf.setTlsTrustCertsFilePath(tlsTrustCertsFilePath);
        return this;
    }

    /** Allows or disallows TLS connections with untrusted certificates. */
    @Override
    public ClientBuilder allowTlsInsecureConnection(boolean tlsAllowInsecureConnection) {
        conf.setTlsAllowInsecureConnection(tlsAllowInsecureConnection);
        return this;
    }

    /** Sets the stats-logging interval, stored internally in seconds. */
    @Override
    public ClientBuilder statsInterval(long statsInterval, TimeUnit unit) {
        conf.setStatsIntervalSeconds(unit.toSeconds(statsInterval));
        return this;
    }

    /** Sets the maximum number of concurrent lookup requests per connection. */
    @Override
    public ClientBuilder maxConcurrentLookupRequests(int concurrentLookupRequests) {
        conf.setConcurrentLookupRequest(concurrentLookupRequests);
        return this;
    }

    /** Sets the maximum number of outstanding lookup requests. */
    @Override
    public ClientBuilder maxLookupRequests(int maxLookupRequests) {
        conf.setMaxLookupRequest(maxLookupRequests);
        return this;
    }

    /** Sets how many rejected requests a connection tolerates before it is closed. */
    @Override
    public ClientBuilder maxNumberOfRejectedRequestPerConnection(int maxNumberOfRejectedRequestPerConnection) {
        conf.setMaxNumberOfRejectedRequestPerConnection(maxNumberOfRejectedRequestPerConnection);
        return this;
    }

    /**
     * Sets the broker keep-alive ping interval, stored internally in seconds.
     * NOTE(review): the narrowing cast truncates intervals above
     * Integer.MAX_VALUE seconds; callers are expected to pass sane values.
     */
    @Override
    public ClientBuilder keepAliveInterval(int keepAliveInterval, TimeUnit unit) {
        conf.setKeepAliveIntervalSeconds((int) unit.toSeconds(keepAliveInterval));
        return this;
    }

    /** Sets the TCP connection-establishment timeout, stored internally in milliseconds. */
    @Override
    public ClientBuilder connectionTimeout(int duration, TimeUnit unit) {
        conf.setConnectionTimeoutMs((int) unit.toMillis(duration));
        return this;
    }

    /** Sets the initial reconnection backoff, stored internally in nanoseconds. */
    @Override
    public ClientBuilder startingBackoffInterval(long duration, TimeUnit unit) {
        conf.setInitialBackoffIntervalNanos(unit.toNanos(duration));
        return this;
    }

    /** Sets the maximum reconnection backoff, stored internally in nanoseconds. */
    @Override
    public ClientBuilder maxBackoffInterval(long duration, TimeUnit unit) {
        conf.setMaxBackoffIntervalNanos(unit.toNanos(duration));
        return this;
    }

    /** Exposes the underlying configuration (used by tests and internal callers). */
    public ClientConfigurationData getClientConfigurationData() {
        return conf;
    }

    /** Sets the clock used for message timestamps; injectable for testing. */
    @Override
    public ClientBuilder clock(Clock clock) {
        conf.setClock(clock);
        return this;
    }
}
/* * Copyright 2018 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.j2cl.integration.jsenum; import static com.google.j2cl.integration.testing.Asserts.assertEquals; import static com.google.j2cl.integration.testing.Asserts.assertFalse; import static com.google.j2cl.integration.testing.Asserts.assertThrows; import static com.google.j2cl.integration.testing.Asserts.assertThrowsClassCastException; import static com.google.j2cl.integration.testing.Asserts.assertTrue; import static com.google.j2cl.integration.testing.Asserts.assertUnderlyingTypeEquals; import static com.google.j2cl.integration.testing.Asserts.fail; import java.io.Serializable; import java.util.Arrays; import java.util.SortedSet; import java.util.TreeSet; import java.util.function.Function; import java.util.function.Supplier; import javaemul.internal.annotations.DoNotAutobox; import javaemul.internal.annotations.UncheckedCast; import jsinterop.annotations.JsEnum; import jsinterop.annotations.JsMethod; import jsinterop.annotations.JsOverlay; public class Main { private static final Object OK_STRING = "Ok"; private static final Object HELLO_STRING = "Hello"; private static final Object ONE_DOUBLE = 1.0d; private static final Object FALSE_BOOLEAN = false; public static void main(String... 
args) { testNativeJsEnum(); testStringNativeJsEnum(); testCastOnNative(); testJsEnum(); testBooleanJsEnum(); testStringJsEnum(); testJsEnumClassInitialization(); testNativeEnumClassInitialization(); testDoNotAutoboxJsEnum(); testUnckeckedCastJsEnum(); testAutoBoxing_relationalOperations(); testAutoBoxing_typeInference(); testAutoBoxing_specialMethods(); testAutoBoxing_parameterizedLambda(); testAutoBoxing_intersectionCasts(); } @JsEnum(isNative = true, namespace = "test") enum NativeEnum { OK, CANCEL } private static void testNativeJsEnum() { NativeEnum v = NativeEnum.OK; switch (v) { case OK: break; case CANCEL: fail(); break; default: fail(); break; } assertThrows( NullPointerException.class, () -> { NativeEnum nullJsEnum = null; switch (nullJsEnum) { } }); assertTrue(v == NativeEnum.OK); assertTrue(v != NativeEnum.CANCEL); // Native JsEnums are not boxed. assertTrue(v == OK_STRING); assertTrue(v == (Object) StringNativeEnum.OK); // No boxing Object o = NativeEnum.OK; assertTrue(o == NativeEnum.OK); // Object methods calls on a variable of JsEnum type. assertTrue(v.hashCode() == NativeEnum.OK.hashCode()); assertTrue(v.hashCode() != NativeEnum.CANCEL.hashCode()); assertTrue(v.hashCode() == StringNativeEnum.OK.hashCode()); assertTrue(v.toString().equals(OK_STRING)); assertTrue(v.equals(NativeEnum.OK)); assertTrue(v.equals(OK_STRING)); assertTrue(v.equals(StringNativeEnum.OK)); // Object methods calls on a variable of Object type. 
assertTrue(o.hashCode() == NativeEnum.OK.hashCode()); assertTrue(o.hashCode() != NativeEnum.CANCEL.hashCode()); assertTrue(o.hashCode() == StringNativeEnum.OK.hashCode()); assertTrue(o.toString().equals(OK_STRING)); assertTrue(o.equals(NativeEnum.OK)); assertTrue(o.equals(OK_STRING)); assertTrue(v.equals(StringNativeEnum.OK)); assertFalse(v instanceof Enum); assertTrue((Object) v instanceof String); assertTrue(v instanceof Comparable); assertTrue(v instanceof Serializable); assertFalse((Object) v instanceof PlainJsEnum); NativeEnum ne = (NativeEnum) o; StringNativeEnum sne = (StringNativeEnum) o; Comparable ce = (Comparable) o; ce = (NativeEnum & Comparable<NativeEnum>) o; Serializable s = (Serializable) o; assertThrowsClassCastException( () -> { Object unused = (Enum) o; }, Enum.class); assertThrowsClassCastException( () -> { Object unused = (Boolean) o; }, Boolean.class); assertTrue(asSeenFromJs(NativeEnum.OK) == OK_STRING); } @JsMethod(name = "passThrough") private static native Object asSeenFromJs(NativeEnum s); @JsEnum(isNative = true, namespace = "test", name = "NativeEnum", hasCustomValue = true) enum StringNativeEnum { OK, CANCEL; private String value; @JsOverlay public String getValue() { return value; } } private static void testStringNativeJsEnum() { StringNativeEnum v = StringNativeEnum.OK; switch (v) { case OK: break; case CANCEL: fail(); break; default: fail(); break; } assertThrows( NullPointerException.class, () -> { StringNativeEnum nullJsEnum = null; switch (nullJsEnum) { } }); assertTrue(v == StringNativeEnum.OK); assertTrue(v != StringNativeEnum.CANCEL); assertTrue((Object) v == OK_STRING); assertTrue(v == (Object) NativeEnum.OK); Object o = StringNativeEnum.OK; assertTrue(o == StringNativeEnum.OK); // Object methods calls on a variable of JsEnum type. 
assertTrue(v.hashCode() == StringNativeEnum.OK.hashCode()); assertTrue(v.hashCode() != StringNativeEnum.CANCEL.hashCode()); assertTrue(v.toString().equals(OK_STRING)); assertTrue(v.equals(StringNativeEnum.OK)); assertTrue(v.equals(NativeEnum.OK)); assertTrue(v.equals(OK_STRING)); // Object methods calls on a variable of Object type. assertTrue(o.hashCode() == StringNativeEnum.OK.hashCode()); assertTrue(o.hashCode() != StringNativeEnum.CANCEL.hashCode()); assertTrue(o.toString().equals(OK_STRING)); assertTrue(o.equals(StringNativeEnum.OK)); assertTrue(o.equals(NativeEnum.OK)); assertTrue(o.equals(OK_STRING)); assertTrue(v.getValue().equals(v.toString())); assertTrue(v.getValue().equals(OK_STRING)); assertFalse(v instanceof Enum); assertTrue((Object) v instanceof String); assertTrue(v instanceof Comparable); assertTrue(v instanceof Serializable); assertFalse((Object) v instanceof PlainJsEnum); Serializable se = (Serializable) o; StringNativeEnum sne = (StringNativeEnum) o; NativeEnum ne = (NativeEnum) o; Comparable ce = (Comparable) o; Comparable seAndC = (StringNativeEnum & Comparable<StringNativeEnum>) o; assertUnderlyingTypeEquals(String.class, seAndC); assertThrowsClassCastException( () -> { Object unused = (Enum) o; }, Enum.class); assertThrowsClassCastException( () -> { Object unused = (Boolean) o; }, Boolean.class); assertTrue(asSeenFromJs(StringNativeEnum.OK) == OK_STRING); } @JsEnum(isNative = true, namespace = "test", name = "NativeEnumOfNumber", hasCustomValue = true) enum NumberNativeEnum { ONE, TWO; short value; } public static void testCastOnNative() { castToNativeEnum(NativeEnum.OK); castToNativeEnum(StringNativeEnum.OK); castToNativeEnum(NumberNativeEnum.ONE); castToNativeEnum(PlainJsEnum.ONE); castToNativeEnum(OK_STRING); castToNativeEnum((Double) 2.0); castToNativeEnum((Integer) 1); castToStringNativeEnum(StringNativeEnum.OK); castToStringNativeEnum(NativeEnum.OK); castToStringNativeEnum(OK_STRING); assertThrowsClassCastException(() -> 
castToStringNativeEnum(NumberNativeEnum.ONE)); assertThrowsClassCastException(() -> castToStringNativeEnum(PlainJsEnum.ONE)); assertThrowsClassCastException(() -> castToStringNativeEnum((Integer) 1)); assertThrowsClassCastException(() -> castToStringNativeEnum((Double) 2.0)); castToNumberNativeEnum(NumberNativeEnum.ONE); castToNumberNativeEnum((Double) 2.0); assertThrowsClassCastException(() -> castToNumberNativeEnum(NativeEnum.OK)); assertThrowsClassCastException(() -> castToNumberNativeEnum(StringNativeEnum.OK)); assertThrowsClassCastException(() -> castToNumberNativeEnum(PlainJsEnum.ONE)); assertThrowsClassCastException(() -> castToNumberNativeEnum((Integer) 1)); assertThrowsClassCastException(() -> castToNumberNativeEnum(OK_STRING)); } private static NativeEnum castToNativeEnum(Object o) { return (NativeEnum) o; } private static StringNativeEnum castToStringNativeEnum(Object o) { return (StringNativeEnum) o; } private static NumberNativeEnum castToNumberNativeEnum(Object o) { return (NumberNativeEnum) o; } @JsMethod(name = "passThrough") private static native Object asSeenFromJs(StringNativeEnum s); @JsEnum enum PlainJsEnum { ZERO, ONE, TWO, THREE, FOUR, FIVE, SIX, SEVEN, EIGHT, NINE, TEN; public int getValue() { return ordinal(); } } @JsEnum enum OtherPlainJsEnum { NONE, UNIT } private static void testJsEnum() { PlainJsEnum v = PlainJsEnum.ONE; switch (v) { case ZERO: fail(); break; case ONE: break; default: fail(); break; } assertThrows( NullPointerException.class, () -> { PlainJsEnum nullJsEnum = null; switch (nullJsEnum) { } }); assertTrue(v == PlainJsEnum.ONE); assertTrue(v != PlainJsEnum.ZERO); assertTrue((Object) v != ONE_DOUBLE); // Boxing preserves equality. Object o = PlainJsEnum.ONE; assertTrue(o == PlainJsEnum.ONE); // Object methods calls on a variable of JsEnum type. 
assertTrue(v.hashCode() == PlainJsEnum.ONE.hashCode()); assertTrue(v.hashCode() != PlainJsEnum.ZERO.hashCode()); assertTrue(v.toString().equals(String.valueOf(ONE_DOUBLE))); assertTrue(v.equals(PlainJsEnum.ONE)); assertFalse(v.equals(ONE_DOUBLE)); assertFalse(PlainJsEnum.ZERO.equals(OtherPlainJsEnum.NONE)); // Object methods calls on a variable of Object type. assertTrue(o.hashCode() == PlainJsEnum.ONE.hashCode()); assertTrue(o.hashCode() != PlainJsEnum.ZERO.hashCode()); assertTrue(o.toString().equals(String.valueOf(ONE_DOUBLE))); assertTrue(o.equals(PlainJsEnum.ONE)); assertFalse(o.equals(ONE_DOUBLE)); assertTrue(v.getValue() == 1); assertTrue(v.ordinal() == 1); assertTrue(PlainJsEnum.ONE.compareTo(v) == 0); assertTrue(PlainJsEnum.ZERO.compareTo(v) < 0); assertThrowsClassCastException( () -> { Comparable comparable = PlainJsEnum.ONE; comparable.compareTo(OtherPlainJsEnum.UNIT); }); assertThrowsClassCastException( () -> { Comparable comparable = PlainJsEnum.ONE; comparable.compareTo(ONE_DOUBLE); }); assertThrowsClassCastException( () -> { Comparable comparable = (Comparable) ONE_DOUBLE; comparable.compareTo(PlainJsEnum.ONE); }); assertThrowsClassCastException( () -> { Object unused = (Enum<PlainJsEnum> & Comparable<PlainJsEnum>) PlainJsEnum.ONE; }, Enum.class); // Test that boxing of special method 'ordinal()' call is not broken by normalization. Integer i = v.ordinal(); assertTrue(i.intValue() == 1); assertFalse(v instanceof Enum); assertTrue(v instanceof PlainJsEnum); assertFalse((Object) v instanceof Double); assertTrue(v instanceof Comparable); assertTrue(v instanceof Serializable); assertFalse((Object) v instanceof BooleanJsEnum); assertFalse(new Object() instanceof PlainJsEnum); assertFalse((Object) ONE_DOUBLE instanceof PlainJsEnum); PlainJsEnum pe = (PlainJsEnum) o; Comparable c = (Comparable) o; Serializable s = (Serializable) o; // Intersection casts box/or unbox depending on the destination type. 
Comparable otherC = (PlainJsEnum & Comparable<PlainJsEnum>) o; assertUnderlyingTypeEquals(PlainJsEnum.class, otherC); PlainJsEnum otherPe = (PlainJsEnum & Comparable<PlainJsEnum>) o; assertUnderlyingTypeEquals(Double.class, otherPe); assertThrowsClassCastException( () -> { Object unused = (Enum) o; }, Enum.class); assertThrowsClassCastException( () -> { Object unused = (Double) o; }, Double.class); assertTrue(asSeenFromJs(PlainJsEnum.ONE) == ONE_DOUBLE); // Comparable test. SortedSet<Comparable> sortedSet = new TreeSet<>(Comparable::compareTo); sortedSet.add(PlainJsEnum.ONE); sortedSet.add(PlainJsEnum.ZERO); assertTrue(sortedSet.iterator().next() == PlainJsEnum.ZERO); assertTrue(sortedSet.iterator().next() instanceof PlainJsEnum); } @JsMethod(name = "passThrough") private static native Object asSeenFromJs(PlainJsEnum d); @JsEnum(hasCustomValue = true) enum BooleanJsEnum { TRUE(true), FALSE(false); boolean value; BooleanJsEnum(boolean value) { this.value = value; } } private static void testBooleanJsEnum() { BooleanJsEnum v = BooleanJsEnum.FALSE; switch (v) { case TRUE: fail(); break; case FALSE: break; default: fail(); break; } assertThrows( NullPointerException.class, () -> { BooleanJsEnum nullJsEnum = null; switch (nullJsEnum) { } }); assertTrue(v == BooleanJsEnum.FALSE); assertTrue(v != BooleanJsEnum.TRUE); assertTrue((Object) v != FALSE_BOOLEAN); // Boxing preserves equality. Object o = BooleanJsEnum.FALSE; assertTrue(o == BooleanJsEnum.FALSE); // Object methods calls on a variable of JsEnum type. assertTrue(v.hashCode() == BooleanJsEnum.FALSE.hashCode()); assertTrue(v.hashCode() != BooleanJsEnum.TRUE.hashCode()); assertTrue(v.toString().equals(String.valueOf(FALSE_BOOLEAN))); assertTrue(v.equals(BooleanJsEnum.FALSE)); assertFalse(v.equals(FALSE_BOOLEAN)); // Object methods calls on a variable of Object type. 
assertTrue(o.hashCode() == BooleanJsEnum.FALSE.hashCode()); assertTrue(o.hashCode() != BooleanJsEnum.TRUE.hashCode()); assertTrue(o.toString().equals(String.valueOf(FALSE_BOOLEAN))); assertTrue(o.equals(BooleanJsEnum.FALSE)); assertFalse(o.equals(FALSE_BOOLEAN)); assertTrue((Object) v.value == FALSE_BOOLEAN); // Test that boxing of special field 'value' call is not broken by normalization. Boolean b = v.value; assertTrue(b == FALSE_BOOLEAN); assertFalse(v instanceof Enum); assertTrue(v instanceof BooleanJsEnum); assertFalse((Object) v instanceof Boolean); assertFalse(v instanceof Comparable); assertTrue(v instanceof Serializable); assertFalse((Object) v instanceof PlainJsEnum); assertFalse(new Object() instanceof BooleanJsEnum); assertFalse((Object) FALSE_BOOLEAN instanceof BooleanJsEnum); BooleanJsEnum be = (BooleanJsEnum) o; Serializable s = (Serializable) o; assertThrowsClassCastException( () -> { Object unused = (Enum) o; }, Enum.class); assertThrowsClassCastException( () -> { Object unused = (Comparable) o; }, Comparable.class); assertThrowsClassCastException( () -> { Object unused = (Boolean) o; }, Boolean.class); assertThrowsClassCastException( () -> { Object unused = (BooleanJsEnum & Comparable<BooleanJsEnum>) o; }, Comparable.class); assertTrue(asSeenFromJs(BooleanJsEnum.FALSE) == FALSE_BOOLEAN); } @JsMethod(name = "passThrough") private static native Object asSeenFromJs(BooleanJsEnum b); @JsEnum(hasCustomValue = true) enum StringJsEnum { HELLO("Hello"), GOODBYE("Good Bye"); String value; StringJsEnum(String value) { this.value = value; } } private static void testStringJsEnum() { StringJsEnum v = StringJsEnum.HELLO; switch (v) { case GOODBYE: fail(); break; case HELLO: break; default: fail(); break; } assertThrows( NullPointerException.class, () -> { StringJsEnum nullJsEnum = null; switch (nullJsEnum) { } }); assertTrue(v == StringJsEnum.HELLO); assertTrue(v != StringJsEnum.GOODBYE); assertTrue((Object) v != HELLO_STRING); // Boxing preserves equality. 
Object o = StringJsEnum.HELLO; assertTrue(o == StringJsEnum.HELLO); // Object methods calls on a variable of JsEnum type. assertTrue(v.hashCode() == StringJsEnum.HELLO.hashCode()); assertTrue(v.hashCode() != StringJsEnum.GOODBYE.hashCode()); assertTrue(v.toString().equals(HELLO_STRING)); assertTrue(v.equals(StringJsEnum.HELLO)); assertFalse(v.equals(HELLO_STRING)); // Object methods calls on a variable of Object type. assertTrue(o.hashCode() == StringJsEnum.HELLO.hashCode()); assertTrue(o.hashCode() != StringJsEnum.GOODBYE.hashCode()); assertTrue(o.toString().equals(HELLO_STRING)); assertTrue(o.equals(StringJsEnum.HELLO)); assertFalse(o.equals(HELLO_STRING)); assertTrue(v.value.equals(HELLO_STRING)); assertFalse(v instanceof Enum); assertTrue(v instanceof StringJsEnum); assertFalse((Object) v instanceof String); assertFalse(v instanceof Comparable); assertTrue(v instanceof Serializable); assertFalse((Object) v instanceof PlainJsEnum); assertFalse(new Object() instanceof StringJsEnum); assertFalse((Object) HELLO_STRING instanceof StringJsEnum); StringJsEnum se = (StringJsEnum) o; Serializable s = (Serializable) o; assertThrowsClassCastException( () -> { Object unused = (Enum) o; }, Enum.class); assertThrowsClassCastException( () -> { Object unused = (Comparable) o; }, Comparable.class); assertThrowsClassCastException( () -> { Object unused = (String) o; }, String.class); assertThrowsClassCastException( () -> { Object unused = (StringJsEnum & Comparable<StringJsEnum>) o; }, Comparable.class); assertTrue(asSeenFromJs(StringJsEnum.HELLO) == HELLO_STRING); } @JsMethod(name = "passThrough") private static native Object asSeenFromJs(StringJsEnum b); private static boolean nonNativeClinitCalled = false; @JsEnum enum EnumWithClinit { A; static { nonNativeClinitCalled = true; } int getValue() { return ordinal(); } } private static void testJsEnumClassInitialization() { assertFalse(nonNativeClinitCalled); // Access to an enum value does not trigger clinit. 
Object o = EnumWithClinit.A; assertFalse(nonNativeClinitCalled); // Cast and instanceof do not trigger clinit. if (o instanceof EnumWithClinit) { o = (EnumWithClinit) o; } assertFalse(nonNativeClinitCalled); // Access to ordinal() does not trigger clinit. int n = EnumWithClinit.A.ordinal(); assertFalse(nonNativeClinitCalled); // Access to any devirtualized method triggers clinit. EnumWithClinit.A.getValue(); assertTrue(nonNativeClinitCalled); } private static boolean nativeClinitCalled = false; @JsEnum(isNative = true, hasCustomValue = true, namespace = "test", name = "NativeEnum") enum NativeEnumWithClinit { OK; static { nativeClinitCalled = true; } String value; @JsOverlay String getValue() { return value; } } private static void testNativeEnumClassInitialization() { assertFalse(nativeClinitCalled); // Access to an enum value does not trigger clinit. Object o = NativeEnumWithClinit.OK; assertFalse(nativeClinitCalled); // Cast does not trigger clinit. o = (NativeEnumWithClinit) o; assertFalse(nativeClinitCalled); // Access to value does not trigger clinit. String s = NativeEnumWithClinit.OK.value; assertFalse(nativeClinitCalled); // Access to any devirtualized method triggers clinit. NativeEnumWithClinit.OK.getValue(); assertTrue(nativeClinitCalled); } private static void testDoNotAutoboxJsEnum() { assertTrue(returnsObject(StringJsEnum.HELLO) == HELLO_STRING); assertTrue(returnsObject(0, StringJsEnum.HELLO) == HELLO_STRING); } private static Object returnsObject(@DoNotAutobox Object object) { return object; } private static Object returnsObject(int n, @DoNotAutobox Object... 
object) { return object[0]; } private static void testUnckeckedCastJsEnum() { StringJsEnum s = uncheckedCast(HELLO_STRING); assertTrue(s == StringJsEnum.HELLO); } @UncheckedCast private static <T> T uncheckedCast(@DoNotAutobox Object object) { return (T) object; } private static void testAutoBoxing_relationalOperations() { PlainJsEnum one = PlainJsEnum.ONE; Object boxedOne = PlainJsEnum.ONE; assertTrue(one == boxingPassthrough(one)); assertTrue(boxedOne == boxingPassthrough(one)); assertTrue(boxingPassthrough(one) == one); assertTrue(boxingPassthrough(one) == boxedOne); assertFalse(one != boxedOne); assertFalse(boxedOne != one); assertFalse(one != boxingPassthrough(one)); assertFalse(boxedOne != boxingPassthrough(one)); assertFalse(boxingPassthrough(one) != one); assertFalse(boxingPassthrough(one) != boxedOne); } private static <T> T boxingPassthrough(T t) { return t; } private static void testAutoBoxing_specialMethods() { assertTrue(PlainJsEnum.ONE.equals(PlainJsEnum.ONE)); assertTrue(PlainJsEnum.ONE.compareTo(PlainJsEnum.ONE) == 0); assertTrue(PlainJsEnum.ONE.compareTo(PlainJsEnum.ZERO) > 0); assertTrue(PlainJsEnum.TWO.compareTo(PlainJsEnum.TEN) < 0); } private static void testAutoBoxing_intersectionCasts() { Comparable c = (PlainJsEnum & Comparable<PlainJsEnum>) PlainJsEnum.ONE; assertTrue(c.compareTo(PlainJsEnum.ZERO) > 0); PlainJsEnum e = (PlainJsEnum & Comparable<PlainJsEnum>) PlainJsEnum.ONE; // e correcly holds an unboxed value. assertUnderlyingTypeEquals(Double.class, e); assertTrue(PlainJsEnum.ONE == (PlainJsEnum & Comparable<PlainJsEnum>) PlainJsEnum.ONE); // Intersection cast with a JsEnum does not unbox like the simple cast. 
assertUnderlyingTypeEquals( PlainJsEnum.class, (PlainJsEnum & Comparable<PlainJsEnum>) PlainJsEnum.ONE); } private static void testAutoBoxing_typeInference() { assertUnderlyingTypeEquals(Double.class, PlainJsEnum.ONE); assertUnderlyingTypeEquals(PlainJsEnum.class, boxingIdentity(PlainJsEnum.ONE)); // Make sure the enum is boxed even when assigned to a field that is inferred to be JsEnum. TemplatedField<PlainJsEnum> templatedField = new TemplatedField<PlainJsEnum>(PlainJsEnum.ONE); PlainJsEnum unboxed = templatedField.getValue(); assertUnderlyingTypeEquals(Double.class, unboxed); // Boxing through specialized method parameter assignment. assertUnderlyingTypeEquals(PlainJsEnum.class, boxingIdentity(unboxed)); // Unboxing as a qualifier to ordinal. assertUnderlyingTypeEquals(Double.class, templatedField.getValue().ordinal()); // Boxing through specialized method parameter assignment. assertUnderlyingTypeEquals(PlainJsEnum.class, boxingIdentity(templatedField.getValue())); // Checks what is actually returned by getValue(). assertUnderlyingTypeEquals(PlainJsEnum.class, ((TemplatedField) templatedField).getValue()); unboxed = templatedField.value; assertUnderlyingTypeEquals(Double.class, unboxed); templatedField.value = PlainJsEnum.ONE; // Boxing through specialized method parameter assignment. assertUnderlyingTypeEquals(PlainJsEnum.class, boxingIdentity(templatedField.value)); // Checks what is actually stored in value. assertUnderlyingTypeEquals(PlainJsEnum.class, ((TemplatedField) templatedField).value); // Unboxing as a qualifier to ordinal. assertUnderlyingTypeEquals(Double.class, templatedField.value.ordinal()); // Boxing/unboxing in varargs. assertUnderlyingTypeEquals(Double.class, Arrays.asList(PlainJsEnum.ONE).get(0)); // TODO(b/118615488): Rewrite the following checks when JsEnum arrays are allowed. // In Java the varargs array will be of the inferred argument type. Since non native JsEnum // arrays are not allowed, the created array is of the declared type. 
assertUnderlyingTypeEquals(Comparable[].class, varargsToComparableArray(PlainJsEnum.ONE)); assertUnderlyingTypeEquals(PlainJsEnum.class, varargsToComparableArray(PlainJsEnum.ONE)[0]); assertUnderlyingTypeEquals(Object[].class, varargsToObjectArray(PlainJsEnum.ONE)); assertUnderlyingTypeEquals(PlainJsEnum.class, varargsToObjectArray(PlainJsEnum.ONE)[0]); } private static class TemplatedField<T> { T value; TemplatedField(T value) { this.value = value; } T getValue() { return this.value; } } private static <T> Object boxingIdentity(T o) { return o; } private static <T extends Comparable> Object[] varargsToComparableArray(T... elements) { return elements; } private static <T> Object[] varargsToObjectArray(T... elements) { return elements; } private static void testAutoBoxing_parameterizedLambda() { Function<Object, Double> ordinalWithCast = e -> (double) ((PlainJsEnum) e).ordinal(); assertTrue(1 == ordinalWithCast.apply(PlainJsEnum.ONE)); Function<PlainJsEnum, Double> ordinal = e -> (double) e.ordinal(); assertTrue(1 == ordinal.apply(PlainJsEnum.ONE)); Function<? super PlainJsEnum, String> function = e -> { switch (e) { case ONE: return "ONE"; default: return "None"; } }; assertEquals("ONE", function.apply(PlainJsEnum.ONE)); Supplier<PlainJsEnum> supplier = () -> PlainJsEnum.ONE; assertEquals(PlainJsEnum.ONE, supplier.get()); } @JsMethod // Pass through an enum value as if it were coming from and going to JavaScript. private static Object passThrough(Object o) { // Supported closure enums can only have number, boolean or string as their underlying type. // Make sure that boxed enums are not passing though here. assertTrue(o instanceof String || o instanceof Double || o instanceof Boolean); return o; } }
/*
 * Copyright 2007 Alin Dreghiciu.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.
 *
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.ops4j.pax.web.extender.war.internal.model;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.servlet.MultipartConfigElement;
import javax.servlet.Servlet;

import org.ops4j.lang.NullArgumentException;

/**
 * Servlet element in web.xml.
 *
 * @author Alin Dreghiciu
 * @author Marc Klinger - mklinger[at]nightlabs[dot]de
 * @since 0.3.0, December 27, 2007
 */
public class WebAppServlet {

	/**
	 * Servlet name.
	 */
	private String servletName;
	/**
	 * Servlet class name.
	 */
	private String servletClassName;
	/**
	 * Servlet class. This is set during registration process and set to null
	 * during unregistration.
	 */
	private Class<? extends Servlet> servletClass;
	/**
	 * Init params for this servlet. A synthetic "servlet-name" param is added
	 * by {@link #setServletName(String)}.
	 */
	private final List<WebAppInitParam> initParams;
	/**
	 * Aliases corresponding to servlet mapped url patterns. This is not set by
	 * the parser but by the web app while adding a servlet mapping.
	 */
	private final Set<String> aliases;

	private int loadOnStartup;
	private boolean asyncSupported;
	private MultipartConfigElement multipartConfigurations;

	/**
	 * Creates a new web app servlet.
	 */
	public WebAppServlet() {
		aliases = new HashSet<>();
		initParams = new ArrayList<>();
	}

	/**
	 * Getter.
	 *
	 * @return servlet name
	 */
	public String getServletName() {
		return servletName;
	}

	/**
	 * Setter.
	 *
	 * @param servletName value to set. Cannot be null
	 * @throws NullArgumentException if servlet name is null
	 */
	public void setServletName(final String servletName) {
		NullArgumentException.validateNotNull(servletName, "Servlet name");
		this.servletName = servletName;
		// specify servlet name for Pax Web as a synthetic init param
		// NOTE(review): calling this setter more than once accumulates
		// duplicate "servlet-name" entries in initParams — confirm that the
		// setter is only ever invoked once per instance.
		final WebAppInitParam initParam = new WebAppInitParam();
		initParam.setParamName("servlet-name");
		initParam.setParamValue(servletName);
		initParams.add(initParam);
	}

	/**
	 * Getter.
	 *
	 * @return servlet class name
	 */
	public String getServletClassName() {
		return servletClassName;
	}

	/**
	 * Setter.
	 *
	 * @param servletClassName value to set. Cannot be null
	 * @throws NullArgumentException if servlet class is null
	 */
	public void setServletClassName(final String servletClassName) {
		NullArgumentException.validateNotNull(servletClassName, "Servlet class name");
		this.servletClassName = servletClassName;
	}

	/**
	 * Getter.
	 *
	 * @return servletClass
	 */
	public Class<? extends Servlet> getServletClass() {
		return servletClass;
	}

	/**
	 * Setter.
	 *
	 * @param servletClass value to set.
	 */
	public void setServletClass(final Class<? extends Servlet> servletClass) {
		this.servletClass = servletClass;
	}

	/**
	 * Returns the aliases associated with this servlet. If there are no
	 * associated aliases an empty array is returned.
	 *
	 * @return array of aliases
	 */
	public String[] getAliases() {
		return aliases.toArray(new String[0]);
	}

	/**
	 * Add an url mapping for servlet.
	 *
	 * NOTE(review): the original javadoc claimed the url pattern is converted
	 * to an alias by removing a trailing "*", but the pattern is stored
	 * verbatim here — confirm where (if anywhere) that stripping happens.
	 *
	 * @param urlPattern to be added. Cannot be null
	 * @throws NullArgumentException if url pattern is null
	 */
	public void addUrlPattern(final String urlPattern) {
		NullArgumentException.validateNotNull(urlPattern, "Url pattern");
		aliases.add(urlPattern);
	}

	/**
	 * Add an init param for this servlet.
	 *
	 * @param param to be added
	 * @throws NullArgumentException if param, param name, param value is null
	 */
	public void addInitParam(final WebAppInitParam param) {
		NullArgumentException.validateNotNull(param, "Init param");
		NullArgumentException.validateNotNull(param.getParamName(), "Init param name");
		NullArgumentException.validateNotNull(param.getParamValue(), "Init param value");
		initParams.add(param);
	}

	/**
	 * Setter.
	 *
	 * @param multipartConfigElement value to set. Cannot be null
	 * @throws NullArgumentException if the multipart config is null
	 */
	public void setMultipartConfig(MultipartConfigElement multipartConfigElement) {
		NullArgumentException.validateNotNull(multipartConfigElement, "MultipartConfig");
		multipartConfigurations = multipartConfigElement;
	}

	/**
	 * Returns the init params associated with this servlet. If there are no
	 * associated init params an empty array is returned.
	 *
	 * @return array of init params
	 */
	public WebAppInitParam[] getInitParams() {
		return initParams.toArray(new WebAppInitParam[0]);
	}

	public MultipartConfigElement getMultipartConfig() {
		return multipartConfigurations;
	}

	/**
	 * Parses and sets the load-on-startup value. A null or non-numeric value
	 * falls back to {@link Integer#MAX_VALUE} (i.e. "load last").
	 *
	 * @param value textual load-on-startup value from web.xml; may be null
	 */
	public void setLoadOnStartup(String value) {
		if (value != null) {
			try {
				loadOnStartup = Integer.parseInt(value);
			} catch (NumberFormatException e) {
				loadOnStartup = Integer.MAX_VALUE;
			}
		} else {
			loadOnStartup = Integer.MAX_VALUE;
		}
	}

	public int getLoadOnStartup() {
		return loadOnStartup;
	}

	public void setLoadOnStartup(int loadOnStartup) {
		this.loadOnStartup = loadOnStartup;
	}

	public void setAsyncSupported(boolean asyncSupported) {
		this.asyncSupported = asyncSupported;
	}

	/**
	 * Parses and sets the async-supported flag. A null value leaves the
	 * current flag unchanged.
	 *
	 * @param value textual boolean from web.xml; may be null
	 */
	public void setAsyncSupported(String value) {
		if (value != null) {
			asyncSupported = Boolean.parseBoolean(value);
		}
	}

	/* TODO: need to find the right spot to retrieve this information */
	public Boolean getAsyncSupported() {
		return asyncSupported;
	}

	@Override
	public String toString() {
		return new StringBuilder().append(this.getClass().getSimpleName())
				.append("{").append("servletName=").append(servletName)
				.append(",servletClass=").append(servletClassName)
				.append(",aliases=").append(aliases).append("}").toString();
	}

}
/*
 * Copyright (C) 2015 Willi Ye
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.swapnil133609.zeuscontrols.utils.json;

import android.content.Context;

import com.swapnil133609.zeuscontrols.utils.Utils;
import com.zeuscontrols.library.Tools;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.util.ArrayList;
import java.util.List;

/**
 * Created by willi on 20.06.15.
 *
 * Resolves the kernel download link for the current device from a local
 * "downloads.json" file (downloaded copy preferred over the bundled asset)
 * and provides typed wrappers around the kernel-related JSON documents.
 */
public class Downloads {

    // Download link matching the current vendor/device; null when unsupported
    // or when the JSON could not be parsed.
    private String link;

    /**
     * Parses downloads.json and picks the link whose "vendor" and "device"
     * arrays contain the current vendor and device names. If several entries
     * match, the last match wins (original behavior, preserved).
     *
     * @param context used to locate the downloaded file / bundled asset
     */
    public Downloads(Context context) {
        try {
            String json = Utils.existFile(context.getFilesDir() + "/downloads.json")
                    ? Tools.readFile(context.getFilesDir() + "/downloads.json", false)
                    : Utils.readAssetFile(context, "downloads.json");
            JSONArray devices = new JSONArray(json);
            for (int i = 0; i < devices.length(); i++) {
                JSONObject device = devices.getJSONObject(i);
                JSONArray vendors = device.getJSONArray("vendor");
                for (int x = 0; x < vendors.length(); x++) {
                    if (!vendors.getString(x).equals(Utils.getVendorName())) {
                        continue;
                    }
                    JSONArray names = device.getJSONArray("device");
                    for (int y = 0; y < names.length(); y++) {
                        if (names.getString(y).equals(Utils.getDeviceName())) {
                            link = device.getString("link");
                        }
                    }
                }
            }
        } catch (JSONException e) {
            // Best effort: leave link null so isSupported() reports false.
            e.printStackTrace();
        }
    }

    /** @return the resolved download link, or null if none matched */
    public String getLink() {
        return link;
    }

    /** @return true when a download link exists for this device */
    public boolean isSupported() {
        return link != null;
    }

    /**
     * Wrapper around a JSON array of kernel link strings.
     */
    public static class Kernels {

        // null when the constructor input was not valid JSON.
        private JSONArray kernels;

        public Kernels(String json) {
            try {
                kernels = new JSONArray(json);
            } catch (JSONException e) {
                e.printStackTrace();
            }
        }

        /**
         * @param position index into the array
         * @return the link at {@code position}, or null if absent/invalid
         */
        public String getLink(int position) {
            try {
                return kernels.getString(position);
            } catch (JSONException e) {
                return null;
            }
        }

        /**
         * @return number of kernel links; 0 when the JSON failed to parse
         *         (previously this threw a NullPointerException even though
         *         {@link #readable()} exists to signal that state)
         */
        public int length() {
            return kernels == null ? 0 : kernels.length();
        }

        /** @return true when the JSON was parsed successfully */
        public boolean readable() {
            return kernels != null;
        }
    }

    /**
     * Wrapper around a single kernel description JSON object.
     */
    public static class KernelContent {

        // null when the constructor input was not valid JSON.
        private JSONObject kernel;
        // Raw JSON text, kept so it can be persisted/passed along verbatim.
        private String json;

        public KernelContent(String json) {
            try {
                this.json = json;
                kernel = new JSONObject(json);
            } catch (JSONException e) {
                e.printStackTrace();
            }
        }

        public String getName() {
            return getString("name");
        }

        public String getShortDescription() {
            return getString("short_description");
        }

        public String getLongDescription() {
            return getString("long_description");
        }

        public String getLogo() {
            return getString("logo");
        }

        public String getXDA() {
            return getString("xda");
        }

        public String getGithub() {
            return getString("github");
        }

        public String getGooglePlus() {
            return getString("google_plus");
        }

        public String getPaypal() {
            return getString("paypal");
        }

        /**
         * Reads the "features" array. Each element may be either a JSON
         * object (feature with sub-items) or a plain string (simple feature);
         * both forms are supported.
         *
         * @return parsed features; empty when the array is absent or invalid
         */
        public List<Feature> getFeatures() {
            List<Feature> list = new ArrayList<>();
            try {
                JSONArray features = kernel.getJSONArray("features");
                for (int i = 0; i < features.length(); i++) {
                    try {
                        list.add(new Feature(features.getJSONObject(i)));
                    } catch (JSONException ignored) {
                        // Element is not an object; fall back to plain string.
                        try {
                            list.add(new Feature(features.getString(i)));
                        } catch (JSONException e) {
                            e.printStackTrace();
                        }
                    }
                }
            } catch (JSONException e) {
                e.printStackTrace();
            }
            return list;
        }

        /**
         * @return parsed "downloads" entries; empty when absent or invalid
         */
        public List<Download> getDownloads() {
            List<Download> list = new ArrayList<>();
            try {
                JSONArray downloads = kernel.getJSONArray("downloads");
                for (int i = 0; i < downloads.length(); i++) {
                    list.add(new Download(downloads.getJSONObject(i)));
                }
            } catch (JSONException e) {
                e.printStackTrace();
            }
            return list;
        }

        /** @return the raw JSON text this object was constructed from */
        public String getJSON() {
            return json;
        }

        /** @return true when the JSON was parsed successfully */
        public boolean readable() {
            return kernel != null;
        }

        // Null-returning accessor for optional string fields.
        private String getString(String name) {
            try {
                return kernel.getString(name);
            } catch (JSONException e) {
                return null;
            }
        }
    }

    /**
     * One feature entry: either a plain string or an object with a "name"
     * and an "items" array.
     */
    public static class Feature {

        private String feature;
        private JSONObject features;

        public Feature(String feature) {
            this.feature = feature;
        }

        public Feature(JSONObject features) {
            this.features = features;
        }

        /**
         * @return the feature label: the plain string if present, otherwise
         *         the object's "name" field; null when neither is available
         */
        public String getItem() {
            try {
                if (feature != null) {
                    return feature;
                } else if (features != null) {
                    return features.getString("name");
                }
            } catch (JSONException e) {
                e.printStackTrace();
            }
            return null;
        }

        /**
         * @return sub-items of an object-form feature; empty for string-form
         *         features or on any parse failure
         */
        public List<String> getItems() {
            List<String> list = new ArrayList<>();
            // Broad catch kept on purpose: 'features' is null for string-form
            // entries, so this also swallows the resulting NPE (best effort).
            try {
                JSONArray items = features.getJSONArray("items");
                for (int i = 0; i < items.length(); i++) {
                    list.add(items.getString(i));
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
            return list;
        }

        /** @return true when this is an object-form feature with sub-items */
        public boolean hasItems() {
            return features != null;
        }
    }

    /**
     * One "downloads" entry with metadata and an optional changelog.
     */
    public static class Download {

        private final JSONObject content;

        public Download(JSONObject content) {
            this.content = content;
        }

        public String getName() {
            return getString("name");
        }

        public String getDescription() {
            return getString("description");
        }

        public String getUrl() {
            return getString("url");
        }

        public String getMD5sum() {
            return getString("md5sum");
        }

        public String getInstallMethod() {
            return getString("install_method");
        }

        /**
         * @return changelog lines; empty when the "changelog" array is absent
         */
        public List<String> getChangelogs() {
            List<String> list = new ArrayList<>();
            try {
                JSONArray changes = content.getJSONArray("changelog");
                for (int i = 0; i < changes.length(); i++) {
                    list.add(changes.getString(i));
                }
            } catch (JSONException ignored) {
                // Changelog is optional; an empty list is the documented result.
            }
            return list;
        }

        // Null-returning accessor for optional string fields.
        private String getString(String name) {
            try {
                return content.getString(name);
            } catch (JSONException e) {
                return null;
            }
        }
    }
}
package translation; import com.google.gson.Gson; import config.ConfigurationLoader; import hdfs.FileUtil; import messaging.Worker; import models.*; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.neo4j.graphdb.*; import org.neo4j.graphdb.traversal.Evaluators; import org.neo4j.helpers.collection.IteratorUtil; import org.neo4j.tooling.GlobalGraphOperations; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.IOException; import java.io.OutputStreamWriter; import java.net.URISyntaxException; import java.text.MessageFormat; import java.util.*; import java.util.concurrent.ForkJoinPool; import java.util.stream.Stream; import static java.lang.String.format; import static org.neo4j.graphdb.DynamicRelationshipType.withName; /** * Copyright (C) 2014 Kenny Bastani * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. 
 */

/**
 * Exports Neo4j graph data to HDFS as plain-text edge lists for the Spark-based
 * Mazerunner analysis jobs, and applies the analysis results back onto the graph.
 * <p>
 * Export methods write one "startNodeId endNodeId" pair per line; update methods
 * read result rows of the form "nodeId value" (or CSV for collaborative filtering)
 * and write them back as node properties or partition relationships. Large inputs
 * are split via {@link Spliterator#trySplit()} and processed on a fork-join pool.
 */
public class Writer {

    public static final String EDGE_LIST_RELATIVE_FILE_PATH = "/neo4j/mazerunner/jobs/{job_id}/edgeList.txt";

    // Progress counters for console reporting.
    // NOTE(review): both counters are plain Integers incremented from ForkJoinPool
    // worker threads in the parallel paths; Integer++ is not atomic, so reported
    // counts may be inaccurate under contention — confirm whether exact counts matter.
    public static Integer updateCounter = 0;
    public static Integer counter = 0;

    /**
     * Exports the whole subgraph to HDFS and dispatches an unpartitioned analysis
     * job of the given type to the Spark graph processor.
     *
     * @param db   the Neo4j graph database context
     * @param type the analysis type to run (e.g. PageRank)
     * @throws IOException        on HDFS write failure
     * @throws URISyntaxException if the configured HDFS URI is malformed
     */
    public static void dispatchJob(GraphDatabaseService db, String type) throws IOException, URISyntaxException {
        // Export the subgraph to HDFS
        Path pt = exportSubgraphToHDFSParallel(db);

        // Serialize processor message
        ProcessorMessage message = new ProcessorMessage(pt.toString(), type, ProcessorMode.Unpartitioned);

        Gson gson = new Gson();
        String strMessage = gson.toJson(message);

        // Send message to the Spark graph processor
        Worker.sendMessage(strMessage);
    }

    /**
     * Dispatches a partitioned analysis job for an already-exported edge list.
     *
     * @param db                   the Neo4j graph database context
     * @param type                 the analysis type to run
     * @param partitionDescription describes the partition the job operates on
     * @param pt                   HDFS path of the previously exported edge list
     * @throws IOException        on messaging failure
     * @throws URISyntaxException if the configured HDFS URI is malformed
     */
    public static void dispatchPartitionedJob(GraphDatabaseService db, String type,
                                              PartitionDescription partitionDescription, Path pt)
            throws IOException, URISyntaxException {
        // Serialize processor message in partitioned mode
        ProcessorMessage message = new ProcessorMessage(pt.toString(), type, ProcessorMode.Partitioned);
        message.setPartitionDescription(partitionDescription);

        Gson gson = new Gson();
        String strMessage = gson.toJson(message);

        // Send message to the Spark graph processor
        Worker.sendMessage(strMessage);
    }

    /**
     * Exports the edge list of a single partition (nodes reachable from the
     * partition node over the group relationship at depth 1) to HDFS.
     *
     * @param partitionNode        source node whose partition is exported
     * @param partitionDescription supplies the group/target relationship names and job id
     * @return the HDFS path of the written edge list, or {@code null} when the
     *         partition has no member nodes (nothing was written)
     * @throws IOException        on HDFS write failure
     * @throws URISyntaxException if the configured HDFS URI is malformed
     */
    public static Path exportPartitionToHDFSParallel(GraphDatabaseService db, Node partitionNode,
                                                     PartitionDescription partitionDescription)
            throws IOException, URISyntaxException {
        FileSystem fs = FileUtil.getHadoopFileSystem();
        Path pt = new Path(ConfigurationLoader.getInstance().getHadoopHdfsUri()
                + EDGE_LIST_RELATIVE_FILE_PATH.replace("{job_id}", partitionDescription.getPartitionId().toString()));
        // NOTE(review): br is not closed if an exception is thrown before br.close();
        // consider try-with-resources — verify against the partial-file semantics expected downstream.
        BufferedWriter br = new BufferedWriter(new OutputStreamWriter(fs.create(pt)));

        Integer reportBlockSize = 20000;
        Transaction tx = db.beginTx();

        // Depth-1 traversal: direct members of the partition over the group relationship.
        ResourceIterable<Node> nodes = db.traversalDescription()
                .depthFirst()
                .relationships(withName(partitionDescription.getGroupRelationship()), Direction.OUTGOING)
                .evaluator(Evaluators.toDepth(1))
                .traverse(partitionNode)
                .nodes();

        if (nodes.iterator().hasNext()) {
            // NOTE(review): "Adacency" is a typo ("Adjacency") in the emitted file header;
            // kept as-is because consumers skip '#'-prefixed lines — confirm before changing.
            br.write("# Adacency list" + "\n");

            // Repeatedly trySplit the node stream into chunks for parallel writing.
            List<Spliterator<Node>> spliteratorList = new ArrayList<>();
            boolean hasSpliterator = true;
            Spliterator<Node> nodeSpliterator = nodes.spliterator();
            while (hasSpliterator) {
                Spliterator<Node> localSpliterator = nodeSpliterator.trySplit();
                hasSpliterator = localSpliterator != null;
                if (hasSpliterator)
                    spliteratorList.add(localSpliterator);
            }

            counter = 0;

            if (spliteratorList.size() > 4) {
                // Fork join
                ParallelWriter parallelWriter = new ParallelWriter<Node>(
                        spliteratorList.toArray(new Spliterator[spliteratorList.size()]),
                        new GraphWriter(0, spliteratorList.size(), br, spliteratorList.size(), reportBlockSize, db,
                                partitionDescription.getTargetRelationship()));
                ForkJoinPool pool = new ForkJoinPool();
                pool.invoke(parallelWriter);
            } else {
                // Sequential
                spliteratorList.forEach(sl -> sl.forEachRemaining(n -> {
                    try {
                        writeBlockForNode(n, db, br, reportBlockSize, partitionDescription.getTargetRelationship());
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }));
            }

            System.out.println("Mazerunner Partition Export Status: " + MessageFormat.format("{0,number,#.##%}", 1.0));

            br.flush();
            br.close();

            tx.success();
            tx.close();

            return pt;
        } else {
            // Empty partition: nothing exported.
            return null;
        }
    }

    /**
     * Runs an arbitrary Cypher query and exports its rows to HDFS, one
     * comma-separated line per row.
     *
     * @param cypherQuery    the query to execute; must return exactly the number of
     *                       columns required by {@code jobRequestType}
     * @param jobRequestType determines the expected column count per row
     * @return the HDFS path of the exported file
     * @throws RuntimeException when the query's column count does not match the job type
     */
    public static Path exportCypherQueryToHDFSParallel(GraphDatabaseService db, String cypherQuery,
                                                       JobRequestType jobRequestType)
            throws IOException, URISyntaxException {
        FileSystem fs = FileUtil.getHadoopFileSystem();
        // NOTE(review): this replaces "{job_id}" (not "/{job_id}"), leaving a double
        // slash in the path; exportSubgraphToHDFS below strips the slash — confirm
        // which form HDFS consumers expect.
        Path pt = new Path(ConfigurationLoader.getInstance().getHadoopHdfsUri()
                + EDGE_LIST_RELATIVE_FILE_PATH.replace("{job_id}", ""));
        BufferedWriter br = new BufferedWriter(new OutputStreamWriter(fs.create(pt)));

        Integer reportBlockSize = 20000;

        // Query database using cypher query from jobRequest
        try (Transaction ignored = db.beginTx();
             Result result = db.execute(cypherQuery)) {
            if (result.columns().size() != JobRequestType.parameterSize(jobRequestType)) {
                // NOTE(review): explicit close of a try-with-resources transaction means
                // it is closed again on exit; Neo4j tolerates double-close, but the call
                // is redundant — verify before removing.
                ignored.close();
                throw new RuntimeException(format("The Cypher query must return %s columns per row for %s jobs",
                        JobRequestType.parameterSize(jobRequestType), jobRequestType.toString()));
            }

            // Split the result-row stream for parallel writing.
            List<Spliterator<Map<String, Object>>> spliteratorList = new ArrayList<>();
            boolean hasSpliterator = true;
            Spliterator<Map<String, Object>> nodeSpliterator;
            nodeSpliterator = Spliterators.spliteratorUnknownSize(result, Spliterator.SIZED);
            while (hasSpliterator) {
                Spliterator<Map<String, Object>> localSpliterator = nodeSpliterator.trySplit();
                hasSpliterator = localSpliterator != null;
                if (hasSpliterator)
                    spliteratorList.add(localSpliterator);
            }

            counter = 0;

            if (spliteratorList.size() > 4) {
                // Fork join
                ParallelWriter parallelWriter = new ParallelWriter<Map<String, Object>>(
                        spliteratorList.toArray(new Spliterator[spliteratorList.size()]),
                        new CypherWriter(0, spliteratorList.size(), br, spliteratorList.size(), reportBlockSize,
                                result.columns()));
                ForkJoinPool pool = new ForkJoinPool();
                pool.invoke(parallelWriter);
            } else {
                // Sequential
                spliteratorList.forEach(sl -> sl.forEachRemaining(n -> {
                    try {
                        writeBlockForQueryResult(n, br, reportBlockSize, result.columns());
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }));
            }

            System.out.println("Mazerunner Export Status: " + MessageFormat.format("{0,number,#.##%}", 1.0));

            br.flush();
            br.close();

            return pt;
        }
    }

    /**
     * Exports every edge of the configured Mazerunner relationship type in the
     * whole graph to an HDFS edge list, splitting the node stream for parallel
     * writing when more than 4 chunks are available.
     *
     * @return the HDFS path of the written edge list
     */
    public static Path exportSubgraphToHDFSParallel(GraphDatabaseService db) throws IOException, URISyntaxException {
        FileSystem fs = FileUtil.getHadoopFileSystem();
        Path pt = new Path(ConfigurationLoader.getInstance().getHadoopHdfsUri()
                + EDGE_LIST_RELATIVE_FILE_PATH.replace("{job_id}", ""));
        BufferedWriter br = new BufferedWriter(new OutputStreamWriter(fs.create(pt)));

        Integer reportBlockSize = 20000;
        Transaction tx = db.beginTx();

        // Get all nodes in the graph
        Iterable<Node> nodes = GlobalGraphOperations.at(db)
                .getAllNodes();

        br.write("# Adacency list" + "\n");

        List<Spliterator<Node>> spliteratorList = new ArrayList<>();
        boolean hasSpliterator = true;
        Spliterator<Node> nodeSpliterator = nodes.spliterator();
        while (hasSpliterator) {
            Spliterator<Node> localSpliterator = nodeSpliterator.trySplit();
            hasSpliterator = localSpliterator != null;
            if (hasSpliterator)
                spliteratorList.add(localSpliterator);
        }

        // NOTE(review): the transaction is closed here, before the nodes are consumed
        // below; writeBlockForNode opens its own transaction per node, but the
        // spliterators were obtained under THIS transaction — confirm this is safe
        // with the Neo4j version in use.
        tx.success();
        tx.close();

        counter = 0;

        if (spliteratorList.size() > 4) {
            // Fork join
            ParallelWriter parallelWriter = new ParallelWriter<Node>(
                    spliteratorList.toArray(new Spliterator[spliteratorList.size()]),
                    new GraphWriter(0, spliteratorList.size(), br, spliteratorList.size(), reportBlockSize, db,
                            ConfigurationLoader.getInstance().getMazerunnerRelationshipType()));
            ForkJoinPool pool = new ForkJoinPool();
            pool.invoke(parallelWriter);
        } else {
            // Sequential
            spliteratorList.forEach(sl -> sl.forEachRemaining(n -> {
                try {
                    writeBlockForNode(n, db, br, reportBlockSize,
                            ConfigurationLoader.getInstance().getMazerunnerRelationshipType());
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }));
        }

        System.out.println("Mazerunner Export Status: " + MessageFormat.format("{0,number,#.##%}", 1.0));

        br.flush();
        br.close();

        return pt;
    }

    /**
     * Writes all outgoing edges of the given relationship type from node {@code n}
     * as "startId endId" lines, reporting progress every {@code reportBlockSize} records.
     * Per-relationship failures are logged and skipped so one bad edge does not
     * abort the export.
     */
    public static void writeBlockForNode(Node n, GraphDatabaseService db, BufferedWriter bufferedWriter,
                                         int reportBlockSize, String relationshipType) throws IOException {
        // NOTE(review): this transaction is never marked successful nor closed —
        // a read-only tx leak on every call. Verify and wrap in try-with-resources
        // (or tx.success()/tx.close()) when touching this code.
        Transaction tx = db.beginTx();
        Iterator<Relationship> rels = n.getRelationships(withName(relationshipType), Direction.OUTGOING).iterator();
        while (rels.hasNext()) {
            try {
                Relationship rel = rels.next();
                String line = rel.getStartNode().getId() + " " + rel.getEndNode().getId();
                bufferedWriter.write(line + "\n");
                Writer.counter++;
                if (Writer.counter % reportBlockSize == 0) {
                    // Report status
                    System.out.println("Records exported: " + Writer.counter);
                }
            } catch (Exception ex) {
                System.out.println(ex.getMessage());
            }
        }
    }

    /**
     * Writes one Cypher result row as a comma-separated line (trailing comma
     * replaced by the newline). Failures are logged and the row is skipped.
     */
    public static void writeBlockForQueryResult(Map<String, Object> row, BufferedWriter bufferedWriter,
                                                int reportBlockSize, List<String> columns) throws IOException {
        try {
            String line = "";
            for (String column : columns) {
                line += row.get(column) + ",";
            }
            // Swap the trailing comma for the line terminator.
            bufferedWriter.write(line.replaceFirst("[,]$", "\n"));
            Writer.counter++;
            if (Writer.counter % reportBlockSize == 0) {
                // Report status
                System.out.println("Records exported: " + Writer.counter);
            }
        } catch (Exception ex) {
            System.out.println(ex.getMessage());
        }
    }

    /**
     * Applies the result of the analysis as a partitioned value connecting the partition node to the target node.
     *
     * @param line             The line from the HDFS text file containing the analysis results
     *                         ("nodeId value", whitespace-separated; '#' lines are skipped).
     * @param db               The Neo4j graph database context.
     * @param reportBlockSize  The report block size for progress status.
     * @param processorMessage The processor message containing the description of the analysis.
     * @param partitionNode    The partition node that will be the source node for creating
     *                         partitioned relationships to the target node.
     */
    public static void updatePartitionBlockForRow(String line, GraphDatabaseService db, int reportBlockSize,
                                                  ProcessorMessage processorMessage, Node partitionNode) {
        if (line != null && !line.startsWith("#")) {
            String[] rowVal = line.split("\\s");
            Long nodeId = Long.parseLong(rowVal[0]);
            Double weight = Double.parseDouble(rowVal[1]);
            Node targetNode = db.getNodeById(nodeId);

            // Incoming analysis relationships at exactly depth 1 from the target node.
            Iterator<Relationship> rels = db.traversalDescription()
                    .depthFirst()
                    .relationships(withName(processorMessage.getAnalysis()), Direction.INCOMING)
                    .evaluator(Evaluators.fromDepth(1))
                    .evaluator(Evaluators.toDepth(1))
                    .traverse(targetNode)
                    .relationships()
                    .iterator();

            // Get the relationship to update
            Relationship updateRel = null;

            // Scan the relationships for one that originates at this partition node.
            while (rels.hasNext() && updateRel == null) {
                Relationship currentRel = rels.next();
                if (currentRel.getStartNode().getId() == partitionNode.getId())
                    updateRel = currentRel;
            }

            // Create or update the relationship for the analysis on the partition
            if (updateRel != null) {
                updateRel.setProperty("value", weight);
            } else {
                Relationship newRel = partitionNode.createRelationshipTo(targetNode,
                        withName(processorMessage.getAnalysis()));
                newRel.setProperty("value", weight);
            }

            Writer.updateCounter++;
            if (Writer.updateCounter % reportBlockSize == 0) {
                System.out.println("Nodes updated: " + Writer.updateCounter);
            }
        }
    }

    /**
     * Applies one analysis result line ("nodeId value") directly as a node
     * property named after the analysis. '#' comment lines are skipped.
     */
    public static void updateBlockForRow(String line, GraphDatabaseService db, int reportBlockSize, String analysis) {
        if (line != null && !line.startsWith("#")) {
            String[] rowVal = line.split("\\s");
            Long nodeId = Long.parseLong(rowVal[0]);
            Double weight = Double.parseDouble(rowVal[1]);
            db.getNodeById(nodeId).setProperty(analysis, weight);
            Writer.updateCounter++;
            if (Writer.updateCounter % reportBlockSize == 0) {
                System.out.println("Nodes updated: " + Writer.updateCounter);
            }
        }
    }

    /**
     * Reads partitioned analysis results from the given reader and applies them
     * via {@link #updatePartitionBlockForRow}, in parallel when the input splits
     * into more than 4 chunks, otherwise sequentially inside a single transaction.
     */
    public static void asyncPartitionedUpdate(BufferedReader bufferedReader, GraphDatabaseService graphDb,
                                              ProcessorMessage processorMessage) throws IOException {
        Integer reportBlockSize = 10000;
        Stream<String> iterator = bufferedReader.lines();

        List<Spliterator<String>> spliteratorList = new ArrayList<>();
        boolean hasSpliterator = true;
        Spliterator<String> nodeSpliterator = iterator.spliterator();
        while (hasSpliterator) {
            Spliterator<String> localSpliterator = nodeSpliterator.trySplit();
            hasSpliterator = localSpliterator != null;
            if (hasSpliterator)
                spliteratorList.add(localSpliterator);
        }

        counter = 0;

        if (spliteratorList.size() > 4) {
            // Fork join
            ParallelBatchTransaction parallelBatchTransaction = new ParallelBatchTransaction(
                    spliteratorList.toArray(new Spliterator[spliteratorList.size()]),
                    0, spliteratorList.size(), graphDb, reportBlockSize, spliteratorList.size(), processorMessage);
            ForkJoinPool pool = new ForkJoinPool();
            pool.invoke(parallelBatchTransaction);
        } else {
            // Sequential
            Transaction tx = graphDb.beginTx();
            Node partitionNode = graphDb.getNodeById(processorMessage.getPartitionDescription().getPartitionId());
            spliteratorList.forEach(sl -> sl.forEachRemaining(
                    n -> updatePartitionBlockForRow(n, graphDb, reportBlockSize, processorMessage, partitionNode)));
            tx.success();
            tx.close();
        }

        System.out.println("Job completed");
    }

    /**
     * Imports collaborative-filtering results (CSV rows "from,to,rank") from the
     * given reader, in parallel when the input splits into more than 4 chunks.
     */
    public static void asyncImportCollaborativeFiltering(BufferedReader bufferedReader, GraphDatabaseService graphDb)
            throws IOException {
        Integer reportBlockSize = 10000;
        Stream<String> iterator = bufferedReader.lines();

        List<Spliterator<String>> spliteratorList = new ArrayList<>();
        boolean hasSpliterator = true;
        Spliterator<String> nodeSpliterator = iterator.spliterator();
        while (hasSpliterator) {
            Spliterator<String> localSpliterator = nodeSpliterator.trySplit();
            hasSpliterator = localSpliterator != null;
            if (hasSpliterator)
                spliteratorList.add(localSpliterator);
        }

        counter = 0;

        if (spliteratorList.size() > 4) {
            // Fork join
            ParallelReader<String> parallelBatchTransaction = new ParallelReader<String>(
                    spliteratorList.toArray(new Spliterator[spliteratorList.size()]),
                    new CFBatchTransaction(0, spliteratorList.size(), bufferedReader, reportBlockSize,
                            spliteratorList.size(), graphDb));
            ForkJoinPool pool = new ForkJoinPool();
            pool.invoke(parallelBatchTransaction);
        } else {
            // Sequential
            Transaction tx = graphDb.beginTx();
            spliteratorList.forEach(sl -> sl.forEachRemaining(
                    n -> updateCollaborativeFilteringForRow(n, graphDb, reportBlockSize)));
            tx.success();
            tx.close();
        }

        System.out.println("Job completed");
    }

    /**
     * Applies one collaborative-filtering row ("from,to,rank"): keeps an existing
     * RECOMMENDATION relationship with the same rank if it already points at
     * {@code to}, deletes it if it points elsewhere, and creates a new ranked
     * relationship when none survived the scan.
     */
    public static void updateCollaborativeFilteringForRow(String line, GraphDatabaseService db, int reportBlockSize) {
        if (line != null && !line.startsWith("#")) {
            String[] rowVal = line.split(",");
            Long from = Long.parseLong(rowVal[0]);
            Long to = Long.parseLong(rowVal[1]);
            Integer rank = Integer.parseInt(rowVal[2]);
            Node fromNode = db.getNodeById(from);
            final String recommendation = "RECOMMENDATION";

            // NOTE(review): scans INCOMING recommendation relationships from fromNode,
            // while new ones are created OUTGOING below — confirm the intended direction.
            Iterator<Relationship> rels = db.traversalDescription()
                    .depthFirst()
                    .relationships(withName(recommendation), Direction.INCOMING)
                    .evaluator(Evaluators.fromDepth(1))
                    .evaluator(Evaluators.toDepth(1))
                    .traverse(fromNode)
                    .relationships()
                    .iterator();

            Relationship updateRel = null;

            // Scan the relationships for one carrying this rank.
            while (rels.hasNext()) {
                Relationship currentRel = rels.next();
                if (currentRel.hasProperty("rank") && Objects.equals(currentRel.getProperty("rank"), rank)) {
                    if (currentRel.getEndNode().getId() != to) {
                        // Stale recommendation at this rank: remove it.
                        currentRel.delete();
                    } else updateRel = currentRel;
                    break;
                }
            }

            // Create or update the relationship for the analysis on the partition
            if (updateRel == null) {
                Relationship newRel = fromNode.createRelationshipTo(db.getNodeById(to), withName(recommendation));
                newRel.setProperty("rank", rank);
            }

            Writer.updateCounter++;
            if (Writer.updateCounter % reportBlockSize == 0) {
                System.out.println("Nodes updated: " + Writer.updateCounter);
            }
        }
    }

    /**
     * Reads unpartitioned analysis results from the given reader and applies them
     * via {@link #updateBlockForRow}, in parallel when the input splits into more
     * than 4 chunks, otherwise sequentially inside one transaction.
     */
    public static void asyncUpdate(ProcessorMessage analysis, BufferedReader bufferedReader,
                                   GraphDatabaseService graphDb) throws IOException {
        Integer reportBlockSize = 10000;
        Stream<String> iterator = bufferedReader.lines();

        List<Spliterator<String>> spliteratorList = new ArrayList<>();
        boolean hasSpliterator = true;
        Spliterator<String> nodeSpliterator = iterator.spliterator();
        while (hasSpliterator) {
            Spliterator<String> localSpliterator = nodeSpliterator.trySplit();
            hasSpliterator = localSpliterator != null;
            if (hasSpliterator)
                spliteratorList.add(localSpliterator);
        }

        counter = 0;

        if (spliteratorList.size() > 4) {
            // Fork join
            ParallelBatchTransaction parallelBatchTransaction = new ParallelBatchTransaction(
                    spliteratorList.toArray(new Spliterator[spliteratorList.size()]),
                    0, spliteratorList.size(), graphDb, reportBlockSize, spliteratorList.size(), analysis);
            ForkJoinPool pool = new ForkJoinPool();
            pool.invoke(parallelBatchTransaction);
        } else {
            // Sequential
            Transaction tx = graphDb.beginTx();
            spliteratorList.forEach(sl -> sl.forEachRemaining(
                    n -> updateBlockForRow(n, graphDb, reportBlockSize, analysis.getAnalysis())));
            tx.success();
            tx.close();
        }

        System.out.println("Job completed");
    }

    /**
     * Sequential (non-fork-join) subgraph export: walks every node and writes its
     * depth-1 outgoing paths of the configured relationship type as edge-list lines.
     * Kept alongside {@link #exportSubgraphToHDFSParallel} as the single-threaded path.
     */
    public static Path exportSubgraphToHDFS(GraphDatabaseService db) throws IOException, URISyntaxException {
        FileSystem fs = FileUtil.getHadoopFileSystem();
        Path pt = new Path(ConfigurationLoader.getInstance().getHadoopHdfsUri()
                + EDGE_LIST_RELATIVE_FILE_PATH.replace("/{job_id}", ""));
        BufferedWriter br = new BufferedWriter(new OutputStreamWriter(fs.create(pt)));

        Transaction tx = db.beginTx();

        // Get all nodes in the graph
        Iterable<Node> nodes = GlobalGraphOperations.at(db)
                .getAllNodes();

        br.write("# Adacency list" + "\n");

        int nodeTotal = IteratorUtil.count(nodes);
        final int[] nodeCount = {0};
        final int[] pathCount = {0};
        int pathCountBlocks = 10000;

        // NOTE(review): 'size' is unused and re-counts the whole node iterable —
        // candidate for removal.
        int size = IteratorUtil.count(nodes.iterator());

        //System.out.println(nodes.spliterator().trySplit().estimateSize());
        // Fork join
        nodes.iterator().forEachRemaining(n -> {
            // Filter nodes by all paths connected by the relationship type described in the configuration properties
            Iterable<org.neo4j.graphdb.Path> nPaths = db.traversalDescription()
                    .depthFirst()
                    .relationships(withName(ConfigurationLoader.getInstance().getMazerunnerRelationshipType()),
                            Direction.OUTGOING)
                    .evaluator(Evaluators.fromDepth(1))
                    .evaluator(Evaluators.toDepth(1))
                    .traverse(n);

            for (org.neo4j.graphdb.Path path : nPaths) {
                try {
                    String line = path.startNode().getId() + " " + path.endNode().getId();
                    br.write(line + "\n");
                    pathCount[0]++;
                    if (pathCount[0] > pathCountBlocks) {
                        pathCount[0] = 0;
                        System.out.println("Mazerunner Export Status: "
                                + MessageFormat.format("{0,number,#%}", ((double) nodeCount[0] / (double) nodeTotal)));
                    }
                } catch (Exception ex) {
                    System.out.println(ex.getMessage());
                }
            }
            nodeCount[0]++;
        });

        System.out.println("Mazerunner Export Status: " + MessageFormat.format("{0,number,#.##%}", 1.0));

        br.flush();
        br.close();

        tx.success();
        tx.close();

        return pt;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.regionserver; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InterruptedIOException; import java.net.ConnectException; import java.net.SocketTimeoutException; import java.util.Optional; import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.NotServingRegionException; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.client.RetriesExhaustedException; import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination; import org.apache.hadoop.hbase.regionserver.SplitLogWorker.TaskExecutor.Status; import org.apache.hadoop.hbase.replication.ReplicationPeerImpl; import org.apache.hadoop.hbase.replication.ReplicationUtils; import org.apache.hadoop.hbase.replication.SyncReplicationState; import org.apache.hadoop.hbase.util.CancelableProgressable; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.ExceptionUtil; import org.apache.hadoop.hbase.util.FSUtils; 
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.wal.SyncReplicationWALProvider;
import org.apache.hadoop.hbase.wal.WALFactory;
import org.apache.hadoop.hbase.wal.WALSplitter;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This worker is spawned in every regionserver, including master. The Worker waits for log
 * splitting tasks to be put up by the {@link org.apache.hadoop.hbase.master.SplitLogManager}
 * running in the master and races with other workers in other servers to acquire those tasks.
 * The coordination is done via coordination engine.
 * <p>
 * If a worker has successfully moved the task from state UNASSIGNED to OWNED then it owns the task.
 * It keeps heartbeating the manager by periodically moving the task from UNASSIGNED to OWNED
 * state. On success it moves the task to TASK_DONE. On unrecoverable error it moves task state to
 * ERR. If it cannot continue but wants the master to retry the task then it moves the task state to
 * RESIGNED.
 * <p>
 * The manager can take a task away from a worker by moving the task from OWNED to UNASSIGNED. In
 * the absence of a global lock there is an unavoidable race here - a worker might have just finished
 * its task when it is stripped of its ownership.
Here we rely on the idempotency of the log
 * splitting task for correctness
 * @deprecated since 2.4.0 and in 3.0.0, to be removed in 4.0.0, replaced by procedure-based
 *             distributed WAL splitter, see SplitWALRemoteProcedure
 */
@Deprecated
@InterfaceAudience.Private
public class SplitLogWorker implements Runnable {

  private static final Logger LOG = LoggerFactory.getLogger(SplitLogWorker.class);

  // Thread that runs the coordination task loop; created in start().
  Thread worker;
  // thread pool which executes recovery work
  private final SplitLogWorkerCoordination coordination;
  private final RegionServerServices server;

  /**
   * Wires this worker to the coordination engine obtained from {@code hserver}
   * and registers {@code splitTaskExecutor} as the task callback.
   */
  public SplitLogWorker(Server hserver, Configuration conf, RegionServerServices server,
      TaskExecutor splitTaskExecutor) {
    this.server = server;
    this.coordination = hserver.getCoordinatedStateManager().getSplitLogWorkerCoordination();
    coordination.init(server, conf, splitTaskExecutor, this);
  }

  /**
   * Convenience constructor: the task executor delegates to {@link #splitLog}
   * with the supplied sequence-id checker and WAL factory.
   */
  public SplitLogWorker(Configuration conf, RegionServerServices server,
      LastSequenceId sequenceIdChecker, WALFactory factory) {
    this(server, conf, server, (f, p) -> splitLog(f, p, conf, server, sequenceIdChecker, factory));
  }

  // returns whether we need to continue the split work
  private static boolean processSyncReplicationWAL(String name, Configuration conf,
      RegionServerServices server, FileSystem fs, Path walDir) throws IOException {
    Path walFile = new Path(walDir, name);
    String filename = walFile.getName();
    Optional<String> optSyncPeerId =
      SyncReplicationWALProvider.getSyncReplicationPeerIdFromWALName(filename);
    if (!optSyncPeerId.isPresent()) {
      // Not a sync-replication WAL: nothing special to do, keep splitting.
      return true;
    }
    String peerId = optSyncPeerId.get();
    ReplicationPeerImpl peer =
      server.getReplicationSourceService().getReplicationPeers().getPeer(peerId);
    if (peer == null || !peer.getPeerConfig().isSyncReplication()) {
      return true;
    }
    Pair<SyncReplicationState, SyncReplicationState> stateAndNewState =
      peer.getSyncReplicationStateAndNewState();
    if (stateAndNewState.getFirst().equals(SyncReplicationState.ACTIVE) &&
      stateAndNewState.getSecond().equals(SyncReplicationState.NONE)) {
      // copy the file to remote and overwrite the previous one
      String remoteWALDir = peer.getPeerConfig().getRemoteWALDir();
      Path remoteWALDirForPeer = ReplicationUtils.getPeerRemoteWALDir(remoteWALDir, peerId);
      Path tmpRemoteWAL = new Path(remoteWALDirForPeer, filename + ".tmp");
      FileSystem remoteFs = ReplicationUtils.getRemoteWALFileSystem(conf, remoteWALDir);
      try (FSDataInputStream in = fs.open(walFile); FSDataOutputStream out =
        remoteFs.createNonRecursive(tmpRemoteWAL, true, CommonFSUtils.getDefaultBufferSize(remoteFs),
          remoteFs.getDefaultReplication(tmpRemoteWAL), remoteFs.getDefaultBlockSize(tmpRemoteWAL),
          null)) {
        IOUtils.copy(in, out);
      }
      Path toCommitRemoteWAL =
        new Path(remoteWALDirForPeer, filename + ReplicationUtils.RENAME_WAL_SUFFIX);
      // Some FileSystem implementations may not support atomic rename so we need to do it in two
      // phases
      FSUtils.renameFile(remoteFs, tmpRemoteWAL, toCommitRemoteWAL);
      FSUtils.renameFile(remoteFs, toCommitRemoteWAL, new Path(remoteWALDirForPeer, filename));
    } else if ((stateAndNewState.getFirst().equals(SyncReplicationState.ACTIVE) &&
      stateAndNewState.getSecond().equals(SyncReplicationState.STANDBY)) ||
      stateAndNewState.getFirst().equals(SyncReplicationState.STANDBY)) {
      // check whether we still need to process this file
      // actually we only write wal file which name is ended with .syncrep in A state, and after
      // transiting to a state other than A, we will reopen all the regions so the data in the wal
      // will be flushed so the wal file will be archived soon. But it is still possible that there
      // is a server crash when we are transiting from A to S, to simplify the logic of the transit
      // procedure, here we will also check the remote snapshot directory in state S, so that we do
      // not need wait until all the wal files with .syncrep suffix to be archived before finishing
      // the procedure.
      String remoteWALDir = peer.getPeerConfig().getRemoteWALDir();
      Path remoteSnapshotDirForPeer = ReplicationUtils.getPeerSnapshotWALDir(remoteWALDir, peerId);
      FileSystem remoteFs = ReplicationUtils.getRemoteWALFileSystem(conf, remoteWALDir);
      if (remoteFs.exists(new Path(remoteSnapshotDirForPeer, filename))) {
        // the file has been replayed when the remote cluster was transited from S to DA, the
        // content will be replicated back to us so give up split it.
        LOG.warn("Giveup splitting {} since it has been replayed in the remote cluster and " +
          "the content will be replicated back", filename);
        return false;
      }
    }
    return true;
  }

  /**
   * Splits one WAL file, first handling any sync-replication copy/skip logic.
   * @return Result either DONE, RESIGNED, or ERR.
   */
  static Status splitLog(String filename, CancelableProgressable p, Configuration conf,
      RegionServerServices server, LastSequenceId sequenceIdChecker, WALFactory factory) {
    Path walDir;
    FileSystem fs;
    try {
      walDir = CommonFSUtils.getWALRootDir(conf);
      fs = walDir.getFileSystem(conf);
    } catch (IOException e) {
      LOG.warn("Resigning, could not find root dir or fs", e);
      return Status.RESIGNED;
    }
    try {
      if (!processSyncReplicationWAL(filename, conf, server, fs, walDir)) {
        // Sync replication decided this file should not be split here.
        return Status.DONE;
      }
    } catch (IOException e) {
      LOG.warn("failed to process sync replication wal {}", filename, e);
      return Status.RESIGNED;
    }
    // TODO have to correctly figure out when log splitting has been
    // interrupted or has encountered a transient error and when it has
    // encountered a bad non-retry-able persistent error.
    try {
      SplitLogWorkerCoordination splitLogWorkerCoordination =
        server.getCoordinatedStateManager() == null ? null :
          server.getCoordinatedStateManager().getSplitLogWorkerCoordination();
      if (!WALSplitter.splitLogFile(walDir, fs.getFileStatus(new Path(walDir, filename)), fs, conf,
        p, sequenceIdChecker, splitLogWorkerCoordination, factory, server)) {
        return Status.PREEMPTED;
      }
    } catch (InterruptedIOException iioe) {
      LOG.warn("Resigning, interrupted splitting WAL {}", filename, iioe);
      return Status.RESIGNED;
    } catch (IOException e) {
      if (e instanceof FileNotFoundException) {
        // A wal file may not exist anymore. Nothing can be recovered so move on
        LOG.warn("Done, WAL {} does not exist anymore", filename, e);
        return Status.DONE;
      }
      Throwable cause = e.getCause();
      if (e instanceof RetriesExhaustedException && (cause instanceof NotServingRegionException ||
        cause instanceof ConnectException || cause instanceof SocketTimeoutException)) {
        LOG.warn("Resigning, can't connect to target regionserver splitting WAL {}", filename, e);
        return Status.RESIGNED;
      } else if (cause instanceof InterruptedException) {
        LOG.warn("Resigning, interrupted splitting WAL {}", filename, e);
        return Status.RESIGNED;
      }
      LOG.warn("Error splitting WAL {}", filename, e);
      return Status.ERR;
    }
    LOG.debug("Done splitting WAL {}", filename);
    return Status.DONE;
  }

  /**
   * Busy-waits for the coordination engine to become ready, then enters its task
   * loop until stopped; always unregisters the listener on exit.
   */
  @Override
  public void run() {
    try {
      LOG.info("SplitLogWorker " + server.getServerName() + " starting");
      coordination.registerListener();
      // Wait until the coordination engine is ready (or we are told to stop).
      boolean res = false;
      while (!res && !coordination.isStop()) {
        res = coordination.isReady();
      }
      if (!coordination.isStop()) {
        coordination.taskLoop();
      }
    } catch (Throwable t) {
      if (ExceptionUtil.isInterrupt(t)) {
        LOG.info("SplitLogWorker interrupted. Exiting. " + (coordination.isStop() ? "" :
          " (ERROR: exitWorker is not set, exiting anyway)"));
      } else {
        // Only a logical error can reach here; print it out to make debugging easier.
        LOG.error("unexpected error ", t);
      }
    } finally {
      coordination.removeListener();
      LOG.info("SplitLogWorker " + server.getServerName() + " exiting");
    }
  }

  /**
   * If the worker is doing a task i.e. splitting a log file then stop the task.
   * It doesn't exit the worker thread.
   */
  public void stopTask() {
    LOG.info("Sending interrupt to stop the worker thread");
    worker.interrupt(); // TODO interrupt often gets swallowed, do what else?
  }

  /**
   * start the SplitLogWorker thread
   */
  public void start() {
    worker = new Thread(null, this, "SplitLogWorker-" + server.getServerName().toShortString());
    worker.start();
  }

  /**
   * stop the SplitLogWorker thread
   */
  public void stop() {
    coordination.stopProcessingTasks();
    stopTask();
  }

  /**
   * Objects implementing this interface actually do the task that has been
   * acquired by a {@link SplitLogWorker}. Since there isn't a water-tight
   * guarantee that two workers will not be executing the same task therefore it
   * is better to have workers prepare the task and then have the
   * {@link org.apache.hadoop.hbase.master.SplitLogManager} commit the work in
   * SplitLogManager.TaskFinisher
   */
  @FunctionalInterface
  public interface TaskExecutor {
    enum Status {
      DONE(),
      ERR(),
      RESIGNED(),
      PREEMPTED()
    }
    Status exec(String name, CancelableProgressable p);
  }

  /**
   * Returns the number of tasks processed by coordination.
   * This method is used by tests only
   */
  public int getTaskReadySeq() {
    return coordination.getTaskReadySeq();
  }
}
package com.mageeyang.app.tinker.loader.util;

import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.support.annotation.Nullable;
import android.util.Log;
import com.mageeyang.kingkong.FileUtils;
import com.mageeyang.smtt.sdk.WebView;
import org.jetbrains.annotations.Contract;
import java.io.BufferedInputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.channels.FileChannel;
import java.security.MessageDigest;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.zip.ZipEntry;

/**
 * File helpers for the Tinker patch loader: path/name construction for patch
 * artifacts, MD5 comparison of patch files, and safe copy/delete/close
 * utilities.
 *
 * NOTE(review): this class appears to be decompiled output — exception flow in
 * {@link #copyFileData} and {@link #getStringInfo} is decompiler-tangled and is
 * intentionally left untouched here.
 */
public final class FileUtil {
    private static final String TAG = "FileUtil";

    @Nullable
    /**
     * Returns the loader's working directory: {@code <app-data-dir>/tinker},
     * or null when no ApplicationInfo is available.
     */
    public static File getDataDir(Context context) {
        ApplicationInfo appInfo = context.getApplicationInfo();
        if (appInfo == null) {
            return null;
        }
        return new File(appInfo.dataDir, "tinker");
    }

    /**
     * Returns true when the file name ends with ".dex"; null-safe.
     *
     * @param str file name to test, may be null
     * @return whether the name denotes a dex file
     */
    @Contract("null -> false")
    public static boolean isDex(String str) {
        if (str == null) {
            return false;
        }
        return str.endsWith(".dex");
    }

    @Contract("_ -> !null")
    /**
     * Builds the path of the patch metadata file ({@code <path>/patch.info}).
     */
    public static File createPatchInfo(String path) {
        return new File(path + "/patch.info");
    }

    /**
     * Builds the path of the patch-info lock file ({@code <path>/info.lock}).
     */
    @Contract("_ -> !null")
    public static File createInfoLock(String path) {
        return new File(path + "/info.lock");
    }

    /**
     * Derives a patch directory name ("patch-" + first 8 chars) from a
     * 32-character identifier (presumably an MD5 hex digest — TODO confirm).
     * Returns null for anything that is not exactly 32 characters.
     */
    public static String createPatchName(String str) {
        if (str == null || str.length() != 32) {
            return null;
        }
        return "patch-" + str.substring(0, 8);
    }

    /**
     * Derives the patch APK file name ("patch-XXXXXXXX.apk") from a
     * 32-character identifier; null on invalid input.
     */
    public static String createApkName(String str) {
        if (str == null || str.length() != 32) {
            return null;
        }
        return createPatchName(str) + ".apk";
    }

    /**
     * Returns true iff the string is exactly 32 characters long (the expected
     * identifier length used by createPatchName/createApkName).
     */
    public static boolean checkPathName(String str) {
        if (str == null || str.length() != 32) {
            return false;
        }
        return true;
    }

    /**
     * Best-effort delete: returns true when the file is null, absent, or was
     * deleted; on failure schedules deletion at JVM exit and returns false.
     */
    public static final boolean deleteFile(File file) {
        boolean z = true;
        if (file != null) {
            Log.v(TAG, "safeDeleteFile, try to delete path: " + file.getPath());
            if (file.exists()) {
                z = file.delete();
                if (!z) {
                    // Could not delete now; fall back to delete-on-exit.
                    Log.v(TAG, "Failed to delete file, try to delete when exit. path: " + file.getPath());
                    file.deleteOnExit();
                }
            }
        }
        return z;
    }

    /**
     * Recursively deletes a file or directory tree. Returns false only when
     * the argument is null or does not exist; individual delete failures are
     * not propagated (deleteFile is best-effort).
     */
    public static final boolean deleteFileOrDirectory(File file) {
        int i = 0;
        if (file == null || !file.exists()) {
            return false;
        }
        if (file.isFile()) {
            deleteFile(file);
        } else if (file.isDirectory()) {
            File[] listFiles = file.listFiles();
            if (listFiles != null) {
                int length = listFiles.length;
                while (i < length) {
                    deleteFileOrDirectory(listFiles[i]);
                    i++;
                }
                deleteFile(file);
            }
        }
        return true;
    }

    /**
     * Compares a file's MD5 (via getStringInfo) against the expected digest
     * string; false when either side is unavailable.
     */
    public static boolean compareFile(File file, String str) throws Throwable {
        if (str == null) {
            return false;
        }
        String E = getStringInfo(file);
        if (E != null) {
            return str.equals(E);
        }
        return false;
    }

    /**
     * Compares a digest against either the file's own MD5 (for .dex files) or
     * the MD5 of the "classes.dex" entry inside a jar/apk.
     *
     * @param file dex file or jar containing classes.dex
     * @param str  expected MD5 hex digest
     * @return true when digests match
     */
    public static boolean compareFileByDex(File file, String str) throws Throwable {
        if (file == null || str == null) {
            return false;
        }
        Object E;
        if (isDex(file.getName())) {
            E = getStringInfo(file);
        } else {
            try {
                // NOTE(review): jarFile is never closed — presumably tolerated
                // by the loader, but worth confirming.
                JarFile jarFile = new JarFile(file);
                ZipEntry entry = jarFile.getEntry("classes.dex");
                if (entry == null) {
                    return false;
                }
                E = inputStreamMd5(jarFile.getInputStream(entry));
            } catch (IOException e) {
                return false;
            }
        }
        return str.equals(E);
    }

    /**
     * Copies file to file2 via FileChannel.transferFrom, creating file2's
     * parent directories if needed. Both channels are closed on every path.
     * The variable shuffling in the catch blocks is decompiler residue and is
     * preserved verbatim.
     */
    public static void copyFileData(File file, File file2) throws Throwable {
        FileChannel inChannel;
        FileChannel outChannel;
        Throwable th;
        Closeable closeable = null;
        File parentFile = file2.getParentFile();
        if (!(parentFile == null || parentFile.exists())) {
            parentFile.mkdirs();
        }
        try {
            inChannel = new FileInputStream(file).getChannel();
            try {
                outChannel = new FileOutputStream(file2).getChannel();
            } catch (Throwable th2) {
                // Failed opening the destination: close the source and rethrow.
                th = th2;
                closeable = inChannel;
                inChannel = null;
                close(closeable);
                close(inChannel);
                throw th;
            }
            try {
                outChannel.transferFrom(inChannel, 0, inChannel.size());
                close(inChannel);
                close(outChannel);
            } catch (Throwable th3) {
                // Transfer failed: close both channels and rethrow.
                Throwable th4 = th3;
                closeable = inChannel;
                inChannel = outChannel;
                th = th4;
                close(closeable);
                close(inChannel);
                throw th;
            }
        } catch (Throwable th5) {
            th = th5;
            inChannel = null;
            close(closeable);
            close(inChannel);
            throw th;
        }
    }

    /**
     * Reads a jar entry fully and returns its bytes decoded with the platform
     * default charset.
     *
     * NOTE(review): the stream is only closed on the failure paths — on
     * successful completion bufferedInputStream is never closed (leak);
     * confirm before relying on this for many entries.
     *
     * @param jarFile  jar to read from
     * @param jarEntry entry whose contents are wanted
     * @return entry contents as a String
     */
    public static String readJarFile(JarFile jarFile, JarEntry jarEntry) throws Throwable {
        Throwable th;
        StringBuilder stringBuilder = new StringBuilder();
        BufferedInputStream bufferedInputStream;
        try {
            byte[] bArr = new byte[16384];
            int byteRead = 0;
            bufferedInputStream = new BufferedInputStream(jarFile.getInputStream(jarEntry));
            while ((byteRead = bufferedInputStream.read(bArr)) != -1) {
                try {
                    stringBuilder.append(new String(bArr, 0, byteRead));
                } catch (Throwable th2) {
                    th = th2;
                    close(bufferedInputStream);
                    throw th;
                }
            }
        } catch (Throwable th3) {
            th = th3;
            bufferedInputStream = null;
            close(bufferedInputStream);
            throw th;
        }
        return stringBuilder.toString();
    }

    /**
     * Computes the lowercase MD5 hex digest of a stream, or null on any error
     * (errors are deliberately swallowed — callers treat null as "no digest").
     * The "(b & 0xFF) + 0x100, 16 ... substring(1)" trick zero-pads each byte
     * to two hex digits.
     */
    private static String inputStreamMd5(InputStream inputStream) {
        String str = null;
        if (inputStream != null) {
            try {
                BufferedInputStream bufferedInputStream = new BufferedInputStream(inputStream);
                MessageDigest instance = MessageDigest.getInstance("MD5");
                StringBuilder stringBuilder = new StringBuilder(32);
                byte[] bArr = new byte[102400];
                while (true) {
                    int read = bufferedInputStream.read(bArr);
                    if (read == -1) {
                        break;
                    }
                    instance.update(bArr, 0, read);
                }
                byte[] digest = instance.digest();
                for (byte b : digest) {
                    stringBuilder.append(Integer.toString((b & WebView.NORMAL_MODE_ALPHA) + FileUtils.S_IRUSR, 16).substring(1));
                }
                str = stringBuilder.toString();
            } catch (Exception e) {
                // Swallowed by design: null return signals failure.
            }
        }
        return str;
    }

    /**
     * Returns the MD5 hex digest of a file, or null if the file is missing or
     * any error occurs.
     *
     * NOTE(review): decompiler residue — the stream is closed twice on the
     * success path (harmless for FileInputStream but redundant); preserved
     * verbatim.
     */
    public static String getStringInfo(File file) throws Throwable {
        FileInputStream fileInputStream;
        FileInputStream fileInputStream2;
        Throwable th;
        if (file == null || !file.exists()) {
            return null;
        }
        try {
            fileInputStream = new FileInputStream(file);
            try {
                String p = inputStreamMd5(fileInputStream);
                fileInputStream.close();
                try {
                    fileInputStream.close();
                    return p;
                } catch (IOException e) {
                    return p;
                }
            } catch (Exception e2) {
                fileInputStream2 = fileInputStream;
                if (fileInputStream2 != null) {
                    try {
                        fileInputStream2.close();
                    } catch (IOException e3) {
                    }
                }
                return null;
            } catch (Throwable th2) {
                th = th2;
                if (fileInputStream != null) {
                    try {
                        fileInputStream.close();
                    } catch (IOException e4) {
                    }
                }
                throw th;
            }
        } catch (Exception e5) {
            fileInputStream2 = null;
            if (fileInputStream2 != null) {
                fileInputStream2.close();
            }
            return null;
        } catch (Throwable th3) {
            th = th3;
            fileInputStream = null;
            if (fileInputStream != null) {
                fileInputStream.close();
            }
            throw th;
        }
    }

    /**
     * Builds the destination path for a dex extracted from {@code file}: the
     * source name with its extension replaced by (or suffixed with) ".dex",
     * resolved under directory {@code file2}.
     *
     * @param file  source file whose name is transformed
     * @param file2 destination directory
     * @return full path of the dex file inside file2
     */
    public static String createDexFileName(File file, File file2) {
        String name = file.getName();
        if (!name.endsWith(".dex")) {
            int lastIndexOf = name.lastIndexOf(".");
            if (lastIndexOf < 0) {
                name = name + ".dex";
            } else {
                StringBuilder stringBuilder = new StringBuilder(lastIndexOf + 4);
                stringBuilder.append(name, 0, lastIndexOf);
                stringBuilder.append(".dex");
                name = stringBuilder.toString();
            }
        }
        return new File(file2, name).getPath();
    }

    /**
     * Quiet close: null-safe, logs (via printStackTrace) instead of throwing.
     *
     * @param closeable resource to close, may be null
     */
    public static void close(Closeable closeable) {
        if (closeable != null) {
            try {
                closeable.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}
// Copyright 2015 Cloudera Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.cloudera.impala.analysis; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import com.cloudera.impala.authorization.Privilege; import com.cloudera.impala.authorization.PrivilegeRequestBuilder; import com.cloudera.impala.catalog.Column; import com.cloudera.impala.catalog.KuduTable; import com.cloudera.impala.catalog.Table; import com.cloudera.impala.catalog.Type; import com.cloudera.impala.common.AnalysisException; import com.cloudera.impala.common.Pair; import com.cloudera.impala.planner.DataSink; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.slf4j.LoggerFactory; import static java.lang.String.format; /** * Abstract super class for statements that modify existing data like * UPDATE and DELETE. * * The ModifyStmt has four major parts: * - targetTablePath (not null) * - fromClause (not null) * - assignmentExprs (not null, can be empty) * - wherePredicate (nullable) * * In the analysis phase, a SelectStmt is created with the result expressions set to * match the right-hand side of the assignments in addition to projecting the key columns * of the underlying table. 
During query execution, the plan that * is generated from this SelectStmt produces all rows that need to be modified. * * Currently, only Kudu tables can be modified. */ public abstract class ModifyStmt extends StatementBase { private final static org.slf4j.Logger LOG = LoggerFactory.getLogger(ModifyStmt.class); // List of explicitly mentioned assignment expressions in the UPDATE's SET clause protected final List<Pair<SlotRef, Expr>> assignments_; // Optional WHERE clause of the statement protected final Expr wherePredicate_; // Path identifying the target table. protected final List<String> targetTablePath_; // TableRef identifying the target table, set during analysis. protected TableRef targetTableRef_; protected FromClause fromClause_; // Result of the analysis of the internal SelectStmt that produces the rows that // will be modified. protected SelectStmt sourceStmt_; // Target Kudu table. Since currently only Kudu tables are supported, we use a // concrete table class. Result of analysis. protected KuduTable table_; // Position mapping of output expressions of the sourceStmt_ to column indices in the // target table. The i'th position in this list maps to the referencedColumns_[i]'th // position in the target table. Set in createSourceStmt() during analysis. protected ArrayList<Integer> referencedColumns_; // END: Members that need to be reset() ///////////////////////////////////////// // On tables with a primary key, ignore key not found errors. 
protected final boolean ignoreNotFound_; public ModifyStmt(List<String> targetTablePath, FromClause fromClause, List<Pair<SlotRef, Expr>> assignmentExprs, Expr wherePredicate, boolean ignoreNotFound) { targetTablePath_ = Preconditions.checkNotNull(targetTablePath); fromClause_ = Preconditions.checkNotNull(fromClause); assignments_ = Preconditions.checkNotNull(assignmentExprs); wherePredicate_ = wherePredicate; ignoreNotFound_ = ignoreNotFound; } /** * The analysis of the ModifyStmt proceeds as follows: First, the FROM clause is * analyzed and the targetTablePath is verified to be a valid alias into the FROM * clause. When the target table is identified, the assignment expressions are * validated and as a last step the internal SelectStmt is produced and analyzed. * Potential query rewrites for the select statement are implemented here and are not * triggered externally by the statement rewriter. */ @Override public void analyze(Analyzer analyzer) throws AnalysisException { super.analyze(analyzer); fromClause_.analyze(analyzer); List<Path> candidates = analyzer.getTupleDescPaths(targetTablePath_); if (candidates.isEmpty()) { throw new AnalysisException(format("'%s' is not a valid table alias or reference.", Joiner.on(".").join(targetTablePath_))); } Preconditions.checkState(candidates.size() == 1); Path path = candidates.get(0); path.resolve(); if (path.destTupleDesc() == null) { throw new AnalysisException(format( "'%s' is not a table alias. 
Using the FROM clause requires the target table " + "to be a table alias.", Joiner.on(".").join(targetTablePath_))); } targetTableRef_ = analyzer.getTableRef(path.getRootDesc().getId()); if (targetTableRef_ instanceof InlineViewRef) { throw new AnalysisException(format("Cannot modify view: '%s'", targetTableRef_.toSql())); } Preconditions.checkNotNull(targetTableRef_); Table dstTbl = targetTableRef_.getTable(); // Only Kudu tables can be updated if (!(dstTbl instanceof KuduTable)) { throw new AnalysisException( format("Impala does not support modifying a non-Kudu table: %s", dstTbl.getFullName())); } table_ = (KuduTable) dstTbl; // Make sure that the user is allowed to modify the target table, since no // UPDATE / DELETE privilege exists, we reuse the INSERT one. analyzer.registerPrivReq(new PrivilegeRequestBuilder() .onTable(table_.getDb().getName(), table_.getName()) .allOf(Privilege.INSERT).toRequest()); // Validates the assignments_ and creates the sourceStmt_. if (sourceStmt_ == null) createSourceStmt(analyzer); sourceStmt_.analyze(analyzer); } @Override public void reset() { super.reset(); fromClause_.reset(); if (sourceStmt_ != null) sourceStmt_.reset(); table_ = null; referencedColumns_.clear(); } /** * Builds and validates the sourceStmt_. The select list of the sourceStmt_ contains * first the SlotRefs for the key Columns, followed by the expressions representing the * assignments. This method sets the member variables for the sourceStmt_ and the * referencedColumns_. * * This is only run once, on the first analysis. Following analysis will reset() and * reuse previously created statements. */ private void createSourceStmt(Analyzer analyzer) throws AnalysisException { // Builds the select list and column position mapping for the target table. 
ArrayList<SelectListItem> selectList = Lists.newArrayList(); referencedColumns_ = Lists.newArrayList(); buildAndValidateAssignmentExprs(analyzer, selectList, referencedColumns_); // Analyze the generated select statement. sourceStmt_ = new SelectStmt(new SelectList(selectList), fromClause_, wherePredicate_, null, null, null, null); // cast result expressions to the correct type of the referenced slot of the // target table int keyColumnsOffset = table_.getKuduKeyColumnNames().size(); for (int i = keyColumnsOffset; i < sourceStmt_.resultExprs_.size(); ++i) { sourceStmt_.resultExprs_.set(i, sourceStmt_.resultExprs_.get(i).castTo( assignments_.get(i - keyColumnsOffset).first.getType())); } } /** * Validates the list of value assignments that should be used to modify the target * table. It verifies that only those columns are referenced that belong to the target * table, no key columns are modified, and that a single column is not modified multiple * times. Analyzes the Exprs and SlotRefs of assignments_ and writes a list of * SelectListItems to the out parameter selectList that is used to build the select list * for sourceStmt_. A list of integers indicating the column position of an entry in the * select list in the target table is written to the out parameter referencedColumns. * * In addition to the expressions that are generated for each assignment, the * expression list contains an expression for each key column. The key columns * are always prepended to the list of expression representing the assignments. 
*/ private void buildAndValidateAssignmentExprs(Analyzer analyzer, ArrayList<SelectListItem> selectList, ArrayList<Integer> referencedColumns) throws AnalysisException { // The order of the referenced columns equals the order of the result expressions HashSet<SlotId> uniqueSlots = Sets.newHashSet(); HashSet<SlotId> keySlots = Sets.newHashSet(); // Mapping from column name to index ArrayList<Column> cols = table_.getColumnsInHiveOrder(); HashMap<String, Integer> colIndexMap = Maps.newHashMap(); for (int i = 0; i < cols.size(); i++) { colIndexMap.put(cols.get(i).getName(), i); } // Add the key columns as slot refs for (String k : table_.getKuduKeyColumnNames()) { ArrayList<String> path = Path.createRawPath(targetTableRef_.getUniqueAlias(), k); SlotRef ref = new SlotRef(path); ref.analyze(analyzer); selectList.add(new SelectListItem(ref, null)); uniqueSlots.add(ref.getSlotId()); keySlots.add(ref.getSlotId()); referencedColumns.add(colIndexMap.get(k)); } // Assignments are only used in the context of updates. 
for (Pair<SlotRef, Expr> valueAssignment : assignments_) { Expr rhsExpr = valueAssignment.second; rhsExpr.analyze(analyzer); SlotRef lhsSlotRef = valueAssignment.first; lhsSlotRef.analyze(analyzer); // Correct target table if (!lhsSlotRef.isBoundByTupleIds(targetTableRef_.getId().asList())) { throw new AnalysisException( format("Left-hand side column '%s' in assignment expression '%s=%s' does not " + "belong to target table '%s'", lhsSlotRef.toSql(), lhsSlotRef.toSql(), rhsExpr.toSql(), targetTableRef_.getDesc().getTable().getFullName())); } // No subqueries for rhs expression if (rhsExpr.contains(Subquery.class)) { throw new AnalysisException( format("Subqueries are not supported as update expressions for column '%s'", lhsSlotRef.toSql())); } Column c = lhsSlotRef.getResolvedPath().destColumn(); // TODO(Kudu) Add test for this code-path when Kudu supports nested types if (c == null) { throw new AnalysisException( format("Left-hand side in assignment expression '%s=%s' must be a column " + "reference", lhsSlotRef.toSql(), rhsExpr.toSql())); } if (keySlots.contains(lhsSlotRef.getSlotId())) { throw new AnalysisException(format("Key column '%s' cannot be updated.", lhsSlotRef.toSql())); } if (uniqueSlots.contains(lhsSlotRef.getSlotId())) { throw new AnalysisException( format("Duplicate value assignment to column: '%s'", lhsSlotRef.toSql())); } rhsExpr = checkTypeCompatibility(c, rhsExpr); uniqueSlots.add(lhsSlotRef.getSlotId()); selectList.add(new SelectListItem(rhsExpr, null)); referencedColumns.add(colIndexMap.get(c.getName())); } } /** * Checks for type compatibility of column and expr. * Returns compatible (possibly cast) expr. * TODO(kudu-merge) Find a way to consolidate this with * InsertStmt#checkTypeCompatibility() */ private Expr checkTypeCompatibility(Column column, Expr expr) throws AnalysisException { // Check for compatible type, and add casts to the selectListExprs if necessary. // We don't allow casting to a lower precision type. 
Type colType = column.getType(); Type exprType = expr.getType(); // Trivially compatible, unless the type is complex. if (colType.equals(exprType) && !colType.isComplexType()) return expr; Type compatibleType = Type.getAssignmentCompatibleType(colType, exprType, false); // Incompatible types. if (!compatibleType.isValid()) { throw new AnalysisException( format("Target table '%s' is incompatible with source expressions.\nExpression " + "'%s' (type: %s) is not compatible with column '%s' (type: %s)", targetTableRef_.getDesc().getTable().getFullName(), expr.toSql(), exprType.toSql(), column.getName(), colType.toSql())); } // Loss of precision when inserting into the table. if (!compatibleType.equals(colType) && !compatibleType.isNull()) { throw new AnalysisException( format("Possible loss of precision for target table '%s'.\n" + "Expression '%s' (type: %s) would need to be cast to %s" + " for column '%s'", targetTableRef_.getDesc().getTable().getFullName(), expr.toSql(), exprType.toSql(), colType.toSql(), column.getName())); } // Add a cast to the selectListExpr to the higher type. return expr.castTo(compatibleType); } public QueryStmt getQueryStmt() { return sourceStmt_; } public abstract DataSink createDataSink(); public abstract String toSql(); }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.sql; import org.apache.calcite.adapter.enumerable.EnumUtils; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeFactory; import org.apache.calcite.rel.type.RelDataTypeFactoryImpl; import org.apache.calcite.runtime.CalciteException; import org.apache.calcite.runtime.Resources; import org.apache.calcite.sql.validate.SqlMonotonicity; import org.apache.calcite.sql.validate.SqlValidatorException; import org.apache.calcite.util.NlsString; import org.checkerframework.checker.nullness.qual.Nullable; import java.util.AbstractList; import java.util.List; /** * <code>SqlOperatorBinding</code> represents the binding of an * {@link SqlOperator} to actual operands, along with any additional information * required to validate those operands if needed. */ public abstract class SqlOperatorBinding { //~ Instance fields -------------------------------------------------------- protected final RelDataTypeFactory typeFactory; private final SqlOperator sqlOperator; //~ Constructors ----------------------------------------------------------- /** * Creates a SqlOperatorBinding. 
* * @param typeFactory Type factory * @param sqlOperator Operator which is subject of this call */ protected SqlOperatorBinding( RelDataTypeFactory typeFactory, SqlOperator sqlOperator) { this.typeFactory = typeFactory; this.sqlOperator = sqlOperator; } //~ Methods ---------------------------------------------------------------- /** * If the operator call occurs in an aggregate query, returns the number of * columns in the GROUP BY clause. For example, for "SELECT count(*) FROM emp * GROUP BY deptno, gender", returns 2. * * <p>Returns 0 if the query is implicitly "GROUP BY ()" because of an * aggregate expression. For example, "SELECT sum(sal) FROM emp".</p> * * <p>Returns -1 if the query is not an aggregate query.</p> */ public int getGroupCount() { return -1; } /** * Returns whether the operator is an aggregate function with a filter. */ public boolean hasFilter() { return false; } /** Returns the bound operator. */ public SqlOperator getOperator() { return sqlOperator; } /** Returns the factory for type creation. */ public RelDataTypeFactory getTypeFactory() { return typeFactory; } /** * Gets the string value of a string literal operand. * * @param ordinal zero-based ordinal of operand of interest * @return string value */ @Deprecated // to be removed before 2.0 public @Nullable String getStringLiteralOperand(int ordinal) { throw new UnsupportedOperationException(); } /** * Gets the integer value of a numeric literal operand. * * @param ordinal zero-based ordinal of operand of interest * @return integer value */ @Deprecated // to be removed before 2.0 public int getIntLiteralOperand(int ordinal) { throw new UnsupportedOperationException(); } /** * Gets the value of a literal operand. * * <p>Cases: * <ul> * <li>If the operand is not a literal, the value is null. * * <li>If the operand is a string literal, * the value will be of type {@link org.apache.calcite.util.NlsString}. 
* * <li>If the operand is a numeric literal, * the value will be of type {@link java.math.BigDecimal}. * * <li>If the operand is an interval qualifier, * the value will be of type {@link SqlIntervalQualifier}</li> * * <li>Otherwise the type is undefined, and the value may be null. * </ul> * * @param ordinal zero-based ordinal of operand of interest * @param clazz Desired valued type * * @return value of operand */ public <T extends Object> @Nullable T getOperandLiteralValue(int ordinal, Class<T> clazz) { throw new UnsupportedOperationException(); } /** * Gets the value of a literal operand as a Calcite type. * * @param ordinal zero-based ordinal of operand of interest * @param type Desired valued type * * @return value of operand */ public @Nullable Object getOperandLiteralValue(int ordinal, RelDataType type) { if (!(type instanceof RelDataTypeFactoryImpl.JavaType)) { return null; } final Class<?> clazz = ((RelDataTypeFactoryImpl.JavaType) type).getJavaClass(); final Object o = getOperandLiteralValue(ordinal, Object.class); if (o == null) { return null; } if (clazz.isInstance(o)) { return clazz.cast(o); } final Object o2 = o instanceof NlsString ? ((NlsString) o).getValue() : o; return EnumUtils.evaluate(o2, clazz); } @Deprecated // to be removed before 2.0 public @Nullable Comparable getOperandLiteralValue(int ordinal) { return getOperandLiteralValue(ordinal, Comparable.class); } /** * Determines whether a bound operand is NULL. * * <p>This is only relevant for SQL validation. * * @param ordinal zero-based ordinal of operand of interest * @param allowCast whether to regard CAST(constant) as a constant * @return whether operand is null; false for everything except SQL * validation */ public boolean isOperandNull(int ordinal, boolean allowCast) { throw new UnsupportedOperationException(); } /** * Determines whether an operand is a literal. 
* * @param ordinal zero-based ordinal of operand of interest * @param allowCast whether to regard CAST(literal) as a literal * @return whether operand is literal */ public boolean isOperandLiteral(int ordinal, boolean allowCast) { throw new UnsupportedOperationException(); } /** Returns the number of bound operands. */ public abstract int getOperandCount(); /** * Gets the type of a bound operand. * * @param ordinal zero-based ordinal of operand of interest * @return bound operand type */ public abstract RelDataType getOperandType(int ordinal); /** * Gets the monotonicity of a bound operand. * * @param ordinal zero-based ordinal of operand of interest * @return monotonicity of operand */ public SqlMonotonicity getOperandMonotonicity(int ordinal) { return SqlMonotonicity.NOT_MONOTONIC; } /** * Collects the types of the bound operands into a list. * * @return collected list */ public List<RelDataType> collectOperandTypes() { return new AbstractList<RelDataType>() { @Override public RelDataType get(int index) { return getOperandType(index); } @Override public int size() { return getOperandCount(); } }; } /** * Returns the rowtype of the <code>ordinal</code>th operand, which is a * cursor. * * <p>This is only implemented for {@link SqlCallBinding}. * * @param ordinal Ordinal of the operand * @return Rowtype of the query underlying the cursor */ public @Nullable RelDataType getCursorOperand(int ordinal) { throw new UnsupportedOperationException(); } /** * Retrieves information about a column list parameter. 
* * @param ordinal ordinal position of the column list parameter * @param paramName name of the column list parameter * @param columnList returns a list of the column names that are referenced * in the column list parameter * @return the name of the parent cursor referenced by the column list * parameter if it is a column list parameter; otherwise, null is returned */ public @Nullable String getColumnListParamInfo( int ordinal, String paramName, List<String> columnList) { throw new UnsupportedOperationException(); } /** * Wraps a validation error with context appropriate to this operator call. * * @param e Validation error, not null * @return Error wrapped, if possible, with positional information */ public abstract CalciteException newError( Resources.ExInst<SqlValidatorException> e); }
/*
 * Copyright 2015-2016 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.hawkular.inventory.impl.tinkerpop.spi;

import static org.hawkular.inventory.impl.tinkerpop.spi.Constants.Property.__contentHash;
import static org.hawkular.inventory.impl.tinkerpop.spi.Constants.Property.__identityHash;
import static org.hawkular.inventory.impl.tinkerpop.spi.Constants.Property.__metric_data_type;
import static org.hawkular.inventory.impl.tinkerpop.spi.Constants.Property.__metric_interval;
import static org.hawkular.inventory.impl.tinkerpop.spi.Constants.Property.__sourceCp;
import static org.hawkular.inventory.impl.tinkerpop.spi.Constants.Property.__sourceEid;
import static org.hawkular.inventory.impl.tinkerpop.spi.Constants.Property.__sourceType;
import static org.hawkular.inventory.impl.tinkerpop.spi.Constants.Property.__structuredDataIndex;
import static org.hawkular.inventory.impl.tinkerpop.spi.Constants.Property.__structuredDataKey;
import static org.hawkular.inventory.impl.tinkerpop.spi.Constants.Property.__structuredDataType;
import static org.hawkular.inventory.impl.tinkerpop.spi.Constants.Property.__structuredDataValue_b;
import static org.hawkular.inventory.impl.tinkerpop.spi.Constants.Property.__structuredDataValue_f;
import static org.hawkular.inventory.impl.tinkerpop.spi.Constants.Property.__structuredDataValue_i;
import static org.hawkular.inventory.impl.tinkerpop.spi.Constants.Property.__structuredDataValue_s;
import static org.hawkular.inventory.impl.tinkerpop.spi.Constants.Property.__syncHash;
import static org.hawkular.inventory.impl.tinkerpop.spi.Constants.Property.__targetCp;
import static org.hawkular.inventory.impl.tinkerpop.spi.Constants.Property.__targetEid;
import static org.hawkular.inventory.impl.tinkerpop.spi.Constants.Property.__targetType;
import static org.hawkular.inventory.impl.tinkerpop.spi.Constants.Property.__unit;
import static org.hawkular.inventory.impl.tinkerpop.spi.Constants.Property.name;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.stream.Collectors;

import org.hawkular.inventory.api.model.AbstractElement;
import org.hawkular.inventory.api.model.DataEntity;
import org.hawkular.inventory.api.model.ElementVisitor;
import org.hawkular.inventory.api.model.Environment;
import org.hawkular.inventory.api.model.Feed;
import org.hawkular.inventory.api.model.MetadataPack;
import org.hawkular.inventory.api.model.Metric;
import org.hawkular.inventory.api.model.MetricType;
import org.hawkular.inventory.api.model.OperationType;
import org.hawkular.inventory.api.model.Relationship;
import org.hawkular.inventory.api.model.Resource;
import org.hawkular.inventory.api.model.ResourceType;
import org.hawkular.inventory.api.model.StructuredData;
import org.hawkular.inventory.api.model.Tenant;
import org.hawkular.inventory.paths.ElementTypeVisitor;
import org.hawkular.inventory.paths.SegmentType;

/**
 * @author Lukas Krejci
 * @since 0.0.1
 */
public final class Constants {

    private Constants() {
        //no instances, thank you
    }

    /**
     * The vertices in the graph have certain well-known properties. The __foo form is used internally to decrease the
     * chance of collision with any user defined properties. However, for sorting purposes it's quite cumbersome to use
     * it directly, so the property can have also the user-friendly name (sortName).
     */
    public enum Property {
        /**
         * The user-defined human-readable name of the entity. We don't use the "__" prefix here as with the rest of
         * the properties, because this is not really hidden.
         */
        name(String.class),

        /**
         * This is the name of the property that we use to store the type of the entity represented by the vertex
         */
        __type("type", String.class),

        /**
         * This is the name of the property that we use to store the user-defined ID of the entity represented by the
         * vertex. These ID are required to be unique only amongst their "siblings" as determined by the "contains"
         * hierarchy.
         */
        __eid("id", String.class),

        /**
         * Present on metric type, this is the name of the property that we use to store the unit of the metric type
         * represented by the vertex.
         */
        __unit("unit", String.class),

        /**
         * Property used on metric type to distinguish type of metric e.g. gauge, counter...
         */
        __metric_data_type("metricDataType", String.class),

        /**
         * Property used to store interval in seconds at which metrics are collected
         */
        __metric_interval("collectionInterval", Long.class),

        /**
         * Property used to store the canonical path of an element.
         */
        __cp("path", String.class),

        /**
         * The type of the data stored by the structured data vertex
         */
        __structuredDataType(String.class),

        /**
         * The key using which a structured data value is stored in a map.
         */
        __structuredDataKey(String.class),

        /**
         * The index on which a structured data value is stored in a list.
         */
        __structuredDataIndex(Integer.class),

        /**
         * The name of the property on the structured data vertex that holds the primitive value of that vertex.
         * List and maps don't hold the value directly but instead have edges going out to the child vertices.
         */
        __structuredDataValue_b(Boolean.class),
        __structuredDataValue_i(Long.class),
        __structuredDataValue_f(Double.class),
        __structuredDataValue_s(String.class),

        // Relationship-edge mirror properties: type/canonical-path/entity-id of
        // the edge's source and target vertices.
        __sourceType("sourceType", String.class),
        __targetType("targetType", String.class),
        __sourceCp("source", String.class),
        __targetCp("target", String.class),
        __sourceEid(String.class),
        __targetEid(String.class),

        // Hashes used for entity identity / content / synchronization tracking.
        __identityHash("identityHash", String.class),
        __targetIdentityHash(String.class),
        __contentHash("contentHash", String.class),
        __syncHash("syncHash", String.class);

        // User-friendly alias used for sorting; null when the property has none.
        private final String sortName;
        // Java type of the values stored under this property.
        private final Class<?> propertyType;

        Property(Class<?> type) {
            this(null, type);
        }

        Property(String sortName, Class<?> type) {
            this.sortName = sortName;
            this.propertyType = type;
        }

        public String getSortName() {
            return sortName;
        }

        public Class<?> getPropertyType() {
            return propertyType;
        }

        // Properties that are duplicated ("mirrored") onto edges.
        private static final HashSet<String> MIRRORED_PROPERTIES =
                new HashSet<>(Arrays.asList(__type.name(), __eid.name(), __cp.name()));

        // Reverse lookup from sortName to the internal property name.
        // NOTE(review): not final — it is reassigned in the static initializer;
        // it is effectively read-only after class load, but a final field
        // assigned in the static block would express that better. Confirm no
        // other writer exists before changing.
        private static Map<String, String> NAME_TO_PROPERTY = new HashMap<>();

        static {
            try {
                NAME_TO_PROPERTY = Collections.unmodifiableMap(Arrays.asList(values()).stream()
                        .filter(prop -> prop.getSortName() != null)
                        .collect(Collectors.toMap(Property::getSortName, Property::name)));
            } catch (Exception e) {
                // this may happen if there is a duplicate key when doing Collectors.toMap -> duplicated sortName
                // it's better to swallow the exception and let the backend initialize properly
                Log.LOG.error("Unable to initialize Constants.Property enum: " + e.getMessage());
            }
        }

        /**
         * Maps a user-facing (sort) name to the internal property name, or
         * returns the input unchanged when no mapping exists.
         */
        public static String mapUserDefined(String property) {
            if (NAME_TO_PROPERTY.get(property) != null) {
                return NAME_TO_PROPERTY.get(property);
            }
            return property;
        }

        public static boolean isMirroredInEdges(String property) {
            return MIRRORED_PROPERTIES.contains(property);
        }
    }

    /**
     * The type of entities known to Hawkular.
*/ public enum Type { tenant(Tenant.class, name, __contentHash), environment(Environment.class, name, __contentHash), feed(Feed.class, name, __identityHash, __contentHash, __syncHash), resourceType(ResourceType.class, name, __identityHash, __contentHash, __syncHash), metricType(MetricType.class, name, __unit, __metric_data_type, __metric_interval, __identityHash, __contentHash, __syncHash), operationType(OperationType.class, name, __identityHash, __contentHash, __syncHash), resource(Resource.class, name, __identityHash, __contentHash, __syncHash), metric(Metric.class, name, __metric_interval, __identityHash, __contentHash, __syncHash), metadatapack(MetadataPack.class, name), relationship(Relationship.class, __sourceType, __targetType, __sourceCp, __targetCp, __sourceEid, __targetEid), dataEntity(DataEntity.class, name, __identityHash, __contentHash, __syncHash), structuredData(StructuredData.class, __structuredDataType, __structuredDataValue_b, __structuredDataValue_i, __structuredDataValue_f, __structuredDataValue_s, __structuredDataIndex, __structuredDataKey); private final String[] mappedProperties; private final Class<?> entityType; Type(Class<?> entityType, Property... 
mappedProperties) { this.entityType = entityType; this.mappedProperties = new String[mappedProperties.length + 3]; Arrays.setAll(this.mappedProperties, i -> { switch (i) { case 0: return Property.__type.name(); case 1: return Property.__eid.name(); case 2: return Property.__cp.name(); default: return mappedProperties[i - 3].name(); } }); } public static Type of(AbstractElement<?, ?> e) { return e.accept(new ElementVisitor<Type, Void>() { @Override public Type visitRelationship(Relationship relationship, Void parameter) { return Type.relationship; } @Override public Type visitTenant(Tenant tenant, Void parameter) { return Type.tenant; } @Override public Type visitEnvironment(Environment environment, Void parameter) { return Type.environment; } @Override public Type visitFeed(Feed feed, Void parameter) { return Type.feed; } @Override public Type visitMetric(Metric metric, Void parameter) { return Type.metric; } @Override public Type visitMetricType(MetricType definition, Void parameter) { return Type.metricType; } @Override public Type visitResource(Resource resource, Void parameter) { return Type.resource; } @Override public Type visitResourceType(ResourceType type, Void parameter) { return Type.resourceType; } @Override public Type visitData(DataEntity data, Void parameter) { return Type.dataEntity; } @Override public Type visitOperationType(OperationType operationType, Void parameter) { return Type.operationType; } @Override public Type visitMetadataPack(MetadataPack metadataPack, Void parameter) { return Type.metadatapack; } @Override public Type visitUnknown(Object entity, Void parameter) { return null; } }, null); } public static Type of(Class<?> ec) { return of(AbstractElement.segmentTypeFromType(ec)); } public static Type of(SegmentType st) { return ElementTypeVisitor.accept(st, new ElementTypeVisitor<Type, Void>() { @Override public Type visitTenant(Void parameter) { return tenant; } @Override public Type visitEnvironment(Void parameter) { return 
environment; } @Override public Type visitFeed(Void parameter) { return feed; } @Override public Type visitMetric(Void parameter) { return metric; } @Override public Type visitMetricType(Void parameter) { return metricType; } @Override public Type visitResource(Void parameter) { return resource; } @Override public Type visitResourceType(Void parameter) { return resourceType; } @Override public Type visitRelationship(Void parameter) { return relationship; } @Override public Type visitData(Void parameter) { return dataEntity; } @Override public Type visitOperationType(Void parameter) { return operationType; } @Override public Type visitMetadataPack(Void parameter) { return Type.metadatapack; } @Override public Type visitUnknown(Void parameter) { if (st == SegmentType.sd) { return structuredData; } throw new IllegalArgumentException("Unsupported entity type " + st); } }, null); } public Class<?> getEntityType() { return entityType; } /** * @return list of properties that are explicitly mapped to entity class properties. */ public String[] getMappedProperties() { return mappedProperties; } } public enum InternalEdge { __withIdentityHash, __containsIdentityHash } public enum InternalType { __identityHash } }
/******************************************************************************* * Copyright 2011, 2012 Chris Banes. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *******************************************************************************/ package com.stone.EasyTeaching.utilities.pulltorefresh; import android.content.Context; import android.content.res.TypedArray; import android.graphics.drawable.Drawable; import android.os.Build.VERSION; import android.os.Build.VERSION_CODES; import android.os.Bundle; import android.os.Parcelable; import android.util.AttributeSet; import android.util.Log; import android.view.*; import android.view.animation.DecelerateInterpolator; import android.view.animation.Interpolator; import android.widget.FrameLayout; import android.widget.LinearLayout; import com.stone.EasyTeaching.R; import com.stone.EasyTeaching.utilities.pulltorefresh.internal.*; public abstract class PullToRefreshBase<T extends View> extends LinearLayout implements IPullToRefresh<T> { // =========================================================== // Constants // =========================================================== static final boolean DEBUG = true; static final boolean USE_HW_LAYERS = false; static final String LOG_TAG = "PullToRefresh"; static final float FRICTION = 2.0f; public static final int SMOOTH_SCROLL_DURATION_MS = 200; public static final int SMOOTH_SCROLL_LONG_DURATION_MS = 325; static final int DEMO_SCROLL_INTERVAL = 225; static final String STATE_STATE = 
"ptr_state"; static final String STATE_MODE = "ptr_mode"; static final String STATE_CURRENT_MODE = "ptr_current_mode"; static final String STATE_SCROLLING_REFRESHING_ENABLED = "ptr_disable_scrolling"; static final String STATE_SHOW_REFRESHING_VIEW = "ptr_show_refreshing_view"; static final String STATE_SUPER = "ptr_super"; // =========================================================== // Fields // =========================================================== private int mTouchSlop; private float mLastMotionX, mLastMotionY; private float mInitialMotionX, mInitialMotionY; private boolean mIsBeingDragged = false; private State mState = State.RESET; private Mode mMode = Mode.getDefault(); private Mode mCurrentMode; T mRefreshableView; private FrameLayout mRefreshableViewWrapper; private boolean mShowViewWhileRefreshing = true; private boolean mScrollingWhileRefreshingEnabled = false; private boolean mFilterTouchEvents = true; private boolean mOverScrollEnabled = true; private boolean mLayoutVisibilityChangesEnabled = true; private Interpolator mScrollAnimationInterpolator; private AnimationStyle mLoadingAnimationStyle = AnimationStyle.getDefault(); private LoadingLayout mHeaderLayout; private LoadingLayout mFooterLayout; private OnRefreshListener<T> mOnRefreshListener; private OnRefreshListener2<T> mOnRefreshListener2; private OnPullEventListener<T> mOnPullEventListener; private SmoothScrollRunnable mCurrentSmoothScrollRunnable; // =========================================================== // Constructors // =========================================================== public PullToRefreshBase(Context context) { super(context); init(context, null); } public PullToRefreshBase(Context context, AttributeSet attrs) { super(context, attrs); init(context, attrs); } public PullToRefreshBase(Context context, Mode mode) { super(context); mMode = mode; init(context, null); } public PullToRefreshBase(Context context, Mode mode, AnimationStyle animStyle) { super(context); mMode = 
mode; mLoadingAnimationStyle = animStyle; init(context, null); } @Override public void addView(View child, int index, ViewGroup.LayoutParams params) { if (DEBUG) { Log.d(LOG_TAG, "addView: " + child.getClass().getSimpleName()); } final T refreshableView = getRefreshableView(); if (refreshableView instanceof ViewGroup) { ((ViewGroup) refreshableView).addView(child, index, params); } else { throw new UnsupportedOperationException("Refreshable View is not a ViewGroup so can't addView"); } } @Override public final boolean demo() { if (mMode.showHeaderLoadingLayout() && isReadyForPullStart()) { smoothScrollToAndBack(-getHeaderSize() * 2); return true; } else if (mMode.showFooterLoadingLayout() && isReadyForPullEnd()) { smoothScrollToAndBack(getFooterSize() * 2); return true; } return false; } @Override public final Mode getCurrentMode() { return mCurrentMode; } @Override public final boolean getFilterTouchEvents() { return mFilterTouchEvents; } @Override public final ILoadingLayout getLoadingLayoutProxy() { return getLoadingLayoutProxy(true, true); } @Override public final ILoadingLayout getLoadingLayoutProxy(boolean includeStart, boolean includeEnd) { return createLoadingLayoutProxy(includeStart, includeEnd); } @Override public final Mode getMode() { return mMode; } @Override public final T getRefreshableView() { return mRefreshableView; } @Override public final boolean getShowViewWhileRefreshing() { return mShowViewWhileRefreshing; } @Override public final State getState() { return mState; } /** * @deprecated See {@link #isScrollingWhileRefreshingEnabled()}. 
*/ public final boolean isDisableScrollingWhileRefreshing() { return !isScrollingWhileRefreshingEnabled(); } @Override public final boolean isPullToRefreshEnabled() { return mMode.permitsPullToRefresh(); } @Override public final boolean isPullToRefreshOverScrollEnabled() { return VERSION.SDK_INT >= VERSION_CODES.GINGERBREAD && mOverScrollEnabled && OverscrollHelper.isAndroidOverScrollEnabled(mRefreshableView); } @Override public final boolean isRefreshing() { return mState == State.REFRESHING || mState == State.MANUAL_REFRESHING; } @Override public final boolean isScrollingWhileRefreshingEnabled() { return mScrollingWhileRefreshingEnabled; } @Override public final boolean onInterceptTouchEvent(MotionEvent event) { if (!isPullToRefreshEnabled()) { return false; } final int action = event.getAction(); if (action == MotionEvent.ACTION_CANCEL || action == MotionEvent.ACTION_UP) { mIsBeingDragged = false; return false; } if (action != MotionEvent.ACTION_DOWN && mIsBeingDragged) { return true; } switch (action) { case MotionEvent.ACTION_MOVE: { // If we're refreshing, and the flag is set. 
Eat all MOVE events if (!mScrollingWhileRefreshingEnabled && isRefreshing()) { return true; } if (isReadyForPull()) { final float y = event.getY(), x = event.getX(); final float diff, oppositeDiff, absDiff; // We need to use the correct values, based on scroll // direction switch (getPullToRefreshScrollDirection()) { case HORIZONTAL: diff = x - mLastMotionX; oppositeDiff = y - mLastMotionY; break; case VERTICAL: default: diff = y - mLastMotionY; oppositeDiff = x - mLastMotionX; break; } absDiff = Math.abs(diff); if (absDiff > mTouchSlop && (!mFilterTouchEvents || absDiff > Math.abs(oppositeDiff))) { if (mMode.showHeaderLoadingLayout() && diff >= 1f && isReadyForPullStart()) { mLastMotionY = y; mLastMotionX = x; mIsBeingDragged = true; if (mMode == Mode.BOTH) { mCurrentMode = Mode.PULL_FROM_START; } } else if (mMode.showFooterLoadingLayout() && diff <= -1f && isReadyForPullEnd()) { mLastMotionY = y; mLastMotionX = x; mIsBeingDragged = true; if (mMode == Mode.BOTH) { mCurrentMode = Mode.PULL_FROM_END; } } } } break; } case MotionEvent.ACTION_DOWN: { if (isReadyForPull()) { mLastMotionY = mInitialMotionY = event.getY(); mLastMotionX = mInitialMotionX = event.getX(); mIsBeingDragged = false; } break; } } return mIsBeingDragged; } @Override public final void onRefreshComplete() { if (isRefreshing()) { setState(State.RESET); } } @Override public final boolean onTouchEvent(MotionEvent event) { if (!isPullToRefreshEnabled()) { return false; } // If we're refreshing, and the flag is set. 
Eat the event if (!mScrollingWhileRefreshingEnabled && isRefreshing()) { return true; } if (event.getAction() == MotionEvent.ACTION_DOWN && event.getEdgeFlags() != 0) { return false; } switch (event.getAction()) { case MotionEvent.ACTION_MOVE: { if (mIsBeingDragged) { mLastMotionY = event.getY(); mLastMotionX = event.getX(); pullEvent(); return true; } break; } case MotionEvent.ACTION_DOWN: { if (isReadyForPull()) { mLastMotionY = mInitialMotionY = event.getY(); mLastMotionX = mInitialMotionX = event.getX(); return true; } break; } case MotionEvent.ACTION_CANCEL: case MotionEvent.ACTION_UP: { if (mIsBeingDragged) { mIsBeingDragged = false; if (mState == State.RELEASE_TO_REFRESH && (null != mOnRefreshListener || null != mOnRefreshListener2)) { setState(State.REFRESHING, true); return true; } // If we're already refreshing, just scroll back to the top if (isRefreshing()) { smoothScrollTo(0); return true; } // If we haven't returned by here, then we're not in a state // to pull, so just reset setState(State.RESET); return true; } break; } } return false; } public final void setScrollingWhileRefreshingEnabled(boolean allowScrollingWhileRefreshing) { mScrollingWhileRefreshingEnabled = allowScrollingWhileRefreshing; } /** * @deprecated See {@link #setScrollingWhileRefreshingEnabled(boolean)} */ public void setDisableScrollingWhileRefreshing(boolean disableScrollingWhileRefreshing) { setScrollingWhileRefreshingEnabled(!disableScrollingWhileRefreshing); } @Override public final void setFilterTouchEvents(boolean filterEvents) { mFilterTouchEvents = filterEvents; } /** * @deprecated You should now call this method on the result of * {@link #getLoadingLayoutProxy()}. */ public void setLastUpdatedLabel(CharSequence label) { getLoadingLayoutProxy().setLastUpdatedLabel(label); } /** * @deprecated You should now call this method on the result of * {@link #getLoadingLayoutProxy()}. 
*/ public void setLoadingDrawable(Drawable drawable) { getLoadingLayoutProxy().setLoadingDrawable(drawable); } /** * @deprecated You should now call this method on the result of * {@link #getLoadingLayoutProxy(boolean, boolean)}. */ public void setLoadingDrawable(Drawable drawable, Mode mode) { getLoadingLayoutProxy(mode.showHeaderLoadingLayout(), mode.showFooterLoadingLayout()).setLoadingDrawable( drawable); } @Override public void setLongClickable(boolean longClickable) { getRefreshableView().setLongClickable(longClickable); } @Override public final void setMode(Mode mode) { if (mode != mMode) { if (DEBUG) { Log.d(LOG_TAG, "Setting mode to: " + mode); } mMode = mode; updateUIForMode(); } } public void setOnPullEventListener(OnPullEventListener<T> listener) { mOnPullEventListener = listener; } @Override public final void setOnRefreshListener(OnRefreshListener<T> listener) { mOnRefreshListener = listener; mOnRefreshListener2 = null; } @Override public final void setOnRefreshListener(OnRefreshListener2<T> listener) { mOnRefreshListener2 = listener; mOnRefreshListener = null; } /** * @deprecated You should now call this method on the result of * {@link #getLoadingLayoutProxy()}. */ public void setPullLabel(CharSequence pullLabel) { getLoadingLayoutProxy().setPullLabel(pullLabel); } /** * @deprecated You should now call this method on the result of * {@link #getLoadingLayoutProxy(boolean, boolean)}. */ public void setPullLabel(CharSequence pullLabel, Mode mode) { getLoadingLayoutProxy(mode.showHeaderLoadingLayout(), mode.showFooterLoadingLayout()).setPullLabel(pullLabel); } /** * @param enable Whether Pull-To-Refresh should be used * @deprecated This simple calls setMode with an appropriate mode based on * the passed value. */ public final void setPullToRefreshEnabled(boolean enable) { setMode(enable ? 
Mode.getDefault() : Mode.DISABLED); } @Override public final void setPullToRefreshOverScrollEnabled(boolean enabled) { mOverScrollEnabled = enabled; } @Override public final void setRefreshing() { setRefreshing(true); } @Override public final void setRefreshing(boolean doScroll) { if (!isRefreshing()) { setState(State.MANUAL_REFRESHING, doScroll); } } /** * @deprecated You should now call this method on the result of * {@link #getLoadingLayoutProxy()}. */ public void setRefreshingLabel(CharSequence refreshingLabel) { getLoadingLayoutProxy().setRefreshingLabel(refreshingLabel); } /** * @deprecated You should now call this method on the result of * {@link #getLoadingLayoutProxy(boolean, boolean)}. */ public void setRefreshingLabel(CharSequence refreshingLabel, Mode mode) { getLoadingLayoutProxy(mode.showHeaderLoadingLayout(), mode.showFooterLoadingLayout()).setRefreshingLabel( refreshingLabel); } /** * @deprecated You should now call this method on the result of * {@link #getLoadingLayoutProxy()}. */ public void setReleaseLabel(CharSequence releaseLabel) { setReleaseLabel(releaseLabel, Mode.BOTH); } /** * @deprecated You should now call this method on the result of * {@link #getLoadingLayoutProxy(boolean, boolean)}. */ public void setReleaseLabel(CharSequence releaseLabel, Mode mode) { getLoadingLayoutProxy(mode.showHeaderLoadingLayout(), mode.showFooterLoadingLayout()).setReleaseLabel( releaseLabel); } public void setScrollAnimationInterpolator(Interpolator interpolator) { mScrollAnimationInterpolator = interpolator; } @Override public final void setShowViewWhileRefreshing(boolean showView) { mShowViewWhileRefreshing = showView; } /** */ public abstract Orientation getPullToRefreshScrollDirection(); final void setState(State state, final boolean... 
params) { mState = state; if (DEBUG) { Log.d(LOG_TAG, "State: " + mState.name()); } switch (mState) { case RESET: onReset(); break; case PULL_TO_REFRESH: onPullToRefresh(); break; case RELEASE_TO_REFRESH: onReleaseToRefresh(); break; case REFRESHING: case MANUAL_REFRESHING: onRefreshing(params[0]); break; case OVERSCROLLING: // NO-OP break; } // Call OnPullEventListener if (null != mOnPullEventListener) { mOnPullEventListener.onPullEvent(this, mState, mCurrentMode); } } /** * Used internally for adding view. Need because we override addView to * pass-through to the Refreshable View */ protected final void addViewInternal(View child, int index, ViewGroup.LayoutParams params) { super.addView(child, index, params); } /** * Used internally for adding view. Need because we override addView to * pass-through to the Refreshable View */ protected final void addViewInternal(View child, ViewGroup.LayoutParams params) { super.addView(child, -1, params); } protected LoadingLayout createLoadingLayout(Context context, Mode mode, TypedArray attrs) { LoadingLayout layout = mLoadingAnimationStyle.createLoadingLayout(context, mode, getPullToRefreshScrollDirection(), attrs); layout.setVisibility(View.INVISIBLE); return layout; } /** * Used internally for {@link #getLoadingLayoutProxy(boolean, boolean)}. * Allows derivative classes to include any extra LoadingLayouts. */ protected LoadingLayoutProxy createLoadingLayoutProxy(final boolean includeStart, final boolean includeEnd) { LoadingLayoutProxy proxy = new LoadingLayoutProxy(); if (includeStart && mMode.showHeaderLoadingLayout()) { proxy.addLayout(mHeaderLayout); } if (includeEnd && mMode.showFooterLoadingLayout()) { proxy.addLayout(mFooterLayout); } return proxy; } /** * This is implemented by derived classes to return the created View. If you * need to use a custom View (such as a custom ListView), override this * method and return an instance of your custom class. 
* <p/> * Be sure to set the ID of the view in this method, especially if you're * using a ListActivity or ListFragment. * * @param context Context to create view with * @param attrs AttributeSet from wrapped class. Means that anything you * include in the XML layout declaration will be routed to the * created View * @return New instance of the Refreshable View */ protected abstract T createRefreshableView(Context context, AttributeSet attrs); protected final void disableLoadingLayoutVisibilityChanges() { mLayoutVisibilityChangesEnabled = false; } protected final LoadingLayout getFooterLayout() { return mFooterLayout; } protected final int getFooterSize() { return mFooterLayout.getContentSize(); } protected final LoadingLayout getHeaderLayout() { return mHeaderLayout; } protected final int getHeaderSize() { return mHeaderLayout.getContentSize(); } protected int getPullToRefreshScrollDuration() { return SMOOTH_SCROLL_DURATION_MS; } protected int getPullToRefreshScrollDurationLonger() { return SMOOTH_SCROLL_LONG_DURATION_MS; } protected FrameLayout getRefreshableViewWrapper() { return mRefreshableViewWrapper; } /** * Allows Derivative classes to handle the XML Attrs without creating a * TypedArray themsevles * * @param a - TypedArray of PullToRefresh Attributes */ protected void handleStyledAttributes(TypedArray a) { } /** * Implemented by derived class to return whether the View is in a state * where the user can Pull to Refresh by scrolling from the end. * * @return true if the View is currently in the correct state (for example, * bottom of a ListView) */ protected abstract boolean isReadyForPullEnd(); /** * Implemented by derived class to return whether the View is in a state * where the user can Pull to Refresh by scrolling from the start. 
* * @return true if the View is currently the correct state (for example, top * of a ListView) */ protected abstract boolean isReadyForPullStart(); /** * Called by {@link #onRestoreInstanceState(android.os.Parcelable)} so that derivative * classes can handle their saved instance state. * * @param savedInstanceState - Bundle which contains saved instance state. */ protected void onPtrRestoreInstanceState(Bundle savedInstanceState) { } /** * Called by {@link #onSaveInstanceState()} so that derivative classes can * save their instance state. * * @param saveState - Bundle to be updated with saved state. */ protected void onPtrSaveInstanceState(Bundle saveState) { } /** * Called when the UI has been to be updated to be in the */ protected void onPullToRefresh() { switch (mCurrentMode) { case PULL_FROM_END: mFooterLayout.pullToRefresh(); break; case PULL_FROM_START: mHeaderLayout.pullToRefresh(); break; default: // NO-OP break; } } /** * Called when the UI has been to be updated to be in the * * @param doScroll - Whether the UI should scroll for this event. 
*/ protected void onRefreshing(final boolean doScroll) { if (mMode.showHeaderLoadingLayout()) { mHeaderLayout.refreshing(); } if (mMode.showFooterLoadingLayout()) { mFooterLayout.refreshing(); } if (doScroll) { if (mShowViewWhileRefreshing) { // Call Refresh Listener when the Scroll has finished OnSmoothScrollFinishedListener listener = new OnSmoothScrollFinishedListener() { @Override public void onSmoothScrollFinished() { callRefreshListener(); } }; switch (mCurrentMode) { case MANUAL_REFRESH_ONLY: case PULL_FROM_END: smoothScrollTo(getFooterSize(), listener); break; default: case PULL_FROM_START: smoothScrollTo(-getHeaderSize(), listener); break; } } else { smoothScrollTo(0); } } else { // We're not scrolling, so just call Refresh Listener now callRefreshListener(); } } /** * Called when the UI has been to be updated to be in the */ protected void onReleaseToRefresh() { switch (mCurrentMode) { case PULL_FROM_END: mFooterLayout.releaseToRefresh(); break; case PULL_FROM_START: mHeaderLayout.releaseToRefresh(); break; default: // NO-OP break; } } /** * Called when the UI has been to be updated to be in the */ protected void onReset() { mIsBeingDragged = false; mLayoutVisibilityChangesEnabled = true; smoothScrollTo(0); // Always reset both layouts, just in case... 
mHeaderLayout.reset(); mFooterLayout.reset(); } @Override protected final void onRestoreInstanceState(Parcelable state) { if (state instanceof Bundle) { Bundle bundle = (Bundle) state; setMode(Mode.mapIntToValue(bundle.getInt(STATE_MODE, 0))); mCurrentMode = Mode.mapIntToValue(bundle.getInt(STATE_CURRENT_MODE, 0)); mScrollingWhileRefreshingEnabled = bundle.getBoolean(STATE_SCROLLING_REFRESHING_ENABLED, false); mShowViewWhileRefreshing = bundle.getBoolean(STATE_SHOW_REFRESHING_VIEW, true); // Let super Restore Itself super.onRestoreInstanceState(bundle.getParcelable(STATE_SUPER)); State viewState = State.mapIntToValue(bundle.getInt(STATE_STATE, 0)); if (viewState == State.REFRESHING || viewState == State.MANUAL_REFRESHING) { setState(viewState, true); } // Now let derivative classes restore their state onPtrRestoreInstanceState(bundle); return; } super.onRestoreInstanceState(state); } @Override protected final Parcelable onSaveInstanceState() { Bundle bundle = new Bundle(); // Let derivative classes get a chance to save state first, that way we // can make sure they don't overrite any of our values onPtrSaveInstanceState(bundle); bundle.putInt(STATE_STATE, mState.getIntValue()); bundle.putInt(STATE_MODE, mMode.getIntValue()); bundle.putInt(STATE_CURRENT_MODE, mCurrentMode.getIntValue()); bundle.putBoolean(STATE_SCROLLING_REFRESHING_ENABLED, mScrollingWhileRefreshingEnabled); bundle.putBoolean(STATE_SHOW_REFRESHING_VIEW, mShowViewWhileRefreshing); bundle.putParcelable(STATE_SUPER, super.onSaveInstanceState()); return bundle; } @Override protected final void onSizeChanged(int w, int h, int oldw, int oldh) { if (DEBUG) { Log.d(LOG_TAG, String.format("onSizeChanged. 
W: %d, H: %d", w, h)); } super.onSizeChanged(w, h, oldw, oldh); // We need to update the header/footer when our size changes refreshLoadingViewsSize(); // Update the Refreshable View layout refreshRefreshableViewSize(w, h); /** * As we're currently in a Layout Pass, we need to schedule another one * to layout any changes we've made here */ post(new Runnable() { @Override public void run() { requestLayout(); } }); } /** * Re-measure the Loading Views height, and adjust internal padding as * necessary */ protected final void refreshLoadingViewsSize() { final int maximumPullScroll = (int) (getMaximumPullScroll() * 1.2f); int pLeft = getPaddingLeft(); int pTop = getPaddingTop(); int pRight = getPaddingRight(); int pBottom = getPaddingBottom(); switch (getPullToRefreshScrollDirection()) { case HORIZONTAL: if (mMode.showHeaderLoadingLayout()) { mHeaderLayout.setWidth(maximumPullScroll); pLeft = -maximumPullScroll; } else { pLeft = 0; } if (mMode.showFooterLoadingLayout()) { mFooterLayout.setWidth(maximumPullScroll); pRight = -maximumPullScroll; } else { pRight = 0; } break; case VERTICAL: if (mMode.showHeaderLoadingLayout()) { mHeaderLayout.setHeight(maximumPullScroll); pTop = -maximumPullScroll; } else { pTop = 0; } if (mMode.showFooterLoadingLayout()) { mFooterLayout.setHeight(maximumPullScroll); pBottom = -maximumPullScroll; } else { pBottom = 0; } break; } if (DEBUG) { Log.d(LOG_TAG, String.format("Setting Padding. 
L: %d, T: %d, R: %d, B: %d", pLeft, pTop, pRight, pBottom));
        }
        setPadding(pLeft, pTop, pRight, pBottom);
    }

    // Resizes the wrapper holding the refreshable view so it matches this
    // layout along the scroll axis; only requests layout when the size changed.
    protected final void refreshRefreshableViewSize(int width, int height) {
        // We need to set the Height of the Refreshable View to the same as
        // this layout
        LayoutParams lp = (LayoutParams) mRefreshableViewWrapper.getLayoutParams();

        switch (getPullToRefreshScrollDirection()) {
            case HORIZONTAL:
                if (lp.width != width) {
                    lp.width = width;
                    mRefreshableViewWrapper.requestLayout();
                }
                break;
            case VERTICAL:
                if (lp.height != height) {
                    lp.height = height;
                    mRefreshableViewWrapper.requestLayout();
                }
                break;
        }
    }

    /**
     * Helper method which just calls scrollTo() in the correct scrolling
     * direction.
     *
     * @param value - New Scroll value
     */
    protected final void setHeaderScroll(int value) {
        if (DEBUG) {
            Log.d(LOG_TAG, "setHeaderScroll: " + value);
        }

        // Clamp value to with pull scroll range
        final int maximumPullScroll = getMaximumPullScroll();
        value = Math.min(maximumPullScroll, Math.max(-maximumPullScroll, value));

        if (mLayoutVisibilityChangesEnabled) {
            // Negative scroll means pulling from the start (header shown),
            // positive means pulling from the end (footer shown).
            if (value < 0) {
                mHeaderLayout.setVisibility(View.VISIBLE);
            } else if (value > 0) {
                mFooterLayout.setVisibility(View.VISIBLE);
            } else {
                mHeaderLayout.setVisibility(View.INVISIBLE);
                mFooterLayout.setVisibility(View.INVISIBLE);
            }
        }

        if (USE_HW_LAYERS) {
            /**
             * Use a Hardware Layer on the Refreshable View if we've scrolled at
             * all. We don't use them on the Header/Footer Views as they change
             * often, which would negate any HW layer performance boost.
             */
            ViewCompat.setLayerType(mRefreshableViewWrapper,
                    value != 0 ? View.LAYER_TYPE_HARDWARE : View.LAYER_TYPE_NONE);
        }

        switch (getPullToRefreshScrollDirection()) {
            case VERTICAL:
                scrollTo(0, value);
                break;
            case HORIZONTAL:
                scrollTo(value, 0);
                break;
        }
    }

    /**
     * Smooth Scroll to position using the default duration of
     * {@value #SMOOTH_SCROLL_DURATION_MS} ms.
     *
     * @param scrollValue - Position to scroll to
     */
    protected final void smoothScrollTo(int scrollValue) {
        smoothScrollTo(scrollValue, getPullToRefreshScrollDuration());
    }

    /**
     * Smooth Scroll to position using the default duration of
     * {@value #SMOOTH_SCROLL_DURATION_MS} ms.
     *
     * @param scrollValue - Position to scroll to
     * @param listener - Listener for scroll
     */
    protected final void smoothScrollTo(int scrollValue, OnSmoothScrollFinishedListener listener) {
        smoothScrollTo(scrollValue, getPullToRefreshScrollDuration(), 0, listener);
    }

    /**
     * Smooth Scroll to position using the longer default duration of
     * {@value #SMOOTH_SCROLL_LONG_DURATION_MS} ms.
     *
     * @param scrollValue - Position to scroll to
     */
    protected final void smoothScrollToLonger(int scrollValue) {
        smoothScrollTo(scrollValue, getPullToRefreshScrollDurationLonger());
    }

    /**
     * Updates the View State when the mode has been set. This does not do any
     * checking that the mode is different to current state so always updates.
     */
    protected void updateUIForMode() {
        // We need to use the correct LayoutParam values, based on scroll
        // direction
        final LayoutParams lp = getLoadingLayoutLayoutParams();

        // Remove Header, and then add Header Loading View again if needed
        if (this == mHeaderLayout.getParent()) {
            removeView(mHeaderLayout);
        }
        if (mMode.showHeaderLoadingLayout()) {
            addViewInternal(mHeaderLayout, 0, lp);
        }

        // Remove Footer, and then add Footer Loading View again if needed
        if (this == mFooterLayout.getParent()) {
            removeView(mFooterLayout);
        }
        if (mMode.showFooterLoadingLayout()) {
            addViewInternal(mFooterLayout, lp);
        }

        // Hide Loading Views
        refreshLoadingViewsSize();

        // If we're not using Mode.BOTH, set mCurrentMode to mMode, otherwise
        // set it to pull down
        mCurrentMode = (mMode != Mode.BOTH) ? mMode : Mode.PULL_FROM_START;
    }

    // Wraps the refreshable view in a FrameLayout so it can be resized and
    // layer-typed independently of the Header/Footer loading views.
    private void addRefreshableView(Context context, T refreshableView) {
        mRefreshableViewWrapper = new FrameLayout(context);
        mRefreshableViewWrapper.addView(refreshableView, ViewGroup.LayoutParams.MATCH_PARENT,
                ViewGroup.LayoutParams.MATCH_PARENT);

        addViewInternal(mRefreshableViewWrapper, new LayoutParams(LayoutParams.MATCH_PARENT,
                LayoutParams.MATCH_PARENT));
    }

    // Dispatches the refresh event to whichever listener is registered;
    // OnRefreshListener2 receives direction-specific callbacks via mCurrentMode.
    private void callRefreshListener() {
        if (null != mOnRefreshListener) {
            mOnRefreshListener.onRefresh(this);
        } else if (null != mOnRefreshListener2) {
            if (mCurrentMode == Mode.PULL_FROM_START) {
                mOnRefreshListener2.onPullDownToRefresh(this);
            } else if (mCurrentMode == Mode.PULL_FROM_END) {
                mOnRefreshListener2.onPullUpToRefresh(this);
            }
        }
    }

    // One-time setup: orientation, touch slop, XML styleables, refreshable
    // view and the header/footer loading layouts.
    @SuppressWarnings("deprecation")
    private void init(Context context, AttributeSet attrs) {
        switch (getPullToRefreshScrollDirection()) {
            case HORIZONTAL:
                setOrientation(LinearLayout.HORIZONTAL);
                break;
            case VERTICAL:
            default:
                setOrientation(LinearLayout.VERTICAL);
                break;
        }

        setGravity(Gravity.CENTER);

        ViewConfiguration config = ViewConfiguration.get(context);
        mTouchSlop = config.getScaledTouchSlop();

        // Styleables from XML
        TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.PullToRefresh);

        if (a.hasValue(R.styleable.PullToRefresh_ptrMode)) {
            mMode = Mode.mapIntToValue(a.getInteger(R.styleable.PullToRefresh_ptrMode, 0));
        }

        if (a.hasValue(R.styleable.PullToRefresh_ptrAnimationStyle)) {
            mLoadingAnimationStyle = AnimationStyle.mapIntToValue(a.getInteger(
                    R.styleable.PullToRefresh_ptrAnimationStyle, 0));
        }

        // Refreshable View
        // By passing the attrs, we can add ListView/GridView params via XML
        mRefreshableView = createRefreshableView(context, attrs);
        addRefreshableView(context, mRefreshableView);

        // We need to create now layouts now
        mHeaderLayout = createLoadingLayout(context, Mode.PULL_FROM_START, a);
        mFooterLayout = createLoadingLayout(context, Mode.PULL_FROM_END, a);

        /**
         * Styleables from XML
         */
        if (a.hasValue(R.styleable.PullToRefresh_ptrRefreshableViewBackground)) {
            Drawable background = a.getDrawable(R.styleable.PullToRefresh_ptrRefreshableViewBackground);
            if (null != background) {
                mRefreshableView.setBackgroundDrawable(background);
            }
        } else if (a.hasValue(R.styleable.PullToRefresh_ptrAdapterViewBackground)) {
            Utils.warnDeprecation("ptrAdapterViewBackground", "ptrRefreshableViewBackground");
            Drawable background = a.getDrawable(R.styleable.PullToRefresh_ptrAdapterViewBackground);
            if (null != background) {
                mRefreshableView.setBackgroundDrawable(background);
            }
        }

        if (a.hasValue(R.styleable.PullToRefresh_ptrOverScroll)) {
            mOverScrollEnabled = a.getBoolean(R.styleable.PullToRefresh_ptrOverScroll, true);
        }

        if (a.hasValue(R.styleable.PullToRefresh_ptrScrollingWhileRefreshingEnabled)) {
            mScrollingWhileRefreshingEnabled = a.getBoolean(
                    R.styleable.PullToRefresh_ptrScrollingWhileRefreshingEnabled, false);
        }

        // Let the derivative classes have a go at handling attributes, then
        // recycle them...
        handleStyledAttributes(a);
        a.recycle();

        // Finally update the UI for the modes
        updateUIForMode();
    }

    // Whether the refreshable view's current scroll position allows a pull to
    // start, given the configured Mode.
    private boolean isReadyForPull() {
        switch (mMode) {
            case PULL_FROM_START:
                return isReadyForPullStart();
            case PULL_FROM_END:
                return isReadyForPullEnd();
            case BOTH:
                return isReadyForPullEnd() || isReadyForPullStart();
            default:
                return false;
        }
    }

    /**
     * Actions a Pull Event: computes the friction-damped scroll offset from
     * the recorded motion values, applies it, feeds the pull fraction to the
     * active loading layout, and transitions between PULL_TO_REFRESH and
     * RELEASE_TO_REFRESH as the pull crosses the header/footer size.
     * (Note: this method is void; the former {@code @return} doc was stale.)
     */
    private void pullEvent() {
        final int newScrollValue;
        final int itemDimension;
        final float initialMotionValue, lastMotionValue;

        switch (getPullToRefreshScrollDirection()) {
            case HORIZONTAL:
                initialMotionValue = mInitialMotionX;
                lastMotionValue = mLastMotionX;
                break;
            case VERTICAL:
            default:
                initialMotionValue = mInitialMotionY;
                lastMotionValue = mLastMotionY;
                break;
        }

        switch (mCurrentMode) {
            case PULL_FROM_END:
                // Pull from end produces a positive scroll value.
                newScrollValue = Math.round(Math.max(initialMotionValue - lastMotionValue, 0)
                        / FRICTION);
                itemDimension = getFooterSize();
                break;
            case PULL_FROM_START:
            default:
                // Pull from start produces a negative scroll value.
                newScrollValue = Math.round(Math.min(initialMotionValue - lastMotionValue, 0)
                        / FRICTION);
                itemDimension = getHeaderSize();
                break;
        }

        setHeaderScroll(newScrollValue);

        if (newScrollValue != 0 && !isRefreshing()) {
            float scale = Math.abs(newScrollValue) / (float) itemDimension;
            switch (mCurrentMode) {
                case PULL_FROM_END:
                    mFooterLayout.onPull(scale);
                    break;
                case PULL_FROM_START:
                default:
                    mHeaderLayout.onPull(scale);
                    break;
            }

            if (mState != State.PULL_TO_REFRESH && itemDimension >= Math.abs(newScrollValue)) {
                setState(State.PULL_TO_REFRESH);
            } else if (mState == State.PULL_TO_REFRESH && itemDimension < Math.abs(newScrollValue)) {
                setState(State.RELEASE_TO_REFRESH);
            }
        }
    }

    // LayoutParams for the loading layouts: fill the cross axis, wrap the
    // scroll axis.
    private LayoutParams getLoadingLayoutLayoutParams() {
        switch (getPullToRefreshScrollDirection()) {
            case HORIZONTAL:
                return new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.MATCH_PARENT);
            case VERTICAL:
            default:
                return new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT);
        }
    }

    // Maximum friction-damped pull distance, derived from this view's size
    // along the scroll axis.
    private int getMaximumPullScroll() {
        switch (getPullToRefreshScrollDirection()) {
            case HORIZONTAL:
                return Math.round(getWidth() / FRICTION);
            case VERTICAL:
            default:
                return Math.round(getHeight() / FRICTION);
        }
    }

    /**
     * Smooth Scroll to position using the specific duration
     *
     * @param scrollValue - Position to scroll to
     * @param duration - Duration of animation in milliseconds
     */
    private final void smoothScrollTo(int scrollValue, long duration) {
        smoothScrollTo(scrollValue, duration, 0, null);
    }

    // Core smooth-scroll implementation: cancels any in-flight scroll
    // runnable, then posts a new one (optionally delayed) if the target
    // differs from the current scroll position.
    private final void smoothScrollTo(int newScrollValue, long duration, long delayMillis,
            OnSmoothScrollFinishedListener listener) {
        if (null != mCurrentSmoothScrollRunnable) {
            mCurrentSmoothScrollRunnable.stop();
        }

        final int oldScrollValue;
        switch (getPullToRefreshScrollDirection()) {
            case HORIZONTAL:
                oldScrollValue = getScrollX();
                break;
            case VERTICAL:
            default:
                oldScrollValue = getScrollY();
                break;
        }

        if (oldScrollValue != newScrollValue) {
            if (null == mScrollAnimationInterpolator) {
                // Default interpolator is a Decelerate Interpolator
                mScrollAnimationInterpolator = new DecelerateInterpolator();
            }
            mCurrentSmoothScrollRunnable = new SmoothScrollRunnable(oldScrollValue, newScrollValue,
                    duration, listener);

            if (delayMillis > 0) {
                postDelayed(mCurrentSmoothScrollRunnable, delayMillis);
            } else {
                post(mCurrentSmoothScrollRunnable);
            }
        }
    }

    // Scrolls to y, then (after DEMO_SCROLL_INTERVAL) back to 0 — used for the
    // "demo" bounce that shows users the pull-to-refresh affordance.
    private final void smoothScrollToAndBack(int y) {
        smoothScrollTo(y, SMOOTH_SCROLL_DURATION_MS, 0, new OnSmoothScrollFinishedListener() {

            @Override
            public void onSmoothScrollFinished() {
                smoothScrollTo(0, SMOOTH_SCROLL_DURATION_MS, DEMO_SCROLL_INTERVAL, null);
            }
        });
    }

    public static enum AnimationStyle {
        /**
         * This is the default for Android-PullToRefresh. Allows you to use any
         * drawable, which is automatically rotated and used as a Progress Bar.
         */
        ROTATE,

        /**
         * This is the old default, and what is commonly used on iOS. Uses an
         * arrow image which flips depending on where the user has scrolled.
         */
        FLIP;

        static AnimationStyle getDefault() {
            return ROTATE;
        }

        /**
         * Maps an int to a specific mode. This is needed when saving state, or
         * inflating the view from XML where the mode is given through a attr
         * int.
         *
         * @param modeInt - int to map a Mode to
         * @return Mode that modeInt maps to, or ROTATE by default.
         */
        static AnimationStyle mapIntToValue(int modeInt) {
            switch (modeInt) {
                case 0x0:
                default:
                    return ROTATE;
                case 0x1:
                    return FLIP;
            }
        }

        // Factory for the LoadingLayout implementation that matches this style.
        LoadingLayout createLoadingLayout(Context context, Mode mode, Orientation scrollDirection,
                TypedArray attrs) {
            switch (this) {
                case ROTATE:
                default:
                    return new RotateLoadingLayout(context, mode, scrollDirection, attrs);
                case FLIP:
                    return new FlipLoadingLayout(context, mode, scrollDirection, attrs);
            }
        }
    }

    public static enum Mode {

        /**
         * Disable all Pull-to-Refresh gesture and Refreshing handling
         */
        DISABLED(0x0),

        /**
         * Only allow the user to Pull from the start of the Refreshable View to
         * refresh. The start is either the Top or Left, depending on the
         * scrolling direction.
         */
        PULL_FROM_START(0x1),

        /**
         * Only allow the user to Pull from the end of the Refreshable View to
         * refresh. The start is either the Bottom or Right, depending on the
         * scrolling direction.
         */
        PULL_FROM_END(0x2),

        /**
         * Allow the user to both Pull from the start, from the end to refresh.
         */
        BOTH(0x3),

        /**
         * Disables Pull-to-Refresh gesture handling, but allows manually
         * setting the Refresh state via
         */
        MANUAL_REFRESH_ONLY(0x4);

        /**
         * @deprecated Use {@link #PULL_FROM_START} from now on.
         */
        public static Mode PULL_DOWN_TO_REFRESH = Mode.PULL_FROM_START;

        /**
         * @deprecated Use {@link #PULL_FROM_END} from now on.
         */
        public static Mode PULL_UP_TO_REFRESH = Mode.PULL_FROM_END;

        /**
         * Maps an int to a specific mode. This is needed when saving state, or
         * inflating the view from XML where the mode is given through a attr
         * int.
         *
         * @param modeInt - int to map a Mode to
         * @return Mode that modeInt maps to, or PULL_FROM_START by default.
         */
        static Mode mapIntToValue(final int modeInt) {
            for (Mode value : Mode.values()) {
                if (modeInt == value.getIntValue()) {
                    return value;
                }
            }

            // If not, return default
            return getDefault();
        }

        static Mode getDefault() {
            return PULL_FROM_START;
        }

        private int mIntValue;

        // The modeInt values need to match those from attrs.xml
        Mode(int modeInt) {
            mIntValue = modeInt;
        }

        /**
         * @return true if the mode permits Pull-to-Refresh
         */
        boolean permitsPullToRefresh() {
            return !(this == DISABLED || this == MANUAL_REFRESH_ONLY);
        }

        /**
         * @return true if this mode wants the Loading Layout Header to be shown
         */
        public boolean showHeaderLoadingLayout() {
            return this == PULL_FROM_START || this == BOTH;
        }

        /**
         * @return true if this mode wants the Loading Layout Footer to be shown
         */
        public boolean showFooterLoadingLayout() {
            return this == PULL_FROM_END || this == BOTH || this == MANUAL_REFRESH_ONLY;
        }

        int getIntValue() {
            return mIntValue;
        }
    }

    // ===========================================================
    // Inner, Anonymous Classes, and Enumerations
    // ===========================================================

    /**
     * Simple Listener that allows you to be notified when the user has scrolled
     * to the end of the AdapterView. See (
     * {@link PullToRefreshAdapterViewBase#setOnLastItemVisibleListener}.
     *
     * @author Chris Banes
     */
    public static interface OnLastItemVisibleListener {

        /**
         * Called when the user has scrolled to the end of the list
         */
        public void onLastItemVisible();
    }

    /**
     * Listener that allows you to be notified when the user has started or
     * finished a touch event. Useful when you want to append extra UI events
     * (such as sounds). See (
     * {@link PullToRefreshAdapterViewBase#setOnPullEventListener}.
     *
     * @author Chris Banes
     */
    public static interface OnPullEventListener<V extends View> {

        /**
         * Called when the internal state has been changed, usually by the user
         * pulling.
         *
         * @param refreshView - View which has had it's state change.
         * @param state - The new state of View.
         * @param direction - The pull {@link Mode} associated with the event.
         */
        public void onPullEvent(final PullToRefreshBase<V> refreshView, State state, Mode direction);
    }

    /**
     * Simple Listener to listen for any callbacks to Refresh.
     *
     * @author Chris Banes
     */
    public static interface OnRefreshListener<V extends View> {

        /**
         * onRefresh will be called for both a Pull from start, and Pull from
         * end
         */
        public void onRefresh(final PullToRefreshBase<V> refreshView);
    }

    /**
     * An advanced version of the Listener to listen for callbacks to Refresh.
     * This listener is different as it allows you to differentiate between Pull
     * Ups, and Pull Downs.
     *
     * @author Chris Banes
     */
    public static interface OnRefreshListener2<V extends View> {
        // TODO These methods need renaming to START/END rather than DOWN/UP

        /**
         * onPullDownToRefresh will be called only when the user has Pulled from
         * the start, and released.
         */
        public void onPullDownToRefresh(final PullToRefreshBase<V> refreshView);

        /**
         * onPullUpToRefresh will be called only when the user has Pulled from
         * the end, and released.
         */
        public void onPullUpToRefresh(final PullToRefreshBase<V> refreshView);
    }

    public static enum Orientation {
        VERTICAL, HORIZONTAL;
    }

    public static enum State {

        /**
         * When the UI is in a state which means that user is not interacting
         * with the Pull-to-Refresh function.
         */
        RESET(0x0),

        /**
         * When the UI is being pulled by the user, but has not been pulled far
         * enough so that it refreshes when released.
         */
        PULL_TO_REFRESH(0x1),

        /**
         * When the UI is being pulled by the user, and <strong>has</strong>
         * been pulled far enough so that it will refresh when released.
         */
        RELEASE_TO_REFRESH(0x2),

        /**
         * When the UI is currently refreshing, caused by a pull gesture.
         */
        REFRESHING(0x8),

        /**
         * When the UI is currently refreshing, caused by a call to
         */
        MANUAL_REFRESHING(0x9),

        /**
         * When the UI is currently overscrolling, caused by a fling on the
         * Refreshable View.
         */
        OVERSCROLLING(0x10);

        /**
         * Maps an int to a specific state. This is needed when saving state.
         *
         * @param stateInt - int to map a State to
         * @return State that stateInt maps to
         */
        static State mapIntToValue(final int stateInt) {
            for (State value : State.values()) {
                if (stateInt == value.getIntValue()) {
                    return value;
                }
            }

            // If not, return default
            return RESET;
        }

        private int mIntValue;

        State(int intValue) {
            mIntValue = intValue;
        }

        int getIntValue() {
            return mIntValue;
        }
    }

    // Frame-by-frame animated scroll between two offsets, interpolated over a
    // fixed duration and re-posted on the animation timer until done or stopped.
    final class SmoothScrollRunnable implements Runnable {
        private final Interpolator mInterpolator;
        private final int mScrollToY;
        private final int mScrollFromY;
        private final long mDuration;
        private OnSmoothScrollFinishedListener mListener;

        private boolean mContinueRunning = true;
        private long mStartTime = -1;
        private int mCurrentY = -1;

        public SmoothScrollRunnable(int fromY, int toY, long duration,
                OnSmoothScrollFinishedListener listener) {
            mScrollFromY = fromY;
            mScrollToY = toY;
            mInterpolator = mScrollAnimationInterpolator;
            mDuration = duration;
            mListener = listener;
        }

        @Override
        public void run() {
            /**
             * Only set mStartTime if this is the first time we're starting,
             * else actually calculate the Y delta
             */
            if (mStartTime == -1) {
                mStartTime = System.currentTimeMillis();
            } else {

                /**
                 * We do do all calculations in long to reduce software float
                 * calculations. We use 1000 as it gives us good accuracy and
                 * small rounding errors
                 */
                long normalizedTime = (1000 * (System.currentTimeMillis() - mStartTime)) / mDuration;
                normalizedTime = Math.max(Math.min(normalizedTime, 1000), 0);

                final int deltaY = Math.round((mScrollFromY - mScrollToY)
                        * mInterpolator.getInterpolation(normalizedTime / 1000f));
                mCurrentY = mScrollFromY - deltaY;
                setHeaderScroll(mCurrentY);
            }

            // If we're not at the target Y, keep going...
            if (mContinueRunning && mScrollToY != mCurrentY) {
                ViewCompat.postOnAnimation(PullToRefreshBase.this, this);
            } else {
                if (null != mListener) {
                    mListener.onSmoothScrollFinished();
                }
            }
        }

        public void stop() {
            mContinueRunning = false;
            removeCallbacks(this);
        }
    }

    static interface OnSmoothScrollFinishedListener {
        void onSmoothScrollFinished();
    }
}
/* * Copyright 2016 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.tools.eclipse.login.ui; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.when; import com.google.api.client.auth.oauth2.Credential; import com.google.cloud.tools.eclipse.login.IGoogleLoginService; import com.google.cloud.tools.eclipse.test.util.ui.ShellTestResource; import com.google.cloud.tools.login.Account; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import org.eclipse.swt.widgets.Shell; import org.eclipse.swtbot.swt.finder.widgets.SWTBotCombo; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.runners.MockitoJUnitRunner; @RunWith(MockitoJUnitRunner.class) public class AccountSelectorTest { @Rule public ShellTestResource shellTestResource = new ShellTestResource(); private Shell shell; @Mock private IGoogleLoginService loginService; @Mock private Account account1; @Mock private Account account2; @Mock private Account account3; @Mock private Credential credential1; @Mock private Credential credential2; @Mock private Credential credential3; @Before public void setUp() { shell = 
shellTestResource.getShell(); when(account1.getEmail()).thenReturn("some-email-1@example.com"); when(account1.getOAuth2Credential()).thenReturn(credential1); when(account2.getEmail()).thenReturn("some-email-2@example.com"); when(account2.getOAuth2Credential()).thenReturn(credential2); when(account3.getEmail()).thenReturn("some-email-3@example.com"); when(account3.getOAuth2Credential()).thenReturn(credential3); } @Test public void testComboSetup_noAccount() { when(loginService.getAccounts()).thenReturn(new HashSet<Account>()); AccountSelector selector = new AccountSelector(shell, loginService); assertEquals(-1, selector.combo.getSelectionIndex()); assertNull(selector.getSelectedCredential()); assertTrue(selector.getSelectedEmail().isEmpty()); assertEquals(1, selector.combo.getItemCount()); assertEquals("<Sign into another account...>", selector.combo.getItem(0)); } @Test public void testComboSetup_oneAccount() { when(loginService.getAccounts()).thenReturn(Collections.singleton(account1)); AccountSelector selector = new AccountSelector(shell, loginService); assertEquals(-1, selector.combo.getSelectionIndex()); assertNull(selector.getSelectedCredential()); assertTrue(selector.getSelectedEmail().isEmpty()); assertEquals(2, selector.combo.getItemCount()); assertEquals("some-email-1@example.com", selector.combo.getItem(0)); assertEquals("<Sign into another account...>", selector.combo.getItem(1)); } @Test public void testComboSetup_threeAccounts() { when(loginService.getAccounts()) .thenReturn(new HashSet<>(Arrays.asList(account1, account2, account3))); AccountSelector selector = new AccountSelector(shell, loginService); assertEquals(-1, selector.combo.getSelectionIndex()); assertNull(selector.getSelectedCredential()); assertTrue(selector.getSelectedEmail().isEmpty()); assertEquals(4, selector.combo.getItemCount()); String allEmails = selector.combo.getItem(0) + selector.combo.getItem(1) + selector.combo.getItem(2); 
assertTrue(allEmails.contains("some-email-1@example.com")); assertTrue(allEmails.contains("some-email-2@example.com")); assertTrue(allEmails.contains("some-email-3@example.com")); assertEquals("<Sign into another account...>", selector.combo.getItem(3)); } @Test public void testIsEmailAvailable_noAccount() { when(loginService.getAccounts()).thenReturn(new HashSet<Account>()); AccountSelector selector = new AccountSelector(shell, loginService); assertFalse(selector.isEmailAvailable(null)); assertFalse(selector.isEmailAvailable("")); } @Test public void testIsEmailAvailable_threeAccounts() { when(loginService.getAccounts()) .thenReturn(new HashSet<>(Arrays.asList(account1, account2, account3))); AccountSelector selector = new AccountSelector(shell, loginService); assertFalse(selector.isEmailAvailable(null)); assertFalse(selector.isEmailAvailable("")); assertTrue(selector.isEmailAvailable("some-email-1@example.com")); assertTrue(selector.isEmailAvailable("some-email-2@example.com")); assertTrue(selector.isEmailAvailable("some-email-3@example.com")); } @Test public void testGetSelectedCredential() { when(loginService.getAccounts()) .thenReturn(new HashSet<>(Arrays.asList(account1, account2, account3))); AccountSelector selector = new AccountSelector(shell, loginService); assertEquals(-1, selector.combo.getSelectionIndex()); assertNull(selector.getSelectedCredential()); int index = selector.combo.indexOf("some-email-2@example.com"); simulateSelect(selector, index); assertEquals(index, selector.combo.getSelectionIndex()); assertEquals("some-email-2@example.com", selector.getSelectedEmail()); assertEquals("some-email-2@example.com", selector.combo.getItem(index)); assertEquals(credential2, selector.getSelectedCredential()); } @Test public void testSelectAccount_nonExistingEmailClearsSelection() { when(loginService.getAccounts()).thenReturn(new HashSet<>(Arrays.asList(account1, account2))); AccountSelector selector = new AccountSelector(shell, loginService); 
selector.selectAccount("some-email-2@example.com"); assertEquals("some-email-2@example.com", selector.getSelectedEmail()); selector.selectAccount("non-existing-email@example.com"); assertEquals(-1, selector.combo.getSelectionIndex()); assertTrue(selector.getSelectedEmail().isEmpty()); } @Test public void testSelectAccount() { when(loginService.getAccounts()) .thenReturn(new HashSet<>(Arrays.asList(account1, account2, account3))); AccountSelector selector = new AccountSelector(shell, loginService); assertEquals(-1, selector.combo.getSelectionIndex()); assertTrue(selector.getSelectedEmail().isEmpty()); assertNull(selector.getSelectedCredential()); selector.selectAccount("some-email-2@example.com"); assertNotEquals(-1, selector.combo.getSelectionIndex()); assertEquals("some-email-2@example.com", selector.getSelectedEmail()); assertEquals(credential2, selector.getSelectedCredential()); } @Test public void testSelectAccount_nonExistingEmail() { when(loginService.getAccounts()).thenReturn(new HashSet<>(Arrays.asList(account1, account2))); AccountSelector selector = new AccountSelector(shell, loginService); assertEquals(-1, selector.combo.getSelectionIndex()); assertTrue(selector.getSelectedEmail().isEmpty()); selector.selectAccount("non-existing-email@example.com"); assertEquals(-1, selector.combo.getSelectionIndex()); assertTrue(selector.getSelectedEmail().isEmpty()); } @Test public void testSelectAccount_nullOrEmptyEmail() { when(loginService.getAccounts()).thenReturn(new HashSet<>(Arrays.asList(account1, account2))); AccountSelector selector = new AccountSelector(shell, loginService); assertEquals(-1, selector.combo.getSelectionIndex()); assertTrue(selector.getSelectedEmail().isEmpty()); selector.selectAccount(null); assertEquals(-1, selector.combo.getSelectionIndex()); assertTrue(selector.getSelectedEmail().isEmpty()); selector.selectAccount(""); assertEquals(-1, selector.combo.getSelectionIndex()); assertTrue(selector.getSelectedEmail().isEmpty()); } @Test public 
void testLogin_itemAddedAtTopAndSelected() { when(loginService.getAccounts()).thenReturn(new HashSet<>(Arrays.asList(account1, account2))); when(loginService.logIn()).thenReturn(account3); AccountSelector selector = new AccountSelector(shell, loginService); assertEquals(3, selector.combo.getItemCount()); assertEquals("<Sign into another account...>", selector.combo.getItem(2)); simulateSelect(selector, 2); assertEquals(4, selector.combo.getItemCount()); assertEquals("some-email-3@example.com", selector.getSelectedEmail()); assertEquals("some-email-3@example.com", selector.combo.getItem(0)); assertEquals(0, selector.combo.getSelectionIndex()); assertEquals(credential3, selector.getSelectedCredential()); assertEquals("<Sign into another account...>", selector.combo.getItem(3)); } @Test public void testLogin_existingEmail() { when(loginService.getAccounts()) .thenReturn(new HashSet<>(Arrays.asList(account1, account2, account3))); when(loginService.logIn()).thenReturn(account1); AccountSelector selector = new AccountSelector(shell, loginService); assertEquals(4, selector.combo.getItemCount()); assertEquals(-1, selector.combo.getSelectionIndex()); assertEquals("<Sign into another account...>", selector.combo.getItem(3)); simulateSelect(selector, 3); assertEquals(4, selector.combo.getItemCount()); assertNotEquals(-1, selector.combo.getSelectionIndex()); assertEquals("some-email-1@example.com", selector.getSelectedEmail()); assertEquals("some-email-1@example.com", selector.combo.getText()); assertEquals(credential1, selector.getSelectedCredential()); } @Test public void testFailedLogin_reselectLoginLinkItem() { when(loginService.getAccounts()).thenReturn(new HashSet<>(Arrays.asList(account1, account2))); when(loginService.logIn()).thenReturn(null); AccountSelector selector = new AccountSelector(shell, loginService); assertEquals(3, selector.combo.getItemCount()); simulateSelect(selector, 1); assertEquals(1, selector.combo.getSelectionIndex()); 
assertNotNull(selector.getSelectedCredential()); assertFalse(selector.getSelectedEmail().isEmpty()); assertEquals("<Sign into another account...>", selector.combo.getItem(2)); simulateSelect(selector, 2); assertEquals(3, selector.combo.getItemCount()); assertEquals(1, selector.combo.getSelectionIndex()); assertEquals(credential2, selector.getSelectedCredential()); assertEquals("some-email-2@example.com", selector.getSelectedEmail()); assertEquals("some-email-2@example.com", selector.combo.getText()); } @Test public void testIsSignedIn_notSignedIn() { AccountSelector selector = new AccountSelector(shell, loginService); assertFalse(selector.isSignedIn()); } @Test public void testIsSignedIn_signedIn() { when(loginService.getAccounts()).thenReturn(Collections.singleton(account1)); AccountSelector selector = new AccountSelector(shell, loginService); assertTrue(selector.isSignedIn()); } @Test public void testGetAccountCount() { AccountSelector selector = new AccountSelector(shell, loginService); assertEquals(0, selector.getAccountCount()); } @Test public void testGetAccountCount_oneAccount() { when(loginService.getAccounts()).thenReturn(Collections.singleton(account1)); AccountSelector selector = new AccountSelector(shell, loginService); assertEquals(1, selector.getAccountCount()); } @Test public void testGetAccountCount_threeAccounts() { when(loginService.getAccounts()).thenReturn( new HashSet<>(Arrays.asList(account1, account2, account3))); AccountSelector selector = new AccountSelector(shell, loginService); assertEquals(3, selector.getAccountCount()); } @Test public void testInitialItemOrder() { when(loginService.getAccounts()) .thenReturn(new HashSet<>(Arrays.asList(account3, account2, account1))); AccountSelector selector = new AccountSelector(shell, loginService); assertEquals(4, selector.combo.getItemCount()); assertEquals(account1.getEmail(), selector.combo.getItem(0)); assertEquals(account2.getEmail(), selector.combo.getItem(1)); 
assertEquals(account3.getEmail(), selector.combo.getItem(2)); } @Test(expected = IllegalStateException.class) public void testGetFirstEmail_noAccount() { AccountSelector selector = new AccountSelector(shell, loginService); selector.getFirstEmail(); } @Test public void testGetFirstEmail_oneAccount() { when(loginService.getAccounts()).thenReturn(Collections.singleton(account2)); AccountSelector selector = new AccountSelector(shell, loginService); assertEquals(account2.getEmail(), selector.getFirstEmail()); } @Test public void testGetFirstEmail_threeAccounts() { when(loginService.getAccounts()) .thenReturn(new HashSet<>(Arrays.asList(account3, account2, account1))); AccountSelector selector = new AccountSelector(shell, loginService); assertEquals(account1.getEmail(), selector.getFirstEmail()); // Accounts are sorted. } private void simulateSelect(AccountSelector selector, int index) { new SWTBotCombo(selector.combo).setSelection(index); } }
/** * Copyright MaDgIK Group 2010 - 2013. */ package madgik.exareme.master.connector; import com.google.gson.Gson; import madgik.exareme.common.art.entity.EntityName; import madgik.exareme.common.schema.Partition; import madgik.exareme.common.schema.PhysicalTable; import madgik.exareme.master.engine.parser.SemanticException; import madgik.exareme.master.engine.util.SchemaUtil; import madgik.exareme.master.registry.Registry; import madgik.exareme.utils.embedded.db.DBUtils; import madgik.exareme.utils.embedded.db.SQLDatabase; import madgik.exareme.utils.properties.AdpDBProperties; import madgik.exareme.utils.stream.StreamUtil; import madgik.exareme.worker.art.container.ContainerProxy; import madgik.exareme.worker.art.container.buffer.SocketBuffer; import madgik.exareme.worker.art.container.buffer.tcp.TcpSocketBuffer; import madgik.exareme.worker.art.container.netMgr.NetSession; import madgik.exareme.worker.art.container.netMgr.session.NetSessionSimple; import madgik.exareme.worker.art.executionEngine.ExecutionEngineLocator; import madgik.exareme.worker.art.executionEngine.ExecutionEngineProxy; import madgik.exareme.worker.art.executionEngine.session.ExecutionEngineSession; import madgik.exareme.worker.art.executionEngine.session.ExecutionEngineSessionPlan; import madgik.exareme.worker.art.executionEngine.statusMgr.PlanSessionStatusManagerProxy; import madgik.exareme.worker.art.executionPlan.ExecutionPlan; import madgik.exareme.worker.art.executionPlan.ExecutionPlanParser; import madgik.exareme.worker.art.registry.ArtRegistryLocator; import org.apache.avro.Schema; import org.apache.http.entity.ContentType; import org.apache.log4j.Logger; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.charset.Charset; import java.rmi.RemoteException; import java.rmi.ServerException; import java.sql.ResultSet; import java.util.*; /** * @author heraldkllapi */ public class AdpDBConnectorUtil { private static 
Logger log = Logger.getLogger(AdpDBConnectorUtil.class); public static void readRemoteTablePart(Registry registry, PhysicalTable table, Partition p, Map<String, Object> includeProps, OutputStream out) throws RemoteException { log.info("Remote Table Part: " + p.getTable() + "." + p.getpNum() + " ..."); ExecutionEngineProxy engine = ExecutionEngineLocator.getExecutionEngineProxy(); ContainerProxy[] containerProxies = ArtRegistryLocator.getArtRegistryProxy().getContainers(); BitSet filter = new BitSet(containerProxies.length); SchemaUtil.getLocationsOfPartitions(containerProxies, table, p.getpNum(), filter); int[] locations = new int[filter.cardinality()]; if (locations.length == 0) { throw new SemanticException("Partition not found: " + table.getName() + "/" + p); } int l = 0; for (int i = filter.nextSetBit(0); i >= 0; i = filter.nextSetBit(i + 1)) { locations[l++] = i; } log.debug( "Table '" + table.getName() + "/" + p + "' located at " + Arrays.toString(locations)); SocketBuffer socketBuffer = new TcpSocketBuffer(); EntityName name = socketBuffer.getNetEntityName(); ContainerProxy proxy = containerProxies[locations[0]]; boolean sendHeader = p.getpNum() == 0; boolean hasdataSerialization = (includeProps != null) && includeProps.containsKey("dataSerialization"); DataSerialization ds = !hasdataSerialization ? 
DataSerialization.ldjson : (DataSerialization) includeProps.get("dataSerialization"); // String artPlan = "container c('" + proxy.getEntityName().getName() + "', 1000); \n" // + "operator op c('AdpDBNetReaderOperator', " + "database='" + registry.getDatabase() // + "', " + "table='" + table.getName() + "', " + "part='" + p.getpNum() + "', " // + "sendHeader='" + sendHeader + "', " + "ip='" + name.getIP() + "', " + "port='" + name // .getPort() + "');"; String artPlan = "{\n" + " \"containers\": [\n" + " {\n" + " \"name\": \"c\",\n" + " \"IP\":" + "\"" + proxy.getEntityName().getName() + "\"" + ",\n" + " \"port\": \"1000\",\n" + " \"data_transfer_port\": \"1000\"\n" + " }],\n" + " \"operators\": [\n" + " {\n" + " \"name\": \"op\",\n" + " \"container\": \"c\",\n" + " \"operator\": \"madgik.exareme.master.engine.executor.remote.operator.admin.AdpDBNetReaderOperator\",\n" + " \"parameters\": [\n" + " [\n" + " \"database\",\n" + " \"" + new File(registry.getDatabase()).getParent() + "\"\n" + " ],\n" + " [\n" + " \"table\",\n" + " \"" + table.getName() + "\"\n" + " ],\n" + " [\n" + " \"part\",\n" + " \"" + p.getpNum() + "\"\n" + " ],\n" + " [\n" + " \"sendHeader\",\n" + " \"" + sendHeader + "\"\n" + " ],\n" + " [\n" + " \"dataSerialization\",\n" + " \"" + ds + "\"\n" + " ],\n" + " [\n" + " \"ip\",\n" + " \"" + name.getIP() + "\"\n" + " ],\n" + " [\n" + " \"port\",\n" + " \"" + name.getPort() + "\"\n" + " ]\n" + " ]\n" + " }]\n" + "}"; log.debug("Executing... 
\n" + artPlan); ExecutionPlan plan = null; try { ExecutionPlanParser parser = new ExecutionPlanParser(); plan = parser.parse(artPlan.toString()); } catch (Exception e) { throw new ServerException("Cannot parse generated plan.", e); } ExecutionEngineSession session = engine.createSession(); ExecutionEngineSessionPlan sessionPlan = session.startSession(); sessionPlan.submitPlan(plan); PlanSessionStatusManagerProxy sessionManager = sessionPlan.getPlanSessionStatusManagerProxy(); int waifForMs = 1000 * AdpDBProperties.getAdpDBProps().getInt("db.client.statisticsUpdate_sec"); try { NetSession net = new NetSessionSimple(); InputStream inputStream = net.openInputStream(socketBuffer); StreamUtil.copyStreams(inputStream, out); inputStream.close(); socketBuffer.close(); } catch (IOException e) { throw new RemoteException("Cannot read table", e); } while (sessionManager.hasFinished() == false && sessionManager.hasError() == false) { try { Thread.sleep(waifForMs); } catch (Exception e) { } } boolean error = sessionManager.hasError(); sessionPlan.close(); session.close(); if (error) { throw new RemoteException("Conncector error."); } } public static void readLocalTablePart(String tabName, int part, String database, Map<String, Object> alsoIncludeProps, OutputStream out) throws RemoteException { readLocalTablePart(tabName, part, database, alsoIncludeProps, DataSerialization.ldjson, out); } public static void readLocalTablePart(String tabName, int part, String database, Map<String, Object> alsoIncludeProps, DataSerialization ds, OutputStream out) throws RemoteException { try { log.info("Local Table Part: " + tabName + "." + part + " ..." + alsoIncludeProps == null); Gson g = new Gson(); SQLDatabase db = DBUtils.createEmbeddedSqliteDB(database + "/" + tabName + "." 
+ part + ".db"); ResultSet rs = db.executeAndGetResults("select * from " + tabName + ";"); int cols = rs.getMetaData().getColumnCount(); if (alsoIncludeProps != null) { if ( ds.equals(DataSerialization.ldjson)) { Map<String, Object> schema = new HashMap<String, Object>(); schema.putAll(alsoIncludeProps); ArrayList<String[]> names = new ArrayList<String[]>(); schema.put("schema", names); for (int c = 0; c < cols; ++c) { names.add(new String[]{rs.getMetaData().getColumnName(c + 1), rs.getMetaData().getColumnTypeName(c + 1)}); } out.write((g.toJson(schema) + "\n").getBytes()); } } if ( ds.equals(DataSerialization.ldjson)) { ArrayList<Object> row = new ArrayList<Object>(); while (rs.next()) { for (int c = 0; c < cols; ++c) { row.add(rs.getObject(c + 1)); } out.write((g.toJson(row) + "\n").getBytes()); row.clear(); } rs.close(); db.close(); } else if (ds.equals(DataSerialization.avro)) { Map<String, Object> row = new HashMap<String, Object>(); while (rs.next()) { for (int c = 0; c < cols; ++c) { row.put(rs.getMetaData().getColumnName(c + 1), rs.getObject(c + 1)); } out.write((g.toJson(row) + "\n").getBytes()); row.clear(); } rs.close(); db.close(); } else if(ds.equals(DataSerialization.summary)) { // only 1 row, 1 col String json = (String) rs.getObject(1); rs.close(); db.close(); out.write(g.toJson(g.fromJson(json, Map.class)).getBytes()); } else throw new RemoteException("Unable to use " + ds + "serialization."); } catch (Exception e) { throw new RemoteException("Cannot get results", e); } } private static Schema.Type convertToAvroType(String type){ type = type.trim().toUpperCase(); if ("TEXT".equals(type)) return Schema.Type.STRING; else if ("INTEGER".equals(type)) return Schema.Type.INT; else if ("REAL".equals(type)) return Schema.Type.DOUBLE; else if ("FLOAT".equals(type)) return Schema.Type.DOUBLE; return Schema.Type.valueOf(type); } }
/* * Copyright 2016 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.keycloak.models.jpa.session; import org.jboss.logging.Logger; import org.keycloak.common.util.Time; import org.keycloak.models.AuthenticatedClientSessionModel; import org.keycloak.models.ClientModel; import org.keycloak.models.KeycloakSession; import org.keycloak.models.RealmModel; import org.keycloak.models.UserModel; import org.keycloak.models.UserSessionModel; import org.keycloak.models.session.PersistentAuthenticatedClientSessionAdapter; import org.keycloak.models.session.PersistentClientSessionModel; import org.keycloak.models.session.PersistentUserSessionAdapter; import org.keycloak.models.session.PersistentUserSessionModel; import org.keycloak.models.session.UserSessionPersisterProvider; import org.keycloak.models.utils.SessionTimeoutHelper; import org.keycloak.storage.StorageId; import javax.persistence.EntityManager; import javax.persistence.Query; import javax.persistence.TypedQuery; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; import javax.persistence.LockModeType; import static org.keycloak.models.jpa.PaginationUtils.paginateQuery; 
import static org.keycloak.utils.StreamsUtil.closing;

/**
 * JPA-backed {@link UserSessionPersisterProvider}: persists user sessions and their
 * client sessions ({@link PersistentUserSessionEntity}/{@link PersistentClientSessionEntity})
 * so they survive restarts. The offline flag is stored as the string "1"/"0"
 * (see {@link #offlineToString}). Client ids are stored either directly (local storage)
 * or as a (clientStorageProvider, externalClientId) pair with the clientId column set
 * to the EXTERNAL marker.
 *
 * @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
 */
public class JpaUserSessionPersisterProvider implements UserSessionPersisterProvider {

    private static final Logger logger = Logger.getLogger(JpaUserSessionPersisterProvider.class);

    private final KeycloakSession session;
    private final EntityManager em;

    public JpaUserSessionPersisterProvider(KeycloakSession session, EntityManager em) {
        this.session = session;
        this.em = em;
    }

    /** Persists a snapshot of {@code userSession} and flushes immediately. */
    @Override
    public void createUserSession(UserSessionModel userSession, boolean offline) {
        // The adapter serializes the session's mutable state into model.getData().
        PersistentUserSessionAdapter adapter = new PersistentUserSessionAdapter(userSession);
        PersistentUserSessionModel model = adapter.getUpdatedModel();

        PersistentUserSessionEntity entity = new PersistentUserSessionEntity();
        entity.setUserSessionId(model.getUserSessionId());
        entity.setCreatedOn(model.getStarted());
        entity.setRealmId(adapter.getRealm().getId());
        entity.setUserId(adapter.getUser().getId());
        String offlineStr = offlineToString(offline);
        entity.setOffline(offlineStr);
        entity.setLastSessionRefresh(model.getLastSessionRefresh());
        entity.setData(model.getData());
        em.persist(entity);
        em.flush();
    }

    /**
     * Persists a snapshot of {@code clientSession}, splitting the client reference
     * into local id vs. (storage provider, external id) depending on the StorageId.
     */
    @Override
    public void createClientSession(AuthenticatedClientSessionModel clientSession, boolean offline) {
        PersistentAuthenticatedClientSessionAdapter adapter = new PersistentAuthenticatedClientSessionAdapter(session, clientSession);
        PersistentClientSessionModel model = adapter.getUpdatedModel();

        PersistentClientSessionEntity entity = new PersistentClientSessionEntity();
        StorageId clientStorageId = new StorageId(clientSession.getClient().getId());
        if (clientStorageId.isLocal()) {
            // Local client: real id in clientId, LOCAL markers in the other columns.
            entity.setClientId(clientStorageId.getId());
            entity.setClientStorageProvider(PersistentClientSessionEntity.LOCAL);
            entity.setExternalClientId(PersistentClientSessionEntity.LOCAL);
        } else {
            // Federated client: EXTERNAL marker in clientId, real ids in the pair.
            entity.setClientId(PersistentClientSessionEntity.EXTERNAL);
            entity.setClientStorageProvider(clientStorageId.getProviderId());
            entity.setExternalClientId(clientStorageId.getExternalId());
        }
        entity.setTimestamp(clientSession.getTimestamp());
        String offlineStr = offlineToString(offline);
        entity.setOffline(offlineStr);
        entity.setUserSessionId(clientSession.getUserSession().getId());
        entity.setData(model.getData());
        em.persist(entity);
        em.flush();
    }

    /** Deletes a user session and all of its client sessions. */
    @Override
    public void removeUserSession(String userSessionId, boolean offline) {
        String offlineStr = offlineToString(offline);

        // Remove children first, then lock-and-remove the parent row.
        em.createNamedQuery("deleteClientSessionsByUserSession")
                .setParameter("userSessionId", userSessionId)
                .setParameter("offline", offlineStr)
                .executeUpdate();

        PersistentUserSessionEntity sessionEntity = em.find(PersistentUserSessionEntity.class, new PersistentUserSessionEntity.Key(userSessionId, offlineStr), LockModeType.PESSIMISTIC_WRITE);
        if (sessionEntity != null) {
            em.remove(sessionEntity);
            em.flush();
        }
    }

    /**
     * Deletes one client session; if it was the last client session of its user
     * session, the user session row is removed as well.
     */
    @Override
    public void removeClientSession(String userSessionId, String clientUUID, boolean offline) {
        String offlineStr = offlineToString(offline);
        StorageId clientStorageId = new StorageId(clientUUID);
        // Defaults mirror the column layout written by createClientSession.
        String clientId = PersistentClientSessionEntity.EXTERNAL;
        String clientStorageProvider = PersistentClientSessionEntity.LOCAL;
        String externalId = PersistentClientSessionEntity.LOCAL;
        if (clientStorageId.isLocal()) {
            clientId = clientUUID;
        } else {
            clientStorageProvider = clientStorageId.getProviderId();
            externalId = clientStorageId.getExternalId();
        }
        PersistentClientSessionEntity sessionEntity = em.find(PersistentClientSessionEntity.class, new PersistentClientSessionEntity.Key(userSessionId, clientId, clientStorageProvider, externalId, offlineStr), LockModeType.PESSIMISTIC_WRITE);
        if (sessionEntity != null) {
            em.remove(sessionEntity);

            // Remove userSession if it was last clientSession
            List<PersistentClientSessionEntity> clientSessions = getClientSessionsByUserSession(sessionEntity.getUserSessionId(), offline);
            if (clientSessions.size() == 0) {
                // NOTE(review): this reassignment recomputes the same value set above.
                offlineStr = offlineToString(offline);
                PersistentUserSessionEntity userSessionEntity = em.find(PersistentUserSessionEntity.class, new PersistentUserSessionEntity.Key(sessionEntity.getUserSessionId(), offlineStr), LockModeType.PESSIMISTIC_WRITE);
                if (userSessionEntity != null) {
                    em.remove(userSessionEntity);
                }
            }

            em.flush();
        }
    }

    /** Loads all persisted client sessions belonging to one user session. */
    private List<PersistentClientSessionEntity> getClientSessionsByUserSession(String userSessionId, boolean offline) {
        String offlineStr = offlineToString(offline);

        TypedQuery<PersistentClientSessionEntity> query = em.createNamedQuery("findClientSessionsByUserSession", PersistentClientSessionEntity.class);
        query.setParameter("userSessionId", userSessionId);
        query.setParameter("offline", offlineStr);
        return query.getResultList();
    }

    /** Bulk-deletes all persisted sessions of a removed realm. */
    @Override
    public void onRealmRemoved(RealmModel realm) {
        // NOTE(review): the deleted-row counts are captured but never used/logged.
        int deletedClientSessions = em.createNamedQuery("deleteClientSessionsByRealm")
                .setParameter("realmId", realm.getId())
                .executeUpdate();
        int deletedUserSessions = em.createNamedQuery("deleteUserSessionsByRealm")
                .setParameter("realmId", realm.getId())
                .executeUpdate();
    }

    @Override
    public void onClientRemoved(RealmModel realm, ClientModel client) {
        onClientRemoved(client.getId());
    }

    /** Bulk-deletes all client sessions of a removed client (local or federated). */
    private void onClientRemoved(String clientUUID) {
        int num = 0;
        StorageId clientStorageId = new StorageId(clientUUID);
        if (clientStorageId.isLocal()) {
            num = em.createNamedQuery("deleteClientSessionsByClient").setParameter("clientId", clientUUID).executeUpdate();
        } else {
            num = em.createNamedQuery("deleteClientSessionsByExternalClient")
                    .setParameter("clientStorageProvider", clientStorageId.getProviderId())
                    .setParameter("externalClientId", clientStorageId.getExternalId())
                    .executeUpdate();
        }
    }

    @Override
    public void onUserRemoved(RealmModel realm, UserModel user) {
        onUserRemoved(realm, user.getId());
    }

    /** Bulk-deletes all sessions of a removed user (client sessions first). */
    private void onUserRemoved(RealmModel realm, String userId) {
        int num = em.createNamedQuery("deleteClientSessionsByUser").setParameter("userId", userId).executeUpdate();
        num = em.createNamedQuery("deleteUserSessionsByUser").setParameter("userId", userId).executeUpdate();
    }

    /** Bulk-updates lastSessionRefresh for the given session ids in one statement. */
    @Override
    public void updateLastSessionRefreshes(RealmModel realm, int lastSessionRefresh, Collection<String> userSessionIds, boolean offline) {
        String offlineStr = offlineToString(offline);
        int us = em.createNamedQuery("updateUserSessionLastSessionRefresh")
                .setParameter("lastSessionRefresh", lastSessionRefresh)
                .setParameter("realmId", realm.getId())
                .setParameter("offline", offlineStr)
                .setParameter("userSessionIds", userSessionIds)
                .executeUpdate();

        logger.debugf("Updated lastSessionRefresh of %d user sessions in realm '%s'", us, realm.getName());
    }

    /**
     * Removes expired *offline* sessions. Expiration cutoffs are computed from the
     * realm idle timeouts minus the periodic-cleaner tolerance window.
     */
    @Override
    public void removeExpired(RealmModel realm) {
        int expiredOffline = Time.currentTime() - realm.getOfflineSessionIdleTimeout() - SessionTimeoutHelper.PERIODIC_CLEANER_IDLE_TIMEOUT_WINDOW_SECONDS;

        // prefer client session timeout if set
        int expiredClientOffline = expiredOffline;
        if (realm.getClientOfflineSessionIdleTimeout() > 0) {
            expiredClientOffline = Time.currentTime() - realm.getClientOfflineSessionIdleTimeout() - SessionTimeoutHelper.PERIODIC_CLEANER_IDLE_TIMEOUT_WINDOW_SECONDS;
        }

        String offlineStr = offlineToString(true);

        logger.tracef("Trigger removing expired user sessions for realm '%s'", realm.getName());

        int cs = em.createNamedQuery("deleteExpiredClientSessions")
                .setParameter("realmId", realm.getId())
                .setParameter("lastSessionRefresh", expiredClientOffline)
                .setParameter("offline", offlineStr)
                .executeUpdate();

        int us = em.createNamedQuery("deleteExpiredUserSessions")
                .setParameter("realmId", realm.getId())
                .setParameter("lastSessionRefresh", expiredOffline)
                .setParameter("offline", offlineStr)
                .executeUpdate();

        logger.debugf("Removed %d expired user sessions and %d expired client sessions in realm '%s'", us, cs, realm.getName());
    }

    /**
     * Returns a map of clientId -> session count; EXTERNAL marker rows are
     * reassembled into federated StorageId form before keying.
     */
    @Override
    public Map<String, Long> getUserSessionsCountsByClients(RealmModel realm, boolean offline) {
        String offlineStr = offlineToString(offline);

        TypedQuery<Object[]> query = em.createNamedQuery("findClientSessionsClientIds", Object[].class);
        query.setParameter("offline", offlineStr);
        query.setParameter("realmId", realm.getId());

        // Row layout per the named query: [clientId, externalClientId, clientStorageProvider, count]
        return closing(query.getResultStream())
                .collect(Collectors.toMap(row -> {
                    String clientId = row[0].toString();
                    if (clientId.equals(PersistentClientSessionEntity.EXTERNAL)) {
                        final String externalClientId = row[1].toString();
                        final String clientStorageProvider = row[2].toString();
                        clientId = new StorageId(clientStorageProvider, externalClientId).getId();
                    }
                    return clientId;
                }, row -> (Long) row[3]));
    }

    /**
     * Loads one user session with its client sessions attached, or null if absent.
     * Client sessions whose client no longer exists are pruned via onClientRemoved.
     */
    @Override
    public UserSessionModel loadUserSession(RealmModel realm, String userSessionId, boolean offline) {
        String offlineStr = offlineToString(offline);

        TypedQuery<PersistentUserSessionEntity> userSessionQuery = em.createNamedQuery("findUserSession", PersistentUserSessionEntity.class);
        userSessionQuery.setParameter("realmId", realm.getId());
        userSessionQuery.setParameter("offline", offlineStr);
        userSessionQuery.setParameter("userSessionId", userSessionId);
        userSessionQuery.setMaxResults(1);

        Stream<PersistentUserSessionAdapter> persistentUserSessions = closing(userSessionQuery.getResultStream().map(this::toAdapter));

        return persistentUserSessions.findAny().map(userSession -> {
            TypedQuery<PersistentClientSessionEntity> clientSessionQuery = em.createNamedQuery("findClientSessionsByUserSession", PersistentClientSessionEntity.class);
            // NOTE(review): the parameter is passed as a singleton collection here,
            // while getClientSessionsByUserSession passes a bare String — the named
            // query presumably accepts a collection; confirm both call sites.
            clientSessionQuery.setParameter("userSessionId", Collections.singleton(userSessionId));
            clientSessionQuery.setParameter("offline", offlineStr);

            Set<String> removedClientUUIDs = new HashSet<>();

            closing(clientSessionQuery.getResultStream()).forEach(clientSession -> {
                        boolean added = addClientSessionToAuthenticatedClientSessionsIfPresent(userSession, clientSession);
                        if (!added) {
                            // client was removed in the meantime
                            removedClientUUIDs.add(clientSession.getClientId());
                        }
                    }
            );

            removedClientUUIDs.forEach(this::onClientRemoved);

            return userSession;
        }).orElse(null);
    }

    /** Pages through user sessions that have a session for the given client. */
    @Override
    public Stream<UserSessionModel> loadUserSessionsStream(RealmModel realm, ClientModel client, boolean offline, Integer firstResult, Integer maxResults) {
        String offlineStr = offlineToString(offline);

        TypedQuery<PersistentUserSessionEntity> query;
        StorageId clientStorageId = new StorageId(client.getId());
        if (clientStorageId.isLocal()) {
            query = paginateQuery(
                    em.createNamedQuery("findUserSessionsByClientId", PersistentUserSessionEntity.class),
                    firstResult, maxResults);
            query.setParameter("clientId", client.getId());
        } else {
            query = paginateQuery(
                    em.createNamedQuery("findUserSessionsByExternalClientId", PersistentUserSessionEntity.class),
                    firstResult, maxResults);
            query.setParameter("clientStorageProvider", clientStorageId.getProviderId());
            query.setParameter("externalClientId", clientStorageId.getExternalId());
        }
        query.setParameter("offline", offlineStr);
        query.setParameter("realmId", realm.getId());

        return loadUserSessionsWithClientSessions(query, offlineStr);
    }

    /** Pages through a user's persisted sessions. */
    @Override
    public Stream<UserSessionModel> loadUserSessionsStream(RealmModel realm, UserModel user, boolean offline, Integer firstResult, Integer maxResults) {
        String offlineStr = offlineToString(offline);

        TypedQuery<PersistentUserSessionEntity> query = paginateQuery(
                em.createNamedQuery("findUserSessionsByUserId", PersistentUserSessionEntity.class),
                firstResult, maxResults);
        query.setParameter("offline", offlineStr);
        query.setParameter("realmId", realm.getId());
        query.setParameter("userId", user.getId());

        return loadUserSessionsWithClientSessions(query, offlineStr);
    }

    /**
     * Pages through all persisted sessions ordered by id, resuming after
     * {@code lastUserSessionId} (keyset pagination across realms).
     */
    public Stream<UserSessionModel> loadUserSessionsStream(Integer firstResult, Integer maxResults, boolean offline,
                                                           String lastUserSessionId) {
        String offlineStr = offlineToString(offline);

        TypedQuery<PersistentUserSessionEntity> query = paginateQuery(em.createNamedQuery("findUserSessionsOrderedById", PersistentUserSessionEntity.class)
                .setParameter("offline", offlineStr)
                .setParameter("lastSessionId", lastUserSessionId), firstResult, maxResults);

        return loadUserSessionsWithClientSessions(query, offlineStr);
    }

    /**
     * Materializes the user-session page, then fetches all client sessions in the
     * [first, last] id range with ONE query (the page is id-ordered) and attaches
     * each to its owning session. Dangling client sessions trigger cleanup.
     */
    private Stream<UserSessionModel> loadUserSessionsWithClientSessions(TypedQuery<PersistentUserSessionEntity> query, String offlineStr) {
        List<PersistentUserSessionAdapter> userSessionAdapters = closing(query.getResultStream()
                .map(this::toAdapter)
                .filter(Objects::nonNull))
                .collect(Collectors.toList());

        Map<String, PersistentUserSessionAdapter> sessionsById = userSessionAdapters.stream()
                .collect(Collectors.toMap(UserSessionModel::getId, Function.identity()));

        Set<String> removedClientUUIDs = new HashSet<>();

        if (!sessionsById.isEmpty()) {
            // The page is ordered by id, so one range query covers all its client sessions.
            String fromUserSessionId = userSessionAdapters.get(0).getId();
            String toUserSessionId = userSessionAdapters.get(userSessionAdapters.size() - 1).getId();

            TypedQuery<PersistentClientSessionEntity> queryClientSessions = em.createNamedQuery("findClientSessionsOrderedById", PersistentClientSessionEntity.class);
            queryClientSessions.setParameter("offline", offlineStr);
            queryClientSessions.setParameter("fromSessionId", fromUserSessionId);
            queryClientSessions.setParameter("toSessionId", toUserSessionId);

            closing(queryClientSessions.getResultStream()).forEach(clientSession -> {
                PersistentUserSessionAdapter userSession = sessionsById.get(clientSession.getUserSessionId());
                // check if we have a user session for the client session
                if (userSession != null) {
                    boolean added = addClientSessionToAuthenticatedClientSessionsIfPresent(userSession, clientSession);
                    if (!added) {
                        // client was removed in the meantime
                        removedClientUUIDs.add(clientSession.getClientId());
                    }
                }
            });
        }

        for (String clientUUID : removedClientUUIDs) {
            onClientRemoved(clientUUID);
        }

        return userSessionAdapters.stream().map(UserSessionModel.class::cast);
    }

    /**
     * Attaches the client-session entity to its user session; returns false when the
     * referenced client no longer exists (caller then schedules cleanup).
     */
    private boolean addClientSessionToAuthenticatedClientSessionsIfPresent(PersistentUserSessionAdapter userSession, PersistentClientSessionEntity clientSessionEntity) {
        PersistentAuthenticatedClientSessionAdapter clientSessAdapter = toAdapter(userSession.getRealm(), userSession, clientSessionEntity);

        if (clientSessAdapter.getClient() == null) {
            return false;
        }

        userSession.getAuthenticatedClientSessions().put(clientSessionEntity.getClientId(), clientSessAdapter);
        return true;
    }

    /** Entity -> adapter; returns null when the owning realm no longer exists. */
    private PersistentUserSessionAdapter toAdapter(PersistentUserSessionEntity entity) {
        RealmModel realm = session.realms().getRealm(entity.getRealmId());
        if (realm == null) {    // Realm has been deleted concurrently, ignore the entity
            return null;
        }
        return toAdapter(realm, entity);
    }

    /** Builds a user-session adapter with an initially empty client-session map. */
    private PersistentUserSessionAdapter toAdapter(RealmModel realm, PersistentUserSessionEntity entity) {
        PersistentUserSessionModel model = new PersistentUserSessionModel();
        model.setUserSessionId(entity.getUserSessionId());
        model.setStarted(entity.getCreatedOn());
        model.setLastSessionRefresh(entity.getLastSessionRefresh());
        model.setData(entity.getData());
        model.setOffline(offlineFromString(entity.getOffline()));

        Map<String, AuthenticatedClientSessionModel> clientSessions = new HashMap<>();
        return new PersistentUserSessionAdapter(session, model, realm, entity.getUserId(), clientSessions);
    }

    /** Builds a client-session adapter, resolving federated client ids when needed. */
    private PersistentAuthenticatedClientSessionAdapter toAdapter(RealmModel realm, PersistentUserSessionAdapter userSession, PersistentClientSessionEntity entity) {
        String clientId = entity.getClientId();
        // NOTE(review): "local" literal presumably matches PersistentClientSessionEntity.LOCAL
        // used at write time — confirm the constant's value.
        if (!entity.getExternalClientId().equals("local")) {
            clientId = new StorageId(entity.getClientStorageProvider(), entity.getExternalClientId()).getId();
        }
        ClientModel client = realm.getClientById(clientId);

        PersistentClientSessionModel model = new PersistentClientSessionModel();
        model.setClientId(clientId);
        model.setUserSessionId(userSession.getId());
        model.setUserId(userSession.getUserId());
        model.setTimestamp(entity.getTimestamp());
        model.setData(entity.getData());
        return new PersistentAuthenticatedClientSessionAdapter(session, model, realm, client, userSession);
    }

    /** Total count of persisted user sessions across all realms. */
    @Override
    public int getUserSessionsCount(boolean offline) {
        String offlineStr = offlineToString(offline);

        Query query = em.createNamedQuery("findUserSessionsCount");
        query.setParameter("offline", offlineStr);

        Number n = (Number) query.getSingleResult();
        return n.intValue();
    }

    /** Count of persisted client sessions for one client (local or federated). */
    @Override
    public int getUserSessionsCount(RealmModel realm, ClientModel clientModel, boolean offline) {
        String offlineStr = offlineToString(offline);

        Query query;
        StorageId clientStorageId = new StorageId(clientModel.getId());
        if (clientStorageId.isLocal()) {
            query = em.createNamedQuery("findClientSessionsCountByClient");
            query.setParameter("clientId", clientModel.getId());
        } else {
            query = em.createNamedQuery("findClientSessionsCountByExternalClient");
            query.setParameter("clientStorageProvider", clientStorageId.getProviderId());
            query.setParameter("externalClientId", clientStorageId.getExternalId());
        }
        // Note, that realm is unused here, since the clientModel id already determines the offline user-sessions bound to an owning realm.
        query.setParameter("offline", offlineStr);

        Number n = (Number) query.getSingleResult();
        return n.intValue();
    }

    @Override
    public void close() {
        // NOOP
    }

    /** Persisted representation of the offline flag: "1" for offline, "0" otherwise. */
    private String offlineToString(boolean offline) {
        return offline ? "1" : "0";
    }

    private boolean offlineFromString(String offlineStr) {
        return "1".equals(offlineStr);
    }

}
package org.apache.lucene.index; /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; import java.util.BitSet; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.HashMap; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.analysis.WhitespaceAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexWriter.MaxFieldLength; import org.apache.lucene.store.Directory; import org.apache.lucene.store.MockRAMDirectory; /** * Test class to illustrate using IndexDeletionPolicy to provide multi-level rollback capability. * This test case creates an index of records 1 to 100, introducing a commit point every 10 records. 
 *
 * A "keep all" deletion policy is used to ensure we keep all commit points for testing purposes
 */
public class TestTransactionRollback extends LuceneTestCase {

  private static final String FIELD_RECORD_ID = "record_id";
  private Directory dir;

  // Rolls back index to a chosen ID: locates the commit whose user-data label
  // ("records 1-<id>" / "Rolled back to 1-<id>") ends with "-<id>", reopens the
  // writer at that commit with a policy that deletes every later commit, and
  // records a fresh labeled commit.
  private void rollBackLast(int id) throws Exception {

    // System.out.println("Attempting to rollback to "+id);
    String ids="-"+id;
    IndexCommit last=null;
    Collection commits = IndexReader.listCommits(dir);
    for (Iterator iterator = commits.iterator(); iterator.hasNext();) {
      IndexCommit commit = (IndexCommit) iterator.next();
      Map ud=commit.getUserData();
      if (ud.size() > 0)
        // suffix match on the label picks out the commit for record id <id>
        if (((String) ud.get("index")).endsWith(ids))
          last=commit;
    }

    if (last==null)
      throw new RuntimeException("Couldn't find commit point "+id);

    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), new RollbackDeletionPolicy(id), MaxFieldLength.UNLIMITED, last);
    Map data = new HashMap();
    data.put("index", "Rolled back to 1-"+id);
    w.commit(data);
    w.close();
  }

  // Rolls back 90 -> 80 -> ... -> 20, verifying after each step that exactly
  // records 1..expectedLastRecordId remain visible.
  public void testRepeatedRollBacks() throws Exception {

    int expectedLastRecordId=100;
    while (expectedLastRecordId>10) {
      expectedLastRecordId -=10;
      rollBackLast(expectedLastRecordId);

      BitSet expecteds = new BitSet(100);
      expecteds.set(1,(expectedLastRecordId+1),true);
      checkExpecteds(expecteds);
    }
  }

  // Asserts the live documents are exactly the record ids set in expecteds
  // (each found id is cleared; the set must end up empty).
  private void checkExpecteds(BitSet expecteds) throws Exception {
    IndexReader r = IndexReader.open(dir, true);

    //Perhaps not the most efficient approach but meets our needs here.
    for (int i = 0; i < r.maxDoc(); i++) {
      if(!r.isDeleted(i)) {
        String sval=r.document(i).get(FIELD_RECORD_ID);
        if(sval!=null) {
          int val=Integer.parseInt(sval);
          assertTrue("Did not expect document #"+val, expecteds.get(val));
          expecteds.set(val,false);
        }
      }
    }
    r.close();
    assertEquals("Should have 0 docs remaining ", 0 ,expecteds.cardinality());
  }

  /*
  private void showAvailableCommitPoints() throws Exception {
    Collection commits = IndexReader.listCommits(dir);
    for (Iterator iterator = commits.iterator(); iterator.hasNext();) {
      IndexCommit comm = (IndexCommit) iterator.next();
      System.out.print("\t Available commit point:["+comm.getUserData()+"] files=");
      Collection files = comm.getFileNames();
      for (Iterator iterator2 = files.iterator(); iterator2.hasNext();) {
        String filename = (String) iterator2.next();
        System.out.print(filename+", ");
      }
      System.out.println();
    }
  }
  */

  // Builds the fixture index: records 1..100, one labeled commit per 10 records,
  // all commit points retained by KeepAllDeletionPolicy.
  @Override
  protected void setUp() throws Exception {
    super.setUp();
    dir = new MockRAMDirectory();

    //Build index, of records 1 to 100, committing after each batch of 10
    IndexDeletionPolicy sdp=new KeepAllDeletionPolicy();
    IndexWriter w=new IndexWriter(dir,new WhitespaceAnalyzer(),sdp,MaxFieldLength.UNLIMITED);
    for(int currentRecordId=1;currentRecordId<=100;currentRecordId++) {
      Document doc=new Document();
      doc.add(new Field(FIELD_RECORD_ID,""+currentRecordId,Field.Store.YES,Field.Index.ANALYZED));
      w.addDocument(doc);

      if (currentRecordId%10 == 0) {
        Map data = new HashMap();
        data.put("index", "records 1-"+currentRecordId);
        w.commit(data);
      }
    }

    w.close();
  }

  // Rolls back to previous commit point
  class RollbackDeletionPolicy implements IndexDeletionPolicy {
    private int rollbackPoint;

    public RollbackDeletionPolicy(int rollbackPoint) {
      this.rollbackPoint = rollbackPoint;
    }

    public void onCommit(List commits) throws IOException {
    }

    // Deletes every commit whose label's trailing record id is past the
    // rollback point, leaving the target commit as the newest survivor.
    public void onInit(List commits) throws IOException {
      for (Iterator iterator = commits.iterator(); iterator.hasNext();) {
        IndexCommit commit = (IndexCommit) iterator.next();
        Map userData=commit.getUserData();
        if (userData.size() > 0) {
          // Label for a commit point is "Records 1-30"
          // This code reads the last id ("30" in this example) and deletes it
          // if it is after the desired rollback point
          String x = (String) userData.get("index");
          String lastVal = x.substring(x.lastIndexOf("-")+1);
          int last = Integer.parseInt(lastVal);
          if (last>rollbackPoint) {
            /*
            System.out.print("\tRolling back commit point:" +
              " UserData="+commit.getUserData() +")  ("+(commits.size()-1)+" commit points left) files=");
            Collection files = commit.getFileNames();
            for (Iterator iterator2 = files.iterator(); iterator2.hasNext();) {
              System.out.print(" "+iterator2.next());
            }
            System.out.println();
            */
            commit.delete();
          }
        }
      }
    }
  }

  // Deletes only the most recent commit point on writer init.
  class DeleteLastCommitPolicy implements IndexDeletionPolicy {

    public void onCommit(List commits) throws IOException {}

    public void onInit(List commits) throws IOException {
      ((IndexCommit) commits.get(commits.size()-1)).delete();
    }
  }

  // Opening a writer without naming a prior commit must not roll anything back,
  // even under a policy that deletes the latest commit point.
  public void testRollbackDeletionPolicy() throws Exception {

    for(int i=0;i<2;i++) {
      // Unless you specify a prior commit point, rollback
      // should not work:
      new IndexWriter(dir,new WhitespaceAnalyzer(),
                      new DeleteLastCommitPolicy(),
                      MaxFieldLength.UNLIMITED).close();
      IndexReader r = IndexReader.open(dir, true);
      assertEquals(100, r.numDocs());
      r.close();
    }
  }

  // Keeps all commit points (used to build index)
  class KeepAllDeletionPolicy implements IndexDeletionPolicy {
    public void onCommit(List commits) throws IOException {}
    public void onInit(List commits) throws IOException {}
  }
}
/* * Autopsy * * Copyright 2019-2021 Basis Technology Corp. * Contact: carrier <at> sleuthkit <dot> org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.autopsy.logicalimager.dsp; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FilenameFilter; import java.io.IOException; import java.io.InputStreamReader; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.logging.Level; import javax.annotation.concurrent.GuardedBy; import org.apache.commons.io.FileUtils; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorCallback; import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorProgressMonitor; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.datamodel.utils.LocalFileImporter; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.Host; import org.sleuthkit.datamodel.LocalFilesDataSource; import 
org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;

/**
 * A runnable that - copy the logical image folder to a destination folder - add
 * SearchResults.txt and *_users.txt files to report - add an image data source
 * to the case database.
 */
final class AddLogicalImageTask implements Runnable {

    /**
     * Information about a file including the object id of the file as well as
     * the object id of the data source.
     */
    private static class FileId {

        private final long dataSourceId;
        private final long fileId;

        /**
         * Main constructor.
         *
         * @param dataSourceId Object Id of the data source.
         * @param fileId       Object Id of the file.
         */
        FileId(long dataSourceId, long fileId) {
            this.dataSourceId = dataSourceId;
            this.fileId = fileId;
        }

        /**
         * Returns the data source id of the file.
         *
         * @return The data source id of the file.
         */
        long getDataSourceId() {
            return dataSourceId;
        }

        /**
         * Returns the object id of the file.
         *
         * @return The object id of the file.
         */
        long getFileId() {
            return fileId;
        }
    }

    private final static Logger LOGGER = Logger.getLogger(AddLogicalImageTask.class.getName());
    private final static String SEARCH_RESULTS_TXT = "SearchResults.txt"; //NON-NLS
    private final static String USERS_TXT = "_users.txt"; //NON-NLS
    private final static String MODULE_NAME = "Logical Imager"; //NON-NLS
    private final static String ROOT_STR = "root"; // NON-NLS
    private final static String VHD_EXTENSION = ".vhd"; // NON-NLS
    private final static int REPORT_PROGRESS_INTERVAL = 100;
    private final static int POST_ARTIFACT_INTERVAL = 1000;
    private final String deviceId;
    private final String timeZone;
    private final File src;
    private final File dest;
    private final Host host;
    private final DataSourceProcessorCallback callback;
    private final DataSourceProcessorProgressMonitor progressMonitor;
    private final Blackboard blackboard;
    private final Case currentCase;

    private volatile boolean cancelled;
    private volatile boolean createVHD;
    private long totalFiles;
    // Maps a VHD image path to its data source object id; populated by getInterestingFileMapForVHD().
    private Map<String, Long> imagePathToObjIdMap;

    private final Object addMultipleImagesLock;
    @GuardedBy("addMultipleImagesLock")
    private AddMultipleImagesTask addMultipleImagesTask = null;

    AddLogicalImageTask(String deviceId,
            String timeZone,
            File src, File dest,
            Host host,
            DataSourceProcessorProgressMonitor progressMonitor,
            DataSourceProcessorCallback callback
    ) throws NoCurrentCaseException {
        this.deviceId = deviceId;
        this.timeZone = timeZone;
        this.src = src;
        this.dest = dest;
        this.host = host;
        this.progressMonitor = progressMonitor;
        this.callback = callback;
        this.currentCase = Case.getCurrentCase();
        this.blackboard = this.currentCase.getServices().getArtifactsBlackboard();
        this.addMultipleImagesLock = new Object();
    }

    /**
     * Add SearchResults.txt and *_users.txt to the case report Adds the image
     * to the case database.
     */
    @Messages({
        "# {0} - src", "# {1} - dest",
        "AddLogicalImageTask.copyingImageFromTo=Copying image from {0} to {1}",
        "AddLogicalImageTask.doneCopying=Done copying",
        "# {0} - src", "# {1} - dest",
        "AddLogicalImageTask.failedToCopyDirectory=Failed to copy directory {0} to {1}",
        "# {0} - file", "AddLogicalImageTask.addingToReport=Adding {0} to report",
        "# {0} - file", "AddLogicalImageTask.doneAddingToReport=Done adding {0} to report",
        "AddLogicalImageTask.ingestionCancelled=Ingestion cancelled",
        "# {0} - file", "AddLogicalImageTask.failToGetCanonicalPath=Fail to get canonical path for {0}",
        "# {0} - sparseImageDirectory", "AddLogicalImageTask.directoryDoesNotContainSparseImage=Directory {0} does not contain any images",
        "AddLogicalImageTask.noCurrentCase=No current case",
        "AddLogicalImageTask.addingInterestingFiles=Adding search results as interesting files",
        "AddLogicalImageTask.doneAddingInterestingFiles=Done adding search results as interesting files",
        "# {0} - SearchResults.txt", "# {1} - directory",
        "AddLogicalImageTask.cannotFindFiles=Cannot find {0} in {1}",
        "# {0} - reason",
        "AddLogicalImageTask.failedToAddInterestingFiles=Failed to add interesting files: {0}",
        "AddLogicalImageTask.addingExtractedFiles=Adding extracted files",
        "AddLogicalImageTask.doneAddingExtractedFiles=Done adding extracted files",
        "# {0} - reason",
        "AddLogicalImageTask.failedToGetTotalFilesCount=Failed to get total files count: {0}",
        "AddLogicalImageTask.addImageCancelled=Add image cancelled"
    })
    @Override
    public void run() {
        List<String> errorList = new ArrayList<>();
        List<Content> emptyDataSources = new ArrayList<>();

        // Copy the logical image folder to the destination folder. A copy failure
        // is recorded but does not abort the task here (the missing
        // SearchResults.txt check below will abort it).
        try {
            progressMonitor.setProgressText(Bundle.AddLogicalImageTask_copyingImageFromTo(src.toString(), dest.toString()));
            FileUtils.copyDirectory(src, dest);
            progressMonitor.setProgressText(Bundle.AddLogicalImageTask_doneCopying());
        } catch (IOException ex) {
            // Copy directory failed
            String msg = Bundle.AddLogicalImageTask_failedToCopyDirectory(src.toString(), dest.toString());
            errorList.add(msg);
        }

        if (cancelled) {
            // Don't delete destination directory once we started adding interesting files.
            // At this point the database and destination directory are complete.
            deleteDestinationDirectory();
            errorList.add(Bundle.AddLogicalImageTask_addImageCancelled());
            callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errorList, emptyDataSources);
            return;
        }

        // Add the SearchResults.txt and *_users.txt to the case report
        String resultsFilename;
        if (Paths.get(dest.toString(), SEARCH_RESULTS_TXT).toFile().exists()) {
            resultsFilename = SEARCH_RESULTS_TXT;
        } else {
            errorList.add(Bundle.AddLogicalImageTask_cannotFindFiles(SEARCH_RESULTS_TXT, dest.toString()));
            callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errorList, emptyDataSources);
            return;
        }
        progressMonitor.setProgressText(Bundle.AddLogicalImageTask_addingToReport(resultsFilename));
        String status = addReport(Paths.get(dest.toString(), resultsFilename), resultsFilename + " " + src.getName());
        if (status != null) {
            errorList.add(status);
            callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errorList, emptyDataSources);
            return;
        }
        progressMonitor.setProgressText(Bundle.AddLogicalImageTask_doneAddingToReport(resultsFilename));

        // Add all *_users.txt files to report.
        // NOTE: File.listFiles() returns null on I/O error; guard against it.
        File[] userFiles = dest.listFiles((dir, name) -> name.endsWith(USERS_TXT));
        if (userFiles != null) {
            for (File userFile : userFiles) {
                progressMonitor.setProgressText(Bundle.AddLogicalImageTask_addingToReport(userFile.getName()));
                status = addReport(userFile.toPath(), userFile.getName() + " " + src.getName());
                if (status != null) {
                    errorList.add(status);
                    callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errorList, emptyDataSources);
                    return;
                }
                progressMonitor.setProgressText(Bundle.AddLogicalImageTask_doneAddingToReport(userFile.getName()));
            }
        }

        // Get all VHD files in the dest directory
        List<String> imagePaths = new ArrayList<>();
        File[] destFiles = dest.listFiles();
        if (destFiles != null) {
            for (File f : destFiles) {
                if (f.getName().endsWith(VHD_EXTENSION)) {
                    try {
                        imagePaths.add(f.getCanonicalPath());
                    } catch (IOException ioe) {
                        String msg = Bundle.AddLogicalImageTask_failToGetCanonicalPath(f.getName());
                        errorList.add(msg);
                        callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errorList, emptyDataSources);
                        return;
                    }
                }
            }
        }

        Path resultsPath = Paths.get(dest.toString(), resultsFilename);
        // Count the lines of the results file with try-with-resources so the
        // underlying file handle is always released (Files.lines() must be closed).
        try (BufferedReader lineCounter = Files.newBufferedReader(resultsPath)) {
            long lineCount = 0;
            while (lineCounter.readLine() != null) {
                lineCount++;
            }
            totalFiles = lineCount - 1; // skip the header line
        } catch (IOException ex) {
            errorList.add(Bundle.AddLogicalImageTask_failedToGetTotalFilesCount(ex.getMessage()));
            callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errorList, emptyDataSources);
            return;
        }

        List<Content> newDataSources = new ArrayList<>();
        Map<String, List<FileId>> interestingFileMap = new HashMap<>();

        if (imagePaths.isEmpty()) {
            createVHD = false;
            // No VHD in src directory, try ingest the root directory as local files
            File root = Paths.get(dest.toString(), ROOT_STR).toFile();
            if (root.exists() && root.isDirectory()) {
                imagePaths.add(root.getAbsolutePath());
            } else {
                String msg = Bundle.AddLogicalImageTask_directoryDoesNotContainSparseImage(dest);
                errorList.add(msg);
                callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errorList, emptyDataSources);
                return;
            }

            try {
                progressMonitor.setProgressText(Bundle.AddLogicalImageTask_addingExtractedFiles());
                interestingFileMap = addExtractedFiles(dest, resultsPath, host, newDataSources);
                progressMonitor.setProgressText(Bundle.AddLogicalImageTask_doneAddingExtractedFiles());
            } catch (IOException | TskCoreException ex) {
                errorList.add(ex.getMessage());
                LOGGER.log(Level.SEVERE, String.format("Failed to add datasource: %s", ex.getMessage()), ex); // NON-NLS
                callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errorList, emptyDataSources);
                return;
            }
        } else {
            createVHD = true;
            // ingest the VHDs
            try {
                synchronized (addMultipleImagesLock) {
                    if (cancelled) {
                        LOGGER.log(Level.SEVERE, "Add VHD cancelled"); // NON-NLS
                        errorList.add(Bundle.AddLogicalImageTask_addImageCancelled());
                        callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errorList, emptyDataSources);
                        return;
                    }
                    addMultipleImagesTask = new AddMultipleImagesTask(deviceId, imagePaths, timeZone, host, progressMonitor);
                }
                addMultipleImagesTask.run();
                if (addMultipleImagesTask.getResult() == DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS) {
                    LOGGER.log(Level.SEVERE, "Failed to add VHD datasource"); // NON-NLS
                    callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, addMultipleImagesTask.getErrorMessages(), emptyDataSources);
                    return;
                }
                try {
                    interestingFileMap = getInterestingFileMapForVHD(Paths.get(dest.toString(), resultsFilename));
                } catch (TskCoreException | IOException ex) {
                    errorList.add(Bundle.AddLogicalImageTask_failedToAddInterestingFiles(ex.getMessage()));
                    LOGGER.log(Level.SEVERE, "Failed to add interesting files", ex); // NON-NLS
                    callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.NONCRITICAL_ERRORS, errorList, emptyDataSources);
                }
            } catch (NoCurrentCaseException ex) {
                String msg = Bundle.AddLogicalImageTask_noCurrentCase();
                errorList.add(msg);
                callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errorList, emptyDataSources);
                return;
            }
        }

        if (cancelled) {
            if (!createVHD) {
                // TODO: When 5453 is fixed, we should be able to delete it when adding VHD.
                deleteDestinationDirectory();
            }
            errorList.add(Bundle.AddLogicalImageTask_addImageCancelled());
            callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errorList, emptyDataSources);
            return;
        }

        try {
            progressMonitor.setProgressText(Bundle.AddLogicalImageTask_addingInterestingFiles());
            addInterestingFiles(interestingFileMap);
            progressMonitor.setProgressText(Bundle.AddLogicalImageTask_doneAddingInterestingFiles());
            if (createVHD) {
                callback.done(addMultipleImagesTask.getResult(), addMultipleImagesTask.getErrorMessages(), addMultipleImagesTask.getNewDataSources());
            } else {
                callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.NO_ERRORS, errorList, newDataSources);
            }
        } catch (IOException | TskCoreException ex) {
            errorList.add(Bundle.AddLogicalImageTask_failedToAddInterestingFiles(ex.getMessage()));
            LOGGER.log(Level.SEVERE, "Failed to add interesting files", ex); // NON-NLS
            callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.NONCRITICAL_ERRORS, errorList, emptyDataSources);
        }
    }

    /**
     * Add a file specified by the reportPath to the case report.
     *
     * @param reportPath Path to the report to be added
     * @param reportName Name associated the report
     *
     * @returns null if success, or exception message if failure
     *
     */
    @Messages({
        "# {0} - file", "# {1} - exception message",
        "AddLogicalImageTask.failedToAddReport=Failed to add report {0}. Reason= {1}"
    })
    private String addReport(Path reportPath, String reportName) {
        if (!reportPath.toFile().exists()) {
            return null; // if the reportPath doesn't exist, just ignore it.
        }
        try {
            Case.getCurrentCase().addReport(reportPath.toString(), "LogicalImager", reportName); //NON-NLS
            return null;
        } catch (TskCoreException ex) {
            String msg = Bundle.AddLogicalImageTask_failedToAddReport(reportPath.toString(), ex.getMessage());
            LOGGER.log(Level.SEVERE, String.format("Failed to add report %s. Reason= %s", reportPath.toString(), ex.getMessage()), ex); // NON-NLS
            return msg;
        }
    }

    /**
     * Attempts to cancel the processing of the input image files. May result in
     * partial processing of the input.
     */
    void cancelTask() {
        LOGGER.log(Level.WARNING, "AddLogicalImageTask cancelled, processing may be incomplete"); // NON-NLS
        synchronized (addMultipleImagesLock) {
            cancelled = true;
            if (addMultipleImagesTask != null) {
                addMultipleImagesTask.cancelTask();
            }
        }
    }

    /**
     * Inverts the case database's objId-to-image-paths map so each image path
     * can be looked up to find its data source object id.
     *
     * @param imagePaths Map of data source object id to its image paths.
     *
     * @return Map of image path to data source object id.
     */
    private Map<String, Long> imagePathsToDataSourceObjId(Map<Long, List<String>> imagePaths) {
        Map<String, Long> imagePathToObjId = new HashMap<>();
        for (Map.Entry<Long, List<String>> entry : imagePaths.entrySet()) {
            Long key = entry.getKey();
            List<String> names = entry.getValue();
            for (String name : names) {
                imagePathToObjId.put(name, key);
            }
        }
        return imagePathToObjId;
    }

    /**
     * Creates TSK_INTERESTING_ITEM analysis results for all matched files and
     * posts them to the blackboard in batches of POST_ARTIFACT_INTERVAL.
     *
     * @param interestingFileMap Map keyed by "ruleSetName\truleName" to the
     *                           files that matched that rule.
     *
     * @throws IOException
     * @throws TskCoreException
     */
    @Messages({
        "# {0} - line number", "# {1} - fields length", "# {2} - expected length",
        "AddLogicalImageTask.notEnoughFields=File does not contain enough fields at line {0}, got {1}, expecting {2}",
        "# {0} - target image path",
        "AddLogicalImageTask.cannotFindDataSourceObjId=Cannot find obj_id in tsk_image_names for {0}",
        "# {0} - file number", "# {1} - total files",
        "AddLogicalImageTask.addingInterestingFile=Adding interesting files ({0}/{1})",
        "AddLogicalImageTask.logicalImagerResults=Logical Imager results"
    })
    private void addInterestingFiles(Map<String, List<FileId>> interestingFileMap) throws IOException, TskCoreException {
        int lineNumber = 0;
        List<BlackboardArtifact> artifacts = new ArrayList<>();

        Iterator<Map.Entry<String, List<FileId>>> iterator = interestingFileMap.entrySet().iterator();
        while (iterator.hasNext()) {
            if (cancelled) {
                // Don't delete destination directory once we started adding interesting files.
                // At this point the database and destination directory are complete.
                break;
            }
            Map.Entry<String, List<FileId>> entry = iterator.next();
            String key = entry.getKey();
            String ruleName;
            // key is formatted as "ruleSetName\truleName"
            String[] split = key.split("\t");
            ruleName = split[1];
            List<FileId> fileIds = entry.getValue();
            for (FileId fileId : fileIds) {
                if (cancelled) {
                    // Flush whatever has been accumulated before bailing out.
                    postArtifacts(artifacts);
                    return;
                }
                if (lineNumber % REPORT_PROGRESS_INTERVAL == 0) {
                    progressMonitor.setProgressText(Bundle.AddLogicalImageTask_addingInterestingFile(lineNumber, totalFiles));
                }
                if (lineNumber % POST_ARTIFACT_INTERVAL == 0) {
                    postArtifacts(artifacts);
                    artifacts.clear();
                }
                addInterestingFileToArtifacts(fileId.getFileId(), fileId.getDataSourceId(), Bundle.AddLogicalImageTask_logicalImagerResults(), ruleName, artifacts);
                lineNumber++;
            }
            iterator.remove();
        }
        postArtifacts(artifacts);
    }

    /**
     * Builds a single TSK_INTERESTING_ITEM analysis result for a file and
     * appends it to the given artifact list (posted later in a batch).
     */
    private void addInterestingFileToArtifacts(long fileId, long dataSourceId, String ruleSetName, String ruleName, List<BlackboardArtifact> artifacts) throws TskCoreException {
        BlackboardArtifact artifact;
        try {
            artifact = this.blackboard.newAnalysisResult(
                    BlackboardArtifact.Type.TSK_INTERESTING_ITEM, fileId, dataSourceId,
                    Score.SCORE_LIKELY_NOTABLE,
                    null, ruleSetName, null,
                    Arrays.asList(
                            new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME, ruleSetName),
                            new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, MODULE_NAME, ruleName)
                    ))
                    .getAnalysisResult();
        } catch (Blackboard.BlackboardException ex) {
            throw new TskCoreException("Unable to create analysis result.", ex);
        }
        artifacts.add(artifact);
    }

    /**
     * Parses the tab-separated results file and, for each row, queries the case
     * database for the matching files inside the added VHD data sources.
     *
     * @param resultsPath Path to SearchResults.txt.
     *
     * @return Map keyed by "ruleSetName\truleName" to the matched files.
     *
     * @throws TskCoreException
     * @throws IOException      if a row does not have the expected 14 fields.
     */
    @Messages({
        "# {0} - file number", "# {1} - total files",
        "AddLogicalImageTask.searchingInterestingFile=Searching for interesting files ({0}/{1})"
    })
    private Map<String, List<FileId>> getInterestingFileMapForVHD(Path resultsPath) throws TskCoreException, IOException {
        Map<Long, List<String>> objIdToimagePathsMap = currentCase.getSleuthkitCase().getImagePaths();
        imagePathToObjIdMap = imagePathsToDataSourceObjId(objIdToimagePathsMap);
        Map<String, List<FileId>> interestingFileMap = new HashMap<>();

        try (BufferedReader br = new BufferedReader(new InputStreamReader(
                new FileInputStream(resultsPath.toFile()), "UTF8"))) { // NON-NLS
            String line;
            br.readLine(); // skip the header line
            int lineNumber = 2;
            while ((line = br.readLine()) != null) {
                if (cancelled) {
                    // Don't delete destination directory once we started adding interesting files.
                    // At this point the database and destination directory are complete.
                    break;
                }
                String[] fields = line.split("\t", -1); // NON-NLS
                if (fields.length != 14) {
                    throw new IOException(Bundle.AddLogicalImageTask_notEnoughFields(lineNumber, fields.length, 14));
                }
                String vhdFilename = fields[0];
//                String fileSystemOffsetStr = fields[1];
                String fileMetaAddressStr = fields[2];
//                String extractStatusStr = fields[3];
                String ruleSetName = fields[4];
                String ruleName = fields[5];
//                String description = fields[6];
                String filename = fields[7];
                String parentPath = fields[8];

                if (lineNumber % REPORT_PROGRESS_INTERVAL == 0) {
                    progressMonitor.setProgressText(Bundle.AddLogicalImageTask_searchingInterestingFile(lineNumber, totalFiles));
                }

                String query = makeQuery(vhdFilename, fileMetaAddressStr, parentPath, filename);
                List<AbstractFile> matchedFiles = Case.getCurrentCase().getSleuthkitCase().findAllFilesWhere(query);
                List<FileId> fileIds = new ArrayList<>();
                for (AbstractFile file : matchedFiles) {
                    fileIds.add(new FileId(file.getDataSourceObjectId(), file.getId()));
                }
                String key = String.format("%s\t%s", ruleSetName, ruleName);
                interestingFileMap.computeIfAbsent(key, (k) -> new ArrayList<>())
                        .addAll(fileIds);
                lineNumber++;
            } // end reading file
        }
        return interestingFileMap;
    }

    /**
     * Posts a batch of artifacts to the blackboard; errors are logged, not
     * rethrown, so a posting failure does not abort the task.
     */
    private void postArtifacts(List<BlackboardArtifact> artifacts) {
        try {
            blackboard.postArtifacts(artifacts, MODULE_NAME, null);
        } catch (Blackboard.BlackboardException ex) {
            LOGGER.log(Level.SEVERE, "Unable to post artifacts to blackboard", ex); //NON-NLS
        }
    }

    /**
     * Adds the extracted local files (no-VHD case) to a new local-files data
     * source inside a single case database transaction.
     *
     * @param src            Directory containing the extracted files.
     * @param resultsPath    Path to SearchResults.txt.
     * @param host           Host for the new data source.
     * @param newDataSources Output list; the new data source is appended on
     *                       success.
     *
     * @return Map keyed by "ruleSetName\truleName" to the added files.
     *
     * @throws TskCoreException
     * @throws IOException      if a row does not have the expected 14 fields.
     */
    @Messages({
        "# {0} - file number", "# {1} - total files",
        "AddLogicalImageTask.addingExtractedFile=Adding extracted files ({0}/{1})"
    })
    private Map<String, List<FileId>> addExtractedFiles(File src, Path resultsPath, Host host, List<Content> newDataSources) throws TskCoreException, IOException {
        SleuthkitCase skCase = Case.getCurrentCase().getSleuthkitCase();
        SleuthkitCase.CaseDbTransaction trans = null;
        Map<String, List<FileId>> interestingFileMap = new HashMap<>();

        // NOTE(review): an unexpected IOException while reading (as opposed to
        // the explicit 14-field throw below) leaves the transaction neither
        // committed nor rolled back — confirm whether that path is reachable.
        try {
            trans = skCase.beginTransaction();
            LocalFilesDataSource localFilesDataSource = skCase.addLocalFilesDataSource(deviceId, this.src.getName(), timeZone, host, trans);
            LocalFileImporter fileImporter = new LocalFileImporter(skCase, trans);

            try (BufferedReader br = new BufferedReader(new InputStreamReader(
                    new FileInputStream(resultsPath.toFile()), "UTF8"))) { // NON-NLS
                String line;
                br.readLine(); // skip the header line
                int lineNumber = 2;
                while ((line = br.readLine()) != null) {
                    if (cancelled) {
                        rollbackTransaction(trans);
                        return new HashMap<>();
                    }
                    String[] fields = line.split("\t", -1); // NON-NLS
                    if (fields.length != 14) {
                        rollbackTransaction(trans);
                        throw new IOException(Bundle.AddLogicalImageTask_notEnoughFields(lineNumber, fields.length, 14));
                    }
                    String vhdFilename = fields[0];
//                    String fileSystemOffsetStr = fields[1];
//                    String fileMetaAddressStr = fields[2];
//                    String extractStatusStr = fields[3];
                    String ruleSetName = fields[4];
                    String ruleName = fields[5];
//                    String description = fields[6];
                    String filename = fields[7];
                    String parentPath = fields[8];
                    String extractedFilePath = fields[9];
                    String crtime = fields[10];
                    String mtime = fields[11];
                    String atime = fields[12];
                    String ctime = fields[13];
                    parentPath = ROOT_STR + "/" + vhdFilename + "/" + parentPath;

                    if (lineNumber % REPORT_PROGRESS_INTERVAL == 0) {
                        progressMonitor.setProgressText(Bundle.AddLogicalImageTask_addingExtractedFile(lineNumber, totalFiles));
                    }

                    //addLocalFile here
                    AbstractFile fileAdded = fileImporter.addLocalFile(
                            Paths.get(src.toString(), extractedFilePath).toFile(),
                            filename,
                            parentPath,
                            Long.parseLong(ctime),
                            Long.parseLong(crtime),
                            Long.parseLong(atime),
                            Long.parseLong(mtime),
                            localFilesDataSource);
                    String key = String.format("%s\t%s", ruleSetName, ruleName);
                    long dataSourceId = fileAdded.getDataSourceObjectId();
                    long fileId = fileAdded.getId();
                    interestingFileMap.computeIfAbsent(key, (k) -> new ArrayList<>())
                            .add(new FileId(dataSourceId, fileId));
                    lineNumber++;
                } // end reading file
            }
            trans.commit();
            newDataSources.add(localFilesDataSource);
            return interestingFileMap;
        } catch (NumberFormatException | TskCoreException ex) {
            LOGGER.log(Level.SEVERE, "Error adding extracted files", ex); // NON-NLS
            rollbackTransaction(trans);
            throw new TskCoreException("Error adding extracted files", ex);
        }
    }

    /**
     * Rolls back the given transaction if it was started; rollback failures
     * are logged but not rethrown so the original error remains visible.
     */
    private void rollbackTransaction(SleuthkitCase.CaseDbTransaction trans) throws TskCoreException {
        if (null != trans) {
            try {
                trans.rollback();
            } catch (TskCoreException ex) {
                LOGGER.log(Level.SEVERE, String.format("Failed to rollback transaction: %s", ex.getMessage()), ex); // NON-NLS
            }
        }
    }

    /**
     * Deletes the destination directory on cancellation.
     *
     * @return true if the directory was deleted, false otherwise.
     */
    private boolean deleteDestinationDirectory() {
        try {
            FileUtils.deleteDirectory(dest);
            LOGGER.log(Level.INFO, String.format("Cancellation: Deleted directory %s", dest.toString())); // NON-NLS
            return true;
        } catch (IOException ex) {
            LOGGER.log(Level.WARNING, String.format("Cancellation: Failed to delete directory %s", dest.toString()), ex); // NON-NLS
            return false;
        }
    }

    /**
     * Builds the WHERE clause used to find a results-file row's file in the
     * case database by data source, meta address and name.
     *
     * @throws TskCoreException if the VHD path has no known data source id.
     */
    String makeQuery(String vhdFilename, String fileMetaAddressStr, String parentPath, String filename) throws TskCoreException {
        String query;
        String targetImagePath = Paths.get(dest.toString(), vhdFilename).toString();
        Long dataSourceObjId = imagePathToObjIdMap.get(targetImagePath);
        if (dataSourceObjId == null) {
            throw new TskCoreException(Bundle.AddLogicalImageTask_cannotFindDataSourceObjId(targetImagePath));
        }
        query = String.format("data_source_obj_id = '%s' AND meta_addr = '%s' AND name = '%s'", // NON-NLS
                dataSourceObjId.toString(), fileMetaAddressStr, filename.replace("'", "''"));
        // TODO - findAllFilesWhere should SQL-escape the query
        return query;
    }
}
package com.mangopay.core; import com.mangopay.core.enumerations.*; import com.mangopay.entities.*; import com.mangopay.entities.subentities.*; import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; import java.io.File; import java.net.URL; import java.nio.file.Files; import java.nio.file.Paths; import java.util.Calendar; import java.util.List; import static org.junit.Assert.*; /** * UserApiImpl test methods */ public class UserApiImplTest extends BaseTest { @Test public void createNatural() throws Exception { UserNatural john = this.getJohn(); assertTrue(john.getId().length() > 0); assertTrue(john.getPersonType().equals(PersonType.NATURAL)); } @Test public void createLegal() throws Exception { UserLegal matrix = this.getMatrix(); assertTrue(matrix.getId().length() > 0); assertEquals(matrix.getPersonType(), PersonType.LEGAL); assertEquals("LU12345678", matrix.getCompanyNumber()); } @Test public void createLegalFailsIfRequiredPropsNotProvided() throws Exception { UserLegal user = new UserLegal(); User ret = null; try { ret = this.api.getUserApi().create(user); Assert.fail("CreateLegal() should throw an exception when required props are not provided"); } catch (ResponseException ex) { assertNull(ret); } } @Test public void createLegalPassesIfRequiredPropsProvided() throws Exception { UserLegal user = new UserLegal(); user.setHeadquartersAddress(new Address()); user.getHeadquartersAddress().setAddressLine1("AddressLine1"); user.getHeadquartersAddress().setAddressLine2("AddressLine2"); user.getHeadquartersAddress().setCity("City"); user.getHeadquartersAddress().setCountry(CountryIso.FR); user.getHeadquartersAddress().setPostalCode("11222"); user.getHeadquartersAddress().setRegion("Region"); user.setName("SomeOtherSampleOrg"); user.setLegalPersonType(LegalPersonType.BUSINESS); user.setLegalRepresentativeFirstName("RepFName"); user.setLegalRepresentativeLastName("RepLName"); Calendar c = Calendar.getInstance(); c.set(1975, 12, 21, 0, 0, 0); 
user.setLegalRepresentativeBirthday(c.getTimeInMillis() / 1000); user.setLegalRepresentativeNationality(CountryIso.FR); user.setLegalRepresentativeCountryOfResidence(CountryIso.FR); user.setEmail("email@email.org"); user.setCompanyNumber("LU12345678"); User ret = null; ret = this.api.getUserApi().create(user); assertTrue("Created successfully after required props set", ret.getId().length() > 0); assertEqualInputProps(user, ret); } @Test public void getAllUsers() throws Exception { Sorting sort = new Sorting(); sort.addField("CreationDate", SortDirection.desc); List<User> users = this.api.getUserApi().getAll(null, sort); assertTrue(users.get(0).getCreationDate() > users.get(users.size() - 1).getCreationDate()); sort = new Sorting(); sort.addField("CreationDate", SortDirection.asc); users = this.api.getUserApi().getAll(null, sort); assertTrue(users.get(0).getCreationDate() < users.get(users.size() - 1).getCreationDate()); } @Test public void getNatural() throws Exception { UserNatural john = this.getJohn(); User user1 = this.api.getUserApi().get(john.getId()); UserNatural user2 = this.api.getUserApi().getNatural(john.getId()); assertTrue(user1.getPersonType().equals(PersonType.NATURAL)); assertTrue(user1.getId().equals(john.getId())); assertTrue(user2.getPersonType().equals(PersonType.NATURAL)); assertTrue(user2.getId().equals(john.getId())); assertEqualInputProps(user1, john); } @Test public void getNaturalFailsForLegalUser() throws Exception { UserLegal matrix = this.getMatrix(); UserNatural user = null; try { user = this.api.getUserApi().getNatural(matrix.getId()); Assert.fail("GetUser() should throw an exception when called with legal user id"); } catch (ResponseException ex) { assertNull(user); } } @Test public void getLegalFailsForNaturalUser() throws Exception { UserNatural john = this.getJohn(); User user = null; try { user = this.api.getUserApi().getLegal(john.getId()); Assert.fail("GetLegal() should throw an exception when called with natural user id"); } 
catch (ResponseException ex) { assertNull(user); } } @Test public void getLegal() throws Exception { UserLegal matrix = this.getMatrix(); User user1 = this.api.getUserApi().get(matrix.getId()); User user2 = this.api.getUserApi().getLegal(matrix.getId()); assertEqualInputProps(user1, matrix); assertEqualInputProps(user2, matrix); } @Test public void updateNatural() throws Exception { UserNatural john = this.getJohn(); john.setLastName(john.getLastName() + " - CHANGED"); User userSaved = this.api.getUserApi().update(john); User userFetched = this.api.getUserApi().get(john.getId()); assertEqualInputProps(john, userSaved); assertEqualInputProps(john, userFetched); } @Test public void updateNaturalNonASCII() throws Exception { UserNatural john = this.getJohn(); john.setLastName(john.getLastName() + " - CHANGED"); User userSaved = this.api.getUserApi().update(john); User userFetched = this.api.getUserApi().get(john.getId()); assertEqualInputProps(john, userSaved); assertEqualInputProps(john, userFetched); } @Test public void updateLegal() throws Exception { UserLegal matrix = this.getMatrix(); matrix.setLegalRepresentativeLastName(matrix.getLegalRepresentativeLastName() + " - CHANGED"); User userSaved = this.api.getUserApi().update(matrix); User userFetched = this.api.getUserApi().get(matrix.getId()); assertEqualInputProps(userSaved, matrix); assertEqualInputProps(userFetched, matrix); } @Test public void updateLegalWithoutAddresses() throws Exception { UserLegal matrix = this.getMatrixWithoutOptionalFields(); matrix.setLegalRepresentativeLastName(matrix.getLegalRepresentativeLastName() + " - CHANGED"); User userSaved = this.api.getUserApi().update(matrix); User userFetched = this.api.getUserApi().get(matrix.getId()); assertEqualInputProps(userSaved, matrix); assertEqualInputProps(userFetched, matrix); } @Test public void createBankAccountIBAN() { try { UserNatural john = this.getJohn(); BankAccount account = this.getJohnsAccount(); assertTrue(account.getId().length() > 
0); assertEquals(account.getUserId(), john.getId()); } catch (Exception ex) { Assert.fail(ex.getMessage()); } } @Test public void createBankAccountGB() throws Exception { UserNatural john = this.getJohn(); BankAccount account = new BankAccount(); account.setType(BankAccountType.GB); account.setOwnerName(john.getFirstName() + " " + john.getLastName()); account.setOwnerAddress(john.getAddress()); account.setDetails(new BankAccountDetailsGB()); ((BankAccountDetailsGB) account.getDetails()).setAccountNumber("63956474"); ((BankAccountDetailsGB) account.getDetails()).setSortCode("200000"); account.setType(BankAccountType.GB); BankAccount createAccount = this.api.getUserApi().createBankAccount(john.getId(), account); assertTrue(createAccount.getId().length() > 0); assertEquals(createAccount.getUserId(), john.getId()); assertSame(createAccount.getType(), BankAccountType.GB); assertEquals("63956474", ((BankAccountDetailsGB) createAccount.getDetails()).getAccountNumber()); assertEquals("200000", ((BankAccountDetailsGB) createAccount.getDetails()).getSortCode()); } @Test public void createBankAccountUS() throws Exception { UserNatural john = this.getJohn(); BankAccount account = new BankAccount(); account.setType(BankAccountType.US); account.setOwnerName(john.getFirstName() + " " + john.getLastName()); account.setOwnerAddress(john.getAddress()); account.setDetails(new BankAccountDetailsUS()); ((BankAccountDetailsUS) account.getDetails()).setAccountNumber("234234234234"); ((BankAccountDetailsUS) account.getDetails()).setAba("234334789"); BankAccount createAccount = this.api.getUserApi().createBankAccount(john.getId(), account); assertTrue(createAccount.getId().length() > 0); assertEquals(createAccount.getUserId(), john.getId()); assertSame(createAccount.getType(), BankAccountType.US); assertEquals("234234234234", ((BankAccountDetailsUS) createAccount.getDetails()).getAccountNumber()); assertEquals("234334789", ((BankAccountDetailsUS) createAccount.getDetails()).getAba()); 
assertEquals(((BankAccountDetailsUS) createAccount.getDetails()).getDepositAccountType(), DepositAccountType.CHECKING); ((BankAccountDetailsUS) account.getDetails()).setDepositAccountType(DepositAccountType.SAVINGS); BankAccount createAccountSavings = this.api.getUserApi().createBankAccount(john.getId(), account); assertTrue(createAccountSavings.getId().length() > 0); assertEquals(createAccountSavings.getUserId(), john.getId()); assertSame(createAccountSavings.getType(), BankAccountType.US); assertEquals("234234234234", ((BankAccountDetailsUS) createAccountSavings.getDetails()).getAccountNumber()); assertEquals("234334789", ((BankAccountDetailsUS) createAccountSavings.getDetails()).getAba()); assertEquals(((BankAccountDetailsUS) createAccountSavings.getDetails()).getDepositAccountType(), DepositAccountType.SAVINGS); } @Test public void createBankAccountCA() throws Exception { UserNatural john = this.getJohn(); BankAccount account = new BankAccount(); account.setType(BankAccountType.CA); account.setOwnerName(john.getFirstName() + " " + john.getLastName()); account.setOwnerAddress(john.getAddress()); account.setDetails(new BankAccountDetailsCA()); ((BankAccountDetailsCA) account.getDetails()).setBankName("TestBankName"); ((BankAccountDetailsCA) account.getDetails()).setBranchCode("12345"); ((BankAccountDetailsCA) account.getDetails()).setAccountNumber("234234234234"); ((BankAccountDetailsCA) account.getDetails()).setInstitutionNumber("123"); BankAccount createAccount = this.api.getUserApi().createBankAccount(john.getId(), account); assertTrue(createAccount.getId().length() > 0); assertEquals(createAccount.getUserId(), john.getId()); assertSame(createAccount.getType(), BankAccountType.CA); assertEquals("234234234234", ((BankAccountDetailsCA) createAccount.getDetails()).getAccountNumber()); assertEquals("TestBankName", ((BankAccountDetailsCA) createAccount.getDetails()).getBankName()); assertEquals("12345", ((BankAccountDetailsCA) 
createAccount.getDetails()).getBranchCode()); assertEquals("123", ((BankAccountDetailsCA) createAccount.getDetails()).getInstitutionNumber()); } @Test public void createBankAccountOTHER() { try { UserNatural john = this.getJohn(); BankAccount account = new BankAccount(); account.setOwnerName(john.getFirstName() + " " + john.getLastName()); account.setOwnerAddress(john.getAddress()); account.setDetails(new BankAccountDetailsOTHER()); account.setType(BankAccountType.OTHER); ((BankAccountDetailsOTHER) account.getDetails()).setCountry(CountryIso.FR); ((BankAccountDetailsOTHER) account.getDetails()).setAccountNumber("234234234234"); ((BankAccountDetailsOTHER) account.getDetails()).setBic("BINAADADXXX"); BankAccount createAccount = this.api.getUserApi().createBankAccount(john.getId(), account); assertTrue(createAccount.getId().length() > 0); assertTrue(createAccount.getUserId().equals(john.getId())); assertTrue(createAccount.getType() == BankAccountType.OTHER); assertTrue(((BankAccountDetailsOTHER) createAccount.getDetails()).getCountry().equals(CountryIso.FR)); assertTrue(((BankAccountDetailsOTHER) createAccount.getDetails()).getAccountNumber().equals("234234234234")); assertTrue(((BankAccountDetailsOTHER) createAccount.getDetails()).getBic().equals("BINAADADXXX")); } catch (Exception ex) { Assert.fail(ex.getMessage()); } } @Test public void createBankAccount() throws Exception { UserNatural john = this.getJohn(); BankAccount account = this.getJohnsAccount(); assertTrue(account.getId().length() > 0); assertTrue(account.getUserId().equals(john.getId())); } @Test public void updateBankAccount() throws Exception { UserNatural john = this.getJohn(); BankAccount account = this.getJohnsAccount(); assertTrue(account.getId().length() > 0); assertTrue(account.getUserId().equals(john.getId())); // disactivate bank account BankAccount disactivateBankAccount = new BankAccount(); disactivateBankAccount.setActive(false); disactivateBankAccount.setType(BankAccountType.IBAN); 
BankAccountDetailsIBAN bankAccountDetails = new BankAccountDetailsIBAN();
        bankAccountDetails.setIban("FR7630004000031234567890143");
        bankAccountDetails.setBic("BNPAFRPP");
        disactivateBankAccount.setDetails(bankAccountDetails);

        BankAccount result = this.api.getUserApi().updateBankAccount(john.getId(), disactivateBankAccount, account.getId());

        assertNotNull(result);
        assertEquals(account.getId(), result.getId());
        assertFalse(result.isActive());
    }

    // Fetches John's bank account by id and compares it field-by-field with the created one.
    @Test
    public void getBankAccount() throws Exception {
        UserNatural john = this.getJohn();
        BankAccount account = this.getJohnsAccount();

        BankAccount accountFetched = this.api.getUserApi().getBankAccount(john.getId(), account.getId());
        assertEqualInputProps(account, accountFetched);
    }

    // Lists John's bank accounts with pagination and verifies the known account is in the page.
    @Test
    public void getBankAccounts() throws Exception {
        UserNatural john = this.getJohn();
        BankAccount account = this.getJohnsAccount();
        Pagination pagination = new Pagination(1, 12);

        List<BankAccount> list = this.api.getUserApi().getBankAccounts(john.getId(), pagination, null);

        // locate the known account in the returned page
        int index = -1;
        for (int i = 0; i < list.size(); i++) {
            if (account.getId().equals(list.get(i).getId())) {
                index = i;
                break;
            }
        }

        assertTrue(list.get(0) instanceof BankAccount);
        assertTrue(index > -1);
        assertEqualInputProps(account, list.get(index));
        assertTrue(pagination.getPage() == 1);
        assertTrue(pagination.getItemsPerPage() == 12);
    }

    // Calls the active-bank-accounts endpoint; only checks the call does not throw.
    @Test
    public void getActiveBankAccounts() {
        try {
            UserNatural john = this.getJohn();
            BankAccount account = this.getJohnsAccount();
            List<BankAccount> list = this.api.getUserApi().getActiveBankAccounts(john.getId(), true, null, null);
        } catch (Exception e) {
            Assert.fail(e.getMessage());
        }
    }

    // Creates two accounts a couple of seconds apart and verifies descending CreationDate sorting.
    @Test
    public void getBankAccountsAndSortByCreationDate() throws Exception {
        UserNatural john = this.getJohn();
        this.getJohnsAccount();
        this.holdOn(2); // ensure distinct CreationDate timestamps
        this.getNewBankAccount();
        Pagination pagination = new Pagination(1, 12);
        Sorting sorting = new Sorting();
        sorting.addField("CreationDate", SortDirection.desc);

        List<BankAccount> list = this.api.getUserApi().getBankAccounts(john.getId(), pagination, sorting);

        assertNotNull(list);
        assertTrue(list.get(0) instanceof BankAccount);
        assertTrue(list.size() > 1);
        assertTrue(list.get(0).getCreationDate() > list.get(1).getCreationDate());
    }

    // Verifies a freshly created KYC document starts in the CREATED status.
    @Test
    public void createKycDocument() throws Exception {
        KycDocument kycDocument = this.getJohnsKycDocument();
        assertNotNull(kycDocument);
        assertTrue(kycDocument.getStatus() == KycStatus.CREATED);
    }

    // Uploads a KYC page, asks for validation, and verifies the status transition.
    @Test
    public void updateKycDocument() throws Exception {
        UserNatural john = this.getJohn();
        KycDocument kycDocument = this.getJohnsKycDocument();

        URL url = getClass().getResource("/com/mangopay/core/TestKycPageFile.png");
        String filePath = new File(url.toURI()).getAbsolutePath();
        this.api.getUserApi().createKycPage(john.getId(), kycDocument.getId(), filePath);

        kycDocument.setStatus(KycStatus.VALIDATION_ASKED);
        KycDocument result = this.api.getUserApi().updateKycDocument(john.getId(), kycDocument);

        assertNotNull(result);
        assertTrue(kycDocument.getType().equals(result.getType()));
        assertTrue(kycDocument.getStatus() == KycStatus.VALIDATION_ASKED);
    }

    // Fetches a KYC document by id and compares it with the one just created.
    @Test
    public void getKycDocument() throws Exception {
        UserNatural john = this.getJohn();
        KycDocument kycDocument = this.getJohnsKycDocument();

        KycDocument result = this.api.getUserApi().getKycDocument(john.getId(), kycDocument.getId());

        assertNotNull(result);
        assertTrue(kycDocument.getId().equals(result.getId()));
        assertTrue(kycDocument.getType().equals(result.getType()));
        // NOTE: the status check below is flaky when run with the whole suite; it passes in isolation.
//assertTrue(kycDocument.getStatus().equals(result.getStatus()));
        assertTrue(kycDocument.getCreationDate() == result.getCreationDate());
    }

    // Uploads a KYC page twice: once from a file path, once from raw bytes.
    @Test
    public void createKycPage() throws Exception {
        UserNatural john = this.getJohn();
        KycDocument kycDocument = this.getNewKycDocument();

        URL url = getClass().getResource("/com/mangopay/core/TestKycPageFile.png");
        String filePath = new File(url.toURI()).getAbsolutePath();

        // path-based overload
        this.api.getUserApi().createKycPage(john.getId(), kycDocument.getId(), filePath);

        // byte-array overload against a fresh document
        kycDocument = this.getNewKycDocument();
        this.api.getUserApi().createKycPage(john.getId(), kycDocument.getId(), Files.readAllBytes(Paths.get(filePath)));
    }

    // Registers a card via a direct pay-in and verifies the user's card list grows.
    @Ignore("Can't be tested at this moment")
    @Test
    public void getCards() throws Exception {
        UserNatural john = this.getJohn();
        Pagination pagination = new Pagination(1, 20);
        List<Card> cardsBefore = this.api.getUserApi().getCards(john.getId(), pagination, null);

        PayIn payIn = this.getNewPayInCardDirect();
        Card card = this.api.getCardApi().get(((PayInPaymentDetailsCard) payIn.getPaymentDetails()).getCardId());

        List<Card> cardsAfter = this.api.getUserApi().getCards(john.getId(), pagination, null);
        assertNotNull(cardsBefore);
        assertTrue(cardsAfter.size() > cardsBefore.size());
    }

    // Registers two cards a couple of seconds apart and verifies descending CreationDate sorting.
    @Test
    public void getCardsAndSortByCreationDate() throws Exception {
        UserNatural john = this.getJohn();
        this.getNewPayInCardDirect();
        this.holdOn(2); // ensure distinct CreationDate timestamps
        this.getNewPayInCardDirect();
        Pagination pagination = new Pagination(1, 20);
        Sorting sorting = new Sorting();
        sorting.addField("CreationDate", SortDirection.desc);

        List<Card> cards = this.api.getUserApi().getCards(john.getId(), pagination, sorting);

        assertNotNull(cards);
        assertTrue(cards.size() > 1);
        assertTrue(cards.get(0).getCreationDate() > cards.get(1).getCreationDate());
    }

    // Creates a transfer, lists the user's transactions, and checks type/status are populated.
    @Test
    public void getTransactions() throws Exception {
        UserNatural john = this.getJohn();
        Transfer transfer = this.getNewTransfer();
        Pagination pagination = new Pagination(1, 20);

        List<Transaction> transactions = this.api.getUserApi().getTransactions(john.getId(), pagination, new FilterTransactions(), null);

        assertTrue(transactions.size() > 0);
        assertTrue(transactions.get(0).getType() != null);
        assertTrue(transactions.get(0).getStatus() != null);
    }

    // Creates two transfers a couple of seconds apart and verifies descending CreationDate sorting.
    @Test
    public void getTransactionsAndSortByCreationDate() throws Exception {
        UserNatural john = this.getJohn();
        this.getNewTransfer();
        this.holdOn(2); // ensure distinct CreationDate timestamps
        this.getNewTransfer();
        Pagination pagination = new Pagination(1, 20);
        Sorting sorting = new Sorting();
        sorting.addField("CreationDate", SortDirection.desc);

        List<Transaction> transactions = this.api.getUserApi().getTransactions(john.getId(), pagination, new FilterTransactions(), sorting);

        assertNotNull(transactions);
        assertTrue(transactions.size() > 1);
        assertTrue(transactions.get(0).getCreationDate() > transactions.get(1).getCreationDate());
    }

    // Lists John's KYC documents and verifies the known document is present and equal.
    @Test
    public void getKycDocuments() throws Exception {
        KycDocument kycDocument = this.getJohnsKycDocument();
        UserNatural user = this.getJohn();
        Pagination pagination = new Pagination(1, 20);

        List<KycDocument> getKycDocuments = this.api.getUserApi().getKycDocuments(user.getId(), pagination, null);
        assertTrue(getKycDocuments.get(0) instanceof KycDocument);

        // locate the known document in the returned page
        KycDocument kycFromList = null;
        for (KycDocument item : getKycDocuments) {
            if (item.getId().equals(kycDocument.getId())) {
                kycFromList = item;
                break;
            }
        }

        assertNotNull(kycFromList);
        assertEquals(kycDocument.getId(), kycFromList.getId());
        assertEqualInputProps(kycDocument, kycFromList);
    }

    // Creates two KYC documents a couple of seconds apart and verifies descending CreationDate sorting.
    @Test
    public void getKycDocumentsAndSortByCreationDate() throws Exception {
        this.getJohnsKycDocument();
        this.holdOn(2); // ensure distinct CreationDate timestamps
        this.getNewKycDocument();
        UserNatural user = this.getJohn();
        Pagination pagination = new Pagination(1, 20);
        Sorting sorting = new Sorting();
        sorting.addField("CreationDate", SortDirection.desc);

        List<KycDocument> getKycDocuments = this.api.getUserApi().getKycDocuments(user.getId(), pagination, sorting);

        assertNotNull(getKycDocuments);
        assertTrue(getKycDocuments.get(0) instanceof
KycDocument);
        assertTrue(getKycDocuments.size() > 1);
        assertTrue(getKycDocuments.get(0).getCreationDate() > getKycDocuments.get(1).getCreationDate());
    }

    // Fetches John's e-money summary for a whole year and then for a single month.
    @Test
    public void getUserEMoney() throws Exception {
        User john = getJohn();
        String year = "2019";
        String month = "04";

        EMoney eMoney = this.api.getUserApi().getEMoney(john.getId(), year);
        assertNotNull(eMoney);
        assertEquals(eMoney.getUserId(), john.getId());

        eMoney = this.api.getUserApi().getEMoney(john.getId(), year, month);
        assertNotNull(eMoney);
        assertEquals(eMoney.getUserId(), john.getId());
    }

    // CHF variant of the currency-parameterised e-money check.
    @Test
    public void getUserEMoneyChf() throws Exception {
        getUserEMoney(CurrencyIso.CHF);
    }

    // USD variant of the currency-parameterised e-money check.
    @Test
    public void getUserEMoneyUsd() throws Exception {
        getUserEMoney(CurrencyIso.USD);
    }

    // Sending a null currency is expected to yield EUR in the response.
    @Test
    public void testUserEMoneyNullCurrency() throws Exception {
        getUserEMoney(null, CurrencyIso.EUR);
    }

    // Helper: expects the response currency to equal the one sent in the request.
    private void getUserEMoney(CurrencyIso currencySentInRequest) throws Exception {
        getUserEMoney(currencySentInRequest, currencySentInRequest);
    }

    // Helper: fetches yearly and monthly e-money and checks owner id and credited currency.
    private void getUserEMoney(CurrencyIso currencySentInRequest, CurrencyIso currencyExpected) throws Exception {
        User john = getJohn();
        String year = "2019";
        String month = "04";

        EMoney eMoney = this.api.getUserApi().getEMoney(john.getId(), year, currencySentInRequest);
        assertNotNull(eMoney);
        assertEquals(john.getId(), eMoney.getUserId());
        assertEquals(currencyExpected, eMoney.getCreditedEMoney().getCurrency());

        eMoney = this.api.getUserApi().getEMoney(john.getId(), year, month, currencySentInRequest);
        assertNotNull(eMoney);
        assertEquals(john.getId(), eMoney.getUserId());
        assertEquals(currencyExpected, eMoney.getCreditedEMoney().getCurrency());
    }

    // Lists transactions of a bank account after a bank-wire pay-out and verifies the single-item page.
    @Test
    @Ignore
    public void getBankAccountTransactions() throws Exception {
        BankAccount johnsAccount = getJohnsAccount();
        PayOut johnsPayOutBankWire = getJohnsPayOutBankWire();
        Pagination pagination = new Pagination(1, 1);

        List<Transaction> bankAccountTransactions = this.api.getUserApi().getBankAccountTransactions(johnsAccount.getId(), pagination, null);

        assertNotNull("List of bank account transactions is null", bankAccountTransactions);
        assertFalse("List of bank account transactions is empty", bankAccountTransactions.isEmpty());
        assertTrue("List of bank account transactions size does not match pagination", bankAccountTransactions.size() == 1);
        assertEquals("Returned transaction is not the expected one", bankAccountTransactions.get(0).getId(), johnsPayOutBankWire.getId());
    }

    // Lists pre-authorizations by author id (overload without pagination).
    @Test
    public void getUserPreAuthorizations() throws Exception {
        CardPreAuthorization johnsCardPreAuthorization = getJohnsCardPreAuthorization();
        assertNotNull(johnsCardPreAuthorization);

        List<CardPreAuthorization> preAuthorizations = this.api.getUserApi().getPreAuthorizations(johnsCardPreAuthorization.getAuthorId());

        assertNotNull(preAuthorizations);
        assertFalse(preAuthorizations.isEmpty());
        assertNotNull(preAuthorizations.get(0));
        assertTrue(preAuthorizations.get(0).getAuthorId().equals(johnsCardPreAuthorization.getAuthorId()));
    }

    // Lists pre-authorizations with an explicit Pagination object.
    @Test
    public void getUserPreAuthorizationsWithPagination() throws Exception {
        CardPreAuthorization johnsCardPreAuthorization = getJohnsCardPreAuthorization();
        assertNotNull(johnsCardPreAuthorization);
        Pagination pagination = new Pagination(1, 20);

        List<CardPreAuthorization> preAuthorizations = this.api.getUserApi().getPreAuthorizations(johnsCardPreAuthorization.getAuthorId(), pagination, null);

        assertNotNull(preAuthorizations);
        assertFalse(preAuthorizations.isEmpty());
        assertNotNull(preAuthorizations.get(0));
        assertTrue(preAuthorizations.get(0).getAuthorId().equals(johnsCardPreAuthorization.getAuthorId()));
    }

    // A null Pagination argument must behave like the no-pagination overload.
    @Test
    public void getUserPreAuthorizationsWithNullPaginationObject() throws Exception {
        CardPreAuthorization johnsCardPreAuthorization = getJohnsCardPreAuthorization();
        assertNotNull(johnsCardPreAuthorization);
        Pagination pagination = null; // NOTE(review): declared but unused — the call below passes null directly
        List<CardPreAuthorization> preAuthorizations = this.api.getUserApi().getPreAuthorizations(johnsCardPreAuthorization.getAuthorId(), null, null);
assertNotNull(preAuthorizations);
        assertFalse(preAuthorizations.isEmpty());
        assertNotNull(preAuthorizations.get(0));
        assertTrue(preAuthorizations.get(0).getAuthorId().equals(johnsCardPreAuthorization.getAuthorId()));
    }

    // Filtering by SUCCEEDED status and result code 000000 should match the created pre-authorization.
    @Test
    public void getUserPreAuthorizationsWithPaginationAndFilterReturnsValue() throws Exception {
        CardPreAuthorization johnsCardPreAuthorization = getJohnsCardPreAuthorization();
        assertNotNull(johnsCardPreAuthorization);
        Pagination pagination = new Pagination(1, 20);
        FilterPreAuthorizations fpa = new FilterPreAuthorizations();
        fpa.setPreAuthorizationStatus(PreAuthorizationStatus.SUCCEEDED);
        fpa.setResultCode("000000");

        List<CardPreAuthorization> preAuthorizations = this.api.getUserApi().getPreAuthorizations(johnsCardPreAuthorization.getAuthorId(), pagination, fpa, null);

        assertNotNull(preAuthorizations);
        assertFalse(preAuthorizations.isEmpty());
        assertNotNull(preAuthorizations.get(0));
        assertTrue(preAuthorizations.get(0).getAuthorId().equals(johnsCardPreAuthorization.getAuthorId()));
    }

    // Filtering by a result code that does not occur must yield an empty list.
    @Test
    public void getUserPreAuthorizationsWithPaginationAndFilterDoesNotReturnValue() throws Exception {
        CardPreAuthorization johnsCardPreAuthorization = getJohnsCardPreAuthorization();
        assertNotNull(johnsCardPreAuthorization);
        Pagination pagination = new Pagination(1, 20);
        FilterPreAuthorizations fpa = new FilterPreAuthorizations();
        fpa.setResultCode("000001");

        List<CardPreAuthorization> preAuthorizations = this.api.getUserApi().getPreAuthorizations(johnsCardPreAuthorization.getAuthorId(), pagination, fpa, null);

        assertNotNull(preAuthorizations);
        assertTrue(preAuthorizations.isEmpty());
    }

    // Fetches the user's block status.
    @Test
    @Ignore // this endpoint isn't on the API just yet
    public void getBlockStatus() throws Exception {
        UserNatural user = this.getJohn();
        UserBlockStatus blockStatus = this.api.getUserApi().getBlockStatus(user.getId());
        assertNotNull(blockStatus);
    }

    // Fetches the user's regulatory block status.
    @Test
    @Ignore // this endpoint isn't on the API just yet
    public void getRegulatory() throws Exception {
        UserNatural user = this.getJohn();
        UserBlockStatus blockStatus = this.api.getUserApi().getRegulatory(user.getId());
        assertNotNull(blockStatus);
    }
}
/* * Licensed to Metamarkets Group Inc. (Metamarkets) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Metamarkets licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package io.druid.indexing.overlord; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.base.Predicate; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.common.util.concurrent.ListenableFuture; import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.service.ServiceEmitter; import io.druid.indexer.TaskState; import io.druid.indexing.common.IndexingServiceCondition; import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.TestRealtimeTask; import io.druid.indexing.common.TestTasks; import io.druid.indexing.common.TestUtils; import io.druid.indexing.common.task.Task; import io.druid.indexing.common.task.TaskResource; import io.druid.indexing.overlord.config.RemoteTaskRunnerConfig; import io.druid.indexing.worker.Worker; import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import org.apache.curator.framework.CuratorFramework; import org.apache.zookeeper.CreateMode; import 
org.easymock.EasyMock;
import org.joda.time.Period;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Set;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

/**
 * Tests for RemoteTaskRunner driven through RemoteTaskRunnerTestUtils: the worker's
 * announcement and task-status updates are simulated by writing directly to the
 * relevant ZooKeeper paths via Curator instead of running a real worker process.
 */
public class RemoteTaskRunnerTest
{
  private static final Joiner joiner = RemoteTaskRunnerTestUtils.joiner;
  private static final String workerHost = "worker";
  // ZK path where this test's worker announces itself
  private static final String announcementsPath = joiner.join(RemoteTaskRunnerTestUtils.announcementsPath, workerHost);
  // ZK path under which the worker publishes task statuses
  private static final String statusPath = joiner.join(RemoteTaskRunnerTestUtils.statusPath, workerHost);
  private static final int TIMEOUT_SECONDS = 20;

  private RemoteTaskRunner remoteTaskRunner;
  private RemoteTaskRunnerTestUtils rtrTestUtils = new RemoteTaskRunnerTestUtils();
  private ObjectMapper jsonMapper;
  private CuratorFramework cf;
  private Task task;
  private Worker worker;

  @Before
  public void setUp() throws Exception
  {
    rtrTestUtils.setUp();
    jsonMapper = rtrTestUtils.getObjectMapper();
    cf = rtrTestUtils.getCuratorFramework();
    task = TestTasks.unending("task");
  }

  @After
  public void tearDown() throws Exception
  {
    if (remoteTaskRunner != null) {
      remoteTaskRunner.stop();
    }
    rtrTestUtils.tearDown();
  }

  // Happy path: run a task, fake the worker running and completing it, expect SUCCESS.
  @Test
  public void testRun() throws Exception
  {
    doSetup();

    ListenableFuture<TaskStatus> result = remoteTaskRunner.run(task);

    Assert.assertTrue(taskAnnounced(task.getId()));
    mockWorkerRunningTask(task);
    Assert.assertTrue(workerRunningTask(task.getId()));
    mockWorkerCompleteSuccessfulTask(task);
    Assert.assertTrue(workerCompletedTask(result));

    Assert.assertEquals(task.getId(), result.get().getId());
    Assert.assertEquals(TaskState.SUCCESS, result.get().getStatusCode());
  }

  // The runner must start cleanly even when no worker has announced itself.
  @Test
  public void testStartWithNoWorker() throws Exception
  {
    makeRemoteTaskRunner(new TestRemoteTaskRunnerConfig(new Period("PT1S")));
  }

  // Re-running a task that is assigned but not yet running returns an unfinished future for the same task.
  @Test
  public void testRunExistingTaskThatHasntStartedRunning() throws Exception
  {
    doSetup();
    remoteTaskRunner.run(task);
    Assert.assertTrue(taskAnnounced(task.getId()));

    ListenableFuture<TaskStatus> result = remoteTaskRunner.run(task);
    Assert.assertFalse(result.isDone());
    mockWorkerRunningTask(task);
    Assert.assertTrue(workerRunningTask(task.getId()));
    mockWorkerCompleteSuccessfulTask(task);
    Assert.assertTrue(workerCompletedTask(result));

    Assert.assertEquals(task.getId(), result.get().getId());
    Assert.assertEquals(TaskState.SUCCESS, result.get().getStatusCode());
  }

  // Re-running a task that is already running also returns an unfinished future for the same task.
  @Test
  public void testRunExistingTaskThatHasStartedRunning() throws Exception
  {
    doSetup();
    remoteTaskRunner.run(task);
    Assert.assertTrue(taskAnnounced(task.getId()));
    mockWorkerRunningTask(task);
    Assert.assertTrue(workerRunningTask(task.getId()));

    ListenableFuture<TaskStatus> result = remoteTaskRunner.run(task);
    Assert.assertFalse(result.isDone());
    mockWorkerCompleteSuccessfulTask(task);
    Assert.assertTrue(workerCompletedTask(result));

    Assert.assertEquals(task.getId(), result.get().getId());
    Assert.assertEquals(TaskState.SUCCESS, result.get().getStatusCode());
  }

  // Submitting a task with a very large id (5000 chars) while a mock emitter is registered;
  // EasyMock.verify checks the emitter interactions.
  @Test
  public void testRunTooMuchZKData() throws Exception
  {
    ServiceEmitter emitter = EasyMock.createMock(ServiceEmitter.class);
    EmittingLogger.registerEmitter(emitter);
    EasyMock.replay(emitter);

    doSetup();
    remoteTaskRunner.run(TestTasks.unending(new String(new char[5000])));
    EasyMock.verify(emitter);
  }

  // Two tasks sharing availability group "rt1" cannot run concurrently: the second stays pending.
  @Test
  public void testRunSameAvailabilityGroup() throws Exception
  {
    doSetup();

    TestRealtimeTask task1 = new TestRealtimeTask(
        "rt1",
        new TaskResource("rt1", 1),
        "foo",
        TaskStatus.running("rt1"),
        jsonMapper
    );
    remoteTaskRunner.run(task1);
    Assert.assertTrue(taskAnnounced(task1.getId()));
    mockWorkerRunningTask(task1);

    // same availability group ("rt1") as task1 -> must wait
    TestRealtimeTask task2 = new TestRealtimeTask(
        "rt2",
        new TaskResource("rt1", 1),
        "foo",
        TaskStatus.running("rt2"),
        jsonMapper
    );
    remoteTaskRunner.run(task2);

    // different availability group ("rt2") -> can run alongside task1
    TestRealtimeTask task3 = new TestRealtimeTask(
        "rt3",
        new TaskResource("rt2", 1),
        "foo",
        TaskStatus.running("rt3"),
        jsonMapper
    );
    remoteTaskRunner.run(task3);

    Assert.assertTrue(
        TestUtils.conditionValid(
            new IndexingServiceCondition()
            {
              @Override
              public boolean isValid()
              {
                return remoteTaskRunner.getRunningTasks().size() == 2;
              }
            }
        )
    );
    Assert.assertTrue(
        TestUtils.conditionValid(
            new IndexingServiceCondition()
            {
              @Override
              public boolean isValid()
              {
                return remoteTaskRunner.getPendingTasks().size() == 1;
              }
            }
        )
    );
    Assert.assertTrue(remoteTaskRunner.getPendingTasks().iterator().next().getTaskId().equals("rt2"));
  }

  // Worker is created with 3 slots (see makeWorker): rt2 needs 3 and cannot fit next to rt1 (1 slot),
  // while rt3 (2 slots) can — so rt2 stays pending.
  @Test
  public void testRunWithCapacity() throws Exception
  {
    doSetup();

    TestRealtimeTask task1 = new TestRealtimeTask(
        "rt1",
        new TaskResource("rt1", 1),
        "foo",
        TaskStatus.running("rt1"),
        jsonMapper
    );
    remoteTaskRunner.run(task1);
    Assert.assertTrue(taskAnnounced(task1.getId()));
    mockWorkerRunningTask(task1);

    TestRealtimeTask task2 = new TestRealtimeTask(
        "rt2",
        new TaskResource("rt2", 3),
        "foo",
        TaskStatus.running("rt2"),
        jsonMapper
    );
    remoteTaskRunner.run(task2);

    TestRealtimeTask task3 = new TestRealtimeTask(
        "rt3",
        new TaskResource("rt3", 2),
        "foo",
        TaskStatus.running("rt3"),
        jsonMapper
    );
    remoteTaskRunner.run(task3);
    Assert.assertTrue(taskAnnounced(task3.getId()));
    mockWorkerRunningTask(task3);

    Assert.assertTrue(
        TestUtils.conditionValid(
            new IndexingServiceCondition()
            {
              @Override
              public boolean isValid()
              {
                return remoteTaskRunner.getRunningTasks().size() == 2;
              }
            }
        )
    );
    Assert.assertTrue(
        TestUtils.conditionValid(
            new IndexingServiceCondition()
            {
              @Override
              public boolean isValid()
              {
                return remoteTaskRunner.getPendingTasks().size() == 1;
              }
            }
        )
    );
    Assert.assertTrue(remoteTaskRunner.getPendingTasks().iterator().next().getTaskId().equals("rt2"));
  }

  // Deleting a running task's status node in ZK must fail the task's future.
  @Test
  public void testStatusRemoved() throws Exception
  {
    doSetup();

    ListenableFuture<TaskStatus> future = remoteTaskRunner.run(task);

    Assert.assertTrue(taskAnnounced(task.getId()));
    mockWorkerRunningTask(task);
    Assert.assertTrue(workerRunningTask(task.getId()));

    Assert.assertTrue(remoteTaskRunner.getRunningTasks().iterator().next().getTaskId().equals("task"));
cf.delete().forPath(joiner.join(statusPath, task.getId()));

    TaskStatus status = future.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
    Assert.assertEquals(status.getStatusCode(), TaskState.FAILED);
  }

  // Pre-existing running-status nodes in ZK must be picked up as running tasks on startup.
  @Test
  public void testBootstrap() throws Exception
  {
    cf.create()
      .creatingParentsIfNeeded()
      .withMode(CreateMode.EPHEMERAL)
      .forPath(joiner.join(statusPath, "first"), jsonMapper.writeValueAsBytes(TaskStatus.running("first")));
    cf.create()
      .creatingParentsIfNeeded()
      .withMode(CreateMode.EPHEMERAL)
      .forPath(joiner.join(statusPath, "second"), jsonMapper.writeValueAsBytes(TaskStatus.running("second")));

    doSetup();

    final Set<String> existingTasks = Sets.newHashSet();
    for (ImmutableWorkerInfo workerInfo : remoteTaskRunner.getWorkers()) {
      existingTasks.addAll(workerInfo.getRunningTasks());
    }
    Assert.assertEquals("existingTasks", ImmutableSet.of("first", "second"), existingTasks);

    final Set<String> runningTasks = Sets.newHashSet(
        Iterables.transform(
            remoteTaskRunner.getRunningTasks(),
            new Function<RemoteTaskRunnerWorkItem, String>()
            {
              @Override
              public String apply(RemoteTaskRunnerWorkItem input)
              {
                return input.getTaskId();
              }
            }
        )
    );
    Assert.assertEquals("runningTasks", ImmutableSet.of("first", "second"), runningTasks);
  }

  // Running a task whose ZK status already says SUCCESS must resolve immediately with SUCCESS.
  @Test
  public void testRunWithTaskComplete() throws Exception
  {
    cf.create()
      .creatingParentsIfNeeded()
      .withMode(CreateMode.EPHEMERAL)
      .forPath(joiner.join(statusPath, task.getId()), jsonMapper.writeValueAsBytes(TaskStatus.success(task.getId())));

    doSetup();

    ListenableFuture<TaskStatus> future = remoteTaskRunner.run(task);

    TaskStatus status = future.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
    Assert.assertEquals(TaskState.SUCCESS, status.getStatusCode());
  }

  // Deleting the worker's announcement fails its running task and eventually cleans up its status path.
  @Test
  public void testWorkerRemoved() throws Exception
  {
    doSetup();
    Future<TaskStatus> future = remoteTaskRunner.run(task);

    Assert.assertTrue(taskAnnounced(task.getId()));
    mockWorkerRunningTask(task);
    Assert.assertTrue(workerRunningTask(task.getId()));

    cf.delete().forPath(announcementsPath);

    TaskStatus status = future.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
    Assert.assertEquals(TaskState.FAILED, status.getStatusCode());
    RemoteTaskRunnerConfig config = remoteTaskRunner.getRemoteTaskRunnerConfig();
    Assert.assertTrue(
        TestUtils.conditionValid(
            new IndexingServiceCondition()
            {
              @Override
              public boolean isValid()
              {
                return remoteTaskRunner.getRemovedWorkerCleanups().isEmpty();
              }
            },
            // cleanup task is independently scheduled by event listener. we need to wait some more time.
            config.getTaskCleanupTimeout().toStandardDuration().getMillis() * 2
        )
    );
    Assert.assertNull(cf.checkExists().forPath(statusPath));
  }

  // Disabling a worker mid-task must not interrupt the task; afterwards the worker reports an empty version.
  @Test
  public void testWorkerDisabled() throws Exception
  {
    doSetup();
    final ListenableFuture<TaskStatus> result = remoteTaskRunner.run(task);

    Assert.assertTrue(taskAnnounced(task.getId()));
    mockWorkerRunningTask(task);
    Assert.assertTrue(workerRunningTask(task.getId()));

    // Disable while task running
    disableWorker();

    // Continue test
    mockWorkerCompleteSuccessfulTask(task);
    Assert.assertTrue(workerCompletedTask(result));
    Assert.assertEquals(task.getId(), result.get().getId());
    Assert.assertEquals(TaskState.SUCCESS, result.get().getStatusCode());

    // Confirm RTR thinks the worker is disabled.
    Assert.assertEquals("", Iterables.getOnlyElement(remoteTaskRunner.getWorkers()).getWorker().getVersion());
  }

  // Announces a worker and builds a runner with a 5-second timeout config.
  private void doSetup() throws Exception
  {
    makeWorker();
    makeRemoteTaskRunner(new TestRemoteTaskRunnerConfig(new Period("PT5S")));
  }

  private void makeRemoteTaskRunner(RemoteTaskRunnerConfig config) throws Exception
  {
    remoteTaskRunner = rtrTestUtils.makeRemoteTaskRunner(config);
  }

  // Announces a worker with 3 slots in ZK.
  private void makeWorker() throws Exception
  {
    worker = rtrTestUtils.makeWorker(workerHost, 3);
  }

  private void disableWorker() throws Exception
  {
    rtrTestUtils.disableWorker(worker);
  }

  private boolean taskAnnounced(final String taskId)
  {
    return rtrTestUtils.taskAnnounced(workerHost, taskId);
  }

  private boolean workerRunningTask(final String taskId)
  {
    return rtrTestUtils.workerRunningTask(workerHost, taskId);
  }

  // Waits (bounded by TestUtils.conditionValid) for the task's result future to complete.
  private boolean workerCompletedTask(final ListenableFuture<TaskStatus> result)
  {
    return TestUtils.conditionValid(
        new IndexingServiceCondition()
        {
          @Override
          public boolean isValid()
          {
            return result.isDone();
          }
        }
    );
  }

  private void mockWorkerRunningTask(final Task task) throws Exception
  {
    rtrTestUtils.mockWorkerRunningTask("worker", task);
  }

  private void mockWorkerCompleteSuccessfulTask(final Task task) throws Exception
  {
    rtrTestUtils.mockWorkerCompleteSuccessfulTask("worker", task);
  }

  private void mockWorkerCompleteFailedTask(final Task task) throws Exception
  {
    rtrTestUtils.mockWorkerCompleteFailedTask("worker", task);
  }

  // A worker running a task must never be marked lazy, even when the predicate matches.
  @Test
  public void testFindLazyWorkerTaskRunning() throws Exception
  {
    doSetup();
    remoteTaskRunner.start();
    remoteTaskRunner.run(task);
    Assert.assertTrue(taskAnnounced(task.getId()));
    mockWorkerRunningTask(task);
    Collection<Worker> lazyworkers = remoteTaskRunner.markWorkersLazy(
        new Predicate<ImmutableWorkerInfo>()
        {
          @Override
          public boolean apply(ImmutableWorkerInfo input)
          {
            return true;
          }
        }, 1
    );
    Assert.assertTrue(lazyworkers.isEmpty());
    Assert.assertTrue(remoteTaskRunner.getLazyWorkers().isEmpty());
    Assert.assertEquals(1, remoteTaskRunner.getWorkers().size());
  }
@Test public void testFindLazyWorkerForWorkerJustAssignedTask() throws Exception { doSetup(); remoteTaskRunner.run(task); Assert.assertTrue(taskAnnounced(task.getId())); Collection<Worker> lazyworkers = remoteTaskRunner.markWorkersLazy( new Predicate<ImmutableWorkerInfo>() { @Override public boolean apply(ImmutableWorkerInfo input) { return true; } }, 1 ); Assert.assertTrue(lazyworkers.isEmpty()); Assert.assertTrue(remoteTaskRunner.getLazyWorkers().isEmpty()); Assert.assertEquals(1, remoteTaskRunner.getWorkers().size()); } @Test public void testFindLazyWorkerNotRunningAnyTask() throws Exception { doSetup(); Collection<Worker> lazyworkers = remoteTaskRunner.markWorkersLazy( new Predicate<ImmutableWorkerInfo>() { @Override public boolean apply(ImmutableWorkerInfo input) { return true; } }, 1 ); Assert.assertEquals(1, lazyworkers.size()); Assert.assertEquals(1, remoteTaskRunner.getLazyWorkers().size()); } @Test public void testWorkerZKReconnect() throws Exception { makeWorker(); makeRemoteTaskRunner(new TestRemoteTaskRunnerConfig(new Period("PT5M"))); Future<TaskStatus> future = remoteTaskRunner.run(task); Assert.assertTrue(taskAnnounced(task.getId())); mockWorkerRunningTask(task); Assert.assertTrue(workerRunningTask(task.getId())); byte[] bytes = cf.getData().forPath(announcementsPath); cf.delete().forPath(announcementsPath); // worker task cleanup scheduled Assert.assertTrue( TestUtils.conditionValid( new IndexingServiceCondition() { @Override public boolean isValid() { return remoteTaskRunner.getRemovedWorkerCleanups().containsKey(worker.getHost()); } } ) ); // Worker got reconnected cf.create().forPath(announcementsPath, bytes); // worker task cleanup should get cancelled and removed Assert.assertTrue( TestUtils.conditionValid( new IndexingServiceCondition() { @Override public boolean isValid() { return !remoteTaskRunner.getRemovedWorkerCleanups().containsKey(worker.getHost()); } } ) ); mockWorkerCompleteSuccessfulTask(task); TaskStatus status = 
future.get(TIMEOUT_SECONDS, TimeUnit.SECONDS); Assert.assertEquals(status.getStatusCode(), TaskState.SUCCESS); Assert.assertEquals(TaskState.SUCCESS, status.getStatusCode()); } @Test public void testSortByInsertionTime() throws Exception { RemoteTaskRunnerWorkItem item1 = new RemoteTaskRunnerWorkItem("b", null, null) .withQueueInsertionTime(DateTimes.of("2015-01-01T00:00:03Z")); RemoteTaskRunnerWorkItem item2 = new RemoteTaskRunnerWorkItem("a", null, null) .withQueueInsertionTime(DateTimes.of("2015-01-01T00:00:02Z")); RemoteTaskRunnerWorkItem item3 = new RemoteTaskRunnerWorkItem("c", null, null) .withQueueInsertionTime(DateTimes.of("2015-01-01T00:00:01Z")); ArrayList<RemoteTaskRunnerWorkItem> workItems = Lists.newArrayList(item1, item2, item3); RemoteTaskRunner.sortByInsertionTime(workItems); Assert.assertEquals(item3, workItems.get(0)); Assert.assertEquals(item2, workItems.get(1)); Assert.assertEquals(item1, workItems.get(2)); } @Test public void testBlacklistZKWorkers() throws Exception { Period timeoutPeriod = Period.millis(1000); makeWorker(); RemoteTaskRunnerConfig rtrConfig = new TestRemoteTaskRunnerConfig(timeoutPeriod); rtrConfig.setMaxPercentageBlacklistWorkers(100); makeRemoteTaskRunner(rtrConfig); TestRealtimeTask task1 = new TestRealtimeTask( "realtime1", new TaskResource("realtime1", 1), "foo", TaskStatus.success("realtime1"), jsonMapper ); Future<TaskStatus> taskFuture1 = remoteTaskRunner.run(task1); Assert.assertTrue(taskAnnounced(task1.getId())); mockWorkerRunningTask(task1); mockWorkerCompleteFailedTask(task1); Assert.assertTrue(taskFuture1.get(TIMEOUT_SECONDS, TimeUnit.SECONDS).isFailure()); Assert.assertEquals(0, remoteTaskRunner.getBlackListedWorkers().size()); Assert.assertEquals( 1, remoteTaskRunner.findWorkerRunningTask(task1.getId()).getContinuouslyFailedTasksCount() ); TestRealtimeTask task2 = new TestRealtimeTask( "realtime2", new TaskResource("realtime2", 1), "foo", TaskStatus.running("realtime2"), jsonMapper ); Future<TaskStatus> 
taskFuture2 = remoteTaskRunner.run(task2); Assert.assertTrue(taskAnnounced(task2.getId())); mockWorkerRunningTask(task2); mockWorkerCompleteFailedTask(task2); Assert.assertTrue(taskFuture2.get(TIMEOUT_SECONDS, TimeUnit.SECONDS).isFailure()); Assert.assertEquals(1, remoteTaskRunner.getBlackListedWorkers().size()); Assert.assertEquals( 2, remoteTaskRunner.findWorkerRunningTask(task2.getId()).getContinuouslyFailedTasksCount() ); ((RemoteTaskRunnerTestUtils.TestableRemoteTaskRunner) remoteTaskRunner) .setCurrentTimeMillis(System.currentTimeMillis()); remoteTaskRunner.checkBlackListedNodes(); Assert.assertEquals(1, remoteTaskRunner.getBlackListedWorkers().size()); ((RemoteTaskRunnerTestUtils.TestableRemoteTaskRunner) remoteTaskRunner) .setCurrentTimeMillis(System.currentTimeMillis() + 2 * timeoutPeriod.toStandardDuration().getMillis()); remoteTaskRunner.checkBlackListedNodes(); // After backOffTime the nodes are removed from blacklist Assert.assertEquals(0, remoteTaskRunner.getBlackListedWorkers().size()); Assert.assertEquals( 0, remoteTaskRunner.findWorkerRunningTask(task2.getId()).getContinuouslyFailedTasksCount() ); TestRealtimeTask task3 = new TestRealtimeTask( "realtime3", new TaskResource("realtime3", 1), "foo", TaskStatus.running("realtime3"), jsonMapper ); Future<TaskStatus> taskFuture3 = remoteTaskRunner.run(task3); Assert.assertTrue(taskAnnounced(task3.getId())); mockWorkerRunningTask(task3); mockWorkerCompleteSuccessfulTask(task3); Assert.assertTrue(taskFuture3.get(TIMEOUT_SECONDS, TimeUnit.SECONDS).isSuccess()); Assert.assertEquals(0, remoteTaskRunner.getBlackListedWorkers().size()); Assert.assertEquals( 0, remoteTaskRunner.findWorkerRunningTask(task3.getId()).getContinuouslyFailedTasksCount() ); } /** * With 2 workers and maxPercentageBlacklistWorkers(25), neither worker should ever be blacklisted even after * exceeding maxRetriesBeforeBlacklist. 
*/ @Test public void testBlacklistZKWorkers25Percent() throws Exception { Period timeoutPeriod = Period.millis(1000); rtrTestUtils.makeWorker("worker", 10); rtrTestUtils.makeWorker("worker2", 10); RemoteTaskRunnerConfig rtrConfig = new TestRemoteTaskRunnerConfig(timeoutPeriod); rtrConfig.setMaxPercentageBlacklistWorkers(25); makeRemoteTaskRunner(rtrConfig); String firstWorker = null; String secondWorker = null; for (int i = 1; i < 13; i++) { String taskId = StringUtils.format("rt-%d", i); TestRealtimeTask task = new TestRealtimeTask( taskId, new TaskResource(taskId, 1), "foo", TaskStatus.success(taskId), jsonMapper ); Future<TaskStatus> taskFuture = remoteTaskRunner.run(task); if (i == 1) { if (rtrTestUtils.taskAnnounced("worker2", task.getId())) { firstWorker = "worker2"; secondWorker = "worker"; } else { firstWorker = "worker"; secondWorker = "worker2"; } } final String expectedWorker = i % 2 == 0 ? secondWorker : firstWorker; Assert.assertTrue(rtrTestUtils.taskAnnounced(expectedWorker, task.getId())); rtrTestUtils.mockWorkerRunningTask(expectedWorker, task); rtrTestUtils.mockWorkerCompleteFailedTask(expectedWorker, task); Assert.assertTrue(taskFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS).isFailure()); Assert.assertEquals(0, remoteTaskRunner.getBlackListedWorkers().size()); Assert.assertEquals( ((i + 1) / 2), remoteTaskRunner.findWorkerRunningTask(task.getId()).getContinuouslyFailedTasksCount() ); } } /** * With 2 workers and maxPercentageBlacklistWorkers(50), one worker should get blacklisted after the second failure * and the second worker should never be blacklisted even after exceeding maxRetriesBeforeBlacklist. 
*/ @Test public void testBlacklistZKWorkers50Percent() throws Exception { Period timeoutPeriod = Period.millis(1000); rtrTestUtils.makeWorker("worker", 10); rtrTestUtils.makeWorker("worker2", 10); RemoteTaskRunnerConfig rtrConfig = new TestRemoteTaskRunnerConfig(timeoutPeriod); rtrConfig.setMaxPercentageBlacklistWorkers(50); makeRemoteTaskRunner(rtrConfig); String firstWorker = null; String secondWorker = null; for (int i = 1; i < 13; i++) { String taskId = StringUtils.format("rt-%d", i); TestRealtimeTask task = new TestRealtimeTask( taskId, new TaskResource(taskId, 1), "foo", TaskStatus.success(taskId), jsonMapper ); Future<TaskStatus> taskFuture = remoteTaskRunner.run(task); if (i == 1) { if (rtrTestUtils.taskAnnounced("worker2", task.getId())) { firstWorker = "worker2"; secondWorker = "worker"; } else { firstWorker = "worker"; secondWorker = "worker2"; } } final String expectedWorker = i % 2 == 0 || i > 4 ? secondWorker : firstWorker; Assert.assertTrue(rtrTestUtils.taskAnnounced(expectedWorker, task.getId())); rtrTestUtils.mockWorkerRunningTask(expectedWorker, task); rtrTestUtils.mockWorkerCompleteFailedTask(expectedWorker, task); Assert.assertTrue(taskFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS).isFailure()); Assert.assertEquals(i > 2 ? 1 : 0, remoteTaskRunner.getBlackListedWorkers().size()); Assert.assertEquals( i > 4 ? 
i - 2 : ((i + 1) / 2), remoteTaskRunner.findWorkerRunningTask(task.getId()).getContinuouslyFailedTasksCount() ); } } @Test public void testSuccessfulTaskOnBlacklistedWorker() throws Exception { Period timeoutPeriod = Period.millis(1000); makeWorker(); RemoteTaskRunnerConfig rtrConfig = new TestRemoteTaskRunnerConfig(timeoutPeriod); rtrConfig.setMaxPercentageBlacklistWorkers(100); makeRemoteTaskRunner(rtrConfig); TestRealtimeTask task1 = new TestRealtimeTask( "realtime1", new TaskResource("realtime1", 1), "foo", TaskStatus.success("realtime1"), jsonMapper ); TestRealtimeTask task2 = new TestRealtimeTask( "realtime2", new TaskResource("realtime2", 1), "foo", TaskStatus.success("realtime2"), jsonMapper ); TestRealtimeTask task3 = new TestRealtimeTask( "realtime3", new TaskResource("realtime3", 1), "foo", TaskStatus.success("realtime3"), jsonMapper ); Future<TaskStatus> taskFuture1 = remoteTaskRunner.run(task1); Assert.assertTrue(taskAnnounced(task1.getId())); mockWorkerRunningTask(task1); mockWorkerCompleteFailedTask(task1); Assert.assertTrue(taskFuture1.get(TIMEOUT_SECONDS, TimeUnit.SECONDS).isFailure()); Assert.assertEquals(0, remoteTaskRunner.getBlackListedWorkers().size()); Future<TaskStatus> taskFuture2 = remoteTaskRunner.run(task2); Assert.assertTrue(taskAnnounced(task2.getId())); mockWorkerRunningTask(task2); Future<TaskStatus> taskFuture3 = remoteTaskRunner.run(task3); Assert.assertTrue(taskAnnounced(task3.getId())); mockWorkerRunningTask(task3); mockWorkerCompleteFailedTask(task3); Assert.assertTrue(taskFuture3.get(TIMEOUT_SECONDS, TimeUnit.SECONDS).isFailure()); Assert.assertEquals(1, remoteTaskRunner.getBlackListedWorkers().size()); mockWorkerCompleteSuccessfulTask(task2); Assert.assertTrue(taskFuture2.get(TIMEOUT_SECONDS, TimeUnit.SECONDS).isSuccess()); Assert.assertEquals(0, remoteTaskRunner.getBlackListedWorkers().size()); } }
package org.motechproject.mds.annotations.internal;

import com.thoughtworks.paranamer.Paranamer;
import org.joda.time.DateTime;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.motechproject.commons.api.Range;
import org.motechproject.commons.date.model.Time;
import org.motechproject.mds.annotations.Lookup;
import org.motechproject.mds.annotations.LookupField;
import org.motechproject.mds.annotations.RestExposed;
import org.motechproject.mds.dto.AdvancedSettingsDto;
import org.motechproject.mds.dto.EntityDto;
import org.motechproject.mds.dto.FieldDto;
import org.motechproject.mds.dto.LookupDto;
import org.motechproject.mds.dto.LookupFieldDto;
import org.motechproject.mds.dto.RestOptionsDto;
import org.motechproject.mds.dto.TypeDto;
import org.motechproject.mds.ex.lookup.LookupWrongParameterTypeException;
import org.motechproject.mds.service.EntityService;
import org.reflections.Reflections;

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static ch.lambdaj.Lambda.extract;
import static ch.lambdaj.Lambda.on;
import static java.util.Arrays.asList;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;
import static org.motechproject.mds.dto.LookupFieldType.RANGE;
import static org.motechproject.mds.dto.LookupFieldType.SET;
import static org.motechproject.mds.dto.LookupFieldType.VALUE;
import static org.motechproject.mds.testutil.FieldTestHelper.lookupFieldDto;
import static org.motechproject.mds.testutil.FieldTestHelper.lookupFieldDtos;

/**
 * Unit tests for {@link LookupProcessor}. Verifies that {@code @Lookup}-annotated methods
 * on an entity class are translated into the expected {@link LookupDto}s, covering
 * annotated and non-annotated parameters, custom lookup names, Range/Set parameter types,
 * REST exposure and failure modes (unknown entity, wrong lookup field type).
 */
public class LookupProcessorTest {
    @Mock
    Reflections reflections;

    @Mock
    Paranamer paranamer;

    @Mock
    EntityService entityService;

    @InjectMocks
    LookupProcessor lookupProcessor;

    // Parameter names the mocked Paranamer reports for the 3-argument test methods.
    private String[] argNames = {"arg0", "arg1", "arg2"};

    private static final String TEST_CLASS_NAME = TestClass.class.getName();

    @Before
    public void setUp() throws NoSuchMethodException {
        lookupProcessor = new LookupProcessor();
        // Injects the @Mock fields into the freshly created processor instance.
        initMocks(this);
    }

    /**
     * Builds an {@code EntityProcessorOutput} carrying the given entity and its discovered
     * fields, as the entity processor would have produced it.
     */
    private EntityProcessorOutput mockEntityProcessorOutput(EntityDto entity, List<FieldDto> fields) {
        EntityProcessorOutput output = new EntityProcessorOutput();
        output.setEntityProcessingResult(entity);
        output.setFieldProcessingResult(fields);
        return output;
    }

    @Test
    public void shouldProcessMethodWithLookupFields() throws NoSuchMethodException {
        FieldDto arg1Field = new FieldDto("arg1", "Arg1", TypeDto.INTEGER);
        FieldDto secondArgumentField = new FieldDto("secondArgument", "Second Argument", TypeDto.STRING);

        lookupProcessor.setEntityProcessingResult(
                Arrays.asList(mockEntityProcessorOutput(new EntityDto(TestClass.class.getName()),
                        Arrays.asList(arg1Field, secondArgumentField))));
        when(paranamer.lookupParameterNames(getTestMethod(1))).thenReturn(argNames);

        Method method = getTestMethod(1);
        lookupProcessor.process(method);

        Map<String, List<LookupDto>> elements = lookupProcessor.getProcessingResult();
        assertTrue(elements.containsKey(TEST_CLASS_NAME));

        List<LookupDto> list = elements.get(TEST_CLASS_NAME);
        // Only the two @LookupField-annotated parameters become lookup fields; the
        // second one carries its custom "LIKE" operator.
        LookupDto expected = new LookupDto("Test Method 1", true, false,
                asList(lookupFieldDto("arg1"), lookupFieldDto("secondArgument", "LIKE")), true,
                "testMethod1", asList("arg1", "secondArgument"));
        assertEquals(1, list.size());
        assertEquals(expected, list.get(0));
    }

    @Test(expected = LookupWrongParameterTypeException.class)
    public void shouldNotProcessMethodWithLookupFieldsWithWrongType() throws NoSuchMethodException {
        // "arg1" is declared STRING here while the method parameter is Integer,
        // so processing must fail with LookupWrongParameterTypeException.
        FieldDto arg1Field = new FieldDto("arg1", "Arg1", TypeDto.STRING);
        FieldDto secondArgumentField = new FieldDto("secondArgument", "Second Argument", TypeDto.STRING);

        lookupProcessor.setEntityProcessingResult(
                Arrays.asList(mockEntityProcessorOutput(new EntityDto(TestClass.class.getName()),
                        Arrays.asList(arg1Field, secondArgumentField))));
        when(paranamer.lookupParameterNames(getTestMethod(1))).thenReturn(argNames);

        Method method = getTestMethod(1);
        lookupProcessor.process(method);
    }

    @Test
    public void shouldProcessMethodWithNotAnnotatedParameters() throws NoSuchMethodException {
        FieldDto arg1Field = new FieldDto("arg1", "Arg1", TypeDto.INTEGER);
        FieldDto secondArgumentField = new FieldDto("secondArgument", "Second Argument", TypeDto.STRING);

        lookupProcessor.setEntityProcessingResult(
                Arrays.asList(mockEntityProcessorOutput(new EntityDto(TestClass.class.getName()),
                        Arrays.asList(arg1Field, secondArgumentField))));
        when(paranamer.lookupParameterNames(getTestMethod(2))).thenReturn(argNames);

        Method method = getTestMethod(2);
        lookupProcessor.process(method);

        // FIX: use getProcessingResult() like every other test in this class did;
        // this method previously called getElements() for the same map.
        Map<String, List<LookupDto>> elements = lookupProcessor.getProcessingResult();
        assertTrue(elements.containsKey(TEST_CLASS_NAME));

        List<LookupDto> list = elements.get(TEST_CLASS_NAME);
        // Without annotations every parameter becomes a lookup field.
        LookupDto expected = new LookupDto("Test Method 2", false, false,
                lookupFieldDtos(argNames), true, "testMethod2", asList(argNames));
        assertEquals(1, list.size());
        assertEquals(expected, list.get(0));
    }

    @Test
    public void shouldProcessMethodWithCustomLookupName() throws NoSuchMethodException {
        lookupProcessor.setEntityProcessingResult(
                Arrays.asList(mockEntityProcessorOutput(new EntityDto(TestClass.class.getName()),
                        Collections.EMPTY_LIST)));
        when(paranamer.lookupParameterNames(getTestMethod(3))).thenReturn(argNames);

        Method method = getTestMethod(3);
        // @Lookup(name = ...) overrides the name derived from the method name.
        LookupDto dto = new LookupDto("My new custom lookup", false, false,
                lookupFieldDtos(argNames), true, "testMethod3", asList(argNames));

        lookupProcessor.process(method);

        Map<String, List<LookupDto>> elements = lookupProcessor.getProcessingResult();
        assertTrue(elements.containsKey(TEST_CLASS_NAME));

        List<LookupDto> list = elements.get(TEST_CLASS_NAME);
        assertEquals(1, list.size());
        assertEquals(dto, list.get(0));
    }

    @Test
    public void shouldProcessMethodWithRangeParam() throws NoSuchMethodException {
        FieldDto arg0Field = new FieldDto("arg0Field", "Arg 0 Field", TypeDto.BOOLEAN);
        FieldDto rangeField = new FieldDto("rangeField", "Range Field", TypeDto.STRING);
        FieldDto regularFieldField = new FieldDto("regularField", "Regular Field", TypeDto.BOOLEAN);
        FieldDto rangeFieldField = new FieldDto("rangeFieldDouble", "Range Field Double", TypeDto.DOUBLE);

        EntityProcessorOutput eop = mockEntityProcessorOutput(new EntityDto(TestClass.class.getName()),
                Arrays.asList(arg0Field, rangeField, regularFieldField, rangeFieldField));
        lookupProcessor.setEntityProcessingResult(Arrays.asList(eop));

        LookupFieldDto[][] expectedFields = {
                {lookupFieldDto("arg0"), lookupFieldDto("range", RANGE)},
                {lookupFieldDto("regularField"), lookupFieldDto("rangeField", RANGE)}};
        String[][] expectedFieldsOrder = {{"arg0", "range"}, {"regularField", "rangeField"}};

        // test two methods, one with @LookupField annotations, second without
        for (int i = 0; i < 2; i++) {
            Method method = getTestMethodWithRangeParam(i);
            when(paranamer.lookupParameterNames(method)).thenReturn(new String[]{"arg0", "range"});

            LookupDto expectedLookup = new LookupDto("Test Method With Range Param " + i, false, false,
                    asList(expectedFields[i]), true, "testMethodWithRangeParam" + i,
                    asList(expectedFieldsOrder[i]));

            lookupProcessor.process(method);

            Map<String, List<LookupDto>> elements = lookupProcessor.getProcessingResult();
            assertTrue(elements.containsKey(TEST_CLASS_NAME));

            List<LookupDto> list = elements.get(TEST_CLASS_NAME);
            assertEquals(1, list.size());
            assertEquals(expectedLookup, list.get(0));
            // Range<...> parameters must surface as RANGE lookup fields.
            assertEquals(asList(VALUE, RANGE),
                    extract(list.get(0).getLookupFields(), on(LookupFieldDto.class).getType()));

            lookupProcessor.clear();
        }
    }

    @Test
    public void shouldProcessMethodWithSetParam() throws NoSuchMethodException {
        FieldDto arg0Field = new FieldDto("arg0Field", "Arg 0 Field", TypeDto.STRING);
        FieldDto setField = new FieldDto("setField", "Range Field", TypeDto.STRING);
        FieldDto regularField = new FieldDto("regularField", "Regular Field", TypeDto.STRING);
        FieldDto setFieldDouble = new FieldDto("setFieldDouble", "Set Field", TypeDto.DOUBLE);

        EntityProcessorOutput eop = mockEntityProcessorOutput(new EntityDto(TestClass.class.getName()),
                Arrays.asList(arg0Field, setField, regularField, setFieldDouble));
        lookupProcessor.setEntityProcessingResult(Arrays.asList(eop));

        LookupFieldDto[][] expectedFields = {
                {lookupFieldDto("arg0"), lookupFieldDto("set", SET)},
                {lookupFieldDto("regularField"), lookupFieldDto("setField", SET)}};
        // FIX: this array was copy-pasted from the Range test ({"arg0", "range"} /
        // {"regularField", "rangeField"}); the Set test methods' parameters are named
        // "arg0"/"set" (mocked Paranamer) and "regularField"/"setField" (annotations).
        String[][] expectedFieldsOrder = {{"arg0", "set"}, {"regularField", "setField"}};

        // test two methods, one with @LookupField annotations, second without
        for (int i = 0; i < 2; i++) {
            Method method = getTestMethodWithSetParam(i);
            when(paranamer.lookupParameterNames(method)).thenReturn(new String[]{"arg0", "set"});

            LookupDto expectedLookup = new LookupDto("Test Method With Set Param " + i, true, false,
                    asList(expectedFields[i]), true, "testMethodWithSetParam" + i,
                    asList(expectedFieldsOrder[i]));

            lookupProcessor.process(method);

            Map<String, List<LookupDto>> elements = lookupProcessor.getProcessingResult();
            assertTrue(elements.containsKey(TEST_CLASS_NAME));

            List<LookupDto> list = elements.get(TEST_CLASS_NAME);
            assertEquals(1, list.size());
            assertEquals(expectedLookup, list.get(0));
            // Set<...> parameters must surface as SET lookup fields.
            assertEquals(asList(VALUE, SET),
                    extract(list.get(0).getLookupFields(), on(LookupFieldDto.class).getType()));

            lookupProcessor.clear();
        }
    }

    @Test
    public void shouldBreakProcessingWhenEntityNotFound() throws NoSuchMethodException {
        when(paranamer.lookupParameterNames(getTestMethod(4))).thenReturn(argNames);

        EntityProcessorOutput eop = mockEntityProcessorOutput(new EntityDto(TestClass.class.getName()),
                Arrays.asList(new FieldDto("aaa", "bbb", TypeDto.STRING)));
        lookupProcessor.setEntityProcessingResult(Arrays.asList(eop));

        // testMethod4 returns Integer, which is not one of the processed entities,
        // so the processor must skip the lookup entirely.
        Method method = getTestMethod(4);
        lookupProcessor.process(method);

        assertTrue(lookupProcessor.getProcessingResult().isEmpty());
    }

    @Test
    public void shouldReturnCorrectAnnotation() {
        assertEquals(Lookup.class, lookupProcessor.getAnnotationType());
    }

    @Test
    public void shouldProcessMethodWithRestExposedAnnotation() throws Exception {
        when(paranamer.lookupParameterNames(getTestMethodExposedViaRest())).thenReturn(argNames);

        EntityProcessorOutput eop = mockEntityProcessorOutput(new EntityDto(TestClass.class.getName()),
                Arrays.asList(new FieldDto("aaa", "bbb", TypeDto.STRING)));
        lookupProcessor.setEntityProcessingResult(Arrays.asList(eop));

        Method method = getTestMethodExposedViaRest();
        // @RestExposed flips the lookup's exposedViaRest flag on.
        LookupDto dto = new LookupDto("Test Method Exposed Via Rest", true, true,
                lookupFieldDtos(argNames), true, "testMethodExposedViaRest", asList(argNames));

        lookupProcessor.process(method);

        Map<String, List<LookupDto>> elements = lookupProcessor.getProcessingResult();
        assertTrue(elements.containsKey(TEST_CLASS_NAME));

        List<LookupDto> list = elements.get(TEST_CLASS_NAME);
        assertEquals(1, list.size());
        assertEquals(dto, list.get(0));
    }

    @Test
    public void shouldNotUpdateRestExposedValueForLookupsThatHaveThatModifiedByUser() throws Exception {
        when(paranamer.lookupParameterNames(getTestMethodExposedViaRest())).thenReturn(argNames);

        // When the user already customized REST options, the annotation must not
        // override them: expect exposedViaRest == false despite @RestExposed.
        AdvancedSettingsDto advanced = mock(AdvancedSettingsDto.class);
        RestOptionsDto restOptions = mock(RestOptionsDto.class);
        when(entityService.safeGetAdvancedSettingsCommitted(TEST_CLASS_NAME)).thenReturn(advanced);
        when(advanced.getRestOptions()).thenReturn(restOptions);
        when(restOptions.isModifiedByUser()).thenReturn(true);

        EntityProcessorOutput eop = mockEntityProcessorOutput(new EntityDto(TestClass.class.getName()),
                Arrays.asList(new FieldDto("aaa", "bbb", TypeDto.STRING)));
        lookupProcessor.setEntityProcessingResult(Arrays.asList(eop));

        Method method = getTestMethodExposedViaRest();
        LookupDto dto = new LookupDto("Test Method Exposed Via Rest", true, false,
                lookupFieldDtos(argNames), true, "testMethodExposedViaRest", asList(argNames));

        lookupProcessor.process(method);

        Map<String, List<LookupDto>> elements = lookupProcessor.getProcessingResult();
        assertTrue(elements.containsKey(TEST_CLASS_NAME));

        List<LookupDto> list = elements.get(TEST_CLASS_NAME);
        assertEquals(1, list.size());
        assertEquals(dto, list.get(0));
    }

    private Method getTestMethod(int number) throws NoSuchMethodException {
        return TestClass.class.getMethod("testMethod" + number, String.class, Integer.class, String.class);
    }

    private Method getTestMethodWithRangeParam(int number) throws NoSuchMethodException {
        return TestClass.class.getMethod("testMethodWithRangeParam" + number, Boolean.class, Range.class);
    }

    private Method getTestMethodWithSetParam(int number) throws NoSuchMethodException {
        return TestClass.class.getMethod("testMethodWithSetParam" + number, String.class, Set.class);
    }

    private Method getTestMethodExposedViaRest() throws NoSuchMethodException {
        return TestClass.class.getMethod("testMethodExposedViaRest", String.class, Integer.class, String.class);
    }

    // Fixture entity class whose @Lookup methods drive the tests above.
    private class TestClass {
        @Lookup
        public TestClass testMethod1(String arg0, @LookupField Integer arg1,
                                     @LookupField(name = "secondArgument", customOperator = "LIKE") String arg2) {
            return null;
        }

        @Lookup
        public List<TestClass> testMethod2(String arg0, Integer arg1, String arg2) {
            return new ArrayList<>();
        }

        @Lookup(name = "My new custom lookup")
        public List<TestClass> testMethod3(String arg0, Integer arg1, String arg2) {
            return new ArrayList<>();
        }

        @Lookup
        public Integer testMethod4(String arg0, Integer arg1, String arg2) {
            return 42;
        }

        @Lookup
        public List<TestClass> testMethodWithRangeParam0(Boolean arg0, Range<DateTime> range) {
            return Collections.emptyList();
        }

        @Lookup
        public List<TestClass> testMethodWithRangeParam1(@LookupField(name = "regularField") Boolean arg0,
                                                         @LookupField(name = "rangeField") Range<DateTime> range) {
            return Collections.emptyList();
        }

        @Lookup
        public TestClass testMethodWithSetParam0(String arg0, Set<Time> set) {
            return null;
        }

        @Lookup
        public TestClass testMethodWithSetParam1(@LookupField(name = "regularField") String arg0,
                                                 @LookupField(name = "setField") Set<Time> range) {
            return null;
        }

        @Lookup
        @RestExposed
        public TestClass testMethodExposedViaRest(String arg0, Integer arg1, String arg2) {
            return null;
        }
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.hadoop.integration.hive;

import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.List;

import org.elasticsearch.hadoop.HdpBootstrap;
import org.elasticsearch.hadoop.QueryTestParams;
import org.elasticsearch.hadoop.cfg.ConfigurationOptions;
import org.elasticsearch.hadoop.EsAssume;
import org.elasticsearch.hadoop.rest.RestUtils;
import org.elasticsearch.hadoop.util.EsMajorVersion;
import org.elasticsearch.hadoop.util.TestUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.rules.LazyTempFolder;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

import static org.elasticsearch.hadoop.util.TestUtils.resource;
import static org.junit.Assert.*;

import static org.elasticsearch.hadoop.integration.hive.HiveSuite.*;

// Parameterized Hive-on-Elasticsearch integration test: each test creates an external
// Hive table backed by an ES index and checks the rows a SELECT returns. Table names are
// suffixed with testInstance so repeated parameterized runs don't collide.
@RunWith(Parameterized.class)
public class AbstractHiveSearchTest {

    private static int testInstance = 0;

    @ClassRule
    public static LazyTempFolder tempFolder = new LazyTempFolder();

    @Parameters
    public static Collection<Object[]> queries() {
        return new QueryTestParams(tempFolder).params();
    }

    private String query;
    private boolean readMetadata;
    private EsMajorVersion targetVersion;

    public AbstractHiveSearchTest(String query, boolean readMetadata) {
        this.query = query;
        this.readMetadata = readMetadata;
    }

    @Before
    public void before() throws Exception {
        provisionEsLib();
        RestUtils.refresh("hive*");
        targetVersion = TestUtils.getEsClusterInfo().getMajorVersion();
        new QueryTestParams(tempFolder).provisionQueries(HdpBootstrap.hadoopConfig());
    }

    @After
    public void after() throws Exception {
        // Bump the suffix so the next test's tables don't clash with this run's.
        testInstance++;
        HiveSuite.after();
    }

    // Basic external-table read: rows from the hive-artists index come back non-null.
    @Test
    public void basicLoad() throws Exception {
        String create = "CREATE EXTERNAL TABLE artistsload" + testInstance + "("
                + "id BIGINT, "
                + "name STRING, "
                + "links STRUCT<url:STRING, picture:STRING>) "
                + tableProps(resource("hive-artists", "data", targetVersion));

        String select = "SELECT * FROM artistsload" + testInstance;

        server.execute(create);
        List<String> result = server.execute(select);
        assertTrue("Hive returned null", containsNoNull(result));
        assertContains(result, "Marilyn");
        assertContains(result, "last.fm/music/MALICE");
        assertContains(result, "last.fm/serve/252/5872875.jpg");
    }

    // Same read but with the document metadata mapped into a 'meta' column.
    @Test
    public void basicLoadWMetadata() throws Exception {
        Assume.assumeTrue("Only applicable to metadata reading", readMetadata);

        String create = "CREATE EXTERNAL TABLE artistsload" + testInstance + "("
                + "id BIGINT, "
                + "name STRING, "
                + "links STRUCT<url:STRING, picture:STRING>, "
                + "meta MAP<STRING, STRING>) "
                + tableProps(resource("hive-artists", "data", targetVersion), "'es.read.metadata.field'='meta'");

        String select = "SELECT meta FROM artistsload" + testInstance;

        server.execute(create);
        List<String> result = server.execute(select);
        assertTrue("Hive returned null", containsNoNull(result));
        assertContains(result, "\"_score\":\"1.0\"");
        System.out.println(result);
    }

    //@Test
    public void basicCountOperator() throws Exception {
        String create = "CREATE EXTERNAL TABLE artistscount" + testInstance + " ("
                + "id BIGINT, "
                + "name STRING, "
                + "links STRUCT<url:STRING, picture:STRING>) "
                + tableProps(resource("hive-artists", "data", targetVersion));

        String select = "SELECT count(*) FROM artistscount" + testInstance;

        server.execute(create);
        List<String> result = server.execute(select);
        assertTrue("Hive returned null", containsNoNull(result));
        assertEquals(1, result.size());
        assertTrue(Integer.valueOf(result.get(0)) > 1);
    }

    // Arrays and maps in the ES document map onto Hive ARRAY/MAP columns.
    @Test
    public void basicArrayMapping() throws Exception {
        String create = "CREATE EXTERNAL TABLE compoundarray" + testInstance + " ("
                + "rid BIGINT, "
                + "mapids ARRAY<BIGINT>, "
                + "rdata MAP<STRING, STRING>) "
                + tableProps(resource("hive-compound", "data", targetVersion));

        String select = "SELECT * FROM compoundarray" + testInstance;

        server.execute(create);
        List<String> result = server.execute(select);
        assertTrue(result.size() > 1);
        assertTrue("Hive returned null", containsNoNull(result));
        assertContains(result, "400,401");
        assertContains(result, "{\"6\":");
    }

    //@Test
    public void basicTimestampLoad() throws Exception {
        String create = "CREATE EXTERNAL TABLE timestampload" + testInstance + " ("
                + "id BIGINT, "
                + "date TIMESTAMP, "
                + "name STRING, "
                + "links STRUCT<url:STRING, picture:STRING>) "
                + tableProps(resource("hive-artiststimestamp", "data", targetVersion));

        String select = "SELECT date FROM timestampload" + testInstance;
        String select2 = "SELECT unix_timestamp(), date FROM timestampload" + testInstance;

        String date = new SimpleDateFormat("yyyy-MM-dd").format(new Date());

        server.execute(create);
        List<String> result = server.execute(select);
        assertTrue(result.size() > 1);
        assertTrue("Hive returned null", containsNoNull(result));
        assertContains(result, date);

        result = server.execute(select2);
        assertTrue("Hive returned null", containsNoNull(result));
        assertTrue(result.size() > 1);
        assertContains(result, date);
    }

    @Test
    @Ignore // cast isn't fully supported for date as it throws CCE
    public void basicDateLoad() throws Exception {
        String create = "CREATE EXTERNAL TABLE dateload" + testInstance + " ("
                + "id BIGINT, "
                + "date DATE, "
                + "name STRING, "
                + "links STRUCT<url:STRING, picture:STRING>) "
                + tableProps(resource("hive-datesave", "data", targetVersion));

        String select = "SELECT date FROM dateload" + testInstance;
        String select2 = "SELECT unix_timestamp(), date FROM dateload" + testInstance;

        System.out.println(server.execute(create));
        List<String> result = server.execute(select);
        System.out.println(result);
        assertTrue("Hive returned null", containsNoNull(result));
        System.out.println(server.execute(select2));
    }

    //@Test
    public void javaMethodInvocation() throws Exception {
        String create = "CREATE EXTERNAL TABLE methodInvocation" + testInstance + " ("
                + "id BIGINT, "
                + "name STRING, "
                + "links STRUCT<url:STRING, picture:STRING>) "
                + tableProps(resource("hive-artists", "data", targetVersion));

        long currentTimeMillis = System.currentTimeMillis();

        String select = "SELECT java_method(\"java.lang.System\", \"currentTimeMillis\") FROM methodInvocation" + testInstance + " LIMIT 5";

        server.execute(create);
        List<String> result = server.execute(select);
        assertTrue("Hive returned null", containsNoNull(result));
        // Only the leading digits are stable between the two clock reads.
        assertContains(result, String.valueOf(currentTimeMillis).substring(0, 5));
    }

    // 'es.mapping.names' maps differently-named/cased Hive columns onto ES fields.
    @Test
    public void columnAliases() throws Exception {
        String create = "CREATE EXTERNAL TABLE aliasload" + testInstance + " ("
                + "dTE TIMESTAMP, "
                + "Name STRING, "
                + "links STRUCT<uRl:STRING, pICture:STRING>) "
                + tableProps(resource("hive-aliassave", "data", targetVersion), "'es.mapping.names' = 'dTE:@timestamp, uRl:url_123'");

        String select = "SELECT * FROM aliasload" + testInstance;

        server.execute(create);
        List<String> result = server.execute(select);
        assertTrue("Hive returned null", containsNoNull(result));
        assertTrue(result.size() > 1);
        assertContains(result, "Marilyn");
        assertContains(result, "last.fm/music/MALICE");
        assertContains(result, "last.fm/serve/252/2181591.jpg");
    }

    // A missing index with 'es.index.read.missing.as.empty' yields an empty result, not an error.
    @Test
    public void testMissingIndex() throws Exception {
        String create = "CREATE EXTERNAL TABLE missing" + testInstance + " ("
                + "dTE TIMESTAMP, "
                + "Name STRING, "
                + "links STRUCT<uRl:STRING, pICture:STRING>) "
                + tableProps(resource("foobar", "missing", targetVersion), "'es.index.read.missing.as.empty' = 'true'");

        String select = "SELECT * FROM missing" + testInstance;

        server.execute(create);
        List<String> result = server.execute(select);
        assertEquals(0, result.size());
    }

    // A user-supplied source filter must conflict with the connector's own and fail the query.
    @Test(expected = SQLException.class)
    public void testSourceFieldCollision() throws Exception {
        String create = "CREATE EXTERNAL TABLE collisiontest" + testInstance + "("
                + "id BIGINT, "
                + "name STRING, "
                + "links STRUCT<url:STRING, picture:STRING>) "
                + tableProps(resource("hive-artists", "data", targetVersion), "'es.read.source.filter' = 'name,links'");

        String select = "SELECT * FROM collisiontest" + testInstance;

        server.execute(create);
        server.execute(select);
        fail("Should not have executed successfully: User specified source filter should conflict with source filter from connector.");
    }

    @Test
    public void testVarcharLoad() throws Exception {
        String create = "CREATE EXTERNAL TABLE varcharload" + testInstance + " ("
                + "id BIGINT, "
                + "name STRING, "
                + "links STRUCT<url:STRING, picture:STRING>) "
                + tableProps(resource("hive-varcharsave", "data", targetVersion));

        String select = "SELECT * FROM varcharload" + testInstance;

        System.out.println(server.execute(create));
        List<String> result = server.execute(select);
        assertTrue("Hive returned null", containsNoNull(result));
        assertTrue(result.size() > 1);
        assertContains(result, "Marilyn");
        assertContains(result, "last.fm/music/MALICE");
        assertContains(result, "last.fm/serve/252/2181591.jpg");
    }

    @Test
    public void testCharLoad() throws Exception {
        // create external table
        // NOTE(review): this chunk of the file is truncated here, mid string-literal;
        // the remainder of this method lies outside the visible range.
        String create = "CREATE EXTERNAL TABLE charload" + testInstance + " (" + "id BIGINT, " + "name STRING, " + "links STRUCT<url:STRING, 
picture:STRING>) " + tableProps(resource("hive-charsave", "data", targetVersion)); // this does not String select = "SELECT * FROM charload" + testInstance; server.execute(create); List<String> result = server.execute(select); assertTrue(result.size() > 1); assertContains(result, "Marilyn"); assertContains(result, "last.fm/music/MALICE"); assertContains(result, "last.fm/serve/252/2181591.jpg"); } @Test public void testParentChild() throws Exception { EsAssume.versionOnOrBefore(EsMajorVersion.V_5_X, "Parent Child Disabled in 6.0"); String create = "CREATE EXTERNAL TABLE childload" + testInstance + " (" + "id BIGINT, " + "name STRING, " + "links STRUCT<url:STRING, picture:STRING>) " + tableProps("hive-pc/child", "'es.index.read.missing.as.empty' = 'true'"); String select = "SELECT * FROM childload" + testInstance; System.out.println(server.execute(create)); List<String> result = server.execute(select); assertTrue("Hive returned null", containsNoNull(result)); assertTrue(result.size() > 1); assertContains(result, "Marilyn"); assertContains(result, "last.fm/music/MALICE"); assertContains(result, "last.fm/serve/252/2181591.jpg"); } @Test public void testReadWriteSameJob() throws Exception { String write = "CREATE EXTERNAL TABLE rwwrite" + testInstance + " (" + "id BIGINT, " + "name STRING, " + "links STRUCT<url:STRING, picture:STRING>) " + tableProps(resource("hive-rwwrite", "data", targetVersion)); String read = "CREATE EXTERNAL TABLE rwread" + testInstance + " (" + "id BIGINT, " + "name STRING, " + "links STRUCT<url:STRING, picture:STRING>) " + tableProps(resource("hive-artists", "data", targetVersion)); String selectInsert = "INSERT OVERWRITE TABLE rwwrite" + testInstance + " SELECT * FROM rwread" + testInstance; String select = "SELECT * FROM rwwrite" + testInstance; System.out.println(server.execute(read)); System.out.println(server.execute(write)); System.out.println(server.execute(selectInsert)); List<String> result = server.execute(select); assertTrue("Hive 
returned null", containsNoNull(result)); assertTrue(result.size() > 1); assertContains(result, "Marilyn"); assertContains(result, "last.fm/music/MALICE"); assertContains(result, "last.fm/serve/252/2181591.jpg"); } //@Test public void basicJoin() throws Exception { String left = "CREATE EXTERNAL TABLE left" + testInstance + "(" + "id BIGINT, " + "name STRING, " + "links STRUCT<url:STRING, picture:STRING>) " + tableProps(resource("hive-artists", "data", targetVersion)); String right = "CREATE EXTERNAL TABLE right" + testInstance + "(" + "id BIGINT, " + "name STRING, " + "links STRUCT<url:STRING, picture:STRING>) " + tableProps(resource("hive-artists", "data", targetVersion)); String select = "SELECT * FROM left" + testInstance + " l JOIN right" + testInstance + " r ON l.id = r.id"; //String select = "SELECT * FROM left" + testInstance + " l JOIN source r ON l.id = r.id"; server.execute(left); server.execute(right); System.out.println(server.execute("SHOW CREATE TABLE left" + testInstance)); System.out.println(server.execute("SHOW CREATE TABLE right" + testInstance)); List<String> result = server.execute(select); assertTrue("Hive returned null", containsNoNull(result)); assertContains(result, "Marilyn"); assertContains(result, "last.fm/music/MALICE"); assertContains(result, "last.fm/serve/252/5872875.jpg"); } @Test public void basicUnion() throws Exception { //table unionA and table uinonB should be from difference es index/type String unionA = "CREATE EXTERNAL TABLE uniona" + testInstance + " (" + "id BIGINT, " + "name STRING, " + "links STRUCT<url:STRING, picture:STRING>) " + tableProps(resource("hive-artists", "data", targetVersion)); String unionB = "CREATE EXTERNAL TABLE unionb" + testInstance + " (" + "id BIGINT, " + "name STRING, " + "links STRUCT<url:STRING, picture:STRING>) " + tableProps(resource("hive-varcharsave", "data", targetVersion)); //create two external table server.execute(unionA); server.execute(unionB); // select alone String selectA = "SELECT 
id,name FROM uniona" + testInstance; String selectB = "SELECT id,name FROM unionb" + testInstance; List<String> resultA = server.execute(selectA); List<String> resultB = server.execute(selectB); //select union String selectUnion = selectA + " UNION ALL " + selectB; List<String> resultUnion = server.execute(selectUnion); System.out.println(server.execute("SHOW CREATE TABLE uniona" + testInstance)); System.out.println(server.execute("SHOW CREATE TABLE unionb" + testInstance)); assertTrue("Hive returned null", containsNoNull(resultA)); assertTrue("Hive returned null", containsNoNull(resultB)); assertTrue("Hive returned null", containsNoNull(resultUnion)); //union all operation don't remove the same elements, //so the total is equal to the sum of all the subqueries. assertTrue(resultA.size() + resultB.size() == resultUnion.size()); } @Test public void testDynamicPattern() throws Exception { Assert.assertTrue(RestUtils.exists(resource("hive-pattern-7", "data", targetVersion))); Assert.assertTrue(RestUtils.exists(resource("hive-pattern-10", "data", targetVersion))); Assert.assertTrue(RestUtils.exists(resource("hive-pattern-15", "data", targetVersion))); } @Test public void testDynamicPatternFormat() throws Exception { Assert.assertTrue(RestUtils.exists(resource("hive-pattern-format-2007-10-06", "data", targetVersion))); Assert.assertTrue(RestUtils.exists(resource("hive-pattern-format-2011-10-06", "data", targetVersion))); Assert.assertTrue(RestUtils.exists(resource("hive-pattern-format-2001-10-06", "data", targetVersion))); } private static boolean containsNoNull(List<String> str) { for (String string : str) { if (string.contains("NULL")) { return false; } } return true; } private static void assertContains(List<String> str, String content) { for (String string : str) { if (string.contains(content)) { return; } } fail(String.format("'%s' not found in %s", content, str)); } private String tableProps(String resource, String... 
params) { List<String> copy = new ArrayList(Arrays.asList(params)); copy.add("'" + ConfigurationOptions.ES_READ_METADATA + "'='" + readMetadata + "'"); return HiveSuite.tableProps(resource, query, copy.toArray(new String[copy.size()])); } }
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2012 ZAP development team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.zap.control; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import org.apache.commons.configuration.SubnodeConfiguration; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.SystemUtils; import org.apache.log4j.Logger; import org.parosproxy.paros.Constant; import org.parosproxy.paros.core.scanner.AbstractPlugin; import org.parosproxy.paros.extension.Extension; import org.zaproxy.zap.Version; import org.zaproxy.zap.control.dependency.CyclicDependencyIssue; import org.zaproxy.zap.control.dependency.DifferentSemanticsDependencyIssue; import org.zaproxy.zap.control.dependency.MissingDependencyIssue; import org.zaproxy.zap.control.dependency.NewerDepencyIssue; import org.zaproxy.zap.control.dependency.OlderVersionDependencyIssue; import org.zaproxy.zap.control.dependency.OutdatedDepencyIssue; import org.zaproxy.zap.extension.pscan.PluginPassiveScanner; public class AddOn { public enum Status {unknown, example, alpha, beta, weekly, 
release} private static ZapRelease v2_4 = new ZapRelease("2.4.0"); /** * The installation status of the add-on. * * @since 2.4.0 */ public enum InstallationStatus { /** * The add-on is available for installation, for example, an add-on in the marketplace (even if it requires previous * actions, in this case, download the file). */ AVAILABLE, /** * The add-on was not (yet) installed. For example, the add-on is available in the 'plugin' directory but it's missing a * dependency or requires a greater Java version. It's also in this status while a dependency is being updated. */ NOT_INSTALLED, /** * The add-on is installed. */ INSTALLED, /** * The add-on is being downloaded. */ DOWNLOADING, /** * The uninstallation of the add-on failed. For example, when the add-on is not dynamically installable or when an * {@code Exception} is thrown during the uninstallation. */ UNINSTALLATION_FAILED, /** * The soft uninstallation of the add-on failed. It's in this status when the uninstallation failed for an update of a * dependency. */ SOFT_UNINSTALLATION_FAILED } private String id; private String name; private String description = ""; private String author = ""; private int fileVersion; private Version version; private Status status; private String changes = ""; private File file = null; private URL url = null; private URL info = null; private long size = 0; private boolean hasZapAddOnEntry = false; /** * Flag that indicates if the manifest was read (or attempted to). Allows to prevent reading the manifest a second time when * the add-on file is corrupt. */ private boolean manifestRead; private String notBeforeVersion = null; private String notFromVersion = null; private String hash = null; /** * The installation status of the add-on. * <p> * Default is {@code NOT_INSTALLED}. 
* * @see InstallationStatus#NOT_INSTALLED */ private InstallationStatus installationStatus = InstallationStatus.NOT_INSTALLED; private List<String> extensions = Collections.emptyList(); private List<ExtensionWithDeps> extensionsWithDeps = Collections.emptyList(); /** * The extensions of the add-on that were loaded. * <p> * This instance variable is lazy initialised. * * @see #addLoadedExtension(Extension) * @see #removeLoadedExtension(Extension) */ private List<Extension> loadedExtensions; private List<String> ascanrules = Collections.emptyList(); private List<AbstractPlugin> loadedAscanrules = Collections.emptyList(); private boolean loadedAscanRulesSet; private List<String> pscanrules = Collections.emptyList(); private List<PluginPassiveScanner> loadedPscanrules = Collections.emptyList(); private boolean loadedPscanRulesSet; private List<String> files = Collections.emptyList(); private AddOnClassnames addOnClassnames = AddOnClassnames.ALL_ALLOWED; private Dependencies dependencies; private static final Logger logger = Logger.getLogger(AddOn.class); public static boolean isAddOn(String fileName) { if (! fileName.toLowerCase().endsWith(".zap")) { return false; } if (fileName.substring(0, fileName.indexOf(".")).split("-").length < 3) { return false; } String[] strArray = fileName.substring(0, fileName.indexOf(".")).split("-"); try { Status.valueOf(strArray[1]); Integer.parseInt(strArray[2]); } catch (Exception e) { return false; } return true; } public static boolean isAddOn(File f) { if (! f.exists()) { return false; } return isAddOn(f.getName()); } public AddOn(String fileName) throws Exception { // Format is <name>-<status>-<version>.zap if (! 
isAddOn(fileName)) { throw new Exception("Invalid ZAP add-on file " + fileName); } String[] strArray = fileName.substring(0, fileName.indexOf(".")).split("-"); this.id = strArray[0]; this.name = this.id; // Will be overriden if theres a ZapAddOn.xml file this.status = Status.valueOf(strArray[1]); this.fileVersion = Integer.parseInt(strArray[2]); } /** * Constructs an {@code AddOn} from the given {@code file}. * <p> * The {@code ZapAddOn.xml} ZIP file entry is read after validating that the add-on has a valid add-on file name. * <p> * The installation status of the add-on is 'not installed'. * * @param file the file of the add-on * @throws Exception if the given {@code file} does not exist, does not have a valid add-on file name or an error occurred * while reading the {@code ZapAddOn.xml} ZIP file entry */ public AddOn(File file) throws Exception { this(file.getName()); if (! isAddOn(file)) { throw new Exception("Invalid ZAP add-on file " + file.getAbsolutePath()); } this.file = file; loadManifestFile(); } private void loadManifestFile() throws IOException { manifestRead = true; if (file.exists()) { // Might not exist in the tests try (ZipFile zip = new ZipFile(file)) { ZipEntry zapAddOnEntry = zip.getEntry("ZapAddOn.xml"); if (zapAddOnEntry != null) { try (InputStream zis = zip.getInputStream(zapAddOnEntry)) { ZapAddOnXmlFile zapAddOnXml = new ZapAddOnXmlFile(zis); this.name = zapAddOnXml.getName(); this.version = zapAddOnXml.getVersion(); this.description = zapAddOnXml.getDescription(); this.changes = zapAddOnXml.getChanges(); this.author = zapAddOnXml.getAuthor(); this.notBeforeVersion = zapAddOnXml.getNotBeforeVersion(); this.notFromVersion = zapAddOnXml.getNotFromVersion(); this.dependencies = zapAddOnXml.getDependencies(); this.ascanrules = zapAddOnXml.getAscanrules(); this.extensions = zapAddOnXml.getExtensions(); this.extensionsWithDeps = zapAddOnXml.getExtensionsWithDeps(); this.files = zapAddOnXml.getFiles(); this.pscanrules = zapAddOnXml.getPscanrules(); 
this.addOnClassnames = zapAddOnXml.getAddOnClassnames(); hasZapAddOnEntry = true; } } } } } /** * Constructs an {@code AddOn} from an add-on entry of {@code ZapVersions.xml} file. The installation status of the add-on * is 'not installed'. * <p> * The given {@code SubnodeConfiguration} must have a {@code XPathExpressionEngine} installed. * <p> * The {@code ZapAddOn.xml} ZIP file entry is read, if the add-on file exists locally. * * @param id the id of the add-on * @param baseDir the base directory where the add-on is located * @param xmlData the source of add-on entry of {@code ZapVersions.xml} file * @throws MalformedURLException if the {@code URL} of the add-on is malformed * @throws IOException if an error occurs while reading the XML data * @see org.apache.commons.configuration.tree.xpath.XPathExpressionEngine */ public AddOn(String id, File baseDir, SubnodeConfiguration xmlData) throws MalformedURLException, IOException { this.id = id; ZapVersionsAddOnEntry addOnData = new ZapVersionsAddOnEntry(xmlData); this.name = addOnData.getName(); this.description = addOnData.getDescription(); this.author = addOnData.getAuthor(); this.fileVersion = addOnData.getPackageVersion(); this.dependencies = addOnData.getDependencies(); this.extensionsWithDeps = addOnData.getExtensionsWithDeps(); this.version = addOnData.getVersion(); this.status = AddOn.Status.valueOf(addOnData.getStatus()); this.changes = addOnData.getChanges(); this.url = new URL(addOnData.getUrl()); this.file = new File(baseDir, addOnData.getFile()); this.size = addOnData.getSize(); this.notBeforeVersion = addOnData.getNotBeforeVersion(); this.notFromVersion = addOnData.getNotFromVersion(); if (addOnData.getInfo() != null && !addOnData.getInfo().isEmpty()) { try { this.info = new URL(addOnData.getInfo()); } catch (Exception ignore) { if (logger.isDebugEnabled()) { logger.debug("Wrong info URL for add-on \"" + name + "\":", ignore); } } } this.hash = addOnData.getHash(); loadManifestFile(); } public String 
getId() {
    return id;
}

/** Sets the id of the add-on. */
public void setId(String id) {
    this.id = id;
}

/** Gets the (display) name of the add-on. */
public String getName() {
    return name;
}

/** Sets the (display) name of the add-on. */
public void setName(String name) {
    this.name = name;
}

/** Gets the description of the add-on. */
public String getDescription() {
    return description;
}

/** Sets the description of the add-on. */
public void setDescription(String description) {
    this.description = description;
}

/** Gets the (package) file version of the add-on, parsed from its file name. */
public int getFileVersion() {
    return fileVersion;
}

/**
 * Gets the semantic version of this add-on.
 *
 * @return the semantic version of the add-on, or {@code null} if none
 * @since 2.4.0
 */
public Version getVersion() {
    return version;
}

/** Gets the status (alpha, beta, release, ...) of the add-on. */
public Status getStatus() {
    return status;
}

/** Sets the status of the add-on. */
public void setStatus(Status status) {
    this.status = status;
}

/** Gets the change log of the add-on. */
public String getChanges() {
    return changes;
}

/** Sets the change log of the add-on. */
public void setChanges(String changes) {
    this.changes = changes;
}

/** Gets the local file of the add-on. */
public File getFile() {
    return file;
}

/** Sets the local file of the add-on. */
public void setFile(File file) {
    this.file = file;
}

/** Gets the URL from which the add-on can be downloaded. */
public URL getUrl() {
    return url;
}

/** Sets the URL from which the add-on can be downloaded. */
public void setUrl(URL url) {
    this.url = url;
}

/** Gets the size (in bytes) of the add-on file. */
public long getSize() {
    return size;
}

/** Sets the size (in bytes) of the add-on file. */
public void setSize(long size) {
    this.size = size;
}

/** Gets the author of the add-on. */
public String getAuthor() {
    return author;
}

/** Sets the author of the add-on. */
public void setAuthor(String author) {
    this.author = author;
}

/**
 * Sets the installation status of the add-on.
 *
 * @param installationStatus the new installation status
 * @throws IllegalArgumentException if the given {@code installationStatus} is {@code null}.
 * @since 2.4.0
 */
public void setInstallationStatus(InstallationStatus installationStatus) {
    if (installationStatus == null) {
        throw new IllegalArgumentException("Parameter installationStatus must not be null.");
    }
    this.installationStatus = installationStatus;
}

/**
 * Gets the installation status of the add-on.
 *
 * @return the installation status, never {@code null}
 * @since 2.4.0
 */
public InstallationStatus getInstallationStatus() {
    return installationStatus;
}

/** Tells whether the add-on bundle contained a {@code ZapAddOn.xml} manifest entry (read lazily). */
public boolean hasZapAddOnEntry() {
    if (! 
hasZapAddOnEntry) { if (!manifestRead) { // Worth trying, as it depends which constructor has been used try { this.loadManifestFile(); } catch (IOException e) { if (logger.isDebugEnabled()) { logger.debug("Failed to read the ZapAddOn.xml file of " + id + ":", e); } } } } return hasZapAddOnEntry; } /** * Gets the classnames that can be loaded for the add-on. * * @return the classnames that can be loaded * @since 2.4.3 */ public AddOnClassnames getAddOnClassnames() { return addOnClassnames; } public List<String> getExtensions() { return extensions; } /** * Returns the classnames of {@code Extension}sthat have dependencies on add-ons. * * @return the classnames of the extensions with dependencies on add-ons. * @see #hasExtensionsWithDeps() */ public List<String> getExtensionsWithDeps() { if (extensionsWithDeps.isEmpty()) { return Collections.emptyList(); } List<String> extensionClassnames = new ArrayList<>(extensionsWithDeps.size()); for (ExtensionWithDeps extension : extensionsWithDeps) { extensionClassnames.add(extension.getClassname()); } return extensionClassnames; } /** * Returns the classnames that can be loaded for the given {@code Extension} (with dependencies). * * @param classname the classname of the extension * @return the classnames that can be loaded * @since 2.4.3 * @see #hasExtensionsWithDeps() */ public AddOnClassnames getExtensionAddOnClassnames(String classname) { if (extensionsWithDeps.isEmpty() || classname == null || classname.isEmpty()) { return AddOnClassnames.ALL_ALLOWED; } for (ExtensionWithDeps extension : extensionsWithDeps) { if (classname.equals(extension.getClassname())) { return extension.getAddOnClassnames(); } } return AddOnClassnames.ALL_ALLOWED; } /** * Tells whether or not this add-on has at least one extension with dependencies. 
* * @return {@code true} if the add-on has at leas one extension with dependencies, {@code false} otherwise * @see #getExtensionsWithDeps() */ public boolean hasExtensionsWithDeps() { return !extensionsWithDeps.isEmpty(); } /** * Gets the extensions of this add-on that have dependencies and were loaded. * * @return an unmodifiable {@code List} with the extensions of this add-on that have dependencies and were loaded * @since 2.4.0 */ public List<Extension> getLoadedExtensionsWithDeps() { List<String> classnames = getExtensionsWithDeps(); ArrayList<Extension> loadedExtensions = new ArrayList<>(extensionsWithDeps.size()); for (Extension extension : getLoadedExtensions()) { if (classnames.contains(extension.getClass().getCanonicalName())) { loadedExtensions.add(extension); } } loadedExtensions.trimToSize(); return loadedExtensions; } /** * Gets the extensions of this add-on that were loaded. * * @return an unmodifiable {@code List} with the extensions of this add-on that were loaded * @since 2.4.0 */ public List<Extension> getLoadedExtensions() { if (loadedExtensions == null) { return Collections.emptyList(); } return Collections.unmodifiableList(loadedExtensions); } /** * Adds the given {@code extension} to the list of loaded extensions of this add-on. * <p> * This add-on is set to the given {@code extension}. * * @param extension the extension of this add-on that was loaded * @throws IllegalArgumentException if extension is {@code null} * @since 2.4.0 * @see #removeLoadedExtension(Extension) * @see Extension#setAddOn(AddOn) */ public void addLoadedExtension(Extension extension) { if (extension == null) { throw new IllegalArgumentException("Parameter extension must not be null."); } if (loadedExtensions == null) { loadedExtensions = new ArrayList<>(1); } if (!loadedExtensions.contains(extension)) { loadedExtensions.add(extension); extension.setAddOn(this); } } /** * Removes the given {@code extension} from the list of loaded extensions of this add-on. 
* <p> * The add-on of the given {@code extension} is set to {@code null}. * <p> * The call to this method has no effect if the given {@code extension} does not belong to this add-on. * * @param extension the loaded extension of this add-on that should be removed * @throws IllegalArgumentException if extension is {@code null} * @since 2.4.0 * @see #addLoadedExtension(Extension) * @see Extension#setAddOn(AddOn) */ public void removeLoadedExtension(Extension extension) { if (extension == null) { throw new IllegalArgumentException("Parameter extension must not be null."); } if (loadedExtensions != null && loadedExtensions.contains(extension)) { loadedExtensions.remove(extension); extension.setAddOn(null); } } public List<String> getAscanrules() { return ascanrules; } /** * Gets the active scan rules of this add-on that were loaded. * * @return an unmodifiable {@code List} with the active scan rules of this add-on that were loaded, never {@code null} * @since 2.4.3 * @see #setLoadedAscanrules(List) */ public List<AbstractPlugin> getLoadedAscanrules() { return loadedAscanrules; } /** * Sets the loaded active scan rules of the add-on, allowing to set the status of the active scan rules appropriately and to * keep track of the active scan rules loaded so that they can be removed during uninstallation. * <p> * <strong>Note:</strong> Helper method to be used (only) by/during (un)installation process and loading of the add-on. * Should be called when installing/loading the add-on, by setting the loaded active scan rules, and when uninstalling by * setting an empty list. The method {@code setLoadedAscanrulesSet(boolean)} should also be called. * * @param ascanrules the active scan rules loaded, might be empty if none were actually loaded * @throws IllegalArgumentException if {@code ascanrules} is {@code null}. 
* @since 2.4.3 * @see #setLoadedAscanrulesSet(boolean) * @see AbstractPlugin#setStatus(Status) */ void setLoadedAscanrules(List<AbstractPlugin> ascanrules) { if (ascanrules == null) { throw new IllegalArgumentException("Parameter ascanrules must not be null."); } if (ascanrules.isEmpty()) { loadedAscanrules = Collections.emptyList(); return; } for (AbstractPlugin ascanrule : ascanrules) { ascanrule.setStatus(getStatus()); } loadedAscanrules = Collections.unmodifiableList(new ArrayList<>(ascanrules)); } /** * Tells whether or not the loaded active scan rules of the add-on, if any, were already set to the add-on. * <p> * <strong>Note:</strong> Helper method to be used (only) by/during (un)installation process and loading of the add-on. * * @return {@code true} if the loaded active scan rules were already set, {@code false} otherwise * @since 2.4.3 * @see #setLoadedAscanrules(List) * @see #setLoadedAscanrulesSet(boolean) */ boolean isLoadedAscanrulesSet() { return loadedAscanRulesSet; } /** * Sets whether or not the loaded active scan rules, if any, where already set to the add-on. * <p> * <strong>Note:</strong> Helper method to be used (only) by/during (un)installation process and loading of the add-on. The * method should be called, with {@code true} during installation/loading and {@code false} during uninstallation, after * calling the method {@code setLoadedAscanrules(List)}. * * @param ascanrulesSet {@code true} if the loaded active scan rules were already set, {@code false} otherwise * @since 2.4.3 * @see #setLoadedAscanrules(List) */ void setLoadedAscanrulesSet(boolean ascanrulesSet) { loadedAscanRulesSet = ascanrulesSet; } public List<String> getPscanrules() { return pscanrules; } /** * Gets the passive scan rules of this add-on that were loaded. 
* * @return an unmodifiable {@code List} with the passive scan rules of this add-on that were loaded, never {@code null} * @since 2.4.3 * @see #setLoadedPscanrules(List) */ public List<PluginPassiveScanner> getLoadedPscanrules() { return loadedPscanrules; } /** * Sets the loaded passive scan rules of the add-on, allowing to set the status of the passive scan rules appropriately and * keep track of the passive scan rules loaded so that they can be removed during uninstallation. * <p> * <strong>Note:</strong> Helper method to be used (only) by/during (un)installation process and loading of the add-on. * Should be called when installing/loading the add-on, by setting the loaded passive scan rules, and when uninstalling by * setting an empty list. The method {@code setLoadedPscanrulesSet(boolean)} should also be called. * * @param pscanrules the passive scan rules loaded, might be empty if none were actually loaded * @throws IllegalArgumentException if {@code pscanrules} is {@code null}. * @since 2.4.3 * @see #setLoadedPscanrulesSet(boolean) * @see PluginPassiveScanner#setStatus(Status) */ void setLoadedPscanrules(List<PluginPassiveScanner> pscanrules) { if (pscanrules == null) { throw new IllegalArgumentException("Parameter pscanrules must not be null."); } if (pscanrules.isEmpty()) { loadedPscanrules = Collections.emptyList(); return; } for (PluginPassiveScanner pscanrule : pscanrules) { pscanrule.setStatus(getStatus()); } loadedPscanrules = Collections.unmodifiableList(new ArrayList<>(pscanrules)); } /** * Tells whether or not the loaded passive scan rules of the add-on, if any, were already set to the add-on. * <p> * <strong>Note:</strong> Helper method to be used (only) by/during (un)installation process and loading of the add-on. 
* * @return {@code true} if the loaded passive scan rules were already set, {@code false} otherwise * @since 2.4.3 * @see #setLoadedPscanrules(List) * @see #setLoadedPscanrulesSet(boolean) */ boolean isLoadedPscanrulesSet() { return loadedPscanRulesSet; } /** * Sets whether or not the loaded passive scan rules, if any, where already set to the add-on. * <p> * <strong>Note:</strong> Helper method to be used (only) by/during (un)installation process and loading of the add-on. The * method should be called, with {@code true} during installation/loading and {@code false} during uninstallation, after * calling the method {@code setLoadedPscanrules(List)}. * * @param pscanrulesSet {@code true} if the loaded passive scan rules were already set, {@code false} otherwise * @since 2.4.3 * @see #setLoadedPscanrules(List) */ void setLoadedPscanrulesSet(boolean pscanrulesSet) { loadedPscanRulesSet = pscanrulesSet; } public List<String> getFiles() { return files; } public boolean isSameAddOn(AddOn addOn) { return this.getId().equals(addOn.getId()); } public boolean isUpdateTo(AddOn addOn) throws IllegalArgumentException { if (! this.isSameAddOn(addOn)) { throw new IllegalArgumentException("Different addons: " + this.getId() + " != " + addOn.getId()); } if (this.getFileVersion() > addOn.getFileVersion()) { return true; } return this.getStatus().ordinal() > addOn.getStatus().ordinal(); } /** * @deprecated (2.4.0) Use {@link #calculateRunRequirements(Collection)} instead. Returns {@code false}. * @return {@code false} always. */ @Deprecated public boolean canLoad() { return false; } /** * Tells whether or not this add-on can be loaded in the currently running ZAP version, as given by * {@code Constant.PROGRAM_VERSION}. 
* * @return {@code true} if the add-on can be loaded in the currently running ZAP version, {@code false} otherwise * @see #canLoadInVersion(String) * @see Constant#PROGRAM_VERSION */ public boolean canLoadInCurrentVersion() { return canLoadInVersion(Constant.PROGRAM_VERSION); } /** * Tells whether or not this add-on can be run in the currently running Java version. * <p> * This is a convenience method that calls {@code canRunInJavaVersion(String)} with the running Java version (as given by * {@code SystemUtils.JAVA_VERSION}) as parameter. * * @return {@code true} if the add-on can be run in the currently running Java version, {@code false} otherwise * @since 2.4.0 * @see #canRunInJavaVersion(String) * @see SystemUtils#JAVA_VERSION */ public boolean canRunInCurrentJavaVersion() { return canRunInJavaVersion(SystemUtils.JAVA_VERSION); } /** * Tells whether or not this add-on can be run in the given {@code javaVersion}. * <p> * If the given {@code javaVersion} is {@code null} and this add-on depends on a specific java version the method returns * {@code false}. * * @param javaVersion the java version that will be checked * @return {@code true} if the add-on can be loaded in the given {@code javaVersion}, {@code false} otherwise * @since 2.4.0 */ public boolean canRunInJavaVersion(String javaVersion) { if (dependencies == null) { return true; } String requiredVersion = dependencies.getJavaVersion(); if (requiredVersion == null) { return true; } if (javaVersion == null) { return false; } return getJavaVersion(javaVersion) >= getJavaVersion(requiredVersion); } /** * Calculates the requirements to run this add-on, in the current ZAP and Java versions and with the given * {@code availableAddOns}. * <p> * If the add-on depends on other add-ons, those add-ons are also checked if are also runnable. 
* <p> * <strong>Note:</strong> All the given {@code availableAddOns} are expected to be loadable in the currently running ZAP * version, that is, the method {@code AddOn.canLoadInCurrentVersion()}, returns {@code true}. * * @param availableAddOns the other add-ons available * @return a requirements to run the add-on, and if not runnable the reason why it's not. * @since 2.4.0 * @see #canLoadInCurrentVersion() * @see #canRunInCurrentJavaVersion() * @see AddOnRunRequirements */ public AddOnRunRequirements calculateRunRequirements(Collection<AddOn> availableAddOns) { AddOnRunRequirements requirements = new AddOnRunRequirements(this); calculateRunRequirementsImpl(availableAddOns, requirements, null, this); if (requirements.isRunnable()) { checkExtensionsWithDeps(availableAddOns, requirements, this); } return requirements; } private static void calculateRunRequirementsImpl( Collection<AddOn> availableAddOns, BaseRunRequirements requirements, AddOn parent, AddOn addOn) { AddOn installedVersion = getAddOn(availableAddOns, addOn.getId()); if (installedVersion != null && !addOn.equals(installedVersion)) { requirements.setIssue(new OlderVersionDependencyIssue(), installedVersion); if (logger.isDebugEnabled()) { logger.debug("Add-on " + addOn + " not runnable, old version still installed: " + installedVersion); } return; } if (!requirements.addDependency(parent, addOn)) { logger.warn("Cyclic dependency detected with: " + requirements.getDependencies()); requirements.setIssue(new CyclicDependencyIssue(), requirements.getDependencies()); return; } if (addOn.dependencies == null) { return; } if (!addOn.canRunInCurrentJavaVersion()) { requirements.setMinimumJavaVersionIssue(addOn, addOn.dependencies.getJavaVersion()); } for (AddOnDep dependency : addOn.dependencies.getAddOns()) { String addOnId = dependency.getId(); if (addOnId != null) { AddOn addOnDep = getAddOn(availableAddOns, addOnId); if (addOnDep == null) { requirements.setIssue(new MissingDependencyIssue(), addOnId); 
return; } if (dependency.getNotBeforeVersion() > -1 && addOnDep.fileVersion < dependency.getNotBeforeVersion()) { requirements.setIssue(new OutdatedDepencyIssue(), addOnDep, Integer.valueOf(dependency.getNotBeforeVersion())); return; } if (dependency.getNotFromVersion() > -1 && addOnDep.fileVersion > dependency.getNotFromVersion()) { requirements.setIssue(new NewerDepencyIssue(), addOnDep, Integer.valueOf(dependency.getNotFromVersion())); return; } if (!dependency.getSemVer().isEmpty()) { if (addOnDep.version == null || !addOnDep.version.matches(dependency.getSemVer())) { requirements.setIssue(new DifferentSemanticsDependencyIssue(), addOnDep, dependency.getSemVer()); return; } } calculateRunRequirementsImpl(availableAddOns, requirements, addOn, addOnDep); if (requirements.hasDependencyIssue()) { return; } } } } private static void checkExtensionsWithDeps(Collection<AddOn> availableAddOns, AddOnRunRequirements requirements, AddOn addOn) { if (addOn.extensionsWithDeps.isEmpty()) { return; } for (ExtensionWithDeps extension : addOn.extensionsWithDeps) { calculateExtensionRunRequirements(extension, availableAddOns, requirements, addOn); } } private static void calculateExtensionRunRequirements( ExtensionWithDeps extension, Collection<AddOn> availableAddOns, AddOnRunRequirements requirements, AddOn addOn) { ExtensionRunRequirements extensionRequirements = new ExtensionRunRequirements(addOn, extension.getClassname()); requirements.addExtensionRequirements(extensionRequirements); for (AddOnDep dependency : extension.getDependencies()) { String addOnId = dependency.getId(); if (addOnId == null) { continue; } AddOn addOnDep = getAddOn(availableAddOns, addOnId); if (addOnDep == null) { if (addOn.hasOnlyOneExtensionWithDependencies()) { requirements.setIssue(new MissingDependencyIssue(), addOnId); return; } extensionRequirements.setIssue(new MissingDependencyIssue(), addOnId); continue; } if (dependency.getNotBeforeVersion() > -1 && addOnDep.fileVersion < 
dependency.getNotBeforeVersion()) { if (addOn.hasOnlyOneExtensionWithDependencies()) { requirements.setIssue( new OutdatedDepencyIssue(), addOnDep, Integer.valueOf(dependency.getNotBeforeVersion())); return; } extensionRequirements.setIssue( new OutdatedDepencyIssue(), addOnDep, Integer.valueOf(dependency.getNotBeforeVersion())); continue; } if (dependency.getNotFromVersion() > -1 && addOnDep.fileVersion > dependency.getNotFromVersion()) { if (addOn.hasOnlyOneExtensionWithDependencies()) { requirements.setIssue( new OutdatedDepencyIssue(), addOnDep, Integer.valueOf(dependency.getNotFromVersion())); return; } extensionRequirements.setIssue( new OutdatedDepencyIssue(), addOnDep, Integer.valueOf(dependency.getNotFromVersion())); continue; } if (!dependency.getSemVer().isEmpty()) { if (addOnDep.version == null || !addOnDep.version.matches(dependency.getSemVer())) { if (addOn.hasOnlyOneExtensionWithDependencies()) { requirements.setIssue(new DifferentSemanticsDependencyIssue(), addOnDep, dependency.getSemVer()); return; } extensionRequirements.setIssue(new DifferentSemanticsDependencyIssue(), addOnDep, dependency.getSemVer()); continue; } } calculateRunRequirementsImpl(availableAddOns, extensionRequirements, addOn, addOnDep); } } private boolean hasOnlyOneExtensionWithDependencies() { if (extensionsWithDeps.size() != 1) { return false; } if (extensions.isEmpty() && files.isEmpty() && pscanrules.isEmpty() && ascanrules.isEmpty()) { return true; } return false; } /** * Calculates the requirements to run the given {@code extension}, in the current ZAP and Java versions and with the given * {@code availableAddOns}. * <p> * If the extension depends on other add-ons, those add-ons are checked if are also runnable. * <p> * <strong>Note:</strong> All the given {@code availableAddOns} are expected to be loadable in the currently running ZAP * version, that is, the method {@code AddOn.canLoadInCurrentVersion()}, returns {@code true}. 
* * @param extension the extension that will be checked * @param availableAddOns the add-ons available * @return the requirements to run the extension, and if not runnable the reason why it's not. * @since 2.4.0 * @see AddOnRunRequirements#getExtensionRequirements() */ public AddOnRunRequirements calculateExtensionRunRequirements(Extension extension, Collection<AddOn> availableAddOns) { return calculateExtensionRunRequirements(extension.getClass().getCanonicalName(), availableAddOns); } /** * Calculates the requirements to run the extension with the given {@code classname}, in the current ZAP and Java versions * and with the given {@code availableAddOns}. * <p> * If the extension depends on other add-ons, those add-ons are checked if are also runnable. * <p> * <strong>Note:</strong> All the given {@code availableAddOns} are expected to be loadable in the currently running ZAP * version, that is, the method {@code AddOn.canLoadInCurrentVersion()}, returns {@code true}. * * @param classname the classname of extension that will be checked * @param availableAddOns the add-ons available * @return the requirements to run the extension, and if not runnable the reason why it's not. * @since 2.4.0 * @see AddOnRunRequirements#getExtensionRequirements() */ public AddOnRunRequirements calculateExtensionRunRequirements(String classname, Collection<AddOn> availableAddOns) { AddOnRunRequirements requirements = new AddOnRunRequirements(this); for (ExtensionWithDeps extensionWithDeps : extensionsWithDeps) { if (extensionWithDeps.getClassname().equals(classname)) { calculateExtensionRunRequirements(extensionWithDeps, availableAddOns, requirements, this); break; } } return requirements; } /** * Tells whether or not the given {@code extension} has a (direct) dependency on the given {@code addOn} (including * version). 
* * @param extension the extension that will be checked * @param addOn the add-on that will be checked in the dependencies on the extension * @return {@code true} if the extension depends on the given add-on, {@code false} otherwise. * @since 2.4.0 */ public boolean dependsOn(Extension extension, AddOn addOn) { String classname = extension.getClass().getCanonicalName(); for (ExtensionWithDeps extensionWithDeps : extensionsWithDeps) { if (extensionWithDeps.getClassname().equals(classname)) { return dependsOn(extensionWithDeps.getDependencies(), addOn); } } return false; } private static boolean dependsOn(List<AddOnDep> dependencies, AddOn addOn) { for (AddOnDep dependency : dependencies) { if (dependency.getId().equals(addOn.id)) { if (dependency.getNotBeforeVersion() > -1 && addOn.fileVersion < dependency.getNotBeforeVersion()) { return false; } if (dependency.getNotFromVersion() > -1 && addOn.fileVersion > dependency.getNotFromVersion()) { return false; } if (!dependency.getSemVer().isEmpty()) { if (addOn.version == null) { return false; } else if (!addOn.version.matches(dependency.getSemVer())) { return false; } } return true; } } return false; } /** * Tells whether or not the extension with the given {@code classname} is loaded. * * @param classname the classname of the extension * @return {@code true} if the extension is loaded, {@code false} otherwise * @since 2.4.0 */ public boolean isExtensionLoaded(String classname) { List<Extension> allExtensions = new ArrayList<>(getLoadedExtensions().size() + getLoadedExtensionsWithDeps().size()); allExtensions.addAll(getLoadedExtensions()); allExtensions.addAll(getLoadedExtensionsWithDeps()); for (Extension extension : getLoadedExtensions()) { if (classname.equals(extension.getClass().getCanonicalName())) { return true; } } return false; } /** * Returns the minimum Java version required to run this add-on or an empty {@code String} if there's no minimum version. 
* * @return the minimum Java version required to run this add-on or an empty {@code String} if no minimum version * @since 2.4.0 */ public String getMinimumJavaVersion() { if (dependencies == null) { return ""; } return dependencies.getJavaVersion(); } /** * Gets the add-on with the given {@code id} from the given collection of {@code addOns}. * * @param addOns the collection of add-ons where the search will be made * @param id the id of the add-on to search for * @return the {@code AddOn} with the given id, or {@code null} if not found */ private static AddOn getAddOn(Collection<AddOn> addOns, String id) { for (AddOn addOn : addOns) { if (addOn.getId().equals(id)) { return addOn; } } return null; } /** * Tells whether or not this add-on can be loaded in the given {@code zapVersion}. * * @param zapVersion the ZAP version that will be checked * @return {@code true} if the add-on can be loaded in the given {@code zapVersion}, {@code false} otherwise */ public boolean canLoadInVersion(String zapVersion) { // Require add-ons to declare the version they implement if (this.notBeforeVersion == null || this.notBeforeVersion.isEmpty()) { return false; } ZapReleaseComparitor zrc = new ZapReleaseComparitor(); ZapRelease zr = new ZapRelease(zapVersion); ZapRelease notBeforeRelease = new ZapRelease(this.notBeforeVersion); if (zrc.compare(zr, notBeforeRelease) < 0) { return false; } if (zrc.compare(notBeforeRelease, v2_4) < 0) { // Dont load any add-ons that imply they are prior to 2.4.0 - they probably wont work return false; } if (this.notFromVersion != null && this.notFromVersion.length() > 0) { ZapRelease notFromRelease = new ZapRelease(this.notFromVersion); return (zrc.compare(zr, notFromRelease) < 0); } return true; } public void setNotBeforeVersion(String notBeforeVersion) { this.notBeforeVersion = notBeforeVersion; } public void setNotFromVersion(String notFromVersion) { this.notFromVersion = notFromVersion; } public String getNotBeforeVersion() { return 
notBeforeVersion; } public String getNotFromVersion() { return notFromVersion; } public URL getInfo() { return info; } public void setInfo(URL info) { this.info = info; } public String getHash() { return hash; } /** * Returns the IDs of the add-ons dependencies, an empty collection if none. * * @return the IDs of the dependencies. * @since 2.4.0 */ public List<String> getIdsAddOnDependencies() { if (dependencies == null) { return Collections.emptyList(); } List<String> ids = new ArrayList<>(dependencies.getAddOns().size()); for (AddOnDep dep : dependencies.getAddOns()) { ids.add(dep.getId()); } return ids; } /** * Tells whether or not this add-on has a (direct) dependency on the given {@code addOn} (including version). * * @param addOn the add-on that will be checked * @return {@code true} if it depends on the given add-on, {@code false} otherwise. * @since 2.4.0 */ public boolean dependsOn(AddOn addOn) { if (dependencies == null || dependencies.getAddOns().isEmpty()) { return false; } return dependsOn(dependencies.getAddOns(), addOn); } /** * Tells whether or not this add-on has a (direct) dependency on any of the given {@code addOns} (including version). * * @param addOns the add-ons that will be checked * @return {@code true} if it depends on any of the given add-ons, {@code false} otherwise. * @since 2.4.0 */ public boolean dependsOn(Collection<AddOn> addOns) { if (dependencies == null || dependencies.getAddOns().isEmpty()) { return false; } for (AddOn addOn : addOns) { if (dependsOn(addOn)) { return true; } } return false; } @Override public String toString() { StringBuilder strBuilder = new StringBuilder(); strBuilder.append("[id=").append(id); strBuilder.append(", fileVersion=").append(fileVersion); if (version != null) { strBuilder.append(", version=").append(version); } strBuilder.append(']'); return strBuilder.toString(); } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((id == null) ? 
0 : id.hashCode()); result = prime * result + fileVersion; result = prime * result + ((version == null) ? 0 : version.hashCode()); return result; } /** * Two add-ons are considered equal if both add-ons have the same ID, file version and semantic version. */ @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } AddOn other = (AddOn) obj; if (id == null) { if (other.id != null) { return false; } } else if (!id.equals(other.id)) { return false; } if (fileVersion != other.fileVersion) { return false; } if (version == null) { if (other.version != null) { return false; } } else if (!version.equals(other.version)) { return false; } return true; } static int getJavaVersion(String javaVersion) { return toVersionInt(toJavaVersionIntArray(javaVersion, 2)); } // NOTE: Following 2 methods copied from org.apache.commons.lang.SystemUtils version 2.6 because of constrained visibility private static int[] toJavaVersionIntArray(String version, int limit) { if (version == null) { return ArrayUtils.EMPTY_INT_ARRAY; } String[] strings = StringUtils.split(version, "._- "); int[] ints = new int[Math.min(limit, strings.length)]; int j = 0; for (int i = 0; i < strings.length && j < limit; i++) { String s = strings[i]; if (s.length() > 0) { try { ints[j] = Integer.parseInt(s); j++; } catch (Exception e) { } } } if (ints.length > j) { int[] newInts = new int[j]; System.arraycopy(ints, 0, newInts, 0, j); ints = newInts; } return ints; } private static int toVersionInt(int[] javaVersions) { if (javaVersions == null) { return 0; } int intVersion = 0; int len = javaVersions.length; if (len >= 1) { intVersion = javaVersions[0] * 100; } if (len >= 2) { intVersion += javaVersions[1] * 10; } if (len >= 3) { intVersion += javaVersions[2]; } return intVersion; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.jmx; import java.lang.management.ManagementFactory; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Date; import javax.management.InstanceNotFoundException; import javax.management.MBeanRegistrationException; import javax.management.MBeanServer; import javax.management.MBeanServerInvocationHandler; import javax.management.MalformedObjectNameException; import javax.management.ObjectName; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.jmx.beans.ISimpleMXBean; import org.apache.camel.component.jmx.beans.SimpleBean; import org.apache.camel.component.mock.MockEndpoint; import org.apache.camel.impl.DefaultCamelContext; import org.apache.camel.support.SimpleRegistry; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; /** * MBean that is registered for the unit tests. The fixture will register a bean and provide access to the mxbean so * tests can invoke methods on the mxbean to trigger notifications. 
 */
public class SimpleBeanFixture {

    /** domain to use for the mbean */
    protected static final String DOMAIN = "TestDomain";

    /** key for the object name */
    protected static final String NAME = "name";

    /** platform mbean server the fixture bean is registered on */
    protected MBeanServer server;

    /** camel context to stand up for the test */
    private DefaultCamelContext mContext = new DefaultCamelContext();

    /** registry to store referenced beans (i.e. objectProperties or NotificationFilter) */
    private SimpleRegistry mRegistry = new SimpleRegistry();

    /** destination for the simple route created. */
    private MockEndpointFixture mMockEndpoint;

    /** Stands up the fixture: mbean server, test bean, registry, context, and route. */
    @BeforeEach
    public void setUp() throws Exception {
        initServer();
        initBean();
        initRegistry();
        initContext();
        startContext();
    }

    protected void startContext() throws Exception {
        mContext.start();
    }

    /** Stops the context (if still running) and removes the fixture bean from the mbean server. */
    @AfterEach
    public void tearDown() throws Exception {
        if (!mContext.isStopped()) {
            mContext.stop();
        }
        unregisterBean(makeObjectName("simpleBean"));
    }

    protected void initServer() throws Exception {
        server = ManagementFactory.getPlatformMBeanServer();
    }

    /**
     * Registers the bean on the platform mbean server
     */
    protected void registerBean(Object aBean, ObjectName aObjectName) throws Exception {
        server.registerMBean(aBean, aObjectName);
    }

    /**
     * Unregisters the bean
     */
    protected void unregisterBean(ObjectName aObjectName) throws MBeanRegistrationException, InstanceNotFoundException {
        server.unregisterMBean(aObjectName);
    }

    /**
     * Gets the mxbean for our remote object using the specified name
     */
    protected ISimpleMXBean getMXBean(ObjectName aObjectName) {
        return MBeanServerInvocationHandler.newProxyInstance(
                server,
                aObjectName,
                ISimpleMXBean.class,
                false);
        // revert the above change to the below when we move to JDK 1.6
        // ISimpleMXBean simpleBean = JMX.newMXBeanProxy(server, aObjectName, ISimpleMXBean.class);
        // return simpleBean;
    }

    /**
     * Gets the mxbean for our remote object using the default name "simpleBean"
     */
    protected ISimpleMXBean getSimpleMXBean() throws MalformedObjectNameException {
        return getMXBean(makeObjectName("simpleBean"));
    }

    /**
     * Makes an ObjectName for the given domain using our domain and the name attribute.
     */
    protected ObjectName makeObjectName(String aName) throws MalformedObjectNameException {
        ObjectName objectName = new ObjectName(DOMAIN, NAME, aName);
        return objectName;
    }

    /**
     * Creates the bean and registers it within the mbean server. Note that we're using a fixed timestamp here to
     * simplify the assertions in the tests
     */
    protected void initBean() throws Exception {
        registerBean(createSimpleBean(), makeObjectName("simpleBean"));
    }

    protected SimpleBean createSimpleBean() throws ParseException {
        SimpleBean simpleBean = new SimpleBean();

        // NOTE(review): the pattern is "yyyy-dd-MM" (day before month) while the input string reads
        // like ISO "yyyy-MM-dd", so the parsed date is Jan 7 2010, not Jul 1 2010. The tests may
        // assert on this exact timestamp - confirm before "fixing" the pattern.
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-dd-MM'T'HH:mm:ss");
        Date date = sdf.parse("2010-07-01T10:30:15");
        simpleBean.setTimestamp(date.getTime());
        return simpleBean;
    }

    /**
     * Initializes the camel context by creating a simple route from our mbean to the mock endpoint.
     */
    protected void initContext() throws Exception {
        final MockEndpoint mock = mContext.getEndpoint("mock:sink", MockEndpoint.class);
        mock.setExpectedMessageCount(1);
        mMockEndpoint = new MockEndpointFixture(mock);
        mContext.setRegistry(getRegistry());
        mContext.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from(buildFromURI().toString()).to(mock);
            }
        });
    }

    /**
     * Override this to control the properties that make up the endpoint
     */
    protected JMXUriBuilder buildFromURI() {
        JMXUriBuilder uri = new JMXUriBuilder().withObjectDomain(DOMAIN)
                .withObjectName("simpleBean");
        return uri;
    }

    /**
     * Override this to put stuff into the registry so it's available to be referenced. (i.e. NotificationFilter or
     * Hashtable&lt;String,String&gt; for ObjectProperties
     */
    protected void initRegistry() {
    }

    protected DefaultCamelContext getContext() {
        return mContext;
    }

    protected void setContext(DefaultCamelContext aContext) {
        mContext = aContext;
    }

    protected SimpleRegistry getRegistry() {
        return mRegistry;
    }

    protected void setRegistry(SimpleRegistry aRegistry) {
        mRegistry = aRegistry;
    }

    protected MockEndpointFixture getMockFixture() {
        return mMockEndpoint;
    }
}
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/container/v1/cluster_service.proto package com.google.container.v1; /** * * * <pre> * Configuration for NodeLocal DNSCache * </pre> * * Protobuf type {@code google.container.v1.DnsCacheConfig} */ public final class DnsCacheConfig extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.container.v1.DnsCacheConfig) DnsCacheConfigOrBuilder { private static final long serialVersionUID = 0L; // Use DnsCacheConfig.newBuilder() to construct. 
private DnsCacheConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DnsCacheConfig() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new DnsCacheConfig(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private DnsCacheConfig( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { enabled_ = input.readBool(); break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.container.v1.ClusterServiceProto .internal_static_google_container_v1_DnsCacheConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.container.v1.ClusterServiceProto .internal_static_google_container_v1_DnsCacheConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.container.v1.DnsCacheConfig.class, com.google.container.v1.DnsCacheConfig.Builder.class); } public static final int ENABLED_FIELD_NUMBER 
= 1; private boolean enabled_; /** * * * <pre> * Whether NodeLocal DNSCache is enabled for this cluster. * </pre> * * <code>bool enabled = 1;</code> * * @return The enabled. */ @java.lang.Override public boolean getEnabled() { return enabled_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (enabled_ != false) { output.writeBool(1, enabled_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (enabled_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(1, enabled_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.container.v1.DnsCacheConfig)) { return super.equals(obj); } com.google.container.v1.DnsCacheConfig other = (com.google.container.v1.DnsCacheConfig) obj; if (getEnabled() != other.getEnabled()) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + ENABLED_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getEnabled()); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.container.v1.DnsCacheConfig parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public 
static com.google.container.v1.DnsCacheConfig parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.container.v1.DnsCacheConfig parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.container.v1.DnsCacheConfig parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.container.v1.DnsCacheConfig parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.container.v1.DnsCacheConfig parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.container.v1.DnsCacheConfig parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.container.v1.DnsCacheConfig parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.container.v1.DnsCacheConfig parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.container.v1.DnsCacheConfig parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.container.v1.DnsCacheConfig parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.container.v1.DnsCacheConfig parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.container.v1.DnsCacheConfig prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Configuration for NodeLocal DNSCache * </pre> * * Protobuf type {@code google.container.v1.DnsCacheConfig} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.container.v1.DnsCacheConfig) com.google.container.v1.DnsCacheConfigOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.container.v1.ClusterServiceProto .internal_static_google_container_v1_DnsCacheConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.container.v1.ClusterServiceProto .internal_static_google_container_v1_DnsCacheConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.container.v1.DnsCacheConfig.class, com.google.container.v1.DnsCacheConfig.Builder.class); } // Construct using com.google.container.v1.DnsCacheConfig.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); enabled_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.container.v1.ClusterServiceProto .internal_static_google_container_v1_DnsCacheConfig_descriptor; } @java.lang.Override public com.google.container.v1.DnsCacheConfig getDefaultInstanceForType() { return 
com.google.container.v1.DnsCacheConfig.getDefaultInstance(); } @java.lang.Override public com.google.container.v1.DnsCacheConfig build() { com.google.container.v1.DnsCacheConfig result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.container.v1.DnsCacheConfig buildPartial() { com.google.container.v1.DnsCacheConfig result = new com.google.container.v1.DnsCacheConfig(this); result.enabled_ = enabled_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.container.v1.DnsCacheConfig) { return mergeFrom((com.google.container.v1.DnsCacheConfig) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.container.v1.DnsCacheConfig other) { if (other == com.google.container.v1.DnsCacheConfig.getDefaultInstance()) return this; if (other.getEnabled() != false) { setEnabled(other.getEnabled()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } 
@java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.container.v1.DnsCacheConfig parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.container.v1.DnsCacheConfig) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private boolean enabled_; /** * * * <pre> * Whether NodeLocal DNSCache is enabled for this cluster. * </pre> * * <code>bool enabled = 1;</code> * * @return The enabled. */ @java.lang.Override public boolean getEnabled() { return enabled_; } /** * * * <pre> * Whether NodeLocal DNSCache is enabled for this cluster. * </pre> * * <code>bool enabled = 1;</code> * * @param value The enabled to set. * @return This builder for chaining. */ public Builder setEnabled(boolean value) { enabled_ = value; onChanged(); return this; } /** * * * <pre> * Whether NodeLocal DNSCache is enabled for this cluster. * </pre> * * <code>bool enabled = 1;</code> * * @return This builder for chaining. 
*/ public Builder clearEnabled() { enabled_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.container.v1.DnsCacheConfig) } // @@protoc_insertion_point(class_scope:google.container.v1.DnsCacheConfig) private static final com.google.container.v1.DnsCacheConfig DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.container.v1.DnsCacheConfig(); } public static com.google.container.v1.DnsCacheConfig getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<DnsCacheConfig> PARSER = new com.google.protobuf.AbstractParser<DnsCacheConfig>() { @java.lang.Override public DnsCacheConfig parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new DnsCacheConfig(input, extensionRegistry); } }; public static com.google.protobuf.Parser<DnsCacheConfig> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<DnsCacheConfig> getParserForType() { return PARSER; } @java.lang.Override public com.google.container.v1.DnsCacheConfig getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the
 * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.android.systemui.qs;

import android.util.Log;
import android.view.View;
import android.view.View.OnAttachStateChangeListener;
import android.view.View.OnLayoutChangeListener;
import android.widget.TextView;

import com.android.systemui.qs.PagedTileLayout.PageListener;
import com.android.systemui.qs.QSPanel.QSTileLayout;
import com.android.systemui.qs.QSTile.Host.Callback;
import com.android.systemui.qs.TouchAnimator.Builder;
import com.android.systemui.qs.TouchAnimator.Listener;
import com.androidemui.statusbar.phone.QSTileHost;
import com.android.systemui.tuner.TunerService;
import com.android.systemui.tuner.TunerService.Tunable;

import java.util.ArrayList;
import java.util.Collection;

/**
 * Drives the quick-settings expansion animation: as the shade is pulled down, the icons of
 * the first few tiles translate from their positions in the collapsed {@link QuickQSPanel}
 * header to their final positions in the full {@link QSPanel}, while the remaining tiles
 * fade in. Animator positions are driven externally via {@link #setPosition(float)} with a
 * value in [0, 1].
 */
public class QSAnimator implements Callback, PageListener, Listener, OnLayoutChangeListener,
        OnAttachStateChangeListener, Tunable {

    private static final String TAG = "QSAnimator";

    /** Tuner keys that enable/disable the fancy animation and whole-row movement. */
    private static final String ALLOW_FANCY_ANIMATION = "sysui_qs_fancy_anim";
    private static final String MOVE_FULL_ROWS = "sysui_qs_move_whole_rows";

    /** Fraction of the expansion at which the full panel's tiles start fading in. */
    public static final float EXPANDED_TILE_DELAY = .7f;
    /** Fraction of the expansion at which the last (non-animated) row starts fading in. */
    private static final float LAST_ROW_EXPANDED_DELAY = .86f;

    /** Every view touched by any animator; reset in {@link #clearAnimationState()}. */
    private final ArrayList<View> mAllViews = new ArrayList<>();
    /** Icons of the quick tiles; hidden while the header copies are animating on top. */
    private final ArrayList<View> mTopFiveQs = new ArrayList<>();
    private final QuickQSPanel mQuickQsPanel;
    private final QSPanel mQsPanel;
    private final QSContainer mQsContainer;
    private PagedTileLayout mPagedLayout;

    private boolean mOnFirstPage = true;
    private TouchAnimator mFirstPageAnimator;
    private TouchAnimator mFirstPageDelayedAnimator;
    private TouchAnimator mTranslationXAnimator;
    private TouchAnimator mTranslationYAnimator;
    private TouchAnimator mNonfirstPageAnimator;
    private TouchAnimator mLastRowAnimator;

    private boolean mOnKeyguard;

    private boolean mAllowFancy;
    private boolean mFullRows;
    private int mNumQuickTiles;
    private float mLastPosition;
    private QSTileHost mHost;

    public QSAnimator(QSContainer container, QuickQSPanel quickPanel, QSPanel panel) {
        mQsContainer = container;
        mQuickQsPanel = quickPanel;
        mQsPanel = panel;
        mQsPanel.addOnAttachStateChangeListener(this);
        container.addOnLayoutChangeListener(this);
        QSTileLayout tileLayout = mQsPanel.getTileLayout();
        if (tileLayout instanceof PagedTileLayout) {
            mPagedLayout = ((PagedTileLayout) tileLayout);
            mPagedLayout.setPageListener(this);
        } else {
            Log.w(TAG, "QS Not using page layout");
        }
    }

    /** Positions depend on layout direction, so all animators must be rebuilt on RTL change. */
    public void onRtlChanged() {
        updateAnimators();
    }

    public void setOnKeyguard(boolean onKeyguard) {
        mOnKeyguard = onKeyguard;
        mQuickQsPanel.setVisibility(mOnKeyguard ? View.INVISIBLE : View.VISIBLE);
        if (mOnKeyguard) {
            clearAnimationState();
        }
    }

    public void setHost(QSTileHost qsh) {
        mHost = qsh;
        qsh.addCallback(this);
        updateAnimators();
    }

    @Override
    public void onViewAttachedToWindow(View v) {
        TunerService.get(mQsContainer.getContext()).addTunable(this, ALLOW_FANCY_ANIMATION,
                MOVE_FULL_ROWS, QuickQSPanel.NUM_QUICK_TILES);
    }

    @Override
    public void onViewDetachedFromWindow(View v) {
        if (mHost != null) {
            mHost.removeCallback(this);
        }
        TunerService.get(mQsContainer.getContext()).removeTunable(this);
    }

    @Override
    public void onTuningChanged(String key, String newValue) {
        if (ALLOW_FANCY_ANIMATION.equals(key)) {
            // Null means "not set"; default is enabled.
            mAllowFancy = newValue == null || Integer.parseInt(newValue) != 0;
            if (!mAllowFancy) {
                clearAnimationState();
            }
        } else if (MOVE_FULL_ROWS.equals(key)) {
            mFullRows = newValue == null || Integer.parseInt(newValue) != 0;
        } else if (QuickQSPanel.NUM_QUICK_TILES.equals(key)) {
            mNumQuickTiles = mQuickQsPanel.getNumQuickTiles(mQsContainer.getContext());
            clearAnimationState();
        }
        updateAnimators();
    }

    @Override
    public void onPageChanged(boolean isFirst) {
        if (mOnFirstPage == isFirst) return;
        if (!isFirst) {
            // The fancy tile animation only applies to the first page of tiles.
            clearAnimationState();
        }
        mOnFirstPage = isFirst;
    }

    /**
     * Rebuilds every TouchAnimator from the current tile layout. For each quick tile the
     * header icon is translated towards the matching full-panel icon while the full tile's
     * label is counter-translated; remaining animated-row tiles slide in; everything else
     * just fades in near the end of the expansion.
     */
    private void updateAnimators() {
        TouchAnimator.Builder firstPageBuilder = new Builder();
        TouchAnimator.Builder translationXBuilder = new Builder();
        TouchAnimator.Builder translationYBuilder = new Builder();
        TouchAnimator.Builder lastRowBuilder = new Builder();
        if (mQsPanel.getHost() == null) return;
        Collection<QSTile<?>> tiles = mQsPanel.getHost().getTiles();
        int count = 0;
        int[] loc1 = new int[2];
        int[] loc2 = new int[2];
        int lastXDiff = 0;
        int lastX = 0;
        clearAnimationState();
        mAllViews.clear();
        mTopFiveQs.clear();
        mAllViews.add((View) mQsPanel.getTileLayout());
        for (QSTile<?> tile : tiles) {
            QSTileBaseView tileView = mQsPanel.getTileView(tile);
            final TextView label = ((QSTileView) tileView).getLabel();
            final View tileIcon = tileView.getIcon().getIconView();
            if (count < mNumQuickTiles && mAllowFancy) {
                // Quick tiles.
                QSTileBaseView quickTileView = mQuickQsPanel.getTileView(tile);
                lastX = loc1[0];
                getRelativePosition(loc1, quickTileView.getIcon(), mQsContainer);
                getRelativePosition(loc2, tileIcon, mQsContainer);
                final int xDiff = loc2[0] - loc1[0];
                final int yDiff = loc2[1] - loc1[1];
                lastXDiff = loc1[0] - lastX;
                // Move the quick tile right from its location to the new one.
                translationXBuilder.addFloat(quickTileView, "translationX", 0, xDiff);
                translationYBuilder.addFloat(quickTileView, "translationY", 0, yDiff);
                // Counteract the parent translation on the tile. So we have a static base to
                // animate the label position off from.
                firstPageBuilder.addFloat(tileView, "translationY", mQsPanel.getHeight(), 0);
                // Move the real tile's label from the quick tile position to its final
                // location.
                translationXBuilder.addFloat(label, "translationX", -xDiff, 0);
                translationYBuilder.addFloat(label, "translationY", -yDiff, 0);
                mTopFiveQs.add(tileIcon);
                mAllViews.add(tileIcon);
                mAllViews.add(quickTileView);
            } else if (mFullRows && isIconInAnimatedRow(count)) {
                // TODO: Refactor some of this, it shares a lot with the above block.
                // Move the last tile position over by the last difference between quick tiles.
                // This makes the extra icons seems as if they are coming from positions in the
                // quick panel.
                loc1[0] += lastXDiff;
                getRelativePosition(loc2, tileIcon, mQsContainer);
                final int xDiff = loc2[0] - loc1[0];
                final int yDiff = loc2[1] - loc1[1];
                firstPageBuilder.addFloat(tileView, "translationY", mQsPanel.getHeight(), 0);
                translationXBuilder.addFloat(tileView, "translationX", -xDiff, 0);
                translationYBuilder.addFloat(label, "translationY", -yDiff, 0);
                translationYBuilder.addFloat(tileIcon, "translationY", -yDiff, 0);
                mAllViews.add(tileIcon);
            } else {
                // Tiles not tracked from the header simply fade in late in the expansion.
                lastRowBuilder.addFloat(tileView, "alpha", 0, 1);
            }
            mAllViews.add(tileView);
            mAllViews.add(label);
            count++;
        }
        if (mAllowFancy) {
            mFirstPageAnimator = firstPageBuilder
                    .setListener(this)
                    .build();
            // Fade in the tiles/labels as we reach the final position.
            mFirstPageDelayedAnimator = new TouchAnimator.Builder()
                    .setStartDelay(EXPANDED_TILE_DELAY)
                    .addFloat(mQsPanel.getTileLayout(), "alpha", 0, 1).build();
            mLastRowAnimator = lastRowBuilder
                    .setStartDelay(LAST_ROW_EXPANDED_DELAY)
                    .build();
            // Ease the icon translation along a cubic (0,0)-(0,1)-(1,1) curve, split into
            // separate X and Y interpolators.
            PathInterpolatorBuilder interpolatorBuilder = new PathInterpolatorBuilder(0, 0, 0, 1);
            translationXBuilder.setInterpolator(interpolatorBuilder.getXInterpolator());
            translationYBuilder.setInterpolator(interpolatorBuilder.getYInterpolator());
            mTranslationXAnimator = translationXBuilder.build();
            mTranslationYAnimator = translationYBuilder.build();
        }
        mNonfirstPageAnimator = new TouchAnimator.Builder()
                .addFloat(mQuickQsPanel, "alpha", 1, 0)
                .setListener(mNonFirstPageListener)
                .setEndDelay(.5f)
                .build();
    }

    /** Whether the tile at {@code count} lies in a fully animated row of the first page. */
    private boolean isIconInAnimatedRow(int count) {
        if (mPagedLayout == null) {
            return false;
        }
        final int columnCount = mPagedLayout.getColumnCount();
        return count < ((mNumQuickTiles + columnCount - 1) / columnCount) * columnCount;
    }

    /** Writes into {@code loc1} the x/y of {@code view}'s icon center relative to {@code parent}. */
    private void getRelativePosition(int[] loc1, View view, View parent) {
        loc1[0] = 0 + view.getWidth() / 2;
        loc1[1] = 0;
        getRelativePositionInt(loc1, view, parent);
    }

    private void getRelativePositionInt(int[] loc1, View view, View parent) {
        if(view == parent || view == null) return;
        // Ignore tile pages as they can have some offset we don't want to take into account in
        // RTL.
        if (!(view instanceof PagedTileLayout.TilePage)) {
            loc1[0] += view.getLeft();
            loc1[1] += view.getTop();
        }
        getRelativePositionInt(loc1, (View) view.getParent(), parent);
    }

    /** Drives all animators to {@code position} in [0, 1]; no-op on keyguard. */
    public void setPosition(float position) {
        if (mFirstPageAnimator == null) return;
        if (mOnKeyguard) {
            return;
        }
        mLastPosition = position;
        if (mOnFirstPage && mAllowFancy) {
            mQuickQsPanel.setAlpha(1);
            mFirstPageAnimator.setPosition(position);
            mFirstPageDelayedAnimator.setPosition(position);
            mTranslationXAnimator.setPosition(position);
            mTranslationYAnimator.setPosition(position);
            mLastRowAnimator.setPosition(position);
        } else {
            mNonfirstPageAnimator.setPosition(position);
        }
    }

    @Override
    public void onAnimationAtStart() {
        mQuickQsPanel.setVisibility(View.VISIBLE);
    }

    @Override
    public void onAnimationAtEnd() {
        mQuickQsPanel.setVisibility(View.INVISIBLE);
        final int N = mTopFiveQs.size();
        for (int i = 0; i < N; i++) {
            mTopFiveQs.get(i).setVisibility(View.VISIBLE);
        }
    }

    @Override
    public void onAnimationStarted() {
        mQuickQsPanel.setVisibility(mOnKeyguard ? View.INVISIBLE : View.VISIBLE);
        if (mOnFirstPage) {
            // Hide the real icons while the header copies fly into place.
            final int N = mTopFiveQs.size();
            for (int i = 0; i < N; i++) {
                mTopFiveQs.get(i).setVisibility(View.INVISIBLE);
            }
        }
    }

    /** Resets every animated view to its resting alpha/translation. */
    private void clearAnimationState() {
        final int N = mAllViews.size();
        mQuickQsPanel.setAlpha(0);
        for (int i = 0; i < N; i++) {
            View v = mAllViews.get(i);
            v.setAlpha(1);
            v.setTranslationX(0);
            v.setTranslationY(0);
        }
        final int N2 = mTopFiveQs.size();
        for (int i = 0; i < N2; i++) {
            mTopFiveQs.get(i).setVisibility(View.VISIBLE);
        }
    }

    @Override
    public void onLayoutChange(View v, int left, int top, int right, int bottom, int oldLeft,
            int oldTop, int oldRight, int oldBottom) {
        mQsPanel.post(mUpdateAnimators);
    }

    @Override
    public void onTilesChanged() {
        // Give the QS panels a moment to generate their new tiles, then create all new animators
        // hooked up to the new views.
        mQsPanel.post(mUpdateAnimators);
    }

    private final TouchAnimator.Listener mNonFirstPageListener =
            new TouchAnimator.ListenerAdapter() {
                @Override
                public void onAnimationStarted() {
                    mQuickQsPanel.setVisibility(View.VISIBLE);
                }
            };

    private Runnable mUpdateAnimators = new Runnable() {
        @Override
        public void run() {
            updateAnimators();
            // Re-apply the last position so the rebuilt animators match the current state.
            setPosition(mLastPosition);
        }
    };
}
package org.ratson.pentagrid.gui;

import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.Shape;
import java.awt.geom.GeneralPath;
import java.awt.image.BufferedImage;
import java.util.ArrayList;

import javax.swing.JComponent;

import org.ratson.pentagrid.OrientedPath;
import org.ratson.pentagrid.Path;
import org.ratson.pentagrid.PathNavigation;
import org.ratson.pentagrid.Transform;
import org.ratson.pentagrid.Util;
import org.ratson.pentagrid.fields.SimpleMapField;
import org.ratson.pentagrid.gui.poincare_panel.PoincarePanelEvent;
import org.ratson.pentagrid.gui.poincare_panel.PoincarePanelListener;
import org.ratson.util.Function1;

/**
 * Swing component that renders a cellular field on the {5,4} pentagonal tiling of the
 * hyperbolic plane, projected to the Poincare disk. Only cells within
 * {@code visibleRadius} steps of a movable view center are drawn; the view center is
 * re-based onto the cell nearest the geometric disk center as the user pans, so the
 * view transform stays numerically well-conditioned far from the origin.
 */
@SuppressWarnings("serial")
public class FarPoincarePanel extends JComponent {
	// Cell the view is centered on, with its orientation; all visible cells are relative to it.
	private OrientedPath viewCenter = new OrientedPath(Path.getRoot(), 0);
	private ArrayList<VisibleCell> visibleCells = new ArrayList<FarPoincarePanel.VisibleCell>();
	// Neighborhood radius (in cells) around the view center that is rendered.
	private int visibleRadius = 5;
	// Current view transform (hyperbolic isometry), applied on top of the view center.
	private Transform viewTransform = (new Transform()).setEye();
	private SimpleMapField field = null;
	// Cached projected shape of all live cells; null means it must be rebuilt.
	private Shape cellsShape = null;
	public boolean antiAlias = false;
	public boolean showGrid = true;
	// Cached grid outline; invalidated together with cellsShape on view changes.
	private Shape gridShape = null;
	private GridPainter gridDrawer = new GridPainter( visibleRadius );
	private int viewTfmModifCounter = 0; //how many times view transform was modified.
	private int fixTransformEvery = 100;//fix view transformation matrix every n steps
	public Color clrCell = Color.BLUE;
	public Color clrBorder= Color.BLACK;
	public Color clrGrid= Color.LIGHT_GRAY;
	public Color clrExportBg = Color.WHITE;
	private int margin = 30;
	private ArrayList<PoincarePanelListener> panelEventListeners = new ArrayList<PoincarePanelListener>();

	/**Represents one cell, shown on display.
	 * Stores path to this cell, its relative transformation and state*/
	static final class VisibleCell{
		Path absolutePath;
		Transform relativeTfm;
		int state=0;
		public VisibleCell( Path relative, OrientedPath origin ){
			absolutePath = origin.attach(relative).path;
			relativeTfm = PathNavigation.getTransformation(relative);
		}
		/**Re-reads this cell's state from the field; returns true if it changed.*/
		public boolean updateState( SimpleMapField fld ){
			int oldState = state;
			state = fld.getCell( absolutePath );
			return oldState != state;
		}
	}

	/**Re-generate the array of visible cells (all cells within visibleRadius of viewCenter)*/
	private void rebuildVisibleCells(){
		visibleCells.clear();
		Util.forField(visibleRadius, new Function1<Path, Boolean>() {
			public Boolean call(Path relPath) {
				visibleCells.add(new VisibleCell( relPath, viewCenter ));
				return true;
			}
		});
		visibleCells.trimToSize();
	}

	/**Pulls current states from the field; returns true if any visible cell changed.
	 * Synchronizes on the field, which other threads presumably mutate — TODO confirm.*/
	private boolean updateCellsState(){
		boolean changed = false;
		synchronized(field){
			for ( VisibleCell c : visibleCells ) {
				changed = c.updateState(field) || changed;
			}
		}
		return changed;
	}

	public FarPoincarePanel( SimpleMapField f ){
		field = f;
		rebuildVisibleCells();
		updateCellsState();
	}

	/**Replaces the displayed field and refreshes the view.*/
	public void setField( SimpleMapField f ){
		field = f;
		update();
	}

	/**Changes the visible-neighborhood radius; rebuilds cells only when it actually changes.*/
	public void setViewRadius( int r ){
		assert r >= 1;
		if ( r != visibleRadius ){
			visibleRadius = r;
			rebuildVisibleCells();
			cellsShape = null;
			update();
		}
	}

	@Override
	protected void paintComponent(Graphics g) {
		super.paintComponent(g);
		Dimension sz = getSize();
		Graphics2D g2 = (Graphics2D) g;
		if( antiAlias )
			g2.setRenderingHint( RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
		// Put the origin at the component center; paintContents draws in disk coordinates.
		g2.translate( sz.width/2, sz.height/2);
		paintContents( g2, sz );
	}

	/**Draw grid cells*/
	private void doShowGrid( Graphics2D g2 ) {
		if ( gridShape == null ){
			gridShape = gridDrawer.createShape(viewTransform);
		}
		g2.setColor( clrGrid );
		g2.setStroke( new BasicStroke(0) ); // thinnest possible ("hairline") stroke
		g2.draw( gridShape );
	}

	/**Shift view*/
	public void offsetView(double x, double y){
		Transform offsetTfm = new Transform();
		offsetTfm.setShift( x, y );
		setView( offsetTfm.mul( viewTransform ) );
	}

	/**Rotate view by some angle*/
	public void rotateView( double angle ){
		Transform rot = new Transform();
		rot.setRot(angle);
		setView( rot.mul( viewTransform) );
	}

	/**Creates a shape, that is a projection of the field*/
	private Shape createFieldShape(){
		GeneralPath path = new GeneralPath();
		for ( VisibleCell c: visibleCells) {
			if ( c.state != 0 )
				createCellShape(path, viewTransform.mul( c.relativeTfm) );
		}
		return path;
	}

	/**Precalculated coordinates of a pentagon (5 vertices as x,y pairs, slightly shrunk by 0.9)*/
	private static double[] pentagonPoints = new double[10];
	static{
		double r = Math.sqrt (2 / Math.sqrt (5)) *0.9;
		for (int i = 0; i < 10; i+=2) {
			double angle = Math.PI / 5 * i;
			pentagonPoints[i] = Math.cos(angle)*r;
			pentagonPoints[i+1] = Math.sin(angle)*r;
		}
	}

	/**Appends one projected pentagon to the path; cells too far away (large time-like
	 * component) are culled since they project to sub-pixel size.*/
	private void createCellShape( GeneralPath path, Transform pathTfm ){
		//double[] xyt = pathTfm.tfmVector(new double[]{0,0,1} );
		double t = pathTfm.getAt(2, 2);
		if ( t < 100 )
			PoincareGraphics.renderPoincarePolygon( path, pathTfm, pentagonPoints, true);
	}

	/**Squared Euclidean length.*/
	static double len2( double x, double y ){
		return x*x+y*y;
	}

	/**Pixels per unit of the Poincare disk, leaving a margin around the circle.*/
	private double getScale( Dimension size ){
		return Math.max( 1, 0.5 * ( Math.min( size.width, size.height ) - margin ) );
	}

	public double getScale(){
		return getScale(getSize());
	}

	/**Draws cells, optional grid and the disk-center marker; expects the origin at the
	 * component center (see paintComponent / exportImage).*/
	private void paintContents(Graphics2D g2, Dimension size) {
		if ( cellsShape == null ){
			cellsShape = createFieldShape();
		}
		//AffineTransform oldTfm = g2.getTransform();
		double scale = getScale( size );
		g2.scale(scale, scale);
		g2.setColor(clrCell);
		g2.fill(cellsShape);
		if ( showGrid ) doShowGrid( g2 );
		g2.setColor(clrBorder);
		g2.setStroke( new BasicStroke(0) );
		g2.drawOval(-1, -1, 2, 2);
		//g2.setTransform(oldTfm);
	}

	/**Set view matrix and redraw view.
	 * Periodically re-orthogonalizes the matrix (fix) to limit numeric drift, then
	 * re-bases the view center and invalidates the cached shapes.*/
	private void setView( Transform tfm ){
		viewTransform = tfm;
		viewTfmModifCounter ++;
		if (viewTfmModifCounter > fixTransformEvery ){
			viewTfmModifCounter = 0;
			viewTransform= viewTransform.fix();
		}
		adjustViewCenter();
		cellsShape = null;
		gridShape = null;
		repaint();
	}

	/**Updates cell states and causes repaint*/
	public void update(){
		if ( updateCellsState() ){ //repaint only if some cells were changed
			cellsShape = null;
			repaint();
		}
	}

	/**Move view to the origin*/
	public void centerView(){
		viewTfmModifCounter = 0;
		OrientedPath center = new OrientedPath( Path.getRoot(), 0);
		PoincarePanelEvent event = null;
		if ( ! viewCenter.path.isRoot() )
			event = new PoincarePanelEvent(viewCenter, center);
		viewCenter = center;
		rebuildVisibleCells();
		updateCellsState();
		setView( viewTransform.setEye() );
		if (event != null) fireOriginChanged( event );
	}

	/**Given the point in th view coordinates, return path to the cell, containing it
	 * Path is relative to the view center*/
	public Path mouse2cellPathRel( int x, int y ){
		Dimension sz = getSize();
		double scale = getScale( sz );
		//poincare projection coordinates
		double dx = (x - sz.width/2) / scale;
		double dy = -(y - sz.height/2) / scale;
		//System.out.println("Dx="+dx+" Dy="+dy);
		//restore hyperbolic coordinates
		// x / (t+1);
		double d2 = len2( dx, dy );
		if ( d2 >= 1 ) return null; // outside the unit disk: no cell there
		double s = 2/(1-d2);
		double [] point = new double[]{ dx*s, dy*s, 0.5*(d2+1)*s };
		return PathNavigation.point2path(viewTransform.hypInverse().tfmVector(point));
	}

	/**Absolute path to the cell*/
	public Path mouse2cellPath( int x, int y ){
		Path relativePath = mouse2cellPathRel(x, y);
		return viewCenter.attach(relativePath).path;
	}

	/**Put view center to the specified cell, and reset view offset*/
	public void setOrigin( OrientedPath newCenter ){
		if ( newCenter.equals(viewCenter)) return;
		PoincarePanelEvent event = new PoincarePanelEvent(viewCenter, newCenter);
		viewCenter = newCenter;
		rebuildVisibleCells();
		setView( viewTransform.setEye() );
		update();
		fireOriginChanged( event );
	}

	public OrientedPath getOrigin(){
		return viewCenter;
	}

	/**Shift view origin by the given offset, and adjust transformation so that view does not change*/
	public void rebaseRelative( Path offset ){
		if (offset.isRoot()) return; //nothing to do
		OrientedPath newCenter = viewCenter.attach( offset );
		PoincarePanelEvent e = new PoincarePanelEvent(viewCenter, newCenter);
		Transform offsetTfm = PathNavigation.getTransformation( offset );
		viewCenter = newCenter;
		// Compose the offset into the view transform so the on-screen picture is unchanged.
		viewTransform = viewTransform.mul( offsetTfm );
		rebuildVisibleCells();
		update();
		fireOriginChanged(e);
	}

	/**adjust view center, setting it to the cell, nearest to the geometrical center of the Poincare circle*/
	public void adjustViewCenter(){
		try{
			Path centerPath = PathNavigation.point2path(viewTransform.tfmZeroVectorInv()); //path to the cell at the geometric center
			rebaseRelative( centerPath );
		}catch( RuntimeException err ){
			// Best-effort: keep the old center if the inverse mapping fails numerically.
			System.err.println( "Failed to adjust path: "+err.getMessage() );
		}
	}

	/**Renders the current view into an off-screen image of the given size.*/
	public BufferedImage exportImage( Dimension size, boolean antiAlias ){
		BufferedImage img = new BufferedImage( size.width, size.height, BufferedImage.TYPE_INT_RGB);
		Graphics2D g = (Graphics2D)img.getGraphics();
		g.setColor( clrExportBg );
		g.fillRect(0, 0, size.width, size.height);
		if( antiAlias )
			g.setRenderingHint( RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
		g.translate(size.width/2, size.height/2);
		paintContents( g, size );
		return img;
	}

	public void AddPoincarePanelListener( PoincarePanelListener listener) {
		panelEventListeners.add( listener );
	}

	/**Notifies listeners that the view center cell changed.*/
	private void fireOriginChanged( PoincarePanelEvent e ){
		for( PoincarePanelListener l: panelEventListeners )
			l.originChanged(e);
	}
}
/*
 * Copyright (C) 2017 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.gapid.rpclib.binary;

import com.google.common.collect.Maps;
import com.google.gapid.rpclib.schema.Dynamic;
import com.google.gapid.rpclib.schema.Entity;

import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Map;

/**
 * A decoder of various RPC primitive types.
 * The encoding format is documented at the following link:
 * https://github.com/google/gapid/+/master/binary/doc.go
 */
public class Decoder {
    /** Entities already decoded from the stream, keyed by stream id (sid 0 == null). */
    private final Map<Integer, Entity> mEntities;
    /** Objects already decoded from the stream, keyed by stream id (sid 0 == null). */
    private final Map<Integer, BinaryObject> mObjects;
    private final InputStream mInputStream;
    /** Scratch buffer for fixed-size reads; 9 bytes covers the longest varint. */
    private final byte[] mBuffer;
    private final EncodingControl mControl = new EncodingControl();

    public Decoder(InputStream in) {
        mEntities = Maps.newHashMap();
        mObjects = Maps.newHashMap();
        mInputStream = in;
        mBuffer = new byte[9];
        mEntities.put(0, null);
        mObjects.put(0, null);
    }

    /**
     * Reads exactly {@code count} bytes into {@code buf}.
     *
     * @throws EOFException if the stream ends before {@code count} bytes arrive.
     */
    public void read(byte[] buf, int count) throws IOException {
        int off = 0;
        while (off < count) {
            int readCount = mInputStream.read(buf, off, count - off);
            if (readCount == -1) {
                throw new EOFException("Decoder: End of stream while reading");
            }
            off += readCount;
        }
    }

    /** Reads exactly {@code count} bytes into the scratch buffer. */
    private void read(int count) throws IOException {
        read(mBuffer, count);
    }

    public boolean bool() throws IOException {
        read(1);
        return mBuffer[0] != 0;
    }

    public byte int8() throws IOException {
        read(1);
        return mBuffer[0];
    }

    public byte uint8() throws IOException {
        return int8();
    }

    /** Decodes a zig-zag signed varint: low bit is the sign, remaining bits the magnitude. */
    private long intv() throws IOException {
        long uv = uintv();
        long v = uv >>> 1;
        if ((uv & 1) != 0) {
            v = ~v;
        }
        return v;
    }

    /**
     * Decodes an unsigned varint: the number of leading 1-bits in the first byte gives the
     * number of continuation bytes; the remaining bits of the first byte are the high bits
     * of the value, followed by the continuation bytes in big-endian order.
     */
    private long uintv() throws IOException {
        read(1);
        int count = 0;
        while (((0x80 >> count) & mBuffer[0]) != 0) count++;
        long v = mBuffer[0] & (0xff >> count);
        if (count == 0) {
            return v;
        }
        read(count);
        for (int i = 0; i < count; i++) {
            v = (v << 8) | (mBuffer[i] & 0xffL);
        }
        return v;
    }

    public short int16() throws IOException {
        return (short)intv();
    }

    public short uint16() throws IOException {
        return (short)uintv();
    }

    public int int32() throws IOException {
        return (int)intv();
    }

    public int uint32() throws IOException {
        return (int)uintv();
    }

    public long int64() throws IOException {
        return intv();
    }

    public long uint64() throws IOException {
        return uintv();
    }

    /** Floats are varint-encoded with their bytes reversed; undo the byte swap here. */
    public float float32() throws IOException {
        int bits = (int)uintv();
        int shuffled = ((bits & 0x000000ff) << 24) |
                       ((bits & 0x0000ff00) << 8) |
                       ((bits & 0x00ff0000) >> 8) |
                       ((bits & 0xff000000) >>> 24);
        return Float.intBitsToFloat(shuffled);
    }

    public double float64() throws IOException {
        long bits = uintv();
        long shuffled = ((bits & 0x00000000000000ffL) << 56) |
                        ((bits & 0x000000000000ff00L) << 40) |
                        ((bits & 0x0000000000ff0000L) << 24) |
                        ((bits & 0x00000000ff000000L) << 8) |
                        ((bits & 0x000000ff00000000L) >> 8) |
                        ((bits & 0x0000ff0000000000L) >> 24) |
                        ((bits & 0x00ff000000000000L) >> 40) |
                        ((bits & 0xff00000000000000L) >>> 56);
        return Double.longBitsToDouble(shuffled);
    }

    /** Reads a length-prefixed UTF-8 string. */
    public String string() throws IOException {
        int size = uint32();
        byte[] bytes = new byte[size];
        // Bulk-read the payload instead of one stream call per byte.
        read(bytes, size);
        return new String(bytes, StandardCharsets.UTF_8);
    }

    /** In compact mode strings are elided; returns "" instead of consuming stream bytes. */
    public String nonCompactString() throws IOException {
        return (mControl.mode != EncodingControl.Compact) ? string() : "";
    }

    /**
     * Reads a stream id. The encoded value 1 (sid 0 with the "new" bit) is a special marker
     * introducing an encoding-control block, after which the real sid follows.
     */
    private int readSid() throws IOException {
        int v = uint32();
        if (v == 1) {
            // read control block.
            mControl.decode(this);
            // read the real sid
            v = uint32();
        }
        return v;
    }

    /**
     * Reads an entity reference. The low bit of the sid word marks a fresh entity whose
     * definition follows inline; otherwise the sid must refer to an already-seen entity.
     */
    public Entity entity() throws IOException {
        int v = readSid();
        int sid = v >> 1;
        if ((v & 1) != 0) {
            Entity entity = new Entity();
            // Register before decoding so self-referential entities resolve.
            mEntities.put(sid, entity);
            entity.decode(this);
            return entity;
        }
        if (!mEntities.containsKey(sid)) {
            throw new RuntimeException("Unknown entity: " + sid);
        }
        return mEntities.get(sid);
    }

    /** Decodes the fields of {@code obj} in place using its class's schema. */
    public void value(BinaryObject obj) throws IOException {
        obj.klass().decode(this, obj);
    }

    /**
     * Reads a type-prefixed object: an entity describing the type, then the object body.
     * Unknown types are registered as {@link Dynamic} on the fly.
     */
    public BinaryObject variant() throws IOException {
        Entity entity = entity();
        if (entity == null) {
            return null;
        }
        BinaryClass c = Namespace.lookup(entity);
        if (c == null) {
            c = Dynamic.register(entity);
        }
        BinaryObject obj = c.create();
        c.decode(this, obj);
        return obj;
    }

    /** Reads an object reference, decoding it inline on first sight (low bit set). */
    public BinaryObject object() throws IOException {
        int v = readSid();
        int sid = v >> 1;
        if ((v & 1) != 0) {
            BinaryObject obj = variant();
            mObjects.put(sid, obj);
            return obj;
        }
        if (!mObjects.containsKey(sid)) {
            throw new RuntimeException("Unknown object: " + sid);
        }
        return mObjects.get(sid);
    }

    public InputStream stream() {
        return mInputStream;
    }

    public int getMode() {
        return mControl.mode;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache.datastructures.partitioned;

import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteQueue;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.processors.cache.datastructures.IgniteCollectionAbstractTest;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.transactions.Transaction;
import org.junit.Test;

import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheRebalanceMode.SYNC;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;
import static org.apache.ignite.transactions.TransactionConcurrency.PESSIMISTIC;
import static org.apache.ignite.transactions.TransactionIsolation.REPEATABLE_READ;

/**
 * Stress test for concurrent {@code IgniteQueue} creation on a partitioned
 * transactional cache while grid nodes are being started in parallel.
 */
public class GridCachePartitionedQueueCreateMultiNodeSelfTest extends IgniteCollectionAbstractTest {
    /** {@inheritDoc} */
    @Override protected int gridCount() {
        // Tests start their own grids per worker thread; the base class starts only one.
        return 1;
    }

    /** {@inheritDoc} */
    @Override protected CacheMode collectionCacheMode() {
        return PARTITIONED;
    }

    /** {@inheritDoc} */
    @Override protected CacheAtomicityMode collectionCacheAtomicityMode() {
        return TRANSACTIONAL;
    }

    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        // No-op. Grids are started inside the tests themselves, not up front.
    }

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration c = super.getConfiguration(igniteInstanceName);

        c.setIncludeEventTypes();
        c.setPeerClassLoadingEnabled(false);

        // Replace the cache configuration from the base class with this test's own
        // (synchronous, zero-backup) configuration.
        CacheConfiguration[] ccfg = c.getCacheConfiguration();

        if (ccfg != null) {
            assert ccfg.length == 1 : ccfg.length;

            c.setCacheConfiguration(ccfg[0], cacheConfiguration());
        }
        else
            c.setCacheConfiguration(cacheConfiguration());

        return c;
    }

    /**
     * @return Partitioned, fully synchronous, zero-backup cache configuration used by the tests.
     */
    protected CacheConfiguration cacheConfiguration() {
        CacheConfiguration cc = defaultCacheConfiguration();

        cc.setCacheMode(PARTITIONED);
        cc.setWriteSynchronizationMode(FULL_SYNC);
        cc.setRebalanceMode(SYNC);
        cc.setBackups(0);

        return cc;
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        // Each test starts up to 10 grids; tear them all down between tests.
        stopAllGrids(true);
    }

    /**
     * Starts 10 nodes concurrently; on each node 10 threads race to create/get the same
     * bounded queue (capacity 1), then each node verifies the capacity bound by offering
     * two values — the second offer must time out.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testQueueCreation() throws Exception {
        final AtomicInteger idx = new AtomicInteger();

        IgniteInternalFuture<?> fut = multithreadedAsync(
            new Callable<Object>() {
                @Override public Object call() throws Exception {
                    int idx0 = idx.getAndIncrement();

                    Thread.currentThread().setName("createQueue-" + idx0);

                    final Ignite ignite = startGrid(idx0);

                    UUID locNodeId = ignite.cluster().localNode().id();

                    info("Started grid: " + locNodeId);

                    info("Creating queue: " + locNodeId);

                    // 10 local threads race on queue creation; all must converge on one queue.
                    GridTestUtils.runMultiThreaded(new Callable<Void>() {
                        @Override public Void call() throws Exception {
                            ignite.queue("queue", 1, config(true));

                            return null;
                        }
                    }, 10, "create-queue-" + ignite.name());

                    IgniteQueue<String> q = ignite.queue("queue", 1, config(true));

                    assert q != null;

                    info("Putting first value: " + locNodeId);

                    q.offer("val", 1000, MILLISECONDS);

                    info("Putting second value: " + locNodeId);

                    // Queue capacity is 1, so the second offer must fail after the timeout.
                    boolean res2 = q.offer("val1", 1000, MILLISECONDS);

                    assert !res2;

                    info("Thread finished: " + locNodeId);

                    return null;
                }
            },
            10
        );

        fut.get();
    }

    /**
     * Starts 10 nodes concurrently and has each run a pessimistic REPEATABLE_READ
     * transaction on the same key: exactly one node may observe {@code null} and perform
     * the put; every other node must read the committed value.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testTx() throws Exception {
        if (cacheConfiguration().getAtomicityMode() != TRANSACTIONAL)
            return;

        int threadCnt = 10;

        final AtomicInteger idx = new AtomicInteger();
        final AtomicBoolean flag = new AtomicBoolean();

        final CountDownLatch latch = new CountDownLatch(threadCnt);

        IgniteInternalFuture<?> fut = multithreadedAsync(
            new Callable<Object>() {
                @Override public Object call() throws Exception {
                    Ignite ignite = startGrid(idx.getAndIncrement());

                    // NOTE(review): deliberately disabled barrier — with wait=false the latch
                    // is never counted down/awaited, so threads proceed unsynchronized.
                    boolean wait = false;

                    if (wait) {
                        latch.countDown();

                        latch.await();
                    }

                    // If output presents, test passes with greater probability.
//                    info("Start puts.");

                    IgniteCache<Integer, String> cache = ignite.cache(DEFAULT_CACHE_NAME);

                    info("Partition: " + ignite.affinity(DEFAULT_CACHE_NAME).partition(1));

                    try (Transaction tx = ignite.transactions().txStart(PESSIMISTIC, REPEATABLE_READ)) {
                        // info("Getting value for key 1");

                        String s = cache.get(1);

                        // info("Got value: " + s);

                        if (s == null) {
                            // Only the first writer may see null under PESSIMISTIC/REPEATABLE_READ.
                            assert flag.compareAndSet(false, true);

                            // info("Putting value.");

                            cache.put(1, "val");

                            // info("Done putting value");

                            tx.commit();
                        }
                        else
                            assert "val".equals(s) : "String: " + s;
                    }

                    info("Thread finished for grid: " + ignite.name());

                    return null;
                }
            },
            threadCnt
        );

        fut.get();
    }
}
/** * Copyright 2005-2014 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.kim.impl.responsibility; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.persistence.Entity; import javax.persistence.Id; import javax.persistence.Inheritance; import javax.persistence.InheritanceType; import javax.persistence.Table; import org.apache.commons.lang.StringUtils; import org.kuali.rice.kim.api.KimConstants; import org.kuali.rice.kim.api.identity.Person; import org.kuali.rice.kim.api.responsibility.Responsibility; import org.kuali.rice.kim.api.responsibility.ResponsibilityContract; import org.kuali.rice.kim.api.services.KimApiServiceLocator; import org.kuali.rice.kim.api.type.KimType; import org.kuali.rice.kim.api.type.KimTypeAttribute; import org.kuali.rice.kim.api.type.KimTypeInfoService; import org.kuali.rice.kim.impl.common.attribute.KimAttributeDataBo; import org.kuali.rice.kim.impl.group.GroupBo; import org.kuali.rice.kim.impl.identity.PersonImpl; import org.kuali.rice.kim.impl.role.RoleBo; import org.kuali.rice.kim.impl.role.RoleResponsibilityBo; import org.kuali.rice.krad.bo.PersistableBusinessObjectBase; import org.kuali.rice.krad.service.DataDictionaryService; import org.kuali.rice.krad.service.KRADServiceLocatorWeb; import org.springframework.util.AutoPopulatingList; @Entity @Table(name="ZZZ_FAKE_KRIM_RSP_T") @Inheritance(strategy=InheritanceType.TABLE_PER_CLASS) public class 
UberResponsibilityBo extends PersistableBusinessObjectBase implements ResponsibilityContract {

    private static final long serialVersionUID = 1L;

    @Id
    private String id;
    private String namespaceCode;
    private String name;
    private String description;
    private String templateId;
    private boolean active;
    private ResponsibilityTemplateBo template = new ResponsibilityTemplateBo();
    private List<ResponsibilityAttributeBo> attributeDetails = new AutoPopulatingList<ResponsibilityAttributeBo>(ResponsibilityAttributeBo.class);
    private List<RoleResponsibilityBo> roleResponsibilities = new AutoPopulatingList<RoleResponsibilityBo>(RoleResponsibilityBo.class);
    private Map<String, String> attributes;

    /**
     * Returns the responsibility's attributes, preferring the persisted
     * attribute detail rows (converted to a map) over the raw attribute map.
     */
    @Override
    public Map<String, String> getAttributes() {
        return attributeDetails != null ? KimAttributeDataBo.toAttributes(attributeDetails) : attributes;
    }

    /**
     * Converts a mutable bo to its immutable counterpart
     *
     * @param bo the mutable business object
     * @return the immutable object, or null if {@code bo} is null
     */
    public static Responsibility to(ResponsibilityBo bo) {
        if (bo == null) {
            return null;
        }

        return Responsibility.Builder.create(bo).build();
    }

    /**
     * Converts a immutable object to its mutable counterpart
     *
     * @param im immutable object
     * @return the mutable bo, or null if {@code im} is null
     */
    public static ResponsibilityBo from(Responsibility im) {
        if (im == null) {
            return null;
        }

        ResponsibilityBo bo = new ResponsibilityBo();
        bo.id = im.getId();
        bo.namespaceCode = im.getNamespaceCode();
        bo.name = im.getName();
        bo.description = im.getDescription();
        bo.active = im.isActive();
        bo.templateId = im.getTemplate() != null ? im.getTemplate().getId() : null;
        bo.template = ResponsibilityTemplateBo.from(im.getTemplate());
        bo.attributes = im.getAttributes();
        bo.setVersionNumber(im.getVersionNumber());
        bo.setObjectId(im.getObjectId());

        return bo;
    }

    public ResponsibilityTemplateBo getTemplate() {
        return template;
    }

    /**
     * Builds a comma-separated string of the attribute values only
     * (no labels), for display in lookup results.
     */
    public String getDetailObjectsValues() {
        // StringBuilder instead of legacy synchronized StringBuffer: the
        // accumulator is method-local, so no synchronization is needed.
        StringBuilder detailObjectsToDisplayBuffer = new StringBuilder();
        Iterator<ResponsibilityAttributeBo> respIter = attributeDetails.iterator();
        while (respIter.hasNext()) {
            ResponsibilityAttributeBo respAttributeData = respIter.next();
            detailObjectsToDisplayBuffer.append(respAttributeData.getAttributeValue());
            if (respIter.hasNext()) {
                detailObjectsToDisplayBuffer.append(KimConstants.KimUIConstants.COMMA_SEPARATOR);
            }
        }
        return detailObjectsToDisplayBuffer.toString();
    }

    /**
     * Builds a comma-separated "label:value" string for each attribute detail,
     * resolving labels through the data dictionary via the template's KIM type.
     */
    public String getDetailObjectsToDisplay() {
        final KimType kimType = getTypeInfoService().getKimType(getTemplate().getKimTypeId());
        StringBuilder detailObjects = new StringBuilder();
        Iterator<ResponsibilityAttributeBo> respIter = attributeDetails.iterator();
        while (respIter.hasNext()) {
            ResponsibilityAttributeBo bo = respIter.next();
            // NOTE(review): getAttributeDefinitionById may return null for an
            // unknown attribute id, which would NPE in getKimAttributeLabelFromDD
            // — presumably data integrity guarantees a match; verify upstream.
            detailObjects.append(getKimAttributeLabelFromDD(kimType.getAttributeDefinitionById(bo.getKimAttributeId())))
                    .append(":")
                    .append(bo.getAttributeValue());
            if (respIter.hasNext()) {
                detailObjects.append(KimConstants.KimUIConstants.COMMA_SEPARATOR);
            }
        }
        return detailObjects.toString();
    }

    /** Resolves the data-dictionary label for a KIM type attribute. */
    private String getKimAttributeLabelFromDD(KimTypeAttribute attribute) {
        return getDataDictionaryService().getAttributeLabel(attribute.getKimAttribute().getComponentName(), attribute.getKimAttribute().getAttributeName());
    }

    private DataDictionaryService getDataDictionaryService() {
        return KRADServiceLocatorWeb.getDataDictionaryService();
    }

    private KimTypeInfoService getTypeInfoService() {
        return KimApiServiceLocator.getKimTypeInfoService();
    }

    @Override
    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    @Override
    public String getNamespaceCode() {
        return namespaceCode;
    }

    public void setNamespaceCode(String namespaceCode) {
        this.namespaceCode = namespaceCode;
    }

    @Override
    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    @Override
    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getTemplateId() {
        return templateId;
    }

    public void setTemplateId(String templateId) {
        this.templateId = templateId;
    }

    public boolean getActive() {
        return active;
    }

    @Override
    public boolean isActive() {
        return active;
    }

    public void setActive(boolean active) {
        this.active = active;
    }

    public void setTemplate(ResponsibilityTemplateBo template) {
        this.template = template;
    }

    public List<ResponsibilityAttributeBo> getAttributeDetails() {
        return attributeDetails;
    }

    public void setAttributeDetails(List<ResponsibilityAttributeBo> attributeDetails) {
        this.attributeDetails = attributeDetails;
    }

    public List<RoleResponsibilityBo> getRoleResponsibilities() {
        return roleResponsibilities;
    }

    public void setRoleResponsibilities(List<RoleResponsibilityBo> roleResponsibilities) {
        this.roleResponsibilities = roleResponsibilities;
    }

    public void setAttributes(Map<String, String> attributes) {
        this.attributes = attributes;
    }

    // Transient-style lookup/display support fields used by the KIM lookup UI.
    private List<RoleBo> assignedToRoles = new AutoPopulatingList<RoleBo>(RoleBo.class);
    private String assignedToRoleNamespaceForLookup;
    private String assignedToRoleNameForLookup;
    private RoleBo assignedToRole = new RoleBo();
    private String assignedToPrincipalNameForLookup;
    private Person assignedToPrincipal = new PersonImpl();
    private String assignedToGroupNamespaceForLookup;
    private String assignedToGroupNameForLookup;
    private GroupBo assignedToGroup = new GroupBo();
    private String attributeName;
    private String attributeValue;
    private String detailCriteria;

    /**
     * Builds a comma-separated display string of all assigned roles,
     * with the trailing separator removed.
     */
    public String getAssignedToRolesToDisplay() {
        StringBuilder assignedToRolesToDisplay = new StringBuilder();
        for (RoleBo roleImpl : assignedToRoles) {
            assignedToRolesToDisplay.append(getRoleDetailsToDisplay(roleImpl));
        }
        return StringUtils.chomp(assignedToRolesToDisplay.toString(), KimConstants.KimUIConstants.COMMA_SEPARATOR);
    }

    /** Formats one role as "namespace name," for concatenated display. */
    public String getRoleDetailsToDisplay(RoleBo roleImpl) {
        return roleImpl.getNamespaceCode().trim() + " " + roleImpl.getName().trim() + KimConstants.KimUIConstants.COMMA_SEPARATOR;
    }

    public List<RoleBo> getAssignedToRoles() {
        return assignedToRoles;
    }

    public void setAssignedToRoles(List<RoleBo> assignedToRoles) {
        this.assignedToRoles = assignedToRoles;
    }

    public String getAssignedToRoleNamespaceForLookup() {
        return assignedToRoleNamespaceForLookup;
    }

    public void setAssignedToRoleNamespaceForLookup(String assignedToRoleNamespaceForLookup) {
        this.assignedToRoleNamespaceForLookup = assignedToRoleNamespaceForLookup;
    }

    public String getAssignedToRoleNameForLookup() {
        return assignedToRoleNameForLookup;
    }

    public void setAssignedToRoleNameForLookup(String assignedToRoleNameForLookup) {
        this.assignedToRoleNameForLookup = assignedToRoleNameForLookup;
    }

    public RoleBo getAssignedToRole() {
        return assignedToRole;
    }

    public void setAssignedToRole(RoleBo assignedToRole) {
        this.assignedToRole = assignedToRole;
    }

    public String getAssignedToPrincipalNameForLookup() {
        return assignedToPrincipalNameForLookup;
    }

    public void setAssignedToPrincipalNameForLookup(String assignedToPrincipalNameForLookup) {
        this.assignedToPrincipalNameForLookup = assignedToPrincipalNameForLookup;
    }

    public Person getAssignedToPrincipal() {
        return assignedToPrincipal;
    }

    public void setAssignedToPrincipal(Person assignedToPrincipal) {
        this.assignedToPrincipal = assignedToPrincipal;
    }

    public String getAssignedToGroupNamespaceForLookup() {
        return assignedToGroupNamespaceForLookup;
    }

    public void setAssignedToGroupNamespaceForLookup(String assignedToGroupNamespaceForLookup) {
        this.assignedToGroupNamespaceForLookup = assignedToGroupNamespaceForLookup;
    }

    public String getAssignedToGroupNameForLookup() {
        return assignedToGroupNameForLookup;
    }

    public void setAssignedToGroupNameForLookup(String assignedToGroupNameForLookup) {
        this.assignedToGroupNameForLookup = assignedToGroupNameForLookup;
    }

    public GroupBo getAssignedToGroup() {
        return assignedToGroup;
    }

    public void setAssignedToGroup(GroupBo assignedToGroup) {
        this.assignedToGroup = assignedToGroup;
    }

    public String getAttributeName() {
        return attributeName;
    }

    public void setAttributeName(String attributeName) {
        this.attributeName = attributeName;
    }

    public String getAttributeValue() {
        return attributeValue;
    }

    public void setAttributeValue(String attributeValue) {
        this.attributeValue = attributeValue;
    }

    public String getDetailCriteria() {
        return detailCriteria;
    }

    public void setDetailCriteria(String detailCriteria) {
        this.detailCriteria = detailCriteria;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.calcite.adapter.jdbc;

import org.apache.calcite.adapter.java.JavaTypeFactory;
import org.apache.calcite.linq4j.tree.Expressions;
import org.apache.calcite.rel.RelFieldCollation;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.AggregateCall;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rex.RexCall;
import org.apache.calcite.rex.RexInputRef;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.rex.RexLocalRef;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexProgram;
import org.apache.calcite.sql.SqlAggFunction;
import org.apache.calcite.sql.SqlBinaryOperator;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlDataTypeSpec;
import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlJoin;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.SqlSelect;
import org.apache.calcite.sql.SqlSelectKeyword;
import org.apache.calcite.sql.SqlSetOperator;
import org.apache.calcite.sql.fun.SqlCase;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.fun.SqlSumEmptyIsZeroAggFunction;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.type.BasicSqlType;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.sql.validate.SqlValidatorUtil;
import org.apache.calcite.util.Pair;
import org.apache.calcite.util.Util;

import com.google.common.collect.ImmutableList;

import java.util.AbstractList;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

/**
 * State for generating a SQL statement.
 *
 * <p>Translates a tree of {@link JdbcRel} relational expressions into a
 * {@link SqlNode} parse tree, tracking table aliases and which SQL clauses
 * have been emitted so far.
 */
public class JdbcImplementor {
  public static final SqlParserPos POS = SqlParserPos.ZERO;

  final SqlDialect dialect;
  // Aliases already used in this query; uniquify() consults this set.
  private final Set<String> aliasSet = new LinkedHashSet<String>();

  public JdbcImplementor(SqlDialect dialect, JavaTypeFactory typeFactory) {
    this.dialect = dialect;
    Util.discard(typeFactory);
  }

  /** Creates a result based on a single relational expression. */
  public Result result(SqlNode node, Collection<Clause> clauses, RelNode rel) {
    final String alias2 = SqlValidatorUtil.getAlias(node, -1);
    final String alias3 = alias2 != null ? alias2 : "t";
    final String alias4 =
        SqlValidatorUtil.uniquify(
            alias3, aliasSet, SqlValidatorUtil.EXPR_SUGGESTER);
    // neededAlias is non-null only when the node's own alias differs from the
    // uniquified one, i.e. an explicit AS clause must be emitted.
    final String alias5 = alias2 == null || !alias2.equals(alias4) ? alias4
        : null;
    return new Result(node, clauses, alias5,
        Collections.singletonList(Pair.of(alias4, rel.getRowType())));
  }

  /** Creates a result based on a join. (Each join could contain one or more
   * relational expressions.) */
  public Result result(SqlNode join, Result leftResult, Result rightResult) {
    final List<Pair<String, RelDataType>> list =
        new ArrayList<Pair<String, RelDataType>>();
    list.addAll(leftResult.aliases);
    list.addAll(rightResult.aliases);
    return new Result(join, Expressions.list(Clause.FROM), null, list);
  }

  /** Wraps a node in a SELECT statement that has no clauses:
   *  "SELECT ... FROM (node)". */
  SqlSelect wrapSelect(SqlNode node) {
    assert node instanceof SqlJoin
        || node instanceof SqlIdentifier
        || node instanceof SqlCall
        && (((SqlCall) node).getOperator() instanceof SqlSetOperator
            || ((SqlCall) node).getOperator() == SqlStdOperatorTable.AS)
        : node;
    return new SqlSelect(POS, SqlNodeList.EMPTY, null, node, null, null, null,
        SqlNodeList.EMPTY, null, null, null);
  }

  /** Dispatches implementation of a child relational expression. */
  public Result visitChild(int i, RelNode e) {
    return ((JdbcRel) e).implement(this);
  }

  /** Context for translating a {@link RexNode} expression (within a
   * {@link RelNode}) into a {@link SqlNode} expression (within a SQL parse
   * tree). */
  public abstract class Context {
    private final int fieldCount;

    protected Context(int fieldCount) {
      this.fieldCount = fieldCount;
    }

    /** Resolves the given field ordinal to a SQL identifier or select item. */
    public abstract SqlNode field(int ordinal);

    /** Converts an expression from {@link RexNode} to {@link SqlNode}
     * format. */
    SqlNode toSql(RexProgram program, RexNode rex) {
      switch (rex.getKind()) {
      case LOCAL_REF:
        // Local refs index into the program's expression list; recurse.
        final int index = ((RexLocalRef) rex).getIndex();
        return toSql(program, program.getExprList().get(index));
      case INPUT_REF:
        return field(((RexInputRef) rex).getIndex());
      case LITERAL:
        final RexLiteral literal = (RexLiteral) rex;
        if (literal.getTypeName() == SqlTypeName.SYMBOL) {
          final SqlLiteral.SqlSymbol symbol =
              (SqlLiteral.SqlSymbol) literal.getValue();
          return SqlLiteral.createSymbol(symbol, POS);
        }
        switch (literal.getTypeName().getFamily()) {
        case CHARACTER:
          return SqlLiteral.createCharString((String) literal.getValue2(), POS);
        case NUMERIC:
        case EXACT_NUMERIC:
          return SqlLiteral.createExactNumeric(literal.getValue().toString(),
              POS);
        case APPROXIMATE_NUMERIC:
          return SqlLiteral.createApproxNumeric(
              literal.getValue().toString(), POS);
        case BOOLEAN:
          return SqlLiteral.createBoolean((Boolean) literal.getValue(), POS);
        case DATE:
          return SqlLiteral.createDate((Calendar) literal.getValue(), POS);
        case TIME:
          return SqlLiteral.createTime((Calendar) literal.getValue(),
              literal.getType().getPrecision(), POS);
        case TIMESTAMP:
          return SqlLiteral.createTimestamp((Calendar) literal.getValue(),
              literal.getType().getPrecision(), POS);
        case ANY:
        case NULL:
          switch (literal.getTypeName()) {
          case NULL:
            return SqlLiteral.createNull(POS);
          // fall through
          }
        default:
          throw new AssertionError(literal + ": " + literal.getTypeName());
        }
      case CASE:
        final RexCall caseCall = (RexCall) rex;
        final List<SqlNode> caseNodeList =
            toSql(program, caseCall.getOperands());
        final SqlNode valueNode;
        final List<SqlNode> whenList = Expressions.list();
        final List<SqlNode> thenList = Expressions.list();
        final SqlNode elseNode;
        if (caseNodeList.size() % 2 == 0) {
          // switched:
          //   "case x when v1 then t1 when v2 then t2 ... else e end"
          valueNode = caseNodeList.get(0);
          for (int i = 1; i < caseNodeList.size() - 1; i += 2) {
            whenList.add(caseNodeList.get(i));
            thenList.add(caseNodeList.get(i + 1));
          }
        } else {
          // other: "case when w1 then t1 when w2 then t2 ... else e end"
          valueNode = null;
          for (int i = 0; i < caseNodeList.size() - 1; i += 2) {
            whenList.add(caseNodeList.get(i));
            thenList.add(caseNodeList.get(i + 1));
          }
        }
        elseNode = caseNodeList.get(caseNodeList.size() - 1);
        return new SqlCase(POS, valueNode, new SqlNodeList(whenList, POS),
            new SqlNodeList(thenList, POS), elseNode);
      default:
        final RexCall call = (RexCall) rex;
        final SqlOperator op = call.getOperator();
        final List<SqlNode> nodeList = toSql(program, call.getOperands());
        switch (rex.getKind()) {
        case CAST:
          // CAST carries its target type as an extra operand in SQL.
          nodeList.add(toSql(call.getType()));
        }
        if (op instanceof SqlBinaryOperator && nodeList.size() > 2) {
          // In RexNode trees, OR and AND have any number of children;
          // SqlCall requires exactly 2. So, convert to a left-deep binary tree.
          return createLeftCall(op, nodeList);
        }
        return op.createCall(new SqlNodeList(nodeList, POS));
      }
    }

    // Recursively folds an n-ary operand list into nested binary calls.
    private SqlNode createLeftCall(SqlOperator op, List<SqlNode> nodeList) {
      if (nodeList.size() == 2) {
        return op.createCall(new SqlNodeList(nodeList, POS));
      }
      final List<SqlNode> butLast = Util.skipLast(nodeList);
      final SqlNode last = nodeList.get(nodeList.size() - 1);
      final SqlNode call = createLeftCall(op, butLast);
      return op.createCall(new SqlNodeList(ImmutableList.of(call, last), POS));
    }

    // Converts a relational type to a SQL data type spec, with
    // dialect-specific tweaks.
    private SqlNode toSql(RelDataType type) {
      switch (dialect.getDatabaseProduct()) {
      case MYSQL:
        switch (type.getSqlTypeName()) {
        case VARCHAR:
          // MySQL doesn't have a VARCHAR type, only CHAR.
          return new SqlDataTypeSpec(new SqlIdentifier("CHAR", POS),
              type.getPrecision(), -1, null, null, POS);
        case INTEGER:
          // NOTE(review): "_UNSIGNED" looks suspicious as a MySQL integer type
          // name — TODO confirm the intended identifier.
          return new SqlDataTypeSpec(new SqlIdentifier("_UNSIGNED", POS),
              type.getPrecision(), -1, null, null, POS);
        }
        break;
      }
      if (type instanceof BasicSqlType) {
        return new SqlDataTypeSpec(
            new SqlIdentifier(type.getSqlTypeName().name(), POS),
            type.getPrecision(),
            type.getScale(),
            type.getCharset() != null
                && dialect.supportsCharSet()
                ? type.getCharset().name()
                : null,
            null,
            POS);
      }
      throw new AssertionError(type); // TODO: implement
    }

    private List<SqlNode> toSql(RexProgram program, List<RexNode> operandList) {
      final List<SqlNode> list = new ArrayList<SqlNode>();
      for (RexNode rex : operandList) {
        list.add(toSql(program, rex));
      }
      return list;
    }

    /** Returns a lazy view of all fields as SQL nodes. */
    public List<SqlNode> fieldList() {
      return new AbstractList<SqlNode>() {
        public SqlNode get(int index) {
          return field(index);
        }

        public int size() {
          return fieldCount;
        }
      };
    }

    /** Converts a call to an aggregate function to an expression. */
    public SqlNode toSql(AggregateCall aggCall) {
      SqlOperator op = (SqlAggFunction) aggCall.getAggregation();
      if (op instanceof SqlSumEmptyIsZeroAggFunction) {
        // SUM0 has no SQL spelling; plain SUM is emitted instead.
        op = SqlStdOperatorTable.SUM;
      }
      final List<SqlNode> operands = Expressions.list();
      for (int arg : aggCall.getArgList()) {
        operands.add(field(arg));
      }
      return op.createCall(
          aggCall.isDistinct() ? SqlSelectKeyword.DISTINCT.symbol(POS) : null,
          POS, operands.toArray(new SqlNode[operands.size()]));
    }

    /** Converts a collation to an ORDER BY item. */
    public SqlNode toSql(RelFieldCollation collation) {
      SqlNode node = field(collation.getFieldIndex());
      switch (collation.getDirection()) {
      case DESCENDING:
      case STRICTLY_DESCENDING:
        node = SqlStdOperatorTable.DESC.createCall(POS, node);
      }
      switch (collation.nullDirection) {
      case FIRST:
        node = SqlStdOperatorTable.NULLS_FIRST.createCall(POS, node);
        break;
      case LAST:
        node = SqlStdOperatorTable.NULLS_LAST.createCall(POS, node);
        break;
      }
      return node;
    }
  }

  /** Sums field counts across all aliased row types. */
  private static int computeFieldCount(
      List<Pair<String, RelDataType>> aliases) {
    int x = 0;
    for (Pair<String, RelDataType> alias : aliases) {
      x += alias.right.getFieldCount();
    }
    return x;
  }

  /** Implementation of Context that precedes field references with their
   * "table alias" based on the current sub-query's FROM clause. */
  public class AliasContext extends Context {
    private final boolean qualified;
    private final List<Pair<String, RelDataType>> aliases;

    public AliasContext(List<Pair<String, RelDataType>> aliases,
        boolean qualified) {
      super(computeFieldCount(aliases));
      this.aliases = aliases;
      this.qualified = qualified;
    }

    public SqlNode field(int ordinal) {
      // Walk the alias list, subtracting each row type's field count until
      // the ordinal lands inside one of them.
      for (Pair<String, RelDataType> alias : aliases) {
        final List<RelDataTypeField> fields = alias.right.getFieldList();
        if (ordinal < fields.size()) {
          RelDataTypeField field = fields.get(ordinal);
          return new SqlIdentifier(!qualified
              ? ImmutableList.of(field.getName())
              : ImmutableList.of(alias.left, field.getName()),
              POS);
        }
        ordinal -= fields.size();
      }
      throw new AssertionError(
          "field ordinal " + ordinal + " out of range " + aliases);
    }
  }

  /** Result of implementing a node. */
  public class Result {
    final SqlNode node;
    private final String neededAlias;
    private final List<Pair<String, RelDataType>> aliases;
    final Expressions.FluentList<Clause> clauses;

    private Result(SqlNode node, Collection<Clause> clauses, String neededAlias,
        List<Pair<String, RelDataType>> aliases) {
      this.node = node;
      this.neededAlias = neededAlias;
      this.aliases = aliases;
      this.clauses = Expressions.list(clauses);
    }

    /** Once you have a Result of implementing a child relational expression,
     * call this method to create a Builder to implement the current relational
     * expression by adding additional clauses to the SQL query.
     *
     * <p>You need to declare which clauses you intend to add. If the clauses
     * are "later", you can add to the same query. For example, "GROUP BY" comes
     * after "WHERE". But if they are the same or earlier, this method will
     * start a new SELECT that wraps the previous result.</p>
     *
     * <p>When you have called
     * {@link Builder#setSelect(org.apache.calcite.sql.SqlNodeList)},
     * {@link Builder#setWhere(org.apache.calcite.sql.SqlNode)} etc. call
     * {@link Builder#result(org.apache.calcite.sql.SqlNode, java.util.Collection, org.apache.calcite.rel.RelNode)}
     * to fix the new query.</p>
     *
     * @param rel Relational expression being implemented
     * @param clauses Clauses that will be generated to implement current
     *                relational expression
     * @return A builder
     */
    public Builder builder(JdbcRel rel, Clause... clauses) {
      final Clause maxClause = maxClause();
      boolean needNew = false;
      // Requesting a clause at or before the highest clause already emitted
      // forces a wrapping sub-select.
      for (Clause clause : clauses) {
        if (maxClause.ordinal() >= clause.ordinal()) {
          needNew = true;
        }
      }
      SqlSelect select;
      Expressions.FluentList<Clause> clauseList = Expressions.list();
      if (needNew) {
        select = subSelect();
      } else {
        select = asSelect();
        clauseList.addAll(this.clauses);
      }
      clauseList.appendAll(clauses);
      Context newContext;
      final SqlNodeList selectList = select.getSelectList();
      if (selectList != null) {
        // Resolve field ordinals against the explicit select list,
        // unwrapping AS aliases.
        newContext = new Context(selectList.size()) {
          @Override public SqlNode field(int ordinal) {
            final SqlNode selectItem = selectList.get(ordinal);
            switch (selectItem.getKind()) {
            case AS:
              return ((SqlCall) selectItem).operand(0);
            }
            return selectItem;
          }
        };
      } else {
        // "SELECT *": resolve ordinals against the FROM clause aliases;
        // qualify only when more than one alias is in scope.
        newContext = new AliasContext(aliases, aliases.size() > 1);
      }
      return new Builder(rel, clauseList, select, newContext);
    }

    // make private?
    public Clause maxClause() {
      Clause maxClause = null;
      for (Clause clause : clauses) {
        if (maxClause == null || clause.ordinal() > maxClause.ordinal()) {
          maxClause = clause;
        }
      }
      assert maxClause != null;
      return maxClause;
    }

    /** Returns a node that can be included in the FROM clause or a JOIN. It has
     * an alias that is unique within the query. The alias is implicit if it
     * can be derived using the usual rules (For example, "SELECT * FROM emp" is
     * equivalent to "SELECT * FROM emp AS emp".) */
    public SqlNode asFrom() {
      if (neededAlias != null) {
        return SqlStdOperatorTable.AS.createCall(POS, node,
            new SqlIdentifier(neededAlias, POS));
      }
      return node;
    }

    public SqlSelect subSelect() {
      return wrapSelect(asFrom());
    }

    /** Converts a non-query node into a SELECT node. Set operators (UNION,
     * INTERSECT, EXCEPT) remain as is. */
    SqlSelect asSelect() {
      if (node instanceof SqlSelect) {
        return (SqlSelect) node;
      }
      return wrapSelect(node);
    }

    /** Converts a non-query node into a SELECT node. Set operators (UNION,
     * INTERSECT, EXCEPT) remain as is. */
    public SqlNode asQuery() {
      if (node instanceof SqlCall
          && ((SqlCall) node).getOperator() instanceof SqlSetOperator) {
        return node;
      }
      return asSelect();
    }

    /** Returns a context that always qualifies identifiers. Useful if the
     * Context deals with just one arm of a join, yet we wish to generate
     * a join condition that qualifies column names to disambiguate them. */
    public Context qualifiedContext() {
      return new AliasContext(aliases, true);
    }
  }

  /** Builder. */
  public class Builder {
    private final JdbcRel rel;
    private final List<Clause> clauses;
    private final SqlSelect select;
    public final Context context;

    public Builder(JdbcRel rel, List<Clause> clauses, SqlSelect select,
        Context context) {
      this.rel = rel;
      this.clauses = clauses;
      this.select = select;
      this.context = context;
    }

    public void setSelect(SqlNodeList nodeList) {
      select.setSelectList(nodeList);
    }

    public void setWhere(SqlNode node) {
      assert clauses.contains(Clause.WHERE);
      select.setWhere(node);
    }

    public void setGroupBy(SqlNodeList nodeList) {
      assert clauses.contains(Clause.GROUP_BY);
      select.setGroupBy(nodeList);
    }

    public void setOrderBy(SqlNodeList nodeList) {
      assert clauses.contains(Clause.ORDER_BY);
      select.setOrderBy(nodeList);
    }

    public Result result() {
      return JdbcImplementor.this.result(select, clauses, rel);
    }
  }

  /** Clauses in a SQL query. Ordered by evaluation order.
   * SELECT is set only when there is a NON-TRIVIAL SELECT clause. */
  enum Clause {
    FROM, WHERE, GROUP_BY, HAVING, SELECT, SET_OP, ORDER_BY
  }
}

// End JdbcImplementor.java
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats; import org.junit.Test; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.search.aggregations.AggregationBuilders.extendedStats; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.hamcrest.Matchers.*; /** * */ public class ExtendedStatsTests extends AbstractNumericTests { private static double stdDev(int... vals) { return Math.sqrt(variance(vals)); } private static double variance(int... 
vals) { double sum = 0; double sumOfSqrs = 0; for (int val : vals) { sum += val; sumOfSqrs += val * val; } return (sumOfSqrs - ((sum * sum) / vals.length)) / vals.length; } @Test public void testEmptyAggregation() throws Exception { SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") .setQuery(matchAllQuery()) .addAggregation(histogram("histo").field("value").interval(1l).minDocCount(0).subAggregation(extendedStats("stats"))) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l)); Histogram histo = searchResponse.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBucketByKey(1l); assertThat(bucket, notNullValue()); ExtendedStats stats = bucket.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); assertThat(stats.getSumOfSquares(), equalTo(0.0)); assertThat(stats.getCount(), equalTo(0l)); assertThat(stats.getSum(), equalTo(0.0)); assertThat(stats.getMin(), equalTo(Double.POSITIVE_INFINITY)); assertThat(stats.getMax(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(Double.isNaN(stats.getStdDeviation()), is(true)); assertThat(Double.isNaN(stats.getAvg()), is(true)); } @Test public void testUnmapped() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx_unmapped") .setQuery(matchAllQuery()) .addAggregation(extendedStats("stats").field("value")) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(0l)); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); assertThat(stats.getAvg(), equalTo(Double.NaN)); assertThat(stats.getMin(), equalTo(Double.POSITIVE_INFINITY)); assertThat(stats.getMax(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(stats.getSum(), equalTo(0.0)); assertThat(stats.getCount(), equalTo(0l)); assertThat(stats.getSumOfSquares(), 
equalTo(0.0)); assertThat(stats.getVariance(), equalTo(Double.NaN)); assertThat(stats.getStdDeviation(), equalTo(Double.NaN)); } @Test public void testSingleValuedField() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(extendedStats("stats").field("value")) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); assertThat(stats.getAvg(), equalTo((double) (1+2+3+4+5+6+7+8+9+10) / 10)); assertThat(stats.getMin(), equalTo(1.0)); assertThat(stats.getMax(), equalTo(10.0)); assertThat(stats.getSum(), equalTo((double) 1+2+3+4+5+6+7+8+9+10)); assertThat(stats.getCount(), equalTo(10l)); assertThat(stats.getSumOfSquares(), equalTo((double) 1+4+9+16+25+36+49+64+81+100)); assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10))); assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10))); } @Test public void testSingleValuedField_PartiallyUnmapped() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx", "idx_unmapped") .setQuery(matchAllQuery()) .addAggregation(extendedStats("stats").field("value")) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); assertThat(stats.getAvg(), equalTo((double) (1+2+3+4+5+6+7+8+9+10) / 10)); assertThat(stats.getMin(), equalTo(1.0)); assertThat(stats.getMax(), equalTo(10.0)); assertThat(stats.getSum(), equalTo((double) 1+2+3+4+5+6+7+8+9+10)); assertThat(stats.getCount(), equalTo(10l)); assertThat(stats.getSumOfSquares(), equalTo((double) 1+4+9+16+25+36+49+64+81+100)); assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 
5, 6, 7, 8 ,9, 10))); assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10))); } @Test public void testSingleValuedField_WithValueScript() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(extendedStats("stats").field("value").script("_value + 1")) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); assertThat(stats.getAvg(), equalTo((double) (2+3+4+5+6+7+8+9+10+11) / 10)); assertThat(stats.getMin(), equalTo(2.0)); assertThat(stats.getMax(), equalTo(11.0)); assertThat(stats.getSum(), equalTo((double) 2+3+4+5+6+7+8+9+10+11)); assertThat(stats.getCount(), equalTo(10l)); assertThat(stats.getSumOfSquares(), equalTo((double) 4+9+16+25+36+49+64+81+100+121)); assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); } @Test public void testSingleValuedField_WithValueScript_WithParams() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(extendedStats("stats").field("value").script("_value + inc").param("inc", 1)) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); assertThat(stats.getAvg(), equalTo((double) (2+3+4+5+6+7+8+9+10+11) / 10)); assertThat(stats.getMin(), equalTo(2.0)); assertThat(stats.getMax(), equalTo(11.0)); assertThat(stats.getSum(), equalTo((double) 2+3+4+5+6+7+8+9+10+11)); assertThat(stats.getCount(), equalTo(10l)); assertThat(stats.getSumOfSquares(), equalTo((double) 4+9+16+25+36+49+64+81+100+121)); 
assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); } @Test public void testMultiValuedField() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(extendedStats("stats").field("values")) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); assertThat(stats.getAvg(), equalTo((double) (2+3+4+5+6+7+8+9+10+11+3+4+5+6+7+8+9+10+11+12) / 20)); assertThat(stats.getMin(), equalTo(2.0)); assertThat(stats.getMax(), equalTo(12.0)); assertThat(stats.getSum(), equalTo((double) 2+3+4+5+6+7+8+9+10+11+3+4+5+6+7+8+9+10+11+12)); assertThat(stats.getCount(), equalTo(20l)); assertThat(stats.getSumOfSquares(), equalTo((double) 4+9+16+25+36+49+64+81+100+121+9+16+25+36+49+64+81+100+121+144)); assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 12))); assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 12))); } @Test public void testMultiValuedField_WithValueScript() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(extendedStats("stats").field("values").script("_value - 1")) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); assertThat(stats.getAvg(), equalTo((double) (1+2+3+4+5+6+7+8+9+10+2+3+4+5+6+7+8+9+10+11) / 20)); assertThat(stats.getMin(), equalTo(1.0)); assertThat(stats.getMax(), equalTo(11.0)); assertThat(stats.getSum(), 
equalTo((double) 1+2+3+4+5+6+7+8+9+10+2+3+4+5+6+7+8+9+10+11)); assertThat(stats.getCount(), equalTo(20l)); assertThat(stats.getSumOfSquares(), equalTo((double) 1+4+9+16+25+36+49+64+81+100+4+9+16+25+36+49+64+81+100+121)); assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); } @Test public void testMultiValuedField_WithValueScript_WithParams() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(extendedStats("stats").field("values").script("_value - dec").param("dec", 1)) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); assertThat(stats.getAvg(), equalTo((double) (1+2+3+4+5+6+7+8+9+10+2+3+4+5+6+7+8+9+10+11) / 20)); assertThat(stats.getMin(), equalTo(1.0)); assertThat(stats.getMax(), equalTo(11.0)); assertThat(stats.getSum(), equalTo((double) 1+2+3+4+5+6+7+8+9+10+2+3+4+5+6+7+8+9+10+11)); assertThat(stats.getCount(), equalTo(20l)); assertThat(stats.getSumOfSquares(), equalTo((double) 1+4+9+16+25+36+49+64+81+100+4+9+16+25+36+49+64+81+100+121)); assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); } @Test public void testScript_SingleValued() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(extendedStats("stats").script("doc['value'].value")) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); ExtendedStats stats = 
searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); assertThat(stats.getAvg(), equalTo((double) (1+2+3+4+5+6+7+8+9+10) / 10)); assertThat(stats.getMin(), equalTo(1.0)); assertThat(stats.getMax(), equalTo(10.0)); assertThat(stats.getSum(), equalTo((double) 1+2+3+4+5+6+7+8+9+10)); assertThat(stats.getCount(), equalTo(10l)); assertThat(stats.getSumOfSquares(), equalTo((double) 1+4+9+16+25+36+49+64+81+100)); assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10))); assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10))); } @Test public void testScript_SingleValued_WithParams() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(extendedStats("stats").script("doc['value'].value + inc").param("inc", 1)) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); assertThat(stats.getAvg(), equalTo((double) (2+3+4+5+6+7+8+9+10+11) / 10)); assertThat(stats.getMin(), equalTo(2.0)); assertThat(stats.getMax(), equalTo(11.0)); assertThat(stats.getSum(), equalTo((double) 2+3+4+5+6+7+8+9+10+11)); assertThat(stats.getCount(), equalTo(10l)); assertThat(stats.getSumOfSquares(), equalTo((double) 4+9+16+25+36+49+64+81+100+121)); assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); } @Test public void testScript_ExplicitSingleValued_WithParams() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(extendedStats("stats").script("doc['value'].value + inc").param("inc", 1)) .execute().actionGet(); 
assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); assertThat(stats.getAvg(), equalTo((double) (2+3+4+5+6+7+8+9+10+11) / 10)); assertThat(stats.getMin(), equalTo(2.0)); assertThat(stats.getMax(), equalTo(11.0)); assertThat(stats.getSum(), equalTo((double) 2+3+4+5+6+7+8+9+10+11)); assertThat(stats.getCount(), equalTo(10l)); assertThat(stats.getSumOfSquares(), equalTo((double) 4+9+16+25+36+49+64+81+100+121)); assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11))); } @Test public void testScript_MultiValued() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(extendedStats("stats").script("doc['values'].values")) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); assertThat(stats.getAvg(), equalTo((double) (2+3+4+5+6+7+8+9+10+11+3+4+5+6+7+8+9+10+11+12) / 20)); assertThat(stats.getMin(), equalTo(2.0)); assertThat(stats.getMax(), equalTo(12.0)); assertThat(stats.getSum(), equalTo((double) 2+3+4+5+6+7+8+9+10+11+3+4+5+6+7+8+9+10+11+12)); assertThat(stats.getCount(), equalTo(20l)); assertThat(stats.getSumOfSquares(), equalTo((double) 4+9+16+25+36+49+64+81+100+121+9+16+25+36+49+64+81+100+121+144)); assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 12))); assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 12))); } @Test public void testScript_ExplicitMultiValued() throws Exception { SearchResponse searchResponse = 
client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(extendedStats("stats").script("doc['values'].values")) .execute().actionGet(); assertShardExecutionState(searchResponse, 0); assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); assertThat(stats.getAvg(), equalTo((double) (2+3+4+5+6+7+8+9+10+11+3+4+5+6+7+8+9+10+11+12) / 20)); assertThat(stats.getMin(), equalTo(2.0)); assertThat(stats.getMax(), equalTo(12.0)); assertThat(stats.getSum(), equalTo((double) 2+3+4+5+6+7+8+9+10+11+3+4+5+6+7+8+9+10+11+12)); assertThat(stats.getCount(), equalTo(20l)); assertThat(stats.getSumOfSquares(), equalTo((double) 4+9+16+25+36+49+64+81+100+121+9+16+25+36+49+64+81+100+121+144)); assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 12))); assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 12))); } @Test public void testScript_MultiValued_WithParams() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(extendedStats("stats").script("[ doc['value'].value, doc['value'].value - dec ]").param("dec", 1)) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l)); ExtendedStats stats = searchResponse.getAggregations().get("stats"); assertThat(stats, notNullValue()); assertThat(stats.getName(), equalTo("stats")); assertThat(stats.getAvg(), equalTo((double) (1+2+3+4+5+6+7+8+9+10+0+1+2+3+4+5+6+7+8+9) / 20)); assertThat(stats.getMin(), equalTo(0.0)); assertThat(stats.getMax(), equalTo(10.0)); assertThat(stats.getSum(), equalTo((double) 1+2+3+4+5+6+7+8+9+10+0+1+2+3+4+5+6+7+8+9)); assertThat(stats.getCount(), equalTo(20l)); assertThat(stats.getSumOfSquares(), equalTo((double) 
1+4+9+16+25+36+49+64+81+100+0+1+4+9+16+25+36+49+64+81)); assertThat(stats.getVariance(), equalTo(variance(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8 ,9))); assertThat(stats.getStdDeviation(), equalTo(stdDev(1, 2, 3, 4, 5, 6, 7, 8 ,9, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8 ,9))); } private void assertShardExecutionState(SearchResponse response, int expectedFailures) throws Exception { ShardSearchFailure[] failures = response.getShardFailures(); if (failures.length != expectedFailures) { for (ShardSearchFailure failure : failures) { logger.error("Shard Failure: {}", failure.reason(), failure.toString()); } fail("Unexpected shard failures!"); } assertThat("Not all shards are initialized", response.getSuccessfulShards(), equalTo(response.getTotalShards())); } }
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.apache.bookkeeper.bookie;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.bookkeeper.bookie.CheckpointSource.Checkpoint;
import org.apache.bookkeeper.conf.ServerConfiguration;
import org.apache.bookkeeper.meta.ActiveLedgerManager;
import org.apache.bookkeeper.proto.BookieProtocol;
import org.apache.bookkeeper.stats.Counter;
import org.apache.bookkeeper.stats.StatsLogger;
import org.apache.bookkeeper.util.DaemonThreadFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static org.apache.bookkeeper.bookie.BookKeeperServerStats.*;

/**
 * A ledger storage that front-ends {@link InterleavedLedgerStorage} with an in-memory
 * skip-list ({@link EntryMemTable}): entries are first added to the mem-table and are
 * written to the interleaved storage when the mem-table is flushed (on checkpoint or
 * when its size limit is reached). Reads consult the mem-table before falling back to
 * the entry log.
 */
public class SortedLedgerStorage extends InterleavedLedgerStorage
        implements LedgerStorage, CacheCallback, SkipListFlusher {
    private final static Logger LOG = LoggerFactory.getLogger(SortedLedgerStorage.class);

    // In-memory skip list buffering recently added entries until flush.
    private final EntryMemTable memTable;
    // Single-threaded executor used to run mem-table flushes off the add path.
    private final ScheduledExecutorService scheduler;

    // Stats: entry/byte read counters split by whether the read was served from the
    // mem-table or from the entry log.
    private final Counter memtableReadEntryCounter;
    private final Counter entrylogReadEntryCounter;
    private final Counter memtableReadBytesCounter;
    private final Counter entrylogReadBytesCounter;

    public SortedLedgerStorage(ServerConfiguration conf, ActiveLedgerManager activeLedgerManager,
                               LedgerDirsManager ledgerDirsManager, LedgerDirsManager indexDirsManager,
                               final CheckpointSource checkpointSource, StatsLogger statsLogger)
            throws IOException {
        super(conf, activeLedgerManager, ledgerDirsManager, indexDirsManager, checkpointSource, statsLogger);
        this.memTable = new EntryMemTable(conf, checkpointSource, statsLogger);
        // Daemon thread at a priority halfway between normal and max, so flushes keep up
        // with writes without starving other bookie threads.
        this.scheduler = Executors.newSingleThreadScheduledExecutor(
                new ThreadFactoryBuilder()
                        .setThreadFactory(new DaemonThreadFactory((Thread.NORM_PRIORITY + Thread.MAX_PRIORITY)/2))
                        .setNameFormat("SortedLedgerStorageExecutor-%d")
                        .build());
        // Stats
        this.memtableReadEntryCounter = statsLogger.getCounter(NUM_ENTRIES_READ_FROM_MEMTABLE);
        this.memtableReadBytesCounter = statsLogger.getCounter(NUM_BYTES_READ_FROM_MEMTABLE);
        this.entrylogReadEntryCounter = statsLogger.getCounter(NUM_ENTRIES_READ_FROM_ENTRYLOG);
        this.entrylogReadBytesCounter = statsLogger.getCounter(NUM_BYTES_READ_FROM_ENTRYLOG);
    }

    @Override
    public void start() {
        // Flush whatever is buffered before starting, so the underlying storage begins
        // from a consistent state. A failed flush is logged, not propagated.
        try {
            flush();
        } catch (IOException e) {
            LOG.error("Exception thrown while flushing ledger cache.", e);
        }
        super.start();
    }

    @Override
    public void shutdown() throws InterruptedException {
        // Wait for any jobs currently scheduled to be completed and then shut down.
        scheduler.shutdown();
        if (!scheduler.awaitTermination(3, TimeUnit.SECONDS)) {
            scheduler.shutdownNow();
        }
        super.shutdown();
    }

    @Override
    public boolean ledgerExists(long ledgerId) throws IOException {
        // Done this way because checking the skip list is an O(logN) operation compared to
        // the O(1) for the ledgerCache.
        // NOTE(review): when both the first super check and the mem-table miss, super is
        // queried a second time — presumably to catch an entry flushed in between; confirm
        // before simplifying to "return false".
        if (!super.ledgerExists(ledgerId)) {
            EntryKeyValue kv = memTable.getLastEntry(ledgerId);
            if (null == kv) {
                return super.ledgerExists(ledgerId);
            }
        }
        return true;
    }

    @Override
    public long addEntry(ByteBuffer entry) throws IOException {
        // Entry layout (as read here): ledgerId, entryId, lastAddConfirmed as the first
        // three longs; the buffer is rewound so the mem-table stores the full entry.
        long ledgerId = entry.getLong();
        long entryId = entry.getLong();
        long lac = entry.getLong();
        entry.rewind();
        memTable.addEntry(ledgerId, entryId, entry, this);
        ledgerCache.updateLastAddConfirmed(ledgerId, lac);
        return entryId;
    }

    /**
     * Get the last entry for a particular ledger, preferring the mem-table and falling
     * back to the ledger cache + index. Updates the corresponding read stats counters.
     *
     * @param ledgerId ledger to look up
     * @return buffer holding the last entry's bytes
     */
    private ByteBuffer getLastEntryId(long ledgerId) throws IOException {
        EntryKeyValue kv = memTable.getLastEntry(ledgerId);
        ByteBuffer bufferToRet;
        if (null != kv) {
            bufferToRet = kv.getValueAsByteBuffer();
            memtableReadEntryCounter.inc();
            memtableReadBytesCounter.add(bufferToRet.remaining());
        } else {
            // If it doesn't exist in the skip list, then fallback to the ledger cache+index.
            bufferToRet = super.getEntry(ledgerId, BookieProtocol.LAST_ADD_CONFIRMED);
            entrylogReadEntryCounter.inc();
            entrylogReadBytesCounter.add(bufferToRet.remaining());
        }
        return bufferToRet;
    }

    @Override
    public ByteBuffer getEntry(long ledgerId, long entryId) throws IOException {
        if (entryId == BookieProtocol.LAST_ADD_CONFIRMED) {
            return getLastEntryId(ledgerId);
        }
        ByteBuffer buffToRet = getEntryImpl(ledgerId, entryId);
        if (null == buffToRet) {
            EntryKeyValue kv = memTable.getEntry(ledgerId, entryId);
            if (null == kv) {
                // The entry might have been flushed since we last checked, so query the ledger cache again.
                // If the entry truly doesn't exist, then this will throw a NoEntryException
                buffToRet = super.getEntry(ledgerId, entryId);
                entrylogReadEntryCounter.inc();
                entrylogReadBytesCounter.add(buffToRet.remaining());
            } else {
                buffToRet = kv.getValueAsByteBuffer();
                memtableReadEntryCounter.inc();
                memtableReadBytesCounter.add(buffToRet.remaining());
            }
        } else {
            entrylogReadEntryCounter.inc();
            entrylogReadBytesCounter.add(buffToRet.remaining());
        }
        // buffToRet will not be null when we reach here.
        return buffToRet;
    }

    @Override
    public Checkpoint checkpoint(final Checkpoint checkpoint) throws IOException {
        Checkpoint lastCheckpoint = checkpointHolder.getLastCheckpoint();
        // if checkpoint is less than last checkpoint, we don't need to do checkpoint again.
        if (lastCheckpoint.compareTo(checkpoint) > 0) {
            return lastCheckpoint;
        }
        // Flush the mem-table up to this checkpoint before delegating to the
        // interleaved storage's checkpoint.
        memTable.flush(this, checkpoint);
        return super.checkpoint(checkpoint);
    }

    @Override
    public void process(long ledgerId, long entryId, ByteBuffer buffer) throws IOException {
        // SkipListFlusher callback: write a flushed mem-table entry to the entry log.
        processEntry(ledgerId, entryId, buffer, false);
    }

    @Override
    public void flush() throws IOException {
        // Flush everything buffered (Checkpoint.MAX), then flush the underlying storage.
        memTable.flush(this, Checkpoint.MAX);
        super.flush();
    }

    // CacheCallback functions.
    @Override
    public void onSizeLimitReached(final Checkpoint cp) throws IOException {
        // when size limit reached, we get the previous checkpoint from snapshot mem-table.
        // at this point, we are safer to schedule a checkpoint, since the entries added before
        // this checkpoint already written to entry logger.
        // but it would be better not to let mem-table flush to different entry log files,
        // so we roll entry log files in SortedLedgerStorage itself.
        // After that, we could make the process writing data to entry logger file not bound with checkpoint.
        // otherwise, it hurts add performance.
        //
        // The only exception for the size limitation is if a file grows to be more than 4GB,
        // we have to force rolling log, which it might cause slight performance effects.
        scheduler.submit(new Runnable() {
            @Override
            public void run() {
                try {
                    LOG.info("Started flushing mem table before checkpoint {}.", cp);
                    long logIdBeforeFlush = entryLogger.getCurLogId();
                    memTable.flush(SortedLedgerStorage.this);
                    long logIdAfterFlush = entryLogger.getCurLogId();
                    // in any case that an entry log reach the limit, we rolled the log and start checkpointing.
                    // if a memory table is flushed spanning over two entry log files, we also roll log. this is
                    // for performance consideration: since we don't wanna checkpoint a new log file that ledger
                    // storage is writing to.
                    if (entryLogger.reachEntryLogLimit(0) || logIdAfterFlush != logIdBeforeFlush) {
                        entryLogger.rollLog();
                        LOG.info("Rolling entry logger since it reached size limitation.");
                    }
                } catch (IOException e) {
                    // TODO: if we failed to flush data, we should switch the bookie back to readonly mode
                    // or shutdown it.
                    LOG.error("Exception thrown while flushing skip list cache.", e);
                }
            }
        });
    }
}
// Copyright 2011-2016 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.security.zynamics.binnavi.Gui.Tutorials;

import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.GridLayout;
import java.awt.Window;
import java.awt.event.ActionEvent;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;

import javax.swing.AbstractAction;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTextField;
import javax.swing.JTextPane;
import javax.swing.border.EmptyBorder;
import javax.swing.border.TitledBorder;

import com.google.common.base.Preconditions;
import com.google.security.zynamics.binnavi.CMain;
import com.google.security.zynamics.binnavi.Gui.CIconInitializer;
import com.google.security.zynamics.binnavi.Tutorials.CTutorial;
import com.google.security.zynamics.binnavi.Tutorials.ITutorialListener;
import com.google.security.zynamics.zylib.gui.CMessageBox;
import com.google.security.zynamics.zylib.gui.GuiHelper;

/**
 * Dialog where tutorials are run. In this dialog the individual tutorial steps are shown and the
 * user can control the progress of the tutorials.
 *
 * <p>Implemented as an eagerly-initialized singleton; see {@link #instance()}. The dialog is
 * never disposed — closing it merely hides it (DO_NOTHING_ON_CLOSE plus a window listener
 * routing to {@link #quit()}).
 */
public final class CTutorialDialog extends JDialog {
  /**
   * Used for serialization.
   */
  private static final long serialVersionUID = 5087316538644576623L;

  /**
   * The only valid instance of the tutorial dialog class.
   * NOTE(review): created at class-load time, which constructs Swing components eagerly;
   * presumably class loading happens on the EDT — confirm before refactoring.
   */
  private static CTutorialDialog m_instance = new CTutorialDialog();

  /**
   * Message shown when the user executes a wrong action.
   */
  private static final String WRONG_ACTION_STRING = "Wrong action executed (%d)";

  /**
   * Message that shows the progress of the tutorial.
   */
  private static final String TOPIC_STRING = "Current Tutorial: %s (Step %d of %d)";

  /**
   * Text field where the step description is shown.
   */
  private final JTextPane m_descriptionField = new JTextPane();

  /**
   * Listener that updates the dialog on changes in the current tutorial.
   */
  private final InternalTutorialListener m_listener = new InternalTutorialListener();

  /**
   * The currently active tutorial.
   */
  private CTutorial m_currentTutorial = null;

  /**
   * Label where the title of the tutorial is shown.
   */
  private final JLabel m_topicLabel = new JLabel();

  /**
   * Field where warning messages are shown.
   * The "XXXXXXXX" initial text is presumably only a width placeholder for layout;
   * it is cleared whenever a tutorial starts, steps, or finishes — TODO confirm.
   */
  private final JTextField m_warningLabel = new JTextField("XXXXXXXX");

  /**
   * Button used to advance the active tutorial to the next step (if available).
   */
  private final JButton m_nextButton = new JButton(new NextAction());

  /**
   * The skip button allows the user to skip to the next tutorial step without performing the
   * required actions.
   */
  private final JButton m_skipButton = new JButton(new SkipAction());

  /**
   * Creates a new tutorial dialog.
   */
  private CTutorialDialog() {
    super((Window) null, "BinNavi Tutorial");

    CIconInitializer.initializeWindowIcons(this);

    setLayout(new BorderLayout());

    m_topicLabel.setBorder(new EmptyBorder(5, 5, 5, 5));

    m_descriptionField.setContentType("text/html");
    m_descriptionField.setEditable(false);
    m_descriptionField.setBorder(new TitledBorder(""));

    // NOTE(review): topPanel is created and the topic label added to it, but topPanel is
    // never added to the dialog; the label is added directly to NORTH below, which
    // re-parents it. topPanel is effectively dead code.
    final JPanel topPanel = new JPanel();
    topPanel.add(m_topicLabel);

    add(m_topicLabel, BorderLayout.NORTH);
    add(new JScrollPane(m_descriptionField));

    final JPanel bottomPanel = new JPanel(new BorderLayout());

    final JButton cancelButton = new JButton(new QuitAction());

    final JPanel leftPanel = new JPanel(new BorderLayout());

    m_warningLabel.setEditable(false);
    m_warningLabel.setForeground(Color.RED);
    m_warningLabel.setBorder(new EmptyBorder(0, 5, 0, 0));

    leftPanel.add(cancelButton, BorderLayout.WEST);
    leftPanel.add(m_warningLabel);

    final JPanel buttonsPanel = new JPanel(new GridLayout(0, 2));

    buttonsPanel.add(m_skipButton);
    buttonsPanel.add(m_nextButton);

    bottomPanel.add(leftPanel, BorderLayout.WEST);
    bottomPanel.add(buttonsPanel, BorderLayout.EAST);

    add(bottomPanel, BorderLayout.SOUTH);

    setSize(550, 300);

    GuiHelper.centerOnScreen(this);

    // Shift the dialog away from dead center so it does not cover the main window.
    setLocation(getLocation().x + 200, getLocation().y - 200);

    setAlwaysOnTop(true);

    // Closing is handled by quit() via the window listener, not by the default close op.
    setDefaultCloseOperation(DO_NOTHING_ON_CLOSE);

    addWindowListener(new InternalWindowListener());
  }

  /**
   * Returns the globally valid instance of the tutorial dialog.
   *
   * @return The globally valid instance of the tutorial dialog.
   */
  public static CTutorialDialog instance() {
    return m_instance;
  }

  /**
   * Closes the dialog after asking the user if he wants to cancel the current tutorial.
   * On confirmation the current tutorial is detached and the dialog's state is reset
   * before hiding it.
   */
  private void quit() {
    if (JOptionPane.YES_OPTION == CMessageBox.showYesNoCancelQuestion(this,
        "Do you really want to quit the current tutorial?")) {
      m_currentTutorial.removeListener(m_listener);
      m_currentTutorial = null;

      m_warningLabel.setText("");

      updateNextButton();

      m_skipButton.setEnabled(true);

      m_descriptionField.setText("");

      setVisible(false);
    }
  }

  /**
   * Updates the Next button depending on the state of the current tutorial.
   * Enabled only while a tutorial is active, not on its last step, and the current
   * step allows manual advancing.
   */
  private void updateNextButton() {
    m_nextButton.setEnabled((m_currentTutorial != null)
        && (m_currentTutorial.getStepCounter() != m_currentTutorial.getStepCount())
        && m_currentTutorial.getCurrentStep().canNext());
  }

  /**
   * Returns the currently active tutorial.
   *
   * @return The currently active tutorial or null if no tutorial is active.
   */
  public CTutorial getCurrentTutorial() {
    return m_currentTutorial;
  }

  /**
   * Starts a given tutorial. The dialog registers itself as a listener; the rest of the
   * UI update happens in {@link InternalTutorialListener#started(CTutorial)}.
   *
   * @param tutorial The tutorial to start.
   */
  public void start(final CTutorial tutorial) {
    Preconditions.checkNotNull(tutorial, "IE01296: Tutorial argument can not be null");

    tutorial.addListener(m_listener);

    tutorial.start();
  }

  /**
   * Shows a warning that a wrong action was executed by the user.
   *
   * @param actionId Identifier of the wrong action.
   */
  public void wrongAction(final long actionId) {
    m_warningLabel.setText(String.format(WRONG_ACTION_STRING, actionId));

    toFront();
  }

  /**
   * Listener that updates the dialog on changes in the current tutorial.
   */
  private class InternalTutorialListener implements ITutorialListener {
    @Override
    public void changedStep(final CTutorial tutorial) {
      m_warningLabel.setText("");

      updateNextButton();

      // Step counter is 0-based internally, displayed 1-based.
      m_topicLabel.setText(String.format(TOPIC_STRING, tutorial.getName(),
          tutorial.getStepCounter() + 1, tutorial.getStepCount()));

      m_descriptionField.setText(tutorial.getCurrentStep().getDescription());
      m_descriptionField.setCaretPosition(0);

      toFront();
    }

    @Override
    public void finished(final CTutorial tutorial) {
      m_warningLabel.setText("");

      updateNextButton();

      m_skipButton.setEnabled(false);

      m_descriptionField.setText("Tutorial finished");

      toFront();
    }

    @Override
    public void started(final CTutorial tutorial) {
      m_warningLabel.setText("");

      m_currentTutorial = tutorial;

      m_topicLabel.setText(String.format(TOPIC_STRING, tutorial.getName(),
          tutorial.getStepCounter() + 1, tutorial.getStepCount()));

      updateNextButton();

      if (!isVisible()) {
        setVisible(true);
      }

      m_descriptionField.setText(tutorial.getCurrentStep().getDescription());

      toFront();
    }
  }

  /**
   * Window listener for modified window closing behaviour.
   */
  private class InternalWindowListener extends WindowAdapter {
    @Override
    public void windowClosing(final WindowEvent event) {
      quit();
    }
  }

  /**
   * Action controller of the Next button.
   */
  private class NextAction extends AbstractAction {
    /**
     * Used for serialization.
     */
    private static final long serialVersionUID = 7665614446808231684L;

    /**
     * Creates a new action controller for the Next button.
     */
    private NextAction() {
      super("Next", new ImageIcon(CMain.class.getResource("data/arrow_right.png")));
    }

    @Override
    public void actionPerformed(final ActionEvent event) {
      m_currentTutorial.next();
    }
  }

  /**
   * Action controller for the Quit button.
   */
  private class QuitAction extends AbstractAction {
    /**
     * Used for serialization.
     */
    private static final long serialVersionUID = 5896923932054029970L;

    /**
     * Creates a new action controller for the Quit button.
     */
    private QuitAction() {
      super("Quit", new ImageIcon(CMain.class.getResource("data/cancel.png")));
    }

    @Override
    public void actionPerformed(final ActionEvent event) {
      quit();
    }
  }

  /**
   * Action controller for the Skip button.
   * Behaves identically to NextAction but stays enabled even when the current step
   * would not allow "Next".
   */
  private class SkipAction extends AbstractAction {
    /**
     * Used for serialization.
     */
    private static final long serialVersionUID = -623733076208212254L;

    /**
     * Creates a new action controller for the Next button.
     */
    private SkipAction() {
      super("Skip Step", new ImageIcon(CMain.class.getResource("data/arrow_right.png")));
    }

    @Override
    public void actionPerformed(final ActionEvent event) {
      m_currentTutorial.next();
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the  "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * $Id: ElemValueOf.java 468643 2006-10-28 06:56:03Z minchau $
 */
package org.apache.xalan.templates;

import javax.xml.transform.TransformerException;

import org.apache.xalan.res.XSLTErrorResources;
import org.apache.xalan.transformer.TransformerImpl;
import org.apache.xml.dtm.DTM;
import org.apache.xml.serializer.SerializationHandler;
import org.apache.xpath.Expression;
import org.apache.xpath.XPath;
import org.apache.xpath.XPathContext;
import org.apache.xpath.objects.XObject;
import org.xml.sax.SAXException;

/**
 * Implement xsl:value-of.
 * <pre>
 * <!ELEMENT xsl:value-of EMPTY>
 * <!ATTLIST xsl:value-of
 *   select %expr; #REQUIRED
 *   disable-output-escaping (yes|no) "no"
 * >
 * </pre>
 * @see <a href="http://www.w3.org/TR/xslt#value-of">value-of in XSLT Specification</a>
 * @xsl.usage advanced
 */
public class ElemValueOf extends ElemTemplateElement
{
  static final long serialVersionUID = 3490728458007586786L;

  /**
   * The select expression to be executed.
   * @serial
   */
  private XPath m_selectExpression = null;

  /**
   * True if the pattern is a simple ".".
   * NOTE(review): set in setSelect() but not read anywhere in the code visible here —
   * presumably a cached fast-path flag consumed elsewhere; confirm before removing.
   * @serial
   */
  private boolean m_isDot = false;

  /**
   * Set the "select" attribute.
   * The required select attribute is an expression; this expression
   * is evaluated and the resulting object is converted to a
   * string as if by a call to the string function.
   *
   * @param v The value to set for the "select" attribute.
   */
  public void setSelect(XPath v)
  {
    if (null != v)
    {
      String s = v.getPatternString();

      // Cache whether the expression is exactly "." (the context node).
      m_isDot = (null != s) && s.equals(".");
    }

    m_selectExpression = v;
  }

  /**
   * Get the "select" attribute.
   * The required select attribute is an expression; this expression
   * is evaluated and the resulting object is converted to a
   * string as if by a call to the string function.
   *
   * @return The value of the "select" attribute.
   */
  public XPath getSelect()
  {
    return m_selectExpression;
  }

  /**
   * Tells if this element should disable escaping.
   * @serial
   */
  private boolean m_disableOutputEscaping = false;

  /**
   * Set the "disable-output-escaping" attribute.
   * Normally, the xml output method escapes & and < (and
   * possibly other characters) when outputting text nodes.
   * This ensures that the output is well-formed XML. However,
   * it is sometimes convenient to be able to produce output
   * that is almost, but not quite well-formed XML; for
   * example, the output may include ill-formed sections
   * which are intended to be transformed into well-formed
   * XML by a subsequent non-XML aware process. For this reason,
   * XSLT provides a mechanism for disabling output escaping.
   * An xsl:value-of or xsl:text element may have a
   * disable-output-escaping attribute; the allowed values
   * are yes or no; the default is no; if the value is yes,
   * then a text node generated by instantiating the xsl:value-of
   * or xsl:text element should be output without any escaping.
   * @see <a href="http://www.w3.org/TR/xslt#disable-output-escaping">disable-output-escaping in XSLT Specification</a>
   *
   * @param v The value to set for the "disable-output-escaping" attribute.
   */
  public void setDisableOutputEscaping(boolean v)
  {
    m_disableOutputEscaping = v;
  }

  /**
   * Get the "disable-output-escaping" attribute.
   * Normally, the xml output method escapes & and < (and
   * possibly other characters) when outputting text nodes.
   * This ensures that the output is well-formed XML. However,
   * it is sometimes convenient to be able to produce output
   * that is almost, but not quite well-formed XML; for
   * example, the output may include ill-formed sections
   * which are intended to be transformed into well-formed
   * XML by a subsequent non-XML aware process. For this reason,
   * XSLT provides a mechanism for disabling output escaping.
   * An xsl:value-of or xsl:text element may have a
   * disable-output-escaping attribute; the allowed values
   * are yes or no; the default is no; if the value is yes,
   * then a text node generated by instantiating the xsl:value-of
   * or xsl:text element should be output without any escaping.
   * @see <a href="http://www.w3.org/TR/xslt#disable-output-escaping">disable-output-escaping in XSLT Specification</a>
   *
   * @return The value of the "disable-output-escaping" attribute.
   */
  public boolean getDisableOutputEscaping()
  {
    return m_disableOutputEscaping;
  }

  /**
   * Get an integer representation of the element type.
   *
   * @return An integer representation of the element, defined in the
   *     Constants class.
   * @see org.apache.xalan.templates.Constants
   */
  public int getXSLToken()
  {
    return Constants.ELEMNAME_VALUEOF;
  }

  /**
   * This function is called after everything else has been
   * recomposed, and allows the template to set remaining
   * values that may be based on some other property that
   * depends on recomposition.
   *
   * Fixes up variable references in the select expression against the
   * stylesheet's composed variable table.
   *
   * @param sroot The composed stylesheet root.
   *
   * @throws TransformerException
   */
  public void compose(StylesheetRoot sroot) throws TransformerException
  {
    super.compose(sroot);

    java.util.Vector vnames = sroot.getComposeState().getVariableNames();

    if (null != m_selectExpression)
      m_selectExpression.fixupVariables(
        vnames, sroot.getComposeState().getGlobalsSize());
  }

  /**
   * Return the node name.
   *
   * @return The node name
   */
  public String getNodeName()
  {
    return Constants.ELEMNAME_VALUEOF_STRING;
  }

  /**
   * Execute the string expression and copy the text to the
   * result tree.
   * The required select attribute is an expression; this expression
   * is evaluated and the resulting object is converted to a string
   * as if by a call to the string function. The string specifies
   * the string-value of the created text node. If the string is
   * empty, no text node will be created. The created text node will
   * be merged with any adjacent text nodes.
   * @see <a href="http://www.w3.org/TR/xslt#value-of">value-of in XSLT Specification</a>
   *
   * @param transformer non-null reference to the the current transform-time state.
   *
   * @throws TransformerException
   */
  public void execute(TransformerImpl transformer) throws TransformerException
  {

    XPathContext xctxt = transformer.getXPathContext();
    SerializationHandler rth = transformer.getResultTreeHandler();

    try
    {

      // Optimize for "."
      // Push namespace context and current node before evaluating; both pushes
      // are matched by pops in the inner finally block below — keep the order.
      xctxt.pushNamespaceContext(this);

      int current = xctxt.getCurrentNode();

      xctxt.pushCurrentNodeAndExpression(current, current);

      // Escaping is toggled via paired processing instructions around the output,
      // so the serializer emits the expression's characters verbatim when requested.
      if (m_disableOutputEscaping)
        rth.processingInstruction(
          javax.xml.transform.Result.PI_DISABLE_OUTPUT_ESCAPING, "");

      try
      {
        Expression expr = m_selectExpression.getExpression();

        expr.executeCharsToContentHandler(xctxt, rth);
      }
      finally
      {
        // Re-enable escaping and restore the XPath context even if evaluation throws.
        if (m_disableOutputEscaping)
          rth.processingInstruction(
            javax.xml.transform.Result.PI_ENABLE_OUTPUT_ESCAPING, "");

        xctxt.popNamespaceContext();
        xctxt.popCurrentNodeAndExpression();
      }
    }
    catch (SAXException se)
    {
      throw new TransformerException(se);
    }
    catch (RuntimeException re)
    {
      // Attach this element's source location so the error points at the stylesheet.
      TransformerException te = new TransformerException(re);

      te.setLocator(this);

      throw te;
    }
  }

  /**
   * Add a child to the child list.
* * @param newChild Child to add to children list * * @return Child just added to children list * * @throws DOMException */ public ElemTemplateElement appendChild(ElemTemplateElement newChild) { error(XSLTErrorResources.ER_CANNOT_ADD, new Object[]{ newChild.getNodeName(), this.getNodeName() }); //"Can not add " +((ElemTemplateElement)newChild).m_elemName + //" to " + this.m_elemName); return null; } /** * Call the children visitors. * @param visitor The visitor whose appropriate method will be called. */ protected void callChildVisitors(XSLTVisitor visitor, boolean callAttrs) { if(callAttrs) m_selectExpression.getExpression().callVisitors(m_selectExpression, visitor); super.callChildVisitors(visitor, callAttrs); } }
/*
 * Copyright 2015 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.core.reteoo;

import java.util.List;

import org.drools.core.RuleBaseConfiguration;
import org.drools.core.base.ClassObjectType;
import org.drools.core.common.BaseNode;
import org.drools.core.common.ReteEvaluator;
import org.drools.core.common.RuleBasePartitionId;
import org.drools.core.common.UpdateContext;
import org.drools.core.definitions.rule.impl.RuleImpl;
import org.drools.core.reteoo.builder.BuildContext;
import org.drools.core.rule.Pattern;
import org.drools.core.rule.TypeDeclaration;
import org.drools.core.spi.ObjectType;
import org.drools.core.util.bitmask.AllSetBitMask;
import org.drools.core.util.bitmask.BitMask;
import org.drools.core.util.bitmask.EmptyBitMask;

/**
 * Base class for terminal nodes of the left-tuple network. A terminal node
 * sits at the end of a rule's node path; it tracks the path's nodes, the
 * path's memory specification, and the property-reactivity bit masks used
 * to filter modify propagations.
 */
public abstract class AbstractTerminalNode extends BaseNode implements TerminalNode {

    /** The left input this terminal node is attached to. */
    private LeftTupleSource tupleSource;

    // Property-reactivity masks. declaredMask is what the pattern itself
    // watches; inferredMask additionally folds in constraints from the
    // alpha network; negativeMask lists explicitly un-watched properties.
    private BitMask declaredMask = EmptyBitMask.get();
    private BitMask inferredMask = EmptyBitMask.get();
    private BitMask negativeMask = EmptyBitMask.get();

    // Lazily built list of all nodes on this terminal's path (see getPathNodes()).
    private LeftTupleNode[] pathNodes;

    private transient PathEndNode[] pathEndNodes;

    // Lazily computed path memory layout (see getPathMemSpec()).
    private PathMemSpec pathMemSpec;

    private int objectCount;

    public AbstractTerminalNode() { }

    public AbstractTerminalNode(int id, RuleBasePartitionId partitionId, boolean partitionsEnabled,
                                LeftTupleSource source, final BuildContext context) {
        super(id, partitionId, partitionsEnabled);
        this.tupleSource = source;
        this.setObjectCount(getLeftTupleSource().getObjectCount()); // 'terminal' nodes do not increase the count
        context.addPathEndNode(this);
        initMemoryId( context );
    }

    /**
     * Returns the path memory specification, computing and caching it on
     * first access.
     */
    @Override
    public PathMemSpec getPathMemSpec() {
        if (pathMemSpec == null) {
            pathMemSpec = calculatePathMemSpec( null );
        }
        return pathMemSpec;
    }

    /**
     * Invalidates (or recomputes, excluding the node being removed) the
     * cached path memory spec when the network changes.
     */
    @Override
    public void resetPathMemSpec(TerminalNode removingTN) {
        pathMemSpec = removingTN == null ? null : calculatePathMemSpec( null, removingTN );
    }

    @Override
    public void setPathEndNodes(PathEndNode[] pathEndNodes) {
        this.pathEndNodes = pathEndNodes;
    }

    @Override
    public PathEndNode[] getPathEndNodes() {
        return pathEndNodes;
    }

    /** A terminal node sits one position past its left input on the path. */
    public int getPathIndex() {
        return tupleSource.getPathIndex() + 1;
    }

    public int getObjectCount() {
        return objectCount;
    }

    public void setObjectCount(int count) {
        objectCount = count;
    }

    /**
     * Computes the declared property-reactivity mask for this terminal.
     * Falls back to the all-set mask (accept every modification) whenever
     * property-specific filtering cannot apply.
     */
    protected void initDeclaredMask(BuildContext context) {
        if ( !(unwrapTupleSource() instanceof LeftInputAdapterNode)) {
            // RTN's not after LIANode are not relevant for property specific, so don't block anything.
            setDeclaredMask( AllSetBitMask.get() );
            return;
        }

        Pattern pattern = context.getLastBuiltPatterns()[0];
        ObjectType objectType = pattern.getObjectType();

        if ( !(objectType instanceof ClassObjectType) ) {
            // InitialFact has no type declaration and cannot be property specific
            // Only ClassObjectType can use property specific
            setDeclaredMask( AllSetBitMask.get() );
            return;
        }

        Class objectClass = ((ClassObjectType)objectType).getClassType();
        TypeDeclaration typeDeclaration = context.getRuleBase().getTypeDeclaration(objectClass);
        if ( typeDeclaration == null || !typeDeclaration.isPropertyReactive() ) {
            // if property specific is not on, then accept all modification propagations
            setDeclaredMask( AllSetBitMask.get() );
        } else {
            List<String> accessibleProperties = pattern.getAccessibleProperties( context.getRuleBase() );
            setDeclaredMask( pattern.getPositiveWatchMask(accessibleProperties) );
            setNegativeMask( pattern.getNegativeWatchMask(accessibleProperties) );
        }
    }

    /**
     * Derives the inferred mask from the declared mask, folding in the
     * preceding alpha node's constraints when present and then clearing
     * negatively-watched bits.
     */
    public void initInferredMask() {
        LeftTupleSource leftTupleSource = unwrapTupleSource();
        if ( leftTupleSource instanceof LeftInputAdapterNode && ((LeftInputAdapterNode)leftTupleSource).getParentObjectSource() instanceof AlphaNode ) {
            AlphaNode alphaNode = (AlphaNode) ((LeftInputAdapterNode)leftTupleSource).getParentObjectSource();
            setInferredMask( alphaNode.updateMask( getDeclaredMask() ) );
        } else {
            setInferredMask( getDeclaredMask() );
        }

        setInferredMask( getInferredMask().resetAll( getNegativeMask() ) );
        // NOTE(review): when everything is negatively watched but the declared
        // mask is specific, the declared bits are restored — presumably so an
        // all-set negative mask cannot silence explicitly watched properties.
        if ( getNegativeMask().isAllSet() && !getDeclaredMask().isAllSet() ) {
            setInferredMask( getInferredMask().setAll( getDeclaredMask() ) );
        }
    }

    /** Skips over a FromNode wrapper, if any, to reach the real left input. */
    public LeftTupleSource unwrapTupleSource() {
        return tupleSource instanceof FromNode ? tupleSource.getLeftTupleSource() : tupleSource;
    }

    public abstract RuleImpl getRule();

    public PathMemory createMemory(RuleBaseConfiguration config, ReteEvaluator reteEvaluator) {
        return initPathMemory( this, new PathMemory(this, reteEvaluator) );
    }

    /**
     * Initialises a PathMemory from the end node's path memory spec
     * (all-linked test mask and segment count).
     */
    public static PathMemory initPathMemory( PathEndNode pathEndNode, PathMemory pmem ) {
        PathMemSpec pathMemSpec = pathEndNode.getPathMemSpec();
        pmem.setAllLinkedMaskTest(pathMemSpec.allLinkedTestMask );
        pmem.setSegmentMemories( new SegmentMemory[pathMemSpec.smemCount] );
        return pmem;
    }

    /** Creates a peer tuple chained onto the original for subnetwork sharing. */
    public LeftTuple createPeer(LeftTuple original) {
        RuleTerminalNodeLeftTuple peer = new RuleTerminalNodeLeftTuple();
        peer.initPeer( (BaseLeftTuple) original, this );
        original.setPeer( peer );
        return peer;
    }

    protected boolean doRemove(final RuleRemovalContext context,
                               final ReteooBuilder builder) {
        getLeftTupleSource().removeTupleSink(this);
        this.tupleSource = null;
        return true;
    }

    public LeftTupleSource getLeftTupleSource() {
        return this.tupleSource;
    }

    public BitMask getDeclaredMask() {
        return declaredMask;
    }

    public BitMask getInferredMask() {
        return inferredMask;
    }

    public BitMask getLeftInferredMask() {
        return inferredMask;
    }

    public void setDeclaredMask(BitMask mask) {
        declaredMask = mask;
    }

    public void setInferredMask(BitMask mask) {
        inferredMask = mask;
    }

    public BitMask getNegativeMask() {
        return negativeMask;
    }

    public void setNegativeMask(BitMask mask) {
        negativeMask = mask;
    }

    public void networkUpdated(UpdateContext updateContext) {
        getLeftTupleSource().networkUpdated(updateContext);
    }

    public boolean isInUse() {
        return false;
    }

    public boolean isLeftTupleMemoryEnabled() {
        return false;
    }

    public void setLeftTupleMemoryEnabled(boolean tupleMemoryEnabled) {
        // do nothing, this can only ever be false
    }

    /**
     * Walks from the end node back to the root, placing each node at its
     * path index; slot 0 holds the path's first node.
     */
    public static LeftTupleNode[] getPathNodes(PathEndNode endNode) {
        LeftTupleNode[] pathNodes = new LeftTupleNode[endNode.getPathIndex() + 1];
        for (LeftTupleNode node = endNode; node != null; node = node.getLeftTupleSource()) {
            pathNodes[node.getPathIndex()] = node;
        }
        return pathNodes;
    }

    public LeftTupleNode[] getPathNodes() {
        if (pathNodes == null) {
            pathNodes = getPathNodes( this );
        }
        return pathNodes;
    }

    /** True if a node with the same id lies on this terminal's path. */
    public final boolean hasPathNode(LeftTupleNode node) {
        for (LeftTupleNode pathNode : getPathNodes()) {
            if (node.getId() == pathNode.getId()) {
                return true;
            }
        }
        return false;
    }

    /** True if the given node lies on the path of any of this node's path ends. */
    public final boolean isTerminalNodeOf(LeftTupleNode node) {
        for (PathEndNode pathEndNode : getPathEndNodes()) {
            if (pathEndNode.hasPathNode( node )) {
                return true;
            }
        }
        return false;
    }

    /** Terminal nodes have no sinks; always the shared empty propagator. */
    public LeftTupleSinkPropagator getSinkPropagator() {
        return EmptyLeftTupleSinkAdapter.getInstance();
    }

    @Override
    public final void setPartitionIdWithSinks( RuleBasePartitionId partitionId ) {
        this.partitionId = partitionId;
    }

    @Override
    public ObjectTypeNode getObjectTypeNode() {
        return getLeftTupleSource().getObjectTypeNode();
    }
}
package seedu.jimi.ui;

import java.time.LocalDateTime;
import java.time.format.TextStyle;
import java.util.ArrayList;
import java.util.Locale;
import java.util.logging.Logger;

import com.google.common.eventbus.Subscribe;

import javafx.application.Platform;
import javafx.collections.ObservableList;
import javafx.fxml.FXML;
import javafx.scene.Node;
import javafx.scene.control.Accordion;
import javafx.scene.control.ListCell;
import javafx.scene.control.ListView;
import javafx.scene.control.SplitPane;
import javafx.scene.control.TitledPane;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.VBox;
import javafx.stage.Stage;
import seedu.jimi.commons.core.LogsCenter;
import seedu.jimi.commons.events.model.TaskBookChangedEvent;
import seedu.jimi.commons.events.ui.ShowTaskPanelSectionEvent;
import seedu.jimi.commons.events.ui.TaskPanelSelectionChangedEvent;
import seedu.jimi.model.task.ReadOnlyTask;

/**
 * Panel containing the list of tasks.
 */
public class TaskListPanel extends UiPart {
    private final Logger logger = LogsCenter.getLogger(TaskListPanel.class);
    private static final String FXML = "TaskListPanel.fxml";
    private VBox panel;
    private AnchorPane placeHolderPane;

    // Backing lists, one per accordion section; daysTaskList holds seven
    // lists, index 0 = today through index 6 = six days from now.
    private ObservableList<ReadOnlyTask> overdueTaskList;
    private ObservableList<ReadOnlyTask> floatingTaskList;
    private ObservableList<ReadOnlyTask> completedTaskList;
    private ObservableList<ReadOnlyTask> incompleteTaskList;
    private ArrayList<ObservableList<ReadOnlyTask>> daysTaskList;

    //main accordion view
    @FXML
    Accordion tasksAccordion;

    //all list views
    @FXML
    private ListView<ReadOnlyTask> overdueTaskListView;
    @FXML
    private ListView<ReadOnlyTask> taskListView;
    @FXML
    private ListView<ReadOnlyTask> completedTaskListView;
    @FXML
    private ListView<ReadOnlyTask> incompleteTaskListView;
    @FXML
    private ListView<ReadOnlyTask> taskListViewDay1;
    @FXML
    private ListView<ReadOnlyTask> taskListViewDay2;
    @FXML
    private ListView<ReadOnlyTask> taskListViewDay3;
    @FXML
    private ListView<ReadOnlyTask> taskListViewDay4;
    @FXML
    private ListView<ReadOnlyTask> taskListViewDay5;
    @FXML
    private ListView<ReadOnlyTask> taskListViewDay6;
    @FXML
    private ListView<ReadOnlyTask> taskListViewDay7;

    //incomplete/complete title labels
    @FXML
    private TitledPane titleCompletedTasks;
    @FXML
    private TitledPane titleIncompleteTasks;

    //taskListPanel title labels
    @FXML
    private TitledPane titleOverdueTasks;
    @FXML
    private TitledPane titleFloatingTasks;
    @FXML
    private TitledPane titleTaskDay1;
    @FXML
    private TitledPane titleTaskDay2;
    @FXML
    private TitledPane titleTaskDay3;
    @FXML
    private TitledPane titleTaskDay4;
    @FXML
    private TitledPane titleTaskDay5;
    @FXML
    private TitledPane titleTaskDay6;
    @FXML
    private TitledPane titleTaskDay7;

    public TaskListPanel() {
        super();
    }

    @Override
    public void setNode(Node node) {
        panel = (VBox) node;
    }

    @Override
    public String getFxmlPath() {
        return FXML;
    }

    @Override
    public void setPlaceholder(AnchorPane pane) {
        this.placeHolderPane = pane;
    }

    /**
     * Loads the panel's FXML, wires it to the given task lists and attaches
     * it to the placeholder pane.
     */
    public static TaskListPanel load(Stage primaryStage, AnchorPane taskListPlaceholder,
            ObservableList<ReadOnlyTask> overdueTaskList,
            ObservableList<ReadOnlyTask> floatingTaskList,
            ObservableList<ReadOnlyTask> incompleteTaskList,
            ObservableList<ReadOnlyTask> completedTaskList,
            ArrayList<ObservableList<ReadOnlyTask>> daysList) {
        TaskListPanel taskListPanel =
                UiPartLoader.loadUiPart(primaryStage, taskListPlaceholder, new TaskListPanel());
        taskListPanel.configure(overdueTaskList, floatingTaskList, incompleteTaskList, completedTaskList, daysList);
        return taskListPanel;
    }

    private void configure(ObservableList<ReadOnlyTask> overdueTaskList,
            ObservableList<ReadOnlyTask> floatingTaskList,
            ObservableList<ReadOnlyTask> incompleteTaskList,
            ObservableList<ReadOnlyTask> completedTaskList,
            ArrayList<ObservableList<ReadOnlyTask>> daysList) {
        instantiateLists(overdueTaskList, floatingTaskList, incompleteTaskList, completedTaskList, daysList);
        setConnections();
        showFloatingTasks(); // floating tasks pane is expanded by default
        updateAllTitles();
        addToPlaceholder();
        registerAsAnEventHandler(this); // to update labels
    }

    // Stores references to the externally-owned observable lists.
    private void instantiateLists(ObservableList<ReadOnlyTask> overdueTaskList,
            ObservableList<ReadOnlyTask> floatingTaskList,
            ObservableList<ReadOnlyTask> incompleteTaskList,
            ObservableList<ReadOnlyTask> completedTaskList,
            ArrayList<ObservableList<ReadOnlyTask>> daysList) {
        this.overdueTaskList = overdueTaskList;
        this.floatingTaskList = floatingTaskList;
        this.incompleteTaskList = incompleteTaskList;
        this.completedTaskList = completedTaskList;
        this.daysTaskList = daysList;
    }

    private void setConnections() {
        setupListViews();
        setEventHandlerForSelectionChangeEvent();
    }

    // Binds each list view to its backing list and installs the cell factory.
    private void setupListViews() {
        this.overdueTaskListView.setItems(this.overdueTaskList);
        this.overdueTaskListView.setCellFactory(newListView -> new TaskListViewCell());
        this.taskListView.setItems(this.floatingTaskList);
        this.taskListView.setCellFactory(newListView -> new TaskListViewCell());
        this.completedTaskListView.setItems(this.completedTaskList);
        this.completedTaskListView.setCellFactory(newListView -> new TaskListViewCell());
        this.incompleteTaskListView.setItems(this.incompleteTaskList);
        this.incompleteTaskListView.setCellFactory(newListView -> new TaskListViewCell());
        setupDaysListViews(daysTaskList, taskListViewDay1, taskListViewDay2, taskListViewDay3, taskListViewDay4,
                taskListViewDay5, taskListViewDay6, taskListViewDay7);
    }

    /**
     * Checks the listview's respective DayOfWeek and then assigns the list matched to it.
     * The i-th list view receives the i-th day list.
     * @param daysTaskList
     * @param taskListViewDays
     */
    @SafeVarargs
    private final void setupDaysListViews(ArrayList<ObservableList<ReadOnlyTask>> daysTaskList,
            ListView<ReadOnlyTask>... taskListViewDays) {
        int i = 0;
        for (ListView<ReadOnlyTask> lv : taskListViewDays) {
            lv.setItems(daysTaskList.get(i++));
            lv.setCellFactory(newListView -> new TaskListViewCell());
        }
    }

    private void addToPlaceholder() {
        SplitPane.setResizableWithParent(placeHolderPane, true);
        placeHolderPane.getChildren().add(panel);
    }

    // NOTE(review): only three list views get selection handlers — the
    // overdue and per-day list views are not registered; confirm intended.
    private void setEventHandlerForSelectionChangeEvent() {
        setEventHandlerForListView(taskListView);
        setEventHandlerForListView(completedTaskListView);
        setEventHandlerForListView(incompleteTaskListView);
    }

    // Raises a TaskPanelSelectionChangedEvent whenever a non-null item is selected.
    private void setEventHandlerForListView(ListView<ReadOnlyTask> listView) {
        listView.getSelectionModel().selectedItemProperty().addListener((observable, oldValue, newValue) -> {
            if (newValue != null) {
                logger.fine("Selection in task list panel changed to : '" + newValue + "'");
                raise(new TaskPanelSelectionChangedEvent(newValue));
            }
        });
    }

    // Scrolls the floating-task list view; runs on the JavaFX thread.
    public void scrollTo(int index) {
        Platform.runLater(() -> {
            taskListView.scrollTo(index);
            taskListView.getSelectionModel().clearAndSelect(index);
        });
    }

    private void updateOverdueTasksTitle() {
        this.titleOverdueTasks.setText("Overdue Tasks (" + this.overdueTaskList.size() + ")");
    }

    private void updateFloatingTasksTitle() {
        this.titleFloatingTasks.setText("Floating Tasks (" + this.floatingTaskList.size() + ")");
    }

    private void updateIncompleteTasksTitle() {
        this.titleIncompleteTasks.setText("Incomplete Tasks (" + this.incompleteTaskList.size() + ")");
    }

    private void updateCompleteTasksTitle() {
        this.titleCompletedTasks.setText("Completed Tasks (" + this.completedTaskList.size() + ")");
    }

    /**
     * Titles the seven day panes: "Today", "Tomorrow", then weekday names.
     * Note that i is incremented inside the get(i++) calls, so each branch
     * both reads the current day's list and advances to the next pane.
     */
    private void updateDaysTitles(TitledPane... panes) {
        int i = 0;
        for (TitledPane t : panes) {
            if (i == 0) {
                t.setText("Today (" + this.daysTaskList.get(i++).size() + ")");
            } else if (i == 1) {
                t.setText("Tomorrow (" + this.daysTaskList.get(i++).size() + ")");
            } else {
                // Weekday name of (today + i), e.g. "Wednesday".
                String dayOfWeek =
                        LocalDateTime.now().getDayOfWeek().plus(i).getDisplayName(TextStyle.FULL, Locale.ENGLISH);
                t.setText(dayOfWeek + " (" + this.daysTaskList.get(i++).size() + ")");
            }
        }
    }

    private void updateAllTitles() {
        updateOverdueTasksTitle();
        updateFloatingTasksTitle();
        updateCompleteTasksTitle();
        updateIncompleteTasksTitle();
        updateDaysTitles(titleTaskDay1, titleTaskDay2, titleTaskDay3, titleTaskDay4, titleTaskDay5, titleTaskDay6,
                titleTaskDay7);
    }

    //========== Event handlers ================================================================================

    /**
     * Updates all the titles when taskBook is changed. Updates remaining tasks for each title.
     * @param tbce
     */
    @Subscribe
    public void handleTaskBookChangedEvent(TaskBookChangedEvent tbce) {
        updateAllTitles();
        logger.info(LogsCenter.getEventHandlingLogMessage(tbce,
                "Setting floatingTaskListSize label to : " + "" + tbce.data.getTaskList().size()));
    }

    /**
     * Expands the relevant task panels according to user input.
     */
    @Subscribe
    public void handleShowTaskPanelSelectionEvent(ShowTaskPanelSectionEvent event) {
        switch (event.sectionToDisplay) {
        case "overdue":
            showOverdueTasks();
            break;
        case "floating":
            showFloatingTasks();
            break;
        case "incomplete":
            showIncompleteTasks();
            break;
        case "completed":
            showCompletedTasks();
            break;
        case "today":
            showDay1();
            break;
        case "tomorrow":
            showDay2();
            break;
        case "monday":
        case "tuesday":
        case "wednesday":
        case "thursday":
        case "friday":
        case "saturday":
        case "sunday":
            showRequiredDay(event.sectionToDisplay.toLowerCase());
            // NOTE(review): no break here — control falls through into
            // default (harmless today, but likely unintended; confirm).
        default:
            break;
        }
    }

    /**
     * Finds the title to be displayed and calls its respective method to expand it.
     * Matches the weekday name against each pane's current title text.
     * @param sectionToDisplay
     */
    private void showRequiredDay(String sectionToDisplay) {
        if (titleTaskDay1.getText().toLowerCase().contains(sectionToDisplay)) {
            showDay1();
        } else if (titleTaskDay2.getText().toLowerCase().contains(sectionToDisplay)) {
            showDay2();
        } else if (titleTaskDay3.getText().toLowerCase().contains(sectionToDisplay)) {
            showDay3();
        } else if (titleTaskDay4.getText().toLowerCase().contains(sectionToDisplay)) {
            showDay4();
        } else if (titleTaskDay5.getText().toLowerCase().contains(sectionToDisplay)) {
            showDay5();
        } else if (titleTaskDay6.getText().toLowerCase().contains(sectionToDisplay)) {
            showDay6();
        } else if (titleTaskDay7.getText().toLowerCase().contains(sectionToDisplay)) {
            showDay7();
        }
    }

    //========== Method calls to expand relevant listviews in panel. ===========================================

    public void showOverdueTasks() {
        tasksAccordion.setExpandedPane(titleOverdueTasks);
    }

    public void showFloatingTasks() {
        tasksAccordion.setExpandedPane(titleFloatingTasks);
    }

    public void showIncompleteTasks() {
        tasksAccordion.setExpandedPane(titleIncompleteTasks);
    }

    public void showCompletedTasks() {
        tasksAccordion.setExpandedPane(titleCompletedTasks);
    }

    //today
    public void showDay1() {
        tasksAccordion.setExpandedPane(titleTaskDay1);
    }

    //tomorrow
    public void showDay2() {
        tasksAccordion.setExpandedPane(titleTaskDay2);
    }

    public void showDay3() {
        tasksAccordion.setExpandedPane(titleTaskDay3);
    }

    public void showDay4() {
        tasksAccordion.setExpandedPane(titleTaskDay4);
    }

    public void showDay5() {
        tasksAccordion.setExpandedPane(titleTaskDay5);
    }

    public void showDay6() {
        tasksAccordion.setExpandedPane(titleTaskDay6);
    }

    public void showDay7() {
        tasksAccordion.setExpandedPane(titleTaskDay7);
    }

    //===========================================================================================================

    /**
     * List cell that renders a ReadOnlyTask as a TaskCard, using the cell's
     * list position (1-based) as the displayed index.
     */
    class TaskListViewCell extends ListCell<ReadOnlyTask> {

        public TaskListViewCell() {
        }

        @Override
        protected void updateItem(ReadOnlyTask task, boolean isEmpty) {
            super.updateItem(task, isEmpty);
            if (isEmpty || task == null) {
                // Recycled/empty cell: clear any previous content.
                setGraphic(null);
                setText(null);
            } else {
                setGraphic(TaskCard.load(task, getIndex() + 1).getLayout());
                this.setStyle("");
            }
        }
    }
}
package kernitus.plugin.Hotels.managers;

import com.sk89q.worldguard.bukkit.WorldGuardPlugin;
import com.sk89q.worldguard.domains.DefaultDomain;
import com.sk89q.worldguard.protection.flags.DefaultFlag;
import com.sk89q.worldguard.protection.flags.Flag;
import com.sk89q.worldguard.protection.flags.RegionGroup;
import com.sk89q.worldguard.protection.flags.RegionGroupFlag;
import com.sk89q.worldguard.protection.flags.StateFlag.State;
import com.sk89q.worldguard.protection.flags.registry.FlagRegistry;
import com.sk89q.worldguard.protection.flags.registry.SimpleFlagRegistry;
import com.sk89q.worldguard.protection.managers.RegionManager;
import com.sk89q.worldguard.protection.managers.storage.StorageException;
import com.sk89q.worldguard.protection.regions.ProtectedCuboidRegion;
import com.sk89q.worldguard.protection.regions.ProtectedPolygonalRegion;
import com.sk89q.worldguard.protection.regions.ProtectedRegion;
import kernitus.plugin.Hotels.handlers.HTConfigHandler;
import org.bukkit.*;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.entity.EntityType;
import org.bukkit.entity.Player;
import org.bukkit.plugin.Plugin;

import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Static helpers around the WorldGuard API: region lookup, owner/member
 * management, region lifecycle, and translation of the plugin's flags.yml
 * sections into WorldGuard region flags.
 */
public class HTWorldGuardManager {

    /**
     * Returns the WorldGuard plugin instance, or null if it is not loaded.
     * Callers of {@link #getRM(World)} implicitly assume it is present.
     */
    public static WorldGuardPlugin getWorldGuard(){
        Plugin p = Bukkit.getServer().getPluginManager().getPlugin("WorldGuard");
        if (p instanceof WorldGuardPlugin) return (WorldGuardPlugin) p;
        else return null;
    }

    /** Returns the region with the given id in the world, or null. */
    public static ProtectedRegion getRegion(World world, String string) {
        return getRM(world).getRegion(string);
    }

    /** Adds the player to the region's owners. */
    public static void addOwner(OfflinePlayer p, ProtectedRegion r){
        DefaultDomain owners = r.getOwners();
        owners.addPlayer(p.getUniqueId());
        r.setOwners(owners);
    }

    /** Merges the given domain into the region's owners. */
    public static void addOwners(DefaultDomain dd, ProtectedRegion r){
        DefaultDomain owners = r.getOwners();
        owners.addAll(dd);
        r.setOwners(owners);
    }

    /** Adds the player to the region's members. */
    public static void addMember(OfflinePlayer p, ProtectedRegion r){
        DefaultDomain members = r.getMembers();
        members.addPlayer(p.getUniqueId());
        r.setMembers(members);
    }

    /** Merges the given domain into the region's members. */
    public static void addMembers(DefaultDomain dd, ProtectedRegion r){
        DefaultDomain members = r.getMembers();
        members.addAll(dd);
        r.setMembers(members);
    }

    /** Adds every UUID in the list to the region's members. */
    public static void addMembers(List<UUID> ids, ProtectedRegion r){
        DefaultDomain members = r.getMembers();
        ids.forEach(members::addPlayer);
        // Fix: write the updated domain back, consistent with every other
        // mutator in this class (was missing, so the change relied on
        // getMembers() returning a live domain).
        r.setMembers(members);
    }

    /** Replaces the region's members with the single given UUID. */
    public static void setMember(UUID uuid, ProtectedRegion r){
        DefaultDomain member = new DefaultDomain();
        member.addPlayer(uuid);
        r.setMembers(member);
    }

    /** Replaces the region's members with the single given player. */
    public static void setMember(OfflinePlayer p, ProtectedRegion r){
        DefaultDomain members = new DefaultDomain();
        members.addPlayer(p.getUniqueId());
        r.setMembers(members);
    }

    /** Replaces the region's owners with the single given player. */
    public static void setOwner(OfflinePlayer p, ProtectedRegion r){
        DefaultDomain owners = new DefaultDomain();
        owners.addPlayer(p.getUniqueId());
        r.setOwners(owners);
    }

    /** Replaces the region's members with exactly the given UUIDs. */
    public static void setMembers(ArrayList<UUID> uuids, ProtectedRegion r){
        DefaultDomain members = new DefaultDomain();
        for(UUID uuid : uuids) members.addPlayer(uuid);
        r.setMembers(members);
    }

    /** Replaces the region's owners with exactly the given UUIDs. */
    public static void setOwners(ArrayList<UUID> uuids, ProtectedRegion r){
        DefaultDomain owners = new DefaultDomain();
        for(UUID uuid : uuids) owners.addPlayer(uuid);
        r.setOwners(owners);
    }

    /** Removes the player from the region's owners. */
    public static void removeOwner(OfflinePlayer p, ProtectedRegion r){
        DefaultDomain owners = r.getOwners();
        owners.removePlayer(p.getUniqueId());
        r.setOwners(owners);
    }

    /** Removes every player in the given domain from the region's owners. */
    public static void removeOwners(DefaultDomain dd, ProtectedRegion r){
        DefaultDomain owners = r.getOwners();
        owners.removeAll(dd);
        r.setOwners(owners);
    }

    /** Removes the UUID from the region's members. */
    public static void removeMember(UUID id, ProtectedRegion r){
        DefaultDomain members = r.getMembers();
        members.removePlayer(id);
        r.setMembers(members);
    }

    /** Removes every player in the given domain from the region's members. */
    public static void removeMembers(DefaultDomain dd, ProtectedRegion r){
        DefaultDomain members = r.getMembers();
        members.removeAll(dd);
        r.setMembers(members);
    }

    /** Removes every UUID in the list from the region's members. */
    public static void removeMembers(List<UUID> ids, ProtectedRegion r){
        DefaultDomain members = r.getMembers();
        DefaultDomain toRemove = new DefaultDomain();
        ids.forEach(toRemove::addPlayer);
        members.removeAll(toRemove);
        r.setMembers(members);
    }

    public static void addRegion(World w, ProtectedRegion r){
        getRM(w).addRegion(r);
    }

    public static void removeRegion(World w, String r){
        getRM(w).removeRegion(r);
    }

    public static void removeRegion(World w, ProtectedRegion r){
        getRM(w).removeRegion(r.getId());
    }

    /** Persists the world's region data, logging (not rethrowing) failures. */
    public static void saveRegions(World world){
        try {
            getRM(world).save();
        } catch (StorageException e) {
            e.printStackTrace();
        }
    }

    /**
     * Returns the world's RegionManager.
     * NOTE: throws NullPointerException if WorldGuard is not loaded
     * (getWorldGuard() returns null in that case).
     */
    public static RegionManager getRM(World world){
        return getWorldGuard().getRegionManager(world);
    }

    public static boolean hasRegion(World world, String regionName){
        return getRM(world).hasRegion(regionName);
    }

    /** Hotel regions are named "hotel-&lt;name&gt;". */
    public static ProtectedRegion getHotelRegion(World world, String name){
        return getRegion(world, "hotel-" + name);
    }

    /** Room regions are named "hotel-&lt;hotel&gt;-&lt;number&gt;". */
    public static ProtectedRegion getRoomRegion(World world, String hotelName, String num){
        return getRegion(world, "hotel-" + hotelName + "-" + num);
    }

    /**
     * Renames a region by creating a copy under the new id, transferring
     * owners/members/flags via copyFrom, and deleting the old region.
     * Silently does nothing if the old region is missing or of an
     * unsupported shape.
     */
    public static void renameRegion(String oldname, String newname, World world){
        if(!hasRegion(world, oldname)) return; //If old region exists

        ProtectedRegion oldRegion = getRegion(world, oldname);//Get old region
        ProtectedRegion newRegion;

        if(oldRegion instanceof ProtectedCuboidRegion)
            newRegion = new ProtectedCuboidRegion(newname, oldRegion.getMinimumPoint(), oldRegion.getMaximumPoint());
        else if(oldRegion instanceof ProtectedPolygonalRegion)
            newRegion = new ProtectedPolygonalRegion(newname, oldRegion.getPoints(),
                    oldRegion.getMinimumPoint().getBlockY(), oldRegion.getMaximumPoint().getBlockY());
        else return; //Not the correct type of region

        getRM(world).addRegion(newRegion);
        newRegion.copyFrom(oldRegion);
        removeRegion(world, oldRegion);
        saveRegions(world);
    }

    public static Collection<ProtectedRegion> getRegions(World world){
        return getRM(world).getRegions().values();
    }

    /** Checks both name-based (legacy) and UUID-based ownership. */
    public static boolean isOwner(Player p, ProtectedRegion r){
        return r.getOwners().contains(p.getName()) || r.getOwners().contains(p.getUniqueId());
    }

    public static boolean isOwner(Player p, String id, World w){
        return hasRegion(w, id) && isOwner(p, getRegion(w, id));
    }

    public static boolean doTwoRegionsOverlap(ProtectedRegion r1, ProtectedRegion r2){
        return r2.containsAny(r1.getPoints());
    }

    /** True if the region intersects any region whose id starts with "hotel-". */
    public static boolean doHotelRegionsOverlap(ProtectedRegion region, World world){
        Collection<ProtectedRegion> regions = getRegions(world);
        List<ProtectedRegion> inter = region.getIntersectingRegions(regions);
        for(ProtectedRegion reg : inter)
            if(reg.getId().startsWith("hotel-")) return true;
        return false;
    }

    /** True if the region intersects any room region ("hotel-&lt;name&gt;-&lt;num&gt;"). */
    public static boolean doesRoomRegionOverlap(ProtectedRegion region, World world){
        Collection<ProtectedRegion> regions = getRegions(world);
        List<ProtectedRegion> inter = region.getIntersectingRegions(regions);
        for(ProtectedRegion reg : inter)
            if(reg.getId().matches("hotel-\\w+-\\d+")) return true; //It's a room region
        return false;
    }

    /**
     * Translates a flags.yml section into WorldGuard flags on the region.
     * String keys are matched fuzzily against the default flag registry;
     * values carrying "-g &lt;group&gt;" are additionally applied as region-group
     * flags after the main flag map is set.
     *
     * Invalid numeric/enum values are now skipped with a debug message
     * instead of aborting the whole method: the original guarded each
     * parse with an impossible null check (Integer.valueOf etc. throw
     * rather than return null), which shows skipping was the intent.
     *
     * @param section the configuration section holding flag keys/values
     * @param r       the region to flag
     * @param name    hotel or room name, substituted into greeting/farewell messages
     * @param world   world used to build Location-valued flags
     */
    public static void setFlags(ConfigurationSection section, ProtectedRegion r, String name, World world){
        FlagRegistry registry = new SimpleFlagRegistry();
        registry.registerAll(DefaultFlag.getDefaultFlags());

        // Room region ids match "hotel-<name>-<number>"; anything else is a hotel.
        boolean isHotel = !r.getId().matches("hotel-.+-\\d+");

        Map<Flag<?>, Object> flags = new HashMap<Flag<?>, Object>();
        Map<Flag<?>, Object> groupFlags = new HashMap<Flag<?>, Object>();
        Map<Flag<?>, String> groupFlagValues = new HashMap<Flag<?>, String>();

        for(String key : section.getKeys(true)){
            String pureKey = key.replaceAll(".+\\.", ""); // strip section path, keep last component
            String keyValue = section.getString(key);

            // Skip unset values and nested-section placeholders.
            if(keyValue == null || keyValue.equalsIgnoreCase("none") || keyValue.startsWith("MemorySection")) continue;

            // A "-g <group>" suffix marks a region-group flag; record it and
            // strip it from the value before normal parsing.
            if(keyValue.contains("-g ")){
                final Pattern pattern = Pattern.compile("(\\s?)(-g\\s)(\\w+)(\\s?)");
                final Matcher matcher = pattern.matcher(keyValue);
                while (matcher.find()){
                    String pureGroupFlag = matcher.group(3);
                    groupFlags.put(DefaultFlag.fuzzyMatchFlag(registry, pureKey), keyValue);
                    groupFlagValues.put(DefaultFlag.fuzzyMatchFlag(registry, pureKey), pureGroupFlag);
                }
                keyValue = keyValue.replaceAll("\\s?-g\\s\\w+\\s?", "");
            }

            switch(pureKey){
                case "GREETING":
                    if(Boolean.valueOf(keyValue)){
                        String message;
                        if(isHotel) message = Mes.getStringNoPrefix("message.hotel.enter").replaceAll("%hotel%", name);
                        else message = Mes.getStringNoPrefix("message.room.enter").replaceAll("%room%", name);
                        flags.put(DefaultFlag.fuzzyMatchFlag(registry, pureKey), message);
                    }
                    break;
                case "FAREWELL":
                    if(Boolean.valueOf(keyValue)){
                        String message;
                        if(isHotel) message = Mes.getStringNoPrefix("message.hotel.exit").replaceAll("%hotel%", name);
                        else message = Mes.getStringNoPrefix("message.room.exit").replaceAll("%room%", name);
                        flags.put(DefaultFlag.fuzzyMatchFlag(registry, pureKey), message);
                    }
                    break;
                //String
                case "DENY-MESSAGE": case "ENTRY-DENY-MESSAGE": case "EXIT-DENY-MESSAGE": case "TIME-LOCK":
                    flags.put(DefaultFlag.fuzzyMatchFlag(registry, pureKey), keyValue);
                    break;
                //Integer
                case "HEAL-DELAY": case "HEAL-AMOUNT": case "FEED-DELAY": case "FEED-AMOUNT":
                case "FEED-MIN-HUNGER": case "FEED-MAX-HUNGER":
                    try {
                        flags.put(DefaultFlag.fuzzyMatchFlag(registry, pureKey), Integer.valueOf(keyValue));
                    } catch (NumberFormatException e) {
                        Mes.debug("Invalid integer for flag " + pureKey + ": " + keyValue);
                    }
                    break;
                //Double
                case "HEAL-MIN-HEALTH": case "HEAL-MAX-HEALTH": case "PRICE":
                    try {
                        flags.put(DefaultFlag.fuzzyMatchFlag(registry, pureKey), Double.valueOf(keyValue));
                    } catch (NumberFormatException e) {
                        Mes.debug("Invalid number for flag " + pureKey + ": " + keyValue);
                    }
                    break;
                //Boolean
                case "NOTIFY-ENTER": case "NOTIFY-LEAVE": case "BUYABLE": case "EXIT-OVERRIDE":
                    // Boolean.valueOf never fails; unrecognised text becomes false.
                    flags.put(DefaultFlag.fuzzyMatchFlag(registry, pureKey), Boolean.valueOf(keyValue));
                    break;
                //Weather Type (Clear or downfall)
                case "WEATHER-LOCK":
                    try {
                        flags.put(DefaultFlag.fuzzyMatchFlag(registry, pureKey), WeatherType.valueOf(keyValue.toUpperCase()));
                    } catch (IllegalArgumentException e) {
                        Mes.debug("Invalid weather type for flag " + pureKey + ": " + keyValue);
                    }
                    break;
                //GameMode (Adventure, Creative, Spectator, Survival)
                case "GAME-MODE":
                    try {
                        flags.put(DefaultFlag.fuzzyMatchFlag(registry, pureKey), GameMode.valueOf(keyValue.toUpperCase()));
                    } catch (IllegalArgumentException e) {
                        Mes.debug("Invalid game mode for flag " + pureKey + ": " + keyValue);
                    }
                    break;
                //Set of entities
                case "DENY-SPAWN":
                    List<String> entityList = section.getStringList(key);
                    Set<EntityType> entitySet = new HashSet<EntityType>();
                    // NOTE: an invalid entity name here still throws, as before.
                    entityList.forEach(entity -> entitySet.add(EntityType.valueOf(entity)));
                    flags.put(DefaultFlag.fuzzyMatchFlag(registry, pureKey), entitySet);
                    break;
                case "BLOCKED-CMDS": case "ALLOWED-CMDS":
                    String[] cmdsValues = keyValue.split(",");
                    Set<String> cmdsSet = new HashSet<String>();
                    for(String cmd: cmdsValues) cmdsSet.add("/"+cmd);
                    flags.put(DefaultFlag.fuzzyMatchFlag(registry, pureKey), cmdsSet);
                    break;
                case "TELEPORT": case "SPAWN":
                    int x = section.getInt(key + ".x");
                    int y = section.getInt(key + ".y");
                    int z = section.getInt(key + ".z");
                    int yaw = 0;
                    int pitch = 0;
                    flags.put(DefaultFlag.fuzzyMatchFlag(registry, pureKey), new Location(world, x, y, z, yaw, pitch));
                    break;
                default:
                    if(keyValue.equalsIgnoreCase("ALLOW"))
                        flags.put(DefaultFlag.fuzzyMatchFlag(registry, pureKey), State.ALLOW);
                    else if(keyValue.equalsIgnoreCase("DENY"))
                        flags.put(DefaultFlag.fuzzyMatchFlag(registry, pureKey), State.DENY);
                    else
                        Mes.debug("Could not match flag: " + pureKey + " with value: " + keyValue);
                    break;
            }
        }

        r.setFlags(flags);

        // Apply region-group qualifiers collected above.
        for(Flag<?> flag : groupFlags.keySet()){
            String groupFlagValue = groupFlagValues.get(flag);
            groupFlags(r, flag, groupFlagValue);
        }
    }

    /**
     * Sets the region-group qualifier of a flag (e.g. MEMBERS, NON_MEMBERS).
     * @param group case-insensitive RegionGroup name
     */
    public static void groupFlags(ProtectedRegion region, Flag<?> flag, String group){
        RegionGroupFlag regionGroupFlag = flag.getRegionGroupFlag();
        RegionGroup regionGroup = RegionGroup.valueOf(group.toUpperCase());
        region.setFlag(regionGroupFlag, regionGroup);
    }

    /** Applies the "hotel" section of flags.yml to a hotel region. */
    public static void hotelFlags(ProtectedRegion region, String hotelName, World world){
        YamlConfiguration flagsConfig = HTConfigHandler.getFlags();
        ConfigurationSection section = flagsConfig.getConfigurationSection("hotel");
        setFlags(section, region, hotelName, world);
    }

    /** Applies the "room" section of flags.yml to a room region. */
    public static void roomFlags(ProtectedRegion region, String num, World world){
        YamlConfiguration flagsConfig = HTConfigHandler.getFlags();
        ConfigurationSection section = flagsConfig.getConfigurationSection("room");
        setFlags(section, region, String.valueOf(num), world);
    }

    /**
     * Clears interact/use flags on a free room (reverting to defaults) when
     * the config allows players into free rooms.
     */
    public static void makeRoomAccessible(ProtectedRegion region){
        if(HTConfigHandler.getconfigYML().getBoolean("allowPlayersIntoFreeRooms", true)){
            region.setFlag(DefaultFlag.INTERACT, null);
            region.setFlag(DefaultFlag.USE, null);
            makeRoomContainersAccessible(region);
        }
    }

    /** Clears the chest-access flag on a free room if the config allows it. */
    public static void makeRoomContainersAccessible(ProtectedRegion region){
        if(HTConfigHandler.getconfigYML().getBoolean("allowPlayersToOpenContainersInFreeRooms", false))
            region.setFlag(DefaultFlag.CHEST_ACCESS, null);
    }
}
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package com.smi.travel.datalayer.report.model;

/**
 * Report-row bean for the ticket sale profit/volume report.
 * <p>
 * Holds one month's figures across three cabin classes (eco = economy,
 * bus = business, fst/first = first) and their total (all), for the current
 * year ({@code curr}) and the two preceding years ({@code pre1}, {@code pre2}),
 * together with year-over-year growth numbers and report header metadata
 * (date range, run date, user, report type).
 * Plain mutable JavaBean: all state is exposed through getters/setters.
 *
 * @author Surachai
 */
public class TicketSaleProfitVolumn {

    // Row label: month of the figures below.
    private String month;
    // Ticket counts/volumes per cabin class for year-2, year-1 and current year.
    private int pre2_eco_value;
    private int pre1_eco_value;
    private int curr_eco_value;
    private int pre2_bus_value;
    private int pre1_bus_value;
    private int curr_bus_value;
    private int pre2_fst_value;
    private int pre1_fst_value;
    private int curr_fst_value;
    private int pre2_all_value;
    private int pre1_all_value;
    private int curr_all_value;
    // Growth figures per cabin class (units not visible here; set by the report builder).
    private int eco_growth;
    private int bus_growth;
    private int fst_growth;
    private int all_growth;
    // Report header metadata.
    private String ticketfrom;
    private String ticketto;
    private String systemdate;
    private String user;
    // Year labels for the three reported years.
    private String pre1;
    private String pre2;
    private String curr;
    private String from;
    private String type;
    private String year;
    // Net amounts per cabin class (kept as preformatted strings).
    private String neteco;
    private String netbus;
    private String netfirst;
    private String nettotal;
    // Profit amounts per cabin class (kept as preformatted strings).
    private String profiteco;
    private String profitbus;
    private String profitfirst;
    private String profittotal;

    public String getMonth() { return month; }
    public void setMonth(String month) { this.month = month; }
    public int getPre2_eco_value() { return pre2_eco_value; }
    public void setPre2_eco_value(int pre2_eco_value) { this.pre2_eco_value = pre2_eco_value; }
    public int getPre1_eco_value() { return pre1_eco_value; }
    public void setPre1_eco_value(int pre1_eco_value) { this.pre1_eco_value = pre1_eco_value; }
    public int getCurr_eco_value() { return curr_eco_value; }
    public void setCurr_eco_value(int curr_eco_value) { this.curr_eco_value = curr_eco_value; }
    public int getPre2_bus_value() { return pre2_bus_value; }
    public void setPre2_bus_value(int pre2_bus_value) { this.pre2_bus_value = pre2_bus_value; }
    public int getPre1_bus_value() { return pre1_bus_value; }
    public void setPre1_bus_value(int pre1_bus_value) { this.pre1_bus_value = pre1_bus_value; }
    public int getCurr_bus_value() { return curr_bus_value; }
    public void setCurr_bus_value(int curr_bus_value) { this.curr_bus_value = curr_bus_value; }
    public int getPre2_fst_value() { return pre2_fst_value; }
    public void setPre2_fst_value(int pre2_fst_value) { this.pre2_fst_value = pre2_fst_value; }
    public int getPre1_fst_value() { return pre1_fst_value; }
    public void setPre1_fst_value(int pre1_fst_value) { this.pre1_fst_value = pre1_fst_value; }
    public int getCurr_fst_value() { return curr_fst_value; }
    public void setCurr_fst_value(int curr_fst_value) { this.curr_fst_value = curr_fst_value; }
    public int getPre2_all_value() { return pre2_all_value; }
    public void setPre2_all_value(int pre2_all_value) { this.pre2_all_value = pre2_all_value; }
    public int getPre1_all_value() { return pre1_all_value; }
    public void setPre1_all_value(int pre1_all_value) { this.pre1_all_value = pre1_all_value; }
    public int getCurr_all_value() { return curr_all_value; }
    public void setCurr_all_value(int curr_all_value) { this.curr_all_value = curr_all_value; }
    public int getEco_growth() { return eco_growth; }
    public void setEco_growth(int eco_growth) { this.eco_growth = eco_growth; }
    public int getBus_growth() { return bus_growth; }
    public void setBus_growth(int bus_growth) { this.bus_growth = bus_growth; }
    public int getFst_growth() { return fst_growth; }
    public void setFst_growth(int fst_growth) { this.fst_growth = fst_growth; }
    public int getAll_growth() { return all_growth; }
    public void setAll_growth(int all_growth) { this.all_growth = all_growth; }
    public String getTicketfrom() { return ticketfrom; }
    public void setTicketfrom(String ticketfrom) { this.ticketfrom = ticketfrom; }
    public String getTicketto() { return ticketto; }
    public void setTicketto(String ticketto) { this.ticketto = ticketto; }
    public String getSystemdate() { return systemdate; }
    public void setSystemdate(String systemdate) { this.systemdate = systemdate; }
    public String getUser() { return user; }
    public void setUser(String user) { this.user = user; }
    public String getPre1() { return pre1; }
    public void setPre1(String pre1) { this.pre1 = pre1; }
    public String getPre2() { return pre2; }
    public void setPre2(String pre2) { this.pre2 = pre2; }
    public String getCurr() { return curr; }
    public void setCurr(String curr) { this.curr = curr; }
    public String getFrom() { return from; }
    public void setFrom(String from) { this.from = from; }
    public String getType() { return type; }
    public void setType(String type) { this.type = type; }
    public String getYear() { return year; }
    public void setYear(String year) { this.year = year; }
    public String getNeteco() { return neteco; }
    public void setNeteco(String neteco) { this.neteco = neteco; }
    public String getNetbus() { return netbus; }
    public void setNetbus(String netbus) { this.netbus = netbus; }
    public String getNetfirst() { return netfirst; }
    public void setNetfirst(String netfirst) { this.netfirst = netfirst; }
    public String getNettotal() { return nettotal; }
    public void setNettotal(String nettotal) { this.nettotal = nettotal; }
    public String getProfiteco() { return profiteco; }
    public void setProfiteco(String profiteco) { this.profiteco = profiteco; }
    public String getProfitbus() { return profitbus; }
    public void setProfitbus(String profitbus) { this.profitbus = profitbus; }
    public String getProfitfirst() { return profitfirst; }
    public void setProfitfirst(String profitfirst) { this.profitfirst = profitfirst; }
    public String getProfittotal() { return profittotal; }
    public void setProfittotal(String profittotal) { this.profittotal = profittotal; }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.watcher;

import com.carrotsearch.randomizedtesting.LifecycleScope;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Test;

import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.io.Files.*;
import static org.hamcrest.Matchers.*;

/**
 * Unit tests for {@code FileWatcher}: each test drives the watcher through a
 * sequence of file-system mutations and asserts the exact notifications (and
 * their order) delivered to a recording listener.
 */
public class FileWatcherTest extends ElasticsearchTestCase {

    /**
     * Listener that records every callback as a "eventName: relative/path"
     * string, in delivery order, for later assertion.
     */
    private class RecordingChangeListener extends FileChangesListener {

        // Root against which notification paths are relativized.
        private File rootDir;

        private RecordingChangeListener(File rootDir) {
            this.rootDir = rootDir;
        }

        // Path of file relative to rootDir (URI-based, so directories keep a trailing '/').
        private String getRelativeFileName(File file) {
            return rootDir.toURI().relativize(file.toURI()).getPath();
        }

        // Recorded notifications, in the order they were delivered.
        private List<String> notifications = newArrayList();

        @Override
        public void onFileInit(File file) {
            notifications.add("onFileInit: " + getRelativeFileName(file));
        }

        @Override
        public void onDirectoryInit(File file) {
            notifications.add("onDirectoryInit: " + getRelativeFileName(file));
        }

        @Override
        public void onFileCreated(File file) {
            notifications.add("onFileCreated: " + getRelativeFileName(file));
        }

        @Override
        public void onFileDeleted(File file) {
            notifications.add("onFileDeleted: " + getRelativeFileName(file));
        }

        @Override
        public void onFileChanged(File file) {
            notifications.add("onFileChanged: " + getRelativeFileName(file));
        }

        @Override
        public void onDirectoryCreated(File file) {
            notifications.add("onDirectoryCreated: " + getRelativeFileName(file));
        }

        @Override
        public void onDirectoryDeleted(File file) {
            notifications.add("onDirectoryDeleted: " + getRelativeFileName(file));
        }

        public List<String> notifications() {
            return notifications;
        }
    }

    /** Watching a single file: init, no-op check, change, quiescence, delete. */
    @Test
    public void testSimpleFileOperations() throws IOException {
        File tempDir = newTempDir(LifecycleScope.TEST);
        RecordingChangeListener changes = new RecordingChangeListener(tempDir);
        File testFile = new File(tempDir, "test.txt");
        touch(testFile);
        FileWatcher fileWatcher = new FileWatcher(testFile);
        fileWatcher.addListener(changes);
        fileWatcher.init();
        assertThat(changes.notifications(), contains(equalTo("onFileInit: test.txt")));

        changes.notifications().clear();
        fileWatcher.checkAndNotify();
        // No changes since init -> no notifications.
        assertThat(changes.notifications(), hasSize(0));

        append("Test", testFile, Charset.defaultCharset());
        fileWatcher.checkAndNotify();
        assertThat(changes.notifications(), contains(equalTo("onFileChanged: test.txt")));

        changes.notifications().clear();
        fileWatcher.checkAndNotify();
        assertThat(changes.notifications(), hasSize(0));

        Files.delete(testFile.toPath());
        fileWatcher.checkAndNotify();
        assertThat(changes.notifications(), contains(equalTo("onFileDeleted: test.txt")));
    }

    /**
     * Watching a flat directory: creations, a change, deletions, re-creation,
     * and finally removal of the whole directory. Notifications arrive in
     * file-name order within one check.
     */
    @Test
    public void testSimpleDirectoryOperations() throws IOException {
        File tempDir = newTempDir(LifecycleScope.TEST);
        RecordingChangeListener changes = new RecordingChangeListener(tempDir);
        File testDir = new File(tempDir, "test-dir");
        testDir.mkdir();
        touch(new File(testDir, "test.txt"));
        touch(new File(testDir, "test0.txt"));

        FileWatcher fileWatcher = new FileWatcher(testDir);
        fileWatcher.addListener(changes);
        fileWatcher.init();
        assertThat(changes.notifications(), contains(
                equalTo("onDirectoryInit: test-dir/"),
                equalTo("onFileInit: test-dir/test.txt"),
                equalTo("onFileInit: test-dir/test0.txt")
        ));

        changes.notifications().clear();
        fileWatcher.checkAndNotify();
        assertThat(changes.notifications(), hasSize(0));

        // test0.txt already exists, test1..3 are new.
        for (int i = 0; i < 4; i++) {
            touch(new File(testDir, "test" + i + ".txt"));
        }
        // Make sure that first file is modified
        append("Test", new File(testDir, "test0.txt"), Charset.defaultCharset());

        fileWatcher.checkAndNotify();
        assertThat(changes.notifications(), contains(
                equalTo("onFileChanged: test-dir/test0.txt"),
                equalTo("onFileCreated: test-dir/test1.txt"),
                equalTo("onFileCreated: test-dir/test2.txt"),
                equalTo("onFileCreated: test-dir/test3.txt")
        ));

        changes.notifications().clear();
        fileWatcher.checkAndNotify();
        assertThat(changes.notifications(), hasSize(0));

        Files.delete(new File(testDir, "test1.txt").toPath());
        Files.delete(new File(testDir, "test2.txt").toPath());

        fileWatcher.checkAndNotify();
        assertThat(changes.notifications(), contains(
                equalTo("onFileDeleted: test-dir/test1.txt"),
                equalTo("onFileDeleted: test-dir/test2.txt")
        ));

        changes.notifications().clear();
        fileWatcher.checkAndNotify();
        assertThat(changes.notifications(), hasSize(0));

        // Mixed delete + create in the same check.
        Files.delete(new File(testDir, "test0.txt").toPath());
        touch(new File(testDir, "test2.txt"));
        touch(new File(testDir, "test4.txt"));

        fileWatcher.checkAndNotify();
        assertThat(changes.notifications(), contains(
                equalTo("onFileDeleted: test-dir/test0.txt"),
                equalTo("onFileCreated: test-dir/test2.txt"),
                equalTo("onFileCreated: test-dir/test4.txt")
        ));

        changes.notifications().clear();

        Files.delete(new File(testDir, "test3.txt").toPath());
        Files.delete(new File(testDir, "test4.txt").toPath());

        fileWatcher.checkAndNotify();
        assertThat(changes.notifications(), contains(
                equalTo("onFileDeleted: test-dir/test3.txt"),
                equalTo("onFileDeleted: test-dir/test4.txt")
        ));

        changes.notifications().clear();
        if (testDir.exists()) {
            IOUtils.rm(testDir.toPath());
        }

        // Directory removal reports remaining file deletions before the directory itself.
        fileWatcher.checkAndNotify();
        assertThat(changes.notifications(), contains(
                equalTo("onFileDeleted: test-dir/test.txt"),
                equalTo("onFileDeleted: test-dir/test2.txt"),
                equalTo("onDirectoryDeleted: test-dir")
        ));
    }

    /** Nested directories: recursive init, creations at depth, recursive delete ordering. */
    @Test
    public void testNestedDirectoryOperations() throws IOException {
        File tempDir = newTempDir(LifecycleScope.TEST);
        RecordingChangeListener changes = new RecordingChangeListener(tempDir);
        File testDir = new File(tempDir, "test-dir");
        testDir.mkdir();
        touch(new File(testDir, "test.txt"));
        new File(testDir, "sub-dir").mkdir();
        touch(new File(testDir, "sub-dir/test0.txt"));

        FileWatcher fileWatcher = new FileWatcher(testDir);
        fileWatcher.addListener(changes);
        fileWatcher.init();
        assertThat(changes.notifications(), contains(
                equalTo("onDirectoryInit: test-dir/"),
                equalTo("onDirectoryInit: test-dir/sub-dir/"),
                equalTo("onFileInit: test-dir/sub-dir/test0.txt"),
                equalTo("onFileInit: test-dir/test.txt")
        ));

        changes.notifications().clear();
        fileWatcher.checkAndNotify();
        assertThat(changes.notifications(), hasSize(0));

        // Create new file in subdirectory
        touch(new File(testDir, "sub-dir/test1.txt"));
        fileWatcher.checkAndNotify();
        assertThat(changes.notifications(), contains(
                equalTo("onFileCreated: test-dir/sub-dir/test1.txt")
        ));

        changes.notifications().clear();
        fileWatcher.checkAndNotify();
        assertThat(changes.notifications(), hasSize(0));

        // Create new subdirectory in subdirectory
        new File(testDir, "first-level").mkdir();
        touch(new File(testDir, "first-level/file1.txt"));
        new File(testDir, "first-level/second-level").mkdir();
        touch(new File(testDir, "first-level/second-level/file2.txt"));
        fileWatcher.checkAndNotify();
        assertThat(changes.notifications(), contains(
                equalTo("onDirectoryCreated: test-dir/first-level/"),
                equalTo("onFileCreated: test-dir/first-level/file1.txt"),
                equalTo("onDirectoryCreated: test-dir/first-level/second-level/"),
                equalTo("onFileCreated: test-dir/first-level/second-level/file2.txt")
        ));

        changes.notifications().clear();
        fileWatcher.checkAndNotify();
        assertThat(changes.notifications(), hasSize(0));

        // Delete a directory, check notifications for
        Path path = new File(testDir, "first-level").toPath();
        if (Files.exists(path)) {
            IOUtils.rm(path);
        }
        fileWatcher.checkAndNotify();
        // Deepest files first, then directories innermost-out.
        assertThat(changes.notifications(), contains(
                equalTo("onFileDeleted: test-dir/first-level/file1.txt"),
                equalTo("onFileDeleted: test-dir/first-level/second-level/file2.txt"),
                equalTo("onDirectoryDeleted: test-dir/first-level/second-level"),
                equalTo("onDirectoryDeleted: test-dir/first-level")
        ));
    }

    /** A directory replaced by a plain file of the same name, and back again. */
    @Test
    public void testFileReplacingDirectory() throws IOException {
        File tempDir = newTempDir(LifecycleScope.TEST);
        RecordingChangeListener changes = new RecordingChangeListener(tempDir);
        File testDir = new File(tempDir, "test-dir");
        testDir.mkdir();
        File subDir = new File(testDir, "sub-dir");
        subDir.mkdir();
        touch(new File(subDir, "test0.txt"));
        touch(new File(subDir, "test1.txt"));

        FileWatcher fileWatcher = new FileWatcher(testDir);
        fileWatcher.addListener(changes);
        fileWatcher.init();
        assertThat(changes.notifications(), contains(
                equalTo("onDirectoryInit: test-dir/"),
                equalTo("onDirectoryInit: test-dir/sub-dir/"),
                equalTo("onFileInit: test-dir/sub-dir/test0.txt"),
                equalTo("onFileInit: test-dir/sub-dir/test1.txt")
        ));

        changes.notifications().clear();

        // Replace the subdirectory with a file of the same name.
        if (subDir.exists()) {
            IOUtils.rm(subDir.toPath());
        }
        touch(subDir);
        fileWatcher.checkAndNotify();
        assertThat(changes.notifications(), contains(
                equalTo("onFileDeleted: test-dir/sub-dir/test0.txt"),
                equalTo("onFileDeleted: test-dir/sub-dir/test1.txt"),
                equalTo("onDirectoryDeleted: test-dir/sub-dir"),
                equalTo("onFileCreated: test-dir/sub-dir")
        ));

        changes.notifications().clear();

        // And back: the file becomes a directory again.
        Files.delete(subDir.toPath());
        subDir.mkdir();

        fileWatcher.checkAndNotify();
        assertThat(changes.notifications(), contains(
                equalTo("onFileDeleted: test-dir/sub-dir/"),
                equalTo("onDirectoryCreated: test-dir/sub-dir/")
        ));
    }

    /** Deleting all files leaves an empty (but still watched) directory. */
    @Test
    public void testEmptyDirectory() throws IOException {
        File tempDir = newTempDir(LifecycleScope.TEST);
        RecordingChangeListener changes = new RecordingChangeListener(tempDir);
        File testDir = new File(tempDir, "test-dir");
        testDir.mkdir();
        touch(new File(testDir, "test0.txt"));
        touch(new File(testDir, "test1.txt"));

        FileWatcher fileWatcher = new FileWatcher(testDir);
        fileWatcher.addListener(changes);
        fileWatcher.init();
        changes.notifications().clear();

        Files.delete(new File(testDir, "test0.txt").toPath());
        Files.delete(new File(testDir, "test1.txt").toPath());
        fileWatcher.checkAndNotify();
        assertThat(changes.notifications(), contains(
                equalTo("onFileDeleted: test-dir/test0.txt"),
                equalTo("onFileDeleted: test-dir/test1.txt")
        ));
    }

    /** init() on a missing directory is silent; later creation is reported by checkAndNotify(). */
    @Test
    public void testNoDirectoryOnInit() throws IOException {
        File tempDir = newTempDir(LifecycleScope.TEST);
        RecordingChangeListener changes = new RecordingChangeListener(tempDir);
        File testDir = new File(tempDir, "test-dir");

        FileWatcher fileWatcher = new FileWatcher(testDir);
        fileWatcher.addListener(changes);
        fileWatcher.init();
        assertThat(changes.notifications(), hasSize(0));
        changes.notifications().clear();

        testDir.mkdir();
        touch(new File(testDir, "test0.txt"));
        touch(new File(testDir, "test1.txt"));

        fileWatcher.checkAndNotify();
        assertThat(changes.notifications(), contains(
                equalTo("onDirectoryCreated: test-dir/"),
                equalTo("onFileCreated: test-dir/test0.txt"),
                equalTo("onFileCreated: test-dir/test1.txt")
        ));
    }

    /** init() on a missing file is silent; later creation is reported by checkAndNotify(). */
    @Test
    public void testNoFileOnInit() throws IOException {
        File tempDir = newTempDir(LifecycleScope.TEST);
        RecordingChangeListener changes = new RecordingChangeListener(tempDir);
        File testFile = new File(tempDir, "testfile.txt");

        FileWatcher fileWatcher = new FileWatcher(testFile);
        fileWatcher.addListener(changes);
        fileWatcher.init();
        assertThat(changes.notifications(), hasSize(0));
        changes.notifications().clear();

        touch(testFile);
        fileWatcher.checkAndNotify();
        assertThat(changes.notifications(), contains(
                equalTo("onFileCreated: testfile.txt")
        ));
    }
}
/*
 * Copyright (c) 1997, 2021, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation. Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package javax.swing.border;

import java.awt.Graphics;
import java.awt.Insets;
import java.awt.Color;
import java.awt.Component;
import java.beans.ConstructorProperties;

/**
 * A class which implements a simple two-line bevel border.
 * <p>
 * <strong>Warning:</strong>
 * Serialized objects of this class will not be compatible with
 * future Swing releases. The current serialization support is
 * appropriate for short term storage or RMI between applications running
 * the same version of Swing. As of 1.4, support for long term storage
 * of all JavaBeans
 * has been added to the <code>java.beans</code> package.
 * Please see {@link java.beans.XMLEncoder}.
 *
 * @author David Kloba
 */
@SuppressWarnings("serial") // Same-version serialization only
public class BevelBorder extends AbstractBorder {

    /** Raised bevel type. */
    public static final int RAISED  = 0;
    /** Lowered bevel type. */
    public static final int LOWERED = 1;

    /**
     * The bevel type.
     */
    protected int bevelType;
    /**
     * The color to use for the bevel outer highlight.
     */
    protected Color highlightOuter;
    /**
     * The color to use for the bevel inner highlight.
     */
    protected Color highlightInner;
    /**
     * The color to use for the bevel inner shadow.
     */
    protected Color shadowInner;
    /**
     * The color to use for the bevel outer shadow.
     */
    protected Color shadowOuter;

    /**
     * Creates a bevel border with the specified type and whose
     * colors will be derived from the background color of the
     * component passed into the paintBorder method.
     * All four bevel colors are left {@code null} here and are derived
     * per-paint in the {@code get*Color(Component)} accessors.
     * @param bevelType the type of bevel for the border
     */
    public BevelBorder(int bevelType) {
        this.bevelType = bevelType;
    }

    /**
     * Creates a bevel border with the specified type, highlight and
     * shadow colors.
     * The bevel outer highlight color and bevel inner highlight color
     * will be derived from specified highlight color and
     * bevel outer shadow color and bevel inner shadow color
     * will be derived from specified shadow color.
     * Concretely: outer highlight = {@code highlight.brighter()},
     * inner highlight = {@code highlight}, outer shadow = {@code shadow},
     * inner shadow = {@code shadow.brighter()}.
     * @param bevelType the type of bevel for the border
     * @param highlight the color to use for the bevel highlight
     * @param shadow the color to use for the bevel shadow
     */
    public BevelBorder(int bevelType, Color highlight, Color shadow) {
        this(bevelType, highlight.brighter(), highlight, shadow, shadow.brighter());
    }

    /**
     * Creates a bevel border with the specified type, highlight and
     * shadow colors.
     *
     * @param bevelType the type of bevel for the border
     * @param highlightOuterColor the color to use for the bevel outer highlight
     * @param highlightInnerColor the color to use for the bevel inner highlight
     * @param shadowOuterColor the color to use for the bevel outer shadow
     * @param shadowInnerColor the color to use for the bevel inner shadow
     */
    @ConstructorProperties({"bevelType", "highlightOuterColor", "highlightInnerColor", "shadowOuterColor", "shadowInnerColor"})
    public BevelBorder(int bevelType, Color highlightOuterColor,
                       Color highlightInnerColor, Color shadowOuterColor,
                       Color shadowInnerColor) {
        this(bevelType);
        this.highlightOuter = highlightOuterColor;
        this.highlightInner = highlightInnerColor;
        this.shadowOuter = shadowOuterColor;
        this.shadowInner = shadowInnerColor;
    }

    /**
     * Paints the border for the specified component with the specified
     * position and size. Unknown bevel types paint nothing.
     * @param c the component for which this border is being painted
     * @param g the paint graphics
     * @param x the x position of the painted border
     * @param y the y position of the painted border
     * @param width the width of the painted border
     * @param height the height of the painted border
     */
    public void paintBorder(Component c, Graphics g, int x, int y, int width, int height) {
        if (bevelType == RAISED) {
            paintRaisedBevel(c, g, x, y, width, height);
        } else if (bevelType == LOWERED) {
            paintLoweredBevel(c, g, x, y, width, height);
        }
    }

    /**
     * Reinitialize the insets parameter with this Border's current Insets.
     * The bevel is always two pixels thick on every side.
     * @param c the component for which this border insets value applies
     * @param insets the object to be reinitialized
     */
    public Insets getBorderInsets(Component c, Insets insets) {
        insets.set(2, 2, 2, 2);
        return insets;
    }

    /**
     * Returns the outer highlight color of the bevel border
     * when rendered on the specified component. If no highlight
     * color was specified at instantiation, the highlight color
     * is derived from the specified component's background color.
     *
     * @param c the component for which the highlight may be derived
     * @return the outer highlight {@code Color}
     * @since 1.3
     */
    public Color getHighlightOuterColor(Component c) {
        Color highlight = getHighlightOuterColor();
        return highlight != null ? highlight
                                 : c.getBackground().brighter().brighter();
    }

    /**
     * Returns the inner highlight color of the bevel border
     * when rendered on the specified component. If no highlight
     * color was specified at instantiation, the highlight color
     * is derived from the specified component's background color.
     *
     * @param c the component for which the highlight may be derived
     * @return the inner highlight {@code Color}
     * @since 1.3
     */
    public Color getHighlightInnerColor(Component c) {
        Color highlight = getHighlightInnerColor();
        return highlight != null ? highlight
                                 : c.getBackground().brighter();
    }

    /**
     * Returns the inner shadow color of the bevel border
     * when rendered on the specified component. If no shadow
     * color was specified at instantiation, the shadow color
     * is derived from the specified component's background color.
     *
     * @param c the component for which the shadow may be derived
     * @return the inner shadow {@code Color}
     * @since 1.3
     */
    public Color getShadowInnerColor(Component c) {
        Color shadow = getShadowInnerColor();
        return shadow != null ? shadow
                              : c.getBackground().darker();
    }

    /**
     * Returns the outer shadow color of the bevel border
     * when rendered on the specified component. If no shadow
     * color was specified at instantiation, the shadow color
     * is derived from the specified component's background color.
     *
     * @param c the component for which the shadow may be derived
     * @return the outer shadow {@code Color}
     * @since 1.3
     */
    public Color getShadowOuterColor(Component c) {
        Color shadow = getShadowOuterColor();
        return shadow != null ? shadow
                              : c.getBackground().darker().darker();
    }

    /**
     * Returns the outer highlight color of the bevel border.
     * Will return null if no highlight color was specified
     * at instantiation.
     *
     * @return the outer highlight {@code Color} or {@code null} if no highlight
     *         color was specified
     * @since 1.3
     */
    public Color getHighlightOuterColor() {
        return highlightOuter;
    }

    /**
     * Returns the inner highlight color of the bevel border.
     * Will return null if no highlight color was specified
     * at instantiation.
     *
     * @return the inner highlight {@code Color} or {@code null} if no highlight
     *         color was specified
     * @since 1.3
     */
    public Color getHighlightInnerColor() {
        return highlightInner;
    }

    /**
     * Returns the inner shadow color of the bevel border.
     * Will return null if no shadow color was specified
     * at instantiation.
     *
     * @return the inner shadow {@code Color} or {@code null} if no shadow color
     *         was specified
     * @since 1.3
     */
    public Color getShadowInnerColor() {
        return shadowInner;
    }

    /**
     * Returns the outer shadow color of the bevel border.
     * Will return null if no shadow color was specified
     * at instantiation.
     *
     * @return the outer shadow {@code Color} or {@code null} if no shadow color
     *         was specified
     * @since 1.3
     */
    public Color getShadowOuterColor() {
        return shadowOuter;
    }

    /**
     * Returns the type of the bevel border.
     *
     * @return the bevel border type, either {@code RAISED} or {@code LOWERED}
     */
    public int getBevelType() {
        return bevelType;
    }

    /**
     * Returns whether or not the border is opaque. This implementation
     * returns {@code true}.
     *
     * @return true
     */
    public boolean isBorderOpaque() {
        return true;
    }

    /**
     * Paints a raised bevel for the specified component with the specified
     * position and size. Highlights go on the top/left edges, shadows on
     * the bottom/right, two pixels deep each.
     *
     * @param c the component for which the raised bevel is being painted
     * @param g the paint graphics
     * @param x the x position of the raised bevel
     * @param y the y position of the raised bevel
     * @param width the width of the raised bevel
     * @param height the height of the raised bevel
     */
    protected void paintRaisedBevel(Component c, Graphics g, int x, int y,
                                    int width, int height)  {
        Color oldColor = g.getColor();
        int h = height;
        int w = width;

        g.translate(x, y);

        // Outer highlight: outermost left and top edges.
        g.setColor(getHighlightOuterColor(c));
        g.drawLine(0, 0, 0, h-2);
        g.drawLine(1, 0, w-2, 0);

        // Inner highlight: second left and top edges.
        g.setColor(getHighlightInnerColor(c));
        g.drawLine(1, 1, 1, h-3);
        g.drawLine(2, 1, w-3, 1);

        // Outer shadow: outermost bottom and right edges.
        g.setColor(getShadowOuterColor(c));
        g.drawLine(0, h-1, w-1, h-1);
        g.drawLine(w-1, 0, w-1, h-2);

        // Inner shadow: second bottom and right edges.
        g.setColor(getShadowInnerColor(c));
        g.drawLine(1, h-2, w-2, h-2);
        g.drawLine(w-2, 1, w-2, h-3);

        g.translate(-x, -y);
        g.setColor(oldColor);
    }

    /**
     * Paints a lowered bevel for the specified component with the specified
     * position and size. The mirror of the raised bevel: shadows on the
     * top/left edges, highlights on the bottom/right.
     *
     * @param c the component for which the lowered bevel is being painted
     * @param g the paint graphics
     * @param x the x position of the lowered bevel
     * @param y the y position of the lowered bevel
     * @param width the width of the lowered bevel
     * @param height the height of the lowered bevel
     */
    protected void paintLoweredBevel(Component c, Graphics g, int x, int y,
                                     int width, int height)  {
        Color oldColor = g.getColor();
        int h = height;
        int w = width;

        g.translate(x, y);

        // Inner shadow drawn first on the outermost top/left edges.
        g.setColor(getShadowInnerColor(c));
        g.drawLine(0, 0, 0, h-1);
        g.drawLine(1, 0, w-1, 0);

        // Outer shadow: second top/left edges.
        g.setColor(getShadowOuterColor(c));
        g.drawLine(1, 1, 1, h-2);
        g.drawLine(2, 1, w-2, 1);

        // Outer highlight: bottom and right edges.
        g.setColor(getHighlightOuterColor(c));
        g.drawLine(1, h-1, w-1, h-1);
        g.drawLine(w-1, 1, w-1, h-2);

        // Inner highlight: second bottom and right edges.
        g.setColor(getHighlightInnerColor(c));
        g.drawLine(2, h-2, w-2, h-2);
        g.drawLine(w-2, 2, w-2, h-3);

        g.translate(-x, -y);
        g.setColor(oldColor);
    }
}
/**
 * Copyright 2011-2021 Asakusa Framework Team.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.asakusafw.operator.method;

import java.io.IOException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;

import javax.lang.model.element.Modifier;
import javax.lang.model.element.PackageElement;
import javax.lang.model.element.TypeParameterElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.util.Types;
import javax.tools.Diagnostic;

import com.asakusafw.operator.CompileEnvironment;
import com.asakusafw.operator.Constants;
import com.asakusafw.operator.description.ClassDescription;
import com.asakusafw.operator.model.JavaName;
import com.asakusafw.operator.model.OperatorClass;
import com.asakusafw.operator.model.OperatorDescription.Node;
import com.asakusafw.operator.model.OperatorElement;
import com.asakusafw.operator.util.DescriptionHelper;
import com.asakusafw.operator.util.ElementHelper;
import com.asakusafw.operator.util.JavadocHelper;
import com.asakusafw.operator.util.Logger;
import com.asakusafw.utils.java.jsr269.bridge.Jsr269;
import com.asakusafw.utils.java.model.syntax.ClassDeclaration;
import com.asakusafw.utils.java.model.syntax.ClassLiteral;
import com.asakusafw.utils.java.model.syntax.CompilationUnit;
import com.asakusafw.utils.java.model.syntax.ConstructorDeclaration;
import com.asakusafw.utils.java.model.syntax.Expression;
import com.asakusafw.utils.java.model.syntax.FieldDeclaration;
import com.asakusafw.utils.java.model.syntax.Javadoc;
import com.asakusafw.utils.java.model.syntax.MethodDeclaration;
import com.asakusafw.utils.java.model.syntax.ModelFactory;
import com.asakusafw.utils.java.model.syntax.SimpleName;
import com.asakusafw.utils.java.model.syntax.Statement;
import com.asakusafw.utils.java.model.syntax.Type;
import com.asakusafw.utils.java.model.syntax.TypeBodyDeclaration;
import com.asakusafw.utils.java.model.util.AttributeBuilder;
import com.asakusafw.utils.java.model.util.ImportBuilder;
import com.asakusafw.utils.java.model.util.ImportBuilder.Strategy;
import com.asakusafw.utils.java.model.util.JavadocBuilder;
import com.asakusafw.utils.java.model.util.Models;
import com.asakusafw.utils.java.model.util.TypeBuilder;

/**
 * Emits operator factories for operator methods.
 */
public class OperatorFactoryEmitter {

    static final Logger LOG = Logger.get(OperatorFactoryEmitter.class);

    // Compile environment; provides the processing environment, resource
    // bookkeeping, and source emission.
    private final CompileEnvironment environment;

    /**
     * Creates a new instance.
     * @param environment current compiling environment
     * @throws IllegalArgumentException if some parameters were {@code null}
     */
    public OperatorFactoryEmitter(CompileEnvironment environment) {
        this.environment = Objects.requireNonNull(environment, "environment must not be null"); //$NON-NLS-1$
    }

    /**
     * Emits an operator factory class.
     * Does nothing if the factory class was already generated (this keeps the
     * operation idempotent across annotation-processing rounds). I/O failures
     * are reported through the processing {@code Messager} rather than thrown.
     * @param operatorClass target class description
     * @throws IllegalArgumentException if some parameters were {@code null}
     */
    public void emit(OperatorClass operatorClass) {
        Objects.requireNonNull(operatorClass, "operatorClass must not be null"); //$NON-NLS-1$
        ClassDescription key = Constants.getFactoryClass(operatorClass.getDeclaration().getQualifiedName());
        if (environment.isResourceGenerated(key)) {
            LOG.debug("class is already generated: {}", key.getClassName()); //$NON-NLS-1$
            return;
        }
        CompilationUnit unit = Generator.generate(environment, operatorClass);
        try {
            environment.emit(unit, operatorClass.getDeclaration());
            // mark as generated only after a successful emit
            environment.setResourceGenerated(key);
        } catch (IOException e) {
            // surface the failure as a compiler error attached to the operator class,
            // and keep the full stack trace in the log
            environment.getProcessingEnvironment().getMessager().printMessage(Diagnostic.Kind.ERROR,
                    MessageFormat.format(
                            Messages.getString("OperatorFactoryEmitter.errorFailEmit"), //$NON-NLS-1$
                            e.toString()),
                    operatorClass.getDeclaration());
            LOG.error(MessageFormat.format(
                    Messages.getString("OperatorFactoryEmitter.logFailEmit"), //$NON-NLS-1$
                    operatorClass.getDeclaration().getQualifiedName()), e);
        }
    }

    /**
     * Builds the Java model (AST) of a single operator factory class.
     * One instance is used per factory class; see {@link #generate()}.
     */
    private static final class Generator {

        private final CompileEnvironment environment;

        // Java DOM model factory used for every node built by this generator.
        private final ModelFactory f;

        private final OperatorClass operatorClass;

        // Bridges javax.lang.model elements/types into the Java DOM model.
        private final Jsr269 converter;

        // Collects and resolves imports for the generated compilation unit;
        // rooted at the operator class's own package.
        private final ImportBuilder imports;

        private Generator(CompileEnvironment environment, OperatorClass operatorClass) {
            assert environment != null;
            assert operatorClass != null;
            this.environment = environment;
            this.f = Models.getModelFactory();
            this.converter = new Jsr269(f);
            this.operatorClass = operatorClass;
            this.imports = new ImportBuilder(
                    f,
                    converter.convert((PackageElement) operatorClass.getDeclaration().getEnclosingElement()),
                    Strategy.TOP_LEVEL);
        }

        /**
         * Generates the factory class compilation unit for the target operator class.
         */
        static CompilationUnit generate(CompileEnvironment environment, OperatorClass operatorClass) {
            Generator generator = new Generator(environment, operatorClass);
            return generator.generate();
        }

        CompilationUnit generate() {
            // reserve the generated type names first so that import resolution
            // never shadows them
            reserveNameSpace();
            ClassDeclaration typeDecl = generateClass();
            return f.newCompilationUnit(
                    imports.getPackageDeclaration(),
                    imports.toImportDeclarations(),
                    Collections.singletonList(typeDecl));
        }

        // Builds the top-level "public final class XxxFactory { ... }" declaration.
        private ClassDeclaration generateClass() {
            Types types = environment.getProcessingEnvironment().getTypeUtils();
            DeclaredType originalClass = types.getDeclaredType(operatorClass.getDeclaration());
            SimpleName className = generateClassName();
            List<TypeBodyDeclaration> members = new ArrayList<>();
            members.add(generateConstructor());
            members.addAll(generateMembers());
            return f.newClassDeclaration(
                    new JavadocBuilder(f)
                        .inline(Messages.getString("OperatorFactoryEmitter.javadocClassSynopsis"), //$NON-NLS-1$
                                d -> d.linkType(imports.resolve(converter.convert(originalClass))))
                        .toJavadoc(),
                    new AttributeBuilder(f)
                        // NOTE(review): "getGenetedAnnotation" looks like a typo for
                        // "Generated", but it is the Constants API name -- do not change here.
                        .annotation(DescriptionHelper.resolveAnnotation(imports, Constants.getGenetedAnnotation()))
                        .annotation(ElementHelper.toOperatorFactoryAnnotation(environment, operatorClass, imports))
                        .Public()
                        .Final()
                        .toAttributes(),
                    className,
                    null,
                    Collections.emptyList(),
                    members);
        }

        // Pre-registers the factory class name and each node class name with the
        // import builder so later resolutions cannot collide with them.
        private void reserveNameSpace() {
            SimpleName className = generateClassName();
            imports.resolvePackageMember(className);
            for (OperatorElement element : operatorClass.getElements()) {
                if (element.getDescription() == null) {
                    continue; // element without a description is not an operator method
                }
                imports.resolvePackageMember(f.newQualifiedName(className, generateNodeClassName(element)));
            }
        }

        // For each operator method: a nested node class plus its factory method.
        private List<TypeBodyDeclaration> generateMembers() {
            List<TypeBodyDeclaration> results = new ArrayList<>();
            for (OperatorElement element : operatorClass.getElements()) {
                if (element.getDescription() == null) {
                    continue;
                }
                ClassDeclaration node = generateNodeClass(element);
                Type type = imports.resolvePackageMember(f.newQualifiedName(generateClassName(), node.getName()));
                MethodDeclaration factory = generateFactoryMethod(element, type);
                results.add(node);
                results.add(factory);
            }
            return results;
        }

        // Builds the nested "public static final class <NodeName> { ... }" that
        // carries the operator's output ports as public final fields.
        private ClassDeclaration generateNodeClass(OperatorElement element) {
            List<TypeBodyDeclaration> members = new ArrayList<>();
            members.addAll(generateOutputFields(element));
            members.add(generateNodeConstructor(element));
            return f.newClassDeclaration(
                    generateNodeClassComment(element),
                    new AttributeBuilder(f)
                        .Public()
                        .Static()
                        .Final()
                        .toAttributes(),
                    generateNodeClassName(element),
                    ElementHelper.toTypeParameters(environment, element.getDeclaration().getTypeParameters(), imports),
                    null,
                    Collections.emptyList(),
                    members);
        }

        // One "public final Source<T> <outputName>" field per operator output.
        private List<FieldDeclaration> generateOutputFields(OperatorElement element) {
            List<FieldDeclaration> results = new ArrayList<>();
            for (Node node : element.getDescription().getOutputs()) {
                Type type = new TypeBuilder(f, DescriptionHelper.resolve(imports, Constants.TYPE_SOURCE))
                    .parameterize(imports.resolve(converter.convert(node.getType())))
                    .toType();
                results.add(f.newFieldDeclaration(
                        generateOutputFieldComment(element, node),
                        new AttributeBuilder(f)
                            .Public()
                            .Final()
                            .toAttributes(),
                        type,
                        f.newSimpleName(node.getName()),
                        null));
            }
            return results;
        }

        // Node constructor: declares a local "$builder$" initialized via
        // ElementBuilder.createOperator(...), then delegates the per-port wiring
        // statements to ElementHelper.
        private ConstructorDeclaration generateNodeConstructor(OperatorElement element) {
            Type builderType = DescriptionHelper.resolve(imports, Constants.TYPE_ELEMENT_BUILDER);
            List<Statement> statements = new ArrayList<>();
            SimpleName builderVar = f.newSimpleName("$builder$"); //$NON-NLS-1$
            statements.add(new TypeBuilder(f, builderType)
                .method("createOperator", toOperatorDeclaration(element)) //$NON-NLS-1$
                .toLocalVariableDeclaration(builderType, builderVar));
            statements.addAll(ElementHelper.toNodeConstructorStatements(environment, element, builderVar, imports));
            return f.newConstructorDeclaration(
                    null,
                    new AttributeBuilder(f).toAttributes(),
                    generateNodeClassName(element),
                    ElementHelper.toParameters(environment, element, imports),
                    statements);
        }

        // Argument list for ElementBuilder.createOperator(...): annotation type,
        // declaring class, (implementation class if abstract), method name, and
        // each formal parameter's erased type.
        private List<Expression> toOperatorDeclaration(OperatorElement element) {
            assert element != null;
            List<Expression> results = new ArrayList<>();
            results.add(toLiteral(element.getAnnotation().getAnnotationType()));
            results.add(toLiteral(operatorClass.getDeclaration().asType()));
            if (operatorClass.getDeclaration().getModifiers().contains(Modifier.ABSTRACT)) {
                // abstract operator classes have a separate generated implementation class
                ClassDescription aClass = Constants.getImplementationClass(
                        operatorClass.getDeclaration().getQualifiedName());
                Type implementationClass = DescriptionHelper.resolve(imports, aClass);
                results.add(f.newClassLiteral(implementationClass));
            }
            results.add(Models.toLiteral(f, element.getDeclaration().getSimpleName().toString()));
            for (VariableElement param : element.getDeclaration().getParameters()) {
                results.add(toLiteral(param.asType()));
            }
            return results;
        }

        // Class literal for the erasure of the given type (type variables and
        // parameterized types collapse to their raw class).
        private ClassLiteral toLiteral(TypeMirror type) {
            return f.newClassLiteral(imports.resolve(converter.convert(environment.getErasure(type))));
        }

        // Node class name: the operator method name converted to TypeName case.
        private SimpleName generateNodeClassName(OperatorElement element) {
            assert element != null;
            String name = JavaName.of(element.getDeclaration().getSimpleName().toString()).toTypeName();
            return f.newSimpleName(name);
        }

        // Factory method: same type parameters and parameters as the operator
        // method; body is "return new <Node>(...);".
        private MethodDeclaration generateFactoryMethod(OperatorElement element, Type rawNodeType) {
            Type nodeType = ElementHelper.toParameterizedType(
                    environment,
                    element.getDeclaration().getTypeParameters(),
                    rawNodeType,
                    imports);
            return f.newMethodDeclaration(
                    generateFactoryMethodComment(element),
                    new AttributeBuilder(f)
                        .annotation(ElementHelper.toOperatorInfoAnnotation(environment, element, imports))
                        .Public()
                        .toAttributes(),
                    ElementHelper.toTypeParameters(environment, element.getDeclaration().getTypeParameters(), imports),
                    nodeType,
                    f.newSimpleName(environment.getMemberName(element.getDeclaration().getSimpleName().toString())),
                    ElementHelper.toParameters(environment, element, imports),
                    0,
                    Collections.emptyList(),
                    f.newBlock(new TypeBuilder(f, nodeType)
                        .newObject(ElementHelper.toArguments(environment, element, imports))
                        .toReturnStatement()));
        }

        // Public no-arg constructor of the factory class.
        // NOTE(review): the generated body is a bare "return;" statement --
        // presumably a placeholder so the constructor body is non-empty; confirm
        // against the Java DOM emitter before changing.
        private TypeBodyDeclaration generateConstructor() {
            return f.newConstructorDeclaration(
                    new JavadocBuilder(f)
                        .text(Messages.getString("OperatorFactoryEmitter.javadocConstructorSynopsis")) //$NON-NLS-1$
                        .toJavadoc(),
                    new AttributeBuilder(f)
                        .Public()
                        .toAttributes(),
                    generateClassName(),
                    Collections.emptyList(),
                    Collections.singletonList(f.newReturnStatement()));
        }

        private SimpleName generateClassName() {
            ClassDescription aClass = Constants.getFactoryClass(operatorClass.getDeclaration().getQualifiedName());
            return f.newSimpleName(aClass.getSimpleName());
        }

        // Javadoc for a factory method: operator doc, type params, one @param
        // per operator parameter, and a fixed @return sentence.
        private Javadoc generateFactoryMethodComment(OperatorElement element) {
            assert element != null;
            JavadocHelper source = new JavadocHelper(environment);
            source.put(element.getDeclaration());
            JavadocBuilder javadoc = new JavadocBuilder(f);
            javadoc.inline(source.get(element.getDescription().getDocument()));
            appendTypeParameterDocs(element, javadoc, source);
            for (Node node : element.getDescription().getParameters()) {
                javadoc.param(node.getName());
                javadoc.inline(source.get(node.getDocument()));
            }
            javadoc.returns().text(Messages.getString("OperatorFactoryEmitter.javadocFactoryMethodReturn")); //$NON-NLS-1$
            return javadoc.toJavadoc();
        }

        // Javadoc for an output port field, taken from the operator's own docs.
        private Javadoc generateOutputFieldComment(OperatorElement element, Node output) {
            assert element != null;
            assert output != null;
            JavadocHelper source = new JavadocHelper(environment);
            source.put(element.getDeclaration());
            JavadocBuilder javadoc = new JavadocBuilder(f);
            javadoc.inline(source.get(output.getDocument()));
            return javadoc.toJavadoc();
        }

        // Javadoc for a node class: operator doc plus type parameter docs.
        private Javadoc generateNodeClassComment(OperatorElement element) {
            assert element != null;
            JavadocHelper source = new JavadocHelper(environment);
            source.put(element.getDeclaration());
            JavadocBuilder javadoc = new JavadocBuilder(f);
            javadoc.inline(source.get(element.getDescription().getDocument()));
            appendTypeParameterDocs(element, javadoc, source);
            return javadoc.toJavadoc();
        }

        // Copies the operator method's @param <T> docs onto the generated javadoc.
        private void appendTypeParameterDocs(OperatorElement element, JavadocBuilder javadoc, JavadocHelper source) {
            assert element != null;
            assert javadoc != null;
            assert source != null;
            for (TypeParameterElement param : element.getDeclaration().getTypeParameters()) {
                javadoc.typeParam(param.getSimpleName().toString());
                javadoc.inline(source.getTypeParameter(param.getSimpleName().toString()));
            }
        }
    }
}
/*
 *
 *  Copyright 2014 Netflix, Inc.
 *
 *     Licensed under the Apache License, Version 2.0 (the "License");
 *     you may not use this file except in compliance with the License.
 *     You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 *     Unless required by applicable law or agreed to in writing, software
 *     distributed under the License is distributed on an "AS IS" BASIS,
 *     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *     See the License for the specific language governing permissions and
 *     limitations under the License.
 *
 */
package com.netflix.zeno.flatblob;

import com.netflix.zeno.fastblob.FastBlobStateEngine;
import com.netflix.zeno.fastblob.record.ByteDataBuffer;
import com.netflix.zeno.fastblob.record.VarInt;
import com.netflix.zeno.fastblob.state.FastBlobTypeDeserializationState;
import com.netflix.zeno.serializer.FrameworkSerializer;
import com.netflix.zeno.serializer.NFTypeSerializer;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

/**
 * FrameworkSerializer implementation for the "flat blob" format: each record
 * embeds its referenced sub-records inline (ordinal + length-prefixed payload)
 * instead of referring to them only by ordinal.
 */
public class FlatBlobFrameworkSerializer extends FrameworkSerializer<FlatBlobSerializationRecord> {

    // Sentinel bit patterns for "null" float/double values: NaN bits plus one,
    // so they cannot collide with any value produced by floatToIntBits/
    // doubleToLongBits (which canonicalize NaN).
    static final int NULL_FLOAT_BITS = Float.floatToIntBits(Float.NaN) + 1;
    static final long NULL_DOUBLE_BITS = Double.doubleToLongBits(Double.NaN) + 1;

    // Used to look up the ordinals of referenced objects.
    private final FastBlobStateEngine stateEngine;

    // Per-thread cache of reusable serialization records, keyed by type name.
    private final ThreadLocal<Map<String, FlatBlobSerializationRecord>> cachedSerializationRecords;

    public FlatBlobFrameworkSerializer(FlatBlobSerializationFramework flatBlobFramework, FastBlobStateEngine stateEngine) {
        super(flatBlobFramework);
        this.stateEngine = stateEngine;
        this.cachedSerializationRecords = new ThreadLocal<Map<String, FlatBlobSerializationRecord>>();
    }

    /**
     * Serialize a primitive element.
     * Dispatches on the runtime type of the boxed value; nulls are skipped
     * (the field is simply not written).
     */
    @Override
    public void serializePrimitive(FlatBlobSerializationRecord rec, String fieldName, Object value) {
        if (value == null) {
            return;
        }

        if (value instanceof Integer) {
            serializePrimitive(rec, fieldName, ((Integer) value).intValue());
        } else if (value instanceof Long) {
            serializePrimitive(rec, fieldName, ((Long) value).longValue());
        } else if (value instanceof Float) {
            serializePrimitive(rec, fieldName, ((Float) value).floatValue());
        } else if (value instanceof Double) {
            serializePrimitive(rec, fieldName, ((Double) value).doubleValue());
        } else if (value instanceof Boolean) {
            serializePrimitive(rec, fieldName, ((Boolean) value).booleanValue());
        } else if (value instanceof String) {
            serializeString(rec, fieldName, (String) value);
        } else if (value instanceof byte[]){
            serializeBytes(rec, fieldName, (byte[]) value);
        } else {
            throw new RuntimeException("Primitive type " + value.getClass().getSimpleName() + " not supported!");
        }
    }

    /**
     * Serialize an integer, use zig-zag encoding to (probably) get a small positive value, then encode the result as a variable-byte integer.
     */
    @Override
    public void serializePrimitive(FlatBlobSerializationRecord rec, String fieldName, int value) {
        ByteDataBuffer fieldBuffer = rec.getFieldBuffer(fieldName);

        // zig zag encoding: sign bit moves to bit 0 so small negatives stay small
        VarInt.writeVInt(fieldBuffer, (value << 1) ^ (value >> 31));
    }

    /**
     * Serialize a long, use zig-zag encoding to (probably) get a small positive value, then encode the result as a variable-byte long.
     */
    @Override
    public void serializePrimitive(FlatBlobSerializationRecord rec, String fieldName, long value) {
        ByteDataBuffer fieldBuffer = rec.getFieldBuffer(fieldName);

        // zig zag encoding (64-bit variant)
        VarInt.writeVLong(fieldBuffer, (value << 1) ^ (value >> 63));
    }

    /**
     * Serialize a float into 4 consecutive bytes (big-endian IEEE-754 bits).
     */
    @Override
    public void serializePrimitive(FlatBlobSerializationRecord rec, String fieldName, float value) {
        ByteDataBuffer fieldBuffer = rec.getFieldBuffer(fieldName);
        int intBits = Float.floatToIntBits(value);
        writeFixedLengthInt(fieldBuffer, intBits);
    }

    /**
     * Write 4 consecutive bytes, most significant byte first.
     */
    private static void writeFixedLengthInt(ByteDataBuffer fieldBuffer, int intBits) {
        fieldBuffer.write((byte) (intBits >>> 24));
        fieldBuffer.write((byte) (intBits >>> 16));
        fieldBuffer.write((byte) (intBits >>> 8));
        fieldBuffer.write((byte) (intBits));
    }

    /**
     * Serialize a double into 8 consecutive bytes (big-endian IEEE-754 bits).
     */
    @Override
    public void serializePrimitive(FlatBlobSerializationRecord rec, String fieldName, double value) {
        ByteDataBuffer fieldBuffer = rec.getFieldBuffer(fieldName);
        long intBits = Double.doubleToLongBits(value);
        writeFixedLengthLong(fieldBuffer, intBits);
    }

    /**
     * Write 8 consecutive bytes, most significant byte first.
     */
    private static void writeFixedLengthLong(ByteDataBuffer fieldBuffer, long intBits) {
        fieldBuffer.write((byte) (intBits >>> 56));
        fieldBuffer.write((byte) (intBits >>> 48));
        fieldBuffer.write((byte) (intBits >>> 40));
        fieldBuffer.write((byte) (intBits >>> 32));
        fieldBuffer.write((byte) (intBits >>> 24));
        fieldBuffer.write((byte) (intBits >>> 16));
        fieldBuffer.write((byte) (intBits >>> 8));
        fieldBuffer.write((byte) (intBits));
    }

    /**
     * Serialize a boolean as a single byte (1 = true, 0 = false).
     */
    @Override
    public void serializePrimitive(FlatBlobSerializationRecord rec, String fieldName, boolean value) {
        ByteDataBuffer fieldBuffer = rec.getFieldBuffer(fieldName);
        byte byteValue = value ? (byte) 1 : (byte) 0;
        fieldBuffer.write(byteValue);
    }

    // Writes each character of the string as a VarInt; null strings are skipped.
    private void serializeString(FlatBlobSerializationRecord rec, String fieldName, String value) {
        if(value == null)
            return;

        writeString(value, rec.getFieldBuffer(fieldName));
    }

    // Copies the raw bytes into the field buffer; null arrays are skipped.
    @Override
    public void serializeBytes(FlatBlobSerializationRecord rec, String fieldName, byte[] value) {
        if(value == null)
            return;

        ByteDataBuffer fieldBuffer = rec.getFieldBuffer(fieldName);

        for (int i = 0; i < value.length; i++) {
            fieldBuffer.write(value[i]);
        }
    }

    /*
     * @Deprecated instead use serializeObject(FlatBlobSerializationRecord rec, String fieldName, Object obj)
     *
     */
    @Deprecated
    @Override
    public void serializeObject(FlatBlobSerializationRecord rec, String fieldName, String typeName, Object obj) {
        int fieldPosition = rec.getSchema().getPosition(fieldName);
        validateField(fieldName, fieldPosition);
        serializeObject(rec, fieldPosition, typeName, obj);
    }

    // Rejects field names that do not exist in the record's schema.
    private void validateField(String fieldName, int fieldPosition) {
        if(fieldPosition == -1) {
            throw new IllegalArgumentException("Attempting to serialize non existent field " + fieldName + ".");
        }
    }

    // Writes a referenced object inline: ordinal, payload length, then the
    // payload produced by the sub-record's serializer.
    private void serializeObject(FlatBlobSerializationRecord rec, int fieldPosition, String typeName, Object obj) {
        if(obj == null)
            return;

        int ordinal = findOrdinalInStateEngine(typeName, obj);

        FlatBlobSerializationRecord subRecord = getSerializationRecord(typeName);

        framework.getSerializer(typeName).serialize(obj, subRecord);

        ByteDataBuffer fieldBuffer = rec.getFieldBuffer(fieldPosition);

        VarInt.writeVInt(fieldBuffer, ordinal);
        VarInt.writeVInt(fieldBuffer, subRecord.sizeOfData());
        subRecord.writeDataTo(fieldBuffer);
    }

    @Override
    public void serializeObject(FlatBlobSerializationRecord rec, String fieldName, Object obj) {
        int fieldPosition = rec.getSchema().getPosition(fieldName);
        validateField(fieldName, fieldPosition);
        // the element type is taken from the schema, not passed by the caller
        serializeObject(rec, fieldPosition, rec.getSchema().getObjectType(fieldName), obj);
    }

    @Override
    public <T> void serializeList(FlatBlobSerializationRecord rec, String fieldName, String typeName, Collection<T> obj) {
        if(obj == null)
            return;

        NFTypeSerializer<Object> elementSerializer = framework.getSerializer(typeName);

        int fieldPosition = rec.getSchema().getPosition(fieldName);
        ByteDataBuffer fieldBuffer = rec.getFieldBuffer(fieldPosition);

        FlatBlobSerializationRecord subRecord = getSerializationRecord(typeName);

        // elements are written in iteration order; a null element is a single VNull
        for(T t : obj) {
            if(t == null) {
                VarInt.writeVNull(fieldBuffer);
            } else {
                int ordinal = findOrdinalInStateEngine(typeName, t);
                elementSerializer.serialize(t, subRecord);
                VarInt.writeVInt(fieldBuffer, ordinal);
                VarInt.writeVInt(fieldBuffer, subRecord.sizeOfData());
                subRecord.writeDataTo(fieldBuffer);
                subRecord.reset();
            }
        }
    }

    @Override
    public <T> void serializeSet(FlatBlobSerializationRecord rec, String fieldName, String typeName, Set<T> set) {
        if(set == null)
            return;

        FastBlobTypeDeserializationState<Object> typeDeserializationState = stateEngine.getTypeDeserializationState(typeName);

        int fieldPosition = rec.getSchema().getPosition(fieldName);
        ByteDataBuffer fieldBuffer = rec.getFieldBuffer(fieldPosition);

        // Ordinal per element: -1 for null elements, Integer.MIN_VALUE for
        // elements not known to the state engine (kept aside in
        // unidentifiedSetObjects at their insertion index).
        int setOrdinals[] = new int[set.size()];
        Object unidentifiedSetObjects[] = null;

        int i = 0;
        for (T obj : set) {
            if(obj == null) {
                setOrdinals[i++] = -1;
            } else {
                setOrdinals[i] = typeDeserializationState.find(obj);
                if(setOrdinals[i] == -1) {
                    if(unidentifiedSetObjects == null)
                        unidentifiedSetObjects = new Object[set.size()];
                    unidentifiedSetObjects[i] = obj;
                    setOrdinals[i] = Integer.MIN_VALUE;
                }
                i++;
            }
        }

        // Sort so known ordinals can be delta-encoded in ascending order.
        // NOTE(review): after this sort, index i no longer corresponds to the
        // insertion index used to populate unidentifiedSetObjects, so the
        // unidentifiedSetObjects[i] lookup below looks wrong when more than one
        // element is unidentified -- verify against the flat blob deserializer.
        Arrays.sort(setOrdinals);

        FlatBlobSerializationRecord subRecord = getSerializationRecord(typeName);

        int currentOrdinal = 0;

        for(i=0;i<setOrdinals.length;i++) {
            if(setOrdinals[i] == -1) {
                // null element: two VNulls (ordinal slot and payload slot)
                VarInt.writeVNull(fieldBuffer);
                VarInt.writeVNull(fieldBuffer);
            } else {
                if(setOrdinals[i] == Integer.MIN_VALUE) {
                    // unidentified element: VNull in the ordinal slot, payload follows
                    Object element = unidentifiedSetObjects[i];
                    framework.getSerializer(typeName).serialize(element, subRecord);
                    VarInt.writeVNull(fieldBuffer);
                } else {
                    // known element: delta from the previous ordinal, payload follows
                    Object element = typeDeserializationState.get(setOrdinals[i]);
                    framework.getSerializer(typeName).serialize(element, subRecord);
                    VarInt.writeVInt(fieldBuffer, setOrdinals[i] - currentOrdinal);
                    currentOrdinal = setOrdinals[i];
                }
                VarInt.writeVInt(fieldBuffer, subRecord.sizeOfData());
                subRecord.writeDataTo(fieldBuffer);
                subRecord.reset();
            }
        }
    }

    @Override
    public <K, V> void serializeMap(FlatBlobSerializationRecord rec, String fieldName, String keyTypeName, String valueTypeName, Map<K, V> map) {
        if(map == null)
            return;

        FastBlobTypeDeserializationState<Object> keyDeserializationState = stateEngine.getTypeDeserializationState(keyTypeName);
        FastBlobTypeDeserializationState<Object> valueDeserializationState = stateEngine.getTypeDeserializationState(valueTypeName);

        int fieldPosition = rec.getSchema().getPosition(fieldName);
        ByteDataBuffer fieldBuffer = rec.getFieldBuffer(fieldPosition);

        FlatBlobSerializationRecord keyRecord = getSerializationRecord(keyTypeName);
        FlatBlobSerializationRecord valueRecord = getSerializationRecord(valueTypeName);

        // Pack each entry as (valueOrdinal << 32 | keyOrdinal) so sorting orders
        // entries by value ordinal, enabling delta encoding of value ordinals.
        long mapEntries[] = new long[map.size()];

        int i = 0;

        for (Map.Entry<K, V> entry : map.entrySet()) {
            int keyOrdinal = -1;
            int valueOrdinal = -1;
            if(entry.getKey() != null)
                keyOrdinal = keyDeserializationState.find(entry.getKey());
            if(entry.getValue() != null)
                valueOrdinal = valueDeserializationState.find(entry.getValue());

            mapEntries[i++] = ((long)valueOrdinal << 32) | (keyOrdinal & 0xFFFFFFFFL);
        }

        // NOTE(review): i is incremented for every entry, so this branch is
        // unreachable; it survives only as a sanity check (see TODO below).
        if(mapEntries.length > i) {
            mapEntries = Arrays.copyOf(mapEntries, i);
            throw new RuntimeException("This should not happen.");  ///TODO: Remove this sanity check.
        }

        Arrays.sort(mapEntries);

        int currentValueOrdinal = 0;

        for(i=0;i<mapEntries.length;i++) {
            int keyOrdinal = (int) mapEntries[i];
            int valueOrdinal = (int) (mapEntries[i] >> 32);

            if(keyOrdinal == -1) {
                // null/unknown key: single VNull in place of ordinal + payload
                VarInt.writeVNull(fieldBuffer);
            } else {
                Object key = keyDeserializationState.get(keyOrdinal);
                keyRecord.reset();
                framework.getSerializer(keyTypeName).serialize(key, keyRecord);
                // key ordinals are written absolute, not delta-encoded
                VarInt.writeVInt(fieldBuffer, keyOrdinal);
                VarInt.writeVInt(fieldBuffer, keyRecord.sizeOfData());
                keyRecord.writeDataTo(fieldBuffer);
            }

            if(valueOrdinal == -1) {
                VarInt.writeVNull(fieldBuffer);
            } else {
                Object value = valueDeserializationState.get(valueOrdinal);
                valueRecord.reset();
                framework.getSerializer(valueTypeName).serialize(value, valueRecord);
                // value ordinals are delta-encoded against the previous entry
                VarInt.writeVInt(fieldBuffer, valueOrdinal - currentValueOrdinal);
                VarInt.writeVInt(fieldBuffer, valueRecord.sizeOfData());
                valueRecord.writeDataTo(fieldBuffer);
                currentValueOrdinal = valueOrdinal;
            }
        }
    }

    /**
     * Encode a String as a series of VarInts, one per character.
     *
     * @param str the string to encode
     * @param out the buffer to receive the encoded characters
     */
    private void writeString(String str, ByteDataBuffer out) {
        for(int i=0;i<str.length();i++) {
            VarInt.writeVInt(out, str.charAt(i));
        }
    }

    // Ordinal of obj in the state engine for the given type, or -1 if unknown.
    private int findOrdinalInStateEngine(String typeName, Object obj) {
        FastBlobTypeDeserializationState<Object> typeDeserializationState = stateEngine.getTypeDeserializationState(typeName);
        int ordinal = typeDeserializationState.find(obj);
        return ordinal;
    }

    // Returns a reset, thread-local, reusable record for the given type,
    // creating and caching it on first use.
    FlatBlobSerializationRecord getSerializationRecord(String type) {
        Map<String, FlatBlobSerializationRecord> cachedSerializationRecords = this.cachedSerializationRecords.get();
        if(cachedSerializationRecords == null) {
            cachedSerializationRecords = new HashMap<String, FlatBlobSerializationRecord>();
            this.cachedSerializationRecords.set(cachedSerializationRecords);
        }

        FlatBlobSerializationRecord rec = cachedSerializationRecords.get(type);
        if(rec == null) {
            rec = new FlatBlobSerializationRecord(framework.getSerializer(type).getFastBlobSchema());
            cachedSerializationRecords.put(type, rec);
        }
        rec.reset();
        return rec;
    }
}
package com.carrotsearch.hppc; import java.util.*; import com.carrotsearch.hppc.cursors.*; import com.carrotsearch.hppc.predicates.*; import com.carrotsearch.hppc.procedures.*; import static com.carrotsearch.hppc.HashContainers.*; import static com.carrotsearch.hppc.Containers.*; /** * A hash set of <code>KType</code>s, implemented using using open addressing * with linear probing for collision resolution. * * <p> * <strong>Note:</strong> read about * <a href="{@docRoot}/overview-summary.html#scattervshash"> * important differences between hash and scatter sets</a>. * </p> * * @see KTypeScatterSet * @see <a href="{@docRoot}/overview-summary.html#interfaces">HPPC interfaces diagram</a> */ /*! #if ($TemplateOptions.KTypeGeneric) @SuppressWarnings("unchecked") #end !*/ /*! ${TemplateOptions.generatedAnnotation} !*/ public class KTypeHashSet<KType> extends AbstractKTypeCollection<KType> implements /*! #if ($templateonly) !*/ Intrinsics.KeyHasher<KType>, /*! #end !*/ KTypeLookupContainer<KType>, KTypeSet<KType>, Preallocable, Cloneable { /** The hash array holding keys. */ public /*! #if ($TemplateOptions.KTypeGeneric) !*/ Object [] /*! #else KType [] #end !*/ keys; /** * The number of stored keys (assigned key slots), excluding the special * "empty" key, if any. * * @see #size() * @see #hasEmptyKey */ protected int assigned; /** * Mask for slot scans in {@link #keys}. */ protected int mask; /** * We perturb hash values with a container-unique * seed to avoid problems with nearly-sorted-by-hash * values on iterations. * * @see #hashKey * @see "http://issues.carrot2.org/browse/HPPC-80" * @see "http://issues.carrot2.org/browse/HPPC-103" */ protected int keyMixer; /** * Expand (rehash) {@link #keys} when {@link #assigned} hits this value. */ protected int resizeAt; /** * Special treatment for the "empty slot" key marker. */ protected boolean hasEmptyKey; /** * The load factor for {@link #keys}. */ protected double loadFactor; /** * Per-instance hash order mixing strategy. 
* @see #keyMixer */ protected HashOrderMixingStrategy orderMixer; /** * New instance with sane defaults. * * @see #KTypeHashSet(int, double, HashOrderMixingStrategy) */ public KTypeHashSet() { this(DEFAULT_EXPECTED_ELEMENTS, DEFAULT_LOAD_FACTOR); } /** * New instance with sane defaults. * * @see #KTypeHashSet(int, double, HashOrderMixingStrategy) */ public KTypeHashSet(int expectedElements) { this(expectedElements, DEFAULT_LOAD_FACTOR); } /** * New instance with sane defaults. * * @see #KTypeHashSet(int, double, HashOrderMixingStrategy) */ public KTypeHashSet(int expectedElements, double loadFactor) { this(expectedElements, loadFactor, HashOrderMixing.defaultStrategy()); } /** * New instance with the provided defaults. * * @param expectedElements * The expected number of elements guaranteed not to cause a rehash (inclusive). * @param loadFactor * The load factor for internal buffers. Insane load factors (zero, full capacity) * are rejected by {@link #verifyLoadFactor(double)}. * @param orderMixer * Hash key order mixing strategy. See {@link HashOrderMixing} for predefined * implementations. Use constant mixers only if you understand the potential * consequences. */ public KTypeHashSet(int expectedElements, double loadFactor, HashOrderMixingStrategy orderMixer) { this.orderMixer = orderMixer; this.loadFactor = verifyLoadFactor(loadFactor); ensureCapacity(expectedElements); } /** * New instance copying elements from another {@link KTypeContainer}. */ public KTypeHashSet(KTypeContainer<? 
extends KType> container) { this(container.size()); addAll(container); } /** * {@inheritDoc} */ @Override public boolean add(KType key) { if (Intrinsics.isEmpty(key)) { assert Intrinsics.isEmpty(keys[mask + 1]); boolean added = !hasEmptyKey; hasEmptyKey = true; return added; } else { final KType [] keys = Intrinsics.<KType[]> cast(this.keys); final int mask = this.mask; int slot = hashKey(key) & mask; KType existing; while (!Intrinsics.isEmpty(existing = keys[slot])) { if (Intrinsics.equals(this, key, existing)) { return false; } slot = (slot + 1) & mask; } if (assigned == resizeAt) { allocateThenInsertThenRehash(slot, key); } else { keys[slot] = key; } assigned++; return true; } } /** * Adds all elements from the given list (vararg) to this set. * * @return Returns the number of elements actually added as a result of this * call (not previously present in the set). */ /* #if ($TemplateOptions.KTypeGeneric) */ @SafeVarargs /* #end */ public final int addAll(KType... elements) { ensureCapacity(elements.length); int count = 0; for (KType e : elements) { if (add(e)) { count++; } } return count; } /** * Adds all elements from the given {@link KTypeContainer} to this set. * * @return Returns the number of elements actually added as a result of this * call (not previously present in the set). */ public int addAll(KTypeContainer<? extends KType> container) { ensureCapacity(container.size()); return addAll((Iterable<? extends KTypeCursor<? extends KType>>) container); } /** * Adds all elements from the given iterable to this set. * * @return Returns the number of elements actually added as a result of this * call (not previously present in the set). */ public int addAll(Iterable<? extends KTypeCursor<? extends KType>> iterable) { int count = 0; for (KTypeCursor<? extends KType> cursor : iterable) { if (add(cursor.value)) { count++; } } return count; } /** * {@inheritDoc} */ @Override /*! 
#if ($TemplateOptions.KTypePrimitive)
public KType [] toArray() {
  #else !*/
public Object[] toArray() {
/*! #end !*/
  // Size is exact: the empty-key sentinel (if present) plus all occupied slots.
  final KType[] cloned = Intrinsics.<KType> newArray(size());
  int j = 0;
  if (hasEmptyKey) {
    cloned[j++] = Intrinsics.empty();
  }
  final KType[] keys = Intrinsics.<KType[]> cast(this.keys);
  for (int slot = 0, max = mask; slot <= max; slot++) {
    KType existing;
    if (!Intrinsics.isEmpty(existing = keys[slot])) {
      cloned[j++] = existing;
    }
  }
  return cloned;
}

/**
 * An alias for the (preferred) {@link #removeAll}.
 */
public boolean remove(KType key) {
  if (Intrinsics.isEmpty(key)) {
    // The empty key is stored out-of-band in the hasEmptyKey flag, not in the buffer.
    boolean hadEmptyKey = hasEmptyKey;
    hasEmptyKey = false;
    return hadEmptyKey;
  } else {
    final KType [] keys = Intrinsics.<KType[]> cast(this.keys);
    final int mask = this.mask;
    int slot = hashKey(key) & mask;

    KType existing;
    while (!Intrinsics.isEmpty(existing = keys[slot])) {
      if (Intrinsics.equals(this, key, existing)) {
        shiftConflictingKeys(slot);
        return true;
      }
      slot = (slot + 1) & mask;
    }

    return false;
  }
}

/**
 * {@inheritDoc}
 */
@Override
public int removeAll(KType key) {
  return remove(key) ? 1 : 0;
}

/**
 * {@inheritDoc}
 */
@Override
public int removeAll(KTypePredicate<? super KType> predicate) {
  int before = size();

  if (hasEmptyKey) {
    if (predicate.apply(Intrinsics.<KType> empty())) {
      hasEmptyKey = false;
    }
  }

  final KType[] keys = Intrinsics.<KType[]> cast(this.keys);
  for (int slot = 0, max = this.mask; slot <= max;) {
    KType existing;
    if (!Intrinsics.isEmpty(existing = keys[slot])) {
      if (predicate.apply(existing)) {
        shiftConflictingKeys(slot);
        continue; // Repeat the check for the same slot i (shifted).
      }
    }
    slot++;
  }

  return before - size();
}

/**
 * {@inheritDoc}
 */
@Override
public boolean contains(KType key) {
  if (Intrinsics.isEmpty(key)) {
    return hasEmptyKey;
  } else {
    final KType [] keys = Intrinsics.<KType[]> cast(this.keys);
    final int mask = this.mask;
    int slot = hashKey(key) & mask;

    KType existing;
    while (!Intrinsics.isEmpty(existing = keys[slot])) {
      if (Intrinsics.equals(this, key, existing)) {
        return true;
      }
      slot = (slot + 1) & mask;
    }

    return false;
  }
}

/**
 * {@inheritDoc}
 */
@Override
public void clear() {
  assigned = 0;
  hasEmptyKey = false;
  Arrays.fill(keys, Intrinsics.<KType> empty());
}

/**
 * {@inheritDoc}
 */
@Override
public void release() {
  assigned = 0;
  hasEmptyKey = false;
  keys = null;
  ensureCapacity(Containers.DEFAULT_EXPECTED_ELEMENTS);
}

/**
 * {@inheritDoc}
 */
@Override
public boolean isEmpty() {
  return size() == 0;
}

/**
 * Ensure this container can hold at least the
 * given number of elements without resizing its buffers.
 *
 * @param expectedElements The total number of elements, inclusive.
 */
@Override
public void ensureCapacity(int expectedElements) {
  if (expectedElements > resizeAt || keys == null) {
    final KType[] prevKeys = Intrinsics.<KType[]> cast(this.keys);
    allocateBuffers(minBufferSize(expectedElements, loadFactor));
    if (prevKeys != null && !isEmpty()) {
      rehash(prevKeys);
    }
  }
}

/**
 * {@inheritDoc}
 */
@Override
public int size() {
  return assigned + (hasEmptyKey ? 1 : 0);
}

/**
 * {@inheritDoc}
 */
@Override
public int hashCode() {
  int h = hasEmptyKey ? 0xDEADBEEF : 0;
  final KType[] keys = Intrinsics.<KType[]> cast(this.keys);
  for (int slot = mask; slot >= 0; slot--) {
    KType existing;
    if (!Intrinsics.isEmpty(existing = keys[slot])) {
      h += BitMixer.mix(existing);
    }
  }
  return h;
}

/**
 * {@inheritDoc}
 */
@Override
public boolean equals(Object obj) {
  return obj != null &&
         getClass() == obj.getClass() &&
         sameKeys(getClass().cast(obj));
}

/**
 * Return true if all keys of some other container exist in this container.
#if ($TemplateOptions.KTypeGeneric)
 * Equality comparison is performed with this object's {@link #equals(Object, Object)}
 * method.
#end
 */
private boolean sameKeys(KTypeSet<?> other) {
  if (other.size() != size()) {
    return false;
  }

  for (KTypeCursor<?> c : other) {
    if (!contains(Intrinsics.<KType> cast(c.value))) {
      return false;
    }
  }

  return true;
}

/**
 * {@inheritDoc}
 */
@Override
public KTypeHashSet<KType> clone() {
  try {
    /* #if ($templateOnly) */ @SuppressWarnings("unchecked") /* #end */
    KTypeHashSet<KType> cloned = (KTypeHashSet<KType>) super.clone();
    cloned.keys = keys.clone();
    // Fixed: was a self-assignment (cloned.hasEmptyKey = cloned.hasEmptyKey), which only
    // worked because super.clone() had already copied the field. Copy explicitly from this.
    cloned.hasEmptyKey = hasEmptyKey;
    cloned.orderMixer = orderMixer.clone();
    return cloned;
  } catch (CloneNotSupportedException e) {
    throw new RuntimeException(e);
  }
}

/**
 * {@inheritDoc}
 */
@Override
public Iterator<KTypeCursor<KType>> iterator() {
  return new EntryIterator();
}

/**
 * An iterator implementation for {@link #iterator}.
 */
protected final class EntryIterator extends AbstractIterator<KTypeCursor<KType>> {
  private final KTypeCursor<KType> cursor;
  // Slots 0..max-1 are the buffer; the virtual slot 'max' reports the empty key.
  private final int max = mask + 1;
  private int slot = -1;

  public EntryIterator() {
    cursor = new KTypeCursor<KType>();
  }

  @Override
  protected KTypeCursor<KType> fetch() {
    if (slot < max) {
      KType existing;
      for (slot++; slot < max; slot++) {
        if (!Intrinsics.isEmpty(existing = Intrinsics.<KType> cast(keys[slot]))) {
          cursor.index = slot;
          cursor.value = existing;
          return cursor;
        }
      }
    }

    if (slot == max && hasEmptyKey) {
      cursor.index = slot;
      cursor.value = Intrinsics.empty();
      slot++;
      return cursor;
    }

    return done();
  }
}

/**
 * {@inheritDoc}
 */
@Override
public <T extends KTypeProcedure<? super KType>> T forEach(T procedure) {
  if (hasEmptyKey) {
    procedure.apply(Intrinsics.<KType> empty());
  }

  final KType[] keys = Intrinsics.<KType[]> cast(this.keys);
  for (int slot = 0, max = this.mask; slot <= max; slot++) {
    KType existing;
    if (!Intrinsics.isEmpty(existing = keys[slot])) {
      procedure.apply(existing);
    }
  }

  return procedure;
}

/**
 * {@inheritDoc}
 */
@Override
public <T extends KTypePredicate<? super KType>> T forEach(T predicate) {
  if (hasEmptyKey) {
    if (!predicate.apply(Intrinsics.<KType> empty())) {
      return predicate;
    }
  }

  final KType[] keys = Intrinsics.<KType[]> cast(this.keys);
  for (int slot = 0, max = this.mask; slot <= max; slot++) {
    KType existing;
    if (!Intrinsics.isEmpty(existing = keys[slot])) {
      if (!predicate.apply(existing)) {
        break;
      }
    }
  }

  return predicate;
}

/**
 * Create a set from a variable number of arguments or an array of
 * <code>KType</code>. The elements are copied from the argument to the
 * internal buffer.
 */
/* #if ($TemplateOptions.KTypeGeneric) */
@SafeVarargs
/* #end */
public static <KType> KTypeHashSet<KType> from(KType... elements) {
  final KTypeHashSet<KType> set = new KTypeHashSet<KType>(elements.length);
  set.addAll(elements);
  return set;
}

/**
 * Returns a hash code for the given key.
 *
 * The default implementation mixes the hash of the key with {@link #keyMixer}
 * to differentiate hash order of keys between hash containers. Helps
 * alleviate problems resulting from linear conflict resolution in open
 * addressing.
 *
 * The output from this function should evenly distribute keys across the
 * entire integer range.
 */
/*! #if ($templateonly) !*/
@Override
public
/*! #else protected #end !*/
int hashKey(KType key) {
  assert !Intrinsics.isEmpty(key); // Handled as a special case (empty slot marker).
  return BitMixer.mix(key, this.keyMixer);
}

/**
 * Returns a logical "index" of a given key that can be used to speed up
 * follow-up logic in certain scenarios (conditional logic).
 *
 * The semantics of "indexes" are not strictly defined. Indexes may
 * (and typically won't be) contiguous.
 *
 * The index is valid only between modifications (it will not be affected
 * by read-only operations).
 *
 * @see #indexExists
 * @see #indexGet
 * @see #indexInsert
 * @see #indexReplace
 *
 * @param key
 *          The key to locate in the set.
 * @return A non-negative value of the logical "index" of the key in the set
 *         or a negative value if the key did not exist.
 */
public int indexOf(KType key) {
  final int mask = this.mask;
  if (Intrinsics.<KType> isEmpty(key)) {
    // The empty key maps to the virtual index mask + 1 (one past the buffer).
    return hasEmptyKey ? mask + 1 : ~(mask + 1);
  } else {
    final KType[] keys = Intrinsics.<KType[]> cast(this.keys);
    int slot = hashKey(key) & mask;

    KType existing;
    while (!Intrinsics.<KType> isEmpty(existing = keys[slot])) {
      if (Intrinsics.<KType> equals(this, key, existing)) {
        return slot;
      }
      slot = (slot + 1) & mask;
    }

    // Negative complement of the first free slot: tells indexInsert where to put the key.
    return ~slot;
  }
}

/**
 * @see #indexOf
 *
 * @param index The index of a given key, as returned from {@link #indexOf}.
 * @return Returns <code>true</code> if the index corresponds to an existing key
 *         or false otherwise. This is equivalent to checking whether the index is
 *         a positive value (existing keys) or a negative value (non-existing keys).
 */
public boolean indexExists(int index) {
  assert index < 0 ||
         (index >= 0 && index <= mask) ||
         (index == mask + 1 && hasEmptyKey);

  return index >= 0;
}

/**
 * Returns the exact value of the existing key. This method makes sense for sets
 * of objects which define custom key-equality relationship.
 *
 * @see #indexOf
 *
 * @param index The index of an existing key.
 * @return Returns the equivalent key currently stored in the set.
 * @throws AssertionError If assertions are enabled and the index does
 *         not correspond to an existing key.
 */
public KType indexGet(int index) {
  assert index >= 0 : "The index must point at an existing key.";
  assert index <= mask ||
         (index == mask + 1 && hasEmptyKey);

  return Intrinsics.<KType> cast(keys[index]);
}

/**
 * Replaces the existing equivalent key with the given one and returns any previous value
 * stored for that key.
 *
 * @see #indexOf
 *
 * @param index The index of an existing key.
 * @param equivalentKey The key to put in the set as a replacement. Must be equivalent to
 *        the key currently stored at the provided index.
 * @return Returns the previous key stored in the set.
 * @throws AssertionError If assertions are enabled and the index does
 *         not correspond to an existing key.
 */
public KType indexReplace(int index, KType equivalentKey) {
  assert index >= 0 : "The index must point at an existing key.";
  assert index <= mask ||
         (index == mask + 1 && hasEmptyKey);
  assert Intrinsics.equals(this, keys[index], equivalentKey);

  KType previousValue = Intrinsics.<KType> cast(keys[index]);
  keys[index] = equivalentKey;
  return previousValue;
}

/**
 * Inserts a key for an index that is not present in the set. This method
 * may help in avoiding double recalculation of the key's hash.
 *
 * @see #indexOf
 *
 * @param index The index of a previously non-existing key, as returned from
 *        {@link #indexOf}.
 * @throws AssertionError If assertions are enabled and the index does
 *         not correspond to an existing key.
 */
public void indexInsert(int index, KType key) {
  assert index < 0 : "The index must not point at an existing key.";

  index = ~index;
  if (Intrinsics.isEmpty(key)) {
    assert index == mask + 1;
    assert Intrinsics.isEmpty(keys[index]);
    hasEmptyKey = true;
  } else {
    assert Intrinsics.isEmpty(keys[index]);

    if (assigned == resizeAt) {
      allocateThenInsertThenRehash(index, key);
    } else {
      keys[index] = key;
    }

    assigned++;
  }
}

@Override
public String visualizeKeyDistribution(int characters) {
  return KTypeBufferVisualizer.visualizeKeyDistribution(keys, mask, characters);
}

/**
 * Validate load factor range and return it. Override and suppress if you need
 * insane load factors.
 */
protected double verifyLoadFactor(double loadFactor) {
  checkLoadFactor(loadFactor, MIN_LOAD_FACTOR, MAX_LOAD_FACTOR);
  return loadFactor;
}

/**
 * Rehash from old buffers to new buffers.
 */
protected void rehash(KType[] fromKeys) {
  assert HashContainers.checkPowerOfTwo(fromKeys.length - 1);

  // Rehash all stored keys into the new buffers.
  final KType[] keys = Intrinsics.<KType[]> cast(this.keys);
  final int mask = this.mask;
  KType existing;
  // Start below the last slot: the trailing slot is the reserved empty-key sentinel.
  for (int i = fromKeys.length - 1; --i >= 0;) {
    if (!Intrinsics.isEmpty(existing = fromKeys[i])) {
      int slot = hashKey(existing) & mask;
      while (!Intrinsics.isEmpty(keys[slot])) {
        slot = (slot + 1) & mask;
      }
      keys[slot] = existing;
    }
  }
}

/**
 * Allocate new internal buffers. This method attempts to allocate
 * and assign internal buffers atomically (either allocations succeed or not).
 */
protected void allocateBuffers(int arraySize) {
  assert Integer.bitCount(arraySize) == 1;

  // Compute new hash mixer candidate before expanding.
  final int newKeyMixer = this.orderMixer.newKeyMixer(arraySize);

  // Ensure no change is done if we hit an OOM.
  KType[] prevKeys = Intrinsics.<KType[]> cast(this.keys);
  try {
    int emptyElementSlot = 1;
    this.keys = Intrinsics.<KType> newArray(arraySize + emptyElementSlot);
  } catch (OutOfMemoryError e) {
    this.keys = prevKeys;
    throw new BufferAllocationException(
        "Not enough memory to allocate buffers for rehashing: %,d -> %,d",
        e,
        this.keys == null ? 0 : size(),
        arraySize);
  }

  this.resizeAt = expandAtCount(arraySize, loadFactor);
  this.keyMixer = newKeyMixer;
  this.mask = arraySize - 1;
}

/**
 * This method is invoked when there is a new key to be inserted into
 * the buffer but there is not enough empty slots to do so.
 *
 * New buffers are allocated. If this succeeds, we know we can proceed
 * with rehashing so we assign the pending element to the previous buffer
 * (possibly violating the invariant of having at least one empty slot)
 * and rehash all keys, substituting new buffers at the end.
 */
protected void allocateThenInsertThenRehash(int slot, KType pendingKey) {
  assert assigned == resizeAt &&
         Intrinsics.isEmpty(Intrinsics.<KType> cast(keys[slot])) &&
         !Intrinsics.isEmpty(pendingKey);

  // Try to allocate new buffers first. If we OOM, we leave in a consistent state.
  final KType[] prevKeys = Intrinsics.<KType[]> cast(this.keys);
  allocateBuffers(nextBufferSize(mask + 1, size(), loadFactor));
  assert this.keys.length > prevKeys.length;

  // We have succeeded at allocating new data so insert the pending key/value at
  // the free slot in the old arrays before rehashing.
  prevKeys[slot] = pendingKey;

  // Rehash old keys, including the pending key.
  rehash(prevKeys);
}

/**
 * Shift all the slot-conflicting keys allocated to (and including) <code>slot</code>.
 */
protected void shiftConflictingKeys(int gapSlot) {
  final KType[] keys = Intrinsics.<KType[]> cast(this.keys);
  final int mask = this.mask;

  // Perform shifts of conflicting keys to fill in the gap.
  int distance = 0;
  while (true) {
    final int slot = (gapSlot + (++distance)) & mask;
    final KType existing = keys[slot];
    if (Intrinsics.isEmpty(existing)) {
      break;
    }

    final int idealSlot = hashKey(existing);
    // Masked difference, so idealSlot need not be pre-masked.
    final int shift = (slot - idealSlot) & mask;
    if (shift >= distance) {
      // Entry at this position was originally at or before the gap slot.
      // Move the conflict-shifted entry to the gap's position and repeat the procedure
      // for any entries to the right of the current position, treating it
      // as the new gap.
      keys[gapSlot] = existing;
      gapSlot = slot;
      distance = 0;
    }
  }

  // Mark the last found gap slot without a conflict as empty.
  keys[gapSlot] = Intrinsics.empty();
  assigned--;
}
}
/* * Copyright 2017 LINE Corporation * * LINE Corporation licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.linecorp.armeria.server; import static java.util.Objects.requireNonNull; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Set; import java.util.StringJoiner; import java.util.regex.Matcher; import java.util.regex.Pattern; import com.google.common.base.MoreObjects; import com.google.common.base.Splitter; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linecorp.armeria.common.annotation.Nullable; /** * The default {@link PathMapping} implementation. It holds three things: * <ul> * <li>The regex-compiled form of the path. It is used for matching and extracting.</li> * <li>The skeleton of the path. 
It is used for duplication detecting.</li>
 *   <li>A set of path parameters declared in the path pattern</li>
 * </ul>
 */
final class ParameterizedPathMapping extends AbstractPathMapping {

    // A valid pattern is one or more segments, each either a constant, ":name" or "{name}",
    // with an optional trailing '/'.
    private static final Pattern VALID_PATTERN = Pattern.compile("(/[^/{}:]+|/:[^/{}]+|/\\{[^/{}]+})+/?");

    // Matches the capture-rest forms "/{*name}" and "/:*name"; group(1)/group(2) hold the name.
    private static final Pattern CAPTURE_REST_PATTERN =
            Pattern.compile("/\\{\\*([^/{}]*)}|/:\\*([^/{}]*)");

    private static final Pattern CAPTURE_REST_VARIABLE_NAME_PATTERN = Pattern.compile("^\\w+$");

    private static final String[] EMPTY_NAMES = new String[0];

    private static final Splitter PATH_SPLITTER = Splitter.on('/');

    /**
     * The original path pattern specified in the constructor.
     */
    private final String pathPattern;

    // Canonical ":name" / ":*name" form of the pattern, exposed via patternString().
    private final String normalizedPathPattern;

    /**
     * Regex form of given path, which will be used for matching or extracting.
     *
     * <p>e.g. "/{x}/{y}/{x}" -> "/(?&lt;x&gt;[^/]+)/(?&lt;y&gt;[^/]+)/(\\k&lt;x&gt;)"
     */
    private final Pattern pattern;

    /**
     * Skeletal form of given path, which is used for duplicated routing rule detection.
     * For example, "/{a}/{b}" and "/{c}/{d}" has same skeletal form and regarded as duplicated.
     *
     * <p>e.g. "/{x}/{y}/{z}" -> "/:/:/:"</p>
     * <p>Set a skeletal form with the patterns described in {@link Route#paths()}.</p>
     */
    private final String skeleton;

    private final List<String> paths;

    /**
     * The names of the path parameters in the order of appearance.
     */
    private final String[] paramNameArray;

    /**
     * The names of the path parameters this mapping will extract.
     */
    private final Set<String> paramNames;

    /**
     * Create a {@link ParameterizedPathMapping} instance from given {@code pathPattern}.
     *
     * @param pathPattern the {@link String} that contains path params.
     *             e.g. {@code /users/{name}}, {@code /users/:name}, {@code /users/{*name}} or
     *             {@code /users/:*name}
     *
     * @throws IllegalArgumentException if the {@code pathPattern} is invalid.
     */
    ParameterizedPathMapping(String pathPattern) {
        requireNonNull(pathPattern, "pathPattern");

        if (!pathPattern.startsWith("/")) {
            throw new IllegalArgumentException("pathPattern: " + pathPattern + " (must start with '/')");
        }

        if (!VALID_PATTERN.matcher(pathPattern).matches()) {
            throw new IllegalArgumentException("pathPattern: " + pathPattern + " (invalid pattern)");
        }
        if (!isValidCaptureRestPattern(pathPattern)) {
            throw new IllegalArgumentException(
                    "pathPattern: " + pathPattern + " (invalid capture rest pattern)");
        }

        // Build the regex, the normalized pattern and the skeleton in a single pass
        // over the '/'-separated tokens.
        final StringJoiner patternJoiner = new StringJoiner("/");
        final StringJoiner normalizedPatternJoiner = new StringJoiner("/");
        final StringJoiner skeletonJoiner = new StringJoiner("/");
        final List<String> paramNames = new ArrayList<>();
        for (String token : PATH_SPLITTER.split(pathPattern)) {
            final String paramName = paramName(token);
            if (paramName == null) {
                // If the given token is a constant, do not manipulate it.
                patternJoiner.add(token);
                normalizedPatternJoiner.add(token);
                skeletonJoiner.add(token);
                continue;
            }

            final boolean captureRestPathMatching = isCaptureRestPathMatching(token);
            final int paramNameIdx = paramNames.indexOf(paramName);
            if (paramNameIdx < 0) {
                // If the given token appeared first time, add it to the set and
                // replace it with a capturing group expression in regex.
                paramNames.add(paramName);
                if (captureRestPathMatching) {
                    patternJoiner.add("(.*)");
                } else {
                    patternJoiner.add("([^/]+)");
                }
            } else {
                // If the given token appeared before, replace it with a back-reference expression
                // in regex. Groups are numbered, so the back-reference is "\<groupIndex>".
                patternJoiner.add("\\" + (paramNameIdx + 1));
            }
            // Note: the ternary mixes a String (":*") and a char (':'); string concatenation
            // converts either to text, yielding ":*name" or ":name".
            normalizedPatternJoiner.add((captureRestPathMatching ? ":*" : ':') + paramName);
            skeletonJoiner.add(captureRestPathMatching ? "*" : ":");
        }

        this.pathPattern = pathPattern;
        pattern = Pattern.compile(patternJoiner.toString());
        normalizedPathPattern = normalizedPatternJoiner.toString();
        skeleton = skeletonJoiner.toString();
        // NOTE(review): both entries are the skeleton. If Route#paths() is expected to expose
        // the normalized pattern as the second element, this should be
        // ImmutableList.of(skeleton, normalizedPathPattern) — confirm against Route#paths().
        paths = ImmutableList.of(skeleton, skeleton);
        paramNameArray = paramNames.toArray(EMPTY_NAMES);
        this.paramNames = ImmutableSet.copyOf(paramNames);
    }

    /**
     * Returns the name of the path parameter contained in the path element. If it contains no path parameter,
     * {@code null} is returned. e.g.
     * <ul>
     *   <li>{@code "{foo}"} -> {@code "foo"}</li>
     *   <li>{@code ":bar"} -> {@code "bar"}</li>
     *   <li>{@code "baz"} -> {@code null}</li>
     *   <li>{@code "{*foo}"} -> {@code "foo"}</li>
     *   <li>{@code ":*foo"} -> {@code "foo"}</li>
     * </ul>
     */
    @Nullable
    private static String paramName(String token) {
        if (token.startsWith("{") && token.endsWith("}")) {
            // Skip the optional '*' marker of a capture-rest parameter.
            final int beginIndex = token.charAt(1) == '*' ? 2 : 1;
            return token.substring(beginIndex, token.length() - 1);
        }

        if (token.startsWith(":")) {
            final int beginIndex = token.charAt(1) == '*' ? 2 : 1;
            return token.substring(beginIndex);
        }

        return null;
    }

    /**
     * Return true if path parameter contains capture the rest path pattern
     * ({@code "{*foo}"}" or {@code ":*foo"}).
     */
    private static boolean isCaptureRestPathMatching(String token) {
        return (token.startsWith("{*") && token.endsWith("}")) || token.startsWith(":*");
    }

    /**
     * Return true if the capture rest pattern specified is valid.
     */
    private static boolean isValidCaptureRestPattern(String pathPattern) {
        final Matcher matcher = CAPTURE_REST_PATTERN.matcher(pathPattern);
        if (!matcher.find()) {
            // Return true if the path does not include the capture rest pattern.
            return true;
        }
        // Exactly one of group(1)/group(2) is non-null depending on the matched alternative.
        final String paramName = MoreObjects.firstNonNull(matcher.group(1), matcher.group(2));
        // The variable name must be at least a character of alphabet, number and underscore.
        if (!CAPTURE_REST_VARIABLE_NAME_PATTERN.matcher(paramName).matches()) {
            return false;
        }

        // The capture rest pattern must be located at the end of the path.
        return pathPattern.length() == matcher.end();
    }

    /**
     * Returns the skeleton.
     */
    String skeleton() {
        return skeleton;
    }

    @Override
    public Set<String> paramNames() {
        return paramNames;
    }

    @Override
    public String patternString() {
        return normalizedPathPattern;
    }

    @Override
    public RoutePathType pathType() {
        return RoutePathType.PARAMETERIZED;
    }

    @Override
    public List<String> paths() {
        return paths;
    }

    @Nullable
    @Override
    RoutingResultBuilder doApply(RoutingContext routingCtx) {
        final Matcher matcher = pattern.matcher(routingCtx.path());
        if (!matcher.matches()) {
            return null;
        }

        final RoutingResultBuilder builder =
                RoutingResult.builderWithExpectedNumParams(paramNameArray.length)
                             .path(routingCtx.path())
                             .query(routingCtx.query());
        // Group i + 1 corresponds to the i-th path parameter (groups are 1-based).
        for (int i = 0; i < paramNameArray.length; i++) {
            builder.rawParam(paramNameArray[i], matcher.group(i + 1));
        }
        return builder;
    }

    @Override
    public boolean equals(@Nullable Object o) {
        if (this == o) {
            return true;
        }

        if (!(o instanceof ParameterizedPathMapping)) {
            return false;
        }

        final ParameterizedPathMapping that = (ParameterizedPathMapping) o;

        // Two mappings are equal when their skeletons and parameter names match;
        // the concrete pathPattern text is intentionally not compared.
        return skeleton.equals(that.skeleton) &&
               Arrays.equals(paramNameArray, that.paramNameArray);
    }

    @Override
    public int hashCode() {
        return skeleton.hashCode() * 31 + Arrays.hashCode(paramNameArray);
    }

    @Override
    public String toString() {
        return pathPattern;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.kafka.streams.processor.internals;

import org.apache.kafka.clients.admin.Admin;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.ListOffsetsResult;
import org.apache.kafka.clients.admin.ListOffsetsResult.ListOffsetsResultInfo;
import org.apache.kafka.clients.consumer.ConsumerPartitionAssignor.Assignment;
import org.apache.kafka.clients.consumer.ConsumerPartitionAssignor.GroupSubscription;
import org.apache.kafka.clients.consumer.ConsumerPartitionAssignor.Subscription;
import org.apache.kafka.common.Cluster;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.internals.KafkaFutureImpl;
import org.apache.kafka.common.utils.MockTime;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.StreamsConfig.InternalConfig;
import org.apache.kafka.streams.errors.StreamsException;
import org.apache.kafka.streams.processor.TaskId;
import org.apache.kafka.streams.processor.internals.assignment.AssignmentInfo;
import org.apache.kafka.streams.processor.internals.assignment.AssignorError;
import org.apache.kafka.streams.processor.internals.assignment.HighAvailabilityTaskAssignor;
import org.apache.kafka.streams.processor.internals.assignment.SubscriptionInfo;
import org.apache.kafka.test.MockClientSupplier;
import org.apache.kafka.test.MockInternalTopicManager;
import org.apache.kafka.test.MockKeyValueStoreBuilder;
import org.apache.kafka.test.MockProcessorSupplier;
import org.easymock.EasyMock;
import org.junit.Before;
import org.junit.Test;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;

import static java.util.Arrays.asList;
import static java.util.Collections.emptySet;
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import static org.apache.kafka.common.utils.Utils.mkSet;
import static org.apache.kafka.streams.processor.internals.assignment.AssignmentTestUtils.EMPTY_CHANGELOG_END_OFFSETS;
import static org.apache.kafka.streams.processor.internals.assignment.AssignmentTestUtils.EMPTY_TASKS;
import static org.apache.kafka.streams.processor.internals.assignment.AssignmentTestUtils.TASK_0_0;
import static org.apache.kafka.streams.processor.internals.assignment.AssignmentTestUtils.TASK_0_1;
import static org.apache.kafka.streams.processor.internals.assignment.AssignmentTestUtils.TASK_0_2;
import static org.apache.kafka.streams.processor.internals.assignment.AssignmentTestUtils.UUID_1;
import static org.apache.kafka.streams.processor.internals.assignment.AssignmentTestUtils.UUID_2;
import static org.apache.kafka.streams.processor.internals.assignment.StreamsAssignmentProtocolVersions.LATEST_SUPPORTED_VERSION;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.expect;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.is;

// Tests for StreamsPartitionAssignor configured with the HighAvailabilityTaskAssignor:
// verifies sticky task placement on end-offset fetch failure and probing-rebalance scheduling.
public class HighAvailabilityStreamsPartitionAssignorTest {

    // Fixed partition metadata for three source topics used by the test topologies.
    private final List<PartitionInfo> infos = asList(
        new PartitionInfo("topic1", 0, Node.noNode(), new Node[0], new Node[0]),
        new PartitionInfo("topic1", 1, Node.noNode(), new Node[0], new Node[0]),
        new PartitionInfo("topic1", 2, Node.noNode(), new Node[0], new Node[0]),
        new PartitionInfo("topic2", 0, Node.noNode(), new Node[0], new Node[0]),
        new PartitionInfo("topic2", 1, Node.noNode(), new Node[0], new Node[0]),
        new PartitionInfo("topic2", 2, Node.noNode(), new Node[0], new Node[0]),
        new PartitionInfo("topic3", 0, Node.noNode(), new Node[0], new Node[0]),
        new PartitionInfo("topic3", 1, Node.noNode(), new Node[0], new Node[0]),
        new PartitionInfo("topic3", 2, Node.noNode(), new Node[0], new Node[0]),
        new PartitionInfo("topic3", 3, Node.noNode(), new Node[0], new Node[0])
    );

    private final Cluster metadata = new Cluster(
        "cluster",
        singletonList(Node.noNode()),
        infos,
        emptySet(),
        emptySet());

    private final StreamsPartitionAssignor partitionAssignor = new StreamsPartitionAssignor();
    private final MockClientSupplier mockClientSupplier = new MockClientSupplier();
    private static final String USER_END_POINT = "localhost:8080";
    private static final String APPLICATION_ID = "stream-partition-assignor-test";

    private TaskManager taskManager;
    private Admin adminClient;
    private StreamsConfig streamsConfig = new StreamsConfig(configProps());
    private final InternalTopologyBuilder builder = new InternalTopologyBuilder();
    private final StreamsMetadataState streamsMetadataState = EasyMock.createNiceMock(StreamsMetadataState.class);
    private final Map<String, Subscription> subscriptions = new HashMap<>();
    private final AtomicInteger assignmentError = new AtomicInteger();
    private final AtomicLong nextProbingRebalanceMs = new AtomicLong(Long.MAX_VALUE);
    private final MockTime time = new MockTime();

    // Builds the assignor configuration, wiring the (possibly still-null) mocks through
    // the InternalConfig pass-through keys and forcing the HA task assignor.
    private Map<String, Object> configProps() {
        final Map<String, Object> configurationMap = new HashMap<>();
        configurationMap.put(StreamsConfig.APPLICATION_ID_CONFIG, APPLICATION_ID);
        configurationMap.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, USER_END_POINT);
        configurationMap.put(InternalConfig.TASK_MANAGER_FOR_PARTITION_ASSIGNOR, taskManager);
        configurationMap.put(InternalConfig.STREAMS_METADATA_STATE_FOR_PARTITION_ASSIGNOR, streamsMetadataState);
        configurationMap.put(InternalConfig.STREAMS_ADMIN_CLIENT, adminClient);
        configurationMap.put(InternalConfig.ASSIGNMENT_ERROR_CODE, assignmentError);
        configurationMap.put(InternalConfig.NEXT_SCHEDULED_REBALANCE_MS, nextProbingRebalanceMs);
        configurationMap.put(InternalConfig.TIME, time);
        configurationMap.put(InternalConfig.INTERNAL_TASK_ASSIGNOR_CLASS, HighAvailabilityTaskAssignor.class.getName());
        return configurationMap;
    }

    // Make sure to complete setting up any mocks (such as TaskManager or AdminClient) before configuring the assignor
    private void configurePartitionAssignorWith(final Map<String, Object> props) {
        final Map<String, Object> configMap = configProps();
        configMap.putAll(props);

        streamsConfig = new StreamsConfig(configMap);
        partitionAssignor.configure(configMap);
        // Mocks are switched to replay mode here; no further expectations may be added after this call.
        EasyMock.replay(taskManager, adminClient);

        overwriteInternalTopicManagerWithMock();
    }

    // Useful for tests that don't care about the task offset sums
    private void createMockTaskManager(final Set<TaskId> activeTasks) {
        createMockTaskManager(getTaskOffsetSums(activeTasks));
    }

    private void createMockTaskManager(final Map<TaskId, Long> taskOffsetSums) {
        taskManager = EasyMock.createNiceMock(TaskManager.class);
        expect(taskManager.builder()).andReturn(builder).anyTimes();
        expect(taskManager.getTaskOffsetSums()).andReturn(taskOffsetSums).anyTimes();
        expect(taskManager.processId()).andReturn(UUID_1).anyTimes();
        builder.setApplicationId(APPLICATION_ID);
        builder.buildTopology();
    }

    // If you don't care about setting the end offsets for each specific topic partition, the helper method
    // getTopicPartitionOffsetMap is useful for building this input map for all partitions
    private void createMockAdminClient(final Map<TopicPartition, Long> changelogEndOffsets) {
        adminClient = EasyMock.createMock(AdminClient.class);

        final ListOffsetsResult result = EasyMock.createNiceMock(ListOffsetsResult.class);
        final KafkaFutureImpl<Map<TopicPartition, ListOffsetsResultInfo>> allFuture = new KafkaFutureImpl<>();
        // Pre-complete the future with one mocked ListOffsetsResultInfo per partition.
        allFuture.complete(changelogEndOffsets.entrySet().stream().collect(Collectors.toMap(
            Entry::getKey,
            t -> {
                final ListOffsetsResultInfo info = EasyMock.createNiceMock(ListOffsetsResultInfo.class);
                expect(info.offset()).andStubReturn(t.getValue());
                EasyMock.replay(info);
                return info;
            }))
        );

        expect(adminClient.listOffsets(anyObject())).andStubReturn(result);
        expect(result.all()).andReturn(allFuture);

        EasyMock.replay(result);
    }

    private void overwriteInternalTopicManagerWithMock() {
        final MockInternalTopicManager mockInternalTopicManager =
            new MockInternalTopicManager(streamsConfig, mockClientSupplier.restoreConsumer);
        partitionAssignor.setInternalTopicManager(mockInternalTopicManager);
    }

    @Before
    public void setUp() {
        createMockAdminClient(EMPTY_CHANGELOG_END_OFFSETS);
    }

    @Test
    public void shouldReturnAllActiveTasksToPreviousOwnerRegardlessOfBalanceAndTriggerRebalanceIfEndOffsetFetchFailsAndHighAvailabilityEnabled() {
        final long rebalanceInterval = 5 * 60 * 1000L;

        builder.addSource(null, "source1", null, null, null, "topic1");
        builder.addProcessor("processor1", new MockProcessorSupplier<>(), "source1");
        builder.addStateStore(new MockKeyValueStoreBuilder("store1", false), "processor1");

        final Set<TaskId> allTasks = mkSet(TASK_0_0, TASK_0_1, TASK_0_2);

        createMockTaskManager(allTasks);
        // Replace the default admin mock so the end-offset fetch throws instead of succeeding.
        adminClient = EasyMock.createMock(AdminClient.class);
        expect(adminClient.listOffsets(anyObject())).andThrow(new StreamsException("Should be handled"));
        configurePartitionAssignorWith(singletonMap(StreamsConfig.PROBING_REBALANCE_INTERVAL_MS_CONFIG, rebalanceInterval));

        final String firstConsumer = "consumer1";
        final String newConsumer = "consumer2";

        subscriptions.put(firstConsumer,
                          new Subscription(
                              singletonList("source1"),
                              getInfo(UUID_1, allTasks).encode()
                          ));
        subscriptions.put(newConsumer,
                          new Subscription(
                              singletonList("source1"),
                              getInfo(UUID_2, EMPTY_TASKS).encode()
                          ));

        final Map<String, Assignment> assignments = partitionAssignor
            .assign(metadata, new GroupSubscription(subscriptions))
            .groupAssignment();

        final AssignmentInfo firstConsumerUserData =
            AssignmentInfo.decode(assignments.get(firstConsumer).userData());
        final List<TaskId> firstConsumerActiveTasks = firstConsumerUserData.activeTasks();
        final AssignmentInfo newConsumerUserData =
            AssignmentInfo.decode(assignments.get(newConsumer).userData());
        final List<TaskId> newConsumerActiveTasks = newConsumerUserData.activeTasks();

        // The tasks were returned to their prior owner
        assertThat(firstConsumerActiveTasks, equalTo(new ArrayList<>(allTasks)));
        assertThat(newConsumerActiveTasks, empty());

        // There is a rebalance scheduled
        assertThat(
            time.milliseconds() + rebalanceInterval,
            anyOf(
                is(firstConsumerUserData.nextRebalanceMs()),
                is(newConsumerUserData.nextRebalanceMs())
            )
        );
    }

    @Test
    public void shouldScheduleProbingRebalanceOnThisClientIfWarmupTasksRequired() {
        final long rebalanceInterval = 5 * 60 * 1000L;

        builder.addSource(null, "source1", null, null, null, "topic1");
        builder.addProcessor("processor1", new MockProcessorSupplier<>(), "source1");
        builder.addStateStore(new MockKeyValueStoreBuilder("store1", false), "processor1");

        final Set<TaskId> allTasks = mkSet(TASK_0_0, TASK_0_1, TASK_0_2);

        createMockTaskManager(allTasks);
        createMockAdminClient(getTopicPartitionOffsetsMap(
            singletonList(APPLICATION_ID + "-store1-changelog"),
            singletonList(3)));
        configurePartitionAssignorWith(singletonMap(StreamsConfig.PROBING_REBALANCE_INTERVAL_MS_CONFIG, rebalanceInterval));

        final String firstConsumer = "consumer1";
        final String newConsumer = "consumer2";

        subscriptions.put(firstConsumer,
                          new Subscription(
                              singletonList("source1"),
                              getInfo(UUID_1, allTasks).encode()
                          ));
        subscriptions.put(newConsumer,
                          new Subscription(
                              singletonList("source1"),
                              getInfo(UUID_2, EMPTY_TASKS).encode()
                          ));

        final Map<String, Assignment> assignments = partitionAssignor
            .assign(metadata, new GroupSubscription(subscriptions))
            .groupAssignment();

        final List<TaskId> firstConsumerActiveTasks =
            AssignmentInfo.decode(assignments.get(firstConsumer).userData()).activeTasks();
        final List<TaskId> newConsumerActiveTasks =
            AssignmentInfo.decode(assignments.get(newConsumer).userData()).activeTasks();

        assertThat(firstConsumerActiveTasks, equalTo(new ArrayList<>(allTasks)));
        assertThat(newConsumerActiveTasks, empty());

        assertThat(assignmentError.get(), equalTo(AssignorError.NONE.code()));

        final long nextScheduledRebalanceOnThisClient =
            AssignmentInfo.decode(assignments.get(firstConsumer).userData()).nextRebalanceMs();
        final long nextScheduledRebalanceOnOtherClient =
            AssignmentInfo.decode(assignments.get(newConsumer).userData()).nextRebalanceMs();
        assertThat(nextScheduledRebalanceOnThisClient, equalTo(time.milliseconds() + rebalanceInterval));
        assertThat(nextScheduledRebalanceOnOtherClient, equalTo(Long.MAX_VALUE));
    }

    /**
     * Helper for building the input to createMockAdminClient in cases where we don't care about the actual offsets
     * @param changelogTopics The names of all changelog topics in the topology
     * @param topicsNumPartitions The number of partitions for the corresponding changelog topic, such that the number
     *            of partitions of the ith topic in changelogTopics is given by the ith element of topicsNumPartitions
     */
    private static Map<TopicPartition, Long> getTopicPartitionOffsetsMap(final List<String> changelogTopics,
                                                                         final List<Integer> topicsNumPartitions) {
        if (changelogTopics.size() != topicsNumPartitions.size()) {
            throw new IllegalStateException("Passed in " + changelogTopics.size() + " changelog topic names, but " +
                                                topicsNumPartitions.size() + " different numPartitions for the topics");
        }
        final Map<TopicPartition, Long> changelogEndOffsets = new HashMap<>();
        for (int i = 0; i < changelogTopics.size(); ++i) {
            final String topic = changelogTopics.get(i);
            final int numPartitions = topicsNumPartitions.get(i);
            for (int partition = 0; partition < numPartitions; ++partition) {
                // Long.MAX_VALUE stands in for "some end offset"; tests using this helper don't
                // inspect the actual values.
                changelogEndOffsets.put(new TopicPartition(topic, partition), Long.MAX_VALUE);
            }
        }
        return changelogEndOffsets;
    }

    private static SubscriptionInfo getInfo(final UUID processId,
                                            final Set<TaskId> prevTasks) {
        return new SubscriptionInfo(
            LATEST_SUPPORTED_VERSION, LATEST_SUPPORTED_VERSION, processId, null, getTaskOffsetSums(prevTasks));
    }

    // Stub offset sums for when we only care about the prev/standby task sets, not the actual offsets
    private static Map<TaskId, Long> getTaskOffsetSums(final Set<TaskId> activeTasks) {
        final Map<TaskId, Long> taskOffsetSums = activeTasks.stream().collect(Collectors.toMap(t -> t, t -> Task.LATEST_OFFSET));
        taskOffsetSums.putAll(EMPTY_TASKS.stream().collect(Collectors.toMap(t -> t, t -> 0L)));
        return taskOffsetSums;
    }
}
/*
 * Copyright (C) 2014-2016 LinkedIn Corp. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use
 * this file except in compliance with the License. You may obtain a copy of the
 * License at http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed
 * under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied.
 */

package gobblin.metastore;

import static gobblin.util.HadoopUtils.FS_SCHEMES_NON_ATOMIC;

import java.io.IOException;
import java.net.URI;
import java.util.Collection;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.DefaultCodec;

import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.io.Closer;

import gobblin.configuration.State;
import gobblin.util.HadoopUtils;


/**
 * An implementation of {@link StateStore} backed by a {@link FileSystem}.
 *
 * <p>
 * This implementation uses Hadoop {@link org.apache.hadoop.io.SequenceFile}
 * to store {@link State}s. Each store maps to one directory, and each
 * table maps to one file under the store directory. Keys are state IDs
 * (see {@link State#getId()}), and values are objects of {@link State} or
 * any of its extensions. Keys will be empty strings if state IDs are not set
 * (i.e., {@link State#getId()} returns <em>null</em>). In this case, the
 * {@link FsStateStore#get(String, String, String)} method may not work.
 * </p>
 *
 * @param <T> state object type
 *
 * @author Yinan Li
 */
public class FsStateStore<T extends State> implements StateStore<T> {

  // Prefix for temporary files written first and then atomically renamed into place
  public static final String TMP_FILE_PREFIX = "_tmp_";

  protected final Configuration conf;
  protected final FileSystem fs;
  // Whether puts go through a tmp file + rename; disabled for filesystems whose
  // rename is not atomic (see FS_SCHEMES_NON_ATOMIC)
  protected boolean useTmpFileForPut;

  // Root directory for the task state store
  protected final String storeRootDir;

  // Class of the state objects to be put into the store.
  // NOTE(review): get()/getAll() call stateClass.newInstance(), so T must have a
  // public no-arg constructor — confirm for any new state type added.
  private final Class<T> stateClass;

  /**
   * Creates a state store on the {@link FileSystem} identified by {@code fsUri},
   * rooted at {@code storeRootDir}.
   */
  public FsStateStore(String fsUri, String storeRootDir, Class<T> stateClass) throws IOException {
    this.conf = new Configuration();
    this.fs = FileSystem.get(URI.create(fsUri), this.conf);
    this.useTmpFileForPut = !FS_SCHEMES_NON_ATOMIC.contains(this.fs.getUri().getScheme());
    this.storeRootDir = storeRootDir;
    this.stateClass = stateClass;
  }

  /**
   * Creates a state store on an existing {@link FileSystem} instance,
   * reusing that filesystem's {@link Configuration}.
   */
  public FsStateStore(FileSystem fs, String storeRootDir, Class<T> stateClass) {
    this.fs = fs;
    this.useTmpFileForPut = !FS_SCHEMES_NON_ATOMIC.contains(this.fs.getUri().getScheme());
    this.conf = this.fs.getConf();
    this.storeRootDir = storeRootDir;
    this.stateClass = stateClass;
  }

  /**
   * Creates a state store from a single store URL; the filesystem is derived
   * from the URL's scheme and the root directory from its path component.
   */
  public FsStateStore(String storeUrl, Class<T> stateClass) throws IOException {
    this.conf = new Configuration();
    Path storePath = new Path(storeUrl);
    this.fs = storePath.getFileSystem(this.conf);
    this.useTmpFileForPut = !FS_SCHEMES_NON_ATOMIC.contains(this.fs.getUri().getScheme());
    this.storeRootDir = storePath.toUri().getPath();
    this.stateClass = stateClass;
  }

  /**
   * Creates the store directory if it does not already exist.
   *
   * @return {@code true} if the directory exists or was created
   */
  @Override
  public boolean create(String storeName) throws IOException {
    Path storePath = new Path(this.storeRootDir, storeName);
    // Idempotent: an existing store directory counts as success
    return this.fs.exists(storePath) || this.fs.mkdirs(storePath, new FsPermission((short) 0755));
  }

  /**
   * Creates an empty table file under the given store, creating the store
   * directory first if necessary.
   *
   * @throws IOException if the table file already exists
   */
  @Override
  public boolean create(String storeName, String tableName) throws IOException {
    Path storePath = new Path(this.storeRootDir, storeName);
    if (!this.fs.exists(storePath) && !create(storeName)) {
      return false;
    }

    Path tablePath = new Path(storePath, tableName);
    if (this.fs.exists(tablePath)) {
      throw new IOException(String.format("State file %s already exists for table %s", tablePath, tableName));
    }

    return this.fs.createNewFile(tablePath);
  }

  /** Returns whether the table file for the given store/table exists. */
  @Override
  public boolean exists(String storeName, String tableName) throws IOException {
    Path tablePath = new Path(new Path(this.storeRootDir, storeName), tableName);
    return this.fs.exists(tablePath);
  }

  /**
   * See {@link StateStore#put(String, String, T)}.
   *
   * <p>
   * This implementation does not support putting the state object into an existing store as
   * append is to be supported by the Hadoop SequenceFile (HADOOP-7139).
   * </p>
   */
  @Override
  public void put(String storeName, String tableName, T state) throws IOException {
    // Write to a tmp file and rename into place so readers never see a partial file
    // (skipped on filesystems where rename is not atomic anyway)
    String tmpTableName = this.useTmpFileForPut ? TMP_FILE_PREFIX + tableName : tableName;
    Path tmpTablePath = new Path(new Path(this.storeRootDir, storeName), tmpTableName);

    if (!this.fs.exists(tmpTablePath) && !create(storeName, tmpTableName)) {
      throw new IOException("Failed to create a state file for table " + tmpTableName);
    }

    Closer closer = Closer.create();
    try {
      SequenceFile.Writer writer = closer.register(SequenceFile.createWriter(this.fs, this.conf, tmpTablePath,
          Text.class, this.stateClass, SequenceFile.CompressionType.BLOCK, new DefaultCodec()));
      // Null state IDs are stored under the empty-string key (see class javadoc)
      writer.append(new Text(Strings.nullToEmpty(state.getId())), state);
    } catch (Throwable t) {
      // Closer.rethrow records t as the primary exception and suppresses close() failures
      throw closer.rethrow(t);
    } finally {
      closer.close();
    }

    if (this.useTmpFileForPut) {
      Path tablePath = new Path(new Path(this.storeRootDir, storeName), tableName);
      HadoopUtils.renamePath(this.fs, tmpTablePath, tablePath);
    }
  }

  /**
   * See {@link StateStore#putAll(String, String, Collection)}.
   *
   * <p>
   * This implementation does not support putting the state objects into an existing store as
   * append is to be supported by the Hadoop SequenceFile (HADOOP-7139).
   * </p>
   */
  @Override
  public void putAll(String storeName, String tableName, Collection<T> states) throws IOException {
    // Same tmp-file-then-rename strategy as put(), but batching all states in one writer
    String tmpTableName = this.useTmpFileForPut ? TMP_FILE_PREFIX + tableName : tableName;
    Path tmpTablePath = new Path(new Path(this.storeRootDir, storeName), tmpTableName);

    if (!this.fs.exists(tmpTablePath) && !create(storeName, tmpTableName)) {
      throw new IOException("Failed to create a state file for table " + tmpTableName);
    }

    Closer closer = Closer.create();
    try {
      SequenceFile.Writer writer = closer.register(SequenceFile.createWriter(this.fs, this.conf, tmpTablePath,
          Text.class, this.stateClass, SequenceFile.CompressionType.BLOCK, new DefaultCodec()));
      for (T state : states) {
        writer.append(new Text(Strings.nullToEmpty(state.getId())), state);
      }
    } catch (Throwable t) {
      throw closer.rethrow(t);
    } finally {
      closer.close();
    }

    if (this.useTmpFileForPut) {
      Path tablePath = new Path(new Path(this.storeRootDir, storeName), tableName);
      HadoopUtils.renamePath(this.fs, tmpTablePath, tablePath);
    }
  }

  /**
   * Returns the first state in the table whose key equals {@code stateId},
   * or {@code null} if the table does not exist or no key matches.
   */
  @Override
  public T get(String storeName, String tableName, String stateId) throws IOException {
    Path tablePath = new Path(new Path(this.storeRootDir, storeName), tableName);
    if (!this.fs.exists(tablePath)) {
      return null;
    }

    Closer closer = Closer.create();
    try {
      @SuppressWarnings("deprecation")
      SequenceFile.Reader reader = closer.register(new SequenceFile.Reader(this.fs, tablePath, this.conf));
      try {
        Text key = new Text();
        // reader.next deserializes each record in place into this instance
        T state = this.stateClass.newInstance();
        while (reader.next(key, state)) {
          if (key.toString().equals(stateId)) {
            return state;
          }
        }
      } catch (Exception e) {
        // Wraps reflective instantiation failures (and any reader error) as IOException
        throw new IOException(e);
      }
    } catch (Throwable t) {
      throw closer.rethrow(t);
    } finally {
      closer.close();
    }

    return null;
  }

  /**
   * Returns all states in the given table, or an empty list if the table
   * does not exist.
   */
  @Override
  public List<T> getAll(String storeName, String tableName) throws IOException {
    List<T> states = Lists.newArrayList();

    Path tablePath = new Path(new Path(this.storeRootDir, storeName), tableName);
    if (!this.fs.exists(tablePath)) {
      return states;
    }

    Closer closer = Closer.create();
    try {
      @SuppressWarnings("deprecation")
      SequenceFile.Reader reader = closer.register(new SequenceFile.Reader(this.fs, tablePath, this.conf));
      try {
        Text key = new Text();
        T state = this.stateClass.newInstance();
        while (reader.next(key, state)) {
          states.add(state);
          // We need a new object for each read state
          state = this.stateClass.newInstance();
        }
      } catch (Exception e) {
        throw new IOException(e);
      }
    } catch (Throwable t) {
      throw closer.rethrow(t);
    } finally {
      closer.close();
    }

    return states;
  }

  /**
   * Returns all states from every table in the given store (one file per
   * table under the store directory), or an empty list if the store does
   * not exist.
   */
  @Override
  public List<T> getAll(String storeName) throws IOException {
    List<T> states = Lists.newArrayList();

    Path storePath = new Path(this.storeRootDir, storeName);
    if (!this.fs.exists(storePath)) {
      return states;
    }

    for (FileStatus status : this.fs.listStatus(storePath)) {
      states.addAll(getAll(storeName, status.getPath().getName()));
    }

    return states;
  }

  /**
   * Makes {@code alias} refer to the same contents as table {@code original}
   * by copying the file (not linking).
   *
   * @throws IOException if the original table file does not exist
   */
  @Override
  public void createAlias(String storeName, String original, String alias) throws IOException {
    Path originalTablePath = new Path(new Path(this.storeRootDir, storeName), original);
    if (!this.fs.exists(originalTablePath)) {
      throw new IOException(String.format("State file %s does not exist for table %s", originalTablePath, original));
    }

    Path aliasTablePath = new Path(new Path(this.storeRootDir, storeName), alias);
    Path tmpAliasTablePath = new Path(aliasTablePath.getParent(), new Path(TMP_FILE_PREFIX, aliasTablePath.getName()));
    // Make a copy of the original table as a work-around because
    // Hadoop version 1.2.1 has no support for symlink yet.
    HadoopUtils.copyFile(this.fs, originalTablePath, this.fs, aliasTablePath, tmpAliasTablePath, true, this.conf);
  }

  /** Deletes the table file for the given store/table if it exists. */
  @Override
  public void delete(String storeName, String tableName) throws IOException {
    Path tablePath = new Path(new Path(this.storeRootDir, storeName), tableName);
    if (this.fs.exists(tablePath)) {
      this.fs.delete(tablePath, false);
    }
  }

  /** Recursively deletes the whole store directory if it exists. */
  @Override
  public void delete(String storeName) throws IOException {
    Path storePath = new Path(this.storeRootDir, storeName);
    if (this.fs.exists(storePath)) {
      this.fs.delete(storePath, true);
    }
  }
}
package eu.uqasar.web.pages.adapterdata;

/*
 * #%L
 * U-QASAR
 * %%
 * Copyright (C) 2012 - 2015 U-QASAR Consortium
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.util.Date;

import javax.inject.Inject;

import org.apache.wicket.RestartResponseException;
import org.apache.wicket.Session;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.markup.html.form.AjaxSubmitLink;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.form.TextField;
import org.apache.wicket.markup.html.link.Link;
import org.apache.wicket.model.Model;
import org.apache.wicket.model.PropertyModel;
import org.apache.wicket.model.StringResourceModel;
import org.apache.wicket.request.mapper.parameter.PageParameters;
import org.apache.wicket.util.string.StringValue;
import org.joda.time.DateTime;

import de.agilecoders.wicket.extensions.markup.html.bootstrap.form.DateTextField;
import de.agilecoders.wicket.extensions.markup.html.bootstrap.form.DateTextFieldConfig;

import eu.uqasar.model.measure.JiraMetricMeasurement;
import eu.uqasar.service.dataadapter.JiraDataService;
import eu.uqasar.web.components.HtmlEvent;
import eu.uqasar.web.components.InputBorder;
import eu.uqasar.web.components.InputValidationForm;
import eu.uqasar.web.components.OnEventInputBeanValidationBorder;
import eu.uqasar.web.pages.AboutPage;
import eu.uqasar.web.pages.BasePage;

/**
 * Wicket page for creating or editing a single {@link JiraMetricMeasurement}
 * ("table entity"). When the {@code idForTableEntity} page parameter is absent
 * the page works in "create" mode with a fresh entity; when present, the
 * entity is loaded by id and the page works in "edit" mode.
 */
public class IssueTrackerDataManagementEditPage extends BasePage {

    // The tableEntity to edit/save
    private JiraMetricMeasurement tableEntity;

    // Service used to load and persist JiraMetricMeasurement entities (CDI-injected)
    @Inject
    private JiraDataService jiraService;

    private final Form<JiraMetricMeasurement> tableEntityForm;
    private final InputBorder<String> keyBorder;
    private final InputBorder<String> jiraMetricBorder;
    private final InputBorder<String> issueContentBorder;

    // Kept as a field only so it can be passed to its validation border below
    @SuppressWarnings("unused")
    private final DateTextField someDateField;

    public IssueTrackerDataManagementEditPage(PageParameters parameters) {
        super(parameters);

        // extract id parameter and set page title, header and tableEntity
        // depending on whether we are editing an existing tableEntity or
        // creating
        // a new one
        loadTableEntity(parameters.get("idForTableEntity"));

        // add form to create new tableEntity
        add(tableEntityForm = newTableEntityForm());

        // add text field for name inside a border component that performs bean
        // validation
        tableEntityForm.add(keyBorder = newKeyField());

        // add text field for name inside a border component that performs bean
        // validation
        tableEntityForm.add(jiraMetricBorder = newJiraMetricField());

        // add text field for name inside a border component that performs bean
        // validation
        tableEntityForm.add(issueContentBorder = newIssueContentField());

        // add date text field for due date inside a border component that
        // performs bean validation plus a date picker component for easy date
        // selection
        tableEntityForm
                .add(newSomeDateTextField(someDateField = newDateTextField()));

        // add a button to create new tableEntity
        tableEntityForm.add(newSubmitLink(parameters));

        // add cancel button to return to tableEntity list page
        tableEntityForm.add(newCancelLink(parameters));
    }

    /**
     * Serialization id required because Wicket pages are serializable.
     */
    private static final long serialVersionUID = 1L;

    /**
     * Initializes {@link #tableEntity} plus the page title and header label.
     * Empty id parameter => create mode (new entity); otherwise edit mode
     * (entity loaded by id, redirecting to {@link AboutPage} on any failure).
     *
     * @param idParam the raw {@code idForTableEntity} page parameter
     */
    private void loadTableEntity(final StringValue idParam) {
        if (idParam.isEmpty()) {
            setPageTitle(new StringResourceModel("page.create.title", this, null));
            add(new Label("header", new StringResourceModel(
                    "form.create.header", this, null)));
            tableEntity = new JiraMetricMeasurement();
        } else {
            setPageTitle(new StringResourceModel("page.edit.title", this, null));
            add(new Label("header", new StringResourceModel("form.edit.header",
                    this, null)));
            // set the tableEntity we got from previous page
            try {
                tableEntity = jiraService.getById(idParam
                        .toOptionalLong());
            } catch (Exception e) {
                // Broad catch: any lookup/conversion failure bounces to the About page
                throw new RestartResponseException(AboutPage.class);
            }
        }
    }

    /**
     * @return the form all input borders and buttons are attached to
     */
    private Form<JiraMetricMeasurement> newTableEntityForm() {
        Form<JiraMetricMeasurement> form = new InputValidationForm<>("form");
        form.setOutputMarkupId(true);
        return form;
    }

    /**
     * @return validation border wrapping the text field bound to {@code jiraKey}
     */
    private InputBorder<String> newKeyField() {
        return new OnEventInputBeanValidationBorder<>("keyBorder",
                new TextField<>("jiraKey", new PropertyModel<String>(
                        tableEntity, "jiraKey")), new StringResourceModel(
                        "jiraKey.input.label", this, null), HtmlEvent.ONCHANGE);
    }

    /**
     * @return validation border wrapping the text field bound to {@code jiraMetric}
     */
    private InputBorder<String> newJiraMetricField(){
        return new OnEventInputBeanValidationBorder<>("jiraMetricBorder",
                new TextField<>("jiraMetric", new PropertyModel<String>(
                        tableEntity, "jiraMetric")), new StringResourceModel(
                        "jiraMetric.input.label", this, null), HtmlEvent.ONCHANGE);
    }

    /**
     * @return validation border wrapping the text area bound to {@code jsonContent}
     */
    private InputBorder<String> newIssueContentField(){
        return new OnEventInputBeanValidationBorder<>("jsonContentBorder",
                new org.apache.wicket.markup.html.form.TextArea<>("jsonContent", new PropertyModel<String>(
                        tableEntity, "jsonContent")), new StringResourceModel(
                        "jsonContent.input.label", this, null), HtmlEvent.ONCHANGE);
    }

    /**
     * Wraps the given date field in a bean-validation border.
     *
     * @param someDateTextField the date picker field to wrap
     * @return the validation border containing the date field
     */
    private InputBorder<Date> newSomeDateTextField(
            final DateTextField someDateTextField) {
        return new OnEventInputBeanValidationBorder<>(
                "someDateValidationBorder", someDateTextField,
                new StringResourceModel("some.date.input.label", this, null),
                HtmlEvent.ONCHANGE);
    }

    /**
     * @return date picker field bound to {@code timeStamp}, format dd.MM.yyyy,
     *         selectable from year 1900
     */
    private DateTextField newDateTextField() {
        DateTextFieldConfig config = new DateTextFieldConfig()
                .withFormat("dd.MM.yyyy")
                .withStartDate(new DateTime().withYear(1900))
                .allowKeyboardNavigation(true).autoClose(true)
                .highlightToday(false).showTodayButton(false);
        return new DateTextField("timeStamp", new PropertyModel<Date>(tableEntity,
                "timeStamp"), config);
    }

    /**
     * @param parameters page parameters forwarded to the list page after save
     * @return AJAX submit link that saves the entity on success and re-renders
     *         the form to show validation errors otherwise
     */
    private AjaxSubmitLink newSubmitLink(
            final PageParameters parameters) {
        return new AjaxSubmitLink("submit", tableEntityForm) {

            private static final long serialVersionUID = -8233439456118623954L;

            @Override
            protected void onSubmit(AjaxRequestTarget target, Form<?> form) {
                save(target, parameters);
            }

            @Override
            protected void onError(AjaxRequestTarget target, Form<?> form) {
                showErrors(target);
            }
        };
    }

    /**
     * Persists the entity, registers a session-level success message and
     * redirects back to the list page.
     */
    private void save(AjaxRequestTarget target, PageParameters parameters) {
        // save tableEntity
        saveTableEntity();
        // success message has to be associated to session so that it is shown
        // in the global feedback panel
        Session.get().success(
                new StringResourceModel("saved.message", this, Model
                        .of(tableEntity)).getString());
        // redirect to tableEntity list page
        setResponsePage(
                IssueTrackerDataManagementPage.class, parameters);
    }

    /**
     * Persists {@link #tableEntity} via the service.
     * NOTE(review): this always calls create(), even when the page was opened
     * in edit mode for an existing entity — verify that JiraDataService.create
     * performs an update/merge for already-persisted entities, otherwise edits
     * may produce duplicates. The return value is currently always true and
     * ignored by the caller.
     */
    private boolean saveTableEntity() {
        // tableEntity.setCompanyAddress(tableEntityAddress);
        // tableEntity.setEmployee(tableEntityEmployee);
        jiraService.create(tableEntity);
        return true;
    }

    /**
     * Re-renders the form so bean-validation errors become visible.
     *
     * @param target the AJAX request target to add the form to
     */
    private void showErrors(AjaxRequestTarget target) {
        // in case of errors (e.g. validation errors) show error
        // messages in form
        target.add(tableEntityForm);
    }

    /**
     * @param parameters page parameters forwarded back to the list page
     * @return non-AJAX link that abandons the edit and returns to the list page
     */
    private Link<IssueTrackerDataManagementPage> newCancelLink(final PageParameters parameters) {
        return new Link<IssueTrackerDataManagementPage>("cancel") {

            private static final long serialVersionUID = -310533532532643267L;

            @Override
            public void onClick() {
                setResponsePage(IssueTrackerDataManagementPage.class,parameters);
            }
        };
    }
}
/*
 * Copyright 2012, 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.bitcoinj.uri;

import org.bitcoinj.core.Address;
import org.bitcoinj.core.AddressFormatException;
import org.bitcoinj.core.Coin;
import org.bitcoinj.core.NetworkParameters;
import org.bitcoinj.params.AbstractBitcoinNetParams;

import javax.annotation.Nullable;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;

import static com.google.common.base.Preconditions.checkNotNull;

/**
 * <p>Provides a standard implementation of a Bitcoin URI with support for the following:</p>
 *
 * <ul>
 * <li>URLEncoded URIs (as passed in by IE on the command line)</li>
 * <li>BIP21 names (including the "req-" prefix handling requirements)</li>
 * </ul>
 *
 * <h2>Accepted formats</h2>
 *
 * <p>The following input forms are accepted:</p>
 *
 * <ul>
 * <li>{@code bitcoin:<address>}</li>
 * <li>{@code bitcoin:<address>?<name1>=<value1>&<name2>=<value2>} with multiple
 * additional name/value pairs</li>
 * </ul>
 *
 * <p>The name/value pairs are processed as follows.</p>
 * <ol>
 * <li>URL encoding is stripped and treated as UTF-8</li>
 * <li>names prefixed with {@code req-} are treated as required and if unknown or conflicting cause a parse exception</li>
 * <li>Unknown names not prefixed with {@code req-} are added to a Map, accessible by parameter name</li>
 * <li>Known names not prefixed with {@code req-} are processed unless they are malformed</li>
 * </ol>
 *
 * <p>The following names are known and have the following formats:</p>
 * <ul>
 * <li>{@code amount} decimal value to 8 dp (e.g. 0.12345678) <b>Note that the
 * exponent notation is not supported any more</b></li>
 * <li>{@code label} any URL encoded alphanumeric</li>
 * <li>{@code message} any URL encoded alphanumeric</li>
 * </ul>
 *
 * @author Andreas Schildbach (initial code)
 * @author Jim Burton (enhancements for MultiBit)
 * @author Gary Rowe (BIP21 support)
 * @see <a href="https://github.com/bitcoin/bips/blob/master/bip-0021.mediawiki">BIP 0021</a>
 */
public class BitcoinURI {
    // Not worth turning into an enum
    public static final String FIELD_MESSAGE = "message";
    public static final String FIELD_LABEL = "label";
    public static final String FIELD_AMOUNT = "amount";
    public static final String FIELD_ADDRESS = "address";
    public static final String FIELD_PAYMENT_REQUEST_URL = "r";

    /**
     * URI for Bitcoin network. Use {@link AbstractBitcoinNetParams#BITCOIN_SCHEME} if you specifically
     * need Bitcoin, or use {@link NetworkParameters#getUriScheme} to get the scheme
     * from network parameters.
     */
    @Deprecated
    public static final String BITCOIN_SCHEME = "bitcoin";
    private static final String ENCODED_SPACE_CHARACTER = "%20";
    private static final String AMPERSAND_SEPARATOR = "&";
    private static final String QUESTION_MARK_SEPARATOR = "?";

    /**
     * Contains all the parameters in the order in which they were processed
     */
    private final Map<String, Object> parameterMap = new LinkedHashMap<>();

    /**
     * Constructs a new BitcoinURI from the given string. Can be for any network.
     *
     * @param uri The raw URI data to be parsed (see class comments for accepted formats)
     * @throws BitcoinURIParseException if the URI is not syntactically or semantically valid.
     */
    public BitcoinURI(String uri) throws BitcoinURIParseException {
        this(null, uri);
    }

    /**
     * Constructs a new object by trying to parse the input as a valid Bitcoin URI.
     *
     * @param params The network parameters that determine which network the URI is from, or null if you don't have
     *               any expectation about what network the URI is for and wish to check yourself.
     * @param input The raw URI data to be parsed (see class comments for accepted formats)
     *
     * @throws BitcoinURIParseException If the input fails Bitcoin URI syntax and semantic checks.
     */
    public BitcoinURI(@Nullable NetworkParameters params, String input) throws BitcoinURIParseException {
        checkNotNull(input);

        // Without params we can only assume the default "bitcoin" scheme
        String scheme = null == params ? AbstractBitcoinNetParams.BITCOIN_SCHEME : params.getUriScheme();

        // Attempt to form the URI (fail fast syntax checking to official standards).
        // The parsed URI is only used for the syntax check and the error message below;
        // the actual field extraction works on the raw input string.
        URI uri;
        try {
            uri = new URI(input);
        } catch (URISyntaxException e) {
            throw new BitcoinURIParseException("Bad URI syntax", e);
        }

        // URI is formed as bitcoin:<address>?<query parameters>
        // blockchain.info generates URIs of non-BIP compliant form bitcoin://address?....
        // We support both until Ben fixes his code.

        // Remove the bitcoin scheme.
        // (Note: getSchemeSpecificPart() is not used as it unescapes the label and parse then fails.
        // For instance with : bitcoin:129mVqKUmJ9uwPxKJBnNdABbuaaNfho4Ha?amount=0.06&label=Tom%20%26%20Jerry
        // the & (%26) in Tom and Jerry gets interpreted as a separator and the label then gets parsed
        // as 'Tom ' instead of 'Tom & Jerry')
        String blockchainInfoScheme = scheme + "://";
        String correctScheme = scheme + ":";
        String schemeSpecificPart;
        if (input.startsWith(blockchainInfoScheme)) {
            schemeSpecificPart = input.substring(blockchainInfoScheme.length());
        } else if (input.startsWith(correctScheme)) {
            schemeSpecificPart = input.substring(correctScheme.length());
        } else {
            throw new BitcoinURIParseException("Unsupported URI scheme: " + uri.getScheme());
        }

        // Split off the address from the rest of the query parameters.
        // Limit 2 keeps any further '?' characters inside the query part intact.
        String[] addressSplitTokens = schemeSpecificPart.split("\\?", 2);
        if (addressSplitTokens.length == 0)
            throw new BitcoinURIParseException("No data found after the bitcoin: prefix");
        String addressToken = addressSplitTokens[0];  // may be empty!

        String[] nameValuePairTokens;
        if (addressSplitTokens.length == 1) {
            // Only an address is specified - use an empty '<name>=<value>' token array.
            nameValuePairTokens = new String[] {};
        } else {
            // Split into '<name>=<value>' tokens.
            nameValuePairTokens = addressSplitTokens[1].split("&");
        }

        // Attempt to parse the rest of the URI parameters.
        parseParameters(params, addressToken, nameValuePairTokens);

        if (!addressToken.isEmpty()) {
            // Attempt to parse the addressToken as a Bitcoin address for this network
            try {
                Address address = Address.fromString(params, addressToken);
                putWithValidation(FIELD_ADDRESS, address);
            } catch (final AddressFormatException e) {
                throw new BitcoinURIParseException("Bad address", e);
            }
        }

        // A URI with no address is only valid in the BIP70 "r=" payment-request form
        if (addressToken.isEmpty() && getPaymentRequestUrl() == null) {
            throw new BitcoinURIParseException("No address and no r= parameter found");
        }
    }

    /**
     * Decodes the query-string tokens into {@link #parameterMap}.
     *
     * @param params The network parameters or null
     * @param addressToken The address part of the URI (currently unused here; address
     *                     validation happens in the constructor)
     * @param nameValuePairTokens The tokens representing the name value pairs (assumed to be
     *                            separated by '=' e.g. 'amount=0.2')
     */
    private void parseParameters(@Nullable NetworkParameters params, String addressToken,
                                 String[] nameValuePairTokens) throws BitcoinURIParseException {
        // Attempt to decode the rest of the tokens into a parameter map.
        for (String nameValuePairToken : nameValuePairTokens) {
            final int sepIndex = nameValuePairToken.indexOf('=');
            if (sepIndex == -1)
                throw new BitcoinURIParseException("Malformed Bitcoin URI - no separator in '" +
                        nameValuePairToken + "'");
            if (sepIndex == 0)
                throw new BitcoinURIParseException("Malformed Bitcoin URI - empty name '" +
                        nameValuePairToken + "'");
            // Names are case-insensitive; values keep their original case
            final String nameToken = nameValuePairToken.substring(0, sepIndex).toLowerCase(Locale.ENGLISH);
            final String valueToken = nameValuePairToken.substring(sepIndex + 1);

            // Parse the amount.
            if (FIELD_AMOUNT.equals(nameToken)) {
                // Decode the amount (contains an optional decimal component to 8dp).
                try {
                    Coin amount = Coin.parseCoin(valueToken);
                    if (params != null && amount.isGreaterThan(params.getMaxMoney()))
                        throw new BitcoinURIParseException("Max number of coins exceeded");
                    if (amount.signum() < 0)
                        throw new ArithmeticException("Negative coins specified");
                    putWithValidation(FIELD_AMOUNT, amount);
                } catch (IllegalArgumentException e) {
                    throw new OptionalFieldValidationException(String.format(Locale.US, "'%s' is not a valid amount", valueToken), e);
                } catch (ArithmeticException e) {
                    throw new OptionalFieldValidationException(String.format(Locale.US, "'%s' has too many decimal places", valueToken), e);
                }
            } else {
                if (nameToken.startsWith("req-")) {
                    // A required parameter that we do not know about.
                    throw new RequiredFieldValidationException("'" + nameToken + "' is required but not known, this URI is not valid");
                } else {
                    // Known fields and unknown parameters that are optional.
                    // Note: empty values are silently dropped rather than stored.
                    try {
                        if (valueToken.length() > 0)
                            putWithValidation(nameToken, URLDecoder.decode(valueToken, "UTF-8"));
                    } catch (UnsupportedEncodingException e) {
                        throw new RuntimeException(e); // can't happen
                    }
                }
            }
        }

        // Note to the future: when you want to implement 'req-expires' have a look at commit 410a53791841
        // which had it in.
    }

    /**
     * Put the value against the key in the map checking for duplication. This avoids address field overwrite etc.
     *
     * @param key The key for the map
     * @param value The value to store
     * @throws BitcoinURIParseException if the key is already present
     */
    private void putWithValidation(String key, Object value) throws BitcoinURIParseException {
        if (parameterMap.containsKey(key)) {
            throw new BitcoinURIParseException(String.format(Locale.US, "'%s' is duplicated, URI is invalid", key));
        } else {
            parameterMap.put(key, value);
        }
    }

    /**
     * The Bitcoin address from the URI, if one was present. It's possible to have Bitcoin URI's with no address if a
     * r= payment protocol parameter is specified, though this form is not recommended as older wallets can't understand
     * it.
     */
    @Nullable
    public Address getAddress() {
        return (Address) parameterMap.get(FIELD_ADDRESS);
    }

    /**
     * @return The amount name encoded using a pure integer value based at
     *         10,000,000 units is 1 BTC. May be null if no amount is specified
     */
    public Coin getAmount() {
        return (Coin) parameterMap.get(FIELD_AMOUNT);
    }

    /**
     * @return The label from the URI.
     */
    public String getLabel() {
        return (String) parameterMap.get(FIELD_LABEL);
    }

    /**
     * @return The message from the URI.
     */
    public String getMessage() {
        return (String) parameterMap.get(FIELD_MESSAGE);
    }

    /**
     * @return The URL where a payment request (as specified in BIP 70) may
     *         be fetched.
     */
    public final String getPaymentRequestUrl() {
        return (String) parameterMap.get(FIELD_PAYMENT_REQUEST_URL);
    }

    /**
     * Returns the URLs where a payment request (as specified in BIP 70) may be fetched. The first URL is the main URL,
     * all subsequent URLs are fallbacks.
     */
    public List<String> getPaymentRequestUrls() {
        ArrayList<String> urls = new ArrayList<>();
        // Collect parameters named r, r1, r2, ... until the first gap
        while (true) {
            int i = urls.size();
            String paramName = FIELD_PAYMENT_REQUEST_URL + (i > 0 ? Integer.toString(i) : "");
            String url = (String) parameterMap.get(paramName);
            if (url == null)
                break;
            urls.add(url);
        }
        // Reversed so the highest-numbered URL comes first (plain "r" last);
        // NOTE(review): presumably the highest-numbered parameter is the main URL — confirm against BIP 72 usage
        Collections.reverse(urls);
        return urls;
    }

    /**
     * @param name The name of the parameter
     * @return The parameter value, or null if not present
     */
    public Object getParameterByName(String name) {
        return parameterMap.get(name);
    }

    /**
     * @return a debug representation listing all parsed parameters in order
     */
    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder("BitcoinURI[");
        boolean first = true;
        for (Map.Entry<String, Object> entry : parameterMap.entrySet()) {
            if (first) {
                first = false;
            } else {
                builder.append(",");
            }
            builder.append("'").append(entry.getKey()).append("'=").append("'").append(entry.getValue()).append("'");
        }
        builder.append("]");
        return builder.toString();
    }

    /**
     * Simple Bitcoin URI builder using known good fields.
     *
     * @param address The Bitcoin address
     * @param amount The amount
     * @param label A label
     * @param message A message
     * @return A String containing the Bitcoin URI
     */
    public static String convertToBitcoinURI(Address address, Coin amount,
                                             String label, String message) {
        return convertToBitcoinURI(address.getParameters(), address.toString(), amount, label, message);
    }

    /**
     * Simple Bitcoin URI builder using known good fields.
     *
     * @param params The network parameters that determine which network the URI
     *               is for.
     * @param address The Bitcoin address
     * @param amount The amount
     * @param label A label
     * @param message A message
     * @return A String containing the Bitcoin URI
     */
    public static String convertToBitcoinURI(NetworkParameters params,
                                             String address, @Nullable Coin amount,
                                             @Nullable String label, @Nullable String message) {
        checkNotNull(params);
        checkNotNull(address);
        if (amount != null && amount.signum() < 0) {
            throw new IllegalArgumentException("Coin must be positive");
        }

        StringBuilder builder = new StringBuilder();
        String scheme = params.getUriScheme();
        builder.append(scheme).append(":").append(address);

        // Tracks whether '?' has already started the query string; subsequent
        // fields are joined with '&'
        boolean questionMarkHasBeenOutput = false;

        if (amount != null) {
            builder.append(QUESTION_MARK_SEPARATOR).append(FIELD_AMOUNT).append("=");
            builder.append(amount.toPlainString());
            questionMarkHasBeenOutput = true;
        }

        if (label != null && !"".equals(label)) {
            if (questionMarkHasBeenOutput) {
                builder.append(AMPERSAND_SEPARATOR);
            } else {
                builder.append(QUESTION_MARK_SEPARATOR);
                questionMarkHasBeenOutput = true;
            }
            builder.append(FIELD_LABEL).append("=").append(encodeURLString(label));
        }

        if (message != null && !"".equals(message)) {
            if (questionMarkHasBeenOutput) {
                builder.append(AMPERSAND_SEPARATOR);
            } else {
                builder.append(QUESTION_MARK_SEPARATOR);
                // flag intentionally not updated: message is the last field appended
            }
            builder.append(FIELD_MESSAGE).append("=").append(encodeURLString(message));
        }

        return builder.toString();
    }

    /**
     * Encode a string using URL encoding
     *
     * @param stringToEncode The string to URL encode
     */
    static String encodeURLString(String stringToEncode) {
        try {
            // BIP21 prefers %20 over '+' for spaces, so post-process URLEncoder's output
            return java.net.URLEncoder.encode(stringToEncode, "UTF-8").replace("+", ENCODED_SPACE_CHARACTER);
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException(e); // can't happen
        }
    }
}
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.support.v4.content;

import android.content.Context;
import android.os.Handler;
import android.os.SystemClock;
import android.support.v4.util.TimeUtils;
import android.util.Log;

import java.io.FileDescriptor;
import java.io.PrintWriter;
import java.util.concurrent.CountDownLatch;

/**
 * Static library support version of the framework's {@link android.content.AsyncTaskLoader}.
 * Used to write apps that run on platforms prior to Android 3.0. When running
 * on Android 3.0 or above, this implementation is still used; it does not try
 * to switch to the framework's implementation. See the framework SDK
 * documentation for a class overview.
 */
public abstract class AsyncTaskLoader<D> extends Loader<D> {
    static final String TAG = "AsyncTaskLoader";
    static final boolean DEBUG = false;

    /**
     * Background task that runs {@link #onLoadInBackground()} and dispatches the
     * result (or a cancellation) back to the loader on the UI thread. Also
     * implements {@link Runnable} so a throttled task can be posted to
     * {@link #mHandler} and execute itself later via {@link #executePendingTask()}.
     */
    final class LoadTask extends ModernAsyncTask<Void, Void, D> implements Runnable {

        // Result produced by doInBackground; kept so onCancelled can dispose of it.
        D result;
        // True while this task is parked on mHandler waiting for the throttle window.
        boolean waiting;

        // Released once onPostExecute/onCancelled has run; see waitForLoader().
        private CountDownLatch done = new CountDownLatch(1);

        /* Runs on a worker thread */
        @Override
        protected D doInBackground(Void... params) {
            if (DEBUG) Log.v(TAG, this + " >>> doInBackground");
            result = AsyncTaskLoader.this.onLoadInBackground();
            if (DEBUG) Log.v(TAG, this + " <<< doInBackground");
            return result;
        }

        /* Runs on the UI thread */
        @Override
        protected void onPostExecute(D data) {
            if (DEBUG) Log.v(TAG, this + " onPostExecute");
            try {
                AsyncTaskLoader.this.dispatchOnLoadComplete(this, data);
            } finally {
                // Always release waiters, even if dispatch throws.
                done.countDown();
            }
        }

        /* Runs on the UI thread */
        @Override
        protected void onCancelled() {
            if (DEBUG) Log.v(TAG, this + " onCancelled");
            try {
                AsyncTaskLoader.this.dispatchOnCancelled(this, result);
            } finally {
                done.countDown();
            }
        }

        /* Runs on the UI thread when a throttled task's delay has elapsed. */
        @Override
        public void run() {
            waiting = false;
            AsyncTaskLoader.this.executePendingTask();
        }
    }

    // The task that will (or does) perform the current load. Volatile: read
    // from waitForLoader() off the main thread.
    volatile LoadTask mTask;
    // A previously-started task whose cancellation we are still waiting on;
    // while non-null, mTask is held back from executing.
    volatile LoadTask mCancellingTask;

    // Minimum gap between loads, in ms; 0 disables throttling.
    long mUpdateThrottle;
    // uptimeMillis() of the last completed load; -10000 so the first load is
    // never throttled.
    long mLastLoadCompleteTime = -10000;
    // Only created when throttling is enabled; used to post delayed LoadTasks.
    Handler mHandler;

    public AsyncTaskLoader(Context context) {
        super(context);
    }

    /**
     * Set amount to throttle updates by. This is the minimum time from
     * when the last {@link #onLoadInBackground()} call has completed until
     * a new load is scheduled.
     *
     * @param delayMS Amount of delay, in milliseconds.
     */
    public void setUpdateThrottle(long delayMS) {
        mUpdateThrottle = delayMS;
        if (delayMS != 0) {
            mHandler = new Handler();
        }
    }

    @Override
    protected void onForceLoad() {
        super.onForceLoad();
        // Cancel any in-flight load, then queue a fresh task.
        cancelLoad();
        mTask = new LoadTask();
        if (DEBUG) Log.v(TAG, "Preparing load: mTask=" + mTask);
        executePendingTask();
    }

    /**
     * Attempt to cancel the current load task. See {@link android.os.AsyncTask#cancel(boolean)}
     * for more info.  Must be called on the main thread of the process.
     *
     * <p>Cancelling is not an immediate operation, since the load is performed
     * in a background thread.  If there is currently a load in progress, this
     * method requests that the load be cancelled, and notes this is the case;
     * once the background thread has completed its work its remaining state
     * will be cleared.  If another load request comes in during this time,
     * it will be held until the cancelled load is complete.
     *
     * @return Returns <tt>false</tt> if the task could not be cancelled,
     *         typically because it has already completed normally, or
     *         because {@link #startLoading()} hasn't been called; returns
     *         <tt>true</tt> otherwise.
     */
    public boolean cancelLoad() {
        if (DEBUG) Log.v(TAG, "cancelLoad: mTask=" + mTask);
        if (mTask != null) {
            if (mCancellingTask != null) {
                // There was a pending task already waiting for a previous
                // one being canceled; just drop it.
                if (DEBUG) Log.v(TAG,
                        "cancelLoad: still waiting for cancelled task; dropping next");
                if (mTask.waiting) {
                    mTask.waiting = false;
                    mHandler.removeCallbacks(mTask);
                }
                mTask = null;
                return false;
            } else if (mTask.waiting) {
                // There is a task, but it is waiting for the time it should
                // execute. We can just toss it.
                if (DEBUG) Log.v(TAG, "cancelLoad: task is waiting, dropping it");
                mTask.waiting = false;
                mHandler.removeCallbacks(mTask);
                mTask = null;
                return false;
            } else {
                // Task is actually running; ask it to cancel and remember it
                // so its eventual onCancelled() can be matched up.
                boolean cancelled = mTask.cancel(false);
                if (DEBUG) Log.v(TAG, "cancelLoad: cancelled=" + cancelled);
                if (cancelled) {
                    mCancellingTask = mTask;
                }
                mTask = null;
                return cancelled;
            }
        }
        return false;
    }

    /**
     * Called if the task was canceled before it was completed.  Gives the class a chance
     * to properly dispose of the result.
     */
    public void onCanceled(D data) {
    }

    /**
     * Start the pending task, unless a cancellation is still in flight or the
     * throttle window has not elapsed (in which case the task is re-posted to
     * run when the window opens).
     */
    void executePendingTask() {
        if (mCancellingTask == null && mTask != null) {
            if (mTask.waiting) {
                mTask.waiting = false;
                mHandler.removeCallbacks(mTask);
            }
            if (mUpdateThrottle > 0) {
                long now = SystemClock.uptimeMillis();
                if (now < (mLastLoadCompleteTime+mUpdateThrottle)) {
                    // Not yet time to do another load.
                    if (DEBUG) Log.v(TAG, "Waiting until "
                            + (mLastLoadCompleteTime+mUpdateThrottle)
                            + " to execute: " + mTask);
                    mTask.waiting = true;
                    mHandler.postAtTime(mTask, mLastLoadCompleteTime+mUpdateThrottle);
                    return;
                }
            }
            if (DEBUG) Log.v(TAG, "Executing: " + mTask);
            mTask.executeOnExecutor(ModernAsyncTask.THREAD_POOL_EXECUTOR,
                    (Void[]) null);
        }
    }

    // UI thread: a task finished after being cancelled; dispose of its data
    // and, if it was the one we were waiting on, let the pending task run.
    void dispatchOnCancelled(LoadTask task, D data) {
        onCanceled(data);
        if (mCancellingTask == task) {
            if (DEBUG) Log.v(TAG, "Cancelled task is now canceled!");
            rollbackContentChanged();
            mLastLoadCompleteTime = SystemClock.uptimeMillis();
            mCancellingTask = null;
            executePendingTask();
        }
    }

    // UI thread: a task completed normally; deliver its result unless the
    // task is stale (superseded) or the loader has been abandoned.
    void dispatchOnLoadComplete(LoadTask task, D data) {
        if (mTask != task) {
            if (DEBUG) Log.v(TAG, "Load complete of old task, trying to cancel");
            dispatchOnCancelled(task, data);
        } else {
            if (isAbandoned()) {
                // This cursor has been abandoned; just cancel the new data.
                onCanceled(data);
            } else {
                commitContentChanged();
                mLastLoadCompleteTime = SystemClock.uptimeMillis();
                mTask = null;
                if (DEBUG) Log.v(TAG, "Delivering result");
                deliverResult(data);
            }
        }
    }

    /**
     * Called on a worker thread to perform the actual load and return the
     * result of the load operation.
     */
    public abstract D loadInBackground();

    /**
     * Called on a worker thread to perform the actual load. Implementations should not deliver the
     * result directly, but should return them from this method, which will eventually end up
     * calling {@link #deliverResult} on the UI thread. If implementations need to process
     * the results on the UI thread they may override {@link #deliverResult} and do so
     * there.
     *
     * @return Implementations must return the result of their load operation.
     */
    protected D onLoadInBackground() {
        return loadInBackground();
    }

    /**
     * Locks the current thread until the loader completes the current load
     * operation. Returns immediately if there is no load operation running.
     * Should not be called from the UI thread: calling it from the UI
     * thread would cause a deadlock.
     * <p>
     * Use for testing only.  <b>Never</b> call this from a UI thread.
     *
     * @hide
     */
    public void waitForLoader() {
        // Snapshot the volatile field so the null check and await() see the
        // same task even if mTask changes concurrently.
        LoadTask task = mTask;
        if (task != null) {
            try {
                task.done.await();
            } catch (InterruptedException e) {
                // Ignore
            }
        }
    }

    @Override
    public void dump(String prefix, FileDescriptor fd, PrintWriter writer, String[] args) {
        super.dump(prefix, fd, writer, args);
        if (mTask != null) {
            writer.print(prefix); writer.print("mTask="); writer.print(mTask);
            writer.print(" waiting="); writer.println(mTask.waiting);
        }
        if (mCancellingTask != null) {
            writer.print(prefix); writer.print("mCancellingTask="); writer.print(mCancellingTask);
            writer.print(" waiting="); writer.println(mCancellingTask.waiting);
        }
        if (mUpdateThrottle != 0) {
            writer.print(prefix); writer.print("mUpdateThrottle=");
            TimeUtils.formatDuration(mUpdateThrottle, writer);
            writer.print(" mLastLoadCompleteTime=");
            TimeUtils.formatDuration(mLastLoadCompleteTime,
                    SystemClock.uptimeMillis(), writer);
            writer.println();
        }
    }
}
package net.aung.moviemaniac.events;

import net.aung.moviemaniac.data.restapi.responses.GenreListResponse;
import net.aung.moviemaniac.data.restapi.responses.MovieListResponse;
import net.aung.moviemaniac.data.restapi.responses.MovieReviewResponse;
import net.aung.moviemaniac.data.restapi.responses.TrailerResponse;
import net.aung.moviemaniac.data.restapi.responses.TVSeriesListResponse;
import net.aung.moviemaniac.data.vos.MovieVO;
import net.aung.moviemaniac.data.vos.TVSeriesVO;

import java.util.ArrayList;

/**
 * Container for the immutable event classes posted on the event bus.
 * Each nested class is a simple value holder: fields are set once in the
 * constructor and exposed through getters only.
 * <p>
 * Created by aung on 12/12/15.
 */
public class DataEvent {

    /** Base event: a movie-list API response arrived. */
    public static class LoadedMovieListEvent {

        private final MovieListResponse response;
        private final boolean isForce;

        public LoadedMovieListEvent(MovieListResponse response, boolean isForce) {
            this.response = response;
            this.isForce = isForce;
        }

        /** @return the raw API response carried by this event */
        public MovieListResponse getResponse() {
            return response;
        }

        /** @return true when the load was a forced refresh */
        public boolean isForce() {
            return isForce;
        }
    }

    /** Now-playing variant of {@link LoadedMovieListEvent}. */
    public static class LoadedNowPlayingMovieListEvent extends LoadedMovieListEvent {
        public LoadedNowPlayingMovieListEvent(MovieListResponse response, boolean isForce) {
            super(response, isForce);
        }
    }

    /** Upcoming variant of {@link LoadedMovieListEvent}. */
    public static class LoadedUpcomingMovieListEvent extends LoadedMovieListEvent {
        public LoadedUpcomingMovieListEvent(MovieListResponse response, boolean isForce) {
            super(response, isForce);
        }
    }

    /** Most-popular variant of {@link LoadedMovieListEvent}. */
    public static class LoadedMostPopularMovieListEvent extends LoadedMovieListEvent {
        public LoadedMostPopularMovieListEvent(MovieListResponse response, boolean isForce) {
            super(response, isForce);
        }
    }

    /** Top-rated variant of {@link LoadedMovieListEvent}. */
    public static class LoadedTopRatedMovieListEvent extends LoadedMovieListEvent {
        public LoadedTopRatedMovieListEvent(MovieListResponse response, boolean isForce) {
            super(response, isForce);
        }
    }

    /** Base event: a TV-series-list API response arrived. */
    public static class LoadedTVSerieListEvent {

        private final TVSeriesListResponse response;
        private final boolean isForce;

        public LoadedTVSerieListEvent(TVSeriesListResponse response, boolean isForce) {
            this.response = response;
            this.isForce = isForce;
        }

        /** @return the raw API response carried by this event */
        public TVSeriesListResponse getResponse() {
            return response;
        }

        /** @return true when the load was a forced refresh */
        public boolean isForce() {
            return isForce;
        }
    }

    /** Popular variant of {@link LoadedTVSerieListEvent}. */
    public static class LoadedPopularTVSeriesListEvent extends LoadedTVSerieListEvent {
        public LoadedPopularTVSeriesListEvent(TVSeriesListResponse response, boolean isForce) {
            super(response, isForce);
        }
    }

    /** Top-rated variant of {@link LoadedTVSerieListEvent}. */
    public static class LoadedTopRatedTVSeriesListEvent extends LoadedTVSerieListEvent {
        public LoadedTopRatedTVSeriesListEvent(TVSeriesListResponse response, boolean isForce) {
            super(response, isForce);
        }
    }

    /** Base event: a prepared movie list is ready to be shown by the UI. */
    public static class ShowMovieListEvent {

        private final ArrayList<MovieVO> movieList;
        private final boolean isForce;
        private final int pageNumber;

        public ShowMovieListEvent(ArrayList<MovieVO> movieList, boolean isForce, int pageNumber) {
            this.movieList = movieList;
            this.isForce = isForce;
            this.pageNumber = pageNumber;
        }

        /** @return the movies to display */
        public ArrayList<MovieVO> getMovieList() {
            return movieList;
        }

        /** @return true when the load was a forced refresh */
        public boolean isForce() {
            return isForce;
        }

        /** @return the page number this list belongs to */
        public int getPageNumber() {
            return pageNumber;
        }
    }

    /** Most-popular variant of {@link ShowMovieListEvent}. */
    public static class ShowMostPopularMovieListEvent extends ShowMovieListEvent {
        public ShowMostPopularMovieListEvent(ArrayList<MovieVO> movieList, boolean isForce, int pageNumber) {
            super(movieList, isForce, pageNumber);
        }
    }

    /** Upcoming variant of {@link ShowMovieListEvent}. */
    public static class ShowUpcomingMovieListEvent extends ShowMovieListEvent {
        public ShowUpcomingMovieListEvent(ArrayList<MovieVO> movieList, boolean isForce, int pageNumber) {
            super(movieList, isForce, pageNumber);
        }
    }

    /** Top-rated variant of {@link ShowMovieListEvent}. */
    public static class ShowTopRatedMovieListEvent extends ShowMovieListEvent {
        public ShowTopRatedMovieListEvent(ArrayList<MovieVO> movieList, boolean isForce, int pageNumber) {
            super(movieList, isForce, pageNumber);
        }
    }

    /** Now-playing variant of {@link ShowMovieListEvent}. */
    public static class ShowNowPlayingMovieListEvent extends ShowMovieListEvent {
        public ShowNowPlayingMovieListEvent(ArrayList<MovieVO> movieList, boolean isForce, int pageNumber) {
            super(movieList, isForce, pageNumber);
        }
    }

    /** Posted when a data load fails; carries a human-readable reason. */
    public static class FailedToLoadDataEvent {

        private final String message;

        public FailedToLoadDataEvent(String message) {
            this.message = message;
        }

        /** @return the failure description */
        public String getMessage() {
            return message;
        }
    }

    /** Posted when the detail data for a single movie has been loaded. */
    public static class LoadedMovieDetailEvent {

        private final MovieVO movie;

        public LoadedMovieDetailEvent(MovieVO movie) {
            this.movie = movie;
        }

        public MovieVO getMovie() {
            return movie;
        }
    }

    /** Posted when trailer data for a movie has been loaded. */
    public static class LoadedMovieTrailerEvent {

        private final TrailerResponse response;
        private final int movieId;

        public LoadedMovieTrailerEvent(TrailerResponse response, int movieId) {
            this.response = response;
            this.movieId = movieId;
        }

        public TrailerResponse getResponse() {
            return response;
        }

        /** @return id of the movie the trailers belong to */
        public int getMovieId() {
            return movieId;
        }
    }

    /** Posted when the genre list has been loaded. */
    public static class LoadedGenreListEvent {

        private final GenreListResponse response;

        public LoadedGenreListEvent(GenreListResponse response) {
            this.response = response;
        }

        public GenreListResponse getResponse() {
            return response;
        }
    }

    /** Posted when reviews for a movie have been loaded. */
    public static class LoadedMovieReviewEvent {

        private final MovieReviewResponse response;

        public LoadedMovieReviewEvent(MovieReviewResponse response) {
            this.response = response;
        }

        public MovieReviewResponse getResponse() {
            return response;
        }
    }

    /** Base event: a prepared TV-series list is ready to be shown by the UI. */
    public static class ShowTVSeriesListEvent {

        private final ArrayList<TVSeriesVO> tvSeriesList;
        private final boolean isForce;
        private final int pageNumber;

        public ShowTVSeriesListEvent(ArrayList<TVSeriesVO> tvSeriesList, boolean isForce, int pageNumber) {
            this.tvSeriesList = tvSeriesList;
            this.isForce = isForce;
            this.pageNumber = pageNumber;
        }

        /** @return the TV series to display */
        public ArrayList<TVSeriesVO> getTvSeriesList() {
            return tvSeriesList;
        }

        /** @return true when the load was a forced refresh */
        public boolean isForce() {
            return isForce;
        }

        /** @return the page number this list belongs to */
        public int getPageNumber() {
            return pageNumber;
        }
    }

    /** Popular variant of {@link ShowTVSeriesListEvent}. */
    public static class ShowPopularTVSeriesListEvent extends ShowTVSeriesListEvent {
        public ShowPopularTVSeriesListEvent(ArrayList<TVSeriesVO> tvSeriesList, boolean isForce, int pageNumber) {
            super(tvSeriesList, isForce, pageNumber);
        }
    }

    /** Top-rated variant of {@link ShowTVSeriesListEvent}. */
    public static class ShowTopRatedTVSeriesListEvent extends ShowTVSeriesListEvent {
        public ShowTopRatedTVSeriesListEvent(ArrayList<TVSeriesVO> tvSeriesList, boolean isForce, int pageNumber) {
            super(tvSeriesList, isForce, pageNumber);
        }
    }

    /** Posted when the detail data for a single TV series has been loaded. */
    public static class LoadedTVSeriesDetailEvent {

        private final TVSeriesVO tvSeries;

        public LoadedTVSeriesDetailEvent(TVSeriesVO tvSeries) {
            this.tvSeries = tvSeries;
        }

        public TVSeriesVO getTvSeries() {
            return tvSeries;
        }
    }

    /** Posted when trailer data for a TV series has been loaded. */
    public static class LoadedTVSeriesTrailerEvent {

        private final TrailerResponse response;
        private final int tvSeriesId;

        public LoadedTVSeriesTrailerEvent(TrailerResponse response, int tvSeriesId) {
            this.response = response;
            this.tvSeriesId = tvSeriesId;
        }

        public TrailerResponse getResponse() {
            return response;
        }

        /** @return id of the TV series the trailers belong to */
        public int getTvSeriesId() {
            return tvSeriesId;
        }
    }

    /** Posted when a movie search completes; carries the query that produced it. */
    public static class SearchedMovieEvent {

        private final MovieListResponse response;
        private final String query;

        public SearchedMovieEvent(MovieListResponse response, String query) {
            this.response = response;
            this.query = query;
        }

        public MovieListResponse getResponse() {
            return response;
        }

        /** @return the search text the user entered */
        public String getQuery() {
            return query;
        }
    }

    /** Posted when a TV-series search completes; carries the query that produced it. */
    public static class SearchedTVSeriesEvent {

        private final TVSeriesListResponse response;
        private final String query;

        public SearchedTVSeriesEvent(TVSeriesListResponse response, String query) {
            this.response = response;
            this.query = query;
        }

        public TVSeriesListResponse getResponse() {
            return response;
        }

        /** @return the search text the user entered */
        public String getQuery() {
            return query;
        }
    }
}
/*
 * Copyright (c) 2008-2016, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.cache;

import com.hazelcast.cache.impl.HazelcastServerCachingProvider;
import com.hazelcast.config.CacheConfig;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.test.AssertTask;
import com.hazelcast.test.HazelcastSerialClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.TestHazelcastInstanceFactory;
import com.hazelcast.test.annotation.ParallelTest;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;

import javax.cache.Cache;
import javax.cache.CacheManager;
import javax.cache.configuration.FactoryBuilder;
import javax.cache.configuration.MutableCacheEntryListenerConfiguration;
import javax.cache.configuration.MutableConfiguration;
import javax.cache.event.CacheEntryCreatedListener;
import javax.cache.event.CacheEntryEvent;
import javax.cache.event.CacheEntryExpiredListener;
import javax.cache.event.CacheEntryListener;
import javax.cache.event.CacheEntryListenerException;
import javax.cache.event.CacheEntryRemovedListener;
import javax.cache.event.CacheEntryUpdatedListener;

import java.io.Serializable;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;

/**
 * JCache integration tests that exercise a single cache from two different
 * Hazelcast member nodes: visibility of a created cache on the other node,
 * entry-listener completion across nodes, and destroy/close semantics.
 */
@RunWith(HazelcastSerialClassRunner.class)
@Category({QuickTest.class, ParallelTest.class})
public class CacheFromDifferentNodesTest extends HazelcastTestSupport {

    private TestHazelcastInstanceFactory factory;
    // One caching provider per member so each test can obtain an independent
    // CacheManager bound to a distinct node of the same two-node cluster.
    private HazelcastServerCachingProvider cachingProvider1;
    private HazelcastServerCachingProvider cachingProvider2;

    @Before
    public void init() {
        factory = new TestHazelcastInstanceFactory(2);
        HazelcastInstance hz1 = factory.newHazelcastInstance();
        HazelcastInstance hz2 = factory.newHazelcastInstance();
        cachingProvider1 = HazelcastServerCachingProvider.createCachingProvider(hz1);
        cachingProvider2 = HazelcastServerCachingProvider.createCachingProvider(hz2);
    }

    @After
    public void tear() {
        cachingProvider1.close();
        cachingProvider2.close();
        factory.shutdownAll();
    }

    /**
     * Creates a cache through node 1's CacheManager, verifies it eventually
     * becomes visible on node 2, and exercises basic put/get/remove both on
     * the original Cache handle and on a handle re-fetched by name.
     */
    @Test
    public void testJSRExample1() throws InterruptedException {
        final String cacheName = randomString();

        CacheManager cacheManager = cachingProvider1.getCacheManager();
        assertNotNull(cacheManager);

        assertNull(cacheManager.getCache(cacheName));

        CacheConfig<Integer, String> config = new CacheConfig<Integer, String>();
        Cache<Integer, String> cache = cacheManager.createCache(cacheName, config);
        assertNotNull(cache);

        // Cache creation propagates to the other member asynchronously.
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                CacheManager cm2 = cachingProvider2.getCacheManager();
                assertNotNull(cm2.getCache(cacheName));
            }
        });

        Integer key = 1;
        String value1 = "value";
        cache.put(key, value1);

        String value2 = cache.get(key);
        assertEquals(value1, value2);
        cache.remove(key);
        assertNull(cache.get(key));

        Cache<Integer, String> cache2 = cacheManager.getCache(cacheName);
        assertNotNull(cache2);

        key = 1;
        value1 = "value";
        cache.put(key, value1);

        value2 = cache.get(key);
        assertEquals(value1, value2);

        cache.remove(key);
        assertNull(cache.get(key));

        cacheManager.destroyCache(cacheName);
        cacheManager.close();
    }

    // Issue https://github.com/hazelcast/hazelcast/issues/5865
    // Puts through node 1, removes through node 2, and checks that the
    // synchronous entry listener observes both creations and both removals.
    @Test
    public void testCompletionTestByPuttingAndRemovingFromDifferentNodes() throws InterruptedException {
        String cacheName = "simpleCache";

        CacheManager cacheManager1 = cachingProvider1.getCacheManager();
        CacheManager cacheManager2 = cachingProvider2.getCacheManager();

        CacheConfig<Integer, String> config = new CacheConfig<Integer, String>();
        final SimpleEntryListener<Integer, String> listener = new SimpleEntryListener<Integer, String>();
        // Synchronous (last arg true) so put/remove block until listeners ran.
        MutableCacheEntryListenerConfiguration<Integer, String> listenerConfiguration =
                new MutableCacheEntryListenerConfiguration<Integer, String>(
                        FactoryBuilder.factoryOf(listener), null, true, true);
        config.addCacheEntryListenerConfiguration(listenerConfiguration);

        Cache<Integer, String> cache1 = cacheManager1.createCache(cacheName, config);
        Cache<Integer, String> cache2 = cacheManager2.getCache(cacheName);

        assertNotNull(cache1);
        assertNotNull(cache2);

        Integer key1 = 1;
        String value1 = "value1";
        cache1.put(key1, value1);
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                assertEquals(1, listener.created.get());
            }
        });

        Integer key2 = 2;
        String value2 = "value2";
        cache1.put(key2, value2);
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                assertEquals(2, listener.created.get());
            }
        });

        Set<Integer> keys = new HashSet<Integer>();
        keys.add(key1);
        keys.add(key2);
        // Removal goes through the OTHER node's cache handle.
        cache2.removeAll(keys);

        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                assertEquals(2, listener.removed.get());
            }
        });
    }

    /**
     * Destroying a cache on one manager must invalidate handles obtained from
     * the other manager: using them should raise IllegalStateException.
     */
    // NOTE(review): raw MutableConfiguration is used here (and below) — the
    // javax.cache API is generic; consider MutableConfiguration<String, String>.
    @Test
    public void testCachesDestroy() {
        CacheManager cacheManager = cachingProvider1.getCacheManager();
        CacheManager cacheManager2 = cachingProvider2.getCacheManager();
        MutableConfiguration configuration = new MutableConfiguration();
        final Cache c1 = cacheManager.createCache("c1", configuration);
        final Cache c2 = cacheManager2.getCache("c1");
        c1.put("key", "value");
        cacheManager.destroyCache("c1");
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                try {
                    c2.get("key");
                    throw new AssertionError("get should throw IllegalStateException");
                } catch (IllegalStateException e) {
                    //ignored as expected
                }
            }
        });
    }

    /**
     * Closing one CacheManager must NOT affect a different cache created by
     * another manager: c2 stays usable for the whole observation window.
     */
    @Test
    public void testCachesDestroyFromOtherManagers() {
        CacheManager cacheManager = cachingProvider1.getCacheManager();
        CacheManager cacheManager2 = cachingProvider2.getCacheManager();
        MutableConfiguration configuration = new MutableConfiguration();
        final Cache c1 = cacheManager.createCache("c1", configuration);
        final Cache c2 = cacheManager2.createCache("c2", configuration);
        c1.put("key", "value");
        c2.put("key", "value");
        cacheManager.close();
        assertTrueAllTheTime(new AssertTask() {
            @Override
            public void run() throws Exception {
                c2.get("key");
            }
        }, 10);
    }

    /**
     * Serializable listener that counts created/expired/removed/updated
     * events; Serializable because the listener factory is shipped to other
     * cluster members.
     */
    public static class SimpleEntryListener<K, V> implements CacheEntryListener<K, V>,
            CacheEntryCreatedListener<K, V>, CacheEntryUpdatedListener<K, V>,
            CacheEntryRemovedListener<K, V>, CacheEntryExpiredListener<K, V>,
            Serializable {

        public AtomicInteger created = new AtomicInteger();
        public AtomicInteger expired = new AtomicInteger();
        public AtomicInteger removed = new AtomicInteger();
        public AtomicInteger updated = new AtomicInteger();

        public SimpleEntryListener() {
        }

        @Override
        public void onCreated(Iterable<CacheEntryEvent<? extends K, ? extends V>> cacheEntryEvents)
                throws CacheEntryListenerException {
            for (CacheEntryEvent<? extends K, ? extends V> cacheEntryEvent : cacheEntryEvents) {
                created.incrementAndGet();
            }
        }

        @Override
        public void onExpired(Iterable<CacheEntryEvent<? extends K, ? extends V>> cacheEntryEvents)
                throws CacheEntryListenerException {
            for (CacheEntryEvent<? extends K, ? extends V> cacheEntryEvent : cacheEntryEvents) {
                expired.incrementAndGet();
            }
        }

        @Override
        public void onRemoved(Iterable<CacheEntryEvent<? extends K, ? extends V>> cacheEntryEvents)
                throws CacheEntryListenerException {
            for (CacheEntryEvent<? extends K, ? extends V> cacheEntryEvent : cacheEntryEvents) {
                removed.incrementAndGet();
            }
        }

        @Override
        public void onUpdated(Iterable<CacheEntryEvent<? extends K, ? extends V>> cacheEntryEvents)
                throws CacheEntryListenerException {
            for (CacheEntryEvent<? extends K, ? extends V> cacheEntryEvent : cacheEntryEvents) {
                updated.incrementAndGet();
            }
        }
    }
}
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * Class LineBreakpoint * @author Jeka */ package com.intellij.debugger.ui.breakpoints; import com.intellij.debugger.DebuggerBundle; import com.intellij.debugger.DebuggerManagerEx; import com.intellij.debugger.SourcePosition; import com.intellij.debugger.engine.DebugProcessImpl; import com.intellij.debugger.engine.evaluation.EvaluateException; import com.intellij.debugger.engine.evaluation.EvaluationContextImpl; import com.intellij.debugger.impl.DebuggerUtilsEx; import com.intellij.debugger.impl.PositionUtil; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.markup.RangeHighlighter; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.ProjectFileIndex; import com.intellij.openapi.roots.ProjectRootManager; import com.intellij.openapi.util.Computable; import com.intellij.openapi.util.IconLoader; import com.intellij.openapi.util.Key; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.*; import com.intellij.psi.jsp.JspFile; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.ui.classFilter.ClassFilter; import com.intellij.util.Processor; import com.intellij.util.StringBuilderSpinAllocator; import 
com.intellij.xdebugger.XDebuggerUtil; import com.intellij.xdebugger.ui.DebuggerIcons; import com.sun.jdi.*; import com.sun.jdi.event.LocatableEvent; import com.sun.jdi.request.BreakpointRequest; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.util.ArrayList; import java.util.Collection; import java.util.List; public class LineBreakpoint extends BreakpointWithHighlighter { private static final Logger LOG = Logger.getInstance("#com.intellij.debugger.ui.breakpoints.LineBreakpoint"); // icons public static Icon ICON = DebuggerIcons.ENABLED_BREAKPOINT_ICON; public static final Icon MUTED_ICON = DebuggerIcons.MUTED_BREAKPOINT_ICON; public static final Icon DISABLED_ICON = DebuggerIcons.DISABLED_BREAKPOINT_ICON; public static final Icon MUTED_DISABLED_ICON = DebuggerIcons.MUTED_DISABLED_BREAKPOINT_ICON; private static final Icon ourVerifiedWarningsIcon = IconLoader.getIcon("/debugger/db_verified_warning_breakpoint.png"); private static final Icon ourMutedVerifiedWarningsIcon = IconLoader.getIcon("/debugger/db_muted_verified_warning_breakpoint.png"); private String myMethodName; public static final @NonNls Key<LineBreakpoint> CATEGORY = BreakpointCategory.lookup("line_breakpoints"); protected LineBreakpoint(Project project) { super(project); } protected LineBreakpoint(Project project, RangeHighlighter highlighter) { super(project, highlighter); } protected Icon getDisabledIcon(boolean isMuted) { final Breakpoint master = DebuggerManagerEx.getInstanceEx(myProject).getBreakpointManager().findMasterBreakpoint(this); if (isMuted) { return master == null? MUTED_DISABLED_ICON : DebuggerIcons.MUTED_DISABLED_DEPENDENT_BREAKPOINT_ICON; } else { return master == null? DISABLED_ICON : DebuggerIcons.DISABLED_DEPENDENT_BREAKPOINT_ICON; } } protected Icon getSetIcon(boolean isMuted) { return isMuted? MUTED_ICON : ICON; } protected Icon getInvalidIcon(boolean isMuted) { return isMuted? 
DebuggerIcons.MUTED_INVALID_BREAKPOINT_ICON : DebuggerIcons.INVALID_BREAKPOINT_ICON; } protected Icon getVerifiedIcon(boolean isMuted) { return isMuted? DebuggerIcons.MUTED_VERIFIED_BREAKPOINT_ICON : DebuggerIcons.VERIFIED_BREAKPOINT_ICON; } protected Icon getVerifiedWarningsIcon(boolean isMuted) { return isMuted? ourMutedVerifiedWarningsIcon : ourVerifiedWarningsIcon; } public Key<LineBreakpoint> getCategory() { return CATEGORY; } protected void reload(PsiFile file) { super.reload(file); myMethodName = findMethodName(file, getHighlighter().getStartOffset()); } protected void createOrWaitPrepare(DebugProcessImpl debugProcess, String classToBeLoaded) { if (isInScopeOf(debugProcess, classToBeLoaded)) { super.createOrWaitPrepare(debugProcess, classToBeLoaded); } } protected void createRequestForPreparedClass(final DebugProcessImpl debugProcess, final ReferenceType classType) { if (!isInScopeOf(debugProcess, classType.name())) { return; } try { List<Location> locs = debugProcess.getPositionManager().locationsOfLine(classType, getSourcePosition()); if (locs.size() > 0) { for (final Location location : locs) { if (LOG.isDebugEnabled()) { LOG.debug("Found location for reference type " + classType.name() + " at line " + getLineIndex() + "; isObsolete: " + (debugProcess.getVirtualMachineProxy().versionHigher("1.4") && location.method().isObsolete())); } BreakpointRequest request = debugProcess.getRequestsManager().createBreakpointRequest(LineBreakpoint.this, location); debugProcess.getRequestsManager().enableRequest(request); if (LOG.isDebugEnabled()) { LOG.debug("Created breakpoint request for reference type " + classType.name() + " at line " + getLineIndex()); } } } else { // there's no executable code in this class debugProcess.getRequestsManager().setInvalid(LineBreakpoint.this, DebuggerBundle.message( "error.invalid.breakpoint.no.executable.code", (getLineIndex() + 1), classType.name()) ); if (LOG.isDebugEnabled()) { LOG.debug("No locations of type " + classType.name() 
+ " found at line " + getLineIndex()); } } } catch (ClassNotPreparedException ex) { if (LOG.isDebugEnabled()) { LOG.debug("ClassNotPreparedException: " + ex.getMessage()); } // there's a chance to add a breakpoint when the class is prepared } catch (ObjectCollectedException ex) { if (LOG.isDebugEnabled()) { LOG.debug("ObjectCollectedException: " + ex.getMessage()); } // there's a chance to add a breakpoint when the class is prepared } catch (InvalidLineNumberException ex) { if (LOG.isDebugEnabled()) { LOG.debug("InvalidLineNumberException: " + ex.getMessage()); } debugProcess.getRequestsManager().setInvalid(LineBreakpoint.this, DebuggerBundle.message("error.invalid.breakpoint.bad.line.number")); } catch (InternalException ex) { LOG.info(ex); } catch(Exception ex) { LOG.info(ex); } updateUI(); } private boolean isInScopeOf(DebugProcessImpl debugProcess, String className) { final SourcePosition position = getSourcePosition(); if (position != null) { final VirtualFile breakpointFile = position.getFile().getVirtualFile(); final ProjectFileIndex fileIndex = ProjectRootManager.getInstance(myProject).getFileIndex(); if (breakpointFile != null && fileIndex.isInSourceContent(breakpointFile)) { // apply filtering to breakpoints from content sources only, not for sources attached to libraries final Collection<VirtualFile> candidates = findClassCandidatesInSourceContent(className, debugProcess.getSearchScope(), fileIndex); if (candidates == null) { return true; } for (VirtualFile classFile : candidates) { if (breakpointFile.equals(classFile)) { return true; } } return false; } } return true; } @Nullable private Collection<VirtualFile> findClassCandidatesInSourceContent(final String className, final GlobalSearchScope scope, final ProjectFileIndex fileIndex) { final int dollarIndex = className.indexOf("$"); final String topLevelClassName = dollarIndex >= 0? 
// NOTE(review): this chunk begins mid-method. The statement below is the tail of a
// method whose head (declaring dollarIndex, className, topLevelClassName, scope and
// fileIndex) lies earlier in the file; it resolves the top-level class name and
// collects the source files that define it.
className.substring(0, dollarIndex) : className;
    return ApplicationManager.getApplication().runReadAction(new Computable<Collection<VirtualFile>>() {
      @Nullable
      public Collection<VirtualFile> compute() {
        final PsiClass[] classes = JavaPsiFacade.getInstance(myProject).findClasses(topLevelClassName, scope);
        if (classes.length == 0) {
          return null;
        }
        final List<VirtualFile> list = new ArrayList<VirtualFile>(classes.length);
        for (PsiClass aClass : classes) {
          final PsiFile psiFile = aClass.getContainingFile();
          if (psiFile != null) {
            final VirtualFile vFile = psiFile.getVirtualFile();
            // Only files that are part of the project's source content are relevant.
            if (vFile != null && fileIndex.isInSourceContent(vFile)) {
              list.add(vFile);
            }
          }
        }
        return list;
      }
    });
  }

  /**
   * Evaluates whether this breakpoint should actually stop for the given event.
   * When class filters are enabled, the type of the current {@code this} object must
   * match at least one inclusion filter and no exclusion filter; otherwise the
   * decision is delegated to the superclass condition evaluation.
   */
  public boolean evaluateCondition(EvaluationContextImpl context, LocatableEvent event) throws EvaluateException {
    if(CLASS_FILTERS_ENABLED){
      Value value = context.getThisObject();
      ObjectReference thisObject = (ObjectReference)value;
      if(thisObject == null) {
        // No 'this' (e.g. static context) — filters cannot match, do not stop.
        return false;
      }
      String name = DebuggerUtilsEx.getQualifiedClassName(thisObject.referenceType().name(), getProject());
      if(name == null) {
        return false;
      }
      ClassFilter [] filters = getClassFilters();
      boolean matches = false;
      for (ClassFilter classFilter : filters) {
        if (classFilter.isEnabled() && classFilter.matches(name)) {
          matches = true;
          break;
        }
      }
      if(!matches) {
        return false;
      }
      // Exclusion filters take precedence over any inclusion match.
      ClassFilter [] ifilters = getClassExclusionFilters();
      for (ClassFilter classFilter : ifilters) {
        if (classFilter.isEnabled() && classFilter.matches(name)) {
          return false;
        }
      }
    }
    return super.evaluateCondition(context, event);
  }

  public String toString() {
    return getDescription();
  }

  /**
   * Builds the UI label for this breakpoint: the 1-based line number plus, when
   * available, "Class.method (package)" context; a dedicated message is returned
   * for invalid breakpoints.
   */
  public String getDisplayName() {
    final int lineNumber = (getHighlighter().getDocument().getLineNumber(getHighlighter().getStartOffset()) + 1);
    if(isValid()) {
      final String className = getClassName();
      final boolean hasClassInfo = className != null && className.length() > 0;
      final boolean hasMethodInfo = myMethodName != null && myMethodName.length() > 0;
      if (hasClassInfo || hasMethodInfo) {
        final StringBuilder info = StringBuilderSpinAllocator.alloc();
        try {
          String packageName = null;
          if (hasClassInfo) {
            // Show the simple class name; the package is appended separately below.
            final int dotIndex = className.lastIndexOf(".");
            if (dotIndex >= 0) {
              info.append(className.substring(dotIndex + 1));
              packageName = className.substring(0, dotIndex);
            }
            else {
              info.append(className);
            }
          }
          if(hasMethodInfo) {
            if (hasClassInfo) {
              info.append(".");
            }
            info.append(myMethodName);
          }
          if (packageName != null) {
            info.append(" (").append(packageName).append(")");
          }
          return DebuggerBundle.message("line.breakpoint.display.name.with.class.or.method", lineNumber, info.toString());
        }
        finally {
          // Spin-allocated builders must always be returned to the pool.
          StringBuilderSpinAllocator.dispose(info);
        }
      }
      return DebuggerBundle.message("line.breakpoint.display.name", lineNumber);
    }
    return DebuggerBundle.message("status.breakpoint.invalid");
  }

  /**
   * Finds the name of the PSI method containing the given offset, rendered as
   * "name()". Returns null for JSP files and non-Java files. Runs inside a read
   * action because PSI access requires it.
   */
  private static @Nullable String findMethodName(final PsiFile file, final int offset) {
    if (file instanceof JspFile) {
      return null;
    }
    if (file instanceof PsiJavaFile) {
      return ApplicationManager.getApplication().runReadAction(new Computable<String>() {
        public String compute() {
          final PsiMethod method = DebuggerUtilsEx.findPsiMethod(file, offset);
          return method != null? method.getName() + "()" : null;
        }
      });
    }
    return null;
  }

  /**
   * Status-bar message shown when the breakpoint is hit. Falls back to the local
   * source file name when the VM cannot provide the source name for the location.
   */
  public String getEventMessage(LocatableEvent event) {
    final Location location = event.location();
    try {
      return DebuggerBundle.message(
        "status.line.breakpoint.reached",
        location.declaringType().name() + "." + location.method().name(),
        location.sourceName(),
        getLineIndex() + 1
      );
    }
    catch (AbsentInformationException e) {
      final String sourceName = getSourcePosition().getFile().getName();
      return DebuggerBundle.message(
        "status.line.breakpoint.reached",
        location.declaringType().name() + "." + location.method().name(),
        sourceName,
        getLineIndex() + 1
      );
    }
  }

  public PsiElement getEvaluationElement() {
    return PositionUtil.getContextElement(getSourcePosition());
  }

  /**
   * Factory: creates and initializes a line breakpoint for the given document line,
   * or returns null when the document has no backing virtual file.
   */
  protected static LineBreakpoint create(Project project, Document document, int lineIndex) {
    VirtualFile virtualFile = FileDocumentManager.getInstance().getFile(document);
    if (virtualFile == null) return null;
    LineBreakpoint breakpoint = new LineBreakpoint(project, createHighlighter(project, document, lineIndex));
    return (LineBreakpoint)breakpoint.init();
  }

  /** A line breakpoint may only move to a line where one could be newly created. */
  public boolean canMoveTo(SourcePosition position) {
    if (!super.canMoveTo(position)) {
      return false;
    }
    final Document document = PsiDocumentManager.getInstance(getProject()).getDocument(position.getFile());
    return canAddLineBreakpoint(myProject, document, position.getLine());
  }

  /**
   * Checks whether a line breakpoint can be placed on the given line: the line must
   * be in range, not already hold a line breakpoint, and contain executable code
   * (not only whitespace/comments; for a method declaration line, the body's first
   * statement must start on that same line).
   */
  public static boolean canAddLineBreakpoint(Project project, final Document document, final int lineIndex) {
    if (lineIndex < 0 || lineIndex >= document.getLineCount()) {
      return false;
    }
    final BreakpointManager breakpointManager = DebuggerManagerEx.getInstanceEx(project).getBreakpointManager();
    final LineBreakpoint breakpointAtLine = breakpointManager.findBreakpoint( document, document.getLineStartOffset(lineIndex), CATEGORY);
    if (breakpointAtLine != null) {
      // there already exists a line breakpoint at this line
      return false;
    }
    PsiDocumentManager.getInstance(project).commitDocument(document);

    final boolean[] canAdd = new boolean[]{false};
    XDebuggerUtil.getInstance().iterateLine(project, document, lineIndex, new Processor<PsiElement>() {
      public boolean process(PsiElement element) {
        // Skip whitespace and anything inside a comment.
        if ((element instanceof PsiWhiteSpace) || (PsiTreeUtil.getParentOfType(element, PsiComment.class, false) != null)) {
          return true;
        }
        // Climb to the outermost element that still starts on this line.
        PsiElement child = element;
        while(element != null) {
          final int offset = element.getTextOffset();
          if (offset >= 0) {
            if (document.getLineNumber(offset) != lineIndex) {
              break;
            }
          }
          child = element;
          element = element.getParent();
        }
        if(child instanceof PsiMethod && child.getTextRange().getEndOffset() >= document.getLineEndOffset(lineIndex)) {
          // Method declaration line: only allow if the body's first statement is here.
          PsiCodeBlock body = ((PsiMethod)child).getBody();
          if(body == null) {
            canAdd[0] = false;
          }
          else {
            PsiStatement[] statements = body.getStatements();
            canAdd[0] = statements.length > 0 && document.getLineNumber(statements[0].getTextOffset()) == lineIndex;
          }
        }
        else {
          canAdd[0] = true;
        }
        return false;
      }
    });

    return canAdd[0];
  }

  public @Nullable String getMethodName() {
    return myMethodName;
  }
}
import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.*;
import java.util.*;
import java.util.stream.IntStream;

/**
 * Entry point: renders a 1000x1000 prime spiral (Ulam-style) to "uram.png".
 */
public class Main {
    public static void main(String[] args) {
        new Execution(1000);
    }
}

/**
 * Drives one rendering run: lays the numbers 0 .. num*num-1 out as a square
 * spiral, then writes an image where prime cells are black and the rest white.
 */
class Execution {
    /** Side length of the square spiral to generate. */
    private int num;
    /** Primality lookup table covering every value placed in the spiral. */
    private Prime prime;

    public Execution(int num) {
        this.num = num;
        // 10_000_000 comfortably covers num*num - 1 for num = 1000.
        prime = new Prime(10000000);
        System.out.println("READY");
        calculation();
    }

    /** Builds the spiral grid and saves it as an image. */
    private void calculation() {
        saveBufferdImage(new VortexManager(new DataSet(num)).getArray());
    }

    /**
     * Writes the grid to "result.csv", one row per line, each cell followed by a comma.
     * Fixed: the original dereferenced a possibly-null writer after a failed open and
     * did not guarantee it was closed; try-with-resources handles both.
     */
    private void saveCSV(int[][] data) {
        try (PrintWriter pw = new PrintWriter(new BufferedWriter(new FileWriter(new File("result.csv"))))) {
            for (int[] row : data) {
                for (int cell : row) {
                    pw.print(cell + ",");
                }
                pw.println();
            }
        } catch (IOException e) {
            System.out.println("FILE_ERROR");
        }
    }

    /** Debug helper: prints '#' for prime cells and '.' otherwise. */
    private void printArray(int[][] data) {
        for (int i = 0; i < data.length; i++) {
            for (int j = 0; j < data[i].length; j++) {
                if (prime.contains(data[i][j])) {
                    System.out.print("#");
                } else {
                    System.out.print(".");
                }
            }
            System.out.println();
        }
    }

    /** Renders the grid to "uram.png": black pixel = prime, white = composite. */
    private void saveBufferdImage(int data[][]) {
        BufferedImage bi = new BufferedImage(data.length, data[0].length, BufferedImage.TYPE_INT_RGB);
        // Parallel over rows; every (e, f) coordinate is written exactly once, so no
        // two threads touch the same pixel.
        IntStream.range(0, data.length).parallel().forEach(e ->
                IntStream.range(0, data[0].length).forEach(f ->
                        bi.setRGB(e, f, getPrimeRGB(e, f, data[e][f]))));
        File tmp = new File("uram.png");
        try {
            ImageIO.write(bi, "png", tmp);
        } catch (IOException e) {
            System.out.println("FILE ERROR");
        }
    }

    /** Pixel color for a cell value; row/column are currently unused. */
    private int getPrimeRGB(int row, int column, int num) {
        if (prime.contains(num)) {
            return rgb(0, 0, 0);
        } else {
            return rgb(255, 255, 255);
        }
    }

    /** Packs 8-bit channels into an opaque ARGB int. */
    private int rgb(int r, int g, int b) {
        return 0xff000000 | r << 16 | g << 8 | b;
    }
}

/**
 * Lays a number sequence out as a square spiral and exposes the resulting grid,
 * reoriented by a transpose plus two left rotations.
 */
class VortexManager {
    private FreeIndexArray fia;
    private Vortex vortex;

    /** Spirals the values 1 .. range*range - 1. */
    public VortexManager(int range) {
        vortex = new Vortex(new FreeIndexArray(), 0, 0);
        calculation(IntStream.range(1, range * range));
    }

    /** Spirals every value of the given data set, in order. */
    public VortexManager(DataSet ds) {
        vortex = new Vortex(new FreeIndexArray(), 0, 0);
        calculation(ds);
    }

    private void calculation(IntStream range) {
        range.forEach(e -> vortex.add(e));
        orient();
    }

    private void calculation(DataSet ds) {
        for (int i = 0; i < ds.size(); i++) {
            vortex.add(ds.get(i));
        }
        orient();
    }

    /** Shared post-processing: transpose, then rotate left twice. */
    private void orient() {
        fia = vortex.getFIA();
        fia.reverse();
        fia.rotateLeft();
        fia.rotateLeft();
    }

    /**
     * Sum of both diagonals of the (square) grid. The trailing "- 1" appears to
     * compensate for the centre cell being counted in both diagonals —
     * NOTE(review): presumably tuned for the 1-based constructor; confirm for
     * other inputs.
     */
    public int getSum() {
        int sum = 0;
        int[][] tmp = fia.getRotatedArray();
        for (int i = 0; i < tmp.length; i++) {
            sum += tmp[i][i];
        }
        for (int i = 0; i < tmp.length; i++) {
            sum += tmp[tmp.length - 1 - i][i];
        }
        return sum - 1;
    }

    public int[][] getArray() {
        return fia.getRotatedArray();
    }
}

/**
 * Sparse 2-D int grid keyed by "x,y" strings, supporting negative coordinates.
 * Once materialised into a dense snapshot, the snapshot can be transposed
 * (reverse) and rotated 90 degrees counter-clockwise (rotateLeft).
 */
class FreeIndexArray {
    private HashMap<String, Integer> array;
    // Dense snapshot of the sparse map; null until first needed.
    private int[][] rotatedArray;
    private int xmin;
    private int xmax;
    private int ymin;
    private int ymax;

    public FreeIndexArray() {
        array = new HashMap<>();
    }

    /** Stores num at (x, y), widening the known bounding box. */
    public void add(int x, int y, int num) {
        processingCoordinateData(x, y);
        array.put(key(x, y), num);
    }

    /** Returns the value at (x, y), or 0 for cells that were never written. */
    public int get(int x, int y) {
        Integer value = array.get(key(x, y));
        return value == null ? 0 : value;
    }

    /** Canonical map key for a coordinate pair. */
    private String key(int x, int y) {
        return x + "," + y;
    }

    /** Tracks the bounding box of all coordinates seen so far. */
    private void processingCoordinateData(int x, int y) {
        if (x < xmin) {
            xmin = x;
        }
        if (xmax < x) {
            xmax = x;
        }
        if (y < ymin) {
            ymin = y;
        }
        if (ymax < y) {
            ymax = y;
        }
    }

    /**
     * Rotates the dense grid 90 degrees counter-clockwise.
     * Fixed: the source column index must be based on the column count
     * (rotatedArray[0].length), not the row count — the original indexing only
     * happened to work for square grids.
     */
    public void rotateLeft() {
        if (rotatedArray == null) {
            makeEstablished();
        }
        int[][] tmp = new int[rotatedArray[0].length][rotatedArray.length];
        // Each tmp[i][j] is written exactly once, so the parallel fill is safe.
        IntStream.range(0, tmp.length).parallel().forEach(i ->
                IntStream.range(0, tmp[i].length).parallel().forEach(j ->
                        tmp[i][j] = rotatedArray[j][rotatedArray[0].length - 1 - i]));
        rotatedArray = tmp;
    }

    /** Despite its name, this transposes the dense grid (tmp[i][j] = src[j][i]). */
    public void reverse() {
        if (rotatedArray == null) {
            makeEstablished();
        }
        int[][] tmp = new int[rotatedArray[0].length][rotatedArray.length];
        IntStream.range(0, tmp.length).parallel().forEach(i ->
                IntStream.range(0, tmp[i].length).parallel().forEach(j ->
                        tmp[i][j] = rotatedArray[j][i]));
        rotatedArray = tmp;
    }

    /** Materialises the sparse map into a dense array spanning the bounding box. */
    private void makeEstablished() {
        rotatedArray = new int[xmax - xmin + 1][ymax - ymin + 1];
        IntStream.range(0, rotatedArray.length).parallel().forEach(i ->
                IntStream.range(0, rotatedArray[i].length).parallel().forEach(j ->
                        procEstablished(i, j)));
    }

    /** Copies one cell from the sparse map; missing cells become 0. */
    private void procEstablished(int i, int j) {
        rotatedArray[i][j] = get(i + xmin, j + ymin);
    }

    /** Debug helper: dumps the dense snapshot, tab-separated. */
    public void rotatedArrayPrint() {
        for (int i = 0; i < rotatedArray.length; i++) {
            for (int j = 0; j < rotatedArray[i].length; j++) {
                System.out.print(rotatedArray[i][j] + "\t");
            }
            System.out.println();
        }
    }

    /** Debug helper: dumps the sparse map over its bounding box, tab-separated. */
    public void print() {
        for (int i = xmin; i <= xmax; i++) {
            for (int j = ymin; j <= ymax; j++) {
                System.out.print(get(i, j) + "\t");
            }
            System.out.println();
        }
    }

    /** The dense snapshot; null until reverse()/rotateLeft() has been called. */
    public int[][] getRotatedArray() {
        return rotatedArray;
    }
}

/**
 * Feeds successive values into a square spiral walk, recording each value's
 * (x, y) cell in a FreeIndexArray.
 */
class Vortex {
    private FreeIndexArray fia;
    private VortexManipulation vm;

    public Vortex(FreeIndexArray fia, int cx, int cy) {
        this.fia = fia;
        vm = new VortexManipulation(cx, cy);
    }

    /** Places num at the next cell along the spiral. */
    public void add(int num) {
        int[] tmp = vm.next();
        fia.add(tmp[0], tmp[1], num);
    }

    public FreeIndexArray getFIA() {
        return fia;
    }

    /**
     * Generates spiral coordinates one cell at a time. The walk starts at
     * (cx, cy) and repeats LEFT, DOWN, RIGHT, UP legs; the leg length grows by
     * one before each LEFT and each RIGHT leg.
     */
    class VortexManipulation {
        private int range;                      // current leg length
        private int cx;                         // cursor x after the last completed leg
        private int cy;                         // cursor y after the last completed leg
        private String nextState;               // direction of the next leg
        private boolean initFlag;               // true until the centre cell is emitted
        private ArrayList<Integer[]> currentLine;
        private int pointer;                    // index of the current cell in currentLine

        public VortexManipulation(int cx, int cy) {
            this.cx = cx;
            this.cy = cy;
            range = 0;
            nextState = "LEFT";
            initFlag = true;
        }

        /** Returns the next {x, y} cell of the spiral. */
        public int[] next() {
            int[] tmp = new int[2];
            if (currentLine == null) {
                pointer = 0;
                currentLine = nextLine();
                tmp[0] = currentLine.get(pointer)[0];
                tmp[1] = currentLine.get(pointer)[1];
                return tmp;
            } else {
                pointer++;
                if (currentLine.size() == pointer) {
                    currentLine = nextLine();
                    pointer = 0;
                }
                tmp[0] = currentLine.get(pointer)[0];
                tmp[1] = currentLine.get(pointer)[1];
                return tmp;
            }
        }

        /** Produces all cells of the next leg of the spiral (or just the centre cell first). */
        public ArrayList<Integer[]> nextLine() {
            ArrayList<Integer[]> line = new ArrayList<>();
            if (initFlag) {
                initFlag = false;
                Integer[] result = new Integer[2];
                result[0] = cx;
                result[1] = cy;
                line.add(result);
                return line;
            } else {
                switch (nextState) {
                    case "LEFT":
                        range++;
                        for (int i = cx - 1; cx - range <= i; i--) {
                            Integer[] result = new Integer[2];
                            result[0] = i;
                            result[1] = cy;
                            line.add(result);
                        }
                        cx -= range;
                        nextState = "DOWN";
                        break;
                    case "DOWN":
                        for (int i = cy + 1; i <= cy + range; i++) {
                            Integer[] result = new Integer[2];
                            result[0] = cx;
                            result[1] = i;
                            line.add(result);
                        }
                        cy += range;
                        nextState = "RIGHT";
                        break;
                    case "RIGHT":
                        range++;
                        for (int i = cx + 1; i <= cx + range; i++) {
                            Integer[] result = new Integer[2];
                            result[0] = i;
                            result[1] = cy;
                            line.add(result);
                        }
                        cx += range;
                        nextState = "UP";
                        break;
                    case "UP":
                        for (int i = cy - 1; cy - range <= i; i--) {
                            Integer[] result = new Integer[2];
                            result[0] = cx;
                            result[1] = i;
                            line.add(result);
                        }
                        cy -= range;
                        nextState = "LEFT";
                        break;
                }
                return line;
            }
        }
    }
}

/** The sequence 0 .. rectSideNum^2 - 1. */
class DataSet {
    private int[] data;

    public DataSet(int rectSideNum) {
        data = new int[rectSideNum * rectSideNum];
        calculation();
    }

    private void calculation() {
        // Each index is written exactly once, so the parallel fill is safe.
        IntStream.range(0, data.length).parallel().forEach(e -> data[e] = e);
    }

    public int size() {
        return data.length;
    }

    public int get(int index) {
        return data[index];
    }
}

/** Sieve of Eratosthenes covering 0 .. limit inclusive. */
class Prime {
    private boolean[] boolList; // boolList[n] == true iff n is prime
    private int maxPrime;       // largest prime found, i.e. largest prime <= limit

    public Prime(int limit) {
        boolList = new boolean[limit + 1];
        makeBool(limit);
    }

    /**
     * Marks composites up to and including limit.
     * Fixed: the original looped with {@code < limit}, so {@code limit} itself was
     * never classified even though the table has room for it — contains(limit)
     * was wrongly true whenever limit was composite.
     */
    private void makeBool(int limit) {
        Arrays.fill(boolList, true);
        boolList[0] = false;
        boolList[1] = false;
        for (int i = 2; i <= limit; i++) {
            if (boolList[i]) {
                maxPrime = i;
                for (int j = i + i; j <= limit; j += i) {
                    boolList[j] = false;
                }
            }
        }
    }

    /** True iff num is prime; values outside [0, limit] are never prime. */
    public boolean contains(int num) {
        return 0 <= num && num < boolList.length && boolList[num];
    }

    public int getMaxPrime() {
        return maxPrime;
    }
}
/*
consequence>>
..#.......#.....#...
.......#...#.#...#.#
......#.........#...
.#...#.#.........#..
..#.....#.#.....#...
.......#.......#....
..#.#...#.#...#.#...
...........#.......#
#...#.#...#.#.......
.#.#.#.#.###.#.#.#..
........#...........
...#...#.#..........
....#.#...#...#.#...
.....#.#...#.....#.#
#...#.#.....#.....#.
.#.....#.........#..
....#.....#...#.....
.#.#...........#....
......#...#.#.......
.....#.#...#.....#..
*/
package com.example.jairo.notemanager;

import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.graphics.Color;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.Bundle;
import android.support.design.widget.CoordinatorLayout;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.View;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.TextView;

import logic.Recipe;
import logic.World;

/**
 * Controls the main activity of the application: shows the list of recipes/notes,
 * launches {@link AddNote} to create or edit one, and keeps the list in sync with
 * the backend via {@code World}.
 */
public class MainActivity extends AppCompatActivity {

    /** List in which all the recipes are shown. */
    private ListView recipesList;

    /** Singleton application model instance. */
    private World world;

    /** Adapter backing {@link #recipesList}. */
    private ArrayAdapter<Recipe> arrayAdapter;

    /** Request code used to get the result of the AddNote activity. */
    public final int ACTIVITY_ADD=1;

    /** Pull-to-refresh wrapper around the recipe list. */
    private SwipeRefreshLayout swipeRefreshLayout;

    /** Coordinator layout used as the anchor for Snackbar messages. */
    private CoordinatorLayout coordinator;

    /** Floating button to add a recipe/note. */
    private FloatingActionButton fab;

    /**
     * Initialises all views and wires up their actions: FAB creates a new note,
     * tapping a list item edits it, and pull-to-refresh reloads from the API.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        world = World.getInstance();
        setContentView(R.layout.activity_main);
        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        recipesList = (ListView) findViewById(R.id.recipesList);
        setSupportActionBar(toolbar);
        coordinator = (CoordinatorLayout) findViewById(R.id.coordinator);
        fab = (FloatingActionButton) findViewById(R.id.fab);
        fab.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // -1 means "create a new note" rather than edit an existing one.
                lunchAddNote(-1);
            }
        });
        arrayAdapter = new ArrayAdapter<Recipe>(
                this, android.R.layout.simple_list_item_1, world.getArrayRecipes());
        world.sEtListAdapter(arrayAdapter);
        recipesList.setAdapter(arrayAdapter);
        recipesList.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
                lunchAddNote(i);
            }
        });
        swipeRefreshLayout = (SwipeRefreshLayout) findViewById(R.id.swiperefresh);
        swipeRefreshLayout.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
            @Override
            public void onRefresh() {
                update();
                swipeRefreshLayout.setRefreshing(false);
            }
        });
        update();
    }

    /**
     * Launches the AddNote activity, either for the note at the given list
     * position or — when pos is -1 — for a brand-new note.
     *
     * @param pos list position of the note to edit, or -1 to create a new one
     */
    public void lunchAddNote(int pos) {
        Intent intent = new Intent(this, AddNote.class);
        if (pos != -1) {
            intent.putExtra("item", pos);
        }
        startActivityForResult(intent, ACTIVITY_ADD);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();

        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            return true;
        }

        return super.onOptionsItemSelected(item);
    }

    /**
     * Receives the AddNote result and shows a Snackbar describing whether a note
     * was added, updated or deleted; refreshes the adapter in each case. Bails out
     * early when there is no network connection.
     */
    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (!connected()) {
            return;
        }
        switch (requestCode) {
            case (ACTIVITY_ADD): {
                if (resultCode == Activity.RESULT_OK) {
                    boolean returnValue = data.getBooleanExtra("ADDED", false);
                    // NOTE(review): extra key "MODIFIDED" looks misspelled, but it must
                    // match the key AddNote writes — verify against AddNote before renaming.
                    boolean edited = data.getBooleanExtra("MODIFIDED", false);
                    boolean deleted = data.getBooleanExtra("DELETED", false);
                    if (returnValue) {
                        arrayAdapter.notifyDataSetChanged();
                        Snackbar.make(coordinator, "Note added", Snackbar.LENGTH_LONG)
                                .setAction("Action", null).show();
                    }
                    if (edited) {
                        arrayAdapter.notifyDataSetChanged();
                        Snackbar.make(coordinator, "Note updated", Snackbar.LENGTH_LONG)
                                .setAction("Action", null).show();
                    }
                    if (deleted) {
                        arrayAdapter.notifyDataSetChanged();
                        Snackbar.make(coordinator, "Note deleted", Snackbar.LENGTH_LONG)
                                .setAction("Action", null).show();
                    }
                }
                break;
            }
        }
    }

    /**
     * Checks whether the phone is connected to the internet; if not, displays a
     * Snackbar with a RETRY action that re-runs {@link #update()}.
     *
     * @return true when an active, connected network exists
     */
    private boolean connected() {
        ConnectivityManager connectivityManager
                = (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE);
        NetworkInfo activeNetworkInfo = connectivityManager.getActiveNetworkInfo();
        boolean rta = activeNetworkInfo != null && activeNetworkInfo.isConnected();
        if (!rta) {
            Snackbar snackbar = Snackbar
                    .make(coordinator, "No internet connection!", Snackbar.LENGTH_LONG)
                    .setAction("RETRY", new View.OnClickListener() {
                        @Override
                        public void onClick(View view) {
                            update();
                        }
                    });

            // Changing action button ("RETRY") text color
            snackbar.setActionTextColor(Color.RED);

            // Changing message text color
            View sbView = snackbar.getView();
            TextView textView = (TextView) sbView.findViewById(android.support.design.R.id.snackbar_text);
            textView.setTextColor(Color.YELLOW);
            snackbar.show();
        }
        return rta;
    }

    /**
     * Fetches all the recipes from the API when connected; otherwise hides the
     * FAB so no note can be created while offline.
     */
    private void update() {
        if (connected()) {
            fab.setVisibility(View.VISIBLE);
            world.getRecipes();
        } else {
            fab.setVisibility(View.INVISIBLE);
        }
    }
}
/* * Copyright (c) 2005-2011, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.apimgt.api; import org.json.simple.JSONObject; import org.wso2.carbon.apimgt.api.model.*; import java.util.List; import java.util.Map; import java.util.Set; /** * APIConsumer responsible for providing helper functionality */ public interface APIConsumer extends APIManager { /** * @param subscriberId id of the Subscriber * @return Subscriber * @throws APIManagementException if failed to get Subscriber */ Subscriber getSubscriber(String subscriberId) throws APIManagementException; /** * Returns a list of #{@link org.wso2.carbon.apimgt.api.model.API} bearing the selected tag * * @param tag name of the tag * @return set of API having the given tag name * @throws APIManagementException if failed to get set of API */ Set<API> getAPIsWithTag(String tag, String tenantDomain) throws APIManagementException; /** * Returns a paginated list of #{@link org.wso2.carbon.apimgt.api.model.API} bearing the selected tag * * @param tag name of the tag * @param start starting number * @param end ending number * @return set of API having the given tag name * @throws APIManagementException if failed to get set of API */ Map<String,Object> getPaginatedAPIsWithTag(String tag, int start, int end, String tenantDomain) throws APIManagementException; /** * Returns a list of all published APIs. 
If a given API has multiple APIs, * only the latest version will be included * in this list. * * @return set of API * @throws APIManagementException if failed to API set */ Set<API> getAllPublishedAPIs(String tenantDomain) throws APIManagementException; /** * Returns a paginated list of all published APIs. If a given API has multiple APIs, * only the latest version will be included * in this list. * @param tenantDomain tenant domain * @param start starting number * @param end ending number * @return set of API * @throws APIManagementException if failed to API set */ Map<String,Object> getAllPaginatedPublishedAPIs(String tenantDomain, int start, int end) throws APIManagementException; /** * Returns top rated APIs * * @param limit if -1, no limit. Return everything else, limit the return list to specified value. * @return Set of API * @throws APIManagementException if failed to get top rated APIs */ Set<API> getTopRatedAPIs(int limit) throws APIManagementException; /** * Get recently added APIs to the store * * @param limit if -1, no limit. Return everything else, limit the return list to specified value. * @return set of API * @throws APIManagementException if failed to get recently added APIs */ Set<API> getRecentlyAddedAPIs(int limit,String tenantDomain) throws APIManagementException; /** * Get all tags of published APIs * * @return a list of all Tags applied to all APIs published. * @throws APIManagementException if failed to get All the tags */ Set<Tag> getAllTags(String tenantDomain) throws APIManagementException; /** * Returns all tags with their descriptions. * * NOTE : The reason for having a separate method to get the tags with their attributes is, * because of the implementation of addition tag attributes. * Tag attributes are saved in a registry location with convention. * e.g. governance/apimgt/applicationdata/tags/{tag_name}/description.txt. * In most of the use cases these attributes are not needed. 
* So not fetching the description if it is not needed is healthy for performance. * * @param tenantDomain Tenant domain. * @return The description of the tag. * @throws APIManagementException if there is a failure in getting the description. */ Set<Tag> getTagsWithAttributes(String tenantDomain)throws APIManagementException; /** * Rate a particular API. This will be called when subscribers rate an API * * @param apiId The API identifier * @param rating The rating provided by the subscriber * @param user Username of the subscriber providing the rating * @throws APIManagementException If an error occurs while rating the API */ void rateAPI(APIIdentifier apiId, APIRating rating, String user) throws APIManagementException; /** * Remove an user rating of a particular API. This will be called when subscribers remove their rating on an API * * @param apiId The API identifier * @param user Username of the subscriber providing the rating * @throws APIManagementException If an error occurs while rating the API */ void removeAPIRating(APIIdentifier apiId, String user) throws APIManagementException; /** returns the SubscribedAPI object which is related to the subscriptionId * * @param subscriptionId subscription id * @return * @throws APIManagementException */ SubscribedAPI getSubscriptionById(int subscriptionId) throws APIManagementException; /** * Returns a set of SubscribedAPI purchased by the given Subscriber * * @param subscriber Subscriber * @return Set<API> * @throws APIManagementException if failed to get API for subscriber */ Set<SubscribedAPI> getSubscribedAPIs(Subscriber subscriber) throws APIManagementException; /** * @param subscriber the subscriber to be subscribed to the API * @param groupingId the groupId of the subscriber * @return the subscribed API's * @throws APIManagementException */ Set<SubscribedAPI> getSubscribedAPIs(Subscriber subscriber, String groupingId) throws APIManagementException; /** * Returns a set of SubscribedAPIs filtered by the given 
application name. * * @param subscriber Subscriber * @return Set<API> * @throws APIManagementException if failed to get API for subscriber */ Set<SubscribedAPI> getSubscribedAPIs(Subscriber subscriber, String applicationName, String groupingId) throws APIManagementException; /** * * @param jsonString this string will contain oAuth app details * @param userName user name of logged in user. * @param clientId this is the consumer key of oAuthApplication * @param applicationName this is the APIM appication name. * @param keyType * @return * @throws APIManagementException */ Map<String,Object> mapExistingOAuthClient(String jsonString, String userName, String clientId, String applicationName, String keyType) throws APIManagementException; /** *This method will delete from application key mapping table and application registration table. *@param applicationName application Name *@param tokenType Token Type. *@param groupId group id. *@param userName user name. *@return *@throws APIManagementException */ void cleanUpApplicationRegistration(String applicationName, String tokenType, String groupId, String userName) throws APIManagementException; /** * Returns a set of SubscribedAPIs filtered by the given application name and in between starting and ending indexes. 
* * @param subscriber Subscriber * @param applicationName Application needed to find subscriptions * @param startSubIndex Starting index of subscriptions to be listed * @param endSubIndex Ending index of Subscriptions to be listed * @param groupingId the group id of the application * @return * @throws APIManagementException */ Set<SubscribedAPI> getPaginatedSubscribedAPIs(Subscriber subscriber, String applicationName, int startSubIndex, int endSubIndex, String groupingId) throws APIManagementException; /** * Returns true if a given user has subscribed to the API * * @param apiIdentifier APIIdentifier * @param userId user id * @return true, if giving api identifier is already subscribed * @throws APIManagementException if failed to check the subscribed state */ boolean isSubscribed(APIIdentifier apiIdentifier, String userId) throws APIManagementException; /** * Returns the number of subscriptions for the given subscriber and app. * * @param subscriber Subscriber * @param applicationName Application * @return The number of subscriptions * @throws APIManagementException if failed to count the number of subscriptions. 
*/ Integer getSubscriptionCount(Subscriber subscriber,String applicationName,String groupingId) throws APIManagementException; /** * Add new Subscriber * * @param identifier APIIdentifier * @param userId id of the user * @param applicationId Application Id * @return SubscriptionResponse subscription response object * @throws APIManagementException if failed to add subscription details to database */ SubscriptionResponse addSubscription(APIIdentifier identifier, String userId, int applicationId) throws APIManagementException; /** * * @param subscriptionId id of the subscription * @return * @throws APIManagementException if failed to get subscription detail from database */ String getSubscriptionStatusById(int subscriptionId) throws APIManagementException; /** * Unsubscribe the specified user from the specified API in the given application * * @param identifier APIIdentifier * @param userId id of the user * @param applicationId Application Id * @throws APIManagementException if failed to remove subscription details from database */ void removeSubscription(APIIdentifier identifier, String userId, int applicationId) throws APIManagementException; /** Removes a subscription specified by SubscribedAPI object * * @param subscription SubscribedAPI object which contains the subscription information * @throws APIManagementException */ void removeSubscription(SubscribedAPI subscription) throws APIManagementException; /** * Remove a Subscriber * * @param identifier APIIdentifier * @param userId id of the user * @throws APIManagementException if failed to add subscription details to database */ void removeSubscriber(APIIdentifier identifier, String userId) throws APIManagementException; /** * This method is to update the subscriber. 
* * @param identifier APIIdentifier * @param userId user id * @param applicationId Application Id * @throws APIManagementException if failed to update subscription */ void updateSubscriptions(APIIdentifier identifier, String userId, int applicationId) throws APIManagementException; /** * @param identifier Api identifier * @param comment comment text * @param user Username of the comment author * @throws APIManagementException if failed to add comment for API */ void addComment(APIIdentifier identifier, String comment, String user) throws APIManagementException; /** * @param identifier Api identifier * @return Comments * @throws APIManagementException if failed to get comments for identifier */ Comment[] getComments(APIIdentifier identifier) throws APIManagementException; /** * Adds an application * * @param application Application * @param userId User Id * @return Id of the newly created application * @throws APIManagementException if failed to add Application */ int addApplication(Application application, String userId) throws APIManagementException; /** * Updates the details of the specified user application. * * @param application Application object containing updated data * @throws APIManagementException If an error occurs while updating the application */ void updateApplication(Application application) throws APIManagementException; /** * Function to remove an Application from the API Store * @param application - The Application Object that represents the Application * @throws APIManagementException */ void removeApplication(Application application) throws APIManagementException; /** get the status of the Application creation process given the application Id * * @param applicationId Id of the Application * @return * @throws APIManagementException */ String getApplicationStatusById(int applicationId) throws APIManagementException; /** * Creates a request for getting Approval for Application Registration. * * @param userId Subsriber name. 
* @param applicationName of the Application. * @param tokenType Token type (PRODUCTION | SANDBOX) * @param callbackUrl callback URL * @param allowedDomains allowedDomains for token. * @param validityTime validity time period. * @param groupingId APIM application id. * @param jsonString Callback URL for the Application. * @param tokenScope Scopes for the requested tokens. * * @throws APIManagementException if failed to applications for given subscriber */ Map<String,Object> requestApprovalForApplicationRegistration(String userId, String applicationName, String tokenType, String callbackUrl, String[] allowedDomains, String validityTime, String tokenScope, String groupingId, String jsonString) throws APIManagementException; /** * Creates a request for application update. * * @param userId Subsriber name. * @param applicationName of the Application. * @param tokenType Token type (PRODUCTION | SANDBOX) * @param callbackUrl callback URL * @param allowedDomains allowedDomains for token. * @param validityTime validity time period. * @param groupingId APIM application id. * @param jsonString Callback URL for the Application. * @param tokenScope Scopes for the requested tokens. * @throws APIManagementException if failed to applications for given subscriber */ OAuthApplicationInfo updateAuthClient(String userId, String applicationName, String tokenType, String callbackUrl, String[] allowedDomains, String validityTime, String tokenScope, String groupingId, String jsonString) throws APIManagementException; /** * Delete oAuth application from Key manager and remove key manager mapping from APIM. * @param consumerKey Client id of oAuthApplication. * @throws APIManagementException */ void deleteOAuthApplication(String consumerKey) throws APIManagementException; /** * Returns a list of applications for a given subscriber * * @param subscriber Subscriber * @param groupingId the groupId to which the applications must belong. 
* @return Applications * @throws APIManagementException if failed to applications for given subscriber */ Application[] getApplications(Subscriber subscriber, String groupingId) throws APIManagementException; /** * This will return APIM application by giving name and subscriber * @param userId APIM subscriber ID. * @param ApplicationName APIM application name. * @param groupId Group id. * @return it will return Application. * @throws APIManagementException */ Application getApplicationsByName(String userId , String ApplicationName , String groupId) throws APIManagementException; /** * Returns the corresponding application given the Id * @param id Id of the Application * @return it will return Application corresponds to the id. * @throws APIManagementException */ Application getApplicationById(int id) throws APIManagementException; /** * @param subscriber the subscriber in relation to the identifiers * @param identifier the identifiers of the API's the subscriber is subscribed to * @param groupingId the grouping Id the subscriber. * @return the set of subscribed API's. * @throws APIManagementException */ Set<SubscribedAPI> getSubscribedIdentifiers(Subscriber subscriber, APIIdentifier identifier, String groupingId) throws APIManagementException; Set<APIIdentifier> getAPIByConsumerKey(String accessToken) throws APIManagementException; Set<API> searchAPI(String searchTerm, String searchType,String tenantDomain) throws APIManagementException; Map<String,Object> searchPaginatedAPIs(String searchTerm, String searchType,String tenantDomain,int start,int end, boolean limitAttributes) throws APIManagementException; int getUserRating(APIIdentifier apiId, String user) throws APIManagementException; /** * Get a list of published APIs by the given provider. * * @param providerId , provider id * @param loggedUser logged user * @param limit Maximum number of results to return. Pass -1 to get all. 
* @param apiOwner Owner name which is used to filter APIs * @return set of API * @throws APIManagementException if failed to get set of API */ Set<API> getPublishedAPIsByProvider(String providerId, String loggedUser, int limit, String apiOwner, String apiBizOwner) throws APIManagementException; /** Get a list of published APIs by the given provider. * * @param providerId , provider id * @param limit Maximum number of results to return. Pass -1 to get all. * @return set of API * @throws APIManagementException if failed to get set of API */ Set<API> getPublishedAPIsByProvider(String providerId, int limit) throws APIManagementException; /** * Check whether an application access token is already persist in database. * @param accessToken * @return * @throws APIManagementException */ boolean isApplicationTokenExists(String accessToken) throws APIManagementException; /** * Returns a list of Tiers denied for the current user * * @return Set<String> * @throws APIManagementException if failed to get the tiers */ Set<String> getDeniedTiers()throws APIManagementException; /** * Check whether given Tier is denied for the user * @param tierName * @return * @throws APIManagementException if failed to get the tiers */ boolean isTierDeneid(String tierName)throws APIManagementException; /** * Complete Application Registration process.If the Application registration fails before * generating the Access Tokens, this method should be used to resume registration. * @param userId Tenant Aware userID * @param applicationName Name of the Application * @param tokenType Type of the Token (PRODUCTION | SANDBOX) * @param tokenScope scope of the token * @param groupingId the application belongs to. * @return a Map containing the details of the OAuth application. 
* @throws APIManagementException if failed to get the tiers */ Map<String, String> completeApplicationRegistration(String userId, String applicationName, String tokenType, String tokenScope, String groupingId) throws APIManagementException; /** * Returns details of an API information in low profile * * @param identifier APIIdentifier * @return An API object related to the given identifier or null * @throws APIManagementException if failed get API from APIIdentifier */ API getLightweightAPI(APIIdentifier identifier) throws APIManagementException; /** * Returns a paginated list of all APIs in given Status. If a given API has multiple APIs, * only the latest version will be included * in this list. * @param tenantDomain tenant domain * @param start starting number * @param end ending number * @param returnAPITags If true, tags of each API is returned * @return set of API * @throws APIManagementException if failed to API set */ Map<String,Object> getAllPaginatedAPIsByStatus(String tenantDomain,int start,int end, String Status, boolean returnAPITags) throws APIManagementException; /** * Returns a paginated list of all APIs in given Status list. If a given API has multiple APIs, * only the latest version will be included in this list. * @param tenantDomain tenant domain * @param start starting number * @param end ending number * @param Status One or more Statuses * @param returnAPITags If true, tags of each API is returned * @return set of API * @throws APIManagementException if failed to API set */ Map<String,Object> getAllPaginatedAPIsByStatus(String tenantDomain,int start,int end, String[] Status, boolean returnAPITags) throws APIManagementException; /** * Revokes the oldAccessToken generating a new one. 
* * @param oldAccessToken Token to be revoked * @param clientId Consumer Key for the Application * @param clientSecret Consumer Secret for the Application * @param validityTime Desired Validity time for the token * @param jsonInput Additional parameters if Authorization server needs any. * @return Details of the newly generated Access Token. * @throws APIManagementException */ AccessTokenInfo renewAccessToken(String oldAccessToken, String clientId, String clientSecret, String validityTime, String[] requestedScopes, String jsonInput) throws APIManagementException; /** * Returns a set of scopes associated with a list of API identifiers. * * @param identifiers list of API identifiers * @return set of scopes. * @throws APIManagementException */ Set<Scope> getScopesBySubscribedAPIs(List<APIIdentifier> identifiers) throws APIManagementException; /** * Returns the scopes of an access token as a string * * @param accessToken access token you want to receive scopes for * @return scopes of the access token as a string * @throws APIManagementException */ String getScopesByToken(String accessToken) throws APIManagementException; /** * Returns a set of scopes for a given space seperated scope key string * * @param scopeKeys a space seperated string of scope keys * @param tenantId tenant id * @return set of scopes * @throws APIManagementException */ Set<Scope> getScopesByScopeKeys(String scopeKeys, int tenantId) throws APIManagementException; String getGroupIds(String response) throws APIManagementException; JSONObject resumeWorkflow(Object[] args); boolean isMonetizationEnabled(String tenantDomain) throws APIManagementException; }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.harmony.security.tests.provider.crypto;

import java.io.UnsupportedEncodingException;

import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;

import org.apache.harmony.security.provider.crypto.SHA1Impl;

import java.security.MessageDigest;

/**
 * Tests against methods in SHA1Impl class.
 * The input data and results of computing are defined in Secure Hash Standard
 * (FIPS 180-1), see http://www.itl.nist.gov/fipspubs/fip180-1.htm
 */
public class SHA1ImplTest extends TestCase {

    // Sizing constant for the working word array used below; taken from
    // SHA1Impl so the test buffer matches the implementation's layout.
    // (The alternate-hash routine itself only touches words 0..15.)
    private static final int INDEX = SHA1Impl.BYTES_OFFSET;

    // Digest under test; re-created for every test method in setUp().
    private static MessageDigest md;

    /*
     * @see TestCase#setUp()
     */
    protected void setUp() throws Exception {
        super.setUp();
        // "Crypto" is the Harmony provider that is backed by SHA1Impl,
        // so exercising this MessageDigest exercises the class under test.
        md = MessageDigest.getInstance("SHA-1", "Crypto");
    }

    /*
     * The test checks out that for given three byte input
     * a value returned by SHA1Impl is equal to both :
     * - one defined in the Standard and
     * - one calculated with alternative computation algorithm defined in the Standard.
     */
    public final void testOneBlockMessage() {

        int[] words = new int[INDEX +6];    // working array to compute hash

        // values defined in examples in Secure Hash Standard:
        // hash1 starts as the standard initial state (H0..H4) and is updated
        // in place by alternateHash(); hash is the expected digest of "abc".
        int[] hash1 = {0x67452301, 0xEFCDAB89, 0x98BADCFE, 0x10325476, 0xC3D2E1F0 };
        int[] hash  = {0xA9993E36, 0x4706816A, 0xBA3E2571, 0x7850C26C, 0x9CD0D89D };

        for (int i = 0; i < words.length; i++ ) {
            words[i] = 0;
        }
        // Pre-padded one-block message "abc": the 0x80 terminator byte is
        // packed into word 0 and the bit length (24) into word 15.
        words[0] = 0x61626380;    // constants from Secure Hash Standard
        words[15] = 0x00000018;

        alternateHash(words, hash1);

        md.update(new byte[]{0x61,0x62,0x63});    // "abc" as raw bytes
        byte[] dgst = md.digest();

        for ( int k = 0; k < 5; k++ ) {
            int i = k*4;

            // Repack four consecutive digest bytes into one big-endian word
            // so it can be compared against the int-based reference values.
            int j = ((dgst[i ]&0xff)<<24) | ((dgst[i+1]&0xff)<<16) |
                    ((dgst[i+2]&0xff)<<8 ) | (dgst[i+3]&0xff) ;

            // reference value vs. value from the alternate algorithm
            assertTrue("false1: k=" + k + " hash1[k]=" + Integer.toHexString(hash1[k]),
                       hash[k] == hash1[k] );

            // reference value vs. value computed by SHA1Impl via MessageDigest
            assertTrue("false2: k=" + k + " j=" + Integer.toHexString(j),
                       hash[k] == j );
        }
    }

    /*
     * The test checks out that SHA1Impl computes correct value
     * if data supplied takes exactly fourteen words of sixteen word buffer.
     */
    public final void testMultiBlockMessage() throws UnsupportedEncodingException {

        // values defined in examples in Secure Hash Standard
        int[] hash = {0x84983e44, 0x1c3bd26e, 0xbaae4aa1, 0xf95129e5, 0xe54670f1 };

        // string defined in examples in Secure Hash Standard (56 ASCII bytes,
        // i.e. fourteen 32-bit words of the sixteen-word block)
        md.update("abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq".getBytes("UTF-8"));
        byte[] dgst = md.digest();

        for ( int k = 0; k < 5; k++ ) {
            int i = k*4;
            // big-endian repack of four digest bytes (same as above)
            int j = ((dgst[i ]&0xff)<<24) | ((dgst[i+1]&0xff)<<16) |
                    ((dgst[i+2]&0xff)<<8 ) | (dgst[i+3]&0xff) ;
            assertTrue("false: k=" + k + " j=" + Integer.toHexString(j), hash[k] == j );
        }
    }

    /*
     * The test checks out that SHA1Impl returns correct values
     * for four different cases of infilling internal buffer and computing intermediate hash.
     */
    public final void testLongMessage() {

        // values defined in examples in Secure Hash Standard
        // (digest of one million 'a' characters)
        int[] hash = {0x34aa973c, 0xd4c4daa4, 0xf61eeb2b, 0xdbad2731, 0x6534016f };

        // update chunks of 1..4 bytes; each (msgs[n], lngs[n]) pair feeds the
        // same 1,000,000-byte message in differently sized pieces, so the
        // implementation's internal buffering is exercised at four alignments
        byte msgs[][] = new byte[][] { {0x61},
                                       {0x61, 0x61},
                                       {0x61, 0x61, 0x61},
                                       {0x61, 0x61, 0x61, 0x61} };
        int lngs[] = new int[]{1000000, 500000, 333333, 250000};

        for ( int n = 0; n < 4; n++ ) {
            for ( int i = 0; i < lngs[n]; i++) {
                md.update(msgs[n]);
            }
            if ( n == 2 ) {
                // 333333 * 3 = 999999 bytes; add the final 'a' to reach 1e6
                md.update(msgs[0]);
            }
            byte[] dgst = md.digest();

            for ( int k = 0; k < 5; k++ ) {
                int i = k*4;
                // big-endian repack of four digest bytes (same as above)
                int j = ((dgst[i ]&0xff)<<24) | ((dgst[i+1]&0xff)<<16) |
                        ((dgst[i+2]&0xff)<<8 ) | (dgst[i+3]&0xff) ;
                assertTrue("false: n =" + n + " k=" + k + " j" + Integer.toHexString(j),
                           hash[k] == j );
            }
        }
    }

    /**
     * Implements the alternative computation method described in the
     * SECURE HASH STANDARD (FIPS 180-1, section 8): a single-block SHA-1
     * compression using a 16-word circular buffer instead of an 80-word
     * schedule.
     *
     * @param bufW sixteen-word message block (indices 0..15 are read and
     *             overwritten in place as the rolling schedule)
     * @param hash five-word chaining state; updated in place with the result
     */
    private void alternateHash(int[] bufW, int[] hash) {

        // constants defined in Secure Hash Standard:
        // K[t] is 0x5A827999 for t<20, 0x6ED9EBA1 for t<40,
        // 0x8F1BBCDC for t<60 and 0xCA62C1D6 for t<80
        final int[] K = {

            0x5A827999, 0x5A827999, 0x5A827999, 0x5A827999, 0x5A827999,
            0x5A827999, 0x5A827999, 0x5A827999, 0x5A827999, 0x5A827999,
            0x5A827999, 0x5A827999, 0x5A827999, 0x5A827999, 0x5A827999,
            0x5A827999, 0x5A827999, 0x5A827999, 0x5A827999, 0x5A827999,

            0x6ED9EBA1, 0x6ED9EBA1, 0x6ED9EBA1, 0x6ED9EBA1, 0x6ED9EBA1,
            0x6ED9EBA1, 0x6ED9EBA1, 0x6ED9EBA1, 0x6ED9EBA1, 0x6ED9EBA1,
            0x6ED9EBA1, 0x6ED9EBA1, 0x6ED9EBA1, 0x6ED9EBA1, 0x6ED9EBA1,
            0x6ED9EBA1, 0x6ED9EBA1, 0x6ED9EBA1, 0x6ED9EBA1, 0x6ED9EBA1,

            0x8F1BBCDC, 0x8F1BBCDC, 0x8F1BBCDC, 0x8F1BBCDC, 0x8F1BBCDC,
            0x8F1BBCDC, 0x8F1BBCDC, 0x8F1BBCDC, 0x8F1BBCDC, 0x8F1BBCDC,
            0x8F1BBCDC, 0x8F1BBCDC, 0x8F1BBCDC, 0x8F1BBCDC, 0x8F1BBCDC,
            0x8F1BBCDC, 0x8F1BBCDC, 0x8F1BBCDC, 0x8F1BBCDC, 0x8F1BBCDC,

            0xCA62C1D6, 0xCA62C1D6, 0xCA62C1D6, 0xCA62C1D6, 0xCA62C1D6,
            0xCA62C1D6, 0xCA62C1D6, 0xCA62C1D6, 0xCA62C1D6, 0xCA62C1D6,
            0xCA62C1D6, 0xCA62C1D6, 0xCA62C1D6, 0xCA62C1D6, 0xCA62C1D6,
            0xCA62C1D6, 0xCA62C1D6, 0xCA62C1D6, 0xCA62C1D6, 0xCA62C1D6 };

        // working variables loaded from the incoming chaining state
        int a = hash[0];    //0x67452301 ;
        int b = hash[1];    //0xEFCDAB89 ;
        int c = hash[2];    //0x98BADCFE ;
        int d = hash[3];    //0x10325476 ;
        int e = hash[4];    //0xC3D2E1F0 ;

        // implementation constant and variables

        final int MASK = 0x0000000F;    // keeps schedule indices inside 0..15
        int temp;
        int s;
        int tmp;

        // computation defined in Secure Hash Standard
        for ( int t = 0 ; t < 80 ; t++ ) {

            s = t & MASK;
            if ( t >= 16) {
                // extend the schedule in place: W[s] = ROTL1(W[s+13] ^ W[s+8]
                // ^ W[s+2] ^ W[s]) with all indices taken mod 16
                tmp = bufW[ (s+13)&MASK ] ^ bufW[(s+8)&MASK ] ^ bufW[ (s+2)&MASK ] ^ bufW[s];
                bufW[s] = ( tmp<<1 ) | ( tmp>>>31 );
            }

            temp = ( a << 5 ) | ( a >>> 27 );    // ROTL5(a)

            // round function f(t) per FIPS 180-1
            if ( t < 20 ) {
                temp += ( b & c ) | ( (~b) & d ) ;
            } else if ( t < 40 ) {
                temp += b ^ c ^ d ;
            } else if ( t < 60 ) {
                temp += ( b & c ) | ( b & d ) | ( c & d ) ;
            } else {
                temp += b ^ c ^ d ;
            }

            temp += e + bufW[s] + K[t] ;

            e = d;
            d = c;
            c = ( b<<30 ) | ( b>>>2 ) ;    // ROTL30(b)
            b = a;
            a = temp;
        }

        // fold the working variables back into the chaining state
        hash[0] += a;
        hash[1] += b;
        hash[2] += c;
        hash[3] += d;
        hash[4] += e;
    }

    public static Test suite() {
        return new TestSuite(SHA1ImplTest.class);
    }

    public static void main(String[] args) {
        junit.textui.TestRunner.run(suite());
    }
}
/* * Copyright 2015-2017 Austin Keener & Michael Ritter * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.dv8tion.jda.client.entities.impl; import gnu.trove.map.TLongObjectMap; import net.dv8tion.jda.client.entities.Call; import net.dv8tion.jda.client.entities.Friend; import net.dv8tion.jda.client.entities.Group; import net.dv8tion.jda.client.entities.Relationship; import net.dv8tion.jda.client.managers.GroupManager; import net.dv8tion.jda.client.managers.GroupManagerUpdatable; import net.dv8tion.jda.core.JDA; import net.dv8tion.jda.core.entities.ChannelType; import net.dv8tion.jda.core.entities.User; import net.dv8tion.jda.core.entities.impl.JDAImpl; import net.dv8tion.jda.core.requests.RestAction; import net.dv8tion.jda.core.utils.MiscUtil; import java.util.ArrayList; import java.util.Collections; import java.util.List; public class GroupImpl implements Group { private final long id; private final JDAImpl api; private final TLongObjectMap<User> userMap = MiscUtil.newLongMap(); private Call currentCall; private User owner; private String name; private String iconId; private long lastMessageId; private volatile GroupManager manager; private volatile GroupManagerUpdatable managerUpdatable; private final Object mngLock = new Object(); public GroupImpl(long id, JDAImpl api) { this.id = id; this.api = api; } @Override public long getLatestMessageIdLong() { final long messageId = lastMessageId; if (messageId < 0) throw new IllegalStateException("No last message id 
found."); return messageId; } @Override public boolean hasLatestMessage() { return lastMessageId > 0; } @Override public String getName() { return name; } @Override public ChannelType getType() { return ChannelType.GROUP; } @Override public String getIconId() { return iconId; } @Override public String getIconUrl() { return iconId == null ? null : "https://cdn.discordapp.com/channel-icons/" + id + "/" + iconId + ".jpg"; } @Override public User getOwner() { return owner; } @Override public List<User> getUsers() { return Collections.unmodifiableList( new ArrayList<>( userMap.valueCollection())); } @Override public List<User> getNonFriendUsers() { List<User> nonFriends = new ArrayList<>(); TLongObjectMap<Relationship> map = ((JDAClientImpl) api.asClient()).getRelationshipMap(); userMap.forEachEntry((userId, user) -> { Relationship relationship = map.get(userId); Friend friend = relationship instanceof Friend ? (Friend) relationship : null; if (friend == null) nonFriends.add(user); return true; }); return Collections.unmodifiableList(nonFriends); } @Override public List<Friend> getFriends() { List<Friend> friends = new ArrayList<>(); TLongObjectMap<Relationship> map = ((JDAClientImpl) api.asClient()).getRelationshipMap(); userMap.forEachKey(userId -> { Relationship relationship = map.get(userId); Friend friend = relationship instanceof Friend ? 
(Friend) relationship : null; if (friend != null) friends.add(friend); return true; }); return Collections.unmodifiableList(friends); } @Override public RestAction<Call> startCall() { return null; } @Override public Call getCurrentCall() { return currentCall; } @Override public RestAction leaveGroup() { return null; } @Override public JDA getJDA() { return api; } @Override public String toString() { return String.format("G:%s(%d)", getName(), id); } @Override public boolean equals(Object o) { if (!(o instanceof GroupImpl)) return false; GroupImpl oGroup = (GroupImpl) o; return id == oGroup.id; } @Override public int hashCode() { return Long.hashCode(id); } public TLongObjectMap<User> getUserMap() { return userMap; } public GroupImpl setCurrentCall(Call call) { this.currentCall = call; return this; } public GroupImpl setOwner(User owner) { this.owner = owner; return this; } public GroupImpl setName(String name) { this.name = name; return this; } public GroupImpl setIconId(String iconId) { this.iconId = iconId; return this; } public GroupImpl setLastMessageId(long lastMessageId) { this.lastMessageId = lastMessageId; return this; } private void checkNull(Object obj, String name) { if (obj == null) throw new NullPointerException("Provided " + name + " was null!"); } @Override public long getIdLong() { return id; } @Override public GroupManager getManager() { GroupManager mng = manager; if (mng == null) { synchronized (mngLock) { mng = manager; if (mng == null) mng = manager = new GroupManager(this); } } return mng; } @Override public GroupManagerUpdatable getManagerUpdatable() { GroupManagerUpdatable mng = managerUpdatable; if (mng == null) { synchronized (mngLock) { mng = managerUpdatable; if (mng == null) mng = managerUpdatable = new GroupManagerUpdatable(this); } } return mng; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.harmony.tests.java.nio; import java.nio.BufferOverflowException; import java.nio.BufferUnderflowException; import java.nio.ByteOrder; import java.nio.IntBuffer; import java.nio.InvalidMarkException; /** * Tests java.nio.IntBuffer * */ public class IntBufferTest extends AbstractBufferTest { protected static final int SMALL_TEST_LENGTH = 5; protected static final int BUFFER_LENGTH = 20; protected IntBuffer buf; protected void setUp() throws Exception { buf = IntBuffer.allocate(BUFFER_LENGTH); loadTestData1(buf); baseBuf = buf; } protected void tearDown() throws Exception { buf = null; baseBuf = null; } public void testArray() { int array[] = buf.array(); assertContentEquals(buf, array, buf.arrayOffset(), buf.capacity()); loadTestData1(array, buf.arrayOffset(), buf.capacity()); assertContentEquals(buf, array, buf.arrayOffset(), buf.capacity()); loadTestData2(array, buf.arrayOffset(), buf.capacity()); assertContentEquals(buf, array, buf.arrayOffset(), buf.capacity()); loadTestData1(buf); assertContentEquals(buf, array, buf.arrayOffset(), buf.capacity()); loadTestData2(buf); assertContentEquals(buf, array, buf.arrayOffset(), buf.capacity()); } public void testArrayOffset() { int 
array[] = buf.array(); assertContentEquals(buf, array, buf.arrayOffset(), buf.capacity()); loadTestData1(array, buf.arrayOffset(), buf.capacity()); assertContentEquals(buf, array, buf.arrayOffset(), buf.capacity()); loadTestData2(array, buf.arrayOffset(), buf.capacity()); assertContentEquals(buf, array, buf.arrayOffset(), buf.capacity()); loadTestData1(buf); assertContentEquals(buf, array, buf.arrayOffset(), buf.capacity()); loadTestData2(buf); assertContentEquals(buf, array, buf.arrayOffset(), buf.capacity()); } public void testAsReadOnlyBuffer() { buf.clear(); buf.mark(); buf.position(buf.limit()); // readonly's contents should be the same as buf IntBuffer readonly = buf.asReadOnlyBuffer(); assertNotSame(buf, readonly); assertTrue(readonly.isReadOnly()); assertEquals(buf.position(), readonly.position()); assertEquals(buf.limit(), readonly.limit()); assertEquals(buf.isDirect(), readonly.isDirect()); assertEquals(buf.order(), readonly.order()); assertContentEquals(buf, readonly); // readonly's position, mark, and limit should be independent to buf readonly.reset(); assertEquals(readonly.position(), 0); readonly.clear(); assertEquals(buf.position(), buf.limit()); buf.reset(); assertEquals(buf.position(), 0); } public void testCompact() { // case: buffer is full buf.clear(); buf.mark(); loadTestData1(buf); IntBuffer ret = buf.compact(); assertSame(ret, buf); assertEquals(buf.position(), buf.capacity()); assertEquals(buf.limit(), buf.capacity()); assertContentLikeTestData1(buf, 0, 0, buf.capacity()); try { buf.reset(); fail("Should throw Exception"); //$NON-NLS-1$ } catch (InvalidMarkException e) { // expected } // case: buffer is empty buf.position(0); buf.limit(0); buf.mark(); ret = buf.compact(); assertSame(ret, buf); assertEquals(buf.position(), 0); assertEquals(buf.limit(), buf.capacity()); assertContentLikeTestData1(buf, 0, 0, buf.capacity()); try { buf.reset(); fail("Should throw Exception"); //$NON-NLS-1$ } catch (InvalidMarkException e) { // expected } // 
case: normal assertTrue(buf.capacity() > 5); buf.position(1); buf.limit(5); buf.mark(); ret = buf.compact(); assertSame(ret, buf); assertEquals(buf.position(), 4); assertEquals(buf.limit(), buf.capacity()); assertContentLikeTestData1(buf, 0, 1, 4); try { buf.reset(); fail("Should throw Exception"); //$NON-NLS-1$ } catch (InvalidMarkException e) { // expected } } public void testCompareTo() { // compare to self assertEquals(0, buf.compareTo(buf)); // normal cases assertTrue(buf.capacity() > 5); buf.clear(); IntBuffer other = IntBuffer.allocate(buf.capacity()); loadTestData1(other); assertEquals(0, buf.compareTo(other)); assertEquals(0, other.compareTo(buf)); buf.position(1); assertTrue(buf.compareTo(other) > 0); assertTrue(other.compareTo(buf) < 0); other.position(2); assertTrue(buf.compareTo(other) < 0); assertTrue(other.compareTo(buf) > 0); buf.position(2); other.limit(5); assertTrue(buf.compareTo(other) > 0); assertTrue(other.compareTo(buf) < 0); } public void testDuplicate() { buf.clear(); buf.mark(); buf.position(buf.limit()); // duplicate's contents should be the same as buf IntBuffer duplicate = buf.duplicate(); assertNotSame(buf, duplicate); assertEquals(buf.position(), duplicate.position()); assertEquals(buf.limit(), duplicate.limit()); assertEquals(buf.isReadOnly(), duplicate.isReadOnly()); assertEquals(buf.isDirect(), duplicate.isDirect()); assertEquals(buf.order(), duplicate.order()); assertContentEquals(buf, duplicate); // duplicate's position, mark, and limit should be independent to buf duplicate.reset(); assertEquals(duplicate.position(), 0); duplicate.clear(); assertEquals(buf.position(), buf.limit()); buf.reset(); assertEquals(buf.position(), 0); // duplicate share the same content with buf if (!duplicate.isReadOnly()) { loadTestData1(buf); assertContentEquals(buf, duplicate); loadTestData2(duplicate); assertContentEquals(buf, duplicate); } } public void testEquals() { // equal to self assertTrue(buf.equals(buf)); IntBuffer readonly = 
buf.asReadOnlyBuffer(); assertTrue(buf.equals(readonly)); IntBuffer duplicate = buf.duplicate(); assertTrue(buf.equals(duplicate)); // always false, if type mismatch assertFalse(buf.equals(Boolean.TRUE)); assertTrue(buf.capacity() > 5); buf.limit(buf.capacity()).position(0); readonly.limit(readonly.capacity()).position(1); assertFalse(buf.equals(readonly)); buf.limit(buf.capacity() - 1).position(0); duplicate.limit(duplicate.capacity()).position(0); assertFalse(buf.equals(duplicate)); } /* * Class under test for int get() */ public void testGet() { buf.clear(); for (int i = 0; i < buf.capacity(); i++) { assertEquals(buf.position(), i); assertEquals(buf.get(), buf.get(i)); } try { buf.get(); fail("Should throw Exception"); //$NON-NLS-1$ } catch (BufferUnderflowException e) { // expected } } /* * Class under test for java.nio.IntBuffer get(int[]) */ public void testGetintArray() { int array[] = new int[1]; buf.clear(); for (int i = 0; i < buf.capacity(); i++) { assertEquals(buf.position(), i); IntBuffer ret = buf.get(array); assertEquals(array[0], buf.get(i)); assertSame(ret, buf); } try { buf.get(array); fail("Should throw Exception"); //$NON-NLS-1$ } catch (BufferUnderflowException e) { // expected } try { buf.get((int[])null); fail("Should throw NPE"); //$NON-NLS-1$ } catch (NullPointerException e) { // expected } } /* * Class under test for java.nio.IntBuffer get(int[], int, int) */ public void testGetintArrayintint() { buf.clear(); int array[] = new int[buf.capacity()]; try { buf.get(new int[buf.capacity() + 1], 0, buf.capacity() + 1); fail("Should throw Exception"); //$NON-NLS-1$ } catch (BufferUnderflowException e) { // expected } assertEquals(buf.position(), 0); try { buf.get(array, -1, array.length); fail("Should throw Exception"); //$NON-NLS-1$ } catch (IndexOutOfBoundsException e) { // expected } buf.get(array, array.length, 0); try { buf.get(array, array.length + 1, 1); fail("Should throw Exception"); //$NON-NLS-1$ } catch (IndexOutOfBoundsException e) { 
// expected } assertEquals(buf.position(), 0); try { buf.get(array, 2, -1); fail("Should throw Exception"); //$NON-NLS-1$ } catch (IndexOutOfBoundsException e) { // expected } try { buf.get((int[])null, 2, -1); fail("Should throw Exception"); //$NON-NLS-1$ } catch (NullPointerException e) { // expected } try { buf.get(array, 2, array.length); fail("Should throw Exception"); //$NON-NLS-1$ } catch (IndexOutOfBoundsException e) { // expected } try { buf.get(array, 1, Integer.MAX_VALUE); fail("Should throw Exception"); //$NON-NLS-1$ } catch (BufferUnderflowException expected) { } catch (IndexOutOfBoundsException expected) { } try { buf.get(array, Integer.MAX_VALUE, 1); fail("Should throw Exception"); //$NON-NLS-1$ } catch (IndexOutOfBoundsException e) { // expected } assertEquals(buf.position(), 0); buf.clear(); IntBuffer ret = buf.get(array, 0, array.length); assertEquals(buf.position(), buf.capacity()); assertContentEquals(buf, array, 0, array.length); assertSame(ret, buf); } /* * Class under test for int get(int) */ public void testGetint() { buf.clear(); for (int i = 0; i < buf.capacity(); i++) { assertEquals(buf.position(), i); assertEquals(buf.get(), buf.get(i)); } try { buf.get(-1); fail("Should throw Exception"); //$NON-NLS-1$ } catch (IndexOutOfBoundsException e) { // expected } try { buf.get(buf.limit()); fail("Should throw Exception"); //$NON-NLS-1$ } catch (IndexOutOfBoundsException e) { // expected } } public void testHasArray() { assertNotNull(buf.array()); } public void testHashCode() { buf.clear(); IntBuffer readonly = buf.asReadOnlyBuffer(); IntBuffer duplicate = buf.duplicate(); assertTrue(buf.hashCode() == readonly.hashCode()); assertTrue(buf.capacity() > 5); duplicate.position(buf.capacity() / 2); assertTrue(buf.hashCode() != duplicate.hashCode()); } public void testIsDirect() { assertFalse(buf.isDirect()); } public void testOrder() { buf.order(); assertEquals(ByteOrder.nativeOrder(), buf.order()); } /* * Class under test for java.nio.IntBuffer 
put(int) */ public void testPutint() { buf.clear(); for (int i = 0; i < buf.capacity(); i++) { assertEquals(buf.position(), i); IntBuffer ret = buf.put((int) i); assertEquals(buf.get(i), (int) i); assertSame(ret, buf); } try { buf.put(0); fail("Should throw Exception"); //$NON-NLS-1$ } catch (BufferOverflowException e) { // expected } } /* * Class under test for java.nio.IntBuffer put(int[]) */ public void testPutintArray() { int array[] = new int[1]; buf.clear(); for (int i = 0; i < buf.capacity(); i++) { assertEquals(buf.position(), i); array[0] = (int) i; IntBuffer ret = buf.put(array); assertEquals(buf.get(i), (int) i); assertSame(ret, buf); } try { buf.put(array); fail("Should throw Exception"); //$NON-NLS-1$ } catch (BufferOverflowException e) { // expected } try { buf.position(buf.limit()); buf.put((int[])null); fail("Should throw Exception"); //$NON-NLS-1$ } catch (NullPointerException e) { // expected } } /* * Class under test for java.nio.IntBuffer put(int[], int, int) */ public void testPutintArrayintint() { buf.clear(); int array[] = new int[buf.capacity()]; try { buf.put(new int[buf.capacity() + 1], 0, buf.capacity() + 1); fail("Should throw Exception"); //$NON-NLS-1$ } catch (BufferOverflowException e) { // expected } assertEquals(buf.position(), 0); try { buf.put(array, -1, array.length); fail("Should throw Exception"); //$NON-NLS-1$ } catch (IndexOutOfBoundsException e) { // expected } try { buf.put(array, array.length + 1, 0); fail("Should throw Exception"); //$NON-NLS-1$ } catch (IndexOutOfBoundsException e) { // expected } buf.put(array, array.length, 0); assertEquals(buf.position(), 0); try { buf.put(array, 0, -1); fail("Should throw Exception"); //$NON-NLS-1$ } catch (IndexOutOfBoundsException e) { // expected } try { buf.put((int[])null, 0, -1); fail("Should throw Exception"); //$NON-NLS-1$ } catch (NullPointerException e) { // expected } try { buf.put(array, 2, array.length); fail("Should throw Exception"); //$NON-NLS-1$ } catch 
(IndexOutOfBoundsException e) { // expected } try { buf.put(array, Integer.MAX_VALUE, 1); fail("Should throw Exception"); //$NON-NLS-1$ } catch (IndexOutOfBoundsException e) { // expected } try { buf.put(array, 1, Integer.MAX_VALUE); fail("Should throw Exception"); //$NON-NLS-1$ } catch (BufferOverflowException expected) { } catch (IndexOutOfBoundsException expected) { } assertEquals(buf.position(), 0); loadTestData2(array, 0, array.length); IntBuffer ret = buf.put(array, 0, array.length); assertEquals(buf.position(), buf.capacity()); assertContentEquals(buf, array, 0, array.length); assertSame(ret, buf); } /* * Class under test for java.nio.IntBuffer put(java.nio.IntBuffer) */ public void testPutIntBuffer() { IntBuffer other = IntBuffer.allocate(buf.capacity()); try { buf.put(buf); fail("Should throw Exception"); //$NON-NLS-1$ } catch (IllegalArgumentException e) { // expected } try { buf.put(IntBuffer.allocate(buf.capacity() + 1)); fail("Should throw Exception"); //$NON-NLS-1$ } catch (BufferOverflowException e) { // expected } try { buf.flip(); buf.put((IntBuffer)null); fail("Should throw Exception"); //$NON-NLS-1$ } catch (NullPointerException e) { // expected } loadTestData2(other); other.clear(); buf.clear(); IntBuffer ret = buf.put(other); assertEquals(other.position(), other.capacity()); assertEquals(buf.position(), buf.capacity()); assertContentEquals(other, buf); assertSame(ret, buf); } /* * Class under test for java.nio.IntBuffer put(int, int) */ public void testPutintint() { buf.clear(); for (int i = 0; i < buf.capacity(); i++) { assertEquals(buf.position(), 0); IntBuffer ret = buf.put(i, (int) i); assertEquals(buf.get(i), (int) i); assertSame(ret, buf); } try { buf.put(-1, 0); fail("Should throw Exception"); //$NON-NLS-1$ } catch (IndexOutOfBoundsException e) { // expected } try { buf.put(buf.limit(), 0); fail("Should throw Exception"); //$NON-NLS-1$ } catch (IndexOutOfBoundsException e) { // expected } } public void testSlice() { 
assertTrue(buf.capacity() > 5);
        buf.position(1);
        buf.limit(buf.capacity() - 1);

        IntBuffer slice = buf.slice();
        assertEquals(buf.isReadOnly(), slice.isReadOnly());
        assertEquals(buf.isDirect(), slice.isDirect());
        assertEquals(buf.order(), slice.order());
        assertEquals(slice.position(), 0);
        assertEquals(slice.limit(), buf.remaining());
        assertEquals(slice.capacity(), buf.remaining());
        // a fresh slice carries no mark
        try {
            slice.reset();
            fail("Should throw Exception"); //$NON-NLS-1$
        } catch (InvalidMarkException e) {
            // expected
        }

        // slice shares the same content with buf
        if (!slice.isReadOnly()) {
            loadTestData1(slice);
            assertContentLikeTestData1(buf, 1, 0, slice.capacity());
            buf.put(2, 500);
            assertEquals(slice.get(1), 500);
        }
    }

    public void testToString() {
        String str = buf.toString();
        assertTrue(str.indexOf("Int") >= 0 || str.indexOf("int") >= 0);
        assertTrue(str.indexOf("" + buf.position()) >= 0);
        assertTrue(str.indexOf("" + buf.limit()) >= 0);
        assertTrue(str.indexOf("" + buf.capacity()) >= 0);
    }

    // Fills array[offset .. offset+length-1] with ascending values 0, 1, 2, ...
    void loadTestData1(int array[], int offset, int length) {
        for (int i = 0; i < length; i++) {
            array[offset + i] = i;
        }
    }

    // Fills array[offset .. offset+length-1] with descending values length, length-1, ...
    void loadTestData2(int array[], int offset, int length) {
        for (int i = 0; i < length; i++) {
            array[offset + i] = length - i;
        }
    }

    // Fills the whole buffer with ascending values 0, 1, 2, ... via absolute puts.
    void loadTestData1(IntBuffer buf) {
        buf.clear();
        for (int i = 0; i < buf.capacity(); i++) {
            buf.put(i, i);
        }
    }

    // Fills the whole buffer with descending values capacity, capacity-1, ...
    void loadTestData2(IntBuffer buf) {
        buf.clear();
        for (int i = 0; i < buf.capacity(); i++) {
            buf.put(i, buf.capacity() - i);
        }
    }

    // Asserts that buf[0..length-1] matches array[offset..offset+length-1].
    void assertContentEquals(IntBuffer buf, int array[], int offset, int length) {
        for (int i = 0; i < length; i++) {
            assertEquals(buf.get(i), array[offset + i]);
        }
    }

    // Asserts that two buffers have identical capacity and element content.
    void assertContentEquals(IntBuffer buf, IntBuffer other) {
        assertEquals(buf.capacity(), other.capacity());
        for (int i = 0; i < buf.capacity(); i++) {
            assertEquals(buf.get(i), other.get(i));
        }
    }

    // Asserts that buf[startIndex..] holds startValue, startValue+1, ... for length elements.
    void assertContentLikeTestData1(IntBuffer buf, int startIndex, int startValue, int length) {
        int value = startValue;
        for (int i = 0; i < length; i++) {
            assertEquals(buf.get(startIndex + i), value);
            value = value + 1;
        }
    }
}