gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/**
 * Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.financial.analytics.volatility.surface;

import it.unimi.dsi.fastutil.doubles.DoubleArrayList;

import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.threeten.bp.LocalDate;
import org.threeten.bp.ZonedDateTime;

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.opengamma.OpenGammaRuntimeException;
import com.opengamma.analytics.financial.equity.variance.pricing.AffineDividends;
import com.opengamma.analytics.financial.model.interestrate.curve.ForwardCurve;
import com.opengamma.analytics.financial.model.interestrate.curve.ForwardCurveAffineDividends;
import com.opengamma.analytics.financial.model.interestrate.curve.YieldCurve;
import com.opengamma.analytics.financial.model.option.pricing.analytic.BjerksundStenslandModel;
import com.opengamma.analytics.financial.model.volatility.BlackFormulaRepository;
import com.opengamma.analytics.util.time.TimeCalculator;
import com.opengamma.core.id.ExternalSchemes;
import com.opengamma.core.marketdatasnapshot.VolatilitySurfaceData;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.ComputationTargetSpecification;
import com.opengamma.engine.function.AbstractFunction;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.FunctionExecutionContext;
import com.opengamma.engine.function.FunctionInputs;
import com.opengamma.engine.target.ComputationTargetType;
import com.opengamma.engine.value.ComputedValue;
import com.opengamma.engine.value.SurfaceAndCubePropertyNames;
import com.opengamma.engine.value.ValueProperties;
import com.opengamma.engine.value.ValuePropertyNames;
import com.opengamma.engine.value.ValueRequirement;
import com.opengamma.engine.value.ValueRequirementNames;
import com.opengamma.engine.value.ValueSpecification;
import com.opengamma.financial.analytics.model.FutureOptionExpiries;
import com.opengamma.financial.analytics.model.InstrumentTypeProperties;
import com.opengamma.financial.analytics.model.curve.forward.ForwardCurveValuePropertyNames;
import com.opengamma.financial.analytics.model.equity.EquitySecurityUtils;
import com.opengamma.financial.security.option.AmericanExerciseType;
import com.opengamma.id.ExternalId;
import com.opengamma.id.ExternalIdentifiable;
import com.opengamma.id.ExternalScheme;
import com.opengamma.util.money.Currency;
import com.opengamma.util.tuple.Pair;
import com.opengamma.util.tuple.Pairs;

/**
 * Engine function that converts a raw equity-option {@link VolatilitySurfaceData} (sparse, with
 * expiries expressed as option numbers or dates and quotes expressed either as percentage
 * volatilities or as option prices) into a "standard" {@code VolatilitySurfaceData<Double, Double>}
 * keyed by (time-to-expiry in years, strike) with decimal volatilities (35% -> 0.35).
 * <p>
 * When the raw surface is quoted in prices, volatilities are implied via Black (European exercise)
 * or the Bjerksund-Stensland approximation (American exercise), which requires a discount curve and
 * a forward curve as additional inputs.
 */
public class EquityOptionVolatilitySurfaceDataFunction extends AbstractFunction.NonCompiledInvoker {
  /** The logger */
  private static final Logger s_logger = LoggerFactory.getLogger(EquityOptionVolatilitySurfaceDataFunction.class);
  /** The supported schemes */
  private static final Set<ExternalScheme> s_validSchemes =
      ImmutableSet.of(ExternalSchemes.BLOOMBERG_TICKER, ExternalSchemes.BLOOMBERG_TICKER_WEAK, ExternalSchemes.ACTIVFEED_TICKER);
  /** Source of volatility surface specifications, resolved from config at init time. */
  private ConfigDBVolatilitySurfaceSpecificationSource _volatilitySurfaceSpecificationSource;

  @Override
  public void init(final FunctionCompilationContext context) {
    // Resolve the config-DB-backed specification source once per compilation context.
    _volatilitySurfaceSpecificationSource = ConfigDBVolatilitySurfaceSpecificationSource.init(context, this);
  }

  @Override
  /**
   * {@inheritDoc} <p>
   * INPUT: We are taking a VolatilitySurfaceData object, which contains all number of missing data, plus strikes and vols are in percentages <p>
   * OUTPUT: and converting this into a StandardVolatilitySurfaceData object, which has no empty values, expiry is in years, and the strike and vol scale is without unit (35% -> 0.35)
   */
  public Set<ComputedValue> execute(final FunctionExecutionContext executionContext, final FunctionInputs inputs, final ComputationTarget target,
      final Set<ValueRequirement> desiredValues) {
    // Valuation date drives the expiry -> year-fraction conversion below.
    final ZonedDateTime valTime = ZonedDateTime.now(executionContext.getValuationClock());
    final LocalDate valDate = valTime.toLocalDate();
    // Exactly one desired value is expected; its constraints are propagated to the output spec.
    final ValueRequirement desiredValue = Iterables.getOnlyElement(desiredValues);
    final Object specificationObject = inputs.getValue(ValueRequirementNames.VOLATILITY_SURFACE_SPEC);
    if (specificationObject == null) {
      throw new OpenGammaRuntimeException("Could not get volatility surface specification");
    }
    final VolatilitySurfaceSpecification specification = (VolatilitySurfaceSpecification) specificationObject;
    // Quote units decide the conversion path: direct vol scaling vs. implying vol from prices.
    final String surfaceQuoteUnits = specification.getQuoteUnits();
    // Get the volatility surface data object
    final Object rawSurfaceObject = inputs.getValue(ValueRequirementNames.VOLATILITY_SURFACE_DATA);
    if (rawSurfaceObject == null) {
      throw new OpenGammaRuntimeException("Could not get volatility surface");
    }
    @SuppressWarnings("unchecked")
    final VolatilitySurfaceData<Object, Object> rawSurface = (VolatilitySurfaceData<Object, Object>) rawSurfaceObject;
    final VolatilitySurfaceData<Double, Double> stdVolSurface;
    if (surfaceQuoteUnits.equals(SurfaceAndCubePropertyNames.VOLATILITY_QUOTE)) {
      stdVolSurface = getSurfaceFromVolatilityQuote(valDate, rawSurface);
    } else if (surfaceQuoteUnits.equals(SurfaceAndCubePropertyNames.PRICE_QUOTE)) {
      // Price quotes need both curves to imply volatilities.
      // Get the discount curve
      final Object discountCurveObject = inputs.getValue(ValueRequirementNames.YIELD_CURVE);
      if (discountCurveObject == null) {
        throw new OpenGammaRuntimeException("Could not get discount curve");
      }
      final YieldCurve discountCurve = (YieldCurve) discountCurveObject;
      // Get the forward curve
      final Object forwardCurveObject = inputs.getValue(ValueRequirementNames.FORWARD_CURVE);
      if (forwardCurveObject == null) {
        throw new OpenGammaRuntimeException("Could not get forward curve");
      }
      final ForwardCurve forwardCurve = (ForwardCurve) forwardCurveObject;
      stdVolSurface = getSurfaceFromPriceQuote(valDate, rawSurface, forwardCurve, discountCurve, specification);
    } else {
      throw new OpenGammaRuntimeException("Cannot handle quote units " + surfaceQuoteUnits);
    }
    // Return: echo the requirement's constraints, stamping this function's id on the result.
    final ValueProperties constraints = desiredValue.getConstraints().copy().with(ValuePropertyNames.FUNCTION, getUniqueId()).get();
    final ValueSpecification stdVolSpec =
        new ValueSpecification(ValueRequirementNames.STANDARD_VOLATILITY_SURFACE_DATA, target.toSpecification(), constraints);
    return Collections.singleton(new ComputedValue(stdVolSpec, stdVolSurface));
  }

  @Override
  public ComputationTargetType getTargetType() {
    return ComputationTargetType.PRIMITIVE; // Bloomberg ticker, weak ticker or Activ ticker
  }

  @Override
  public boolean canApplyTo(final FunctionCompilationContext context, final ComputationTarget target) {
    // Applies only to targets carrying an external id in one of the supported ticker schemes.
    if (target.getValue() instanceof ExternalIdentifiable) {
      final ExternalId identifier = ((ExternalIdentifiable) target.getValue()).getExternalId();
      return s_validSchemes.contains(identifier.getScheme());
    }
    return false;
  }

  @Override
  public Set<ValueSpecification> getResults(final FunctionCompilationContext context, final ComputationTarget target) {
    // All-properties wildcard: concrete properties are narrowed in the inputs-aware overload below.
    final ValueProperties properties = ValueProperties.all();
    final ValueSpecification spec =
        new ValueSpecification(ValueRequirementNames.STANDARD_VOLATILITY_SURFACE_DATA, target.toSpecification(), properties);
    return Collections.singleton(spec);
  }

  @Override
  public Set<ValueRequirement> getRequirements(final FunctionCompilationContext context, final ComputationTarget target, final ValueRequirement desiredValue) {
    // Function requires a VolatilitySurfaceData
    // Build the surface name, in two parts: the given name and the target
    final ValueProperties constraints = desiredValue.getConstraints();
    final String instrumentType = constraints.getStrictValue(InstrumentTypeProperties.PROPERTY_SURFACE_INSTRUMENT_TYPE);
    if (instrumentType != null) {
      if (!InstrumentTypeProperties.EQUITY_OPTION.equals(instrumentType)) {
        return null; // returning null tells the engine this function cannot satisfy the requirement
      }
    }
    final String givenName = constraints.getStrictValue(ValuePropertyNames.SURFACE);
    if (givenName == null) {
      return null;
    }
    // Full specification name is e.g. "<surfaceName>_<trimmedTicker>".
    final String fullName = givenName + "_" + EquitySecurityUtils.getTrimmedTarget(((ExternalIdentifiable) target.getValue()).getExternalId());
    final VolatilitySurfaceSpecification specification = _volatilitySurfaceSpecificationSource.getSpecification(fullName, InstrumentTypeProperties.EQUITY_OPTION,
        context.getComputationTargetResolver().getVersionCorrection());
    if (specification == null) {
      s_logger.error("Could not get volatility surface specification with name " + fullName);
      return null;
    }
    // Build the ValueRequirements' constraints
    final String quoteUnits = specification.getQuoteUnits();
    final ValueProperties properties = ValueProperties.builder().with(ValuePropertyNames.SURFACE, givenName)
        .with(InstrumentTypeProperties.PROPERTY_SURFACE_INSTRUMENT_TYPE, InstrumentTypeProperties.EQUITY_OPTION)
        .with(SurfaceAndCubePropertyNames.PROPERTY_SURFACE_QUOTE_TYPE, specification.getSurfaceQuoteType())
        .with(SurfaceAndCubePropertyNames.PROPERTY_SURFACE_UNITS, quoteUnits).get();
    final ValueRequirement surfaceReq = new ValueRequirement(ValueRequirementNames.VOLATILITY_SURFACE_DATA, target.toSpecification(), properties);
    final ValueRequirement specificationReq = new ValueRequirement(ValueRequirementNames.VOLATILITY_SURFACE_SPEC, target.toSpecification(), properties);
    final Set<ValueRequirement> requirements = new HashSet<>();
    requirements.add(surfaceReq);
    requirements.add(specificationReq);
    if (quoteUnits.equals(SurfaceAndCubePropertyNames.PRICE_QUOTE)) {
      // We require forward and discount curves to imply the volatility
      // DiscountCurve
      final String discountingCurveName = constraints.getStrictValue(ValuePropertyNames.DISCOUNTING_CURVE_NAME);
      if (discountingCurveName == null) {
        return null;
      }
      final String curveCalculationConfig = constraints.getStrictValue(ValuePropertyNames.CURVE_CALCULATION_CONFIG);
      if (curveCalculationConfig == null) {
        return null;
      }
      final String ccyCode = constraints.getStrictValue(ValuePropertyNames.CURVE_CURRENCY);
      if (ccyCode == null) {
        return null;
      }
      final Currency ccy = Currency.of(ccyCode);
      final ValueProperties fundingProperties = ValueProperties.builder().with(ValuePropertyNames.CURVE, discountingCurveName)
          .with(ValuePropertyNames.CURVE_CALCULATION_CONFIG, curveCalculationConfig).get();
      // Discount curve is requested against the currency, not the ticker target.
      final ValueRequirement discountCurveRequirement =
          new ValueRequirement(ValueRequirementNames.YIELD_CURVE, ComputationTargetSpecification.of(ccy), fundingProperties);
      requirements.add(discountCurveRequirement);
      // ForwardCurve
      final String forwardCurveName = constraints.getStrictValue(ValuePropertyNames.FORWARD_CURVE_NAME);
      if (forwardCurveName == null) {
        return null;
      }
      final String curveCalculationMethod = constraints.getStrictValue(ForwardCurveValuePropertyNames.PROPERTY_FORWARD_CURVE_CALCULATION_METHOD);
      if (curveCalculationMethod == null) {
        return null;
      }
      final ValueProperties curveProperties = ValueProperties.builder().with(ValuePropertyNames.CURVE, forwardCurveName)
          .with(ForwardCurveValuePropertyNames.PROPERTY_FORWARD_CURVE_CALCULATION_METHOD, curveCalculationMethod).get();
      final ValueRequirement forwardCurveRequirement =
          new ValueRequirement(ValueRequirementNames.FORWARD_CURVE, target.toSpecification(), curveProperties);
      requirements.add(forwardCurveRequirement);
    }
    return requirements;
  }

  @Override
  public Set<ValueSpecification> getResults(final FunctionCompilationContext context, final ComputationTarget target,
      final Map<ValueSpecification, ValueRequirement> inputs) {
    // Narrow the wildcard result: copy surface / curve properties off the resolved inputs.
    final ValueProperties.Builder properties =
        createValueProperties().with(InstrumentTypeProperties.PROPERTY_SURFACE_INSTRUMENT_TYPE, InstrumentTypeProperties.EQUITY_OPTION);
    boolean surfaceNameSet = false;
    for (final Map.Entry<ValueSpecification, ValueRequirement> entry : inputs.entrySet()) {
      final ValueSpecification key = entry.getKey();
      if (key.getValueName().equals(ValueRequirementNames.VOLATILITY_SURFACE_DATA)) {
        properties.with(ValuePropertyNames.SURFACE, key.getProperty(ValuePropertyNames.SURFACE));
        surfaceNameSet = true;
      } else if (key.getValueName().equals(ValueRequirementNames.FORWARD_CURVE)) {
        // !!! TODO: ONCE DEFAULTS ARE FLOWING THROUGH, extractInputProperties AS IN EquityOptionFunction !!!
        // final ValueProperties curveProperties = key.getProperties().copy()
        // .withoutAny(ValuePropertyNames.FUNCTION)
        // .get();
        // for (final String property : curveProperties.getProperties()) {
        // properties.with(property, curveProperties.getValues(property));
        // }
        properties.with(ValuePropertyNames.FORWARD_CURVE_NAME, key.getProperty(ValuePropertyNames.CURVE));
        properties.with(ForwardCurveValuePropertyNames.PROPERTY_FORWARD_CURVE_CALCULATION_METHOD,
            key.getProperty(ForwardCurveValuePropertyNames.PROPERTY_FORWARD_CURVE_CALCULATION_METHOD));
      } else if (key.getValueName().equals(ValueRequirementNames.YIELD_CURVE)) {
        properties.with(ValuePropertyNames.DISCOUNTING_CURVE_NAME, key.getProperty(ValuePropertyNames.CURVE));
        // Currency is recovered from the yield curve input's target (see getRequirements above).
        properties.with(ValuePropertyNames.CURVE_CURRENCY, key.getTargetSpecification().getUniqueId().getValue());
        properties.with(ValuePropertyNames.CURVE_CALCULATION_CONFIG, key.getProperty(ValuePropertyNames.CURVE_CALCULATION_CONFIG));
      }
    }
    // NOTE(review): `assert` is a no-op unless -ea is set; if a missing surface input must be fatal
    // in production, an explicit check/throw would be safer — confirm intended engine contract.
    assert surfaceNameSet;
    return Collections.singleton(
        new ValueSpecification(ValueRequirementNames.STANDARD_VOLATILITY_SURFACE_DATA, target.toSpecification(), properties.get()));
  }

  /**
   * Converts a percentage-vol-quoted raw surface into the standard form: drops missing points,
   * converts expiries to year fractions and rescales vols from percent to decimal (divide by 100).
   *
   * @param valDate the valuation date used to compute time-to-expiry
   * @param rawSurface the raw surface (x: option number or expiry date; y: strike; z: vol in %)
   * @return the standard (t, k) -> vol surface
   */
  private static VolatilitySurfaceData<Double, Double> getSurfaceFromVolatilityQuote(final LocalDate valDate,
      final VolatilitySurfaceData<Object, Object> rawSurface) {
    // Remove empties, convert expiries from number to years, and scale vols
    final Map<Pair<Double, Double>, Double> volValues = new HashMap<>();
    final DoubleArrayList tList = new DoubleArrayList();
    final DoubleArrayList kList = new DoubleArrayList();
    final Object[] xs = rawSurface.getXs();
    for (final Object x : xs) {
      Double t;
      if (x instanceof Number) {
        // x is the n-th listed option; translate to a year fraction via the expiry calculator.
        t = FutureOptionExpiries.EQUITY.getFutureOptionTtm(((Number) x).intValue(), valDate);
      } else if (x instanceof LocalDate) {
        // NOTE(review): arguments are (expiry, valDate); confirm TimeCalculator's convention gives a
        // positive time-to-expiry here — the t > 5/365 filter below assumes t is positive.
        t = TimeCalculator.getTimeBetween((LocalDate) x, valDate);
      } else {
        // NOTE(review): "Cannot not handle" reads as a double negative — likely "Cannot handle".
        throw new OpenGammaRuntimeException("Cannot not handle surfaces with x-axis type " + x.getClass());
      }
      if (t > 5. / 365.) { // Bootstrapping vol surface to this data causes far more trouble than any gain. The data simply isn't reliable.
        final Double[] ysAsDoubles = getYs(rawSurface.getYs());
        for (final Double strike : ysAsDoubles) {
          final Double vol = rawSurface.getVolatility(x, strike);
          if (vol != null) { // skip missing quotes
            tList.add(t);
            kList.add(strike);
            volValues.put(Pairs.of(t, strike), vol / 100.); // percent -> decimal
          }
        }
      }
    }
    final VolatilitySurfaceData<Double, Double> stdVolSurface = new VolatilitySurfaceData<>(rawSurface.getDefinitionName(),
        rawSurface.getSpecificationName(), rawSurface.getTarget(), tList.toArray(new Double[0]), kList.toArray(new Double[0]), volValues);
    return stdVolSurface;
  }

  /**
   * Converts a price-quoted raw surface into the standard form by implying volatilities: Black for
   * European exercise, Bjerksund-Stensland for American exercise (with a spot adjustment for
   * affine dividends where the forward curve provides them). Points where implication fails
   * (e.g. arbitrageable prices) are logged and skipped rather than failing the whole surface.
   *
   * @param valDate the valuation date used to compute time-to-expiry
   * @param rawSurface the raw surface (x: option number or expiry date; y: strike; z: price)
   * @param forwardCurve the forward curve for the underlying
   * @param discountCurve the discount curve
   * @param specification the surface specification (quote type, exercise type, instrument provider)
   * @return the standard (t, k) -> vol surface
   */
  @SuppressWarnings("deprecation")
  private static VolatilitySurfaceData<Double, Double> getSurfaceFromPriceQuote(final LocalDate valDate,
      final VolatilitySurfaceData<Object, Object> rawSurface, final ForwardCurve forwardCurve, final YieldCurve discountCurve,
      final VolatilitySurfaceSpecification specification) {
    // quote type: determines whether each quote is a call price, a put price, or call-above-strike.
    final String surfaceQuoteType = specification.getSurfaceQuoteType();
    double callAboveStrike = 0;
    boolean optionIsCall = true;
    boolean quoteTypeIsCallPutStrike = false;
    if (surfaceQuoteType.equals(SurfaceAndCubeQuoteType.CALL_STRIKE)) {
      optionIsCall = true;
    } else if (surfaceQuoteType.equals(SurfaceAndCubeQuoteType.PUT_STRIKE)) {
      optionIsCall = false;
    } else if (surfaceQuoteType.equals(SurfaceAndCubeQuoteType.CALL_AND_PUT_STRIKE)) {
      // Quotes are calls above this strike and puts at or below it; decided per-strike in the loop.
      callAboveStrike = ((CallPutSurfaceInstrumentProvider<?, ?>) specification.getSurfaceInstrumentProvider()).useCallAboveStrike();
      quoteTypeIsCallPutStrike = true;
    } else {
      throw new OpenGammaRuntimeException("Cannot handle surface quote type " + surfaceQuoteType);
    }
    // exercise type
    final boolean isAmerican = specification.getExerciseType() instanceof AmericanExerciseType;
    BjerksundStenslandModel americanModel = null;
    final double spot = forwardCurve.getSpot();
    if (isAmerican) {
      americanModel = new BjerksundStenslandModel();
    }
    // Main loop: Remove empties, convert expiries from number to years, and imply vols
    final Map<Pair<Double, Double>, Double> volValues = new HashMap<>();
    final DoubleArrayList tList = new DoubleArrayList();
    final DoubleArrayList kList = new DoubleArrayList();
    final Object[] xs = rawSurface.getXs();
    for (final Object x : xs) {
      Double t;
      if (x instanceof Number) {
        t = FutureOptionExpiries.EQUITY.getFutureOptionTtm(((Number) x).intValue(), valDate);
      } else if (x instanceof LocalDate) {
        // NOTE(review): same (expiry, valDate) argument order as in getSurfaceFromVolatilityQuote —
        // confirm this yields a positive year fraction under TimeCalculator's convention.
        t = TimeCalculator.getTimeBetween((LocalDate) x, valDate);
      } else {
        // NOTE(review): "Cannot not handle" reads as a double negative — likely "Cannot handle".
        throw new OpenGammaRuntimeException("Cannot not handle surfaces with x-axis type " + x.getClass());
      }
      final double forward = forwardCurve.getForward(t);
      final double zerobond = discountCurve.getDiscountFactor(t);
      final Double[] ysAsDoubles = getYs(rawSurface.getYs());
      for (final Double strike : ysAsDoubles) {
        final Double price = rawSurface.getVolatility(x, strike); // "volatility" slot holds the price quote here
        if (price != null) {
          try {
            if (quoteTypeIsCallPutStrike) {
              optionIsCall = strike > callAboveStrike ? true : false;
            }
            final double vol;
            if (isAmerican) {
              double modSpot = spot;
              // Continuously-compounded zero rate to t, used as default cost of carry.
              double costOfCarry = -Math.log(zerobond) / t;
              if (forwardCurve instanceof ForwardCurveAffineDividends) {
                // Strip the PV of dividends paid before expiry out of the spot (affine model:
                // spot scales by (1 - beta_i) and drops by discounted alpha_i per dividend).
                final AffineDividends div = ((ForwardCurveAffineDividends) forwardCurve).getDividends();
                final int number = div.getNumberOfDividends();
                int i = 0;
                while (i < number && div.getTau(i) < t) {
                  modSpot = modSpot * (1. - div.getBeta(i)) - div.getAlpha(i) * discountCurve.getDiscountFactor(div.getTau(i));
                  ++i;
                }
              } else {
                // No explicit dividends: back out the implied carry from the forward itself.
                costOfCarry = Math.log(forwardCurve.getForward(t) / spot) / t;
              }
              vol = americanModel.impliedVolatility(price, modSpot, strike, -Math.log(zerobond) / t, costOfCarry, t, optionIsCall);
            } else {
              // European: compound the spot price to expiry and invert Black's formula.
              final double fwdPrice = price / zerobond;
              vol = BlackFormulaRepository.impliedVolatility(fwdPrice, forward, strike, t, optionIsCall);
            }
            tList.add(t);
            kList.add(strike);
            volValues.put(Pairs.of(t, strike), vol);
          } catch (final Exception e) {
            // Implication failed for this point (typically an off-market price); log and skip.
            LocalDate expiry = null;
            if (x instanceof Number) {
              expiry = FutureOptionExpiries.EQUITY.getFutureOptionExpiry(((Number) x).intValue(), valDate);
            } else if (x instanceof LocalDate) {
              expiry = (LocalDate) x;
            }
            s_logger.info("Liquidity problem: input price, forward and zero bond imply negative volatility at strike, {}, and expiry, {}", strike, expiry);
          }
        }
      }
    }
    final VolatilitySurfaceData<Double, Double> stdVolSurface = new VolatilitySurfaceData<>(rawSurface.getDefinitionName(),
        rawSurface.getSpecificationName(), rawSurface.getTarget(), tList.toArray(new Double[0]), kList.toArray(new Double[0]), volValues);
    return stdVolSurface;
  }

  /**
   * Normalises the raw surface's y-axis values to a {@code Double[]}: returns the array directly
   * when it already is one, otherwise element-casts an {@code Object[]}.
   *
   * @param ys the raw y-axis array
   * @return the strikes as a {@code Double[]}
   */
  private static Double[] getYs(final Object ys) {
    if (ys instanceof Double[]) {
      return (Double[]) ys;
    }
    final Object[] tempArray = (Object[]) ys;
    final Double[] result = new Double[tempArray.length];
    for (int i = 0; i < tempArray.length; i++) {
      result[i] = (Double) tempArray[i];
    }
    return result;
  }
}
/* * Copyright (c) Facebook, Inc. and its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.buck.apple; import com.facebook.buck.core.sourcepath.SourcePath; import com.facebook.buck.core.sourcepath.SourceWithFlags; import com.facebook.buck.core.util.immutables.BuckStyleValue; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import java.nio.file.Path; import java.util.Collection; import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.function.Function; import org.immutables.value.Value; @BuckStyleValue public abstract class GroupedSource { /** The type of grouped source entry this object represents. */ public enum Type { /** A single {@link SourceWithFlags}. */ SOURCE_WITH_FLAGS, /** A single {@link SourcePath} that shouldn't be included in the build phase. */ IGNORED_SOURCE, /** A single {@link SourcePath} representing a public header file. */ PUBLIC_HEADER, /** A single {@link SourcePath} representing a private header file. */ PRIVATE_HEADER, /** A source group (group name and one or more GroupedSource objects). 
*/ SOURCE_GROUP, } protected abstract Type getType(); protected abstract Optional<SourceWithFlags> getSourceWithFlags(); protected abstract Optional<SourcePath> getSourcePath(); protected abstract Optional<String> getSourceGroupName(); protected abstract Optional<Path> getSourceGroupPathRelativeToTarget(); protected abstract Optional<List<GroupedSource>> getSourceGroup(); @Value.Check protected void check() { switch (getType()) { case SOURCE_WITH_FLAGS: Preconditions.checkArgument(getSourceWithFlags().isPresent()); Preconditions.checkArgument(!getSourcePath().isPresent()); Preconditions.checkArgument(!getSourceGroupName().isPresent()); Preconditions.checkArgument(!getSourceGroupPathRelativeToTarget().isPresent()); Preconditions.checkArgument(!getSourceGroup().isPresent()); break; case IGNORED_SOURCE: case PUBLIC_HEADER: case PRIVATE_HEADER: Preconditions.checkArgument(!getSourceWithFlags().isPresent()); Preconditions.checkArgument(getSourcePath().isPresent()); Preconditions.checkArgument(!getSourceGroupName().isPresent()); Preconditions.checkArgument(!getSourceGroupPathRelativeToTarget().isPresent()); Preconditions.checkArgument(!getSourceGroup().isPresent()); break; case SOURCE_GROUP: Preconditions.checkArgument(!getSourceWithFlags().isPresent()); Preconditions.checkArgument(!getSourcePath().isPresent()); Preconditions.checkArgument(getSourceGroupName().isPresent()); Preconditions.checkArgument(getSourceGroupPathRelativeToTarget().isPresent()); Preconditions.checkArgument(getSourceGroup().isPresent()); break; default: throw new RuntimeException("Unhandled type: " + getType()); } } public String getName(Function<SourcePath, Path> pathResolver) { SourcePath sourcePath; switch (getType()) { case SOURCE_WITH_FLAGS: sourcePath = getSourceWithFlags().get().getSourcePath(); return Objects.requireNonNull(pathResolver.apply(sourcePath)).getFileName().toString(); case IGNORED_SOURCE: sourcePath = getSourcePath().get(); return 
Objects.requireNonNull(pathResolver.apply(sourcePath)).getFileName().toString(); case PUBLIC_HEADER: case PRIVATE_HEADER: sourcePath = getSourcePath().get(); return Objects.requireNonNull(pathResolver.apply(sourcePath)).getFileName().toString(); case SOURCE_GROUP: return getSourceGroupName().get(); default: throw new RuntimeException("Unhandled type: " + getType()); } } /** Creates a {@link GroupedSource} given a {@link SourceWithFlags}. */ public static GroupedSource ofSourceWithFlags(SourceWithFlags sourceWithFlags) { return ImmutableGroupedSource.of( Type.SOURCE_WITH_FLAGS, Optional.of(sourceWithFlags), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty()); } /** * Creates a {@link GroupedSource} given a {@link SourcePath} representing a file that should not * be included in sources. */ public static GroupedSource ofIgnoredSource(SourcePath sourcePath) { return ImmutableGroupedSource.of( Type.IGNORED_SOURCE, Optional.empty(), Optional.of(sourcePath), Optional.empty(), Optional.empty(), Optional.empty()); } /** * Creates a {@link GroupedSource} given a {@link SourcePath} representing a public header file. */ public static GroupedSource ofPublicHeader(SourcePath headerPath) { return ImmutableGroupedSource.of( Type.PUBLIC_HEADER, Optional.empty(), Optional.of(headerPath), Optional.empty(), Optional.empty(), Optional.empty()); } /** * Creates a {@link GroupedSource} given a {@link SourcePath} representing a private header file. */ public static GroupedSource ofPrivateHeader(SourcePath headerPath) { return ImmutableGroupedSource.of( Type.PRIVATE_HEADER, Optional.empty(), Optional.of(headerPath), Optional.empty(), Optional.empty(), Optional.empty()); } /** Creates a {@link GroupedSource} given a source group name and a list of GroupedSources. 
*/ public static GroupedSource ofSourceGroup( String sourceGroupName, Path sourceGroupPathRelativeToTarget, Collection<GroupedSource> sourceGroup) { return ImmutableGroupedSource.of( Type.SOURCE_GROUP, Optional.empty(), Optional.empty(), Optional.of(sourceGroupName), Optional.of(sourceGroupPathRelativeToTarget), Optional.of((List<GroupedSource>) ImmutableList.copyOf(sourceGroup))); } public interface Visitor { void visitSourceWithFlags(SourceWithFlags sourceWithFlags); void visitIgnoredSource(SourcePath source); void visitPublicHeader(SourcePath publicHeader); void visitPrivateHeader(SourcePath privateHeader); void visitSourceGroup( String sourceGroupName, Path sourceGroupPathRelativeToTarget, List<GroupedSource> sourceGroup); } public void visit(Visitor visitor) { switch (getType()) { case SOURCE_WITH_FLAGS: visitor.visitSourceWithFlags(getSourceWithFlags().get()); break; case IGNORED_SOURCE: visitor.visitIgnoredSource(getSourcePath().get()); break; case PUBLIC_HEADER: visitor.visitPublicHeader(getSourcePath().get()); break; case PRIVATE_HEADER: visitor.visitPrivateHeader(getSourcePath().get()); break; case SOURCE_GROUP: visitor.visitSourceGroup( getSourceGroupName().get(), getSourceGroupPathRelativeToTarget().get(), getSourceGroup().get()); } } }
/* * Copyright 2014-2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific language * governing permissions and limitations under the License. */ package org.docksidestage.dockside.dbflute.bsentity; import java.util.List; import java.util.ArrayList; import org.dbflute.Entity; import org.dbflute.dbmeta.DBMeta; import org.dbflute.dbmeta.AbstractEntity; import org.dbflute.dbmeta.accessory.DomainEntity; import org.dbflute.optional.OptionalEntity; import org.docksidestage.dockside.dbflute.allcommon.DBMetaInstanceHandler; import org.docksidestage.dockside.dbflute.exentity.*; /** * The entity of VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF as TABLE. 
<br> * <pre> * [primary-key] * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID * * [column] * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID, THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID, THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_DATE, SHORT_DATE * * [sequence] * * * [identity] * * * [version-no] * * * [foreign table] * VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN * * [referrer table] * * * [foreign property] * vendorTheLongAndWindingTableAndColumn * * [referrer property] * * * [get/set template] * /= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = * Long theLongAndWindingTableAndColumnRefId = entity.getTheLongAndWindingTableAndColumnRefId(); * Long theLongAndWindingTableAndColumnId = entity.getTheLongAndWindingTableAndColumnId(); * java.time.LocalDate theLongAndWindingTableAndColumnRefDate = entity.getTheLongAndWindingTableAndColumnRefDate(); * java.time.LocalDate shortDate = entity.getShortDate(); * entity.setTheLongAndWindingTableAndColumnRefId(theLongAndWindingTableAndColumnRefId); * entity.setTheLongAndWindingTableAndColumnId(theLongAndWindingTableAndColumnId); * entity.setTheLongAndWindingTableAndColumnRefDate(theLongAndWindingTableAndColumnRefDate); * entity.setShortDate(shortDate); * = = = = = = = = = =/ * </pre> * @author DBFlute(AutoGenerator) */ public abstract class BsVendorTheLongAndWindingTableAndColumnRef extends AbstractEntity implements DomainEntity { // =================================================================================== // Definition // ========== /** The serial version UID for object serialization. 
(Default) */
    private static final long serialVersionUID = 1L;

    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    // NOTE: DBFlute-generated column fields; names/types mirror the DB meta data.
    /** THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID: {PK, NotNull, BIGINT(19)} */
    protected Long _theLongAndWindingTableAndColumnRefId;

    /** THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID: {IX, NotNull, BIGINT(19), FK to VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN} */
    protected Long _theLongAndWindingTableAndColumnId;

    /** THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_DATE: {NotNull, DATE(10)} */
    protected java.time.LocalDate _theLongAndWindingTableAndColumnRefDate;

    /** SHORT_DATE: {NotNull, DATE(10)} */
    protected java.time.LocalDate _shortDate;

    // ===================================================================================
    //                                                                             DB Meta
    //                                                                             =======
    /** {@inheritDoc} */
    public DBMeta asDBMeta() {
        // looks up the singleton meta object by table name — TODO confirm handler caching
        return DBMetaInstanceHandler.findDBMeta(asTableDbName());
    }

    /** {@inheritDoc} */
    public String asTableDbName() {
        return "VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF";
    }

    // ===================================================================================
    //                                                                        Key Handling
    //                                                                        ============
    /** {@inheritDoc} */
    public boolean hasPrimaryKeyValue() {
        // the PK is the single REF_ID column; null means not selected/persisted yet
        if (_theLongAndWindingTableAndColumnRefId == null) { return false; }
        return true;
    }

    // ===================================================================================
    //                                                                    Foreign Property
    //                                                                    ================
    /** VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN by my THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID, named 'vendorTheLongAndWindingTableAndColumn'. */
    protected OptionalEntity<VendorTheLongAndWindingTableAndColumn> _vendorTheLongAndWindingTableAndColumn;

    /**
     * [get] VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN by my THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID, named 'vendorTheLongAndWindingTableAndColumn'. <br>
     * Optional: alwaysPresent(), ifPresent().orElse(), get(), ...
     * @return The entity of foreign property 'vendorTheLongAndWindingTableAndColumn'. (NotNull, EmptyAllowed: when e.g. null FK column, no setupSelect)
     */
    public OptionalEntity<VendorTheLongAndWindingTableAndColumn> getVendorTheLongAndWindingTableAndColumn() {
        // lazily materialize an empty relation wrapper so the getter never returns null
        if (_vendorTheLongAndWindingTableAndColumn == null) { _vendorTheLongAndWindingTableAndColumn = OptionalEntity.relationEmpty(this, "vendorTheLongAndWindingTableAndColumn"); }
        return _vendorTheLongAndWindingTableAndColumn;
    }

    /**
     * [set] VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN by my THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID, named 'vendorTheLongAndWindingTableAndColumn'.
     * @param vendorTheLongAndWindingTableAndColumn The entity of foreign property 'vendorTheLongAndWindingTableAndColumn'. (NullAllowed)
     */
    public void setVendorTheLongAndWindingTableAndColumn(OptionalEntity<VendorTheLongAndWindingTableAndColumn> vendorTheLongAndWindingTableAndColumn) {
        _vendorTheLongAndWindingTableAndColumn = vendorTheLongAndWindingTableAndColumn;
    }

    // ===================================================================================
    //                                                                   Referrer Property
    //                                                                   =================
    protected <ELEMENT> List<ELEMENT> newReferrerList() { // overriding to import
        return new ArrayList<ELEMENT>();
    }

    // ===================================================================================
    //                                                                      Basic Override
    //                                                                      ==============
    @Override
    protected boolean doEquals(Object obj) {
        // equality is by PK only, per DBFlute convention (xSV = string-value compare helper)
        if (obj instanceof BsVendorTheLongAndWindingTableAndColumnRef) {
            BsVendorTheLongAndWindingTableAndColumnRef other = (BsVendorTheLongAndWindingTableAndColumnRef)obj;
            if (!xSV(_theLongAndWindingTableAndColumnRefId, other._theLongAndWindingTableAndColumnRefId)) { return false; }
            return true;
        } else {
            return false;
        }
    }

    @Override
    protected int doHashCode(int initial) {
        // hash combines table name and PK (xCH = calculate-hash helper)
        int hs = initial;
        hs = xCH(hs, asTableDbName());
        hs = xCH(hs, _theLongAndWindingTableAndColumnRefId);
        return hs;
    }

    @Override
    protected String doBuildStringWithRelation(String li) {
        StringBuilder sb = new StringBuilder();
        // only render the relation when it was actually selected and present
        if (_vendorTheLongAndWindingTableAndColumn != null && _vendorTheLongAndWindingTableAndColumn.isPresent()) {
            sb.append(li).append(xbRDS(_vendorTheLongAndWindingTableAndColumn, "vendorTheLongAndWindingTableAndColumn"));
        }
        return sb.toString();
    }

    protected <ET extends Entity> String xbRDS(org.dbflute.optional.OptionalEntity<ET> et, String name) { // buildRelationDisplayString()
        return et.get().buildDisplayString(name, true, true);
    }

    @Override
    protected String doBuildColumnString(String dm) {
        StringBuilder sb = new StringBuilder();
        sb.append(dm).append(xfND(_theLongAndWindingTableAndColumnRefId));
        sb.append(dm).append(xfND(_theLongAndWindingTableAndColumnId));
        sb.append(dm).append(xfND(_theLongAndWindingTableAndColumnRefDate));
        sb.append(dm).append(xfND(_shortDate));
        // strip the leading delimiter, then wrap the whole list in braces
        if (sb.length() > dm.length()) {
            sb.delete(0, dm.length());
        }
        sb.insert(0, "{").append("}");
        return sb.toString();
    }

    @Override
    protected String doBuildRelationString(String dm) {
        StringBuilder sb = new StringBuilder();
        if (_vendorTheLongAndWindingTableAndColumn != null && _vendorTheLongAndWindingTableAndColumn.isPresent()) {
            sb.append(dm).append("vendorTheLongAndWindingTableAndColumn");
        }
        // strip leading delimiter and wrap in parentheses when non-empty
        if (sb.length() > dm.length()) {
            sb.delete(0, dm.length()).insert(0, "(").append(")");
        }
        return sb.toString();
    }

    @Override
    public VendorTheLongAndWindingTableAndColumnRef clone() {
        return (VendorTheLongAndWindingTableAndColumnRef)super.clone();
    }

    // ===================================================================================
    //                                                                            Accessor
    //                                                                            ========
    /**
     * [get] THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID: {PK, NotNull, BIGINT(19)} <br>
     * @return The value of the column 'THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID'. (basically NotNull if selected: for the constraint)
     */
    public Long getTheLongAndWindingTableAndColumnRefId() {
        checkSpecifiedProperty("theLongAndWindingTableAndColumnRefId");
        return _theLongAndWindingTableAndColumnRefId;
    }

    /**
     * [set] THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID: {PK, NotNull, BIGINT(19)} <br>
     * @param theLongAndWindingTableAndColumnRefId The value of the column 'THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID'. (basically NotNull if update: for the constraint)
     */
    public void setTheLongAndWindingTableAndColumnRefId(Long theLongAndWindingTableAndColumnRefId) {
        registerModifiedProperty("theLongAndWindingTableAndColumnRefId");
        _theLongAndWindingTableAndColumnRefId = theLongAndWindingTableAndColumnRefId;
    }

    /**
     * [get] THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID: {IX, NotNull, BIGINT(19), FK to VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN} <br>
     * @return The value of the column 'THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID'. (basically NotNull if selected: for the constraint)
     */
    public Long getTheLongAndWindingTableAndColumnId() {
        checkSpecifiedProperty("theLongAndWindingTableAndColumnId");
        return _theLongAndWindingTableAndColumnId;
    }

    /**
     * [set] THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID: {IX, NotNull, BIGINT(19), FK to VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN} <br>
     * @param theLongAndWindingTableAndColumnId The value of the column 'THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID'. (basically NotNull if update: for the constraint)
     */
    public void setTheLongAndWindingTableAndColumnId(Long theLongAndWindingTableAndColumnId) {
        registerModifiedProperty("theLongAndWindingTableAndColumnId");
        _theLongAndWindingTableAndColumnId = theLongAndWindingTableAndColumnId;
    }

    /**
     * [get] THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_DATE: {NotNull, DATE(10)} <br>
     * @return The value of the column 'THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_DATE'. (basically NotNull if selected: for the constraint)
     */
    public java.time.LocalDate getTheLongAndWindingTableAndColumnRefDate() {
        checkSpecifiedProperty("theLongAndWindingTableAndColumnRefDate");
        return _theLongAndWindingTableAndColumnRefDate;
    }

    /**
     * [set] THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_DATE: {NotNull, DATE(10)} <br>
     * @param theLongAndWindingTableAndColumnRefDate The value of the column 'THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_DATE'. (basically NotNull if update: for the constraint)
     */
    public void setTheLongAndWindingTableAndColumnRefDate(java.time.LocalDate theLongAndWindingTableAndColumnRefDate) {
        registerModifiedProperty("theLongAndWindingTableAndColumnRefDate");
        _theLongAndWindingTableAndColumnRefDate = theLongAndWindingTableAndColumnRefDate;
    }

    /**
     * [get] SHORT_DATE: {NotNull, DATE(10)} <br>
     * @return The value of the column 'SHORT_DATE'. (basically NotNull if selected: for the constraint)
     */
    public java.time.LocalDate getShortDate() {
        checkSpecifiedProperty("shortDate");
        return _shortDate;
    }

    /**
     * [set] SHORT_DATE: {NotNull, DATE(10)} <br>
     * @param shortDate The value of the column 'SHORT_DATE'. (basically NotNull if update: for the constraint)
     */
    public void setShortDate(java.time.LocalDate shortDate) {
        registerModifiedProperty("shortDate");
        _shortDate = shortDate;
    }
}
/* * Licensed to ElasticSearch and Shay Banon under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. ElasticSearch licenses this * file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.common.io.stream; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.UTF8StreamWriter; import org.elasticsearch.common.text.Text; import org.joda.time.ReadableInstant; import java.io.IOException; import java.io.OutputStream; import java.lang.ref.SoftReference; import java.util.Date; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * */ public abstract class StreamOutput extends OutputStream { private static ThreadLocal<SoftReference<UTF8StreamWriter>> utf8StreamWriter = new ThreadLocal<SoftReference<UTF8StreamWriter>>(); public static UTF8StreamWriter utf8StreamWriter() { SoftReference<UTF8StreamWriter> ref = utf8StreamWriter.get(); UTF8StreamWriter writer = (ref == null) ? 
null : ref.get(); if (writer == null) { writer = new UTF8StreamWriter(1024 * 4); utf8StreamWriter.set(new SoftReference<UTF8StreamWriter>(writer)); } writer.reset(); return writer; } private Version version = Version.CURRENT; public Version getVersion() { return this.version; } public StreamOutput setVersion(Version version) { this.version = version; return this; } public boolean seekPositionSupported() { return false; } public long position() throws IOException { throw new UnsupportedOperationException(); } public void seek(long position) throws IOException { throw new UnsupportedOperationException(); } /** * Writes a single byte. */ public abstract void writeByte(byte b) throws IOException; /** * Writes an array of bytes. * * @param b the bytes to write */ public void writeBytes(byte[] b) throws IOException { writeBytes(b, 0, b.length); } /** * Writes an array of bytes. * * @param b the bytes to write * @param length the number of bytes to write */ public void writeBytes(byte[] b, int length) throws IOException { writeBytes(b, 0, length); } /** * Writes an array of bytes. * * @param b the bytes to write * @param offset the offset in the byte array * @param length the number of bytes to write */ public abstract void writeBytes(byte[] b, int offset, int length) throws IOException; /** * Writes the bytes reference, including a length header. */ public void writeBytesReference(@Nullable BytesReference bytes) throws IOException { if (bytes == null) { writeVInt(0); return; } writeVInt(bytes.length()); bytes.writeTo(this); } public void writeBytesRef(BytesRef bytes) throws IOException { if (bytes == null) { writeVInt(0); return; } writeVInt(bytes.length); write(bytes.bytes, bytes.offset, bytes.length); } public final void writeShort(short v) throws IOException { writeByte((byte) (v >> 8)); writeByte((byte) v); } /** * Writes an int as four bytes. 
*/ public void writeInt(int i) throws IOException { writeByte((byte) (i >> 24)); writeByte((byte) (i >> 16)); writeByte((byte) (i >> 8)); writeByte((byte) i); } /** * Writes an int in a variable-length format. Writes between one and * five bytes. Smaller values take fewer bytes. Negative numbers * will always use all 5 bytes and are therefore better serialized * using {@link #writeInt} */ public void writeVInt(int i) throws IOException { while ((i & ~0x7F) != 0) { writeByte((byte) ((i & 0x7f) | 0x80)); i >>>= 7; } writeByte((byte) i); } /** * Writes a long as eight bytes. */ public void writeLong(long i) throws IOException { writeInt((int) (i >> 32)); writeInt((int) i); } /** * Writes an long in a variable-length format. Writes between one and nine * bytes. Smaller values take fewer bytes. Negative numbers are not * supported. */ public void writeVLong(long i) throws IOException { assert i >= 0; while ((i & ~0x7F) != 0) { writeByte((byte) ((i & 0x7f) | 0x80)); i >>>= 7; } writeByte((byte) i); } public void writeOptionalString(@Nullable String str) throws IOException { if (str == null) { writeBoolean(false); } else { writeBoolean(true); writeString(str); } } public void writeOptionalSharedString(@Nullable String str) throws IOException { if (str == null) { writeBoolean(false); } else { writeBoolean(true); writeSharedString(str); } } public void writeOptionalText(@Nullable Text text) throws IOException { if (text == null) { writeInt(-1); } else { writeText(text); } } public void writeText(Text text) throws IOException { if (!text.hasBytes() && seekPositionSupported()) { long pos1 = position(); // make room for the size seek(pos1 + 4); UTF8StreamWriter utf8StreamWriter = utf8StreamWriter(); utf8StreamWriter.setOutput(this); utf8StreamWriter.write(text.string()); utf8StreamWriter.close(); long pos2 = position(); seek(pos1); writeInt((int) (pos2 - pos1 - 4)); seek(pos2); } else { BytesReference bytes = text.bytes(); writeInt(bytes.length()); bytes.writeTo(this); } } 
public void writeTextArray(Text[] array) throws IOException { writeVInt(array.length); for (Text t : array) { writeText(t); } } public void writeSharedText(Text text) throws IOException { writeText(text); } public void writeString(String str) throws IOException { int charCount = str.length(); writeVInt(charCount); int c; for (int i = 0; i < charCount; i++) { c = str.charAt(i); if (c <= 0x007F) { writeByte((byte) c); } else if (c > 0x07FF) { writeByte((byte) (0xE0 | c >> 12 & 0x0F)); writeByte((byte) (0x80 | c >> 6 & 0x3F)); writeByte((byte) (0x80 | c >> 0 & 0x3F)); } else { writeByte((byte) (0xC0 | c >> 6 & 0x1F)); writeByte((byte) (0x80 | c >> 0 & 0x3F)); } } } public void writeSharedString(String str) throws IOException { writeString(str); } public void writeFloat(float v) throws IOException { writeInt(Float.floatToIntBits(v)); } public void writeDouble(double v) throws IOException { writeLong(Double.doubleToLongBits(v)); } private static byte ZERO = 0; private static byte ONE = 1; private static byte TWO = 2; /** * Writes a boolean. */ public void writeBoolean(boolean b) throws IOException { writeByte(b ? ONE : ZERO); } public void writeOptionalBoolean(@Nullable Boolean b) throws IOException { if (b == null) { writeByte(TWO); } else { writeByte(b ? ONE : ZERO); } } /** * Forces any buffered output to be written. */ public abstract void flush() throws IOException; /** * Closes this stream to further operations. */ public abstract void close() throws IOException; public abstract void reset() throws IOException; @Override public void write(int b) throws IOException { writeByte((byte) b); } @Override public void write(byte[] b, int off, int len) throws IOException { writeBytes(b, off, len); } public void writeStringArray(String[] array) throws IOException { writeVInt(array.length); for (String s : array) { writeString(s); } } /** * Writes a string array, for nullable string, writes it as 0 (empty string). 
*/ public void writeStringArrayNullable(@Nullable String[] array) throws IOException { if (array == null) { writeVInt(0); } else { writeVInt(array.length); for (String s : array) { writeString(s); } } } public void writeMap(@Nullable Map<String, Object> map) throws IOException { writeGenericValue(map); } public void writeGenericValue(@Nullable Object value) throws IOException { if (value == null) { writeByte((byte) -1); return; } Class type = value.getClass(); if (type == String.class) { writeByte((byte) 0); writeString((String) value); } else if (type == Integer.class) { writeByte((byte) 1); writeInt((Integer) value); } else if (type == Long.class) { writeByte((byte) 2); writeLong((Long) value); } else if (type == Float.class) { writeByte((byte) 3); writeFloat((Float) value); } else if (type == Double.class) { writeByte((byte) 4); writeDouble((Double) value); } else if (type == Boolean.class) { writeByte((byte) 5); writeBoolean((Boolean) value); } else if (type == byte[].class) { writeByte((byte) 6); writeVInt(((byte[]) value).length); writeBytes(((byte[]) value)); } else if (value instanceof List) { writeByte((byte) 7); List list = (List) value; writeVInt(list.size()); for (Object o : list) { writeGenericValue(o); } } else if (value instanceof Object[]) { writeByte((byte) 8); Object[] list = (Object[]) value; writeVInt(list.length); for (Object o : list) { writeGenericValue(o); } } else if (value instanceof Map) { if (value instanceof LinkedHashMap) { writeByte((byte) 9); } else { writeByte((byte) 10); } Map<String, Object> map = (Map<String, Object>) value; writeVInt(map.size()); for (Map.Entry<String, Object> entry : map.entrySet()) { writeSharedString(entry.getKey()); writeGenericValue(entry.getValue()); } } else if (type == Byte.class) { writeByte((byte) 11); writeByte((Byte) value); } else if (type == Date.class) { writeByte((byte) 12); writeLong(((Date) value).getTime()); } else if (value instanceof ReadableInstant) { writeByte((byte) 13); 
writeLong(((ReadableInstant) value).getMillis()); } else if (value instanceof BytesReference) { writeByte((byte) 14); writeBytesReference((BytesReference) value); } else if (value instanceof Text) { writeByte((byte) 15); writeText((Text) value); } else if (type == Short.class) { writeByte((byte) 16); writeShort((Short) value); } else { throw new IOException("Can't write type [" + type + "]"); } } /** * Serializes a potential null value. */ public void writeOptionalStreamable(@Nullable Streamable streamable) throws IOException { if (streamable != null) { writeBoolean(true); streamable.writeTo(this); } else { writeBoolean(false); } } }
/* * Copyright (c) 2005, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package build.tools.buildmetaindex; import java.io.*; import java.util.*; import java.util.jar.*; /** Constructs a meta-index of the specified jar files. The meta-index contains prefixes of packages contained in these jars, indexed by the jar file name. It is intended to be consumed by the JVM to allow the boot class loader to be made lazier. For example, when class data sharing is enabled, the presence of the meta-index allows the JVM to skip opening rt.jar if all of the dependent classes of the application are in the shared archive. A similar mechanism could be useful at the application level as well, for example to make the extension class loader lazier. <p> The contents of the meta-index file for jre/lib look something like this: <PRE> % VERSION 2 # charsets.jar sun/ # jce.jar javax/ ! 
jsse.jar sun/ com/sun/net/ javax/ com/sun/security/ # management-agent.jar ! rt.jar org/w3c/ com/sun/image/ com/sun/org/ com/sun/imageio/ com/sun/accessibility/ javax/ ... </PRE> <p> It is a current invariant of the code in the JVM which consumes the meta-index that the meta-index indexes only jars in one directory. It is acceptable for jars in that directory to not be mentioned in the meta-index. The meta-index is designed more to be able to perform a quick rejection test of the presence of a particular class in a particular jar file than to be a precise index of the contents of the jar. */ public class BuildMetaIndex { public static void main(String[] args) throws IOException { /* The correct usage of this class is as following: * java BuildMetaIndex -o <meta-index> <a list of jar files> * So the argument length should be at least 3 and the first argument should * be '-o'. */ if (args.length < 3 || !args[0].equals("-o")) { printUsage(); System.exit(1); } try { PrintStream out = new PrintStream(new FileOutputStream(args[1])); out.println("% VERSION 2"); out.println("% WARNING: this file is auto-generated; do not edit"); out.println("% UNSUPPORTED: this file and its format may change and/or"); out.println("% may be removed in a future release"); for (int i = 2; i < args.length; i++) { String filename = args[i]; JarMetaIndex jmi = new JarMetaIndex(filename); HashSet<String> index = jmi.getMetaIndex(); if (index == null) { continue; } /* * meta-index file plays different role in JVM and JDK side. * On the JVM side, meta-index file is used to speed up locating the * class files only while on the JDK side, meta-index file is used to speed * up the resources file and class file. * To help the JVM and JDK code to better utilize the information in meta-index * file, we mark the jar file differently. Here is the current rule we use (See * JarFileKind.getMarkChar() method. ) * For jar file containing only class file, we put '!' 
before the jar file name; * for jar file containing only resources file, we put '@' before the jar file name; * for jar file containing both resources and class file, we put '#' before the jar name. * Notice the fact that every jar file contains at least the manifest file, so when * we say "jar file containing only class file", we don't include that file. */ out.println(jmi.getJarFileKind().getMarkerChar() + " " + filename); for (Iterator<String> iter = index.iterator(); iter.hasNext(); ) { out.println(iter.next()); } } out.flush(); out.close(); } catch (FileNotFoundException fnfe) { System.err.println("FileNotFoundException occurred"); System.exit(2); } } private static void printUsage() { String usage = "BuildMetaIndex is used to generate a meta index file for the jar files\n" + "you specified. The following is its usage:\n" + " java BuildMetaIndex -o <the output meta index file> <a list of jar files> \n" + " You can specify *.jar to refer to all the jar files in the current directory"; System.err.println(usage); } } enum JarFileKind { CLASSONLY ('!'), RESOURCEONLY ('@'), MIXED ('#'); private char markerChar; JarFileKind(char markerChar) { this.markerChar = markerChar; } public char getMarkerChar() { return markerChar; } } /* * JarMetaIndex associates the jar file with a set of what so called * "meta-index" of the jar file. Essentially, the meta-index is a list * of class prefixes and the plain files contained in META-INF directory ( * not include the manifest file itself). This will help sun.misc.URLClassPath * to quickly locate the resource file and hotspot VM to locate the class file. * */ class JarMetaIndex { private JarFile jar; private volatile HashSet<String> indexSet; /* * A hashmap contains a mapping from the prefix string to * a hashset which contains a set of the second level of prefix string. 
*/ private HashMap<String, HashSet<String>> knownPrefixMap = new HashMap<String, HashSet<String>>(); /* * We add maximum 5 second level entries to "sun", "java" and * "javax" entries. Tune this parameter to get a balance on the * cold start and footprint. */ private static final int MAX_PKGS_WITH_KNOWN_PREFIX = 5; private JarFileKind jarFileKind; JarMetaIndex(String fileName) throws IOException { jar = new JarFile(fileName); knownPrefixMap.put("sun", new HashSet<String>()); knownPrefixMap.put("java", new HashSet<String>()); knownPrefixMap.put("javax", new HashSet<String>()); } /* Returns a HashSet contains the meta index string. */ HashSet<String> getMetaIndex() { if (indexSet == null) { synchronized(this) { if (indexSet == null) { indexSet = new HashSet<String>(); Enumeration entries = jar.entries(); boolean containsOnlyClass = true; boolean containsOnlyResource = true; while (entries.hasMoreElements()) { JarEntry entry = (JarEntry) entries.nextElement(); String name = entry.getName(); /* We only look at the non-directory entry. MANIFEST file is also skipped. */ if (entry.isDirectory() || name.equals("META-INF/MANIFEST.MF")) { continue; } /* Once containsOnlyResource or containsOnlyClass turns to false, no need to check the entry type. */ if (containsOnlyResource || containsOnlyClass) { if (name.endsWith(".class")) { containsOnlyResource = false; } else { containsOnlyClass = false; } } /* Add the full-qualified name of plain files under META-INF directory to the indexSet. */ if (name.startsWith("META-INF")) { indexSet.add(name); continue; } /* Add the prefix name to the knownPrefixMap if the name starts with any string in the knownPrefix list. 
*/ if (isPrefixKnown(name)) { continue; } String[] pkgElements = name.split("/"); // Last one is the class name; definitely ignoring that if (pkgElements.length > 2) { String meta = null; // Need more information than just first two package // name elements to determine that classes in // deploy.jar are not in rt.jar if (pkgElements.length > 3 && pkgElements[0].equals("com") && pkgElements[1].equals("sun")) { // Need more precise information to disambiguate // (illegal) references from applications to // obsolete backported collections classes in // com/sun/java/util if (pkgElements.length > 4 && pkgElements[2].equals("java")) { int bound = 0; if (pkgElements[3].equals("util")) { // Take all of the packages bound = pkgElements.length - 1; } else { // Trim it somewhat more bound = 4; } meta = ""; for (int j = 0; j < bound; j++) { meta += pkgElements[j] + "/"; } } else { meta = pkgElements[0] + "/" + pkgElements[1] + "/" + pkgElements[2] + "/"; } } else { meta = pkgElements[0] + "/" + pkgElements[1] + "/"; } indexSet.add(meta); } } // end of "while" loop; // Add the second level package names to the indexSet for // the predefined names such as "sun", "java" and "javax". addKnownPrefix(); /* Set "jarFileKind" attribute. */ if (containsOnlyClass) { jarFileKind = JarFileKind.CLASSONLY; } else if (containsOnlyResource) { jarFileKind = JarFileKind.RESOURCEONLY; } else { jarFileKind = JarFileKind.MIXED; } } } } return indexSet; } /* * Checks to see whether the name starts with a string which is in the predefined * list. If it is among one of the predefined prefixes, add it to the knowPrefixMap * and returns true, otherwise, returns false. * Returns true if the name is in a predefined prefix list. Otherwise, returns false. 
*/ boolean isPrefixKnown(String name) { int firstSlashIndex = name.indexOf("/"); if (firstSlashIndex == -1) { return false; } String firstPkgElement = name.substring(0, firstSlashIndex); HashSet<String> pkgSet = knownPrefixMap.get(firstPkgElement); /* The name does not starts with "sun", "java" or "javax". */ if (pkgSet == null) { return false; } String secondPkgElement = name.substring(firstSlashIndex + 1, name.indexOf("/", firstSlashIndex + 1)); /* Add the second level package name to the corresponding hashset. */ if (secondPkgElement != null) { pkgSet.add(secondPkgElement); } return true; } /* * Adds all the second level package elements for "sun", "java" and "javax" * if the corresponding jar file does not contain more than * MAX_PKGS_WITH_KNOWN_PREFIX such entries. */ void addKnownPrefix() { if (indexSet == null) { return; } /* Iterate through the hash map, add the second level package names * to the indexSet if has any. */ for (Iterator<String> keysIterator = knownPrefixMap.keySet().iterator(); keysIterator.hasNext();) { String key = keysIterator.next(); HashSet<String> pkgSetStartsWithKey = knownPrefixMap.get(key); int setSize = pkgSetStartsWithKey.size(); if (setSize == 0) { continue; } else if (setSize > JarMetaIndex.MAX_PKGS_WITH_KNOWN_PREFIX) { indexSet.add(key + "/"); } else { /* If the set contains less than MAX_PKGS_WITH_KNOWN_PREFIX, add * them to the indexSet of the MetaIndex object. */ for (Iterator<String> secondPkgElements = pkgSetStartsWithKey.iterator(); secondPkgElements.hasNext();) { indexSet.add(key + "/" + secondPkgElements.next()); } } } // end the outer "for" } JarFileKind getJarFileKind() { // Build meta index if it hasn't. if (indexSet == null) { indexSet = getMetaIndex(); } return jarFileKind; } }
/* * Copyright (C) 2007 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.taobao.android.dx.ssa; import com.taobao.android.dx.rop.code.BasicBlock; import com.taobao.android.dx.rop.code.BasicBlockList; import com.taobao.android.dx.rop.code.Insn; import com.taobao.android.dx.rop.code.InsnList; import com.taobao.android.dx.rop.code.PlainInsn; import com.taobao.android.dx.rop.code.RegisterSpec; import com.taobao.android.dx.rop.code.RegisterSpecList; import com.taobao.android.dx.rop.code.Rop; import com.taobao.android.dx.rop.code.RopMethod; import com.taobao.android.dx.rop.code.Rops; import com.taobao.android.dx.rop.code.SourcePosition; import com.taobao.android.dx.util.Hex; import com.taobao.android.dx.util.IntList; import com.taobao.android.dx.util.IntSet; import java.util.ArrayList; import java.util.BitSet; import java.util.Collections; import java.util.Comparator; import java.util.List; /** * An SSA representation of a basic block. 
 */
public final class SsaBasicBlock {
    /**
     * {@code non-null;} comparator for instances of this class that
     * just compares block labels
     */
    public static final Comparator<SsaBasicBlock> LABEL_COMPARATOR =
        new LabelComparator();

    /** {@code non-null;} insn list associated with this instance */
    private ArrayList<SsaInsn> insns;

    /** {@code non-null;} predecessor set (by block list index) */
    private BitSet predecessors;

    /** {@code non-null;} successor set (by block list index) */
    private BitSet successors;

    /**
     * {@code non-null;} ordered successor list
     * (same block may be listed more than once)
     */
    private IntList successorList;

    /**
     * block list index of primary successor, or {@code -1} for no primary
     * successor
     */
    private int primarySuccessor = -1;

    /** label of block in rop form */
    private int ropLabel;

    /** {@code non-null;} method we belong to */
    private SsaMethod parent;

    /** our index into parent.getBlock() */
    private int index;

    /** list of dom children */
    private final ArrayList<SsaBasicBlock> domChildren;

    /**
     * the number of moves added to the end of the block during the
     * phi-removal process. Retained for subsequent move scheduling.
     */
    private int movesFromPhisAtEnd = 0;

    /**
     * the number of moves added to the beginning of the block during the
     * phi-removal process. Retained for subsequent move scheduling.
     */
    private int movesFromPhisAtBeginning = 0;

    /**
     * contains last computed value of reachability of this block, or -1
     * if reachability hasn't been calculated yet
     */
    private int reachable = -1;

    /**
     * {@code null-ok;} indexed by reg: the regs that are live-in at
     * this block
     */
    private IntSet liveIn;

    /**
     * {@code null-ok;} indexed by reg: the regs that are live-out at
     * this block
     */
    private IntSet liveOut;

    /**
     * Creates a new empty basic block.
     *
     * @param basicBlockIndex index this block will have
     * @param ropLabel original rop-form label
     * @param parent method of this block
     */
    public SsaBasicBlock(final int basicBlockIndex, final int ropLabel,
            final SsaMethod parent) {
        this.parent = parent;
        this.index = basicBlockIndex;
        this.insns = new ArrayList<SsaInsn>();
        this.ropLabel = ropLabel;
        // pred/succ sets are sized to the parent's current block count
        this.predecessors = new BitSet(parent.getBlocks().size());
        this.successors = new BitSet(parent.getBlocks().size());
        this.successorList = new IntList();
        domChildren = new ArrayList<SsaBasicBlock>();
    }

    /**
     * Creates a new SSA basic block from a ROP form basic block.
     *
     * @param rmeth original method
     * @param basicBlockIndex index this block will have
     * @param parent method of this block predecessor set will be
     * updated
     * @return new instance
     */
    public static SsaBasicBlock newFromRop(RopMethod rmeth,
            int basicBlockIndex, final SsaMethod parent) {
        BasicBlockList ropBlocks = rmeth.getBlocks();
        BasicBlock bb = ropBlocks.get(basicBlockIndex);
        SsaBasicBlock result =
            new SsaBasicBlock(basicBlockIndex, bb.getLabel(), parent);
        InsnList ropInsns = bb.getInsns();

        result.insns.ensureCapacity(ropInsns.size());

        // wrap each rop insn as a NormalSsaInsn owned by the new block
        for (int i = 0, sz = ropInsns.size() ; i < sz ; i++) {
            result.insns.add(new NormalSsaInsn (ropInsns.get(i), result));
        }

        // translate label-based pred/succ info into block-index-based sets
        result.predecessors = SsaMethod.bitSetFromLabelList(
                ropBlocks, rmeth.labelToPredecessors(bb.getLabel()));

        result.successors
                = SsaMethod.bitSetFromLabelList(ropBlocks, bb.getSuccessors());

        result.successorList
                = SsaMethod.indexListFromLabelList(ropBlocks,
                    bb.getSuccessors());

        if (result.successorList.size() != 0) {
            int primarySuccessor = bb.getPrimarySuccessor();

            // -1 (no primary successor) is preserved; otherwise map label -> index
            result.primarySuccessor = (primarySuccessor < 0) ? -1
                : ropBlocks.indexOfLabel(primarySuccessor);
        }

        return result;
    }

    /**
     * Adds a basic block as a dom child for this block. Used when constructing
     * the dom tree.
     *
     * @param child {@code non-null;} new dom child
     */
    public void addDomChild(SsaBasicBlock child) {
        domChildren.add(child);
    }

    /**
     * Gets the dom children for this node. Don't modify this list.
     *
     * @return {@code non-null;} list of dom children
     */
    public ArrayList<SsaBasicBlock> getDomChildren() {
        return domChildren;
    }

    /**
     * Adds a phi insn to the beginning of this block. The result type of
     * the phi will be set to void, to indicate that it's currently unknown.
     *
     * @param reg {@code >=0;} result reg
     */
    public void addPhiInsnForReg(int reg) {
        insns.add(0, new PhiInsn(reg, this));
    }

    /**
     * Adds a phi insn to the beginning of this block. This is to be used
     * when the result type or local-association can be determined at phi
     * insert time.
     *
     * @param resultSpec {@code non-null;} reg
     */
    public void addPhiInsnForReg(RegisterSpec resultSpec) {
        insns.add(0, new PhiInsn(resultSpec, this));
    }

    /**
     * Adds an insn to the head of this basic block, just after any phi
     * insns.
     *
     * @param insn {@code non-null;} rop-form insn to add
     */
    public void addInsnToHead(Insn insn) {
        SsaInsn newInsn = SsaInsn.makeFromRop(insn, this);
        // inserted after the phi prefix so phis stay contiguous at the top
        insns.add(getCountPhiInsns(), newInsn);
        // notify the parent so its use/def bookkeeping stays in sync
        parent.onInsnAdded(newInsn);
    }

    /**
     * Replaces the last insn in this block. The provided insn must have
     * some branchingness.
     *
     * @param insn {@code non-null;} rop-form insn to add, which must branch.
     */
    public void replaceLastInsn(Insn insn) {
        if (insn.getOpcode().getBranchingness() == Rop.BRANCH_NONE) {
            throw new IllegalArgumentException("last insn must branch");
        }

        SsaInsn oldInsn = insns.get(insns.size() - 1);
        SsaInsn newInsn = SsaInsn.makeFromRop(insn, this);

        insns.set(insns.size() - 1, newInsn);

        // parent must see the removal before the addition — keeps its maps consistent
        parent.onInsnRemoved(oldInsn);
        parent.onInsnAdded(newInsn);
    }

    /**
     * Visits each phi insn.
 *
 * @param v {@code non-null;} the callback
 */
public void forEachPhiInsn(PhiInsn.Visitor v) {
    int sz = insns.size();

    for (int i = 0; i < sz; i++) {
        SsaInsn insn = insns.get(i);

        if (insn instanceof PhiInsn) {
            v.visitPhiInsn((PhiInsn) insn);
        } else {
            /*
             * Presently we assume PhiInsn's are in a continuous
             * block at the top of the list
             */
            break;
        }
    }
}

/**
 * Deletes all phi insns. Do this after adding appropriate move insns.
 */
public void removeAllPhiInsns() {
    /*
     * Presently we assume PhiInsn's are in a continuous
     * block at the top of the list.
     */
    insns.subList(0, getCountPhiInsns()).clear();
}

/**
 * Gets the number of phi insns at the top of this basic block.
 *
 * @return count of phi insns
 */
private int getCountPhiInsns() {
    int countPhiInsns;
    int sz = insns.size();

    // Phis form a contiguous run at the head; stop at the first non-phi.
    for (countPhiInsns = 0; countPhiInsns < sz; countPhiInsns++) {
        SsaInsn insn = insns.get(countPhiInsns);
        if (!(insn instanceof PhiInsn)) {
            break;
        }
    }

    return countPhiInsns;
}

/**
 * @return {@code non-null;} the (mutable) instruction list for this block,
 * with phi insns at the beginning
 */
public ArrayList<SsaInsn> getInsns() {
    return insns;
}

/**
 * @return {@code non-null;} the (mutable) list of phi insns for this block
 */
public List<SsaInsn> getPhiInsns() {
    return insns.subList(0, getCountPhiInsns());
}

/**
 * @return the block index of this block
 */
public int getIndex() {
    return index;
}

/**
 * @return the label of this block in rop form
 */
public int getRopLabel() {
    return ropLabel;
}

/**
 * @return the label of this block in rop form as a hex string
 */
public String getRopLabelString() {
    return Hex.u2(ropLabel);
}

/**
 * @return {@code non-null;} predecessors set, indexed by block index
 */
public BitSet getPredecessors() {
    return predecessors;
}

/**
 * @return {@code non-null;} successors set, indexed by block index
 */
public BitSet getSuccessors() {
    return successors;
}

/**
 * @return {@code non-null;} ordered successor list, containing block
 * indicies
 */
public IntList getSuccessorList() {
    return successorList;
}

/**
 * @return {@code >= -1;} block index of primary successor or
 * {@code -1} if no primary successor
 */
public int getPrimarySuccessorIndex() {
    return primarySuccessor;
}

/**
 * @return rop label of primary successor
 */
public int getPrimarySuccessorRopLabel() {
    return parent.blockIndexToRopLabel(primarySuccessor);
}

/**
 * @return {@code null-ok;} the primary successor block or {@code null}
 * if there is none
 */
public SsaBasicBlock getPrimarySuccessor() {
    if (primarySuccessor < 0) {
        return null;
    } else {
        return parent.getBlocks().get(primarySuccessor);
    }
}

/**
 * @return successor list of rop labels
 */
public IntList getRopLabelSuccessorList() {
    IntList result = new IntList(successorList.size());

    int sz = successorList.size();

    for (int i = 0; i < sz; i++) {
        result.add(parent.blockIndexToRopLabel(successorList.get(i)));
    }
    return result;
}

/**
 * @return {@code non-null;} method that contains this block
 */
public SsaMethod getParent() {
    return parent;
}

/**
 * Inserts a new empty GOTO block as a predecessor to this block.
 * All previous predecessors will be predecessors to the new block.
 *
 * @return {@code non-null;} an appropriately-constructed instance
 */
public SsaBasicBlock insertNewPredecessor() {
    SsaBasicBlock newPred = parent.makeNewGotoBlock();

    // Update the new block.
    // NOTE: newPred adopts this block's predecessor set by reference;
    // a fresh set is allocated for this block just below.
    newPred.predecessors = predecessors;
    newPred.successors.set(index);
    newPred.successorList.add(index);
    newPred.primarySuccessor = index;

    // Update us.
    predecessors = new BitSet(parent.getBlocks().size());
    predecessors.set(newPred.index);

    // Update our (soon-to-be) old predecessors.
    for (int i = newPred.predecessors.nextSetBit(0); i >= 0;
            i = newPred.predecessors.nextSetBit(i + 1)) {
        SsaBasicBlock predBlock = parent.getBlocks().get(i);

        predBlock.replaceSuccessor(index, newPred.index);
    }

    return newPred;
}

/**
 * Constructs and inserts a new empty GOTO block {@code Z} between
 * this block ({@code A}) and a current successor block
 * ({@code B}).
 * The new block will replace B as A's successor and
 * A as B's predecessor. A and B will no longer be directly connected.
 * If B is listed as a successor multiple times, all references
 * are replaced.
 *
 * @param other current successor (B)
 * @return {@code non-null;} an appropriately-constructed instance
 */
public SsaBasicBlock insertNewSuccessor(SsaBasicBlock other) {
    SsaBasicBlock newSucc = parent.makeNewGotoBlock();

    if (!successors.get(other.index)) {
        throw new RuntimeException("Block " + other.getRopLabelString()
                + " not successor of " + getRopLabelString());
    }

    // Update the new block.
    newSucc.predecessors.set(this.index);
    newSucc.successors.set(other.index);
    newSucc.successorList.add(other.index);
    newSucc.primarySuccessor = other.index;

    // Update us: every occurrence of other.index in the ordered list
    // is redirected to the new block.
    for (int i = successorList.size() - 1 ; i >= 0; i--) {
        if (successorList.get(i) == other.index) {
            successorList.set(i, newSucc.index);
        }
    }

    if (primarySuccessor == other.index) {
        primarySuccessor = newSucc.index;
    }

    successors.clear(other.index);
    successors.set(newSucc.index);

    // Update "other".
    other.predecessors.set(newSucc.index);
    // NOTE(review): successors.get(other.index) is evaluated AFTER the
    // clear() above, so it is false (unless other IS the new block) and
    // this effectively removes us from other's predecessor set. Subtle,
    // but apparently intentional — do not "simplify".
    other.predecessors.set(index, successors.get(other.index));

    return newSucc;
}

/**
 * Replaces an old successor with a new successor. This will throw
 * RuntimeException if {@code oldIndex} was not a successor.
 *
 * @param oldIndex index of old successor block
 * @param newIndex index of new successor block
 */
public void replaceSuccessor(int oldIndex, int newIndex) {
    if (oldIndex == newIndex) {
        return;
    }

    // Update us.
    successors.set(newIndex);

    if (primarySuccessor == oldIndex) {
        primarySuccessor = newIndex;
    }

    for (int i = successorList.size() - 1 ; i >= 0; i--) {
        if (successorList.get(i) == oldIndex) {
            successorList.set(i, newIndex);
        }
    }

    successors.clear(oldIndex);

    // Update new successor.
    parent.getBlocks().get(newIndex).predecessors.set(index);

    // Update old successor.
    parent.getBlocks().get(oldIndex).predecessors.clear(index);
}

/**
 * Removes a successor from this block's successor list.
 *
 * @param oldIndex index of successor block to remove
 */
public void removeSuccessor(int oldIndex) {
    int removeIndex = 0;

    // NOTE(review): if oldIndex is not in the list, this removes entry 0;
    // also, primarySuccessor ends up as the last non-matching entry
    // examined (iteration is back-to-front). Preserved as-is.
    for (int i = successorList.size() - 1; i >= 0; i--) {
        if (successorList.get(i) == oldIndex) {
            removeIndex = i;
        } else {
            primarySuccessor = successorList.get(i);
        }
    }

    successorList.removeIndex(removeIndex);
    successors.clear(oldIndex);
    parent.getBlocks().get(oldIndex).predecessors.clear(index);
}

/**
 * Attaches block to an exit block if necessary. If this block
 * is not an exit predecessor or is the exit block, this block does
 * nothing. For use by {@link com.taobao.android.dx.ssa.SsaMethod#makeExitBlock}
 *
 * @param exitBlock {@code non-null;} exit block
 */
public void exitBlockFixup(SsaBasicBlock exitBlock) {
    if (this == exitBlock) {
        return;
    }

    if (successorList.size() == 0) {
        /*
         * This is an exit predecessor.
         * Set the successor to the exit block
         */
        successors.set(exitBlock.index);
        successorList.add(exitBlock.index);
        primarySuccessor = exitBlock.index;
        exitBlock.predecessors.set(this.index);
    }
}

/**
 * Adds a move instruction to the end of this basic block, just
 * before the last instruction. If the result of the final instruction
 * is the source in question, then the move is placed at the beginning of
 * the primary successor block. This is for unversioned registers.
 *
 * @param result move destination
 * @param source move source
 */
public void addMoveToEnd(RegisterSpec result, RegisterSpec source) {

    if (result.getReg() == source.getReg()) {
        // Sometimes we end up with no-op moves. Ignore them here.
        return;
    }

    /*
     * The last Insn has to be a normal SSA insn: a phi can't branch
     * or return or cause an exception, etc.
     */
    NormalSsaInsn lastInsn;
    lastInsn = (NormalSsaInsn)insns.get(insns.size()-1);

    if (lastInsn.getResult() != null || lastInsn.getSources().size() > 0) {
        /*
         * The final insn in this block has a source or result
         * register, and the moves we may need to place and
         * schedule may interfere. We need to insert this
         * instruction at the beginning of the primary successor
         * block instead. We know this is safe, because when we
         * edge-split earlier, we ensured that each successor has
         * only us as a predecessor.
         */
        for (int i = successors.nextSetBit(0)
                ; i >= 0
                ; i = successors.nextSetBit(i + 1)) {

            SsaBasicBlock succ;

            succ = parent.getBlocks().get(i);
            succ.addMoveToBeginning(result, source);
        }
    } else {
        /*
         * We can safely add a move to the end of the block just
         * before the last instruction, because the final insn does
         * not assign to anything.
         */
        RegisterSpecList sources = RegisterSpecList.make(source);
        NormalSsaInsn toAdd = new NormalSsaInsn(
                new PlainInsn(Rops.opMove(result.getType()),
                        SourcePosition.NO_INFO, result, sources), this);

        insns.add(insns.size() - 1, toAdd);

        movesFromPhisAtEnd++;
    }
}

/**
 * Adds a move instruction after the phi insn block.
 *
 * @param result move destination
 * @param source move source
 */
public void addMoveToBeginning (RegisterSpec result, RegisterSpec source) {
    if (result.getReg() == source.getReg()) {
        // Sometimes we end up with no-op moves. Ignore them here.
        return;
    }

    RegisterSpecList sources = RegisterSpecList.make(source);
    NormalSsaInsn toAdd = new NormalSsaInsn(
            new PlainInsn(Rops.opMove(result.getType()),
                    SourcePosition.NO_INFO, result, sources), this);

    insns.add(getCountPhiInsns(), toAdd);
    movesFromPhisAtBeginning++;
}

/**
 * Sets the register as used in a bitset, taking into account its
 * category/width.
 *
 * @param regsUsed set, indexed by register number
 * @param rs register to mark as used
 */
private static void setRegsUsed (BitSet regsUsed, RegisterSpec rs) {
    regsUsed.set(rs.getReg());
    // A category-2 (wide) register occupies two adjacent slots.
    if (rs.getCategory() > 1) {
        regsUsed.set(rs.getReg() + 1);
    }
}

/**
 * Checks to see if the register is used in a bitset, taking
 * into account its category/width.
 *
 * @param regsUsed set, indexed by register number
 * @param rs register to mark as used
 * @return true if register is fully or partially (for the case of wide
 * registers) used.
 */
private static boolean checkRegUsed (BitSet regsUsed, RegisterSpec rs) {
    int reg = rs.getReg();
    int category = rs.getCategory();

    return regsUsed.get(reg)
            || (category == 2 ? regsUsed.get(reg + 1) : false);
}

/**
 * Ensures that all move operations in this block occur such that
 * reads of any register happen before writes to that register.
 * NOTE: caller is expected to returnSpareRegisters()!
 *
 * TODO: See Briggs, et al "Practical Improvements to the Construction and
 * Destruction of Static Single Assignment Form" section 5. a) This can
 * be done in three passes.
 *
 * @param toSchedule List of instructions. Must consist only of moves.
 */
private void scheduleUseBeforeAssigned(List<SsaInsn> toSchedule) {
    BitSet regsUsedAsSources = new BitSet(parent.getRegCount());

    // TODO: Get rid of this.
    BitSet regsUsedAsResults = new BitSet(parent.getRegCount());

    int sz = toSchedule.size();
    // Everything before insertPlace is already correctly scheduled.
    int insertPlace = 0;

    while (insertPlace < sz) {
        int oldInsertPlace = insertPlace;

        // Record all registers used as sources in this block.
        for (int i = insertPlace; i < sz; i++) {
            setRegsUsed(regsUsedAsSources,
                    toSchedule.get(i).getSources().get(0));

            setRegsUsed(regsUsedAsResults,
                    toSchedule.get(i).getResult());
        }

        /*
         * If there are no circular dependencies, then there exists
         * n instructions where n > 1 whose result is not used as a source.
         */
        for (int i = insertPlace; i < sz; i++) {
            SsaInsn insn = toSchedule.get(i);

            /*
             * Move these n registers to the front, since they overwrite
             * nothing.
             */
            if (!checkRegUsed(regsUsedAsSources, insn.getResult())) {
                Collections.swap(toSchedule, i, insertPlace++);
            }
        }

        /*
         * If we've made no progress in this iteration, there's a
         * circular dependency. Split it using the temp reg.
         */
        if (oldInsertPlace == insertPlace) {

            SsaInsn insnToSplit = null;

            // Find an insn whose result is used as a source.
            for (int i = insertPlace; i < sz; i++) {
                SsaInsn insn = toSchedule.get(i);
                if (checkRegUsed(regsUsedAsSources, insn.getResult())
                        && checkRegUsed(regsUsedAsResults,
                                insn.getSources().get(0))) {

                    insnToSplit = insn;
                    /*
                     * We're going to split this insn; move it to the
                     * front.
                     */
                    Collections.swap(toSchedule, insertPlace, i);
                    break;
                }
            }

            // At least one insn will be set above.

            // Break the cycle: copy the result to a borrowed spare reg...
            RegisterSpec result = insnToSplit.getResult();
            RegisterSpec tempSpec = result.withReg(
                    parent.borrowSpareRegister(result.getCategory()));

            NormalSsaInsn toAdd = new NormalSsaInsn(
                    new PlainInsn(Rops.opMove(result.getType()),
                            SourcePosition.NO_INFO,
                            tempSpec,
                            insnToSplit.getSources()), this);

            toSchedule.add(insertPlace++, toAdd);

            // ...then rewrite the split insn to read from the spare reg.
            RegisterSpecList newSources = RegisterSpecList.make(tempSpec);

            NormalSsaInsn toReplace = new NormalSsaInsn(
                    new PlainInsn(Rops.opMove(result.getType()),
                            SourcePosition.NO_INFO,
                            result,
                            newSources), this);

            toSchedule.set(insertPlace, toReplace);

            // The size changed.
            sz = toSchedule.size();
        }

        regsUsedAsSources.clear();
        regsUsedAsResults.clear();
    }
}

/**
 * Adds {@code regV} to the live-out list for this block. This is called
 * by the liveness analyzer.
 *
 * @param regV register that is live-out for this block.
 */
public void addLiveOut (int regV) {
    if (liveOut == null) {
        liveOut = SetFactory.makeLivenessSet(parent.getRegCount());
    }

    liveOut.add(regV);
}

/**
 * Adds {@code regV} to the live-in list for this block. This is
 * called by the liveness analyzer.
 *
 * @param regV register that is live-in for this block.
 */
public void addLiveIn (int regV) {
    if (liveIn == null) {
        liveIn = SetFactory.makeLivenessSet(parent.getRegCount());
    }

    liveIn.add(regV);
}

/**
 * Returns the set of live-in registers. Valid after register
 * interference graph has been generated, otherwise empty.
 *
 * @return {@code non-null;} live-in register set.
*/ public IntSet getLiveInRegs() { if (liveIn == null) { liveIn = SetFactory.makeLivenessSet(parent.getRegCount()); } return liveIn; } /** * Returns the set of live-out registers. Valid after register * interference graph has been generated, otherwise empty. * * @return {@code non-null;} live-out register set */ public IntSet getLiveOutRegs() { if (liveOut == null) { liveOut = SetFactory.makeLivenessSet(parent.getRegCount()); } return liveOut; } /** * @return true if this is the one-and-only exit block for this method */ public boolean isExitBlock() { return index == parent.getExitBlockIndex(); } /** * Returns true if this block was last calculated to be reachable. * Recalculates reachability if value has never been computed. * * @return {@code true} if reachable */ public boolean isReachable() { if (reachable == -1) { parent.computeReachability(); } return (reachable == 1); } /** * Sets reachability of block to specified value * * @param reach new value of reachability for block */ public void setReachable(int reach) { reachable = reach; } /** * Sorts move instructions added via {@code addMoveToEnd} during * phi removal so that results don't overwrite sources that are used. * For use after all phis have been removed and all calls to * addMoveToEnd() have been made.<p> * * This is necessary because copy-propogation may have left us in a state * where the same basic block has the same register as a phi operand * and a result. In this case, the register in the phi operand always * refers value before any other phis have executed. 
 */
public void scheduleMovesFromPhis() {
    if (movesFromPhisAtBeginning > 1) {
        List<SsaInsn> toSchedule;

        toSchedule = insns.subList(0, movesFromPhisAtBeginning);

        scheduleUseBeforeAssigned(toSchedule);

        SsaInsn firstNonPhiMoveInsn = insns.get(movesFromPhisAtBeginning);

        /*
         * TODO: It's actually possible that this case never happens,
         * because a move-exception block, having only one predecessor
         * in SSA form, perhaps is never on a dominance frontier.
         */
        if (firstNonPhiMoveInsn.isMoveException()) {
            if (true) {
                /*
                 * We've yet to observe this case, and if it can
                 * occur the code written to handle it probably
                 * does not work.
                 */
                // NOTE: the else branch below is deliberately dead code.
                throw new RuntimeException(
                        "Unexpected: moves from "
                                +"phis before move-exception");
            } else {

                /*
                 * A move-exception insn must be placed first in this block
                 * We need to move it there, and deal with possible
                 * interference.
                 */
                boolean moveExceptionInterferes = false;

                int moveExceptionResult
                        = firstNonPhiMoveInsn.getResult().getReg();

                /*
                 * Does the move-exception result reg interfere with the
                 * phi moves?
                 */
                for (SsaInsn insn : toSchedule) {
                    if (insn.isResultReg(moveExceptionResult)
                            || insn.isRegASource(moveExceptionResult)) {
                        moveExceptionInterferes = true;
                        break;
                    }
                }

                if (!moveExceptionInterferes) {
                    // This is the easy case.
                    insns.remove(movesFromPhisAtBeginning);
                    insns.add(0, firstNonPhiMoveInsn);
                } else {
                    /*
                     * We need to move the result to a spare reg
                     * and move it back.
                     */
                    RegisterSpec originalResultSpec
                            = firstNonPhiMoveInsn.getResult();
                    int spareRegister = parent.borrowSpareRegister(
                            originalResultSpec.getCategory());

                    // We now move it to a spare register.
                    firstNonPhiMoveInsn.changeResultReg(spareRegister);
                    RegisterSpec tempSpec = firstNonPhiMoveInsn.getResult();

                    insns.add(0, firstNonPhiMoveInsn);

                    // And here we move it back.
                    NormalSsaInsn toAdd = new NormalSsaInsn(
                            new PlainInsn(
                                    Rops.opMove(tempSpec.getType()),
                                    SourcePosition.NO_INFO,
                                    originalResultSpec,
                                    RegisterSpecList.make(tempSpec)),
                            this);

                    /*
                     * Place it immediately after the phi-moves,
                     * overwriting the move-exception that was there.
                     */
                    insns.set(movesFromPhisAtBeginning + 1, toAdd);
                }
            }
        }
    }

    if (movesFromPhisAtEnd > 1) {
        // Schedule the moves parked just before the final (branching) insn.
        scheduleUseBeforeAssigned(
                insns.subList(insns.size() - movesFromPhisAtEnd - 1,
                        insns.size() - 1));
    }

    // Return registers borrowed here and in scheduleUseBeforeAssigned().
    parent.returnSpareRegisters();
}

/**
 * Visits all insns in this block.
 *
 * @param visitor {@code non-null;} callback interface
 */
public void forEachInsn(SsaInsn.Visitor visitor) {
    // This gets called a LOT, and not using an iterator
    // saves a lot of allocations and reduces memory usage
    int len = insns.size();
    for (int i = 0; i < len; i++) {
        insns.get(i).accept(visitor);
    }
}

/** {@inheritDoc} */
@Override
public String toString() {
    return "{" + index + ":" + Hex.u2(ropLabel) + '}';
}

/**
 * Visitor interface for basic blocks.
 */
public interface Visitor {
    /**
     * Indicates a block has been visited by an iterator method.
     *
     * @param v {@code non-null;} block visited
     * @param parent {@code null-ok;} parent node if applicable
     */
    void visitBlock(SsaBasicBlock v, SsaBasicBlock parent);
}

/**
 * Label comparator.
 */
public static final class LabelComparator
        implements Comparator<SsaBasicBlock> {
    /** {@inheritDoc} */
    public int compare(SsaBasicBlock b1, SsaBasicBlock b2) {
        int label1 = b1.ropLabel;
        int label2 = b2.ropLabel;

        if (label1 < label2) {
            return -1;
        } else if (label1 > label2) {
            return 1;
        } else {
            return 0;
        }
    }
}
}
package org.jetbrains.plugins.ipnb.format; import com.google.common.collect.Lists; import com.google.gson.*; import com.google.gson.annotations.SerializedName; import com.google.gson.internal.LinkedTreeMap; import com.google.gson.reflect.TypeToken; import com.google.gson.stream.JsonWriter; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Document; import com.intellij.openapi.module.Module; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectUtil; import com.intellij.openapi.projectRoots.Sdk; import com.intellij.openapi.roots.ProjectRootManager; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.util.text.VersionComparatorUtil; import com.jetbrains.python.packaging.PyPackage; import com.jetbrains.python.packaging.PyPackageUtil; import com.jetbrains.python.sdk.PythonSdkType; import org.apache.commons.lang.math.NumberUtils; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.plugins.ipnb.editor.panels.IpnbEditablePanel; import org.jetbrains.plugins.ipnb.editor.panels.IpnbFilePanel; import org.jetbrains.plugins.ipnb.format.cells.*; import org.jetbrains.plugins.ipnb.format.cells.output.*; import java.io.*; import java.lang.reflect.Type; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; public class IpnbParser { private static final Logger LOG = Logger.getInstance(IpnbParser.class); private static final Gson gson = initGson(); private static final List<String> myErrors = new ArrayList<>(); private static final String VALIDATION_ERROR_TEXT = "An invalid notebook may not function properly. 
The validation error was:"; private static final String VALIDATION_ERROR_TITLE = "Notebook Validation Failed"; @NotNull private static Gson initGson() { final GsonBuilder builder = new GsonBuilder().setPrettyPrinting().disableHtmlEscaping() .registerTypeAdapter(IpnbCellRaw.class, new RawCellAdapter()) .registerTypeAdapter(IpnbFileRaw.class, new FileAdapter()) .registerTypeAdapter(CellOutputRaw.class, new OutputsAdapter()) .registerTypeAdapter(OutputDataRaw.class, new OutputDataAdapter()) .registerTypeAdapter(CellOutputRaw.class, new CellOutputDeserializer()) .registerTypeAdapter(OutputDataRaw.class, new OutputDataDeserializer()) .registerTypeAdapter(IpnbCellRaw.class, new CellRawDeserializer()).serializeNulls(); return builder.create(); } @NotNull public static IpnbFile parseIpnbFile(@NotNull final CharSequence fileText, @NotNull final VirtualFile virtualFile) { myErrors.clear(); final String path = virtualFile.getPath(); IpnbFileRaw rawFile = gson.fromJson(fileText.toString(), IpnbFileRaw.class); if (rawFile == null) { int nbformat = isIpythonNewFormat(virtualFile) ? 
4 : 3; return new IpnbFile(new HashMap<>(), nbformat, 0, Lists.newArrayList(), path); } List<IpnbCell> cells = new ArrayList<>(); final IpnbWorksheet[] worksheets = rawFile.worksheets; if (worksheets == null) { for (IpnbCellRaw rawCell : rawFile.cells) { cells.add(rawCell.createCell(validateSource(rawCell))); } } else { for (IpnbWorksheet worksheet : worksheets) { final List<IpnbCellRaw> rawCells = worksheet.cells; for (IpnbCellRaw rawCell : rawCells) { cells.add(rawCell.createCell(validateSource(rawCell))); } } } showValidationMessage(); return new IpnbFile(rawFile.metadata, rawFile.nbformat, rawFile.nbformat_minor, cells, path); } private static boolean validateSource(IpnbCellRaw cell) { if (cell.source == null && cell.input == null) { final String error = VALIDATION_ERROR_TEXT + "\n" + "\"source\" or \"input\" is required property:\n" + cell; myErrors.add(error); LOG.warn(error); return false; } return true; } private static void showValidationMessage() { if (!myErrors.isEmpty()) { Messages.showWarningDialog(myErrors.get(0), VALIDATION_ERROR_TITLE); } } public static boolean isIpythonNewFormat(@NotNull final VirtualFile virtualFile) { final Project project = ProjectUtil.guessProjectForFile(virtualFile); if (project != null) { final Module module = ProjectRootManager.getInstance(project).getFileIndex().getModuleForFile(virtualFile); if (module != null) { final Sdk sdk = PythonSdkType.findPythonSdk(module); if (sdk != null) { // It should be called first before IpnbConnectionManager#startIpythonServer() final List<PyPackage> packages = PyPackageUtil.refreshAndGetPackagesModally(sdk); final PyPackage ipython = packages != null ? PyPackageUtil.findPackage(packages, "ipython") : null; final PyPackage jupyter = packages != null ? 
PyPackageUtil.findPackage(packages, "jupyter") : null; if (jupyter == null && ipython != null && VersionComparatorUtil.compare(ipython.getVersion(), "3.0") <= 0) { return false; } } } } return true; } @NotNull public static IpnbFile parseIpnbFile(@NotNull Document document, @NotNull final VirtualFile virtualFile) throws IOException { return parseIpnbFile(document.getImmutableCharSequence(), virtualFile); } public static void saveIpnbFile(@NotNull final IpnbFilePanel ipnbPanel) { final String json = newDocumentText(ipnbPanel); if (json == null) return; writeToFile(ipnbPanel.getIpnbFile().getPath(), json); } @Nullable public static String newDocumentText(@NotNull final IpnbFilePanel ipnbPanel) { final IpnbFile ipnbFile = ipnbPanel.getIpnbFile(); if (ipnbFile == null) return null; for (IpnbEditablePanel panel : ipnbPanel.getIpnbPanels()) { if (panel.isModified()) { panel.updateCellSource(); } } final IpnbFileRaw fileRaw = new IpnbFileRaw(); fileRaw.nbformat_minor = ipnbFile.getNbFormatMinor(); fileRaw.metadata = ipnbFile.getMetadata(); if (ipnbFile.getNbformat() == 4) { for (IpnbCell cell : ipnbFile.getCells()) { fileRaw.cells.add(IpnbCellRaw.fromCell(cell, ipnbFile.getNbformat())); } } else { final IpnbWorksheet worksheet = new IpnbWorksheet(); worksheet.cells.clear(); for (IpnbCell cell : ipnbFile.getCells()) { worksheet.cells.add(IpnbCellRaw.fromCell(cell, ipnbFile.getNbformat())); } fileRaw.worksheets = new IpnbWorksheet[]{worksheet}; } final StringWriter stringWriter = new StringWriter(); final JsonWriter writer = new JsonWriter(stringWriter); writer.setIndent(" "); gson.toJson(fileRaw, fileRaw.getClass(), writer); return stringWriter.toString() +"\n"; } private static void writeToFile(@NotNull final String path, @NotNull final String json) { final File file = new File(path); try { final FileOutputStream fileOutputStream = new FileOutputStream(file); final OutputStreamWriter writer = new OutputStreamWriter(fileOutputStream, Charset.forName("UTF-8").newEncoder()); 
try { writer.write(json); } catch (IOException e) { LOG.error(e); } finally { try { writer.close(); fileOutputStream.close(); } catch (IOException e) { LOG.error(e); } } } catch (FileNotFoundException e) { LOG.error(e); } } @SuppressWarnings("unused") public static class IpnbFileRaw { IpnbWorksheet[] worksheets; List<IpnbCellRaw> cells = new ArrayList<>(); Map<String, Object> metadata = new HashMap<>(); int nbformat = 4; int nbformat_minor; } private static class IpnbWorksheet { final List<IpnbCellRaw> cells = new ArrayList<>(); } @SuppressWarnings("unused") private static class IpnbCellRaw { String cell_type; Integer execution_count; Map<String, Object> metadata = new HashMap<>(); Integer level; List<CellOutputRaw> outputs; List<String> source; List<String> input; String language; Integer prompt_number; @Override public String toString() { return new GsonBuilder().setPrettyPrinting().create().toJson(this); } public static IpnbCellRaw fromCell(@NotNull final IpnbCell cell, int nbformat) { final IpnbCellRaw raw = new IpnbCellRaw(); if (cell instanceof IpnbEditableCell) { raw.metadata = ((IpnbEditableCell)cell).getMetadata(); } if (cell instanceof IpnbMarkdownCell) { raw.cell_type = "markdown"; raw.source = ((IpnbMarkdownCell)cell).getSource(); } else if (cell instanceof IpnbCodeCell) { raw.cell_type = "code"; final ArrayList<CellOutputRaw> outputRaws = new ArrayList<>(); for (IpnbOutputCell outputCell : ((IpnbCodeCell)cell).getCellOutputs()) { outputRaws.add(CellOutputRaw.fromOutput(outputCell, nbformat)); } raw.outputs = outputRaws; final Integer promptNumber = ((IpnbCodeCell)cell).getPromptNumber(); if (nbformat == 4) { raw.execution_count = promptNumber != null && promptNumber >= 0 ? promptNumber : null; raw.source = ((IpnbCodeCell)cell).getSource(); } else { raw.prompt_number = promptNumber != null && promptNumber >= 0 ? 
promptNumber : null; raw.language = ((IpnbCodeCell)cell).getLanguage(); raw.input = ((IpnbCodeCell)cell).getSource(); } } else if (cell instanceof IpnbRawCell) { raw.cell_type = "raw"; raw.source = ((IpnbRawCell)cell).getSource(); } else if (cell instanceof IpnbHeadingCell) { raw.cell_type = "heading"; raw.source = ((IpnbHeadingCell)cell).getSource(); raw.level = ((IpnbHeadingCell)cell).getLevel(); } return raw; } @Nullable public IpnbCell createCell(boolean isValidSource) { final IpnbCell cell; if (cell_type.equals("markdown")) { cell = new IpnbMarkdownCell(isValidSource ? source : new ArrayList<>(), metadata); } else if (cell_type.equals("code")) { final List<IpnbOutputCell> outputCells = new ArrayList<>(); for (CellOutputRaw outputRaw : outputs) { outputCells.add(outputRaw.createOutput()); } final Integer prompt = prompt_number != null ? prompt_number : execution_count; cell = new IpnbCodeCell(language == null ? "python" : language, input == null ? (isValidSource ? source : new ArrayList<>()) : input, prompt, outputCells, metadata); } else if (cell_type.equals("raw")) { cell = new IpnbRawCell(isValidSource ? source : new ArrayList<>()); } else if (cell_type.equals("heading")) { cell = new IpnbHeadingCell(isValidSource ? 
source : new ArrayList<>(), level, metadata); } else { cell = null; } return cell; } } private static class CellOutputRaw { String ename; String name; String evalue; OutputDataRaw data; Integer execution_count; String png; String stream; String jpeg; List<String> html; List<String> latex; List<String> svg; Integer prompt_number; List<String> traceback; Map<String, Object> metadata; String output_type; List<String> text; public static CellOutputRaw fromOutput(@NotNull final IpnbOutputCell outputCell, int nbformat) { final CellOutputRaw raw = new CellOutputRaw(); raw.metadata = outputCell.getMetadata(); if (raw.metadata == null && !(outputCell instanceof IpnbStreamOutputCell) && !(outputCell instanceof IpnbErrorOutputCell)) { raw.metadata = new HashMap<>(); } if (outputCell instanceof IpnbPngOutputCell) { if (nbformat == 4) { final OutputDataRaw dataRaw = new OutputDataRaw(); dataRaw.png = ((IpnbPngOutputCell)outputCell).getBase64String(); dataRaw.text = outputCell.getText(); raw.data = dataRaw; raw.execution_count = outputCell.getPromptNumber(); raw.output_type = outputCell.getPromptNumber() != null ? "execute_result" : "display_data"; } else { raw.png = ((IpnbPngOutputCell)outputCell).getBase64String(); raw.text = outputCell.getText(); } } else if (outputCell instanceof IpnbSvgOutputCell) { if (nbformat == 4) { final OutputDataRaw dataRaw = new OutputDataRaw(); dataRaw.text = outputCell.getText(); dataRaw.svg = ((IpnbSvgOutputCell)outputCell).getSvg(); raw.data = dataRaw; raw.execution_count = outputCell.getPromptNumber(); raw.output_type = outputCell.getPromptNumber() != null ? 
"execute_result" : "display_data"; } else { raw.svg = ((IpnbSvgOutputCell)outputCell).getSvg(); raw.text = outputCell.getText(); } } else if (outputCell instanceof IpnbJpegOutputCell) { if (nbformat == 4) { final OutputDataRaw dataRaw = new OutputDataRaw(); dataRaw.text = outputCell.getText(); dataRaw.jpeg = Lists.newArrayList(((IpnbJpegOutputCell)outputCell).getBase64String()); raw.data = dataRaw; } else { raw.jpeg = ((IpnbJpegOutputCell)outputCell).getBase64String(); raw.text = outputCell.getText(); } } else if (outputCell instanceof IpnbLatexOutputCell) { if (nbformat == 4) { final OutputDataRaw dataRaw = new OutputDataRaw(); dataRaw.text = outputCell.getText(); if (((IpnbLatexOutputCell)outputCell).isMarkdown()) { dataRaw.markdown = ((IpnbLatexOutputCell)outputCell).getLatex(); } else { dataRaw.latex = ((IpnbLatexOutputCell)outputCell).getLatex(); } raw.data = dataRaw; raw.execution_count = outputCell.getPromptNumber(); raw.output_type = outputCell.getPromptNumber() != null ? "execute_result" : "display_data"; } else { raw.latex = ((IpnbLatexOutputCell)outputCell).getLatex(); raw.text = outputCell.getText(); raw.prompt_number = outputCell.getPromptNumber(); } } else if (outputCell instanceof IpnbStreamOutputCell) { if (nbformat == 4) { raw.name = ((IpnbStreamOutputCell)outputCell).getStream(); } else { raw.stream = ((IpnbStreamOutputCell)outputCell).getStream(); } raw.text = outputCell.getText(); raw.output_type = "stream"; } else if (outputCell instanceof IpnbHtmlOutputCell) { if (nbformat == 4) { final OutputDataRaw dataRaw = new OutputDataRaw(); dataRaw.html = ((IpnbHtmlOutputCell)outputCell).getHtmls(); dataRaw.text = outputCell.getText(); raw.data = dataRaw; raw.execution_count = outputCell.getPromptNumber(); } else { raw.html = ((IpnbHtmlOutputCell)outputCell).getHtmls(); } raw.output_type = nbformat == 4 ? "execute_result" : "pyout"; } else if (outputCell instanceof IpnbErrorOutputCell) { raw.output_type = nbformat == 4 ? 
"error" : "pyerr"; raw.evalue = ((IpnbErrorOutputCell)outputCell).getEvalue(); raw.ename = ((IpnbErrorOutputCell)outputCell).getEname(); raw.traceback = outputCell.getText(); } else if (outputCell instanceof IpnbOutOutputCell) { if (nbformat == 4) { raw.execution_count = outputCell.getPromptNumber(); raw.output_type = "execute_result"; final OutputDataRaw dataRaw = new OutputDataRaw(); dataRaw.text = outputCell.getText(); raw.data = dataRaw; } else { raw.output_type = "pyout"; raw.prompt_number = outputCell.getPromptNumber(); raw.text = outputCell.getText(); } } else { raw.text = outputCell.getText(); } return raw; } public IpnbOutputCell createOutput() { List<String> text = this.text != null ? this.text : data != null ? data.text : Lists.newArrayList(); Integer prompt = execution_count != null ? execution_count : prompt_number; final IpnbOutputCell outputCell; if (png != null || (data != null && data.png != null)) { outputCell = new IpnbPngOutputCell(png == null ? StringUtil.join(data.png) : png, text, prompt, metadata); } else if (jpeg != null || (data != null && data.jpeg != null)) { outputCell = new IpnbJpegOutputCell(jpeg == null ? StringUtil.join(data.jpeg, "") : jpeg, text, prompt, metadata); } else if (svg != null || (data != null && data.svg != null)) { outputCell = new IpnbSvgOutputCell(svg == null ? data.svg : svg, text, prompt, metadata); } else if (html != null || (data != null && data.html != null)) { outputCell = new IpnbHtmlOutputCell(html == null ? data.html : html, text, prompt, metadata); } else if (latex != null || (data != null && data.latex != null)) { outputCell = new IpnbLatexOutputCell(latex == null ? data.latex : latex, false, prompt, text, metadata); } else if (data != null && data.markdown != null) { outputCell = new IpnbLatexOutputCell(data.markdown, true, prompt, text, metadata); } else if (stream != null || name != null) { outputCell = new IpnbStreamOutputCell(stream == null ? 
// (continuation of createOutput(): stream-cell constructor split across the chunk boundary)
name : stream, text, prompt, metadata);
}
else if ("pyerr".equals(output_type) || "error".equals(output_type)) {
  outputCell = new IpnbErrorOutputCell(evalue, ename, traceback, prompt, metadata);
}
else if ("pyout".equals(output_type)) {
  outputCell = new IpnbOutOutputCell(text, prompt, metadata);
}
else if ("execute_result".equals(output_type) && data != null) {
  outputCell = new IpnbOutOutputCell(data.text, prompt, metadata);
}
else if ("display_data".equals(output_type)) {
  // display_data with no recognized media payload: fall back to an (empty) PNG cell.
  outputCell = new IpnbPngOutputCell(null, text, prompt, metadata);
}
else {
  outputCell = new IpnbOutputCell(text, prompt, metadata);
}
return outputCell;
}
}

// Raw MIME "data" bundle of a v4 output; field names follow the notebook JSON keys.
private static class OutputDataRaw {
  @SerializedName("image/png") String png;
  @SerializedName("text/html") List<String> html;
  @SerializedName("image/svg+xml") List<String> svg;
  @SerializedName("image/jpeg") List<String> jpeg;
  @SerializedName("text/latex") List<String> latex;
  @SerializedName("text/plain") List<String> text;
  @SerializedName("text/markdown") List<String> markdown;
}

// Serializes one cell, emitting only fields that are present so v3 and v4
// notebooks keep their respective key sets. JsonObject preserves insertion order.
static class RawCellAdapter implements JsonSerializer<IpnbCellRaw> {
  @Override
  public JsonElement serialize(IpnbCellRaw cellRaw, Type typeOfSrc, JsonSerializationContext context) {
    final JsonObject jsonObject = new JsonObject();
    jsonObject.addProperty("cell_type", cellRaw.cell_type);
    if ("code".equals(cellRaw.cell_type)) {
      // Code cells always carry execution_count, even when it is null.
      final Integer count = cellRaw.execution_count;
      if (count == null) {
        jsonObject.add("execution_count", JsonNull.INSTANCE);
      }
      else {
        jsonObject.addProperty("execution_count", count);
      }
    }
    if (cellRaw.metadata != null) {
      final JsonElement metadata = gson.toJsonTree(cellRaw.metadata);
      jsonObject.add("metadata", metadata);
    }
    if (cellRaw.level != null) {
      jsonObject.addProperty("level", cellRaw.level);
    }
    if (cellRaw.outputs != null) {
      final JsonElement outputs = gson.toJsonTree(cellRaw.outputs);
      jsonObject.add("outputs", outputs);
    }
    if (cellRaw.source != null) {
      final JsonElement source = gson.toJsonTree(cellRaw.source);
      jsonObject.add("source", source);
    }
    if (cellRaw.input != null) {
      final JsonElement input = gson.toJsonTree(cellRaw.input);
      jsonObject.add("input", input);
    }
    if (cellRaw.language != null) {
      jsonObject.addProperty("language", cellRaw.language);
    }
    if (cellRaw.prompt_number != null) {
      jsonObject.addProperty("prompt_number", cellRaw.prompt_number);
    }
    return jsonObject;
  }
}

// Round-trips the whole notebook file, supporting both v3 ("worksheets") and v4 ("cells") layouts.
static class FileAdapter implements JsonSerializer<IpnbFileRaw>, JsonDeserializer<IpnbFileRaw> {
  @Override
  public JsonElement serialize(IpnbFileRaw fileRaw, Type typeOfSrc, JsonSerializationContext context) {
    final JsonObject jsonObject = new JsonObject();
    if (fileRaw.worksheets != null) {
      final JsonElement worksheets = gson.toJsonTree(fileRaw.worksheets);
      jsonObject.add("worksheets", worksheets);
    }
    if (fileRaw.cells != null) {
      final JsonElement cells = gson.toJsonTree(fileRaw.cells, new TypeToken<List<IpnbCellRaw>>(){}.getType());
      jsonObject.add("cells", cells);
    }
    final JsonElement metadata = gson.toJsonTree(fileRaw.metadata);
    jsonObject.add("metadata", metadata);
    jsonObject.addProperty("nbformat", fileRaw.nbformat);
    jsonObject.addProperty("nbformat_minor", fileRaw.nbformat_minor);
    return jsonObject;
  }

  @Override
  public IpnbFileRaw deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException {
    JsonObject object = json.getAsJsonObject();
    IpnbFileRaw fileRaw = new IpnbFileRaw();
    JsonElement worksheets = object.get("worksheets");
    if (worksheets != null) {
      fileRaw.worksheets = gson.fromJson(worksheets, new TypeToken<List<IpnbWorksheet>>(){}.getType());
    }
    JsonElement cellsElement = object.get("cells");
    if (cellsElement != null) {
      fileRaw.cells = gson.fromJson(cellsElement, new TypeToken<List<IpnbCellRaw>>(){}.getType());
    }
    JsonElement metadataElement = object.get("metadata");
    if (metadataElement != null) {
      LinkedTreeMap<String, Object> metadataMap = gson.fromJson(metadataElement, new TypeToken<Map<String, Object>>(){}.getType());
      JsonElement kernelInfo =
// (continuation of FileAdapter.deserialize: typed re-parse of nested metadata maps)
metadataElement.getAsJsonObject().get("kernel_info");
      if (kernelInfo != null) {
        metadataMap.put("kernel_info", gson.fromJson(kernelInfo, new TypeToken<Map<String, String>>() {}.getType()));
      }
      JsonElement languageInfo = metadataElement.getAsJsonObject().get("language_info");
      if (languageInfo != null) {
        LinkedTreeMap<String, Object> languageInfoMap = gson.fromJson(languageInfo, new TypeToken<Map<String, Object>>() {}.getType());
        JsonElement codemirrorMode = languageInfo.getAsJsonObject().get("codemirror_mode");
        if (codemirrorMode != null) {
          LinkedTreeMap<String, Object> codemirrorModeMap = gson.fromJson(codemirrorMode, new TypeToken<Map<String, String>>() {}.getType());
          if (codemirrorModeMap.containsKey("version")) {
            // Keep "version" numeric in memory so it round-trips as a JSON number, not a string.
            String version = (String)codemirrorModeMap.get("version");
            if (NumberUtils.isNumber(version)) {
              try {
                codemirrorModeMap.put("version", Integer.parseInt(version));
              }
              catch (NumberFormatException e) {
                // added this to obtain backward compatibility as previously we parsed "version" as double.
                codemirrorModeMap.put("version", (int) Double.parseDouble(version));
              }
            }
          }
          languageInfoMap.put("codemirror_mode", codemirrorModeMap);
        }
        metadataMap.put("language_info", languageInfoMap);
      }
      fileRaw.metadata = metadataMap;
    }
    JsonElement nbformat = object.get("nbformat");
    if (nbformat != null) {
      fileRaw.nbformat = nbformat.getAsInt();
    }
    JsonElement nbformatMinor = object.get("nbformat_minor");
    if (nbformatMinor != null) {
      fileRaw.nbformat_minor = nbformatMinor.getAsInt();
    }
    return fileRaw;
  }
}

// Deserializes one cell; tolerant of both v3 (input/prompt_number) and
// v4 (source/execution_count) field sets.
static class CellRawDeserializer implements JsonDeserializer<IpnbCellRaw> {
  @Override
  public IpnbCellRaw deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException {
    final JsonObject object = json.getAsJsonObject();
    final IpnbCellRaw cellRaw = new IpnbCellRaw();
    final JsonElement cell_type = object.get("cell_type");
    if (cell_type != null) {
      cellRaw.cell_type = cell_type.getAsString();
    }
    final JsonElement count = object.get("execution_count");
    if (count != null) {
      // execution_count may legally be JSON null (cell never executed).
      cellRaw.execution_count = count.isJsonNull() ?
// (continuation of CellRawDeserializer.deserialize)
null : count.getAsInt();
    }
    final JsonElement metadata = object.get("metadata");
    if (metadata != null) {
      cellRaw.metadata = gson.fromJson(metadata, Map.class);
    }
    final JsonElement level = object.get("level");
    if (level != null) {
      cellRaw.level = level.getAsInt();
    }
    final JsonElement outputsElement = object.get("outputs");
    if (outputsElement != null) {
      final JsonArray outputs = outputsElement.getAsJsonArray();
      cellRaw.outputs = Lists.newArrayList();
      for (JsonElement output : outputs) {
        cellRaw.outputs.add(gson.fromJson(output, CellOutputRaw.class));
      }
    }
    cellRaw.source = getStringOrArray("source", object);
    cellRaw.input = getStringOrArray("input", object);
    final JsonElement language = object.get("language");
    if (language != null) {
      cellRaw.language = language.getAsString();
    }
    final JsonElement number = object.get("prompt_number");
    if (number != null) {
      // "*" means the cell is still executing — store no number.
      if ("*".equals(number.getAsString())) {
        cellRaw.prompt_number = null;
      }
      else {
        cellRaw.prompt_number = number.getAsInt();
      }
    }
    return cellRaw;
  }
}

// Deserializes a v4 MIME "data" bundle; each entry may be a single string
// or an array of line strings.
static class OutputDataDeserializer implements JsonDeserializer<OutputDataRaw> {
  @Override
  public OutputDataRaw deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException {
    final JsonObject object = json.getAsJsonObject();
    final OutputDataRaw dataRaw = new OutputDataRaw();
    final JsonElement png = object.get("image/png");
    if (png instanceof JsonArray) {
      // Base64 PNG split over several lines: concatenate back into one string.
      final JsonArray array = png.getAsJsonArray();
      StringBuilder pngString = new StringBuilder();
      for (int i = 0; i != array.size(); ++i) {
        pngString.append(array.get(i).getAsString());
      }
      dataRaw.png = pngString.toString();
    }
    else if (png instanceof JsonPrimitive) {
      dataRaw.png = png.getAsString();
    }
    dataRaw.html = getStringOrArray("text/html", object);
    dataRaw.svg = getStringOrArray("image/svg+xml", object);
    dataRaw.jpeg = getStringOrArray("image/jpeg", object);
    dataRaw.latex = getStringOrArray("text/latex", object);
    dataRaw.markdown = getStringOrArray("text/markdown", object);
dataRaw.text = getStringOrArray("text/plain", object); return dataRaw; } } static class CellOutputDeserializer implements JsonDeserializer<CellOutputRaw> { @Override public CellOutputRaw deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException { final JsonObject object = json.getAsJsonObject(); final CellOutputRaw cellOutputRaw = new CellOutputRaw(); final JsonElement ename = object.get("ename"); if (ename != null) { cellOutputRaw.ename = ename.getAsString(); } final JsonElement name = object.get("name"); if (name != null) { cellOutputRaw.name = name.getAsString(); } final JsonElement evalue = object.get("evalue"); if (evalue != null) { cellOutputRaw.evalue = evalue.getAsString(); } final JsonElement data = object.get("data"); if (data != null) { cellOutputRaw.data = gson.fromJson(data, OutputDataRaw.class); } final JsonElement count = object.get("execution_count"); if (count != null) { cellOutputRaw.execution_count = count.getAsInt(); } final JsonElement outputType = object.get("output_type"); if (outputType != null) { cellOutputRaw.output_type = outputType.getAsString(); } final JsonElement png = object.get("png"); if (png != null) { cellOutputRaw.png = png.getAsString(); } final JsonElement stream = object.get("stream"); if (stream != null) { cellOutputRaw.stream = stream.getAsString(); } final JsonElement jpeg = object.get("jpeg"); if (jpeg != null) { cellOutputRaw.jpeg = jpeg.getAsString(); } cellOutputRaw.html = getStringOrArray("html", object); cellOutputRaw.latex = getStringOrArray("latex", object); cellOutputRaw.svg = getStringOrArray("svg", object); final JsonElement promptNumber = object.get("prompt_number"); if (promptNumber != null) { cellOutputRaw.prompt_number = promptNumber.getAsInt(); } cellOutputRaw.text = getStringOrArray("text", object); cellOutputRaw.traceback = getStringOrArray("traceback", object); final JsonElement metadata = object.get("metadata"); if (metadata != null) { 
cellOutputRaw.metadata = gson.fromJson(metadata, Map.class); } return cellOutputRaw; } } @Nullable private static ArrayList<String> getStringOrArray(String name, JsonObject object) { final JsonElement jsonElement = object.get(name); final ArrayList<String> strings = Lists.newArrayList(); if (jsonElement == null) return null; if (jsonElement.isJsonArray()) { final JsonArray array = jsonElement.getAsJsonArray(); for (JsonElement element : array) { strings.add(element.getAsString()); } } else { strings.add(jsonElement.getAsString()); } return strings; } static class OutputsAdapter implements JsonSerializer<CellOutputRaw> { @Override public JsonElement serialize(CellOutputRaw cellRaw, Type typeOfSrc, JsonSerializationContext context) { final JsonObject jsonObject = new JsonObject(); if (cellRaw.ename != null) { jsonObject.addProperty("ename", cellRaw.ename); } if (cellRaw.name != null) { jsonObject.addProperty("name", cellRaw.name); } if (cellRaw.evalue != null) { jsonObject.addProperty("evalue", cellRaw.evalue); } if (cellRaw.data != null) { final JsonElement data = gson.toJsonTree(cellRaw.data); jsonObject.add("data", data); } if (cellRaw.execution_count != null) { jsonObject.addProperty("execution_count", cellRaw.execution_count); } if (cellRaw.png != null) { jsonObject.addProperty("png", cellRaw.png); } if (cellRaw.stream != null) { jsonObject.addProperty("stream", cellRaw.stream); } if (cellRaw.jpeg != null) { jsonObject.addProperty("jpeg", cellRaw.jpeg); } if (cellRaw.html != null) { final JsonElement html = gson.toJsonTree(cellRaw.html); jsonObject.add("html", html); } if (cellRaw.latex != null) { final JsonElement latex = gson.toJsonTree(cellRaw.latex); jsonObject.add("latex", latex); } if (cellRaw.svg != null) { final JsonElement svg = gson.toJsonTree(cellRaw.svg); jsonObject.add("svg", svg); } if (cellRaw.prompt_number != null) { jsonObject.addProperty("prompt_number", cellRaw.prompt_number); } if (cellRaw.traceback != null) { final JsonElement traceback = 
// (continuation of OutputsAdapter.serialize)
gson.toJsonTree(cellRaw.traceback);
      jsonObject.add("traceback", traceback);
    }
    if (cellRaw.metadata != null) {
      final JsonElement metadata = gson.toJsonTree(cellRaw.metadata);
      jsonObject.add("metadata", metadata);
    }
    if (cellRaw.output_type != null) {
      jsonObject.addProperty("output_type", cellRaw.output_type);
    }
    if (cellRaw.text != null) {
      final JsonElement text = gson.toJsonTree(cellRaw.text);
      jsonObject.add("text", text);
    }
    return jsonObject;
  }
}

// Serializes a v4 MIME bundle back under its notebook JSON keys; only present entries are emitted.
static class OutputDataAdapter implements JsonSerializer<OutputDataRaw> {
  @Override
  public JsonElement serialize(OutputDataRaw cellRaw, Type typeOfSrc, JsonSerializationContext context) {
    final JsonObject jsonObject = new JsonObject();
    if (cellRaw.png != null) {
      jsonObject.addProperty("image/png", cellRaw.png);
    }
    if (cellRaw.html != null) {
      final JsonElement html = gson.toJsonTree(cellRaw.html);
      jsonObject.add("text/html", html);
    }
    if (cellRaw.svg != null) {
      final JsonElement svg = gson.toJsonTree(cellRaw.svg);
      jsonObject.add("image/svg+xml", svg);
    }
    if (cellRaw.jpeg != null) {
      final JsonElement jpeg = gson.toJsonTree(cellRaw.jpeg);
      jsonObject.add("image/jpeg", jpeg);
    }
    if (cellRaw.latex != null) {
      final JsonElement latex = gson.toJsonTree(cellRaw.latex);
      jsonObject.add("text/latex", latex);
    }
    if (cellRaw.markdown != null) {
      final JsonElement markdown = gson.toJsonTree(cellRaw.markdown);
      jsonObject.add("text/markdown", markdown);
    }
    if (cellRaw.text != null) {
      final JsonElement text = gson.toJsonTree(cellRaw.text);
      jsonObject.add("text/plain", text);
    }
    return jsonObject;
  }
}
}
/** * Copyright 2014 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package rx.observables; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static rx.observables.StringObservable.byLine; import static rx.observables.StringObservable.decode; import static rx.observables.StringObservable.encode; import static rx.observables.StringObservable.from; import static rx.observables.StringObservable.join; import static rx.observables.StringObservable.split; import static rx.observables.StringObservable.using; import java.io.ByteArrayInputStream; import java.io.FilterReader; import java.io.IOException; import java.io.Reader; import java.io.StringReader; import java.nio.charset.Charset; import java.nio.charset.CharsetDecoder; import java.nio.charset.MalformedInputException; import java.util.Arrays; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import org.junit.Test; import rx.Observable; import rx.Observer; import rx.functions.Func1; import rx.observables.StringObservable.Line; import 
rx.observables.StringObservable.UnsafeFunc0;
import rx.observers.TestObserver;
import rx.observers.TestSubscriber;

// Unit tests for StringObservable: charset decoding/encoding, split/join, line
// splitting, and resource-managed reads from InputStream/Reader sources.
public class StringObservableTest {

  // A two-byte UTF-8 code point split across two upstream byte chunks must decode as one char.
  @Test
  public void testMultibyteSpanningTwoBuffers() {
    Observable<byte[]> src = Observable.just(new byte[] { (byte) 0xc2 }, new byte[] { (byte) 0xa1 });
    String out = StringObservable.decode(src, "UTF-8").toBlocking().single();
    assertEquals("\u00A1", out);
  }

  @Test
  public void testMalformedAtTheEndReplace() {
    Observable<byte[]> src = Observable.just(new byte[] { (byte) 0xc2 });
    String out = decode(src, "UTF-8").toBlocking().single();
    // REPLACEMENT CHARACTER
    assertEquals("\uFFFD", out);
  }

  @Test
  public void testMalformedInTheMiddleReplace() {
    Observable<byte[]> src = Observable.just(new byte[] { (byte) 0xc2, 65 });
    String out = decode(src, "UTF-8").toBlocking().single();
    // REPLACEMENT CHARACTER
    assertEquals("\uFFFDA", out);
  }

  // With an explicit CharsetDecoder, malformed input must surface as an error
  // instead of being replaced.
  @Test(expected = RuntimeException.class)
  public void testMalformedAtTheEndReport() {
    Observable<byte[]> src = Observable.just(new byte[] { (byte) 0xc2 });
    CharsetDecoder charsetDecoder = Charset.forName("UTF-8").newDecoder();
    decode(src, charsetDecoder).toBlocking().single();
  }

  @Test(expected = RuntimeException.class)
  public void testMalformedInTheMiddleReport() {
    Observable<byte[]> src = Observable.just(new byte[] { (byte) 0xc2, 65 });
    CharsetDecoder charsetDecoder = Charset.forName("UTF-8").newDecoder();
    decode(src, charsetDecoder).toBlocking().single();
  }

  // Upstream errors must pass through decode() with the original cause preserved.
  @Test
  public void testPropogateError() {
    Observable<byte[]> src = Observable.just(new byte[] { 65 });
    Observable<byte[]> err = Observable.error(new IOException());
    CharsetDecoder charsetDecoder = Charset.forName("UTF-8").newDecoder();
    try {
      decode(Observable.concat(src, err), charsetDecoder).toList().toBlocking().single();
      fail();
    }
    catch (RuntimeException e) {
      assertEquals(IOException.class, e.getCause().getClass());
    }
  }

  @Test
  public void testPropogateErrorInTheMiddleOfMultibyte() {
    Observable<byte[]> src = Observable.just(new byte[] { (byte) 0xc2 });
    Observable<byte[]> err = Observable.error(new IOException());
    CharsetDecoder charsetDecoder = Charset.forName("UTF-8").newDecoder();
    try {
      decode(Observable.concat(src, err), charsetDecoder).toList().toBlocking().single();
      fail();
    }
    catch (RuntimeException e) {
      // The dangling lead byte is reported before the upstream IOException.
      assertEquals(MalformedInputException.class, e.getCause().getClass());
    }
  }

  @Test
  public void testEncode() {
    assertArrayEquals(
        new byte[] { (byte) 0xc2, (byte) 0xa1 },
        encode(Observable.just("\u00A1"), "UTF-8").toBlocking().single());
  }

  @Test
  public void testSplitOnCollon() {
    testSplit("boo:and:foo", ":", 0, "boo", "and", "foo");
  }

  @Test
  public void testSplitOnOh() {
    testSplit("boo:and:foo", "o", 0, "b", "", ":and:f");
  }

  // Re-splits `str` at every possible chunk boundary to verify split() is
  // insensitive to how the upstream text is chunked.
  public void testSplit(String str, String regex, int limit, String... parts) {
    testSplit(str, regex, 0, Observable.just(str), parts);
    for (int i = 0; i < str.length(); i++) {
      String a = str.substring(0, i);
      String b = str.substring(i, str.length());
      testSplit(a + "|" + b, regex, limit, Observable.just(a, b), parts);
    }
  }

  public void testSplit(String message, String regex, int limit, Observable<String> src, String... parts) {
    Observable<String> act = split(src, regex);
    Observable<String> exp = Observable.from(parts);
    AssertObservable.assertObservableEqualsBlocking("when input is " + message + " and limit = " + limit, exp, act);
  }

  @Test
  public void testJoinMixed() {
    Observable<String> source = Observable.from(Arrays.asList("a", "1", "c"));
    Observable<String> result = join(source, ", ");
    @SuppressWarnings("unchecked")
    Observer<Object> observer = mock(Observer.class);
    result.subscribe(new TestObserver<Object>(observer));
    verify(observer, times(1)).onNext("a, 1, c");
    verify(observer, times(1)).onCompleted();
    verify(observer, never()).onError(any(Throwable.class));
  }

  @Test
  public void testJoinWithEmptyString() {
    Observable<String> source = Observable.just("", "b", "c");
    Observable<String> result = join(source, ", ");
    @SuppressWarnings("unchecked")
    Observer<Object> observer = mock(Observer.class);
    result.subscribe(new TestObserver<Object>(observer));
    verify(observer, times(1)).onNext(", b, c");
    verify(observer, times(1)).onCompleted();
    verify(observer, never()).onError(any(Throwable.class));
  }

  @Test
  public void testJoinWithNull() {
    Observable<String> source = Observable.just("a", null, "c");
    Observable<String> result = join(source, ", ");
    @SuppressWarnings("unchecked")
    Observer<Object> observer = mock(Observer.class);
    result.subscribe(new TestObserver<Object>(observer));
    verify(observer, times(1)).onNext("a, null, c");
    verify(observer, times(1)).onCompleted();
    verify(observer, never()).onError(any(Throwable.class));
  }

  @Test
  public void testJoinSingle() {
    Observable<String> source = Observable.just("a");
    Observable<String> result = join(source, ", ");
    @SuppressWarnings("unchecked")
    Observer<Object> observer = mock(Observer.class);
    result.subscribe(new TestObserver<Object>(observer));
    verify(observer, times(1)).onNext("a");
    verify(observer, times(1)).onCompleted();
    verify(observer, never()).onError(any(Throwable.class));
  }

  // Joining an empty source still emits exactly one (empty) string.
  @Test
  public void testJoinEmpty() {
    Observable<String> source = Observable.empty();
    Observable<String> result = join(source, ", ");
    @SuppressWarnings("unchecked")
    Observer<Object> observer = mock(Observer.class);
    result.subscribe(new TestObserver<Object>(observer));
    verify(observer, times(1)).onNext("");
    verify(observer, times(1)).onCompleted();
    verify(observer, never()).onError(any(Throwable.class));
  }

  // join() buffers until completion, so an upstream error yields no onNext at all.
  @Test
  public void testJoinThrows() {
    Observable<String> source = Observable.concat(Observable.just("a"), Observable
        .<String> error(new RuntimeException("Forced failure")));
    Observable<String> result = join(source, ", ");
    @SuppressWarnings("unchecked")
    Observer<Object> observer = mock(Observer.class);
    result.subscribe(new TestObserver<Object>(observer));
    verify(observer, never()).onNext("a");
    verify(observer, never()).onCompleted();
    verify(observer, times(1)).onError(any(Throwable.class));
  }

  @Test
  public void testFromInputStream() {
    final byte[] inBytes = "test".getBytes();
    final byte[] outBytes = from(new ByteArrayInputStream(inBytes)).toBlocking().single();
    assertNotSame(inBytes, outBytes);
    assertArrayEquals(inBytes, outBytes);
  }

  // first() unsubscribes after one chunk; the stream must not be read again afterwards.
  @Test
  public void testFromInputStreamWillUnsubscribeBeforeCallingNextRead() {
    final byte[] inBytes = "test".getBytes();
    final AtomicInteger numReads = new AtomicInteger(0);
    ByteArrayInputStream is = new ByteArrayInputStream(inBytes) {
      @Override
      public synchronized int read(byte[] b, int off, int len) {
        numReads.incrementAndGet();
        return super.read(b, off, len);
      }
    };
    StringObservable.from(is).first().toBlocking().single();
    assertEquals(1, numReads.get());
  }

  @Test
  public void testFromReader() {
    final String inStr = "test";
    final String outStr = from(new StringReader(inStr)).toBlocking().single();
    assertNotSame(inStr, outStr);
    assertEquals(inStr, outStr);
  }

  // Lines may straddle upstream chunk boundaries; byLine must reassemble them.
  @Test
  public void testByLine() {
    String newLine = System.getProperty("line.separator");
    List<Line> lines = byLine(Observable.from(Arrays.asList("qwer", newLine + "asdf" + newLine, "zx", "cv")))
        .toList().toBlocking().single();
    assertEquals(Arrays.asList(new Line(0, "qwer"), new Line(1, "asdf"), new Line(2, "zxcv")), lines);
  }

  @Test
  public void testUsingCloseOnComplete() throws IOException {
    final TestSubscriber<String> subscriber = new TestSubscriber<String>();
    final Reader reader = spy(new StringReader("hello"));
    using(new UnsafeFunc0<Reader>() {
      @Override
      public Reader call() throws Exception {
        return reader;
      }
    }, new Func1<Reader, Observable<String>>() {
      @Override
      public Observable<String> call(Reader reader) {
        return from(reader, 2);
      }
    }).subscribe(subscriber);
    assertArrayEquals(new String[]{"he","ll","o"}, subscriber.getOnNextEvents().toArray());
    assertEquals(1, subscriber.getOnCompletedEvents().size());
    assertEquals(0, subscriber.getOnErrorEvents().size());
    verify(reader, times(1)).close();
  }

  // The resource must be closed even when the read fails.
  @Test
  public void testUsingCloseOnError() throws IOException {
    final TestSubscriber<String> subscriber = new TestSubscriber<String>();
    final AtomicBoolean closed = new AtomicBoolean();
    final Reader reader = new FilterReader(new StringReader("hello")) {
      @Override
      public int read(char[] cbuf) throws IOException {
        throw new IOException("boo");
      }
      @Override
      public void close() throws IOException {
        closed.set(true);
      }
    };
    using(new UnsafeFunc0<Reader>() {
      @Override
      public Reader call() throws Exception {
        return reader;
      }
    }, new Func1<Reader, Observable<String>>() {
      @Override
      public Observable<String> call(Reader reader) {
        return from(reader, 2);
      }
    }).subscribe(subscriber);
    assertEquals(0, subscriber.getOnNextEvents().size());
    assertEquals(0, subscriber.getOnCompletedEvents().size());
    assertEquals(1, subscriber.getOnErrorEvents().size());
    assertTrue(closed.get());
  }

  // The resource must also be closed on early unsubscription (take(1)).
  @Test
  public void testUsingCloseOnUnsubscribe() throws IOException {
    final TestSubscriber<String> subscriber = new TestSubscriber<String>();
    final Reader reader = spy(new StringReader("hello"));
    using(new UnsafeFunc0<Reader>() {
      @Override
      public Reader call() throws Exception {
        return reader;
      }
    }, new Func1<Reader, Observable<String>>() {
      @Override
      public Observable<String> call(Reader reader) {
        return from(reader, 2);
      }
    }).take(1).subscribe(subscriber);
    assertArrayEquals(new String[]{"he"}, subscriber.getOnNextEvents().toArray());
    assertEquals(1, subscriber.getOnNextEvents().size());
    assertEquals(1, subscriber.getOnCompletedEvents().size());
    assertEquals(0, subscriber.getOnErrorEvents().size());
    verify(reader, times(1)).close();
  }
}
package jaci.openrio.toast.core.loader.simulation;

import jaci.openrio.toast.core.Toast;
import jaci.openrio.toast.core.ToastConfiguration;
import jaci.openrio.toast.lib.state.RobotState;

import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.MulticastSocket;

/**
 * Simulated DriverStation communication library. Listens for FRC Driver Station
 * control packets on UDP port 1110, replies with status packets on UDP port 1150,
 * and can advertise a fake "roborio-&lt;team&gt;-frc" host over mDNS so the Driver
 * Station can discover the simulator.
 *
 * @author Jaci
 */
public class DriverStationCommunications {

    public static ThreadGroup group = new ThreadGroup("driver_station");

    /** Starts the DS networking thread if simulated DS support is enabled in the config. */
    public static void init() {
        if (ToastConfiguration.Property.SIM_DS_ENABLED.asBoolean()) {
            Thread runThread = new Thread(group, DriverStationCommunications::run);
            runThread.start();
        }
    }

    /**
     * Builds a hand-rolled mDNS response advertising the simulated roboRIO at
     * 127.0.0.1 and multicasts it on 224.0.0.251:5353 every 5 seconds.
     * This method never returns normally; run it on its own thread.
     */
    public static void broadcast() {
        String service = "roborio-" + ToastConfiguration.Property.SIM_BROADCAST_TEAM + "-frc";
        String hostname = "roborio-" + ToastConfiguration.Property.SIM_BROADCAST_TEAM + "-frc";
        String ip_str = "127.0.0.1";
        char ip[] = new char[4];
        String arr[] = ip_str.split("\\.");
        for (int i = 0; i < 4; i++) {
            ip[i] = (char) Integer.parseInt(arr[i]);
        }
        // mDNS header: response flags, 3 answer records, 1 additional record.
        char payload_1[] = {
                0x00, 0x00, 0x84, 0x00,     // ID, Response Query
                0x00, 0x00, 0x00, 0x03,     // No Question, 3 Answers
                0x00, 0x00, 0x00, 0x01,     // No Authority, 1 Additional RR
        };
        // Record 1: PTR for _ni._tcp.local pointing at the service name.
        char payload_2[] = {
                0x03,                                   // Len: 3
                0x5f, 0x6e, 0x69,                       // _ni
                0x04,                                   // Len: 4
                0x5f, 0x74, 0x63, 0x70,                 // _tcp
                0x05,                                   // Len: 5
                0x6c, 0x6f, 0x63, 0x61, 0x6c,           // local
                0x00,                                   // end of string
                0x00, 0x0c, 0x80, 0x01,                 // Type: PTR (domain name PoinTeR), Class: IN, Cache flush: true
                0x00, 0x00, 0x00, 0x3C,                 // TTL: 60 Sec
                0x00, (char) (0x03 + service.length()), (char) service.length()
        };
        char payload_3[] = service.toCharArray();
        char payload_4[] = {
                0xc0, 0x0c,                             // Name Offset (0xc0, 0x0c => 12 =>._ni._tcp.local)
                // Record 2: SRV
                0xc0, 0x26, 0x00, 0x21,                 // Name Offset (mdns.service_name), Type: SRV (Server Selection)
                0x80, 0x01,                             // Class: IN, Cache flush: true
                0x00, 0x00, 0x00, 0x3C,                 // TTL: 60 sec
                0x00, (char) (0xE + hostname.length()), // Data Length: 14 + thnl
                0x00, 0x00, 0x00, 0x00,                 // Priority: 0, Weight: 0
                0x0d, 0xfc,                             // Port: 3580
                (char) hostname.length()                // Len: thnl
        };
        char payload_5[] = hostname.toCharArray();
        char payload_6[] = {
                0x05,                                   // Len: 5
                0x6c, 0x6f, 0x63, 0x61, 0x6c,           // local
                0x00,                                   // end of string
                // Record 3: TXT
                0xc0, 0x26, 0x00, 0x10,                 // Name Offset (mdns.service_name), Type: TXT
                0x80, 0x01,                             // Class: IN, Cache flush: true
                0x00, 0x00, 0x00, 0x3C,                 // TTL: 60 sec
                0x00, 0x01, 0x00,                       // Data Length: 1, TXT Length: 0
                // Additional Record: A
                0xc0, (char) (0x3b + service.length()), // Name Offset (mdns.target_host_name)
                0x00, 0x01, 0x80, 0x01,                 // Type: A, Class: IN, Cache flush: true
                0x00, 0x00, 0x00, 0x3C,                 // TTL: 60 sec
                0x00, 0x04,                             // Data Length: 4
                ip[0], ip[1], ip[2], ip[3]              // IP Bytes
        };
        // Stitch the six fragments into one datagram payload.
        char payload[] = new char[payload_1.length + payload_2.length + payload_3.length
                + payload_4.length + payload_5.length + payload_6.length];
        System.arraycopy(payload_1, 0, payload, 0, payload_1.length);
        int l = payload_1.length;
        System.arraycopy(payload_2, 0, payload, l, payload_2.length);
        l += payload_2.length;
        System.arraycopy(payload_3, 0, payload, l, payload_3.length);
        l += payload_3.length;
        System.arraycopy(payload_4, 0, payload, l, payload_4.length);
        l += payload_4.length;
        System.arraycopy(payload_5, 0, payload, l, payload_5.length);
        l += payload_5.length;
        System.arraycopy(payload_6, 0, payload, l, payload_6.length);
        try {
            InetAddress group = InetAddress.getByName("224.0.0.251");
            MulticastSocket multicast_socket = new MulticastSocket(5353);
            multicast_socket.joinGroup(group);
            byte[] byte_payload = new byte[payload.length];
            for (int i = 0; i < payload.length; i++) {
                byte_payload[i] = (byte) payload[i];
            }
            Toast.log().info("Driver Station Communications -> Broadcast Running!");
            while (true) {
                DatagramPacket packet = new DatagramPacket(byte_payload, byte_payload.length, group, 5353);
                multicast_socket.send(packet);
                Thread.sleep(5000);
            }
        } catch (Exception e) {
            // FIX: was an empty catch that silently swallowed failures — report them
            // the same way run() does so a dead broadcast is visible in the log.
            Toast.log().error("Could not run mDNS Broadcast Service: " + e);
            Toast.log().exception(e);
        }
    }

    /**
     * Main DS loop: optionally spawns the mDNS broadcaster, then receives DS
     * control packets on UDP 1110 and answers with status packets on UDP 1150.
     */
    public static void run() {
        if (ToastConfiguration.Property.SIM_BROADCAST_MDNS.asBoolean()) {
            Thread broadcastThread = new Thread(group, DriverStationCommunications::broadcast);
            // FIX: was broadcastThread.run(), which executed broadcast() synchronously on
            // this thread; broadcast() loops forever, so the DS socket below never started.
            broadcastThread.start();
        }
        try {
            DatagramSocket socket = new DatagramSocket(1110);
            byte[] buffer = new byte[8192];
            DatagramPacket packet = new DatagramPacket(buffer, buffer.length);
            while (true) {
                socket.receive(packet);
                decodePacket(packet, buffer);
                byte[] toSend = encodePacket();
                DatagramPacket sendPacket = new DatagramPacket(toSend, toSend.length, packet.getAddress(), 1150);
                socket.send(sendPacket);
            }
        } catch (Exception e) {
            Toast.log().error("Could not start Toast DriverStation Networking Service: " + e);
            Toast.log().exception(e);
        }
    }

    // Last-seen DS packet header bytes, echoed back in encodePacket().
    static byte pi = 0;
    static byte ng = 0;
    static byte control = 0;
    public static boolean connected = false;
    // Per-joystick (up to 6) decoded axis/button/POV state.
    public static short[][] joyaxis = new short[6][12];
    public static short[] joyaxiscount = new short[6];
    public static int[] joybuttons = new int[6];
    public static byte[] joybuttoncount = new byte[6];
    public static short[][] joypov = new short[6][1];
    public static short[] joypovcount = new short[6];

    /**
     * Decodes one DS control packet: robot state byte, alliance station, and a
     * sequence of joystick structures (tag 0x0c) with axes, button bitmask and POVs.
     */
    public static void decodePacket(DatagramPacket packet, byte[] buffer) {
        connected = true;
        pi = buffer[0];
        ng = buffer[1];
        if (buffer[2] != 0) {
            // General Packet
            byte ctrl = buffer[3];
            control = ctrl;
            // Control byte: 0 disabled, 4 teleop, 6 autonomous, 5 test.
            SimulationData.currentState = ctrl == 0 ? RobotState.DISABLED : ctrl == 4 ? RobotState.TELEOP
                    : ctrl == 6 ? RobotState.AUTONOMOUS : ctrl == 5 ? RobotState.TEST : RobotState.DISABLED;
            SimulationData.repaintState();
            SimulationData.alliance_station = buffer[5];
            int i = 6;
            boolean search = true;
            int joyid = 0;
            // Joysticks
            while (i < buffer.length && search) {
                int structure_size = buffer[i];
                search = buffer[i + 1] == 0x0c;     // 0x0c tags a joystick structure
                if (!search) continue;
                int axis_count = buffer[i + 2];
                joyaxiscount[joyid] = (byte) axis_count;
                joyaxis[joyid] = new short[axis_count];
                for (int ax = 0; ax < axis_count; ax++) {
                    int ax_val = buffer[i + 2 + ax + 1];
                    joyaxis[joyid][ax] = (short) ax_val;
                }
                int b = i + 2 + axis_count + 1;
                int button_count = buffer[b];
                joybuttoncount[joyid] = (byte) button_count;
                // Number of bytes holding the button bitmask (8 buttons per byte).
                int button_delta = (button_count / 8 + ((button_count % 8 == 0) ? 0 : 1));
                int total_mask = 0;
                for (int bm = 0; bm < button_delta; bm++) {
                    byte button_mask = buffer[b + bm + 1];
                    // NOTE(review): `| button_mask` sign-extends a negative byte and would
                    // set all upper bits of the mask — likely needs `& 0xFF`. Left unchanged
                    // pending confirmation against the DS protocol.
                    total_mask = (total_mask << (bm * 8)) | button_mask;
                }
                joybuttons[joyid] = total_mask;
                b = b + button_delta + 1;
                int pov_count = buffer[b];
                joypovcount[joyid] = (byte) pov_count;
                joypov[joyid] = new short[pov_count];
                for (int pv = 0; pv < pov_count; pv++) {
                    // POVs are big-endian 16-bit; normalize the low byte to unsigned.
                    int a1 = buffer[b + 1 + (pv * 2)];
                    int a2 = buffer[b + 1 + (pv * 2) + 1];
                    if (a2 < 0) a2 = 256 + a2;
                    short result = (short) (a1 << 8 | a2);
                    joypov[joyid][pv] = result;
                }
                joyid++;
                i += structure_size + 1;
            }
        } else {
            // Connection Packet — nothing to decode.
        }
    }

    /**
     * Builds the 8-byte status reply: echoed header, control byte, status flags,
     * and the PDP battery voltage split into whole-volt and fractional bytes.
     */
    public static byte[] encodePacket() {
        byte[] buffer = new byte[8];
        buffer[0] = pi;
        buffer[1] = ng;
        buffer[2] = 0x01;
        buffer[3] = control;
        buffer[4] = 0x10 | 0x20;
        double volts = SimulationData.pdpVoltage;
        // Splits Bat Voltage into two bytes
        buffer[5] = (byte) volts;
        buffer[6] = (byte) ((volts * 100 - ((byte) volts) * 100) * 2.5);
        buffer[7] = 0;
        return buffer;
    }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.wellarchitected.model;

import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * A workload summary return object.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/wellarchitected-2020-03-31/WorkloadSummary" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class WorkloadSummary implements Serializable, Cloneable, StructuredPojo {

    private String workloadId;

    private String workloadArn;

    private String workloadName;

    private String owner;

    private java.util.Date updatedAt;

    private java.util.List<String> lenses;

    private java.util.Map<String, Integer> riskCounts;

    private String improvementStatus;

    /**
     * @param workloadId
     *        the workload ID
     */
    public void setWorkloadId(String workloadId) {
        this.workloadId = workloadId;
    }

    /**
     * @return the workload ID
     */
    public String getWorkloadId() {
        return this.workloadId;
    }

    /**
     * @param workloadId
     *        the workload ID
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public WorkloadSummary withWorkloadId(String workloadId) {
        setWorkloadId(workloadId);
        return this;
    }

    /**
     * @param workloadArn
     *        the workload ARN
     */
    public void setWorkloadArn(String workloadArn) {
        this.workloadArn = workloadArn;
    }

    /**
     * @return the workload ARN
     */
    public String getWorkloadArn() {
        return this.workloadArn;
    }

    /**
     * @param workloadArn
     *        the workload ARN
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public WorkloadSummary withWorkloadArn(String workloadArn) {
        setWorkloadArn(workloadArn);
        return this;
    }

    /**
     * @param workloadName
     *        the workload name
     */
    public void setWorkloadName(String workloadName) {
        this.workloadName = workloadName;
    }

    /**
     * @return the workload name
     */
    public String getWorkloadName() {
        return this.workloadName;
    }

    /**
     * @param workloadName
     *        the workload name
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public WorkloadSummary withWorkloadName(String workloadName) {
        setWorkloadName(workloadName);
        return this;
    }

    /**
     * @param owner
     *        the workload owner
     */
    public void setOwner(String owner) {
        this.owner = owner;
    }

    /**
     * @return the workload owner
     */
    public String getOwner() {
        return this.owner;
    }

    /**
     * @param owner
     *        the workload owner
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public WorkloadSummary withOwner(String owner) {
        setOwner(owner);
        return this;
    }

    /**
     * @param updatedAt
     *        the last-updated timestamp
     */
    public void setUpdatedAt(java.util.Date updatedAt) {
        this.updatedAt = updatedAt;
    }

    /**
     * @return the last-updated timestamp
     */
    public java.util.Date getUpdatedAt() {
        return this.updatedAt;
    }

    /**
     * @param updatedAt
     *        the last-updated timestamp
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public WorkloadSummary withUpdatedAt(java.util.Date updatedAt) {
        setUpdatedAt(updatedAt);
        return this;
    }

    /**
     * @return the lenses applied to the workload
     */
    public java.util.List<String> getLenses() {
        return lenses;
    }

    /**
     * @param lenses
     *        the lenses applied to the workload
     */
    public void setLenses(java.util.Collection<String> lenses) {
        if (lenses == null) {
            this.lenses = null;
            return;
        }
        this.lenses = new java.util.ArrayList<String>(lenses);
    }

    /**
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setLenses(java.util.Collection)} or {@link #withLenses(java.util.Collection)} if you want to override the
     * existing values.
     * </p>
     *
     * @param lenses
     *        the lenses to append
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public WorkloadSummary withLenses(String... lenses) {
        if (this.lenses == null) {
            setLenses(new java.util.ArrayList<String>(lenses.length));
        }
        for (String ele : lenses) {
            this.lenses.add(ele);
        }
        return this;
    }

    /**
     * @param lenses
     *        the lenses applied to the workload (replaces any existing values)
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public WorkloadSummary withLenses(java.util.Collection<String> lenses) {
        setLenses(lenses);
        return this;
    }

    /**
     * @return the risk counts keyed by risk name
     */
    public java.util.Map<String, Integer> getRiskCounts() {
        return riskCounts;
    }

    /**
     * @param riskCounts
     *        the risk counts keyed by risk name
     */
    public void setRiskCounts(java.util.Map<String, Integer> riskCounts) {
        this.riskCounts = riskCounts;
    }

    /**
     * @param riskCounts
     *        the risk counts keyed by risk name
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public WorkloadSummary withRiskCounts(java.util.Map<String, Integer> riskCounts) {
        setRiskCounts(riskCounts);
        return this;
    }

    /**
     * Add a single RiskCounts entry
     *
     * @see WorkloadSummary#withRiskCounts
     * @return a reference to this object so that method calls can be chained together.
     */
    public WorkloadSummary addRiskCountsEntry(String key, Integer value) {
        if (null == this.riskCounts) {
            this.riskCounts = new java.util.HashMap<String, Integer>();
        }
        if (this.riskCounts.containsKey(key))
            throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided.");
        this.riskCounts.put(key, value);
        return this;
    }

    /**
     * Removes all the entries added into RiskCounts.
     *
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public WorkloadSummary clearRiskCountsEntries() {
        this.riskCounts = null;
        return this;
    }

    /**
     * @param improvementStatus
     *        the improvement status of the workload
     * @see WorkloadImprovementStatus
     */
    public void setImprovementStatus(String improvementStatus) {
        this.improvementStatus = improvementStatus;
    }

    /**
     * @return the improvement status of the workload
     * @see WorkloadImprovementStatus
     */
    public String getImprovementStatus() {
        return this.improvementStatus;
    }

    /**
     * @param improvementStatus
     *        the improvement status of the workload
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see WorkloadImprovementStatus
     */
    public WorkloadSummary withImprovementStatus(String improvementStatus) {
        setImprovementStatus(improvementStatus);
        return this;
    }

    /**
     * @param improvementStatus
     *        the improvement status of the workload
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see WorkloadImprovementStatus
     */
    public WorkloadSummary withImprovementStatus(WorkloadImprovementStatus improvementStatus) {
        this.improvementStatus = improvementStatus.toString();
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getWorkloadId() != null)
            sb.append("WorkloadId: ").append(getWorkloadId()).append(",");
        if (getWorkloadArn() != null)
            sb.append("WorkloadArn: ").append(getWorkloadArn()).append(",");
        if (getWorkloadName() != null)
            sb.append("WorkloadName: ").append(getWorkloadName()).append(",");
        if (getOwner() != null)
            sb.append("Owner: ").append(getOwner()).append(",");
        if (getUpdatedAt() != null)
            sb.append("UpdatedAt: ").append(getUpdatedAt()).append(",");
        if (getLenses() != null)
            sb.append("Lenses: ").append(getLenses()).append(",");
        if (getRiskCounts() != null)
            sb.append("RiskCounts: ").append(getRiskCounts()).append(",");
        if (getImprovementStatus() != null)
            sb.append("ImprovementStatus: ").append(getImprovementStatus());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof WorkloadSummary == false)
            return false;
        WorkloadSummary other = (WorkloadSummary) obj;
        // XOR pattern: fields differ when exactly one side is null.
        if (other.getWorkloadId() == null ^ this.getWorkloadId() == null)
            return false;
        if (other.getWorkloadId() != null && other.getWorkloadId().equals(this.getWorkloadId()) == false)
            return false;
        if (other.getWorkloadArn() == null ^ this.getWorkloadArn() == null)
            return false;
        if (other.getWorkloadArn() != null && other.getWorkloadArn().equals(this.getWorkloadArn()) == false)
            return false;
        if (other.getWorkloadName() == null ^ this.getWorkloadName() == null)
            return false;
        if (other.getWorkloadName() != null && other.getWorkloadName().equals(this.getWorkloadName()) == false)
            return false;
        if (other.getOwner() == null ^ this.getOwner() == null)
            return false;
        if (other.getOwner() != null && other.getOwner().equals(this.getOwner()) == false)
            return false;
        if (other.getUpdatedAt() == null ^ this.getUpdatedAt() == null)
            return false;
        if (other.getUpdatedAt() != null && other.getUpdatedAt().equals(this.getUpdatedAt()) == false)
            return false;
        if (other.getLenses() == null ^ this.getLenses() == null)
            return false;
        if (other.getLenses() != null && other.getLenses().equals(this.getLenses()) == false)
            return false;
        if (other.getRiskCounts() == null ^ this.getRiskCounts() == null)
            return false;
        if (other.getRiskCounts() != null && other.getRiskCounts().equals(this.getRiskCounts()) == false)
            return false;
        if (other.getImprovementStatus() == null ^ this.getImprovementStatus() == null)
            return false;
        if (other.getImprovementStatus() != null && other.getImprovementStatus().equals(this.getImprovementStatus()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getWorkloadId() == null) ? 0 : getWorkloadId().hashCode());
        hashCode = prime * hashCode + ((getWorkloadArn() == null) ? 0 : getWorkloadArn().hashCode());
        hashCode = prime * hashCode + ((getWorkloadName() == null) ? 0 : getWorkloadName().hashCode());
        hashCode = prime * hashCode + ((getOwner() == null) ? 0 : getOwner().hashCode());
        hashCode = prime * hashCode + ((getUpdatedAt() == null) ? 0 : getUpdatedAt().hashCode());
        hashCode = prime * hashCode + ((getLenses() == null) ? 0 : getLenses().hashCode());
        hashCode = prime * hashCode + ((getRiskCounts() == null) ? 0 : getRiskCounts().hashCode());
        hashCode = prime * hashCode + ((getImprovementStatus() == null) ? 0 : getImprovementStatus().hashCode());
        return hashCode;
    }

    @Override
    public WorkloadSummary clone() {
        try {
            return (WorkloadSummary) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.wellarchitected.model.transform.WorkloadSummaryMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
/*
 * Copyright (c) 2007, 2015, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 */
/*
 * Copyright 1999-2004 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.sun.org.apache.xml.internal.res;

import java.util.ListResourceBundle;

/**
 * Set up error messages.
 * We build a two dimensional array of message keys and
 * message strings. In order to add a new message here,
 * you need to first add a String constant. And you need
 * to enter key, value pair as part of the contents
 * array. You also need to update MAX_CODE for error strings
 * and MAX_WARNING for warnings ( Needed for only information
 * purpose )
 */
public class XMLErrorResources extends ListResourceBundle
{

/*
 * This file contains error and warning messages related to Xalan Error
 * Handling.
 *
 *  General notes to translators:
 *
 *  1) Xalan (or more properly, Xalan-interpretive) and XSLTC are names of
 *     components.
 *     XSLT is an acronym for "XML Stylesheet Language: Transformations".
 *     XSLTC is an acronym for XSLT Compiler.
 *
 *  2) A stylesheet is a description of how to transform an input XML document
 *     into a resultant XML document (or HTML document or text).  The
 *     stylesheet itself is described in the form of an XML document.
 *
 *  3) A template is a component of a stylesheet that is used to match a
 *     particular portion of an input document and specifies the form of the
 *     corresponding portion of the output document.
 *
 *  4) An element is a mark-up tag in an XML document; an attribute is a
 *     modifier on the tag.  For example, in <elem attr='val' attr2='val2'>
 *     "elem" is an element name, "attr" and "attr2" are attribute names with
 *     the values "val" and "val2", respectively.
 *
 *  5) A namespace declaration is a special attribute that is used to associate
 *     a prefix with a URI (the namespace).  The meanings of element names and
 *     attribute names that use that prefix are defined with respect to that
 *     namespace.
 *
 *  6) "Translet" is an invented term that describes the class file that
 *     results from compiling an XML stylesheet into a Java class.
 *
 *  7) XPath is a specification that describes a notation for identifying
 *     nodes in a tree-structured representation of an XML document.  An
 *     instance of that notation is referred to as an XPath expression.
 *
 */

  /** Maximum error messages, this is needed to keep track of the number of messages.    */
  public static final int MAX_CODE = 61;

  /** Maximum warnings, this is needed to keep track of the number of warnings.   */
  public static final int MAX_WARNING = 0;

  /** Maximum misc strings.   */
  public static final int MAX_OTHERS = 4;

  /** Maximum total warnings and error messages.
   * NOTE(review): computed as MAX_CODE + MAX_WARNING + 1, not + MAX_OTHERS;
   * this matches the shipped upstream code — confirm before "fixing".
   */
  public static final int MAX_MESSAGES = MAX_CODE + MAX_WARNING + 1;

  /*
   * Message keys: each constant is the lookup key for one entry in the
   * contents table below.
   */
  public static final String ER_FUNCTION_NOT_SUPPORTED = "ER_FUNCTION_NOT_SUPPORTED";
  public static final String ER_CANNOT_OVERWRITE_CAUSE = "ER_CANNOT_OVERWRITE_CAUSE";
  public static final String ER_NO_DEFAULT_IMPL = "ER_NO_DEFAULT_IMPL";
  public static final String ER_CHUNKEDINTARRAY_NOT_SUPPORTED = "ER_CHUNKEDINTARRAY_NOT_SUPPORTED";
  public static final String ER_OFFSET_BIGGER_THAN_SLOT = "ER_OFFSET_BIGGER_THAN_SLOT";
  public static final String ER_COROUTINE_NOT_AVAIL = "ER_COROUTINE_NOT_AVAIL";
  public static final String ER_COROUTINE_CO_EXIT = "ER_COROUTINE_CO_EXIT";
  public static final String ER_COJOINROUTINESET_FAILED = "ER_COJOINROUTINESET_FAILED";
  public static final String ER_COROUTINE_PARAM = "ER_COROUTINE_PARAM";
  public static final String ER_PARSER_DOTERMINATE_ANSWERS = "ER_PARSER_DOTERMINATE_ANSWERS";
  public static final String ER_NO_PARSE_CALL_WHILE_PARSING = "ER_NO_PARSE_CALL_WHILE_PARSING";
  public static final String ER_TYPED_ITERATOR_AXIS_NOT_IMPLEMENTED = "ER_TYPED_ITERATOR_AXIS_NOT_IMPLEMENTED";
  public static final String ER_ITERATOR_AXIS_NOT_IMPLEMENTED = "ER_ITERATOR_AXIS_NOT_IMPLEMENTED";
  public static final String ER_ITERATOR_CLONE_NOT_SUPPORTED = "ER_ITERATOR_CLONE_NOT_SUPPORTED";
  public static final String ER_UNKNOWN_AXIS_TYPE = "ER_UNKNOWN_AXIS_TYPE";
  public static final String ER_AXIS_NOT_SUPPORTED = "ER_AXIS_NOT_SUPPORTED";
  public static final String ER_NO_DTMIDS_AVAIL = "ER_NO_DTMIDS_AVAIL";
  public static final String ER_NOT_SUPPORTED = "ER_NOT_SUPPORTED";
  public static final String ER_NODE_NON_NULL = "ER_NODE_NON_NULL";
  public static final String ER_COULD_NOT_RESOLVE_NODE = "ER_COULD_NOT_RESOLVE_NODE";
  public static final String ER_STARTPARSE_WHILE_PARSING = "ER_STARTPARSE_WHILE_PARSING";
  public static final String ER_STARTPARSE_NEEDS_SAXPARSER = "ER_STARTPARSE_NEEDS_SAXPARSER";
  public static final String ER_COULD_NOT_INIT_PARSER = "ER_COULD_NOT_INIT_PARSER";
  public static final String ER_EXCEPTION_CREATING_POOL = "ER_EXCEPTION_CREATING_POOL";
  public static final String ER_PATH_CONTAINS_INVALID_ESCAPE_SEQUENCE = "ER_PATH_CONTAINS_INVALID_ESCAPE_SEQUENCE";
  public static final String ER_SCHEME_REQUIRED = "ER_SCHEME_REQUIRED";
  public static final String ER_NO_SCHEME_IN_URI = "ER_NO_SCHEME_IN_URI";
  public static final String ER_NO_SCHEME_INURI = "ER_NO_SCHEME_INURI";
  public static final String ER_PATH_INVALID_CHAR = "ER_PATH_INVALID_CHAR";
  public static final String ER_SCHEME_FROM_NULL_STRING = "ER_SCHEME_FROM_NULL_STRING";
  public static final String ER_SCHEME_NOT_CONFORMANT = "ER_SCHEME_NOT_CONFORMANT";
  public static final String ER_HOST_ADDRESS_NOT_WELLFORMED = "ER_HOST_ADDRESS_NOT_WELLFORMED";
  public static final String ER_PORT_WHEN_HOST_NULL = "ER_PORT_WHEN_HOST_NULL";
  public static final String ER_INVALID_PORT = "ER_INVALID_PORT";
  public static final String ER_FRAG_FOR_GENERIC_URI ="ER_FRAG_FOR_GENERIC_URI";
  public static final String ER_FRAG_WHEN_PATH_NULL = "ER_FRAG_WHEN_PATH_NULL";
  public static final String ER_FRAG_INVALID_CHAR = "ER_FRAG_INVALID_CHAR";
  public static final String ER_PARSER_IN_USE = "ER_PARSER_IN_USE";
  public static final String ER_CANNOT_CHANGE_WHILE_PARSING = "ER_CANNOT_CHANGE_WHILE_PARSING";
  public static final String ER_SELF_CAUSATION_NOT_PERMITTED = "ER_SELF_CAUSATION_NOT_PERMITTED";
  public static final String ER_NO_USERINFO_IF_NO_HOST = "ER_NO_USERINFO_IF_NO_HOST";
  public static final String ER_NO_PORT_IF_NO_HOST = "ER_NO_PORT_IF_NO_HOST";
  public static final String ER_NO_QUERY_STRING_IN_PATH = "ER_NO_QUERY_STRING_IN_PATH";
  public static final String ER_NO_FRAGMENT_STRING_IN_PATH = "ER_NO_FRAGMENT_STRING_IN_PATH";
  public static final String ER_CANNOT_INIT_URI_EMPTY_PARMS = "ER_CANNOT_INIT_URI_EMPTY_PARMS";
  public static final String ER_METHOD_NOT_SUPPORTED ="ER_METHOD_NOT_SUPPORTED";
  public static final String ER_INCRSAXSRCFILTER_NOT_RESTARTABLE = "ER_INCRSAXSRCFILTER_NOT_RESTARTABLE";
  public static final String ER_XMLRDR_NOT_BEFORE_STARTPARSE = "ER_XMLRDR_NOT_BEFORE_STARTPARSE";
  public static final String ER_AXIS_TRAVERSER_NOT_SUPPORTED = "ER_AXIS_TRAVERSER_NOT_SUPPORTED";
  public static final String ER_ERRORHANDLER_CREATED_WITH_NULL_PRINTWRITER = "ER_ERRORHANDLER_CREATED_WITH_NULL_PRINTWRITER";
  public static final String ER_SYSTEMID_UNKNOWN = "ER_SYSTEMID_UNKNOWN";
  public static final String ER_LOCATION_UNKNOWN = "ER_LOCATION_UNKNOWN";
  public static final String ER_PREFIX_MUST_RESOLVE = "ER_PREFIX_MUST_RESOLVE";
  public static final String ER_CREATEDOCUMENT_NOT_SUPPORTED = "ER_CREATEDOCUMENT_NOT_SUPPORTED";
  public static final String ER_CHILD_HAS_NO_OWNER_DOCUMENT = "ER_CHILD_HAS_NO_OWNER_DOCUMENT";
  public static final String ER_CHILD_HAS_NO_OWNER_DOCUMENT_ELEMENT = "ER_CHILD_HAS_NO_OWNER_DOCUMENT_ELEMENT";
  public static final String ER_CANT_OUTPUT_TEXT_BEFORE_DOC = "ER_CANT_OUTPUT_TEXT_BEFORE_DOC";
  public static final String ER_CANT_HAVE_MORE_THAN_ONE_ROOT = "ER_CANT_HAVE_MORE_THAN_ONE_ROOT";
  public static final String ER_ARG_LOCALNAME_NULL = "ER_ARG_LOCALNAME_NULL";
  public static final String ER_ARG_LOCALNAME_INVALID = "ER_ARG_LOCALNAME_INVALID";
  public static final String ER_ARG_PREFIX_INVALID = "ER_ARG_PREFIX_INVALID";
  public static final String ER_NAME_CANT_START_WITH_COLON = "ER_NAME_CANT_START_WITH_COLON";

  // Message keys used by the serializer
  public static final String ER_RESOURCE_COULD_NOT_FIND = "ER_RESOURCE_COULD_NOT_FIND";
  public static final String ER_RESOURCE_COULD_NOT_LOAD = "ER_RESOURCE_COULD_NOT_LOAD";
  public static final String ER_BUFFER_SIZE_LESSTHAN_ZERO = "ER_BUFFER_SIZE_LESSTHAN_ZERO";
  public static final String ER_INVALID_UTF16_SURROGATE = "ER_INVALID_UTF16_SURROGATE";
  public static final String ER_OIERROR = "ER_OIERROR";
  public static final String ER_NAMESPACE_PREFIX = "ER_NAMESPACE_PREFIX";
  // NOTE(review): the key value "ER_STRAY_ATTIRBUTE" is misspelled, but it is the
  // shipped upstream resource key; do not "correct" it or lookups will break.
  public static final String ER_STRAY_ATTRIBUTE = "ER_STRAY_ATTIRBUTE";
  public static final String ER_STRAY_NAMESPACE = "ER_STRAY_NAMESPACE";
  public static final String ER_COULD_NOT_LOAD_RESOURCE = "ER_COULD_NOT_LOAD_RESOURCE";
  public static final String ER_COULD_NOT_LOAD_METHOD_PROPERTY = "ER_COULD_NOT_LOAD_METHOD_PROPERTY";
  public static final String ER_SERIALIZER_NOT_CONTENTHANDLER = "ER_SERIALIZER_NOT_CONTENTHANDLER";
  public static final String ER_ILLEGAL_ATTRIBUTE_POSITION = "ER_ILLEGAL_ATTRIBUTE_POSITION";
  public static final String ER_ILLEGAL_CHARACTER = "ER_ILLEGAL_CHARACTER";

  /*
   * Now fill in the message text.
   * Then fill in the message text for that message code in the
   * array. Use the new error code as the index into the array.
   */

  // Error messages...

  /** The lookup table for error messages.   */
  private static final Object[][] contents = {

  /** Error message ID that has a null message, but takes in a single object.    */
    {"ER0000" , "{0}" },

    { ER_FUNCTION_NOT_SUPPORTED,
      "Function not supported!"},

    { ER_CANNOT_OVERWRITE_CAUSE,
      "Cannot overwrite cause"},

    { ER_NO_DEFAULT_IMPL,
      "No default implementation found "},

    { ER_CHUNKEDINTARRAY_NOT_SUPPORTED,
      "ChunkedIntArray({0}) not currently supported"},

    { ER_OFFSET_BIGGER_THAN_SLOT,
      "Offset bigger than slot"},

    { ER_COROUTINE_NOT_AVAIL,
      "Coroutine not available, id={0}"},

    { ER_COROUTINE_CO_EXIT,
      "CoroutineManager received co_exit() request"},

    { ER_COJOINROUTINESET_FAILED,
      "co_joinCoroutineSet() failed"},

    { ER_COROUTINE_PARAM,
      "Coroutine parameter error ({0})"},

    { ER_PARSER_DOTERMINATE_ANSWERS,
      "\nUNEXPECTED: Parser doTerminate answers {0}"},

    { ER_NO_PARSE_CALL_WHILE_PARSING,
      "parse may not be called while parsing"},

    { ER_TYPED_ITERATOR_AXIS_NOT_IMPLEMENTED,
      "Error: typed iterator for axis {0} not implemented"},

    { ER_ITERATOR_AXIS_NOT_IMPLEMENTED,
      "Error: iterator for axis {0} not implemented "},

    { ER_ITERATOR_CLONE_NOT_SUPPORTED,
      "Iterator clone not supported"},

    { ER_UNKNOWN_AXIS_TYPE,
      "Unknown axis traversal type: {0}"},

    { ER_AXIS_NOT_SUPPORTED,
      "Axis traverser not supported: {0}"},

    { ER_NO_DTMIDS_AVAIL,
      "No more DTM IDs are available"},

    { ER_NOT_SUPPORTED,
      "Not supported: {0}"},

    { ER_NODE_NON_NULL,
      "Node must be non-null for getDTMHandleFromNode"},

    { ER_COULD_NOT_RESOLVE_NODE,
      "Could not resolve the node to a handle"},

    { ER_STARTPARSE_WHILE_PARSING,
      "startParse may not be called while parsing"},

    { ER_STARTPARSE_NEEDS_SAXPARSER,
      "startParse needs a non-null SAXParser"},

    { ER_COULD_NOT_INIT_PARSER,
      "could not initialize parser with"},

    { ER_EXCEPTION_CREATING_POOL,
      "exception creating new instance for pool"},

    { ER_PATH_CONTAINS_INVALID_ESCAPE_SEQUENCE,
      "Path contains invalid escape sequence"},

    { ER_SCHEME_REQUIRED,
      "Scheme is required!"},

    { ER_NO_SCHEME_IN_URI,
      "No scheme found in URI: {0}"},

    { ER_NO_SCHEME_INURI,
      "No scheme found in URI"},

    { ER_PATH_INVALID_CHAR,
      "Path contains invalid character: {0}"},

    { ER_SCHEME_FROM_NULL_STRING,
      "Cannot set scheme from null string"},

    { ER_SCHEME_NOT_CONFORMANT,
      "The scheme is not conformant."},

    { ER_HOST_ADDRESS_NOT_WELLFORMED,
      "Host is not a well formed address"},

    { ER_PORT_WHEN_HOST_NULL,
      "Port cannot be set when host is null"},

    { ER_INVALID_PORT,
      "Invalid port number"},

    { ER_FRAG_FOR_GENERIC_URI,
      "Fragment can only be set for a generic URI"},

    { ER_FRAG_WHEN_PATH_NULL,
      "Fragment cannot be set when path is null"},

    { ER_FRAG_INVALID_CHAR,
      "Fragment contains invalid character"},

    { ER_PARSER_IN_USE,
      "Parser is already in use"},

    { ER_CANNOT_CHANGE_WHILE_PARSING,
      "Cannot change {0} {1} while parsing"},

    { ER_SELF_CAUSATION_NOT_PERMITTED,
      "Self-causation not permitted"},

    { ER_NO_USERINFO_IF_NO_HOST,
      "Userinfo may not be specified if host is not specified"},

    { ER_NO_PORT_IF_NO_HOST,
      "Port may not be specified if host is not specified"},

    { ER_NO_QUERY_STRING_IN_PATH,
      "Query string cannot be specified in path and query string"},

    { ER_NO_FRAGMENT_STRING_IN_PATH,
      "Fragment cannot be specified in both the path and fragment"},

    { ER_CANNOT_INIT_URI_EMPTY_PARMS,
      "Cannot initialize URI with empty parameters"},

    { ER_METHOD_NOT_SUPPORTED,
      "Method not yet supported "},

    { ER_INCRSAXSRCFILTER_NOT_RESTARTABLE,
      "IncrementalSAXSource_Filter not currently restartable"},

    { ER_XMLRDR_NOT_BEFORE_STARTPARSE,
      "XMLReader not before startParse request"},

    { ER_AXIS_TRAVERSER_NOT_SUPPORTED,
      "Axis traverser not supported: {0}"},

    { ER_ERRORHANDLER_CREATED_WITH_NULL_PRINTWRITER,
      "ListingErrorHandler created with null PrintWriter!"},

    { ER_SYSTEMID_UNKNOWN,
      "SystemId Unknown"},

    { ER_LOCATION_UNKNOWN,
      "Location of error unknown"},

    { ER_PREFIX_MUST_RESOLVE,
      "Prefix must resolve to a namespace: {0}"},

    { ER_CREATEDOCUMENT_NOT_SUPPORTED,
      "createDocument() not supported in XPathContext!"},

    { ER_CHILD_HAS_NO_OWNER_DOCUMENT,
      "Attribute child does not have an owner document!"},

    { ER_CHILD_HAS_NO_OWNER_DOCUMENT_ELEMENT,
      "Attribute child does not have an owner document element!"},

    { ER_CANT_OUTPUT_TEXT_BEFORE_DOC,
      "Warning: can't output text before document element!  Ignoring..."},

    { ER_CANT_HAVE_MORE_THAN_ONE_ROOT,
      "Can't have more than one root on a DOM!"},

    { ER_ARG_LOCALNAME_NULL,
       "Argument 'localName' is null"},

    // Note to translators:  A QNAME has the syntactic form [NCName:]NCName
    // The localname is the portion after the optional colon; the message indicates
    // that there is a problem with that part of the QNAME.
    { ER_ARG_LOCALNAME_INVALID,
       "Localname in QNAME should be a valid NCName"},

    // Note to translators:  A QNAME has the syntactic form [NCName:]NCName
    // The prefix is the portion before the optional colon; the message indicates
    // that there is a problem with that part of the QNAME.
    { ER_ARG_PREFIX_INVALID,
       "Prefix in QNAME should be a valid NCName"},

    { ER_NAME_CANT_START_WITH_COLON,
      "Name cannot start with a colon"},

    { "BAD_CODE", "Parameter to createMessage was out of bounds"},
    { "FORMAT_FAILED", "Exception thrown during messageFormat call"},
    { "line", "Line #"},
    { "column","Column #"},

    {ER_SERIALIZER_NOT_CONTENTHANDLER,
      "The serializer class ''{0}'' does not implement org.xml.sax.ContentHandler."},

    {ER_RESOURCE_COULD_NOT_FIND,
      "The resource [ {0} ] could not be found.\n {1}" },

    {ER_RESOURCE_COULD_NOT_LOAD,
      "The resource [ {0} ] could not load: {1} \n {2} \t {3}" },

    {ER_BUFFER_SIZE_LESSTHAN_ZERO,
      "Buffer size <=0" },

    {ER_INVALID_UTF16_SURROGATE,
      "Invalid UTF-16 surrogate detected: {0} ?" },

    {ER_OIERROR,
      "IO error" },

    {ER_ILLEGAL_ATTRIBUTE_POSITION,
      "Cannot add attribute {0} after child nodes or before an element is produced.  Attribute will be ignored."},

    /*
     * Note to translators:  The stylesheet contained a reference to a
     * namespace prefix that was undefined.  The value of the substitution
     * text is the name of the prefix.
     */
    {ER_NAMESPACE_PREFIX,
      "Namespace for prefix ''{0}'' has not been declared." },

    /*
     * Note to translators:  This message is reported if the stylesheet
     * being processed attempted to construct an XML document with an
     * attribute in a place other than on an element.  The substitution text
     * specifies the name of the attribute.
     */
    {ER_STRAY_ATTRIBUTE,
      "Attribute ''{0}'' outside of element." },

    /*
     * Note to translators:  As with the preceding message, a namespace
     * declaration has the form of an attribute and is only permitted to
     * appear on an element.  The substitution text {0} is the namespace
     * prefix and {1} is the URI that was being used in the erroneous
     * namespace declaration.
     */
    {ER_STRAY_NAMESPACE,
      "Namespace declaration ''{0}''=''{1}'' outside of element." },

    {ER_COULD_NOT_LOAD_RESOURCE,
      "Could not load ''{0}'' (check CLASSPATH), now using just the defaults"},

    { ER_ILLEGAL_CHARACTER,
       "Attempt to output character of integral value {0} that is not represented in specified output encoding of {1}."},

    // NOTE(review): "propery" is a typo in the shipped upstream message text;
    // left as-is because this is a runtime-emitted string.
    {ER_COULD_NOT_LOAD_METHOD_PROPERTY,
      "Could not load the propery file ''{0}'' for output method ''{1}'' (check CLASSPATH)" }

  };

  /**
   * Get the association list.
   *
   * @return The association list.
   */
  protected Object[][] getContents()
  {
    return contents;
  }

}
/* * Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.pomelo.devnews.loader; import java.io.BufferedInputStream; import java.io.BufferedWriter; import java.io.Closeable; import java.io.EOFException; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.FilterOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Reader; import java.io.StringWriter; import java.io.Writer; import java.lang.reflect.Array; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; /** ****************************************************************************** * Taken from the JB source code, can be found in: * libcore/luni/src/main/java/libcore/io/DiskLruCache.java * or direct link: * https://android.googlesource.com/platform/libcore/+/android-4.1.1_r1/luni/src/main/java/libcore/io/DiskLruCache.java ****************************************************************************** * 
* A cache that uses a bounded amount of space on a filesystem. Each cache * entry has a string key and a fixed number of values. Values are byte * sequences, accessible as streams or files. Each value must be between {@code * 0} and {@code Integer.MAX_VALUE} bytes in length. * * <p>The cache stores its data in a directory on the filesystem. This * directory must be exclusive to the cache; the cache may delete or overwrite * files from its directory. It is an error for multiple processes to use the * same cache directory at the same time. * * <p>This cache limits the number of bytes that it will store on the * filesystem. When the number of stored bytes exceeds the limit, the cache will * remove entries in the background until the limit is satisfied. The limit is * not strict: the cache may temporarily exceed it while waiting for files to be * deleted. The limit does not include filesystem overhead or the cache * journal so space-sensitive applications should set a conservative limit. * * <p>Clients call {@link #edit} to create or update the values of an entry. An * entry may have only one editor at one time; if a value is not available to be * edited then {@link #edit} will return null. * <ul> * <li>When an entry is being <strong>created</strong> it is necessary to * supply a full set of values; the empty value should be used as a * placeholder if necessary. * <li>When an entry is being <strong>edited</strong>, it is not necessary * to supply data for every value; values default to their previous * value. * </ul> * Every {@link #edit} call must be matched by a call to {@link Editor#commit} * or {@link Editor#abort}. Committing is atomic: a read observes the full set * of values as they were before or after the commit, but never a mix of values. * * <p>Clients call {@link #get} to read a snapshot of an entry. The read will * observe the value at the time that {@link #get} was called. Updates and * removals after the call do not impact ongoing reads. 
 * <p>This class is tolerant of some I/O errors. If files are missing from the
 * filesystem, the corresponding entries will be dropped from the cache. If
 * an error occurs while writing a cache value, the edit will fail silently.
 * Callers should handle other problems by catching {@code IOException} and
 * responding appropriately.
 */
public final class DiskLruCache implements Closeable {
    // Journal file names; the ".tmp" file exists only during journal compaction.
    static final String JOURNAL_FILE = "journal";
    static final String JOURNAL_FILE_TMP = "journal.tmp";
    // First two journal header lines: magic string and on-disk format version.
    static final String MAGIC = "libcore.io.DiskLruCache";
    static final String VERSION_1 = "1";
    static final long ANY_SEQUENCE_NUMBER = -1;
    // Journal record states (one per journal line).
    private static final String CLEAN = "CLEAN";
    private static final String DIRTY = "DIRTY";
    private static final String REMOVE = "REMOVE";
    private static final String READ = "READ";
    private static final Charset UTF_8 = Charset.forName("UTF-8");
    private static final int IO_BUFFER_SIZE = 8 * 1024;

    /*
     * This cache uses a journal file named "journal". A typical journal file
     * looks like this:
     *     libcore.io.DiskLruCache
     *     1
     *     100
     *     2
     *
     *     CLEAN 3400330d1dfc7f3f7f4b8d4d803dfcf6 832 21054
     *     DIRTY 335c4c6028171cfddfbaae1a9c313c52
     *     CLEAN 335c4c6028171cfddfbaae1a9c313c52 3934 2342
     *     REMOVE 335c4c6028171cfddfbaae1a9c313c52
     *     DIRTY 1ab96a171faeeee38496d8b330771a7a
     *     CLEAN 1ab96a171faeeee38496d8b330771a7a 1600 234
     *     READ 335c4c6028171cfddfbaae1a9c313c52
     *     READ 3400330d1dfc7f3f7f4b8d4d803dfcf6
     *
     * The first five lines of the journal form its header. They are the
     * constant string "libcore.io.DiskLruCache", the disk cache's version,
     * the application's version, the value count, and a blank line.
     *
     * Each of the subsequent lines in the file is a record of the state of a
     * cache entry. Each line contains space-separated values: a state, a key,
     * and optional state-specific values.
     *   o DIRTY lines track that an entry is actively being created or updated.
     *     Every successful DIRTY action should be followed by a CLEAN or REMOVE
     *     action.
DIRTY lines without a matching CLEAN or REMOVE indicate that * temporary files may need to be deleted. * o CLEAN lines track a cache entry that has been successfully published * and may be read. A publish line is followed by the lengths of each of * its values. * o READ lines track accesses for LRU. * o REMOVE lines track entries that have been deleted. * * The journal file is appended to as cache operations occur. The journal may * occasionally be compacted by dropping redundant lines. A temporary file named * "journal.tmp" will be used during compaction; that file should be deleted if * it exists when the cache is opened. */ private final File directory; private final File journalFile; private final File journalFileTmp; private final int appVersion; private final long maxSize; private final int valueCount; private long size = 0; private Writer journalWriter; private final LinkedHashMap<String, Entry> lruEntries = new LinkedHashMap<String, Entry>(0, 0.75f, true); private int redundantOpCount; /** * To differentiate between old and current snapshots, each entry is given * a sequence number each time an edit is committed. A snapshot is stale if * its sequence number is not equal to its entry's sequence number. */ private long nextSequenceNumber = 0; /* From java.util.Arrays */ @SuppressWarnings("unchecked") private static <T> T[] copyOfRange(T[] original, int start, int end) { final int originalLength = original.length; // For exception priority compatibility. if (start > end) { throw new IllegalArgumentException(); } if (start < 0 || start > originalLength) { throw new ArrayIndexOutOfBoundsException(); } final int resultLength = end - start; final int copyLength = Math.min(resultLength, originalLength - start); final T[] result = (T[]) Array .newInstance(original.getClass().getComponentType(), resultLength); System.arraycopy(original, start, result, 0, copyLength); return result; } /** * Returns the remainder of 'reader' as a string, closing it when done. 
*/ public static String readFully(Reader reader) throws IOException { try { StringWriter writer = new StringWriter(); char[] buffer = new char[1024]; int count; while ((count = reader.read(buffer)) != -1) { writer.write(buffer, 0, count); } return writer.toString(); } finally { reader.close(); } } /** * Returns the ASCII characters up to but not including the next "\r\n", or * "\n". * * @throws EOFException if the stream is exhausted before the next newline * character. */ public static String readAsciiLine(InputStream in) throws IOException { // TODO: support UTF-8 here instead StringBuilder result = new StringBuilder(80); while (true) { int c = in.read(); if (c == -1) { throw new EOFException(); } else if (c == '\n') { break; } result.append((char) c); } int length = result.length(); if (length > 0 && result.charAt(length - 1) == '\r') { result.setLength(length - 1); } return result.toString(); } /** * Closes 'closeable', ignoring any checked exceptions. Does nothing if 'closeable' is null. */ public static void closeQuietly(Closeable closeable) { if (closeable != null) { try { closeable.close(); } catch (RuntimeException rethrown) { throw rethrown; } catch (Exception ignored) { } } } /** * Recursively delete everything in {@code dir}. */ // TODO: this should specify paths as Strings rather than as Files public static void deleteContents(File dir) throws IOException { File[] files = dir.listFiles(); if (files == null) { throw new IllegalArgumentException("not a directory: " + dir); } for (File file : files) { if (file.isDirectory()) { deleteContents(file); } if (!file.delete()) { throw new IOException("failed to delete file: " + file); } } } /** This cache uses a single background thread to evict entries. 
*/ private final ExecutorService executorService = new ThreadPoolExecutor(0, 1, 60L, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>()); private final Callable<Void> cleanupCallable = new Callable<Void>() { @Override public Void call() throws Exception { synchronized (DiskLruCache.this) { if (journalWriter == null) { return null; // closed } trimToSize(); if (journalRebuildRequired()) { rebuildJournal(); redundantOpCount = 0; } } return null; } }; private DiskLruCache(File directory, int appVersion, int valueCount, long maxSize) { this.directory = directory; this.appVersion = appVersion; this.journalFile = new File(directory, JOURNAL_FILE); this.journalFileTmp = new File(directory, JOURNAL_FILE_TMP); this.valueCount = valueCount; this.maxSize = maxSize; } /** * Opens the cache in {@code directory}, creating a cache if none exists * there. * * @param directory a writable directory * @param appVersion * @param valueCount the number of values per cache entry. Must be positive. * @param maxSize the maximum number of bytes this cache should use to store * @throws IOException if reading or writing the cache directory fails */ public static DiskLruCache open(File directory, int appVersion, int valueCount, long maxSize) throws IOException { if (maxSize <= 0) { throw new IllegalArgumentException("maxSize <= 0"); } if (valueCount <= 0) { throw new IllegalArgumentException("valueCount <= 0"); } // prefer to pick up where we left off DiskLruCache cache = new DiskLruCache(directory, appVersion, valueCount, maxSize); if (cache.journalFile.exists()) { try { cache.readJournal(); cache.processJournal(); cache.journalWriter = new BufferedWriter(new FileWriter(cache.journalFile, true), IO_BUFFER_SIZE); return cache; } catch (IOException journalIsCorrupt) { // System.logW("DiskLruCache " + directory + " is corrupt: " // + journalIsCorrupt.getMessage() + ", removing"); cache.delete(); } } // create a new empty cache directory.mkdirs(); cache = new DiskLruCache(directory, 
appVersion, valueCount, maxSize); cache.rebuildJournal(); return cache; } private void readJournal() throws IOException { InputStream in = new BufferedInputStream(new FileInputStream(journalFile), IO_BUFFER_SIZE); try { String magic = readAsciiLine(in); String version = readAsciiLine(in); String appVersionString = readAsciiLine(in); String valueCountString = readAsciiLine(in); String blank = readAsciiLine(in); if (!MAGIC.equals(magic) || !VERSION_1.equals(version) || !Integer.toString(appVersion).equals(appVersionString) || !Integer.toString(valueCount).equals(valueCountString) || !"".equals(blank)) { throw new IOException("unexpected journal header: [" + magic + ", " + version + ", " + valueCountString + ", " + blank + "]"); } while (true) { try { readJournalLine(readAsciiLine(in)); } catch (EOFException endOfJournal) { break; } } } finally { closeQuietly(in); } } private void readJournalLine(String line) throws IOException { String[] parts = line.split(" "); if (parts.length < 2) { throw new IOException("unexpected journal line: " + line); } String key = parts[1]; if (parts[0].equals(REMOVE) && parts.length == 2) { lruEntries.remove(key); return; } Entry entry = lruEntries.get(key); if (entry == null) { entry = new Entry(key); lruEntries.put(key, entry); } if (parts[0].equals(CLEAN) && parts.length == 2 + valueCount) { entry.readable = true; entry.currentEditor = null; entry.setLengths(copyOfRange(parts, 2, parts.length)); } else if (parts[0].equals(DIRTY) && parts.length == 2) { entry.currentEditor = new Editor(entry); } else if (parts[0].equals(READ) && parts.length == 2) { // this work was already done by calling lruEntries.get() } else { throw new IOException("unexpected journal line: " + line); } } /** * Computes the initial size and collects garbage as a part of opening the * cache. Dirty entries are assumed to be inconsistent and will be deleted. 
*/ private void processJournal() throws IOException { deleteIfExists(journalFileTmp); for (Iterator<Entry> i = lruEntries.values().iterator(); i.hasNext(); ) { Entry entry = i.next(); if (entry.currentEditor == null) { for (int t = 0; t < valueCount; t++) { size += entry.lengths[t]; } } else { entry.currentEditor = null; for (int t = 0; t < valueCount; t++) { deleteIfExists(entry.getCleanFile(t)); deleteIfExists(entry.getDirtyFile(t)); } i.remove(); } } } /** * Creates a new journal that omits redundant information. This replaces the * current journal if it exists. */ private synchronized void rebuildJournal() throws IOException { if (journalWriter != null) { journalWriter.close(); } Writer writer = new BufferedWriter(new FileWriter(journalFileTmp), IO_BUFFER_SIZE); writer.write(MAGIC); writer.write("\n"); writer.write(VERSION_1); writer.write("\n"); writer.write(Integer.toString(appVersion)); writer.write("\n"); writer.write(Integer.toString(valueCount)); writer.write("\n"); writer.write("\n"); for (Entry entry : lruEntries.values()) { if (entry.currentEditor != null) { writer.write(DIRTY + ' ' + entry.key + '\n'); } else { writer.write(CLEAN + ' ' + entry.key + entry.getLengths() + '\n'); } } writer.close(); journalFileTmp.renameTo(journalFile); journalWriter = new BufferedWriter(new FileWriter(journalFile, true), IO_BUFFER_SIZE); } private static void deleteIfExists(File file) throws IOException { // try { // Libcore.os.remove(file.getPath()); // } catch (ErrnoException errnoException) { // if (errnoException.errno != OsConstants.ENOENT) { // throw errnoException.rethrowAsIOException(); // } // } if (file.exists() && !file.delete()) { throw new IOException(); } } /** * Returns a snapshot of the entry named {@code key}, or null if it doesn't * exist is not currently readable. If a value is returned, it is moved to * the head of the LRU queue. 
*/ public synchronized Snapshot get(String key) throws IOException { checkNotClosed(); validateKey(key); Entry entry = lruEntries.get(key); if (entry == null) { return null; } if (!entry.readable) { return null; } /* * Open all streams eagerly to guarantee that we see a single published * snapshot. If we opened streams lazily then the streams could come * from different edits. */ InputStream[] ins = new InputStream[valueCount]; try { for (int i = 0; i < valueCount; i++) { ins[i] = new FileInputStream(entry.getCleanFile(i)); } } catch (FileNotFoundException e) { // a file must have been deleted manually! return null; } redundantOpCount++; journalWriter.append(READ + ' ' + key + '\n'); if (journalRebuildRequired()) { executorService.submit(cleanupCallable); } return new Snapshot(key, entry.sequenceNumber, ins); } /** * Returns an editor for the entry named {@code key}, or null if another * edit is in progress. */ public Editor edit(String key) throws IOException { return edit(key, ANY_SEQUENCE_NUMBER); } private synchronized Editor edit(String key, long expectedSequenceNumber) throws IOException { checkNotClosed(); validateKey(key); Entry entry = lruEntries.get(key); if (expectedSequenceNumber != ANY_SEQUENCE_NUMBER && (entry == null || entry.sequenceNumber != expectedSequenceNumber)) { return null; // snapshot is stale } if (entry == null) { entry = new Entry(key); lruEntries.put(key, entry); } else if (entry.currentEditor != null) { return null; // another edit is in progress } Editor editor = new Editor(entry); entry.currentEditor = editor; // flush the journal before creating files to prevent file leaks journalWriter.write(DIRTY + ' ' + key + '\n'); journalWriter.flush(); return editor; } /** * Returns the directory where this cache stores its data. */ public File getDirectory() { return directory; } /** * Returns the maximum number of bytes that this cache should use to store * its data. 
*/ public long maxSize() { return maxSize; } /** * Returns the number of bytes currently being used to store the values in * this cache. This may be greater than the max size if a background * deletion is pending. */ public synchronized long size() { return size; } private synchronized void completeEdit(Editor editor, boolean success) throws IOException { Entry entry = editor.entry; if (entry.currentEditor != editor) { throw new IllegalStateException(); } // if this edit is creating the entry for the first time, every index must have a value if (success && !entry.readable) { for (int i = 0; i < valueCount; i++) { if (!entry.getDirtyFile(i).exists()) { editor.abort(); throw new IllegalStateException("edit didn't create file " + i); } } } for (int i = 0; i < valueCount; i++) { File dirty = entry.getDirtyFile(i); if (success) { if (dirty.exists()) { File clean = entry.getCleanFile(i); dirty.renameTo(clean); long oldLength = entry.lengths[i]; long newLength = clean.length(); entry.lengths[i] = newLength; size = size - oldLength + newLength; } } else { deleteIfExists(dirty); } } redundantOpCount++; entry.currentEditor = null; if (entry.readable | success) { entry.readable = true; journalWriter.write(CLEAN + ' ' + entry.key + entry.getLengths() + '\n'); if (success) { entry.sequenceNumber = nextSequenceNumber++; } } else { lruEntries.remove(entry.key); journalWriter.write(REMOVE + ' ' + entry.key + '\n'); } if (size > maxSize || journalRebuildRequired()) { executorService.submit(cleanupCallable); } } /** * We only rebuild the journal when it will halve the size of the journal * and eliminate at least 2000 ops. */ private boolean journalRebuildRequired() { final int REDUNDANT_OP_COMPACT_THRESHOLD = 2000; return redundantOpCount >= REDUNDANT_OP_COMPACT_THRESHOLD && redundantOpCount >= lruEntries.size(); } /** * Drops the entry for {@code key} if it exists and can be removed. Entries * actively being edited cannot be removed. * * @return true if an entry was removed. 
*/ public synchronized boolean remove(String key) throws IOException { checkNotClosed(); validateKey(key); Entry entry = lruEntries.get(key); if (entry == null || entry.currentEditor != null) { return false; } for (int i = 0; i < valueCount; i++) { File file = entry.getCleanFile(i); if (!file.delete()) { throw new IOException("failed to delete " + file); } size -= entry.lengths[i]; entry.lengths[i] = 0; } redundantOpCount++; journalWriter.append(REMOVE + ' ' + key + '\n'); lruEntries.remove(key); if (journalRebuildRequired()) { executorService.submit(cleanupCallable); } return true; } /** * Returns true if this cache has been closed. */ public boolean isClosed() { return journalWriter == null; } private void checkNotClosed() { if (journalWriter == null) { throw new IllegalStateException("cache is closed"); } } /** * Force buffered operations to the filesystem. */ public synchronized void flush() throws IOException { checkNotClosed(); trimToSize(); journalWriter.flush(); } /** * Closes this cache. Stored values will remain on the filesystem. */ public synchronized void close() throws IOException { if (journalWriter == null) { return; // already closed } for (Entry entry : new ArrayList<Entry>(lruEntries.values())) { if (entry.currentEditor != null) { entry.currentEditor.abort(); } } trimToSize(); journalWriter.close(); journalWriter = null; } private void trimToSize() throws IOException { while (size > maxSize) { // Map.Entry<String, Entry> toEvict = lruEntries.eldest(); final Map.Entry<String, Entry> toEvict = lruEntries.entrySet().iterator().next(); remove(toEvict.getKey()); } } /** * Closes the cache and deletes all of its stored values. This will delete * all files in the cache directory including files that weren't created by * the cache. 
*/ public void delete() throws IOException { close(); deleteContents(directory); } private void validateKey(String key) { if (key.contains(" ") || key.contains("\n") || key.contains("\r")) { throw new IllegalArgumentException( "keys must not contain spaces or newlines: \"" + key + "\""); } } private static String inputStreamToString(InputStream in) throws IOException { return readFully(new InputStreamReader(in, UTF_8)); } /** * A snapshot of the values for an entry. */ public final class Snapshot implements Closeable { private final String key; private final long sequenceNumber; private final InputStream[] ins; private Snapshot(String key, long sequenceNumber, InputStream[] ins) { this.key = key; this.sequenceNumber = sequenceNumber; this.ins = ins; } /** * Returns an editor for this snapshot's entry, or null if either the * entry has changed since this snapshot was created or if another edit * is in progress. */ public Editor edit() throws IOException { return DiskLruCache.this.edit(key, sequenceNumber); } /** * Returns the unbuffered stream with the value for {@code index}. */ public InputStream getInputStream(int index) { return ins[index]; } /** * Returns the string value for {@code index}. */ public String getString(int index) throws IOException { return inputStreamToString(getInputStream(index)); } @Override public void close() { for (InputStream in : ins) { closeQuietly(in); } } } /** * Edits the values for an entry. */ public final class Editor { private final Entry entry; private boolean hasErrors; private Editor(Entry entry) { this.entry = entry; } /** * Returns an unbuffered input stream to read the last committed value, * or null if no value has been committed. 
*/ public InputStream newInputStream(int index) throws IOException { synchronized (DiskLruCache.this) { if (entry.currentEditor != this) { throw new IllegalStateException(); } if (!entry.readable) { return null; } return new FileInputStream(entry.getCleanFile(index)); } } /** * Returns the last committed value as a string, or null if no value * has been committed. */ public String getString(int index) throws IOException { InputStream in = newInputStream(index); return in != null ? inputStreamToString(in) : null; } /** * Returns a new unbuffered output stream to write the value at * {@code index}. If the underlying output stream encounters errors * when writing to the filesystem, this edit will be aborted when * {@link #commit} is called. The returned output stream does not throw * IOExceptions. */ public OutputStream newOutputStream(int index) throws IOException { synchronized (DiskLruCache.this) { if (entry.currentEditor != this) { throw new IllegalStateException(); } return new FaultHidingOutputStream(new FileOutputStream(entry.getDirtyFile(index))); } } /** * Sets the value at {@code index} to {@code value}. */ public void set(int index, String value) throws IOException { Writer writer = null; try { writer = new OutputStreamWriter(newOutputStream(index), UTF_8); writer.write(value); } finally { closeQuietly(writer); } } /** * Commits this edit so it is visible to readers. This releases the * edit lock so another edit may be started on the same key. */ public void commit() throws IOException { if (hasErrors) { completeEdit(this, false); remove(entry.key); // the previous entry is stale } else { completeEdit(this, true); } } /** * Aborts this edit. This releases the edit lock so another edit may be * started on the same key. 
*/ public void abort() throws IOException { completeEdit(this, false); } private class FaultHidingOutputStream extends FilterOutputStream { private FaultHidingOutputStream(OutputStream out) { super(out); } @Override public void write(int oneByte) { try { out.write(oneByte); } catch (IOException e) { hasErrors = true; } } @Override public void write(byte[] buffer, int offset, int length) { try { out.write(buffer, offset, length); } catch (IOException e) { hasErrors = true; } } @Override public void close() { try { out.close(); } catch (IOException e) { hasErrors = true; } } @Override public void flush() { try { out.flush(); } catch (IOException e) { hasErrors = true; } } } } private final class Entry { private final String key; /** Lengths of this entry's files. */ private final long[] lengths; /** True if this entry has ever been published */ private boolean readable; /** The ongoing edit or null if this entry is not being edited. */ private Editor currentEditor; /** The sequence number of the most recently committed edit to this entry. */ private long sequenceNumber; private Entry(String key) { this.key = key; this.lengths = new long[valueCount]; } public String getLengths() throws IOException { StringBuilder result = new StringBuilder(); for (long size : lengths) { result.append(' ').append(size); } return result.toString(); } /** * Set lengths using decimal numbers like "10123". */ private void setLengths(String[] strings) throws IOException { if (strings.length != valueCount) { throw invalidLengths(strings); } try { for (int i = 0; i < strings.length; i++) { lengths[i] = Long.parseLong(strings[i]); } } catch (NumberFormatException e) { throw invalidLengths(strings); } } private IOException invalidLengths(String[] strings) throws IOException { throw new IOException("unexpected journal line: " + Arrays.toString(strings)); } public File getCleanFile(int i) { return new File(directory, key + "." 
+ i); } public File getDirtyFile(int i) { return new File(directory, key + "." + i + ".tmp"); } } }
/* * Copyright 2012 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.handler.codec.http; import io.netty.util.AsciiString; import static io.netty.util.internal.MathUtil.findNextPositivePowerOfTwo; import static java.util.Objects.requireNonNull; /** * The request method of HTTP or its derived protocols, such as * <a href="http://en.wikipedia.org/wiki/Real_Time_Streaming_Protocol">RTSP</a> and * <a href="http://en.wikipedia.org/wiki/Internet_Content_Adaptation_Protocol">ICAP</a>. */ public class HttpMethod implements Comparable<HttpMethod> { /** * The OPTIONS method represents a request for information about the communication options * available on the request/response chain identified by the Request-URI. This method allows * the client to determine the options and/or requirements associated with a resource, or the * capabilities of a server, without implying a resource action or initiating a resource * retrieval. */ public static final HttpMethod OPTIONS = new HttpMethod("OPTIONS"); /** * The GET method means retrieve whatever information (in the form of an entity) is identified * by the Request-URI. If the Request-URI refers to a data-producing process, it is the * produced data which shall be returned as the entity in the response and not the source text * of the process, unless that text happens to be the output of the process. 
*/ public static final HttpMethod GET = new HttpMethod("GET"); /** * The HEAD method is identical to GET except that the server MUST NOT return a message-body * in the response. */ public static final HttpMethod HEAD = new HttpMethod("HEAD"); /** * The POST method is used to request that the origin server accept the entity enclosed in the * request as a new subordinate of the resource identified by the Request-URI in the * Request-Line. */ public static final HttpMethod POST = new HttpMethod("POST"); /** * The PUT method requests that the enclosed entity be stored under the supplied Request-URI. */ public static final HttpMethod PUT = new HttpMethod("PUT"); /** * The PATCH method requests that a set of changes described in the * request entity be applied to the resource identified by the Request-URI. */ public static final HttpMethod PATCH = new HttpMethod("PATCH"); /** * The DELETE method requests that the origin server delete the resource identified by the * Request-URI. */ public static final HttpMethod DELETE = new HttpMethod("DELETE"); /** * The TRACE method is used to invoke a remote, application-layer loop- back of the request * message. 
*/ public static final HttpMethod TRACE = new HttpMethod("TRACE"); /** * This specification reserves the method name CONNECT for use with a proxy that can dynamically * switch to being a tunnel */ public static final HttpMethod CONNECT = new HttpMethod("CONNECT"); private static final EnumNameMap<HttpMethod> methodMap; static { methodMap = new EnumNameMap<>( new EnumNameMap.Node<>(OPTIONS.toString(), OPTIONS), new EnumNameMap.Node<>(GET.toString(), GET), new EnumNameMap.Node<>(HEAD.toString(), HEAD), new EnumNameMap.Node<>(POST.toString(), POST), new EnumNameMap.Node<>(PUT.toString(), PUT), new EnumNameMap.Node<>(PATCH.toString(), PATCH), new EnumNameMap.Node<>(DELETE.toString(), DELETE), new EnumNameMap.Node<>(TRACE.toString(), TRACE), new EnumNameMap.Node<>(CONNECT.toString(), CONNECT)); } /** * Returns the {@link HttpMethod} represented by the specified name. * If the specified name is a standard HTTP method name, a cached instance * will be returned. Otherwise, a new instance will be returned. */ public static HttpMethod valueOf(String name) { HttpMethod result = methodMap.get(name); return result != null ? result : new HttpMethod(name); } private final AsciiString name; /** * Creates a new HTTP method with the specified name. You will not need to * create a new method unless you are implementing a protocol derived from * HTTP, such as * <a href="http://en.wikipedia.org/wiki/Real_Time_Streaming_Protocol">RTSP</a> and * <a href="http://en.wikipedia.org/wiki/Internet_Content_Adaptation_Protocol">ICAP</a> */ public HttpMethod(String name) { name = requireNonNull(name, "name").trim(); if (name.isEmpty()) { throw new IllegalArgumentException("empty name"); } for (int i = 0; i < name.length(); i ++) { char c = name.charAt(i); if (Character.isISOControl(c) || Character.isWhitespace(c)) { throw new IllegalArgumentException("invalid character in name"); } } this.name = AsciiString.cached(name); } /** * Returns the name of this method. 
*/ public String name() { return name.toString(); } /** * Returns the name of this method. */ public AsciiString asciiName() { return name; } @Override public int hashCode() { return name().hashCode(); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (!(o instanceof HttpMethod)) { return false; } HttpMethod that = (HttpMethod) o; return name().equals(that.name()); } @Override public String toString() { return name.toString(); } @Override public int compareTo(HttpMethod o) { if (o == this) { return 0; } return name().compareTo(o.name()); } private static final class EnumNameMap<T> { private final EnumNameMap.Node<T>[] values; private final int valuesMask; EnumNameMap(EnumNameMap.Node<T>... nodes) { values = (EnumNameMap.Node<T>[]) new EnumNameMap.Node[findNextPositivePowerOfTwo(nodes.length)]; valuesMask = values.length - 1; for (EnumNameMap.Node<T> node : nodes) { int i = hashCode(node.key) & valuesMask; if (values[i] != null) { throw new IllegalArgumentException("index " + i + " collision between values: [" + values[i].key + ", " + node.key + ']'); } values[i] = node; } } T get(String name) { EnumNameMap.Node<T> node = values[hashCode(name) & valuesMask]; return node == null || !node.key.equals(name) ? null : node.value; } private static int hashCode(String name) { // This hash code needs to produce a unique index in the "values" array for each HttpMethod. If new // HttpMethods are added this algorithm will need to be adjusted. The constructor will "fail fast" if there // are duplicates detected. // For example with the current set of HttpMethods it just so happens that the String hash code value // shifted right by 6 bits modulo 16 is unique relative to all other HttpMethod values. return name.hashCode() >>> 6; } private static final class Node<T> { final String key; final T value; Node(String key, T value) { this.key = key; this.value = value; } } } }
package com.ryansteckler.nlpunbounce; /** * Created by rsteckler on 9/7/14. */ import android.animation.Animator; import android.animation.AnimatorSet; import android.animation.LayoutTransition; import android.animation.ObjectAnimator; import android.animation.ValueAnimator; import android.app.Activity; import android.app.AlertDialog; import android.app.Fragment; import android.content.BroadcastReceiver; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.IntentFilter; import android.content.SharedPreferences; import android.content.pm.PackageManager; import android.content.res.Resources; import android.net.Uri; import android.os.Bundle; import android.os.Handler; import android.os.Message; import android.text.Html; import android.text.method.LinkMovementMethod; import android.util.DisplayMetrics; import android.util.Log; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.view.animation.AccelerateDecelerateInterpolator; import android.view.animation.LinearInterpolator; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.ProgressBar; import android.widget.ScrollView; import android.widget.TextView; import com.ryansteckler.nlpunbounce.helpers.DownloadHelper; import com.ryansteckler.nlpunbounce.helpers.LocaleHelper; import com.ryansteckler.nlpunbounce.helpers.RootHelper; import com.ryansteckler.nlpunbounce.helpers.SettingsHelper; import com.ryansteckler.nlpunbounce.helpers.ThemeHelper; import com.ryansteckler.nlpunbounce.hooks.Wakelocks; import com.ryansteckler.nlpunbounce.models.UnbounceStatsCollection; import java.io.File; /** * A placeholder fragment containing a simple view. 
*/
public class HomeFragment extends Fragment {

    // Host-activity callback used to push this fragment's title.
    private OnFragmentInteractionListener mListener;

    // Current step of the welcome/setup wizard animation (0 = not started).
    private int mSetupStep = 0;
    private int mSetupFailureStep = SETUP_FAILURE_NONE; //We're optimists :)

    private final static int SETUP_FAILURE_NONE = 0; //We're good.  The service is running.
    private final static int SETUP_FAILURE_SERVICE = 1; //The service isn't running, but Xposed is installed.
    private final static int SETUP_FAILURE_VERSION = 2; //The service is present but reports a stale version.
    private final static int SETUP_FAILURE_XPOSED_RUNNING = 3; //Xposed isn't running ("installed")
    private final static int SETUP_FAILURE_XPOSED_INSTALL = 4; //Xposed isn't installed
    private final static int SETUP_FAILURE_ROOT = 5; //There's no root access.

    private final static String TAG = "Amplify: ";

    /**
     * Returns a new instance of this fragment for the given section
     * number.
     */
    public static HomeFragment newInstance() {
        HomeFragment fragment = new HomeFragment();
        return fragment;
    }

    public HomeFragment() {
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        LocaleHelper.onActivityCreateSetLocale(this.getActivity());
        ThemeHelper.onActivityCreateSetTheme(this.getActivity());
        setHasOptionsMenu(true);

        // MODE_WORLD_READABLE - presumably so the Xposed-hooked process can read the same
        // prefs file; TODO confirm against the hook side.
        SharedPreferences prefs = getActivity().getSharedPreferences("com.ryansteckler.nlpunbounce" + "_preferences", Context.MODE_WORLD_READABLE);
        String lastVersion = prefs.getString("file_version", "0");
        if (!lastVersion.equals(Wakelocks.FILE_VERSION)) {
            //Reset stats: the on-disk stats format changed since the last run.
            UnbounceStatsCollection.getInstance().recreateFiles(getActivity());
            Intent intent = new Intent(XposedReceiver.REFRESH_ACTION);
            intent.putExtra(XposedReceiver.STAT_TYPE, UnbounceStatsCollection.STAT_CURRENT);
            try {
                getActivity().sendBroadcast(intent);
            } catch (IllegalStateException ise) {
                // Best-effort broadcast; deliberately ignored if the activity state forbids it.
            }
        }
    }

    @Override
    public void onDestroyView() {
        super.onDestroyView();
        // Receiver is registered in onViewCreated; unregister here to avoid a leak.
        getActivity().unregisterReceiver(refreshReceiver);
    }

    @Override
    public void onViewCreated(final View view, Bundle
savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        mListener.onHomeSetTitle(getResources().getString(R.string.title_home));

        //Register for stats updates
        refreshReceiver = new BroadcastReceiver() {
            @Override
            public void onReceive(Context context, Intent intent) {
                loadStatsFromSource(view);
            }
        };

        //Register when new stats come in.
        getActivity().registerReceiver(refreshReceiver, new IntentFilter(ActivityReceiver.STATS_REFRESHED_ACTION));

        loadStatsFromSource(view);
        setupResetStatsButton(view);
        setupKarma(view);
        updatePremiumUi();
        requestRefresh();
        handleSetup(view);
    }

    /**
     * Drives the first-run / upgrade "welcome" experience: shows the banner when this is
     * the first launch or the hooked service version doesn't match, diagnoses why the
     * service isn't working (mSetupFailureStep), and starts the detection animations.
     */
    private void handleSetup(final View view) {
        //All the first run stuff:
        final SharedPreferences prefs = getActivity().getSharedPreferences("com.ryansteckler.nlpunbounce" + "_preferences", Context.MODE_WORLD_READABLE);
        boolean firstRun = prefs.getBoolean("first_launch", true);
        if (!getAmplifyKernelVersion().equals(Wakelocks.VERSION) || firstRun) {
            //Show the banner
            final LinearLayout banner = (LinearLayout)view.findViewById(R.id.banner);
            banner.setVisibility(View.VISIBLE);

            //Let's find out why the service isn't running.  Nested checks: the deepest
            //failing check wins, so the most fundamental problem is reported.
            if (!getAmplifyKernelVersion().equals(Wakelocks.VERSION)) {
                mSetupFailureStep = SETUP_FAILURE_VERSION;
                if (!isUnbounceServiceRunning()) {
                    mSetupFailureStep = SETUP_FAILURE_SERVICE;
                    if (!isXposedInstalled()) {
                        mSetupFailureStep = SETUP_FAILURE_XPOSED_INSTALL;
                        if (!RootHelper.isDeviceRooted()) {
                            mSetupFailureStep = SETUP_FAILURE_ROOT;
                        }
                    }
                }
            }

            //Disable navigation away from the welcome banner.
            //TODO: Fade the home bar?
getActivity().getActionBar().setHomeButtonEnabled(false); //Setup animations on the banner view.post(new Runnable() { @Override public void run() { int waitForAttach = 0; while (getActivity() == null && waitForAttach < 10) { try { Thread.sleep(250); } catch (InterruptedException e) { } } ViewGroup container = (ViewGroup)getActivity().findViewById(R.id.bannerSwitcher); setupBannerAnimations(container); ViewGroup buttonContainer = (ViewGroup)getActivity().findViewById(R.id.welcomeButtonContainer); animateButtonContainer(buttonContainer); } }); //Blur the background and store the animation so we can reverse it when we're done ValueAnimator blurAnimation = blurBackground(view); //This progress animation drives the rest of the logic. At different steps in the animation, we do //different things. The last step takes care of "fixing" whatever problems exist. final ProgressBar progressChecking = (ProgressBar) view.findViewById(R.id.progressDetect); progressChecking.setProgress(0); final ValueAnimator progressAnimation = ValueAnimator.ofInt(0, 100); WelcomeAnimationListener welcomeListener = new WelcomeAnimationListener(banner, blurAnimation, progressChecking, progressAnimation); progressAnimation.addListener(welcomeListener); progressAnimation.addUpdateListener(welcomeListener); progressAnimation.setDuration(2000); progressAnimation.setStartDelay(200); //Create a small gap between each step, so they look discrete progressAnimation.setInterpolator(new LinearInterpolator()); //Start the animations. blurAnimation.start(); new Handler().postDelayed(new Runnable() { @Override public void run() { progressAnimation.start(); } }, 1200); //Let the screen "come up" and blur start. Let the user take in the screen before starting things moving. 
}
    }

    /**
     * Listens to the welcome-banner progress animation and advances the wizard one step
     * each time the bar completes; step 3 reveals the result / fix-it UI.
     */
    private class WelcomeAnimationListener implements Animator.AnimatorListener, ValueAnimator.AnimatorUpdateListener {

        @Override
        public void onAnimationCancel(Animator animator) {}

        @Override
        public void onAnimationRepeat(Animator animator) {}

        @Override
        public void onAnimationStart(Animator animator) {}

        // Root view of the banner UI this listener drives.
        private View mParentView;
        // Background blur animation; reversed when the wizard completes.
        private ValueAnimator mReverseWhenDone;
        private ProgressBar mProgressChecking;
        ValueAnimator mProgressAnimation;

        public WelcomeAnimationListener(View parentView, final ValueAnimator reverseWhenDone, ProgressBar progressChecking, ValueAnimator progressAnimation) {
            mParentView = parentView;
            mReverseWhenDone = reverseWhenDone;
            mProgressChecking = progressChecking;
            mProgressAnimation = progressAnimation;
        }

        @Override
        public void onAnimationUpdate(final ValueAnimator animator) {
            int curValue = (int) animator.getAnimatedValue();
            // Only touch views while the fragment is attached.
            if (isAdded()) {
                mProgressChecking.setProgress(curValue);
                mProgressChecking.requestLayout();
            }
        }

        @Override
        public void onAnimationEnd(Animator animator) {
            //Each time the animation finishes, handle the next step
            mSetupStep++;
            Log.i(TAG, "OnAnimationEnd called. We're on step: " + mSetupStep);
            Log.d(TAG, "Original fragment status: " + (isAdded() ? "True" : "False"));
            Log.d(TAG, "Refreshing fragment status");
            getFragmentManager().executePendingTransactions();
            Log.d(TAG, "New fragment status: " + (isAdded() ?
"True" : "False"));
            if (isAdded()) {
                final TextView stepText = (TextView) mParentView.findViewById(R.id.welcomeStepText);
                if (mSetupStep == 1) {
                    Log.d(TAG, "Starting animation for step 1.");
                    Log.d(TAG, "Status of animation.isRunning (Should be false): " + mProgressAnimation.isRunning());
                    stepText.setText(getResources().getString(R.string.welcome_banner_checking_xposed));
                    mProgressChecking.setProgress(0);
                    //Re-arm the progress bar for the next step.
                    mProgressAnimation = ValueAnimator.ofInt(0, 100);
                    mProgressAnimation.addListener(this);
                    mProgressAnimation.addUpdateListener(this);
                    mProgressAnimation.setDuration(2000);
                    mProgressAnimation.setStartDelay(200); //Create a small gap between each step, so they look discrete
                    mProgressAnimation.setInterpolator(new LinearInterpolator());
                    mProgressAnimation.start();
                    Log.d(TAG, "Started animation for step 1.");
                } else if (mSetupStep == 2) {
                    Log.i(TAG, "Starting animation for step 2.");
                    stepText.setText(getResources().getString(R.string.welcome_banner_checking_root));
                    mProgressAnimation = ValueAnimator.ofInt(0, 100);
                    mProgressAnimation.addListener(this);
                    mProgressAnimation.addUpdateListener(this);
                    mProgressAnimation.setDuration(2000);
                    mProgressAnimation.setStartDelay(200); //Create a small gap between each step, so they look discrete
                    mProgressAnimation.setInterpolator(new LinearInterpolator());
                    mProgressAnimation.start();
                    Log.d(TAG, "Started animation for step 2.");
                } else if (mSetupStep == 3) {
                    handleFinalStep();
                }
            } else {
                Log.i(TAG, "Not running animation because the fragment isn't added.");
            }
        }

        /**
         * Final wizard step: show the good/bad result text and wire the "next" button to
         * the handler that matches the failure diagnosed in handleSetup().
         */
        private void handleFinalStep() {
            //Setup the text on the final screen to good/bad to set user expectations
            final TextView stepText = (TextView)mParentView.findViewById(R.id.welcomeStepText);
            if (mSetupFailureStep == SETUP_FAILURE_NONE) {
                stepText.setText(getResources().getString(R.string.welcome_banner_checking_looks_great));
            } else {
                stepText.setText(getResources().getString(R.string.welcome_banner_checking_uhoh));
            }

            //This is the next button that we hide, show, and replace the text of.  Make it
            //visible so the user can move forward.
            final LinearLayout nextButton = (LinearLayout)mParentView.findViewById(R.id.buttonWelcomeNext);
            nextButton.setVisibility(View.VISIBLE);
            nextButton.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    //When the user clicks the button, hide it so it doesn't move during re-layout
                    nextButton.setVisibility(View.INVISIBLE);

                    //The text of the problem, and how to fix it
                    TextView problemText = (TextView) getActivity().findViewById(R.id.textWelcomeProblemDescription);

                    //The text of the next button
                    final TextView nextButtonText = (TextView) getActivity().findViewById(R.id.buttonTextWelcomeNext);

                    if (mSetupFailureStep == SETUP_FAILURE_NONE) {
                        //Everything is good!
                        handleNoFailure(problemText, nextButton);
                    } else if (mSetupFailureStep == SETUP_FAILURE_SERVICE) {
                        //Service isn't running
                        handleServiceFailure(problemText, nextButtonText, nextButton);
                    } else if (mSetupFailureStep == SETUP_FAILURE_VERSION) {
                        //Service is the wrong version
                        handleVersionFailure(problemText, nextButtonText, nextButton);
                    } else if (mSetupFailureStep == SETUP_FAILURE_XPOSED_RUNNING) {
                        //Xposed isn't running
                        handleXposedRunningFailure(problemText, nextButtonText, nextButton);
                    } else if (mSetupFailureStep == SETUP_FAILURE_XPOSED_INSTALL) {
                        //Xposed isn't installed
                        //This is the tricky one...
                        handleXposedInstalledFailure(problemText, nextButtonText, nextButton);
                    } else if (mSetupFailureStep == SETUP_FAILURE_ROOT) {
                        //The device isn't rooted
                        handleRootFailure(problemText, nextButtonText, nextButton);
                    }

                    //The views should be setup now.  Swap out the "checking" view, and swap in the "fixit" view
                    View vOut = getActivity().findViewById(R.id.welcomeDetection);
                    final View vIn = getActivity().findViewById(R.id.welcomeProblem);
                    vOut.setVisibility(View.GONE);
                    new Handler().postDelayed(new Runnable() {
                        @Override
                        public void run() {
                            vIn.setVisibility(View.VISIBLE);
                            new Handler().postDelayed(new Runnable() {
                                @Override
                                public void run() {
                                    nextButton.setVisibility(View.VISIBLE);
                                }
                            }, 300);
                        }
                    }, 300);
                }
            });
        }

        /**
         * No root access: show the explanation and turn the next button into an exit button.
         */
        private void handleRootFailure(TextView problemText, TextView nextButtonText, LinearLayout nextButton) {
            nextButtonText.setText(getResources().getString(R.string.welcome_banner_button_exit));
            String errorFormat = getResources().getString(R.string.welcome_banner_problem_root);
            //NOTE(review): the raw resource id (an int) is passed as the format argument
            //rather than the resolved string - verify the format string really expects that.
            String errorText = String.format(errorFormat, R.string.welcome_banner_problem_root_link);
            problemText.setText(Html.fromHtml(errorText));
            problemText.setMovementMethod(LinkMovementMethod.getInstance());
            nextButton.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    getActivity().finish();
                }
            });
        }

        /**
         * Xposed isn't installed: download the installer APK and let the user install it.
         */
        private void handleXposedInstalledFailure(TextView problemText, final TextView nextButtonText, final LinearLayout nextButton) {
            //Set the problem text.
String errorFormat = getResources().getString(R.string.welcome_banner_problem_xposed_installed);
            //NOTE(review): raw resource id passed as format argument - same concern as handleRootFailure.
            String errorText = String.format(errorFormat, R.string.welcome_banner_problem_xposed_installed_link);
            problemText.setText(Html.fromHtml(errorText));
            problemText.setMovementMethod(LinkMovementMethod.getInstance());

            //Show the download view
            View welcomeDownload = getActivity().findViewById(R.id.welcomeFrameworkDownload);
            welcomeDownload.setVisibility(View.VISIBLE);

            //Set the download progress bar
            ProgressBar downloadProgress = (ProgressBar) getActivity().findViewById(R.id.progressDownloadXposed);
            downloadProgress.setProgress(0);

            //Start the download
            new DownloadHelper().startDownload(getActivity(), downloadProgress, new DownloadHelper.DownloadListener() {
                @Override
                public void onFinished(final boolean success, final String filename) {
                    //When the download is finished (which happens on a non-ui thread)
                    getActivity().runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            //If the download was successful...
                            if (success) {
                                //Update the downloading text
                                TextView downloadText = (TextView)getActivity().findViewById(R.id.welcome_download_status);
                                downloadText.setText(getString(R.string.welcome_downloaded_xposed));

                                //Let them install the framework
                                nextButton.setVisibility(View.VISIBLE);
                                nextButtonText.setText(R.string.welcome_banner_button_install);
                                nextButton.setOnClickListener(new View.OnClickListener() {
                                    @Override
                                    public void onClick(View view) {
                                        //Install the Xposed apk, then exit.  (Ideally, install, then install/update)
                                        Intent intent = new Intent(Intent.ACTION_VIEW);
                                        intent.setDataAndType(Uri.fromFile(new File(filename)), "application/vnd.android.package-archive");
                                        intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                                        startActivity(intent);
                                        getActivity().finish();
                                    }
                                });
                            } else {
                                //Download failed: tell the user and offer an exit button.
                                TextView downloadText = (TextView)getActivity().findViewById(R.id.welcome_download_status);
                                downloadText.setText(getString(R.string.welcome_download_error_xposed));
                                nextButton.setVisibility(View.VISIBLE);
                                nextButtonText.setText(R.string.welcome_banner_button_exit);
                                nextButton.setOnClickListener(new View.OnClickListener() {
                                    @Override
                                    public void onClick(View view) {
                                        getActivity().finish();
                                    }
                                });
                            }
                        }
                    });
                }
            });
        }

        /**
         * Xposed is installed but not active: the fix-it button opens the installer's
         * "install" (framework) section and exits the app.
         */
        private void handleXposedRunningFailure(TextView problemText, TextView nextButtonText, LinearLayout nextButton) {
            nextButtonText.setText(getActivity().getResources().getString(R.string.welcome_banner_button_fixit));
            String errorText = getResources().getString(R.string.welcome_banner_problem_xposed_running);
            problemText.setText(Html.fromHtml(errorText));
            problemText.setMovementMethod(LinkMovementMethod.getInstance());
            nextButton.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    launchXposedFramework();
                    getActivity().finish();
                }
            });
        }

        /**
         * Service reports the wrong version: show the explanation; the button exits the app.
         */
        private void handleVersionFailure(TextView problemText, TextView nextButtonText, LinearLayout nextButton) {
            nextButtonText.setText(getActivity().getResources().getString(R.string.welcome_banner_button_fixit));
            String errorText = getResources().getString(R.string.welcome_banner_problem_version);
            problemText.setText(Html.fromHtml(errorText));
            problemText.setMovementMethod(LinkMovementMethod.getInstance());
            nextButton.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    getActivity().finish();
                }
            });
        }

        /**
         * Xposed runs but the module service doesn't: the fix-it button opens the Xposed
         * installer's modules list and exits the app.
         */
        private void handleServiceFailure(TextView problemText, TextView nextButtonText, LinearLayout nextButton) {
nextButtonText.setText(getActivity().getResources().getString(R.string.welcome_banner_button_fixit));
            String errorText = getResources().getString(R.string.welcome_banner_problem_service);
            problemText.setText(Html.fromHtml(errorText));
            problemText.setMovementMethod(LinkMovementMethod.getInstance());
            nextButton.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    launchXposedModules();
                    getActivity().finish();
                }
            });
        }

        /**
         * Everything checks out: reset preferences to defaults and dismiss the welcome overlay.
         */
        private void handleNoFailure(TextView problemText, LinearLayout nextButton) {
            problemText.setText(getResources().getString(R.string.welcome_banner_problem_none));
            SharedPreferences prefs = getActivity().getSharedPreferences("com.ryansteckler.nlpunbounce" + "_preferences", Context.MODE_WORLD_READABLE);
            SettingsHelper.resetToDefaults(prefs);
            nextButton.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    //Re-enable the navigation that handleSetup disabled.
                    getActivity().getActionBar().setHomeButtonEnabled(true);

                    //When we're done, hide the parent view
                    mParentView.setVisibility(View.GONE);
                    //Un-blur the background by running the blur animation backwards,
                    //then hide the blur image once the reverse run has finished.
                    mReverseWhenDone.reverse();
                    new Handler().postDelayed(new Runnable() {
                        @Override
                        public void run() {
                            ImageView unblur = (ImageView) getActivity().findViewById(R.id.imageBlur);
                            unblur.setVisibility(View.GONE);
                        }
                    }, mReverseWhenDone.getDuration());
                }
            });
        }
    }

    /**
     * Fades a dimming overlay image in over the fragment.  Returns the animator (not yet
     * started) so the caller can start it and later reverse it.
     */
    private ValueAnimator blurBackground(View view) {
        //Blur the background
        //Show the image (now transparent)
        final ImageView imageBlur = (ImageView) view.findViewById(R.id.imageBlur);
        imageBlur.setVisibility(View.VISIBLE);

        //Fade it to (mostly) opaque
        ValueAnimator blurAnimation = ValueAnimator.ofFloat(0, 0.8f);
        blurAnimation.setDuration(1000);
        blurAnimation.setInterpolator(new AccelerateDecelerateInterpolator());
        blurAnimation.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
            @Override
            public void onAnimationUpdate(final ValueAnimator animator) {
                float curValue = (float) animator.getAnimatedValue();
                imageBlur.setAlpha(curValue);
            }
        });
        return blurAnimation;
    }

    /**
     * Wires the donation ("karma") buttons to in-app purchase flows; each listener falls
     * back to an alert dialog when the billing helper isn't available.
     */
    private void setupKarma(View view) {
LinearLayout layout = (LinearLayout) view.findViewById(R.id.buttonKarma1);
        layout.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                //Catch crash when the in-app-billing helper isn't set up.
                try {
                    //NOTE(review): buttonKarma1 launches the "donate_2" SKU - confirm this is intentional.
                    ((MaterialSettingsActivity) getActivity()).mHelper.launchPurchaseFlow(getActivity(), "donate_2", 2, ((MaterialSettingsActivity) getActivity()).mPurchaseFinishedListener, "2");
                } catch (IllegalStateException ise) {
                    new AlertDialog.Builder(getActivity())
                            .setTitle(R.string.alert_noiab_title)
                            .setMessage(R.string.alert_noiab_content)
                            .setNeutralButton("OK", new DialogInterface.OnClickListener() {
                                public void onClick(DialogInterface dialog, int which) {
                                }
                            })
                            .setIcon(android.R.drawable.ic_dialog_alert)
                            .show();
                }
            }
        });

        layout = (LinearLayout) view.findViewById(R.id.buttonKarma5);
        layout.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                try {
                    ((MaterialSettingsActivity)getActivity()).mHelper.launchPurchaseFlow(getActivity(), "donate_5", 5, ((MaterialSettingsActivity)getActivity()).mPurchaseFinishedListener, "5");
                } catch (IllegalStateException ise) {
                    new AlertDialog.Builder(getActivity())
                            .setTitle(R.string.alert_noiab_title)
                            .setMessage(R.string.alert_noiab_content)
                            .setNeutralButton("OK", new DialogInterface.OnClickListener() {
                                public void onClick(DialogInterface dialog, int which) {
                                }
                            })
                            .setIcon(android.R.drawable.ic_dialog_alert)
                            .show();
                }
            }
        });

        layout = (LinearLayout) view.findViewById(R.id.buttonKarma10);
        layout.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                try {
                    ((MaterialSettingsActivity)getActivity()).mHelper.launchPurchaseFlow(getActivity(), "donate_10", 10, ((MaterialSettingsActivity)getActivity()).mPurchaseFinishedListener, "10");
                } catch (IllegalStateException ise) {
                    new AlertDialog.Builder(getActivity())
                            .setTitle(R.string.alert_noiab_title)
                            .setMessage(R.string.alert_noiab_content)
                            .setNeutralButton("OK", new DialogInterface.OnClickListener() {
                                public void onClick(DialogInterface
dialog, int which) {
                                }
                            })
                            .setIcon(android.R.drawable.ic_dialog_alert)
                            .show();
                }
            }
        });

        //"Donate again" buttons; the SKU names suggest consumable purchases so returning
        //donors can buy repeatedly.
        LinearLayout layoutAgain = (LinearLayout) view.findViewById(R.id.buttonKarma1Again);
        layoutAgain.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                try {
                    ((MaterialSettingsActivity)getActivity()).mHelper.launchPurchaseFlow(getActivity(), "donate_1_consumable", 1, ((MaterialSettingsActivity)getActivity()).mPurchaseFinishedListener, "1");
                } catch (IllegalStateException ise) {
                    new AlertDialog.Builder(getActivity())
                            .setTitle(R.string.alert_noiab_title)
                            .setMessage(R.string.alert_noiab_content)
                            .setNeutralButton("OK", new DialogInterface.OnClickListener() {
                                public void onClick(DialogInterface dialog, int which) {
                                }
                            })
                            .setIcon(android.R.drawable.ic_dialog_alert)
                            .show();
                }
            }
        });

        layoutAgain = (LinearLayout) view.findViewById(R.id.buttonKarma5Again);
        layoutAgain.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                try {
                    ((MaterialSettingsActivity)getActivity()).mHelper.launchPurchaseFlow(getActivity(), "donate_5_consumable", 5, ((MaterialSettingsActivity)getActivity()).mPurchaseFinishedListener, "5");
                } catch (IllegalStateException ise) {
                    new AlertDialog.Builder(getActivity())
                            .setTitle(R.string.alert_noiab_title)
                            .setMessage(R.string.alert_noiab_content)
                            .setNeutralButton("OK", new DialogInterface.OnClickListener() {
                                public void onClick(DialogInterface dialog, int which) {
                                }
                            })
                            .setIcon(android.R.drawable.ic_dialog_alert)
                            .show();
                }
            }
        });

        layoutAgain = (LinearLayout) view.findViewById(R.id.buttonKarma10Again);
        layoutAgain.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                try {
                    ((MaterialSettingsActivity)getActivity()).mHelper.launchPurchaseFlow(getActivity(), "donate_10_consumable", 10, ((MaterialSettingsActivity)getActivity()).mPurchaseFinishedListener, "10");
                } catch (IllegalStateException ise) {
                    new AlertDialog.Builder(getActivity())
.setTitle(R.string.alert_noiab_title)
                            .setMessage(R.string.alert_noiab_content)
                            .setNeutralButton("OK", new DialogInterface.OnClickListener() {
                                public void onClick(DialogInterface dialog, int which) {
                                }
                            })
                            .setIcon(android.R.drawable.ic_dialog_alert)
                            .show();
                }
            }
        });

        //Reveal the expanded "donate again" section and scroll it into view.
        TextView helpFurtherButton = (TextView) view.findViewById(R.id.buttonHelpFurther);
        final LinearLayout expanded = (LinearLayout) view.findViewById(R.id.layoutExpandedDonateAgain);
        final ScrollView scroll = (ScrollView) view.findViewById(R.id.scrollView);
        helpFurtherButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                expanded.setVisibility(View.VISIBLE);
                scroll.post(new Runnable() {
                    @Override
                    public void run() {
                        scroll.fullScroll(View.FOCUS_DOWN);
                    }
                });
            }
        });
    }

    /**
     * Wires the "reset stats" button to a confirmation dialog that clears the local stats,
     * tells the hooked process to reset too, and refreshes the displayed numbers.
     */
    private void setupResetStatsButton(final View view) {
        TextView resetStatsButton = (TextView)view.findViewById(R.id.buttonResetStats);
        resetStatsButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View textView) {
                new AlertDialog.Builder(getActivity())
                        .setTitle(R.string.alert_delete_stats_title)
                        .setMessage(R.string.alert_delete_stats_content)
                        .setPositiveButton(R.string.dialog_delete, new DialogInterface.OnClickListener() {
                            public void onClick(DialogInterface dialog, int which) {
                                UnbounceStatsCollection.getInstance().resetStats(getActivity(), UnbounceStatsCollection.STAT_CURRENT);
                                Intent intent = new Intent(XposedReceiver.RESET_ACTION);
                                intent.putExtra(XposedReceiver.STAT_TYPE, UnbounceStatsCollection.STAT_CURRENT);
                                try {
                                    getActivity().sendBroadcast(intent);
                                } catch (IllegalStateException ise) {
                                    //Best-effort broadcast; deliberately ignored.
                                }
                                loadStatsFromSource(view);
                            }
                        })
                        .setNegativeButton(R.string.dialog_cancel, new DialogInterface.OnClickListener() {
                            public void onClick(DialogInterface dialog, int which) {
                                // do nothing
                            }
                        })
                        .setIcon(android.R.drawable.ic_dialog_alert)
                        .show();
            }
        });
    }

    @Override
    public void onResume() {
        super.onResume();
    }

    /**
     * Returns true if the Xposed installer package is present on the device.
     */
    private boolean isXposedInstalled() {
        PackageManager pm =
getActivity().getPackageManager();
        try {
            pm.getPackageInfo("de.robv.android.xposed.installer", PackageManager.GET_ACTIVITIES);
            return true;
        } catch (PackageManager.NameNotFoundException e) {
            //Package lookup failing means the installer isn't present.
            return false;
        }
    }

    /**
     * Returns true if this app was installed through the Google Play Store.
     */
    private boolean isInstalledFromPlay() {
        String installer = getActivity().getPackageManager().getInstallerPackageName("com.ryansteckler.nlpunbounce");
        if (installer == null) {
            return false;
        } else {
            return installer.equals("com.android.vending");
        }
    }

    /**
     * Opens the Xposed installer on its "modules" section.  Falls back to the installer's
     * plain launch intent if the sectioned intent fails; returns false when the installer
     * can't be launched at all.
     */
    private boolean launchXposedModules() {
        Intent LaunchIntent = null;
        try {
            LaunchIntent = getActivity().getPackageManager().getLaunchIntentForPackage("de.robv.android.xposed.installer");
            if (LaunchIntent == null) {
                return false;
            } else {
                Intent intent = new Intent("de.robv.android.xposed.installer.OPEN_SECTION");
                intent.setPackage("de.robv.android.xposed.installer");
                intent.putExtra("section", "modules");
                intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                startActivity(intent);
            }
        } catch (Exception e) {
            //Sectioned launch failed; fall back to the generic launch intent if we have one.
            if (LaunchIntent != null) {
                LaunchIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                startActivity(LaunchIntent);
            } else {
                return false;
            }
        }
        return true;
    }

    /**
     * Opens the Xposed installer on its "install" (framework) section; same fallback
     * behavior as launchXposedModules().  NOTE(review): the two methods are near-identical
     * and could share a helper parameterized on the section name.
     */
    private boolean launchXposedFramework() {
        Intent LaunchIntent = null;
        try {
            LaunchIntent = getActivity().getPackageManager().getLaunchIntentForPackage("de.robv.android.xposed.installer");
            if (LaunchIntent == null) {
                return false;
            } else {
                Intent intent = new Intent("de.robv.android.xposed.installer.OPEN_SECTION");
                intent.setPackage("de.robv.android.xposed.installer");
                intent.putExtra("section", "install");
                intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                startActivity(intent);
            }
        } catch (Exception e) {
            if (LaunchIntent != null) {
                LaunchIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                startActivity(LaunchIntent);
            } else {
                return false;
            }
        }
        return true;
    }

    //Receives ActivityReceiver.STATS_REFRESHED_ACTION broadcasts; registered in
    //onViewCreated and unregistered in onDestroyView.
    private BroadcastReceiver refreshReceiver;

    @Override
    public void onHiddenChanged(boolean hidden) {
        super.onHiddenChanged(hidden);
        if (!hidden) {
            updatePremiumUi();
        }
    }

    /**
     * Swaps the "donate" panel for the "donate again" panel once the user is premium.
     */
    private void updatePremiumUi() {
        if
(((MaterialSettingsActivity)getActivity()).isPremium()) {
            View againView = (View) getActivity().findViewById(R.id.layoutDonateAgain);
            againView.setVisibility(View.VISIBLE);
            View donateView = (View) getActivity().findViewById(R.id.layoutDonate);
            donateView.setVisibility(View.GONE);
        }
    }

    /**
     * Populates every stats TextView on the home screen from the local stats store, then
     * (if the user opted in to global participation) kicks off a network fetch for the
     * global stats.
     */
    private void loadStatsFromSource(final View view) {
        final UnbounceStatsCollection stats = UnbounceStatsCollection.getInstance();
        final Context c = getActivity();
        stats.loadStats(c, true);
        String duration = stats.getWakelockDurationAllowedFormatted(c, UnbounceStatsCollection.STAT_CURRENT);

        //Wakelocks
        TextView textView = (TextView)view.findViewById(R.id.textLocalWakeTimeAllowed);
        textView.setText(duration);
        textView = (TextView)view.findViewById(R.id.textRunningSince);
        textView.setText(stats.getRunningSinceFormatted());
        textView = (TextView)view.findViewById(R.id.textLocalWakeAcquired);
        textView.setText(String.valueOf(stats.getTotalAllowedWakelockCount(c, UnbounceStatsCollection.STAT_CURRENT)));
        textView = (TextView)view.findViewById(R.id.textLocalWakeBlocked);
        textView.setText(String.valueOf(stats.getTotalBlockWakelockCount(c, UnbounceStatsCollection.STAT_CURRENT)));
        textView = (TextView)view.findViewById(R.id.textLocalWakeTimeBlocked);
        textView.setText(stats.getWakelockDurationBlockedFormatted(c, UnbounceStatsCollection.STAT_CURRENT));

        //Services
        textView = (TextView)view.findViewById(R.id.textLocalServiceAcquired);
        textView.setText(String.valueOf(stats.getTotalAllowedServiceCount(c, UnbounceStatsCollection.STAT_CURRENT)));
        textView = (TextView)view.findViewById(R.id.textLocalServiceBlocked);
        textView.setText(String.valueOf(stats.getTotalBlockServiceCount(c, UnbounceStatsCollection.STAT_CURRENT)));

        //Alarms
        textView = (TextView)view.findViewById(R.id.textLocalAlarmsAcquired);
        textView.setText(String.valueOf(stats.getTotalAllowedAlarmCount(c, UnbounceStatsCollection.STAT_CURRENT)));
        textView = (TextView)view.findViewById(R.id.textLocalAlarmsBlocked);
textView.setText(String.valueOf(stats.getTotalBlockAlarmCount(c, UnbounceStatsCollection.STAT_CURRENT)));

        //Global wakelocks.
        //Kick off a refresh; the Handler fires once the network fetch completes.
        SharedPreferences prefs = getActivity().getSharedPreferences("com.ryansteckler.nlpunbounce" + "_preferences", Context.MODE_WORLD_READABLE);
        if (prefs.getBoolean("global_participation", true)) {
            stats.getStatsFromNetwork(c, new Handler() {
                @Override
                public void handleMessage(Message msg) {
                    //Global wakelocks
                    TextView textView = (TextView)view.findViewById(R.id.textGlobalWakelockDurationAllowed);
                    textView.setText(stats.getWakelockDurationAllowedFormatted(c, UnbounceStatsCollection.STAT_GLOBAL));
                    textView = (TextView)view.findViewById(R.id.textGlobalWakelockAllowed);
                    textView.setText(String.valueOf(stats.getTotalAllowedWakelockCount(c, UnbounceStatsCollection.STAT_GLOBAL)));
                    textView = (TextView)view.findViewById(R.id.textGlobalWakelockBlocked);
                    textView.setText(String.valueOf(stats.getTotalBlockWakelockCount(c, UnbounceStatsCollection.STAT_GLOBAL)));
                    textView = (TextView)view.findViewById(R.id.textGlobalWakelockDurationBlocked);
                    textView.setText(stats.getWakelockDurationBlockedFormatted(c, UnbounceStatsCollection.STAT_GLOBAL));

                    //Global services
                    textView = (TextView)view.findViewById(R.id.textGlobalServiceAllowed);
                    textView.setText(String.valueOf(stats.getTotalAllowedServiceCount(c, UnbounceStatsCollection.STAT_GLOBAL)));
                    textView = (TextView)view.findViewById(R.id.textGlobalServiceBlocked);
                    textView.setText(String.valueOf(stats.getTotalBlockServiceCount(c, UnbounceStatsCollection.STAT_GLOBAL)));

                    //Global Alarms
                    textView = (TextView)view.findViewById(R.id.textGlobalAlarmAllowed);
                    textView.setText(String.valueOf(stats.getTotalAllowedAlarmCount(c, UnbounceStatsCollection.STAT_GLOBAL)));
                    textView = (TextView)view.findViewById(R.id.textGlobalAlarmBlocked);
                    textView.setText(String.valueOf(stats.getTotalBlockAlarmCount(c, UnbounceStatsCollection.STAT_GLOBAL)));
                }
            });
        } else {
            //Global participation disabled: show the "disabled" placeholder everywhere.
            //Global wakelocks
            textView =
(TextView)view.findViewById(R.id.textGlobalWakelockDurationAllowed); textView.setText(getResources().getString(R.string.stat_disabled)); textView = (TextView)view.findViewById(R.id.textGlobalWakelockAllowed); textView.setText(getResources().getString(R.string.stat_disabled)); textView = (TextView)view.findViewById(R.id.textGlobalWakelockBlocked); textView.setText(getResources().getString(R.string.stat_disabled)); textView = (TextView)view.findViewById(R.id.textGlobalWakelockDurationBlocked); textView.setText(getResources().getString(R.string.stat_disabled)); //Global services textView = (TextView)view.findViewById(R.id.textGlobalServiceAllowed); textView.setText(getResources().getString(R.string.stat_disabled)); textView = (TextView)view.findViewById(R.id.textGlobalServiceBlocked); textView.setText(getResources().getString(R.string.stat_disabled)); //Global Alarms textView = (TextView)view.findViewById(R.id.textGlobalAlarmAllowed); textView.setText(getResources().getString(R.string.stat_disabled)); textView = (TextView)view.findViewById(R.id.textGlobalAlarmBlocked); textView.setText(getResources().getString(R.string.stat_disabled)); } } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View rootView = inflater.inflate(R.layout.fragment_home, container, false); return rootView; } public void onAttach(Activity activity) { super.onAttach(activity); try { mListener = (OnFragmentInteractionListener) activity; } catch (ClassCastException e) { throw new ClassCastException(activity.toString() + " must implement OnFragmentInteractionListener"); } } @Override public void onDetach() { super.onDetach(); mListener = null; } @Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { getActivity().getMenuInflater().inflate(R.menu.home, menu); super.onCreateOptionsMenu(menu, inflater); } @Override public boolean onOptionsItemSelected(MenuItem item) { int id = item.getItemId(); if (id == 
R.id.action_refresh) { requestRefresh(); return true; } return super.onOptionsItemSelected(item); } private void requestRefresh() { Intent intent = new Intent(XposedReceiver.REFRESH_ACTION); try { getActivity().sendBroadcast(intent); } catch (IllegalStateException ise) { } } /** * This interface must be implemented by activities that contain this * fragment to allow an interaction in this fragment to be communicated * to the activity and potentially other fragments contained in that * activity. * <p> * See the Android Training lesson <a href= * "http://developer.android.com/training/basics/fragments/communicating.html" * >Communicating with Other Fragments</a> for more information. */ public interface OnFragmentInteractionListener { public void onHomeSetTitle(String id); } private void animateButtonContainer(final ViewGroup container) { LayoutTransition lt = container.getLayoutTransition(); if (lt == null) { lt = new LayoutTransition(); } lt.enableTransitionType(LayoutTransition.APPEARING); lt.disableTransitionType(LayoutTransition.DISAPPEARING); lt.setDuration(300); container.setLayoutTransition(lt); } private void setupBannerAnimations(ViewGroup container) { AnimatorSet animatorDisappear = getDisappearAnimation(container); AnimatorSet animatorAppear = getAppearAnimation(container); LayoutTransition lt = container.getLayoutTransition(); if (lt == null) { lt = new LayoutTransition(); } lt.setAnimator(LayoutTransition.DISAPPEARING, animatorDisappear); lt.setAnimator(LayoutTransition.APPEARING, animatorAppear); lt.setStartDelay(LayoutTransition.APPEARING, 0); lt.setDuration(300); container.setLayoutTransition(lt); } private AnimatorSet getDisappearAnimation(ViewGroup container) { float endLocation = container.getHeight(); DisplayMetrics metrics = Resources.getSystem().getDisplayMetrics(); float dp = endLocation / (metrics.densityDpi / 160f); AnimatorSet animator = new AnimatorSet(); ObjectAnimator moveBanner = ObjectAnimator.ofFloat(null, View.TRANSLATION_Y, 0, dp); 
ObjectAnimator fadeBanner = ObjectAnimator.ofFloat(null, View.ALPHA, 1, 0); animator.playTogether(moveBanner, fadeBanner); return animator; } private AnimatorSet getAppearAnimation(ViewGroup container) { float endLocation = container.getHeight() * -1; DisplayMetrics metrics = Resources.getSystem().getDisplayMetrics(); float dp = endLocation / (metrics.densityDpi / 160f); AnimatorSet animator = new AnimatorSet(); ObjectAnimator moveBanner = ObjectAnimator.ofFloat(null, View.TRANSLATION_Y, dp, 0); ObjectAnimator fadeBanner = ObjectAnimator.ofFloat(null, View.ALPHA, 0, 1); animator.playTogether(moveBanner, fadeBanner); return animator; } public boolean isUnbounceServiceRunning() { //The Unbounce hook changes this to true. return false; } public String getAmplifyKernelVersion() { //The Unbounce hook changes this to true. return "0"; } public boolean isXposedRunning() { // return true; return new File("/data/data/de.robv.android.xposed.installer/bin/XposedBridge.jar").exists(); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.jena.tdb.setup;

import java.io.File;
import java.util.HashMap;
import java.util.Map;

import org.apache.jena.query.ARQ;
import org.apache.jena.sparql.engine.main.QC;
import org.apache.jena.sparql.engine.optimizer.reorder.ReorderLib;
import org.apache.jena.sparql.engine.optimizer.reorder.ReorderTransformation;
import org.apache.jena.sparql.sse.SSE_ParseException;
import org.apache.jena.tdb.TDB;
import org.apache.jena.tdb.TDBException;
import org.apache.jena.tdb.base.block.BlockMgr;
import org.apache.jena.tdb.base.file.BufferChannel;
import org.apache.jena.tdb.base.file.FileSet;
import org.apache.jena.tdb.base.file.Location;
import org.apache.jena.tdb.base.objectfile.ObjectFile;
import org.apache.jena.tdb.base.record.RecordFactory;
import org.apache.jena.tdb.index.BuilderStdIndex;
import org.apache.jena.tdb.index.Index;
import org.apache.jena.tdb.index.IndexParams;
import org.apache.jena.tdb.index.RangeIndex;
import org.apache.jena.tdb.index.bplustree.BPlusTree;
import org.apache.jena.tdb.index.bplustree.BPlusTreeParams;
import org.apache.jena.tdb.lib.ColumnMap;
import org.apache.jena.tdb.solver.OpExecutorTDB1;
import org.apache.jena.tdb.store.*;
import org.apache.jena.tdb.store.nodetable.NodeTable;
import org.apache.jena.tdb.store.nodetable.NodeTableCache;
import org.apache.jena.tdb.store.nodetable.NodeTableInline;
import org.apache.jena.tdb.store.nodetable.NodeTableNative;
import org.apache.jena.tdb.store.nodetupletable.NodeTupleTable;
import org.apache.jena.tdb.store.nodetupletable.NodeTupleTableConcrete;
import org.apache.jena.tdb.store.tupletable.TupleIndex;
import org.apache.jena.tdb.store.tupletable.TupleIndexRecord;
import org.apache.jena.tdb.sys.*;
import org.slf4j.Logger;

/**
 * This class is the process of building a dataset.  Records
 * BlockMgr/BufferChannel/NodeTable for use by the transaction builder.
 * <p>
 * The builder wraps its {@link BlockMgrBuilder} and {@link ObjectFileBuilder}
 * in recording decorators so that every file-backed component created during a
 * build is captured in a {@link Recorder} and handed to the resulting
 * {@link StorageConfig}.
 */
public class DatasetBuilderStd {
    private static final Logger log = TDB.logInfo;

    // Component builders; replaced by recording decorators in setupRecord().
    private BlockMgrBuilder blockMgrBuilder = new BuilderStdIndex.BlockMgrBuilderStd();
    private ObjectFileBuilder objectFileBuilder = new BuilderStdDB.ObjectFileBuilderStd();

    /**
     * Wrap the current builders in recording decorators so that every
     * BlockMgr/ObjectFile created is registered with {@link #recorder}.
     *
     * @throws TDBException if a builder is already a recording decorator
     *         (wrapping twice would double-record).
     */
    private void setupRecord() {
        if ( this.blockMgrBuilder instanceof BlockMgrBuilderRecorder )
            throw new TDBException("Already recording (BlockMgrBuilder)");
        if ( this.objectFileBuilder instanceof ObjectFileBuilderRecorder )
            throw new TDBException("Already recording (ObjectFileBuilder)");
        this.blockMgrBuilder = new BlockMgrBuilderRecorder(blockMgrBuilder, recorder);
        this.objectFileBuilder = new ObjectFileBuilderRecorder(objectFileBuilder, recorder);
    }

    /**
     * Build a {@link RangeIndex} (B+Tree) over the given file set, with the
     * B+Tree order derived from the configured block size and record length.
     */
    private RangeIndex buildRangeIndex(FileSet fileSet, RecordFactory recordFactory, IndexParams indexParams) {
        int blkSize = indexParams.getBlockSize();
        int order = BPlusTreeParams.calcOrder(blkSize, recordFactory.recordLength());
        RangeIndex rIndex = createBPTree(fileSet, order, blockMgrBuilder, blockMgrBuilder, recordFactory, indexParams);
        return rIndex;
    }

    /**
     * Build a plain {@link Index}.  A {@link RangeIndex} is also an Index, so
     * this simply delegates (cheap).
     */
    private Index buildIndex(FileSet fileSet, RecordFactory recordFactory, IndexParams indexParams) {
        return buildRangeIndex(fileSet, recordFactory, indexParams);
    }

    /**
     * Knowing all the parameters, create a B+Tree.
     *
     * @param order  B+Tree order; pass a negative value to have it calculated
     *               from the block size, otherwise it is validated against the
     *               block size.
     * @throws IllegalArgumentException if the block size is negative, or the
     *         supplied order disagrees with the order implied by the block size.
     */
    private RangeIndex createBPTree(FileSet fileset, int order,
                                    BlockMgrBuilder blockMgrBuilderNodes,
                                    BlockMgrBuilder blockMgrBuilderRecords,
                                    RecordFactory factory, IndexParams indexParams) {
        // ---- Checking
        {
            int blockSize = indexParams.getBlockSize();
            if ( blockSize < 0 )
                throw new IllegalArgumentException("Negative blocksize: " + blockSize);
            // blockSize >= 0 from here on.
            // (The original also tested "blockSize < 0 && order < 0" / "Neither
            // blocksize nor order specified", but that branch was unreachable
            // after the throw above, so it has been removed.)
            if ( order < 0 )
                order = BPlusTreeParams.calcOrder(blockSize, factory.recordLength());
            else {
                int order2 = BPlusTreeParams.calcOrder(blockSize, factory.recordLength());
                if ( order != order2 )
                    throw new IllegalArgumentException("Wrong order (" + order + "), calculated = " + order2);
            }
        }

        BPlusTreeParams params = new BPlusTreeParams(order, factory);
        BlockMgr blkMgrNodes = blockMgrBuilderNodes.buildBlockMgr(fileset, Names.bptExtTree, indexParams);
        BlockMgr blkMgrRecords = blockMgrBuilderRecords.buildBlockMgr(fileset, Names.bptExtRecords, indexParams);
        return BPlusTree.create(params, blkMgrNodes, blkMgrRecords);
    }

    // Captures the BlockMgrs/ObjectFiles/BufferChannels created during a build.
    private Recorder recorder = new Recorder();

    /**
     * Create a {@link DatasetGraphTDB} at a location, using default store parameters.
     *
     * @param location Where to create the database.
     * @return DatasetGraphTDB
     */
    public static DatasetGraphTDB create(Location location) {
        return create(location, null);
    }

    /**
     * Create a {@link DatasetGraphTDB} with a set of {@link StoreParams}.
     * The parameters for a store have 3 inputs: the parameters provided,
     * any parameters recorded at the location, and the system defaults.
     *
     * @param location Where to create the database.
     * @param appParams Store parameters to use (null means use default). See {@link StoreParams}.
     * @return DatasetGraphTDB
     */
    public static DatasetGraphTDB create(Location location, StoreParams appParams) {
        StoreParams locParams = StoreParamsCodec.read(location);
        StoreParams dftParams = StoreParams.getDftStoreParams();
        // This can write the chosen parameters if necessary (new database,
        // appParams != null, locParams == null).
        boolean newArea = TDBInternal.isNewDatabaseArea(location);
        StoreParams params = Build.decideStoreParams(location, newArea, appParams, locParams, dftParams);
        DatasetBuilderStd x = new DatasetBuilderStd();
        DatasetGraphTDB dsg = x.build(location, params);
        return dsg;
    }

    /** Create an in-memory {@link DatasetGraphTDB} with the given store parameters. */
    public static DatasetGraphTDB create(StoreParams params) {
        // Memory version?
        return create(Location.mem(), params);
    }

    /** Factory for a builder using the standard component builders. */
    public static DatasetBuilderStd stdBuilder() {
        return new DatasetBuilderStd();
    }

    protected DatasetBuilderStd() {
        this(new BuilderStdIndex.BlockMgrBuilderStd(), new BuilderStdDB.ObjectFileBuilderStd());
    }

    /**
     * Construct with explicit component builders.  The builders are wrapped in
     * recording decorators (see {@link #setupRecord()}).
     */
    public DatasetBuilderStd(BlockMgrBuilder blockMgrBuilder, ObjectFileBuilder objectFileBuilder) {
        this.blockMgrBuilder = blockMgrBuilder;
        this.objectFileBuilder = objectFileBuilder;
        this.recorder = new Recorder();
        // XXX YUK
        setupRecord();
    }

    // private void standardSetup() {
    // ObjectFileBuilder objectFileBuilder = new BuilderStdDB.ObjectFileBuilderStd();
    // BlockMgrBuilder blockMgrBuilder = new BuilderStdIndex.BlockMgrBuilderStd();
    // IndexBuilder indexBuilderNT = new BuilderStdIndex.IndexBuilderStd(blockMgrBuilder, blockMgrBuilder);
    // NodeTableBuilder nodeTableBuilder = new BuilderStdDB.NodeTableBuilderStd(indexBuilderNT, objectFileBuilder);
    // setupRecord(blockMgrBuilder, nodeTableBuilder);
    // }

    /**
     * Validate that a disk location exists, is a directory, and is
     * readable/writeable.  In-memory locations need no checking.
     */
    private static void checkLocation(Location location) {
        if ( location.isMem() )
            return;
        String dirname = location.getDirectoryPath();
        File dir = new File(dirname);
        // File location.
        if ( ! dir.exists() )
            error(log, "Does not exist: " + dirname);
        if ( ! dir.isDirectory() )
            error(log, "Not a directory: " + dirname);
        if ( ! dir.canRead() )
            error(log, "Directory not readable: " + dirname);
        if ( ! dir.canWrite() )
            error(log, "Directory not writeable: " + dirname);
    }

    /** Build a writeable dataset at the location with the given parameters. */
    public DatasetGraphTDB build(Location location, StoreParams params) {
        // Ensure that there is global synchronization
        synchronized (DatasetBuilderStd.class) {
            log.debug("Build database: " + location.getDirectoryPath());
            checkLocation(location);
            return _build(location, params, true, null);
        }
    }

    private static String DB_CONFIG_FILE = "tdb.cfg";

    // Main engine for building.
    // Called by DatasetBuilderTxn
    // XXX Rework - provide a cloning constructor (copies maps).
    // Or "reset"
    public DatasetGraphTDB _build(Location location, StoreParams params, boolean writeable,
                                  ReorderTransformation _transform) {
        return buildWorker(location, writeable, _transform, params);
    }

    /**
     * Assemble the node table, triple/quad tables and prefix table, capture
     * the created components in a {@link StorageConfig}, and return the
     * dataset graph.  Synchronized: the shared {@link #recorder} cannot be
     * used by two builds at once.
     */
    private synchronized DatasetGraphTDB buildWorker(Location location, boolean writeable,
                                                     ReorderTransformation _transform, StoreParams params) {
        recorder.start();
        DatasetControl policy = createConcurrencyPolicy();
        NodeTable nodeTable = makeNodeTable(location, params);
        TripleTable tripleTable = makeTripleTable(location, nodeTable, policy, params);
        QuadTable quadTable = makeQuadTable(location, nodeTable, policy, params);
        DatasetPrefixesTDB prefixes = makePrefixTable(location, policy, params);
        ReorderTransformation transform = (_transform == null) ? chooseReorderTransformation(location) : _transform;
        // StorageConfig takes ownership of the recorded maps before finish()
        // drops the recorder's references to them.
        StorageConfig storageConfig = new StorageConfig(location, params, writeable,
                                                        recorder.blockMgrs, recorder.objectFiles,
                                                        recorder.bufferChannels);
        recorder.finish();
        DatasetGraphTDB dsg = new DatasetGraphTDB(tripleTable, quadTable, prefixes, transform, storageConfig);
        // TDB does filter placement on BGPs itself.
        dsg.getContext().set(ARQ.optFilterPlacementBGP, false);
        QC.setFactory(dsg.getContext(), OpExecutorTDB1.OpExecFactoryTDB);
        return dsg;
    }

    /** Immutable snapshot of a map (throws on null keys/values). */
    private static <X, Y> Map<X, Y> freeze(Map<X, Y> map) {
        return Map.copyOf(map);
    }

    /** Concurrency policy for the dataset: multiple-reader or single-writer. */
    protected DatasetControl createConcurrencyPolicy() {
        return new DatasetControlMRSW();
    }

    /** Build the triple table from the configured primary index and index set. */
    protected TripleTable makeTripleTable(Location location, NodeTable nodeTable, DatasetControl policy,
                                          StoreParams params) {
        String primary = params.getPrimaryIndexTriples();
        String[] indexes = params.getTripleIndexes();

        // Allow experimentation of other index layouts.
        // if ( indexes.length != 3 )
        // error(log,
        // "Wrong number of triple table indexes: "+StrUtils.strjoin(",",
        // indexes));

        TupleIndex tripleIndexes[] = makeTupleIndexes(location, primary, indexes, params);
        if ( tripleIndexes.length != indexes.length )
            error(log, "Wrong number of triple table tuples indexes: " + tripleIndexes.length);
        TripleTable tripleTable = new TripleTable(tripleIndexes, nodeTable, policy);
        return tripleTable;
    }

    /** Build the quad table from the configured primary index and index set. */
    protected QuadTable makeQuadTable(Location location, NodeTable nodeTable, DatasetControl policy,
                                      StoreParams params) {
        String primary = params.getPrimaryIndexQuads();
        String[] indexes = params.getQuadIndexes();

        // Allow experimentation of other index layouts.
        // if ( indexes.length != 6 )
        // error(log,
        // "Wrong number of quad table indexes: "+StrUtils.strjoin(",",
        // indexes));

        TupleIndex quadIndexes[] = makeTupleIndexes(location, primary, indexes, params);
        if ( quadIndexes.length != indexes.length )
            error(log, "Wrong number of quad table tuples indexes: " + quadIndexes.length);
        QuadTable quadTable = new QuadTable(quadIndexes, nodeTable, policy);
        return quadTable;
    }

    /**
     * Build the prefix table.  Uses its own uncached node table: the prefix
     * mapping itself already acts as a cache.
     */
    protected DatasetPrefixesTDB makePrefixTable(Location location, DatasetControl policy, StoreParams params) {
        String primary = params.getPrimaryIndexPrefix();
        String[] indexes = params.getPrefixIndexes();

        TupleIndex prefixIndexes[] = makeTupleIndexes(location, primary, indexes,
                                                      new String[]{params.getIndexPrefix()}, params);
        if ( prefixIndexes.length != 1 )
            error(log, "Wrong number of prefix table tuples indexes: " + prefixIndexes.length);

        String pnNode2Id = params.getPrefixNode2Id();
        String pnId2Node = params.getPrefixId2Node();

        // No cache - the prefix mapping is a cache
        NodeTable prefixNodes = makeNodeTableNoCache(location, pnNode2Id, pnId2Node, params);
        NodeTupleTable prefixTable = new NodeTupleTableConcrete(primary.length(), prefixIndexes, prefixNodes, policy);
        DatasetPrefixesTDB prefixes = new DatasetPrefixesTDB(prefixTable);
        return prefixes;
    }

    protected ReorderTransformation chooseReorderTransformation(Location location) {
        return chooseOptimizer(location);
    }

    private TupleIndex[] makeTupleIndexes(Location location, String primary, String[] indexNames,
                                          StoreParams params) {
        // Commonly, index names double as file names.
        return makeTupleIndexes(location, primary, indexNames, indexNames, params);
    }

    private TupleIndex[] makeTupleIndexes(Location location, String primary, String[] indexNames,
                                          String[] filenames, StoreParams params) {
        if ( primary.length() != 3 && primary.length() != 4 )
            error(log, "Bad primary key length: " + primary.length());
        // (Removed an unused local that precomputed the index record length;
        // buildTupleIndex derives it from the column map itself.)
        TupleIndex indexes[] = new TupleIndex[indexNames.length];
        for ( int i = 0; i < indexes.length; i++ )
            indexes[i] = makeTupleIndex(location, filenames[i], primary, indexNames[i], params);
        return indexes;
    }

    protected TupleIndex makeTupleIndex(Location location, String name, String primary, String indexOrder,
                                        StoreParams params) {
        // Commonly, name == indexOrder.
        FileSet fs = new FileSet(location, name);
        ColumnMap colMap = new ColumnMap(primary, indexOrder);
        return /*tupleIndexBuilder.*/buildTupleIndex(fs, colMap, indexOrder, params);
    }

    // -------------

    private TupleIndex buildTupleIndex(FileSet fileSet, ColumnMap colMap, String name, StoreParams params) {
        RecordFactory recordFactory = new RecordFactory(SystemTDB.SizeOfNodeId * colMap.length(), 0);
        RangeIndex rIdx = /*rangeIndexBuilder.*/buildRangeIndex(fileSet, recordFactory, params);
        TupleIndex tIdx = new TupleIndexRecord(colMap.length(), colMap, name, recordFactory, rIdx);
        return tIdx;
    }

    /** Build the (cached, inlining) node table for the dataset. */
    public NodeTable makeNodeTable(Location location, StoreParams params) {
        return makeNodeTable$(location, params.getIndexNode2Id(), params.getIndexId2Node(), params);
    }

    /** Make a node table overriding the node-&gt;id and id-&gt;node table names */
    private NodeTable makeNodeTable$(Location location, String indexNode2Id, String indexId2Node,
                                     StoreParams params) {
        FileSet fsNodeToId = new FileSet(location, indexNode2Id);
        FileSet fsId2Node = new FileSet(location, indexId2Node);
        NodeTable nt = /*nodeTableBuilder.*/buildNodeTable(fsNodeToId, fsId2Node, params);
        return nt;
    }

    // -------------

    private NodeTable buildNodeTable(FileSet fsIndex, FileSet fsObjectFile, StoreParams params) {
        RecordFactory recordFactory = new RecordFactory(SystemTDB.LenNodeHash, SystemTDB.SizeOfNodeId);
        Index idx = /*indexBuilder.*/buildIndex(fsIndex, recordFactory, params);
        ObjectFile objectFile = objectFileBuilder.buildObjectFile(fsObjectFile, Names.extNodeData);
        NodeTable nodeTable = new NodeTableNative(idx, objectFile);
        // Layered: native table, then caching, then inline-value short-circuit.
        nodeTable = NodeTableCache.create(nodeTable,
                                          params.getNode2NodeIdCacheSize(),
                                          params.getNodeId2NodeCacheSize(),
                                          params.getNodeMissCacheSize());
        nodeTable = NodeTableInline.create(nodeTable);
        return nodeTable;
    }

    /** Build a node table with all caches disabled (cache sizes set to -1). */
    protected NodeTable makeNodeTableNoCache(Location location, String indexNode2Id, String indexId2Node,
                                             StoreParams params) {
        StoreParamsBuilder spb = StoreParams.builder(params)
            .node2NodeIdCacheSize(-1)
            .nodeId2NodeCacheSize(-1)
            .nodeMissCacheSize(-1);
        return makeNodeTable$(location, indexNode2Id, indexId2Node, spb.build());
    }

    /** Log (if a logger is given) and throw a {@link TDBException}. */
    private static void error(Logger log, String msg) {
        if ( log != null )
            log.error(msg);
        throw new TDBException(msg);
    }

    /** Parse an int, converting a failure into a TDBException via {@link #error}. */
    private static int parseInt(String str, String messageBase) {
        try {
            return Integer.parseInt(str);
        } catch (NumberFormatException ex) {
            error(log, messageBase + ": " + str);
            return -1;  // Unreachable at runtime: error() always throws.
        }
    }

    /**
     * Set the global flag that control the "No BGP optimizer" warning. Set to
     * false to silence the warning
     */
    public static void setOptimizerWarningFlag(boolean b) {
        warnAboutOptimizer = b;
    }

    private static boolean warnAboutOptimizer = true;

    /**
     * Choose the BGP reorder strategy for a location: weighted by a stats
     * file if present, else a fixed pattern, else the system default.  An
     * explicit "none" marker file forces the identity transformation.
     */
    public static ReorderTransformation chooseOptimizer(Location location) {
        if ( location == null )
            return ReorderLib.identity();

        ReorderTransformation reorder = null;
        if ( location.exists(Names.optStats) ) {
            try {
                reorder = ReorderLib.weighted(location.getPath(Names.optStats));
            } catch (SSE_ParseException ex) {
                log.warn("Error in stats file: " + ex.getMessage());
                reorder = null;
            }
        }

        if ( reorder == null && location.exists(Names.optFixed) ) {
            // Not as good but better than nothing.
            reorder = ReorderLib.fixed();
            log.debug("Fixed pattern BGP optimizer");
        }

        if ( location.exists(Names.optNone) ) {
            reorder = ReorderLib.identity();
            log.debug("Optimizer explicitly turned off");
        }

        if ( reorder == null )
            reorder = SystemTDB.defaultReorderTransform;

        if ( reorder == null && warnAboutOptimizer )
            ARQ.getExecLogger().warn("No BGP optimizer");

        return reorder;
    }

    // Callbacks used by the recording decorators below.
    interface RecordBlockMgr {
        void record(FileRef fileRef, BlockMgr blockMgr);
    }

    interface RecordObjectFile {
        void record(FileRef fileRef, ObjectFile objFile);
    }

    interface RecordNodeTable {
        void record(FileRef fileRef, NodeTable nodeTable);
    }

    /** Decorator: builds an ObjectFile, then registers it with the recorder. */
    static class ObjectFileBuilderRecorder implements ObjectFileBuilder {
        private final ObjectFileBuilder builder;
        private final RecordObjectFile recorder;

        ObjectFileBuilderRecorder(ObjectFileBuilder objFileBuilder, RecordObjectFile recorder) {
            this.builder = objFileBuilder;
            this.recorder = recorder;
        }

        @Override
        public ObjectFile buildObjectFile(FileSet fsObjectFile, String ext) {
            ObjectFile objectFile = builder.buildObjectFile(fsObjectFile, ext);
            FileRef ref = FileRef.create(fsObjectFile, ext);
            recorder.record(ref, objectFile);
            return objectFile;
        }
    }

    /** Decorator: builds a BlockMgr, then registers it with the recorder. */
    static class BlockMgrBuilderRecorder implements BlockMgrBuilder {
        private final BlockMgrBuilder builder;
        private final RecordBlockMgr recorder;

        BlockMgrBuilderRecorder(BlockMgrBuilder blkMgrBuilder, RecordBlockMgr recorder) {
            this.builder = blkMgrBuilder;
            this.recorder = recorder;
        }

        @Override
        public BlockMgr buildBlockMgr(FileSet fileSet, String ext, IndexParams params) {
            BlockMgr blkMgr = builder.buildBlockMgr(fileSet, ext, params);
            FileRef ref = FileRef.create(fileSet, ext);
            recorder.record(ref, blkMgr);
            return blkMgr;
        }
    }

    /**
     * Accumulates the components created during one build between
     * {@link #start()} and {@link #finish()}.  Records dropped (not cleared)
     * on finish because {@link StorageConfig} keeps references to the maps.
     */
    static class Recorder implements RecordBlockMgr, RecordObjectFile, RecordNodeTable {
        Map<FileRef, BlockMgr> blockMgrs = null;
        Map<FileRef, ObjectFile> objectFiles = null;
        // Not used currently.
        Map<FileRef, BufferChannel> bufferChannels = null;
        Map<FileRef, NodeTable> nodeTables = null;
        boolean recording = false;

        Recorder() { }

        void start() {
            if ( recording )
                throw new TDBException("Recorder already recording");
            recording = true;
            blockMgrs = new HashMap<>();
            objectFiles = new HashMap<>();
            bufferChannels = new HashMap<>();
            nodeTables = new HashMap<>();
        }

        void finish() {
            if ( ! recording )
                throw new TDBException("Recorder not recording");
            // null out, not .clear - StorageConfig may hold the maps.
            blockMgrs = null;
            objectFiles = null;
            bufferChannels = null;
            // Fix: the original left nodeTables populated after finish(),
            // inconsistent with start() and retaining stale references.
            nodeTables = null;
            recording = false;
        }

        @Override
        public void record(FileRef fileRef, BlockMgr blockMgr) {
            if ( recording )
                // log.info("BlockMgr: "+fileRef);
                blockMgrs.put(fileRef, blockMgr);
        }

        @Override
        public void record(FileRef fileRef, ObjectFile objFile) {
            if ( recording )
                // log.info("ObjectTable: "+fileRef);
                objectFiles.put(fileRef, objFile);
        }

        @Override
        public void record(FileRef fileRef, NodeTable nodeTable) {
            if ( recording )
                // log.info("NodeTable: "+fileRef);
                nodeTables.put(fileRef, nodeTable);
        }
    }
}
package org.hisp.dhis.chart.impl; /* * Copyright (c) 2004-2016, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ import org.apache.commons.math3.analysis.UnivariateFunction; import org.apache.commons.math3.analysis.interpolation.SplineInterpolator; import org.apache.commons.math3.analysis.interpolation.UnivariateInterpolator; import org.apache.commons.math3.exception.MathRuntimeException; import org.apache.commons.math3.stat.regression.SimpleRegression; import org.hisp.dhis.analytics.AnalyticsService; import org.hisp.dhis.analytics.event.EventAnalyticsService; import org.hisp.dhis.chart.BaseChart; import org.hisp.dhis.chart.Chart; import org.hisp.dhis.chart.ChartService; import org.hisp.dhis.chart.ChartType; import org.hisp.dhis.common.*; import org.hisp.dhis.dataelement.DataElement; import org.hisp.dhis.dataelement.DataElementCategoryOptionCombo; import org.hisp.dhis.dataelement.DataElementOperand; import org.hisp.dhis.datavalue.DataValue; import org.hisp.dhis.datavalue.DataValueService; import org.hisp.dhis.i18n.I18nFormat; import org.hisp.dhis.indicator.Indicator; import org.hisp.dhis.minmax.MinMaxDataElement; import org.hisp.dhis.minmax.MinMaxDataElementService; import org.hisp.dhis.organisationunit.OrganisationUnit; import org.hisp.dhis.organisationunit.OrganisationUnitService; import org.hisp.dhis.period.Period; import org.hisp.dhis.period.PeriodService; import org.hisp.dhis.period.RelativePeriods; import org.hisp.dhis.system.grid.GridUtils; import org.hisp.dhis.system.util.MathUtils; import org.hisp.dhis.user.CurrentUserService; import org.hisp.dhis.user.User; import org.jfree.chart.ChartFactory; import org.jfree.chart.JFreeChart; import org.jfree.chart.axis.CategoryAxis; import org.jfree.chart.axis.CategoryLabelPositions; import org.jfree.chart.axis.NumberAxis; import org.jfree.chart.axis.ValueAxis; import org.jfree.chart.labels.StandardPieSectionLabelGenerator; import org.jfree.chart.plot.*; import org.jfree.chart.renderer.category.*; import org.jfree.chart.title.TextTitle; import org.jfree.data.Range; import org.jfree.data.category.CategoryDataset; import 
org.jfree.data.category.DefaultCategoryDataset; import org.jfree.data.general.DefaultValueDataset; import org.jfree.data.general.ValueDataset; import org.jfree.ui.RectangleInsets; import org.jfree.util.TableOrder; import org.springframework.transaction.annotation.Transactional; import java.awt.*; import java.util.*; import java.util.List; import java.util.Map.Entry; import static org.hisp.dhis.common.DimensionalObject.DIMENSION_SEP; import static org.hisp.dhis.commons.collection.ListUtils.getArray; /** * @author Lars Helge Overland */ @Transactional public class DefaultChartService extends GenericAnalyticalObjectService<Chart> implements ChartService { private static final Font TITLE_FONT = new Font( Font.SANS_SERIF, Font.BOLD, 12 ); private static final Font SUB_TITLE_FONT = new Font( Font.SANS_SERIF, Font.PLAIN, 11 ); private static final Font LABEL_FONT = new Font( Font.SANS_SERIF, Font.PLAIN, 10 ); private static final String TREND_PREFIX = "Trend - "; private static final Color[] COLORS = { Color.decode( "#88be3b" ), Color.decode( "#3b6286" ), Color.decode( "#b7404c" ), Color.decode( "#ff9f3a" ), Color.decode( "#968f8f" ), Color.decode( "#b7409f" ), Color.decode( "#ffda64" ), Color.decode( "#4fbdae" ), Color.decode( "#b78040" ), Color.decode( "#676767" ), Color.decode( "#6a33cf" ), Color.decode( "#4a7833" ) }; private static final Color COLOR_TRANSPARENT = new Color( 255, 255, 255, 0 ); private static final Color COLOR_LIGHT_GRAY = Color.decode( "#dddddd" ); private static final Color COLOR_LIGHTER_GRAY = Color.decode( "#eeeeee" ); // ------------------------------------------------------------------------- // Dependencies // ------------------------------------------------------------------------- private AnalyticalObjectStore<Chart> chartStore; public void setChartStore( AnalyticalObjectStore<Chart> chartStore ) { this.chartStore = chartStore; } private PeriodService periodService; public void setPeriodService( PeriodService periodService ) { 
this.periodService = periodService; } private DataValueService dataValueService; public void setDataValueService( DataValueService dataValueService ) { this.dataValueService = dataValueService; } private MinMaxDataElementService minMaxDataElementService; public void setMinMaxDataElementService( MinMaxDataElementService minMaxDataElementService ) { this.minMaxDataElementService = minMaxDataElementService; } private CurrentUserService currentUserService; public void setCurrentUserService( CurrentUserService currentUserService ) { this.currentUserService = currentUserService; } private OrganisationUnitService organisationUnitService; public void setOrganisationUnitService( OrganisationUnitService organisationUnitService ) { this.organisationUnitService = organisationUnitService; } private AnalyticsService analyticsService; public void setAnalyticsService( AnalyticsService analyticsService ) { this.analyticsService = analyticsService; } private EventAnalyticsService eventAnalyticsService; public void setEventAnalyticsService( EventAnalyticsService eventAnalyticsService ) { this.eventAnalyticsService = eventAnalyticsService; } // ------------------------------------------------------------------------- // ChartService implementation // ------------------------------------------------------------------------- @Override protected AnalyticalObjectStore<Chart> getAnalyticalObjectStore() { return chartStore; } @Override public JFreeChart getJFreeChart( int id, I18nFormat format ) { Chart chart = getChart( id ); return chart != null ? 
getJFreeChart( chart, format ) : null; } @Override public JFreeChart getJFreeChart( BaseChart chart, I18nFormat format ) { return getJFreeChart( chart, null, null, format ); } @Override public JFreeChart getJFreeChart( BaseChart chart, Date date, OrganisationUnit organisationUnit, I18nFormat format ) { return getJFreeChart( chart, date, organisationUnit, format, currentUserService.getCurrentUser() ); } @Override public JFreeChart getJFreeChart( BaseChart chart, Date date, OrganisationUnit organisationUnit, I18nFormat format, User currentUser ) { User user = (currentUser != null ? currentUser : currentUserService.getCurrentUser()); if ( organisationUnit == null && user != null ) { organisationUnit = user.getOrganisationUnit(); } List<OrganisationUnit> atLevels = new ArrayList<>(); List<OrganisationUnit> inGroups = new ArrayList<>(); if ( chart.hasOrganisationUnitLevels() ) { atLevels.addAll( organisationUnitService.getOrganisationUnitsAtLevels( chart.getOrganisationUnitLevels(), chart.getOrganisationUnits() ) ); } if ( chart.hasItemOrganisationUnitGroups() ) { inGroups.addAll( organisationUnitService.getOrganisationUnits( chart.getItemOrganisationUnitGroups(), chart.getOrganisationUnits() ) ); } chart.init( user, date, organisationUnit, atLevels, inGroups, format ); JFreeChart resultChart = getJFreeChart( chart ); chart.clearTransientState(); return resultChart; } // ------------------------------------------------------------------------- // Specific chart methods // ------------------------------------------------------------------------- @Override public JFreeChart getJFreePeriodChart( Indicator indicator, OrganisationUnit unit, boolean title, I18nFormat format ) { List<Period> periods = periodService.reloadPeriods( new RelativePeriods().setLast12Months( true ).getRelativePeriods( format, true ) ); Chart chart = new Chart(); if ( title ) { chart.setName( indicator.getName() ); } chart.setType( ChartType.LINE ); chart.setDimensions( 
DimensionalObject.DATA_X_DIM_ID, DimensionalObject.PERIOD_DIM_ID, DimensionalObject.ORGUNIT_DIM_ID ); chart.setHideLegend( true ); chart.addDataDimensionItem( indicator ); chart.setPeriods( periods ); chart.getOrganisationUnits().add( unit ); chart.setHideSubtitle( title ); chart.setFormat( format ); return getJFreeChart( chart ); } @Override public JFreeChart getJFreeOrganisationUnitChart( Indicator indicator, OrganisationUnit parent, boolean title, I18nFormat format ) { List<Period> periods = periodService.reloadPeriods( new RelativePeriods().setThisYear( true ).getRelativePeriods( format, true ) ); Chart chart = new Chart(); if ( title ) { chart.setName( indicator.getName() ); } chart.setType( ChartType.COLUMN ); chart.setDimensions( DimensionalObject.DATA_X_DIM_ID, DimensionalObject.ORGUNIT_DIM_ID, DimensionalObject.PERIOD_DIM_ID ); chart.setHideLegend( true ); chart.addDataDimensionItem( indicator ); chart.setPeriods( periods ); chart.setOrganisationUnits( parent.getSortedChildren() ); chart.setHideSubtitle( title ); chart.setFormat( format ); return getJFreeChart( chart ); } @Override public JFreeChart getJFreeChart( String name, PlotOrientation orientation, CategoryLabelPositions labelPositions, Map<String, Double> categoryValues ) { DefaultCategoryDataset dataSet = new DefaultCategoryDataset(); for ( Entry<String, Double> entry : categoryValues.entrySet() ) { dataSet.addValue( entry.getValue(), name, entry.getKey() ); } CategoryPlot plot = getCategoryPlot( dataSet, getBarRenderer(), orientation, labelPositions ); JFreeChart jFreeChart = getBasicJFreeChart( plot ); jFreeChart.setTitle( name ); return jFreeChart; } @Override public JFreeChart getJFreeChartHistory( DataElement dataElement, DataElementCategoryOptionCombo categoryOptionCombo, DataElementCategoryOptionCombo attributeOptionCombo, Period lastPeriod, OrganisationUnit organisationUnit, int historyLength, I18nFormat format ) { lastPeriod = periodService.reloadPeriod( lastPeriod ); List<Period> periods 
= periodService.getPeriods( lastPeriod, historyLength ); MinMaxDataElement minMax = minMaxDataElementService.getMinMaxDataElement( organisationUnit, dataElement, categoryOptionCombo ); UnivariateInterpolator interpolator = new SplineInterpolator(); Integer periodCount = 0; List<Double> x = new ArrayList<>(); List<Double> y = new ArrayList<>(); // --------------------------------------------------------------------- // DataValue, MinValue and MaxValue DataSets // --------------------------------------------------------------------- DefaultCategoryDataset dataValueDataSet = new DefaultCategoryDataset(); DefaultCategoryDataset metaDataSet = new DefaultCategoryDataset(); for ( Period period : periods ) { ++periodCount; period.setName( format.formatPeriod( period ) ); DataValue dataValue = dataValueService.getDataValue( dataElement, period, organisationUnit, categoryOptionCombo, attributeOptionCombo ); double value = 0; if ( dataValue != null && dataValue.getValue() != null && MathUtils.isNumeric( dataValue.getValue() ) ) { value = Double.parseDouble( dataValue.getValue() ); x.add( periodCount.doubleValue() ); y.add( value ); } dataValueDataSet.addValue( value, dataElement.getShortName(), period.getName() ); if ( minMax != null ) { metaDataSet.addValue( minMax.getMin(), "Min value", period.getName() ); metaDataSet.addValue( minMax.getMax(), "Max value", period.getName() ); } } // --------------------------------------------------------------------- // Interpolation DataSet // --------------------------------------------------------------------- if ( x.size() >= 3 ) // minimum 3 points required for interpolation { periodCount = 0; double[] xa = getArray( x ); int min = MathUtils.getMin( xa ).intValue(); int max = MathUtils.getMax( xa ).intValue(); try { UnivariateFunction function = interpolator.interpolate( xa, getArray( y ) ); for ( Period period : periods ) { if ( ++periodCount >= min && periodCount <= max ) { metaDataSet.addValue( function.value( periodCount ), 
"Regression value", period.getName() ); } } } catch ( MathRuntimeException ex ) { throw new RuntimeException( "Failed to interpolate", ex ); } } // --------------------------------------------------------------------- // Plots // --------------------------------------------------------------------- CategoryPlot plot = getCategoryPlot( dataValueDataSet, getBarRenderer(), PlotOrientation.VERTICAL, CategoryLabelPositions.UP_45 ); plot.setDataset( 1, metaDataSet ); plot.setRenderer( 1, getLineRenderer() ); JFreeChart jFreeChart = getBasicJFreeChart( plot ); return jFreeChart; } // ------------------------------------------------------------------------- // Supportive methods // ------------------------------------------------------------------------- /** * Returns a basic JFreeChart. */ private JFreeChart getBasicJFreeChart( CategoryPlot plot ) { JFreeChart jFreeChart = new JFreeChart( null, TITLE_FONT, plot, false ); jFreeChart.setBackgroundPaint( Color.WHITE ); jFreeChart.setAntiAlias( true ); return jFreeChart; } /** * Returns a CategoryPlot. */ private CategoryPlot getCategoryPlot( CategoryDataset dataSet, CategoryItemRenderer renderer, PlotOrientation orientation, CategoryLabelPositions labelPositions ) { CategoryPlot plot = new CategoryPlot( dataSet, new CategoryAxis(), new NumberAxis(), renderer ); plot.setDatasetRenderingOrder( DatasetRenderingOrder.FORWARD ); plot.setOrientation( orientation ); CategoryAxis xAxis = plot.getDomainAxis(); xAxis.setCategoryLabelPositions( labelPositions ); return plot; } /** * Returns a bar renderer. */ private BarRenderer getBarRenderer() { BarRenderer renderer = new BarRenderer(); renderer.setMaximumBarWidth( 0.07 ); for ( int i = 0; i < COLORS.length; i++ ) { renderer.setSeriesPaint( i, COLORS[i] ); renderer.setShadowVisible( false ); } return renderer; } /** * Returns a line and shape renderer. 
*/ private LineAndShapeRenderer getLineRenderer() { LineAndShapeRenderer renderer = new LineAndShapeRenderer(); for ( int i = 0; i < COLORS.length; i++ ) { renderer.setSeriesPaint( i, COLORS[i] ); } return renderer; } /** * Returns a stacked bar renderer. */ private StackedBarRenderer getStackedBarRenderer() { StackedBarRenderer renderer = new StackedBarRenderer(); for ( int i = 0; i < COLORS.length; i++ ) { renderer.setSeriesPaint( i, COLORS[i] ); renderer.setShadowVisible( false ); } return renderer; } /** * Returns a stacked area renderer. */ private AreaRenderer getStackedAreaRenderer() { StackedAreaRenderer renderer = new StackedAreaRenderer(); for ( int i = 0; i < COLORS.length; i++ ) { renderer.setSeriesPaint( i, COLORS[i] ); } return renderer; } /** * Returns a horizontal line marker for the given x value and label. */ private Marker getMarker( Double value, String label ) { Marker marker = new ValueMarker( value ); marker.setPaint( Color.BLACK ); marker.setStroke( new BasicStroke( 1.1f ) ); marker.setLabel( label ); marker.setLabelOffset( new RectangleInsets( -10, 50, 0, 0 ) ); marker.setLabelFont( SUB_TITLE_FONT ); return marker; } /** * Returns a JFreeChart of type defined in the chart argument. 
*/ private JFreeChart getJFreeChart( BaseChart chart ) { final CategoryDataset[] dataSets = getCategoryDataSet( chart ); final CategoryDataset dataSet = dataSets[0]; final BarRenderer barRenderer = getBarRenderer(); final LineAndShapeRenderer lineRenderer = getLineRenderer(); // --------------------------------------------------------------------- // Plot // --------------------------------------------------------------------- CategoryPlot plot = null; if ( chart.isType( ChartType.LINE ) ) { plot = new CategoryPlot( dataSet, new CategoryAxis(), new NumberAxis(), lineRenderer ); plot.setOrientation( PlotOrientation.VERTICAL ); } else if ( chart.isType( ChartType.COLUMN ) ) { plot = new CategoryPlot( dataSet, new CategoryAxis(), new NumberAxis(), barRenderer ); plot.setOrientation( PlotOrientation.VERTICAL ); } else if ( chart.isType( ChartType.BAR ) ) { plot = new CategoryPlot( dataSet, new CategoryAxis(), new NumberAxis(), barRenderer ); plot.setOrientation( PlotOrientation.HORIZONTAL ); } else if ( chart.isType( ChartType.AREA ) ) { return getStackedAreaChart( chart, dataSet ); } else if ( chart.isType( ChartType.PIE ) ) { return getMultiplePieChart( chart, dataSets ); } else if ( chart.isType( ChartType.STACKED_COLUMN ) ) { return getStackedBarChart( chart, dataSet, false ); } else if ( chart.isType( ChartType.STACKED_BAR ) ) { return getStackedBarChart( chart, dataSet, true ); } else if ( chart.isType( ChartType.RADAR ) ) { return getRadarChart( chart, dataSet ); } else if ( chart.isType( ChartType.GAUGE ) ) { Number number = dataSet.getValue( 0, 0 ); ValueDataset valueDataSet = new DefaultValueDataset( number ); return getGaugeChart( chart, valueDataSet ); } else { throw new IllegalArgumentException( "Illegal or no chart type: " + chart.getType() ); } if ( chart.isRegression() ) { plot.setDataset( 1, dataSets[1] ); plot.setRenderer( 1, lineRenderer ); } JFreeChart jFreeChart = new JFreeChart( chart.getName(), TITLE_FONT, plot, !chart.isHideLegend() ); 
setBasicConfig( jFreeChart, chart ); if ( chart.isTargetLine() ) { plot.addRangeMarker( getMarker( chart.getTargetLineValue(), chart.getTargetLineLabel() ) ); } if ( chart.isBaseLine() ) { plot.addRangeMarker( getMarker( chart.getBaseLineValue(), chart.getBaseLineLabel() ) ); } if ( chart.isHideSubtitle() ) { jFreeChart.addSubtitle( getSubTitle( chart ) ); } plot.setDatasetRenderingOrder( DatasetRenderingOrder.FORWARD ); // --------------------------------------------------------------------- // Category label positions // --------------------------------------------------------------------- CategoryAxis domainAxis = plot.getDomainAxis(); domainAxis.setCategoryLabelPositions( CategoryLabelPositions.UP_45 ); domainAxis.setLabel( chart.getDomainAxisLabel() ); ValueAxis rangeAxis = plot.getRangeAxis(); rangeAxis.setLabel( chart.getRangeAxisLabel() ); return jFreeChart; } private JFreeChart getStackedAreaChart( BaseChart chart, CategoryDataset dataSet ) { JFreeChart stackedAreaChart = ChartFactory.createStackedAreaChart( chart.getName(), chart.getDomainAxisLabel(), chart.getRangeAxisLabel(), dataSet, PlotOrientation.VERTICAL, !chart.isHideLegend(), false, false ); setBasicConfig( stackedAreaChart, chart ); CategoryPlot plot = (CategoryPlot) stackedAreaChart.getPlot(); plot.setOrientation( PlotOrientation.VERTICAL ); plot.setRenderer( getStackedAreaRenderer() ); CategoryAxis xAxis = plot.getDomainAxis(); xAxis.setCategoryLabelPositions( CategoryLabelPositions.UP_45 ); xAxis.setLabelFont( LABEL_FONT ); return stackedAreaChart; } private JFreeChart getRadarChart( BaseChart chart, CategoryDataset dataSet ) { SpiderWebPlot plot = new SpiderWebPlot( dataSet, TableOrder.BY_ROW ); plot.setLabelFont( LABEL_FONT ); JFreeChart radarChart = new JFreeChart( chart.getName(), TITLE_FONT, plot, !chart.isHideLegend() ); setBasicConfig( radarChart, chart ); return radarChart; } private JFreeChart getStackedBarChart( BaseChart chart, CategoryDataset dataSet, boolean horizontal ) { 
JFreeChart stackedBarChart = ChartFactory.createStackedBarChart( chart.getName(), chart.getDomainAxisLabel(), chart.getRangeAxisLabel(), dataSet, PlotOrientation.VERTICAL, !chart.isHideLegend(), false, false ); setBasicConfig( stackedBarChart, chart ); CategoryPlot plot = (CategoryPlot) stackedBarChart.getPlot(); plot.setOrientation( horizontal ? PlotOrientation.HORIZONTAL : PlotOrientation.VERTICAL ); plot.setRenderer( getStackedBarRenderer() ); CategoryAxis xAxis = plot.getDomainAxis(); xAxis.setCategoryLabelPositions( CategoryLabelPositions.UP_45 ); return stackedBarChart; } private JFreeChart getMultiplePieChart( BaseChart chart, CategoryDataset[] dataSets ) { JFreeChart multiplePieChart = ChartFactory.createMultiplePieChart( chart.getName(), dataSets[0], TableOrder.BY_ROW, !chart.isHideLegend(), false, false ); setBasicConfig( multiplePieChart, chart ); if ( multiplePieChart.getLegend() != null ) { multiplePieChart.getLegend().setItemFont( SUB_TITLE_FONT ); } MultiplePiePlot multiplePiePlot = (MultiplePiePlot) multiplePieChart.getPlot(); JFreeChart pieChart = multiplePiePlot.getPieChart(); pieChart.setBackgroundPaint( COLOR_TRANSPARENT ); pieChart.getTitle().setFont( SUB_TITLE_FONT ); PiePlot piePlot = (PiePlot) pieChart.getPlot(); piePlot.setBackgroundPaint( COLOR_TRANSPARENT ); piePlot.setOutlinePaint( COLOR_TRANSPARENT ); piePlot.setLabelFont( LABEL_FONT ); piePlot.setLabelGenerator( new StandardPieSectionLabelGenerator( "{2}" ) ); piePlot.setSimpleLabels( true ); piePlot.setIgnoreZeroValues( true ); piePlot.setIgnoreNullValues( true ); piePlot.setShadowXOffset( 0d ); piePlot.setShadowYOffset( 0d ); for ( int i = 0; i < dataSets[0].getColumnCount(); i++ ) { piePlot.setSectionPaint( dataSets[0].getColumnKey( i ), COLORS[(i % COLORS.length)] ); } return multiplePieChart; } private JFreeChart getGaugeChart( BaseChart chart, ValueDataset dataSet ) { MeterPlot meterPlot = new MeterPlot( dataSet ); meterPlot.setUnits( "" ); meterPlot.setRange( new Range( 0.0d, 
100d ) ); for ( int i = 0; i < 10; i++ ) { double start = i * 10; double end = start + 10; String label = String.valueOf( start ); meterPlot.addInterval( new MeterInterval( label, new Range( start, end ), COLOR_LIGHT_GRAY, null, COLOR_LIGHT_GRAY ) ); } meterPlot.setMeterAngle(180); meterPlot.setDialBackgroundPaint( COLOR_LIGHT_GRAY ); meterPlot.setDialShape( DialShape.CHORD ); meterPlot.setNeedlePaint( COLORS[0] ); meterPlot.setTickLabelsVisible( true ); meterPlot.setTickLabelFont( LABEL_FONT ); meterPlot.setTickLabelPaint( Color.BLACK ); meterPlot.setTickPaint( COLOR_LIGHTER_GRAY ); meterPlot.setValueFont( TITLE_FONT ); meterPlot.setValuePaint( Color.BLACK ); JFreeChart meterChart = new JFreeChart( chart.getName(), meterPlot ); setBasicConfig( meterChart, chart ); meterChart.removeLegend(); return meterChart; } /** * Sets basic configuration including title font, subtitle, background paint and * anti-alias on the given JFreeChart. */ private void setBasicConfig( JFreeChart jFreeChart, BaseChart chart) { jFreeChart.getTitle().setFont( TITLE_FONT ); jFreeChart.setBackgroundPaint( COLOR_TRANSPARENT ); jFreeChart.setAntiAlias( true ); if ( !chart.isHideTitle() ) { jFreeChart.addSubtitle( getSubTitle( chart ) ); } Plot plot = jFreeChart.getPlot(); plot.setBackgroundPaint( COLOR_TRANSPARENT ); plot.setOutlinePaint( COLOR_TRANSPARENT ); } private TextTitle getSubTitle( BaseChart chart ) { TextTitle textTitle = new TextTitle(); String title = chart.hasTitle() ? 
chart.getTitle() : chart.generateTitle(); textTitle.setFont( SUB_TITLE_FONT ); textTitle.setText( title ); return textTitle; } private CategoryDataset[] getCategoryDataSet( BaseChart chart ) { Map<String, Object> valueMap = new HashMap<>(); if ( chart.isAnalyticsType( AnalyticsType.AGGREGATE ) ) { valueMap = analyticsService.getAggregatedDataValueMapping( chart ); } else if ( chart.isAnalyticsType( AnalyticsType.EVENT ) ) { Grid grid = eventAnalyticsService.getAggregatedEventData( chart ); chart.setDataItemGrid( grid ); valueMap = GridUtils.getMetaValueMapping( grid, ( grid.getWidth() - 1 ) ); } DefaultCategoryDataset regularDataSet = new DefaultCategoryDataset(); DefaultCategoryDataset regressionDataSet = new DefaultCategoryDataset(); SimpleRegression regression = new SimpleRegression(); BaseAnalyticalObject.sortKeys( valueMap ); List<NameableObject> seriez = new ArrayList<>( chart.series() ); List<NameableObject> categories = new ArrayList<>( chart.category() ); if ( chart.hasSortOrder() ) { categories = getSortedCategories( categories, chart, valueMap ); } for ( NameableObject series : seriez ) { double categoryIndex = 0; for ( NameableObject category : categories ) { categoryIndex++; String key = getKey( series, category, chart.getAnalyticsType() ); Object object = valueMap.get( key ); Number value = object != null && object instanceof Number ? 
(Number) object : null; regularDataSet.addValue( value, series.getShortName(), category.getShortName() ); if ( chart.isRegression() && value != null && value instanceof Double && !MathUtils.isEqual( (Double) value, MathUtils.ZERO ) ) { regression.addData( categoryIndex, (Double) value ); } } if ( chart.isRegression() ) // Period must be category { categoryIndex = 0; for ( NameableObject category : chart.category() ) { final double value = regression.predict( categoryIndex++ ); // Enough values must exist for regression if ( !Double.isNaN( value ) ) { regressionDataSet.addValue( value, TREND_PREFIX + series.getShortName(), category.getShortName() ); } } } } return new CategoryDataset[]{ regularDataSet, regressionDataSet }; } /** * Creates a key based on the given input. Sorts the key on its components * to remove significance of column order. */ private String getKey( NameableObject series, NameableObject category, AnalyticsType analyticsType ) { String key = series.getUid() + DIMENSION_SEP + category.getUid(); // Replace potential operand separator with dimension separator key = AnalyticsType.AGGREGATE.equals( analyticsType ) ? key.replace( DataElementOperand.SEPARATOR, DIMENSION_SEP ) : key; // TODO fix issue with keys including -. return BaseAnalyticalObject.sortKey( key ); } /** * Returns a list of sorted nameable objects. Sorting is defined per the * corresponding value in the given value map. 
*/ private List<NameableObject> getSortedCategories( List<NameableObject> categories, BaseChart chart, Map<String, Object> valueMap ) { NameableObject series = chart.series().get( 0 ); int sortOrder = chart.getSortOrder(); List<NumericSortWrapper<NameableObject>> list = new ArrayList<>(); for ( NameableObject category : categories ) { String key = getKey( series, category, chart.getAnalyticsType() ); Object value = valueMap.get( key ); if ( value != null && value instanceof Number ) { list.add( new NumericSortWrapper<NameableObject>( category, (Double ) value, sortOrder ) ); } } Collections.sort( list ); return NumericSortWrapper.getObjectList( list ); } // ------------------------------------------------------------------------- // CRUD operations // ------------------------------------------------------------------------- @Override public int addChart( Chart chart ) { return chartStore.save( chart ); } @Override public void updateChart( Chart chart ) { chartStore.update( chart ); } @Override public Chart getChart( int id ) { return chartStore.get( id ); } @Override public Chart getChart( String uid ) { return chartStore.getByUid( uid ); } @Override public Chart getChartNoAcl( String uid ) { return chartStore.getByUidNoAcl( uid ); } @Override public void deleteChart( Chart chart ) { chartStore.delete( chart ); } @Override public List<Chart> getAllCharts() { return chartStore.getAll(); } @Override public Chart getChartByName( String name ) { return chartStore.getByName( name ); } @Override public int getChartCount() { return chartStore.getCount(); } @Override public int getChartCountByName( String name ) { return chartStore.getCountLikeName( name ); } @Override public List<Chart> getChartsBetween( int first, int max ) { return chartStore.getAllOrderedName( first, max ); } @Override public List<Chart> getChartsBetweenByName( String name, int first, int max ) { return chartStore.getAllLikeName( name, first, max ); } }
/**
 * Copyright (c) 2015 The JobX Project
 * <p>
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.jobxhub.server.dto;

import com.google.common.base.Function;
import com.jobxhub.server.domain.UserBean;
import org.springframework.beans.BeanUtils;

import java.io.Serializable;
import java.util.Arrays;
import java.util.Date;
import java.util.List;

/**
 * Transfer object (DTO) mirroring the persistent {@link UserBean}.
 * <p>
 * Plain mutable bean: all fields are copied property-by-property from the
 * domain object via Spring's {@link BeanUtils#copyProperties}, except
 * {@code execUser}, which is stored as a comma-separated string on the bean
 * and exposed here as a {@code List<String>}.
 */
public class User implements Serializable {

    private Long userId;
    private String userName;
    private String password;
    private String salt;
    private Long roleId;
    private String roleName;
    private String realName;
    private String contact;
    private String email;
    private String qq;
    private Date createTime;
    private Date modifyTime;
    private byte[] headerPic;
    private String picExtName;
    private String headerPath;
    private List<Long> agentIds;
    private List<String> execUser;

    /**
     * Adapter for bulk conversion of {@link UserBean} instances, e.g. with
     * Guava's {@code Lists.transform}.
     */
    public static Function<? super UserBean, ? extends User> transfer = new Function<UserBean, User>() {
        @Override
        public User apply(UserBean input) {
            return new User(input);
        }
    };

    public User() {
    }

    /**
     * Builds a DTO from the persistent bean.
     * <p>
     * Bug fix: the null guard now runs BEFORE the property copy. The original
     * code called {@code BeanUtils.copyProperties(user, this)} first and only
     * then checked {@code user != null}, so the guard was dead — a null source
     * already threw inside {@code copyProperties}. A null argument now yields
     * an empty DTO instead.
     *
     * @param user the domain bean to copy from; may be null
     */
    public User(UserBean user) {
        if (user == null) {
            return;
        }
        BeanUtils.copyProperties(user, this);
        if (user.getExecUser() != null) {
            // NOTE: Arrays.asList returns a fixed-size view; callers must not
            // add/remove elements on the resulting list.
            this.execUser = Arrays.asList(user.getExecUser().split(","));
        }
    }

    public Long getUserId() { return userId; }

    public void setUserId(Long userId) { this.userId = userId; }

    public String getUserName() { return userName; }

    public void setUserName(String userName) { this.userName = userName; }

    public String getPassword() { return password; }

    public void setPassword(String password) { this.password = password; }

    public String getSalt() { return salt; }

    public void setSalt(String salt) { this.salt = salt; }

    public Long getRoleId() { return roleId; }

    public void setRoleId(Long roleId) { this.roleId = roleId; }

    public String getRoleName() { return roleName; }

    public void setRoleName(String roleName) { this.roleName = roleName; }

    public String getRealName() { return realName; }

    public void setRealName(String realName) { this.realName = realName; }

    public String getContact() { return contact; }

    public void setContact(String contact) { this.contact = contact; }

    public String getEmail() { return email; }

    public void setEmail(String email) { this.email = email; }

    public String getQq() { return qq; }

    public void setQq(String qq) { this.qq = qq; }

    public Date getCreateTime() { return createTime; }

    public void setCreateTime(Date createTime) { this.createTime = createTime; }

    public Date getModifyTime() { return modifyTime; }

    public void setModifyTime(Date modifyTime) { this.modifyTime = modifyTime; }

    // NOTE(review): returns the internal array without a defensive copy, as the
    // original did — callers can mutate the stored picture bytes. Kept for
    // behavioral compatibility.
    public byte[] getHeaderPic() { return headerPic; }

    public void setHeaderPic(byte[] headerPic) { this.headerPic = headerPic; }

    public String getPicExtName() { return picExtName; }

    public void setPicExtName(String picExtName) { this.picExtName = picExtName; }

    public String getHeaderPath() { return headerPath; }

    public void setHeaderPath(String headerPath) { this.headerPath = headerPath; }

    public List<Long> getAgentIds() { return agentIds; }

    public void setAgentIds(List<Long> agentIds) { this.agentIds = agentIds; }

    public List<String> getExecUser() { return execUser; }

    public void setExecUser(List<String> execUser) { this.execUser = execUser; }
}
#parse("main/Header.vm")
package com.nativelibs4java.opencl;
import static com.nativelibs4java.opencl.JavaCL.log;
import static com.nativelibs4java.opencl.library.OpenCLLibrary.*;
import static com.nativelibs4java.opencl.library.IOpenCLLibrary.*;
import com.nativelibs4java.opencl.library.OpenCLLibrary;
import com.ochafik.util.string.StringUtils;
import org.bridj.*;

import java.util.*;
import java.lang.reflect.*;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * OpenCL error.
 * <p>
 * Wraps an OpenCL status code as an unchecked exception. Each known status
 * code has a dedicated {@link CLTypedException} subclass, registered through
 * the {@link ErrorCode} annotation and resolved reflectively by
 * {@link #error(int)}.
 *
 * @author ochafik
 */
@SuppressWarnings("serial")
public class CLException extends RuntimeException {

    /** Raw OpenCL error code (one of the CL_* constants). */
    protected int code;

    CLException(String message, int code) {
        super(message);
        this.code = code;
    }

    /** @return the raw OpenCL error code this exception was created from */
    public int getCode() {
        return code;
    }

    /** Associates a typed exception subclass with its OpenCL status code. */
    @java.lang.annotation.Retention(java.lang.annotation.RetentionPolicy.RUNTIME)
    @interface ErrorCode {
        int value();
    }

    /** Thrown when a feature requires a newer OpenCL version than available. */
    public static class CLVersionException extends CLException {
        public CLVersionException(String message) {
            super(message, 0);
        }
    }

    /**
     * Base class for exceptions that map one-to-one to an OpenCL status code.
     * The code and default message are derived from the subclass' annotation
     * and simple name.
     */
    public static class CLTypedException extends CLException {
        protected String message;

        public CLTypedException() {
            super("", 0);
            ErrorCode errorCode = getClass().getAnnotation(ErrorCode.class);
            this.code = errorCode.value();
            this.message = getClass().getSimpleName();
        }

        @Override
        public String getMessage() {
            return message + logSuffix;
        }

        /**
         * Enriches the message with kernel-argument context when a
         * clSetKernelArg-style call failed.
         * NOTE(review): {@code ptr} is accepted but currently unused, as in
         * the original code.
         */
        void setKernelArg(CLKernel kernel, int argIndex, long size, Pointer<?> ptr) {
            message += " (kernel name = " + kernel.getFunctionName();
            message += ", num args = " + kernel.getNumArgs();
            message += ", arg index = " + argIndex;
            message += ", arg size = " + size;
            CLProgram program = kernel.getProgram();
            if (program != null)
                message += ", source = <<<\n\t" + program.getSource().replaceAll("\n", "\n\t");
            message += "\n>>> )";
        }
    }

    @ErrorCode(CL_DEVICE_PARTITION_FAILED) public static class DevicePartitionFailed extends CLTypedException {}
    @ErrorCode(CL_EXEC_STATUS_ERROR_FOR_EVENTS_IN_WAIT_LIST) public static class ExecStatusErrorForEventsInWaitList extends CLTypedException {}
    @ErrorCode(CL_MISALIGNED_SUB_BUFFER_OFFSET) public static class MisalignedSubBufferOffset extends CLTypedException {}
    @ErrorCode(CL_COMPILE_PROGRAM_FAILURE) public static class CompileProgramFailure extends CLTypedException {}
    @ErrorCode(CL_LINKER_NOT_AVAILABLE) public static class LinkerNotAvailable extends CLTypedException {}
    @ErrorCode(CL_LINK_PROGRAM_FAILURE) public static class LinkProgramFailure extends CLTypedException {}
    @ErrorCode(CL_KERNEL_ARG_INFO_NOT_AVAILABLE) public static class KernelArgInfoNotAvailable extends CLTypedException {}
    @ErrorCode(CL_IMAGE_FORMAT_MISMATCH) public static class ImageFormatMismatch extends CLTypedException {}
    @ErrorCode(CL_PROFILING_INFO_NOT_AVAILABLE) public static class ProfilingInfoNotAvailable extends CLTypedException {}
    @ErrorCode(CL_DEVICE_NOT_AVAILABLE) public static class DeviceNotAvailable extends CLTypedException {}
    @ErrorCode(CL_OUT_OF_RESOURCES) public static class OutOfResources extends CLTypedException {}
    @ErrorCode(CL_COMPILER_NOT_AVAILABLE) public static class CompilerNotAvailable extends CLTypedException {}
    @ErrorCode(CL_INVALID_GLOBAL_WORK_SIZE) public static class InvalidGlobalWorkSize extends CLTypedException {}
    @ErrorCode(CL_MAP_FAILURE) public static class MapFailure extends CLTypedException {}
    @ErrorCode(CL_MEM_OBJECT_ALLOCATION_FAILURE) public static class MemObjectAllocationFailure extends CLTypedException {}
    @ErrorCode(CL_INVALID_EVENT_WAIT_LIST) public static class InvalidEventWaitList extends CLTypedException {}
    @ErrorCode(CL_INVALID_ARG_INDEX) public static class InvalidArgIndex extends CLTypedException {}
    @ErrorCode(CL_INVALID_ARG_SIZE) public static class InvalidArgSize extends CLTypedException {}
    @ErrorCode(CL_INVALID_ARG_VALUE) public static class InvalidArgValue extends CLTypedException {}
    @ErrorCode(CL_INVALID_BINARY) public static class InvalidBinary extends CLTypedException {}
    @ErrorCode(CL_INVALID_EVENT) public static class InvalidEvent extends CLTypedException {}
    @ErrorCode(CL_INVALID_IMAGE_FORMAT_DESCRIPTOR) public static class InvalidImageFormatDescriptor extends CLTypedException {}
    @ErrorCode(CL_INVALID_IMAGE_SIZE) public static class InvalidImageSize extends CLTypedException {}
    @ErrorCode(CL_INVALID_WORK_DIMENSION) public static class InvalidWorkDimension extends CLTypedException {}
    @ErrorCode(CL_INVALID_WORK_GROUP_SIZE) public static class InvalidWorkGroupSize extends CLTypedException {}
    @ErrorCode(CL_INVALID_WORK_ITEM_SIZE) public static class InvalidWorkItemSize extends CLTypedException {}
    @ErrorCode(CL_INVALID_OPERATION) public static class InvalidOperation extends CLTypedException {}
    @ErrorCode(CL_INVALID_BUFFER_SIZE) public static class InvalidBufferSize extends CLTypedException {}
    @ErrorCode(CL_INVALID_GLOBAL_OFFSET) public static class InvalidGlobalOffset extends CLTypedException {}
    @ErrorCode(CL_OUT_OF_HOST_MEMORY) public static class OutOfHostMemory extends CLTypedException {}
    @ErrorCode(CL_INVALID_COMPILER_OPTIONS) public static class InvalidCompilerOptions extends CLTypedException {}
    @ErrorCode(CL_INVALID_DEVICE) public static class InvalidDevice extends CLTypedException {}
    @ErrorCode(CL_INVALID_DEVICE_PARTITION_COUNT) public static class InvalidDevicePartitionCount extends CLTypedException {}
    @ErrorCode(CL_INVALID_HOST_PTR) public static class InvalidHostPtr extends CLTypedException {}
    @ErrorCode(CL_INVALID_IMAGE_DESCRIPTOR) public static class InvalidImageDescriptor extends CLTypedException {}
    @ErrorCode(CL_INVALID_LINKER_OPTIONS) public static class InvalidLinkerOptions extends CLTypedException {}
    @ErrorCode(CL_INVALID_PLATFORM) public static class InvalidPlatform extends CLTypedException {}
    @ErrorCode(CL_INVALID_PROPERTY) public static class InvalidProperty extends CLTypedException {}
    @ErrorCode(CL_INVALID_COMMAND_QUEUE) public static class InvalidCommandQueue extends CLTypedException {}
    @ErrorCode(CL_MEM_COPY_OVERLAP) public static class MemCopyOverlap extends CLTypedException {}
    @ErrorCode(CL_INVALID_CONTEXT) public static class InvalidContext extends CLTypedException {}
    @ErrorCode(CL_INVALID_KERNEL) public static class InvalidKernel extends CLTypedException {}
    @ErrorCode(CL_INVALID_GL_CONTEXT_APPLE) public static class InvalidGLContextApple extends CLTypedException {}
    @ErrorCode(CL_INVALID_GL_SHAREGROUP_REFERENCE_KHR) public static class InvalidGLShareGroupReference extends CLTypedException {}
    @ErrorCode(CL_INVALID_GL_OBJECT) public static class InvalidGLObject extends CLTypedException {}
    @ErrorCode(CL_INVALID_KERNEL_ARGS) public static class InvalidKernelArgs extends CLTypedException {}
    @ErrorCode(CL_INVALID_KERNEL_DEFINITION) public static class InvalidKernelDefinition extends CLTypedException {}
    @ErrorCode(CL_INVALID_KERNEL_NAME) public static class InvalidKernelName extends CLTypedException {}
    @ErrorCode(CL_INVALID_MEM_OBJECT) public static class InvalidMemObject extends CLTypedException {}
    @ErrorCode(CL_INVALID_MIP_LEVEL) public static class InvalidMipLevel extends CLTypedException {}
    @ErrorCode(CL_INVALID_PROGRAM) public static class InvalidProgram extends CLTypedException {}
    @ErrorCode(CL_INVALID_PROGRAM_EXECUTABLE) public static class InvalidProgramExecutable extends CLTypedException {}
    @ErrorCode(CL_INVALID_QUEUE_PROPERTIES) public static class InvalidQueueProperties extends CLTypedException {}
    @ErrorCode(CL_INVALID_VALUE) public static class InvalidValue extends CLTypedException {}
    @ErrorCode(CL_INVALID_SAMPLER) public static class InvalidSampler extends CLTypedException {}
    @ErrorCode(CL_INVALID_DEVICE_TYPE) public static class InvalidDeviceType extends CLTypedException {}
    @ErrorCode(CL_INVALID_BUILD_OPTIONS) public static class InvalidBuildOptions extends CLTypedException {}
    @ErrorCode(CL_BUILD_PROGRAM_FAILURE) public static class BuildProgramFailure extends CLTypedException {}

    /**
     * Resolves an OpenCL status code to the name(s) of the matching CL_*
     * constant(s) found reflectively in {@link OpenCLLibrary}.
     * <p>
     * When several constants share the value, a name containing
     * "invalid"/"bad"/"illegal"/"wrong" wins outright; otherwise all matches
     * are joined with " or ".
     *
     * @param err an OpenCL status code
     * @return the constant name(s), or null for {@code CL_SUCCESS}
     */
    public static String errorString(int err) {
        if (err == CL_SUCCESS)
            return null;
        List<String> candidates = new ArrayList<String>();
        for (Field f : OpenCLLibrary.class.getDeclaredFields()) {
            if (!Modifier.isStatic(f.getModifiers())) {
                continue;
            }
            if (f.getType().equals(Integer.TYPE)) {
                try {
                    int i = (Integer) f.get(null);
                    if (i == err) {
                        String name = f.getName(), lname = name.toLowerCase();
                        if (lname.contains("invalid") || lname.contains("bad") || lname.contains("illegal") || lname.contains("wrong")) {
                            candidates.clear();
                            candidates.add(name);
                            break;
                        } else
                            candidates.add(name);
                    }
                } catch (Exception e) {
                    // Consistency fix: use the same logging helper as error()
                    // instead of dumping to stderr via printStackTrace().
                    assert log(Level.SEVERE, null, e);
                }
            }
        }
        return StringUtils.implode(candidates, " or ");
    }

    /**
     * Tells whether an allocation-style call failed for lack of memory and a
     * retry is worthwhile. Triggers a GC on the first retry and additionally
     * sleeps briefly on the second; any other failure is rethrown via
     * {@link #error(int)}.
     *
     * @param err              the OpenCL status code of the failed call
     * @param previousAttempts how many retries were already made (0-based)
     * @return true if the caller should retry, false on success
     */
    static boolean failedForLackOfMemory(int err, int previousAttempts) {
        switch (err) {
            case CL_SUCCESS:
                return false;
            case CL_OUT_OF_HOST_MEMORY:
            case CL_OUT_OF_RESOURCES:
            case CL_MEM_OBJECT_ALLOCATION_FAILURE:
                if (previousAttempts <= 1) {
                    System.gc();
                    if (previousAttempts == 1) {
                        try {
                            Thread.sleep(100);
                        } catch (InterruptedException ex) {
                            // Fix: restore the interrupt status instead of
                            // silently swallowing the interruption.
                            Thread.currentThread().interrupt();
                        }
                    }
                    return true;
                }
                // Deliberate fall-through: out of retries, report the error.
            default:
                error(err);
                assert false; // unreachable: error() always throws for err != CL_SUCCESS
                return false;
        }
    }

    static final String logSuffix = System.getenv("CL_LOG_ERRORS") == null ? " (make sure to log all errors with environment variable CL_LOG_ERRORS=stdout)" : "";

    // Fix: volatile so the lazily-built registry is safely published; see error().
    static volatile Map<Integer, Class<? extends CLTypedException>> typedErrorClassesByCode;

    /**
     * Throws the typed exception matching the given OpenCL status code, or a
     * generic {@link CLException} when no typed subclass is registered.
     * Returns normally only for {@code CL_SUCCESS}.
     *
     * @param err an OpenCL status code
     */
    @SuppressWarnings("unchecked")
    public static void error(int err) {
        if (err == CL_SUCCESS)
            return;
        // Thread-safety fix: build the code -> class registry into a local map
        // and publish it through the volatile field only once fully populated.
        // The original populated the shared field in place, so a concurrent
        // caller could observe (and use) a half-filled map. A benign race
        // remains (two threads may build it), but the result is identical.
        Map<Integer, Class<? extends CLTypedException>> classesByCode = typedErrorClassesByCode;
        if (classesByCode == null) {
            classesByCode = new HashMap<Integer, Class<? extends CLTypedException>>();
            for (Class<?> c : CLException.class.getDeclaredClasses()) {
                if (c == CLTypedException.class || !CLTypedException.class.isAssignableFrom(c))
                    continue;
                classesByCode.put(c.getAnnotation(ErrorCode.class).value(), (Class<? extends CLTypedException>) c);
            }
            typedErrorClassesByCode = classesByCode;
        }
        CLException toThrow = null;
        Class<? extends CLTypedException> c = classesByCode.get(err);
        if (c != null) {
            try {
                toThrow = c.newInstance();
            } catch (InstantiationException ex) {
                assert log(Level.SEVERE, null, ex);
            } catch (IllegalAccessException ex) {
                assert log(Level.SEVERE, null, ex);
            }
        }
        if (toThrow == null)
            toThrow = new CLException("OpenCL Error : " + errorString(err) + logSuffix, err);
        throw toThrow;
    }
}
package com.compomics.util.experiment.biology.ions; import com.compomics.util.experiment.biology.atoms.AtomChain; import com.compomics.util.experiment.biology.ions.impl.PrecursorIon; import com.compomics.util.experiment.biology.ions.impl.Glycan; import com.compomics.util.experiment.biology.ions.impl.PeptideFragmentIon; import com.compomics.util.experiment.biology.ions.impl.TagFragmentIon; import com.compomics.util.experiment.biology.ions.impl.ReporterIon; import com.compomics.util.experiment.biology.ions.impl.ElementaryIon; import com.compomics.util.experiment.biology.ions.impl.RelatedIon; import com.compomics.util.experiment.biology.ions.impl.ImmoniumIon; import com.compomics.util.experiment.biology.aminoacids.AminoAcid; import com.compomics.util.experiment.personalization.ExperimentObject; import com.compomics.util.pride.CvTerm; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.stream.Collectors; /** * This class models an ion. * * @author Marc Vaudel */ public abstract class Ion extends ExperimentObject { /** * Empty default constructor */ public Ion() { } /** * Serial number for backward compatibility. */ static final long serialVersionUID = -1505719074403886934L; /** * Cache for the neutral losses as string. */ private String neutralLossesAsString = null; /** * An enumerator of the supported ion types. */ public enum IonType { /** * Identifier for a peptide fragment ion. */ PEPTIDE_FRAGMENT_ION(0), /** * A tag fragment ion */ TAG_FRAGMENT_ION(1), /** * Identifier for an MH ion. The number of H is not represented here. */ PRECURSOR_ION(2), /** * Identifier for an immonium ion. */ IMMONIUM_ION(3), /** * Identifier for a reporter ion. */ REPORTER_ION(4), /** * Identifier for a glycan. */ GLYCAN(5), /** * Identifier for an elementary ion. */ ELEMENTARY_ION(6), /** * Identifier for an unknown ion. */ UNKNOWN(7), /** * Identifier for a related ion. 
*/ RELATED_ION(8); /** * The index of the type. */ public final int index; /** * Constructor. * * @param index the index of the type */ private IonType(int index) { this.index = index; } } /** * Type of ion. */ protected IonType type = IonType.UNKNOWN; /** * The theoretic mass. * * @deprecated use the double value instead. */ protected Double theoreticMass; /** * The theoretic mass. */ protected double theoreticMass1; /** * The atomic composition of the ion. */ protected AtomChain atomChain; /** * Returns the name of the ion. The name should be short enough to be * displayed on a spectrum. * * @return the name of the ion */ public abstract String getName(); /** * Returns the CV term adapted to the fragment ion. Null if none * corresponding. * * @return the CV term adapted to the fragment ion. Null if none * corresponding */ public abstract CvTerm getPrideCvTerm(); /** * Returns the CV term adapted to the fragment ion. Null if none * corresponding. * * @return the CV term adapted to the fragment ion. Null if none * corresponding */ public abstract CvTerm getPsiMsCvTerm(); /** * Returns the ion subtype. * * @return the ion subtype as integer */ public abstract int getSubType(); /** * Returns the subtype as string. * * @return the subtype as string */ public abstract String getSubTypeAsString(); /** * Returns an array of possible subtypes. 
* * @param ionType an array of possible subtypes * * @return an array of possible subtypes */ public static int[] getPossibleSubtypes(IonType ionType) { switch (ionType) { case ELEMENTARY_ION: return ElementaryIon.getPossibleSubtypes(); case GLYCAN: return Glycan.getPossibleSubtypes(); case IMMONIUM_ION: return ImmoniumIon.getPossibleSubtypes(); case PEPTIDE_FRAGMENT_ION: return PeptideFragmentIon.getPossibleSubtypes(); case TAG_FRAGMENT_ION: return TagFragmentIon.getPossibleSubtypes(); case PRECURSOR_ION: return PrecursorIon.getPossibleSubtypes(); case REPORTER_ION: return ReporterIon.getPossibleSubtypes(); case RELATED_ION: return RelatedIon.getPossibleSubtypes(); default: throw new UnsupportedOperationException("Not supported yet."); } } /** * Returns a hashset of possible subtypes. * * @param ionType a hashset of possible subtypes * * @return a hashset of possible subtypes */ public static HashSet<Integer> getPossibleSubtypesAsSet(IonType ionType) { int[] possibleSubtypes = getPossibleSubtypes(ionType); return Arrays.stream(possibleSubtypes) .boxed() .collect(Collectors.toCollection(HashSet::new)); } /** * Returns the possible neutral losses of this ion type. An empty list if * none. * * @return the possible neutral losses of this ion type */ public abstract NeutralLoss[] getNeutralLosses(); /** * Indicates whether the ion has a neutral loss. * * @return a boolean indicating whether the ion has a neutral loss */ public boolean hasNeutralLosses() { switch (type) { case PEPTIDE_FRAGMENT_ION: case TAG_FRAGMENT_ION: case PRECURSOR_ION: NeutralLoss[] neutralLosses = getNeutralLosses(); return neutralLosses != null && neutralLosses.length > 0; default: return false; } } /** * Returns a boolean indicating whether the ion is the same as another ion. 
* * @param anotherIon the other ion * @return a boolean indicating whether the ion is the same as another ion */ public abstract boolean isSameAs(Ion anotherIon); /** * Returns the neutral loss (if any), the empty string if no loss. * * @return the neutral loss */ public String getNeutralLossesAsString() { if (neutralLossesAsString == null) { neutralLossesAsString = getNeutralLossesAsString(getNeutralLosses()); } return neutralLossesAsString; } /** * Returns the neutral loss (if any), the empty string if no loss. * * @param neutralLosses the neutral loss (if any) * @return the neutral loss */ public static String getNeutralLossesAsString(NeutralLoss[] neutralLosses) { if (neutralLosses == null) { return ""; } ArrayList<String> names = new ArrayList<>(neutralLosses.length); for (NeutralLoss neutralLoss : neutralLosses) { names.add(neutralLoss.name); } Collections.sort(names); StringBuilder result = new StringBuilder(4 * neutralLosses.length); for (String name : names) { result.append('-').append(name); } return result.toString(); } /** * Returns the theoretic mass, from the atomic composition if available, * from the theoreticMass field otherwise. * * @return the theoretic mass */ public double getTheoreticMass() { if (atomChain != null) { return atomChain.getMass(); } return theoreticMass1; } /** * Returns the m/z expected for this ion at the given charge. * * @param charge the charge of interest * * @return the m/z expected for this ion */ public double getTheoreticMz(Integer charge) { double protonMass = ElementaryIon.proton.getTheoreticMass(); double mz = getTheoreticMass() + protonMass; if (charge > 1) { mz = (mz + (charge - 1) * protonMass) / charge; } return mz; } /** * Returns the atomic composition. * * @return the atomic composition */ public AtomChain getAtomicComposition() { return atomChain; } /** * Returns the atomic composition. 
* * @param atomChain the atomic composition */ public void setAtomicComposition(AtomChain atomChain) { this.atomChain = atomChain; } /** * Sets a new theoretic mass. * * @param theoreticMass a new theoretic mass */ public void setTheoreticMass(double theoreticMass) { this.theoreticMass1 = theoreticMass; } /** * Returns the ion type. * * @return the ion type */ public IonType getType() { return type; } /** * Returns the implemented ion types. * * @return the implemented ion types */ public static ArrayList<IonType> getImplementedIonTypes() { ArrayList<IonType> result = new ArrayList<>(); result.add(IonType.ELEMENTARY_ION); result.add(IonType.GLYCAN); result.add(IonType.IMMONIUM_ION); result.add(IonType.PEPTIDE_FRAGMENT_ION); result.add(IonType.TAG_FRAGMENT_ION); result.add(IonType.PRECURSOR_ION); result.add(IonType.REPORTER_ION); result.add(IonType.RELATED_ION); return result; } /** * Returns the type of ion as string. * * @return the type of ion as string */ public String getTypeAsString() { return getTypeAsString(type); } /** * Returns the type of ion as string. * * @param type the type of ion as string * @return the type of ion as string */ public static String getTypeAsString(IonType type) { switch (type) { case PEPTIDE_FRAGMENT_ION: return "Peptide fragment ion"; case TAG_FRAGMENT_ION: return "Tag fragment ion"; case PRECURSOR_ION: return "Precursor ion"; case IMMONIUM_ION: return "Immonium ion"; case REPORTER_ION: return "Reporter ion"; case GLYCAN: return "Glycan"; case ELEMENTARY_ION: return "Elementary ion"; case RELATED_ION: return "Related ion"; case UNKNOWN: return "Unknown ion type"; default: throw new UnsupportedOperationException("No name for ion type " + type + "."); } } /** * Convenience method returning a generic ion based on the given ion type. * * @param ionType the ion type * @param subType the ion subtype * @param neutralLosses the neutral losses. Null list if none. 
* @return a generic ion */ public static Ion getGenericIon(IonType ionType, int subType, NeutralLoss[] neutralLosses) { switch (ionType) { case ELEMENTARY_ION: return new ElementaryIon("new ElementaryIon", 0.0, subType); case GLYCAN: return new Glycan("new Glycan", "new Glycan"); case IMMONIUM_ION: return ImmoniumIon.getImmoniumIon(subType); case PEPTIDE_FRAGMENT_ION: return new PeptideFragmentIon(subType, neutralLosses); case TAG_FRAGMENT_ION: return new TagFragmentIon(subType, neutralLosses); case PRECURSOR_ION: return new PrecursorIon(neutralLosses); case REPORTER_ION: return ReporterIon.getReporterIon(subType); case RELATED_ION: return new RelatedIon(AminoAcid.A, AtomChain.getAtomChain("H"), -1, false); default: throw new UnsupportedOperationException("No generic constructor for " + getTypeAsString(ionType) + "."); } } /** * Convenience method returning a generic ion based on the given ion type * without neutral losses. * * @param ionType the ion type * @param subType the ion subtype * @return a generic ion */ public static Ion getGenericIon(IonType ionType, int subType) { switch (ionType) { case ELEMENTARY_ION: return new ElementaryIon("new ElementaryIon", 0.0, subType); case GLYCAN: return new Glycan("new Glycon", "new Glycon"); case IMMONIUM_ION: return ImmoniumIon.getImmoniumIon(subType); case PEPTIDE_FRAGMENT_ION: return new PeptideFragmentIon(subType); case TAG_FRAGMENT_ION: return new TagFragmentIon(subType); case PRECURSOR_ION: return new PrecursorIon(); case REPORTER_ION: return ReporterIon.getReporterIon(subType); case RELATED_ION: return new RelatedIon(AminoAcid.A, AtomChain.getAtomChain("H"), -1, false); default: throw new UnsupportedOperationException("No generic constructor for " + getTypeAsString(ionType) + "."); } } }
package nxt; import nxt.util.Logger; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; final class DbVersion { static void init() { try (Connection con = Db.getConnection(); Statement stmt = con.createStatement()) { int nextUpdate = 1; try { ResultSet rs = stmt.executeQuery("SELECT next_update FROM version"); if (! rs.next()) { throw new RuntimeException("Invalid version table"); } nextUpdate = rs.getInt("next_update"); if (! rs.isLast()) { throw new RuntimeException("Invalid version table"); } rs.close(); Logger.logMessage("Database update may take a while if needed, current db version " + (nextUpdate - 1) + "..."); } catch (SQLException e) { Logger.logMessage("Initializing an empty database"); stmt.executeUpdate("CREATE TABLE version (next_update INT NOT NULL)"); stmt.executeUpdate("INSERT INTO version VALUES (1)"); con.commit(); } update(nextUpdate); } catch (SQLException e) { throw new RuntimeException(e.toString(), e); } } private static void apply(String sql) { try (Connection con = Db.getConnection(); Statement stmt = con.createStatement()) { try { if (sql != null) { Logger.logDebugMessage("Will apply sql:\n" + sql); stmt.executeUpdate(sql); } stmt.executeUpdate("UPDATE version SET next_update = next_update + 1"); con.commit(); } catch (Exception e) { con.rollback(); throw e; } } catch (SQLException e) { throw new RuntimeException("Database error executing " + sql, e); } } private static void update(int nextUpdate) { switch (nextUpdate) { case 1: apply("CREATE TABLE IF NOT EXISTS block (db_id INT IDENTITY, id BIGINT NOT NULL, version INT NOT NULL, " + "timestamp INT NOT NULL, previous_block_id BIGINT, " + "FOREIGN KEY (previous_block_id) REFERENCES block (id) ON DELETE CASCADE, total_amount INT NOT NULL, " + "total_fee INT NOT NULL, payload_length INT NOT NULL, generator_public_key BINARY(32) NOT NULL, " + "previous_block_hash BINARY(32), cumulative_difficulty 
VARBINARY NOT NULL, base_target BIGINT NOT NULL, " + "next_block_id BIGINT, FOREIGN KEY (next_block_id) REFERENCES block (id) ON DELETE SET NULL, " + "index INT NOT NULL, height INT NOT NULL, generation_signature BINARY(64) NOT NULL, " + "block_signature BINARY(64) NOT NULL, payload_hash BINARY(32) NOT NULL, generator_account_id BIGINT NOT NULL)"); case 2: apply("CREATE UNIQUE INDEX IF NOT EXISTS block_id_idx ON block (id)"); case 3: apply("CREATE TABLE IF NOT EXISTS transaction (db_id INT IDENTITY, id BIGINT NOT NULL, " + "deadline SMALLINT NOT NULL, sender_public_key BINARY(32) NOT NULL, recipient_id BIGINT NOT NULL, " + "amount INT NOT NULL, fee INT NOT NULL, referenced_transaction_id BIGINT, index INT NOT NULL, " + "height INT NOT NULL, block_id BIGINT NOT NULL, FOREIGN KEY (block_id) REFERENCES block (id) ON DELETE CASCADE, " + "signature BINARY(64) NOT NULL, timestamp INT NOT NULL, type TINYINT NOT NULL, subtype TINYINT NOT NULL, " + "sender_account_id BIGINT NOT NULL, attachment OTHER)"); case 4: apply("CREATE UNIQUE INDEX IF NOT EXISTS transaction_id_idx ON transaction (id)"); case 5: apply("CREATE UNIQUE INDEX IF NOT EXISTS block_height_idx ON block (height)"); case 6: apply("CREATE INDEX IF NOT EXISTS transaction_timestamp_idx ON transaction (timestamp)"); case 7: apply("CREATE INDEX IF NOT EXISTS block_generator_account_id_idx ON block (generator_account_id)"); case 8: apply("CREATE INDEX IF NOT EXISTS transaction_sender_account_id_idx ON transaction (sender_account_id)"); case 9: apply("CREATE INDEX IF NOT EXISTS transaction_recipient_id_idx ON transaction (recipient_id)"); case 10: apply("ALTER TABLE block ALTER COLUMN generator_account_id RENAME TO generator_id"); case 11: apply("ALTER TABLE transaction ALTER COLUMN sender_account_id RENAME TO sender_id"); case 12: apply("ALTER INDEX block_generator_account_id_idx RENAME TO block_generator_id_idx"); case 13: apply("ALTER INDEX transaction_sender_account_id_idx RENAME TO transaction_sender_id_idx"); 
case 14: apply("ALTER TABLE block DROP COLUMN IF EXISTS index"); case 15: apply("ALTER TABLE transaction DROP COLUMN IF EXISTS index"); case 16: apply("ALTER TABLE transaction ADD COLUMN IF NOT EXISTS block_timestamp INT"); case 17: apply(null); case 18: apply("ALTER TABLE transaction ALTER COLUMN block_timestamp SET NOT NULL"); case 19: apply("ALTER TABLE transaction ADD COLUMN IF NOT EXISTS hash BINARY(32)"); case 20: apply(null); case 21: apply(null); case 22: apply("CREATE INDEX IF NOT EXISTS transaction_hash_idx ON transaction (hash)"); case 23: apply(null); case 24: apply("ALTER TABLE block ALTER COLUMN total_amount BIGINT"); case 25: apply("ALTER TABLE block ALTER COLUMN total_fee BIGINT"); case 26: apply("ALTER TABLE transaction ALTER COLUMN amount BIGINT"); case 27: apply("ALTER TABLE transaction ALTER COLUMN fee BIGINT"); case 28: apply(null); case 29: apply(null); case 30: apply(null); case 31: apply(null); case 32: apply(null); case 33: apply(null); case 34: apply(null); case 35: apply(null); case 36: apply("CREATE TABLE IF NOT EXISTS peer (address VARCHAR PRIMARY KEY)"); case 37: if (!Constants.isTestnet) { apply("INSERT INTO peer (address) VALUES " + "('178.194.110.193'), ('nrs01.nxtsolaris.info'), ('xeqtorcreed2.vps.nxtcrypto.org'), ('5.101.101.137'), " + "('54.76.203.25'), ('ns1.anameserver.de'), ('cryptkeeper.vps.nxtcrypto.org'), ('vps11.nxtcrypto.org'), " + "('80.137.236.53'), ('wallet.nxtty.com'), ('2.84.130.26'), ('91.121.223.107'), ('80.137.229.25'), " + "('enricoip.no-ip.biz'), ('195.154.127.172'), ('69.64.35.62'), ('88.168.85.129:7874'), ('105.229.160.133'), " + "('rigel1.ddns.net'), ('59.36.74.47'), ('n2.nxtportal.org'), ('samson.vps.nxtcrypto.org'), " + "('nrs02.nxtsolaris.info'), ('miasik.no-ip.org'), ('vh44.ddns.net:7873'), ('212.18.225.173'), " + "('91.121.41.192'), ('serras.homenet.org'), ('217.17.88.5'), ('77.179.100.57'), ('89.98.191.95'), " + "('nxt1107.no-ip.biz'), ('mycrypto.no-ip.biz'), ('89.250.240.63'), ('vps4.nxtcrypto.org'), " 
+ "('89.72.57.246'), ('bitsy10.vps.nxtcrypto.org'), ('85.191.52.188'), ('gayka.no-ip.info'), " + "('77.179.99.25'), ('106.186.127.189'), ('23.238.198.218'), ('www.mycoinmine.org'), ('162.201.61.133'), " + "('54.191.200.44'), ('54.186.166.78'), ('212.129.12.103'), ('node0.forgenxt.com'), ('188.226.179.119'), " + "('lyynx.vps.nxtcrypto.org'), ('nxt.phukhew.com'), ('162.242.16.147'), ('pakisnxt.no-ip.org'), " + "('85.214.200.59'), ('101.164.96.109'), ('nxt.alkeron.com'), ('83.212.102.244'), ('23.88.229.194'), " + "('162.243.213.190'), ('87.139.122.157'), ('nxt1.webice.ru'), ('37.59.41.216'), ('46.149.84.141'), " + "('87.138.143.21'), ('151.236.29.228'), ('99.244.142.34'), ('nxt10.webice.ru'), ('cobaltskky.hopto.org'), " + "('83.212.103.18'), ('nxt9.webice.ru'), ('89.70.254.145'), ('190.10.9.166'), ('95.85.46.177'), " + "('dreschel2.dyndns.org'), ('113.77.223.63'), ('50.98.11.195'), ('209.126.70.159'), ('178.24.158.31'), " + "('54.210.102.135'), ('83.212.102.193'), ('195.154.174.124'), ('162.243.243.32'), ('87.148.12.130'), " + "('83.69.2.13'), ('cryonet.de'), ('79.24.191.97'), ('nxt.homer.ru'), ('nxtpi.zapto.org'), " + "('nxs1.hanza.co.id'), ('23.102.0.45'), ('2.86.61.231'), ('87.230.14.1'), ('105.224.252.123'), " + "('88.163.78.131'), ('50.43.35.122'), ('80.137.233.81'), ('24.149.8.238'), ('91.34.227.212'), " + "('217.186.178.66'), ('178.198.145.191'), ('73.36.141.199'), ('192.3.157.232'), ('2.225.88.10'), " + "('74.192.195.151'), ('108.61.57.76'), ('109.230.224.65'), ('94.26.187.66'), ('124.244.49.12'), " + "('88.12.55.125'), ('180.129.0.77'), ('162.243.145.83'), ('93.171.209.103'), ('87.139.122.48'), " + "('89.250.240.60'), ('83.212.102.234'), ('112.199.191.219'), ('vps10.nxtcrypto.org'), ('85.10.201.15'), " + "('179.43.128.136'), ('85.25.134.59'), ('80.86.92.70'), ('178.162.39.12'), ('46.194.145.144'), " + "('bitsy09.vps.nxtcrypto.org'), ('147.32.246.247'), ('74.91.124.3'), ('95.68.87.206'), " + "('115.28.220.183'), ('91.34.239.93'), ('121.40.84.99'), 
('168.63.232.16'), ('105.227.3.50'), " + "('211.149.213.86'), ('nxtcoint119a.no-ip.org'), ('186.220.71.26'), ('bitsy05.vps.nxtcrypto.org'), " + "('80.137.229.62'), ('162.243.198.24'), ('61.131.37.210'), ('n1.nxtportal.org'), ('nxtx.ru'), " + "('201.209.45.121'), ('5.35.119.103'), ('105.229.173.29'), ('114.215.142.34:15011'), ('caelum.no-ip.org'), " + "('46.109.166.244'), ('89.250.240.56'), ('77.179.96.66'), ('90.184.9.47'), ('188.226.206.41'), " + "('nxtnode.noip.me'), ('bitsy07.vps.nxtcrypto.org'), ('abctc.vps.nxtcrypto.org'), " + "('bitsy01.vps.nxtcrypto.org'), ('107.170.189.27'), ('109.74.203.187:7874'), ('188.35.156.10'), " + "('cubie-solar.mjke.de:7873'), ('46.173.9.98'), ('xyzzyx.vps.nxtcrypto.org'), ('188.226.197.131'), " + "('jefdiesel.vps.nxtcrypto.org'), ('89.250.243.166'), ('46.194.14.81'), ('109.254.63.44'), " + "('80.86.92.139'), ('91.121.41.45'), ('nxt01.now.im'), ('54.179.177.81'), ('83.212.124.193'), " + "('bitsy03.vps.nxtcrypto.org'), ('xeqtorcreed.vps.nxtcrypto.org'), ('bitsy08.vps.nxtcrypto.org'), " + "('178.62.50.75'), ('212.83.145.17'), ('107.170.164.129'), ('67.212.71.172'), ('oldnbold.vps.nxtcrypto.org'), " + "('54.72.17.26'), ('24.224.68.29'), ('107.170.35.110'), ('nxt7.webice.ru'), ('88.79.173.189'), " + "('83.212.102.194'), ('113.10.136.142'), ('54.187.11.72'), ('139.228.37.156'), ('105.224.252.84'), " + "('bitsy02.vps.nxtcrypto.org'), ('199.217.119.33'), ('silvanoip.dhcp.biz'), ('84.242.91.139'), " + "('80.153.101.190'), ('198.199.81.29'), ('54.86.132.52'), ('77.58.253.73'), ('213.46.57.77'), " + "('54.84.4.195'), ('105.229.177.132'), ('217.26.24.27'), ('raspnxt.hopto.org'), ('188.138.88.154'), " + "('113.78.101.129'), ('nxt2.webice.ru'), ('vps5.nxtcrypto.org'), ('80.86.92.66'), ('107.170.3.62'), " + "('85.214.222.82'), ('94.74.170.10'), ('24.230.136.187'), ('99.47.218.132'), ('nxt.hofhom.nl'), " + "('nxt.sx'), ('188.167.90.118'), ('77.103.104.254'), ('allbits.vps.nxtcrypto.org'), ('24.161.110.115'), " + "('90.146.62.91'), 
('91.69.121.229'), ('131.151.103.114'), ('82.146.36.253'), ('162.243.80.209'), " + "('89.250.243.200'), ('83.167.48.253'), ('54.88.54.58'), ('105.224.252.58'), ('nxt6.webice.ru'), " + "('178.15.99.67'), ('54.85.132.143'), ('89.250.243.167'), ('85.214.199.215'), ('82.46.194.21'), " + "('83.212.102.247'), ('bitsy06.vps.nxtcrypto.org'), ('nxs2.hanza.co.id'), ('23.238.198.144'), " + "('screenname.vps.nxtcrypto.org'), ('67.212.71.171'), ('54.191.19.147'), ('24.91.143.15'), ('83.212.103.90'), " + "('83.212.97.126'), ('77.249.237.229'), ('67.212.71.173'), ('37.120.168.131'), ('nxt4.webice.ru'), " + "('184.57.30.220'), ('95.24.87.207'), ('162.243.38.34'), ('14.200.16.219'), ('80.137.243.161'), " + "('113.78.102.157'), ('59.37.188.95'), ('nxt8.webice.ru'), ('nxtnode.hopto.org'), ('113.77.25.179'), " + "('178.33.203.157'), ('91.120.22.146'), ('178.150.207.53'), ('77.179.117.226'), ('69.141.139.8'), " + "('vh44.ddns.net'), ('83.212.103.212'), ('95.85.24.151'), ('5.39.76.123'), ('209.126.70.170'), " + "('cubie-solar.mjke.de'), ('106.187.95.232'), ('185.12.44.108'), ('vps9.nxtcrypto.org'), ('nxt5.webice.ru'), " + "('bitsy04.vps.nxtcrypto.org'), ('nxt3.webice.ru'), ('69.122.140.198'), ('54.210.102.134'), " + "('46.109.165.4'), ('panzetti.vps.nxtcrypto.org'), ('80.137.230.115')"); } else { apply("INSERT INTO peer (address) VALUES " + "('178.150.207.53'), ('192.241.223.132'), ('node9.mynxtcoin.org'), ('node10.mynxtcoin.org'), " + "('node3.mynxtcoin.org'), ('109.87.169.253'), ('nxtnet.fr'), ('50.112.241.97'), " + "('2.84.142.149'), ('bug.airdns.org'), ('83.212.103.14'), ('62.210.131.30'), ('104.131.254.22'), " + "('46.28.111.249')"); } case 38: apply("ALTER TABLE transaction ADD COLUMN IF NOT EXISTS full_hash BINARY(32)"); case 39: apply("ALTER TABLE transaction ADD COLUMN IF NOT EXISTS referenced_transaction_full_hash BINARY(32)"); case 40: apply(null); case 41: apply("ALTER TABLE transaction ALTER COLUMN full_hash SET NOT NULL"); case 42: apply("CREATE UNIQUE INDEX IF NOT EXISTS 
transaction_full_hash_idx ON transaction (full_hash)"); case 43: apply(null); case 44: apply(null); case 45: apply(null); case 46: apply("ALTER TABLE transaction ADD COLUMN IF NOT EXISTS attachment_bytes VARBINARY"); case 47: BlockDb.deleteAll(); apply(null); case 48: apply("ALTER TABLE transaction DROP COLUMN attachment"); case 49: apply("UPDATE transaction a SET a.referenced_transaction_full_hash = " + "(SELECT full_hash FROM transaction b WHERE b.id = a.referenced_transaction_id) " + "WHERE a.referenced_transaction_full_hash IS NULL"); case 50: apply("ALTER TABLE transaction DROP COLUMN referenced_transaction_id"); case 51: apply("ALTER TABLE transaction DROP COLUMN hash"); case 52: if (Constants.isTestnet) { BlockchainProcessorImpl.getInstance().validateAtNextScan(); } apply(null); case 53: apply("DROP INDEX transaction_recipient_id_idx"); case 54: apply("ALTER TABLE transaction ALTER COLUMN recipient_id SET NULL"); case 55: try (Connection con = Db.getConnection(); Statement stmt = con.createStatement(); PreparedStatement pstmt = con.prepareStatement("UPDATE transaction SET recipient_id = null WHERE type = ? 
AND subtype = ?")) { try { for (byte type = 0; type <= 4; type++) { for (byte subtype = 0; subtype <= 8; subtype++) { TransactionType transactionType = TransactionType.findTransactionType(type, subtype); if (transactionType == null) { continue; } if (!transactionType.hasRecipient()) { pstmt.setByte(1, type); pstmt.setByte(2, subtype); pstmt.executeUpdate(); } } } stmt.executeUpdate("UPDATE version SET next_update = next_update + 1"); con.commit(); } catch (SQLException e) { con.rollback(); throw e; } } catch (SQLException e) { throw new RuntimeException(e); } case 56: apply("CREATE INDEX IF NOT EXISTS transaction_recipient_id_idx ON transaction (recipient_id)"); case 57: apply("DROP INDEX transaction_timestamp_idx"); case 58: apply("CREATE INDEX IF NOT EXISTS transaction_timestamp_idx ON transaction (timestamp DESC)"); case 59: apply("ALTER TABLE transaction ADD COLUMN IF NOT EXISTS version TINYINT"); case 60: apply("UPDATE transaction SET version = 0"); case 61: apply("ALTER TABLE transaction ALTER COLUMN version SET NOT NULL"); case 62: apply("ALTER TABLE transaction ADD COLUMN IF NOT EXISTS has_message BOOLEAN NOT NULL DEFAULT FALSE"); case 63: apply("ALTER TABLE transaction ADD COLUMN IF NOT EXISTS has_encrypted_message BOOLEAN NOT NULL DEFAULT FALSE"); case 64: apply("UPDATE transaction SET has_message = TRUE WHERE type = 1 AND subtype = 0"); case 65: apply("ALTER TABLE transaction ADD COLUMN IF NOT EXISTS has_public_key_announcement BOOLEAN NOT NULL DEFAULT FALSE"); case 66: apply("ALTER TABLE transaction ADD COLUMN IF NOT EXISTS ec_block_height INT DEFAULT NULL"); case 67: apply("ALTER TABLE transaction ADD COLUMN IF NOT EXISTS ec_block_id BIGINT DEFAULT NULL"); case 68: apply("ALTER TABLE transaction ADD COLUMN IF NOT EXISTS has_encrypttoself_message BOOLEAN NOT NULL DEFAULT FALSE"); case 69: return; default: throw new RuntimeException("Database inconsistent with code, probably trying to run older code on newer database"); } } private DbVersion() {} //never 
}
/** * Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.analytics.financial.interestrate.payments.provider; import static org.testng.AssertJUnit.assertEquals; import org.testng.annotations.Test; import org.threeten.bp.Period; import org.threeten.bp.ZonedDateTime; import com.opengamma.analytics.financial.instrument.index.GeneratorSwapFixedON; import com.opengamma.analytics.financial.instrument.index.GeneratorSwapFixedONMaster; import com.opengamma.analytics.financial.instrument.index.IborIndex; import com.opengamma.analytics.financial.instrument.index.IndexIborMaster; import com.opengamma.analytics.financial.instrument.index.IndexON; import com.opengamma.analytics.financial.instrument.payment.CouponONArithmeticAverageDefinition; import com.opengamma.analytics.financial.instrument.payment.CouponONArithmeticAverageSpreadDefinition; import com.opengamma.analytics.financial.instrument.payment.CouponONArithmeticAverageSpreadSimplifiedDefinition; import com.opengamma.analytics.financial.interestrate.payments.derivative.CouponONArithmeticAverage; import com.opengamma.analytics.financial.interestrate.payments.derivative.CouponONArithmeticAverageSpread; import com.opengamma.analytics.financial.interestrate.payments.derivative.CouponONArithmeticAverageSpreadSimplified; import com.opengamma.analytics.financial.interestrate.payments.derivative.PaymentFixed; import com.opengamma.analytics.financial.model.interestrate.curve.YieldAndDiscountCurve; import com.opengamma.analytics.financial.model.interestrate.curve.YieldCurve; import com.opengamma.analytics.financial.provider.calculator.discounting.PresentValueCurveSensitivityDiscounting2Calculator; import com.opengamma.analytics.financial.provider.calculator.discounting.PresentValueCurveSensitivityDiscountingCalculator; import com.opengamma.analytics.financial.provider.calculator.discounting.PresentValueDiscountingCalculator; import 
com.opengamma.analytics.financial.provider.description.MulticurveProviderDiscountDataSets; import com.opengamma.analytics.financial.provider.description.interestrate.MulticurveProviderDiscount; import com.opengamma.analytics.financial.provider.description.interestrate.ParameterProviderInterface; import com.opengamma.analytics.financial.provider.sensitivity.multicurve.MultipleCurrencyMulticurveSensitivity; import com.opengamma.analytics.financial.provider.sensitivity.multicurve.MultipleCurrencyParameterSensitivity; import com.opengamma.analytics.financial.provider.sensitivity.multicurve.ParameterSensitivityMulticurveDiscountInterpolatedFDCalculator; import com.opengamma.analytics.financial.provider.sensitivity.parameter.ParameterSensitivityParameterCalculator; import com.opengamma.analytics.financial.schedule.ScheduleCalculator; import com.opengamma.analytics.financial.util.AssertSensitivityObjects; import com.opengamma.analytics.math.curve.ConstantDoublesCurve; import com.opengamma.financial.convention.calendar.Calendar; import com.opengamma.timeseries.precise.zdt.ImmutableZonedDateTimeDoubleTimeSeries; import com.opengamma.timeseries.precise.zdt.ZonedDateTimeDoubleTimeSeries; import com.opengamma.util.money.Currency; import com.opengamma.util.money.MultipleCurrencyAmount; import com.opengamma.util.test.TestGroup; import com.opengamma.util.time.DateUtils; /** * Methods related to different ON Arithmetic Average coupons (standard, spread, simplified). * Pricing methods are full forward and approximated. 
*/
@Test(groups = TestGroup.UNIT)
public class CouponONArithmeticAverageDiscountingMethodTest {

  // Shared multi-curve provider and USD overnight (Fed Fund) market conventions used by every test.
  private static final MulticurveProviderDiscount MULTICURVES = MulticurveProviderDiscountDataSets.createMulticurveEurUsd();
  private static final IndexON FEDFUND = MulticurveProviderDiscountDataSets.getIndexesON()[0];
  private static final Currency USD = FEDFUND.getCurrency();
  private static final Calendar NYC = MulticurveProviderDiscountDataSets.getUSDCalendar();
  private static final GeneratorSwapFixedON GENERATOR_SWAP_EONIA = GeneratorSwapFixedONMaster.getInstance().getGenerator("USD1YFEDFUND", NYC);
  private static final IborIndex USDLIBOR3M = IndexIborMaster.getInstance().getIndex("USDLIBOR3M");
  private static final ZonedDateTime REFERENCE_DATE = DateUtils.getUTCDate(2011, 5, 23);
  private static final Period TENOR_3M = Period.ofMonths(3);
  private static final Period TENOR_1Y = Period.ofYears(1);
  private static final double NOTIONAL = 100000000; // 100m
  private static final double SPREAD = 0.0010; // 10 bps
  // Coupon start date one year forward from the reference date (used for "forward-starting" coupons).
  private static final ZonedDateTime FORWARD_DATE = ScheduleCalculator.getAdjustedDate(REFERENCE_DATE, TENOR_1Y, USDLIBOR3M, NYC);
  /** Time series of overnight fixings used to build coupons with an already-accrued period. **/
  private static final ZonedDateTimeDoubleTimeSeries TS_ON = ImmutableZonedDateTimeDoubleTimeSeries.ofUTC(
      new ZonedDateTime[] {DateUtils.getUTCDate(2011, 5, 23), DateUtils.getUTCDate(2011, 5, 24), DateUtils.getUTCDate(2011, 5, 25),
          DateUtils.getUTCDate(2011, 5, 26), DateUtils.getUTCDate(2011, 5, 27), DateUtils.getUTCDate(2011, 5, 30),
          DateUtils.getUTCDate(2011, 5, 31), DateUtils.getUTCDate(2011, 6, 1), DateUtils.getUTCDate(2011, 6, 2),
          DateUtils.getUTCDate(2011, 6, 3), DateUtils.getUTCDate(2011, 6, 6)},
      new double[] {0.0500, 0.0100, 0.0100, 0.0100, 0.0100, 0.0100, 0.0200, 0.0200, 0.0200, 0.0200, 0.0200});
  /** Coupon ON AA (arithmetic-average overnight coupons, no spread). */
  private static final CouponONArithmeticAverageDefinition FEDFUND_CPN_3M_DEF = CouponONArithmeticAverageDefinition.from(FEDFUND, REFERENCE_DATE, TENOR_3M, NOTIONAL, 0,
      USDLIBOR3M.getBusinessDayConvention(), true, NYC);
  private static final CouponONArithmeticAverage FEDFUND_CPN_3M = FEDFUND_CPN_3M_DEF.toDerivative(REFERENCE_DATE);
  // Same 3M coupon seen after several fixings have occurred (valuation date inside the accrual period).
  private static final CouponONArithmeticAverage FEDFUND_CPN_3M_ACCRUED = (CouponONArithmeticAverage) FEDFUND_CPN_3M_DEF.toDerivative(DateUtils.getUTCDate(2011, 6, 7), TS_ON);
  private static final CouponONArithmeticAverageDefinition FEDFUND_CPN_3MFWD_DEF = CouponONArithmeticAverageDefinition.from(FEDFUND, FORWARD_DATE, TENOR_3M, NOTIONAL, 0,
      USDLIBOR3M.getBusinessDayConvention(), true, NYC);
  private static final CouponONArithmeticAverage FEDFUND_CPN_3MFWD = FEDFUND_CPN_3MFWD_DEF.toDerivative(REFERENCE_DATE);
  private static final CouponONArithmeticAverageDefinition FEDFUND_CPN_1Y_DEF = CouponONArithmeticAverageDefinition.from(FEDFUND, REFERENCE_DATE, TENOR_1Y, NOTIONAL, 0,
      USDLIBOR3M.getBusinessDayConvention(), true, NYC);
  private static final CouponONArithmeticAverage FEDFUND_CPN_1Y = FEDFUND_CPN_1Y_DEF.toDerivative(REFERENCE_DATE);
  /** Coupon ON AA - spread (arithmetic-average overnight coupons with a fixed spread). */
  private static final CouponONArithmeticAverageSpreadDefinition FEDFUND_CPN_SPREAD_3M_DEF = CouponONArithmeticAverageSpreadDefinition.from(FEDFUND, REFERENCE_DATE, TENOR_3M, NOTIONAL, 0,
      USDLIBOR3M.getBusinessDayConvention(), true, SPREAD, NYC);
  // private static final CouponONArithmeticAverageSpread FEDFUND_CPN_SPREAD_3M = FEDFUND_CPN_SPREAD_3M_DEF.toDerivative(REFERENCE_DATE);
  private static final CouponONArithmeticAverageSpread FEDFUND_CPN_SPREAD_3M_ACCRUED = (CouponONArithmeticAverageSpread) FEDFUND_CPN_SPREAD_3M_DEF.toDerivative(DateUtils.getUTCDate(2011, 6, 7), TS_ON);
  private static final CouponONArithmeticAverageSpreadDefinition FEDFUND_CPN_SPREAD_3MFWD_DEF = CouponONArithmeticAverageSpreadDefinition.from(FEDFUND, FORWARD_DATE, TENOR_3M, NOTIONAL, 0,
      USDLIBOR3M.getBusinessDayConvention(), true, SPREAD, NYC);
  private static final CouponONArithmeticAverageSpread FEDFUND_CPN_SPREAD_3MFWD = FEDFUND_CPN_SPREAD_3MFWD_DEF.toDerivative(REFERENCE_DATE);
  // Spread coupon with a 0 spread: should price exactly like the no-spread coupon (checked below).
  private static final CouponONArithmeticAverageSpreadDefinition FEDFUND_CPN_SPREAD0_3MFWD_DEF = CouponONArithmeticAverageSpreadDefinition.from(FEDFUND, FORWARD_DATE, TENOR_3M, NOTIONAL, 0,
      USDLIBOR3M.getBusinessDayConvention(), true, 0.0d, NYC);
  private static final CouponONArithmeticAverageSpread FEDFUND_CPN_SPREAD0_3MFWD = FEDFUND_CPN_SPREAD0_3MFWD_DEF.toDerivative(REFERENCE_DATE);
  /** Coupon ON AA - spread simplified (simplified representation, no per-fixing detail). */
  private static final CouponONArithmeticAverageSpreadSimplifiedDefinition FEDFUND_CPN_3M_SIMPL0_DEFINITION = CouponONArithmeticAverageSpreadSimplifiedDefinition.from(FEDFUND, FORWARD_DATE,
      FORWARD_DATE.plusMonths(3), NOTIONAL, 0, 0.0, NYC);
  private static final CouponONArithmeticAverageSpreadSimplified FEDFUND_CPN_3M_SIMPL0 = FEDFUND_CPN_3M_SIMPL0_DEFINITION.toDerivative(REFERENCE_DATE);
  private static final CouponONArithmeticAverageSpreadSimplifiedDefinition FEDFUND_CPN_3M_SIMPL_DEFINITION = CouponONArithmeticAverageSpreadSimplifiedDefinition.from(FEDFUND, FORWARD_DATE,
      FORWARD_DATE.plusMonths(3), NOTIONAL, 0, SPREAD, NYC);
  private static final CouponONArithmeticAverageSpreadSimplified FEDFUND_CPN_3M_SIMPL = FEDFUND_CPN_3M_SIMPL_DEFINITION.toDerivative(REFERENCE_DATE);
  // Pricing methods under test: exact vs approximated, with and without spread, plus the simplified variant.
  private static final CouponONArithmeticAverageDiscountingMethod METHOD_FF_EXACT = CouponONArithmeticAverageDiscountingMethod.getInstance();
  private static final CouponONArithmeticAverageDiscountingApproxMethod METHOD_FF_APPRO = CouponONArithmeticAverageDiscountingApproxMethod.getInstance();
  private static final CouponONArithmeticAverageSpreadDiscountingMethod METHOD_FF_EXACT_SPREAD = CouponONArithmeticAverageSpreadDiscountingMethod.getInstance();
  private static final CouponONArithmeticAverageSpreadDiscountingApproxMethod METHOD_FF_APPRO_SPREAD = CouponONArithmeticAverageSpreadDiscountingApproxMethod.getInstance();
  private static final CouponONArithmeticAverageSpreadSimplifiedDiscountingApproxMethod METHOD_AA_SIMPL = CouponONArithmeticAverageSpreadSimplifiedDiscountingApproxMethod.getInstance();
  // Present-value and sensitivity calculators (PSC/PSC2 differ only in the underlying pvcs calculator).
  private static final PresentValueDiscountingCalculator PVDC = PresentValueDiscountingCalculator.getInstance();
  private static final PresentValueCurveSensitivityDiscountingCalculator PVCSDC = PresentValueCurveSensitivityDiscountingCalculator.getInstance();
  private static final PresentValueCurveSensitivityDiscounting2Calculator PVCSD2C = PresentValueCurveSensitivityDiscounting2Calculator.getInstance();
  private static final ParameterSensitivityParameterCalculator<ParameterProviderInterface> PSC = new ParameterSensitivityParameterCalculator<>(PVCSDC);
  private static final ParameterSensitivityParameterCalculator<ParameterProviderInterface> PSC2 = new ParameterSensitivityParameterCalculator<>(PVCSD2C);
  private static final double SHIFT = 1.0E-6; // bump size for the finite-difference sensitivity check
  private static final ParameterSensitivityMulticurveDiscountInterpolatedFDCalculator PSC_DSC_FD = new ParameterSensitivityMulticurveDiscountInterpolatedFDCalculator(PVDC, SHIFT);
  // Tolerances: absolute in currency units (PV / PV_DELTA) or relative, scaled by notional and accrual factor (REL*).
  private static final double TOLERANCE_PV = 1.0E-2;
  private static final double TOLERANCE_PV_DELTA = 1.0E+2;
  private static final double TOLERANCE_REL = 1.0E-6; // 0.01 bp
  private static final double TOLERANCE_REL_DELTA = 1.0E-3;
  private static final double TOLERANCE_REL_DELTA_2 = 1.0E-4;

  /** Exact and approximated pv agree for 3M, 1Y and forward-starting coupons; 0-spread coupons match no-spread. */
  @Test
  public void presentValueExactVsApprox() {
    final MultipleCurrencyAmount pv3MExact = METHOD_FF_EXACT.presentValue(FEDFUND_CPN_3M, MULTICURVES);
    final MultipleCurrencyAmount pv3MAppro = METHOD_FF_APPRO.presentValue(FEDFUND_CPN_3M, MULTICURVES);
    assertEquals("CouponArithmeticAverageONDiscountingMethod: present value", pv3MExact.getAmount(USD), pv3MAppro.getAmount(USD),
        TOLERANCE_REL * NOTIONAL * FEDFUND_CPN_3M.getFixingPeriodRemainingAccrualFactor());
    final MultipleCurrencyAmount pv1YExact = METHOD_FF_EXACT.presentValue(FEDFUND_CPN_1Y, MULTICURVES);
    final MultipleCurrencyAmount pv1YAppro = METHOD_FF_APPRO.presentValue(FEDFUND_CPN_1Y, MULTICURVES);
    assertEquals("CouponArithmeticAverageONDiscountingMethod: present value", pv1YExact.getAmount(USD), pv1YAppro.getAmount(USD),
        TOLERANCE_REL * NOTIONAL * FEDFUND_CPN_1Y.getFixingPeriodRemainingAccrualFactor());
    final MultipleCurrencyAmount pv3MFwdExact = METHOD_FF_EXACT.presentValue(FEDFUND_CPN_3MFWD, MULTICURVES);
    final MultipleCurrencyAmount pv3MFwdAppro = METHOD_FF_APPRO.presentValue(FEDFUND_CPN_3MFWD, MULTICURVES);
    assertEquals("CouponArithmeticAverageONDiscountingMethod: present value", pv3MFwdExact.getAmount(USD), pv3MFwdAppro.getAmount(USD),
        TOLERANCE_REL * NOTIONAL * FEDFUND_CPN_3MFWD.getFixingPeriodRemainingAccrualFactor());
    // A spread coupon with spread 0 must price identically to the plain coupon, for both methods.
    final MultipleCurrencyAmount pv3MFwdSpread0Exact = METHOD_FF_EXACT_SPREAD.presentValue(FEDFUND_CPN_SPREAD0_3MFWD, MULTICURVES);
    assertEquals("CouponArithmeticAverageONDiscountingMethod: present value", pv3MFwdSpread0Exact.getAmount(USD), pv3MFwdExact.getAmount(USD), TOLERANCE_PV);
    final MultipleCurrencyAmount pv3MFwdSpread0Appro = METHOD_FF_APPRO_SPREAD.presentValue(FEDFUND_CPN_SPREAD0_3MFWD, MULTICURVES);
    assertEquals("CouponArithmeticAverageONDiscountingMethod: present value", pv3MFwdSpread0Appro.getAmount(USD), pv3MFwdAppro.getAmount(USD), TOLERANCE_PV);
  }

  /** Exact vs approximated pv for a coupon whose accrual has already partially fixed. */
  @Test
  public void presentValueWithAccruedExactVsApprox() {
    final MultipleCurrencyAmount pv3MExact = METHOD_FF_EXACT.presentValue(FEDFUND_CPN_3M_ACCRUED, MULTICURVES);
    final MultipleCurrencyAmount pv3MAppro = METHOD_FF_APPRO.presentValue(FEDFUND_CPN_3M_ACCRUED, MULTICURVES);
    assertEquals("CouponArithmeticAverageONDiscountingMethod: present value", pv3MExact.getAmount(USD), pv3MAppro.getAmount(USD),
        TOLERANCE_REL * NOTIONAL * FEDFUND_CPN_3M.getFixingPeriodRemainingAccrualFactor());
  }

  /** Exact vs approximated pv for a partially-fixed coupon with spread. */
  @Test
  public void presentValueWithAccruedWithSpreadExactVsApprox() {
    final MultipleCurrencyAmount pv3MExact = METHOD_FF_EXACT_SPREAD.presentValue(FEDFUND_CPN_SPREAD_3M_ACCRUED, MULTICURVES);
    final MultipleCurrencyAmount pv3MAppro = METHOD_FF_APPRO_SPREAD.presentValue(FEDFUND_CPN_SPREAD_3M_ACCRUED, MULTICURVES);
    assertEquals("CouponArithmeticAverageONDiscountingMethod: present value", pv3MExact.getAmount(USD), pv3MAppro.getAmount(USD),
        TOLERANCE_REL * NOTIONAL * FEDFUND_CPN_3M.getFixingPeriodRemainingAccrualFactor());
  }

  /** Exact vs approximated pv for a forward-starting coupon with spread. */
  @Test
  public void presentValueSpreadExactVsApprox() {
    final MultipleCurrencyAmount pv3MExact = METHOD_FF_EXACT_SPREAD.presentValue(FEDFUND_CPN_SPREAD_3MFWD, MULTICURVES);
    final MultipleCurrencyAmount pv3MAppro = METHOD_FF_APPRO_SPREAD.presentValue(FEDFUND_CPN_SPREAD_3MFWD, MULTICURVES);
    assertEquals("CouponArithmeticAverageONDiscountingMethod: present value", pv3MExact.getAmount(USD), pv3MAppro.getAmount(USD),
        TOLERANCE_REL * NOTIONAL * FEDFUND_CPN_3M.getFixingPeriodRemainingAccrualFactor());
  }

  /** Simplified coupon pv matches the full coupon pv; the spread contribution equals a fixed payment of the spread amount. */
  @Test
  public void presentValueFullVsSimplified() {
    final MultipleCurrencyAmount pv3MFull = METHOD_FF_APPRO.presentValue(FEDFUND_CPN_3MFWD, MULTICURVES);
    final MultipleCurrencyAmount pv3MSimp0 = METHOD_AA_SIMPL.presentValue(FEDFUND_CPN_3M_SIMPL0, MULTICURVES);
    assertEquals("CouponONArithmeticAverageSpreadSimpleDiscountingMethod: present value", pv3MFull.getAmount(USD), pv3MSimp0.getAmount(USD), TOLERANCE_PV);
    final MultipleCurrencyAmount pv3MSimp = METHOD_AA_SIMPL.presentValue(FEDFUND_CPN_3M_SIMPL, MULTICURVES);
    final PaymentFixed spreadPayment = new PaymentFixed(USD, FEDFUND_CPN_3M_SIMPL.getPaymentTime(), FEDFUND_CPN_3M_SIMPL.getSpreadAmount());
    final MultipleCurrencyAmount pvSpread = spreadPayment.accept(PVDC, MULTICURVES);
    assertEquals("CouponONArithmeticAverageSpreadSimpleDiscountingMethod: present value", pv3MSimp0.plus(pvSpread).getAmount(USD), pv3MSimp.getAmount(USD), TOLERANCE_PV);
  }

  /** The approximated methods and the pv calculator (visitor) produce the same numbers. */
  @Test
  public void presentValueApproxMethodVsCalculator() {
    final MultipleCurrencyAmount pv3MMethod = METHOD_FF_APPRO.presentValue(FEDFUND_CPN_3M, MULTICURVES);
    final MultipleCurrencyAmount pv3MCalc = FEDFUND_CPN_3M.accept(PVDC, MULTICURVES);
    assertEquals("CouponArithmeticAverageONDiscountingMethod: present value", pv3MMethod.getAmount(USD), pv3MCalc.getAmount(USD), TOLERANCE_PV);
    final MultipleCurrencyAmount pv3MMethodSpSi = METHOD_AA_SIMPL.presentValue(FEDFUND_CPN_3M_SIMPL, MULTICURVES);
    final MultipleCurrencyAmount pv3MCalcSpSi = FEDFUND_CPN_3M_SIMPL.accept(PVDC, MULTICURVES);
    assertEquals("CouponArithmeticAverageONDiscountingMethod: present value", pv3MMethodSpSi.getAmount(USD), pv3MCalcSpSi.getAmount(USD), TOLERANCE_PV);
    final MultipleCurrencyAmount pv3MSpreadMethod = METHOD_FF_APPRO_SPREAD.presentValue(FEDFUND_CPN_SPREAD_3MFWD, MULTICURVES);
    final MultipleCurrencyAmount pv3MSpreadCalc = FEDFUND_CPN_SPREAD_3MFWD.accept(PVDC, MULTICURVES);
    assertEquals("CouponArithmeticAverageONDiscountingMethod: present value", pv3MSpreadMethod.getAmount(USD), pv3MSpreadCalc.getAmount(USD), TOLERANCE_PV);
  }

  /** Analytic (approximated) curve sensitivity agrees with a finite-difference bump of the curves. */
  @Test
  public void presentValueCurveSensitivityApprox() {
    final MultipleCurrencyParameterSensitivity pvpsApprox = PSC.calculateSensitivity(FEDFUND_CPN_3MFWD, MULTICURVES, MULTICURVES.getAllNames());
    final MultipleCurrencyParameterSensitivity pvpsFD = PSC_DSC_FD.calculateSensitivity(FEDFUND_CPN_3MFWD, MULTICURVES);
    AssertSensitivityObjects.assertEquals("CashDiscountingProviderMethod: presentValueCurveSensitivity ", pvpsApprox, pvpsFD, TOLERANCE_PV_DELTA);
  }

  /** Exact vs approximated curve sensitivity for the spread coupon. */
  @Test
  public void presentValueCurveSensitivitySpreadExactVsApprox() {
    final MultipleCurrencyParameterSensitivity pvcs3MExact = PSC2.calculateSensitivity(FEDFUND_CPN_SPREAD_3MFWD, MULTICURVES);
    final MultipleCurrencyParameterSensitivity pvcs3MAppro = PSC.calculateSensitivity(FEDFUND_CPN_SPREAD_3MFWD, MULTICURVES);; // NOTE(review): stray empty statement after ';'
    AssertSensitivityObjects.assertEquals("CouponArithmeticAverageONDiscountingMethod: present value curve sensitivity", pvcs3MExact, pvcs3MAppro, TOLERANCE_REL_DELTA_2 * NOTIONAL);
  }

  /** Simplified-coupon sensitivity equals plain-coupon sensitivity plus the fixed spread payment's sensitivity. */
  @Test
  public void presentValueCurveSensitivitySimplifiedSpread() {
    final MultipleCurrencyParameterSensitivity pvpsFwd = PSC.calculateSensitivity(FEDFUND_CPN_3MFWD, MULTICURVES, MULTICURVES.getAllNames());
    final PaymentFixed spreadPayment = new PaymentFixed(USD, FEDFUND_CPN_3M_SIMPL.getPaymentTime(), FEDFUND_CPN_3M_SIMPL.getSpreadAmount());
    final MultipleCurrencyParameterSensitivity pvpsSpread = PSC.calculateSensitivity(spreadPayment, MULTICURVES, MULTICURVES.getAllNames());
    final MultipleCurrencyParameterSensitivity pvpsExpected = pvpsFwd.plus(pvpsSpread);
    final MultipleCurrencyParameterSensitivity pvpsSpreadSimpl = PSC.calculateSensitivity(FEDFUND_CPN_3M_SIMPL, MULTICURVES, MULTICURVES.getAllNames());
    AssertSensitivityObjects.assertEquals("CashDiscountingProviderMethod: presentValueCurveSensitivity ", pvpsExpected, pvpsSpreadSimpl, TOLERANCE_PV_DELTA);
  }

  /** The approximated method and the sensitivity calculator (visitor) produce the same sensitivities. */
  @Test
  public void presentValueCurveSensitivityApproxMethodVsCalculator() {
    final MultipleCurrencyMulticurveSensitivity pvcs3MMethod = METHOD_FF_APPRO.presentValueCurveSensitivity(FEDFUND_CPN_3M, MULTICURVES);
    final MultipleCurrencyMulticurveSensitivity pvcs3MCalc = FEDFUND_CPN_3M.accept(PVCSDC, MULTICURVES);
    AssertSensitivityObjects.assertEquals("CouponArithmeticAverageONDiscountingMethod: present value", pvcs3MMethod, pvcs3MCalc, TOLERANCE_PV_DELTA);
  }

  /** Exact vs approximated curve sensitivity for the plain forward-starting coupon. */
  @Test
  public void presentValueCurveSensitivityExactVsApprox() {
    final MultipleCurrencyParameterSensitivity pvpsAppro = PSC.calculateSensitivity(FEDFUND_CPN_3MFWD, MULTICURVES, MULTICURVES.getAllNames());
    final MultipleCurrencyParameterSensitivity pvpsExact = PSC2.calculateSensitivity(FEDFUND_CPN_3MFWD, MULTICURVES, MULTICURVES.getAllNames());
    AssertSensitivityObjects.assertEquals("CouponArithmeticAverageONDiscountingMethod: present value curve sensitivity", pvpsAppro, pvpsExact,
        TOLERANCE_REL_DELTA * NOTIONAL * FEDFUND_CPN_3MFWD.getFixingPeriodRemainingAccrualFactor());
  }

  @Test(enabled = false)
  /**
   * Compare the performance of the approximated method to the exact method.
   * Disabled: benchmark only, not a correctness test.
   */
  public void performance() {
    long startTime, endTime;
    final int nbTest = 10000;
    @SuppressWarnings("unused")
    MultipleCurrencyAmount pvExact = MultipleCurrencyAmount.of(USD, 0.0);
    @SuppressWarnings("unused")
    MultipleCurrencyAmount pvAppro = MultipleCurrencyAmount.of(USD, 0.0);
    @SuppressWarnings("unused")
    MultipleCurrencyMulticurveSensitivity pvcsAppro;
    @SuppressWarnings("unused")
    MultipleCurrencyMulticurveSensitivity pvcsExact;
    // Exact method: construction + pv in the loop.
    startTime = System.currentTimeMillis();
    for (int looptest = 0; looptest < nbTest; looptest++) {
      final CouponONArithmeticAverageDefinition ffDefinition = CouponONArithmeticAverageDefinition.from(FEDFUND,
          ScheduleCalculator.getAdjustedDate(REFERENCE_DATE, TENOR_1Y, USDLIBOR3M, NYC), TENOR_3M, NOTIONAL, 0,
          USDLIBOR3M.getBusinessDayConvention(), true, NYC);
      final CouponONArithmeticAverage ff = ffDefinition.toDerivative(REFERENCE_DATE);
      pvExact = METHOD_FF_EXACT.presentValue(ff, MULTICURVES);
      // pvExact = METHOD_FF_EXACT.presentValue(FEDFUND_CPN_3MFWD, MULTICURVES);
    }
    endTime = System.currentTimeMillis();
    System.out.println("CouponArithmeticAverageONDiscountingMethod: " + nbTest + " pv Arithmetic Average ON - Exact: " + (endTime - startTime) + " ms");
    // Performance note: AA ON exact pv: 26-Mar-2013: On Mac Pro 3.2 GHz Quad-Core Intel Xeon: 200 ms for 10000 coupons.
    // Performance note: AA ON exact constr. + pv: 26-Mar-2013: On Mac Pro 3.2 GHz Quad-Core Intel Xeon: 460 ms for 10000 coupons.
    // Approximated method: construction + pv in the loop.
    startTime = System.currentTimeMillis();
    for (int looptest = 0; looptest < nbTest; looptest++) {
      final CouponONArithmeticAverageDefinition ffDefinition = CouponONArithmeticAverageDefinition.from(FEDFUND,
          ScheduleCalculator.getAdjustedDate(REFERENCE_DATE, TENOR_1Y, USDLIBOR3M, NYC), TENOR_3M, NOTIONAL, 0,
          USDLIBOR3M.getBusinessDayConvention(), true, NYC);
      final CouponONArithmeticAverage ff = ffDefinition.toDerivative(REFERENCE_DATE);
      pvAppro = METHOD_FF_APPRO.presentValue(ff, MULTICURVES);
      // pvAppro = METHOD_FF_APPRO.presentValue(FEDFUND_CPN_3MFWD, MULTICURVES);
    }
    endTime = System.currentTimeMillis();
    System.out.println("CouponArithmeticAverageONDiscountingMethod: " + nbTest + " pv Arithmetic Average ON - Approximation: " + (endTime - startTime) + " ms");
    // Performance note: AA ON approx pv: 26-Mar-2013: On Mac Pro 3.2 GHz Quad-Core Intel Xeon: 5 ms for 10000 coupons.
    // Performance note: AA ON approx constr. + pv: 26-Mar-2013: On Mac Pro 3.2 GHz Quad-Core Intel Xeon: 250 ms for 10000 coupons.
    // Exact method: curve sensitivity only.
    startTime = System.currentTimeMillis();
    for (int looptest = 0; looptest < nbTest; looptest++) {
      pvcsExact = METHOD_FF_EXACT.presentValueCurveSensitivity(FEDFUND_CPN_3MFWD, MULTICURVES);
    }
    endTime = System.currentTimeMillis();
    System.out.println("CouponArithmeticAverageONDiscountingMethod: " + nbTest + " pvcs Arithmetic Average ON - Exact: " + (endTime - startTime) + " ms");
    // Performance note: AA ON exact pvcs: 26-Mar-2013: On Mac Pro 3.2 GHz Quad-Core Intel Xeon: 220 ms for 10000 coupons.
    // Approximated method: curve sensitivity only.
    startTime = System.currentTimeMillis();
    for (int looptest = 0; looptest < nbTest; looptest++) {
      pvcsAppro = METHOD_FF_APPRO.presentValueCurveSensitivity(FEDFUND_CPN_3MFWD, MULTICURVES);
    }
    endTime = System.currentTimeMillis();
    System.out.println("CouponArithmeticAverageONDiscountingMethod: " + nbTest + " pvcs Arithmetic Average ON - Approximation: " + (endTime - startTime) + " ms");
    // Performance note: AA ON approx pvcs: 26-Mar-2013: On Mac Pro 3.2 GHz Quad-Core Intel Xeon: 12 ms for 10000 coupons.
  }

  @Test(enabled = false)
  /**
   * Reports the error of the arithmetic average approximation by the log of the compounded rate.
   * Disabled: diagnostic report only, no assertions.
   */
  public void averageApproximation() {
    // Flat curves at several rate levels; for each level and start date compare the compounded payoff,
    // the true arithmetic-average payoff and the log(1 + compounded) approximation.
    final MulticurveProviderDiscount multicurvesCst = new MulticurveProviderDiscount();
    YieldAndDiscountCurve curveCst = YieldCurve.from(ConstantDoublesCurve.from(0.0, "CST"));
    multicurvesCst.setCurve(FEDFUND, curveCst);
    final double[] rateLevel = {0.01, 0.05, 0.10 };
    final int nbLevel = rateLevel.length;
    final int nbStart = 36;
    final Period step = Period.ofMonths(1);
    final ZonedDateTime[] effectiveDate = new ZonedDateTime[nbStart];
    effectiveDate[0] = ScheduleCalculator.getAdjustedDate(REFERENCE_DATE, GENERATOR_SWAP_EONIA.getSpotLag(), NYC);
    final double[][] payComp = new double[nbLevel][nbStart];
    final double[][] payAA = new double[nbLevel][nbStart];
    final double[][] payAAApprox = new double[nbLevel][nbStart];
    final double[][] rateComp = new double[nbLevel][nbStart];
    final double[][] rateAA = new double[nbLevel][nbStart];
    final double[][] rateAAApprox = new double[nbLevel][nbStart];
    for (int looplevel = 0; looplevel < nbLevel; looplevel++) {
      curveCst = YieldCurve.from(ConstantDoublesCurve.from(rateLevel[looplevel], "CST"));
      multicurvesCst.replaceCurve(FEDFUND, curveCst);
      for (int loopstart = 0; loopstart < nbStart; loopstart++) {
        effectiveDate[loopstart] = ScheduleCalculator.getAdjustedDate(effectiveDate[0], step.multipliedBy(loopstart), USDLIBOR3M, NYC);
        final ZonedDateTime endDate = ScheduleCalculator.getAdjustedDate(effectiveDate[loopstart], TENOR_3M, USDLIBOR3M, NYC);
        final CouponONArithmeticAverageDefinition cpnONDefinition = CouponONArithmeticAverageDefinition.from(FEDFUND, effectiveDate[loopstart], endDate, NOTIONAL, 0, NYC);
        final CouponONArithmeticAverage cpnON = cpnONDefinition.toDerivative(REFERENCE_DATE);
        // Compute daily forwards
        final int nbON = cpnON.getFixingPeriodAccrualFactors().length;
        final double fwdON[] = new double[nbON];
        for (int loopon = 0; loopon < nbON; loopon++) {
          fwdON[loopon] = multicurvesCst.getSimplyCompoundForwardRate(FEDFUND, cpnON.getFixingPeriodStartTimes()[loopon], cpnON.getFixingPeriodEndTimes()[loopon],
              cpnON.getFixingPeriodAccrualFactors()[loopon]);
        }
        // Compounded period forward
        payComp[looplevel][loopstart] = multicurvesCst.getSimplyCompoundForwardRate(FEDFUND, cpnON.getFixingPeriodStartTimes()[0], cpnON.getFixingPeriodStartTimes()[nbON],
            cpnON.getFixingPeriodRemainingAccrualFactor()) * cpnON.getFixingPeriodRemainingAccrualFactor();
        payAA[looplevel][loopstart] = 0;
        for (int loopon = 0; loopon < nbON; loopon++) {
          payAA[looplevel][loopstart] += fwdON[loopon] * cpnON.getFixingPeriodAccrualFactors()[loopon];
        }
        payAAApprox[looplevel][loopstart] = Math.log(1 + payComp[looplevel][loopstart]);
        rateComp[looplevel][loopstart] = payComp[looplevel][loopstart] / cpnON.getFixingPeriodRemainingAccrualFactor();
        rateAA[looplevel][loopstart] = payAA[looplevel][loopstart] / cpnON.getFixingPeriodRemainingAccrualFactor();
        rateAAApprox[looplevel][loopstart] = payAAApprox[looplevel][loopstart] / cpnON.getFixingPeriodRemainingAccrualFactor();
      }
    }
    // int t = 0;
    // t++;
  }

}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.dbcp2; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.SQLWarning; import java.sql.Statement; /** * A dummy {@link Statement}, for testing purposes. 
* * @author Rodney Waldhoff * @author Dirk Verbeeck * @version $Id: TesterStatement.java 1649424 2015-01-04 21:15:33Z tn $ */ public class TesterStatement implements Statement { public TesterStatement(Connection conn) { _connection = conn; } public TesterStatement(Connection conn, int resultSetType, int resultSetConcurrency) { _connection = conn; _resultSetType = resultSetType; _resultSetConcurrency = resultSetConcurrency; } protected Connection _connection = null; protected boolean _open = true; protected int _rowsUpdated = 1; protected boolean _executeResponse = true; protected int _maxFieldSize = 1024; protected int _maxRows = 1024; protected boolean _escapeProcessing = false; protected int _queryTimeout = 1000; protected String _cursorName = null; protected int _fetchDirection = 1; protected int _fetchSize = 1; protected int _resultSetConcurrency = 1; protected int _resultSetType = 1; protected ResultSet _resultSet = null; @Override public ResultSet executeQuery(String sql) throws SQLException { checkOpen(); if("null".equals(sql)) { return null; } if("invalid".equals(sql)) { throw new SQLException("invalid query"); } if ("broken".equals(sql)) { throw new SQLException("broken connection"); } if("select username".equals(sql)) { String username = ((TesterConnection) _connection).getUsername(); Object[][] data = {{username}}; return new TesterResultSet(this, data); } else { // Simulate timeout if queryTimout is set to less than 5 seconds if (_queryTimeout > 0 && _queryTimeout < 5) { throw new SQLException("query timeout"); } return new TesterResultSet(this); } } @Override public int executeUpdate(String sql) throws SQLException { checkOpen(); return _rowsUpdated; } @Override public void close() throws SQLException { // calling close twice has no effect if (!_open) { return; } _open = false; if (_resultSet != null) { _resultSet.close(); _resultSet = null; } } @Override public int getMaxFieldSize() throws SQLException { checkOpen(); return _maxFieldSize; } @Override 
public void setMaxFieldSize(int max) throws SQLException { checkOpen(); _maxFieldSize = max; } @Override public int getMaxRows() throws SQLException { checkOpen(); return _maxRows; } @Override public void setMaxRows(int max) throws SQLException { checkOpen(); _maxRows = max; } @Override public void setEscapeProcessing(boolean enable) throws SQLException { checkOpen(); _escapeProcessing = enable; } @Override public int getQueryTimeout() throws SQLException { checkOpen(); return _queryTimeout; } @Override public void setQueryTimeout(int seconds) throws SQLException { checkOpen(); _queryTimeout = seconds; } @Override public void cancel() throws SQLException { checkOpen(); } @Override public SQLWarning getWarnings() throws SQLException { checkOpen(); return null; } @Override public void clearWarnings() throws SQLException { checkOpen(); } @Override public void setCursorName(String name) throws SQLException { checkOpen(); _cursorName = name; } @Override public boolean execute(String sql) throws SQLException { checkOpen(); if("invalid".equals(sql)) { throw new SQLException("invalid query"); } return _executeResponse; } @Override public ResultSet getResultSet() throws SQLException { checkOpen(); if (_resultSet == null) { _resultSet = new TesterResultSet(this); } return _resultSet; } @Override public int getUpdateCount() throws SQLException { checkOpen(); return _rowsUpdated; } @Override public boolean getMoreResults() throws SQLException { checkOpen(); return false; } @Override public void setFetchDirection(int direction) throws SQLException { checkOpen(); _fetchDirection = direction; } @Override public int getFetchDirection() throws SQLException { checkOpen(); return _fetchDirection; } @Override public void setFetchSize(int rows) throws SQLException { checkOpen(); _fetchSize = rows; } @Override public int getFetchSize() throws SQLException { checkOpen(); return _fetchSize; } @Override public int getResultSetConcurrency() throws SQLException { checkOpen(); return 
_resultSetConcurrency; } @Override public int getResultSetType() throws SQLException { checkOpen(); return _resultSetType; } @Override public void addBatch(String sql) throws SQLException { checkOpen(); } @Override public void clearBatch() throws SQLException { checkOpen(); } @Override public int[] executeBatch() throws SQLException { checkOpen(); return new int[0]; } @Override public Connection getConnection() throws SQLException { checkOpen(); return _connection; } protected void checkOpen() throws SQLException { if(!_open) { throw new SQLException("Connection is closed."); } } @Override public boolean getMoreResults(int current) throws SQLException { throw new SQLException("Not implemented."); } @Override public ResultSet getGeneratedKeys() throws SQLException { return new TesterResultSet(this); } @Override public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException { throw new SQLException("Not implemented."); } @Override public int executeUpdate(String sql, int columnIndexes[]) throws SQLException { throw new SQLException("Not implemented."); } @Override public int executeUpdate(String sql, String columnNames[]) throws SQLException { throw new SQLException("Not implemented."); } @Override public boolean execute(String sql, int autoGeneratedKeys) throws SQLException { throw new SQLException("Not implemented."); } @Override public boolean execute(String sql, int columnIndexes[]) throws SQLException { throw new SQLException("Not implemented."); } @Override public boolean execute(String sql, String columnNames[]) throws SQLException { throw new SQLException("Not implemented."); } @Override public int getResultSetHoldability() throws SQLException { checkOpen(); throw new SQLException("Not implemented."); } @Override public boolean isWrapperFor(Class<?> iface) throws SQLException { throw new SQLException("Not implemented."); } @Override public <T> T unwrap(Class<T> iface) throws SQLException { throw new SQLException("Not implemented."); } 
@Override public boolean isClosed() throws SQLException { return !_open; } @Override public void setPoolable(boolean poolable) throws SQLException { throw new SQLException("Not implemented."); } @Override public boolean isPoolable() throws SQLException { throw new SQLException("Not implemented."); } @Override public void closeOnCompletion() throws SQLException { throw new SQLException("Not implemented."); } @Override public boolean isCloseOnCompletion() throws SQLException { throw new SQLException("Not implemented."); } }
/*
 * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
 * under one or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information regarding copyright
 * ownership. Camunda licenses this file to you under the Apache License,
 * Version 2.0; you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.camunda.bpm.container.impl.jboss.config;

import org.camunda.bpm.container.impl.metadata.spi.ProcessEnginePluginXml;
import org.camunda.bpm.engine.ProcessEngineException;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Metadata describing a container-managed process engine: engine name, datasource,
 * history level, configuration class and properties. The legacy "fox" properties
 * are separated from the regular configuration properties at construction time.
 *
 * @author Daniel Meyer
 * @author Thorben Lindhauer
 */
public class ManagedProcessEngineMetadata {

  /** indicates whether the process engine should automatically create /
   * update the database schema upon startup */
  public static final String PROP_IS_AUTO_SCHEMA_UPDATE = "isAutoSchemaUpdate";

  /** indicates whether the identity module is used and if this tables are
   * required */
  public static final String PROP_IS_IDENTITY_USED = "isIdentityUsed";

  /** indicates whether the job executor should be automatically activated */
  public static final String PROP_IS_ACTIVATE_JOB_EXECUTOR = "isActivateJobExecutor";

  /** the prefix to be used for all process engine database tables */
  public static final String PROP_DB_TABLE_PREFIX = "dbTablePrefix";

  /** the name of the platform job executor acquisition to use */
  public static final String PROP_JOB_EXECUTOR_ACQUISITION_NAME = "jobExecutorAcquisitionName";

  private boolean isDefault;
  private String engineName;
  private String datasourceJndiName;
  private String historyLevel;
  protected String configuration;
  // Properties not recognized as legacy fox properties.
  private Map<String, String> configurationProperties;
  // The subset of properties listed by the PROP_* constants above.
  private Map<String, String> foxLegacyProperties;
  private List<ProcessEnginePluginXml> pluginConfigurations;

  /**
   * @param isDefault whether this is the default engine of the container
   * @param engineName the process engine name (required, see {@link #validate()})
   * @param datasourceJndiName JNDI name of the datasource (required, see {@link #validate()})
   * @param historyLevel the configured history level
   * @param configuration the configuration class name
   * @param properties all configured properties; split into regular and legacy fox properties
   * @param pluginConfigurations the configured process engine plugins
   */
  public ManagedProcessEngineMetadata(boolean isDefault, String engineName, String datasourceJndiName,
      String historyLevel, String configuration, Map<String, String> properties,
      List<ProcessEnginePluginXml> pluginConfigurations) {
    this.isDefault = isDefault;
    this.engineName = engineName;
    this.datasourceJndiName = datasourceJndiName;
    this.historyLevel = historyLevel;
    this.configuration = configuration;
    this.configurationProperties = selectProperties(properties, false);
    this.foxLegacyProperties = selectProperties(properties, true);
    this.pluginConfigurations = pluginConfigurations;
  }

  public boolean isDefault() {
    return isDefault;
  }

  public void setDefault(boolean isDefault) {
    this.isDefault = isDefault;
  }

  public String getEngineName() {
    return engineName;
  }

  public void setEngineName(String engineName) {
    this.engineName = engineName;
  }

  public String getDatasourceJndiName() {
    return datasourceJndiName;
  }

  public void setDatasourceJndiName(String datasourceJndiName) {
    this.datasourceJndiName = datasourceJndiName;
  }

  public String getHistoryLevel() {
    return historyLevel;
  }

  public void setHistoryLevel(String historyLevel) {
    this.historyLevel = historyLevel;
  }

  public String getConfiguration() {
    return configuration;
  }

  public void setConfiguration(String configuration) {
    this.configuration = configuration;
  }

  public Map<String, String> getConfigurationProperties() {
    return configurationProperties;
  }

  public void setConfigurationProperties(Map<String, String> properties) {
    this.configurationProperties = properties;
  }

  public Map<String, String> getFoxLegacyProperties() {
    return foxLegacyProperties;
  }

  public void setFoxLegacyProperties(Map<String, String> foxLegacyProperties) {
    this.foxLegacyProperties = foxLegacyProperties;
  }

  public List<ProcessEnginePluginXml> getPluginConfigurations() {
    return pluginConfigurations;
  }

  public void setPluginConfigurations(List<ProcessEnginePluginXml> pluginConfigurations) {
    this.pluginConfigurations = pluginConfigurations;
  }

  /** @return the configured value of {@link #PROP_IS_IDENTITY_USED}, defaulting to {@code true} if unset */
  public boolean isIdentityUsed() {
    String value = getFoxLegacyProperties().get(PROP_IS_IDENTITY_USED);
    if (value == null) {
      return true;
    } else {
      return Boolean.parseBoolean(value);
    }
  }

  /** @return the configured value of {@link #PROP_IS_AUTO_SCHEMA_UPDATE}, defaulting to {@code true} if unset */
  public boolean isAutoSchemaUpdate() {
    String value = getFoxLegacyProperties().get(PROP_IS_AUTO_SCHEMA_UPDATE);
    if (value == null) {
      return true;
    } else {
      return Boolean.parseBoolean(value);
    }
  }

  /** @return the configured value of {@link #PROP_IS_ACTIVATE_JOB_EXECUTOR}, defaulting to {@code true} if unset */
  public boolean isActivateJobExecutor() {
    String value = getFoxLegacyProperties().get(PROP_IS_ACTIVATE_JOB_EXECUTOR);
    if (value == null) {
      return true;
    } else {
      return Boolean.parseBoolean(value);
    }
  }

  /** @return the configured table prefix, or {@code null} if unset */
  public String getDbTablePrefix() {
    return getFoxLegacyProperties().get(PROP_DB_TABLE_PREFIX);
  }

  /** @return the configured job executor acquisition name, defaulting to {@code "default"} if unset */
  public String getJobExecutorAcquisitionName() {
    String value = getFoxLegacyProperties().get(PROP_JOB_EXECUTOR_ACQUISITION_NAME);
    if (value == null) {
      return "default";
    } else {
      return value;
    }
  }

  /**
   * validates the configuration and throws {@link ProcessEngineException}
   * if the configuration is invalid.
   */
  public void validate() {
    StringBuilder validationErrorBuilder = new StringBuilder("Process engine configuration is invalid: \n");
    boolean isValid = true;

    if (datasourceJndiName == null || datasourceJndiName.isEmpty()) {
      isValid = false;
      validationErrorBuilder.append(" property 'datasource' cannot be null \n");
    }
    if (engineName == null || engineName.isEmpty()) {
      isValid = false;
      validationErrorBuilder.append(" property 'engineName' cannot be null \n");
    }

    for (int i = 0; i < pluginConfigurations.size(); i++) {
      ProcessEnginePluginXml pluginConfiguration = pluginConfigurations.get(i);
      if (pluginConfiguration.getPluginClass() == null || pluginConfiguration.getPluginClass().isEmpty()) {
        isValid = false;
        validationErrorBuilder.append(" property 'class' in plugin[").append(i).append("] cannot be null \n");
      }
    }

    if (!isValid) {
      throw new ProcessEngineException(validationErrorBuilder.toString());
    }
  }

  /**
   * Splits the given properties: with {@code selectFoxProperties} set, returns only the
   * legacy fox properties (the {@code PROP_*} keys); otherwise returns a copy of all
   * properties with the legacy fox keys removed.
   */
  private Map<String, String> selectProperties(Map<String, String> allProperties, boolean selectFoxProperties) {
    Map<String, String> result;
    if (selectFoxProperties) {
      result = new HashMap<>();
      String isAutoSchemaUpdate = allProperties.get(PROP_IS_AUTO_SCHEMA_UPDATE);
      String isActivateJobExecutor = allProperties.get(PROP_IS_ACTIVATE_JOB_EXECUTOR);
      String isIdentityUsed = allProperties.get(PROP_IS_IDENTITY_USED);
      String dbTablePrefix = allProperties.get(PROP_DB_TABLE_PREFIX);
      String jobExecutorAcquisitionName = allProperties.get(PROP_JOB_EXECUTOR_ACQUISITION_NAME);
      if (isAutoSchemaUpdate != null) {
        result.put(PROP_IS_AUTO_SCHEMA_UPDATE, isAutoSchemaUpdate);
      }
      if (isActivateJobExecutor != null) {
        result.put(PROP_IS_ACTIVATE_JOB_EXECUTOR, isActivateJobExecutor);
      }
      if (isIdentityUsed != null) {
        result.put(PROP_IS_IDENTITY_USED, isIdentityUsed);
      }
      if (dbTablePrefix != null) {
        result.put(PROP_DB_TABLE_PREFIX, dbTablePrefix);
      }
      if (jobExecutorAcquisitionName != null) {
        result.put(PROP_JOB_EXECUTOR_ACQUISITION_NAME, jobExecutorAcquisitionName);
      }
    } else {
      result = new HashMap<>(allProperties);
      result.remove(PROP_IS_AUTO_SCHEMA_UPDATE);
      result.remove(PROP_IS_ACTIVATE_JOB_EXECUTOR);
      result.remove(PROP_IS_IDENTITY_USED);
      result.remove(PROP_DB_TABLE_PREFIX);
      result.remove(PROP_JOB_EXECUTOR_ACQUISITION_NAME);
    }
    return result;
  }

}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.ivy; import org.apache.ivy.core.retrieve.RetrieveOptions; import org.apache.ivy.util.CacheCleaner; import org.apache.ivy.util.cli.CommandLine; import org.apache.ivy.util.cli.ParseException; import org.junit.After; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.rules.TemporaryFolder; import java.io.File; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Arrays; import java.util.HashSet; import java.util.Set; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; public class MainTest { private File cache; @Rule public ExpectedException expExc = ExpectedException.none(); @Rule public TemporaryFolder tempDir = new TemporaryFolder(); @Before public void setUp() { cache = new File("build/cache"); System.setProperty("ivy.cache.dir", cache.getAbsolutePath()); } @After public void tearDown() { CacheCleaner.deleteDir(cache); } @Test public void testHelp() throws Exception { run(new String[] {"-?"}); } @Test public 
void testBadOption() throws Exception { expExc.expect(ParseException.class); expExc.expectMessage("Unrecognized option: -bad"); run(new String[] {"-bad"}); } @Test public void testMissingParameter() throws Exception { expExc.expect(ParseException.class); expExc.expectMessage("no argument for: ivy"); run(new String[] {"-ivy"}); } @Test public void testResolveSimple() throws Exception { run(new String[] {"-settings", "test/repositories/ivysettings.xml", "-ivy", "test/repositories/1/org1/mod1.1/ivys/ivy-1.0.xml"}); assertTrue(new File("build/cache/org1/mod1.2/ivy-2.0.xml").exists()); } @Test public void testResolveSimpleWithConfs() throws Exception { run(new String[] {"-settings", "test/repositories/ivysettings.xml", "-ivy", "test/repositories/1/org1/mod1.1/ivys/ivy-1.0.xml", "-confs", "default"}); assertTrue(new File("build/cache/org1/mod1.2/ivy-2.0.xml").exists()); } @Test public void testResolveSimpleWithConfs2() throws Exception { run(new String[] {"-settings", "test/repositories/ivysettings.xml", "-confs", "default", "-ivy", "test/repositories/1/org1/mod1.1/ivys/ivy-1.0.xml"}); assertTrue(new File("build/cache/org1/mod1.2/ivy-2.0.xml").exists()); } @Test public void testExtraParams1() throws Exception { String[] params = new String[] {"-settings", "test/repositories/ivysettings.xml", "-confs", "default", "-ivy", "test/repositories/1/org1/mod1.1/ivys/ivy-1.0.xml", "foo1", "foo2"}; CommandLine line = Main.getParser().parse(params); String[] leftOver = line.getLeftOverArgs(); assertNotNull(leftOver); assertEquals(2, leftOver.length); assertEquals("foo1", leftOver[0]); assertEquals("foo2", leftOver[1]); } @Test public void testExtraParams2() throws Exception { String[] params = new String[] {"-settings", "test/repositories/ivysettings.xml", "-confs", "default", "-ivy", "test/repositories/1/org1/mod1.1/ivys/ivy-1.0.xml", "--", "foo1", "foo2"}; CommandLine line = Main.getParser().parse(params); String[] leftOver = line.getLeftOverArgs(); assertNotNull(leftOver); 
assertEquals(2, leftOver.length); assertEquals("foo1", leftOver[0]); assertEquals("foo2", leftOver[1]); } @Test public void testExtraParams3() throws Exception { String[] params = new String[] {"-settings", "test/repositories/ivysettings.xml", "-confs", "default", "-ivy", "test/repositories/1/org1/mod1.1/ivys/ivy-1.0.xml"}; CommandLine line = Main.getParser().parse(params); String[] leftOver = line.getLeftOverArgs(); assertNotNull(leftOver); assertEquals(0, leftOver.length); } /** * Test case for IVY-1355. * {@code types} argument to the command line must be parsed correctly when it's passed * more than one value for the argument. * * @throws Exception if something goes wrong * @see <a href="https://issues.apache.org/jira/browse/IVY-1355">IVY-1355</a> */ @Test public void testTypes() throws Exception { final String[] params = new String[]{"-settings", "test/repositories/ivysettings.xml", "-retrieve", "build/test/main/retrieve/[module]/[conf]/[artifact]-[revision].[ext]", "-types", "jar", "source"}; final CommandLine parsedCommand = Main.getParser().parse(params); final String[] parsedTypes = parsedCommand.getOptionValues("types"); assertNotNull("Values for types argument is missing", parsedTypes); assertEquals("Unexpected number of values parsed for types argument", 2, parsedTypes.length); final Set<String> uniqueParsedTypes = new HashSet<>(Arrays.asList(parsedTypes)); assertTrue("jar type is missing from the parsed types argument", uniqueParsedTypes.contains("jar")); assertTrue("jar type is missing from the parsed types argument", uniqueParsedTypes.contains("source")); } /** * Tests that the {@code overwriteMode} passed for the retrieve command works as expected * * @throws Exception if something goes wrong */ @Test public void testRetrieveOverwriteMode() throws Exception { final String[] args = new String[]{"-settings", "test/repositories/ivysettings.xml", "-retrieve", "build/test/main/retrieve/overwrite-test/[artifact].[ext]", "-overwriteMode", "different", 
"-ivy", "test/repositories/1/org/mod1/ivys/ivy-5.0.xml"}; final CommandLine parsedCommand = Main.getParser().parse(args); final String parsedOverwriteMode = parsedCommand.getOptionValue("overwriteMode"); assertEquals("Unexpected overwriteMode parsed", RetrieveOptions.OVERWRITEMODE_DIFFERENT, parsedOverwriteMode); // create a dummy file which we expect the retrieve task to overwrite final Path retrieveArtifactPath = Paths.get("build/test/main/retrieve/overwrite-test/foo-bar.jar"); Files.createDirectories(retrieveArtifactPath.getParent()); Files.write(retrieveArtifactPath, new byte[0]); assertEquals("Unexpected content at " + retrieveArtifactPath, 0, Files.readAllBytes(retrieveArtifactPath).length); // issue the retrieve (which retrieves the org:foo-bar:2.3.4 artifact) run(args); // expect the existing jar to be overwritten assertTrue("Content at " + retrieveArtifactPath + " was not overwritten by retrieve task", Files.readAllBytes(retrieveArtifactPath).length > 0); } /** * Tests that the {@code makepom} option works as expected * * @throws Exception if something goes wrong */ @Test public void testMakePom() throws Exception { final String pomFilePath = this.tempDir.getRoot().getAbsolutePath() + File.separator + "testmakepom.xml"; final String[] args = new String[]{"-settings", "test/repositories/ivysettings.xml", "-makepom", pomFilePath, "-ivy", "test/repositories/1/org1/mod1.1/ivys/ivy-1.0.xml"}; final CommandLine parsedCommand = Main.getParser().parse(args); final String parsedMakePomPath = parsedCommand.getOptionValue("makepom"); assertEquals("Unexpected makepom parsed", pomFilePath, parsedMakePomPath); assertFalse("pom file " + pomFilePath + " already exists", new File(pomFilePath).exists()); // run the command run(args); assertTrue("pom file hasn't been generated at " + pomFilePath, new File(pomFilePath).isFile()); } /** * Tests that the ivy command can use a URL for the {@code -settings} option. 
See IVY-1615 */ @Test public void testSettingsURL() throws Exception { final URL settingsURL = new File("test/repositories/ivysettings.xml").toURI().toURL(); run(new String[] {"-settings", settingsURL.toString(), "-ivy", "test/repositories/1/org1/mod1.1/ivys/ivy-1.0.xml"}); assertTrue(new File("build/cache/org1/mod1.2/ivy-2.0.xml").exists()); } private void run(String[] args) throws Exception { Main.run(Main.getParser(), args); } }
/* * Copyright 2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.restassured.internal.print; import io.restassured.filter.log.LogDetail; import io.restassured.http.Cookie; import io.restassured.http.Cookies; import io.restassured.http.Header; import io.restassured.http.Headers; import io.restassured.internal.NoParameterValue; import io.restassured.internal.support.Prettifier; import io.restassured.specification.FilterableRequestSpecification; import io.restassured.specification.MultiPartSpecification; import io.restassured.specification.ProxySpecification; import org.apache.commons.lang3.StringUtils; import java.io.PrintStream; import java.util.List; import java.util.Map; import static io.restassured.filter.log.LogDetail.*; /** * A request printer can be used to print a request. 
*/ public class RequestPrinter { private static final String TAB = "\t"; private static final String NEW_LINE = System.getProperty("line.separator"); private static final String EQUALS = "="; private static final String NONE = "<none>"; public static String print(FilterableRequestSpecification requestSpec, String requestMethod, String completeRequestUri, LogDetail logDetail, PrintStream stream, boolean shouldPrettyPrint) { final StringBuilder builder = new StringBuilder(); if (logDetail == ALL || logDetail == METHOD) { addSingle(builder, "Request method:", requestMethod); } if (logDetail == ALL || logDetail == URI) { addSingle(builder, "Request URI:", completeRequestUri); } if (logDetail == ALL) { addProxy(requestSpec, builder); } if (logDetail == ALL || logDetail == PARAMS) { addMapDetails(builder, "Request params:", requestSpec.getRequestParams()); addMapDetails(builder, "Query params:", requestSpec.getQueryParams()); addMapDetails(builder, "Form params:", requestSpec.getFormParams()); addMapDetails(builder, "Path params:", requestSpec.getNamedPathParams()); addMultiParts(requestSpec, builder); } if (logDetail == ALL || logDetail == HEADERS) { addHeaders(requestSpec, builder); } if (logDetail == ALL || logDetail == COOKIES) { addCookies(requestSpec, builder); } if (logDetail == ALL || logDetail == BODY) { addBody(requestSpec, builder, shouldPrettyPrint); } String logString = builder.toString(); if (logString.endsWith("\n")) { logString = StringUtils.removeEnd(logString, "\n"); } stream.println(logString); return logString; } private static void addProxy(FilterableRequestSpecification requestSpec, StringBuilder builder) { builder.append("Proxy:"); ProxySpecification proxySpec = requestSpec.getProxySpecification(); appendThreeTabs(builder); if (proxySpec == null) { builder.append(NONE); } else { builder.append(proxySpec.toString()); } builder.append(NEW_LINE); } private static void addBody(FilterableRequestSpecification requestSpec, StringBuilder builder, boolean 
shouldPrettyPrint) { builder.append("Body:"); if (requestSpec.getBody() != null) { final String body; if (shouldPrettyPrint) { body = new Prettifier().getPrettifiedBodyIfPossible(requestSpec); } else { body = requestSpec.getBody(); } builder.append(NEW_LINE).append(body); } else { appendTab(appendTwoTabs(builder)).append(NONE); } } private static void addCookies(FilterableRequestSpecification requestSpec, StringBuilder builder) { builder.append("Cookies:"); final Cookies cookies = requestSpec.getCookies(); if (!cookies.exist()) { appendTwoTabs(builder).append(NONE).append(NEW_LINE); } int i = 0; for (Cookie cookie : cookies) { if (i++ == 0) { appendTwoTabs(builder); } else { appendFourTabs(builder); } builder.append(cookie).append(NEW_LINE); } } private static void addHeaders(FilterableRequestSpecification requestSpec, StringBuilder builder) { builder.append("Headers:"); final Headers headers = requestSpec.getHeaders(); if (!headers.exist()) { appendTwoTabs(builder).append(NONE).append(NEW_LINE); } else { int i = 0; for (Header header : headers) { if (i++ == 0) { appendTwoTabs(builder); } else { appendFourTabs(builder); } builder.append(header).append(NEW_LINE); } } } private static void addMultiParts(FilterableRequestSpecification requestSpec, StringBuilder builder) { builder.append("Multiparts:"); final List<MultiPartSpecification> multiParts = requestSpec.getMultiPartParams(); if (multiParts.isEmpty()) { appendTwoTabs(builder).append(NONE).append(NEW_LINE); } else { int i = 0; for (MultiPartSpecification multiPart : multiParts) { if (i++ == 0) { appendTwoTabs(builder); } else { appendFourTabs(builder); } builder.append(multiPart).append(NEW_LINE); } } } private static void addSingle(StringBuilder builder, String str, String requestPath) { appendTab(builder.append(str)).append(requestPath).append(NEW_LINE); } private static void addMapDetails(StringBuilder builder, String title, Map<String, ?> map) { appendTab(builder.append(title)); if (map.isEmpty()) { 
builder.append(NONE).append(NEW_LINE); } else { int i = 0; for (Map.Entry<String, ?> entry : map.entrySet()) { if (i++ != 0) { appendFourTabs(builder); } final Object value = entry.getValue(); builder.append(entry.getKey()); if (!(value instanceof NoParameterValue)) { builder.append(EQUALS).append(value); } builder.append(NEW_LINE); } } } private static StringBuilder appendFourTabs(StringBuilder builder) { appendTwoTabs(appendTwoTabs(builder)); return builder; } private static StringBuilder appendTwoTabs(StringBuilder builder) { appendTab(appendTab(builder)); return builder; } private static StringBuilder appendThreeTabs(StringBuilder builder) { appendTwoTabs(appendTab(builder)); return builder; } private static StringBuilder appendTab(StringBuilder builder) { return builder.append(TAB); } }
package it.unibz.krdb.obda.owlrefplatform.core.dag; /* * #%L * ontop-reformulation-core * %% * Copyright (C) 2009 - 2014 Free University of Bozen-Bolzano * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import it.unibz.krdb.obda.model.Predicate; import it.unibz.krdb.obda.ontology.BasicClassDescription; import it.unibz.krdb.obda.ontology.Description; import it.unibz.krdb.obda.ontology.Ontology; import it.unibz.krdb.obda.ontology.OntologyFactory; import it.unibz.krdb.obda.ontology.PropertyExpression; import it.unibz.krdb.obda.ontology.SomeValuesFrom; import it.unibz.krdb.obda.ontology.SubPropertyOfAxiom; import it.unibz.krdb.obda.ontology.impl.OntologyFactoryImpl; import it.unibz.krdb.obda.ontology.SubClassOfAxiom; import java.io.Serializable; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.Map; import java.util.Set; @Deprecated public class DAG implements Serializable { /** * */ private static final long serialVersionUID = -9208872698083322721L; private int index_counter = 1; public final static SemanticIndexRange NULL_RANGE = new SemanticIndexRange(-1, -1); public final static int NULL_INDEX = -1; public Map<Description, Description> equi_mappings = new HashMap<Description, Description>(); public final Map<Description, DAGNode> classes; public final Map<Description, DAGNode> roles; public final Map<Description, DAGNode> allnodes; private static final 
OntologyFactory descFactory = OntologyFactoryImpl.getInstance(); // public final static String thingStr = // "http://www.w3.org/2002/07/owl#Thing"; // public final static URI thingUri = URI.create(thingStr); // public final static Predicate thingPred = // predicateFactory.getPredicate(thingUri, 1); // public final static ClassDescription thingConcept = // descFactory.createClass(thingPred); // public final DAGNode thing = new DAGNode(thingConcept); /** * Build the DAG from the ontology * * @param ontology * ontology that contain TBox assertions for the DAG */ public DAG(Ontology ontology) { int rolenodes = (ontology.getVocabulary().getObjectProperties().size() + ontology.getVocabulary().getDataProperties().size()) * 2; int classnodes = ontology.getVocabulary().getClasses().size() + rolenodes * 2; classes = new LinkedHashMap<Description, DAGNode>(classnodes * 2); roles = new LinkedHashMap<Description, DAGNode>(rolenodes * 2); allnodes = new HashMap<Description, DAGNode>((rolenodes + classnodes) * 2); // classes.put(thingConcept, thing); for (BasicClassDescription concept : ontology.getVocabulary().getClasses()) { DAGNode node = new DAGNode(concept); // if (!concept.equals(thingConcept)) { // addParent(node, thing); classes.put(concept, node); allnodes.put(concept, node); } /* * For each role we add nodes for its inverse, its domain and its range */ Set<PropertyExpression> allroles = new HashSet<PropertyExpression>(); allroles.addAll(ontology.getVocabulary().getObjectProperties()); allroles.addAll(ontology.getVocabulary().getDataProperties()); for (PropertyExpression role : allroles) { DAGNode rolenode = new DAGNode(role); roles.put(role, rolenode); PropertyExpression roleInv = role.getInverse(); DAGNode rolenodeinv = new DAGNode(roleInv); roles.put(roleInv, rolenodeinv); SomeValuesFrom existsRole = descFactory.createPropertySomeRestriction(role); SomeValuesFrom existsRoleInv = descFactory.createPropertySomeRestriction(roleInv); DAGNode existsNode = new 
DAGNode(existsRole); DAGNode existsNodeInv = new DAGNode(existsRoleInv); classes.put(existsRole, existsNode); classes.put(existsRoleInv, existsNodeInv); allnodes.put(role, rolenode); allnodes.put(existsRole, existsNode); allnodes.put(existsRoleInv, existsNodeInv); allnodes.put(roleInv, rolenodeinv); // addParent(existsNode, thing); // addParent(existsNodeInv, thing); } for (SubClassOfAxiom clsIncl : ontology.getSubClassAxioms()) { BasicClassDescription parent = clsIncl.getSuper(); BasicClassDescription child = clsIncl.getSub(); addClassEdge(parent, child); } for (SubPropertyOfAxiom roleIncl : ontology.getSubPropertyAxioms()) { PropertyExpression parent = roleIncl.getSuper(); PropertyExpression child = roleIncl.getSub(); // This adds the direct edge and the inverse, e.g., R ISA S and // R- ISA S-, // R- ISA S and R ISA S- addRoleEdge(parent, child); } // clean(); } private void addParent(DAGNode child, DAGNode parent) { if (!child.getDescription().equals(parent.getDescription())) { child.getParents().add(parent); parent.getChildren().add(child); } } public DAG(Map<Description, DAGNode> classes, Map<Description, DAGNode> roles, Map<Description, Description> equiMap, Map<Description, DAGNode> allnodes) { this.classes = classes; this.roles = roles; this.equi_mappings = equiMap; this.allnodes = allnodes; } private void addClassEdge(BasicClassDescription parent, BasicClassDescription child) { DAGNode parentNode; if (classes.containsKey(parent)) { parentNode = classes.get(parent); } else { parentNode = new DAGNode(parent); classes.put(parent, parentNode); allnodes.put(parent, parentNode); } DAGNode childNode; if (classes.containsKey(child)) { childNode = classes.get(child); } else { childNode = new DAGNode(child); classes.put(child, childNode); allnodes.put(child, childNode); } addParent(childNode, parentNode); } private void addRoleEdge(PropertyExpression parent, PropertyExpression child) { addRoleEdgeSingle(parent, child); addRoleEdgeSingle(parent.getInverse(), 
child.getInverse()); } private void addRoleEdgeSingle(PropertyExpression parent, PropertyExpression child) { DAGNode parentNode = roles.get(parent); if (parentNode == null) { parentNode = new DAGNode(parent); roles.put(parent, parentNode); allnodes.put(parent, parentNode); } DAGNode childNode = roles.get(child); if (childNode == null) { childNode = new DAGNode(child); roles.put(child, childNode); allnodes.put(parent, parentNode); } addParent(childNode, parentNode); BasicClassDescription existsParent = descFactory.createPropertySomeRestriction(parent); BasicClassDescription existChild = descFactory.createPropertySomeRestriction(child); addClassEdge(existsParent, existChild); // addClassEdge(thingConcept, existsParent); } public void clean() { /* * First we remove all cycles in roles, not that while doing so we might * also need to colapse some nodes in the class hierarchy, i.e., those * for \exists R and \exists R- */ DAGOperations.removeCycles(roles, equi_mappings, this); DAGOperations.computeTransitiveReduct(roles); DAGOperations.removeCycles(classes, equi_mappings, this); DAGOperations.computeTransitiveReduct(classes); DAGOperations.buildAncestors(roles); DAGOperations.buildAncestors(classes); DAGOperations.buildDescendants(roles); DAGOperations.buildDescendants(classes); } public void index() { LinkedList<DAGNode> roots = new LinkedList<DAGNode>(); for (DAGNode n : classes.values()) { if (n.getParents().isEmpty()) { roots.add(n); } } for (DAGNode n : roles.values()) { if (n.getParents().isEmpty()) { roots.add(n); } } for (DAGNode node : roots) { indexNode(node); } for (DAGNode node : roots) { mergeRangeNode(node); } } private void mergeRangeNode(DAGNode node) { for (DAGNode ch : node.getChildren()) { if (ch != node) { mergeRangeNode(ch); node.getRange().addRange(ch.getRange()); } } } private void indexNode(DAGNode node) { if (node.getIndex() == NULL_INDEX) { node.setIndex(index_counter); node.setRange(new SemanticIndexRange(index_counter, index_counter)); 
index_counter++; } else { return; } for (DAGNode ch : node.getChildren()) { if (ch != node) { indexNode(ch); } } } @Override public String toString() { StringBuilder res = new StringBuilder(); for (DAGNode node : classes.values()) { res.append(node); res.append("\n"); } for (DAGNode node : roles.values()) { res.append(node); res.append("\n"); } return res.toString(); } @Override public boolean equals(Object other) { if (other == null) return false; if (other == this) return true; if (this.getClass() != other.getClass()) return false; DAG otherDAG = (DAG) other; return this.classes.equals(otherDAG.classes) && this.roles.equals(otherDAG.roles); } @Override public int hashCode() { int result = 17; result += 37 * result + this.classes.hashCode(); result += 37 * result + this.roles.hashCode(); return result; } public Collection<DAGNode> getClasses() { return classes.values(); } public Collection<DAGNode> getRoles() { return roles.values(); } /*** * Returns the nodes of this DAG considering the equivalence maps. * * @param conceptDescription * @return */ public DAGNode getClassNode(BasicClassDescription conceptDescription) { DAGNode rv = classes.get(conceptDescription); if (rv == null) { rv = classes.get(equi_mappings.get(conceptDescription)); } return rv; } /*** * Returns the nodes of this DAG considering the equivalence maps. * * Note, this method is NOT SAFE with respecto equivalences of inverses. If * R- is equivalent to S, then R will be removed. Asking for R will give you * the S node, however, it should not be used directly, since its S- that * should be used. This method should return NULL in such cases, and the * caller should use the equi_mappings directly to get the proper * equivalence, realize that it must get the node for S and it must be used * in an inverse way. 
* * @param conceptDescription * @return */ public DAGNode getRoleNode(PropertyExpression roleDescription) { DAGNode rv = roles.get(roleDescription); if (rv == null) { rv = roles.get(equi_mappings.get(roleDescription)); } return rv; } /*** * Returns the node associated to this description. It doesnt take into * account equivalences. * * @param description * @return */ public DAGNode getNode(Description description) { DAGNode n = allnodes.get(description); if (n == null) return allnodes.get(equi_mappings.get(description)); return n; } }
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.glacier.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * <p>
 * Contains the Amazon S3 Glacier response to your request.
 * </p>
 * <p>
 * For information about the underlying REST API, see <a
 * href="https://docs.aws.amazon.com/amazonglacier/latest/dev/api-archive-post.html">Upload Archive</a>. For conceptual
 * information, see <a href="https://docs.aws.amazon.com/amazonglacier/latest/dev/working-with-archives.html">Working
 * with Archives in Amazon S3 Glacier</a>.
 * </p>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CompleteMultipartUploadResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** The relative URI path of the newly added archive resource. */
    private String location;
    /** The checksum of the archive computed by Amazon S3 Glacier. */
    private String checksum;
    /** The ID of the archive; also included as part of the location. */
    private String archiveId;

    /**
     * Sets the relative URI path of the newly added archive resource.
     *
     * @param location the relative URI path of the newly added archive resource
     */
    public void setLocation(String location) {
        this.location = location;
    }

    /**
     * Returns the relative URI path of the newly added archive resource.
     *
     * @return the relative URI path of the newly added archive resource
     */
    public String getLocation() {
        return this.location;
    }

    /**
     * Fluent variant of {@link #setLocation(String)}.
     *
     * @param location the relative URI path of the newly added archive resource
     * @return this object, for call chaining
     */
    public CompleteMultipartUploadResult withLocation(String location) {
        setLocation(location);
        return this;
    }

    /**
     * Sets the checksum of the archive computed by Amazon S3 Glacier.
     *
     * @param checksum the checksum of the archive
     */
    public void setChecksum(String checksum) {
        this.checksum = checksum;
    }

    /**
     * Returns the checksum of the archive computed by Amazon S3 Glacier.
     *
     * @return the checksum of the archive
     */
    public String getChecksum() {
        return this.checksum;
    }

    /**
     * Fluent variant of {@link #setChecksum(String)}.
     *
     * @param checksum the checksum of the archive
     * @return this object, for call chaining
     */
    public CompleteMultipartUploadResult withChecksum(String checksum) {
        setChecksum(checksum);
        return this;
    }

    /**
     * Sets the ID of the archive. This value is also included as part of the location.
     *
     * @param archiveId the ID of the archive
     */
    public void setArchiveId(String archiveId) {
        this.archiveId = archiveId;
    }

    /**
     * Returns the ID of the archive. This value is also included as part of the location.
     *
     * @return the ID of the archive
     */
    public String getArchiveId() {
        return this.archiveId;
    }

    /**
     * Fluent variant of {@link #setArchiveId(String)}.
     *
     * @param archiveId the ID of the archive
     * @return this object, for call chaining
     */
    public CompleteMultipartUploadResult withArchiveId(String archiveId) {
        setArchiveId(archiveId);
        return this;
    }

    /** Null-safe equality check used by {@link #equals(Object)}. */
    private static boolean fieldEquals(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder();
        text.append("{");
        if (getLocation() != null) {
            text.append("Location: ").append(getLocation()).append(",");
        }
        if (getChecksum() != null) {
            text.append("Checksum: ").append(getChecksum()).append(",");
        }
        if (getArchiveId() != null) {
            text.append("ArchiveId: ").append(getArchiveId());
        }
        text.append("}");
        return text.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof CompleteMultipartUploadResult)) {
            return false;
        }
        CompleteMultipartUploadResult that = (CompleteMultipartUploadResult) obj;
        return fieldEquals(getLocation(), that.getLocation())
                && fieldEquals(getChecksum(), that.getChecksum())
                && fieldEquals(getArchiveId(), that.getArchiveId());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getLocation() == null) ? 0 : getLocation().hashCode());
        hashCode = prime * hashCode + ((getChecksum() == null) ? 0 : getChecksum().hashCode());
        hashCode = prime * hashCode + ((getArchiveId() == null) ? 0 : getArchiveId().hashCode());
        return hashCode;
    }

    @Override
    public CompleteMultipartUploadResult clone() {
        try {
            return (CompleteMultipartUploadResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jena.fuseki.cmd ; import java.util.List ; import org.apache.jena.atlas.lib.FileOps ; import org.apache.jena.fuseki.Fuseki ; import org.apache.jena.fuseki.FusekiLogging ; import org.apache.jena.fuseki.build.Template ; import org.apache.jena.fuseki.jetty.JettyFuseki ; import org.apache.jena.fuseki.jetty.JettyServerConfig ; import org.apache.jena.fuseki.server.FusekiEnv ; import org.apache.jena.fuseki.server.FusekiServerListener ; import org.apache.jena.fuseki.server.ServerInitialConfig ; import org.apache.jena.query.ARQ ; import org.apache.jena.query.Dataset ; import org.apache.jena.riot.Lang ; import org.apache.jena.riot.RDFDataMgr ; import org.apache.jena.riot.RDFLanguages ; import org.apache.jena.sparql.core.DatasetGraphFactory ; import org.apache.jena.system.JenaSystem ; import org.apache.jena.tdb.TDB ; import org.apache.jena.tdb.sys.Names ; import org.apache.jena.tdb.transaction.TransactionManager ; import org.slf4j.Logger ; import jena.cmd.ArgDecl ; import jena.cmd.CmdException ; import arq.cmdline.CmdARQ ; import arq.cmdline.ModDatasetAssembler ; public class FusekiCmd { // This allows us to set logging before calling FusekiCmdInner // FusekiCmdInner inherits from CmdMain 
which statically sets logging. // By java classloading, super class statics run before the // statics of a class are run. static { FusekiEnv.mode = FusekiEnv.INIT.STANDALONE ; FusekiLogging.setLogging() ; } static public void main(String... argv) { FusekiCmdInner.innerMain(argv); } static class FusekiCmdInner extends CmdARQ { // --mgt. --mgtPort :: Legacy. private static ArgDecl argMgt = new ArgDecl(ArgDecl.NoValue, "mgt") ; private static ArgDecl argMgtPort = new ArgDecl(ArgDecl.HasValue, "mgtPort", "mgtport") ; // --home :: Legacy - do not use. private static ArgDecl argHome = new ArgDecl(ArgDecl.HasValue, "home") ; private static ArgDecl argPages = new ArgDecl(ArgDecl.HasValue, "pages") ; private static ArgDecl argMem = new ArgDecl(ArgDecl.NoValue, "mem") ; private static ArgDecl argAllowUpdate = new ArgDecl(ArgDecl.NoValue, "update", "allowUpdate") ; private static ArgDecl argFile = new ArgDecl(ArgDecl.HasValue, "file") ; private static ArgDecl argMemTDB = new ArgDecl(ArgDecl.NoValue, "memtdb", "memTDB") ; private static ArgDecl argTDB = new ArgDecl(ArgDecl.HasValue, "loc", "location") ; private static ArgDecl argPort = new ArgDecl(ArgDecl.HasValue, "port") ; private static ArgDecl argLocalhost = new ArgDecl(ArgDecl.NoValue, "localhost", "local") ; private static ArgDecl argTimeout = new ArgDecl(ArgDecl.HasValue, "timeout") ; private static ArgDecl argFusekiConfig = new ArgDecl(ArgDecl.HasValue, "config", "conf") ; private static ArgDecl argJettyConfig = new ArgDecl(ArgDecl.HasValue, "jetty-config") ; private static ArgDecl argGZip = new ArgDecl(ArgDecl.HasValue, "gzip") ; // Deprecated. Use shiro. private static ArgDecl argBasicAuth = new ArgDecl(ArgDecl.HasValue, "basic-auth") ; // private static ModLocation modLocation = new ModLocation() ; private static ModDatasetAssembler modDataset = new ModDatasetAssembler() ; // fuseki [--mem|--desc assembler.ttl] [--port PORT] **** /datasetURI static public void innerMain(String... 
argv) { JenaSystem.init() ; // Do explicitly so it happens after subsystem initialization. Fuseki.init() ; new FusekiCmdInner(argv).mainRun() ; } private JettyServerConfig jettyServerConfig = new JettyServerConfig() ; { jettyServerConfig.port = 3030 ; jettyServerConfig.contextPath = "/" ; jettyServerConfig.jettyConfigFile = null ; jettyServerConfig.pages = Fuseki.PagesStatic ; jettyServerConfig.enableCompression = true ; jettyServerConfig.verboseLogging = false ; } private ServerInitialConfig cmdLineDataset = new ServerInitialConfig() ; public FusekiCmdInner(String... argv) { super(argv) ; if ( false ) // Consider ... TransactionManager.QueueBatchSize = TransactionManager.QueueBatchSize / 2 ; getUsage().startCategory("Fuseki") ; addModule(modDataset) ; add(argMem, "--mem", "Create an in-memory, non-persistent dataset for the server") ; add(argFile, "--file=FILE", "Create an in-memory, non-persistent dataset for the server, initialised with the contents of the file") ; add(argTDB, "--loc=DIR", "Use an existing TDB database (or create if does not exist)") ; add(argMemTDB, "--memTDB", "Create an in-memory, non-persistent dataset using TDB (testing only)") ; add(argPort, "--port", "Listen on this port number") ; add(argPages, "--pages=DIR", "Set of pages to serve as static content") ; // Set via jetty config file. 
add(argLocalhost, "--localhost", "Listen only on the localhost interface") ; add(argTimeout, "--timeout=", "Global timeout applied to queries (value in ms) -- format is X[,Y] ") ; add(argAllowUpdate, "--update", "Allow updates (via SPARQL Update and SPARQL HTTP Update)") ; add(argFusekiConfig, "--config=", "Use a configuration file to determine the services") ; add(argJettyConfig, "--jetty-config=FILE", "Set up the server (not services) with a Jetty XML file") ; add(argBasicAuth) ; //add(argMgt, "--mgt", "Enable the management commands") ; add(argMgt) ; // Legacy add(argMgtPort) ; // Legacy //add(argMgtPort, "--mgtPort=port", "Port for management optations") ; //add(argHome, "--home=DIR", "Root of Fuseki installation (overrides environment variable FUSEKI_HOME)") ; add(argGZip, "--gzip=on|off", "Enable GZip compression (HTTP Accept-Encoding) if request header set") ; //add(argUber) ; // add(argGSP) ; super.modVersion.addClass(TDB.class) ; super.modVersion.addClass(Fuseki.class) ; } static String argUsage = "[--config=FILE] [--mem|--desc=AssemblerFile|--file=FILE] [--port PORT] /DatasetPathName" ; @Override protected String getSummary() { return getCommandName() + " " + argUsage ; } @Override protected void processModulesAndArgs() { int x = 0 ; if ( super.isVerbose() || super.isDebug() ) { jettyServerConfig.verboseLogging = true ; // Output is still at level INFO (currently) } Logger log = Fuseki.serverLog ; if ( contains(argFusekiConfig) ) cmdLineDataset.fusekiCmdLineConfigFile = getValue(argFusekiConfig) ; ArgDecl assemblerDescDecl = new ArgDecl(ArgDecl.HasValue, "desc", "dataset") ; // ---- Datasets if ( contains(argMem) ) x++ ; if ( contains(argFile) ) x++ ; if ( contains(assemblerDescDecl) ) x++ ; if ( contains(argTDB) ) x++ ; if ( contains(argMemTDB) ) x++ ; if ( cmdLineDataset.fusekiCmdLineConfigFile != null ) { if ( x >= 1 ) throw new CmdException("Dataset specified on the command line but a configuration file also given.") ; } else { // No configuration 
file. 0 or 1 legal. if ( x > 1 ) throw new CmdException("Multiple ways providing a dataset. Only one of --mem, --file, --loc or --desc") ; } boolean cmdlineConfigPresent = ( x != 0 ) ; if ( contains(argMem) ) { log.info("Dataset: in-memory") ; cmdLineDataset = new ServerInitialConfig() ; cmdLineDataset.argTemplateFile = Template.templateMemFN ; } if ( contains(argFile) ) { String filename = getValue(argFile) ; log.info("Dataset: in-memory: load file: " + filename) ; if ( !FileOps.exists(filename) ) throw new CmdException("File not found: " + filename) ; // Directly populate the dataset. cmdLineDataset = new ServerInitialConfig() ; cmdLineDataset.dsg = DatasetGraphFactory.createMem() ; // INITIAL DATA. Lang language = RDFLanguages.filenameToLang(filename) ; if ( language == null ) throw new CmdException("Can't guess language for file: " + filename) ; RDFDataMgr.read(cmdLineDataset.dsg, filename) ; } if ( contains(argMemTDB) ) { //log.info("TDB dataset: in-memory") ; cmdLineDataset = new ServerInitialConfig() ; cmdLineDataset.argTemplateFile = Template.templateTDBMemFN ; cmdLineDataset.params.put(Template.DIR, Names.memName) ; } if ( contains(argTDB) ) { cmdLineDataset = new ServerInitialConfig() ; cmdLineDataset.argTemplateFile = Template.templateTDBDirFN ; String dir = getValue(argTDB) ; cmdLineDataset.params.put(Template.DIR, dir) ; } // Otherwise if ( contains(assemblerDescDecl) ) { log.info("Dataset from assembler") ; // Need to add service details. 
Dataset ds = modDataset.createDataset() ; //cmdLineDataset.dsg = ds.asDatasetGraph() ; } if ( cmdlineConfigPresent && getPositional().size() == 0 ) throw new CmdException("Missing service name") ; if ( !cmdlineConfigPresent && getPositional().size() > 0 ) throw new CmdException("Service name given but no configuration argument to match") ; if ( cmdLineDataset != null ) { if ( getPositional().size() > 1 ) throw new CmdException("Multiple dataset path names given") ; if ( getPositional().size() == 1 ) { cmdLineDataset.datasetPath = getPositionalArg(0) ; if ( cmdLineDataset.datasetPath.length() > 0 && !cmdLineDataset.datasetPath.startsWith("/") ) throw new CmdException("Dataset path name must begin with a /: " + cmdLineDataset.datasetPath) ; cmdLineDataset.allowUpdate = contains(argAllowUpdate) ; if ( ! cmdLineDataset.allowUpdate ) Fuseki.serverLog.info("Running in read-only mode for "+cmdLineDataset.datasetPath) ; // Include the dataset name as NAME for any templates. cmdLineDataset.params.put(Template.NAME, cmdLineDataset.datasetPath) ; } } // ---- Jetty server if ( contains(argBasicAuth) ) Fuseki.configLog.warn("--basic-auth ignored: Use Apache Shiro security - see shiro.ini") ; if ( contains(argPort) ) { String portStr = getValue(argPort) ; try { jettyServerConfig.port = Integer.parseInt(portStr) ; } catch (NumberFormatException ex) { throw new CmdException(argPort.getKeyName() + " : bad port number: " + portStr) ; } } if ( contains(argMgt) ) Fuseki.configLog.warn("Fuseki v2: Management functions are always enabled. --mgt not needed.") ; if ( contains(argMgtPort) ) Fuseki.configLog.warn("Fuseki v2: Management functions are always on the same port as the server. 
--mgtPort ignored.") ; // if ( contains(argMgt) ) { // jettyServerConfig.mgtPort = 0 ; // if ( contains(argMgtPort) ) { // String mgtPortStr = getValue(argMgtPort) ; // try { // jettyServerConfig.mgtPort = Integer.parseInt(mgtPortStr) ; // } catch (NumberFormatException ex) { // throw new CmdException("--"+argMgtPort.getKeyName() + " : bad port number: " + mgtPortStr) ; // } // } // } if ( contains(argLocalhost) ) jettyServerConfig.loopback = true ; if ( contains(argTimeout) ) { String str = getValue(argTimeout) ; ARQ.getContext().set(ARQ.queryTimeout, str) ; } if ( contains(argJettyConfig) ) { jettyServerConfig.jettyConfigFile = getValue(argJettyConfig) ; if ( !FileOps.exists(jettyServerConfig.jettyConfigFile) ) throw new CmdException("No such file: " + jettyServerConfig.jettyConfigFile) ; } if ( contains(argBasicAuth) ) { jettyServerConfig.authConfigFile = getValue(argBasicAuth) ; if ( !FileOps.exists(jettyServerConfig.authConfigFile) ) throw new CmdException("No such file: " + jettyServerConfig.authConfigFile) ; } if ( contains(argHome) ) { Fuseki.configLog.warn("--home ignored (use enviroment variables $FUSEKI_HOME and $FUSEKI_BASE)") ; // List<String> args = super.getValues(argHome) ; // homeDir = args.get(args.size() - 1) ; } if ( contains(argPages) ) { List<String> args = super.getValues(argPages) ; jettyServerConfig.pages = args.get(args.size() - 1) ; } if ( contains(argGZip) ) { if ( !hasValueOfTrue(argGZip) && !hasValueOfFalse(argGZip) ) throw new CmdException(argGZip.getNames().get(0) + ": Not understood: " + getValue(argGZip)) ; jettyServerConfig.enableCompression = super.hasValueOfTrue(argGZip) ; } } private static String sort_out_dir(String path) { path.replace('\\', '/') ; if ( !path.endsWith("/") ) path = path + "/" ; return path ; } @Override protected void exec() { FusekiServerListener.initialSetup = cmdLineDataset ; // For standalone, command line use ... 
JettyFuseki.initializeServer(jettyServerConfig) ; JettyFuseki.instance.start() ; JettyFuseki.instance.join() ; System.exit(0) ; } @Override protected String getCommandName() { return "fuseki" ; } } }
/*========================================================================= * Copyright (c) 2010-2014 Pivotal Software, Inc. All Rights Reserved. * This product is protected by U.S. and international copyright * and intellectual property laws. Pivotal products are covered by * one or more patents listed at http://www.pivotal.io/patents. *========================================================================= */ package com.gemstone.gemfire.management.internal.web.controllers; import java.util.concurrent.Callable; import com.gemstone.gemfire.internal.lang.StringUtils; import com.gemstone.gemfire.management.internal.cli.i18n.CliStrings; import com.gemstone.gemfire.management.internal.cli.util.CommandStringBuilder; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseBody; /** * The MiscellaneousCommandsController class implements GemFire Management REST API web service endpoints for the * Gfsh Miscellaneous Commands. 
* <p/> * @author John Blum * @see com.gemstone.gemfire.management.internal.cli.commands.MiscellaneousCommands * @see com.gemstone.gemfire.management.internal.web.controllers.AbstractCommandsController * @see org.springframework.stereotype.Controller * @see org.springframework.web.bind.annotation.PathVariable * @see org.springframework.web.bind.annotation.RequestMapping * @see org.springframework.web.bind.annotation.RequestMethod * @see org.springframework.web.bind.annotation.RequestParam * @see org.springframework.web.bind.annotation.ResponseBody * @since 8.0 */ @Controller("miscellaneousController") @RequestMapping(AbstractCommandsController.REST_API_VERSION) @SuppressWarnings("unused") public class MiscellaneousCommandsController extends AbstractCommandsController { @RequestMapping(method = RequestMethod.GET, value = "/logs") public Callable<ResponseEntity<String>> exportLogs(@RequestParam(CliStrings.EXPORT_LOGS__DIR) final String directory, @RequestParam(value = CliStrings.EXPORT_LOGS__GROUP, required = false) final String[] groups, @RequestParam(value = CliStrings.EXPORT_LOGS__MEMBER, required = false) final String memberNameId, @RequestParam(value = CliStrings.EXPORT_LOGS__LOGLEVEL, required = false) final String logLevel, @RequestParam(value = CliStrings.EXPORT_LOGS__UPTO_LOGLEVEL, defaultValue = "false") final Boolean onlyLogLevel, @RequestParam(value = CliStrings.EXPORT_LOGS__MERGELOG, defaultValue = "false") final Boolean mergeLog, @RequestParam(value = CliStrings.EXPORT_LOGS__STARTTIME, required = false) final String startTime, @RequestParam(value = CliStrings.EXPORT_LOGS__ENDTIME, required = false) final String endTime) { final CommandStringBuilder command = new CommandStringBuilder(CliStrings.EXPORT_LOGS); command.addOption(CliStrings.EXPORT_LOGS__DIR, decode(directory)); if (hasValue(groups)) { command.addOption(CliStrings.EXPORT_LOGS__GROUP, StringUtils.concat(groups, StringUtils.COMMA_DELIMITER)); } if (hasValue(memberNameId)) { 
command.addOption(CliStrings.EXPORT_LOGS__MEMBER, memberNameId); } if (hasValue(logLevel)) { command.addOption(CliStrings.EXPORT_LOGS__LOGLEVEL, logLevel); } command.addOption(CliStrings.EXPORT_LOGS__UPTO_LOGLEVEL, String.valueOf(Boolean.TRUE.equals(onlyLogLevel))); command.addOption(CliStrings.EXPORT_LOGS__MERGELOG, String.valueOf(Boolean.TRUE.equals(mergeLog))); if (hasValue(startTime)) { command.addOption(CliStrings.EXPORT_LOGS__STARTTIME, startTime); } if (hasValue(endTime)) { command.addOption(CliStrings.EXPORT_LOGS__ENDTIME, endTime); } return new Callable<ResponseEntity<String>>() { @Override public ResponseEntity<String> call() throws Exception { return new ResponseEntity<String>(processCommand(command.toString()), HttpStatus.OK); } }; } // TODO determine whether Async functionality is required @RequestMapping(method = RequestMethod.GET, value = "/stacktraces") @ResponseBody public String exportStackTraces(@RequestParam(value = CliStrings.EXPORT_STACKTRACE__FILE) final String file, @RequestParam(value = CliStrings.EXPORT_STACKTRACE__GROUP, required = false) final String groupName, @RequestParam(value = CliStrings.EXPORT_STACKTRACE__MEMBER, required = false) final String memberNameId) { CommandStringBuilder command = new CommandStringBuilder(CliStrings.EXPORT_STACKTRACE); command.addOption(CliStrings.EXPORT_STACKTRACE__FILE, decode(file)); if (hasValue(groupName)) { command.addOption(CliStrings.EXPORT_STACKTRACE__GROUP, groupName); } if (hasValue(memberNameId)) { command.addOption(CliStrings.EXPORT_STACKTRACE__MEMBER, memberNameId); } return processCommand(command.toString()); } // TODO add Async functionality @RequestMapping(method = RequestMethod.POST, value = "/gc") @ResponseBody public String gc(@RequestParam(value = CliStrings.GC__GROUP, required = false) final String[] groups) { CommandStringBuilder command = new CommandStringBuilder(CliStrings.GC); if (hasValue(groups)) { command.addOption(CliStrings.GC__GROUP, StringUtils.concat(groups, 
StringUtils.COMMA_DELIMITER)); } return processCommand(command.toString()); } // TODO add Async functionality @RequestMapping(method = RequestMethod.POST, value = "/members/{member}/gc") @ResponseBody public String gc(@PathVariable("member") final String memberNameId) { CommandStringBuilder command = new CommandStringBuilder(CliStrings.GC); command.addOption(CliStrings.GC__MEMBER, decode(memberNameId)); return processCommand(command.toString()); } // TODO add Async functionality @RequestMapping(method = RequestMethod.GET, value = "/netstat") @ResponseBody public String netstat(@RequestParam(value = CliStrings.NETSTAT__MEMBER, required= false) final String[] members, @RequestParam(value = CliStrings.NETSTAT__GROUP, required = false) final String group, @RequestParam(value = CliStrings.NETSTAT__FILE, required = false) final String file, @RequestParam(value = CliStrings.NETSTAT__WITHLSOF, defaultValue = "false") final Boolean withLsof) { CommandStringBuilder command = new CommandStringBuilder(CliStrings.NETSTAT); addCommandOption(null, command, CliStrings.NETSTAT__MEMBER, members); addCommandOption(null, command, CliStrings.NETSTAT__GROUP, group); addCommandOption(null, command, CliStrings.NETSTAT__FILE, file); addCommandOption(null, command, CliStrings.NETSTAT__WITHLSOF, withLsof); return processCommand(command.toString()); } // TODO determine if Async functionality is required @RequestMapping(method = RequestMethod.GET, value = "/deadlocks") @ResponseBody public String showDeadLock(@RequestParam(CliStrings.SHOW_DEADLOCK__DEPENDENCIES__FILE) final String dependenciesFile) { CommandStringBuilder command = new CommandStringBuilder(CliStrings.SHOW_DEADLOCK); command.addOption(CliStrings.SHOW_DEADLOCK__DEPENDENCIES__FILE, decode(dependenciesFile)); return processCommand(command.toString()); } // TODO determine if Async functionality is required @RequestMapping(method = RequestMethod.GET, value = "/members/{member}/log") @ResponseBody public String 
showLog(@PathVariable("member") final String memberNameId, @RequestParam(value = CliStrings.SHOW_LOG_LINE_NUM, defaultValue = "0") final Integer lines) { CommandStringBuilder command = new CommandStringBuilder(CliStrings.SHOW_LOG); command.addOption(CliStrings.SHOW_LOG_MEMBER, decode(memberNameId)); command.addOption(CliStrings.SHOW_LOG_LINE_NUM, String.valueOf(lines)); return processCommand(command.toString()); } // TODO determine if Async functionality is required @RequestMapping(method = RequestMethod.GET, value = "/metrics") @ResponseBody public String showMetrics(@RequestParam(value = CliStrings.SHOW_METRICS__MEMBER, required = false) final String memberNameId, @RequestParam(value = CliStrings.SHOW_METRICS__REGION, required = false) final String regionNamePath, @RequestParam(value = CliStrings.SHOW_METRICS__FILE, required = false) final String file, @RequestParam(value = CliStrings.SHOW_METRICS__CACHESERVER__PORT, required = false) final String cacheServerPort, @RequestParam(value = CliStrings.SHOW_METRICS__CATEGORY, required = false) final String[] categories) { CommandStringBuilder command = new CommandStringBuilder(CliStrings.SHOW_METRICS); if (hasValue(memberNameId)) { command.addOption(CliStrings.SHOW_METRICS__MEMBER, memberNameId); } if (hasValue(regionNamePath)) { command.addOption(CliStrings.SHOW_METRICS__REGION, regionNamePath); } if (hasValue(file)) { command.addOption(CliStrings.SHOW_METRICS__FILE, file); } if (hasValue(cacheServerPort)) { command.addOption(CliStrings.SHOW_METRICS__CACHESERVER__PORT, cacheServerPort); } if (hasValue(categories)) { command.addOption(CliStrings.SHOW_METRICS__CATEGORY, StringUtils.concat(categories, StringUtils.COMMA_DELIMITER)); } return processCommand(command.toString()); } @RequestMapping(method = RequestMethod.POST, value = "/shutdown") @ResponseBody public String shutdown(@RequestParam(value = CliStrings.SHUTDOWN__TIMEOUT, defaultValue = "-1") final Integer timeout) { CommandStringBuilder command = new 
CommandStringBuilder(CliStrings.SHUTDOWN); command.addOption(CliStrings.SHUTDOWN__TIMEOUT, String.valueOf(timeout)); return processCommand(command.toString()); } // TODO determine whether the {groups} and {members} path variables corresponding to the --groups and --members // command-line options in the 'change loglevel' Gfsh command actually accept multiple values, and... // TODO if so, then change the groups and members method parameters to String[] types. // TODO If not, then these options should be renamed! @RequestMapping(method = RequestMethod.POST, value = "/groups/{groups}/loglevel") @ResponseBody public String changeLogLevelForGroups(@PathVariable("groups") final String groups, @RequestParam(value = CliStrings.CHANGE_LOGLEVEL__LOGLEVEL, required = true) final String logLevel) { return internalChangeLogLevel(groups, null, logLevel); } @RequestMapping(method = RequestMethod.POST, value = "/members/{members}/loglevel") @ResponseBody public String changeLogLevelForMembers(@PathVariable("members") final String members, @RequestParam(value = CliStrings.CHANGE_LOGLEVEL__LOGLEVEL, required = true) final String logLevel) { return internalChangeLogLevel(null, members, logLevel); } @RequestMapping(method = RequestMethod.POST, value = "/members/{members}/groups/{groups}/loglevel") @ResponseBody public String changeLogLevelForMembersAndGroups(@PathVariable("members") final String members, @PathVariable("groups") final String groups, @RequestParam(value = CliStrings.CHANGE_LOGLEVEL__LOGLEVEL) final String logLevel) { return internalChangeLogLevel(groups, members, logLevel); } // NOTE since "logLevel" is "required", then just set the option; no need to validate it's value. 
private String internalChangeLogLevel(final String groups, final String members, final String logLevel) { CommandStringBuilder command = new CommandStringBuilder(CliStrings.CHANGE_LOGLEVEL); command.addOption(CliStrings.CHANGE_LOGLEVEL__LOGLEVEL, decode(logLevel)); if (hasValue(groups)) { command.addOption(CliStrings.CHANGE_LOGLEVEL__GROUPS, decode(groups)); } if (hasValue(members)) { command.addOption(CliStrings.CHANGE_LOGLEVEL__MEMBER, decode(members)); } return processCommand(command.toString()); } }
/** * ConceptBodyModuleDec.java * --------------------------------- * Copyright (c) 2016 * RESOLVE Software Research Group * School of Computing * Clemson University * All rights reserved. * --------------------------------- * This file is subject to the terms and conditions defined in * file 'LICENSE.txt', which is part of this source code package. */ package edu.clemson.cs.r2jt.absyn; import edu.clemson.cs.r2jt.collections.Iterator; import edu.clemson.cs.r2jt.collections.List; import edu.clemson.cs.r2jt.data.PosSymbol; import edu.clemson.cs.r2jt.data.Symbol; public class ConceptBodyModuleDec extends AbstractParameterizedModuleDec { // =========================================================== // Variables // =========================================================== /** The name member. */ private PosSymbol name; /** The performance profile name member. */ private PosSymbol profileName; /** The conceptName member. */ private PosSymbol conceptName; /** The enhancementNames member. */ private List<PosSymbol> enhancementNames; /** The requires member. */ private Exp requires; /** The conventions member. */ private List<Exp> conventions; /** The corrs member. */ private List<Exp> corrs; /** The facilityInit member. */ private InitItem facilityInit; /** The facilityFinal member. */ private FinalItem facilityFinal; /** The decs member. 
*/ private List<Dec> decs; // =========================================================== // Constructors // =========================================================== public ConceptBodyModuleDec() {}; public ConceptBodyModuleDec(PosSymbol name, PosSymbol profileName, List<ModuleParameterDec> parameters, PosSymbol conceptName, List<PosSymbol> enhancementNames, List<UsesItem> usesItems, Exp requires, List<Exp> conventions, List<Exp> corrs, InitItem facilityInit, FinalItem facilityFinal, List<Dec> decs) { this.name = name; this.profileName = profileName; this.parameters = parameters; this.conceptName = conceptName; this.enhancementNames = enhancementNames; this.usesItems = usesItems; this.requires = requires; this.conventions = conventions; this.corrs = corrs; this.facilityInit = facilityInit; this.facilityFinal = facilityFinal; this.decs = decs; } // =========================================================== // Accessor Methods // =========================================================== // ----------------------------------------------------------- // Get Methods // ----------------------------------------------------------- /** Returns the value of the name variable. */ public PosSymbol getName() { return name; } /** Returns the value of the profileName variable. */ public PosSymbol getProfileName() { return profileName; } /** Returns the value of the conceptName variable. */ public PosSymbol getConceptName() { return conceptName; } /** Returns the value of the enhancementNames variable. */ public List<PosSymbol> getEnhancementNames() { return enhancementNames; } /** Returns the value of the requires variable. */ public Exp getRequires() { return requires; } /** Returns the value of the conventions variable. */ public List<Exp> getConventions() { return conventions; } /** Returns the value of the corrs variable. */ public List<Exp> getCorrs() { return corrs; } /** Returns the value of the facilityInit variable. 
*/ public InitItem getFacilityInit() { return facilityInit; } /** Returns the value of the facilityFinal variable. */ public FinalItem getFacilityFinal() { return facilityFinal; } /** Returns the value of the decs variable. */ public List<Dec> getDecs() { return decs; } /** Returns a list of procedures in this realization. */ public List<Symbol> getLocalProcedureNames() { List<Symbol> retval = new List<Symbol>(); Iterator<Dec> it = decs.iterator(); while (it.hasNext()) { Dec d = it.next(); if (d instanceof ProcedureDec) { retval.add(d.getName().getSymbol()); } } return retval; } // ----------------------------------------------------------- // Set Methods // ----------------------------------------------------------- /** Sets the name variable to the specified value. */ public void setName(PosSymbol name) { this.name = name; } /** Sets the profileName variable to the specified value. */ public void setProfileName(PosSymbol name) { this.profileName = name; } /** Sets the conceptName variable to the specified value. */ public void setConceptName(PosSymbol conceptName) { this.conceptName = conceptName; } /** Sets the enhancementNames variable to the specified value. */ public void setEnhancementNames(List<PosSymbol> enhancementNames) { this.enhancementNames = enhancementNames; } /** Sets the requires variable to the specified value. */ public void setRequires(Exp requires) { this.requires = requires; } /** Sets the conventions variable to the specified value. */ public void setConventions(List<Exp> conventions) { this.conventions = conventions; } /** Sets the corrs variable to the specified value. */ public void setCorrs(List<Exp> corrs) { this.corrs = corrs; } /** Sets the facilityInit variable to the specified value. */ public void setFacilityInit(InitItem facilityInit) { this.facilityInit = facilityInit; } /** Sets the facilityFinal variable to the specified value. 
*/ public void setFacilityFinal(FinalItem facilityFinal) { this.facilityFinal = facilityFinal; } /** Sets the decs variable to the specified value. */ public void setDecs(List<Dec> decs) { this.decs = decs; } // =========================================================== // Public Methods // =========================================================== /** Accepts a ResolveConceptualVisitor. */ public void accept(ResolveConceptualVisitor v) { v.visitConceptBodyModuleDec(this); } /** Returns a formatted text string of this class. */ public String asString(int indent, int increment) { StringBuffer sb = new StringBuffer(); printSpace(indent, sb); sb.append("ConceptBodyModuleDec\n"); if (name != null) { sb.append(name.asString(indent + increment, increment)); } if (parameters != null) { sb.append(parameters.asString(indent + increment, increment)); } if (conceptName != null) { sb.append(conceptName.asString(indent + increment, increment)); } if (enhancementNames != null) { sb.append(enhancementNames.asString(indent + increment, increment)); } if (usesItems != null) { sb.append(usesItems.asString(indent + increment, increment)); } if (requires != null) { sb.append(requires.asString(indent + increment, increment)); } if (conventions != null) { sb.append(conventions.asString(indent + increment, increment)); } if (corrs != null) { sb.append(corrs.asString(indent + increment, increment)); } if (facilityInit != null) { sb.append(facilityInit.asString(indent + increment, increment)); } if (facilityFinal != null) { sb.append(facilityFinal.asString(indent + increment, increment)); } if (decs != null) { sb.append(decs.asString(indent + increment, increment)); } return sb.toString(); } }
/* * Copyright (c) 2010-2013 Evolveum * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.evolveum.midpoint.model.common.mapping; import static org.testng.AssertJUnit.assertEquals; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.util.Collection; import javax.xml.bind.JAXBElement; import javax.xml.bind.JAXBException; import javax.xml.namespace.QName; import com.evolveum.prism.xml.ns._public.types_3.ProtectedStringType; import org.xml.sax.SAXException; import com.evolveum.midpoint.model.common.expression.ExpressionFactory; import com.evolveum.midpoint.model.common.expression.ExpressionTestUtil; import com.evolveum.midpoint.model.common.expression.ObjectDeltaObject; import com.evolveum.midpoint.model.common.expression.Source; import com.evolveum.midpoint.model.common.expression.StringPolicyResolver; import com.evolveum.midpoint.prism.ItemDefinition; import com.evolveum.midpoint.prism.OriginType; import com.evolveum.midpoint.prism.PrismContext; import com.evolveum.midpoint.prism.PrismObject; import com.evolveum.midpoint.prism.PrismObjectDefinition; import com.evolveum.midpoint.prism.PrismPropertyValue; import com.evolveum.midpoint.prism.crypto.AESProtector; import com.evolveum.midpoint.prism.crypto.EncryptionException; import com.evolveum.midpoint.prism.delta.ItemDelta; import com.evolveum.midpoint.prism.delta.ObjectDelta; import com.evolveum.midpoint.prism.delta.PrismValueDeltaSetTriple; import 
com.evolveum.midpoint.prism.path.ItemPath; import com.evolveum.midpoint.prism.util.PrismTestUtil; import com.evolveum.midpoint.schema.MidPointPrismContextFactory; import com.evolveum.midpoint.schema.constants.ExpressionConstants; import com.evolveum.midpoint.schema.constants.MidPointConstants; import com.evolveum.midpoint.schema.constants.SchemaConstants; import com.evolveum.midpoint.schema.result.OperationResult; import com.evolveum.midpoint.schema.util.ObjectResolver; import com.evolveum.midpoint.test.util.DirectoryFileObjectResolver; import com.evolveum.midpoint.test.util.MidPointTestConstants; import com.evolveum.midpoint.test.util.TestUtil; import com.evolveum.midpoint.util.PrettyPrinter; import com.evolveum.midpoint.util.exception.ExpressionEvaluationException; import com.evolveum.midpoint.util.exception.ObjectNotFoundException; import com.evolveum.midpoint.util.exception.SchemaException; import com.evolveum.midpoint.xml.ns._public.common.common_3.MappingType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowType; import com.evolveum.midpoint.xml.ns._public.common.common_3.StringPolicyType; import com.evolveum.midpoint.xml.ns._public.common.common_3.UserType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ValuePolicyType; /** * The class that takes care of all the ornaments of value construction execution. It is used to make the * tests easy to write. 
 * Test helper that builds and evaluates midPoint {@code Mapping} objects from XML
 * files in the test resources directory. It wires up a standalone prism context,
 * expression factory and protector so mappings can be exercised outside a full
 * midPoint deployment.
 *
 * @author Radovan Semancik
 */
public class MappingTestEvaluator {

    // Directory holding the mapping XML fixtures used by the tests.
    public static File TEST_DIR = new File(MidPointTestConstants.TEST_RESOURCES_DIR, "mapping");

    // Fixture objects: the "old" user (pre-delta state) and the account shadow
    // that is exposed to mappings as the $account variable.
    public static final File USER_OLD_FILE = new File(TEST_DIR, "user-jack.xml");
    public static final File ACCOUNT_FILE = new File(TEST_DIR, "account-jack.xml");
    public static final String USER_OLD_OID = "2f9b9299-6f45-498f-bc8e-8d17c6b93b20";
    private static final File PASSWORD_POLICY_FILE = new File(TEST_DIR, "password-policy.xml");

    private PrismContext prismContext;
    private MappingFactory mappingFactory;
    // Package-private so tests can reach the protector directly.
    AESProtector protector;

    public PrismContext getPrismContext() {
        return prismContext;
    }

    /**
     * Initializes the evaluator: resets and creates the prism context, creates a
     * file-based object resolver, an initialized protector and expression factory,
     * and assembles the {@link MappingFactory} from them. Must be called before any
     * of the createMapping/evaluateMapping methods.
     *
     * @throws SAXException if schema parsing fails
     * @throws IOException if a resource file cannot be read
     * @throws SchemaException if prism context initialization fails
     */
    public void init() throws SAXException, IOException, SchemaException {
        PrettyPrinter.setDefaultNamespacePrefix(MidPointConstants.NS_MIDPOINT_PUBLIC_PREFIX);
        PrismTestUtil.resetPrismContext(MidPointPrismContextFactory.FACTORY);

        prismContext = PrismTestUtil.createInitializedPrismContext();
        ObjectResolver resolver = new DirectoryFileObjectResolver(MidPointTestConstants.OBJECTS_DIR);
        protector = ExpressionTestUtil.createInitializedProtector(prismContext);
        ExpressionFactory expressionFactory = ExpressionTestUtil.createInitializedExpressionFactory(resolver, protector, prismContext);

        mappingFactory = new MappingFactory();
        mappingFactory.setExpressionFactory(expressionFactory);
        mappingFactory.setObjectResolver(resolver);
        mappingFactory.setPrismContext(prismContext);
        // Profiling is enabled so evaluation statistics are collected during tests.
        mappingFactory.setProfiling(true);
        mappingFactory.setProtector(protector);
    }

    public AESProtector getProtector() {
        return protector;
    }

    /**
     * Creates a mapping with an explicit string policy; the target property is
     * given by its local name (resolved against the common namespace via toPath).
     */
    public <T> Mapping<PrismPropertyValue<T>> createMapping(String filename, String testName, final StringPolicyType policy, String defaultTargetPropertyName, ObjectDelta<UserType> userDelta) throws SchemaException, IOException, JAXBException, EncryptionException  {
        return createMapping(filename, testName, policy, toPath(defaultTargetPropertyName), userDelta);
    }

    /** Creates a mapping without a string policy; target property by local name. */
    public <T> Mapping<PrismPropertyValue<T>> createMapping(String filename, String testName, String defaultTargetPropertyName, ObjectDelta<UserType> userDelta) throws SchemaException, IOException, JAXBException, EncryptionException  {
        return createMapping(filename, testName, null, toPath(defaultTargetPropertyName), userDelta);
    }

    /** Creates a mapping without a string policy; target property by QName. */
    public <T> Mapping<PrismPropertyValue<T>> createMapping(String filename, String testName, QName defaultTargetPropertyName, ObjectDelta<UserType> userDelta) throws SchemaException, IOException, JAXBException, EncryptionException  {
        return createMapping(filename, testName, null, toPath(defaultTargetPropertyName), userDelta);
    }

    /** Creates a mapping with an explicitly supplied "old" user object. */
    public <T> Mapping<PrismPropertyValue<T>> createMapping(String filename, String testName, String defaultTargetPropertyName, ObjectDelta<UserType> userDelta, PrismObject<UserType> userOld) throws SchemaException, IOException, JAXBException  {
        return createMapping(filename, testName, null, toPath(defaultTargetPropertyName), userDelta, userOld);
    }

    /** Creates a mapping without a string policy; target property by full item path. */
    public <T> Mapping<PrismPropertyValue<T>> createMapping(String filename, String testName, ItemPath defaultTargetPropertyName, ObjectDelta<UserType> userDelta) throws SchemaException, IOException, JAXBException, EncryptionException  {
        return createMapping(filename, testName, null, defaultTargetPropertyName, userDelta);
    }

    /**
     * Creates a mapping, automatically loading the "old" user fixture unless the
     * delta is an ADD delta (in which case there is no pre-existing object).
     */
    public <T> Mapping<PrismPropertyValue<T>> createMapping(String filename, String testName, final StringPolicyType policy, ItemPath defaultTargetPropertyPath, ObjectDelta<UserType> userDelta) throws SchemaException, IOException, JAXBException, EncryptionException  {
        PrismObject<UserType> userOld = null;
        if (userDelta == null || !userDelta.isAdd()) {
            userOld = getUserOld();
        }
        return createMapping(filename, testName, policy, defaultTargetPropertyPath, userDelta, userOld);
    }

    /**
     * The master factory method: parses the mapping definition from {@code filename},
     * sets up the source context (user old-object + delta), the $user and $account
     * variables, the target context (user definition), a string-policy resolver that
     * returns the supplied {@code policy}, and optionally the default target item
     * definition resolved from {@code defaultTargetPropertyPath}.
     *
     * @throws IllegalArgumentException if the default target path has no definition
     *         in the user schema
     */
    public <T> Mapping<PrismPropertyValue<T>> createMapping(String filename, String testName, final StringPolicyType policy, ItemPath defaultTargetPropertyPath, ObjectDelta<UserType> userDelta, PrismObject<UserType> userOld) throws SchemaException, IOException, JAXBException  {

        MappingType mappingType = PrismTestUtil.parseAtomicValue(
                new File(TEST_DIR, filename), MappingType.COMPLEX_TYPE);

        Mapping<PrismPropertyValue<T>> mapping = mappingFactory.createMapping(mappingType, testName);

        // Source context: user (old object + delta, recomputed to get the new state)
        ObjectDeltaObject<UserType> userOdo = new ObjectDeltaObject<UserType>(userOld , userDelta, null);
        userOdo.recompute();
        mapping.setSourceContext(userOdo);

        // Variable $user
        mapping.addVariableDefinition(ExpressionConstants.VAR_USER, userOdo);

        // Variable $account (static fixture, no delta)
        PrismObject<ShadowType> account = getAccount();
        ObjectDeltaObject<ShadowType> accountOdo = new ObjectDeltaObject<ShadowType>(account , null, null);
        accountOdo.recompute();
        mapping.addVariableDefinition(ExpressionConstants.VAR_ACCOUNT, accountOdo);

        // Target context: user
        PrismObjectDefinition<UserType> userDefinition = getUserDefinition();
        mapping.setTargetContext(userDefinition);

        // Resolver that always hands back the policy supplied by the caller;
        // output path/definition setters are recorded but otherwise unused.
        StringPolicyResolver stringPolicyResolver = new StringPolicyResolver() {
            ItemPath outputPath;
            ItemDefinition outputDefinition;

            @Override
            public void setOutputPath(ItemPath outputPath) {
                this.outputPath = outputPath;
            }

            @Override
            public void setOutputDefinition(ItemDefinition outputDefinition) {
                this.outputDefinition = outputDefinition;
            }

            @Override
            public StringPolicyType resolve() {
                return policy;
            }
        };
        mapping.setStringPolicyResolver(stringPolicyResolver);

        // Default target
        if (defaultTargetPropertyPath != null) {
            ItemDefinition targetDefDefinition = userDefinition.findItemDefinition(defaultTargetPropertyPath);
            if (targetDefDefinition == null) {
                throw new IllegalArgumentException("The item path '"+defaultTargetPropertyPath+"' does not have a definition in "+userDefinition);
            }
            mapping.setDefaultTargetDefinition(targetDefDefinition);
        }

        return mapping;
    }

    /**
     * Creates an inbound mapping (account -> user direction): the supplied item
     * delta becomes the default $input source, the user definition is the target,
     * and $user/$focus/$account/$shadow variables are bound. Origin is marked as
     * INBOUND with the resource as the origin object.
     * NOTE: method name keeps the historical "Inboud" spelling; renaming would
     * break existing callers.
     */
    public <T> Mapping<PrismPropertyValue<T>> createInboudMapping(String filename, String testName, ItemDelta delta, UserType user, ShadowType account, ResourceType resource, final StringPolicyType policy) throws SchemaException, IOException, JAXBException{

        MappingType mappingType = PrismTestUtil.parseAtomicValue(
                new File(TEST_DIR, filename), MappingType.COMPLEX_TYPE);

        Mapping<PrismPropertyValue<T>> mapping = mappingFactory.createMapping(mappingType,testName);

        // The changed account attribute is the mapping's default source ($input).
        Source<PrismPropertyValue<T>> defaultSource = new Source<PrismPropertyValue<T>>(null, delta, null, ExpressionConstants.VAR_INPUT);
        defaultSource.recompute();
        mapping.setDefaultSource(defaultSource);
        mapping.setTargetContext(getUserDefinition());

        mapping.addVariableDefinition(ExpressionConstants.VAR_USER, user);
        mapping.addVariableDefinition(ExpressionConstants.VAR_FOCUS, user);
        mapping.addVariableDefinition(ExpressionConstants.VAR_ACCOUNT, account.asPrismObject());
        mapping.addVariableDefinition(ExpressionConstants.VAR_SHADOW, account.asPrismObject());

        // Same fixed-policy resolver as in createMapping().
        StringPolicyResolver stringPolicyResolver = new StringPolicyResolver() {
            ItemPath outputPath;
            ItemDefinition outputDefinition;

            @Override
            public void setOutputPath(ItemPath outputPath) {
                this.outputPath = outputPath;
            }

            @Override
            public void setOutputDefinition(ItemDefinition outputDefinition) {
                this.outputDefinition = outputDefinition;
            }

            @Override
            public StringPolicyType resolve() {
                return policy;
            }
        };
        mapping.setStringPolicyResolver(stringPolicyResolver);

        mapping.setOriginType(OriginType.INBOUND);
        mapping.setOriginObject(resource);

        return mapping;
    }

    /**
     * Loads the "old" user fixture and encrypts its clear-text password so the
     * object is in the same form as a repository-stored user.
     */
    protected PrismObject<UserType> getUserOld() throws SchemaException, EncryptionException, IOException {
        PrismObject<UserType> user = PrismTestUtil.parseObject(USER_OLD_FILE);
        ProtectedStringType passwordPs = user.asObjectable().getCredentials().getPassword().getValue();
        protector.encrypt(passwordPs);
        return user;
    }

    /** Loads the account shadow fixture used as the $account variable. */
    protected PrismObject<ShadowType> getAccount() throws SchemaException, IOException {
        return PrismTestUtil.parseObject(ACCOUNT_FILE);
    }

    public PrismObjectDefinition<UserType> getUserDefinition() {
        return prismContext.getSchemaRegistry().findObjectDefinitionByCompileTimeClass(UserType.class);
    }

    /** Creates a mapping (no delta), evaluates it and returns the output triple. */
    public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMapping(String filename, String testName, ItemPath defaultTargetPropertyPath)
            throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException  {
        Mapping<PrismPropertyValue<T>> mapping = createMapping(filename, testName, defaultTargetPropertyPath, null);
        OperationResult opResult = new OperationResult(testName);
        mapping.evaluate(null, opResult);
        assertResult(opResult);
        return mapping.getOutputTriple();
    }

    /** Variant of {@code evaluateMapping} taking the target property as a QName. */
    public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMapping(String filename, String testName, QName defaultTargetPropertyName)
            throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException  {
        Mapping<PrismPropertyValue<T>> mapping = createMapping(filename, testName, defaultTargetPropertyName, null);
        OperationResult opResult = new OperationResult(testName);
        mapping.evaluate(null, opResult);
        assertResult(opResult);
        return mapping.getOutputTriple();
    }

    /** Variant of {@code evaluateMapping} taking the target property local name. */
    public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMapping(String filename, String testName, String defaultTargetPropertyName)
            throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException  {
        Mapping<PrismPropertyValue<T>> mapping = createMapping(filename, testName, defaultTargetPropertyName, null);
        OperationResult opResult = new OperationResult(testName);
        mapping.evaluate(null, opResult);
        assertResult(opResult);
        return mapping.getOutputTriple();
    }

    /** Asserts success of an operation result; an empty result counts as OK. */
    public void assertResult(OperationResult opResult) {
        if (opResult.isEmpty()) {
            // this is OK. Nothing added to result.
            return;
        }
        opResult.computeStatus();
        TestUtil.assertSuccess(opResult);
    }

    /** Evaluates a mapping with an ADD modification on the named user property. */
    public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMappingDynamicAdd(String filename, String testName,
            String defaultTargetPropertyName, String changedPropertyName, I... valuesToAdd)
            throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException  {
        return evaluateMappingDynamicAdd(filename, testName, toPath(defaultTargetPropertyName), changedPropertyName, valuesToAdd);
    }

    /** Evaluates a mapping with an ADD modification; target given as item path. */
    public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMappingDynamicAdd(String filename, String testName,
            ItemPath defaultTargetPropertyPath, String changedPropertyName, I... valuesToAdd)
            throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException  {
        ObjectDelta<UserType> userDelta = ObjectDelta.createModificationAddProperty(UserType.class, USER_OLD_OID,
                toPath(changedPropertyName), prismContext, valuesToAdd);
        Mapping<PrismPropertyValue<T>> mapping = createMapping(filename, testName, defaultTargetPropertyPath, userDelta);
        OperationResult opResult = new OperationResult(testName);
        mapping.evaluate(null, opResult);
        assertResult(opResult);
        return mapping.getOutputTriple();
    }

    /** Evaluates a mapping with a DELETE modification on the named user property. */
    public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMappingDynamicDelete(String filename, String testName,
            String defaultTargetPropertyName, String changedPropertyName, I... valuesToAdd)
            throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException  {
        ObjectDelta<UserType> userDelta = ObjectDelta.createModificationDeleteProperty(UserType.class, USER_OLD_OID,
                toPath(changedPropertyName), prismContext, valuesToAdd);
        Mapping<PrismPropertyValue<T>> mapping = createMapping(filename, testName, defaultTargetPropertyName, userDelta);
        OperationResult opResult = new OperationResult(testName);
        mapping.evaluate(null, opResult);
        assertResult(opResult);
        return mapping.getOutputTriple();
    }

    /** Evaluates a mapping with a REPLACE modification; both names as local names. */
    public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMappingDynamicReplace(String filename, String testName,
            String defaultTargetPropertyName, String changedPropertyName, I... valuesToReplace)
            throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException  {
        ObjectDelta<UserType> userDelta = ObjectDelta.createModificationReplaceProperty(UserType.class, USER_OLD_OID,
                toPath(changedPropertyName), prismContext, valuesToReplace);
        Mapping<PrismPropertyValue<T>> mapping = createMapping(filename, testName, defaultTargetPropertyName, userDelta);
        OperationResult opResult = new OperationResult(testName);
        mapping.evaluate(null, opResult);
        assertResult(opResult);
        return mapping.getOutputTriple();
    }

    /** REPLACE variant: changed property given as a full item path. */
    public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMappingDynamicReplace(String filename, String testName,
            String defaultTargetPropertyName, ItemPath changedPropertyName, I... valuesToReplace)
            throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException  {
        ObjectDelta<UserType> userDelta = ObjectDelta.createModificationReplaceProperty(UserType.class, USER_OLD_OID,
                changedPropertyName, prismContext, valuesToReplace);
        Mapping<PrismPropertyValue<T>> mapping = createMapping(filename, testName, defaultTargetPropertyName, userDelta);
        OperationResult opResult = new OperationResult(testName);
        mapping.evaluate(null, opResult);
        assertResult(opResult);
        return mapping.getOutputTriple();
    }

    /** REPLACE variant: target given as a full item path. */
    public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMappingDynamicReplace(String filename, String testName,
            ItemPath defaultTargetPropertyName, String changedPropertyName, I... valuesToReplace)
            throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException  {
        ObjectDelta<UserType> userDelta = ObjectDelta.createModificationReplaceProperty(UserType.class, USER_OLD_OID,
                toPath(changedPropertyName), prismContext, valuesToReplace);
        Mapping<PrismPropertyValue<T>> mapping = createMapping(filename, testName, defaultTargetPropertyName, userDelta);
        OperationResult opResult = new OperationResult(testName);
        mapping.evaluate(null, opResult);
        assertResult(opResult);
        return mapping.getOutputTriple();
    }

    /** REPLACE variant: both target and changed property given as item paths. */
    public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMappingDynamicReplace(String filename, String testName,
            ItemPath defaultTargetPropertyName, ItemPath changedPropertyName, I... valuesToReplace)
            throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException  {
        ObjectDelta<UserType> userDelta = ObjectDelta.createModificationReplaceProperty(UserType.class, USER_OLD_OID,
                changedPropertyName, prismContext, valuesToReplace);
        Mapping<PrismPropertyValue<T>> mapping = createMapping(filename, testName, defaultTargetPropertyName, userDelta);
        OperationResult opResult = new OperationResult(testName);
        mapping.evaluate(null, opResult);
        assertResult(opResult);
        return mapping.getOutputTriple();
    }

    /** Resolves a local property name against the common (NS_C) namespace. */
    public ItemPath toPath(String propertyName) {
        return new ItemPath(new QName(SchemaConstants.NS_C, propertyName));
    }

    public ItemPath toPath(QName propertyName) {
        return new ItemPath(propertyName);
    }

    /** Asserts the set holds exactly one value and returns its real value. */
    public static <T> T getSingleValue(String setName, Collection<PrismPropertyValue<T>> set) {
        assertEquals("Expected single value in "+setName+" but found "+set.size()+" values: "+set, 1, set.size());
        PrismPropertyValue<T> propertyValue = set.iterator().next();
        return propertyValue.getValue();
    }

    /** Loads the password-policy fixture and returns its string policy part. */
    public StringPolicyType getStringPolicy() throws SchemaException, IOException {
        PrismObject<ValuePolicyType> passwordPolicy = PrismTestUtil.parseObject(PASSWORD_POLICY_FILE);
        return passwordPolicy.asObjectable().getStringPolicy();
    }

    public Object createProtectedString(String string) throws EncryptionException {
        return protector.encryptString(string);
    }

    /**
     * Asserts that the set holds exactly one protected string whose decrypted
     * value equals {@code expected}.
     */
    public void assertProtectedString(String desc, Collection<PrismPropertyValue<ProtectedStringType>> set, String expected) throws EncryptionException {
        assertEquals("Unexpected size of "+desc+": "+set, 1, set.size());
        PrismPropertyValue<ProtectedStringType> pval = set.iterator().next();
        ProtectedStringType ps = pval.getValue();
        String zeroString = protector.decryptString(ps);
        assertEquals("Unexpected value in "+desc+": "+set, expected, zeroString);
    }

}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.client; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.fail; import java.io.IOException; import java.util.HashSet; import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.LargeTests; import org.apache.hadoop.hbase.master.MasterFileSystem; import org.apache.hadoop.hbase.master.snapshot.SnapshotManager; import org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException; import org.apache.hadoop.hbase.snapshot.CorruptedSnapshotException; import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FSUtils; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; /** * Test clone/restore snapshots from the client */ 
@Category(LargeTests.class)
public class TestRestoreSnapshotFromClient {
  final Log LOG = LogFactory.getLog(getClass());

  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

  // Single column family used by the base table in every test.
  private final byte[] FAMILY = Bytes.toBytes("cf");

  // Per-test snapshot names / row counts, regenerated in setup() with a
  // timestamp suffix so concurrent runs do not collide.
  private byte[] emptySnapshot;
  private byte[] snapshotName0;
  private byte[] snapshotName1;
  private byte[] snapshotName2;
  private int snapshot0Rows;
  private int snapshot1Rows;
  private byte[] tableName;
  private HBaseAdmin admin;

  /**
   * Starts a 3-node mini cluster with snapshots and online schema updates
   * enabled, and with aggressive timings so the tests run quickly.
   */
  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    TEST_UTIL.getConfiguration().setBoolean(SnapshotManager.HBASE_SNAPSHOT_ENABLED, true);
    TEST_UTIL.getConfiguration().setBoolean("hbase.online.schema.update.enable", true);
    TEST_UTIL.getConfiguration().setInt("hbase.hstore.compactionThreshold", 10);
    TEST_UTIL.getConfiguration().setInt("hbase.regionserver.msginterval", 100);
    TEST_UTIL.getConfiguration().setInt("hbase.client.pause", 250);
    TEST_UTIL.getConfiguration().setInt("hbase.client.retries.number", 6);
    TEST_UTIL.getConfiguration().setBoolean(
        "hbase.master.enabletable.roundrobin", true);
    TEST_UTIL.startMiniCluster(3);
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

  /**
   * Initialize the tests with a table filled with some data
   * and two snapshots (snapshotName0, snapshotName1) of different states.
   * The tableName, snapshotNames and the number of rows in the snapshot are initialized.
   */
  @Before
  public void setup() throws Exception {
    this.admin = TEST_UTIL.getHBaseAdmin();

    long tid = System.currentTimeMillis();
    tableName = Bytes.toBytes("testtb-" + tid);
    emptySnapshot = Bytes.toBytes("emptySnaptb-" + tid);
    snapshotName0 = Bytes.toBytes("snaptb0-" + tid);
    snapshotName1 = Bytes.toBytes("snaptb1-" + tid);
    snapshotName2 = Bytes.toBytes("snaptb2-" + tid);

    // create Table and disable it
    SnapshotTestingUtils.createTable(TEST_UTIL, tableName, FAMILY);
    admin.disableTable(tableName);

    // take an empty snapshot
    admin.snapshot(emptySnapshot, tableName);

    HTable table = new HTable(TEST_UTIL.getConfiguration(), tableName);

    // enable table and insert data
    admin.enableTable(tableName);
    SnapshotTestingUtils.loadData(TEST_UTIL, table, 500, FAMILY);
    snapshot0Rows = TEST_UTIL.countRows(table);
    admin.disableTable(tableName);

    // take a snapshot
    admin.snapshot(snapshotName0, tableName);

    // enable table and insert more data
    admin.enableTable(tableName);
    SnapshotTestingUtils.loadData(TEST_UTIL, table, 500, FAMILY);
    snapshot1Rows = TEST_UTIL.countRows(table);
    admin.disableTable(tableName);

    // take a snapshot of the updated table
    admin.snapshot(snapshotName1, tableName);

    // re-enable table
    admin.enableTable(tableName);
    table.close();
  }

  @After
  public void tearDown() throws Exception {
    TEST_UTIL.deleteTable(tableName);
    SnapshotTestingUtils.deleteAllSnapshots(TEST_UTIL.getHBaseAdmin());
    SnapshotTestingUtils.deleteArchiveDirectory(TEST_UTIL);
  }

  /**
   * Round-trips through each snapshot: full table -> snapshot0 state ->
   * empty state -> back to snapshot1 state, verifying row counts each time.
   * Each restore requires the table to be disabled first.
   */
  @Test
  public void testRestoreSnapshot() throws IOException {
    SnapshotTestingUtils.verifyRowCount(TEST_UTIL, tableName, snapshot1Rows);

    // Restore from snapshot-0
    admin.disableTable(tableName);
    admin.restoreSnapshot(snapshotName0);
    admin.enableTable(tableName);
    SnapshotTestingUtils.verifyRowCount(TEST_UTIL, tableName, snapshot0Rows);

    // Restore from emptySnapshot
    admin.disableTable(tableName);
    admin.restoreSnapshot(emptySnapshot);
    admin.enableTable(tableName);
    SnapshotTestingUtils.verifyRowCount(TEST_UTIL, tableName, 0);

    // Restore from snapshot-1
    admin.disableTable(tableName);
    admin.restoreSnapshot(snapshotName1);
    admin.enableTable(tableName);
    SnapshotTestingUtils.verifyRowCount(TEST_UTIL, tableName, snapshot1Rows);
  }

  /**
   * Verifies that restoring a snapshot also restores the table schema:
   * adds a second column family, snapshots that state, restores the old
   * one-family snapshot (new family must be gone from schema and FS), then
   * restores back the two-family snapshot (data must reappear).
   * NOTE(review): {@code table} is reused after {@code table.close()};
   * this relies on old-HTable behavior — confirm against the HTable version in use.
   */
  @Test
  public void testRestoreSchemaChange() throws Exception {
    byte[] TEST_FAMILY2 = Bytes.toBytes("cf2");

    HTable table = new HTable(TEST_UTIL.getConfiguration(), tableName);

    // Add one column family and put some data in it
    admin.disableTable(tableName);
    admin.addColumn(tableName, new HColumnDescriptor(TEST_FAMILY2));
    admin.enableTable(tableName);
    assertEquals(2, table.getTableDescriptor().getFamilies().size());
    HTableDescriptor htd = admin.getTableDescriptor(tableName);
    assertEquals(2, htd.getFamilies().size());
    SnapshotTestingUtils.loadData(TEST_UTIL, table, 500, TEST_FAMILY2);
    long snapshot2Rows = snapshot1Rows + 500;
    assertEquals(snapshot2Rows, TEST_UTIL.countRows(table));
    assertEquals(500, TEST_UTIL.countRows(table, TEST_FAMILY2));
    Set<String> fsFamilies = getFamiliesFromFS(tableName);
    assertEquals(2, fsFamilies.size());
    table.close();

    // Take a snapshot
    admin.disableTable(tableName);
    admin.snapshot(snapshotName2, tableName);

    // Restore the snapshot (without the cf)
    admin.restoreSnapshot(snapshotName0);
    assertEquals(1, table.getTableDescriptor().getFamilies().size());
    admin.enableTable(tableName);
    try {
      TEST_UTIL.countRows(table, TEST_FAMILY2);
      fail("family '" + Bytes.toString(TEST_FAMILY2) + "' should not exists");
    } catch (NoSuchColumnFamilyException e) {
      // expected
    }
    assertEquals(snapshot0Rows, TEST_UTIL.countRows(table));
    htd = admin.getTableDescriptor(tableName);
    assertEquals(1, htd.getFamilies().size());
    fsFamilies = getFamiliesFromFS(tableName);
    assertEquals(1, fsFamilies.size());
    table.close();

    // Restore back the snapshot (with the cf)
    admin.disableTable(tableName);
    admin.restoreSnapshot(snapshotName2);
    admin.enableTable(tableName);
    htd = admin.getTableDescriptor(tableName);
    assertEquals(2, htd.getFamilies().size());
    assertEquals(2, table.getTableDescriptor().getFamilies().size());
    assertEquals(500, TEST_UTIL.countRows(table, TEST_FAMILY2));
    assertEquals(snapshot2Rows, TEST_UTIL.countRows(table));
    fsFamilies = getFamiliesFromFS(tableName);
    assertEquals(2, fsFamilies.size());
    table.close();
  }

  /**
   * Clones a table from a snapshot, snapshots the clone, deletes the clone,
   * and clones again from the second snapshot — i.e. a snapshot of a cloned
   * table must itself be restorable.
   */
  @Test
  public void testCloneSnapshotOfCloned() throws IOException, InterruptedException {
    byte[] clonedTableName = Bytes.toBytes("clonedtb-" + System.currentTimeMillis());
    admin.cloneSnapshot(snapshotName0, clonedTableName);
    SnapshotTestingUtils.verifyRowCount(TEST_UTIL, clonedTableName, snapshot0Rows);
    admin.disableTable(clonedTableName);
    admin.snapshot(snapshotName2, clonedTableName);
    admin.deleteTable(clonedTableName);
    waitCleanerRun();

    admin.cloneSnapshot(snapshotName2, clonedTableName);
    SnapshotTestingUtils.verifyRowCount(TEST_UTIL, clonedTableName, snapshot0Rows);
    TEST_UTIL.deleteTable(clonedTableName);
  }

  /**
   * Deletes the original table, clones it back from a snapshot, then restores
   * that same snapshot onto the clone — exercising the cleaner chore between
   * operations so archived HFiles are actually processed.
   */
  @Test
  public void testCloneAndRestoreSnapshot() throws IOException, InterruptedException {
    TEST_UTIL.deleteTable(tableName);
    waitCleanerRun();

    admin.cloneSnapshot(snapshotName0, tableName);
    SnapshotTestingUtils.verifyRowCount(TEST_UTIL, tableName, snapshot0Rows);
    waitCleanerRun();

    admin.disableTable(tableName);
    admin.restoreSnapshot(snapshotName0);
    admin.enableTable(tableName);
    SnapshotTestingUtils.verifyRowCount(TEST_UTIL, tableName, snapshot0Rows);
  }

  /**
   * Corrupts a snapshot on disk and verifies that cloning it fails with
   * CorruptedSnapshotException and leaves no trace of the clone table.
   */
  @Test
  public void testCorruptedSnapshot() throws IOException, InterruptedException {
    SnapshotTestingUtils.corruptSnapshot(TEST_UTIL, Bytes.toString(snapshotName0));
    byte[] cloneName = Bytes.toBytes("corruptedClone-" + System.currentTimeMillis());
    try {
      admin.cloneSnapshot(snapshotName0, cloneName);
      fail("Expected CorruptedSnapshotException, got succeeded cloneSnapshot()");
    } catch (CorruptedSnapshotException e) {
      // Got the expected corruption exception.
      // check for no references of the cloned table.
      assertFalse(admin.tableExists(cloneName));
    } catch (Exception e) {
      fail("Expected CorruptedSnapshotException got: " + e);
    }
  }

  // ==========================================================================
  //  Helpers
  // ==========================================================================

  /** Triggers one pass of the master's HFile cleaner chore. */
  private void waitCleanerRun() throws InterruptedException {
    TEST_UTIL.getMiniHBaseCluster().getMaster().getHFileCleaner().choreForTesting();
  }

  /**
   * Lists the column-family directory names actually present under the table's
   * directory on the master filesystem (ground truth vs the schema).
   */
  private Set<String> getFamiliesFromFS(final byte[] tableName) throws IOException {
    MasterFileSystem mfs = TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterFileSystem();
    Set<String> families = new HashSet<String>();
    Path tableDir = HTableDescriptor.getTableDir(mfs.getRootDir(), tableName);
    for (Path regionDir: FSUtils.getRegionDirs(mfs.getFileSystem(), tableDir)) {
      for (Path familyDir: FSUtils.getFamilyDirs(mfs.getFileSystem(), regionDir)) {
        families.add(familyDir.getName());
      }
    }
    return families;
  }
}
/* * Copyright (c) 2012-2016 The ANTLR Project. All rights reserved. * Use of this file is governed by the BSD 3-clause license that * can be found in the LICENSE.txt file in the project root. */ package org.antlr.v4.automata; import org.antlr.runtime.CommonToken; import org.antlr.runtime.Token; import org.antlr.v4.codegen.CodeGenerator; import org.antlr.v4.misc.CharSupport; import org.antlr.v4.parse.ANTLRParser; import org.antlr.v4.runtime.IntStream; import org.antlr.v4.runtime.Lexer; import org.antlr.v4.runtime.atn.ATN; import org.antlr.v4.runtime.atn.ATNState; import org.antlr.v4.runtime.atn.ActionTransition; import org.antlr.v4.runtime.atn.AtomTransition; import org.antlr.v4.runtime.atn.LexerAction; import org.antlr.v4.runtime.atn.LexerChannelAction; import org.antlr.v4.runtime.atn.LexerCustomAction; import org.antlr.v4.runtime.atn.LexerModeAction; import org.antlr.v4.runtime.atn.LexerMoreAction; import org.antlr.v4.runtime.atn.LexerPopModeAction; import org.antlr.v4.runtime.atn.LexerPushModeAction; import org.antlr.v4.runtime.atn.LexerSkipAction; import org.antlr.v4.runtime.atn.LexerTypeAction; import org.antlr.v4.runtime.atn.NotSetTransition; import org.antlr.v4.runtime.atn.RangeTransition; import org.antlr.v4.runtime.atn.RuleStartState; import org.antlr.v4.runtime.atn.SetTransition; import org.antlr.v4.runtime.atn.TokensStartState; import org.antlr.v4.runtime.atn.Transition; import org.antlr.v4.runtime.misc.Interval; import org.antlr.v4.runtime.misc.IntervalSet; import org.antlr.v4.tool.ErrorType; import org.antlr.v4.tool.LexerGrammar; import org.antlr.v4.tool.Rule; import org.antlr.v4.tool.ast.ActionAST; import org.antlr.v4.tool.ast.GrammarAST; import org.antlr.v4.tool.ast.RangeAST; import org.antlr.v4.tool.ast.TerminalAST; import org.stringtemplate.v4.ST; import org.stringtemplate.v4.STGroup; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; public class LexerATNFactory extends 
ParserATNFactory { public STGroup codegenTemplates; /** * Provides a map of names of predefined constants which are likely to * appear as the argument for lexer commands. These names would be resolved * by the Java compiler for lexer commands that are translated to embedded * actions, but are required during code generation for creating * {@link LexerAction} instances that are usable by a lexer interpreter. */ public static final Map<String, Integer> COMMON_CONSTANTS = new HashMap<String, Integer>(); static { COMMON_CONSTANTS.put("HIDDEN", Lexer.HIDDEN); COMMON_CONSTANTS.put("DEFAULT_TOKEN_CHANNEL", Lexer.DEFAULT_TOKEN_CHANNEL); COMMON_CONSTANTS.put("DEFAULT_MODE", Lexer.DEFAULT_MODE); COMMON_CONSTANTS.put("SKIP", Lexer.SKIP); COMMON_CONSTANTS.put("MORE", Lexer.MORE); COMMON_CONSTANTS.put("EOF", Lexer.EOF); COMMON_CONSTANTS.put("MAX_CHAR_VALUE", Lexer.MAX_CHAR_VALUE); COMMON_CONSTANTS.put("MIN_CHAR_VALUE", Lexer.MIN_CHAR_VALUE); } private List<String> ruleCommands = new ArrayList<String>(); /** * Maps from an action index to a {@link LexerAction} object. */ protected Map<Integer, LexerAction> indexToActionMap = new HashMap<Integer, LexerAction>(); /** * Maps from a {@link LexerAction} object to the action index. 
*/ protected Map<LexerAction, Integer> actionToIndexMap = new HashMap<LexerAction, Integer>(); public LexerATNFactory(LexerGrammar g) { super(g); // use codegen to get correct language templates for lexer commands String language = g.getOptionString("language"); CodeGenerator gen = new CodeGenerator(g.tool, null, language); codegenTemplates = gen.getTemplates(); } public static Set<String> getCommonConstants() { return COMMON_CONSTANTS.keySet(); } @Override public ATN createATN() { // BUILD ALL START STATES (ONE PER MODE) Set<String> modes = ((LexerGrammar) g).modes.keySet(); for (String modeName : modes) { // create s0, start state; implied Tokens rule node TokensStartState startState = newState(TokensStartState.class, null); atn.modeNameToStartState.put(modeName, startState); atn.modeToStartState.add(startState); atn.defineDecisionState(startState); } // INIT ACTION, RULE->TOKEN_TYPE MAP atn.ruleToTokenType = new int[g.rules.size()]; for (Rule r : g.rules.values()) { atn.ruleToTokenType[r.index] = g.getTokenType(r.name); } // CREATE ATN FOR EACH RULE _createATN(g.rules.values()); atn.lexerActions = new LexerAction[indexToActionMap.size()]; for (Map.Entry<Integer, LexerAction> entry : indexToActionMap.entrySet()) { atn.lexerActions[entry.getKey()] = entry.getValue(); } // LINK MODE START STATE TO EACH TOKEN RULE for (String modeName : modes) { List<Rule> rules = ((LexerGrammar)g).modes.get(modeName); TokensStartState startState = atn.modeNameToStartState.get(modeName); for (Rule r : rules) { if ( !r.isFragment() ) { RuleStartState s = atn.ruleToStartState[r.index]; epsilon(startState, s); } } } ATNOptimizer.optimize(g, atn); return atn; } @Override public Handle rule(GrammarAST ruleAST, String name, Handle blk) { ruleCommands.clear(); return super.rule(ruleAST, name, blk); } @Override public Handle action(ActionAST action) { int ruleIndex = currentRule.index; int actionIndex = g.lexerActions.get(action); LexerCustomAction lexerAction = new 
LexerCustomAction(ruleIndex, actionIndex); return action(action, lexerAction); } protected int getLexerActionIndex(LexerAction lexerAction) { Integer lexerActionIndex = actionToIndexMap.get(lexerAction); if (lexerActionIndex == null) { lexerActionIndex = actionToIndexMap.size(); actionToIndexMap.put(lexerAction, lexerActionIndex); indexToActionMap.put(lexerActionIndex, lexerAction); } return lexerActionIndex; } @Override public Handle action(String action) { if (action.trim().isEmpty()) { ATNState left = newState(null); ATNState right = newState(null); epsilon(left, right); return new Handle(left, right); } // define action AST for this rule as if we had found in grammar ActionAST ast = new ActionAST(new CommonToken(ANTLRParser.ACTION, action)); currentRule.defineActionInAlt(currentOuterAlt, ast); return action(ast); } protected Handle action(GrammarAST node, LexerAction lexerAction) { ATNState left = newState(node); ATNState right = newState(node); boolean isCtxDependent = false; int lexerActionIndex = getLexerActionIndex(lexerAction); ActionTransition a = new ActionTransition(right, currentRule.index, lexerActionIndex, isCtxDependent); left.addTransition(a); node.atnState = left; Handle h = new Handle(left, right); return h; } @Override public Handle lexerAltCommands(Handle alt, Handle cmds) { Handle h = new Handle(alt.left, cmds.right); epsilon(alt.right, cmds.left); return h; } @Override public Handle lexerCallCommand(GrammarAST ID, GrammarAST arg) { LexerAction lexerAction = createLexerAction(ID, arg); if (lexerAction != null) { return action(ID, lexerAction); } // fall back to standard action generation for the command ST cmdST = codegenTemplates.getInstanceOf("Lexer" + CharSupport.capitalize(ID.getText())+ "Command"); if (cmdST == null) { g.tool.errMgr.grammarError(ErrorType.INVALID_LEXER_COMMAND, g.fileName, ID.token, ID.getText()); return epsilon(ID); } if (cmdST.impl.formalArguments == null || !cmdST.impl.formalArguments.containsKey("arg")) { 
g.tool.errMgr.grammarError(ErrorType.UNWANTED_LEXER_COMMAND_ARGUMENT, g.fileName, ID.token, ID.getText()); return epsilon(ID); } cmdST.add("arg", arg.getText()); cmdST.add("grammar", arg.g); return action(cmdST.render()); } @Override public Handle lexerCommand(GrammarAST ID) { LexerAction lexerAction = createLexerAction(ID, null); if (lexerAction != null) { return action(ID, lexerAction); } // fall back to standard action generation for the command ST cmdST = codegenTemplates.getInstanceOf("Lexer" + CharSupport.capitalize(ID.getText()) + "Command"); if (cmdST == null) { g.tool.errMgr.grammarError(ErrorType.INVALID_LEXER_COMMAND, g.fileName, ID.token, ID.getText()); return epsilon(ID); } if (cmdST.impl.formalArguments != null && cmdST.impl.formalArguments.containsKey("arg")) { g.tool.errMgr.grammarError(ErrorType.MISSING_LEXER_COMMAND_ARGUMENT, g.fileName, ID.token, ID.getText()); return epsilon(ID); } return action(cmdST.render()); } @Override public Handle range(GrammarAST a, GrammarAST b) { ATNState left = newState(a); ATNState right = newState(b); int t1 = CharSupport.getCharValueFromGrammarCharLiteral(a.getText()); int t2 = CharSupport.getCharValueFromGrammarCharLiteral(b.getText()); checkRange(a, b, t1, t2); left.addTransition(new RangeTransition(right, t1, t2)); a.atnState = left; b.atnState = left; return new Handle(left, right); } @Override public Handle set(GrammarAST associatedAST, List<GrammarAST> alts, boolean invert) { ATNState left = newState(associatedAST); ATNState right = newState(associatedAST); IntervalSet set = new IntervalSet(); for (GrammarAST t : alts) { if ( t.getType()==ANTLRParser.RANGE ) { int a = CharSupport.getCharValueFromGrammarCharLiteral(t.getChild(0).getText()); int b = CharSupport.getCharValueFromGrammarCharLiteral(t.getChild(1).getText()); if (checkRange((GrammarAST) t.getChild(0), (GrammarAST) t.getChild(1), a, b)) { checkSetCollision(associatedAST, set, a, b); set.add(a,b); } } else if ( t.getType()==ANTLRParser.LEXER_CHAR_SET 
) { set.addAll(getSetFromCharSetLiteral(t)); } else if ( t.getType()==ANTLRParser.STRING_LITERAL ) { int c = CharSupport.getCharValueFromGrammarCharLiteral(t.getText()); if ( c != -1 ) { checkSetCollision(associatedAST, set, c); set.add(c); } else { g.tool.errMgr.grammarError(ErrorType.INVALID_LITERAL_IN_LEXER_SET, g.fileName, t.getToken(), t.getText()); } } else if ( t.getType()==ANTLRParser.TOKEN_REF ) { g.tool.errMgr.grammarError(ErrorType.UNSUPPORTED_REFERENCE_IN_LEXER_SET, g.fileName, t.getToken(), t.getText()); } } if ( invert ) { left.addTransition(new NotSetTransition(right, set)); } else { Transition transition; if (set.getIntervals().size() == 1) { Interval interval = set.getIntervals().get(0); transition = new RangeTransition(right, interval.a, interval.b); } else { transition = new SetTransition(right, set); } left.addTransition(transition); } associatedAST.atnState = left; return new Handle(left, right); } protected boolean checkRange(GrammarAST leftNode, GrammarAST rightNode, int leftValue, int rightValue) { boolean result = true; if (leftValue == -1) { result = false; g.tool.errMgr.grammarError(ErrorType.INVALID_LITERAL_IN_LEXER_SET, g.fileName, leftNode.getToken(), leftNode.getText()); } if (rightValue == -1) { result = false; g.tool.errMgr.grammarError(ErrorType.INVALID_LITERAL_IN_LEXER_SET, g.fileName, rightNode.getToken(), rightNode.getText()); } if (!result) return result; if (rightValue < leftValue) { g.tool.errMgr.grammarError(ErrorType.EMPTY_STRINGS_AND_SETS_NOT_ALLOWED, g.fileName, leftNode.parent.getToken(), leftNode.getText() + ".." + rightNode.getText()); } return result; } /** For a lexer, a string is a sequence of char to match. That is, * "fog" is treated as 'f' 'o' 'g' not as a single transition in * the DFA. Machine== o-'f'-&gt;o-'o'-&gt;o-'g'-&gt;o and has n+1 states * for n characters. 
*/ @Override public Handle stringLiteral(TerminalAST stringLiteralAST) { String chars = stringLiteralAST.getText(); ATNState left = newState(stringLiteralAST); ATNState right; chars = CharSupport.getStringFromGrammarStringLiteral(chars); if (chars == null) { g.tool.errMgr.grammarError(ErrorType.INVALID_ESCAPE_SEQUENCE, g.fileName, stringLiteralAST.getToken()); return new Handle(left, left); } int n = chars.length(); ATNState prev = left; right = null; for (int i = 0; i < n; i++) { right = newState(stringLiteralAST); prev.addTransition(new AtomTransition(right, chars.charAt(i))); prev = right; } stringLiteralAST.atnState = left; return new Handle(left, right); } /** [Aa\t \u1234a-z\]\-] char sets */ @Override public Handle charSetLiteral(GrammarAST charSetAST) { ATNState left = newState(charSetAST); ATNState right = newState(charSetAST); IntervalSet set = getSetFromCharSetLiteral(charSetAST); left.addTransition(new SetTransition(right, set)); charSetAST.atnState = left; return new Handle(left, right); } public IntervalSet getSetFromCharSetLiteral(GrammarAST charSetAST) { String chars = charSetAST.getText(); chars = chars.substring(1, chars.length() - 1); String cset = '"' + chars + '"'; IntervalSet set = new IntervalSet(); if (chars.length() == 0) { g.tool.errMgr.grammarError(ErrorType.EMPTY_STRINGS_AND_SETS_NOT_ALLOWED, g.fileName, charSetAST.getToken(), "[]"); return set; } // unescape all valid escape char like \n, leaving escaped dashes as '\-' // so we can avoid seeing them as '-' range ops. 
chars = CharSupport.getStringFromGrammarStringLiteral(cset); if (chars == null) { g.tool.errMgr.grammarError(ErrorType.INVALID_ESCAPE_SEQUENCE, g.fileName, charSetAST.getToken()); return set; } int n = chars.length(); // now make x-y become set of char for (int i = 0; i < n; i++) { int c = chars.charAt(i); if (c == '\\' && i+1 < n && chars.charAt(i+1) == '-') { // \- checkSetCollision(charSetAST, set, '-'); set.add('-'); i++; } else if (i+2 < n && chars.charAt(i+1) == '-') { // range x-y int x = c; int y = chars.charAt(i+2); if (x <= y) { checkSetCollision(charSetAST, set, x, y); set.add(x,y); } else { g.tool.errMgr.grammarError(ErrorType.EMPTY_STRINGS_AND_SETS_NOT_ALLOWED, g.fileName, charSetAST.getToken(), "[" + (char) x + "-" + (char) y + "]"); } i += 2; } else { checkSetCollision(charSetAST, set, c); set.add(c); } } return set; } protected void checkSetCollision(GrammarAST ast, IntervalSet set, int el) { if (set.contains(el)) { g.tool.errMgr.grammarError(ErrorType.CHARACTERS_COLLISION_IN_SET, g.fileName, ast.getToken(), (char)el, ast.getText()); } } protected void checkSetCollision(GrammarAST ast, IntervalSet set, int a, int b) { for (int i = a; i <= b; i++) { if (set.contains(i)) { String setText; if (ast.getChildren() == null) { setText = ast.getText(); } else { StringBuilder sb = new StringBuilder(); for (Object child : ast.getChildren()) { if (child instanceof RangeAST) { sb.append(((RangeAST) child).getChild(0).getText()); sb.append(".."); sb.append(((RangeAST) child).getChild(1).getText()); } else { sb.append(((GrammarAST)child).getText()); } sb.append(" | "); } sb.replace(sb.length() - 3, sb.length(), ""); setText = sb.toString(); } g.tool.errMgr.grammarError(ErrorType.CHARACTERS_COLLISION_IN_SET, g.fileName, ast.getToken(), (char)a + "-" + (char)b, setText); break; } } } @Override public Handle tokenRef(TerminalAST node) { // Ref to EOF in lexer yields char transition on -1 if (node.getText().equals("EOF") ) { ATNState left = newState(node); ATNState 
right = newState(node); left.addTransition(new AtomTransition(right, IntStream.EOF)); return new Handle(left, right); } return _ruleRef(node); } private LexerAction createLexerAction(GrammarAST ID, GrammarAST arg) { String command = ID.getText(); checkCommands(command, ID.getToken()); if ("skip".equals(command) && arg == null) { return LexerSkipAction.INSTANCE; } else if ("more".equals(command) && arg == null) { return LexerMoreAction.INSTANCE; } else if ("popMode".equals(command) && arg == null) { return LexerPopModeAction.INSTANCE; } else if ("mode".equals(command) && arg != null) { String modeName = arg.getText(); Integer mode = getModeConstantValue(modeName, arg.getToken()); if (mode == null) { return null; } return new LexerModeAction(mode); } else if ("pushMode".equals(command) && arg != null) { String modeName = arg.getText(); Integer mode = getModeConstantValue(modeName, arg.getToken()); if (mode == null) { return null; } return new LexerPushModeAction(mode); } else if ("type".equals(command) && arg != null) { String typeName = arg.getText(); Integer type = getTokenConstantValue(typeName, arg.getToken()); if (type == null) { return null; } return new LexerTypeAction(type); } else if ("channel".equals(command) && arg != null) { String channelName = arg.getText(); Integer channel = getChannelConstantValue(channelName, arg.getToken()); if (channel == null) { return null; } return new LexerChannelAction(channel); } else { return null; } } private void checkCommands(String command, Token commandToken) { // Command combinations list: https://github.com/antlr/antlr4/issues/1388#issuecomment-263344701 if (!command.equals("pushMode") && !command.equals("popMode")) { if (ruleCommands.contains(command)) { g.tool.errMgr.grammarError(ErrorType.DUPLICATED_COMMAND, g.fileName, commandToken, command); } if (!ruleCommands.equals("mode")) { String firstCommand = null; if (command.equals("skip")) { if (ruleCommands.contains("more")) { firstCommand = "more"; } else if 
(ruleCommands.contains("type")) { firstCommand = "type"; } else if (ruleCommands.contains("channel")) { firstCommand = "channel"; } } else if (command.equals("more")) { if (ruleCommands.contains("skip")) { firstCommand = "skip"; } else if (ruleCommands.contains("type")) { firstCommand = "type"; } else if (ruleCommands.contains("channel")) { firstCommand = "channel"; } } else if (command.equals("type") || command.equals("channel")) { if (ruleCommands.contains("more")) { firstCommand = "more"; } else if (ruleCommands.contains("skip")) { firstCommand = "skip"; } } if (firstCommand != null) { g.tool.errMgr.grammarError(ErrorType.INCOMPATIBLE_COMMANDS, g.fileName, commandToken, firstCommand, command); } } } ruleCommands.add(command); } private Integer getModeConstantValue(String modeName, Token token) { if (modeName == null) { return null; } if (modeName.equals("DEFAULT_MODE")) { return Lexer.DEFAULT_MODE; } if (COMMON_CONSTANTS.containsKey(modeName)) { g.tool.errMgr.grammarError(ErrorType.MODE_CONFLICTS_WITH_COMMON_CONSTANTS, g.fileName, token, token.getText()); return null; } List<String> modeNames = new ArrayList<String>(((LexerGrammar)g).modes.keySet()); int mode = modeNames.indexOf(modeName); if (mode >= 0) { return mode; } try { return Integer.parseInt(modeName); } catch (NumberFormatException ex) { g.tool.errMgr.grammarError(ErrorType.CONSTANT_VALUE_IS_NOT_A_RECOGNIZED_MODE_NAME, g.fileName, token, token.getText()); return null; } } private Integer getTokenConstantValue(String tokenName, Token token) { if (tokenName == null) { return null; } if (tokenName.equals("EOF")) { return Lexer.EOF; } if (COMMON_CONSTANTS.containsKey(tokenName)) { g.tool.errMgr.grammarError(ErrorType.TOKEN_CONFLICTS_WITH_COMMON_CONSTANTS, g.fileName, token, token.getText()); return null; } int tokenType = g.getTokenType(tokenName); if (tokenType != org.antlr.v4.runtime.Token.INVALID_TYPE) { return tokenType; } try { return Integer.parseInt(tokenName); } catch (NumberFormatException ex) { 
g.tool.errMgr.grammarError(ErrorType.CONSTANT_VALUE_IS_NOT_A_RECOGNIZED_TOKEN_NAME, g.fileName, token, token.getText()); return null; } } private Integer getChannelConstantValue(String channelName, Token token) { if (channelName == null) { return null; } if (channelName.equals("HIDDEN")) { return Lexer.HIDDEN; } if (channelName.equals("DEFAULT_TOKEN_CHANNEL")) { return Lexer.DEFAULT_TOKEN_CHANNEL; } if (COMMON_CONSTANTS.containsKey(channelName)) { g.tool.errMgr.grammarError(ErrorType.CHANNEL_CONFLICTS_WITH_COMMON_CONSTANTS, g.fileName, token, token.getText()); return null; } int channelValue = g.getChannelValue(channelName); if (channelValue >= org.antlr.v4.runtime.Token.MIN_USER_CHANNEL_VALUE) { return channelValue; } try { return Integer.parseInt(channelName); } catch (NumberFormatException ex) { g.tool.errMgr.grammarError(ErrorType.CONSTANT_VALUE_IS_NOT_A_RECOGNIZED_CHANNEL_NAME, g.fileName, token, token.getText()); return null; } } }
/* * Copyright 2015, The Querydsl Team (http://www.querydsl.com/team) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.querydsl.jpa.codegen; import com.querydsl.codegen.CodegenModule; import com.querydsl.codegen.EmbeddableSerializer; import com.querydsl.codegen.EntitySerializer; import com.querydsl.codegen.EntityType; import com.querydsl.codegen.Property; import com.querydsl.codegen.QueryTypeFactory; import com.querydsl.codegen.Serializer; import com.querydsl.codegen.SerializerConfig; import com.querydsl.codegen.SimpleSerializerConfig; import com.querydsl.codegen.Supertype; import com.querydsl.codegen.SupertypeSerializer; import com.querydsl.codegen.TypeFactory; import com.querydsl.codegen.TypeMappings; import com.querydsl.codegen.utils.CodeWriter; import com.querydsl.codegen.utils.JavaWriter; import com.querydsl.codegen.utils.model.Type; import com.querydsl.codegen.utils.model.TypeCategory; import com.querydsl.core.QueryException; import com.querydsl.core.annotations.Config; import com.querydsl.core.annotations.PropertyType; import com.querydsl.core.annotations.QueryInit; import com.querydsl.core.annotations.QueryType; import com.querydsl.core.util.Annotations; import com.querydsl.core.util.ReflectionUtils; import org.jetbrains.annotations.Nullable; import javax.persistence.Embeddable; import javax.persistence.Entity; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStreamWriter; 
import java.io.Writer; import java.lang.reflect.AnnotatedElement; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.nio.charset.Charset; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.Function; import java.util.logging.Level; import java.util.logging.Logger; /** * {@code AbstractDomainExporter} is a common supertype for domain exporters * * @author tiwe * */ public abstract class AbstractDomainExporter { private static final Logger logger = Logger.getLogger(AbstractDomainExporter.class.getName()); private final File targetFolder; private final Map<Class<?>, EntityType> allTypes = new HashMap<>(); private final Map<Class<?>, EntityType> entityTypes = new HashMap<>(); private final Map<Class<?>, EntityType> embeddableTypes = new HashMap<>(); private final Map<Class<?>, EntityType> superTypes = new HashMap<>(); private final Map<Class<?>, SerializerConfig> typeToConfig = new HashMap<>(); private final Set<EntityType> serialized = new HashSet<EntityType>(); @SuppressWarnings("unchecked") protected final TypeFactory typeFactory = new TypeFactory(Arrays.asList(Entity.class, javax.persistence.MappedSuperclass.class, Embeddable.class)); private final QueryTypeFactory queryTypeFactory; private final TypeMappings typeMappings; private final Serializer embeddableSerializer; private final Serializer entitySerializer; private final Serializer supertypeSerializer; private final SerializerConfig serializerConfig; private final Charset charset; private final Set<File> generatedFiles = new HashSet<File>(); private Function<EntityType, String> variableNameFunction; @SuppressWarnings("unchecked") public AbstractDomainExporter(String namePrefix, String nameSuffix, File targetFolder, SerializerConfig serializerConfig, Charset charset) { this.targetFolder = targetFolder; this.serializerConfig = 
serializerConfig; this.charset = charset; CodegenModule module = new CodegenModule(); module.bind(CodegenModule.PREFIX, namePrefix); module.bind(CodegenModule.SUFFIX, nameSuffix); module.bind(CodegenModule.KEYWORDS, Constants.keywords); module.loadExtensions(); this.queryTypeFactory = module.get(QueryTypeFactory.class); this.typeMappings = module.get(TypeMappings.class); this.embeddableSerializer = module.get(EmbeddableSerializer.class); this.entitySerializer = module.get(EntitySerializer.class); this.supertypeSerializer = module.get(SupertypeSerializer.class); this.variableNameFunction = module.get(Function.class, CodegenModule.VARIABLE_NAME_FUNCTION_CLASS); } /** * Export the contents * * @throws IOException */ public void execute() throws IOException { // collect types try { collectTypes(); } catch (Exception e) { throw new QueryException(e); } // go through supertypes Set<Supertype> additions = new HashSet<>(); for (Map.Entry<Class<?>, EntityType> entry : allTypes.entrySet()) { EntityType entityType = entry.getValue(); if (entityType.getSuperType() != null && !allTypes.containsKey(entityType.getSuperType().getType().getJavaClass())) { additions.add(entityType.getSuperType()); } } for (Supertype type : additions) { type.setEntityType(createEntityType(type.getType(), this.superTypes)); } // merge supertype fields into subtypes Set<EntityType> handled = new HashSet<EntityType>(); for (EntityType type : superTypes.values()) { addSupertypeFields(type, allTypes, handled); } for (EntityType type : entityTypes.values()) { addSupertypeFields(type, allTypes, handled); } for (EntityType type : embeddableTypes.values()) { addSupertypeFields(type, allTypes, handled); } // serialize them serialize(superTypes, supertypeSerializer); serialize(embeddableTypes, embeddableSerializer); serialize(entityTypes, entitySerializer); } private void addSupertypeFields(EntityType model, Map<Class<?>, EntityType> superTypes, Set<EntityType> handled) { if (handled.add(model)) { for 
(Supertype supertype : model.getSuperTypes()) { EntityType entityType = superTypes.get(supertype.getType().getJavaClass()); if (entityType != null) { addSupertypeFields(entityType, superTypes, handled); supertype.setEntityType(entityType); model.include(supertype); } } } } protected abstract void collectTypes() throws Exception; protected EntityType createEmbeddableType(Class<?> cl) { return createEntityType(cl, embeddableTypes); } protected EntityType createEmbeddableType(Type type) { return createEntityType(type, embeddableTypes); } protected EntityType createEntityType(Class<?> cl) { return createEntityType(cl, entityTypes); } private EntityType createEntityType(Class<?> cl, Map<Class<?>, EntityType> types) { if (allTypes.containsKey(cl)) { return allTypes.get(cl); } else { EntityType type = typeFactory.getEntityType(cl); registerConfig(type); typeMappings.register(type, queryTypeFactory.create(type)); if (!cl.getSuperclass().equals(Object.class)) { type.addSupertype(new Supertype(typeFactory.get(cl.getSuperclass(), cl.getGenericSuperclass()))); } types.put(cl, type); allTypes.put(cl, type); return type; } } protected EntityType createEntityType(Type type) { return createEntityType(type, entityTypes); } protected EntityType createEntityType(Type type, Map<Class<?>, EntityType> types) { Class<?> key = type.getJavaClass(); if (allTypes.containsKey(key)) { return allTypes.get(key); } else { EntityType entityType = new EntityType(type, variableNameFunction); registerConfig(entityType); typeMappings.register(entityType, queryTypeFactory.create(entityType)); Class<?> superClass = key.getSuperclass(); if (entityType.getSuperType() == null && superClass != null && !superClass.equals(Object.class)) { entityType.addSupertype(new Supertype(typeFactory.get(superClass, key.getGenericSuperclass()))); } types.put(key, entityType); allTypes.put(key, entityType); return entityType; } } private void registerConfig(EntityType entityType) { Class<?> key = entityType.getJavaClass(); 
Config config = key.getAnnotation(Config.class); if (config == null && key.getPackage() != null) { config = key.getPackage().getAnnotation(Config.class); } if (config != null) { typeToConfig.put(key, SimpleSerializerConfig.getConfig(config)); } } @Nullable protected Type getTypeOverride(Type propertyType, AnnotatedElement annotated) { if (annotated.isAnnotationPresent(QueryType.class)) { QueryType queryType = annotated.getAnnotation(QueryType.class); if (queryType.value().equals(PropertyType.NONE)) { return null; } return propertyType.as(TypeCategory.valueOf(queryType.value().name())); } else { return propertyType; } } protected Property createProperty(EntityType entityType, String propertyName, Type propertyType, AnnotatedElement annotated) { List<String> inits = Collections.emptyList(); if (annotated.isAnnotationPresent(QueryInit.class)) { inits = Collections.unmodifiableList(Arrays.asList(annotated.getAnnotation(QueryInit.class).value())); } return new Property(entityType, propertyName, propertyType, inits); } protected EntityType createSuperType(Class<?> cl) { return createEntityType(cl, superTypes); } protected AnnotatedElement getAnnotatedElement(Class<?> cl, String propertyName) throws NoSuchMethodException { Field field = ReflectionUtils.getFieldOrNull(cl, propertyName); Method method = ReflectionUtils.getGetterOrNull(cl, propertyName); if (field != null) { if (method != null) { return new Annotations(field, method); } else { return field; } } else if (method != null) { return method; } else { throw new IllegalArgumentException("No property found for " + cl.getName() + "." 
+ propertyName); } } public Set<File> getGeneratedFiles() { return generatedFiles; } protected Type getType(Class<?> cl, Class<?> mappedType, String propertyName) throws NoSuchMethodException { Field field = ReflectionUtils.getFieldOrNull(cl, propertyName); if (field != null) { if (mappedType.isAssignableFrom(field.getType())) { return typeFactory.get(field.getType(), field.getGenericType()); } else { return typeFactory.get(mappedType); } } else { Method method = ReflectionUtils.getGetterOrNull(cl, propertyName); if (method != null) { if (mappedType.isAssignableFrom(method.getReturnType())) { return typeFactory.get(method.getReturnType(), method.getGenericReturnType()); } else { return typeFactory.get(mappedType); } } else { throw new IllegalArgumentException("No property found for " + cl.getName() + "." + propertyName); } } } private void serialize(Map<Class<?>, EntityType> types, Serializer serializer) throws IOException { for (EntityType entityType : types.values()) { if (serialized.add(entityType)) { Type type = typeMappings.getPathType(entityType, entityType, true); String packageName = type.getPackageName(); String className = packageName.length() > 0 ? (packageName + "." 
+ type.getSimpleName()) : type.getSimpleName(); write(serializer, className.replace('.', '/') + ".java", entityType); } } } private void write(Serializer serializer, String path, EntityType type) throws IOException { File targetFile = new File(targetFolder, path); generatedFiles.add(targetFile); try (Writer w = writerFor(targetFile)) { CodeWriter writer = new JavaWriter(w); if (typeToConfig.containsKey(type.getJavaClass())) { serializer.serialize(type, typeToConfig.get(type.getJavaClass()), writer); } else { serializer.serialize(type, serializerConfig, writer); } } } private Writer writerFor(File file) { if (!file.getParentFile().exists() && !file.getParentFile().mkdirs()) { logger.log(Level.WARNING, "Folder " + file.getParent() + " could not be created"); } try { return new OutputStreamWriter(new FileOutputStream(file), charset); } catch (FileNotFoundException e) { throw new RuntimeException(e.getMessage(), e); } } protected Type normalize(Type first, Type second) { if (first.getFullName().equals(second.getFullName())) { return first; } else { return second; } } public void setUnknownAsEntity(boolean unknownAsEntity) { typeFactory.setUnknownAsEntity(unknownAsEntity); } }
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.jetbrains.python.sdk; import com.google.common.base.Predicate; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ModalityState; import com.intellij.openapi.options.ShowSettingsUtil; import com.intellij.openapi.project.DumbModePermission; import com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.Project; import com.intellij.openapi.projectRoots.ProjectJdkTable; import com.intellij.openapi.projectRoots.Sdk; import com.intellij.openapi.projectRoots.SdkAdditionalData; import com.intellij.openapi.projectRoots.impl.ProjectJdkImpl; import com.intellij.openapi.projectRoots.impl.SdkConfigurationUtil; import com.intellij.openapi.ui.DialogWrapper; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.ui.popup.JBPopupFactory; import com.intellij.openapi.ui.popup.ListPopup; import com.intellij.openapi.ui.popup.ListSeparator; import com.intellij.openapi.ui.popup.PopupStep; import com.intellij.openapi.ui.popup.util.BaseListPopupStep; import com.intellij.openapi.util.Disposer; import com.intellij.util.NullableConsumer; import com.jetbrains.python.PyBundle; import com.jetbrains.python.packaging.PyCondaPackageService; import com.jetbrains.python.remote.PythonRemoteInterpreterManager; import 
com.jetbrains.python.sdk.flavors.PythonSdkFlavor;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;

/**
 * Popup step that offers the ways to add a Python SDK: a local interpreter,
 * a remote interpreter (when the remote-interpreter plugin is available), a
 * new virtualenv, a new conda env, and optionally a "show more" entry that
 * reopens the dialog passed in as {@code moreDialog}.
 */
public class PythonSdkDetailsStep extends BaseListPopupStep<String> {
  // dialog behind the MORE entry; disposed when another option is picked or the popup is canceled
  @Nullable private DialogWrapper myMore;
  private final Project myProject;
  private final Component myOwnerComponent;
  private final Sdk[] myExistingSdks;
  // receives the chosen/created SDK (may be passed null)
  private final NullableConsumer<Sdk> myCallback;

  private static final String LOCAL = PyBundle.message("sdk.details.step.add.local");
  private static final String REMOTE = PyBundle.message("sdk.details.step.add.remote");
  private static final String VIRTUALENV = PyBundle.message("sdk.details.step.create.virtual.env");
  private static final String CONDA = PyBundle.message("sdk.details.step.create.conda.env");
  private static final String MORE = PyBundle.message("sdk.details.step.show.more");
  // affects how a created virtualenv is associated in getVEnvCallback()
  private boolean myNewProject;

  public static void show(final Project project, final Sdk[] existingSdks,
                          @Nullable final DialogWrapper moreDialog,
                          JComponent ownerComponent, final Point popupPoint,
                          final NullableConsumer<Sdk> callback) {
    show(project, existingSdks, moreDialog, ownerComponent, popupPoint, callback, false);
  }

  /** Build the step and show it as a list popup at {@code popupPoint} (screen coordinates). */
  public static void show(final Project project, final Sdk[] existingSdks,
                          @Nullable final DialogWrapper moreDialog,
                          JComponent ownerComponent, final Point popupPoint,
                          final NullableConsumer<Sdk> callback,
                          boolean isNewProject) {
    final PythonSdkDetailsStep sdkHomesStep =
      new PythonSdkDetailsStep(project, moreDialog, ownerComponent, existingSdks, callback);
    sdkHomesStep.setNewProject(isNewProject);
    final ListPopup popup = JBPopupFactory.getInstance().createListPopup(sdkHomesStep);
    popup.showInScreenCoordinates(ownerComponent, popupPoint);
  }

  private void setNewProject(boolean isNewProject) {
    myNewProject = isNewProject;
  }

  public PythonSdkDetailsStep(@Nullable final Project project,
                              @Nullable final DialogWrapper moreDialog,
                              @NotNull final Component ownerComponent,
                              @NotNull final Sdk[] existingSdks,
                              @NotNull final NullableConsumer<Sdk> callback) {
    // the option list depends on whether a "more" dialog was supplied
    super(null, getAvailableOptions(moreDialog != null));
    myProject = project;
    myMore = moreDialog;
    myOwnerComponent = ownerComponent;
    myExistingSdks = existingSdks;
    myCallback = callback;
  }

  /** REMOTE and CONDA entries appear only when their prerequisites are present. */
  private static List<String> getAvailableOptions(boolean showMore) {
    final List<String> options = new ArrayList<String>();
    options.add(LOCAL);
    if (PythonRemoteInterpreterManager.getInstance() != null) {
      options.add(REMOTE);
    }
    options.add(VIRTUALENV);
    if (PyCondaPackageService.getCondaExecutable() != null) {
      options.add(CONDA);
    }
    if (showMore) {
      options.add(MORE);
    }
    return options;
  }

  @Nullable
  @Override
  public ListSeparator getSeparatorAbove(String value) {
    // visually separate the MORE entry from the creation options
    return MORE.equals(value) ? new ListSeparator() : null;
  }

  /** Dispatch the chosen popup entry; disposes the MORE dialog unless it was chosen. */
  private void optionSelected(final String selectedValue) {
    if (!MORE.equals(selectedValue) && myMore != null) Disposer.dispose(myMore.getDisposable());
    if (LOCAL.equals(selectedValue)) {
      createLocalSdk();
    }
    else if (REMOTE.equals(selectedValue)) {
      createRemoteSdk();
    }
    else if (VIRTUALENV.equals(selectedValue)) {
      createVirtualEnvSdk();
    }
    else if (CONDA.equals(selectedValue)) {
      createCondaEnvSdk();
    }
    else if (myMore != null) {
      myMore.show();
    }
  }

  /**
   * Let the user pick a local interpreter; the chosen SDK is forwarded to
   * {@code myCallback} and then registered in the JDK table (inside a
   * modal-dumb-mode permission) if not already present.
   */
  private void createLocalSdk() {
    ApplicationManager.getApplication().invokeLater(new Runnable() {
      @Override
      public void run() {
        final NullableConsumer<Sdk> callback = new NullableConsumer<Sdk>() {
          @Override
          public void consume(@Nullable final Sdk sdk) {
            myCallback.consume(sdk);
            if (sdk != null) {
              DumbService.allowStartingDumbModeInside(DumbModePermission.MAY_START_MODAL, new Runnable() {
                @Override
                public void run() {
                  // avoid duplicate registration of an SDK with the same name
                  if (ProjectJdkTable.getInstance().findJdk(sdk.getName()) == null) {
                    SdkConfigurationUtil.addSdk(sdk);
                  }
                  PythonSdkUpdater.getInstance().markAlreadyUpdated(sdk.getHomePath());
                }
              });
            }
          }
        };
        SdkConfigurationUtil.createSdk(myProject, myExistingSdks, callback, false, PythonSdkType.getInstance());
      }
    }, ModalityState.any());
  }

  /** Delegate to the remote-interpreter plugin, or show an error pointing at the Plugins settings. */
  private void createRemoteSdk() {
    PythonRemoteInterpreterManager remoteInterpreterManager = PythonRemoteInterpreterManager.getInstance();
    if (remoteInterpreterManager != null) {
      remoteInterpreterManager.addRemoteSdk(myProject, myOwnerComponent, Lists.newArrayList(myExistingSdks), myCallback);
    }
    else {
      final String pathToPluginsPage = ShowSettingsUtil.getSettingsMenuName() + " | Plugins";
      Messages.showErrorDialog(PyBundle.message("remote.interpreter.error.plugin.missing", pathToPluginsPage),
                               PyBundle.message("remote.interpreter.add.title"));
    }
  }

  /**
   * Show the create-virtualenv dialog, seeding its base-interpreter list with
   * existing Python SDKs plus detected and previously-added interpreter paths.
   */
  private void createVirtualEnvSdk() {
    AbstractCreateVirtualEnvDialog.VirtualEnvCallback callback = getVEnvCallback();

    final CreateVirtualEnvDialog dialog;
    final List<Sdk> allSdks = Lists.newArrayList(myExistingSdks);
    // keep only Python SDKs as virtualenv base candidates
    Iterables.removeIf(allSdks, new Predicate<Sdk>() {
      @Override
      public boolean apply(Sdk sdk) {
        return !(sdk.getSdkType() instanceof PythonSdkType);
      }
    });
    final List<PythonSdkFlavor> flavors = PythonSdkFlavor.getApplicableFlavors(false);
    for (PythonSdkFlavor flavor : flavors) {
      final Collection<String> strings = flavor.suggestHomePaths();
      for (String string : SdkConfigurationUtil.filterExistingPaths(PythonSdkType.getInstance(), strings,
                                                                    myExistingSdks)) {
        allSdks.add(new PyDetectedSdk(string));
      }
    }
    final Set<String> sdks = PySdkService.getInstance().getAddedSdks();
    for (String string : SdkConfigurationUtil.filterExistingPaths(PythonSdkType.getInstance(), sdks,
                                                                  myExistingSdks)) {
      allSdks.add(new PyDetectedSdk(string));
    }
    if (myProject != null) {
      dialog = new CreateVirtualEnvDialog(myProject, allSdks);
    }
    else {
      dialog = new CreateVirtualEnvDialog(myOwnerComponent, allSdks);
    }
    if (dialog.showAndGet()) {
      dialog.createVirtualEnv(callback);
    }
  }

  /**
   * Callback for a freshly created env: mark it updated, optionally associate
   * it with the (new or current) project, and forward it to {@code myCallback}.
   */
  @NotNull
  private AbstractCreateVirtualEnvDialog.VirtualEnvCallback getVEnvCallback() {
    return new CreateVirtualEnvDialog.VirtualEnvCallback() {
      @Override
      public void virtualEnvCreated(Sdk sdk, boolean associateWithProject) {
        PythonSdkUpdater.getInstance().markAlreadyUpdated(sdk.getHomePath());
        if (associateWithProject) {
          SdkAdditionalData additionalData = sdk.getSdkAdditionalData();
          if (additionalData == null) {
            additionalData = new PythonSdkAdditionalData(PythonSdkFlavor.getFlavor(sdk.getHomePath()));
            ((ProjectJdkImpl)sdk).setSdkAdditionalData(additionalData);
          }
          if (myNewProject) {
            ((PythonSdkAdditionalData)additionalData).associateWithNewProject();
          }
          else {
            ((PythonSdkAdditionalData)additionalData).associateWithProject(myProject);
          }
        }
        myCallback.consume(sdk);
      }
    };
  }

  /** Show the create-conda-env dialog; reuses the virtualenv callback. */
  private void createCondaEnvSdk() {
    AbstractCreateVirtualEnvDialog.VirtualEnvCallback callback = getVEnvCallback();
    final CreateCondaEnvDialog dialog;
    if (myProject != null) {
      dialog = new CreateCondaEnvDialog(myProject);
    }
    else {
      dialog = new CreateCondaEnvDialog(myOwnerComponent);
    }
    if (dialog.showAndGet()) {
      dialog.createVirtualEnv(callback);
    }
  }

  @Override
  public boolean canBeHidden(String value) {
    return true;
  }

  @Override
  public void canceled() {
    // popup dismissed without a choice: dispose the pending MORE dialog
    if (getFinalRunnable() == null && myMore != null) {
      Disposer.dispose(myMore.getDisposable());
    }
  }

  @Override
  public PopupStep onChosen(final String selectedValue, boolean finalChoice) {
    return doFinalStep(new Runnable() {
      public void run() {
        optionSelected(selectedValue);
      }
    });
  }
}
/*
 * Copyright 2002-2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.security.samples.servletapi.mvc;

import java.io.IOException;
import java.security.Principal;

import javax.servlet.AsyncContext;
import javax.servlet.Servlet;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

// FIX: was javax.naming.AuthenticationException — the Javadoc below refers to the
// exception Spring Security wraps in a ServletException on login failure, which is
// org.springframework.security.core.AuthenticationException.
import org.springframework.security.core.Authentication;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.core.context.SecurityContext;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.web.access.ExceptionTranslationFilter;
import org.springframework.stereotype.Controller;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.servlet.mvc.support.RedirectAttributes;

/**
 * A Spring MVC Controller that demonstrates Spring Security's integration with the
 * standard Servlet API's. Specifically it demonstrates the following:
 * <ul>
 * <li>{@link #authenticate(HttpServletRequest, HttpServletResponse)} - Integration with
 * {@link HttpServletRequest#authenticate(HttpServletResponse)}</li>
 * <li>{@link #login(HttpServletRequest, HttpServletResponse, LoginForm, BindingResult)} -
 * Integration with {@link HttpServletRequest#login(String, String)}</li>
 * <li>{@link #logout(HttpServletRequest, HttpServletResponse, RedirectAttributes)} - Integration with
 * {@link HttpServletRequest#logout()}</li>
 * <li>{@link #remoteUser(HttpServletRequest)} - Integration with
 * {@link HttpServletRequest#getRemoteUser()}</li>
 * <li>{@link #userPrincipal(HttpServletRequest)} - Integration with
 * {@link HttpServletRequest#getUserPrincipal()}</li>
 * <li>{@link #authentication(Authentication)} - Spring MVC's ability to resolve the
 * {@link Authentication} since it is found on
 * {@link HttpServletRequest#getUserPrincipal()}</li>
 * </ul>
 *
 * @author Rob Winch
 *
 */
@Controller
public class ServletApiController {

	/**
	 * Demonstrates that {@link HttpServletRequest#authenticate(HttpServletResponse)} will
	 * send the user to the log in page configured within Spring Security if the user is
	 * not already authenticated.
	 *
	 * @param request the current request
	 * @param response the current response
	 * @return the "index" view when already authenticated, {@code null} otherwise
	 *         (Spring Security has then already committed a redirect to the login page)
	 * @throws ServletException
	 * @throws IOException
	 */
	@RequestMapping("/authenticate")
	public String authenticate(HttpServletRequest request, HttpServletResponse response)
			throws ServletException, IOException {
		boolean authenticate = request.authenticate(response);
		return authenticate ? "index" : null;
	}

	/**
	 * Demonstrates that you can authenticate with Spring Security using
	 * {@link HttpServletRequest#login(String, String)}.
	 *
	 * <p>
	 * If we fail to authenticate, a {@link ServletException} is thrown that wraps the
	 * original {@link AuthenticationException} from Spring Security. This means we can
	 * catch the {@link ServletException} to display the error message. Alternatively, we
	 * could allow the {@link ServletException} to propagate and Spring Security's
	 * {@link ExceptionTranslationFilter} would catch it and process it appropriately.
	 * </p>
	 * <p>
	 * In this method we choose to use Spring MVC's {@link ModelAttribute} to make things
	 * easier for our form. However, this is not necessary. We could have just as easily
	 * obtained the request parameters from the {@link HttpServletRequest} object.
	 * Remember all of these examples would work in a standard {@link Servlet} or anything
	 * with access to the {@link HttpServletRequest} and {@link HttpServletResponse}.
	 * </p>
	 *
	 * @param request the current request
	 * @param response the current response
	 * @param loginForm the submitted credentials
	 * @param result used to record the authentication failure, if any
	 * @return the "login" view on failure, a redirect to the root on success
	 */
	@RequestMapping(value = "/login", method = RequestMethod.POST)
	public String login(HttpServletRequest request, HttpServletResponse response,
			@ModelAttribute LoginForm loginForm, BindingResult result) {
		try {
			request.login(loginForm.getUsername(), loginForm.getPassword());
		}
		catch (ServletException authenticationFailed) {
			// The ServletException wraps Spring Security's AuthenticationException;
			// surface its message as a form-level error.
			result.rejectValue(null, "authentication.failed",
					authenticationFailed.getMessage());
			return "login";
		}
		return "redirect:/";
	}

	/**
	 * Demonstrates that invoking {@link HttpServletRequest#logout()} will log the user
	 * out. Note that the response does not get processed, so you need to write something
	 * to the response.
	 * @param request the current request
	 * @param response the current response
	 * @param redirect attributes for the redirect (unused here, shown for completeness)
	 * @return a redirect to the root
	 * @throws ServletException
	 */
	@RequestMapping("/logout")
	public String logout(HttpServletRequest request, HttpServletResponse response,
			RedirectAttributes redirect) throws ServletException {
		request.logout();
		return "redirect:/";
	}

	/**
	 * Demonstrates Spring Security with {@link AsyncContext#start(Runnable)}. Spring
	 * Security will automatically transfer the {@link SecurityContext} from the thread
	 * that {@link AsyncContext#start(Runnable)} is invoked to the new Thread that invokes
	 * the {@link Runnable}.
	 * @param request the current request
	 * @param response the current response
	 */
	@RequestMapping("/async")
	public void asynch(HttpServletRequest request, HttpServletResponse response) {
		final AsyncContext async = request.startAsync();
		async.start(() -> {
			// Runs on a different thread; the SecurityContext is propagated by
			// Spring Security, so the Authentication is still visible here.
			Authentication authentication = SecurityContextHolder.getContext()
					.getAuthentication();
			try {
				final HttpServletResponse asyncResponse = (HttpServletResponse) async
						.getResponse();
				asyncResponse.setStatus(HttpServletResponse.SC_OK);
				asyncResponse.getWriter().write(String.valueOf(authentication));
				async.complete();
			}
			catch (Exception e) {
				throw new RuntimeException(e);
			}
		});
	}

	/**
	 * Demonstrates that Spring Security automatically populates
	 * {@link HttpServletRequest#getRemoteUser()} with the current username.
	 * @param request the current request
	 * @return the current username, or {@code null} when not authenticated
	 */
	@ModelAttribute("remoteUser")
	public String remoteUser(HttpServletRequest request) {
		return request.getRemoteUser();
	}

	/**
	 * Demonstrates that Spring Security automatically populates
	 * {@link HttpServletRequest#getUserPrincipal()} with the {@link Authentication} that
	 * is present on {@link SecurityContextHolder#getContext()}
	 * @param request the current request
	 * @return the current user {@link Principal}, or {@code null} when not authenticated
	 */
	@ModelAttribute("userPrincipal")
	public Principal userPrincipal(HttpServletRequest request) {
		return request.getUserPrincipal();
	}

	/**
	 * Spring MVC will automatically resolve any object that implements {@link Principal}
	 * using {@link HttpServletRequest#getUserPrincipal()}. This means you can easily
	 * resolve the {@link Authentication} just by adding it as an argument to your MVC
	 * controller. Alternatively, you could also have an argument of type
	 * {@link Principal} which would not couple your controller to Spring Security.
	 * @param authentication the current {@link Authentication}, resolved by Spring MVC
	 * @return the same {@link Authentication}, exposed as a model attribute
	 */
	@ModelAttribute
	public Authentication authentication(Authentication authentication) {
		return authentication;
	}

	/** Renders the home page. */
	@RequestMapping("/")
	public String welcome() {
		return "index";
	}

	/** Renders the login form. */
	@RequestMapping(value = "/login", method = RequestMethod.GET)
	public String login(@ModelAttribute LoginForm loginForm) {
		return "login";
	}
}
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.roots.impl;

import com.intellij.openapi.CompositeDisposable;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.roots.impl.libraries.LibraryEx;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.roots.libraries.LibraryTable;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.InvalidDataException;
import com.intellij.openapi.util.WriteExternalException;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.pointers.VirtualFilePointerManager;
import com.intellij.util.ArrayUtil;
import com.intellij.util.containers.ContainerUtil;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.*;

/**
 * Implementation of a module's root model: the set of content entries and the ordered
 * list of order entries (JDK, module-source, library and module dependencies), plus the
 * {@link ModuleExtension}s attached to the module.
 *
 * <p>A model is either read-only (the committed model owned by
 * {@link ModuleRootManagerImpl}) or writable (a modifiable copy created from a source
 * model). Mutating methods call {@link #assertWritable()}. Changes made to a writable
 * copy are pushed back to its source model via {@link #docommit()}.
 *
 * @author dsl
 */
public class RootModelImpl extends RootModelBase implements ModifiableRootModel {
  private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.roots.impl.RootModelImpl");

  // Content entries, kept sorted/deduplicated by URL (see ContentComparator).
  private final Set<ContentEntry> myContent = new TreeSet<>(ContentComparator.INSTANCE);
  // Ordered dependency entries; the Order subclass keeps each entry's index in sync
  // and invalidates myCachedOrderEntries on every mutation.
  private final List<OrderEntry> myOrderEntries = new Order();
  // cleared by myOrderEntries modification, see Order
  @Nullable private OrderEntry[] myCachedOrderEntries;
  @NotNull private final ModuleLibraryTable myModuleLibraryTable;
  final ModuleRootManagerImpl myModuleRootManager;
  // true while this instance is a modifiable copy that has not been committed/disposed
  private boolean myWritable;
  private final VirtualFilePointerManager myFilePointerManager;
  private boolean myDisposed = false;

  private final Set<ModuleExtension> myExtensions = new TreeSet<>();

  private final RootConfigurationAccessor myConfigurationAccessor;

  private final ProjectRootManagerImpl myProjectRootManager;
  // have to register all child disposables using this fake object since all clients just call ModifiableModel.dispose()
  private final CompositeDisposable myDisposable = new CompositeDisposable();

  /**
   * Creates the initial (read-only) model: a single module-source order entry and a
   * non-modifiable model of every applicable {@link ModuleExtension}.
   */
  RootModelImpl(@NotNull ModuleRootManagerImpl moduleRootManager,
                ProjectRootManagerImpl projectRootManager,
                VirtualFilePointerManager filePointerManager) {
    myModuleRootManager = moduleRootManager;
    myProjectRootManager = projectRootManager;
    myFilePointerManager = filePointerManager;

    myWritable = false;

    addSourceOrderEntries();
    myModuleLibraryTable = new ModuleLibraryTable(this, myProjectRootManager);

    for (ModuleExtension extension : Extensions.getExtensions(ModuleExtension.EP_NAME, moduleRootManager.getModule())) {
      ModuleExtension model = extension.getModifiableModel(false);
      registerOnDispose(model);
      myExtensions.add(model);
    }
    myConfigurationAccessor = new RootConfigurationAccessor();
  }

  // Every model always contains exactly one module-source entry.
  private void addSourceOrderEntries() {
    myOrderEntries.add(new ModuleSourceOrderEntryImpl(this));
  }

  /**
   * Deserializes a model from its JDOM {@code element}: content entries, order entries
   * (deduplicating module-source entries and appending one if missing) and extension
   * state.
   *
   * @throws InvalidDataException propagated from entry/extension deserialization
   */
  RootModelImpl(@NotNull Element element,
                @NotNull ModuleRootManagerImpl moduleRootManager,
                ProjectRootManagerImpl projectRootManager,
                VirtualFilePointerManager filePointerManager,
                boolean writable) throws InvalidDataException {
    myProjectRootManager = projectRootManager;
    myFilePointerManager = filePointerManager;
    myModuleRootManager = moduleRootManager;

    myModuleLibraryTable = new ModuleLibraryTable(this, myProjectRootManager);

    for (Element child : element.getChildren(ContentEntryImpl.ELEMENT_NAME)) {
      myContent.add(new ContentEntryImpl(child, this));
    }

    boolean moduleSourceAdded = false;
    for (Element child : element.getChildren(OrderEntryFactory.ORDER_ENTRY_ELEMENT_NAME)) {
      final OrderEntry orderEntry = OrderEntryFactory.createOrderEntryByElement(child, this, myProjectRootManager);
      if (orderEntry instanceof ModuleSourceOrderEntry) {
        // only the first persisted module-source entry is kept
        if (moduleSourceAdded) continue;
        moduleSourceAdded = true;
      }
      myOrderEntries.add(orderEntry);
    }

    if (!moduleSourceAdded) {
      myOrderEntries.add(new ModuleSourceOrderEntryImpl(this));
    }

    myWritable = writable;

    RootModelImpl originalRootModel = moduleRootManager.getRootModel();
    for (ModuleExtension extension : originalRootModel.myExtensions) {
      ModuleExtension model = extension.getModifiableModel(false);
      model.readExternal(element);
      registerOnDispose(model);
      myExtensions.add(model);
    }
    myConfigurationAccessor = new RootConfigurationAccessor();
  }

  @Override
  public boolean isWritable() {
    return myWritable;
  }

  public RootConfigurationAccessor getConfigurationAccessor() {
    return myConfigurationAccessor;
  }

  //creates modifiable model
  RootModelImpl(@NotNull RootModelImpl rootModel,
                ModuleRootManagerImpl moduleRootManager,
                final boolean writable,
                final RootConfigurationAccessor rootConfigurationAccessor,
                @NotNull VirtualFilePointerManager filePointerManager,
                ProjectRootManagerImpl projectRootManager) {
    myFilePointerManager = filePointerManager;
    myModuleRootManager = moduleRootManager;
    myProjectRootManager = projectRootManager;

    myModuleLibraryTable = new ModuleLibraryTable(this, myProjectRootManager);

    myWritable = writable;
    myConfigurationAccessor = rootConfigurationAccessor;

    // Deep-copy content entries from the source model; only clonable entries are copied.
    final Set<ContentEntry> thatContent = rootModel.myContent;
    for (ContentEntry contentEntry : thatContent) {
      if (contentEntry instanceof ClonableContentEntry) {
        ContentEntry cloned = ((ClonableContentEntry)contentEntry).cloneEntry(this);
        myContent.add(cloned);
      }
    }

    setOrderEntriesFrom(rootModel);

    for (ModuleExtension extension : rootModel.myExtensions) {
      ModuleExtension model = extension.getModifiableModel(writable);
      registerOnDispose(model);
      myExtensions.add(model);
    }
  }

  // Replaces this model's order entries with clones of rootModel's (clonable ones only).
  private void setOrderEntriesFrom(@NotNull RootModelImpl rootModel) {
    removeAllOrderEntries();
    for (OrderEntry orderEntry : rootModel.myOrderEntries) {
      if (orderEntry instanceof ClonableOrderEntry) {
        myOrderEntries.add(((ClonableOrderEntry)orderEntry).cloneEntry(this, myProjectRootManager, myFilePointerManager));
      }
    }
  }

  // Disposes each entry before clearing the list.
  private void removeAllOrderEntries() {
    for (OrderEntry entry : myOrderEntries) {
      Disposer.dispose((OrderEntryBaseImpl)entry);
    }
    myOrderEntries.clear();
  }

  @Override
  @NotNull
  public OrderEntry[] getOrderEntries() {
    // Cached array; any mutation of myOrderEntries resets the cache (see Order).
    OrderEntry[] cachedOrderEntries = myCachedOrderEntries;
    if (cachedOrderEntries == null) {
      myCachedOrderEntries = cachedOrderEntries = myOrderEntries.toArray(new OrderEntry[myOrderEntries.size()]);
    }
    return cachedOrderEntries;
  }

  // Read-only iteration over the order entries.
  Iterator<OrderEntry> getOrderIterator() {
    return Collections.unmodifiableList(myOrderEntries).iterator();
  }

  @Override
  public void removeContentEntry(@NotNull ContentEntry entry) {
    assertWritable();
    LOG.assertTrue(myContent.contains(entry));
    if (entry instanceof RootModelComponentBase) {
      Disposer.dispose((RootModelComponentBase)entry);
      // sanity check: the entry must belong to this model, not another copy
      RootModelImpl entryModel = ((RootModelComponentBase)entry).getRootModel();
      LOG.assertTrue(entryModel == this, "Removing from " + this + " content entry obtained from " + entryModel);
    }
    myContent.remove(entry);
  }

  @Override
  public void addOrderEntry(@NotNull OrderEntry entry) {
    assertWritable();
    LOG.assertTrue(!myOrderEntries.contains(entry));
    myOrderEntries.add(entry);
  }

  @NotNull
  @Override
  public LibraryOrderEntry addLibraryEntry(@NotNull Library library) {
    assertWritable();
    final LibraryOrderEntry libraryOrderEntry = new LibraryOrderEntryImpl(library, this, myProjectRootManager);
    assert libraryOrderEntry.isValid();
    myOrderEntries.add(libraryOrderEntry);
    return libraryOrderEntry;
  }

  /** Adds a dependency on a library known only by name/level (possibly unresolved). */
  @NotNull
  @Override
  public LibraryOrderEntry addInvalidLibrary(@NotNull String name, @NotNull String level) {
    assertWritable();
    final LibraryOrderEntry libraryOrderEntry = new LibraryOrderEntryImpl(name, level, this, myProjectRootManager);
    myOrderEntries.add(libraryOrderEntry);
    return libraryOrderEntry;
  }

  @NotNull
  @Override
  public ModuleOrderEntry addModuleOrderEntry(@NotNull Module module) {
    assertWritable();
    LOG.assertTrue(!module.equals(getModule()));                // no self-dependency
    LOG.assertTrue(Comparing.equal(myModuleRootManager.getModule().getProject(), module.getProject()));
    final ModuleOrderEntryImpl moduleOrderEntry = new ModuleOrderEntryImpl(module, this);
    myOrderEntries.add(moduleOrderEntry);
    return moduleOrderEntry;
  }

  /** Adds a dependency on a module known only by name (possibly unresolved). */
  @NotNull
  @Override
  public ModuleOrderEntry addInvalidModuleEntry(@NotNull String name) {
    assertWritable();
    LOG.assertTrue(!name.equals(getModule().getName()));        // no self-dependency
    final ModuleOrderEntryImpl moduleOrderEntry = new ModuleOrderEntryImpl(name, this);
    myOrderEntries.add(moduleOrderEntry);
    return moduleOrderEntry;
  }

  @Nullable
  @Override
  public LibraryOrderEntry findLibraryOrderEntry(@NotNull Library library) {
    for (OrderEntry orderEntry : getOrderEntries()) {
      if (orderEntry instanceof LibraryOrderEntry && library.equals(((LibraryOrderEntry)orderEntry).getLibrary())) {
        return (LibraryOrderEntry)orderEntry;
      }
    }
    return null;
  }

  @Override
  public void removeOrderEntry(@NotNull OrderEntry entry) {
    assertWritable();
    removeOrderEntryInternal(entry);
  }

  private void removeOrderEntryInternal(OrderEntry entry) {
    LOG.assertTrue(myOrderEntries.contains(entry));
    Disposer.dispose((OrderEntryBaseImpl)entry);
    myOrderEntries.remove(entry);
  }

  /** Reorders entries; {@code newEntries} must be a permutation of the current ones. */
  @Override
  public void rearrangeOrderEntries(@NotNull OrderEntry[] newEntries) {
    assertWritable();
    assertValidRearrangement(newEntries);
    myOrderEntries.clear();
    ContainerUtil.addAll(myOrderEntries, newEntries);
  }

  private void assertValidRearrangement(@NotNull OrderEntry[] newEntries) {
    String error = checkValidRearrangement(newEntries);
    LOG.assertTrue(error == null, error);
  }

  // Returns an error message when newEntries is not a permutation of myOrderEntries,
  // null when it is valid.
  @Nullable
  private String checkValidRearrangement(@NotNull OrderEntry[] newEntries) {
    if (newEntries.length != myOrderEntries.size()) {
      return "Size mismatch: old size=" + myOrderEntries.size() + "; new size=" + newEntries.length;
    }
    Set<OrderEntry> set = new HashSet<>();
    for (OrderEntry newEntry : newEntries) {
      if (!myOrderEntries.contains(newEntry)) {
        return "Trying to add nonexisting order entry " + newEntry;
      }
      if (set.contains(newEntry)) {
        return "Trying to add duplicate order entry " + newEntry;
      }
      set.add(newEntry);
    }
    return null;
  }

  /** Resets the model to just the SDK entry plus a fresh module-source entry. */
  @Override
  public void clear() {
    final Sdk jdk = getSdk();
    removeAllContentEntries();
    removeAllOrderEntries();
    setSdk(jdk);
    addSourceOrderEntries();
  }

  private void removeAllContentEntries() {
    for (ContentEntry entry : myContent) {
      if (entry instanceof RootModelComponentBase) {
        Disposer.dispose((RootModelComponentBase)entry);
      }
    }
    myContent.clear();
  }

  @Override
  public void commit() {
    // Delegates the actual merge to the root manager, then freezes this model.
    myModuleRootManager.commitModel(this);
    myWritable = false;
  }

  /**
   * Pushes this writable model's changes into its source model: order entries,
   * changed extensions and content entries. Presumably invoked from
   * {@link ModuleRootManagerImpl#commitModel} — TODO confirm.
   */
  public void docommit() {
    assert isWritable();

    if (areOrderEntriesChanged()) {
      getSourceModel().setOrderEntriesFrom(this);
    }

    for (ModuleExtension extension : myExtensions) {
      if (extension.isChanged()) {
        extension.commit();
      }
    }

    if (areContentEntriesChanged()) {
      getSourceModel().removeAllContentEntries();
      for (ContentEntry contentEntry : myContent) {
        ContentEntry cloned = ((ClonableContentEntry)contentEntry).cloneEntry(getSourceModel());
        getSourceModel().myContent.add(cloned);
      }
    }
  }

  @Override
  @NotNull
  public LibraryTable getModuleLibraryTable() {
    return myModuleLibraryTable;
  }

  @Override
  public Project getProject() {
    return myProjectRootManager.getProject();
  }

  @Override
  @NotNull
  public ContentEntry addContentEntry(@NotNull VirtualFile file) {
    return addContentEntry(new ContentEntryImpl(file, this));
  }

  @Override
  @NotNull
  public ContentEntry addContentEntry(@NotNull String url) {
    return addContentEntry(new ContentEntryImpl(url, this));
  }

  @Override
  public boolean isDisposed() {
    return myDisposed;
  }

  // Returns the existing entry with the same URL when present, otherwise adds e.
  @NotNull
  private ContentEntry addContentEntry(@NotNull ContentEntry e) {
    if (myContent.contains(e)) {
      for (ContentEntry contentEntry : getContentEntries()) {
        if (ContentComparator.INSTANCE.compare(contentEntry, e) == 0) return contentEntry;
      }
    }
    myContent.add(e);
    return e;
  }

  /** Serializes extensions, content entries and writable order entries into {@code element}. */
  public void writeExternal(@NotNull Element element) throws WriteExternalException {
    for (ModuleExtension extension : myExtensions) {
      extension.writeExternal(element);
    }

    for (ContentEntry contentEntry : getContent()) {
      if (contentEntry instanceof ContentEntryImpl) {
        final Element subElement = new Element(ContentEntryImpl.ELEMENT_NAME);
        ((ContentEntryImpl)contentEntry).writeExternal(subElement);
        element.addContent(subElement);
      }
    }

    for (OrderEntry orderEntry : getOrderEntries()) {
      if (orderEntry instanceof WritableOrderEntry) {
        ((WritableOrderEntry)orderEntry).writeExternal(element);
      }
    }
  }

  /** Sets (or, with {@code null}, removes) the module-specific SDK entry. */
  @Override
  public void setSdk(@Nullable Sdk jdk) {
    assertWritable();
    final JdkOrderEntry jdkLibraryEntry;
    if (jdk != null) {
      jdkLibraryEntry = new ModuleJdkOrderEntryImpl(jdk, this, myProjectRootManager);
    }
    else {
      jdkLibraryEntry = null;
    }
    replaceEntryOfType(JdkOrderEntry.class, jdkLibraryEntry);
  }

  /** Sets an SDK entry by name/type for an SDK that cannot currently be resolved. */
  @Override
  public void setInvalidSdk(@NotNull String jdkName, String jdkType) {
    assertWritable();
    replaceEntryOfType(JdkOrderEntry.class, new ModuleJdkOrderEntryImpl(jdkName, jdkType, this, myProjectRootManager));
  }

  /** Makes the module inherit the project SDK. */
  @Override
  public void inheritSdk() {
    assertWritable();
    replaceEntryOfType(JdkOrderEntry.class, new InheritedJdkOrderEntryImpl(this, myProjectRootManager));
  }

  /**
   * Replaces the first entry of {@code entryClass} with {@code entry} (removing it when
   * {@code entry} is null); when no such entry exists and {@code entry} is non-null,
   * inserts it at position 0.
   */
  @Override
  public <T extends OrderEntry> void replaceEntryOfType(@NotNull Class<T> entryClass, @Nullable final T entry) {
    assertWritable();
    for (int i = 0; i < myOrderEntries.size(); i++) {
      OrderEntry orderEntry = myOrderEntries.get(i);
      if (entryClass.isInstance(orderEntry)) {
        myOrderEntries.remove(i);
        if (entry != null) {
          myOrderEntries.add(i, entry);
        }
        return;
      }
    }

    if (entry != null) {
      myOrderEntries.add(0, entry);
    }
  }

  /** @return the name of the first JDK order entry, or null when there is none */
  @Override
  public String getSdkName() {
    for (OrderEntry orderEntry : getOrderEntries()) {
      if (orderEntry instanceof JdkOrderEntry) {
        return ((JdkOrderEntry)orderEntry).getJdkName();
      }
    }
    return null;
  }

  public void assertWritable() {
    LOG.assertTrue(myWritable);
  }

  // Direct (non-transitive) dependency check by module identity.
  public boolean isDependsOn(final Module module) {
    for (OrderEntry entry : getOrderEntries()) {
      if (entry instanceof ModuleOrderEntry) {
        final Module module1 = ((ModuleOrderEntry)entry).getModule();
        if (module1 == module) {
          return true;
        }
      }
    }
    return false;
  }

  public boolean isOrderEntryDisposed() {
    for (OrderEntry entry : myOrderEntries) {
      if (entry instanceof RootModelComponentBase && ((RootModelComponentBase)entry).isDisposed()) return true;
    }
    return false;
  }

  @Override
  protected Set<ContentEntry> getContent() {
    return myContent;
  }

  // Orders content entries by URL; equality of URL means "same entry" for myContent.
  private static class ContentComparator implements Comparator<ContentEntry> {
    public static final ContentComparator INSTANCE = new ContentComparator();

    @Override
    public int compare(@NotNull final ContentEntry o1, @NotNull final ContentEntry o2) {
      return o1.getUrl().compareTo(o2.getUrl());
    }
  }

  @Override
  @NotNull
  public Module getModule() {
    return myModuleRootManager.getModule();
  }

  @Override
  public boolean isChanged() {
    if (!myWritable) return false;

    for (ModuleExtension moduleExtension : myExtensions) {
      if (moduleExtension.isChanged()) return true;
    }

    return areOrderEntriesChanged() || areContentEntriesChanged();
  }

  private boolean areContentEntriesChanged() {
    return ArrayUtil.lexicographicCompare(getContentEntries(), getSourceModel().getContentEntries()) != 0;
  }

  // Positional comparison against the source model's entries.
  private boolean areOrderEntriesChanged() {
    OrderEntry[] orderEntries = getOrderEntries();
    OrderEntry[] sourceOrderEntries = getSourceModel().getOrderEntries();
    if (orderEntries.length != sourceOrderEntries.length) return true;
    for (int i = 0; i < orderEntries.length; i++) {
      OrderEntry orderEntry = orderEntries[i];
      OrderEntry sourceOrderEntry = sourceOrderEntries[i];
      if (!orderEntriesEquals(orderEntry, sourceOrderEntry)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Structural equality of two order entries: same concrete type, then
   * type-specific fields (JDK name, exported flag/scope, module name and
   * production-on-test flag, library name/level and excluded roots), and finally
   * the root URLs for every {@link OrderRootType}.
   */
  private static boolean orderEntriesEquals(@NotNull OrderEntry orderEntry1, @NotNull OrderEntry orderEntry2) {
    if (!((OrderEntryBaseImpl)orderEntry1).sameType(orderEntry2)) return false;
    if (orderEntry1 instanceof JdkOrderEntry) {
      if (!(orderEntry2 instanceof JdkOrderEntry)) return false;
      if (orderEntry1 instanceof InheritedJdkOrderEntry && orderEntry2 instanceof ModuleJdkOrderEntry) {
        return false;
      }
      if (orderEntry2 instanceof InheritedJdkOrderEntry && orderEntry1 instanceof ModuleJdkOrderEntry) {
        return false;
      }
      if (orderEntry1 instanceof ModuleJdkOrderEntry && orderEntry2 instanceof ModuleJdkOrderEntry) {
        String name1 = ((ModuleJdkOrderEntry)orderEntry1).getJdkName();
        String name2 = ((ModuleJdkOrderEntry)orderEntry2).getJdkName();
        if (!Comparing.strEqual(name1, name2)) {
          return false;
        }
      }
    }
    if (orderEntry1 instanceof ExportableOrderEntry) {
      if (!(((ExportableOrderEntry)orderEntry1).isExported() == ((ExportableOrderEntry)orderEntry2).isExported())) {
        return false;
      }
      if (!(((ExportableOrderEntry)orderEntry1).getScope() == ((ExportableOrderEntry)orderEntry2).getScope())) {
        return false;
      }
    }
    if (orderEntry1 instanceof ModuleOrderEntry) {
      LOG.assertTrue(orderEntry2 instanceof ModuleOrderEntry);
      ModuleOrderEntryImpl entry1 = (ModuleOrderEntryImpl)orderEntry1;
      ModuleOrderEntryImpl entry2 = (ModuleOrderEntryImpl)orderEntry2;
      return entry1.isProductionOnTestDependency() == entry2.isProductionOnTestDependency()
             && Comparing.equal(entry1.getModuleName(), entry2.getModuleName());
    }

    if (orderEntry1 instanceof LibraryOrderEntry) {
      LOG.assertTrue(orderEntry2 instanceof LibraryOrderEntry);
      LibraryOrderEntry libraryOrderEntry1 = (LibraryOrderEntry)orderEntry1;
      LibraryOrderEntry libraryOrderEntry2 = (LibraryOrderEntry)orderEntry2;
      boolean equal = Comparing.equal(libraryOrderEntry1.getLibraryName(), libraryOrderEntry2.getLibraryName())
                      && Comparing.equal(libraryOrderEntry1.getLibraryLevel(), libraryOrderEntry2.getLibraryLevel());
      if (!equal) return false;

      Library library1 = libraryOrderEntry1.getLibrary();
      Library library2 = libraryOrderEntry2.getLibrary();
      if (library1 != null && library2 != null) {
        if (!Arrays.equals(((LibraryEx)library1).getExcludedRootUrls(), ((LibraryEx)library2).getExcludedRootUrls())) {
          return false;
        }
      }
    }

    final OrderRootType[] allTypes = OrderRootType.getAllTypes();
    for (OrderRootType type : allTypes) {
      final String[] orderedRootUrls1 = orderEntry1.getUrls(type);
      final String[] orderedRootUrls2 = orderEntry2.getUrls(type);
      if (!Arrays.equals(orderedRootUrls1, orderedRootUrls2)) {
        return false;
      }
    }
    return true;
  }

  // Routes an externally-triggered change through the root manager's roots-changed
  // machinery; no-op for writable or disposed models.
  void makeExternalChange(@NotNull Runnable runnable) {
    if (myWritable || myDisposed) return;
    myModuleRootManager.makeRootsChange(runnable);
  }

  @Override
  public void dispose() {
    assert !myDisposed;
    Disposer.dispose(myDisposable);  // disposes everything registered via registerOnDispose
    myExtensions.clear();
    myWritable = false;
    myDisposed = true;
  }

  /**
   * Order-entry list that maintains each entry's index (via
   * {@link OrderEntryBaseImpl#setIndex}) and invalidates the cached
   * {@code myCachedOrderEntries} array on every structural modification.
   */
  private class Order extends ArrayList<OrderEntry> {
    @Override
    public void clear() {
      super.clear();
      clearCachedEntries();
    }

    @NotNull
    @Override
    public OrderEntry set(int i, @NotNull OrderEntry orderEntry) {
      super.set(i, orderEntry);
      ((OrderEntryBaseImpl)orderEntry).setIndex(i);
      clearCachedEntries();
      return orderEntry;
    }

    @Override
    public boolean add(@NotNull OrderEntry orderEntry) {
      super.add(orderEntry);
      ((OrderEntryBaseImpl)orderEntry).setIndex(size() - 1);
      clearCachedEntries();
      return true;
    }

    @Override
    public void add(int i, OrderEntry orderEntry) {
      super.add(i, orderEntry);
      clearCachedEntries();
      setIndicies(i);  // reindex from the insertion point onwards
    }

    @Override
    public OrderEntry remove(int i) {
      OrderEntry entry = super.remove(i);
      setIndicies(i);  // reindex entries shifted left by the removal
      clearCachedEntries();
      return entry;
    }

    @Override
    public boolean remove(Object o) {
      int index = indexOf(o);
      if (index < 0) return false;
      remove(index);
      // NOTE(review): remove(index) above already clears the cache; this second
      // clearCachedEntries() is redundant but harmless.
      clearCachedEntries();
      return true;
    }

    @Override
    public boolean addAll(Collection<? extends OrderEntry> collection) {
      int startSize = size();
      boolean result = super.addAll(collection);
      setIndicies(startSize);
      clearCachedEntries();
      return result;
    }

    @Override
    public boolean addAll(int i, Collection<? extends OrderEntry> collection) {
      boolean result = super.addAll(i, collection);
      setIndicies(i);
      clearCachedEntries();
      return result;
    }

    @Override
    public void removeRange(int i, int i1) {
      super.removeRange(i, i1);
      clearCachedEntries();
      setIndicies(i);
    }

    @Override
    public boolean removeAll(Collection<?> collection) {
      boolean result = super.removeAll(collection);
      setIndicies(0);
      clearCachedEntries();
      return result;
    }

    @Override
    public boolean retainAll(Collection<?> collection) {
      boolean result = super.retainAll(collection);
      setIndicies(0);
      clearCachedEntries();
      return result;
    }

    private void clearCachedEntries() {
      myCachedOrderEntries = null;
    }

    // Rewrites stored indices for all entries from startIndex to the end.
    private void setIndicies(int startIndex) {
      for (int j = startIndex; j < size(); j++) {
        ((OrderEntryBaseImpl)get(j)).setIndex(j);
      }
    }
  }

  // The committed model this writable copy was created from; only meaningful while writable.
  private RootModelImpl getSourceModel() {
    assertWritable();
    return myModuleRootManager.getRootModel();
  }

  @Override
  public String toString() {
    return "RootModelImpl{" +
           "module=" + getModule().getName() +
           ", writable=" + myWritable +
           ", disposed=" + myDisposed +
           '}';
  }

  /** @return the first registered extension assignable to {@code klass}, or null */
  @Nullable
  @Override
  public <T> T getModuleExtension(@NotNull final Class<T> klass) {
    for (ModuleExtension extension : myExtensions) {
      if (klass.isAssignableFrom(extension.getClass())) {
        //noinspection unchecked
        return (T)extension;
      }
    }
    return null;
  }

  // All registered disposables are released together in dispose() via myDisposable.
  void registerOnDispose(@NotNull Disposable disposable) {
    myDisposable.add(disposable);
  }
}
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.jetbrains.python.inspections; import com.intellij.codeInspection.LocalInspectionToolSession; import com.intellij.codeInspection.LocalQuickFix; import com.intellij.codeInspection.ProblemDescriptor; import com.intellij.codeInspection.ProblemsHolder; import com.intellij.codeInspection.ex.InspectionProfileModifiableModelKt; import com.intellij.openapi.project.Project; import com.intellij.psi.PsiElementVisitor; import com.intellij.psi.PsiFile; import com.jetbrains.python.PyPsiBundle; import com.jetbrains.python.PyTokenTypes; import com.jetbrains.python.PythonUiService; import com.jetbrains.python.inspections.quickfix.ChainedComparisonsQuickFix; import com.jetbrains.python.psi.PyBinaryExpression; import com.jetbrains.python.psi.PyElementType; import com.jetbrains.python.psi.PyExpression; import com.jetbrains.python.psi.PyLiteralExpression; import com.jetbrains.python.psi.types.TypeEvalContext; import org.jetbrains.annotations.Nls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.awt.*; import static com.jetbrains.python.psi.PyUtil.as; /** * User: catherine * * Inspection to detect chained comparisons which can be simplified * For instance, a < b and b < c --> a < b < c */ public class PyChainedComparisonsInspection extends PyInspection { private static final String INSPECTION_SHORT_NAME = 
"PyChainedComparisonsInspection";

  // Inspection option (bound by field name from the options panel below): when
  // true, chains whose shared middle operand is a literal constant
  // (e.g. "x < 3 and 3 < y") are not reported.
  public boolean ignoreConstantInTheMiddle = false;

  @Nullable
  @Override
  public JComponent createOptionsPanel() {
    // The checkbox is wired to the "ignoreConstantInTheMiddle" field by name.
    JCheckBox checkBox = PythonUiService.getInstance().createInspectionCheckBox(
      PyPsiBundle.message("INSP.chained.comparisons.ignore.statements.with.constant.in.the.middle"),
      this, "ignoreConstantInTheMiddle");
    final JPanel rootPanel = new JPanel(new BorderLayout());
    if (checkBox != null) {
      rootPanel.add(checkBox, BorderLayout.PAGE_START);
    }
    return rootPanel;
  }

  @NotNull
  @Override
  public PsiElementVisitor buildVisitor(@NotNull ProblemsHolder holder,
                                        boolean isOnTheFly,
                                        @NotNull LocalInspectionToolSession session) {
    return new Visitor(holder, ignoreConstantInTheMiddle, PyInspectionVisitor.getContext(session));
  }

  private static class Visitor extends PyInspectionVisitor {
    /**
     * @see ChainedComparisonsQuickFix#ChainedComparisonsQuickFix(boolean, boolean, boolean)
     */
    // NOTE(review): this visitor is stateful per visited expression — the
    // fields below are mutated as a side effect of the isRightSimplified /
    // isLeftSimplified probes and then consumed when building the quick fix.
    boolean myIsLeft;
    boolean myIsRight;
    PyElementType myOperator;
    boolean getInnerRight;
    boolean isConstantInTheMiddle;
    boolean ignoreConstantInTheMiddle;

    Visitor(@Nullable ProblemsHolder holder, boolean ignoreConstantInTheMiddle, @NotNull TypeEvalContext context) {
      super(holder, context);
      this.ignoreConstantInTheMiddle = ignoreConstantInTheMiddle;
    }

    @Override
    public void visitPyBinaryExpression(final @NotNull PyBinaryExpression node) {
      // Reset per-expression matcher state before analysis.
      myIsLeft = false;
      myIsRight = false;
      myOperator = null;
      getInnerRight = false;
      final PyBinaryExpression leftExpression = as(node.getLeftExpression(), PyBinaryExpression.class);
      final PyBinaryExpression rightExpression = as(node.getRightExpression(), PyBinaryExpression.class);
      // Only "<expr> and <expr>" conjunctions of binary expressions are candidates.
      if (leftExpression != null && rightExpression != null && node.getOperator() == PyTokenTypes.AND_KEYWORD) {
        boolean applicable = false;
        if (leftExpression.getOperator() == PyTokenTypes.AND_KEYWORD) {
          // The left side is itself an "and": try to join either of its
          // operands with the right-hand comparison.
          final PyBinaryExpression leftLeft = as(leftExpression.getLeftExpression(), PyBinaryExpression.class);
          final PyBinaryExpression leftRight = as(leftExpression.getRightExpression(), PyBinaryExpression.class);
          if (leftLeft != null && (isRightSimplified(leftLeft, rightExpression) || isLeftSimplified(leftLeft, rightExpression))) {
            applicable = true;
            getInnerRight = false;
          }
          else if (leftRight != null && (isRightSimplified(leftRight, rightExpression) || isLeftSimplified(leftRight, rightExpression))) {
            applicable = true;
            getInnerRight = true;
          }
        }
        else if (isRightSimplified(leftExpression, rightExpression) || isLeftSimplified(leftExpression, rightExpression)) {
          applicable = true;
        }
        if (applicable) {
          if (isConstantInTheMiddle) {
            // Shared operand is a literal: report only when the option allows
            // it, and additionally offer a fix that flips the option.
            if (!ignoreConstantInTheMiddle) {
              registerProblem(node, PyPsiBundle.message("INSP.simplify.chained.comparison"),
                              new ChainedComparisonsQuickFix(myIsLeft, myIsRight, getInnerRight),
                              new DontSimplifyStatementsWithConstantInTheMiddleQuickFix());
            }
          }
          else {
            registerProblem(node, PyPsiBundle.message("INSP.simplify.chained.comparison"),
                            new ChainedComparisonsQuickFix(myIsLeft, myIsRight, getInnerRight));
          }
        }
      }
    }

    // Returns true when the RIGHT operand of leftExpression textually matches
    // an operand of rightExpression, i.e. the two comparisons can be joined
    // into one chain. Side effects: sets myOperator, myIsLeft, myIsRight and
    // isConstantInTheMiddle for the caller.
    private boolean isRightSimplified(@NotNull final PyBinaryExpression leftExpression,
                                      @NotNull final PyBinaryExpression rightExpression) {
      final PyExpression leftRight = leftExpression.getRightExpression();
      if (leftRight instanceof PyBinaryExpression &&
          PyTokenTypes.RELATIONAL_OPERATIONS.contains(((PyBinaryExpression)leftRight).getOperator())) {
        // Recurse into a nested relational expression on the right first.
        if (isRightSimplified((PyBinaryExpression)leftRight, rightExpression)) {
          return true;
        }
      }
      myOperator = leftExpression.getOperator();
      if (PyTokenTypes.RELATIONAL_OPERATIONS.contains(myOperator)) {
        if (leftRight != null) {
          // Operands are compared by text: they must be syntactically identical.
          if (leftRight.getText().equals(getLeftExpression(rightExpression, true).getText())) {
            myIsLeft = false;
            myIsRight = true;
            isConstantInTheMiddle = leftRight instanceof PyLiteralExpression;
            return true;
          }
          final PyExpression right = getSmallestRight(rightExpression, true);
          if (right != null && leftRight.getText().equals(right.getText())) {
            myIsLeft = false;
            myIsRight = false;
            isConstantInTheMiddle = leftRight instanceof PyLiteralExpression;
            return true;
          }
        }
      }
      return false;
    }

    // True when op1 and op2 compare in opposite directions (e.g. ">" vs "<=").
    private static boolean isOpposite(final PyElementType op1, final PyElementType op2) {
      if ((op1 == PyTokenTypes.GT || op1 == PyTokenTypes.GE) && (op2 == PyTokenTypes.LT || op2 == PyTokenTypes.LE)) {
        return true;
      }
      if ((op2 == PyTokenTypes.GT || op2 == PyTokenTypes.GE) && (op1 == PyTokenTypes.LT || op1 == PyTokenTypes.LE)) {
        return true;
      }
      return false;
    }

    // Mirror of isRightSimplified for the LEFT operand of leftExpression.
    // Same side effects on the matcher-state fields.
    private boolean isLeftSimplified(PyBinaryExpression leftExpression, PyBinaryExpression rightExpression) {
      final PyExpression leftLeft = leftExpression.getLeftExpression();
      if (leftLeft instanceof PyBinaryExpression &&
          PyTokenTypes.RELATIONAL_OPERATIONS.contains(((PyBinaryExpression)leftLeft).getOperator())) {
        if (isLeftSimplified((PyBinaryExpression)leftLeft, rightExpression)) {
          return true;
        }
      }
      myOperator = leftExpression.getOperator();
      if (PyTokenTypes.RELATIONAL_OPERATIONS.contains(myOperator)) {
        if (leftLeft != null) {
          if (leftLeft.getText().equals(getLeftExpression(rightExpression, false).getText())) {
            myIsLeft = true;
            myIsRight = true;
            isConstantInTheMiddle = leftLeft instanceof PyLiteralExpression;
            return true;
          }
          final PyExpression right = getSmallestRight(rightExpression, false);
          if (right != null && leftLeft.getText().equals(right.getText())) {
            myIsLeft = true;
            myIsRight = false;
            isConstantInTheMiddle = leftLeft instanceof PyLiteralExpression;
            return true;
          }
        }
      }
      return false;
    }

    // Walks down the LEFT operands of a relational/equality chain, stopping
    // when the operator direction relative to myOperator (see isOpposite) says
    // the chain cannot be joined further; returns the expression reached.
    private PyExpression getLeftExpression(PyBinaryExpression expression, boolean isRight) {
      PyExpression result = expression;
      while (result instanceof PyBinaryExpression &&
             (PyTokenTypes.RELATIONAL_OPERATIONS.contains(((PyBinaryExpression)result).getOperator()) ||
              PyTokenTypes.EQUALITY_OPERATIONS.contains(((PyBinaryExpression)result).getOperator()))) {
        final boolean opposite = isOpposite(((PyBinaryExpression)result).getOperator(), myOperator);
        if ((isRight && opposite) || (!isRight && !opposite)) {
          break;
        }
        result = ((PyBinaryExpression)result).getLeftExpression();
      }
      return result;
    }

    // Mirror of getLeftExpression, walking down the RIGHT operands; may reach
    // a null right operand, hence @Nullable.
    @Nullable
    private PyExpression getSmallestRight(PyBinaryExpression expression, boolean isRight) {
      PyExpression result = expression;
      while (result instanceof PyBinaryExpression &&
             (PyTokenTypes.RELATIONAL_OPERATIONS.contains(((PyBinaryExpression)result).getOperator()) ||
              PyTokenTypes.EQUALITY_OPERATIONS.contains(((PyBinaryExpression)result).getOperator()))) {
        final boolean opposite = isOpposite(((PyBinaryExpression)result).getOperator(), myOperator);
        if ((isRight && !opposite) || (!isRight && opposite)) {
          break;
        }
        result = ((PyBinaryExpression)result).getRightExpression();
      }
      return result;
    }
  }

  // Quick fix that, instead of rewriting the code, enables the
  // "ignore constant in the middle" option on the project inspection profile.
  private static class DontSimplifyStatementsWithConstantInTheMiddleQuickFix implements LocalQuickFix {
    @Nls
    @NotNull
    @Override
    public String getFamilyName() {
      return PyPsiBundle.message("INSP.chained.comparisons.ignore.statements.with.constant.in.the.middle");
    }

    @Override
    public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
      final PsiFile file = descriptor.getStartElement().getContainingFile();
      // Mutates the inspection instance registered in the project profile, so
      // the option change is persisted for the whole project.
      InspectionProfileModifiableModelKt.modifyAndCommitProjectProfile(project, it ->
        ((PyChainedComparisonsInspection)it.getUnwrappedTool(INSPECTION_SHORT_NAME, file)).ignoreConstantInTheMiddle = true);
    }
  }
}
/* Copyright (c) 2008, Nathan Sweet * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following * conditions are met: * * - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided with the distribution. * - Neither the name of Esoteric Software nor the names of its contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT * SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package com.esotericsoftware.kryo.serializers; import static com.esotericsoftware.minlog.Log.*; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.lang.reflect.Type; import java.lang.reflect.TypeVariable; import java.security.AccessControlException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.List; import com.esotericsoftware.kryo.Kryo; import com.esotericsoftware.kryo.NotNull; import com.esotericsoftware.kryo.Serializer; import com.esotericsoftware.kryo.io.Input; import com.esotericsoftware.kryo.io.Output; import com.esotericsoftware.kryo.util.IntArray; import com.esotericsoftware.kryo.util.ObjectMap; import com.esotericsoftware.kryo.util.Util; import com.esotericsoftware.reflectasm.FieldAccess; // BOZO - Make primitive serialization with ReflectASM configurable? /** Serializes objects using direct field assignment. FieldSerializer is generic and can serialize most classes without any * configuration. It is efficient and writes only the field data, without any extra information. It does not support adding, * removing, or changing the type of fields without invalidating previously serialized bytes. This can be acceptable in many * situations, such as when sending data over a network, but may not be a good choice for long term data storage because the Java * classes cannot evolve. Because FieldSerializer attempts to read and write non-public fields by default, it is important to * evaluate each class that will be serialized. If fields are public, bytecode generation will be used instead of reflection. 
* @see Serializer
 * @see Kryo#register(Class, Serializer)
 * @see VersionFieldSerializer
 * @see TaggedFieldSerializer
 * @see CompatibleFieldSerializer
 * @author Nathan Sweet <misc@n4te.com>
 * @author Roman Levenstein <romixlev@gmail.com> */
public class FieldSerializer<T> extends Serializer<T> implements Comparator<FieldSerializer.CachedField> {
	final Kryo kryo;
	final Class type;
	/** type variables declared for this type */
	final TypeVariable[] typeParameters;
	// Component type of the serialized class when it declares no type
	// variables (see constructor), otherwise null.
	final Class componentType;
	protected final FieldSerializerConfig config;
	// Cached non-transient fields, kept sorted by compare(...) below.
	private CachedField[] fields = new CachedField[0];
	private CachedField[] transientFields = new CachedField[0];
	protected HashSet<CachedField> removedFields = new HashSet();
	// ReflectASM FieldAccess for public fields, or null. Held as Object so
	// there is no hard link to ReflectASM classes until actually used.
	Object access;
	private FieldSerializerUnsafeUtil unsafeUtil;
	private FieldSerializerGenericsUtil genericsUtil;
	private FieldSerializerAnnotationsUtil annotationsUtil;
	/** Concrete classes passed as values for type variables */
	private Class[] generics;
	private Generics genericsScope;
	/** If set, this serializer tries to use a variable length encoding for int and long fields */
	private boolean varIntsEnabled;
	/** If set, adjacent primitive fields are written in bulk. This flag may only work with Oracle JVMs, because they layout
	 * primitive fields in memory in such a way that primitive fields are grouped together. This option has effect only when used
	 * with Unsafe-based FieldSerializer.
	 * <p>
	 * FIXME: Not all versions of Sun/Oracle JDK properly work with this option. Disable it for now. Later add dynamic checks to
	 * see if this feature is supported by a current JDK version.
	 * </p> */
	private boolean useMemRegions = false;
	private boolean hasObjectFields = false;

	static CachedFieldFactory asmFieldFactory;
	static CachedFieldFactory objectFieldFactory;
	static CachedFieldFactory unsafeFieldFactory;

	static boolean unsafeAvailable;
	static Class<?> unsafeUtilClass;
	static Method sortFieldsByOffsetMethod;

	static {
		// Probe for sun.misc.Unsafe reflectively so that platforms without it
		// still load this class; any failure just leaves unsafeAvailable false.
		try {
			unsafeUtilClass = FieldSerializer.class.getClassLoader().loadClass("com.esotericsoftware.kryo.util.UnsafeUtil");
			Method unsafeMethod = unsafeUtilClass.getMethod("unsafe");
			sortFieldsByOffsetMethod = unsafeUtilClass.getMethod("sortFieldsByOffset", List.class);
			Object unsafe = unsafeMethod.invoke(null);
			if (unsafe != null) unsafeAvailable = true;
		} catch (Throwable e) {
			if (TRACE) trace("kryo", "sun.misc.Unsafe is unavailable.");
		}
	}

	// Instance initializer: variable-length int/long encoding is on by default.
	{
		varIntsEnabled = true;
		if (TRACE) trace("kryo", "Optimize ints: " + varIntsEnabled);
	}

	public FieldSerializer (Kryo kryo, Class type) {
		this(kryo, type, null);
	}

	public FieldSerializer (Kryo kryo, Class type, Class[] generics) {
		this(kryo, type, generics, kryo.getFieldSerializerConfig().clone());
	}

	protected FieldSerializer (Kryo kryo, Class type, Class[] generics, FieldSerializerConfig config) {
		this.config = config;
		this.kryo = kryo;
		this.type = type;
		this.generics = generics;
		this.typeParameters = type.getTypeParameters();
		if (this.typeParameters == null || this.typeParameters.length == 0)
			this.componentType = type.getComponentType();
		else
			this.componentType = null;
		this.genericsUtil = new FieldSerializerGenericsUtil(this);
		this.unsafeUtil = FieldSerializerUnsafeUtil.Factory.getInstance(this);
		this.annotationsUtil = new FieldSerializerAnnotationsUtil(this);
		rebuildCachedFields();
	}

	/** Called when the list of cached fields must be rebuilt. This is done any time settings are changed that affect which fields
	 * will be used. It is called from the constructor for FieldSerializer, but not for subclasses. Subclasses must call this from
	 * their constructor. */
	protected void rebuildCachedFields () {
		rebuildCachedFields(false);
	}

	/** Rebuilds the list of cached fields.
	 * @param minorRebuild if set, processing due to changes in generic type parameters will be optimized */
	protected void rebuildCachedFields (boolean minorRebuild) {
		// TODO: Optimize rebuildCachedFields invocations performed due to changes in generic type parameters
		if (TRACE && generics != null) trace("kryo", "Generic type parameters: " + Arrays.toString(generics));
		if (type.isInterface()) {
			fields = new CachedField[0]; // No fields to serialize.
			return;
		}

		hasObjectFields = false;

		if (config.isOptimizedGenerics()) {
			// For generic classes, generate a mapping from type variable names to the concrete types
			// This mapping is the same for the whole class.
			Generics genScope = genericsUtil.buildGenericsScope(type, generics);
			genericsScope = genScope;

			// Push proper scopes at serializer construction time
			if (genericsScope != null) kryo.getGenericsResolver().pushScope(type, genericsScope);
		}

		List<Field> validFields;
		List<Field> validTransientFields;
		IntArray useAsm = new IntArray();

		if (!minorRebuild) {
			// Collect all non-static fields declared on the type and its superclasses.
			List<Field> allFields = new ArrayList();
			Class nextClass = type;
			while (nextClass != Object.class) {
				Field[] declaredFields = nextClass.getDeclaredFields();
				if (declaredFields != null) {
					for (Field f : declaredFields) {
						if (Modifier.isStatic(f.getModifiers())) continue;
						allFields.add(f);
					}
				}
				nextClass = nextClass.getSuperclass();
			}

			ObjectMap context = kryo.getContext();

			// Sort fields by their offsets
			if (useMemRegions && !config.isUseAsm() && unsafeAvailable) {
				try {
					Field[] allFieldsArray = (Field[])sortFieldsByOffsetMethod.invoke(null, allFields);
					allFields = Arrays.asList(allFieldsArray);
				} catch (Exception e) {
					throw new RuntimeException("Cannot invoke UnsafeUtil.sortFieldsByOffset()", e);
				}
			}

			// TODO: useAsm is modified as a side effect, this should be pulled out of buildValidFields
			// Build a list of valid non-transient fields
			validFields = buildValidFields(false, allFields, context, useAsm);
			// Build a list of valid transient fields
			validTransientFields = buildValidFields(true, allFields, context, useAsm);

			// Use ReflectASM for any public fields.
			if (config.isUseAsm() && !Util.IS_ANDROID && Modifier.isPublic(type.getModifiers()) && useAsm.indexOf(1) != -1) {
				try {
					access = FieldAccess.get(type);
				} catch (RuntimeException ignored) {
					// Best-effort: fall back to reflection when ASM access cannot be built.
				}
			}
		} else {
			// It is a minor rebuild: reuse the Field objects already cached.
			validFields = buildValidFieldsFromCachedFields(fields, useAsm);
			// Build a list of valid transient fields
			validTransientFields = buildValidFieldsFromCachedFields(transientFields, useAsm);
		}

		List<CachedField> cachedFields = new ArrayList(validFields.size());
		List<CachedField> cachedTransientFields = new ArrayList(validTransientFields.size());

		// Process non-transient fields
		createCachedFields(useAsm, validFields, cachedFields, 0);
		// Process transient fields
		createCachedFields(useAsm, validTransientFields, cachedTransientFields, validFields.size());

		// Sorted so the order of the serialized data is deterministic (see compare).
		Collections.sort(cachedFields, this);
		fields = cachedFields.toArray(new CachedField[cachedFields.size()]);

		Collections.sort(cachedTransientFields, this);
		transientFields = cachedTransientFields.toArray(new CachedField[cachedTransientFields.size()]);

		initializeCachedFields();

		// Balance the pushScope performed above.
		if (genericsScope != null) kryo.getGenericsResolver().popScope();

		if (!minorRebuild) {
			// Re-apply removals requested before this rebuild.
			for (CachedField field : removedFields)
				removeField(field);
		}

		annotationsUtil.processAnnotatedFields(this);
	}

	// Extracts the Field objects (and their ASM-usability flags) from an
	// existing CachedField array for a minor rebuild.
	private List<Field> buildValidFieldsFromCachedFields (CachedField[] cachedFields, IntArray useAsm) {
		ArrayList<Field> fields = new ArrayList<Field>(cachedFields.length);
		for (CachedField f : cachedFields) {
			fields.add(f.field);
			useAsm.add((f.accessIndex > -1) ? 1 : 0);
		}
		return fields;
	}

	// Filters allFields down to the serializable ones. Appends one 1/0 entry
	// per accepted field to useAsm (side effect, see TODO in caller).
	private List<Field> buildValidFields (boolean transientFields, List<Field> allFields, ObjectMap context, IntArray useAsm) {
		List<Field> result = new ArrayList(allFields.size());

		for (int i = 0, n = allFields.size(); i < n; i++) {
			Field field = allFields.get(i);

			int modifiers = field.getModifiers();
			if (Modifier.isTransient(modifiers) != transientFields) continue;
			if (Modifier.isStatic(modifiers)) continue;
			if (field.isSynthetic() && config.isIgnoreSyntheticFields()) continue;

			if (!field.isAccessible()) {
				if (!config.isSetFieldsAsAccessible()) continue;
				try {
					field.setAccessible(true);
				} catch (AccessControlException ex) {
					// Security manager forbids access: silently skip this field.
					continue;
				}
			}

			// @Optional fields are only serialized when the context has the key.
			Optional optional = field.getAnnotation(Optional.class);
			if (optional != null && !context.containsKey(optional.value())) continue;

			result.add(field);

			// BOZO - Must be public?
			useAsm.add(!Modifier.isFinal(modifiers) && Modifier.isPublic(modifiers)
				&& Modifier.isPublic(field.getType().getModifiers()) ? 1 : 0);
		}

		return result;
	}

	// Builds CachedField instances for validFields, delegating to the Unsafe
	// path when ASM is off and memory regions are enabled.
	private void createCachedFields (IntArray useAsm, List<Field> validFields, List<CachedField> cachedFields, int baseIndex) {
		if (config.isUseAsm() || !useMemRegions) {
			for (int i = 0, n = validFields.size(); i < n; i++) {
				Field field = validFields.get(i);

				int accessIndex = -1;
				if (access != null && useAsm.get(baseIndex + i) == 1) accessIndex = ((FieldAccess)access).getIndex(field.getName());
				cachedFields.add(newCachedField(field, cachedFields.size(), accessIndex));
			}
		} else {
			unsafeUtil.createUnsafeCacheFieldsAndRegions(validFields, cachedFields, baseIndex, useAsm);
		}
	}

	/** Sets the concrete classes used for this serializer's type variables and triggers a minor rebuild of the cached fields. */
	public void setGenerics (Kryo kryo, Class[] generics) {
		if (!config.isOptimizedGenerics()) return;
		this.generics = generics;
		if (typeParameters != null && typeParameters.length > 0) {
			// There is no need to rebuild all cached fields from scratch.
			// Generic parameter types do not affect the set of fields, offsets of fields,
			// transient and non-transient properties. They only affect the type of
			// fields and serializers selected for each field.
			rebuildCachedFields(true);
		}
	}

	/** Get generic type parameters of the class controlled by this serializer.
	 * @return generic type parameters or null, if there are none. */
	public Class[] getGenerics () {
		return generics;
	}

	// Hook for subclasses; called at the end of rebuildCachedFields.
	protected void initializeCachedFields () {
	}

	// Builds and configures a CachedField for the given reflective field.
	CachedField newCachedField (Field field, int fieldIndex, int accessIndex) {
		Class[] fieldClass = new Class[] {field.getType()};
		Type fieldGenericType = (config.isOptimizedGenerics()) ? field.getGenericType() : null;
		CachedField cachedField;

		if (!config.isOptimizedGenerics() || fieldGenericType == fieldClass[0]) {
			// For optimized generics this is a field without generic type parameters
			if (TRACE) trace("kryo", "Field " + field.getName() + ": " + fieldClass[0]);
			cachedField = newMatchingCachedField(field, accessIndex, fieldClass[0], fieldGenericType, null);
		} else {
			cachedField = genericsUtil.newCachedFieldOfGenericType(field, accessIndex, fieldClass, fieldGenericType);
		}

		if (cachedField instanceof ObjectField) {
			hasObjectFields = true;
		}

		cachedField.field = field;
		cachedField.varIntsEnabled = varIntsEnabled;
		if (!config.isUseAsm()) {
			cachedField.offset = unsafeUtil.getObjectFieldOffset(field);
		}

		cachedField.access = (FieldAccess)access;
		cachedField.accessIndex = accessIndex;
		cachedField.canBeNull = config.isFieldsCanBeNull() && !fieldClass[0].isPrimitive()
			&& !field.isAnnotationPresent(NotNull.class);

		// Always use the same serializer for this field if the field's class is final.
		if (kryo.isFinal(fieldClass[0]) || config.isFixedFieldTypes()) cachedField.valueClass = fieldClass[0];

		return cachedField;
	}

	// Chooses the CachedField implementation: ASM when an access index exists,
	// Unsafe when ASM is disabled, otherwise plain reflection (ObjectField).
	CachedField newMatchingCachedField (Field field, int accessIndex, Class fieldClass, Type fieldGenericType,
		Class[] fieldGenerics) {
		CachedField cachedField;
		if (accessIndex != -1) {
			cachedField = getAsmFieldFactory().createCachedField(fieldClass, field, this);
		} else if (!config.isUseAsm()) {
			cachedField = getUnsafeFieldFactory().createCachedField(fieldClass, field, this);
		} else {
			cachedField = getObjectFieldFactory().createCachedField(fieldClass, field, this);
			if (config.isOptimizedGenerics()) {
				if (fieldGenerics != null)
					((ObjectField)cachedField).generics = fieldGenerics;
				else if (fieldGenericType != null) {
					Class[] cachedFieldGenerics = FieldSerializerGenericsUtil.getGenerics(fieldGenericType, kryo);
					((ObjectField)cachedField).generics = cachedFieldGenerics;
					if (TRACE) trace("kryo", "Field generics: " + Arrays.toString(cachedFieldGenerics));
				}
			}
		}
		return cachedField;
	}

	private CachedFieldFactory getAsmFieldFactory () {
		// Lazily created, shared across all FieldSerializer instances (static).
		if (asmFieldFactory == null) asmFieldFactory = new AsmCachedFieldFactory();
		return asmFieldFactory;
	}

	private CachedFieldFactory getObjectFieldFactory () {
		if (objectFieldFactory == null) objectFieldFactory = new ObjectCachedFieldFactory();
		return objectFieldFactory;
	}

	private CachedFieldFactory getUnsafeFieldFactory () {
		// Use reflection to load UnsafeFieldFactory, so that there is no explicit dependency
		// on anything using Unsafe. This is required to make FieldSerializer work on those
		// platforms that do not support sun.misc.Unsafe properly.
		if (unsafeFieldFactory == null) {
			try {
				unsafeFieldFactory = (CachedFieldFactory)this.getClass().getClassLoader()
					.loadClass("com.esotericsoftware.kryo.serializers.UnsafeCachedFieldFactory").newInstance();
			} catch (Exception e) {
				throw new RuntimeException("Cannot create UnsafeFieldFactory", e);
			}
		}
		return unsafeFieldFactory;
	}

	public int compare (CachedField o1, CachedField o2) {
		// Fields are sorted by alpha so the order of the data is known.
		return getCachedFieldName(o1).compareTo(getCachedFieldName(o2));
	}

	/** Sets the default value for {@link CachedField#setCanBeNull(boolean)}. Calling this method resets the {@link #getFields()
	 * cached fields}.
	 * @param fieldsCanBeNull False if none of the fields are null. Saves 0-1 byte per field. True if it is not known (default). */
	public void setFieldsCanBeNull (boolean fieldsCanBeNull) {
		config.setFieldsCanBeNull(fieldsCanBeNull);
		rebuildCachedFields();
	}

	/** Controls which fields are serialized. Calling this method resets the {@link #getFields() cached fields}.
	 * @param setFieldsAsAccessible If true, all non-transient fields (including private fields) will be serialized and
	 *           {@link Field#setAccessible(boolean) set as accessible} if necessary (default). If false, only fields in the public
	 *           API will be serialized. */
	public void setFieldsAsAccessible (boolean setFieldsAsAccessible) {
		config.setFieldsAsAccessible(setFieldsAsAccessible);
		rebuildCachedFields();
	}

	/** Controls if synthetic fields are serialized. Default is true. Calling this method resets the {@link #getFields() cached
	 * fields}.
	 * @param ignoreSyntheticFields If true, only non-synthetic fields will be serialized. */
	public void setIgnoreSyntheticFields (boolean ignoreSyntheticFields) {
		config.setIgnoreSyntheticFields(ignoreSyntheticFields);
		rebuildCachedFields();
	}

	/** Sets the default value for {@link CachedField#setClass(Class)} to the field's declared type. This allows FieldSerializer to
	 * be more efficient, since it knows field values will not be a subclass of their declared type. Default is false. Calling this
	 * method resets the {@link #getFields() cached fields}. */
	public void setFixedFieldTypes (boolean fixedFieldTypes) {
		config.setFixedFieldTypes(fixedFieldTypes);
		rebuildCachedFields();
	}

	/** Controls whether ASM should be used. Calling this method resets the {@link #getFields() cached fields}.
	 * @param setUseAsm If true, ASM will be used for fast serialization. If false, Unsafe will be used (default) */
	public void setUseAsm (boolean setUseAsm) {
		config.setUseAsm(setUseAsm);
		rebuildCachedFields();
	}

	// Enable/disable copying of transient fields
	public void setCopyTransient (boolean setCopyTransient) {
		config.setCopyTransient(setCopyTransient);
	}

	// Enable/disable serialization of transient fields
	public void setSerializeTransient (boolean setSerializeTransient) {
		config.setSerializeTransient(setSerializeTransient);
	}

	/** Controls if the serialization of generics should be optimized for smaller size.
	 * <p>
	 * <strong>Important:</strong> This setting changes the serialized representation, so that data can be deserialized only
	 * if this setting is the same as it was for serialization.
	 * </p>
	 * @param setOptimizedGenerics If true, the serialization of generics will be optimized for smaller size (default: false) */
	public void setOptimizedGenerics (boolean setOptimizedGenerics) {
		config.setOptimizedGenerics(setOptimizedGenerics);
		rebuildCachedFields();
	}

	/** This method can be called for different fields having the same type. Even though the raw type is the same, if the type is
	 * generic, it could happen that different concrete classes are used to instantiate it. Therefore, in case of different
	 * instantiation parameters, the fields analysis should be repeated.
	 *
	 * TODO: Cache serializer instances generated for a given set of generic parameters. Reuse it later instead of recomputing
	 * every time. */
	public void write (Kryo kryo, Output output, T object) {
		if (TRACE) trace("kryo", "FieldSerializer.write fields of class: " + object.getClass().getName());

		if (config.isOptimizedGenerics()) {
			if (typeParameters != null && generics != null) {
				// Rebuild fields info. It may result in rebuilding the genericScope
				rebuildCachedFields();
			}

			if (genericsScope != null) {
				// Push proper scopes at serializer usage time
				kryo.getGenericsResolver().pushScope(type, genericsScope);
			}
		}

		CachedField[] fields = this.fields;
		for (int i = 0, n = fields.length; i < n; i++)
			fields[i].write(output, object);

		// Serialize transient fields
		if (config.isSerializeTransient()) {
			for (int i = 0, n = transientFields.length; i < n; i++)
				transientFields[i].write(output, object);
		}

		if (config.isOptimizedGenerics() && genericsScope != null) {
			// Pop the scope for generics
			kryo.getGenericsResolver().popScope();
		}
	}

	/** Creates an instance via {@link #create(Kryo, Input, Class)} and reads all cached fields from the input. */
	public T read (Kryo kryo, Input input, Class<T> type) {
		try {
			if (config.isOptimizedGenerics()) {
				if (typeParameters != null && generics != null) {
					// Rebuild fields info. It may result in rebuilding the
					// genericScope
					rebuildCachedFields();
				}

				if (genericsScope != null) {
					// Push a new scope for generics
					kryo.getGenericsResolver().pushScope(type, genericsScope);
				}
			}

			T object = create(kryo, input, type);
			// Register the reference before reading fields to support cycles.
			kryo.reference(object);

			CachedField[] fields = this.fields;
			for (int i = 0, n = fields.length; i < n; i++)
				fields[i].read(input, object);

			// De-serialize transient fields
			if (config.isSerializeTransient()) {
				for (int i = 0, n = transientFields.length; i < n; i++)
					transientFields[i].read(input, object);
			}
			return object;
		} finally {
			if (config.isOptimizedGenerics() && genericsScope != null && kryo.getGenericsResolver() != null) {
				// Pop the scope for generics
				kryo.getGenericsResolver().popScope();
			}
		}
	}

	/** Used by {@link #read(Kryo, Input, Class)} to create the new object. This can be overridden to customize object creation, eg
	 * to call a constructor with arguments. The default implementation uses {@link Kryo#newInstance(Class)}. */
	protected T create (Kryo kryo, Input input, Class<T> type) {
		return kryo.newInstance(type);
	}

	/** Allows specific fields to be optimized. */
	public CachedField getField (String fieldName) {
		for (CachedField cachedField : fields)
			if (getCachedFieldName(cachedField).equals(fieldName)) return cachedField;
		throw new IllegalArgumentException("Field \"" + fieldName + "\" not found on class: " + type.getName());
	}

	// Name used for sorting and lookup, per the configured naming strategy.
	protected String getCachedFieldName (CachedField cachedField) {
		return config.getCachedFieldNameStrategy().getName(cachedField);
	}

	/** Removes a field so that it won't be serialized. */
	public void removeField (String fieldName) {
		for (int i = 0; i < fields.length; i++) {
			CachedField cachedField = fields[i];
			if (getCachedFieldName(cachedField).equals(fieldName)) {
				CachedField[] newFields = new CachedField[fields.length - 1];
				System.arraycopy(fields, 0, newFields, 0, i);
				System.arraycopy(fields, i + 1, newFields, i, newFields.length - i);
				fields = newFields;
				// Remember the removal so rebuildCachedFields can re-apply it.
				removedFields.add(cachedField);
				return;
			}
		}

		for (int i = 0; i < transientFields.length; i++) {
			CachedField cachedField = transientFields[i];
			if (getCachedFieldName(cachedField).equals(fieldName)) {
				CachedField[] newFields = new CachedField[transientFields.length - 1];
				System.arraycopy(transientFields, 0, newFields, 0, i);
				System.arraycopy(transientFields, i + 1, newFields, i, newFields.length - i);
				transientFields = newFields;
				removedFields.add(cachedField);
				return;
			}
		}
		throw new IllegalArgumentException("Field \"" + fieldName + "\" not found on class: " + type.getName());
	}

	/** Removes a field so that it won't be serialized. */
	public void removeField (CachedField removeField) {
		for (int i = 0; i < fields.length; i++) {
			CachedField cachedField = fields[i];
			if (cachedField == removeField) {
				CachedField[] newFields = new CachedField[fields.length - 1];
				System.arraycopy(fields, 0, newFields, 0, i);
				System.arraycopy(fields, i + 1, newFields, i, newFields.length - i);
				fields = newFields;
				removedFields.add(cachedField);
				return;
			}
		}

		for (int i = 0; i < transientFields.length; i++) {
			CachedField cachedField = transientFields[i];
			if (cachedField == removeField) {
				CachedField[] newFields = new CachedField[transientFields.length - 1];
				System.arraycopy(transientFields, 0, newFields, 0, i);
				System.arraycopy(transientFields, i + 1, newFields, i, newFields.length - i);
				transientFields = newFields;
				removedFields.add(cachedField);
				return;
			}
		}
		throw new IllegalArgumentException("Field \"" + removeField + "\" not found on class: " + type.getName());
	}

	/** Get all fields controlled by this FieldSerializer
	 * @return all fields controlled by this FieldSerializer */
	public CachedField[] getFields () {
		return fields;
	}

	/** Get all transient fields controlled by this FieldSerializer
	 * @return all transient fields controlled by this FieldSerializer */
	public CachedField[] getTransientFields () {
		return transientFields;
	}

	public Class getType () {
		return type;
	}

	public Kryo getKryo () {
		return kryo;
	}

	public boolean getUseAsmEnabled () {
		return config.isUseAsm();
	}

	public boolean getUseMemRegions () {
		return useMemRegions;
	}

	public boolean getCopyTransient () {
		return config.isCopyTransient();
	}

	public boolean getSerializeTransient () {
		return config.isSerializeTransient();
	}

	/** Used by {@link #copy(Kryo, Object)} to create the new object. This can be overridden to customize object creation, eg to
	 * call a constructor with arguments. The default implementation uses {@link Kryo#newInstance(Class)}. */
	protected T createCopy (Kryo kryo, T original) {
		return (T)kryo.newInstance(original.getClass());
	}

	public T copy (Kryo kryo, T original) {
		T copy = createCopy(kryo, original);
		// Register the copy before copying fields to support cyclic graphs.
		kryo.reference(copy);

		// Copy transient fields
		if (config.isCopyTransient()) {
			for (int i = 0, n = transientFields.length; i < n; i++)
				transientFields[i].copy(original, copy);
		}

		for (int i = 0, n = fields.length; i < n; i++)
			fields[i].copy(original, copy);

		return copy;
	}

	final Generics getGenericsScope () {
		return genericsScope;
	}

	/** Controls how a field will be serialized. */
	public static abstract class CachedField<X> {
		Field field;
		FieldAccess access;
		Class valueClass;
		Serializer serializer;
		boolean canBeNull;
		int accessIndex = -1;
		long offset = -1;
		boolean varIntsEnabled = true;

		/** @param valueClass The concrete class of the values for this field. This saves 1-2 bytes. The serializer registered for
		 *           the specified class will be used. Only set to a non-null value if the field type in the class definition is
		 *           final or the values for this field will not vary. */
		public void setClass (Class valueClass) {
			this.valueClass = valueClass;
			this.serializer = null;
		}

		/** @param valueClass The concrete class of the values for this field. This saves 1-2 bytes. Only set to a non-null value if
		 *           the field type in the class definition is final or the values for this field will not vary. */
		public void setClass (Class valueClass, Serializer serializer) {
			this.valueClass = valueClass;
			this.serializer = serializer;
		}

		public void setSerializer (Serializer serializer) {
			this.serializer = serializer;
		}

		public Serializer getSerializer () {
			return this.serializer;
		}

		public void setCanBeNull (boolean canBeNull) {
			this.canBeNull = canBeNull;
		}

		public Field getField () {
			return field;
		}

		public String toString () {
			return field.getName();
		}

		abstract public void write (Output output, Object object);

		abstract public void read (Input input, Object object);

		abstract public void copy (Object original, Object copy);
	}

	public static interface CachedFieldFactory {
		public CachedField createCachedField (Class fieldClass, Field field, FieldSerializer ser);
	}

	/** Strategy deciding the name under which a cached field is sorted and looked up. */
	public interface CachedFieldNameStrategy {

		CachedFieldNameStrategy DEFAULT = new CachedFieldNameStrategy() {
			@Override
			public String getName (CachedField cachedField) {
				return cachedField.field.getName();
			}
		};

		CachedFieldNameStrategy EXTENDED = new CachedFieldNameStrategy() {
			@Override
			public String getName (CachedField cachedField) {
				// Qualify with the declaring class to disambiguate shadowed fields.
				return cachedField.field.getDeclaringClass().getSimpleName() + "." + cachedField.field.getName();
			}
		};

		String getName (CachedField cachedField);
	}

	/** Indicates a field should be ignored when its declaring class is registered unless the {@link Kryo#getContext() context} has
	 * a value set for the specified key. This can be useful when a field must be serialized for one purpose, but not for another.
	 * Eg, a class for a networked application could have a field that should not be serialized and sent to clients, but should be
	 * serialized when stored on the server.
	 * @author Nathan Sweet <misc@n4te.com> */
	@Retention(RetentionPolicy.RUNTIME)
	@Target(ElementType.FIELD)
	static public @interface Optional {
		public String value();
	}

	/** Used to annotate fields with a specific Kryo serializer. */
	@Retention(RetentionPolicy.RUNTIME)
	@Target(ElementType.FIELD)
	public @interface Bind {
		/** Value.
		 * @return the class<? extends serializer> used for this field */
		@SuppressWarnings("rawtypes")
		Class<? extends Serializer> value();
	}
}
/* * Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the License. You * may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. See accompanying * LICENSE file. */ package com.pivotal.gemfirexd.internal.engine.sql.catalog; import java.util.ArrayList; import java.util.Arrays; import java.util.SortedSet; import com.pivotal.gemfirexd.internal.engine.Misc; import com.pivotal.gemfirexd.internal.engine.distributed.GfxdDistributionAdvisor; import com.pivotal.gemfirexd.internal.engine.store.GemFireStore; import com.pivotal.gemfirexd.internal.iapi.error.StandardException; import com.pivotal.gemfirexd.internal.iapi.reference.Limits; import com.pivotal.gemfirexd.internal.iapi.sql.conn.LanguageConnectionContext; import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.ColumnDescriptor; import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.TableDescriptor; import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.TupleDescriptor; import com.pivotal.gemfirexd.internal.iapi.types.DataValueDescriptor; import com.pivotal.gemfirexd.internal.shared.common.reference.SQLState; import com.pivotal.gemfirexd.internal.shared.common.sanity.SanityManager; /** * @author yjing * */ public final class DistributionDescriptor extends TupleDescriptor { public final static int NONE = 0; public final static int REPLICATE = 1; //Policy number range belonging to policy type partitioning //If changing these numbers , also modify method #isPartitioned public final static int COLOCATE = 2; public final static int 
PARTITIONBYGENERATEDKEY = 3; public final static int PARTITIONBYPRIMARYKEY = 4; public final static int PARTITIONBYEXPRESSION = 5; public final static int PARTITIONBYRANGE = 6; public final static int PARTITIONBYLIST = 7; //Policy number range belonging to policy type partitioning public final static int LOCAL = 8; public static boolean TEST_BYPASS_DATASTORE_CHECK = false; // the partition policy. private final int policy; private String[] columns; private ArrayList<ArrayList<DataValueDescriptor>> values; private final int redundancy; private final int maxPartSize; private String colocateTable; private int[] columnPositions; private final boolean isPersistent; private SortedSet<String> serverGroups; public DistributionDescriptor(int policy, String[] columns, int redundancy, int maxPartSize, String colocateTable, boolean isPersistent, SortedSet<String> serverGroups) { this.policy = policy; this.columns = columns != null ? columns.clone() : null; this.values = null; this.redundancy = redundancy; this.maxPartSize = maxPartSize; this.colocateTable = colocateTable; this.isPersistent = isPersistent; this.serverGroups = serverGroups; } public final String getColocateTableName() { return this.colocateTable; } public void setColocateTableName(String colocatedTable) { this.colocateTable = colocatedTable; } public void addValueSet(ArrayList<DataValueDescriptor> valueSet) { if (this.values == null) { this.values = new ArrayList<ArrayList<DataValueDescriptor>>(); } this.values.add(valueSet); } public void setPartitionColumnNames(String[] cols) { this.columns = cols != null ? 
cols.clone() : null; } public final String[] getPartitionColumnNames() { return this.columns; } public void setColumnPositions(int[] cols) { if (cols != null && cols.length > 0) { this.columnPositions = cols.clone(); // don't change original Arrays.sort(this.columnPositions); } else { this.columnPositions = null; } } public final int[] getColumnPositionsSorted() { return this.columnPositions; } public int getMaxPartSize() { return this.maxPartSize; } public ArrayList<ArrayList<DataValueDescriptor>> getPartitionColumnValues() { return this.values; } public int getPolicy() { return this.policy; } public int getRedundancy() { return this.redundancy; } public boolean getPersistence() { return this.isPersistent; } public boolean isPartitioned() { //TODO:Asif: check if this is a good logic return this.policy >= COLOCATE && this.policy <= PARTITIONBYLIST; } public SortedSet<String> getServerGroups() { return this.serverGroups; } public void setServerGroups(SortedSet<String> sgs) { this.serverGroups = sgs; } public void resolveColumnPositions(TableDescriptor td) throws StandardException { assert td != null: "Table descriptor should be non-null!"; // the partition column is the generated key if (this.columns == null) { return; } this.columnPositions = new int[this.columns.length]; // Bitmap to ensure no duplicates exist in the list of columns boolean[] dupDetectBitmap = new boolean[Limits.DB2_MAX_COLUMNS_IN_TABLE+1]; int columnPosition = 0; // Clear bitmap for dup detection Arrays.fill(dupDetectBitmap, false); for (int index = 0; index < this.columns.length; ++index) { ColumnDescriptor cd = td.getColumnDescriptor(this.columns[index]); if (cd == null) { SanityManager.DEBUG_PRINT("warning:syntax", "Failed to find column " + this.columns[index] + " in TableDescriptor " + td); throw StandardException.newException(SQLState.LANG_SYNTAX_ERROR, "Failed to find column " + this.columns[index] + " in table " + td.getQualifiedName()); } columnPosition = cd.getPosition(); if 
(!dupDetectBitmap[columnPosition]) { // We haven't seen this column yet, set bit dupDetectBitmap[columnPosition] = true; } else { // We already saw this column, throw dup error throw StandardException.newException( SQLState.LANG_DUPLICATE_COLUMN_NAME_CREATE, this.columns[index]); } this.columnPositions[index] = cd.getPosition(); } Arrays.sort(this.columnPositions); } /** * Checks if there are any datastores available for this table or database * object. * * @throws StandardException * if no datastore is available for this table or database object * with SQLState {@link SQLState#LANG_INVALID_MEMBER_REFERENCE} */ public static void checkAvailableDataStore( final LanguageConnectionContext lcc, final SortedSet<String> serverGroups, final String op) throws StandardException { final GemFireStore memStore; if (lcc != null && !lcc.isConnectionForRemote() && (memStore = Misc.getMemStore()).initialDDLReplayDone() && memStore.getDDLStmtQueue() != null && !TEST_BYPASS_DATASTORE_CHECK) { GfxdDistributionAdvisor advisor = memStore.getDistributionAdvisor(); if (advisor.adviseDataStore(serverGroups, true) == null) { throw StandardException.newException(SQLState.NO_DATASTORE_FOUND, "execution of " + op + " in " + (serverGroups == null || serverGroups.size() == 0 ? "distributed system" : "server groups '" + serverGroups + "'")); } } } public int getPartitioningColumnIdx(String colName) { for (int i = 0; i < columns.length; i++) { if (columns[i].equalsIgnoreCase(colName)) { return i; } } return -1; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache.query.continuous;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ConcurrentSkipListSet;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Pattern;
import javax.cache.configuration.Factory;
import javax.cache.configuration.MutableCacheEntryListenerConfiguration;
import javax.cache.event.CacheEntryCreatedListener;
import javax.cache.event.CacheEntryEvent;
import javax.cache.event.CacheEntryEventFilter;
import javax.cache.event.CacheEntryListener;
import javax.cache.event.CacheEntryListenerException;
import javax.cache.event.CacheEntryRemovedListener;
import javax.cache.event.CacheEntryUpdatedListener;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteException;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.query.ContinuousQuery;
import org.apache.ignite.cache.query.QueryCursor;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.util.lang.GridAbsPredicate;
import org.apache.ignite.internal.util.typedef.PA;
import org.apache.ignite.lang.IgnitePredicate;
import org.apache.ignite.resources.IgniteInstanceResource;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.junit.Test;

import static org.apache.ignite.cache.CacheAtomicityMode.ATOMIC;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheMode.REPLICATED;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;

/**
 * Tests continuous queries and JCache entry listeners on a multi-node
 * topology where caches carry node filters: events must be delivered
 * exactly once, only on matching nodes, and none must be lost.
 */
@SuppressWarnings("unchecked")
public class GridCacheContinuousQueryMultiNodesFilteringTest extends GridCommonAbstractTest {
    /** Number of server nodes started by {@link #testFiltersAndListeners()}. */
    private static final int SERVER_GRIDS_COUNT = 6;

    /** Number of keys written per node in {@link #testWithNodeFilter()}. */
    public static final int KEYS = 2_000;

    /** Cache entry operations' counts, keyed by "&lt;cache&gt;_&lt;op&gt;" / "qry&lt;idx&gt;_total". */
    private static final ConcurrentMap<String, AtomicInteger> opCounts = new ConcurrentHashMap<>();

    /** Client mode flag picked up by {@link #getConfiguration(String)}. */
    private static boolean client = false;

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        stopAllGrids();

        client = false;

        super.afterTest();
    }

    /**
     * Starts a chain of server grids plus one client, performs put/update/remove
     * on filtered partitioned and replicated caches and verifies that every
     * per-operation listener and the per-node continuous query saw exactly the
     * expected number of events.
     */
    @Test
    public void testFiltersAndListeners() throws Exception {
        for (int i = 1; i <= SERVER_GRIDS_COUNT; i++)
            startGrid(i, false);

        startGrid(SERVER_GRIDS_COUNT + 1, true);

        for (int i = 1; i <= SERVER_GRIDS_COUNT + 1; i++) {
            for (int j = 0; j < i; j++) {
                jcache(i, "part" + i).put("k" + j, "v0");
                jcache(i, "repl" + i).put("k" + j, "v0");

                // Should trigger updates
                jcache(i, "part" + i).put("k" + j, "v1");
                jcache(i, "repl" + i).put("k" + j, "v1");

                jcache(i, "part" + i).remove("k" + j);
                jcache(i, "repl" + i).remove("k" + j);
            }
        }

        for (int i = 1; i <= SERVER_GRIDS_COUNT + 1; i++) {
            // For each i, we did 3 ops on 2 caches on i keys, hence expected number.
            final int expTotal = i * 3 * 2;

            final int i0 = i;

            // Best-effort wait for async CQ delivery; the assertions below
            // fail with a precise message if the count never converges.
            GridTestUtils.waitForCondition(new GridAbsPredicate() {
                @Override public boolean apply() {
                    return opCounts.get("qry" + i0 + "_total").get() == expTotal;
                }
            }, 5000);

            int partInserts = opCounts.get("part" + i + "_ins").get();
            int replInserts = opCounts.get("repl" + i + "_ins").get();
            int partUpdates = opCounts.get("part" + i + "_upd").get();
            int replUpdates = opCounts.get("repl" + i + "_upd").get();
            int partRemoves = opCounts.get("part" + i + "_rmv").get();
            int replRemoves = opCounts.get("repl" + i + "_rmv").get();
            int totalQryOps = opCounts.get("qry" + i + "_total").get();

            assertEquals(i, partInserts);
            assertEquals(i, replInserts);
            assertEquals(i, partUpdates);
            assertEquals(i, replUpdates);
            assertEquals(i, partRemoves);
            assertEquals(i, replRemoves);
            assertEquals(expTotal, totalQryOps);

            assertEquals(totalQryOps,
                partInserts + replInserts + partUpdates + replUpdates + partRemoves + replRemoves);
        }
    }

    /**
     * Starts a cache restricted by a regexp node filter, registers one
     * continuous query per node (servers and a client) and checks that every
     * update is delivered to every local listener exactly once.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testWithNodeFilter() throws Exception {
        List<QueryCursor> qryCursors = new ArrayList<>();

        final int nodesCnt = 3;

        startGridsMultiThreaded(nodesCnt);

        awaitPartitionMapExchange();

        CacheConfiguration ccfg = cacheConfiguration(new NodeFilterByRegexp(".*(0|1)$"));

        grid(0).createCache(ccfg);

        final AtomicInteger cntr = new AtomicInteger();

        final ConcurrentMap<ClusterNode, Set<Integer>> maps = new ConcurrentHashMap<>();

        final AtomicBoolean doubleNtfFail = new AtomicBoolean(false);

        CacheEntryUpdatedListener<Integer, Integer> lsnr = new CacheEntryUpdatedListener<Integer, Integer>() {
            @Override public void onUpdated(Iterable<CacheEntryEvent<? extends Integer, ? extends Integer>> evts)
                throws CacheEntryListenerException {
                for (CacheEntryEvent<? extends Integer, ? extends Integer> e : evts) {
                    cntr.incrementAndGet();

                    ClusterNode node = ((Ignite)e.getSource().unwrap(Ignite.class)).cluster().localNode();

                    Set<Integer> set = maps.get(node);

                    if (set == null) {
                        set = new ConcurrentSkipListSet<>();

                        Set<Integer> oldVal = maps.putIfAbsent(node, set);

                        set = oldVal != null ? oldVal : set;
                    }

                    // Fixed: a failed add() means this value was already seen on
                    // this node, i.e. a duplicate notification — raise the flag
                    // (previously set(false), which made the check a no-op).
                    if (!set.add(e.getValue()))
                        doubleNtfFail.set(true);
                }
            }
        };

        for (int i = 0; i < nodesCnt; i++) {
            ContinuousQuery<Integer, Integer> qry = new ContinuousQuery<>();

            qry.setLocalListener(lsnr);

            Ignite ignite = grid(i);

            log.info("Try to start CQ on node: " + ignite.cluster().localNode().id());

            qryCursors.add(ignite.cache(ccfg.getName()).query(qry));

            log.info("CQ started on node: " + ignite.cluster().localNode().id());
        }

        client = true;

        startGrid(nodesCnt);

        awaitPartitionMapExchange();

        ContinuousQuery<Integer, Integer> qry = new ContinuousQuery<>();

        qry.setLocalListener(lsnr);

        qryCursors.add(grid(nodesCnt).cache(ccfg.getName()).query(qry));

        for (int i = 0; i <= nodesCnt; i++) {
            for (int key = 0; key < KEYS; key++) {
                int val = (i * KEYS) + key;

                grid(i).cache(ccfg.getName()).put(val, val);
            }
        }

        assertTrue(GridTestUtils.waitForCondition(new PA() {
            @Override public boolean apply() {
                return cntr.get() >= 2 * (nodesCnt + 1) * KEYS;
            }
        }, 5000L));

        assertFalse("Got duplicate", doubleNtfFail.get());

        for (int i = 0; i < (nodesCnt + 1) * KEYS; i++) {
            for (Map.Entry<ClusterNode, Set<Integer>> e : maps.entrySet())
                assertTrue("Lost event on node: " + e.getKey().id() + ", event: " + i,
                    e.getValue().remove(i));
        }

        for (Map.Entry<ClusterNode, Set<Integer>> e : maps.entrySet())
            assertTrue("Unexpected event on node: " + e.getKey(), e.getValue().isEmpty());

        assertEquals("Not expected count of CQ", nodesCnt + 1, qryCursors.size());

        for (QueryCursor cur : qryCursors)
            cur.close();
    }

    /**
     * Starts a grid with a node-filtered partitioned cache and a replicated
     * cache, registering per-operation JCache listeners and one continuous
     * query whose counts feed {@link #opCounts}.
     *
     * @param idx Grid index (also stored as the "idx" user attribute).
     * @param isClientMode Whether to start the node as a client.
     * @return Started node.
     */
    private Ignite startGrid(final int idx, boolean isClientMode) throws Exception {
        String igniteInstanceName = getTestIgniteInstanceName(idx);

        IgniteConfiguration cfg = optimize(getConfiguration(igniteInstanceName)).setClientMode(isClientMode);

        cfg.setUserAttributes(Collections.singletonMap("idx", idx));

        Ignite node = startGrid(igniteInstanceName, cfg);

        IgnitePredicate<ClusterNode> nodeFilter = new NodeFilter(idx);

        String partCacheName = "part" + idx;

        IgniteCache partCache = node.createCache(defaultCacheConfiguration().setName("part" + idx)
            .setCacheMode(PARTITIONED).setBackups(1).setNodeFilter(nodeFilter));

        opCounts.put(partCacheName + "_ins", new AtomicInteger());
        opCounts.put(partCacheName + "_upd", new AtomicInteger());
        opCounts.put(partCacheName + "_rmv", new AtomicInteger());

        partCache.registerCacheEntryListener(new ListenerConfiguration(partCacheName, ListenerConfiguration.Op.INSERT));
        partCache.registerCacheEntryListener(new ListenerConfiguration(partCacheName, ListenerConfiguration.Op.UPDATE));
        partCache.registerCacheEntryListener(new ListenerConfiguration(partCacheName, ListenerConfiguration.Op.REMOVE));

        String replCacheName = "repl" + idx;

        IgniteCache replCache = node.createCache(defaultCacheConfiguration().setName("repl" + idx)
            .setCacheMode(REPLICATED).setNodeFilter(nodeFilter));

        opCounts.put(replCacheName + "_ins", new AtomicInteger());
        opCounts.put(replCacheName + "_upd", new AtomicInteger());
        opCounts.put(replCacheName + "_rmv", new AtomicInteger());

        replCache.registerCacheEntryListener(new ListenerConfiguration(replCacheName, ListenerConfiguration.Op.INSERT));
        replCache.registerCacheEntryListener(new ListenerConfiguration(replCacheName, ListenerConfiguration.Op.UPDATE));
        replCache.registerCacheEntryListener(new ListenerConfiguration(replCacheName, ListenerConfiguration.Op.REMOVE));

        opCounts.put("qry" + idx + "_total", new AtomicInteger());

        ContinuousQuery qry = new ContinuousQuery();

        qry.setRemoteFilterFactory(new EntryEventFilterFactory(idx));

        qry.setLocalListener(new CacheEntryUpdatedListener() {
            /** {@inheritDoc} */
            // NOTE(review): increments once per onUpdated() invocation, not per
            // event in the batch — assumes one event per notification; confirm.
            @Override public void onUpdated(Iterable evts) {
                opCounts.get("qry" + idx + "_total").incrementAndGet();
            }
        });

        partCache.query(qry);
        replCache.query(qry);

        return node;
    }

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

        cfg.setClientMode(client);

        return cfg;
    }

    /**
     * @param filter Node filter.
     * @return Cache configuration.
     */
    private CacheConfiguration cacheConfiguration(NodeFilterByRegexp filter) {
        return new CacheConfiguration("test-cache-cq")
            .setBackups(1)
            .setNodeFilter(filter)
            .setAtomicityMode(atomicityMode())
            .setWriteSynchronizationMode(FULL_SYNC)
            .setCacheMode(PARTITIONED);
    }

    /**
     * @return Atomicity mode.
     */
    protected CacheAtomicityMode atomicityMode() {
        return ATOMIC;
    }

    /** JCache listener configuration that bumps the matching opCounts entry. */
    private static final class ListenerConfiguration extends MutableCacheEntryListenerConfiguration {
        /** Operation. */
        enum Op {
            /** Insert. */
            INSERT,

            /** Update. */
            UPDATE,

            /** Remove. */
            REMOVE
        }

        /** */
        ListenerConfiguration(final String cacheName, final Op op) {
            super(new Factory<CacheEntryListener>() {
                /** {@inheritDoc} */
                @Override public CacheEntryListener create() {
                    switch (op) {
                        case INSERT:
                            return new CacheEntryCreatedListener() {
                                /** {@inheritDoc} */
                                @Override public void onCreated(Iterable iterable) {
                                    for (Object evt : iterable)
                                        opCounts.get(cacheName + "_ins").getAndIncrement();
                                }
                            };

                        case UPDATE:
                            return new CacheEntryUpdatedListener() {
                                /** {@inheritDoc} */
                                @Override public void onUpdated(Iterable iterable) {
                                    for (Object evt : iterable)
                                        opCounts.get(cacheName + "_upd").getAndIncrement();
                                }
                            };

                        case REMOVE:
                            return new CacheEntryRemovedListener() {
                                /** {@inheritDoc} */
                                @Override public void onRemoved(Iterable iterable) {
                                    for (Object evt : iterable)
                                        opCounts.get(cacheName + "_rmv").getAndIncrement();
                                }
                            };

                        default:
                            throw new IgniteException(new IllegalArgumentException());
                    }
                }
            }, null, true, false);
        }
    }

    /** Remote filter asserting events only fire on nodes of matching parity. */
    private static final class EntryEventFilterFactory implements Factory<CacheEntryEventFilter> {
        /** */
        @IgniteInstanceResource
        private Ignite ignite;

        /** Grid index to determine whether node filter has been invoked. */
        private final int idx;

        /** */
        private EntryEventFilterFactory(int idx) {
            this.idx = idx;
        }

        /** {@inheritDoc} */
        @Override public CacheEntryEventFilter create() {
            return new CacheEntryEventFilter() {
                /** {@inheritDoc} */
                @Override public boolean evaluate(CacheEntryEvent evt) throws CacheEntryListenerException {
                    int evtNodeIdx = (Integer)(ignite.cluster().localNode().attributes().get("idx"));

                    assertTrue(evtNodeIdx % 2 == idx % 2);

                    return true;
                }
            };
        }
    }

    /** Accepts only nodes whose "idx" attribute has the same parity. */
    private static final class NodeFilter implements IgnitePredicate<ClusterNode> {
        /** */
        private final int idx;

        /** */
        private NodeFilter(int idx) {
            this.idx = idx;
        }

        /** {@inheritDoc} */
        @Override public boolean apply(ClusterNode clusterNode) {
            return ((Integer)clusterNode.attributes().get("idx") % 2) == idx % 2;
        }
    }

    /** Accepts only nodes whose id matches the given regular expression. */
    private static final class NodeFilterByRegexp implements IgnitePredicate<ClusterNode> {
        /** */
        private final Pattern pattern;

        /** */
        private NodeFilterByRegexp(String regExp) {
            this.pattern = Pattern.compile(regExp);
        }

        /** {@inheritDoc} */
        @Override public boolean apply(ClusterNode clusterNode) {
            return pattern.matcher(clusterNode.id().toString()).matches();
        }
    }
}
/*
 * Copyright (c) 2003, KNOPFLERFISH project
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following
 * conditions are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 *
 * - Redistributions in binary form must reproduce the above
 *   copyright notice, this list of conditions and the following
 *   disclaimer in the documentation and/or other materials
 *   provided with the distribution.
 *
 * - Neither the name of the KNOPFLERFISH project nor the names of its
 *   contributors may be used to endorse or promote products derived
 *   from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package org.knopflerfish.tools.jarunpacker;

import java.awt.*;
import javax.swing.*;
import javax.swing.plaf.ComponentUI;
import java.awt.event.*;
import java.util.*;
import java.lang.Thread;

/**
 * A lightweight status/progress bar component. It shows a status message on
 * the left and a progress bar on the right, supporting both a percentage
 * mode and an indeterminate "bouncing" mode driven by a background thread.
 * Clicking inside the bar marks it as "broken" (user cancel).
 */
public class StatusBar extends JComponent implements Runnable, MouseListener {
  /** Indeterminate mode: the bar bounces back and forth. */
  static int MODE_UNKNOWN    = 0;

  /** Percentage mode: the bar fills proportionally to {@link #perc}. */
  static int MODE_PERCENTAGE = 1;

  int     perc      = 0;      // current progress 0..100
  int     delta     = 5;      // step (and direction) used in indeterminate mode
  String  msg       = "";     // status message displayed on the left
  int     block     = 6;      // width of one filled block in percentage mode
  int     pad       = 2;      // gap between blocks
  int     barWidth  = 150;    // pixel width of the progress bar area
  boolean bShowPerc = false;  // append " NN%" to the message when true

  Thread  runner    = null;   // animation thread for indeterminate mode
  int     delay     = 50;     // ms between animation frames

  int     mode      = MODE_UNKNOWN;

  // Set to true if user clicks in progress bar
  boolean bIsBroken = false;

  // run() will only loop while this is set
  boolean bRun      = false;

  Color bgColor   = Color.lightGray;
  Color fgColor   = Color.black;
  Color txtColor  = Color.black;
  Color lowColor  = Color.black;
  Color highColor = Color.blue;

  String name;

  boolean bIsAlive = false;   // true while run() is executing

  /**
   * Creates a status bar.
   *
   * @param name name used for the animation thread.
   */
  public StatusBar(String name) {
    this.name = name;
    msg = "";

    Dimension d = new Dimension(400, 17);
    setMinimumSize(d);
    setPreferredSize(d);

    addMouseListener(this);

    bgColor   = getBackground();
    highColor = UIManager.getColor("ScrollBar.thumb");
  }

  public void setBackground(Color c) {
    super.setBackground(c);
    bgColor = c;
  }

  public void setForeground(Color c) {
    super.setForeground(c);
    fgColor = c;
  }

  protected void setUI(ComponentUI newUI) {
    super.setUI(newUI);
    // re-read the theme color when the look-and-feel changes
    highColor = UIManager.getColor("ScrollBar.thumb");
  }

  /**
   * Animation loop for indeterminate mode: repaints every {@link #delay} ms
   * until stopped or the bar is marked broken by the user.
   */
  public void run() {
    bIsAlive = true;
    while(bRun && !isBroken()) {
      updateProgress(-1);
      try {
        Thread.sleep(delay);
      } catch (Exception e) {
      }
    }
    bIsAlive = false;
    runner = null;
  }

  Object lock = new Object();

  /** Stops the animation thread and waits (bounded) for it to finish. */
  private void stopRunner() {
    //    synchronized(lock)
    {
      bRun = false;
      if(runner != null) {
        try {
          runner.join(20 * 1000);
        } catch (Exception ignored) {
        }
      }
    }
  }

  public void theEnd() {
  }

  /**
   * Updates the progress value, keeping the current message.
   *
   * @param percent 0..100 for percentage mode, -1 to advance the
   *                indeterminate animation by one step.
   */
  public void updateProgress(int percent) {
    updateProgress("", percent);
  }

  /**
   * Updates both message and progress and paints immediately (bypassing the
   * normal repaint queue so progress is visible during long operations).
   *
   * @param msg     new status message.
   * @param percent 0..100 for percentage mode, -1 for indeterminate mode.
   */
  public void updateProgress(String msg, int percent) {
    this.msg = msg;

    if(percent == -1) {
      // indeterminate: bounce perc between 0 and 100 using delta
      bShowPerc = false;
      block     = 2;
      pad       = 1;

      perc += delta;
      if(perc > 100) {
        perc  = 100;
        delta = -delta;
      }
      if(perc < 0) {
        perc  = 0;
        delta = -delta;
      }
    } else {
      perc = Math.min(Math.max(percent, 0), 100);
      mode = MODE_PERCENTAGE;
    }

    Graphics g = getGraphics();
    if(g != null) {
      paint(g);
      g.dispose();
    } else {
      // not yet displayable; nothing to paint on
    }
  }

  /** @return true if the user clicked in the bar to interrupt the operation. */
  public boolean isBroken() {
    return bIsBroken;
  }

  /**
   * Starts indeterminate progress animation with the given frame delay.
   * If the animation is already running, only the message is updated.
   */
  public void startProgress(String msg, int delay) {
    if(runner != null && mode == MODE_UNKNOWN) {
      this.msg = msg;
      return;
    }
    this.delta = 5;
    this.perc  = 0;
    this.msg   = msg;
    this.delay = delay;
    if(runner == null) {
      //      synchronized(lock)
      {
        bIsBroken = false;
        bRun      = true;
        runner    = new Thread(this, "StatusBar update " + name);
        runner.start();
      }
    }
  }

  /**
   * Shows the given message with a wait cursor, without starting the
   * animation thread. If the animation is already running, only the
   * message is updated.
   */
  public void startProgress(String msg) {
    if(runner != null && mode == MODE_UNKNOWN) {
      this.msg = msg;
      return;
    }
    this.delta = 5;
    this.perc  = 0;
    this.msg   = msg;
    repaint();
    setCursor(Cursor.WAIT_CURSOR);
  }

  /** Stops any progress display and resets message, bar and cursor. */
  public void stopProgress() {
    stopRunner();
    updateProgress(0);
    perc = 0;
    msg  = "";
    repaint();
    setCursor(Cursor.DEFAULT_CURSOR);
  }

  /** Shows a status message immediately (direct paint, no repaint queue). */
  public void showStatus(String msg) {
    this.msg = msg;
    Graphics g = getGraphics();
    if(g != null) {
      paint(g);
      g.dispose();
    } else {
      // No graphics in showStatus
    }
  }

  public void update(Graphics g) {
    // Override this method, we do not need any background handling
    paint(g);
  }

  /**
   * Double-buffered painting of message text and progress bar.
   */
  public void paint(Graphics g) {
    highColor = UIManager.getColor("ScrollBar.thumb");
    // Fixed: was "highColor == null && highColor.equals(...)" which NPEs
    // when the UIManager color is null and never applies the fallback
    // otherwise; fall back when the color is missing or invisible against
    // the background.
    if(highColor == null || highColor.equals(getBackground())) {
      highColor = UIManager.getColor("controlShadow");
    }

    // Canvas size
    Dimension d = getSize();
    if (d.width==0||d.height==0)
      return; //Called before added to visible frame

    // Center
    Dimension center = new Dimension(d.width/2, d.height/2);

    // Create memory image, for double buffering
    Image memImage = createImage(d.width, d.height);
    if (memImage==null)
      return; //Called before added to visible frame
    Graphics memG = memImage.getGraphics();

    // Set background
    memG.setColor(getBackground());
    memG.fillRect(0,0,d.width,d.height);

    memG.setColor(txtColor);

    String s = msg;
    if(bShowPerc) {
      s = s + " " + perc + "%";
    }

    // clip the text so it cannot run into the bar area
    Shape clip = memG.getClip();
    memG.setClip(0, 0, d.width-barWidth-12, d.height - 1);
    memG.drawString(s, 5, 14);
    memG.setClip(clip);

    memG.setColor(getBackground());
    memG.draw3DRect(0,0,d.width-barWidth-12,d.height-1, false);
    memG.draw3DRect(d.width-barWidth-10,0,barWidth+9,d.height-1, false);

    int x0   = d.width - barWidth - 5;
    int x1   = d.width - 5;
    int diff = x1 - x0;
    int xmax = x0 + diff * perc / 100;
    int h    = d.height;

    if(mode == MODE_PERCENTAGE) {
      // filled blocks with a low->high color gradient
      for(int x = x0; x < xmax; x = x + block + pad) {
        double k = (x - x0) / (double)diff;
        Color  c = Util.rgbInterPolate(lowColor, highColor, k);
        memG.setColor(c);
        memG.fillRect(x, 3, block, h - 6);
      }
    } else {
      // indeterminate: draw a fading "comet trail" behind the moving edge
      int trail = diff / 3;
      if(delta > 0) {
        int xstart = xmax - trail;
        if(xstart < x0) xstart = x0;
        for(int x = xstart; x < xmax; x++) {
          double k = (x - xstart) / (double)trail;
          Color  c = Util.rgbInterPolate(getBackground(), highColor, k);
          memG.setColor(c);
          memG.fillRect(x, 3, 1, h - 6);
        }
      } else if(delta < 0) {
        int xend = xmax + trail;
        if(xend > x1) xend = x1;
        for(int x = xend; x > xmax; x--) {
          double k = (xend - x) / (double)trail;
          Color  c = Util.rgbInterPolate(getBackground(), highColor, k);
          memG.setColor(c);
          memG.fillRect(x, 3, 1, h - 6);
        }
      }
    }

    // Copy image to canvas
    g.drawImage(memImage, 0,0, this);
  }

  public void mouseClicked(MouseEvent e) {
  }

  public void mouseEntered(MouseEvent e) {
  }

  public void mouseExited(MouseEvent e) {
  }

  public void mousePressed(MouseEvent e) {
    // user clicked the bar: mark as broken so run() stops
    bIsBroken = true;
    showStatus("");
  }

  public void mouseReleased(MouseEvent e) {
  }

  /** Sets the predefined cursor on the component's root container. */
  void setCursor(int c) {
    final Component root = SwingUtilities.getRoot(this);
    if (root!=null)
      root.setCursor(Cursor.getPredefinedCursor(c));
  }
}
// Copyright 2014 Google Inc. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.rules; import static com.google.devtools.build.lib.packages.Attribute.ConfigurationTransition.DATA; import static com.google.devtools.build.lib.packages.Attribute.ConfigurationTransition.HOST; import static com.google.devtools.build.lib.packages.Attribute.ConfigurationTransition.NONE; import static com.google.devtools.build.lib.packages.Attribute.attr; import static com.google.devtools.build.lib.packages.Type.BOOLEAN; import static com.google.devtools.build.lib.packages.Type.INTEGER; import static com.google.devtools.build.lib.packages.Type.LABEL; import static com.google.devtools.build.lib.packages.Type.LABEL_LIST; import static com.google.devtools.build.lib.packages.Type.LICENSE; import static com.google.devtools.build.lib.packages.Type.STRING; import static com.google.devtools.build.lib.packages.Type.STRING_LIST; import static com.google.devtools.build.lib.syntax.SkylarkType.castList; import static com.google.devtools.build.lib.syntax.SkylarkType.castMap; import com.google.common.annotations.VisibleForTesting; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import 
com.google.devtools.build.lib.analysis.BaseRuleClasses; import com.google.devtools.build.lib.analysis.config.BuildConfiguration; import com.google.devtools.build.lib.analysis.config.RunUnder; import com.google.devtools.build.lib.events.Location; import com.google.devtools.build.lib.packages.Attribute; import com.google.devtools.build.lib.packages.Attribute.ConfigurationTransition; import com.google.devtools.build.lib.packages.Attribute.LateBoundLabel; import com.google.devtools.build.lib.packages.AttributeMap; import com.google.devtools.build.lib.packages.ImplicitOutputsFunction.SkylarkImplicitOutputsFunctionWithCallback; import com.google.devtools.build.lib.packages.ImplicitOutputsFunction.SkylarkImplicitOutputsFunctionWithMap; import com.google.devtools.build.lib.packages.Package.NameConflictException; import com.google.devtools.build.lib.packages.PackageFactory; import com.google.devtools.build.lib.packages.PackageFactory.PackageContext; import com.google.devtools.build.lib.packages.Rule; import com.google.devtools.build.lib.packages.RuleClass; import com.google.devtools.build.lib.packages.RuleClass.Builder; import com.google.devtools.build.lib.packages.RuleClass.Builder.RuleClassType; import com.google.devtools.build.lib.packages.RuleFactory; import com.google.devtools.build.lib.packages.RuleFactory.InvalidRuleException; import com.google.devtools.build.lib.packages.TargetUtils; import com.google.devtools.build.lib.packages.TestSize; import com.google.devtools.build.lib.packages.Type; import com.google.devtools.build.lib.packages.Type.ConversionException; import com.google.devtools.build.lib.syntax.BaseFunction; import com.google.devtools.build.lib.syntax.BuiltinFunction; import com.google.devtools.build.lib.syntax.ClassObject; import com.google.devtools.build.lib.syntax.ClassObject.SkylarkClassObject; import com.google.devtools.build.lib.syntax.Environment; import com.google.devtools.build.lib.syntax.Environment.NoSuchVariableException; import 
// NOTE(review): the leading "import" keyword of the first statement lies before this
// chunk; its tail is preserved exactly as found.
com.google.devtools.build.lib.syntax.EvalException;
import com.google.devtools.build.lib.syntax.EvalUtils;
import com.google.devtools.build.lib.syntax.FuncallExpression;
import com.google.devtools.build.lib.syntax.FunctionSignature;
import com.google.devtools.build.lib.syntax.Label;
import com.google.devtools.build.lib.syntax.Runtime;
import com.google.devtools.build.lib.syntax.SkylarkCallbackFunction;
import com.google.devtools.build.lib.syntax.SkylarkEnvironment;
import com.google.devtools.build.lib.syntax.SkylarkList;
import com.google.devtools.build.lib.syntax.SkylarkModuleNameResolver;
import com.google.devtools.build.lib.syntax.SkylarkSignature;
import com.google.devtools.build.lib.syntax.SkylarkSignature.Param;
import com.google.devtools.build.lib.syntax.SkylarkSignatureProcessor;
import com.google.devtools.build.lib.vfs.PathFragment;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutionException;

/**
 * A helper class to provide an easier API for Skylark rule definitions.
 *
 * <p>Hosts the {@code @SkylarkSignature}-annotated built-ins ({@code rule}, {@code Label},
 * {@code FileType}, {@code to_proto}, the {@code *_CFG} constants) that are registered into the
 * Skylark environment by the static initializer at the bottom of this class.
 */
public class SkylarkRuleClassFunctions {

  //TODO(bazel-team): proper enum support
  @SkylarkSignature(name = "DATA_CFG", returnType = ConfigurationTransition.class,
      doc = "Experimental. Specifies a transition to the data configuration.")
  private static final Object dataTransition = ConfigurationTransition.DATA;

  @SkylarkSignature(name = "HOST_CFG", returnType = ConfigurationTransition.class,
      doc = "Specifies a transition to the host configuration.")
  private static final Object hostTransition = ConfigurationTransition.HOST;

  // Late-bound value for the ":run_under" attribute: resolved from the configuration at
  // analysis time; null when run_under is unset or has no label.
  private static final LateBoundLabel<BuildConfiguration> RUN_UNDER =
      new LateBoundLabel<BuildConfiguration>() {
        @Override
        public Label getDefault(Rule rule, BuildConfiguration configuration) {
          RunUnder runUnder = configuration.getRunUnder();
          return runUnder == null ? null : runUnder.getLabel();
        }
      };

  // TODO(bazel-team): Copied from ConfiguredRuleClassProvider for the transition from built-in
  // rules to skylark extensions. Using the same instance would require a large refactoring.
  // If we don't want to support old built-in rules and Skylark simultaneously
  // (except for transition phase) it's probably OK.
  private static LoadingCache<String, Label> labelCache =
      CacheBuilder.newBuilder().build(new CacheLoader<String, Label>() {
        @Override
        public Label load(String from) throws Exception {
          try {
            return Label.parseAbsolute(from);
          } catch (Label.SyntaxException e) {
            // Re-wrapped so the cache's ExecutionException carries the offending string.
            throw new Exception(from);
          }
        }
      });

  // TODO(bazel-team): Remove the code duplication (BaseRuleClasses and this class).

  /** Parent rule class for non-executable non-test Skylark rules. */
  public static final RuleClass baseRule =
      BaseRuleClasses.commonCoreAndSkylarkAttributes(
          new RuleClass.Builder("$base_rule", RuleClassType.ABSTRACT, true))
          .add(attr("expect_failure", STRING))
          .build();

  /** Parent rule class for executable non-test Skylark rules. */
  public static final RuleClass binaryBaseRule =
      new RuleClass.Builder("$binary_base_rule", RuleClassType.ABSTRACT, true, baseRule)
          .add(attr("args", STRING_LIST)
              .nonconfigurable("policy decision: should be consistent across configurations"))
          .add(attr("output_licenses", LICENSE))
          .build();

  /** Parent rule class for test Skylark rules. */
  public static final RuleClass testBaseRule =
      new RuleClass.Builder("$test_base_rule", RuleClassType.ABSTRACT, true, baseRule)
          .add(attr("size", STRING).value("medium").taggable()
              .nonconfigurable("used in loading phase rule validation logic"))
          .add(attr("timeout", STRING).taggable()
              .nonconfigurable("used in loading phase rule validation logic").value(
                  new Attribute.ComputedDefault() {
                    @Override
                    public Object getDefault(AttributeMap rule) {
                      // Default timeout is derived from the declared test size.
                      TestSize size = TestSize.getTestSize(rule.get("size", Type.STRING));
                      if (size != null) {
                        String timeout = size.getDefaultTimeout().toString();
                        if (timeout != null) {
                          return timeout;
                        }
                      }
                      // Unknown size: surface an invalid timeout for validation to reject.
                      return "illegal";
                    }
                  }))
          .add(attr("flaky", BOOLEAN).value(false).taggable()
              .nonconfigurable("taggable - called in Rule.getRuleTags"))
          .add(attr("shard_count", INTEGER).value(-1))
          .add(attr("local", BOOLEAN).value(false).taggable()
              .nonconfigurable(
                  "policy decision: this should be consistent across configurations"))
          .add(attr("args", STRING_LIST)
              .nonconfigurable("policy decision: should be consistent across configurations"))
          .add(attr("$test_runtime", LABEL_LIST).cfg(HOST).value(ImmutableList.of(
              labelCache.getUnchecked("//tools/test:runtime"))))
          .add(attr(":run_under", LABEL).cfg(DATA).value(RUN_UNDER))
          .build();

  /**
   * In native code, private values start with $.
   * In Skylark, private values start with _, because of the grammar.
   *
   * <p>Translates a Skylark attribute name to its native form; late-bound attributes are
   * mapped to the ":"-prefixed form and must be private in Skylark.
   */
  private static String attributeToNative(String oldName, Location loc, boolean isLateBound)
      throws EvalException {
    if (oldName.isEmpty()) {
      throw new EvalException(loc, "Attribute name cannot be empty");
    }
    if (isLateBound) {
      if (oldName.charAt(0) != '_') {
        throw new EvalException(loc, "When an attribute value is a function, "
            + "the attribute must be private (start with '_')");
      }
      return ":" + oldName.substring(1);
    }
    if (oldName.charAt(0) == '_') {
      return "$" + oldName.substring(1);
    }
    return oldName;
  }

  // TODO(bazel-team): implement attribute copy and other rule properties
  @SkylarkSignature(name = "rule", doc =
      "Creates a new rule. Store it in a global value, so that it can be loaded and called "
      + "from BUILD files.",
      returnType = BaseFunction.class,
      mandatoryPositionals = {
        @Param(name = "implementation", type = BaseFunction.class,
            doc = "the function implementing this rule, must have exactly one parameter: "
            + "<a href=\"ctx.html\">ctx</a>. The function is called during the analysis phase "
            + "for each instance of the rule. It can access the attributes provided by the user. "
            + "It must create actions to generate all the declared outputs.")
      },
      optionalPositionals = {
        @Param(name = "test", type = Boolean.class, defaultValue = "False",
            doc = "Whether this rule is a test rule. "
            + "If True, the rule must end with <code>_test</code> (otherwise it must not), "
            + "and there must be an action that generates <code>ctx.outputs.executable</code>."),
        @Param(name = "attrs", type = Map.class, noneable = true, defaultValue = "None", doc =
            "dictionary to declare all the attributes of the rule. It maps from an attribute name "
            + "to an attribute object (see <a href=\"attr.html\">attr</a> module). "
            + "Attributes starting with <code>_</code> are private, and can be used to add "
            + "an implicit dependency on a label. The attribute <code>name</code> is implicitly "
            + "added and must not be specified. Attributes <code>visibility</code>, "
            + "<code>deprecation</code>, <code>tags</code>, <code>testonly</code>, and "
            + "<code>features</code> are implicitly added and might be overriden."),
        // TODO(bazel-team): need to give the types of these builtin attributes
        @Param(name = "outputs", type = Map.class, callbackEnabled = true, noneable = true,
            defaultValue = "None", doc = "outputs of this rule. "
            + "It is a dictionary mapping from string to a template name. "
            + "For example: <code>{\"ext\": \"%{name}.ext\"}</code>. <br>"
            + "The dictionary key becomes an attribute in <code>ctx.outputs</code>. "
            // TODO(bazel-team): Make doc more clear, wrt late-bound attributes.
            + "It may also be a function (which receives <code>ctx.attr</code> as argument) "
            + "returning such a dictionary."),
        @Param(name = "executable", type = Boolean.class, defaultValue = "False",
            doc = "whether this rule is marked as executable or not. If True, "
            + "there must be an action that generates <code>ctx.outputs.executable</code>."),
        @Param(name = "output_to_genfiles", type = Boolean.class, defaultValue = "False",
            doc = "If true, the files will be generated in the genfiles directory instead of the "
            + "bin directory. This is used for compatibility with existing rules."),
        @Param(name = "fragments", type = SkylarkList.class, generic1 = String.class,
            defaultValue = "[]",
            doc = "List of names of configuration fragments that the rule requires "
            + "in target configuration."),
        @Param(name = "host_fragments", type = SkylarkList.class, generic1 = String.class,
            defaultValue = "[]",
            doc = "List of names of configuration fragments that the rule requires "
            + "in host configuration.")},
      useAst = true, useEnvironment = true)
  private static final BuiltinFunction rule = new BuiltinFunction("rule") {
    @SuppressWarnings({"rawtypes", "unchecked"}) // castMap produces
    // an Attribute.Builder instead of a Attribute.Builder<?> but it's OK.
    public BaseFunction invoke(BaseFunction implementation, Boolean test, Object attrs,
        Object implicitOutputs, Boolean executable, Boolean outputToGenfiles,
        SkylarkList fragments, SkylarkList hostFragments,
        FuncallExpression ast, Environment funcallEnv)
        throws EvalException, ConversionException {
      // Rules may only be defined during the loading phase.
      funcallEnv.checkLoadingPhase("rule", ast.getLocation());
      RuleClassType type = test ? RuleClassType.TEST : RuleClassType.NORMAL;
      RuleClass parent = test ? testBaseRule : (executable ? binaryBaseRule : baseRule);

      // We'll set the name later, pass the empty string for now.
      RuleClass.Builder builder = new RuleClass.Builder("", type, true, parent);

      if (attrs != Runtime.NONE) {
        for (Map.Entry<String, Attribute.Builder> attr : castMap(
            attrs, String.class, Attribute.Builder.class, "attrs").entrySet()) {
          Attribute.Builder<?> attrBuilder = (Attribute.Builder<?>) attr.getValue();
          // Translate the Skylark name (leading '_' or late-bound) to the native form.
          String attrName = attributeToNative(attr.getKey(), ast.getLocation(),
              attrBuilder.hasLateBoundValue());
          builder.addOrOverrideAttribute(attrBuilder.build(attrName));
        }
      }
      if (executable || test) {
        builder.addOrOverrideAttribute(
            attr("$is_executable", BOOLEAN)
                .value(true)
                .nonconfigurable("Called from RunCommand.isExecutable, which takes a Target")
                .build());
        builder.setOutputsDefaultExecutable();
      }

      if (implicitOutputs != Runtime.NONE) {
        if (implicitOutputs instanceof BaseFunction) {
          // outputs may be a Skylark callback computing the map from ctx.attr.
          BaseFunction func = (BaseFunction) implicitOutputs;
          final SkylarkCallbackFunction callback =
              new SkylarkCallbackFunction(func, ast, (SkylarkEnvironment) funcallEnv);
          builder.setImplicitOutputsFunction(
              new SkylarkImplicitOutputsFunctionWithCallback(callback, ast.getLocation()));
        } else {
          builder.setImplicitOutputsFunction(
              new SkylarkImplicitOutputsFunctionWithMap(ImmutableMap.copyOf(castMap(
                  implicitOutputs, String.class, String.class,
                  "implicit outputs of the rule class"))));
        }
      }

      if (outputToGenfiles) {
        builder.setOutputToGenfiles();
      }

      registerRequiredFragments(fragments, hostFragments, builder);
      builder.setConfiguredTargetFunction(implementation);
      builder.setRuleDefinitionEnvironment(
          ((SkylarkEnvironment) funcallEnv).getGlobalEnvironment());
      return new RuleFunction(builder, type);
    }

    /** Records which configuration fragments the rule requires, keyed by configuration. */
    private void registerRequiredFragments(
        SkylarkList fragments, SkylarkList hostFragments, RuleClass.Builder builder) {
      Map<ConfigurationTransition, ImmutableSet<String>> map = new HashMap<>();
      addFragmentsToMap(map, fragments, NONE); // NONE represents target configuration
      addFragmentsToMap(map, hostFragments, HOST);
      builder.requiresConfigurationFragments(new SkylarkModuleNameResolver(), map);
    }

    private void addFragmentsToMap(Map<ConfigurationTransition, ImmutableSet<String>> map,
        SkylarkList fragments, ConfigurationTransition config) {
      if (!fragments.isEmpty()) {
        map.put(config, ImmutableSet.copyOf(castList(fragments, String.class)));
      }
    }
  };

  // This class is needed for testing
  static final class RuleFunction extends BaseFunction {
    // Note that this means that we can reuse the same builder.
    // This is fine since we don't modify the builder from here.
    private final RuleClass.Builder builder;
    private final RuleClassType type;
    // Both set by export(); remain null until the defining .bzl file is fully loaded.
    private PathFragment skylarkFile;
    private String ruleClassName;

    public RuleFunction(Builder builder, RuleClassType type) {
      super("rule", FunctionSignature.KWARGS);
      this.builder = builder;
      this.type = type;
    }

    @Override
    @SuppressWarnings("unchecked") // the magic hidden $pkg_context variable is guaranteed
    // to be a PackageContext
    public Object call(Object[] args, FuncallExpression ast, Environment env)
        throws EvalException, InterruptedException, ConversionException {
      env.checkLoadingPhase(getName(), ast.getLocation());
      try {
        // A rule may only be instantiated after export() has named it.
        if (ruleClassName == null || skylarkFile == null) {
          throw new EvalException(ast.getLocation(),
              "Invalid rule class hasn't been exported by a Skylark file");
        }
        // Test rule classes and the "_test" suffix must agree in both directions.
        if (type == RuleClassType.TEST != TargetUtils.isTestRuleName(ruleClassName)) {
          throw new EvalException(ast.getLocation(), "Invalid rule class name '" + ruleClassName
              + "', test rule class names must end with '_test' and other rule classes must not");
        }
        RuleClass ruleClass = builder.build(ruleClassName);
        PackageContext pkgContext = (PackageContext) env.lookup(PackageFactory.PKG_CONTEXT);
        return RuleFactory.createAndAddRule(
            pkgContext, ruleClass, (Map<String, Object>) args[0], ast, env.getStackTrace());
      } catch (InvalidRuleException | NameConflictException | NoSuchVariableException e) {
        throw new EvalException(ast.getLocation(), e.getMessage());
      }
    }

    /**
     * Export a RuleFunction from a Skylark file with a given name.
     */
    void export(PathFragment skylarkFile, String ruleClassName) {
      this.skylarkFile = skylarkFile;
      this.ruleClassName = ruleClassName;
    }

    @VisibleForTesting
    RuleClass.Builder getBuilder() {
      return builder;
    }
  }

  /** Exports every not-yet-exported RuleFunction bound in the environment under its name. */
  public static void exportRuleFunctions(SkylarkEnvironment env, PathFragment skylarkFile) {
    for (String name : env.getDirectVariableNames()) {
      try {
        Object value = env.lookup(name);
        if (value instanceof RuleFunction) {
          RuleFunction function = (RuleFunction) value;
          if (function.skylarkFile == null) {
            function.export(skylarkFile, name);
          }
        }
      } catch (NoSuchVariableException e) {
        // Name came from getDirectVariableNames(), so lookup cannot fail.
        throw new AssertionError(e);
      }
    }
  }

  @SkylarkSignature(name = "Label", doc = "Creates a Label referring to a BUILD target. Use "
      + "this function only when you want to give a default value for the label attributes. "
      + "The argument must refer to an absolute label. "
      + "Example: <br><pre class=language-python>Label(\"//tools:default\")</pre>",
      returnType = Label.class,
      mandatoryPositionals = {@Param(name = "label_string", type = String.class,
          doc = "the label string")},
      useLocation = true)
  private static final BuiltinFunction label = new BuiltinFunction("Label") {
    public Label invoke(String labelString, Location loc)
        throws EvalException, ConversionException {
      try {
        // Parsed labels are memoized in labelCache.
        return labelCache.get(labelString);
      } catch (ExecutionException e) {
        throw new EvalException(loc, "Illegal absolute label syntax: " + labelString);
      }
    }
  };

  @SkylarkSignature(name = "FileType",
      doc = "Creates a file filter from a list of strings. For example, to match files ending "
      + "with .cc or .cpp, use: <pre class=language-python>FileType([\".cc\", \".cpp\"])</pre>",
      returnType = SkylarkFileType.class,
      mandatoryPositionals = {
        @Param(name = "types", type = SkylarkList.class, generic1 = String.class,
            defaultValue = "[]", doc = "a list of the accepted file extensions")})
  private static final BuiltinFunction fileType = new BuiltinFunction("FileType") {
    public SkylarkFileType invoke(SkylarkList types) throws ConversionException {
      return SkylarkFileType.of(castList(types, String.class));
    }
  };

  @SkylarkSignature(name = "to_proto",
      doc = "Creates a text message from the struct parameter. This method only works if all "
      + "struct elements (recursively) are strings, ints, booleans, other structs or a "
      + "list of these types. Quotes and new lines in strings are escaped. "
      + "Examples:<br><pre class=language-python>"
      + "struct(key=123).to_proto()\n# key: 123\n\n"
      + "struct(key=True).to_proto()\n# key: true\n\n"
      + "struct(key=[1, 2, 3]).to_proto()\n# key: 1\n# key: 2\n# key: 3\n\n"
      + "struct(key='text').to_proto()\n# key: \"text\"\n\n"
      + "struct(key=struct(inner_key='text')).to_proto()\n"
      + "# key {\n# inner_key: \"text\"\n# }\n\n"
      + "struct(key=[struct(inner_key=1), struct(inner_key=2)]).to_proto()\n"
      + "# key {\n# inner_key: 1\n# }\n# key {\n# inner_key: 2\n# }\n\n"
      + "struct(key=struct(inner_key=struct(inner_inner_key='text'))).to_proto()\n"
      + "# key {\n# inner_key {\n# inner_inner_key: \"text\"\n# }\n# }\n</pre>",
      objectType = SkylarkClassObject.class, returnType = String.class,
      mandatoryPositionals = {
        // TODO(bazel-team): shouldn't we accept any ClassObject?
        @Param(name = "self", type = SkylarkClassObject.class,
            doc = "this struct")},
      useLocation = true)
  private static final BuiltinFunction toProto = new BuiltinFunction("to_proto") {
    public String invoke(SkylarkClassObject self, Location loc) throws EvalException {
      StringBuilder sb = new StringBuilder();
      printTextMessage(self, sb, 0, loc);
      return sb.toString();
    }

    // Renders every field of a struct at the given indent level.
    private void printTextMessage(ClassObject object, StringBuilder sb,
        int indent, Location loc) throws EvalException {
      for (String key : object.getKeys()) {
        printTextMessage(key, object.getValue(key), sb, indent, loc);
      }
    }

    // Renders a single non-list value; rejects unsupported types with an error naming
    // the container ("struct field" or "list element in struct field").
    private void printSimpleTextMessage(String key, Object value, StringBuilder sb,
        int indent, Location loc, String container) throws EvalException {
      if (value instanceof ClassObject) {
        print(sb, key + " {", indent);
        printTextMessage((ClassObject) value, sb, indent + 1, loc);
        print(sb, "}", indent);
      } else if (value instanceof String) {
        print(sb, key + ": \"" + escape((String) value) + "\"", indent);
      } else if (value instanceof Integer) {
        print(sb, key + ": " + value, indent);
      } else if (value instanceof Boolean) {
        // We're relying on the fact that Java converts Booleans to Strings in the same way
        // as the protocol buffers do.
        print(sb, key + ": " + value, indent);
      } else {
        throw new EvalException(loc,
            "Invalid text format, expected a struct, a string, a bool, or an int but got a "
            + EvalUtils.getDataTypeName(value) + " for " + container + " '" + key + "'");
      }
    }

    // Renders a field value; a list is flattened into repeated fields with the same key.
    private void printTextMessage(String key, Object value, StringBuilder sb,
        int indent, Location loc) throws EvalException {
      if (value instanceof SkylarkList) {
        for (Object item : ((SkylarkList) value)) {
          // TODO(bazel-team): There should be some constraint on the fields of the structs
          // in the same list but we ignore that for now.
          printSimpleTextMessage(key, item, sb, indent, loc, "list element in struct field");
        }
      } else {
        printSimpleTextMessage(key, value, sb, indent, loc, "struct field");
      }
    }

    private String escape(String string) {
      // TODO(bazel-team): use guava's SourceCodeEscapers when it's released.
      return string.replace("\"", "\\\"").replace("\n", "\\n");
    }

    private void print(StringBuilder sb, String text, int indent) {
      for (int i = 0; i < indent; i++) {
        sb.append(" ");
      }
      sb.append(text);
      sb.append("\n");
    }
  };

  static {
    SkylarkSignatureProcessor.configureSkylarkFunctions(SkylarkRuleClassFunctions.class);
  }
}
import java.util.*; // https://en.wikipedia.org/wiki/Treap public class TreapImplicitKey { // Modify the following 5 methods to implement your custom operations on the tree. // This example implements Add/Max operations. Operations like Add/Sum, Set/Max can also be implemented. static int modifyOperation(int x, int y) { return x + y; } // query (or combine) operation static int queryOperation(int leftValue, int rightValue) { return Math.max(leftValue, rightValue); } static int deltaEffectOnSegment(int delta, int segmentLength) { if (delta == getNeutralDelta()) return getNeutralDelta(); // Here you must write a fast equivalent of following slow code: // int result = delta; // for (int i = 1; i < segmentLength; i++) result = queryOperation(result, delta); // return result; return delta; } static int getNeutralDelta() { return 0; } static int getNeutralValue() { return Integer.MIN_VALUE; } // generic code static Random random = new Random(); static int joinValueWithDelta(int value, int delta) { if (delta == getNeutralDelta()) return value; return modifyOperation(value, delta); } static int joinDeltas(int delta1, int delta2) { if (delta1 == getNeutralDelta()) return delta2; if (delta2 == getNeutralDelta()) return delta1; return modifyOperation(delta1, delta2); } static void pushDelta(Treap root) { if (root == null) return; root.nodeValue = joinValueWithDelta(root.nodeValue, root.delta); root.subTreeValue = joinValueWithDelta(root.subTreeValue, deltaEffectOnSegment(root.delta, root.size)); if (root.left != null) root.left.delta = joinDeltas(root.left.delta, root.delta); if (root.right != null) root.right.delta = joinDeltas(root.right.delta, root.delta); root.delta = getNeutralDelta(); } public static class Treap { int nodeValue; int subTreeValue; int delta; // delta affects nodeValue, subTreeValue, left.delta and right.delta int size; long prio; Treap left; Treap right; Treap(int value) { nodeValue = value; subTreeValue = value; delta = getNeutralDelta(); size = 1; prio 
= random.nextLong(); } void update() { subTreeValue = queryOperation(queryOperation(getSubTreeValue(left), joinValueWithDelta(nodeValue, delta)), getSubTreeValue(right)); size = 1 + getSize(left) + getSize(right); } } static int getSize(Treap root) { return root == null ? 0 : root.size; } static int getSubTreeValue(Treap root) { return root == null ? getNeutralValue() : joinValueWithDelta(root.subTreeValue, deltaEffectOnSegment(root.delta, root.size)); } public static class TreapPair { Treap left; Treap right; TreapPair(Treap left, Treap right) { this.left = left; this.right = right; } } public static TreapPair split(Treap root, int minRight) { if (root == null) return new TreapPair(null, null); pushDelta(root); if (getSize(root.left) >= minRight) { TreapPair sub = split(root.left, minRight); root.left = sub.right; root.update(); sub.right = root; return sub; } else { TreapPair sub = split(root.right, minRight - getSize(root.left) - 1); root.right = sub.left; root.update(); sub.left = root; return sub; } } public static Treap merge(Treap left, Treap right) { pushDelta(left); pushDelta(right); if (left == null) return right; if (right == null) return left; // if (random.nextInt(left.size + right.size) < left.size) { if (left.prio > right.prio) { left.right = merge(left.right, right); left.update(); return left; } else { right.left = merge(left, right.left); right.update(); return right; } } public static Treap insert(Treap root, int index, int value) { TreapPair t = split(root, index); return merge(merge(t.left, new Treap(value)), t.right); } public static Treap remove(Treap root, int index) { TreapPair t = split(root, index); return merge(t.left, split(t.right, index + 1 - getSize(t.left)).right); } public static Treap modify(Treap root, int a, int b, int delta) { TreapPair t1 = split(root, b + 1); TreapPair t2 = split(t1.left, a); t2.right.delta = joinDeltas(t2.right.delta, delta); return merge(merge(t2.left, t2.right), t1.right); } public static class 
TreapAndResult { Treap treap; int value; TreapAndResult(Treap t, int value) { this.treap = t; this.value = value; } } public static TreapAndResult query(Treap root, int a, int b) { TreapPair t1 = split(root, b + 1); TreapPair t2 = split(t1.left, a); int value = getSubTreeValue(t2.right); return new TreapAndResult(merge(merge(t2.left, t2.right), t1.right), value); } public static void print(Treap root) { if (root == null) return; pushDelta(root); print(root.left); System.out.print(root.nodeValue + " "); print(root.right); } // Random test public static void main(String[] args) { Treap treap = null; List<Integer> list = new ArrayList<>(); Random rnd = new Random(1); for (int step = 0; step < 100000; step++) { int cmd = rnd.nextInt(6); if (cmd < 2 && list.size() < 100) { int pos = rnd.nextInt(list.size() + 1); int value = rnd.nextInt(100); list.add(pos, value); treap = insert(treap, pos, value); } else if (cmd < 3 && list.size() > 0) { int pos = rnd.nextInt(list.size()); list.remove(pos); treap = remove(treap, pos); } else if (cmd < 4 && list.size() > 0) { int b = rnd.nextInt(list.size()); int a = rnd.nextInt(b + 1); int res = list.get(a); for (int i = a + 1; i <= b; i++) res = queryOperation(res, list.get(i)); TreapAndResult tr = query(treap, a, b); treap = tr.treap; if (res != tr.value) throw new RuntimeException(); } else if (cmd < 5 && list.size() > 0) { int b = rnd.nextInt(list.size()); int a = rnd.nextInt(b + 1); int delta = rnd.nextInt(100) - 50; for (int i = a; i <= b; i++) list.set(i, joinValueWithDelta(list.get(i), delta)); treap = modify(treap, a, b, delta); } else { for (int i = 0; i < list.size(); i++) { TreapAndResult tr = query(treap, i, i); treap = tr.treap; int v = tr.value; if (list.get(i) != v) throw new RuntimeException(); } } } System.out.println("Test passed"); } }
/****************************************************************************** * Spine Runtimes Software License * Version 2.3 * <p> * Copyright (c) 2013-2015, Esoteric Software * All rights reserved. * <p> * You are granted a perpetual, non-exclusive, non-sublicensable and * non-transferable license to use, install, execute and perform the Spine * Runtimes Software (the "Software") and derivative works solely for personal * or internal use. Without the written permission of Esoteric Software (see * Section 2 of the Spine Software License Agreement), you may not (a) modify, * translate, adapt or otherwise create derivative works, improvements of the * Software or develop new applications using the Software or (b) remove, * delete, alter or obscure any trademarks or any copyright, trademark, patent * or other intellectual property or proprietary rights notices on or in the * Software, including any copy thereof. Redistributions in binary or source * form must include this license and terms. * <p> * THIS SOFTWARE IS PROVIDED BY ESOTERIC SOFTWARE "AS IS" AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO * EVENT SHALL ESOTERIC SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. *****************************************************************************/ // Ported from the Spine runtime by defrac 2015. 
package defrac.animation.spine;

import defrac.util.Array;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;

/**
 * Setup-pose data for a skeleton: bones, slots, skins, events, animations and IK constraints,
 * plus export metadata (version, hash, images path). Accessors use the property style
 * (e.g. {@code name()} / {@code name(value)}).
 */
public final class SkeletonData {
  @Nonnull
  final Array<BoneData> bones = new Array<>(); // Ordered parents first.
  @Nonnull
  final Array<SlotData> slots = new Array<>(); // Setup pose draw order.
  @Nonnull
  final Array<Skin> skins = new Array<>();
  @Nonnull
  final Array<EventData> events = new Array<>();
  @Nonnull
  final Array<Animation> animations = new Array<>();
  @Nonnull
  final Array<IkConstraintData> ikConstraints = new Array<>();
  @Nullable
  String name;
  @Nullable
  Skin defaultSkin;
  float width, height;
  String version, hash, imagesPath;

  // --- Bones.

  @Nonnull
  public Array<BoneData> bones() {
    return bones;
  }

  /** Linear search by bone name. @return May be null. */
  @Nullable
  public BoneData findBone(@Nonnull final String boneName) {
    final int index = findBoneIndex(boneName);
    return index == -1 ? null : bones.get(index);
  }

  /** @return -1 if the bone was not found. */
  public int findBoneIndex(@Nonnull final String boneName) {
    int index = 0;
    for(final BoneData bone : bones) {
      if(boneName.equals(bone.name)) {
        return index;
      }
      ++index;
    }
    return -1;
  }

  // --- Slots.

  @Nonnull
  public Array<SlotData> slots() {
    return slots;
  }

  /** Linear search by slot name. @return May be null. */
  public SlotData findSlot(@Nonnull final String slotName) {
    final int index = findSlotIndex(slotName);
    return index == -1 ? null : slots.get(index);
  }

  /** @return -1 if the slot was not found. */
  public int findSlotIndex(@Nonnull final String slotName) {
    int index = 0;
    for(final SlotData slot : slots) {
      if(slotName.equals(slot.name)) {
        return index;
      }
      ++index;
    }
    return -1;
  }

  // --- Skins.

  /** @return May be null. */
  @Nullable
  public Skin defaultSkin() {
    return defaultSkin;
  }

  /** @param value May be null. */
  public void defaultSkin(@Nullable final Skin value) {
    defaultSkin = value;
  }

  /** Linear search by skin name. @return May be null. */
  @Nullable
  public Skin findSkin(@Nonnull final String skinName) {
    for(final Skin skin : skins) {
      if(skinName.equals(skin.name)) {
        return skin;
      }
    }
    return null;
  }

  /** Returns all skins, including the default skin. */
  @Nonnull
  public Array<Skin> skins() {
    return skins;
  }

  // --- Events.

  /** @return May be null. */
  @Nullable
  public EventData findEvent(@Nonnull final String eventDataName) {
    for(final EventData eventData : events) {
      if(eventDataName.equals(eventData.name)) {
        return eventData;
      }
    }
    return null;
  }

  @Nonnull
  public Array<EventData> events() {
    return events;
  }

  // --- Animations.

  @Nonnull
  public Array<Animation> animations() {
    return animations;
  }

  /** @return May be null. */
  @Nullable
  public Animation findAnimation(@Nonnull final String animationName) {
    for(final Animation animation : animations) {
      if(animationName.equals(animation.name)) {
        return animation;
      }
    }
    return null;
  }

  // --- IK

  @Nonnull
  public Array<IkConstraintData> ikConstraints() {
    return ikConstraints;
  }

  /** @return May be null. */
  @Nullable
  public IkConstraintData findIkConstraint(@Nonnull final String ikConstraintName) {
    for(final IkConstraintData ikConstraintData : ikConstraints) {
      if(ikConstraintName.equals(ikConstraintData.name)) {
        return ikConstraintData;
      }
    }
    return null;
  }

  // ---

  /** @return May be null. */
  @Nullable
  public String name() {
    return name;
  }

  /** @param value May be null. */
  public void name(@Nullable final String value) {
    name = value;
  }

  public float width() {
    return width;
  }

  public void width(final float value) {
    width = value;
  }

  public float height() {
    return height;
  }

  public void height(final float value) {
    height = value;
  }

  /** Returns the Spine version used to export this data, or null. */
  @Nullable
  public String version() {
    return version;
  }

  /** @param value May be null. */
  public void version(@Nullable final String value) {
    version = value;
  }

  /** @return May be null. */
  @Nullable
  public String hash() {
    return hash;
  }

  /**
   * @param value May be null.
   */
  // NOTE(review): named setHash while every sibling setter uses the property style
  // (e.g. name(value)); kept as-is since renaming would break existing callers.
  public void setHash(@Nullable final String value) {
    hash = value;
  }

  /** @return May be null. */
  @Nullable
  public String imagesPath() {
    return imagesPath;
  }

  /** @param value May be null. */
  public void imagesPath(@Nullable final String value) {
    imagesPath = value;
  }

  public String toString() {
    return name != null ? name : super.toString();
  }
}
/*
 * Copyright (c) 2015 Twitter, Inc. All rights reserved.
 * Licensed under the Apache License v2.0
 * http://www.apache.org/licenses/LICENSE-2.0
 */

package com.twitter.whiskey.nio;

import com.twitter.whiskey.futures.CompletableFuture;
import com.twitter.whiskey.futures.Listener;
import com.twitter.whiskey.futures.ReactiveFuture;
import com.twitter.whiskey.net.Protocol;
import com.twitter.whiskey.util.Origin;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.channels.SelectionKey;
import java.nio.channels.SocketChannel;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.concurrent.TimeUnit;

/**
 * An asynchronous TCP socket interface.
 *
 * <p>All channel state is confined to the owning {@link RunLoop}: public entry points enqueue
 * work via {@code runLoop.execute(...)}, and the {@code on*} callbacks are invoked by the
 * run loop's selector.
 *
 * @author Michael Schore
 * @author Bill Gallagher
 */
public class Socket extends Selectable {

    private final Origin origin;
    private final RunLoop runLoop;

    private boolean closed = false;
    private SocketChannel channel;
    private SelectionKey key;
    private ConnectFuture connectFuture;
    // NOTE(review): created only in finishConnect(); addCloseListener() before the
    // connection completes would NPE — confirm callers always wait for connect.
    private CloseFuture closeFuture;
    // Pending operations; the head of each queue is the in-flight one.
    private Deque<ReadFuture> readQueue = new ArrayDeque<>(1);
    private Deque<WriteFuture> writeQueue = new ArrayDeque<>(32);

    public Socket(Origin origin, RunLoop runLoop) {
        this.origin = origin;
        this.runLoop = runLoop;
    }

    /** Opens a non-blocking channel and starts an async connect to the origin. */
    public ConnectFuture connect() {
        connectFuture = new ConnectFuture();
        runLoop.execute(new Runnable() {
            public void run() {
                try {
                    channel = SocketChannel.open();
                    channel.configureBlocking(false);
                    channel.connect(new InetSocketAddress(origin.getHost(), origin.getPort()));
                    reregister();
                } catch (IOException e) {
                    connectFuture.fail(e);
                    closed = true;
                }
            }
        });
        return connectFuture;
    }

    public void addCloseListener(Listener<Void> listener) {
        closeFuture.addListener(listener);
    }

    public ReadFuture read() {
        return read(new ReadFuture());
    }

    /** Queues a read; registers read interest when this is the only pending read. */
    public ReadFuture read(final ReadFuture readFuture) {
        runLoop.execute(new Runnable() {
            public void run() {
                getReadQueue().add(readFuture);
                if (channel != null && getReadQueue().size() == 1) {
                    reregister();
                }
            }
        });
        return readFuture;
    }

    public ReadFuture read(ByteBuffer readBuffer) {
        return read(new ReadFuture(readBuffer));
    }

    // NOTE(review): timeout arguments are currently ignored here and in the timed
    // write overloads below — confirm whether timed I/O is still planned.
    public ReadFuture read(int timeout, TimeUnit timeoutUnit) {
        return read();
    }

    public WriteFuture write(ByteBuffer data) {
        return write(new ByteBuffer[]{data});
    }

    public WriteFuture write(ByteBuffer[] data) {
        return write(new WriteFuture(data));
    }

    public WriteFuture write(ByteBuffer data, int timeout, TimeUnit timeoutUnit) {
        return write(new ByteBuffer[]{data});
    }

    public WriteFuture write(ByteBuffer[] data, int timeout, TimeUnit timeoutUnit) {
        return write(data);
    }

    /** Queues a write; registers write interest when this is the only pending write. */
    public WriteFuture write(final WriteFuture writeFuture) {
        runLoop.execute(new Runnable() {
            public void run() {
                getWriteQueue().add(writeFuture);
                if (isConnected() && getWriteQueue().size() == 1) {
                    reregister();
                }
            }
        });
        return writeFuture;
    }

    protected Deque<ReadFuture> getReadQueue() {
        return readQueue;
    }

    protected Deque<WriteFuture> getWriteQueue() {
        return writeQueue;
    }

    @Override
    public void onConnect() {
        try {
            channel.finishConnect();
            finishConnect();
        } catch (IOException e) {
            connectFuture.fail(e);
            closed = true;
        }
    }

    // Completes the connect future and creates the close future.
    void finishConnect() throws IOException {
        closeFuture = new CloseFuture();
        connectFuture.set(origin);
        reregister();
    }

    void failConnect(Throwable thr) {
        if (!connectFuture.isDone()) {
            connectFuture.fail(thr);
        }
    }

    /** Selector callback: services the head of the read queue, then re-registers interest. */
    @Override
    public void onReadable() {

        if (closed) {
            return;
        }

        Deque<ReadFuture> readQueue = getReadQueue();
        if (readQueue.isEmpty()) {
            // Nothing pending: drop read interest.
            reregister();
            return;
        }

        ReadFuture currentRead = readQueue.peek();
        assert (!currentRead.isDone());

        boolean complete;
        try {
            complete = currentRead.doRead(channel);
        } catch (IOException e) {
            close(e);
            return;
        }

        if (complete) {
            readQueue.poll();
        }
        reregister();
    }

    /** Selector callback: services the head of the write queue, then re-registers interest. */
    @Override
    public void onWriteable() {

        if (closed) {
            return;
        }

        Deque<WriteFuture> writeQueue = getWriteQueue();
        if (writeQueue.isEmpty()) {
            reregister();
            return;
        }

        WriteFuture currentWrite = writeQueue.peek();
        assert(!currentWrite.isDone());

        boolean complete;
        try {
            complete = currentWrite.doWrite();
        } catch (IOException e) {
            close(e);
            return;
        }

        if (complete) {
            getWriteQueue().poll();
        }
        reregister();
    }

    // Re-registers this socket with the run loop under the current interest set.
    void reregister() {
        runLoop.register(interestSet(), this);
    }

    @Override
    public SocketChannel getChannel() {
        return channel;
    }

    @Override
    public void setSelectionKey(SelectionKey key) {
        this.key = key;
    }

    public boolean isConnected() {
        return !closed && channel != null && channel.isConnected();
    }

    // Interest ops derived from pending work: OP_CONNECT while connecting, otherwise
    // OP_READ/OP_WRITE according to queue contents.
    private int interestSet() {

        if (channel.isConnectionPending()) return SelectionKey.OP_CONNECT;

        int interestSet = 0;
        if (!getReadQueue().isEmpty()) interestSet = SelectionKey.OP_READ;
        if (!getWriteQueue().isEmpty()) interestSet |= SelectionKey.OP_WRITE;
        return interestSet;
    }

    // NOTE(review): unlike close(Throwable), this variant does not cancel the key —
    // presumably the caller already has; confirm.
    @Override
    public void onClose(Throwable e) {
        if (closed) return;
        closed = true;
        key = null;
        closeFuture.fail(e);
    }

    boolean isSecure() {
        return false;
    }

    // Abnormal close: fails the close future with the cause.
    private void close(Throwable e) {
        if (closed) return;
        closed = true;
        if (key != null) key.cancel();
        closeFuture.fail(e);
    }

    // Normal close: completes the close future with null.
    public void close() {
        if (closed) return;
        closed = true;
        if (key != null) key.cancel();
        try {
            channel.close();
        } catch (IOException ignored) {
        }
        closeFuture.set(null);
    }

    public Protocol getProtocol() {
        return Protocol.SPDY_3_1;
    }

    // TODO: split internal futures into public interface and package-private implementation
    public class ConnectFuture extends CompletableFuture<Origin> {
    }

    public class CloseFuture extends CompletableFuture<Void> {
    }

    public class ReadFuture extends CompletableFuture<ByteBuffer> {

        private static final int DEFAULT_BUFFER_SIZE = 18 * 1024;
        private final ByteBuffer buffer;

        ReadFuture() {
            this(ByteBuffer.allocate(DEFAULT_BUFFER_SIZE));
        }

        ReadFuture(ByteBuffer buffer) {
            this.buffer = buffer;
        }

        // One-shot: a single channel.read() either yields data (buffer flipped and set)
        // or EOF (failed with IOException). Always returns true — the future is done.
        boolean doRead(SocketChannel channel) throws IOException {

            ByteBuffer buffer = getBuffer();
            int bytesRead = channel.read(buffer);
            assert (bytesRead != 0);

            if (bytesRead > 0) {
                buffer.flip();
                set(buffer);
            } else {
                fail(new IOException("connection closed"));
            }
            return true;
        }

        @Override
        public boolean cancel(boolean mayInterruptIfRunning) {
            return getReadQueue().contains(this)
                && super.cancel(mayInterruptIfRunning)
                && getReadQueue().remove(this);
        }

        public ByteBuffer getBuffer() {
            return buffer;
        }
    }

    public class WriteFuture extends ReactiveFuture<Long, Long> {

        private ByteBuffer[] data;
        ArrayList<Long> bytesWritten;
        Long totalBytesWritten;

        WriteFuture(ByteBuffer[] data) {
            this.data = data;
            bytesWritten = new ArrayList<>();
            totalBytesWritten = (long)0;
        }

        ByteBuffer[] pending() throws IOException {
            return data;
        }

        public void setPending(ByteBuffer[] pending) {
            this.data = pending;
        }

        // Gathering write; complete when the final buffer is fully drained. Streams
        // per-call byte counts to listeners via provide().
        boolean doWrite() throws IOException {

            long bytesWritten = channel.write(data);
            ByteBuffer finalData = data[data.length - 1];
            boolean writeComplete = finalData.position() == finalData.limit();
            provide(bytesWritten);
            if (writeComplete) finish();
            return writeComplete;
        }

        @Override
        protected void accumulate(Long element) {
            bytesWritten.add(element);
            totalBytesWritten += element;
        }

        @Override
        protected Iterable<Long> drain() {
            return bytesWritten;
        }

        @Override
        protected boolean complete() {
            return set(totalBytesWritten);
        }

        @Override
        public boolean cancel(boolean mayInterruptIfRunning) {
            return getWriteQueue().contains(this)
                && super.cancel(mayInterruptIfRunning)
                && getWriteQueue().remove(this);
        }
    }
}
/* * Created on Dec 1, 2009 * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * Copyright @2009-2013 the original author or authors. */ package org.fest.swing.driver; import static java.util.concurrent.TimeUnit.SECONDS; import static org.fest.assertions.Assertions.assertThat; import static org.fest.swing.driver.JProgressBarIndeterminateQuery.isIndeterminate; import static org.fest.swing.driver.JProgressBarMinimumAndMaximumQuery.minimumAndMaximumOf; import static org.fest.swing.driver.JProgressBarStringQuery.stringOf; import static org.fest.swing.driver.JProgressBarValueQuery.valueOf; import static org.fest.swing.driver.JProgressBarWaitUntilIsDeterminate.waitUntilValueIsDeterminate; import static org.fest.swing.driver.JProgressBarWaitUntilValueIsEqualToExpectedTask.waitUntilValueIsEqualToExpected; import static org.fest.swing.driver.TextAssert.verifyThat; import static org.fest.swing.timing.Timeout.timeout; import static org.fest.util.Preconditions.checkNotNull; import java.util.regex.Pattern; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.swing.JProgressBar; import org.fest.swing.annotation.RunsInEDT; import org.fest.swing.core.Robot; import org.fest.swing.exception.WaitTimedOutError; import org.fest.swing.timing.Timeout; import org.fest.swing.util.Pair; import org.fest.util.InternalApi; /** * <p> * Supports functional testing of {@code JProgressBar}s. * </p> * * <p> * <b>Note:</b> This class is intended for internal use only. 
Please use the classes in the package * {@link org.fest.swing.fixture} in your tests. * </p> * * @author Alex Ruiz * * @since 1.2 */ @InternalApi public class JProgressBarDriver extends JComponentDriver implements TextDisplayDriver<JProgressBar> { private static final Timeout DEFAULT_TIMEOUT = timeout(30, SECONDS); private static final String TEXT_PROPERTY = "string"; /** * Creates a new {@link JProgressBarDriver}. * * @param robot the robot to use to simulate user input. */ public JProgressBarDriver(@Nonnull Robot robot) { super(robot); } /** * Asserts that the text of the {@code JProgressBar} is equal to the specified {@code String}. * * @param progressBar the target {@code JProgressBar}. * @param expected the text to match. * @throws AssertionError if the text of the {@code JProgressBar} is not equal to the given one. * @see JProgressBar#getString() */ @RunsInEDT @Override public void requireText(@Nonnull JProgressBar progressBar, @Nullable String expected) { verifyThat(stringOf(progressBar)).as(propertyName(progressBar, TEXT_PROPERTY)).isEqualOrMatches(expected); } /** * Asserts that the text of the {@code JProgressBar} matches the given regular expression pattern. * * @param progressBar the target {@code JProgressBar}. * @param pattern the regular expression pattern to match. * @throws AssertionError if the text of the {@code JProgressBar} does not match the given regular expression pattern. * @throws NullPointerException if the given regular expression pattern is {@code null}. * @see JProgressBar#getString() */ @RunsInEDT @Override public void requireText(@Nonnull JProgressBar progressBar, @Nonnull Pattern pattern) { verifyThat(stringOf(progressBar)).as(propertyName(progressBar, TEXT_PROPERTY)).matches(pattern); } /** * Verifies that the value of the given {@code JProgressBar} is equal to the given one. * * @param progressBar the target {@code JProgressBar}. * @param value the expected value. 
* @throws AssertionError if the value of the {@code JProgressBar} is not equal to the given one. */ @RunsInEDT public void requireValue(@Nonnull JProgressBar progressBar, int value) { assertThat(valueOf(progressBar)).as(propertyName(progressBar, "value")).isEqualTo(value); } /** * Verifies that the given {@code JProgressBar} is in indeterminate mode. * * @param progressBar the target {@code JProgressBar}. * @throws AssertionError if the given {@code JProgressBar} is not in indeterminate mode. */ @RunsInEDT public void requireIndeterminate(@Nonnull JProgressBar progressBar) { requireIndeterminate(progressBar, true); } /** * Verifies that the given {@code JProgressBar} is in determinate mode. * * @param progressBar the target {@code JProgressBar}. * @throws AssertionError if the given {@code JProgressBar} is not in determinate mode. */ @RunsInEDT public void requireDeterminate(@Nonnull JProgressBar progressBar) { requireIndeterminate(progressBar, false); } @RunsInEDT private void requireIndeterminate(@Nonnull JProgressBar progressBar, boolean indeterminate) { assertThat(isIndeterminate(progressBar)).as(propertyName(progressBar, "indeterminate")).isEqualTo(indeterminate); } /** * Waits until the value of the given {@code JProgressBar} is equal to the given value. * * @param progressBar the target {@code JProgressBar}. * @param value the expected value. * @throws IllegalArgumentException if the given value is less than the {@code JProgressBar}'s minimum value. * @throws IllegalArgumentException if the given value is greater than the {@code JProgressBar}'s maximum value. * @throws WaitTimedOutError if the value of the {@code JProgressBar} does not reach the expected value within 30 * seconds. */ @RunsInEDT public void waitUntilValueIs(@Nonnull JProgressBar progressBar, int value) { waitUntilValueIs(progressBar, value, DEFAULT_TIMEOUT); } /** * Waits until the value of the given {@code JProgressBar} is equal to the given value. 
* * @param progressBar the target {@code JProgressBar}. * @param value the expected value. * @param timeout the amount of time to wait. * @throws IllegalArgumentException if the given value is less than the {@code JProgressBar}'s minimum value. * @throws IllegalArgumentException if the given value is greater than the {@code JProgressBar}'s maximum value. * @throws NullPointerException if the given timeout is {@code null}. * @throws WaitTimedOutError if the value of the {@code JProgressBar} does not reach the expected value within the * specified timeout. */ @RunsInEDT public void waitUntilValueIs(@Nonnull JProgressBar progressBar, int value, @Nonnull Timeout timeout) { checkInBetweenMinAndMax(progressBar, value); checkNotNull(timeout); waitUntilValueIsEqualToExpected(progressBar, value, timeout); } @RunsInEDT private void checkInBetweenMinAndMax(@Nonnull JProgressBar progressBar, int value) { Pair<Integer, Integer> minAndMax = minimumAndMaximumOf(progressBar); assertIsInBetweenMinAndMax(value, minAndMax.first, minAndMax.second); } private void assertIsInBetweenMinAndMax(int value, int min, int max) { if (value >= min && value <= max) { return; } String msg = String.format("Value <%d> should be between <[%d, %d]>", value, min, max); throw new IllegalArgumentException(msg); } /** * Waits until the value of the given {@code JProgressBar} is in determinate mode. * * @param progressBar the target {@code JProgressBar}. * @throws WaitTimedOutError if the {@code JProgressBar} does not reach determinate mode within 30 seconds. */ @RunsInEDT public void waitUntilIsDeterminate(@Nonnull JProgressBar progressBar) { waitUntilIsDeterminate(progressBar, DEFAULT_TIMEOUT); } /** * Waits until the value of the given {@code JProgressBar} is in determinate mode. * * @param progressBar the target {@code JProgressBar}. * @param timeout the amount of time to wait. * @throws NullPointerException if the given timeout is {@code null}. 
* @throws WaitTimedOutError if the {@code JProgressBar} does not reach determinate mode within the specified timeout. */ @RunsInEDT public void waitUntilIsDeterminate(@Nonnull JProgressBar progressBar, @Nonnull Timeout timeout) { checkNotNull(timeout); waitUntilValueIsDeterminate(progressBar, timeout); } /** * Returns the text of the given {@code JProgressBar}. * * @param progressBar the target {@code JProgressBar}. * @return the text of the given {@code JProgressBar}. */ @Override @RunsInEDT public @Nullable String textOf(@Nonnull JProgressBar progressBar) { return stringOf(progressBar); } }
/* This file is generated by TestGenerator, any edits will be overwritten by the next generation. */ package org.antlr.v4.test.runtime.javascript.node; import org.junit.Ignore; import org.junit.Test; import static org.junit.Assert.*; @SuppressWarnings("unused") public class TestParserErrors extends BaseTest { /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */ @Test public void testConjuringUpToken() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(63); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("a : 'a' x='b' {console.log(\"conjured=\" + $x);} 'c' ;"); String grammar = grammarBuilder.toString(); String input ="ac"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "a", input, false); assertEquals("conjured=[@-1,-1:-1='<missing 'b'>',<2>,1:1]\n", found); assertEquals("line 1:1 missing 'b' at 'c'\n", this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */ @Test public void testConjuringUpTokenFromSet() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(69); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("a : 'a' x=('b'|'c') {console.log(\"conjured=\" + $x);} 'd' ;"); String grammar = grammarBuilder.toString(); String input ="ad"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "a", input, false); assertEquals("conjured=[@-1,-1:-1='<missing 'b'>',<2>,1:1]\n", found); assertEquals("line 1:1 missing {'b', 'c'} at 'd'\n", this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. 
*/ @Test public void testContextListGetters() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(211); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("@parser::members{\n"); grammarBuilder.append(" function foo() {\n"); grammarBuilder.append(" var s = new SContext();\n"); grammarBuilder.append(" var a = s.a();\n"); grammarBuilder.append(" var b = s.b();\n"); grammarBuilder.append(" };\n"); grammarBuilder.append("}\n"); grammarBuilder.append("s : (a | b)+;\n"); grammarBuilder.append("a : 'a' {process.stdout.write('a');};\n"); grammarBuilder.append("b : 'b' {process.stdout.write('b');};"); String grammar = grammarBuilder.toString(); String input ="abab"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "s", input, true); assertEquals("abab\n", found); assertNull(this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */ @Test public void testDuplicatedLeftRecursiveCall_1() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(63); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("start : expr EOF;\n"); grammarBuilder.append("expr : 'x'\n"); grammarBuilder.append(" | expr expr\n"); grammarBuilder.append(" ;"); String grammar = grammarBuilder.toString(); String input ="x"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "start", input, true); assertEquals("", found); assertNull(this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. 
*/ @Test public void testDuplicatedLeftRecursiveCall_2() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(63); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("start : expr EOF;\n"); grammarBuilder.append("expr : 'x'\n"); grammarBuilder.append(" | expr expr\n"); grammarBuilder.append(" ;"); String grammar = grammarBuilder.toString(); String input ="xx"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "start", input, true); assertEquals("", found); assertNull(this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */ @Test public void testDuplicatedLeftRecursiveCall_3() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(63); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("start : expr EOF;\n"); grammarBuilder.append("expr : 'x'\n"); grammarBuilder.append(" | expr expr\n"); grammarBuilder.append(" ;"); String grammar = grammarBuilder.toString(); String input ="xxx"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "start", input, true); assertEquals("", found); assertNull(this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. 
*/ @Test public void testDuplicatedLeftRecursiveCall_4() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(63); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("start : expr EOF;\n"); grammarBuilder.append("expr : 'x'\n"); grammarBuilder.append(" | expr expr\n"); grammarBuilder.append(" ;"); String grammar = grammarBuilder.toString(); String input ="xxxx"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "start", input, true); assertEquals("", found); assertNull(this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */ @Test public void testInvalidATNStateRemoval() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(98); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("start : ID ':' expr;\n"); grammarBuilder.append("expr : primary expr? {} | expr '->' ID;\n"); grammarBuilder.append("primary : ID;\n"); grammarBuilder.append("ID : [a-z]+;"); String grammar = grammarBuilder.toString(); String input ="x:x"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "start", input, false); assertEquals("", found); assertNull(this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. 
*/ @Test public void testInvalidEmptyInput() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(36); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("start : ID+;\n"); grammarBuilder.append("ID : [a-z]+;"); String grammar = grammarBuilder.toString(); String input =""; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "start", input, true); assertEquals("", found); assertEquals("line 1:0 missing ID at '<EOF>'\n", this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */ @Test public void testLL1ErrorInfo() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(301); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("start : animal (AND acClass)? service EOF;\n"); grammarBuilder.append("animal : (DOG | CAT );\n"); grammarBuilder.append("service : (HARDWARE | SOFTWARE) ;\n"); grammarBuilder.append("AND : 'and';\n"); grammarBuilder.append("DOG : 'dog';\n"); grammarBuilder.append("CAT : 'cat';\n"); grammarBuilder.append("HARDWARE: 'hardware';\n"); grammarBuilder.append("SOFTWARE: 'software';\n"); grammarBuilder.append("WS : ' ' -> skip ;\n"); grammarBuilder.append("acClass\n"); grammarBuilder.append("@init\n"); grammarBuilder.append("{console.log(this.getExpectedTokens().toString(this.literalNames));}\n"); grammarBuilder.append(" : ;"); String grammar = grammarBuilder.toString(); String input ="dog and software"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "start", input, false); assertEquals("{'hardware', 'software'}\n", found); assertNull(this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. 
*/ @Test public void testLL2() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(46); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("a : 'a' 'b'\n"); grammarBuilder.append(" | 'a' 'c'\n"); grammarBuilder.append(";\n"); grammarBuilder.append("q : 'e' ;"); String grammar = grammarBuilder.toString(); String input ="ae"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "a", input, false); assertEquals("", found); assertEquals("line 1:1 no viable alternative at input 'ae'\n", this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */ @Test public void testLL3() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(55); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("a : 'a' 'b'* 'c'\n"); grammarBuilder.append(" | 'a' 'b' 'd'\n"); grammarBuilder.append(";\n"); grammarBuilder.append("q : 'e' ;"); String grammar = grammarBuilder.toString(); String input ="abe"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "a", input, false); assertEquals("", found); assertEquals("line 1:2 no viable alternative at input 'abe'\n", this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. 
*/ @Test public void testLLStar() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(48); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("a : 'a'+ 'b'\n"); grammarBuilder.append(" | 'a'+ 'c'\n"); grammarBuilder.append(";\n"); grammarBuilder.append("q : 'e' ;"); String grammar = grammarBuilder.toString(); String input ="aaae"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "a", input, false); assertEquals("", found); assertEquals("line 1:3 no viable alternative at input 'aaae'\n", this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */ @Test public void testMultiTokenDeletionBeforeLoop() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(28); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("a : 'a' 'b'* 'c';"); String grammar = grammarBuilder.toString(); String input ="aacabc"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "a", input, false); assertEquals("", found); assertEquals("line 1:1 extraneous input 'a' expecting {'b', 'c'}\n", this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */ @Test public void testMultiTokenDeletionBeforeLoop2() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(36); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("a : 'a' ('b'|'z'{})* 'c';"); String grammar = grammarBuilder.toString(); String input ="aacabc"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "a", input, false); assertEquals("", found); assertEquals("line 1:1 extraneous input 'a' expecting {'b', 'z', 'c'}\n", this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. 
*/ @Test public void testMultiTokenDeletionDuringLoop() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(29); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("a : 'a' 'b'* 'c' ;"); String grammar = grammarBuilder.toString(); String input ="abaaababc"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "a", input, false); assertEquals("", found); assertEquals( "line 1:2 extraneous input 'a' expecting {'b', 'c'}\n" + "line 1:6 extraneous input 'a' expecting {'b', 'c'}\n", this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */ @Test public void testMultiTokenDeletionDuringLoop2() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(37); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("a : 'a' ('b'|'z'{})* 'c' ;"); String grammar = grammarBuilder.toString(); String input ="abaaababc"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "a", input, false); assertEquals("", found); assertEquals( "line 1:2 extraneous input 'a' expecting {'b', 'z', 'c'}\n" + "line 1:6 extraneous input 'a' expecting {'b', 'z', 'c'}\n", this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */ @Test public void testNoViableAltAvoidance() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(83); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("s : e '!' ;\n"); grammarBuilder.append("e : 'a' 'b'\n"); grammarBuilder.append(" | 'a'\n"); grammarBuilder.append(" ;\n"); grammarBuilder.append("DOT : '.' 
;\n"); grammarBuilder.append("WS : [ \\t\\r\\n]+ -> skip;"); String grammar = grammarBuilder.toString(); String input ="a."; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "s", input, false); assertEquals("", found); assertEquals("line 1:1 mismatched input '.' expecting '!'\n", this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */ @Test public void testSingleSetInsertion() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(34); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("a : 'a' ('b'|'c') 'd' ;"); String grammar = grammarBuilder.toString(); String input ="ad"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "a", input, false); assertEquals("", found); assertEquals("line 1:1 missing {'b', 'c'} at 'd'\n", this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */ @Test public void testSingleSetInsertionConsumption() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(82); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("myset: ('b'|'c') ;\n"); grammarBuilder.append("a: 'a' myset 'd' {console.log(\"\" + $myset.stop);} ; "); String grammar = grammarBuilder.toString(); String input ="ad"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "a", input, false); assertEquals("[@0,0:0='a',<3>,1:0]\n", found); assertEquals("line 1:1 missing {'b', 'c'} at 'd'\n", this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. 
*/ @Test public void testSingleTokenDeletion() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(24); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("a : 'a' 'b' ;"); String grammar = grammarBuilder.toString(); String input ="aab"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "a", input, false); assertEquals("", found); assertEquals("line 1:1 extraneous input 'a' expecting 'b'\n", this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */ @Test public void testSingleTokenDeletionBeforeLoop() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(25); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("a : 'a' 'b'* ;"); String grammar = grammarBuilder.toString(); String input ="aabc"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "a", input, false); assertEquals("", found); assertEquals( "line 1:1 extraneous input 'a' expecting {<EOF>, 'b'}\n" + "line 1:3 token recognition error at: 'c'\n", this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */ @Test public void testSingleTokenDeletionBeforeLoop2() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(32); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("a : 'a' ('b'|'z'{})*;"); String grammar = grammarBuilder.toString(); String input ="aabc"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "a", input, false); assertEquals("", found); assertEquals( "line 1:1 extraneous input 'a' expecting {<EOF>, 'b', 'z'}\n" + "line 1:3 token recognition error at: 'c'\n", this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. 
*/ @Test public void testSingleTokenDeletionConsumption() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(82); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("myset: ('b'|'c') ;\n"); grammarBuilder.append("a: 'a' myset 'd' {console.log(\"\" + $myset.stop);} ; "); String grammar = grammarBuilder.toString(); String input ="aabd"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "a", input, false); assertEquals("[@2,2:2='b',<1>,1:2]\n", found); assertEquals("line 1:1 extraneous input 'a' expecting {'b', 'c'}\n", this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */ @Test public void testSingleTokenDeletionDuringLoop() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(29); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("a : 'a' 'b'* 'c' ;"); String grammar = grammarBuilder.toString(); String input ="ababbc"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "a", input, false); assertEquals("", found); assertEquals("line 1:2 extraneous input 'a' expecting {'b', 'c'}\n", this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. 
*/ @Test public void testSingleTokenDeletionDuringLoop2() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(37); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("a : 'a' ('b'|'z'{})* 'c' ;"); String grammar = grammarBuilder.toString(); String input ="ababbc"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "a", input, false); assertEquals("", found); assertEquals("line 1:2 extraneous input 'a' expecting {'b', 'z', 'c'}\n", this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */ @Test public void testSingleTokenDeletionExpectingSet() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(30); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("a : 'a' ('b'|'c') ;"); String grammar = grammarBuilder.toString(); String input ="aab"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "a", input, false); assertEquals("", found); assertEquals("line 1:1 extraneous input 'a' expecting {'b', 'c'}\n", this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */ @Test public void testSingleTokenInsertion() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(28); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("a : 'a' 'b' 'c' ;"); String grammar = grammarBuilder.toString(); String input ="ac"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "a", input, false); assertEquals("", found); assertEquals("line 1:1 missing 'b' at 'c'\n", this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. 
*/ @Test public void testTokenMismatch() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(24); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("a : 'a' 'b' ;"); String grammar = grammarBuilder.toString(); String input ="aa"; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "a", input, false); assertEquals("", found); assertEquals("line 1:1 mismatched input 'a' expecting 'b'\n", this.stderrDuringParse); } /* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */ @Test public void testTokenMismatch2() throws Exception { mkdir(tmpdir); StringBuilder grammarBuilder = new StringBuilder(165); grammarBuilder.append("grammar T;\n"); grammarBuilder.append("\n"); grammarBuilder.append("stat: ( '(' expr? ')' )? EOF ;\n"); grammarBuilder.append("expr: ID '=' STR ;\n"); grammarBuilder.append("\n"); grammarBuilder.append("ERR : '~FORCE_ERROR~' ;\n"); grammarBuilder.append("ID : [a-zA-Z]+ ;\n"); grammarBuilder.append("STR : '\"' ~[\"]* '\"' ;\n"); grammarBuilder.append("WS : [ \\t\\r\\n]+ -> skip ;"); String grammar = grammarBuilder.toString(); String input ="( ~FORCE_ERROR~ "; String found = execParser("T.g4", grammar, "TParser", "TLexer", "TListener", "TVisitor", "stat", input, false); assertEquals("", found); assertEquals("line 1:2 mismatched input '~FORCE_ERROR~' expecting ')'\n", this.stderrDuringParse); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.security.authorization.permission; import com.google.common.collect.ImmutableSet; import org.apache.jackrabbit.api.security.JackrabbitAccessControlList; import org.apache.jackrabbit.commons.jackrabbit.authorization.AccessControlUtils; import org.apache.jackrabbit.oak.AbstractSecurityTest; import org.apache.jackrabbit.oak.api.CommitFailedException; import org.apache.jackrabbit.oak.api.Root; import org.apache.jackrabbit.oak.api.Tree; import org.apache.jackrabbit.oak.commons.PathUtils; import org.apache.jackrabbit.oak.plugins.tree.TreeUtil; import org.apache.jackrabbit.oak.security.authorization.ProviderCtx; import org.apache.jackrabbit.oak.spi.commit.MoveTracker; import org.apache.jackrabbit.oak.spi.commit.Validator; import org.apache.jackrabbit.oak.spi.commit.VisibleValidator; import org.apache.jackrabbit.oak.spi.nodetype.NodeTypeConstants; import org.apache.jackrabbit.oak.spi.security.ConfigurationParameters; import org.apache.jackrabbit.oak.spi.security.Context; import org.apache.jackrabbit.oak.spi.security.authorization.permission.PermissionProvider; import org.apache.jackrabbit.oak.spi.security.authorization.permission.Permissions; import 
org.apache.jackrabbit.oak.spi.security.authorization.restriction.RestrictionProvider; import org.apache.jackrabbit.oak.spi.security.principal.EveryonePrincipal; import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants; import org.apache.jackrabbit.oak.spi.state.NodeState; import org.jetbrains.annotations.NotNull; import org.junit.After; import org.junit.Before; import org.junit.Test; import javax.jcr.security.AccessControlManager; import java.security.Principal; import java.util.Set; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class MoveAwarePermissionValidatorTest extends AbstractSecurityTest { private Tree t; private PermissionProvider pp; private JackrabbitAccessControlList acl; @Before public void before() throws Exception { super.before(); Tree rootTree = root.getTree(PathUtils.ROOT_PATH); TreeUtil.addChild(rootTree, "src", NodeTypeConstants.NT_OAK_UNSTRUCTURED); TreeUtil.addChild(rootTree, "dest", NodeTypeConstants.NT_OAK_UNSTRUCTURED); root.commit(); } @After public void after() throws Exception { try { if (acl != null) { getAccessControlManager(root).removePolicy(acl.getPath(), acl); } Tree src = root.getTree("/src"); if (src.exists()) { src.remove(); } Tree dest = root.getTree("/dest"); if (dest.exists()) { dest.remove(); } root.commit(); } finally { super.after(); } } private void grant(@NotNull String path, @NotNull Principal principal, @NotNull String... 
privilegeNames) throws Exception { AccessControlManager acMgr = getAccessControlManager(root); JackrabbitAccessControlList acl = AccessControlUtils.getAccessControlList(acMgr, path); acl.addEntry(principal, AccessControlUtils.privilegesFromNames(acMgr, privilegeNames), true); acMgr.setPolicy(path, acl); root.commit(); this.acl = acl; } @NotNull private MoveAwarePermissionValidator createRootValidator(@NotNull Set<Principal> principals, @NotNull MoveTracker tracker) { ProviderCtx ctx = mock(ProviderCtx.class); when(ctx.getSecurityProvider()).thenReturn(getSecurityProvider()); when(ctx.getTreeProvider()).thenReturn(getTreeProvider()); when(ctx.getRootProvider()).thenReturn(getRootProvider()); String wspName = root.getContentSession().getWorkspaceName(); Root readonlyRoot = getRootProvider().createReadOnlyRoot(root); t = readonlyRoot.getTree(PathUtils.ROOT_PATH); pp = spy(new PermissionProviderImpl(readonlyRoot, wspName, principals, RestrictionProvider.EMPTY, ConfigurationParameters.EMPTY, Context.DEFAULT, ctx)); PermissionValidatorProvider pvp = new PermissionValidatorProvider(wspName, principals, tracker, ctx); NodeState ns = getTreeProvider().asNodeState(t); return new MoveAwarePermissionValidator(ns, ns, pp, pvp, tracker); } @Test public void testChildNodeAddedNoMatchingMove() throws Exception { MoveAwarePermissionValidator maValidator = spy(createRootValidator(adminSession.getAuthInfo().getPrincipals(), new MoveTracker())); Validator validator = maValidator.childNodeAdded("name", mock(NodeState.class)); assertTrue(validator instanceof VisibleValidator); verify(maValidator, times(1)).checkPermissions(t.getChild("name"), false, Permissions.ADD_NODE); } @Test public void testChildNodeAddedNonExistingSrc() throws Exception { MoveTracker moveTracker = new MoveTracker(); moveTracker.addMove("/srcNonExisting", "/dest"); MoveAwarePermissionValidator maValidator = spy(createRootValidator(adminSession.getAuthInfo().getPrincipals(), moveTracker)); Validator validator = 
maValidator.childNodeAdded("dest", mock(NodeState.class)); assertTrue(validator instanceof VisibleValidator); verify(maValidator, times(1)).checkPermissions(t.getChild("dest"), false, Permissions.ADD_NODE); verify(pp, never()).isGranted(t.getChild("src"), null, Permissions.REMOVE_NODE); } @Test public void testChildNodeAddedExistingSrc() throws Exception { MoveTracker moveTracker = new MoveTracker(); moveTracker.addMove("/src", "/dest"); MoveAwarePermissionValidator maValidator = spy(createRootValidator(adminSession.getAuthInfo().getPrincipals(), moveTracker)); Validator validator = maValidator.childNodeAdded("dest", mock(NodeState.class)); assertNull(validator); verify(maValidator, times(1)).checkPermissions(t.getChild("dest"), false, Permissions.ADD_NODE|Permissions.NODE_TYPE_MANAGEMENT); verify(pp, times(1)).isGranted(t.getChild("src"), null, Permissions.REMOVE_NODE); } @Test public void testChildNodeAddedNullPraent() throws Exception { MoveTracker moveTracker = new MoveTracker(); moveTracker.addMove("/src", "/dest"); MoveAwarePermissionValidator maValidator = spy(createRootValidator(adminSession.getAuthInfo().getPrincipals(), moveTracker)); when(maValidator.getParentAfter()).thenReturn(null); Validator validator = maValidator.childNodeAdded("dest", mock(NodeState.class)); assertTrue(validator instanceof VisibleValidator); verify(maValidator, times(1)).checkPermissions(t.getChild("dest"), false, Permissions.ADD_NODE); verify(pp, never()).isGranted(t.getChild("src"), null, Permissions.REMOVE_NODE); } @Test(expected = CommitFailedException.class) public void testChildNodeAddedMissingPermissionAtSrc() throws Exception { grant("/", EveryonePrincipal.getInstance(), PrivilegeConstants.JCR_ADD_CHILD_NODES, PrivilegeConstants.JCR_NODE_TYPE_MANAGEMENT); MoveTracker moveTracker = new MoveTracker(); moveTracker.addMove("/src", "/dest"); MoveAwarePermissionValidator maValidator = spy(createRootValidator(ImmutableSet.of(EveryonePrincipal.getInstance()), moveTracker)); try { 
maValidator.childNodeAdded("dest", mock(NodeState.class)); } catch (CommitFailedException e){ verify(maValidator, times(1)).checkPermissions(t.getChild("dest"), false, Permissions.ADD_NODE|Permissions.NODE_TYPE_MANAGEMENT); verify(pp, times(1)).isGranted(t.getChild("src"), null, Permissions.REMOVE_NODE); assertTrue(e.isAccessViolation()); assertEquals(0, e.getCode()); throw e; } } @Test public void testChildNodeDeletedNoMatchingMove() throws Exception { MoveAwarePermissionValidator maValidator = spy(createRootValidator(adminSession.getAuthInfo().getPrincipals(), new MoveTracker())); Validator validator = maValidator.childNodeDeleted("name", mock(NodeState.class)); assertNull(validator); verify(maValidator, times(1)).checkPermissions(t.getChild("name"), true, Permissions.REMOVE_NODE); } @Test public void testChildNodeDeletedNonExistingDestination() throws Exception { MoveTracker moveTracker = new MoveTracker(); moveTracker.addMove("/src", "/nonExistingDest"); MoveAwarePermissionValidator maValidator = spy(createRootValidator(adminSession.getAuthInfo().getPrincipals(), moveTracker)); Validator validator = maValidator.childNodeDeleted("src", mock(NodeState.class)); assertNull(validator); verify(maValidator, times(1)).checkPermissions(t.getChild("src"), true, Permissions.REMOVE_NODE); verify(pp, never()).isGranted(t.getChild("nonExistingDest"), null, Permissions.ADD_NODE|Permissions.NODE_TYPE_MANAGEMENT); } @Test public void testChildNodeDeletedExistingDestination() throws Exception { MoveTracker moveTracker = new MoveTracker(); moveTracker.addMove("/src", "/dest"); MoveAwarePermissionValidator maValidator = spy(createRootValidator(adminSession.getAuthInfo().getPrincipals(), moveTracker)); Validator validator = maValidator.childNodeDeleted("src", mock(NodeState.class)); assertNull(validator); verify(maValidator, times(1)).checkPermissions(t.getChild("src"), true, Permissions.REMOVE_NODE); verify(pp, times(1)).isGranted(t.getChild("dest"), null, 
Permissions.ADD_NODE|Permissions.NODE_TYPE_MANAGEMENT); } @Test public void testChildNodeDeletedNullParent() throws Exception { MoveTracker moveTracker = new MoveTracker(); moveTracker.addMove("/src", "/dest"); MoveAwarePermissionValidator maValidator = spy(createRootValidator(adminSession.getAuthInfo().getPrincipals(), moveTracker)); when(maValidator.getParentBefore()).thenReturn(null); Validator validator = maValidator.childNodeDeleted("src", mock(NodeState.class)); assertNull(validator); verify(maValidator, times(1)).checkPermissions(t.getChild("src"), true, Permissions.REMOVE_NODE); verify(pp, never()).isGranted(t.getChild("dest"), null, Permissions.ADD_NODE|Permissions.NODE_TYPE_MANAGEMENT); } @Test(expected = CommitFailedException.class) public void testChildNodeDeletedMissingPermissionAtDestination() throws Exception { grant(PathUtils.ROOT_PATH, EveryonePrincipal.getInstance(), PrivilegeConstants.JCR_REMOVE_CHILD_NODES, PrivilegeConstants.JCR_REMOVE_NODE); MoveTracker moveTracker = new MoveTracker(); moveTracker.addMove("/src", "/dest"); MoveAwarePermissionValidator maValidator = spy(createRootValidator(ImmutableSet.of(EveryonePrincipal.getInstance()), moveTracker)); try { maValidator.childNodeDeleted("src", mock(NodeState.class)); } catch (CommitFailedException e){ verify(maValidator, times(1)).checkPermissions(t.getChild("src"), true, Permissions.REMOVE_NODE); verify(pp, times(1)).isGranted(t.getChild("dest"), null, Permissions.ADD_NODE|Permissions.NODE_TYPE_MANAGEMENT); assertTrue(e.isAccessViolation()); assertEquals(0, e.getCode()); throw e; } } }
/*
 * Copyright 2017-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.distributed.build_client;

import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;

import com.facebook.buck.distributed.DistBuildService;
import com.facebook.buck.distributed.thrift.BuildSlaveInfo;
import com.facebook.buck.distributed.thrift.BuildSlaveRunId;
import com.facebook.buck.distributed.thrift.LogLineBatch;
import com.facebook.buck.distributed.thrift.LogLineBatchRequest;
import com.facebook.buck.distributed.thrift.LogStreamType;
import com.facebook.buck.distributed.thrift.SlaveStream;
import com.facebook.buck.distributed.thrift.StreamLogs;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.io.filesystem.TestProjectFilesystems;
import com.facebook.buck.util.environment.Platform;
import com.google.common.collect.ImmutableList;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Stream;
import org.easymock.EasyMock;
import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

/**
 * Tests for {@code LogStateTracker}: verifies that per-slave log-line batches
 * are materialized into per-run STDOUT/STDERR files under the log directory,
 * and that subsequent {@code createStreamLogRequests} calls resume from the
 * batch numbers already seen rather than re-requesting everything.
 */
public class LogStateTrackerTest {
  @Rule public TemporaryFolder projectDir = new TemporaryFolder();

  private static final String LOG_DIR = "logs";
  private static final String RUN_ONE_ID = "buildSlaveRunIdOne";
  private static final String RUN_TWO_ID = "buildSlaveRunIdTwo";

  /*
   **********************************
   * Log streaming test data
   **********************************
   */

  // Paths (relative to logDir) where the tracker is expected to write the
  // streamed log lines for each run/stream combination.
  private static final String RUN_ONE_STD_ERR_LOG =
      "dist-build-slave-buildSlaveRunIdOne/STDERR.log";
  private static final String RUN_ONE_STD_OUT_LOG =
      "dist-build-slave-buildSlaveRunIdOne/STDOUT.log";
  private static final String RUN_TWO_STD_OUT_LOG =
      "dist-build-slave-buildSlaveRunIdTwo/STDOUT.log";

  private Path logDir;
  private LogStateTracker distBuildLogStateTracker;

  /*
   **********************************
   * Tests
   **********************************
   */

  @Before
  public void setUp() throws InterruptedException {
    // Skipped on Windows (path handling in these fixtures is POSIX-oriented).
    assumeTrue(!Platform.detect().equals(Platform.WINDOWS));
    ProjectFilesystem projectFilesystem =
        TestProjectFilesystems.createProjectFilesystem(projectDir.getRoot().toPath());
    // The service is a bare mock: this test drives processStreamLogs directly.
    DistBuildService service = EasyMock.createMock(DistBuildService.class);
    logDir = projectDir.getRoot().toPath().resolve(LOG_DIR);
    distBuildLogStateTracker = new LogStateTracker(logDir, projectFilesystem, service);
  }

  @Test
  public void testStreamsLogsForRuns() throws IOException {
    // Two slave runs; the tracker should request every stream of each run.
    BuildSlaveInfo runOneSlaveInfo = new BuildSlaveInfo();
    BuildSlaveRunId runOneId = new BuildSlaveRunId();
    runOneId.setId(RUN_ONE_ID);
    runOneSlaveInfo.setBuildSlaveRunId(runOneId);

    BuildSlaveInfo runTwoSlaveInfo = new BuildSlaveInfo();
    BuildSlaveRunId runTwoId = new BuildSlaveRunId();
    runTwoId.setId(RUN_TWO_ID);
    runTwoSlaveInfo.setBuildSlaveRunId(runTwoId);

    // runOne has stdErr, and runTwo has stdOut to download.
    List<BuildSlaveInfo> buildSlaveInfos = ImmutableList.of(runOneSlaveInfo, runTwoSlaveInfo);

    List<LogLineBatchRequest> requestsOne =
        distBuildLogStateTracker.createStreamLogRequests(buildSlaveInfos);

    // 2 runs x 2 stream types = 4 requests on the first round.
    assertThat(requestsOne.size(), Matchers.equalTo(4));

    // runOne/stdErr requested starting at batchNumber 0 (nothing seen yet).
    // NOTE(review): the original comments said "from batch 1" while the code
    // checks batchNumber == 0 -- batch numbers in requests appear 0-based
    // before any logs arrive; confirm against LogStateTracker.
    assertTrue(
        requestsOne
            .stream()
            .anyMatch(
                r ->
                    r.slaveStream.buildSlaveRunId.equals(runOneId)
                        && r.slaveStream.streamType.equals(LogStreamType.STDERR)
                        && r.batchNumber == 0));

    // runTwo/stdOut requested starting at batchNumber 0.
    assertTrue(
        requestsOne
            .stream()
            .anyMatch(
                r ->
                    r.slaveStream.buildSlaveRunId.equals(runTwoId)
                        && r.slaveStream.streamType.equals(LogStreamType.STDOUT)
                        && r.batchNumber == 0));

    // Process new logs
    SlaveStream runOneStdErrStream = new SlaveStream();
    runOneStdErrStream.setBuildSlaveRunId(runOneId);
    runOneStdErrStream.setStreamType(LogStreamType.STDERR);

    SlaveStream runOneStdOutStream = new SlaveStream();
    runOneStdOutStream.setBuildSlaveRunId(runOneId);
    runOneStdOutStream.setStreamType(LogStreamType.STDOUT);

    SlaveStream runTwoStdOutStream = new SlaveStream();
    runTwoStdOutStream.setBuildSlaveRunId(runTwoId);
    runTwoStdOutStream.setStreamType(LogStreamType.STDOUT);

    // runOne/stdErr: one batch with two lines.
    StreamLogs runOneStdErrLogs = new StreamLogs();
    runOneStdErrLogs.setSlaveStream(runOneStdErrStream);
    LogLineBatch runOneStdErrLogsBatchOne = new LogLineBatch();
    runOneStdErrLogsBatchOne.setBatchNumber(1);
    runOneStdErrLogsBatchOne.setLines(
        ImmutableList.of("runOneStdErrLine1\n", "runOneStdErrLine2\n"));
    runOneStdErrLogs.setLogLineBatches(ImmutableList.of(runOneStdErrLogsBatchOne));

    // runTwo/stdOut: two batches (1 line + 2 lines).
    StreamLogs runTwoStdOutLogs = new StreamLogs();
    runTwoStdOutLogs.setSlaveStream(runTwoStdOutStream);
    LogLineBatch runTwoStdOutLogsBatchOne = new LogLineBatch();
    runTwoStdOutLogsBatchOne.setBatchNumber(1);
    runTwoStdOutLogsBatchOne.setLines(ImmutableList.of("runTwoStdOutLine1\n"));
    LogLineBatch runTwoStdOutLogsBatchTwo = new LogLineBatch();
    runTwoStdOutLogsBatchTwo.setBatchNumber(2);
    runTwoStdOutLogsBatchTwo.setLines(
        ImmutableList.of("runTwoStdOutLine2\n", "runTwoStdOutLine3\n"));
    runTwoStdOutLogs.setLogLineBatches(
        ImmutableList.of(runTwoStdOutLogsBatchOne, runTwoStdOutLogsBatchTwo));

    List<StreamLogs> streamLogsOne = ImmutableList.of(runOneStdErrLogs, runTwoStdOutLogs);

    distBuildLogStateTracker.processStreamLogs(streamLogsOne);

    // All received lines should have been materialized to the log files.
    assertLogLines(RUN_ONE_STD_ERR_LOG, ImmutableList.of("runOneStdErrLine1", "runOneStdErrLine2"));

    assertLogLines(
        RUN_TWO_STD_OUT_LOG,
        ImmutableList.of("runTwoStdOutLine1", "runTwoStdOutLine2", "runTwoStdOutLine3"));

    // Process new logs: batch 2 of runTwo/stdOut grows by one line; the
    // tracker must append only the new line, not duplicate the old ones.
    runTwoStdOutLogs = new StreamLogs();
    runTwoStdOutLogs.setSlaveStream(runTwoStdOutStream);

    runTwoStdOutLogsBatchTwo = new LogLineBatch();
    runTwoStdOutLogsBatchTwo.setBatchNumber(2);
    runTwoStdOutLogsBatchTwo.setLines(
        ImmutableList.of("runTwoStdOutLine2\n", "runTwoStdOutLine3\n", "runTwoStdOutLine4\n"));

    runTwoStdOutLogs.setLogLineBatches(ImmutableList.of(runTwoStdOutLogsBatchTwo));

    List<StreamLogs> streamLogsTwo = ImmutableList.of(runTwoStdOutLogs);

    distBuildLogStateTracker.processStreamLogs(streamLogsTwo);

    assertLogLines(
        RUN_TWO_STD_OUT_LOG,
        ImmutableList.of(
            "runTwoStdOutLine1", "runTwoStdOutLine2", "runTwoStdOutLine3", "runTwoStdOutLine4"));

    // runOne has stdErr, and runTwo has stdOut to download.
    buildSlaveInfos = ImmutableList.of(runOneSlaveInfo, runTwoSlaveInfo);

    List<LogLineBatchRequest> requestTwo =
        distBuildLogStateTracker.createStreamLogRequests(buildSlaveInfos);

    assertThat(requestTwo.size(), Matchers.equalTo(4));

    // runOne/stdErr resumes from its last seen batch (1).
    assertTrue(
        requestTwo
            .stream()
            .anyMatch(
                r ->
                    r.slaveStream.buildSlaveRunId.equals(runOneId)
                        && r.slaveStream.streamType.equals(LogStreamType.STDERR)
                        && r.batchNumber == 1));

    // runOne/stdOut has received nothing yet, so it still starts at 0.
    assertTrue(
        requestTwo
            .stream()
            .anyMatch(
                r ->
                    r.slaveStream.buildSlaveRunId.equals(runOneId)
                        && r.slaveStream.streamType.equals(LogStreamType.STDOUT)
                        && r.batchNumber == 0));

    // runTwo/stdOut resumes from its last seen batch (2).
    assertTrue(
        requestTwo
            .stream()
            .anyMatch(
                r ->
                    r.slaveStream.buildSlaveRunId.equals(runTwoId)
                        && r.slaveStream.streamType.equals(LogStreamType.STDOUT)
                        && r.batchNumber == 2));

    // Process new logs

    // runOne/stdErr: batch 1 grows by one line and batch 2 appears.
    runOneStdErrLogs = new StreamLogs();
    runOneStdErrLogs.setSlaveStream(runOneStdErrStream);
    runOneStdErrLogsBatchOne = new LogLineBatch();
    runOneStdErrLogsBatchOne.setBatchNumber(1);
    runOneStdErrLogsBatchOne.setLines(
        ImmutableList.of("runOneStdErrLine1\n", "runOneStdErrLine2\n", "runOneStdErrLine3\n"));
    LogLineBatch runOneStdErrLogsBatchTwo = new LogLineBatch();
    runOneStdErrLogsBatchTwo.setBatchNumber(2);
    runOneStdErrLogsBatchTwo.setLines(ImmutableList.of("runOneStdErrLine4\n"));
    runOneStdErrLogs.setLogLineBatches(
        ImmutableList.of(runOneStdErrLogsBatchOne, runOneStdErrLogsBatchTwo));

    // runOne/stdOut: first data for this stream (batches 1 and 2).
    StreamLogs runOneStdOutLogs = new StreamLogs();
    runOneStdOutLogs.setSlaveStream(runOneStdOutStream);
    LogLineBatch runOneStdOutLogsBatchOne = new LogLineBatch();
    runOneStdOutLogsBatchOne.setBatchNumber(1);
    runOneStdOutLogsBatchOne.setLines(
        ImmutableList.of("runOneStdOutLine1\n", "runOneStdOutLine2\n"));
    LogLineBatch runOneStdOutLogsBatchTwo = new LogLineBatch();
    runOneStdOutLogsBatchTwo.setBatchNumber(2);
    runOneStdOutLogsBatchTwo.setLines(
        ImmutableList.of("runOneStdOutLine3\n", "runOneStdOutLine4\n"));
    runOneStdOutLogs.setLogLineBatches(
        ImmutableList.of(runOneStdOutLogsBatchOne, runOneStdOutLogsBatchTwo));

    // runTwo/stdOut: batch 2 repeated (already fully seen) plus new batch 3.
    runTwoStdOutLogs = new StreamLogs();
    runTwoStdOutLogs.setSlaveStream(runTwoStdOutStream);
    runTwoStdOutLogsBatchTwo = new LogLineBatch();
    runTwoStdOutLogsBatchTwo.setBatchNumber(2);
    runTwoStdOutLogsBatchTwo.setLines(
        ImmutableList.of("runTwoStdOutLine2\n", "runTwoStdOutLine3\n", "runTwoStdOutLine4\n"));
    LogLineBatch runTwoStdOutLogsBatchThree = new LogLineBatch();
    runTwoStdOutLogsBatchThree.setBatchNumber(3);
    runTwoStdOutLogsBatchThree.setLines(
        ImmutableList.of("runTwoStdOutLine5\n", "runTwoStdOutLine6\n"));
    runTwoStdOutLogs.setLogLineBatches(
        ImmutableList.of(runTwoStdOutLogsBatchTwo, runTwoStdOutLogsBatchThree));

    List<StreamLogs> streamLogsThree =
        ImmutableList.of(runOneStdErrLogs, runOneStdOutLogs, runTwoStdOutLogs);

    distBuildLogStateTracker.processStreamLogs(streamLogsThree);

    // Each file should now contain the union of all lines, in order, with no
    // duplicates from the re-sent batches.
    assertLogLines(
        RUN_ONE_STD_OUT_LOG,
        ImmutableList.of(
            "runOneStdOutLine1", "runOneStdOutLine2", "runOneStdOutLine3", "runOneStdOutLine4"));

    assertLogLines(
        RUN_ONE_STD_ERR_LOG,
        ImmutableList.of(
            "runOneStdErrLine1", "runOneStdErrLine2", "runOneStdErrLine3", "runOneStdErrLine4"));

    assertLogLines(
        RUN_TWO_STD_OUT_LOG,
        ImmutableList.of(
            "runTwoStdOutLine1",
            "runTwoStdOutLine2",
            "runTwoStdOutLine3",
            "runTwoStdOutLine4",
            "runTwoStdOutLine5",
            "runTwoStdOutLine6"));
  }

  /**
   * Asserts that the file at {@code filePath} (relative to {@code logDir})
   * exists and contains exactly {@code logLines}, in order.
   */
  private void assertLogLines(String filePath, List<String> logLines) throws IOException {
    assertTrue("Log file does not exist: " + filePath, logDir.resolve(filePath).toFile().exists());

    // Files.lines must be closed -- hence the try-with-resources.
    try (Stream<String> stream = Files.lines(logDir.resolve(filePath).toAbsolutePath())) {
      AtomicInteger lineIndex = new AtomicInteger(0);
      stream.forEachOrdered(
          line -> {
            // Fail fast if the file has more lines than expected.
            assertThat(
                "Expected number of log lines lower than actual number.",
                lineIndex.get(),
                Matchers.lessThan(logLines.size()));
            assertThat(logLines.get(lineIndex.get()), Matchers.equalTo(line));
            lineIndex.getAndIncrement();
          });
      // And fail if it has fewer.
      assertThat(
          "Expected number of log lines greater than actual number.",
          lineIndex.get(),
          Matchers.equalTo(logLines.size()));
    }
  }
}
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.find; import com.intellij.find.editorHeaderActions.ContextAwareShortcutProvider; import com.intellij.find.editorHeaderActions.ShowMoreOptions; import com.intellij.find.editorHeaderActions.Utils; import com.intellij.find.editorHeaderActions.VariantsCompletionAction; import com.intellij.icons.AllIcons; import com.intellij.ide.DataManager; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.actionSystem.impl.ActionToolbarImpl; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.editor.impl.EditorHeaderComponent; import com.intellij.openapi.keymap.KeymapUtil; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.BooleanGetter; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.wm.IdeFocusManager; import com.intellij.ui.*; import com.intellij.ui.components.panels.NonOpaquePanel; import com.intellij.ui.components.panels.Wrapper; import com.intellij.ui.speedSearch.SpeedSearchSupply; import com.intellij.util.EventDispatcher; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.ui.JBUI; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.event.DocumentEvent; 
import javax.swing.text.JTextComponent; import java.awt.*; import java.awt.event.*; import java.util.EventListener; import java.util.List; import static java.awt.event.InputEvent.*; public class SearchReplaceComponent extends EditorHeaderComponent implements DataProvider { private final EventDispatcher<Listener> myEventDispatcher = EventDispatcher.create(Listener.class); private final MyTextComponentWrapper mySearchFieldWrapper; private JTextComponent mySearchTextComponent; private final MyTextComponentWrapper myReplaceFieldWrapper; private JTextComponent myReplaceTextComponent; private final JPanel myLeftPanel; private final JPanel myRightPanel; private final DefaultActionGroup mySearchFieldActions; private final ActionToolbarImpl mySearchActionsToolbar1; private final ActionToolbarImpl mySearchActionsToolbar2; private final ActionToolbarImpl.PopupStateModifier mySearchToolbar1PopupStateModifier; private final DefaultActionGroup myReplaceFieldActions; private final ActionToolbarImpl myReplaceActionsToolbar1; private final ActionToolbarImpl myReplaceActionsToolbar2; private final JPanel myReplaceToolbarWrapper; private final Project myProject; private final JComponent myTargetComponent; private final Runnable myCloseAction; private final Runnable myReplaceAction; private final DataProvider myDataProviderDelegate; private boolean myMultilineMode; private String myStatusText = ""; @NotNull public static Builder buildFor(@Nullable Project project, @NotNull JComponent component) { return new Builder(project, component); } private SearchReplaceComponent(@Nullable Project project, @NotNull JComponent targetComponent, @NotNull DefaultActionGroup searchToolbar1Actions, @NotNull final BooleanGetter searchToolbar1ModifiedFlagGetter, @NotNull DefaultActionGroup searchToolbar2Actions, @NotNull DefaultActionGroup searchFieldActions, @NotNull DefaultActionGroup replaceToolbar1Actions, @NotNull DefaultActionGroup replaceToolbar2Actions, @NotNull DefaultActionGroup 
replaceFieldActions, @Nullable Runnable replaceAction, @Nullable Runnable closeAction, @Nullable DataProvider dataProvider) { myProject = project; myTargetComponent = targetComponent; mySearchFieldActions = searchFieldActions; myReplaceFieldActions = replaceFieldActions; myReplaceAction = replaceAction; myCloseAction = closeAction; mySearchToolbar1PopupStateModifier = new ActionToolbarImpl.PopupStateModifier() { @Override public int getModifiedPopupState() { return ActionButtonComponent.PUSHED; } @Override public boolean willModify() { return searchToolbar1ModifiedFlagGetter.get(); } }; mySearchFieldWrapper = new MyTextComponentWrapper() { @Override public void setContent(JComponent wrapped) { super.setContent(wrapped); mySearchTextComponent = unwrapTextComponent(wrapped); } }; myReplaceFieldWrapper = new MyTextComponentWrapper() { @Override public void setContent(JComponent wrapped) { super.setContent(wrapped); myReplaceTextComponent = unwrapTextComponent(wrapped); } }; myLeftPanel = new NonOpaquePanel(new BorderLayout()); myLeftPanel.setBorder(JBUI.Borders.emptyLeft(6)); myLeftPanel.add(mySearchFieldWrapper, BorderLayout.NORTH); myLeftPanel.add(myReplaceFieldWrapper, BorderLayout.SOUTH); mySearchActionsToolbar1 = createSearchToolbar1(searchToolbar1Actions); Wrapper searchToolbarWrapper1 = new NonOpaquePanel(new BorderLayout()); searchToolbarWrapper1.add(mySearchActionsToolbar1, BorderLayout.WEST); mySearchActionsToolbar2 = createSearchToolbar2(searchToolbar2Actions); Wrapper searchToolbarWrapper2 = new Wrapper(mySearchActionsToolbar2); mySearchActionsToolbar2.setBorder(JBUI.Borders.emptyLeft(16)); JPanel searchPair = new NonOpaquePanel(new BorderLayout()).setVerticalSizeReferent(mySearchFieldWrapper); searchPair.add(searchToolbarWrapper1, BorderLayout.WEST); searchPair.add(searchToolbarWrapper2, BorderLayout.CENTER); myReplaceActionsToolbar1 = createReplaceToolbar1(replaceToolbar1Actions); Wrapper replaceToolbarWrapper1 = new 
Wrapper(myReplaceActionsToolbar1).setVerticalSizeReferent(myReplaceFieldWrapper); myReplaceActionsToolbar2 = createReplaceToolbar2(replaceToolbar2Actions); Wrapper replaceToolbarWrapper2 = new Wrapper(myReplaceActionsToolbar2).setVerticalSizeReferent(myReplaceFieldWrapper); myReplaceActionsToolbar2.setBorder(JBUI.Borders.emptyLeft(16)); myReplaceToolbarWrapper = new NonOpaquePanel(new BorderLayout()); myReplaceToolbarWrapper.add(replaceToolbarWrapper1, BorderLayout.WEST); myReplaceToolbarWrapper.add(replaceToolbarWrapper2, BorderLayout.CENTER); searchToolbarWrapper1.setHorizontalSizeReferent(replaceToolbarWrapper1); JLabel closeLabel = new JLabel(null, AllIcons.Actions.Cross, SwingConstants.RIGHT); closeLabel.setBorder(JBUI.Borders.empty(5)); closeLabel.setVerticalAlignment(SwingConstants.TOP); closeLabel.addMouseListener(new MouseAdapter() { @Override public void mousePressed(final MouseEvent e) { close(); } }); closeLabel.setToolTipText("Close search bar (Escape)"); searchPair.add(new Wrapper.North(closeLabel), BorderLayout.EAST); myRightPanel = new NonOpaquePanel(new BorderLayout()); myRightPanel.add(searchPair, BorderLayout.NORTH); myRightPanel.add(myReplaceToolbarWrapper, BorderLayout.CENTER); OnePixelSplitter splitter = new OnePixelSplitter(false, .25F); myRightPanel.setBorder(JBUI.Borders.emptyLeft(6)); splitter.setFirstComponent(myLeftPanel); splitter.setSecondComponent(myRightPanel); splitter.setHonorComponentsMinimumSize(true); splitter.setAndLoadSplitterProportionKey("FindSplitterProportion"); splitter.setOpaque(false); splitter.getDivider().setOpaque(false); add(splitter, BorderLayout.CENTER); update("", "", false, false); // it's assigned after all action updates so that actions don't get access to uninitialized components myDataProviderDelegate = dataProvider; setFocusCycleRoot(true); setFocusTraversalPolicy(new LayoutFocusTraversalPolicy()); } public void resetUndoRedoActions() { UIUtil.resetUndoRedoActions(mySearchTextComponent); 
UIUtil.resetUndoRedoActions(myReplaceTextComponent); } @Override public void removeNotify() { super.removeNotify(); addTextToRecent(mySearchTextComponent); if (myReplaceTextComponent != null) { addTextToRecent(myReplaceTextComponent); } } public void requestFocusInTheSearchFieldAndSelectContent(Project project) { mySearchTextComponent.selectAll(); IdeFocusManager.getInstance(project).requestFocus(mySearchTextComponent, true); if (myReplaceTextComponent != null) { myReplaceTextComponent.selectAll(); } } public void setStatusText(@NotNull String status) { myStatusText = status; } @NotNull public String getStatusText() { return myStatusText; } public void replace() { if (myReplaceAction != null) { myReplaceAction.run(); } } public void close() { if (myCloseAction != null) { myCloseAction.run(); } } public void setRegularBackground() { mySearchTextComponent.setBackground(UIUtil.getTextFieldBackground()); } public void setNotFoundBackground() { mySearchTextComponent.setBackground(LightColors.RED); } @Override public Insets getInsets() { Insets insets = super.getInsets(); if (UIUtil.isUnderGTKLookAndFeel() || UIUtil.isUnderNimbusLookAndFeel()) { insets.top += 1; insets.bottom += 2; } return insets; } @Nullable @Override public Object getData(@NonNls String dataId) { if (SpeedSearchSupply.SPEED_SEARCH_CURRENT_QUERY.is(dataId)) { return mySearchTextComponent.getText(); } return myDataProviderDelegate != null ? 
// NOTE(review): fragment — the enclosing SearchReplaceComponent class header and the start of
// the method below (a DataProvider getData-style lookup) are outside this chunk; the first
// line is the tail of its ternary expression.
myDataProviderDelegate.getData(dataId) : null;
}

// Project this search/replace bar belongs to.
public Project getProject() {
  return myProject;
}

// Subscribes a listener to search/replace document and multiline-state events.
public void addListener(@NotNull Listener listener) {
  myEventDispatcher.addListener(listener);
}

// Whether the component is currently in multiline editing mode.
public boolean isMultiline() {
  return myMultilineMode;
}

// Updates the multiline flag, firing multilineStateChanged() only on an actual change.
private void setMultilineInternal(boolean multiline) {
  boolean stateChanged = multiline != myMultilineMode;
  myMultilineMode = multiline;
  if (stateChanged) {
    multilineStateChanged();
  }
}

@NotNull
public JTextComponent getSearchTextComponent() {
  return mySearchTextComponent;
}

@NotNull
public JTextComponent getReplaceTextComponent() {
  return myReplaceTextComponent;
}

// (Re)initializes the search field. If the field already exists, only the text is updated;
// otherwise the freshly created component gets its document/keyboard listeners wired here.
private void updateSearchComponent(@NotNull String textToSet) {
  if (!updateTextComponent(true)) {
    String existingText = mySearchTextComponent.getText();
    if (!existingText.equals(textToSet)) {
      mySearchTextComponent.setText(textToSet);
      // textToSet should be selected even if we have no selection before (if we have the selection then setText will remain it)
      if (existingText.length() == 0) mySearchTextComponent.selectAll();
    }
    return;
  }
  mySearchTextComponent.getDocument().addDocumentListener(new DocumentAdapter() {
    @Override
    protected void textChanged(DocumentEvent e) {
      // invokeLater: let the document mutation finish before notifying listeners
      ApplicationManager.getApplication().invokeLater(() -> searchFieldDocumentChanged());
    }
  });
  // Cmd/Ctrl-Enter: empty search closes the bar; otherwise focus returns to the target
  // component and the query is recorded in the recent-searches history.
  mySearchTextComponent.registerKeyboardAction(new ActionListener() {
    @Override
    public void actionPerformed(final ActionEvent e) {
      if (StringUtil.isEmpty(mySearchTextComponent.getText())) {
        close();
      }
      else {
        IdeFocusManager.getInstance(myProject).requestFocus(myTargetComponent, true);
        addTextToRecent(mySearchTextComponent);
      }
    }
  }, KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, SystemInfo.isMac ? META_DOWN_MASK : CTRL_DOWN_MASK), JComponent.WHEN_FOCUSED);
  new VariantsCompletionAction(mySearchTextComponent); // It registers a shortcut set automatically on construction
}

// (Re)initializes the replace field, mirroring updateSearchComponent(String).
private void updateReplaceComponent(@NotNull String textToSet) {
  if (!updateTextComponent(false)) {
    String existingText = myReplaceTextComponent.getText();
    if (!existingText.equals(textToSet)) {
      myReplaceTextComponent.setText(textToSet);
      if (existingText.length() == 0) myReplaceTextComponent.selectAll();
    }
    return;
  }
  myReplaceTextComponent.setText(textToSet);
  myReplaceTextComponent.getDocument().addDocumentListener(new DocumentAdapter() {
    @Override
    protected void textChanged(DocumentEvent e) {
      ApplicationManager.getApplication().invokeLater(() -> replaceFieldDocumentChanged());
    }
  });
  // In single-line mode plain Enter triggers the replace action directly.
  if (!isMultiline()) {
    installReplaceOnEnterAction(myReplaceTextComponent);
  }
  new VariantsCompletionAction(myReplaceTextComponent);
  myReplaceFieldWrapper.revalidate();
  myReplaceFieldWrapper.repaint();
}

// Applies a new search/replace state (texts, replace-mode visibility, multiline mode),
// restoring keyboard focus to whichever field had it before the update.
public void update(@NotNull String findText, @NotNull String replaceText, boolean replaceMode, boolean multiline) {
  setMultilineInternal(multiline);
  boolean needToResetSearchFocus = mySearchTextComponent != null && mySearchTextComponent.hasFocus();
  boolean needToResetReplaceFocus = myReplaceTextComponent != null && myReplaceTextComponent.hasFocus();
  updateSearchComponent(findText);
  updateReplaceComponent(replaceText);
  if (replaceMode) {
    if (myReplaceFieldWrapper.getParent() == null) {
      myLeftPanel.add(myReplaceFieldWrapper, BorderLayout.CENTER);
    }
    if (myReplaceToolbarWrapper.getParent() == null) {
      myRightPanel.add(myReplaceToolbarWrapper, BorderLayout.CENTER);
    }
    if (needToResetReplaceFocus) {
      myReplaceTextComponent.requestFocusInWindow();
    }
  }
  else {
    if (myReplaceFieldWrapper.getParent() != null) {
      myLeftPanel.remove(myReplaceFieldWrapper);
    }
    if (myReplaceToolbarWrapper.getParent() != null) {
      myRightPanel.remove(myReplaceToolbarWrapper);
    }
  }
  if (needToResetSearchFocus) mySearchTextComponent.requestFocusInWindow();
  updateBindings();
  updateActions();
  revalidate();
  repaint();
}

// Forces all four toolbars to refresh their action presentations immediately.
public void updateActions() {
  mySearchActionsToolbar1.updateActionsImmediately();
  mySearchActionsToolbar2.updateActionsImmediately();
  myReplaceActionsToolbar1.updateActionsImmediately();
  myReplaceActionsToolbar2.updateActionsImmediately();
}

// Records the field's current (non-empty) text in the project-level find/replace history.
public void addTextToRecent(@NotNull JTextComponent textField) {
  final String text = textField.getText();
  if (text.length() > 0) {
    FindInProjectSettings findInProjectSettings = FindInProjectSettings.getInstance(myProject);
    if (textField == mySearchTextComponent) {
      findInProjectSettings.addStringToFind(text);
      if (mySearchFieldWrapper.getTargetComponent() instanceof SearchTextField) {
        ((SearchTextField)mySearchFieldWrapper.getTargetComponent()).addCurrentTextToHistory();
      }
    }
    else {
      findInProjectSettings.addStringToReplace(text);
      if (myReplaceFieldWrapper.getTargetComponent() instanceof SearchTextField) {
        ((SearchTextField)myReplaceFieldWrapper.getTargetComponent()).addCurrentTextToHistory();
      }
    }
  }
}

// Lazily creates the search (search == true) or replace text component inside its wrapper.
// Returns false when the component already existed and nothing was created.
private boolean updateTextComponent(boolean search) {
  JTextComponent oldComponent = search ? mySearchTextComponent : myReplaceTextComponent;
  if (oldComponent != null) return false;
  final MyTextComponentWrapper wrapper = search ? mySearchFieldWrapper : myReplaceFieldWrapper;
  final JTextComponent textComponent;
  SearchTextArea textArea = new SearchTextArea(search);
  textComponent = textArea.getTextArea();
  ((JTextArea)textComponent).setRows(isMultiline() ? 2 : 1);
  wrapper.setContent(textArea);
  UIUtil.addUndoRedoActions(textComponent);
  textComponent.putClientProperty("AuxEditorComponent", Boolean.TRUE);
  textComponent.setBackground(UIUtil.getTextFieldBackground());
  textComponent.addFocusListener(new FocusListener() {
    @Override
    public void focusGained(final FocusEvent e) {
      textComponent.repaint();
    }

    @Override
    public void focusLost(final FocusEvent e) {
      textComponent.repaint();
    }
  });
  installCloseOnEscapeAction(textComponent);
  return true;
}

private void searchFieldDocumentChanged() {
  if (mySearchTextComponent instanceof JTextArea) {
    adjustRows((JTextArea)mySearchTextComponent);
  }
  myEventDispatcher.getMulticaster().searchFieldDocumentChanged();
}

private void replaceFieldDocumentChanged() {
  if (myReplaceTextComponent instanceof JTextArea) {
    adjustRows((JTextArea)myReplaceTextComponent);
  }
  myReplaceActionsToolbar2.invalidate();
  doLayout();
  myEventDispatcher.getMulticaster().replaceFieldDocumentChanged();
}

private void multilineStateChanged() {
  myEventDispatcher.getMulticaster().multilineStateChanged();
}

// Keeps a field between 1 and 3 visible rows depending on the number of newlines in it.
private static void adjustRows(@NotNull JTextArea area) {
  area.setRows(Math.max(1, Math.min(3, StringUtil.countChars(area.getText(), '\n') + 1)));
}

// Escape closes the bar; Ctrl-G does the same under Emacs keymaps.
private void installCloseOnEscapeAction(@NotNull JTextComponent c) {
  ActionListener action = new ActionListener() {
    @Override
    public void actionPerformed(ActionEvent e) {
      close();
    }
  };
  c.registerKeyboardAction(action, KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0), JComponent.WHEN_FOCUSED);
  if (KeymapUtil.isEmacsKeymap()) {
    c.registerKeyboardAction(action, KeyStroke.getKeyStroke(KeyEvent.VK_G, CTRL_MASK), JComponent.WHEN_FOCUSED);
  }
}

// Plain Enter in the replace field triggers the replace action (single-line mode only; see updateReplaceComponent).
private void installReplaceOnEnterAction(@NotNull JTextComponent c) {
  ActionListener action = new ActionListener() {
    @Override
    public void actionPerformed(ActionEvent e) {
      replace();
    }
  };
  c.registerKeyboardAction(action, KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, 0), JComponent.WHEN_FOCUSED);
}

// Re-registers every action's shortcut set against the current field/toolbar components.
private void updateBindings() {
  updateBindings(mySearchFieldActions, mySearchFieldWrapper);
  updateBindings(mySearchActionsToolbar1, mySearchFieldWrapper);
  updateBindings(mySearchActionsToolbar2, mySearchFieldWrapper);
  updateBindings(myReplaceFieldActions, myReplaceFieldWrapper);
  updateBindings(myReplaceActionsToolbar1, myReplaceToolbarWrapper);
  updateBindings(myReplaceActionsToolbar2, myReplaceToolbarWrapper);
}

private void updateBindings(@NotNull DefaultActionGroup group, @NotNull JComponent shortcutHolder) {
  updateBindings(ContainerUtil.immutableList(group.getChildActionsOrStubs()), shortcutHolder);
}

private void updateBindings(@NotNull ActionToolbarImpl toolbar, @NotNull JComponent shortcutHolder) {
  updateBindings(toolbar.getActions(), shortcutHolder);
}

// For each action that can provide a shortcut (context-aware or plain), binds that
// shortcut to the given holder component.
private void updateBindings(@NotNull List<? extends AnAction> actions, @NotNull JComponent shortcutHolder) {
  DataContext context = DataManager.getInstance().getDataContext(this);
  for (AnAction action : actions) {
    ShortcutSet shortcut = null;
    if (action instanceof ContextAwareShortcutProvider) {
      shortcut = ((ContextAwareShortcutProvider)action).getShortcut(context);
    }
    else if (action instanceof ShortcutProvider) {
      shortcut = ((ShortcutProvider)action).getShortcut();
    }
    if (shortcut != null) {
      action.registerCustomShortcutSet(shortcut, shortcutHolder);
    }
  }
}

// Primary search toolbar: fixed minimum size plus a filter-icon secondary-actions popup.
@NotNull
private ActionToolbarImpl createSearchToolbar1(@NotNull DefaultActionGroup group) {
  ActionToolbarImpl toolbar = createToolbar(group);
  toolbar.setForceMinimumSize(true);
  toolbar.setReservePlaceAutoPopupIcon(false);
  toolbar.setSecondaryButtonPopupStateModifier(mySearchToolbar1PopupStateModifier);
  toolbar.setSecondaryActionsTooltip("More Options(" + KeymapUtil.getShortcutText(ShowMoreOptions.SHORT_CUT) + ")");
  toolbar.setSecondaryActionsIcon(AllIcons.General.Filter);
  new ShowMoreOptions(toolbar, mySearchFieldWrapper);
  return toolbar;
}

@NotNull
private ActionToolbarImpl createSearchToolbar2(@NotNull DefaultActionGroup group) {
  return createToolbar(group);
}

@NotNull
private ActionToolbarImpl createReplaceToolbar1(@NotNull DefaultActionGroup group) {
  ActionToolbarImpl toolbar = createToolbar(group);
  toolbar.setForceMinimumSize(true);
  toolbar.setReservePlaceAutoPopupIcon(false);
  return toolbar;
}

@NotNull
private ActionToolbarImpl createReplaceToolbar2(@NotNull DefaultActionGroup group) {
  return createToolbar(group);
}

@NotNull
private ActionToolbarImpl createToolbar(@NotNull ActionGroup group) {
  return tweakToolbar((ActionToolbarImpl)ActionManager.getInstance().createActionToolbar(ActionPlaces.EDITOR_TOOLBAR, group, true));
}

// Common cosmetic setup shared by all four toolbars.
@NotNull
private ActionToolbarImpl tweakToolbar(@NotNull ActionToolbarImpl toolbar) {
  toolbar.setTargetComponent(this);
  toolbar.setLayoutPolicy(ActionToolbar.AUTO_LAYOUT_POLICY);
  toolbar.setBorder(null);
  Utils.setSmallerFontForChildren(toolbar);
  return toolbar;
}

// Event sink for document changes in the two fields and for multiline-mode switches.
public interface Listener extends EventListener {
  void searchFieldDocumentChanged();

  void replaceFieldDocumentChanged();

  void multilineStateChanged();
}

// Fluent builder collecting action groups and callbacks before constructing the component.
public static class Builder {
  private final Project myProject;
  private final JComponent myTargetComponent;

  private DataProvider myDataProvider;

  private Runnable myReplaceAction;
  private Runnable myCloseAction;

  private DefaultActionGroup mySearchActions = new DefaultActionGroup("search bar 1", false);
  private DefaultActionGroup myExtraSearchActions = new DefaultActionGroup("search bar 2", false);
  private DefaultActionGroup mySearchFieldActions = new DefaultActionGroup("search field actions", false);
  private BooleanGetter mySearchToolbarModifiedFlagGetter = BooleanGetter.FALSE;

  private DefaultActionGroup myReplaceActions = new DefaultActionGroup("replace bar 1", false);
  // NOTE(review): group name "replace bar 1" duplicates the group above — looks like a
  // copy/paste slip ("replace bar 2" expected); left unchanged here as it is a runtime string.
  private DefaultActionGroup myExtraReplaceActions = new DefaultActionGroup("replace bar 1", false);
  private DefaultActionGroup myReplaceFieldActions = new DefaultActionGroup("replace field actions", false);

  private Builder(@Nullable Project project, @NotNull JComponent component) {
    myProject = project;
    myTargetComponent = component;
  }

  @NotNull
  public Builder withDataProvider(@NotNull DataProvider provider) {
    myDataProvider = provider;
    return this;
  }

  @NotNull
  public Builder withReplaceAction(@NotNull Runnable action) {
    myReplaceAction = action;
    return this;
  }

  @NotNull
  public Builder withCloseAction(@NotNull Runnable action) {
    myCloseAction = action;
    return this;
  }

  @NotNull
  public Builder addSearchFieldActions(@NotNull AnAction... actions) {
    mySearchFieldActions.addAll(actions);
    return this;
  }

  @NotNull
  public Builder addReplaceFieldActions(@NotNull AnAction... actions) {
    myReplaceFieldActions.addAll(actions);
    return this;
  }

  @NotNull
  public Builder addPrimarySearchActions(@NotNull AnAction... actions) {
    mySearchActions.addAll(actions);
    return this;
  }

  @NotNull
  public Builder addSecondarySearchActions(@NotNull AnAction... actions) {
    for (AnAction action : actions) {
      mySearchActions.addAction(action).setAsSecondary(true);
    }
    return this;
  }

  @NotNull
  public Builder withSecondarySearchActionsIsModifiedGetter(@NotNull BooleanGetter getter) {
    mySearchToolbarModifiedFlagGetter = getter;
    return this;
  }

  @NotNull
  public Builder addExtraSearchActions(@NotNull AnAction... actions) {
    myExtraSearchActions.addAll(actions);
    return this;
  }

  @NotNull
  public Builder addPrimaryReplaceActions(@NotNull AnAction... actions) {
    myReplaceActions.addAll(actions);
    return this;
  }

  @NotNull
  public Builder addExtraReplaceAction(@NotNull AnAction... actions) {
    myExtraReplaceActions.addAll(actions);
    return this;
  }

  @NotNull
  public SearchReplaceComponent build() {
    return new SearchReplaceComponent(myProject,
                                      myTargetComponent,
                                      mySearchActions,
                                      mySearchToolbarModifiedFlagGetter,
                                      myExtraSearchActions,
                                      mySearchFieldActions,
                                      myReplaceActions,
                                      myExtraReplaceActions,
                                      myReplaceFieldActions,
                                      myReplaceAction,
                                      myCloseAction,
                                      myDataProvider);
  }
}

// Wrapper hosting either a SearchTextField or a SearchTextArea, exposing its text editor.
private static class MyTextComponentWrapper extends Wrapper {
  @Nullable
  public JTextComponent getTextComponent() {
    JComponent wrapped = getTargetComponent();
    return wrapped != null ? unwrapTextComponent(wrapped) : null;
  }

  @NotNull
  protected static JTextComponent unwrapTextComponent(@NotNull JComponent wrapped) {
    if (wrapped instanceof SearchTextField) {
      return ((SearchTextField)wrapped).getTextEditor();
    }
    if (wrapped instanceof SearchTextArea) {
      return ((SearchTextArea)wrapped).getTextArea();
    }
    throw new AssertionError();
  }
}
}
/******************************************************************************* * Copyright (c) Intel Corporation * Copyright (c) 2017 * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *******************************************************************************/ package org.osc.core.broker.rest.client.openstack.vmidc.notification.listener; import java.util.List; import javax.persistence.EntityManager; import org.osc.core.broker.job.lock.LockObjectReference; import org.osc.core.broker.model.entities.BaseEntity; import org.osc.core.broker.model.entities.events.SystemFailureType; import org.osc.core.broker.model.entities.virtualization.SecurityGroup; import org.osc.core.broker.model.entities.virtualization.VirtualizationConnector; import org.osc.core.broker.model.entities.virtualization.openstack.DeploymentSpec; import org.osc.core.broker.model.entities.virtualization.openstack.VM; import org.osc.core.broker.rest.client.openstack.discovery.VmDiscoveryCache; import org.osc.core.broker.rest.client.openstack.discovery.VmDiscoveryCache.VmInfo; import org.osc.core.broker.rest.client.openstack.vmidc.notification.OsNotificationKeyType; import org.osc.core.broker.rest.client.openstack.vmidc.notification.OsNotificationObjectType; import org.osc.core.broker.rest.client.openstack.vmidc.notification.OsNotificationUtil; import org.osc.core.broker.rest.client.openstack.vmidc.notification.runner.RabbitMQRunner; import org.osc.core.broker.service.DeploymentSpecConformJobFactory; import 
org.osc.core.broker.service.SecurityGroupConformJobFactory;
import org.osc.core.broker.service.alert.AlertGenerator;
import org.osc.core.broker.service.api.RestConstants;
import org.osc.core.broker.service.persistence.SecurityGroupEntityMgr;
import org.osc.core.broker.service.persistence.VMEntityManager;
import org.osc.core.broker.util.SessionUtil;
import org.osc.core.broker.util.db.DBConnectionManager;
import org.osgi.service.transaction.control.ScopedWorkException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Listens for OpenStack VM notifications (create/delete/power-off/resize-confirm-end)
 * and triggers the appropriate conformance job depending on whether this listener is
 * tied to a {@link SecurityGroup} or a {@link DeploymentSpec}.
 */
public class OsVMNotificationListener extends OsNotificationListener {

    private static final Logger log = LoggerFactory.getLogger(OsVMNotificationListener.class);

    /** Key used to extract the region from an incoming notification message. */
    private static final String REGION_NOTIFICATION_KEY = "region";

    private final DeploymentSpecConformJobFactory dsConformJobFactory;
    private final SecurityGroupConformJobFactory sgConformJobFactory;
    private final AlertGenerator alertGenerator;
    private final DBConnectionManager dbMgr;

    // Note: the super call always registers this listener for the VM object type; the
    // objectType parameter is only forwarded to register(vc, objectType).
    public OsVMNotificationListener(VirtualizationConnector vc, OsNotificationObjectType objectType,
            List<String> objectIdList, BaseEntity entity, DeploymentSpecConformJobFactory dsConformJobFactory,
            SecurityGroupConformJobFactory sgConformJobFactory, AlertGenerator alertGenerator,
            RabbitMQRunner activeRunner, DBConnectionManager dbMgr) {
        super(vc, OsNotificationObjectType.VM, objectIdList, entity, activeRunner);
        this.dsConformJobFactory = dsConformJobFactory;
        this.sgConformJobFactory = sgConformJobFactory;
        this.alertGenerator = alertGenerator;
        this.dbMgr = dbMgr;
        register(vc, objectType);
    }

    /**
     * Entry point for raw notification messages. Filters for relevant event types and
     * instance ids, then dispatches to the SG or DS handler inside a transaction.
     * Any failure is logged and converted into a system-failure alert (never rethrown).
     */
    @Override
    public void onMessage(String message) {
        String eventType = OsNotificationUtil.getEventTypeFromMessage(message);
        if (eventType.contains(OsNotificationEventState.CREATE.toString())
                || eventType.contains(OsNotificationEventState.DELETE.toString())
                || eventType.contains(OsNotificationEventState.POWER_OFF.toString())
                || eventType.contains(OsNotificationEventState.RESIZE_CONFIRM_END.toString())) {
            // Null when the message's instance id is not one this listener watches.
            String vmOpenstackId = OsNotificationUtil.isMessageRelevant(message, this.objectIdList,
                    OsNotificationKeyType.INSTANCE_ID.toString());
            if (vmOpenstackId != null) {
                SessionUtil.getInstance().setUser(RestConstants.OSC_DEFAULT_LOGIN);
                log.info(" [Instance] : message received - " + message);
                try {
                    this.dbMgr.getTransactionControl().required(() -> {
                        if (this.entity instanceof SecurityGroup) {
                            // if the listener is tied to SG then handle SG messages
                            // (power-off is irrelevant for SG membership)
                            if (!eventType.contains(OsNotificationEventState.POWER_OFF.toString())) {
                                handleSGMessages(vmOpenstackId, message);
                            }
                        } else if (this.entity instanceof DeploymentSpec) {
                            // if the listener is tied to a DAI which belongs to a DS then handle DAI messages.
                            // If DAI/SVA is migrated, deleted or powered off then trigger DS sync.
                            if (!eventType.contains(OsNotificationEventState.CREATE.toString())) {
                                handleDAIMessages(vmOpenstackId, eventType, message);
                            }
                        }
                        return null;
                    });
                } catch (Exception e) {
                    log.error("Fail to process Openstack VM (" + vmOpenstackId + ") notification - "
                            + this.vc.getControllerIpAddress(), e);
                    this.alertGenerator.processSystemFailureEvent(SystemFailureType.OS_NOTIFICATION_FAILURE,
                            LockObjectReference.getLockObjectReference(this.entity, new LockObjectReference(this.vc)),
                            "Fail to process Openstack VM (" + vmOpenstackId + ") notification (" + e.getMessage()
                                    + ")");
                }
            }
        }
    }

    /**
     * Security-group path: a deleted VM triggers a plain SG conformance job; a migrated VM
     * triggers an SG conformance job inside a transaction with a freshly loaded entity.
     */
    private void handleSGMessages(String vmOpenstackId, String message) throws Exception {
        SecurityGroup securityGroup = (SecurityGroup) this.entity;
        // if the VM change is part of this Security Group...
        if (!isVmMigrated(vmOpenstackId, message)) {
            /*
             * If VM is not migrated then it is deleted; we must trigger a SG Sync
             */
            this.sgConformJobFactory.startSecurityGroupConformanceJob(securityGroup);
        } else {
            /*
             * VM is migrated.
             * Queue SG Sync first
             */
            try {
                // open a new Hibernate Session
                EntityManager em = this.dbMgr.getTransactionalEntityManager();
                // begin transaction
                this.dbMgr.getTransactionControl().required(() -> {
                    // load this entity from database to avoid any lazy loading issues
                    SecurityGroup sg = SecurityGroupEntityMgr.findById(em, securityGroup.getId());
                    // iterate through all SGI -> DDS mappings to trigger required DDS Sync
                    return this.sgConformJobFactory.startSecurityGroupConformanceJob(em, sg, null, true);
                });
            } catch (ScopedWorkException e) {
                log.error("Failed to check if VM openstack Id - " + vmOpenstackId + " is migrated or not!",
                        e.getCause());
                throw e.as(Exception.class);
            } catch (Exception e) {
                log.error("Failed to check if VM openstack Id - " + vmOpenstackId + " is migrated or not!", e);
                throw e;
            }
        }
    }

    /**
     * Deployment-spec (DAI) path: resize-confirm-end only syncs when the VM actually moved
     * hosts; delete/power-off always triggers a DS conformance job.
     */
    private void handleDAIMessages(String vmOpenstackId, String eventType, String message) throws Exception {
        if (eventType.contains(OsNotificationEventState.RESIZE_CONFIRM_END.toString())) {
            if (isVmMigrated(vmOpenstackId, message)) {
                // When someone migrates a DAI we trigger a sync job to fix this issue
                this.dsConformJobFactory.startDsConformanceJob((DeploymentSpec) this.entity, null);
            }
        } else {
            // DAI is either powered off or deleted. We must trigger sync for this
            this.dsConformJobFactory.startDsConformanceJob((DeploymentSpec) this.entity, null);
        }
    }

    /**
     * Verifies the VM host reported by OpenStack against the one recorded in our database
     * to determine whether the VM actually moved to a different host.
     * <p>
     * Steps: discover the VM via the provider cache, look up our DB record, compare hosts.
     *
     * @param vmOpenstackId
     *            VM OpenStack ID from the received notification
     * @param message
     *            raw notification message (used to extract the region)
     * @return true if the discovered host differs from the DB host (VM migrated);
     *         false if the hosts match (VM merely resized) or either side cannot be resolved
     * @throws Exception
     *             if discovery or the DB transaction fails
     */
    private boolean isVmMigrated(String vmOpenstackId, String message) throws Exception {
        try {
            VmDiscoveryCache vmCache = new VmDiscoveryCache(this.vc, this.vc.getProviderAdminProjectName());

            // parse Region from incoming Notification message
            String region = OsNotificationUtil.getPropertyFromNotificationMessage(message, REGION_NOTIFICATION_KEY);
            VmInfo vmInfo = vmCache.discover(region, vmOpenstackId);
            if (vmInfo == null) {
                log.error("Got VM notification and checking VM migration but Failed to discover VM openstack Id - '"
                        + vmOpenstackId + "'");
                return false;
            }
            EntityManager em = this.dbMgr.getTransactionalEntityManager();
            return this.dbMgr.getTransactionControl().required(() -> {
                VM vm = VMEntityManager.findByOpenstackId(em, vmOpenstackId);
                if (vm == null) {
                    // BUGFIX(review): message previously read "but find this VM in our DB",
                    // i.e. the negation was missing — it logged the opposite of the failure.
                    log.error("Got VM notification and checking VM migration but could not find this VM in our DB. openstack Id - '"
                            + vmOpenstackId + "'");
                    return false;
                }
                // if the migrated VM host is same as what we have in the database then VM was resized and not Migrated
                return !vm.getHost().equals(vmInfo.getHost());
            });
        } catch (ScopedWorkException e) {
            log.error("Failed to check if VM openstack Id - " + vmOpenstackId + " is migrated or not!", e.getCause());
            throw e.as(Exception.class);
        } catch (Exception e) {
            log.error("Failed to check if VM openstack Id - " + vmOpenstackId + " is migrated or not!", e);
            throw e;
        }
    }
}
package com.planet_ink.coffee_mud.WebMacros; import com.planet_ink.coffee_mud.core.exceptions.CMException; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Libraries.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import com.planet_ink.coffee_web.interfaces.*; import java.util.*; /* Copyright 2008-2022 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/
/**
 * Web macro backing the web-based recipe editor for crafting (Craftor) abilities.
 * Dispatches on its macro parms (SAVETOVFS, ROWTABLE, ADDROW, SAVEROW, DELROW, TABLE)
 * to render or mutate the recipe table for the ability named by the ABILITY url parameter.
 */
public class AbilityRecipeData extends StdWebMacro
{
	@Override
	public String name()
	{
		return "AbilityRecipeData";
	}

	// valid parms include help, ranges, quality, target, alignment, domain,
	// qualifyQ, auto
	@Override
	public String runMacro(final HTTPRequest httpReq, final String parm, final HTTPResponse httpResp)
	{
		final java.util.Map<String,String> parms=parseParms(parm);
		// A REPLACE url parameter of the form FIELD=VALUE overrides that url parameter in-place,
		// then clears itself so it is applied only once.
		final String replaceCommand=httpReq.getUrlParameter("REPLACE");
		if((replaceCommand != null)
		&& (replaceCommand.length()>0)
		&& (replaceCommand.indexOf('=')>0))
		{
			final int eq=replaceCommand.indexOf('=');
			final String field=replaceCommand.substring(0,eq);
			final String value=replaceCommand.substring(eq+1);
			httpReq.addFakeUrlParameter(field, value);
			httpReq.addFakeUrlParameter("REPLACE","");
		}
		final String last=httpReq.getUrlParameter("ABILITY");
		if(last==null)
			return " @break@";
		final String rownum=httpReq.getUrlParameter("ABILITYRECIPEROW");
		if(last.length()>0)
		{
			// Only abilities that are CraftorAbility with a recipe parameters file qualify.
			final Ability A=CMClass.getAbility(last);
			if((A!=null)
			&&(A instanceof CraftorAbility)
			&&(((CraftorAbility)A).parametersFile()!=null)
			&&(((CraftorAbility)A).parametersFile().length()>0)
			&&(((CraftorAbility)A).parametersFormat()!=null)
			&&(((CraftorAbility)A).parametersFormat().length()>0))
			{
				// Parsed recipe data is cached per-ability on the request objects map.
				AbilityParameters.AbilityRecipeData recipeData = (AbilityParameters.AbilityRecipeData)httpReq.getRequestObjects().get("ABILITYRECIPEDATA-"+last);
				if(recipeData == null)
				{
					recipeData = CMLib.ableParms().parseRecipe(((CraftorAbility)A).parametersFile(),((CraftorAbility)A).parametersFormat());
					if(recipeData.parseError() != null)
					{
						Log.errOut(ID(),recipeData.parseError());
						return " @break@";
					}
					httpReq.getRequestObjects().put("ABILITYRECIPEDATA-"+last,recipeData);
				}
				final StringBuffer str=new StringBuffer("");
				// Optional FONT / HFONT parms wrap cell and header text in <FONT> tags.
				final String sfont=(parms.containsKey("FONT"))?("<FONT "+(parms.get("FONT"))+">"):"";
				final String efont=(parms.containsKey("FONT"))?"</FONT>":"";
				final String hsfont=(parms.containsKey("HFONT"))?("<FONT "+(parms.get("HFONT"))+">"):"";
				final String hefont=(parms.containsKey("HFONT"))?"</FONT>":"";
				if(parms.containsKey("SAVETOVFS"))
				{
					// Emits "CHECKED" for the save-to-VFS checkbox, preferring the url parameter
					// over the recipe's stored origin.
					if(httpReq.isUrlParameter("SAVETOVFS"))
						str.append(CMath.s_bool(httpReq.getUrlParameter("SAVETOVFS"))?"CHECKED":"");
					else
						str.append(recipeData.wasVFS()?"CHECKED":"");
				}
				else
				if(parms.containsKey("ROWTABLE")&&(CMath.isInteger(rownum)))
				{
					// Renders the edit form for a single recipe row (row 0 / out of range = new row).
					final int row = CMath.s_int(rownum);
					DVector dataRow = null;
					final int classFieldIndex = recipeData.getClassFieldIndex();
					if((row>0)&&((row-1)<recipeData.dataRows().size()))
						dataRow = recipeData.dataRows().get(row-1);
					else
						dataRow=recipeData.newRow(httpReq.getUrlParameter("CLASSFIELD"));
					str.append("\n\r<TABLE WIDTH=100% BORDER=1 CELLSPACING=0 CELLPADDING=0>");
					for(int c=0;c<dataRow.size();c++)
					{
						final AbilityParameters.AbilityParmEditor editor = CMLib.ableParms().getEditors().get(dataRow.elementAt(c,1));
						final String oldVal = (String)dataRow.elementAt(c,2);
						if(!editor.ID().equalsIgnoreCase("N_A"))
						{
							str.append("\n\r<TR>");
							str.append("<TD WIDTH=20%>" + hsfont + editor.prompt() + hefont + "</TD>");
							// the class field is displayed read-only; all others are editable fields
							if(c==classFieldIndex)
								str.append("<TD>" + sfont + editor.webValue(httpReq,parms,oldVal,"DATA_"+row+"_"+c) + efont + "</TD>");
							else
								str.append("<TD>" + sfont + editor.webField(httpReq,parms,oldVal,"DATA_"+row+"_"+c) + efont + "</TD>");
							str.append("</TR>");
						}
					}
					str.append("\n\r</TABLE>");
					if(classFieldIndex>=0)
					{
						final String oldVal = (String)dataRow.elementAt(classFieldIndex,2);
						final AbilityParameters.AbilityParmEditor editor = CMLib.ableParms().getEditors().get(dataRow.elementAt(classFieldIndex,1));
						str.append("<INPUT TYPE=HIDDEN NAME=CLASSFIELD VALUE=\""+editor.webValue(httpReq,parms,oldVal,"CLASSFIELD")+"\">");
					}
				}
				else
				if(parms.containsKey("ADDROW"))
				{
					// Renders the class-field selector used when adding a brand-new recipe row.
					AbilityParameters.AbilityParmEditor classFieldEditor = null;
					final int cfIndex = recipeData.getClassFieldIndex();
					if(recipeData.dataRows().size()==0)
					{
						// no rows yet: derive the class-field editor from the column definitions
						final DVector editRow = new DVector(2);
						for(int c=0;c<recipeData.columns().size();c++)
						{
							if(recipeData.columns().get(c) instanceof List)
								editRow.addElement(recipeData.columns().get(c),"");
						}
						@SuppressWarnings("unchecked")
						final List<String> o=(List<String>)editRow.elementAt(cfIndex,1);
						classFieldEditor = CMLib.ableParms().getEditors().get(o.get(0).toString());
					}
					else
					// otherwise take the class-field editor from the (last) existing row
					for(int row=0;row<recipeData.dataRows().size();row++)
					{
						if(cfIndex>=0)
						{
							final DVector dataRow = recipeData.dataRows().get(row);
							classFieldEditor = CMLib.ableParms().getEditors().get(dataRow.elementAt(cfIndex,1));
						}
					}
					if(classFieldEditor != null)
						str.append(classFieldEditor.webField(httpReq,parms,classFieldEditor.defaultValue(),"NEWCLASSFIELD"));
				}
				else
				if(parms.containsKey("SAVEROW")&&(CMath.isInteger(rownum)))
				{
					// Persists one edited (or newly added) row back to the recipe file;
					// requires an authenticated MOB with the CMDRECIPES security flag.
					DVector dataRow = null;
					final int row = CMath.s_int(rownum);
					if((row-1>=0)&&(row-1<recipeData.dataRows().size()))
						dataRow = recipeData.dataRows().get(row-1);
					else
					{
						dataRow=recipeData.newRow(httpReq.getUrlParameter("CLASSFIELD"));
						recipeData.dataRows().add(dataRow);
					}
					for(int c=0;c<dataRow.size();c++)
					{
						final AbilityParameters.AbilityParmEditor editor = CMLib.ableParms().getEditors().get(dataRow.elementAt(c,1));
						final String oldVal = (String)dataRow.elementAt(c,2);
						String newVal = editor.webValue(httpReq,parms,oldVal,"DATA_"+row+"_"+c);
						// single quotes would break the recipe file format; map them to backticks
						if(newVal != null)
							newVal = newVal.replace('\'', '`');
						if(!editor.confirmValue(newVal))
							return L("The value for field "+editor.colHeader()+" is invalid.");
						dataRow.setElementAt(c,2,newVal);
					}
					final MOB M = Authenticate.getAuthenticatedMob(httpReq);
					if(M==null)
						return " @break@";
					final boolean saveToVFS = CMath.s_bool(httpReq.getUrlParameter("SAVETOVFS"));
					if(CMSecurity.isAllowedAnywhere(M,CMSecurity.SecFlag.CMDRECIPES))
						CMLib.ableParms().resaveRecipeFile(M,recipeData.recipeFilename(),recipeData.dataRows(),recipeData.columns(), saveToVFS);
					else
						return " @break@";
				}
				else
				if(parms.containsKey("DELROW")&&(CMath.isInteger(rownum)))
				{
					// Removes the given row and re-saves the recipe file (same security gate as SAVEROW).
					final int row = CMath.s_int(rownum);
					if((row-1>=0)&&(row-1<recipeData.dataRows().size()))
						recipeData.dataRows().remove(row-1);
					else
						return " @break@";
					final MOB M = Authenticate.getAuthenticatedMob(httpReq);
					if(M==null)
						return " @break@";
					final boolean saveToVFS = CMath.s_bool(httpReq.getUrlParameter("SAVETOVFS"));
					if(CMSecurity.isAllowedAnywhere(M,CMSecurity.SecFlag.CMDRECIPES))
						CMLib.ableParms().resaveRecipeFile(M,recipeData.recipeFilename(),recipeData.dataRows(),recipeData.columns(), saveToVFS);
					else
						return " @break@";
				}
				else
				if(parms.containsKey("TABLE"))
				{
					// Renders the full read-only recipe table with clickable row numbers.
					str.append("\n\r<TABLE WIDTH=100% BORDER=1 CELLSPACING=0 CELLPADDING=0>");
					//int currLenTotal = 0;
					//for(int l=0;l<recipeData.columnLengths().length;l++)
					//	currLenTotal+=recipeData.columnLengths()[l];
					str.append("\n\r<TR>");
					str.append("<TD WIDTH=1%>" + hsfont + "#" + hefont + "</TD>");
					for(int c=0;c<recipeData.columnHeaders().length;c++)
					{
						// column widths are proportional to the recipe file's column lengths
						str.append("<TD WIDTH="+Math.round(CMath.div(recipeData.columnLengths()[c],72) * 100.0)+"%>");
						str.append(hsfont + recipeData.columnHeaders()[c] + hefont);
						str.append("</TD>");
					}
					str.append("</TR>");
					for(int r=0;r<recipeData.dataRows().size();r++)
					{
						final DVector dataRow = recipeData.dataRows().get(r);
						str.append("\n\r<TR>");
						str.append("<TD>");
						str.append("<A HREF=\"javascript:Select("+(r+1)+")\">" + sfont + "<B><FONT COLOR=YELLOW>"+(r+1)+"</FONT></B>");
						str.append(efont + "</A>");
						str.append("</TD>");
						for(int c=0;c<dataRow.size();c++)
						{
							str.append("<TD>" + sfont);
							String val = (String)dataRow.elementAt(c,2);
							final AbilityParameters.AbilityParmEditor editor = CMLib.ableParms().getEditors().get(dataRow.elementAt(c,1));
							val = editor.webTableField(httpReq, parms, val);
							// clip long cell values to a width derived from the column length
							int width=(int)Math.round(CMath.div(recipeData.columnLengths()[c],36) * 100.0);
							if(width<2)
								width=2;
							str.append(CMStrings.limit(val,width));
							str.append(efont + "</TD>");
						}
						str.append("</A></TR>");
					}
					str.append("\n\r</TABLE>");
				}
				String strstr=str.toString();
				// trim a trailing ", " left over by list-style emitters
				if(strstr.endsWith(", "))
					strstr=strstr.substring(0,strstr.length()-2);
				return clearWebMacros(strstr);
			}
		}
		return "";
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ // For unit tests @see TestCookieManager package org.apache.jmeter.protocol.http.control; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; import java.io.Serializable; import java.net.URL; import java.util.ArrayList; import org.apache.http.client.config.CookieSpecs; import org.apache.jmeter.config.ConfigTestElement; import org.apache.jmeter.engine.event.LoopIterationEvent; import org.apache.jmeter.testelement.TestIterationListener; import org.apache.jmeter.testelement.TestStateListener; import org.apache.jmeter.testelement.property.BooleanProperty; import org.apache.jmeter.testelement.property.CollectionProperty; import org.apache.jmeter.testelement.property.JMeterProperty; import org.apache.jmeter.testelement.property.PropertyIterator; import org.apache.jmeter.threads.JMeterContext; import org.apache.jmeter.util.JMeterUtils; import org.apache.jorphan.reflect.ClassTools; import org.apache.jorphan.util.JMeterException; import org.apache.jorphan.util.JOrphanUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class provides an interface to the netscape cookies file 
to pass cookies
 * along with a request.
 * <p>
 * Cookies are stored in a {@link CollectionProperty}; the per-thread clones share the
 * GUI-defined initial cookie list and the resolved {@link CookieHandler} instance.
 */
public class CookieManager extends ConfigTestElement implements TestStateListener, TestIterationListener, Serializable {

    private static final long serialVersionUID = 234L;

    private static final Logger log = LoggerFactory.getLogger(CookieManager.class);

    //++ JMX tag values
    private static final String CLEAR = "CookieManager.clearEachIteration";// $NON-NLS-1$

    private static final String COOKIES = "CookieManager.cookies";// $NON-NLS-1$

    private static final String POLICY = "CookieManager.policy"; //$NON-NLS-1$

    private static final String IMPLEMENTATION = "CookieManager.implementation"; //$NON-NLS-1$
    //-- JMX tag values

    // Column separator for the cookie file format written by save() and read by addFile()
    private static final String TAB = "\t"; //$NON-NLS-1$

    // See bug 33796
    private static final boolean DELETE_NULL_COOKIES =
            JMeterUtils.getPropDefault("CookieManager.delete_null_cookies", true);// $NON-NLS-1$

    // See bug 28715
    // Package protected for tests
    static final boolean ALLOW_VARIABLE_COOKIES =
            JMeterUtils.getPropDefault("CookieManager.allow_variable_cookies", true);// $NON-NLS-1$

    // Prefix for JMeter variables created from cookies when SAVE_COOKIES is enabled (see add())
    private static final String COOKIE_NAME_PREFIX =
            JMeterUtils.getPropDefault("CookieManager.name.prefix", "COOKIE_").trim();// $NON-NLS-1$ $NON-NLS-2$

    private static final boolean SAVE_COOKIES =
            JMeterUtils.getPropDefault("CookieManager.save.cookies", false);// $NON-NLS-1$

    private static final boolean CHECK_COOKIES =
            JMeterUtils.getPropDefault("CookieManager.check.cookies", true);// $NON-NLS-1$

    static {
        log.info("Settings: Delete null: {} Check: {} Allow variable: {} Save: {} Prefix: {}",
                DELETE_NULL_COOKIES, CHECK_COOKIES, ALLOW_VARIABLE_COOKIES,
                SAVE_COOKIES, COOKIE_NAME_PREFIX);
    }

    // Parsing/matching strategy; resolved from getImplementation() in testStarted()
    private transient CookieHandler cookieHandler;

    // Snapshot of the pre-defined cookies, restored each iteration when CLEAR is set
    private transient CollectionProperty initialCookies;

    /**
     * Defines the policy that is assumed when the JMX file does not contain an entry for it
     * MUST NOT BE CHANGED otherwise JMX files will not be correctly interpreted
     * <p>
     * The default policy for new CookieManager elements is defined by
     * {@link org.apache.jmeter.protocol.http.gui.CookiePanel#DEFAULT_POLICY CookiePanel#DEFAULT_POLICY}
     *
     */
    private static final String DEFAULT_POLICY = CookieSpecs.STANDARD;

    /**
     * Defines the implementation that is assumed when the JMX file does not contain an entry for it
     * MUST NOT BE CHANGED otherwise JMX files will not be correctly interpreted
     * <p>
     * The default implementation for new CookieManager elements is defined by
     * {@link org.apache.jmeter.protocol.http.gui.CookiePanel#DEFAULT_IMPLEMENTATION CookiePanel#DEFAULT_IMPLEMENTATION}
     *
     */
    private static final String DEFAULT_IMPLEMENTATION = HC4CookieHandler.class.getName();

    public CookieManager() {
        clearCookies(); // Ensure that there is always a collection available
    }

    // ensure that the initial cookies are copied to the per-thread instances
    /** {@inheritDoc} */
    @Override
    public Object clone(){
        CookieManager clone = (CookieManager) super.clone();
        // Deliberately shared (not deep-copied): all threads read the same initial list/handler
        clone.initialCookies = initialCookies;
        clone.cookieHandler = cookieHandler;
        return clone;
    }

    /** @return the configured cookie policy name, defaulting to {@link CookieSpecs#STANDARD} */
    public String getPolicy() {
        return getPropertyAsString(POLICY, DEFAULT_POLICY);
    }

    public void setCookiePolicy(String policy){
        setProperty(POLICY, policy, DEFAULT_POLICY);
    }

    /** @return the stored cookies as a {@link CollectionProperty} of {@link Cookie} values */
    public CollectionProperty getCookies() {
        return (CollectionProperty) getProperty(COOKIES);
    }

    public int getCookieCount() {// Used by GUI
        return getCookies().size();
    }

    public boolean getClearEachIteration() {
        return getPropertyAsBoolean(CLEAR);
    }

    public void setClearEachIteration(boolean clear) {
        setProperty(new BooleanProperty(CLEAR, clear));
    }

    /** @return the CookieHandler implementation class name, defaulting to {@link HC4CookieHandler} */
    public String getImplementation() {
        return getPropertyAsString(IMPLEMENTATION, DEFAULT_IMPLEMENTATION);
    }

    public void setImplementation(String implementation){
        setProperty(IMPLEMENTATION, implementation, DEFAULT_IMPLEMENTATION);
    }

    /**
     * Save the static cookie data to a file.
     * <p>
     * Cookies are only taken from the GUI - runtime cookies are not included.
     *
     * @param authFile
     *            name of the file to store the cookies into. If the name is
     *            relative, the system property <code>user.dir</code> will be
     *            prepended
     * @throws IOException
     *             when writing to that file fails
     */
    public void save(String authFile) throws IOException {
        File file = new File(authFile);
        if (!file.isAbsolute()) {
            file = new File(System.getProperty("user.dir") // $NON-NLS-1$
                    + File.separator + authFile);
        }
        // NOTE(review): FileWriter uses the platform default charset — TODO Charset ?
        try(PrintWriter writer = new PrintWriter(new FileWriter(file))) { // TODO Charset ?
            writer.println("# JMeter generated Cookie file");// $NON-NLS-1$
            long now = System.currentTimeMillis();
            for (JMeterProperty jMeterProperty : getCookies()) {
                Cookie cook = (Cookie) jMeterProperty.getObjectValue();
                final long expiresMillis = cook.getExpiresMillis();
                if (expiresMillis == 0 || expiresMillis > now) { // only save unexpired cookies
                    writer.println(cookieToString(cook));
                }
            }
            writer.flush();
        }
    }

    /**
     * Add cookie data from a file.
     *
     * @param cookieFile
     *            name of the file to read the cookies from. If the name is
     *            relative, the system property <code>user.dir</code> will be
     *            prepended
     * @throws IOException
     *             if reading the file fails
     */
    public void addFile(String cookieFile) throws IOException {
        File file = new File(cookieFile);
        if (!file.isAbsolute()) {
            file = new File(System.getProperty("user.dir") // $NON-NLS-1$
                    + File.separator + cookieFile);
        }
        // NOTE(review): manual try/finally instead of try-with-resources;
        // FileReader uses the platform default charset — TODO Charset ?
        BufferedReader reader = null;
        if (file.canRead()) {
            reader = new BufferedReader(new FileReader(file)); // TODO Charset ?
        } else {
            throw new IOException("The file you specified cannot be read.");
        }

        // N.B. this must agree with the save() and cookieToString() methods
        String line;
        try {
            final CollectionProperty cookies = getCookies();
            while ((line = reader.readLine()) != null) {
                try {
                    if (line.startsWith("#") || JOrphanUtils.isBlank(line)) {//$NON-NLS-1$
                        continue; // skip comments and blank lines
                    }
                    String[] st = JOrphanUtils.split(line, TAB, false);

                    // Expected file layout (tab-separated, matching cookieToString()):
                    final int _domain = 0;
                    //final int _ignored = 1;
                    final int _path = 2;
                    final int _secure = 3;
                    final int _expires = 4;
                    final int _name = 5;
                    final int _value = 6;
                    final int _fields = 7;
                    if (st.length!=_fields) {
                        throw new IOException("Expected "+_fields+" fields, found "+st.length+" in "+line);
                    }

                    if (st[_path].length()==0) {
                        st[_path] = "/"; //$NON-NLS-1$
                    }
                    boolean secure = Boolean.parseBoolean(st[_secure]);
                    long expires = Long.parseLong(st[_expires]);
                    if (expires==Long.MAX_VALUE) {
                        expires=0;
                    }
                    //long max was used to represent a non-expiring cookie, but that caused problems
                    Cookie cookie = new Cookie(st[_name], st[_value], st[_domain], st[_path], secure, expires);
                    cookies.addItem(cookie);
                } catch (NumberFormatException e) {
                    throw new IOException("Error parsing cookie line\n\t'" + line + "'\n\t" + e);
                }
            }
        } finally {
            reader.close();
        }
    }

    /**
     * Serialise one cookie in the tab-separated format consumed by addFile().
     */
    private String cookieToString(Cookie c){
        StringBuilder sb=new StringBuilder(80);
        sb.append(c.getDomain());
        //flag - if all machines within a given domain can access the variable.
        //(from http://www.cookiecentral.com/faq/ 3.5)
        sb.append(TAB).append("TRUE");
        sb.append(TAB).append(c.getPath());
        sb.append(TAB).append(JOrphanUtils.booleanToSTRING(c.getSecure()));
        sb.append(TAB).append(c.getExpires());
        sb.append(TAB).append(c.getName());
        sb.append(TAB).append(c.getValue());
        return sb.toString();
    }

    /** {@inheritDoc} */
    @Override
    public void recoverRunningVersion() {
        // do nothing, the cookie manager has to accept changes.
    }

    /** {@inheritDoc} */
    @Override
    public void setRunningVersion(boolean running) {
        // do nothing, the cookie manager has to accept changes.
    }

    /**
     * Add a cookie.
     *
     * @param c cookie to be added
     */
    public void add(Cookie c) {
        String cv = c.getValue();
        String cn = c.getName();
        removeMatchingCookies(c); // Can't have two matching cookies

        if (DELETE_NULL_COOKIES && (null == cv || cv.length()==0)) {
            if (log.isDebugEnabled()) {
                log.debug("Dropping cookie with null value {}", c.toString());
            }
        } else {
            if (log.isDebugEnabled()) {
                log.debug("Add cookie to store {}", c.toString());
            }
            getCookies().addItem(c);
            // Optionally expose the cookie value as a JMeter variable (COOKIE_<name>)
            if (SAVE_COOKIES) {
                JMeterContext context = getThreadContext();
                if (context.isSamplingStarted()) {
                    context.getVariables().put(COOKIE_NAME_PREFIX+cn, cv);
                }
            }
        }
    }

    /** {@inheritDoc} */
    @Override
    public void clear(){
        super.clear();
        clearCookies(); // ensure data is set up OK initially
    }

    /*
     * Remove all the cookies.
     */
    private void clearCookies() {
        log.debug("Clear all cookies from store");
        setProperty(new CollectionProperty(COOKIES, new ArrayList<>()));
    }

    /**
     * Remove a cookie.
     *
     * @param index index of the cookie to remove
     */
    public void remove(int index) {// TODO not used by GUI
        getCookies().remove(index);
    }

    /**
     * Return the cookie at index i.
     *
     * @param i index of the cookie to get
     * @return cookie at index <code>i</code>
     */
    public Cookie get(int i) {// Only used by GUI
        return (Cookie) getCookies().get(i).getObjectValue();
    }

    /**
     * Find cookies applicable to the given URL and build the Cookie header from
     * them.
     *
     * @param url
     *            URL of the request to which the returned header will be added.
     * @return the value string for the cookie header (goes after "Cookie: ").
     */
    public String getCookieHeaderForURL(URL url) {
        return cookieHandler.getCookieHeaderForURL(getCookies(), url, ALLOW_VARIABLE_COOKIES);
    }

    /** Parse a Set-Cookie style header from a response and store the resulting cookie(s). */
    public void addCookieFromHeader(String cookieHeader, URL url){
        cookieHandler.addCookieFromHeader(this, CHECK_COOKIES, cookieHeader, url);
    }

    /**
     * Check if cookies match, i.e. name, path and domain are equal.
     * <br/>
     * TODO - should we compare secure too?
     * @param a first cookie
     * @param b second cookie
     * @return true if cookies match
     */
    private boolean match(Cookie a, Cookie b){
        return a.getName().equals(b.getName())
                && a.getPath().equals(b.getPath())
                && a.getDomain().equals(b.getDomain());
    }

    // Remove any stored cookie with the same name/path/domain as newCookie
    void removeMatchingCookies(Cookie newCookie){
        // Scan for any matching cookies
        PropertyIterator iter = getCookies().iterator();
        while (iter.hasNext()) {
            Cookie cookie = (Cookie) iter.next().getObjectValue();
            if (cookie == null) {// TODO is this possible?
                continue;
            }
            if (match(cookie,newCookie)) {
                if (log.isDebugEnabled()) {
                    log.debug("New Cookie = {} removing matching Cookie {}",
                            newCookie.toString(), cookie.toString());
                }
                iter.remove(); // safe removal while iterating
            }
        }
    }

    /** {@inheritDoc} */
    @Override
    public void testStarted() {
        initialCookies = getCookies();
        try {
            cookieHandler = (CookieHandler) ClassTools.construct(getImplementation(), getPolicy());
        } catch (JMeterException e) {
            // NOTE(review): failure leaves cookieHandler null; later header calls would NPE — confirm intended
            log.error("Unable to load or invoke class: {}", getImplementation(), e);
        }
        if (log.isDebugEnabled()){
            log.debug("Policy: {} Clear: {}", getPolicy(), getClearEachIteration());
        }
    }

    /** {@inheritDoc} */
    @Override
    public void testEnded() {
    }

    /** {@inheritDoc} */
    @Override
    public void testStarted(String host) {
        testStarted();
    }

    /** {@inheritDoc} */
    @Override
    public void testEnded(String host) {
    }

    /** {@inheritDoc} */
    @Override
    public void testIterationStart(LoopIterationEvent event) {
        if (getClearEachIteration()) {
            log.debug("Initialise cookies from pre-defined list");
            // No need to call clear
            setProperty(initialCookies.clone());
        }
    }

    /**
     * Package protected for tests
     * @return the cookieHandler
     */
    CookieHandler getCookieHandler() {
        return cookieHandler;
    }
}
package team2485.auto; import team2485.auto.sequenceditems.*; import team2485.comp.IntakeArm; /** * The sequencer factory instantiates all robot sequences * * @author Marty Kausas * @author Bryce Matsumori * @author Anoushka Bose * @author Camille Considine */ public class SequencerFactory { public static final int // Autonomous options NONE = -1, // just move forward FORWARD = 0, ONE_BALL_LEFT = 3, // if hot, shoot, not - wait to shoot, move forward, stay, start on right ONE_BALL_RIGHT = 4, TWO_BALL_NO_HOT = 14, // two balls, move forward TWO_BALL_HOT = 12, // three balls, move forward THREE_BALL = 13, THREE_BALL_HOT = 15, // Shot options TARGET_SHOT = 0, TRUSS_SHOT = 1, BOOT = 2, FORWARD_PASS = 3, POWER_HIGH_SHOT = 4, TARGET_SHOT_WITHOUT_RETRACTION = 5, MIDRANGE_SHOT_THREE_CYLINDER = 6, MIDRANGE_SHOT_TWO_CYLINDER = 7, OVER_TRUSS_CATCH = 8, BLOOP_SHOT = 9; public static final double TARGET_FLIP_PAUSE_TIME = 0.2, OPERATOR_WAIT_TIME = 1.8; public static final double RETRACT_EXTEND_TIME = 0.7; /** * Creates the requested autonomous sequence. * * @param type The autonomous type. * @return The created {@code Sequencer}. */ public static Sequencer createAuto(int type) { switch (type) { case NONE: return new Sequencer(); // Starting position from anywhere on the field case FORWARD: return new Sequencer(new SequencedItem[] { new Drive(45) }); // Aligned on left or right side case ONE_BALL_LEFT: case ONE_BALL_RIGHT: return new Sequencer(new SequencedItem[] { new SequencedMultipleItem(new SequencedItem[] { new SequencedPause(OPERATOR_WAIT_TIME), // wait until the Operator has shown card new MoveArmNoWait(IntakeArm.IN_CATAPULT - 150), new FullyExtendShoe(), new Drive(45), }), new WaitForTarget(), new SequencedPause(0.7), new WaitForHot(type == ONE_BALL_LEFT ? 
WaitForHot.LEFT : WaitForHot.RIGHT), new DisableArmPID(), new InnerSequencer(SequencerFactory.createShot(SequencerFactory.TARGET_SHOT)), new SequencedPause(1), new Drive(60) }); case TWO_BALL_NO_HOT: return new Sequencer(new SequencedItem[] { new MoveArm(IntakeArm.PICKUP - 150), new InnerSequencer(SequencerFactory.createShot(SequencerFactory.TARGET_SHOT_WITHOUT_RETRACTION)), new SequencedMultipleItem(new SequencedItem[] { new MoveArm(IntakeArm.PICKUP, false), new RetractShooter(), new WaitForBallToLeave(), }), new SequencedPause(0.2), new SequencedMultipleItem(new SequencedItem[] { new FullyRetractShoe(), new MoveArm(IntakeArm.PICKUP, true) }), new DetectBallInCatapult(), new SequencedPause(0.1), new SequencedMultipleItem(new SequencedItem[] { new MoveArm(IntakeArm.IN_CATAPULT - 25), new StopRollers() }), new DisableArmPID(), new SequencedPause(0.6), // settle time new InnerSequencer(SequencerFactory.createShot(SequencerFactory.TARGET_SHOT_WITHOUT_RETRACTION)), // second shot new SequencedMultipleItem(new SequencedItem[] { new RetractShooter(), new WaitForBallToLeave(), new FullyRetractShoe(), new Drive(65) }), new DisableEncoderPID() }); // from left case TWO_BALL_HOT: return new Sequencer(new SequencedItem[] { new FullyExtendShoe(), new MoveArmNoWait(IntakeArm.IN_CATAPULT - 100, false), new Drive(45), new DisableEncoderPID(), new WaitForTarget(), new TurnToTarget(), new DisableIMUPID(), new SequencedPause(0.6), new InnerSequencer(SequencerFactory.createShot(SequencerFactory.TARGET_SHOT_WITHOUT_RETRACTION)), new SequencedMultipleItem(new SequencedItem[] { new RetractShooter(), new WaitForBallToLeave(), new FullyRetractShoe(), new MoveArm(IntakeArm.PICKUP, true), new TurnToZero() }), new DisableIMUPID(), new SequencedPause(0.15), new Drive(-5), new DisableEncoderPID(), new DetectBallInCatapult(), new SequencedMultipleItem(new SequencedItem[] { new MoveArm(IntakeArm.IN_CATAPULT - 100), new StopRollers(), }), new SequencedMultipleItem(new SequencedItem[] { new 
Drive(50), new FullyExtendShoe(), }), new DisableEncoderPID(), new TurnToOtherTarget(), new DisableIMUPID(), new SequencedPause(0.6), new InnerSequencer(SequencerFactory.createShot(SequencerFactory.TARGET_SHOT)), new RetractShooter() }); case THREE_BALL: return new Sequencer(new SequencedItem[] { new SequencedMultipleItem(new SequencedItem[] { new MoveArmNoWait(IntakeArm.PICKUP - 100, false), new SequencedPause(0.4) }), new DisableArmPID(), new SequencedPause(0.8), new InnerSequencer(SequencerFactory.createShot(SequencerFactory.TARGET_SHOT_WITHOUT_RETRACTION)), new SequencedMultipleItem(new SequencedItem[] { new MoveArm(IntakeArm.PICKUP, false), new RetractShooter(), new WaitForBallToLeave(), }), new SequencedMultipleItem(new SequencedItem[] { new FullyRetractShoe(), new MoveArm(IntakeArm.PICKUP, true) }), new DetectBallInCatapult(), new SequencedPause(0.1), new SequencedMultipleItem(new SequencedItem[] { new MoveArm(IntakeArm.IN_CATAPULT - 25), new StopRollers(), }), new DisableArmPID(), new SequencedPause(0.5), // settle time new InnerSequencer(SequencerFactory.createShot(SequencerFactory.TARGET_SHOT_WITHOUT_RETRACTION)), // second shot new SequencedMultipleItem(new SequencedItem[] { new RetractShooter(), new MoveArm(IntakeArm.PICKUP) }), new SequencedMultipleItem(new SequencedItem[] { new FullyRetractShoe(), new Drive(-24), }), new DetectBallInCatapult(), new SequencedMultipleItem(new SequencedItem[] { new MoveArm(IntakeArm.IN_CATAPULT - 25), new StopRollers(), new Drive(50) }), new DisableArmPID(), new SequencedPause(0.6), // settle time new InnerSequencer(SequencerFactory.createShot(SequencerFactory.TARGET_SHOT_WITHOUT_RETRACTION)), // third shot new SequencedMultipleItem(new SequencedItem[] { new RetractShooter(), new WaitForBallToLeave() }), new FullyRetractShoe(), new DisableEncoderPID() }); case THREE_BALL_HOT: return new Sequencer(new SequencedItem[] { new SequencedMultipleItem(new SequencedItem[] { new SequencedPause(TARGET_FLIP_PAUSE_TIME), // wait 
until the targets have flipped new FullyExtendShoe(), new SetLowGear(), new ExtendCatcher(), new MoveArmNoWait(IntakeArm.PICKUP - 100) }), new TurnToTarget(), new DisableIMUPID(), new SequencedMultipleItem(new SequencedItem[] { new SequencedPause(0.8), new DisableArmPID(), }), new InnerSequencer(SequencerFactory.createShot(SequencerFactory.TARGET_SHOT_WITHOUT_RETRACTION)), new SequencedMultipleItem(new SequencedItem[] { new MoveArmNoWait(IntakeArm.PICKUP, false), new RetractShooter(), new WaitForBallToLeave(), new FullyRetractShoe(), }), new SequencedPause(1), new MoveArm(IntakeArm.PICKUP, true), new DetectBallInCatapult(), new SequencedMultipleItem(new SequencedItem[] { new MoveArmNoWait(IntakeArm.PICKUP - 100, false), new StopRollers(), new FullyExtendShoe(), }), new TurnToOtherTarget(), new DisableIMUPID(), new SequencedPause(0.8), // settle time new InnerSequencer(SequencerFactory.createShot(SequencerFactory.TARGET_SHOT_WITHOUT_RETRACTION)), new SequencedMultipleItem(new SequencedItem[] { new RetractShooter(), new WaitForBallToLeave(), new FullyRetractShoe(), }), new SequencedPause(0.8), new MoveArm(IntakeArm.PICKUP, true), new DetectBallInCatapult(), new SequencedMultipleItem(new SequencedItem[] { new StopRollers(), new FullyExtendShoe() }), new SequencedPause(0.8), // settle time new InnerSequencer(SequencerFactory.createShot(SequencerFactory.TARGET_SHOT_WITHOUT_RETRACTION)), new SequencedMultipleItem(new SequencedItem[] { new SetHighGear(), new Drive(80), new RetractShooter(), new WaitForBallToLeave(), new FullyRetractShoe() }), new DisableEncoderPID() }); default: return new Sequencer(); // return an empty sequence } } private static final double AUTO_TRUSS_DRIVE_DIST = 96.0; public static Sequencer createAutoTrussShot() { return new Sequencer(new SequencedItem[] { new InnerSequencer(createAuto(TRUSS_SHOT)), new Rotate(180), // turn to catch? new Drive(AUTO_TRUSS_DRIVE_DIST) }); } /** * Creates the requested shot sequence. * * @param type The shot type. 
* @return The created {@code Sequencer}. */ public static Sequencer createShot(int type) { switch (type) { case POWER_HIGH_SHOT: return new Sequencer(new SequencedItem[] { new FullyRetractShoe(), new ExtendThreePistons(), new RetractShooter(), new FullyRetractShoe() }); case TRUSS_SHOT: return new Sequencer(new SequencedItem[] { new ExtendShoeShortPiston(), new ExtendTwoPistons(), new RetractShooter(), new FullyRetractShoe() }); case TARGET_SHOT: return new Sequencer(new SequencedItem[] { new FullyExtendShoe(), new ExtendThreePistons(), new RetractShooter(), new FullyRetractShoe() }); case BOOT: return new Sequencer(new SequencedItem[] { new ExtendBoot(), new RetractBoot() }); case FORWARD_PASS: return new Sequencer(new SequencedItem[] { new FullyExtendShoe(), new ExtendRightSidePiston(), new RetractShooter(), new FullyRetractShoe() }); case TARGET_SHOT_WITHOUT_RETRACTION: return new Sequencer(new SequencedItem[] { new FullyExtendShoe(), new ExtendThreePistons(), new Print("time during shot = ") }); case MIDRANGE_SHOT_THREE_CYLINDER: return new Sequencer(new SequencedItem[] { new ExtendShoeLongPiston(), new ExtendThreePistons(), new RetractShooter(), new FullyRetractShoe() }); case MIDRANGE_SHOT_TWO_CYLINDER: return new Sequencer(new SequencedItem[] { new ExtendShoeLongPiston(), new ExtendTwoPistons(), new RetractShooter(), new FullyRetractShoe() }); case OVER_TRUSS_CATCH: return new Sequencer(new SequencedItem[] { new FullyRetractShoe(), new ExtendTwoPistons(), new RetractShooter(), new FullyRetractShoe() }); case BLOOP_SHOT: return new Sequencer(new SequencedItem[] { new FullyRetractShoe(), new ExtendOnePiston(), new RetractShooter(), }); default: return new Sequencer(); // return an empty sequence } } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.LongArrayList; import com.carrotsearch.hppc.cursors.IntObjectCursor; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.Version; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.Diffable; import org.elasticsearch.cluster.DiffableUtils; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.node.DiscoveryNodeFilters; import org.elasticsearch.cluster.routing.allocation.IndexMetaDataUpdater; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.collect.ImmutableOpenIntMap; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Setting; import 
org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.common.xcontent.FromXContentBuilder; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.gateway.MetaDataStateFormat; import org.elasticsearch.index.Index; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import java.io.IOException; import java.text.ParseException; import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.function.Function; import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.AND; import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.OR; import static org.elasticsearch.common.settings.Settings.readSettingsFromStream; import static org.elasticsearch.common.settings.Settings.writeSettingsToStream; public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuilder<IndexMetaData>, ToXContent { public interface Custom extends Diffable<Custom>, ToXContent { String type(); Custom fromMap(Map<String, Object> map) throws IOException; Custom fromXContent(XContentParser parser) throws IOException; /** * Merges from this to another, with this being more important, i.e., if something exists in this and another, * this will prevail. 
*/ Custom mergeWith(Custom another); } public static Map<String, Custom> customPrototypes = new HashMap<>(); /** * Register a custom index meta data factory. Make sure to call it from a static block. */ public static void registerPrototype(String type, Custom proto) { customPrototypes.put(type, proto); } @Nullable public static <T extends Custom> T lookupPrototype(String type) { //noinspection unchecked return (T) customPrototypes.get(type); } public static <T extends Custom> T lookupPrototypeSafe(String type) { //noinspection unchecked T proto = (T) customPrototypes.get(type); if (proto == null) { throw new IllegalArgumentException("No custom metadata prototype registered for type [" + type + "]"); } return proto; } public static final ClusterBlock INDEX_READ_ONLY_BLOCK = new ClusterBlock(5, "index read-only (api)", false, false, RestStatus.FORBIDDEN, EnumSet.of(ClusterBlockLevel.WRITE, ClusterBlockLevel.METADATA_WRITE)); public static final ClusterBlock INDEX_READ_BLOCK = new ClusterBlock(7, "index read (api)", false, false, RestStatus.FORBIDDEN, EnumSet.of(ClusterBlockLevel.READ)); public static final ClusterBlock INDEX_WRITE_BLOCK = new ClusterBlock(8, "index write (api)", false, false, RestStatus.FORBIDDEN, EnumSet.of(ClusterBlockLevel.WRITE)); public static final ClusterBlock INDEX_METADATA_BLOCK = new ClusterBlock(9, "index metadata (api)", false, false, RestStatus.FORBIDDEN, EnumSet.of(ClusterBlockLevel.METADATA_WRITE, ClusterBlockLevel.METADATA_READ)); public static enum State { OPEN((byte) 0), CLOSE((byte) 1); private final byte id; State(byte id) { this.id = id; } public byte id() { return this.id; } public static State fromId(byte id) { if (id == 0) { return OPEN; } else if (id == 1) { return CLOSE; } throw new IllegalStateException("No state match for id [" + id + "]"); } public static State fromString(String state) { if ("open".equals(state)) { return OPEN; } else if ("close".equals(state)) { return CLOSE; } throw new IllegalStateException("No state 
match for [" + state + "]"); } } static Setting<Integer> buildNumberOfShardsSetting() { /* This is a safety limit that should only be exceeded in very rare and special cases. The assumption is that * 99% of the users have less than 1024 shards per index. We also make it a hard check that requires restart of nodes * if a cluster should allow to create more than 1024 shards per index. NOTE: this does not limit the number of shards per cluster. * this also prevents creating stuff like a new index with millions of shards by accident which essentially kills the entire cluster * with OOM on the spot.*/ final int maxNumShards = Integer.parseInt(System.getProperty("es.index.max_number_of_shards", "1024")); if (maxNumShards < 1) { throw new IllegalArgumentException("es.index.max_number_of_shards must be > 0"); } return Setting.intSetting(SETTING_NUMBER_OF_SHARDS, Math.min(5, maxNumShards), 1, maxNumShards, Property.IndexScope); } public static final String INDEX_SETTING_PREFIX = "index."; public static final String SETTING_NUMBER_OF_SHARDS = "index.number_of_shards"; public static final Setting<Integer> INDEX_NUMBER_OF_SHARDS_SETTING = buildNumberOfShardsSetting(); public static final String SETTING_NUMBER_OF_REPLICAS = "index.number_of_replicas"; public static final Setting<Integer> INDEX_NUMBER_OF_REPLICAS_SETTING = Setting.intSetting(SETTING_NUMBER_OF_REPLICAS, 1, 0, Property.Dynamic, Property.IndexScope); public static final String SETTING_SHADOW_REPLICAS = "index.shadow_replicas"; public static final Setting<Boolean> INDEX_SHADOW_REPLICAS_SETTING = Setting.boolSetting(SETTING_SHADOW_REPLICAS, false, Property.IndexScope); public static final String SETTING_SHARED_FILESYSTEM = "index.shared_filesystem"; public static final Setting<Boolean> INDEX_SHARED_FILESYSTEM_SETTING = Setting.boolSetting(SETTING_SHARED_FILESYSTEM, false, Property.IndexScope); public static final String SETTING_AUTO_EXPAND_REPLICAS = "index.auto_expand_replicas"; public static final 
Setting<AutoExpandReplicas> INDEX_AUTO_EXPAND_REPLICAS_SETTING = AutoExpandReplicas.SETTING; public static final String SETTING_READ_ONLY = "index.blocks.read_only"; public static final Setting<Boolean> INDEX_READ_ONLY_SETTING = Setting.boolSetting(SETTING_READ_ONLY, false, Property.Dynamic, Property.IndexScope); public static final String SETTING_BLOCKS_READ = "index.blocks.read"; public static final Setting<Boolean> INDEX_BLOCKS_READ_SETTING = Setting.boolSetting(SETTING_BLOCKS_READ, false, Property.Dynamic, Property.IndexScope); public static final String SETTING_BLOCKS_WRITE = "index.blocks.write"; public static final Setting<Boolean> INDEX_BLOCKS_WRITE_SETTING = Setting.boolSetting(SETTING_BLOCKS_WRITE, false, Property.Dynamic, Property.IndexScope); public static final String SETTING_BLOCKS_METADATA = "index.blocks.metadata"; public static final Setting<Boolean> INDEX_BLOCKS_METADATA_SETTING = Setting.boolSetting(SETTING_BLOCKS_METADATA, false, Property.Dynamic, Property.IndexScope); public static final String SETTING_VERSION_CREATED = "index.version.created"; public static final String SETTING_VERSION_CREATED_STRING = "index.version.created_string"; public static final String SETTING_VERSION_UPGRADED = "index.version.upgraded"; public static final String SETTING_VERSION_UPGRADED_STRING = "index.version.upgraded_string"; public static final String SETTING_VERSION_MINIMUM_COMPATIBLE = "index.version.minimum_compatible"; public static final String SETTING_CREATION_DATE = "index.creation_date"; /** * The user provided name for an index. This is the plain string provided by the user when the index was created. * It might still contain date math expressions etc. 
(added in 5.0) */ public static final String SETTING_INDEX_PROVIDED_NAME = "index.provided_name"; public static final String SETTING_PRIORITY = "index.priority"; public static final Setting<Integer> INDEX_PRIORITY_SETTING = Setting.intSetting("index.priority", 1, 0, Property.Dynamic, Property.IndexScope); public static final String SETTING_CREATION_DATE_STRING = "index.creation_date_string"; public static final String SETTING_INDEX_UUID = "index.uuid"; public static final String SETTING_DATA_PATH = "index.data_path"; public static final Setting<String> INDEX_DATA_PATH_SETTING = new Setting<>(SETTING_DATA_PATH, "", Function.identity(), Property.IndexScope); public static final String SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE = "index.shared_filesystem.recover_on_any_node"; public static final Setting<Boolean> INDEX_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE_SETTING = Setting.boolSetting(SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, false, Property.Dynamic, Property.IndexScope); public static final String INDEX_UUID_NA_VALUE = "_na_"; public static final String INDEX_ROUTING_REQUIRE_GROUP_PREFIX = "index.routing.allocation.require"; public static final String INDEX_ROUTING_INCLUDE_GROUP_PREFIX = "index.routing.allocation.include"; public static final String INDEX_ROUTING_EXCLUDE_GROUP_PREFIX = "index.routing.allocation.exclude"; public static final Setting<Settings> INDEX_ROUTING_REQUIRE_GROUP_SETTING = Setting.groupSetting(INDEX_ROUTING_REQUIRE_GROUP_PREFIX + ".", Property.Dynamic, Property.IndexScope); public static final Setting<Settings> INDEX_ROUTING_INCLUDE_GROUP_SETTING = Setting.groupSetting(INDEX_ROUTING_INCLUDE_GROUP_PREFIX + ".", Property.Dynamic, Property.IndexScope); public static final Setting<Settings> INDEX_ROUTING_EXCLUDE_GROUP_SETTING = Setting.groupSetting(INDEX_ROUTING_EXCLUDE_GROUP_PREFIX + ".", Property.Dynamic, Property.IndexScope); public static final Setting<Settings> INDEX_ROUTING_INITIAL_RECOVERY_GROUP_SETTING = 
Setting.groupSetting("index.routing.allocation.initial_recovery."); // this is only setable internally not a registered setting!! /** * The number of active shard copies to check for before proceeding with a write operation. */ public static final Setting<ActiveShardCount> SETTING_WAIT_FOR_ACTIVE_SHARDS = new Setting<>("index.write.wait_for_active_shards", "1", ActiveShardCount::parseString, Setting.Property.Dynamic, Setting.Property.IndexScope); public static final IndexMetaData PROTO = IndexMetaData.builder("") .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) .numberOfShards(1).numberOfReplicas(0).build(); public static final String KEY_IN_SYNC_ALLOCATIONS = "in_sync_allocations"; static final String KEY_VERSION = "version"; static final String KEY_ROUTING_NUM_SHARDS = "routing_num_shards"; static final String KEY_SETTINGS = "settings"; static final String KEY_STATE = "state"; static final String KEY_MAPPINGS = "mappings"; static final String KEY_ALIASES = "aliases"; public static final String KEY_PRIMARY_TERMS = "primary_terms"; public static final String INDEX_STATE_FILE_PREFIX = "state-"; private final int routingNumShards; private final int routingFactor; private final int numberOfShards; private final int numberOfReplicas; private final Index index; private final long version; private final long[] primaryTerms; private final State state; private final ImmutableOpenMap<String, AliasMetaData> aliases; private final Settings settings; private final ImmutableOpenMap<String, MappingMetaData> mappings; private final ImmutableOpenMap<String, Custom> customs; private final ImmutableOpenIntMap<Set<String>> inSyncAllocationIds; private final transient int totalNumberOfShards; private final DiscoveryNodeFilters requireFilters; private final DiscoveryNodeFilters includeFilters; private final DiscoveryNodeFilters excludeFilters; private final DiscoveryNodeFilters initialRecoveryFilters; private final Version indexCreatedVersion; 
private final Version indexUpgradedVersion; private final org.apache.lucene.util.Version minimumCompatibleLuceneVersion; private final ActiveShardCount waitForActiveShards; private IndexMetaData(Index index, long version, long[] primaryTerms, State state, int numberOfShards, int numberOfReplicas, Settings settings, ImmutableOpenMap<String, MappingMetaData> mappings, ImmutableOpenMap<String, AliasMetaData> aliases, ImmutableOpenMap<String, Custom> customs, ImmutableOpenIntMap<Set<String>> inSyncAllocationIds, DiscoveryNodeFilters requireFilters, DiscoveryNodeFilters initialRecoveryFilters, DiscoveryNodeFilters includeFilters, DiscoveryNodeFilters excludeFilters, Version indexCreatedVersion, Version indexUpgradedVersion, org.apache.lucene.util.Version minimumCompatibleLuceneVersion, int routingNumShards, ActiveShardCount waitForActiveShards) { this.index = index; this.version = version; this.primaryTerms = primaryTerms; assert primaryTerms.length == numberOfShards; this.state = state; this.numberOfShards = numberOfShards; this.numberOfReplicas = numberOfReplicas; this.totalNumberOfShards = numberOfShards * (numberOfReplicas + 1); this.settings = settings; this.mappings = mappings; this.customs = customs; this.aliases = aliases; this.inSyncAllocationIds = inSyncAllocationIds; this.requireFilters = requireFilters; this.includeFilters = includeFilters; this.excludeFilters = excludeFilters; this.initialRecoveryFilters = initialRecoveryFilters; this.indexCreatedVersion = indexCreatedVersion; this.indexUpgradedVersion = indexUpgradedVersion; this.minimumCompatibleLuceneVersion = minimumCompatibleLuceneVersion; this.routingNumShards = routingNumShards; this.routingFactor = routingNumShards / numberOfShards; this.waitForActiveShards = waitForActiveShards; assert numberOfShards * routingFactor == routingNumShards : routingNumShards + " must be a multiple of " + numberOfShards; } public Index getIndex() { return index; } public String getIndexUUID() { return index.getUUID(); } 
/**
 * Test whether the current index UUID is the same as the given one. Returns true if either are _na_
 */
public boolean isSameUUID(String otherUUID) {
    assert otherUUID != null;
    assert getIndexUUID() != null;
    // _na_ means "unknown" and is treated as matching any UUID
    if (INDEX_UUID_NA_VALUE.equals(otherUUID) || INDEX_UUID_NA_VALUE.equals(getIndexUUID())) {
        return true;
    }
    return otherUUID.equals(getIndexUUID());
}

public long getVersion() {
    return this.version;
}

/**
 * The term of the current selected primary. This is a non-negative number incremented when
 * a primary shard is assigned after a full cluster restart or a replica shard is promoted to a primary.
 *
 * Note: since we increment the term every time a shard is assigned, the term for any operational shard (i.e., a shard
 * that can be indexed into) is larger than 0. See {@link IndexMetaDataUpdater#applyChanges}.
 **/
public long primaryTerm(int shardId) {
    return this.primaryTerms[shardId];
}

/**
 * Return the {@link Version} on which this index has been created. This
 * information is typically useful for backward compatibility.
 */
public Version getCreationVersion() {
    return indexCreatedVersion;
}

/**
 * Return the {@link Version} on which this index has been upgraded. This
 * information is typically useful for backward compatibility.
 */
public Version getUpgradedVersion() {
    return indexUpgradedVersion;
}

/**
 * Return the {@link org.apache.lucene.util.Version} of the oldest lucene segment in the index
 */
public org.apache.lucene.util.Version getMinimumCompatibleVersion() {
    return minimumCompatibleLuceneVersion;
}

// Creation date in epoch millis as recorded in the settings; -1 when absent.
public long getCreationDate() {
    return settings.getAsLong(SETTING_CREATION_DATE, -1L);
}

public State getState() {
    return this.state;
}

public int getNumberOfShards() {
    return numberOfShards;
}

public int getNumberOfReplicas() {
    return numberOfReplicas;
}

public int getTotalNumberOfShards() {
    return totalNumberOfShards;
}

/**
 * Returns the configured {@link #SETTING_WAIT_FOR_ACTIVE_SHARDS}, which defaults
 * to an active shard count of 1 if not specified.
 */
public ActiveShardCount getWaitForActiveShards() {
    return waitForActiveShards;
}

public Settings getSettings() {
    return settings;
}

public ImmutableOpenMap<String, AliasMetaData> getAliases() {
    return this.aliases;
}

public ImmutableOpenMap<String, MappingMetaData> getMappings() {
    return mappings;
}

@Nullable
public MappingMetaData mapping(String mappingType) {
    return mappings.get(mappingType);
}

// Settings recording which index this one was shrunk from (both must be present to resolve the source index).
public static final Setting<String> INDEX_SHRINK_SOURCE_UUID = Setting.simpleString("index.shrink.source.uuid");
public static final Setting<String> INDEX_SHRINK_SOURCE_NAME = Setting.simpleString("index.shrink.source.name");

// Returns the shrink-source index, or null when this index was not created by a shrink.
public Index getMergeSourceIndex() {
    return INDEX_SHRINK_SOURCE_UUID.exists(settings)
        ? new Index(INDEX_SHRINK_SOURCE_NAME.get(settings), INDEX_SHRINK_SOURCE_UUID.get(settings)) : null;
}

/**
 * Sometimes, the default mapping exists and an actual mapping is not created yet (introduced),
 * in this case, we want to return the default mapping in case it has some default mapping definitions.
 * <p>
 * Note, once the mapping type is introduced, the default mapping is applied on the actual typed MappingMetaData,
 * setting its routing, timestamp, and so on if needed.
 */
@Nullable
public MappingMetaData mappingOrDefault(String mappingType) {
    MappingMetaData mapping = mappings.get(mappingType);
    if (mapping != null) {
        return mapping;
    }
    return mappings.get(MapperService.DEFAULT_MAPPING);
}

public ImmutableOpenMap<String, Custom> getCustoms() {
    return this.customs;
}

// NOTE(review): unchecked cast — the caller is responsible for requesting the right type for the key.
@SuppressWarnings("unchecked")
public <T extends Custom> T custom(String type) {
    return (T) customs.get(type);
}

public ImmutableOpenIntMap<Set<String>> getInSyncAllocationIds() {
    return inSyncAllocationIds;
}

public Set<String> inSyncAllocationIds(int shardId) {
    assert shardId >= 0 && shardId < numberOfShards;
    return inSyncAllocationIds.get(shardId);
}

@Nullable
public DiscoveryNodeFilters requireFilters() {
    return requireFilters;
}

@Nullable
public DiscoveryNodeFilters getInitialRecoveryFilters() {
    return initialRecoveryFilters;
}

@Nullable
public DiscoveryNodeFilters includeFilters() {
    return includeFilters;
}

@Nullable
public DiscoveryNodeFilters excludeFilters() {
    return excludeFilters;
}

// Equality covers everything serialized except derived fields; keep in sync with hashCode().
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    IndexMetaData that = (IndexMetaData) o;
    if (version != that.version) {
        return false;
    }
    if (!aliases.equals(that.aliases)) {
        return false;
    }
    if (!index.equals(that.index)) {
        return false;
    }
    if (!mappings.equals(that.mappings)) {
        return false;
    }
    if (!settings.equals(that.settings)) {
        return false;
    }
    if (state != that.state) {
        return false;
    }
    if (!customs.equals(that.customs)) {
        return false;
    }
    if (routingNumShards != that.routingNumShards) {
        return false;
    }
    if (routingFactor != that.routingFactor) {
        return false;
    }
    if (Arrays.equals(primaryTerms, that.primaryTerms) == false) {
        return false;
    }
    if (!inSyncAllocationIds.equals(that.inSyncAllocationIds)) {
        return false;
    }
    return true;
}

@Override
public int hashCode() {
    int result = index.hashCode();
    result = 31 * result + Long.hashCode(version);
    result = 31 * result + state.hashCode();
    result = 31 *
result + aliases.hashCode(); result = 31 * result + settings.hashCode(); result = 31 * result + mappings.hashCode(); result = 31 * result + customs.hashCode(); result = 31 * result + Long.hashCode(routingFactor); result = 31 * result + Long.hashCode(routingNumShards); result = 31 * result + Arrays.hashCode(primaryTerms); result = 31 * result + inSyncAllocationIds.hashCode(); return result; } @Override public Diff<IndexMetaData> diff(IndexMetaData previousState) { return new IndexMetaDataDiff(previousState, this); } @Override public Diff<IndexMetaData> readDiffFrom(StreamInput in) throws IOException { return new IndexMetaDataDiff(in); } @Override public IndexMetaData fromXContent(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException { return Builder.fromXContent(parser); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { Builder.toXContent(this, builder, params); return builder; } private static class IndexMetaDataDiff implements Diff<IndexMetaData> { private final String index; private final int routingNumShards; private final long version; private final long[] primaryTerms; private final State state; private final Settings settings; private final Diff<ImmutableOpenMap<String, MappingMetaData>> mappings; private final Diff<ImmutableOpenMap<String, AliasMetaData>> aliases; private final Diff<ImmutableOpenMap<String, Custom>> customs; private final Diff<ImmutableOpenIntMap<Set<String>>> inSyncAllocationIds; public IndexMetaDataDiff(IndexMetaData before, IndexMetaData after) { index = after.index.getName(); version = after.version; routingNumShards = after.routingNumShards; state = after.state; settings = after.settings; primaryTerms = after.primaryTerms; mappings = DiffableUtils.diff(before.mappings, after.mappings, DiffableUtils.getStringKeySerializer()); aliases = DiffableUtils.diff(before.aliases, after.aliases, DiffableUtils.getStringKeySerializer()); customs = 
DiffableUtils.diff(before.customs, after.customs, DiffableUtils.getStringKeySerializer());
        inSyncAllocationIds = DiffableUtils.diff(before.inSyncAllocationIds, after.inSyncAllocationIds,
            DiffableUtils.getVIntKeySerializer(), DiffableUtils.StringSetValueSerializer.getInstance());
    }

    // Deserialize a diff from the wire; field order must mirror writeTo() exactly.
    public IndexMetaDataDiff(StreamInput in) throws IOException {
        index = in.readString();
        routingNumShards = in.readInt();
        version = in.readLong();
        state = State.fromId(in.readByte());
        settings = Settings.readSettingsFromStream(in);
        primaryTerms = in.readVLongArray();
        mappings = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), MappingMetaData.PROTO);
        aliases = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), AliasMetaData.PROTO);
        // customs need a prototype lookup by type key to know how to deserialize each value
        customs = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(),
            new DiffableUtils.DiffableValueSerializer<String, Custom>() {
                @Override
                public Custom read(StreamInput in, String key) throws IOException {
                    return lookupPrototypeSafe(key).readFrom(in);
                }

                @Override
                public Diff<Custom> readDiff(StreamInput in, String key) throws IOException {
                    return lookupPrototypeSafe(key).readDiffFrom(in);
                }
            });
        inSyncAllocationIds = DiffableUtils.readImmutableOpenIntMapDiff(in, DiffableUtils.getVIntKeySerializer(),
            DiffableUtils.StringSetValueSerializer.getInstance());
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(index);
        out.writeInt(routingNumShards);
        out.writeLong(version);
        out.writeByte(state.id);
        Settings.writeSettingsToStream(settings, out);
        out.writeVLongArray(primaryTerms);
        mappings.writeTo(out);
        aliases.writeTo(out);
        customs.writeTo(out);
        inSyncAllocationIds.writeTo(out);
    }

    // Apply this diff on top of an existing IndexMetaData to produce the updated state.
    @Override
    public IndexMetaData apply(IndexMetaData part) {
        Builder builder = builder(index);
        builder.version(version);
        builder.setRoutingNumShards(routingNumShards);
        builder.state(state);
        builder.settings(settings);
        builder.primaryTerms(primaryTerms);
        builder.mappings.putAll(mappings.apply(part.mappings));
        builder.aliases.putAll(aliases.apply(part.aliases));
        builder.customs.putAll(customs.apply(part.customs));
        builder.inSyncAllocationIds.putAll(inSyncAllocationIds.apply(part.inSyncAllocationIds));
        return builder.build();
    }
}

// Full (non-diff) deserialization; order must mirror writeTo(StreamOutput) below.
@Override
public IndexMetaData readFrom(StreamInput in) throws IOException {
    Builder builder = new Builder(in.readString());
    builder.version(in.readLong());
    builder.setRoutingNumShards(in.readInt());
    builder.state(State.fromId(in.readByte()));
    builder.settings(readSettingsFromStream(in));
    builder.primaryTerms(in.readVLongArray());
    int mappingsSize = in.readVInt();
    for (int i = 0; i < mappingsSize; i++) {
        MappingMetaData mappingMd = MappingMetaData.PROTO.readFrom(in);
        builder.putMapping(mappingMd);
    }
    int aliasesSize = in.readVInt();
    for (int i = 0; i < aliasesSize; i++) {
        AliasMetaData aliasMd = AliasMetaData.Builder.readFrom(in);
        builder.putAlias(aliasMd);
    }
    int customSize = in.readVInt();
    for (int i = 0; i < customSize; i++) {
        String type = in.readString();
        Custom customIndexMetaData = lookupPrototypeSafe(type).readFrom(in);
        builder.putCustom(type, customIndexMetaData);
    }
    int inSyncAllocationIdsSize = in.readVInt();
    for (int i = 0; i < inSyncAllocationIdsSize; i++) {
        int key = in.readVInt();
        Set<String> allocationIds = DiffableUtils.StringSetValueSerializer.getInstance().read(in, key);
        builder.putInSyncAllocationIds(key, allocationIds);
    }
    return builder.build();
}

@Override
public void writeTo(StreamOutput out) throws IOException {
    out.writeString(index.getName()); // uuid will come as part of settings
    out.writeLong(version);
    out.writeInt(routingNumShards);
    out.writeByte(state.id());
    writeSettingsToStream(settings, out);
    out.writeVLongArray(primaryTerms);
    out.writeVInt(mappings.size());
    for (ObjectCursor<MappingMetaData> cursor : mappings.values()) {
        cursor.value.writeTo(out);
    }
    out.writeVInt(aliases.size());
    for (ObjectCursor<AliasMetaData> cursor : aliases.values()) {
cursor.value.writeTo(out);
    }
    out.writeVInt(customs.size());
    for (ObjectObjectCursor<String, Custom> cursor : customs) {
        out.writeString(cursor.key);
        cursor.value.writeTo(out);
    }
    out.writeVInt(inSyncAllocationIds.size());
    for (IntObjectCursor<Set<String>> cursor : inSyncAllocationIds) {
        out.writeVInt(cursor.key);
        DiffableUtils.StringSetValueSerializer.getInstance().write(cursor.value, out);
    }
}

public static Builder builder(String index) {
    return new Builder(index);
}

public static Builder builder(IndexMetaData indexMetaData) {
    return new Builder(indexMetaData);
}

// Mutable builder for IndexMetaData; build() performs validation and derives computed fields.
public static class Builder {

    private String index;
    private State state = State.OPEN;
    private long version = 1;
    // lazily initialized from numberOfShards; see initializePrimaryTerms()
    private long[] primaryTerms = null;
    private Settings settings = Settings.Builder.EMPTY_SETTINGS;
    private final ImmutableOpenMap.Builder<String, MappingMetaData> mappings;
    private final ImmutableOpenMap.Builder<String, AliasMetaData> aliases;
    private final ImmutableOpenMap.Builder<String, Custom> customs;
    private final ImmutableOpenIntMap.Builder<Set<String>> inSyncAllocationIds;
    // null means "use numberOfShards()"; see getRoutingNumShards()
    private Integer routingNumShards;

    public Builder(String index) {
        this.index = index;
        this.mappings = ImmutableOpenMap.builder();
        this.aliases = ImmutableOpenMap.builder();
        this.customs = ImmutableOpenMap.builder();
        this.inSyncAllocationIds = ImmutableOpenIntMap.builder();
    }

    // Copy-constructor: seeds the builder from an existing IndexMetaData.
    public Builder(IndexMetaData indexMetaData) {
        this.index = indexMetaData.getIndex().getName();
        this.state = indexMetaData.state;
        this.version = indexMetaData.version;
        this.settings = indexMetaData.getSettings();
        this.primaryTerms = indexMetaData.primaryTerms.clone();
        this.mappings = ImmutableOpenMap.builder(indexMetaData.mappings);
        this.aliases = ImmutableOpenMap.builder(indexMetaData.aliases);
        this.customs = ImmutableOpenMap.builder(indexMetaData.customs);
        this.routingNumShards = indexMetaData.routingNumShards;
        this.inSyncAllocationIds = ImmutableOpenIntMap.builder(indexMetaData.inSyncAllocationIds);
    }

    public String index() {
        return index;
    }

    public
Builder index(String index) {
        this.index = index;
        return this;
    }

    // number of shards is stored inside the settings object
    public Builder numberOfShards(int numberOfShards) {
        settings = Settings.builder().put(settings).put(SETTING_NUMBER_OF_SHARDS, numberOfShards).build();
        return this;
    }

    /**
     * Sets the number of shards that should be used for routing. This should only be used if the number of shards in
     * an index has changed ie if the index is shrunk.
     */
    public Builder setRoutingNumShards(int routingNumShards) {
        this.routingNumShards = routingNumShards;
        return this;
    }

    /**
     * Returns number of shards that should be used for routing. By default this method will return the number of shards
     * for this index.
     *
     * @see #setRoutingNumShards(int)
     * @see #numberOfShards()
     */
    public int getRoutingNumShards() {
        return routingNumShards == null ? numberOfShards() : routingNumShards;
    }

    // -1 when the setting is absent
    public int numberOfShards() {
        return settings.getAsInt(SETTING_NUMBER_OF_SHARDS, -1);
    }

    public Builder numberOfReplicas(int numberOfReplicas) {
        settings = Settings.builder().put(settings).put(SETTING_NUMBER_OF_REPLICAS, numberOfReplicas).build();
        return this;
    }

    // -1 when the setting is absent
    public int numberOfReplicas() {
        return settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, -1);
    }

    public Builder creationDate(long creationDate) {
        settings = Settings.builder().put(settings).put(SETTING_CREATION_DATE, creationDate).build();
        return this;
    }

    public Builder settings(Settings.Builder settings) {
        return settings(settings.build());
    }

    public Builder settings(Settings settings) {
        this.settings = settings;
        return this;
    }

    public MappingMetaData mapping(String type) {
        return mappings.get(type);
    }

    // Parses the given JSON/YAML mapping source and stores it under the given type.
    public Builder putMapping(String type, String source) throws IOException {
        try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) {
            putMapping(new MappingMetaData(type, parser.mapOrdered()));
        }
        return this;
    }

    public Builder putMapping(MappingMetaData mappingMd) {
        mappings.put(mappingMd.type(), mappingMd);
        return this;
    }

    public Builder state(State state) {
        this.state = state;
return this; } public Builder putAlias(AliasMetaData aliasMetaData) { aliases.put(aliasMetaData.alias(), aliasMetaData); return this; } public Builder putAlias(AliasMetaData.Builder aliasMetaData) { aliases.put(aliasMetaData.alias(), aliasMetaData.build()); return this; } public Builder removeAlias(String alias) { aliases.remove(alias); return this; } public Builder removeAllAliases() { aliases.clear(); return this; } public Builder putCustom(String type, Custom customIndexMetaData) { this.customs.put(type, customIndexMetaData); return this; } public Set<String> getInSyncAllocationIds(int shardId) { return inSyncAllocationIds.get(shardId); } public Builder putInSyncAllocationIds(int shardId, Set<String> allocationIds) { inSyncAllocationIds.put(shardId, new HashSet(allocationIds)); return this; } public long version() { return this.version; } public Builder version(long version) { this.version = version; return this; } /** * returns the primary term for the given shard. * See {@link IndexMetaData#primaryTerm(int)} for more information. */ public long primaryTerm(int shardId) { if (primaryTerms == null) { initializePrimaryTerms(); } return this.primaryTerms[shardId]; } /** * sets the primary term for the given shard. * See {@link IndexMetaData#primaryTerm(int)} for more information. 
*/ public Builder primaryTerm(int shardId, long primaryTerm) { if (primaryTerms == null) { initializePrimaryTerms(); } this.primaryTerms[shardId] = primaryTerm; return this; } private void primaryTerms(long[] primaryTerms) { this.primaryTerms = primaryTerms.clone(); } private void initializePrimaryTerms() { assert primaryTerms == null; if (numberOfShards() < 0) { throw new IllegalStateException("you must set the number of shards before setting/reading primary terms"); } primaryTerms = new long[numberOfShards()]; } public IndexMetaData build() { ImmutableOpenMap.Builder<String, AliasMetaData> tmpAliases = aliases; Settings tmpSettings = settings; // update default mapping on the MappingMetaData if (mappings.containsKey(MapperService.DEFAULT_MAPPING)) { MappingMetaData defaultMapping = mappings.get(MapperService.DEFAULT_MAPPING); for (ObjectCursor<MappingMetaData> cursor : mappings.values()) { cursor.value.updateDefaultMapping(defaultMapping); } } Integer maybeNumberOfShards = settings.getAsInt(SETTING_NUMBER_OF_SHARDS, null); if (maybeNumberOfShards == null) { throw new IllegalArgumentException("must specify numberOfShards for index [" + index + "]"); } int numberOfShards = maybeNumberOfShards; if (numberOfShards <= 0) { throw new IllegalArgumentException("must specify positive number of shards for index [" + index + "]"); } Integer maybeNumberOfReplicas = settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, null); if (maybeNumberOfReplicas == null) { throw new IllegalArgumentException("must specify numberOfReplicas for index [" + index + "]"); } int numberOfReplicas = maybeNumberOfReplicas; if (numberOfReplicas < 0) { throw new IllegalArgumentException("must specify non-negative number of shards for index [" + index + "]"); } // fill missing slots in inSyncAllocationIds with empty set if needed and make all entries immutable ImmutableOpenIntMap.Builder<Set<String>> filledInSyncAllocationIds = ImmutableOpenIntMap.builder(); for (int i = 0; i < numberOfShards; i++) { if 
(inSyncAllocationIds.containsKey(i)) { filledInSyncAllocationIds.put(i, Collections.unmodifiableSet(new HashSet<>(inSyncAllocationIds.get(i)))); } else { filledInSyncAllocationIds.put(i, Collections.emptySet()); } } final Map<String, String> requireMap = INDEX_ROUTING_REQUIRE_GROUP_SETTING.get(settings).getAsMap(); final DiscoveryNodeFilters requireFilters; if (requireMap.isEmpty()) { requireFilters = null; } else { requireFilters = DiscoveryNodeFilters.buildFromKeyValue(AND, requireMap); } Map<String, String> includeMap = INDEX_ROUTING_INCLUDE_GROUP_SETTING.get(settings).getAsMap(); final DiscoveryNodeFilters includeFilters; if (includeMap.isEmpty()) { includeFilters = null; } else { includeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, includeMap); } Map<String, String> excludeMap = INDEX_ROUTING_EXCLUDE_GROUP_SETTING.get(settings).getAsMap(); final DiscoveryNodeFilters excludeFilters; if (excludeMap.isEmpty()) { excludeFilters = null; } else { excludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, excludeMap); } Map<String, String> initialRecoveryMap = INDEX_ROUTING_INITIAL_RECOVERY_GROUP_SETTING.get(settings).getAsMap(); final DiscoveryNodeFilters initialRecoveryFilters; if (initialRecoveryMap.isEmpty()) { initialRecoveryFilters = null; } else { initialRecoveryFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, initialRecoveryMap); } Version indexCreatedVersion = Version.indexCreated(settings); Version indexUpgradedVersion = settings.getAsVersion(IndexMetaData.SETTING_VERSION_UPGRADED, indexCreatedVersion); String stringLuceneVersion = settings.get(SETTING_VERSION_MINIMUM_COMPATIBLE); final org.apache.lucene.util.Version minimumCompatibleLuceneVersion; if (stringLuceneVersion != null) { try { minimumCompatibleLuceneVersion = org.apache.lucene.util.Version.parse(stringLuceneVersion); } catch (ParseException ex) { throw new IllegalStateException("Cannot parse lucene version [" + stringLuceneVersion + "] in the [" + SETTING_VERSION_MINIMUM_COMPATIBLE + 
"] setting", ex); } } else { minimumCompatibleLuceneVersion = null; } if (primaryTerms == null) { initializePrimaryTerms(); } else if (primaryTerms.length != numberOfShards) { throw new IllegalStateException("primaryTerms length is [" + primaryTerms.length + "] but should be equal to number of shards [" + numberOfShards() + "]"); } final ActiveShardCount waitForActiveShards = SETTING_WAIT_FOR_ACTIVE_SHARDS.get(settings); if (waitForActiveShards.validate(numberOfReplicas) == false) { throw new IllegalArgumentException("invalid " + SETTING_WAIT_FOR_ACTIVE_SHARDS.getKey() + "[" + waitForActiveShards + "]: cannot be greater than " + "number of shard copies [" + (numberOfReplicas + 1) + "]"); } final String uuid = settings.get(SETTING_INDEX_UUID, INDEX_UUID_NA_VALUE); return new IndexMetaData(new Index(index, uuid), version, primaryTerms, state, numberOfShards, numberOfReplicas, tmpSettings, mappings.build(), tmpAliases.build(), customs.build(), filledInSyncAllocationIds.build(), requireFilters, initialRecoveryFilters, includeFilters, excludeFilters, indexCreatedVersion, indexUpgradedVersion, minimumCompatibleLuceneVersion, getRoutingNumShards(), waitForActiveShards); } public static void toXContent(IndexMetaData indexMetaData, XContentBuilder builder, ToXContent.Params params) throws IOException { builder.startObject(indexMetaData.getIndex().getName()); builder.field(KEY_VERSION, indexMetaData.getVersion()); builder.field(KEY_ROUTING_NUM_SHARDS, indexMetaData.getRoutingNumShards()); builder.field(KEY_STATE, indexMetaData.getState().toString().toLowerCase(Locale.ENGLISH)); boolean binary = params.paramAsBoolean("binary", false); builder.startObject(KEY_SETTINGS); for (Map.Entry<String, String> entry : indexMetaData.getSettings().getAsMap().entrySet()) { builder.field(entry.getKey(), entry.getValue()); } builder.endObject(); builder.startArray(KEY_MAPPINGS); for (ObjectObjectCursor<String, MappingMetaData> cursor : indexMetaData.getMappings()) { if (binary) { 
builder.value(cursor.value.source().compressed());
            } else {
                byte[] data = cursor.value.source().uncompressed();
                try (XContentParser parser = XContentFactory.xContent(data).createParser(data)) {
                    Map<String, Object> mapping = parser.mapOrdered();
                    builder.map(mapping);
                }
            }
        }
        builder.endArray();

        for (ObjectObjectCursor<String, Custom> cursor : indexMetaData.getCustoms()) {
            builder.startObject(cursor.key);
            cursor.value.toXContent(builder, params);
            builder.endObject();
        }

        builder.startObject(KEY_ALIASES);
        for (ObjectCursor<AliasMetaData> cursor : indexMetaData.getAliases().values()) {
            AliasMetaData.Builder.toXContent(cursor.value, builder, params);
        }
        builder.endObject();

        builder.startArray(KEY_PRIMARY_TERMS);
        for (int i = 0; i < indexMetaData.getNumberOfShards(); i++) {
            builder.value(indexMetaData.primaryTerm(i));
        }
        builder.endArray();

        builder.startObject(KEY_IN_SYNC_ALLOCATIONS);
        for (IntObjectCursor<Set<String>> cursor : indexMetaData.inSyncAllocationIds) {
            builder.startArray(String.valueOf(cursor.key));
            for (String allocationId : cursor.value) {
                builder.value(allocationId);
            }
            builder.endArray();
        }
        builder.endObject();

        builder.endObject();
    }

    // Inverse of toXContent above; tolerates unknown customs and the removed "warmers" section.
    public static IndexMetaData fromXContent(XContentParser parser) throws IOException {
        if (parser.currentToken() == null) { // fresh parser? move to the first token
            parser.nextToken();
        }
        if (parser.currentToken() == XContentParser.Token.START_OBJECT) {  // on a start object move to next token
            parser.nextToken();
        }
        if (parser.currentToken() != XContentParser.Token.FIELD_NAME) {
            throw new IllegalArgumentException("expected field name but got a " + parser.currentToken());
        }
        Builder builder = new Builder(parser.currentName());

        String currentFieldName = null;
        XContentParser.Token token = parser.nextToken();
        if (token != XContentParser.Token.START_OBJECT) {
            throw new IllegalArgumentException("expected object but got a " + token);
        }
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token == XContentParser.Token.START_OBJECT) {
                if (KEY_SETTINGS.equals(currentFieldName)) {
                    builder.settings(Settings.builder().put(SettingsLoader.Helper.loadNestedFromMap(parser.mapOrdered())));
                } else if (KEY_MAPPINGS.equals(currentFieldName)) {
                    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                        if (token == XContentParser.Token.FIELD_NAME) {
                            currentFieldName = parser.currentName();
                        } else if (token == XContentParser.Token.START_OBJECT) {
                            String mappingType = currentFieldName;
                            Map<String, Object> mappingSource =
                                MapBuilder.<String, Object>newMapBuilder().put(mappingType, parser.mapOrdered()).map();
                            builder.putMapping(new MappingMetaData(mappingType, mappingSource));
                        } else {
                            throw new IllegalArgumentException("Unexpected token: " + token);
                        }
                    }
                } else if (KEY_ALIASES.equals(currentFieldName)) {
                    while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
                        builder.putAlias(AliasMetaData.Builder.fromXContent(parser));
                    }
                } else if (KEY_IN_SYNC_ALLOCATIONS.equals(currentFieldName)) {
                    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                        if (token == XContentParser.Token.FIELD_NAME) {
                            currentFieldName = parser.currentName();
                        } else if (token == XContentParser.Token.START_ARRAY) {
                            String shardId = currentFieldName;
                            Set<String> allocationIds = new HashSet<>();
                            while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                                if (token == XContentParser.Token.VALUE_STRING) {
                                    allocationIds.add(parser.text());
                                }
                            }
                            builder.putInSyncAllocationIds(Integer.valueOf(shardId), allocationIds);
                        } else {
                            throw new IllegalArgumentException("Unexpected token: " + token);
                        }
                    }
                } else if ("warmers".equals(currentFieldName)) {
                    // TODO: do this in 6.0:
                    // throw new IllegalArgumentException("Warmers are not supported anymore - are you upgrading from 1.x?");
                    // ignore: warmers have been removed in 5.0 and are
                    // simply ignored when upgrading from 2.x
                    assert Version.CURRENT.major <= 5;
                    parser.skipChildren();
                } else {
                    // check if it's a custom index metadata
                    Custom proto = lookupPrototype(currentFieldName);
                    if (proto == null) {
                        //TODO warn
                        parser.skipChildren();
                    } else {
                        Custom custom = proto.fromXContent(parser);
                        builder.putCustom(custom.type(), custom);
                    }
                }
            } else if (token == XContentParser.Token.START_ARRAY) {
                if (KEY_MAPPINGS.equals(currentFieldName)) {
                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                        if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
                            // binary form written when params contained binary=true
                            builder.putMapping(new MappingMetaData(new CompressedXContent(parser.binaryValue())));
                        } else {
                            Map<String, Object> mapping = parser.mapOrdered();
                            if (mapping.size() == 1) {
                                String mappingType = mapping.keySet().iterator().next();
                                builder.putMapping(new MappingMetaData(mappingType, mapping));
                            }
                        }
                    }
                } else if (KEY_PRIMARY_TERMS.equals(currentFieldName)) {
                    LongArrayList list = new LongArrayList();
                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                        if (token == XContentParser.Token.VALUE_NUMBER) {
                            list.add(parser.longValue());
                        } else {
                            throw new IllegalStateException("found a non-numeric value under [" + KEY_PRIMARY_TERMS + "]");
                        }
                    }
                    builder.primaryTerms(list.toArray());
                } else {
                    throw new
IllegalArgumentException("Unexpected field for an array " + currentFieldName);
                }
            } else if (token.isValue()) {
                if (KEY_STATE.equals(currentFieldName)) {
                    builder.state(State.fromString(parser.text()));
                } else if (KEY_VERSION.equals(currentFieldName)) {
                    builder.version(parser.longValue());
                } else if (KEY_ROUTING_NUM_SHARDS.equals(currentFieldName)) {
                    builder.setRoutingNumShards(parser.intValue());
                } else {
                    throw new IllegalArgumentException("Unexpected field [" + currentFieldName + "]");
                }
            } else {
                throw new IllegalArgumentException("Unexpected token " + token);
            }
        }
        return builder.build();
    }

    public static IndexMetaData readFrom(StreamInput in) throws IOException {
        return PROTO.readFrom(in);
    }
}

/**
 * Returns <code>true</code> iff the given settings indicate that the index
 * associated with these settings allocates it's shards on a shared
 * filesystem. Otherwise <code>false</code>. The default setting for this
 * is the returned value from
 * {@link #isIndexUsingShadowReplicas(org.elasticsearch.common.settings.Settings)}.
 */
public static boolean isOnSharedFilesystem(Settings settings) {
    return settings.getAsBoolean(SETTING_SHARED_FILESYSTEM, isIndexUsingShadowReplicas(settings));
}

/**
 * Returns <code>true</code> iff the given settings indicate that the index associated
 * with these settings uses shadow replicas. Otherwise <code>false</code>. The default
 * setting for this is <code>false</code>.
 */
public static boolean isIndexUsingShadowReplicas(Settings settings) {
    return settings.getAsBoolean(SETTING_SHADOW_REPLICAS, false);
}

/**
 * Adds human readable version and creation date settings.
 * This method is used to display the settings in a human readable format in REST API
 */
public static Settings addHumanReadableSettings(Settings settings) {
    Settings.Builder builder = Settings.builder().put(settings);
    Version version = settings.getAsVersion(SETTING_VERSION_CREATED, null);
    if (version != null) {
        builder.put(SETTING_VERSION_CREATED_STRING, version.toString());
    }
    Version versionUpgraded = settings.getAsVersion(SETTING_VERSION_UPGRADED, null);
    if (versionUpgraded != null) {
        builder.put(SETTING_VERSION_UPGRADED_STRING, versionUpgraded.toString());
    }
    Long creationDate = settings.getAsLong(SETTING_CREATION_DATE, null);
    if (creationDate != null) {
        DateTime creationDateTime = new DateTime(creationDate, DateTimeZone.UTC);
        builder.put(SETTING_CREATION_DATE_STRING, creationDateTime.toString());
    }
    return builder.build();
}

// Write mappings in binary (compressed) form when persisting state to disk.
private static final ToXContent.Params FORMAT_PARAMS = new MapParams(Collections.singletonMap("binary", "true"));

/**
 * State format for {@link IndexMetaData} to write to and load from disk
 */
public static final MetaDataStateFormat<IndexMetaData> FORMAT =
    new MetaDataStateFormat<IndexMetaData>(XContentType.SMILE, INDEX_STATE_FILE_PREFIX) {

        @Override
        public void toXContent(XContentBuilder builder, IndexMetaData state) throws IOException {
            Builder.toXContent(state, builder, FORMAT_PARAMS);
        }

        @Override
        public IndexMetaData fromXContent(XContentParser parser) throws IOException {
            return Builder.fromXContent(parser);
        }
    };

/**
 * Returns the number of shards that should be used for routing. This basically defines the hash space we use in
 * {@link org.elasticsearch.cluster.routing.OperationRouting#generateShardId(IndexMetaData, String, String)} to route documents
 * to shards based on their ID or their specific routing value. The default value is {@link #getNumberOfShards()}. This value only
 * changes if an index is shrunk.
 */
public int getRoutingNumShards() {
    return routingNumShards;
}

/**
 * Returns the routing factor for this index. The default is <tt>1</tt>.
 *
 * @see #getRoutingFactor(IndexMetaData, int) for details
 */
public int getRoutingFactor() {
    return routingFactor;
}

/**
 * Returns the source shard ids to shrink into the given shard id.
 * @param shardId the id of the target shard to shrink to
 * @param sourceIndexMetadata the source index metadata
 * @param numTargetShards the total number of shards in the target index
 * @return a set of shard IDs to shrink into the given shard ID.
 */
public static Set<ShardId> selectShrinkShards(int shardId, IndexMetaData sourceIndexMetadata, int numTargetShards) {
    if (shardId >= numTargetShards) {
        throw new IllegalArgumentException("the number of target shards (" + numTargetShards + ") must be greater than the shard id: "
            + shardId);
    }
    int routingFactor = getRoutingFactor(sourceIndexMetadata, numTargetShards);
    Set<ShardId> shards = new HashSet<>(routingFactor);
    // each target shard absorbs a contiguous run of routingFactor source shards
    for (int i = shardId * routingFactor; i < routingFactor*shardId + routingFactor; i++) {
        shards.add(new ShardId(sourceIndexMetadata.getIndex(), i));
    }
    return shards;
}

/**
 * Returns the routing factor for a shrunk index with the given number of target shards.
 * This factor is used in the hash function in
 * {@link org.elasticsearch.cluster.routing.OperationRouting#generateShardId(IndexMetaData, String, String)} to guarantee consistent
 * hashing / routing of documents even if the number of shards changed (ie. a shrunk index).
 *
 * @param sourceIndexMetadata the metadata of the source index
 * @param targetNumberOfShards the total number of shards in the target index
 * @return the routing factor for a shrunk index with the given number of target shards.
 * @throws IllegalArgumentException if the number of source shards is greater than the number of target shards or if the source shards
 * are not divisible by the number of target shards.
*/ public static int getRoutingFactor(IndexMetaData sourceIndexMetadata, int targetNumberOfShards) { int sourceNumberOfShards = sourceIndexMetadata.getNumberOfShards(); if (sourceNumberOfShards < targetNumberOfShards) { throw new IllegalArgumentException("the number of target shards must be less that the number of source shards"); } int factor = sourceNumberOfShards / targetNumberOfShards; if (factor * targetNumberOfShards != sourceNumberOfShards || factor <= 1) { throw new IllegalArgumentException("the number of source shards [" + sourceNumberOfShards + "] must be a must be a multiple of [" + targetNumberOfShards + "]"); } return factor; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.changelog.fs;

import org.apache.flink.changelog.fs.StateChangeUploader.UploadTask;
import org.apache.flink.core.testutils.ManuallyTriggeredScheduledExecutorService;
import org.apache.flink.runtime.state.changelog.SequenceNumber;
import org.apache.flink.runtime.state.changelog.StateChange;
import org.apache.flink.runtime.testutils.DirectScheduledExecutorService;
import org.apache.flink.util.function.BiConsumerWithException;

import org.junit.Test;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Random;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;

import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/** {@link BatchingStateChangeUploader} test. */
public class BatchingStateChangeUploaderTest {

    private final Random random = new Random();

    /** With zero delay and zero size threshold, every upload call must flush immediately. */
    @Test
    public void testNoDelayAndThreshold() throws Exception {
        withStore(
                0,
                0,
                (store, probe) -> {
                    List<StateChangeSet> changes1 = getChanges(4);
                    upload(store, changes1);
                    assertSaved(probe, changes1);
                    List<StateChangeSet> changes2 = getChanges(4);
                    upload(store, changes2);
                    // The probe accumulates uploads, so both batches must be present.
                    assertSaved(probe, changes1, changes2);
                });
    }

    // Wraps the change sets into an UploadTask with no-op success/failure callbacks.
    private void upload(BatchingStateChangeUploader store, List<StateChangeSet> changeSets) {
        store.upload(new UploadTask(changeSets, unused -> {}, (unused0, unused1) -> {}));
    }

    /** Uploads must be buffered until the accumulated size reaches the configured threshold. */
    @Test
    public void testSizeThreshold() throws Exception {
        int numChanges = 7;
        int changeSize = 11;
        int threshold = changeSize * numChanges;
        withStore(
                Integer.MAX_VALUE,
                threshold,
                (store, probe) -> {
                    List<StateChangeSet> expected = new ArrayList<>();
                    int runningSize = 0;
                    for (int i = 0; i < numChanges; i++) {
                        List<StateChangeSet> changes = getChanges(changeSize);
                        runningSize += changes.stream().mapToLong(StateChangeSet::getSize).sum();
                        upload(store, changes);
                        expected.addAll(changes);
                        if (runningSize >= threshold) {
                            // Threshold crossed: everything buffered so far must be flushed.
                            assertSaved(probe, expected);
                        } else {
                            assertTrue(probe.getUploaded().isEmpty());
                        }
                    }
                });
    }

    /** Uploads must be deferred by the configured delay; flushing happens on the scheduled task. */
    @Test
    public void testDelay() throws Exception {
        int delayMs = 50;
        ManuallyTriggeredScheduledExecutorService scheduler =
                new ManuallyTriggeredScheduledExecutorService();
        withStore(
                delayMs,
                Integer.MAX_VALUE,
                scheduler,
                (store, probe) -> {
                    scheduler.triggerAll();
                    List<StateChangeSet> changeSets = getChanges(4);
                    upload(store, changeSets);
                    // Nothing uploaded yet - only a task scheduled with the expected delay.
                    assertTrue(probe.getUploaded().isEmpty());
                    assertTrue(
                            scheduler.getAllNonPeriodicScheduledTask().stream()
                                    .anyMatch(
                                            scheduled ->
                                                    scheduled.getDelay(MILLISECONDS) == delayMs));
                    scheduler.triggerAllNonPeriodicTasks();
                    assertEquals(changeSets, probe.getUploaded());
                });
    }

    /** Test integration with {@link RetryingExecutor}. */
    @Test
    public void testRetry() throws Exception {
        final int maxAttempts = 5;

        try (BatchingStateChangeUploader store =
                new BatchingStateChangeUploader(
                        0,
                        0,
                        RetryPolicy.fixed(maxAttempts, 0, 0),
                        new TestingStateChangeUploader() {
                            final AtomicInteger currentAttempt = new AtomicInteger(0);

                            @Override
                            public void upload(UploadTask uploadTask) throws IOException {
                                // Fail the first (maxAttempts - 1) attempts; only the last succeeds.
                                if (currentAttempt.getAndIncrement() < maxAttempts - 1) {
                                    throw new IOException();
                                } else {
                                    uploadTask.complete(emptyList());
                                }
                            }
                        },
                        new DirectScheduledExecutorService(),
                        new RetryingExecutor(new DirectScheduledExecutorService()),
                        10_000)) {
            CompletableFuture<List<UploadResult>> completionFuture = new CompletableFuture<>();
            store.upload(
                    new UploadTask(
                            getChanges(4),
                            completionFuture::complete,
                            (unused, throwable) ->
                                    completionFuture.completeExceptionally(throwable)));
            // Blocks until the retried upload eventually succeeds (or rethrows on failure).
            completionFuture.get();
        }
    }

    /** Uploading after the scheduler is shut down must surface a RejectedExecutionException. */
    @Test(expected = RejectedExecutionException.class)
    public void testErrorHandling() throws Exception {
        TestingStateChangeUploader probe = new TestingStateChangeUploader();
        DirectScheduledExecutorService scheduler = new DirectScheduledExecutorService();
        try (BatchingStateChangeUploader store =
                new BatchingStateChangeUploader(
                        Integer.MAX_VALUE,
                        Integer.MAX_VALUE,
                        RetryPolicy.NONE,
                        probe,
                        scheduler,
                        new RetryingExecutor(5),
                        10_000)) {
            scheduler.shutdown();
            upload(store, getChanges(4));
        }
    }

    /** Closing the uploader must close the delegate and shut down both executors. */
    @Test
    public void testClose() throws Exception {
        TestingStateChangeUploader probe = new TestingStateChangeUploader();
        DirectScheduledExecutorService scheduler = new DirectScheduledExecutorService();
        DirectScheduledExecutorService retryScheduler = new DirectScheduledExecutorService();
        new BatchingStateChangeUploader(
                        0,
                        0,
                        RetryPolicy.NONE,
                        probe,
                        scheduler,
                        new RetryingExecutor(retryScheduler),
                        10_000)
                .close();
        assertTrue(probe.isClosed());
        assertTrue(scheduler.isShutdown());
        assertTrue(retryScheduler.isShutdown());
    }

    // Builds a single change set containing one random payload of the given size (bytes).
    private List<StateChangeSet> getChanges(int size) {
        byte[] change = new byte[size];
        random.nextBytes(change);
        return singletonList(
                new StateChangeSet(
                        UUID.randomUUID(),
                        SequenceNumber.of(0),
                        singletonList(new StateChange(0, change))));
    }

    // Convenience overload running the test body against a direct (same-thread) scheduler.
    private static void withStore(
            int delayMs,
            int sizeThreshold,
            BiConsumerWithException<
                            BatchingStateChangeUploader, TestingStateChangeUploader, Exception>
                    test)
            throws Exception {
        withStore(delayMs, sizeThreshold, new DirectScheduledExecutorService(), test);
    }

    // Creates a store wired to a probe uploader, runs the test body, and closes the store.
    private static void withStore(
            int delayMs,
            int sizeThreshold,
            ScheduledExecutorService scheduler,
            BiConsumerWithException<
                            BatchingStateChangeUploader, TestingStateChangeUploader, Exception>
                    test)
            throws Exception {
        TestingStateChangeUploader probe = new TestingStateChangeUploader();
        try (BatchingStateChangeUploader store =
                new BatchingStateChangeUploader(
                        delayMs,
                        sizeThreshold,
                        RetryPolicy.NONE,
                        probe,
                        scheduler,
                        new RetryingExecutor(new DirectScheduledExecutorService()),
                        10_000)) {
            test.accept(store, probe);
        }
    }

    // Asserts the probe received exactly the concatenation of the expected batches, in order.
    @SafeVarargs
    private final void assertSaved(
            TestingStateChangeUploader probe, List<StateChangeSet>... expected) {
        assertEquals(
                Arrays.stream(expected).flatMap(Collection::stream).collect(Collectors.toList()),
                new ArrayList<>(probe.getUploaded()));
    }
}
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.exoplayer.ext.vp9;

import com.google.android.exoplayer.CodecCounters;
import com.google.android.exoplayer.ExoPlaybackException;
import com.google.android.exoplayer.ExoPlayer;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.MediaFormatHolder;
import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.SampleSourceTrackRenderer;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.ext.vp9.VpxDecoderWrapper.InputBuffer;
import com.google.android.exoplayer.ext.vp9.VpxDecoderWrapper.OutputBuffer;
import com.google.android.exoplayer.util.MimeTypes;

import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.os.Handler;
import android.os.SystemClock;
import android.view.Surface;

/**
 * Decodes and renders video using the native VP9 decoder.
 */
public final class LibvpxVideoTrackRenderer extends SampleSourceTrackRenderer {

  /**
   * Interface definition for a callback to be notified of {@link LibvpxVideoTrackRenderer} events.
   */
  public interface EventListener {

    /**
     * Invoked to report the number of frames dropped by the renderer. Dropped frames are reported
     * whenever the renderer is stopped having dropped frames, and optionally, whenever the count
     * reaches a specified threshold whilst the renderer is started.
     *
     * @param count The number of dropped frames.
     * @param elapsed The duration in milliseconds over which the frames were dropped. This
     *     duration is timed from when the renderer was started or from when dropped frames were
     *     last reported (whichever was more recent), and not from when the first of the reported
     *     drops occurred.
     */
    void onDroppedFrames(int count, long elapsed);

    /**
     * Invoked each time there's a change in the size of the video being rendered.
     *
     * @param width The video width in pixels.
     * @param height The video height in pixels.
     */
    void onVideoSizeChanged(int width, int height);

    /**
     * Invoked when a frame is rendered to a surface for the first time following that surface
     * having been set as the target for the renderer.
     *
     * @param surface The surface to which a first frame has been rendered.
     */
    void onDrawnToSurface(Surface surface);

    /**
     * Invoked when one of the following happens: libvpx initialization failure, decoder error,
     * renderer error.
     *
     * @param e The corresponding exception.
     */
    void onDecoderError(VpxDecoderException e);

  }

  /**
   * The type of a message that can be passed to an instance of this class via
   * {@link ExoPlayer#sendMessage} or {@link ExoPlayer#blockingSendMessage}. The message object
   * should be the target {@link Surface}, or null.
   */
  public static final int MSG_SET_SURFACE = 1;
  // Message type for targeting a VpxVideoSurfaceView (YUV output) instead of a plain Surface.
  public static final int MSG_SET_VPX_SURFACE_VIEW = 2;

  public final CodecCounters codecCounters = new CodecCounters();

  private final boolean scaleToFit;
  private final Handler eventHandler;
  private final EventListener eventListener;
  private final int maxDroppedFrameCountToNotify;
  private final MediaFormatHolder formatHolder;

  private MediaFormat format;
  private VpxDecoderWrapper decoder;
  private InputBuffer inputBuffer;
  private OutputBuffer outputBuffer;
  // Scratch bitmap reused across frames for RGB rendering; recreated on size change.
  private Bitmap bitmap;
  private boolean drawnToSurface;
  private boolean renderedFirstFrame;
  private Surface surface;
  private VpxVideoSurfaceView vpxVideoSurfaceView;
  private int outputMode;

  private boolean inputStreamEnded;
  private boolean outputStreamEnded;
  private boolean sourceIsReady;

  // Last notified video dimensions; -1 means no size has been reported yet.
  private int previousWidth;
  private int previousHeight;

  private int droppedFrameCount;
  private long droppedFrameAccumulationStartTimeMs;

  /**
   * @param source The upstream source from which the renderer obtains samples.
   * @param scaleToFit Boolean that indicates if video frames should be scaled to fit when
   *     rendering.
   */
  public LibvpxVideoTrackRenderer(SampleSource source, boolean scaleToFit) {
    this(source, scaleToFit, null, null, 0);
  }

  /**
   * @param source The upstream source from which the renderer obtains samples.
   * @param scaleToFit Boolean that indicates if video frames should be scaled to fit when
   *     rendering.
   * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
   *     null if delivery of events is not required.
   * @param eventListener A listener of events. May be null if delivery of events is not required.
   * @param maxDroppedFrameCountToNotify The maximum number of frames that can be dropped between
   *     invocations of {@link EventListener#onDroppedFrames(int, long)}.
   */
  public LibvpxVideoTrackRenderer(SampleSource source, boolean scaleToFit,
      Handler eventHandler, EventListener eventListener, int maxDroppedFrameCountToNotify) {
    super(source);
    this.scaleToFit = scaleToFit;
    this.eventHandler = eventHandler;
    this.eventListener = eventListener;
    this.maxDroppedFrameCountToNotify = maxDroppedFrameCountToNotify;
    previousWidth = -1;
    previousHeight = -1;
    formatHolder = new MediaFormatHolder();
    outputMode = VpxDecoder.OUTPUT_MODE_UNKNOWN;
  }

  /**
   * Returns whether the underlying libvpx library is available.
   */
  public static boolean isLibvpxAvailable() {
    return VpxDecoder.isLibvpxAvailable();
  }

  /**
   * Returns the version of the underlying libvpx library if available, otherwise {@code null}.
   */
  public static String getLibvpxVersion() {
    return isLibvpxAvailable() ? VpxDecoder.getLibvpxVersion() : null;
  }

  @Override
  protected boolean handlesTrack(MediaFormat mediaFormat) {
    // Only VP9 tracks are handled by this renderer.
    return MimeTypes.VIDEO_VP9.equalsIgnoreCase(mediaFormat.mimeType);
  }

  @Override
  protected void doSomeWork(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
    if (outputStreamEnded) {
      return;
    }
    sourceIsReady = continueBufferingSource(positionUs);
    checkForDiscontinuity(positionUs);
    // Try and read a format if we don't have one already.
    if (format == null && !readFormat(positionUs)) {
      // We can't make progress without one.
      return;
    }
    // If we don't have a decoder yet, we need to instantiate one.
    if (decoder == null) {
      decoder = new VpxDecoderWrapper(outputMode);
      decoder.start();
    }
    // Rendering loop: drain one output buffer, then feed as many input buffers as possible.
    try {
      processOutputBuffer(positionUs, elapsedRealtimeUs);
      while (feedInputBuffer(positionUs)) {}
    } catch (VpxDecoderException e) {
      notifyDecoderError(e);
      throw new ExoPlaybackException(e);
    }
  }

  // Dequeues (if needed) and either drops, renders or defers the next decoded frame,
  // based on how far its timestamp is from the current playback position.
  private void processOutputBuffer(long positionUs, long elapsedRealtimeUs)
      throws VpxDecoderException {
    if (outputStreamEnded) {
      return;
    }

    if (outputBuffer == null) {
      outputBuffer = decoder.dequeueOutputBuffer();
      if (outputBuffer == null) {
        return;
      }
    }

    // NOTE(review): compares flags with == rather than testing the EOS bit — assumes an
    // end-of-stream buffer carries no other flags; confirm against VpxDecoderWrapper.
    if (outputBuffer.flags == VpxDecoderWrapper.FLAG_END_OF_STREAM) {
      outputStreamEnded = true;
      releaseOutputBuffer();
      return;
    }

    // Account for time spent in this doSomeWork() call so far when computing lateness.
    long elapsedSinceStartOfLoop = SystemClock.elapsedRealtime() * 1000 - elapsedRealtimeUs;
    long timeToRenderUs = outputBuffer.timestampUs - positionUs - elapsedSinceStartOfLoop;
    if (timeToRenderUs < -30000 || outputBuffer.timestampUs < positionUs) {
      // Drop frame if we are too late.
      codecCounters.droppedOutputBufferCount++;
      droppedFrameCount++;
      if (droppedFrameCount == maxDroppedFrameCountToNotify) {
        notifyAndResetDroppedFrameCount();
      }
      releaseOutputBuffer();
      return;
    }

    // If we have not rendered any frame so far (either initially or immediately following a seek),
    // render one frame irrespective of the state.
    if (!renderedFirstFrame) {
      renderBuffer();
      renderedFirstFrame = true;
      return;
    }

    // Do nothing if we are not playing or if we are too early to render the next frame.
    if (getState() != TrackRenderer.STATE_STARTED || timeToRenderUs > 30000) {
      return;
    }

    if (timeToRenderUs > 11000) {
      try {
        // Subtracting 10000 rather than 11000 ensures that the sleep time will be at least 1ms.
        Thread.sleep((timeToRenderUs - 10000) / 1000);
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
      }
    }
    renderBuffer();
  }

  // Renders the current output buffer to the active target (RGB surface or YUV surface view),
  // then releases the buffer back to the decoder.
  private void renderBuffer() throws VpxDecoderException {
    codecCounters.renderedOutputBufferCount++;
    notifyIfVideoSizeChanged(outputBuffer);
    if (outputBuffer.mode == VpxDecoder.OUTPUT_MODE_RGB && surface != null) {
      renderRgbFrame(outputBuffer, scaleToFit);
      if (!drawnToSurface) {
        drawnToSurface = true;
        notifyDrawnToSurface(surface);
      }
    } else if (outputBuffer.mode == VpxDecoder.OUTPUT_MODE_YUV && vpxVideoSurfaceView != null) {
      vpxVideoSurfaceView.renderFrame(outputBuffer);
    }
    releaseOutputBuffer();
  }

  // Returns the current output buffer to the decoder's pool.
  private void releaseOutputBuffer() throws VpxDecoderException {
    decoder.releaseOutputBuffer(outputBuffer);
    outputBuffer = null;
  }

  // Copies the decoded RGB pixels into a (reused) bitmap and draws it onto the surface's canvas,
  // optionally scaling to fill the canvas.
  private void renderRgbFrame(OutputBuffer outputBuffer, boolean scale) {
    if (bitmap == null || bitmap.getWidth() != outputBuffer.width
        || bitmap.getHeight() != outputBuffer.height) {
      bitmap = Bitmap.createBitmap(outputBuffer.width, outputBuffer.height, Bitmap.Config.RGB_565);
    }
    bitmap.copyPixelsFromBuffer(outputBuffer.data);
    Canvas canvas = surface.lockCanvas(null);
    if (scale) {
      canvas.scale(((float) canvas.getWidth()) / outputBuffer.width,
          ((float) canvas.getHeight()) / outputBuffer.height);
    }
    canvas.drawBitmap(bitmap, 0, 0, null);
    surface.unlockCanvasAndPost(canvas);
  }

  // Reads one sample (or format/discontinuity/end-of-stream signal) from the source into an
  // input buffer and queues it to the decoder. Returns true if another call may make progress.
  private boolean feedInputBuffer(long positionUs) throws VpxDecoderException {
    if (inputStreamEnded) {
      return false;
    }

    if (inputBuffer == null) {
      inputBuffer = decoder.dequeueInputBuffer();
      if (inputBuffer == null) {
        return false;
      }
    }

    int result = readSource(positionUs, formatHolder, inputBuffer.sampleHolder, false);
    if (result == SampleSource.NOTHING_READ) {
      return false;
    }
    if (result == SampleSource.DISCONTINUITY_READ) {
      flushDecoder();
      return true;
    }
    if (result == SampleSource.FORMAT_READ) {
      format = formatHolder.format;
      return true;
    }
    if (result == SampleSource.END_OF_STREAM) {
      inputBuffer.flags = VpxDecoderWrapper.FLAG_END_OF_STREAM;
      decoder.queueInputBuffer(inputBuffer);
      inputBuffer = null;
      inputStreamEnded = true;
      return false;
    }
    inputBuffer.width = format.width;
    inputBuffer.height = format.height;
    decoder.queueInputBuffer(inputBuffer);
    inputBuffer = null;
    return true;
  }

  // Performs a metadata-only read to detect discontinuities, flushing the decoder if one occurred.
  private void checkForDiscontinuity(long positionUs) {
    if (decoder == null) {
      return;
    }
    int result = readSource(positionUs, formatHolder, null, true);
    if (result == SampleSource.DISCONTINUITY_READ) {
      flushDecoder();
    }
  }

  // Discards any in-flight buffers and resets the decoder's internal state.
  private void flushDecoder() {
    inputBuffer = null;
    outputBuffer = null;
    decoder.flush();
  }

  @Override
  protected boolean isEnded() {
    return outputStreamEnded;
  }

  @Override
  protected boolean isReady() {
    return format != null && (sourceIsReady || outputBuffer != null);
  }

  @Override
  protected void seekTo(long positionUs) throws ExoPlaybackException {
    super.seekTo(positionUs);
    seekToInternal();
  }

  @Override
  protected void onEnabled(int track, long positionUs, boolean joining)
      throws ExoPlaybackException {
    super.onEnabled(track, positionUs, joining);
    seekToInternal();
  }

  // Resets per-position state shared by seekTo() and onEnabled().
  private void seekToInternal() {
    sourceIsReady = false;
    inputStreamEnded = false;
    outputStreamEnded = false;
    renderedFirstFrame = false;
  }

  @Override
  protected void onStarted() {
    // Start a fresh dropped-frame accounting window.
    droppedFrameCount = 0;
    droppedFrameAccumulationStartTimeMs = SystemClock.elapsedRealtime();
  }

  @Override
  protected void onStopped() {
    notifyAndResetDroppedFrameCount();
  }

  @Override
  protected void onDisabled() throws ExoPlaybackException {
    inputBuffer = null;
    outputBuffer = null;
    format = null;
    try {
      if (decoder != null) {
        decoder.release();
        decoder = null;
      }
    } finally {
      super.onDisabled();
    }
  }

  // Attempts to read the track format from the source. Returns true if a format was read.
  private boolean readFormat(long positionUs) {
    int result = readSource(positionUs, formatHolder, null, false);
    if (result == SampleSource.FORMAT_READ) {
      format = formatHolder.format;
      return true;
    }
    return false;
  }

  @Override
  public void handleMessage(int messageType, Object message) throws ExoPlaybackException {
    if (messageType == MSG_SET_SURFACE) {
      setSurface((Surface) message);
    } else if (messageType == MSG_SET_VPX_SURFACE_VIEW) {
      setVpxVideoSurfaceView((VpxVideoSurfaceView) message);
    } else {
      super.handleMessage(messageType, message);
    }
  }

  // Targets a plain Surface (RGB output); clears any surface view target.
  private void setSurface(Surface surface) {
    if (this.surface == surface) {
      return;
    }
    this.surface = surface;
    vpxVideoSurfaceView = null;
    outputMode = (surface != null) ? VpxDecoder.OUTPUT_MODE_RGB : VpxDecoder.OUTPUT_MODE_UNKNOWN;
    if (decoder != null) {
      decoder.setOutputMode(outputMode);
    }
    drawnToSurface = false;
  }

  // Targets a VpxVideoSurfaceView (YUV output); clears any plain surface target.
  private void setVpxVideoSurfaceView(VpxVideoSurfaceView vpxVideoSurfaceView) {
    if (this.vpxVideoSurfaceView == vpxVideoSurfaceView) {
      return;
    }
    this.vpxVideoSurfaceView = vpxVideoSurfaceView;
    surface = null;
    outputMode = (vpxVideoSurfaceView != null)
        ? VpxDecoder.OUTPUT_MODE_YUV : VpxDecoder.OUTPUT_MODE_UNKNOWN;
    if (decoder != null) {
      decoder.setOutputMode(outputMode);
    }
  }

  // Posts an onVideoSizeChanged event when the decoded frame size differs from the last report.
  private void notifyIfVideoSizeChanged(final OutputBuffer outputBuffer) {
    if (previousWidth == -1 || previousHeight == -1
        || previousWidth != outputBuffer.width || previousHeight != outputBuffer.height) {
      previousWidth = outputBuffer.width;
      previousHeight = outputBuffer.height;
      if (eventHandler != null && eventListener != null) {
        eventHandler.post(new Runnable()  {
          @Override
          public void run() {
            eventListener.onVideoSizeChanged(outputBuffer.width, outputBuffer.height);
          }
        });
      }
    }
  }

  // Posts an onDroppedFrames event for the current accounting window, then resets the window.
  private void notifyAndResetDroppedFrameCount() {
    if (eventHandler != null && eventListener != null && droppedFrameCount > 0) {
      long now = SystemClock.elapsedRealtime();
      // Snapshot into final locals before resetting, so the posted Runnable sees stable values.
      final int countToNotify = droppedFrameCount;
      final long elapsedToNotify = now - droppedFrameAccumulationStartTimeMs;
      droppedFrameCount = 0;
      droppedFrameAccumulationStartTimeMs = now;
      eventHandler.post(new Runnable()  {
        @Override
        public void run() {
          eventListener.onDroppedFrames(countToNotify, elapsedToNotify);
        }
      });
    }
  }

  // Posts an onDrawnToSurface event on the event handler's thread.
  private void notifyDrawnToSurface(final Surface surface) {
    if (eventHandler != null && eventListener != null) {
      eventHandler.post(new Runnable()  {
        @Override
        public void run() {
          eventListener.onDrawnToSurface(surface);
        }
      });
    }
  }

  // Posts an onDecoderError event on the event handler's thread.
  private void notifyDecoderError(final VpxDecoderException e) {
    if (eventHandler != null && eventListener != null) {
      eventHandler.post(new Runnable()  {
        @Override
        public void run() {
          eventListener.onDecoderError(e);
        }
      });
    }
  }

}
package testSuite;

import java.io.File;

import main.Transactions;

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

/**
 * Performs Condition and Loop Coverage on {@link Transactions#findNextLogout(int)}
 *
 * @author Ryan Crawford
 * @author Khalil Fazal
 * @author Joseph Heron
 * @author Carly Marshall
 */
public class CondLoopTest {

    /**
     * The location of the test files
     */
    private static String tests = "./tests/";

    /**
     * The location of the global user account file and ticket file
     */
    private static String global = tests + "global/";

    /**
     * The input user accounts file
     */
    private static String uao = global + "glob_account.inp";

    /**
     * The input ticket file
     */
    private static String ato = global + "glob_available_tickets.inp";

    /**
     * A sample new User Account file name
     */
    private static String uaoSample = global + "global.usr";

    /**
     * A sample new Tickets file name
     */
    private static String atoSample = global + "global.tic";

    /**
     * The location of the test daily transaction files for loop coverage
     */
    private static String etfs = tests + "loopcoverage/";

    /**
     * A daily transaction file that will never cover the loop
     */
    private static String zeroEtf = etfs + "zero.etf";

    /**
     * A daily transaction file that will cover the loop many times
     */
    private static String manyEtf = etfs + "many.etf";

    /**
     * A daily transaction file that will cover the loop once
     */
    private static String onceEtf = etfs + "one.etf";

    /**
     * A daily transaction file that will cover the loop twice
     */
    private static String twiceEtf = etfs + "twice.etf";

    /**
     * The location of the test daily transaction files for condition coverage
     */
    private static String ifs = tests + "condcoverage/";

    /**
     * A daily transaction file that will cause the if statement to be true
     */
    private static String trueEtf = ifs + "true.etf";

    /**
     * A daily transaction file that will cause the if statement to be false
     */
    private static String falseEtf = ifs + "false.etf";

    /**
     * Instance of a transaction that will never execute the loop
     */
    private Transactions transactionZero;

    /**
     * Instance of a transaction that will execute the loop once
     */
    private Transactions transactionOnce;

    /**
     * Instance of a transaction that will execute the loop twice
     */
    private Transactions transactionTwice;

    /**
     * Instance of a transaction that will execute the loop many times
     */
    private Transactions transactionMany;

    /**
     * Instance of a transaction that will cause the if statement to be true
     */
    private Transactions transactionTrue;

    /**
     * Instance of a transaction that will cause the if statement to be false
     */
    private Transactions transactionFalse;

    /**
     * A sample new user accounts file
     */
    private File uaoSampleFile;

    /**
     * A sample new ticket file
     */
    private File atoSampleFile;

    /**
     * Set up the transaction files and the output files
     */
    @Before
    public void setUp() {
        // Loop coverage
        this.transactionZero = new Transactions(zeroEtf, uao, ato, uaoSample, atoSample);
        this.transactionOnce = new Transactions(onceEtf, uao, ato, uaoSample, atoSample);
        this.transactionTwice = new Transactions(twiceEtf, uao, ato, uaoSample, atoSample);
        this.transactionMany = new Transactions(manyEtf, uao, ato, uaoSample, atoSample);

        // Condition coverage
        this.transactionTrue = new Transactions(trueEtf, uao, ato, uaoSample, atoSample);
        this.transactionFalse = new Transactions(falseEtf, uao, ato, uaoSample, atoSample);

        this.uaoSampleFile = new File(uaoSample);
        this.atoSampleFile = new File(atoSample);
    }

    /**
     * Delete the files created by {@link Transactions}
     */
    @After
    public void tearDown() {
        this.uaoSampleFile.delete();
        this.atoSampleFile.delete();
    }

    /**
     * A test that runs the loop zero times
     */
    @Test
    public void loopCoverageZero() {
        this.transactionZero.initTransactionList();
        Assert.assertEquals(-1, this.transactionZero.findNextLogout(0));
    }

    /**
     * A test that runs the loop once
     */
    @Test
    public void loopCoverageOnce() {
        this.transactionOnce.initTransactionList();
        Assert.assertEquals(0, this.transactionOnce.findNextLogout(0));
    }

    /**
     * A test that runs the loop twice
     */
    @Test
    public void loopCoverageTwice() {
        this.transactionTwice.initTransactionList();
        Assert.assertEquals(1, this.transactionTwice.findNextLogout(0));
    }

    /**
     * A test that runs the loop many times
     */
    @Test
    public void loopCoverageMany() {
        this.transactionMany.initTransactionList();
        Assert.assertEquals(2, this.transactionMany.findNextLogout(0));
    }

    /**
     * A test that causes the if statement to be true
     */
    @Test
    public void ifCoverageTrue() {
        this.transactionTrue.initTransactionList();
        Assert.assertEquals(0, this.transactionTrue.findNextLogout(0));
    }

    /**
     * A test that causes the if statement to be false
     */
    @Test
    public void ifCoverageFalse() {
        this.transactionFalse.initTransactionList();
        // Consistency fix: was "0 - 1"; use -1 to match loopCoverageZero's expected sentinel.
        Assert.assertEquals(-1, this.transactionFalse.findNextLogout(0));
    }
}
/*
 * Copyright 2016 John Grosh (jagrosh)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jagrosh.jmusicbot;

import com.jagrosh.jmusicbot.utils.FormatUtil;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.LinkedList;
import java.util.List;
import java.util.Scanner;
import javax.swing.JOptionPane;

/**
 * Loads the bot configuration from {@code config.txt} (simple {@code key=value} lines),
 * interactively prompting for any missing required values (token, owner ID) and writing
 * them back to the file.
 *
 * @author John Grosh (jagrosh)
 */
public class Config {
    // When true, all user interaction happens on the console instead of Swing dialogs.
    private boolean nogui;
    private String prefix;
    private String altprefix;
    private String token;
    private String owner;
    private String success;
    private String warning;
    private String error;
    private String game;
    private String help;
    private boolean stay;
    private boolean dbots;
    private boolean songingame;

    /**
     * Reads {@code config.txt}, prompting for (and persisting) the token and owner ID
     * when they are missing or invalid. Exits the process if the user provides none.
     *
     * @param nogui whether to run in console-only mode (no Swing dialogs)
     */
    public Config(boolean nogui)
    {
        this.nogui = nogui;
        List<String> lines;
        try {
            lines = Files.readAllLines(Paths.get("config.txt"));
            System.out.println("[INFO] Loading config: "+Paths.get("config.txt").toFile().getAbsolutePath());
            for(String line: lines)
            {
                // Each line is "key=value"; keys are case-insensitive, values keep their case.
                String[] parts = line.split("=",2);
                String key = parts[0].trim().toLowerCase();
                String value = parts.length>1 ? parts[1].trim() : null;
                switch(key)
                {
                    case "token":
                        token = value;
                        break;
                    case "prefix":
                        prefix = value;
                        break;
                    case "altprefix":
                        altprefix = value;
                        break;
                    case "owner":
                        owner = value;
                        break;
                    case "success":
                        success = value;
                        break;
                    case "warning":
                        warning = value;
                        break;
                    case "error":
                        error = value;
                        break;
                    case "game":
                        game = value;
                        break;
                    case "help":
                        help = value;
                        break;
                    case "noprogressintopic":
                        FormatUtil.NO_PROGRESS_BAR_IN_TOPIC = "true".equalsIgnoreCase(value);
                        break;
                    case "songinstatus":
                        songingame = "true".equalsIgnoreCase(value);
                        break;
                    case "stayinchannel":
                        stay = "true".equalsIgnoreCase(value);
                        break;
                    case "dbots":
                        // Only enabled for this specific bot account id.
                        dbots = "110373943822540800".equals(value);
                        break;
                }
            }
        } catch (IOException ex) {
            // Missing config file is not fatal; start with an empty config and prompt below.
            alert("'config.txt' was not found!");
            lines = new LinkedList<>();
        }
        boolean write = false;
        if(token==null || token.isEmpty())
        {
            token = prompt("Please provide a bot token."
                    + "\nInstructions for obtaining a token can be found here:"
                    + "\nhttps://github.com/jagrosh/MusicBot/wiki/Getting-a-Bot-Token."
                    + "\nBot Token: ");
            if(token==null)
            {
                alert("No token provided! Exiting.");
                System.exit(0);
            }
            else
            {
                lines.add("token="+token);
                write = true;
            }
        }
        // Discord user ids are 17-20 digit snowflakes.
        if(owner==null || !owner.matches("\\d{17,20}"))
        {
            owner = prompt("Owner ID was missing, or the provided owner ID is not valid."
                    + "\nPlease provide the User ID of the bot's owner."
                    + "\nInstructions for obtaining your User ID can be found here:"
                    + "\nhttps://github.com/jagrosh/MusicBot/wiki/Finding-Your-User-ID"
                    + "\nOwner User ID: ");
            if(owner==null || !owner.matches("\\d{17,20}"))
            {
                alert("Invalid User ID! Exiting.");
                System.exit(0);
            }
            else
            {
                lines.add("owner="+owner);
                write = true;
            }
        }
        if(write)
        {
            // Persist any newly-provided values back to config.txt (CRLF line endings).
            StringBuilder builder = new StringBuilder();
            lines.stream().forEach(s -> builder.append(s).append("\r\n"));
            try {
                Files.write(Paths.get("config.txt"), builder.toString().trim().getBytes());
            } catch(IOException ex) {
                alert("Failed to write new config options to config.txt: "+ex
                    + "\nPlease make sure that the files are not on your desktop or some other restricted area.");
            }
        }
    }

    public String getPrefix()
    {
        return prefix;
    }

    public String getAltPrefix()
    {
        return altprefix;
    }

    public String getToken()
    {
        return token;
    }

    public String getOwnerId()
    {
        return owner;
    }

    // Getters below fall back to default emoji / text when the setting was not configured.
    public String getSuccess()
    {
        return success==null ? "\uD83C\uDFB6" : success;
    }

    public String getWarning()
    {
        return warning==null ? "\uD83D\uDCA1" : warning;
    }

    public String getError()
    {
        return error==null ? "\uD83D\uDEAB" : error;
    }

    public String getGame()
    {
        return game;
    }

    public String getHelp()
    {
        return help==null ? "help" : help;
    }

    public boolean getNoGui()
    {
        return nogui;
    }

    public boolean getStay()
    {
        return stay;
    }

    public boolean getSongInStatus()
    {
        return songingame;
    }

    public boolean getDBots()
    {
        return dbots;
    }

    // Shows a warning to the user; falls back to console output if the GUI is unavailable.
    private void alert(String message)
    {
        if(nogui)
            System.out.println("[WARNING] "+message);
        else
        {
            try {
                JOptionPane.showMessageDialog(null, message, "JMusicBot", JOptionPane.WARNING_MESSAGE);
            } catch(Exception e) {
                // Headless environment: permanently switch to console mode and retry.
                nogui = true;
                alert("Switching to nogui mode. You can manually start in nogui mode by including the -nogui flag.");
                alert(message);
            }
        }
    }

    // Asks the user for a value; falls back to console input if the GUI is unavailable.
    private String prompt(String content)
    {
        if(nogui)
        {
            Scanner scanner = new Scanner(System.in);
            System.out.println(content);
            return scanner.next();
        }
        else
        {
            try {
                return JOptionPane.showInputDialog(null, content, "JMusicBot", JOptionPane.WARNING_MESSAGE);
            } catch(Exception e) {
                // Headless environment: permanently switch to console mode and retry.
                nogui = true;
                alert("Switching to nogui mode. You can manually start in nogui mode by including the -nogui flag.");
                return prompt(content);
            }
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.linq4j.test; import org.apache.calcite.linq4j.tree.Primitive; import org.junit.Test; import java.util.ArrayList; import java.util.List; import static org.hamcrest.CoreMatchers.equalTo; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; /** * Unit test for {@link Primitive}. 
*/ public class PrimitiveTest { @Test public void testIsAssignableFrom() { assertTrue(Primitive.INT.assignableFrom(Primitive.BYTE)); assertTrue(Primitive.INT.assignableFrom(Primitive.SHORT)); assertTrue(Primitive.INT.assignableFrom(Primitive.CHAR)); assertTrue(Primitive.INT.assignableFrom(Primitive.INT)); assertTrue(Primitive.INT.assignableFrom(Primitive.SHORT)); assertFalse(Primitive.INT.assignableFrom(Primitive.LONG)); assertFalse(Primitive.INT.assignableFrom(Primitive.FLOAT)); assertFalse(Primitive.INT.assignableFrom(Primitive.DOUBLE)); assertTrue(Primitive.LONG.assignableFrom(Primitive.BYTE)); assertTrue(Primitive.LONG.assignableFrom(Primitive.SHORT)); assertTrue(Primitive.LONG.assignableFrom(Primitive.CHAR)); assertTrue(Primitive.LONG.assignableFrom(Primitive.INT)); assertTrue(Primitive.LONG.assignableFrom(Primitive.LONG)); assertFalse(Primitive.LONG.assignableFrom(Primitive.FLOAT)); assertFalse(Primitive.LONG.assignableFrom(Primitive.DOUBLE)); // SHORT and CHAR cannot be assigned to each other assertTrue(Primitive.SHORT.assignableFrom(Primitive.BYTE)); assertTrue(Primitive.SHORT.assignableFrom(Primitive.SHORT)); assertFalse(Primitive.SHORT.assignableFrom(Primitive.CHAR)); assertFalse(Primitive.SHORT.assignableFrom(Primitive.INT)); assertFalse(Primitive.SHORT.assignableFrom(Primitive.LONG)); assertFalse(Primitive.SHORT.assignableFrom(Primitive.FLOAT)); assertFalse(Primitive.SHORT.assignableFrom(Primitive.DOUBLE)); assertFalse(Primitive.CHAR.assignableFrom(Primitive.BYTE)); assertFalse(Primitive.CHAR.assignableFrom(Primitive.SHORT)); assertTrue(Primitive.CHAR.assignableFrom(Primitive.CHAR)); assertFalse(Primitive.CHAR.assignableFrom(Primitive.INT)); assertFalse(Primitive.CHAR.assignableFrom(Primitive.LONG)); assertFalse(Primitive.CHAR.assignableFrom(Primitive.FLOAT)); assertFalse(Primitive.CHAR.assignableFrom(Primitive.DOUBLE)); assertTrue(Primitive.DOUBLE.assignableFrom(Primitive.BYTE)); assertTrue(Primitive.DOUBLE.assignableFrom(Primitive.SHORT)); 
assertTrue(Primitive.DOUBLE.assignableFrom(Primitive.CHAR)); assertTrue(Primitive.DOUBLE.assignableFrom(Primitive.INT)); assertTrue(Primitive.DOUBLE.assignableFrom(Primitive.LONG)); assertTrue(Primitive.DOUBLE.assignableFrom(Primitive.FLOAT)); assertTrue(Primitive.DOUBLE.assignableFrom(Primitive.DOUBLE)); // cross-family assignments assertFalse(Primitive.BOOLEAN.assignableFrom(Primitive.INT)); assertFalse(Primitive.INT.assignableFrom(Primitive.BOOLEAN)); } @Test public void testBox() { assertEquals(String.class, Primitive.box(String.class)); assertEquals(Integer.class, Primitive.box(int.class)); assertEquals(Integer.class, Primitive.box(Integer.class)); assertEquals(boolean[].class, Primitive.box(boolean[].class)); } @Test public void testOfBox() { assertEquals(Primitive.INT, Primitive.ofBox(Integer.class)); assertNull(Primitive.ofBox(int.class)); assertNull(Primitive.ofBox(String.class)); assertNull(Primitive.ofBox(Integer[].class)); } @Test public void testOfBoxOr() { assertEquals(Primitive.INT, Primitive.ofBox(Integer.class)); assertNull(Primitive.ofBox(int.class)); assertNull(Primitive.ofBox(String.class)); assertNull(Primitive.ofBox(Integer[].class)); } /** Tests the {@link Primitive#number(Number)} method. 
*/ @Test public void testNumber() { Number number = Primitive.SHORT.number(Integer.valueOf(2)); assertTrue(number instanceof Short); assertEquals(2, number.shortValue()); number = Primitive.FLOAT.number(Integer.valueOf(2)); assertTrue(number instanceof Float); assertEquals(2.0d, number.doubleValue(), 0d); try { number = Primitive.INT.number(null); fail("expected exception, got " + number); } catch (NullPointerException e) { // ok } // not a number try { number = Primitive.CHAR.number(3); fail("expected exception, got " + number); } catch (AssertionError e) { // ok } // not a number try { number = Primitive.BOOLEAN.number(null); fail("expected exception, got " + number); } catch (AssertionError e) { // ok } } /** Test for * {@link Primitive#send(org.apache.calcite.linq4j.tree.Primitive.Source, org.apache.calcite.linq4j.tree.Primitive.Sink)}. */ @Test public void testSendSource() { final List<Object> list = new ArrayList<Object>(); for (Primitive primitive : Primitive.values()) { primitive.send( new Primitive.Source() { public boolean getBoolean() { list.add(boolean.class); return true; } public byte getByte() { list.add(byte.class); return 0; } public char getChar() { list.add(char.class); return 0; } public short getShort() { list.add(short.class); return 0; } public int getInt() { list.add(int.class); return 0; } public long getLong() { list.add(long.class); return 0; } public float getFloat() { list.add(float.class); return 0; } public double getDouble() { list.add(double.class); return 0; } public Object getObject() { list.add(Object.class); return 0; } }, new Primitive.Sink() { public void set(boolean v) { list.add(boolean.class); list.add(v); } public void set(byte v) { list.add(byte.class); list.add(v); } public void set(char v) { list.add(char.class); list.add(v); } public void set(short v) { list.add(short.class); list.add(v); } public void set(int v) { list.add(int.class); list.add(v); } public void set(long v) { list.add(long.class); list.add(v); } public 
void set(float v) { list.add(float.class); list.add(v); } public void set(double v) { list.add(double.class); list.add(v); } public void set(Object v) { list.add(Object.class); list.add(v); } }); } assertThat(list.toString(), equalTo("[boolean, boolean, true, " + "byte, byte, 0, " + "char, char, \u0000, " + "short, short, 0, " + "int, int, 0, " + "long, long, 0, " + "float, float, 0.0, " + "double, double, 0.0, " + "class java.lang.Object, class java.lang.Object, 0, " + "class java.lang.Object, class java.lang.Object, 0]")); } /** Test for {@link Primitive#permute(Object, int[])}. */ @Test public void testPermute() { char[] chars = {'a', 'b', 'c', 'd', 'e', 'f', 'g'}; int[] sources = {1, 2, 3, 4, 5, 6, 0}; final Object permute = Primitive.CHAR.permute(chars, sources); assertTrue(permute instanceof char[]); assertEquals("bcdefga", String.valueOf((char[]) permute)); } /** Test for {@link Primitive#arrayToString(Object)}. */ @Test public void testArrayToString() { char[] chars = {'a', 'b', 'c', 'd', 'e', 'f', 'g'}; assertEquals("[a, b, c, d, e, f, g]", Primitive.CHAR.arrayToString(chars)); } /** Test for {@link Primitive#sortArray(Object)}. 
*/ @Test public void testArraySort() { char[] chars = {'m', 'o', 'n', 'o', 'l', 'a', 'k', 'e'}; Primitive.CHAR.sortArray(chars); assertEquals("[a, e, k, l, m, n, o, o]", Primitive.CHAR.arrayToString(chars)); // mixed true and false boolean[] booleans0 = {true, false, true, true, false}; Primitive.BOOLEAN.sortArray(booleans0); assertEquals("[false, false, true, true, true]", Primitive.BOOLEAN.arrayToString(booleans0)); // all false boolean[] booleans1 = {false, false, false, false, false}; Primitive.BOOLEAN.sortArray(booleans1); assertEquals("[false, false, false, false, false]", Primitive.BOOLEAN.arrayToString(booleans1)); // all true boolean[] booleans2 = {true, true, true, true, true}; Primitive.BOOLEAN.sortArray(booleans2); assertEquals("[true, true, true, true, true]", Primitive.BOOLEAN.arrayToString(booleans2)); // empty boolean[] booleans3 = {}; Primitive.BOOLEAN.sortArray(booleans3); assertEquals("[]", Primitive.BOOLEAN.arrayToString(booleans3)); // ranges specified boolean[] booleans4 = {true, true, false, false, true, false, false}; Primitive.BOOLEAN.sortArray(booleans4, 1, 6); assertEquals("[true, false, false, false, true, true, false]", Primitive.BOOLEAN.arrayToString(booleans4)); } } // End PrimitiveTest.java
/*
 * Copyright [2006] PurePerfect.com
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 *
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied.
 *
 * See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.pureperfect.purview.validators.text;

import junit.framework.TestCase;

import com.pureperfect.purview.Purview;
import com.pureperfect.purview.ValidationException;

/**
 * Unit test for {@link Default} annotation.
 *
 * @author J. Chris Folsom
 * @version 1.3
 * @since 1.3
 */
public class DefaultTest extends TestCase
{
	/*
	 * Stub test class: getter/setter pair with a default value on the getter.
	 */
	public class MethodStub
	{
		private String value;

		@Default("foo")
		public String getValue()
		{
			return this.value;
		}

		public void setValue(String value)
		{
			this.value = value;
		}
	}

	/*
	 * Stub test class: annotated getter with no corresponding setter.
	 */
	public class MethodStubMissingSetter
	{
		private String value;

		@Default("foo")
		public String getValue()
		{
			return this.value;
		}
	}

	/*
	 * Stub test class: annotated field.
	 */
	public class FieldStub
	{
		@Default("bar")
		public String value;
	}

	/*
	 * Stub test class: annotated getter whose type is not String.
	 */
	public class MethodValueNotString
	{
		private Integer value;

		@Default("uhoh")
		public Integer getValue()
		{
			return this.value;
		}

		public void setValue(Integer value)
		{
			this.value = value;
		}
	}

	/*
	 * Stub test class: annotated field whose type is not String.
	 */
	public class FieldValueNotString
	{
		@Default("foo")
		public Integer value;
	}

	/*
	 * Check for expected behavior when the annotation is attached to a field
	 * and the field is already populated: the existing value must be kept.
	 */
	public void testFieldValueIsPopulated() throws Exception
	{
		FieldStub stub = new FieldStub();

		stub.value = "test";

		Purview.validateFields(stub);

		assertEquals("test", stub.value);
	}

	/*
	 * Check for expected behavior when the annotation is attached to a field
	 * and the field is not populated: the default must be applied.
	 */
	public void testFieldValueIsNotPopulated() throws Exception
	{
		FieldStub stub = new FieldStub();

		Purview.validateFields(stub);

		assertEquals("bar", stub.value);
	}

	/*
	 * Check for expected behavior when the annotation is attached to a getter
	 * and the field is populated: the existing value must be kept.
	 */
	public void testGetterMethodValueIsPopulated() throws Exception
	{
		MethodStub stub = new MethodStub();

		stub.setValue("bar");

		Purview.validateMethods(stub);

		assertEquals("bar", stub.getValue());
	}

	/*
	 * Check for expected behavior when the annotation is attached to a getter
	 * and the field is not populated: the default must be applied.
	 */
	public void testGetterMethodValueIsNotPopulated() throws Exception
	{
		MethodStub stub = new MethodStub();

		Purview.validateMethods(stub);

		assertEquals("foo", stub.getValue());
	}

	/*
	 * Check for expected behavior when the annotation is attached to a getter
	 * without a corresponding setter.
	 */
	public void testGetterMethodDoesNotHaveCorrespondingSetter()
			throws Exception
	{
		MethodStubMissingSetter stub = new MethodStubMissingSetter();

		try
		{
			Purview.validateMethods(stub);
			fail("Should have thrown validation exception.");
		}
		catch (ValidationException e)
		{
			assertTrue(e.getCause() instanceof NoSuchMethodException);
		}
	}

	/*
	 * Check for expected behavior when the annotation is attached to a method
	 * that is not a string.
	 */
	public void testCorrespondingMethodValueIsNotAString()
	{
		MethodValueNotString stub = new MethodValueNotString();

		try
		{
			Purview.validateMethods(stub);
			fail("Should have thrown validation exception.");
		}
		catch (ValidationException e)
		{
			assertTrue(e.getCause() instanceof NoSuchMethodException);
		}
	}

	/*
	 * Check for expected behavior when the annotation is attached to a field
	 * that is not a string.
	 */
	public void testCorrespondingFieldValueIsNotAString()
	{
		FieldValueNotString stub = new FieldValueNotString();

		try
		{
			Purview.validateFields(stub);
			// FIX: previously this test passed silently when no exception was
			// thrown; fail() makes it consistent with the sibling negative
			// tests above.
			fail("Should have thrown validation exception.");
		}
		catch (ValidationException e)
		{
			assertTrue(e.getCause() instanceof IllegalArgumentException);
		}
	}

	/*
	 * Stub with an annotated non-getter method taking a String parameter.
	 * NOTE(review): no test in this class exercises this stub — confirm it is
	 * still needed.
	 */
	public class MethodWithString
	{
		public String value;

		@Default("fooBaroo")
		public void fooBar(String s)
		{
			this.value = s;
		}
	}
}
package org.sagebionetworks.repo.manager.discussion; import static org.sagebionetworks.repo.manager.AuthorizationManagerImpl.ANONYMOUS_ACCESS_DENIED_REASON; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Set; import org.sagebionetworks.ids.IdGenerator; import org.sagebionetworks.ids.IdType; import org.sagebionetworks.reflection.model.PaginatedResults; import org.sagebionetworks.repo.manager.AuthorizationManager; import org.sagebionetworks.repo.model.ACCESS_TYPE; import org.sagebionetworks.repo.model.AccessControlListDAO; import org.sagebionetworks.repo.model.EntityIdList; import org.sagebionetworks.repo.model.GroupMembersDAO; import org.sagebionetworks.repo.model.ObjectType; import org.sagebionetworks.repo.model.PaginatedIds; import org.sagebionetworks.repo.model.UnauthorizedException; import org.sagebionetworks.repo.model.UploadContentToS3DAO; import org.sagebionetworks.repo.model.UserInfo; import org.sagebionetworks.repo.model.dao.discussion.DiscussionThreadDAO; import org.sagebionetworks.repo.model.dao.discussion.ForumDAO; import org.sagebionetworks.repo.model.dao.subscription.SubscriptionDAO; import org.sagebionetworks.repo.model.discussion.CreateDiscussionThread; import org.sagebionetworks.repo.model.discussion.DiscussionFilter; import org.sagebionetworks.repo.model.discussion.DiscussionThreadBundle; import org.sagebionetworks.repo.model.discussion.DiscussionThreadEntityReference; import org.sagebionetworks.repo.model.discussion.DiscussionThreadOrder; import org.sagebionetworks.repo.model.discussion.EntityThreadCounts; import org.sagebionetworks.repo.model.discussion.MessageURL; import org.sagebionetworks.repo.model.discussion.ThreadCount; import org.sagebionetworks.repo.model.discussion.UpdateThreadMessage; import org.sagebionetworks.repo.model.discussion.UpdateThreadTitle; import org.sagebionetworks.repo.model.jdo.KeyFactory; import org.sagebionetworks.repo.model.message.ChangeType; 
import org.sagebionetworks.repo.model.message.MessageToSend; import org.sagebionetworks.repo.model.message.TransactionalMessenger; import org.sagebionetworks.repo.model.subscription.SubscriptionObjectType; import org.sagebionetworks.repo.transactions.WriteTransaction; import org.sagebionetworks.repo.web.NotFoundException; import org.sagebionetworks.upload.discussion.MessageKeyUtils; import org.sagebionetworks.util.ValidateArgument; import org.springframework.beans.factory.annotation.Autowired; public class DiscussionThreadManagerImpl implements DiscussionThreadManager { private static final long DEFAULT_OFFSET = 0L; private static final DiscussionFilter DEFAULT_FILTER = DiscussionFilter.NO_FILTER; public static final int MAX_TITLE_LENGTH = 140; public static final long MAX_LIMIT = 20L; @Autowired private DiscussionThreadDAO threadDao; @Autowired private ForumDAO forumDao; @Autowired private UploadContentToS3DAO uploadDao; @Autowired private SubscriptionDAO subscriptionDao; @Autowired private AuthorizationManager authorizationManager; @Autowired private IdGenerator idGenerator; @Autowired private TransactionalMessenger transactionalMessenger; @Autowired private AccessControlListDAO aclDao; @Autowired private GroupMembersDAO groupMembersDao; @WriteTransaction @Override public DiscussionThreadBundle createThread(UserInfo userInfo, CreateDiscussionThread createThread) throws IOException { ValidateArgument.required(createThread, "createThread"); ValidateArgument.required(createThread.getForumId(), "CreateDiscussionThread.forumId"); ValidateArgument.required(createThread.getTitle(), "CreateDiscussionThread.title"); ValidateArgument.required(createThread.getMessageMarkdown(), "CreateDiscussionThread.messageMarkdown"); ValidateArgument.requirement(createThread.getTitle().length() <= MAX_TITLE_LENGTH, "Title cannot exceed "+MAX_TITLE_LENGTH+" characters."); UserInfo.validateUserInfo(userInfo); String projectId = 
forumDao.getForum(Long.parseLong(createThread.getForumId())).getProjectId(); if (authorizationManager.isAnonymousUser(userInfo)){ throw new UnauthorizedException(ANONYMOUS_ACCESS_DENIED_REASON); } authorizationManager.canAccess(userInfo, projectId, ObjectType.ENTITY, ACCESS_TYPE.READ).checkAuthorizationOrElseThrow(); Long id = idGenerator.generateNewId(IdType.DISCUSSION_THREAD_ID); String messageKey = uploadDao.uploadThreadMessage(createThread.getMessageMarkdown(), createThread.getForumId(), id.toString()); DiscussionThreadBundle thread = threadDao.createThread(createThread.getForumId(), id.toString(), createThread.getTitle(), messageKey, userInfo.getId()); MessageToSend changeMessage = new MessageToSend() .withUserId(userInfo.getId()) .withObjectType(ObjectType.THREAD) .withObjectId(id.toString()) .withChangeType(ChangeType.CREATE); transactionalMessenger.sendMessageAfterCommit(changeMessage); subscriptionDao.create(userInfo.getId().toString(), id.toString(), SubscriptionObjectType.THREAD); List<DiscussionThreadEntityReference> entityRefs = DiscussionUtils.getEntityReferences(createThread.getMessageMarkdown(), thread.getId()); entityRefs.addAll(DiscussionUtils.getEntityReferences(createThread.getTitle(), thread.getId())); threadDao.insertEntityReference(entityRefs); return thread; } @WriteTransaction @Override public DiscussionThreadBundle getThread(UserInfo userInfo, String threadId) { ValidateArgument.required(threadId, "threadId"); UserInfo.validateUserInfo(userInfo); Long threadIdLong = Long.parseLong(threadId); DiscussionThreadBundle thread = threadDao.getThread(threadIdLong, DEFAULT_FILTER); if (thread.getIsDeleted()) { try { authorizationManager.canAccess(userInfo, thread.getProjectId(), ObjectType.ENTITY, ACCESS_TYPE.MODERATE).checkAuthorizationOrElseThrow(); } catch (UnauthorizedException e) { throw new NotFoundException(); } } else { authorizationManager.canAccess(userInfo, thread.getProjectId(), ObjectType.ENTITY, 
ACCESS_TYPE.READ).checkAuthorizationOrElseThrow(); } threadDao.updateThreadView(threadIdLong, userInfo.getId()); MessageToSend changeMessage = new MessageToSend() .withUserId(userInfo.getId()) .withObjectType(ObjectType.THREAD) .withObjectId(threadId) .withChangeType(ChangeType.UPDATE); transactionalMessenger.sendMessageAfterCommit(changeMessage); return thread; } @Override public void checkPermission(UserInfo userInfo, String threadId, ACCESS_TYPE accessType) { ValidateArgument.required(threadId, "threadId"); ValidateArgument.required(accessType, "accessType"); UserInfo.validateUserInfo(userInfo); String projectId = threadDao.getProjectId(threadId); authorizationManager.canAccess(userInfo, projectId, ObjectType.ENTITY, accessType).checkAuthorizationOrElseThrow(); } @WriteTransaction @Override public DiscussionThreadBundle updateTitle(UserInfo userInfo, String threadId, UpdateThreadTitle newTitle) { ValidateArgument.required(threadId, "threadId"); ValidateArgument.required(newTitle, "newTitle"); ValidateArgument.required(newTitle.getTitle(), "UpdateThreadTitle.title"); UserInfo.validateUserInfo(userInfo); Long threadIdLong = Long.parseLong(threadId); String author = threadDao.getAuthorForUpdate(threadId); if (authorizationManager.isUserCreatorOrAdmin(userInfo, author)) { DiscussionThreadBundle thread = threadDao.updateTitle(threadIdLong, newTitle.getTitle()); threadDao.insertEntityReference(DiscussionUtils.getEntityReferences(newTitle.getTitle(), thread.getId())); return thread; } else { throw new UnauthorizedException("Only the user that created the thread can modify it."); } } @WriteTransaction @Override public DiscussionThreadBundle updateMessage(UserInfo userInfo, String threadId, UpdateThreadMessage newMessage) throws IOException { ValidateArgument.required(threadId, "threadId"); ValidateArgument.required(newMessage, "newMessage"); ValidateArgument.required(newMessage.getMessageMarkdown(), "UpdateThreadMessage.messageMarkdown"); 
UserInfo.validateUserInfo(userInfo); Long threadIdLong = Long.parseLong(threadId); DiscussionThreadBundle thread = threadDao.getThread(threadIdLong, DiscussionFilter.EXCLUDE_DELETED); if (authorizationManager.isUserCreatorOrAdmin(userInfo, thread.getCreatedBy())) { String messageKey = uploadDao.uploadThreadMessage(newMessage.getMessageMarkdown(), thread.getForumId(), thread.getId()); thread = threadDao.updateMessageKey(threadIdLong, messageKey); threadDao.insertEntityReference(DiscussionUtils.getEntityReferences(newMessage.getMessageMarkdown(), thread.getId())); return thread; } else { throw new UnauthorizedException("Only the user that created the thread can modify it."); } } @WriteTransaction @Override public void markThreadAsDeleted(UserInfo userInfo, String threadId) { checkPermission(userInfo, threadId, ACCESS_TYPE.MODERATE); threadDao.markThreadAsDeleted(Long.parseLong(threadId)); } @WriteTransaction @Override public void pinThread(UserInfo userInfo, String threadId) { if (threadDao.isThreadDeleted(threadId)) { throw new NotFoundException(); } checkPermission(userInfo, threadId, ACCESS_TYPE.MODERATE); threadDao.pinThread(Long.parseLong(threadId)); } @WriteTransaction @Override public void unpinThread(UserInfo userInfo, String threadId) { if (threadDao.isThreadDeleted(threadId)) { throw new NotFoundException(); } checkPermission(userInfo, threadId, ACCESS_TYPE.MODERATE); threadDao.unpinThread(Long.parseLong(threadId)); } @Override public PaginatedResults<DiscussionThreadBundle> getThreadsForForum( UserInfo userInfo, String forumId, Long limit, Long offset, DiscussionThreadOrder order, Boolean ascending, DiscussionFilter filter) { ValidateArgument.required(forumId, "forumId"); ValidateArgument.required(filter, "filter"); UserInfo.validateUserInfo(userInfo); if (limit == null) { limit = MAX_LIMIT; } if (offset == null) { offset = DEFAULT_OFFSET; } ValidateArgument.requirement(limit >= 0 && offset >= 0 && limit <= MAX_LIMIT, "Limit and offset must be greater than 
0, and limit must be smaller than or equal to "+MAX_LIMIT); String projectId = forumDao.getForum(Long.parseLong(forumId)).getProjectId(); if (filter.equals(DiscussionFilter.EXCLUDE_DELETED)) { authorizationManager.canAccess(userInfo, projectId, ObjectType.ENTITY, ACCESS_TYPE.READ).checkAuthorizationOrElseThrow(); } else { authorizationManager.canAccess(userInfo, projectId, ObjectType.ENTITY, ACCESS_TYPE.MODERATE).checkAuthorizationOrElseThrow(); } List<DiscussionThreadBundle> results = threadDao.getThreadsForForum(Long.parseLong(forumId), limit, offset, order, ascending, filter); return PaginatedResults.createWithLimitAndOffset(results, limit, offset); } @Override public MessageURL getMessageUrl(UserInfo userInfo, String messageKey) { ValidateArgument.required(messageKey, "messageKey"); String threadId = MessageKeyUtils.getThreadId(messageKey); checkPermission(userInfo, threadId, ACCESS_TYPE.READ); threadDao.updateThreadView(Long.parseLong(threadId), userInfo.getId()); return uploadDao.getThreadUrl(messageKey); } @Override public ThreadCount getThreadCountForForum(UserInfo userInfo, String forumId, DiscussionFilter filter) { ValidateArgument.required(forumId, "forumId"); ValidateArgument.required(filter, "filter"); UserInfo.validateUserInfo(userInfo); String projectId = forumDao.getForum(Long.parseLong(forumId)).getProjectId(); authorizationManager.canAccess(userInfo, projectId, ObjectType.ENTITY, ACCESS_TYPE.READ).checkAuthorizationOrElseThrow(); ThreadCount count = new ThreadCount(); count.setCount(threadDao.getThreadCountForForum(Long.parseLong(forumId), filter)); return count; } @Override public PaginatedResults<DiscussionThreadBundle> getThreadsForEntity(UserInfo userInfo, String entityId, Long limit, Long offset, DiscussionThreadOrder order, Boolean ascending) { ValidateArgument.required(entityId, "entityId"); UserInfo.validateUserInfo(userInfo); if (limit == null) { limit = MAX_LIMIT; } if (offset == null) { offset = DEFAULT_OFFSET; } 
ValidateArgument.requirement(limit >= 0 && offset >= 0 && limit <= MAX_LIMIT, "Limit and offset must be greater than 0, and limit must be smaller than or equal to "+MAX_LIMIT); Long entityIdLong = KeyFactory.stringToKey(entityId); Set<Long> projectIds = threadDao.getDistinctProjectIdsOfThreadsReferencesEntityIds(Arrays.asList(entityIdLong)); projectIds = aclDao.getAccessibleBenefactors(userInfo.getGroups(), projectIds, ObjectType.ENTITY, ACCESS_TYPE.READ); List<DiscussionThreadBundle> results = threadDao.getThreadsForEntity(entityIdLong, limit, offset, order, ascending, DiscussionFilter.EXCLUDE_DELETED, projectIds); return PaginatedResults.createWithLimitAndOffset(results, limit, offset); } @Override public EntityThreadCounts getEntityThreadCounts(UserInfo userInfo, EntityIdList entityIdList) { UserInfo.validateUserInfo(userInfo); ValidateArgument.required(entityIdList, "entityIdList"); ValidateArgument.required(entityIdList.getIdList(), "EntityIdList.list"); ValidateArgument.requirement(entityIdList.getIdList().size() <= MAX_LIMIT, "The size of entityIdList cannot exceed "+MAX_LIMIT); List<Long> entityIds = KeyFactory.stringToKey(entityIdList.getIdList()); Set<Long> projectIds = threadDao.getDistinctProjectIdsOfThreadsReferencesEntityIds(entityIds); projectIds = aclDao.getAccessibleBenefactors(userInfo.getGroups(), projectIds, ObjectType.ENTITY, ACCESS_TYPE.READ); return threadDao.getThreadCounts(entityIds, projectIds); } @Override public void markThreadAsNotDeleted(UserInfo userInfo, String threadId) { checkPermission(userInfo, threadId, ACCESS_TYPE.MODERATE); threadDao.markThreadAsNotDeleted(Long.parseLong(threadId)); } @Override public PaginatedIds getModerators(UserInfo userInfo, String forumId, Long limit, Long offset) { ValidateArgument.required(forumId, "forumId"); UserInfo.validateUserInfo(userInfo); if (limit == null) { limit = MAX_LIMIT; } if (offset == null) { offset = DEFAULT_OFFSET; } ValidateArgument.requirement(limit >= 0 && offset >= 0 && limit <= 
MAX_LIMIT, "Limit and offset must be greater than 0, and limit must be smaller than or equal to "+MAX_LIMIT); PaginatedIds results = new PaginatedIds(); List<String> userIds = new ArrayList<String>(); results.setResults(userIds); String projectId = forumDao.getForum(Long.parseLong(forumId)).getProjectId(); Set<String> principalIds = aclDao.getPrincipalIds(projectId, ObjectType.ENTITY, ACCESS_TYPE.MODERATE); if (principalIds.isEmpty()) { results.setTotalNumberOfResults(0L); return results; } userIds.addAll(groupMembersDao.getIndividuals(principalIds, limit, offset)); results.setTotalNumberOfResults(groupMembersDao.getIndividualCount(principalIds)); return results; } }
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.common.rounding; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.TimeValue; import org.joda.time.DateTimeField; import org.joda.time.DateTimeZone; import org.joda.time.IllegalInstantException; import java.io.IOException; import java.util.Objects; /** * A strategy for rounding long values. * * Use the java based Rounding class where applicable */ @Deprecated public abstract class Rounding implements Writeable { public abstract byte id(); /** * Rounds the given value. */ public abstract long round(long value); /** * Given the rounded value (which was potentially generated by {@link #round(long)}, returns the next rounding value. For example, with * interval based rounding, if the interval is 3, {@code nextRoundValue(6) = 9 }. 
 * @param value The current rounding value
 * @return The next rounding value
 */
public abstract long nextRoundingValue(long value);

@Override
public abstract boolean equals(Object obj);

@Override
public abstract int hashCode();

/** Creates a builder for a calendar-unit based rounding (hour, day, month, ...). */
public static Builder builder(DateTimeUnit unit) {
    return new Builder(unit);
}

/** Creates a builder for a fixed-interval based rounding. */
public static Builder builder(TimeValue interval) {
    return new Builder(interval);
}

/** Configures and builds either a unit-based or an interval-based rounding. */
public static class Builder {

    private final DateTimeUnit unit;
    // -1 acts as the "unset" sentinel when a calendar unit is used instead of an interval.
    private final long interval;

    private DateTimeZone timeZone = DateTimeZone.UTC;

    public Builder(DateTimeUnit unit) {
        this.unit = unit;
        this.interval = -1;
    }

    public Builder(TimeValue interval) {
        this.unit = null;
        if (interval.millis() < 1)
            throw new IllegalArgumentException("Zero or negative time interval not supported");
        this.interval = interval.millis();
    }

    /** Sets the time zone in which rounding is performed; defaults to UTC. */
    public Builder timeZone(DateTimeZone timeZone) {
        if (timeZone == null) {
            throw new IllegalArgumentException("Setting null as timezone is not supported");
        }
        this.timeZone = timeZone;
        return this;
    }

    /** @return a {@link TimeUnitRounding} if a unit was given, otherwise a {@link TimeIntervalRounding}. */
    public Rounding build() {
        Rounding timeZoneRounding;
        if (unit != null) {
            timeZoneRounding = new TimeUnitRounding(unit, timeZone);
        } else {
            timeZoneRounding = new TimeIntervalRounding(interval, timeZone);
        }
        return timeZoneRounding;
    }
}

/** Rounds timestamps down to calendar-unit boundaries in a given time zone. */
static class TimeUnitRounding extends Rounding {

    static final byte ID = 1;

    private final DateTimeUnit unit;
    private final DateTimeField field;
    private final DateTimeZone timeZone;
    // True when the unit is longer than an hour (> 60*60*1000 ms); such units round to local midnight.
    private final boolean unitRoundsToMidnight;

    TimeUnitRounding(DateTimeUnit unit, DateTimeZone timeZone) {
        this.unit = unit;
        this.field = unit.field(timeZone);
        unitRoundsToMidnight = this.field.getDurationField().getUnitMillis() > 60L * 60L * 1000L;
        this.timeZone = timeZone;
    }

    /** Deserializing constructor; mirrors {@link #writeTo}. */
    TimeUnitRounding(StreamInput in) throws IOException {
        unit = DateTimeUnit.resolve(in.readByte());
        timeZone = DateTimeZone.forID(in.readString());
        field = unit.field(timeZone);
        unitRoundsToMidnight = field.getDurationField().getUnitMillis() > 60L * 60L * 1000L;
    }

    @Override
    public byte id() {
        return ID;
    }

    /**
     * @return The latest timestamp T which is strictly before utcMillis
     *         and such that timeZone.getOffset(T) != timeZone.getOffset(utcMillis).
     *         If there is no such T, returns Long.MAX_VALUE.
     */
    private long previousTransition(long utcMillis) {
        final int offsetAtInputTime = timeZone.getOffset(utcMillis);
        do {
            // Some timezones have transitions that do not change the offset, so we have to
            // repeatedly call previousTransition until a nontrivial transition is found.
            long previousTransition = timeZone.previousTransition(utcMillis);
            if (previousTransition == utcMillis) {
                // There are no earlier transitions
                return Long.MAX_VALUE;
            }
            assert previousTransition < utcMillis; // Progress was made
            utcMillis = previousTransition;
        } while (timeZone.getOffset(utcMillis) == offsetAtInputTime);
        return utcMillis;
    }

    @Override
    public long round(long utcMillis) {
        // field.roundFloor() works as long as the offset doesn't change. It is worth getting this case out
        // of the way first, as the calculations for fixing things near to offset changes are a little
        // expensive and are unnecessary in the common case of working in UTC.
        if (timeZone.isFixed()) {
            return field.roundFloor(utcMillis);
        }

        // When rounding to hours we consider any local time of the form 'xx:00:00' as rounded, even though
        // this gives duplicate bucket names for the times when the clocks go back. Shorter units behave
        // similarly. However, longer units round down to midnight, and on the days where there are two
        // midnights we would rather pick the earlier one, so that buckets are uniquely identified by the date.
        if (unitRoundsToMidnight) {
            final long anyLocalStartOfDay = field.roundFloor(utcMillis);
            // `anyLocalStartOfDay` is _supposed_ to be the Unix timestamp for the start of the day in
            // question in the current time zone. Mostly this just means "midnight", which is fine, and on
            // days with no local midnight it's the first time that does occur on that day which is also ok.
            // However, on days with >1 local midnight this is _one_ of the midnights, but may not be the
            // first. Check whether this is happening, and fix it if so.

            final long previousTransition = previousTransition(anyLocalStartOfDay);

            if (previousTransition == Long.MAX_VALUE) {
                // No previous transitions, so there can't be another earlier local midnight.
                return anyLocalStartOfDay;
            }

            final long currentOffset = timeZone.getOffset(anyLocalStartOfDay);
            final long previousOffset = timeZone.getOffset(previousTransition);
            assert currentOffset != previousOffset;

            // NB we only assume interference from one previous transition. It's theoretically possible to
            // have two transitions in quick succession, both of which have a midnight in them, but this
            // doesn't appear to happen in the TZDB so (a) it's pointless to implement and (b) it won't be
            // tested. I recognise that this comment is tempting fate and will likely cause this very
            // situation to occur in the near future, and eagerly look forward to fixing this using a loop
            // over previous transitions when it happens.

            final long alsoLocalStartOfDay = anyLocalStartOfDay + currentOffset - previousOffset;
            // `alsoLocalStartOfDay` is the Unix timestamp for the start of the day in question if the
            // previous offset were in effect.

            if (alsoLocalStartOfDay <= previousTransition) {
                // Therefore the previous offset _is_ in effect at `alsoLocalStartOfDay`, and it's earlier
                // than anyLocalStartOfDay, so this is the answer to use.
                return alsoLocalStartOfDay;
            } else {
                // The previous offset is not in effect at `alsoLocalStartOfDay`, so the current offset
                // must be.
                return anyLocalStartOfDay;
            }
        } else {
            do {
                long rounded = field.roundFloor(utcMillis);

                // field.roundFloor() mostly works as long as the offset hasn't changed in
                // [rounded, utcMillis], so look at where the offset most recently changed.

                final long previousTransition = previousTransition(utcMillis);

                if (previousTransition == Long.MAX_VALUE || previousTransition < rounded) {
                    // The offset did not change in [rounded, utcMillis], so roundFloor() worked as expected.
                    return rounded;
                }

                // The offset _did_ change in [rounded, utcMillis]. Put differently, this means that none of
                // the times in [previousTransition+1, utcMillis] were rounded, so the rounded time must be
                // <= previousTransition. This means it's sufficient to try and round previousTransition down.
                assert previousTransition < utcMillis;
                utcMillis = previousTransition;
            } while (true);
        }
    }

    @Override
    public long nextRoundingValue(long utcMillis) {
        long floor = round(utcMillis);
        // add one unit and round to get to next rounded value
        long next = round(field.add(floor, 1));
        if (next == floor) {
            // in rare case we need to add more than one unit
            next = round(field.add(floor, 2));
        }
        return next;
    }

    /** Serializes the unit id and time zone id; mirrored by the {@link StreamInput} constructor. */
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeByte(unit.id());
        out.writeString(timeZone.getID());
    }

    @Override
    public int hashCode() {
        return Objects.hash(unit, timeZone);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        TimeUnitRounding other = (TimeUnitRounding) obj;
        return Objects.equals(unit, other.unit) && Objects.equals(timeZone, other.timeZone);
    }

    @Override
    public String toString() {
        return "[" + timeZone + "][" + unit + "]";
    }
}

/** Rounds timestamps down to fixed-millisecond-interval boundaries in a given time zone. */
static class TimeIntervalRounding extends Rounding {

    static final byte ID = 2;

    private final long interval;
    private final DateTimeZone timeZone;

    TimeIntervalRounding(long interval, DateTimeZone timeZone) {
        if (interval < 1)
            throw new IllegalArgumentException("Zero or negative time interval not supported");
        this.interval = interval;
        this.timeZone = timeZone;
    }

    /** Deserializing constructor; mirrors {@link #writeTo}. */
    TimeIntervalRounding(StreamInput in) throws IOException {
        interval = in.readVLong();
        timeZone = DateTimeZone.forID(in.readString());
    }

    @Override
    public byte id() {
        return ID;
    }

    @Override
    public long round(long utcMillis) {
        long timeLocal = timeZone.convertUTCToLocal(utcMillis);
        long rounded = roundKey(timeLocal, interval) * interval;
        long roundedUTC;
        if (isInDSTGap(rounded) == false) {
            roundedUTC = timeZone.convertLocalToUTC(rounded, true, utcMillis);
            // check if we crossed DST transition, in this case we want the
            // last rounded value before the transition
            long transition = timeZone.previousTransition(utcMillis);
            if (transition != utcMillis && transition > roundedUTC) {
                roundedUTC = round(transition - 1);
            }
        } else {
            /*
             * Edge case where the rounded local time is illegal and landed
             * in a DST gap. In this case, we choose 1ms tick after the
             * transition date. We don't want the transition date itself
             * because those dates, when rounded themselves, fall into the
             * previous interval. This would violate the invariant that the
             * rounding operation should be idempotent.
             */
            roundedUTC = timeZone.previousTransition(utcMillis) + 1;
        }
        return roundedUTC;
    }

    /** Floor-division bucket index; the negative branch keeps rounding toward negative infinity. */
    private static long roundKey(long value, long interval) {
        if (value < 0) {
            return (value - interval + 1) / interval;
        } else {
            return value / interval;
        }
    }

    /**
     * Determine whether the local instant is a valid instant in the given
     * time zone. The logic for this is taken from
     * {@link DateTimeZone#convertLocalToUTC(long, boolean)} for the
     * `strict` mode case, but instead of throwing an
     * {@link IllegalInstantException}, which is costly, we want to return a
     * flag indicating that the value is illegal in that time zone.
     */
    private boolean isInDSTGap(long instantLocal) {
        if (timeZone.isFixed()) {
            return false;
        }
        // get the offset at instantLocal (first estimate)
        int offsetLocal = timeZone.getOffset(instantLocal);
        // adjust instantLocal using the estimate and recalc the offset
        int offset = timeZone.getOffset(instantLocal - offsetLocal);
        // if the offsets differ, we must be near a DST boundary
        if (offsetLocal != offset) {
            // determine if we are in the DST gap
            long nextLocal = timeZone.nextTransition(instantLocal - offsetLocal);
            if (nextLocal == (instantLocal - offsetLocal)) {
                nextLocal = Long.MAX_VALUE;
            }
            long nextAdjusted = timeZone.nextTransition(instantLocal - offset);
            if (nextAdjusted == (instantLocal - offset)) {
                nextAdjusted = Long.MAX_VALUE;
            }
            if (nextLocal != nextAdjusted) {
                // we are in the DST gap
                return true;
            }
        }
        return false;
    }

    @Override
    public long nextRoundingValue(long time) {
        long timeLocal = time;
        timeLocal = timeZone.convertUTCToLocal(time);
        long next = timeLocal + interval;
        return timeZone.convertLocalToUTC(next, false);
    }

    /** Serializes the interval and time zone id; mirrored by the {@link StreamInput} constructor. */
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVLong(interval);
        out.writeString(timeZone.getID());
    }

    @Override
    public int hashCode() {
        return Objects.hash(interval, timeZone);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        TimeIntervalRounding other = (TimeIntervalRounding) obj;
        return Objects.equals(interval, other.interval) && Objects.equals(timeZone, other.timeZone);
    }
}

/** Wire (de)serialization helpers pairing each Rounding subtype with its byte ID. */
public static class Streams {

    public static void write(Rounding rounding, StreamOutput out) throws IOException {
        out.writeByte(rounding.id());
        rounding.writeTo(out);
    }

    public static Rounding read(StreamInput in) throws IOException {
        Rounding rounding;
        byte id = in.readByte();
        switch (id) {
            case TimeUnitRounding.ID:
                rounding = new TimeUnitRounding(in);
                break;
            case TimeIntervalRounding.ID:
                rounding = new TimeIntervalRounding(in);
                break;
            default:
                throw new ElasticsearchException("unknown rounding id [" + id + "]");
        }
        return rounding;
    }
}
} // end of enclosing class (declared above this view)
package org.webcurator.ui.site.controller; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Locale; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.junit.Test; import org.springframework.mock.web.MockHttpServletRequest; import org.springframework.mock.web.MockHttpServletResponse; import org.springframework.validation.BindException; import org.springframework.web.servlet.ModelAndView; import org.webcurator.core.sites.MockSiteManagerImpl; import org.webcurator.core.sites.SiteManager; import org.webcurator.core.util.AuthUtil; import org.webcurator.domain.model.core.Annotation; import org.webcurator.domain.model.core.Site; import org.webcurator.test.BaseWCTTest; import org.webcurator.ui.site.SiteEditorContext; import org.webcurator.ui.site.command.SiteCommand; import org.webcurator.ui.site.validator.SiteValidator; import org.webcurator.ui.util.Tab; import org.webcurator.ui.util.TabConfig; import org.webcurator.ui.util.TabStatus; import org.webcurator.ui.util.TabbedController; public class SiteGeneralHandlerTest extends BaseWCTTest<SiteGeneralHandler>{ public SiteGeneralHandlerTest() { super(SiteGeneralHandler.class, "src/test/java/org/webcurator/ui/site/controller/SiteGeneralHandlerTest.xml"); } private SimpleDateFormat sdf = new SimpleDateFormat("dd-MMM-yyyy HH:mm:ss", Locale.UK); private final String[][] ANNOTATIONS = { {"4", "01-APR-2001 00:00:00"}, {"2", "01-FEB-2001 00:00:00"}, {"1", "01-JAN-2001 00:00:00"}, {"3", "01-MAR-2001 00:00:00"} }; private Annotation createAnnotation(String note, String date) throws ParseException { Annotation ann = new Annotation(); ann.setDate(sdf.parse(date)); 
ann.setNote(note); ann.setUser(AuthUtil.getRemoteUserObject()); return ann; } private List<Annotation> createAnnotationList() throws ParseException { List<Annotation> list = new ArrayList<Annotation>(); for(int i = 0; i < ANNOTATIONS.length; i++) { list.add(createAnnotation(ANNOTATIONS[i][0], ANNOTATIONS[i][1])); } return list; } private boolean checkSortedList(List<Annotation> list) throws ParseException { Date lastDate = sdf.parse("01-JAN-2070 00:00:00"); Annotation[] array = list.toArray(new Annotation[list.size()]); assertEquals(array.length, list.size()); for(int i = 0; i < array.length; i++) { if(array[i].getDate().after(lastDate)) { return false; } lastDate = array[i].getDate(); } return true; } private List<Tab> getTabList(SiteManager siteManager) { List<Tab> tabs = new ArrayList<Tab>(); Tab tabGeneral = new Tab(); tabGeneral.setCommandClass(SiteCommand.class); tabGeneral.setJsp("../site-general.jsp"); tabGeneral.setPageId("GENERAL"); tabGeneral.setValidator(new SiteValidator()); SiteGeneralHandler genHandler = new SiteGeneralHandler(); genHandler.setSiteManager(siteManager); tabGeneral.setTabHandler(genHandler); tabs.add(tabGeneral); return tabs; } @Test public final void testPreProcessNextTab() { try { HttpServletRequest aReq = new MockHttpServletRequest(); SiteManager siteManager = new MockSiteManagerImpl(testFile); testInstance.setSiteManager(siteManager); Site site = siteManager.getSite(9000L, true); List<Annotation> list = createAnnotationList(); assertFalse(checkSortedList(list)); site.setAnnotations(list); SiteEditorContext ctx = new SiteEditorContext(site); aReq.getSession().setAttribute(SiteController.EDITOR_CONTEXT, ctx); HttpServletResponse aResp = new MockHttpServletResponse(); SiteCommand aCmd = new SiteCommand(); TabbedController tc = new SiteController(); TabConfig tabConfig = new TabConfig(); tabConfig.setViewName("site"); List<Tab> tabs = getTabList(siteManager); tabConfig.setTabs(tabs); tc.setTabConfig(tabConfig); Tab currentTab = 
tabs.get(0); BindException aErrors = new BindException(aCmd, aCmd.getCmdAction()); ModelAndView mav = testInstance.preProcessNextTab(tc, currentTab, aReq, aResp, aCmd, aErrors); assertNotNull(mav); assertTrue(checkSortedList(site.getAnnotations())); } catch(Exception e) { fail(e.getMessage()); } } @Test public final void testProcessOther() { try { HttpServletRequest aReq = new MockHttpServletRequest(); SiteManager siteManager = new MockSiteManagerImpl(testFile); testInstance.setSiteManager(siteManager); Site site = siteManager.getSite(9000L, true); List<Annotation> list = createAnnotationList(); assertFalse(checkSortedList(list)); site.setAnnotations(list); SiteEditorContext ctx = new SiteEditorContext(site); aReq.getSession().setAttribute(SiteController.EDITOR_CONTEXT, ctx); HttpServletResponse aResp = new MockHttpServletResponse(); SiteCommand aCmd = new SiteCommand(); TabbedController tc = new SiteController(); TabConfig tabConfig = new TabConfig(); tabConfig.setViewName("site"); List<Tab> tabs = getTabList(siteManager); tabConfig.setTabs(tabs); tc.setTabConfig(tabConfig); Tab currentTab = tabs.get(0); aCmd.setCmdAction(SiteCommand.ACTION_ADD_NOTE); aCmd.setAnnotation("A note"); BindException aErrors = new BindException(aCmd, aCmd.getCmdAction()); int numAnnotations = site.getAnnotations().size(); ModelAndView mav = testInstance.processOther(tc, currentTab, aReq, aResp, aCmd, aErrors); assertTrue(mav != null); assertTrue(mav.getViewName().equals("site")); assertTrue(((TabStatus)mav.getModel().get("tabStatus")).getCurrentTab().getPageId().equals("GENERAL")); int listSize = site.getAnnotations().size(); assertTrue(listSize > 0); int noteIndex = 0; assertTrue(site.getAnnotations().size() == (numAnnotations+1)); assertTrue(site.getAnnotations().get(noteIndex).getNote().equals("A note")); assertTrue(checkSortedList(site.getAnnotations())); currentTab = tabs.get(0); aCmd.setCmdAction(SiteCommand.ACTION_MODIFY_NOTE); aCmd.setAnnotation("A new note"); 
aCmd.setAnnotationIndex(noteIndex); aErrors = new BindException(aCmd, aCmd.getCmdAction()); mav = testInstance.processOther(tc, currentTab, aReq, aResp, aCmd, aErrors); assertTrue(mav != null); assertTrue(mav.getViewName().equals("site")); assertTrue(((TabStatus)mav.getModel().get("tabStatus")).getCurrentTab().getPageId().equals("GENERAL")); listSize = site.getAnnotations().size(); assertTrue(listSize > 0); int newNoteIndex = 0; assertTrue(newNoteIndex == noteIndex); assertTrue(site.getAnnotations().size() == (numAnnotations+1)); assertFalse(site.getAnnotations().get(noteIndex).getNote().equals("A note")); assertTrue(site.getAnnotations().get(noteIndex).getNote().equals("A new note")); assertTrue(checkSortedList(site.getAnnotations())); currentTab = tabs.get(0); aCmd.setCmdAction(SiteCommand.ACTION_DELETE_NOTE); aCmd.setAnnotationIndex(noteIndex); aErrors = new BindException(aCmd, aCmd.getCmdAction()); mav = testInstance.processOther(tc, currentTab, aReq, aResp, aCmd, aErrors); assertTrue(mav != null); assertTrue(mav.getViewName().equals("site")); assertTrue(((TabStatus)mav.getModel().get("tabStatus")).getCurrentTab().getPageId().equals("GENERAL")); int newListSize = site.getAnnotations().size(); assertTrue(newListSize == (listSize-1)); assertTrue(site.getAnnotations().size() == numAnnotations); assertTrue(checkSortedList(site.getAnnotations())); } catch(Exception e) { fail(e.getMessage()); } } /** * This test ensures that the "processTab" method correctly adds an annotation. * This function is required to support the new prompt for saving the record when * the user clicks "add" for annotations. 
* Refer to https://sourceforge.net/p/webcurator/enhancements/84/ * @throws Exception */ @Test public final void testProcessTab() throws Exception { HttpServletRequest aReq = new MockHttpServletRequest(); SiteManager siteManager = new MockSiteManagerImpl(testFile); testInstance.setSiteManager(siteManager); Site site = siteManager.getSite(9000L, true); List<Annotation> list = createAnnotationList(); assertFalse(checkSortedList(list)); site.setAnnotations(list); SiteEditorContext ctx = new SiteEditorContext(site); aReq.getSession().setAttribute(SiteController.EDITOR_CONTEXT, ctx); HttpServletResponse aResp = new MockHttpServletResponse(); SiteCommand aCmd = new SiteCommand(); TabbedController tc = new SiteController(); TabConfig tabConfig = new TabConfig(); tabConfig.setViewName("site"); List<Tab> tabs = getTabList(siteManager); tabConfig.setTabs(tabs); tc.setTabConfig(tabConfig); Tab currentTab = tabs.get(0); aCmd.setCmdAction(SiteCommand.ACTION_ADD_NOTE); aCmd.setAnnotation("A note"); BindException aErrors = new BindException(aCmd, aCmd.getCmdAction()); List<Annotation> resultAnnotations = site.getAnnotations(); int numAnnotations = resultAnnotations.size(); testInstance.processTab(tc, currentTab, aReq, aResp, aCmd, aErrors); assertEquals(resultAnnotations.size(), numAnnotations+1); Annotation resultAnnotation = resultAnnotations.get(resultAnnotations.size()-1); assertEquals("A note", resultAnnotation.getNote()); } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.flume.channel.jdbc;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.Transaction;
import org.apache.flume.channel.jdbc.impl.JdbcChannelProviderImpl;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Base class for JDBC channel provider tests backed by an embedded Derby
 * database. Each test gets a freshly-configured context pointing at its own
 * temporary database directory; subclasses contribute additional channel
 * configuration via {@link #configureChannel(Context)}.
 */
public abstract class BaseJdbcChannelProviderTest {

  private static final Logger LOGGER =
      LoggerFactory.getLogger(BaseJdbcChannelProviderTest.class);

  private Context derbyCtx = new Context();
  private File derbyDbDir;
  private JdbcChannelProviderImpl provider;

  /** Hook for subclasses to add or override channel configuration before each test. */
  protected abstract void configureChannel(Context context);

  /**
   * Populates the Derby context (embedded driver, fresh schema) and creates a
   * unique database directory under target/test for this test run.
   */
  @Before
  public void setUp() throws IOException {
    derbyCtx.clear();
    derbyCtx.put(ConfigurationConstants.CONFIG_CREATE_SCHEMA, "true");
    derbyCtx.put(ConfigurationConstants.CONFIG_DATABASE_TYPE, "DERBY");
    derbyCtx.put(ConfigurationConstants.CONFIG_JDBC_DRIVER_CLASS,
        "org.apache.derby.jdbc.EmbeddedDriver");
    derbyCtx.put(ConfigurationConstants.CONFIG_PASSWORD, "");
    derbyCtx.put(ConfigurationConstants.CONFIG_USERNAME, "sa");

    File tmpDir = new File("target/test");
    tmpDir.mkdirs();

    // Redirect Derby's error stream to a log file instead of derby.log in the CWD.
    File derbyLogFile = new File(tmpDir, "derbytest.log");
    String derbyLogFilePath = derbyLogFile.getCanonicalPath();
    derbyCtx.put(ConfigurationConstants.CONFIG_JDBC_SYSPROP_PREFIX
        + "derby.stream.error.file", derbyLogFilePath);

    // Use a temp file to create a temporary directory
    File tempFile = File.createTempFile("temp", "_db", tmpDir);
    String absFileName = tempFile.getCanonicalPath();
    tempFile.delete();

    derbyDbDir = new File(absFileName + "_dir");
    if (!derbyDbDir.exists()) {
      derbyDbDir.mkdirs();
    }

    derbyCtx.put(ConfigurationConstants.CONFIG_URL,
        "jdbc:derby:" + derbyDbDir.getCanonicalPath() + "/db;create=true");

    configureChannel(derbyCtx);

    LOGGER.info("Derby Properties: " + derbyCtx);
  }

  /**
   * Verifies that with max capacity 10 the 11th persist fails, and that after
   * removing one event the failed event can be persisted. Note the
   * remove/re-persist at the bottom runs on every iteration, so the channel
   * grows by one event per loop until capacity is reached.
   */
  @Test
  public void testDerbyChannelCapacity() {
    provider = new JdbcChannelProviderImpl();
    derbyCtx.put(ConfigurationConstants.CONFIG_MAX_CAPACITY, "10");
    provider.initialize(derbyCtx);

    Set<MockEvent> events = new HashSet<MockEvent>();
    for (int i = 1; i < 12; i++) {
      events.add(MockEventUtils.generateMockEvent(i, i, i, 61%i, 1));
    }

    Iterator<MockEvent> meIt = events.iterator();
    int count = 0;
    while (meIt.hasNext()) {
      count++;
      MockEvent me = meIt.next();
      String chName = me.getChannel();
      try {
        provider.persistEvent(chName, me);
        if (count == 11) {
          Assert.fail();
        }
      } catch (JdbcChannelException ex) {
        // Expected only once the capacity of 10 is exceeded, i.e. on the 11th event.
        Assert.assertEquals(11, count);
      }
      // Now should be able to remove one event and add this one
      Event e = provider.removeEvent(chName);
      Assert.assertNotNull(e);
      // The current event should safely persist now
      provider.persistEvent(chName, me);
    }
  }

  /**
   * Verifies transaction identity semantics: nested getTransaction() calls on
   * the same thread return the same object until fully closed, after which a
   * new transaction object is handed out.
   */
  @Test
  public void testDerbySetup() {
    provider = new JdbcChannelProviderImpl();

    provider.initialize(derbyCtx);

    Transaction tx1 = provider.getTransaction();
    tx1.begin();

    Transaction tx2 = provider.getTransaction();
    Assert.assertSame(tx1, tx2);
    tx2.begin();
    tx2.close();
    tx1.close();

    Transaction tx3 = provider.getTransaction();
    Assert.assertNotSame(tx1, tx3);
    tx3.begin();
    tx3.close();

    provider.close();
    provider = null;
  }

  /**
   * Creates 120 events split over 10 channels, stores them via multiple
   * simulated sources and consumes them via multiple simulated sinks.
   */
  @Test
  public void testEventWithSimulatedSourceAndSinks() throws Exception {
    provider = new JdbcChannelProviderImpl();
    provider.initialize(derbyCtx);

    Map<String, List<MockEvent>> eventMap = new HashMap<String, List<MockEvent>>();
    for (int i = 1; i < 121; i++) {
      MockEvent me = MockEventUtils.generateMockEvent(i, i, i, 61%i, 10);
      List<MockEvent> meList = eventMap.get(me.getChannel());
      if (meList == null) {
        meList = new ArrayList<MockEvent>();
        eventMap.put(me.getChannel(), meList);
      }
      meList.add(me);
    }

    List<MockSource> sourceList = new ArrayList<MockSource>();
    List<MockSink> sinkList = new ArrayList<MockSink>();
    for (String channel : eventMap.keySet()) {
      List<MockEvent> meList = eventMap.get(channel);
      sourceList.add(new MockSource(channel, meList, provider));
      sinkList.add(new MockSink(channel, meList, provider));
    }

    ExecutorService sourceExecutor = Executors.newFixedThreadPool(10);
    ExecutorService sinkExecutor = Executors.newFixedThreadPool(10);

    // Sinks are started after a small delay so sources get a head start;
    // sinks poll-and-sleep until their events arrive (see MockSink.call()).
    List<Future<Integer>> srcResults =
        sourceExecutor.invokeAll(sourceList, 300, TimeUnit.SECONDS);
    Thread.sleep(MockEventUtils.generateSleepInterval(3000));
    List<Future<Integer>> sinkResults =
        sinkExecutor.invokeAll(sinkList, 300, TimeUnit.SECONDS);

    int srcCount = 0;
    for (Future<Integer> srcOutput : srcResults) {
      srcCount += srcOutput.get();
    }
    Assert.assertEquals(120, srcCount);

    int sinkCount = 0;
    for (Future<Integer> sinkOutput : sinkResults) {
      sinkCount += sinkOutput.get();
    }
    Assert.assertEquals(120, sinkCount);
  }

  /**
   * Creates 80 events split over 5 channels, stores them and then retrieves
   * them, verifying per-channel FIFO order. (The method name typo is kept to
   * preserve the existing public test interface.)
   */
  @Test
  public void testPeristingEvents() {
    provider = new JdbcChannelProviderImpl();
    provider.initialize(derbyCtx);

    Map<String, List<MockEvent>> eventMap = new HashMap<String, List<MockEvent>>();

    Set<MockEvent> events = new HashSet<MockEvent>();
    for (int i = 1; i < 81; i++) {
      events.add(MockEventUtils.generateMockEvent(i, i, i, 61%i, 5));
    }

    Iterator<MockEvent> meIt = events.iterator();
    while (meIt.hasNext()) {
      MockEvent me = meIt.next();
      String chName = me.getChannel();
      List<MockEvent> eventList = eventMap.get(chName);
      if (eventList == null) {
        eventList = new ArrayList<MockEvent>();
        eventMap.put(chName, eventList);
      }
      eventList.add(me);
      provider.persistEvent(me.getChannel(), me);
    }

    // Now retrieve the events and they should be in the persistence order
    for (String chName : eventMap.keySet()) {
      List<MockEvent> meList = eventMap.get(chName);
      Iterator<MockEvent> it = meList.iterator();
      while (it.hasNext()) {
        MockEvent me = it.next();
        Event event = provider.removeEvent(chName);
        assertEquals(me, event);
      }

      // Now the there should be no more events for this channel
      Event nullEvent = provider.removeEvent(chName);
      Assert.assertNull(nullEvent);
    }

    provider.close();
    provider = null;
  }

  /**
   * Asserts that two events carry identical bodies and identical header maps
   * (null and empty header maps are treated as equivalent).
   */
  private static void assertEquals(Event e1, Event e2) {
    byte[] pl1 = e1.getBody();
    byte[] pl2 = e2.getBody();

    Assert.assertArrayEquals(pl1, pl2);

    Map<String, String> h1 = e1.getHeaders();
    Map<String, String> h2 = e2.getHeaders();
    if (h1 == null || h1.size() == 0) {
      Assert.assertTrue(h2 == null || h2.size() == 0);
    } else {
      Assert.assertTrue(h1.size() == h2.size());
      for (String key : h1.keySet()) {
        Assert.assertTrue(h2.containsKey(key));
        String v1 = h1.get(key);
        String v2 = h2.remove(key); // drain h2 so leftovers indicate extra headers
        Assert.assertEquals(v1, v2);
      }
      Assert.assertTrue(h2.size() == 0);
    }
  }

  /** Best-effort close of the provider; close failures are logged, not rethrown. */
  @After
  public void tearDown() throws IOException {
    if (provider != null) {
      try {
        provider.close();
      } catch (Exception ex) {
        LOGGER.error("Unable to close provider", ex);
      }
    }
    provider = null;
  }

  /**
   * Simulated sink: drains its channel, polling with randomized sleeps while
   * the queue is empty, and asserts each removed event matches the expected
   * one in order. Returns the number of events consumed.
   */
  private static class MockSink implements Callable<Integer> {

    private final String channel;
    private final List<MockEvent> events;
    private final JdbcChannelProviderImpl provider;

    private MockSink(String channel, List<MockEvent> events,
        JdbcChannelProviderImpl provider) {
      this.channel = channel;
      this.events = events;
      this.provider = provider;
    }

    @Override
    public Integer call() throws Exception {
      LOGGER.debug("Sink for channel[" + channel + "]: starting");
      if (events == null) {
        return 0;
      }
      Iterator<MockEvent> it = events.iterator();
      while (it.hasNext()) {
        MockEvent me = it.next();
        Event event = null;
        while (event == null) {
          event = provider.removeEvent(channel);
          if (event == null) {
            LOGGER.debug("Sink for channel[" + channel + "]: empty queue");
            try {
              Thread.sleep(MockEventUtils.generateSleepInterval(1000));
            } catch (InterruptedException ex) {
              // Preserve the interrupt flag for the executor.
              Thread.currentThread().interrupt();
            }
          } else {
            LOGGER.debug("Sink for channel[" + channel + "]: removed event: " + event);
          }
        }
        BaseJdbcChannelProviderTest.assertEquals(me, event);
      }
      LOGGER.debug("Sink for channel[" + channel + "]: retrieved all events");
      return events.size();
    }
  }

  /**
   * Simulated source: persists its assigned events to the channel with
   * randomized sleeps between events. Returns the number of events persisted.
   */
  private static class MockSource implements Callable<Integer> {

    private final String channel;
    private final List<MockEvent> events;
    private final JdbcChannelProviderImpl provider;

    private MockSource(String channel, List<MockEvent> events,
        JdbcChannelProviderImpl provider) {
      this.channel = channel;
      this.events = events;
      this.provider = provider;
    }

    @Override
    public Integer call() throws Exception {
      LOGGER.debug("Source for channel[" + channel + "]: starting");
      if (events == null) {
        return 0;
      }
      Iterator<MockEvent> it = events.iterator();
      while (it.hasNext()) {
        MockEvent me = it.next();
        Assert.assertEquals(channel, me.getChannel());
        provider.persistEvent(channel, me);
        try {
          Thread.sleep(MockEventUtils.generateSleepInterval(1000));
        } catch (InterruptedException ex) {
          // Preserve the interrupt flag for the executor.
          Thread.currentThread().interrupt();
        }
      }
      LOGGER.debug("Source for channel[" + channel + "]: submitted all events");
      return events.size();
    }
  }
}
/**
 *============================================================================
 * Copyright The Ohio State University Research Foundation, The University of Chicago -
 * Argonne National Laboratory, Emory University, SemanticBits LLC, and
 * Ekagra Software Technologies Ltd.
 *
 * Distributed under the OSI-approved BSD 3-Clause License.
 * See http://ncip.github.com/cagrid-core/LICENSE.txt for details.
 *============================================================================
 **/
package gov.nih.nci.cagrid.gts.service;

import gov.nih.nci.cagrid.gts.bean.Permission;
import gov.nih.nci.cagrid.gts.bean.PermissionFilter;
import gov.nih.nci.cagrid.gts.bean.Role;
import gov.nih.nci.cagrid.gts.common.Constants;
import gov.nih.nci.cagrid.gts.common.Database;
import gov.nih.nci.cagrid.gts.service.db.DBManager;
import gov.nih.nci.cagrid.gts.service.db.PermissionsTable;
import gov.nih.nci.cagrid.gts.stubs.types.GTSInternalFault;
import gov.nih.nci.cagrid.gts.stubs.types.IllegalPermissionFault;
import gov.nih.nci.cagrid.gts.stubs.types.InvalidPermissionFault;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;


/**
 * Manages trust-service permissions (grid identity + role + trusted
 * authority) stored in the permissions table. Grid identities are bound via
 * PreparedStatement parameters; the only values concatenated into SQL are
 * server-side constants (Role / Constants), not caller input.
 *
 * NOTE(review): buildDatabase() and formatPermission(...) are referenced here
 * but defined later in this class, outside this view.
 *
 * @author <A HREF="MAILTO:langella@bmi.osu.edu">Stephen Langella </A>
 * @author <A HREF="MAILTO:oster@bmi.osu.edu">Scott Oster </A>
 * @author <A HREF="MAILTO:hastings@bmi.osu.edu">Shannon Hastings </A>
 * @version $Id: TrustedAuthorityManager.java,v 1.1 2006/03/08 19:48:46 langella
 *          Exp $
 */
public class PermissionManager {

    private Log log;

    // Presumably set by buildDatabase() (defined beyond this view) -- TODO confirm.
    private boolean dbBuilt = false;

    private DBManager dbManager;

    private Database db;


    public PermissionManager(DBManager dbManager) {
        log = LogFactory.getLog(this.getClass().getName());
        this.dbManager = dbManager;
        this.db = dbManager.getDatabase();
    }


    /**
     * Validates and inserts a permission row. A null trusted-authority name is
     * normalized to ALL_TRUST_AUTHORITIES; the TrustServiceAdmin role must (and
     * only it may) apply to ALL_TRUST_AUTHORITIES.
     *
     * @throws IllegalPermissionFault
     *             if a required field is missing, the role/authority pairing is
     *             invalid, or the permission already exists.
     * @throws GTSInternalFault
     *             on unexpected database errors.
     */
    public synchronized void addPermission(Permission p) throws GTSInternalFault, IllegalPermissionFault {
        // This method assumes that any Trusted Authorities associated with a
        // permission is valid
        this.buildDatabase();
        if (p.getTrustedAuthorityName() == null) {
            p.setTrustedAuthorityName(Constants.ALL_TRUST_AUTHORITIES);
        }

        if (p.getGridIdentity() == null) {
            IllegalPermissionFault fault = new IllegalPermissionFault();
            fault.setFaultString("The permission " + formatPermission(p) + " no grid identity specified.");
            throw fault;
        }

        if (p.getRole() == null) {
            IllegalPermissionFault fault = new IllegalPermissionFault();
            fault.setFaultString("The permission " + formatPermission(p) + " no role specified.");
            throw fault;
        }

        if ((p.getTrustedAuthorityName().equals(Constants.ALL_TRUST_AUTHORITIES))
            && (!p.getRole().equals(Role.TrustServiceAdmin))) {
            IllegalPermissionFault fault = new IllegalPermissionFault();
            fault.setFaultString("The permission " + formatPermission(p) + " must specify a specific Trust Authority.");
            throw fault;
        }

        if ((!p.getTrustedAuthorityName().equals(Constants.ALL_TRUST_AUTHORITIES))
            && (p.getRole().equals(Role.TrustServiceAdmin))) {
            IllegalPermissionFault fault = new IllegalPermissionFault();
            fault.setFaultString("The permission " + formatPermission(p)
                + " cannot specify a specific Trust Authority.");
            throw fault;
        }

        if (this.doesPermissionExist(p)) {
            IllegalPermissionFault fault = new IllegalPermissionFault();
            fault.setFaultString("The permission " + formatPermission(p) + " cannot be added, it already exists.");
            throw fault;
        }

        StringBuffer insert = new StringBuffer();
        Connection c = null;
        try {
            insert.append("INSERT INTO " + PermissionsTable.TABLE_NAME + " SET " + PermissionsTable.GRID_IDENTITY
                + "= ?," + PermissionsTable.ROLE + "= ?," + PermissionsTable.TRUSTED_AUTHORITY + "= ?");
            c = db.getConnection();
            PreparedStatement s = c.prepareStatement(insert.toString());
            s.setString(1, p.getGridIdentity());
            s.setString(2, p.getRole().getValue());
            s.setString(3, p.getTrustedAuthorityName());
            s.execute();
            s.close();
        } catch (Exception e) {
            this.log.error("Unexpected database error incurred in adding the permission " + formatPermission(p)
                + ", the following statement generated the error: \n" + insert.toString() + "\n", e);
            GTSInternalFault fault = new GTSInternalFault();
            fault.setFaultString("Unexpected error adding the permission " + formatPermission(p) + "!!!");
            throw fault;
        } finally {
            db.releaseConnection(c);
        }
    }


    /**
     * Deletes the exact (identity, role, authority) permission row.
     *
     * @throws InvalidPermissionFault
     *             if no such permission exists.
     * @throws GTSInternalFault
     *             on unexpected database errors.
     */
    public synchronized void revokePermission(Permission p) throws GTSInternalFault, InvalidPermissionFault {
        buildDatabase();
        if (!doesPermissionExist(p)) {
            InvalidPermissionFault fault = new InvalidPermissionFault();
            fault.setFaultString("Could not revoke " + formatPermission(p) + ", the permission does not exist!!!");
            throw fault;
        }
        String sql = "delete from " + PermissionsTable.TABLE_NAME + " where " + PermissionsTable.GRID_IDENTITY
            + "= ? AND " + PermissionsTable.ROLE + "= ? AND " + PermissionsTable.TRUSTED_AUTHORITY + "= ?";
        Connection c = null;
        try {
            c = db.getConnection();
            PreparedStatement s = c.prepareStatement(sql);
            s.setString(1, p.getGridIdentity());
            s.setString(2, p.getRole().getValue());
            s.setString(3, p.getTrustedAuthorityName());
            s.execute();
            s.close();
        } catch (Exception e) {
            String perm = formatPermission(p);
            this.log.error("Unexpected database error incurred in removing the permission " + perm
                + " exists, the following statement generated the error: \n" + sql + "\n", e);
            GTSInternalFault fault = new GTSInternalFault();
            fault.setFaultString("Unexpected error in removing the permission " + perm + " exists.");
            throw fault;
        } finally {
            db.releaseConnection(c);
        }
    }


    /**
     * Deletes every permission row for the given trusted authority.
     *
     * @throws GTSInternalFault
     *             on unexpected database errors.
     */
    public synchronized void revokePermissions(String trustedAuthorityName) throws GTSInternalFault {
        buildDatabase();
        String sql = "delete from " + PermissionsTable.TABLE_NAME + " where " + PermissionsTable.TRUSTED_AUTHORITY
            + "= ?";
        Connection c = null;
        try {
            c = db.getConnection();
            PreparedStatement s = c.prepareStatement(sql);
            s.setString(1, trustedAuthorityName);
            s.execute();
            s.close();
        } catch (Exception e) {
            this.log.error("Unexpected database error incurred in removing the permissions for the trusted authority "
                + trustedAuthorityName + ".", e);
            GTSInternalFault fault = new GTSInternalFault();
            fault.setFaultString("Unexpected database error incurred in removing the permissions for the trusted authority "
                + trustedAuthorityName + ".");
            throw fault;
        } finally {
            db.releaseConnection(c);
        }
    }


    /**
     * @return true if an exact (identity, role, authority) permission row exists.
     * @throws GTSInternalFault
     *             on unexpected database errors.
     */
    public synchronized boolean doesPermissionExist(Permission p) throws GTSInternalFault {
        String sql = "select count(*) from " + PermissionsTable.TABLE_NAME + " where "
            + PermissionsTable.GRID_IDENTITY + "= ? AND " + PermissionsTable.ROLE + "= ? AND "
            + PermissionsTable.TRUSTED_AUTHORITY + "= ?";
        Connection c = null;
        boolean exists = false;
        try {
            c = db.getConnection();
            PreparedStatement s = c.prepareStatement(sql);
            s.setString(1, p.getGridIdentity());
            s.setString(2, p.getRole().getValue());
            s.setString(3, p.getTrustedAuthorityName());
            ResultSet rs = s.executeQuery();
            if (rs.next()) {
                int count = rs.getInt(1);
                if (count > 0) {
                    exists = true;
                }
            }
            rs.close();
            s.close();
        } catch (Exception e) {
            String perm = formatPermission(p);
            this.log.error("Unexpected database error incurred in determining if the permission " + perm
                + " exists, the following statement generated the error: \n" + sql + "\n", e);
            GTSInternalFault fault = new GTSInternalFault();
            fault.setFaultString("Unexpected error in determining if the permission " + perm + " exists.");
            throw fault;
        } finally {
            db.releaseConnection(c);
        }
        return exists;
    }


    /**
     * @return true if the identity holds the TrustServiceAdmin role for
     *         ALL_TRUST_AUTHORITIES. Role and authority here are server-side
     *         constants; only the identity is bound as a parameter.
     * @throws GTSInternalFault
     *             on unexpected database errors.
     */
    public boolean isUserTrustServiceAdmin(String gridIdentity) throws GTSInternalFault {
        this.buildDatabase();
        Connection c = null;
        boolean isAdmin = false;
        StringBuffer sql = new StringBuffer();
        sql.append("select count(*) from " + PermissionsTable.TABLE_NAME);
        sql.append(" WHERE " + PermissionsTable.GRID_IDENTITY + " = ? AND ");
        sql.append(PermissionsTable.ROLE + "='" + Role.TrustServiceAdmin + "' AND ");
        sql.append(PermissionsTable.TRUSTED_AUTHORITY + " = '" + Constants.ALL_TRUST_AUTHORITIES + "'");
        try {
            c = db.getConnection();
            PreparedStatement s = c.prepareStatement(sql.toString());
            s.setString(1, gridIdentity);
            ResultSet rs = s.executeQuery();
            if (rs.next()) {
                int count = rs.getInt(1);
                if (count > 0) {
                    isAdmin = true;
                }
            }
            rs.close();
            s.close();
        } catch (Exception e) {
            this.log.error("Unexpected database error incurred in determining whether or not the user " + gridIdentity
                + " is a trust service administrator, the following statement generated the error: \n"
                + sql.toString() + "\n", e);
            GTSInternalFault fault = new GTSInternalFault();
            fault.setFaultString("Unexpected error occurred in determining whether or not the user " + gridIdentity
                + " is a trust service administrator.");
            throw fault;
        } finally {
            db.releaseConnection(c);
        }
        return isAdmin;
    }


    // NOTE(review): method continues beyond this view; body is incomplete here.
    public boolean isUserTrustedAuthorityAdmin(String authority, String gridIdentity) throws GTSInternalFault {
        this.buildDatabase();
        Connection c = null;
        boolean isAdmin = false;
        StringBuffer sql = new StringBuffer();
        sql.append("select count(*) from " + PermissionsTable.TABLE_NAME);
        sql.append(" WHERE " + PermissionsTable.GRID_IDENTITY + " = ?"
+ " AND "); sql.append(PermissionsTable.ROLE + "='" + Role.TrustAuthorityManager + "' AND "); sql.append(PermissionsTable.TRUSTED_AUTHORITY + " = '" + authority + "'"); try { c = db.getConnection(); PreparedStatement s = c.prepareStatement(sql.toString()); s.setString(1, gridIdentity); ResultSet rs = s.executeQuery(); if (rs.next()) { int count = rs.getInt(1); if (count > 0) { isAdmin = true; } } rs.close(); s.close(); } catch (Exception e) { this.log.error("Unexpected database error incurred in determining whether or not the user " + gridIdentity + " is a trust service administrator, the following statement generated the error: \n" + sql.toString() + "\n", e); GTSInternalFault fault = new GTSInternalFault(); fault.setFaultString("Unexpected error occurred in determining whether or not the user " + gridIdentity + " is a trust service administrator."); throw fault; } finally { db.releaseConnection(c); } return isAdmin; } public synchronized Permission[] findPermissions(PermissionFilter filter) throws GTSInternalFault { this.buildDatabase(); Connection c = null; List<Permission> permissions = new ArrayList<Permission>(); StringBuffer sql = new StringBuffer(); try { c = db.getConnection(); PreparedStatement s = null; if (filter != null) { s = c.prepareStatement("select * from " + PermissionsTable.TABLE_NAME + " WHERE " + PermissionsTable.GRID_IDENTITY + " LIKE ? AND " + PermissionsTable.ROLE + " LIKE ? 
AND " + PermissionsTable.TRUSTED_AUTHORITY + " LIKE ?"); if (filter.getGridIdentity() != null) { s.setString(1, "%" + filter.getGridIdentity() + "%"); } else { s.setString(1, "%"); } if (filter.getRole() != null) { s.setString(2, "%" + filter.getRole().getValue() + "%"); } else { s.setString(2, "%"); } if (filter.getTrustedAuthorityName() != null) { s.setString(3, "%" + filter.getTrustedAuthorityName() + "%"); } else { s.setString(3, "%"); } } else { s = c.prepareStatement("select * from " + PermissionsTable.TABLE_NAME); } ResultSet rs = s.executeQuery(); while (rs.next()) { Permission p = new Permission(); p.setGridIdentity(rs.getString(PermissionsTable.GRID_IDENTITY)); p.setRole(Role.fromValue(rs.getString(PermissionsTable.ROLE))); p.setTrustedAuthorityName(clean(rs.getString(PermissionsTable.TRUSTED_AUTHORITY))); permissions.add(p); } rs.close(); s.close(); Permission[] list = new Permission[permissions.size()]; for (int i = 0; i < list.length; i++) { list[i] = (Permission) permissions.get(i); } return list; } catch (Exception e) { this.log.error( "Unexpected database error incurred in finding permissions, the following statement generated the error: \n" + sql.toString() + "\n", e); GTSInternalFault fault = new GTSInternalFault(); fault.setFaultString("Unexpected error occurred in finding permissions."); throw fault; } finally { db.releaseConnection(c); } } private String clean(String s) { if ((s == null) || (s.trim().length() == 0)) { return null; } else { return s; } } private String formatPermission(Permission p) { String role = null; if (p.getRole() != null) { role = p.getRole().getValue(); } return "[" + p.getGridIdentity() + "," + role + "," + p.getTrustedAuthorityName() + "]"; } public synchronized void buildDatabase() throws GTSInternalFault { if (!dbBuilt) { try { db.createDatabase(); if (!this.db.tableExists(PermissionsTable.TABLE_NAME)) { String sql = this.dbManager.getPermissionsTable().getCreateTableSQL(); db.update(sql); } dbBuilt = true; } catch 
(Exception e) { this.log.error("Unexpected error in creating the database.", e); GTSInternalFault fault = new GTSInternalFault(); fault.setFaultString("Unexpected error in creating the database."); throw fault; } } } public synchronized void clearDatabase() throws GTSInternalFault { try { buildDatabase(); db.update("delete FROM " + PermissionsTable.TABLE_NAME); } catch (Exception e) { this.log.error("Unexpected error in removing the database.", e); GTSInternalFault fault = new GTSInternalFault(); fault.setFaultString("Unexpected error in removing the database."); throw fault; } } }
/* * Copyright 2018 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.keycloak.testsuite.arquillian.undertow; import io.undertow.Undertow; import io.undertow.server.handlers.PathHandler; import io.undertow.servlet.api.DeploymentInfo; import io.undertow.servlet.api.DeploymentManager; import io.undertow.servlet.api.ServletContainer; import io.undertow.servlet.api.ServletInfo; import java.io.File; import java.io.IOException; import java.lang.reflect.Field; import java.nio.charset.Charset; import java.util.Collection; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Collectors; import javax.servlet.ServletException; import javax.ws.rs.Path; import javax.ws.rs.core.Application; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.arquillian.undertow.UndertowContainerConfiguration; import org.jboss.arquillian.container.spi.client.container.DeployableContainer; import org.jboss.arquillian.container.spi.client.container.DeploymentException; import org.jboss.arquillian.container.spi.client.container.LifecycleException; import org.jboss.arquillian.container.spi.client.protocol.ProtocolDescription; import org.jboss.arquillian.container.spi.client.protocol.metadata.HTTPContext; import 
org.jboss.arquillian.container.spi.client.protocol.metadata.ProtocolMetaData; import org.jboss.arquillian.container.spi.client.protocol.metadata.Servlet; import org.jboss.logging.Logger; import org.jboss.resteasy.plugins.server.undertow.UndertowJaxrsServer; import org.jboss.resteasy.spi.ResteasyDeployment; import org.jboss.shrinkwrap.api.Archive; import org.jboss.shrinkwrap.api.Node; import org.jboss.shrinkwrap.api.asset.ClassAsset; import org.jboss.shrinkwrap.api.spec.WebArchive; import org.jboss.shrinkwrap.descriptor.api.Descriptor; import org.jboss.shrinkwrap.undertow.api.UndertowWebArchive; import org.keycloak.common.util.reflections.Reflections; import org.keycloak.testsuite.arquillian.undertow.saml.util.RestSamlApplicationConfig; import org.keycloak.testsuite.utils.undertow.UndertowDeployerHelper; import org.keycloak.testsuite.utils.undertow.UndertowWarClassLoader; import java.io.InputStream; /** * @author <a href="mailto:vramik@redhat.com">Vlasta Ramik</a> */ public class UndertowAppServer implements DeployableContainer<UndertowAppServerConfiguration> { private static final Logger log = Logger.getLogger(UndertowAppServer.class); private UndertowContainerConfiguration configuration; private UndertowJaxrsServer undertow; Map<String, String> deployedArchivesToContextPath = new ConcurrentHashMap<>(); @Override public Class<UndertowAppServerConfiguration> getConfigurationClass() { return UndertowAppServerConfiguration.class; } @Override public void setup(UndertowAppServerConfiguration configuration) { this.configuration = configuration; } @Override public void start() throws LifecycleException { long start = System.currentTimeMillis(); undertow = new UndertowJaxrsServer(); undertow.start(Undertow.builder() .addHttpListener(configuration.getBindHttpPort(), configuration.getBindAddress())); log.infof("App server started in %dms on http://%s:%d/", (System.currentTimeMillis() - start), configuration.getBindAddress(), configuration.getBindHttpPort()); } @Override 
public void stop() throws LifecycleException { undertow.stop(); log.info("App Server stopped."); } @Override public ProtocolDescription getDefaultProtocol() { return new ProtocolDescription("Servlet 3.1"); } @Override public ProtocolMetaData deploy(Archive<?> archive) throws DeploymentException { log.info("Deploying archive " + archive.getName()); // Remove jsps String ioTMPDir = System.getProperty("java.io.tmpdir", ""); // My Intellij and Terminal stores tmp directory in this property if (!ioTMPDir.isEmpty()) { ioTMPDir = ioTMPDir.endsWith("/") ? ioTMPDir : ioTMPDir + "/"; File tmpUndertowJSPDirectory = new File(ioTMPDir + "org/apache/jsp"); if (tmpUndertowJSPDirectory.exists()) { try { FileUtils.deleteDirectory(tmpUndertowJSPDirectory); } catch (IOException e) { e.printStackTrace(); } } } DeploymentInfo di; if (archive instanceof UndertowWebArchive) { di = ((UndertowWebArchive) archive).getDeploymentInfo(); } else if (archive instanceof WebArchive) { WebArchive webArchive = (WebArchive)archive; Optional<Node> applicationClassNode = archive.getContent(archivePath -> archivePath.get().startsWith("/WEB-INF/classes/") && archivePath.get().endsWith("Application.class")) .values().stream().findFirst(); if (isJaxrsApp(webArchive)) { di = new UndertowDeployerHelper().getDeploymentInfo(configuration, webArchive, undertow.undertowDeployment(discoverPathAnnotatedClasses(webArchive))); } else if (applicationClassNode.isPresent()) { String applicationPath = applicationClassNode.get().getPath().get(); ResteasyDeployment deployment = new ResteasyDeployment(); deployment.setApplicationClass(extractClassName(applicationPath)); di = new UndertowDeployerHelper().getDeploymentInfo(configuration, (WebArchive) archive, undertow.undertowDeployment(deployment)); } else { di = new UndertowDeployerHelper().getDeploymentInfo(configuration, webArchive); } } else { throw new IllegalArgumentException("UndertowContainer only supports UndertowWebArchive or WebArchive."); } if 
("ROOT.war".equals(archive.getName())) { di.setContextPath("/"); } ClassLoader parentCl = Thread.currentThread().getContextClassLoader(); UndertowWarClassLoader classLoader = new UndertowWarClassLoader(parentCl, archive); Thread.currentThread().setContextClassLoader(classLoader); try { undertow.deploy(di); } finally { Thread.currentThread().setContextClassLoader(parentCl); } deployedArchivesToContextPath.put(archive.getName(), di.getContextPath()); return new ProtocolMetaData().addContext( createHttpContextForDeploymentInfo(di)); } private String extractClassName(String applicationPath) { applicationPath = applicationPath .substring(0, applicationPath.lastIndexOf(".class")) // Remove .class .replaceFirst("^/WEB-INF/classes/", ""); // Remove /WEB-INF/classes/ from beginning return applicationPath.replaceAll("/", "."); } @Override public void undeploy(Archive<?> archive) throws DeploymentException { log.info("Undeploying archive " + archive.getName()); Field containerField = Reflections.findDeclaredField(UndertowJaxrsServer.class, "container"); Reflections.setAccessible(containerField); ServletContainer container = (ServletContainer) Reflections.getFieldValue(containerField, undertow); DeploymentManager deploymentMgr = container.getDeployment(archive.getName()); if (deploymentMgr != null) { DeploymentInfo deployment = deploymentMgr.getDeployment().getDeploymentInfo(); try { deploymentMgr.stop(); } catch (ServletException se) { throw new DeploymentException(se.getMessage(), se); } deploymentMgr.undeploy(); Field rootField = Reflections.findDeclaredField(UndertowJaxrsServer.class, "root"); Reflections.setAccessible(rootField); PathHandler root = (PathHandler) Reflections.getFieldValue(rootField, undertow); String path = deployedArchivesToContextPath.get(archive.getName()); root.removePrefixPath(path); container.removeDeployment(deployment); } else { log.warnf("Deployment '%s' not found", archive.getName()); } } @Override public void deploy(Descriptor descriptor) throws 
DeploymentException { throw new UnsupportedOperationException("Not implemented"); } @Override public void undeploy(Descriptor descriptor) throws DeploymentException { throw new UnsupportedOperationException("Not implemented"); } private HTTPContext createHttpContextForDeploymentInfo(DeploymentInfo deploymentInfo) { HTTPContext httpContext = new HTTPContext(configuration.getBindAddress(), configuration.getBindHttpPort()); final Map<String, ServletInfo> servlets = deploymentInfo.getServlets(); final Collection<ServletInfo> servletsInfo = servlets.values(); for (ServletInfo servletInfo : servletsInfo) { httpContext.add(new Servlet(servletInfo.getName(), deploymentInfo.getContextPath())); } return httpContext; } private boolean isJaxrsApp(WebArchive archive) throws DeploymentException { if (! archive.contains("/WEB-INF/web.xml")) { return false; } try (InputStream stream = archive.get("/WEB-INF/web.xml").getAsset().openStream()) { return IOUtils.toString(stream, Charset.forName("UTF-8")) .contains(Application.class.getName()); } catch (IOException e) { throw new DeploymentException("Unable to read archive.", e); } } private ResteasyDeployment discoverPathAnnotatedClasses(WebArchive webArchive) { //take all classes from war and add those with @Path annotation to RestSamlApplicationConfig Set<Class<?>> classes = webArchive.getContent(archivePath -> archivePath.get().startsWith("/WEB-INF/classes/") && archivePath.get().endsWith(".class") ).values().stream() .filter(node -> node.getAsset() instanceof ClassAsset) .map(node -> ((ClassAsset)node.getAsset()).getSource()) .filter(clazz -> clazz.isAnnotationPresent(Path.class)) .collect(Collectors.toSet()); ResteasyDeployment deployment = new ResteasyDeployment(); deployment.setApplication(new RestSamlApplicationConfig(classes)); return deployment; } }
/* * Copyright 2012 Oracle Corporation. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.fatwire.gst.foundation.wra.navigation.support; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import COM.FutureTense.Interfaces.ICS; import com.fatwire.assetapi.query.Query; import com.fatwire.cs.core.db.PreparedStmt; import com.fatwire.cs.core.db.StatementParam; import com.fatwire.gst.foundation.facade.assetapi.asset.TemplateAsset; import com.fatwire.gst.foundation.facade.assetapi.asset.TemplateAssetAccess; import com.fatwire.gst.foundation.facade.sql.Row; import com.fatwire.gst.foundation.facade.sql.SqlHelper; import com.fatwire.gst.foundation.wra.navigation.NavigationNode; import com.fatwire.gst.foundation.wra.navigation.NavigationService; /** * * @deprecated as of release 12.x, will be replaced with a brand new, significantly improved NavigationService implementation (coming soon) * */ public abstract class AbstractNavigationService implements NavigationService { private static final Logger LOG = LoggerFactory.getLogger("com.fatwire.gst.foundation.wra.navigation.support.AbstractNavigationService"); private static final String NODE_SQL = "SELECT nid,oid,otype FROM SitePlanTree WHERE otype='Publication' AND exists (SELECT 1 FROM Publication WHERE name=? 
AND id=SitePlanTree.oid)"; private static final PreparedStmt NODE_STMT = new PreparedStmt(NODE_SQL, Arrays.asList("SitePlanTree", "Publication")); private static final String NAME_SQL = "SELECT nid,oid,otype FROM SitePlanTree WHERE EXISTS( SELECT 1 FROM Page p ,AssetPublication ap , Publication pub WHERE p.name=? AND pub.name=? AND ap.assetid=p.id AND pub.id = ap.pubid AND SitePlanTree.oid = p.id) AND ncode='Placed' ORDER BY nrank"; private static final PreparedStmt NAME_STMT = new PreparedStmt(NAME_SQL, Arrays.asList("SitePlanTree", "Page", "AssetPublication", "Publication")); private static final String ID_SQL = "SELECT nid,oid,otype FROM SitePlanTree WHERE SitePlanTree.oid = ? AND ncode='Placed' ORDER BY nrank"; private static final PreparedStmt ID_STMT = new PreparedStmt(ID_SQL, Arrays.asList("SitePlanTree")); static { NODE_STMT.setElement(0, "Publication", "name"); NAME_STMT.setElement(0, "Page", "name"); NAME_STMT.setElement(1, "Publication", "name"); ID_STMT.setElement(0, "SitePlanTree", "oid"); } protected final ICS ics; protected final TemplateAssetAccess assetTemplate; protected String linkLabelAttribute = "linktext"; protected String pathAttribute = "path"; protected AbstractNavigationService(ICS ics) { this(ics, new TemplateAssetAccess(ics)); } protected AbstractNavigationService(ICS ics, TemplateAssetAccess assetTemplate) { super(); this.ics = ics; this.assetTemplate = assetTemplate; } /** * Constructor that sets the linkLabel and path attributes. 
* * @param ics Content Server context object * @param assetTemplate template asset access * @param linkLabelAttribute link label attribute string * @param pathAttribute path attribute string */ protected AbstractNavigationService(ICS ics, TemplateAssetAccess assetTemplate, String linkLabelAttribute, String pathAttribute) { this.ics = ics; this.assetTemplate = assetTemplate; if (StringUtils.isBlank(linkLabelAttribute)) throw new IllegalArgumentException("linkLabelAttribute cannot be blank"); if (StringUtils.isBlank(pathAttribute)) throw new IllegalArgumentException("pathAttribute cannot be blank"); this.pathAttribute = pathAttribute; this.linkLabelAttribute = linkLabelAttribute; } /** * @param site site to run process over * @return the root SitePlanTree nodes for this site */ public Collection<NavigationNode> getRootNodesForSite(String site) { return getRootNodesForSite(site, -1); } @Override public Collection<NavigationNode> getRootNodesForSite(int depth) { return getRootNodesForSite(ics.GetVar("site")); } @Override public Collection<NavigationNode> getRootNodesForSite(String site, int depth) { return getRootNodesForSite(site, depth, linkLabelAttribute); } @Override public NavigationNode getNodeByName(String site, String pagename, int depth) { return getNodeByName(site, pagename, depth, this.linkLabelAttribute); } @Override public NavigationNode getNodeByName(String pagename, int depth, String linkAttribute) { return getNodeByName(ics.GetVar("site"), pagename, depth, this.linkLabelAttribute); } @Override public NavigationNode getNodeByName(String pagename, int depth) { return getNodeByName(ics.GetVar("site"), pagename, depth); } /** * @param site site to run process over * @param depth depth to return * @param linkAttribute link attribute * @return collection of navigation nodes */ @Override public Collection<NavigationNode> getRootNodesForSite(String site, int depth, String linkAttribute) { if (StringUtils.isBlank(site)) throw new IllegalArgumentException("site 
cannot be blank"); if (StringUtils.isBlank(linkAttribute)) throw new IllegalArgumentException("linkAttribute cannot be blank"); StatementParam param = NODE_STMT.newParam(); param.setString(0, site); Row root = SqlHelper.selectSingle(ics, NODE_STMT, param); if (root != null) { Long nid = root.getLong("nid"); return getNodeChildren(nid, 0, depth, linkAttribute); } else { LOG.debug("No root SitePlanTree nodes found for site " + site); } return Collections.emptyList(); } @Override public NavigationNode getNodeByName(String site, String pagename, int depth, String linkAttribute) { if (StringUtils.isBlank(site)) throw new IllegalArgumentException("site cannot be blank"); if (StringUtils.isBlank(pagename)) throw new IllegalArgumentException("pagename cannot be blank"); if (StringUtils.isBlank(linkAttribute)) throw new IllegalArgumentException("linkAttribute cannot be blank"); StatementParam param = NAME_STMT.newParam(); param.setString(0, pagename); param.setString(1, site); Row root = SqlHelper.selectSingle(ics, NAME_STMT, param); if (root != null) { final NavigationNode node = getNode(root, 0, depth, linkAttribute); return node; } else { LOG.debug("No SitePlanTree nodes found for Page " + pagename + " in site " + site); } return null; } @Override public NavigationNode getNodeByQuery(Query query, int depth, String linkAttribute) { Iterable<TemplateAsset> assets = assetTemplate.query(query); TemplateAsset asset; if (assets != null) asset = assets.iterator().next(); else return null; StatementParam param = ID_STMT.newParam(); param.setLong(0, asset.getAssetId().getId()); Row root = SqlHelper.selectSingle(ics, ID_STMT, param); if (root != null) { final NavigationNode node = getNode(root, 0, depth, linkAttribute); return node; } else { LOG.debug("No SitePlanTree nodes found for Query " + query.toString()); } return null; } /** * List all the child NavigationNode at this SitePlanTree nodeId. 
* * @param nodeId the nodeId from the SitePlanTree * @param level the tree level depth * @param depth the maximum depth * @param linkAttribute the attribute to use for the link text * @return collection of navigation nodes */ protected abstract Collection<NavigationNode> getNodeChildren(long nodeId, int level, int depth, String linkAttribute); /** * @param row the resultlist containing nid/oid/otype * @param level the current level * @param depth the maximum depth * @param linkAttribute the attribute for the link text. * @return the NavigationNode */ protected abstract NavigationNode getNode(Row row, int level, int depth, String linkAttribute); /** * @return the linkLabelAttribute */ public String getLinkLabelAttribute() { return linkLabelAttribute; } /** * @param linkLabelAttribute string value of link label attribute */ public void setLinkLabelAttribute(String linkLabelAttribute) { this.linkLabelAttribute = linkLabelAttribute; } /** * @return the pathAttribute */ public String getPathAttribute() { return pathAttribute; } /** * @param pathAttribute path attribute string */ public void setPathAttribute(String pathAttribute) { this.pathAttribute = pathAttribute; } }
// Copyright (C) 2014 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.server.notedb;

import static com.google.inject.Scopes.SINGLETON;
import static java.util.concurrent.TimeUnit.SECONDS;

import com.google.common.collect.ImmutableList;
import com.google.gerrit.common.TimeUtil;
import com.google.gerrit.common.data.SubmitRecord;
import com.google.gerrit.extensions.config.FactoryModule;
import com.google.gerrit.metrics.DisabledMetricMaker;
import com.google.gerrit.metrics.MetricMaker;
import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.Comment;
import com.google.gerrit.reviewdb.client.CommentRange;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.CurrentUser;
import com.google.gerrit.server.GerritPersonIdent;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.InternalUser;
import com.google.gerrit.server.account.AccountCache;
import com.google.gerrit.server.account.FakeRealm;
import com.google.gerrit.server.account.GroupBackend;
import com.google.gerrit.server.account.Realm;
import com.google.gerrit.server.config.AllUsersName;
import com.google.gerrit.server.config.AllUsersNameProvider;
import com.google.gerrit.server.config.AnonymousCowardName;
import com.google.gerrit.server.config.AnonymousCowardNameProvider;
import com.google.gerrit.server.config.CanonicalWebUrl;
import com.google.gerrit.server.config.DisableReverseDnsLookup;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.server.config.GerritServerId;
import com.google.gerrit.server.extensions.events.GitReferenceUpdated;
import com.google.gerrit.server.git.GitModule;
import com.google.gerrit.server.git.GitRepositoryManager;
import com.google.gerrit.server.group.SystemGroupBackend;
import com.google.gerrit.server.project.ProjectCache;
import com.google.gerrit.testing.ConfigSuite;
import com.google.gerrit.testing.FakeAccountCache;
import com.google.gerrit.testing.GerritBaseTests;
import com.google.gerrit.testing.InMemoryRepositoryManager;
import com.google.gerrit.testing.TestChanges;
import com.google.gerrit.testing.TestTimeUtil;
import com.google.gwtorm.server.OrmException;
import com.google.gwtorm.server.SchemaFactory;
import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.TypeLiteral;
import com.google.inject.util.Providers;
import java.sql.Timestamp;
import java.util.TimeZone;
import org.eclipse.jgit.internal.storage.dfs.InMemoryRepository;
import org.eclipse.jgit.junit.TestRepository;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.revwalk.RevWalk;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.runner.RunWith;

/**
 * Base fixture for NoteDb change-notes tests: builds an in-memory git
 * repository, two fake accounts ("Change Owner" and "Other Account"), a
 * deterministic clock, and a Guice injector wired for NoteDb-only operation
 * (ReviewDb bindings deliberately throw).
 *
 * <p>Runs once per {@link ConfigSuite} configuration (legacy vs. JSON
 * note serialization).
 */
@Ignore
@RunWith(ConfigSuite.class)
public abstract class AbstractChangeNotesTest extends GerritBaseTests {
  // Default suite config: legacy (non-JSON) note serialization.
  @ConfigSuite.Default
  public static Config changeNotesLegacy() {
    Config cfg = new Config();
    cfg.setBoolean("notedb", null, "writeJson", false);
    return cfg;
  }

  // Alternate suite config: JSON note serialization.
  @ConfigSuite.Config
  public static Config changeNotesJson() {
    Config cfg = new Config();
    cfg.setBoolean("notedb", null, "writeJson", true);
    return cfg;
  }

  // Injected by ConfigSuite with one of the configs above.
  @ConfigSuite.Parameter public Config testConfig;

  private static final TimeZone TZ = TimeZone.getTimeZone("America/Los_Angeles");

  protected Account.Id otherUserId;
  protected FakeAccountCache accountCache;
  protected IdentifiedUser changeOwner;
  protected IdentifiedUser otherUser;
  protected InMemoryRepository repo;
  protected InMemoryRepositoryManager repoManager;
  protected PersonIdent serverIdent;
  protected InternalUser internalUser;
  protected Project.NameKey project;
  protected RevWalk rw;
  protected TestRepository<InMemoryRepository> tr;

  @Inject protected IdentifiedUser.GenericFactory userFactory;

  @Inject protected NoteDbUpdateManager.Factory updateManagerFactory;

  @Inject protected AllUsersName allUsers;

  @Inject protected AbstractChangeNotes.Args args;

  @Inject @GerritServerId private String serverId;

  protected Injector injector;
  // Saved value of user.timezone so resetTime() can restore it.
  private String systemTimeZone;

  @Before
  public void setUp() throws Exception {
    setTimeForTesting();

    serverIdent = new PersonIdent("Gerrit Server", "noreply@gerrit.com", TimeUtil.nowTs(), TZ);
    project = new Project.NameKey("test-project");
    repoManager = new InMemoryRepositoryManager();
    repo = repoManager.createRepository(project);
    tr = new TestRepository<>(repo);
    rw = tr.getRevWalk();
    accountCache = new FakeAccountCache();
    Account co = new Account(new Account.Id(1), TimeUtil.nowTs());
    co.setFullName("Change Owner");
    co.setPreferredEmail("change@owner.com");
    accountCache.put(co);
    Account ou = new Account(new Account.Id(2), TimeUtil.nowTs());
    ou.setFullName("Other Account");
    ou.setPreferredEmail("other@account.com");
    accountCache.put(ou);

    injector =
        Guice.createInjector(
            new FactoryModule() {
              @Override
              public void configure() {
                install(new GitModule());
                install(NoteDbModule.forTest(testConfig));
                bind(AllUsersName.class).toProvider(AllUsersNameProvider.class);
                bind(String.class).annotatedWith(GerritServerId.class).toInstance("gerrit");
                bind(GitRepositoryManager.class).toInstance(repoManager);
                bind(ProjectCache.class).toProvider(Providers.<ProjectCache>of(null));
                bind(Config.class).annotatedWith(GerritServerConfig.class).toInstance(testConfig);
                bind(String.class)
                    .annotatedWith(AnonymousCowardName.class)
                    .toProvider(AnonymousCowardNameProvider.class);
                bind(String.class)
                    .annotatedWith(CanonicalWebUrl.class)
                    .toInstance("http://localhost:8080/");
                bind(Boolean.class)
                    .annotatedWith(DisableReverseDnsLookup.class)
                    .toInstance(Boolean.FALSE);
                bind(Realm.class).to(FakeRealm.class);
                bind(GroupBackend.class).to(SystemGroupBackend.class).in(SINGLETON);
                bind(AccountCache.class).toInstance(accountCache);
                bind(PersonIdent.class)
                    .annotatedWith(GerritPersonIdent.class)
                    .toInstance(serverIdent);
                bind(GitReferenceUpdated.class).toInstance(GitReferenceUpdated.DISABLED);
                bind(MetricMaker.class).to(DisabledMetricMaker.class);
                bind(ReviewDb.class).toProvider(Providers.<ReviewDb>of(null));

                // Migration is pinned to the final (NoteDb-only) state for these tests.
                MutableNotesMigration migration = MutableNotesMigration.newDisabled();
                migration.setFrom(NotesMigrationState.FINAL);
                bind(MutableNotesMigration.class).toInstance(migration);
                bind(NotesMigration.class).to(MutableNotesMigration.class);

                // Tests don't support ReviewDb at all, but bindings are required via NoteDbModule.
                bind(new TypeLiteral<SchemaFactory<ReviewDb>>() {})
                    .toInstance(
                        () -> {
                          throw new UnsupportedOperationException();
                        });
                bind(ChangeBundleReader.class)
                    .toInstance(
                        (db, id) -> {
                          throw new UnsupportedOperationException();
                        });
              }
            });

    injector.injectMembers(this);
    repoManager.createRepository(allUsers);
    changeOwner = userFactory.create(co.getId());
    otherUser = userFactory.create(ou.getId());
    otherUserId = otherUser.getAccountId();
    internalUser = new InternalUser();
  }

  // Pins the JVM timezone and makes TimeUtil.nowTs() advance one second per call.
  private void setTimeForTesting() {
    systemTimeZone = System.setProperty("user.timezone", "US/Eastern");
    TestTimeUtil.resetWithClockStep(1, SECONDS);
  }

  @After
  public void resetTime() {
    TestTimeUtil.useSystemTime();
    System.setProperty("user.timezone", systemTimeZone);
  }

  /** Creates and commits a new change owned by {@code changeOwner}. */
  protected Change newChange(boolean workInProgress) throws Exception {
    Change c = TestChanges.newChange(project, changeOwner.getAccountId());
    ChangeUpdate u = newUpdate(c, changeOwner);
    u.setChangeId(c.getKey().get());
    u.setBranch(c.getDest().get());
    u.setWorkInProgress(workInProgress);
    u.commit();
    return c;
  }

  protected Change newWorkInProgressChange() throws Exception {
    return newChange(true);
  }

  protected Change newChange() throws Exception {
    return newChange(false);
  }

  /** Builds a ChangeUpdate for the change's current patch set on behalf of {@code user}. */
  protected ChangeUpdate newUpdate(Change c, CurrentUser user) throws Exception {
    ChangeUpdate update = TestChanges.newUpdate(injector, c, user);
    update.setPatchSetId(c.currentPatchSetId());
    update.setAllowWriteToNewRef(true);
    return update;
  }

  /** Loads fresh ChangeNotes for the change from the repository. */
  protected ChangeNotes newNotes(Change c) throws OrmException {
    return new ChangeNotes(args, c).load();
  }

  /** Builds a SubmitRecord from string status names; labels are optional. */
  protected static SubmitRecord submitRecord(
      String status, String errorMessage, SubmitRecord.Label... labels) {
    SubmitRecord rec = new SubmitRecord();
    rec.status = SubmitRecord.Status.valueOf(status);
    rec.errorMessage = errorMessage;
    if (labels.length > 0) {
      rec.labels = ImmutableList.copyOf(labels);
    }
    return rec;
  }

  protected static SubmitRecord.Label submitLabel(
      String name, String status, Account.Id appliedBy) {
    SubmitRecord.Label label = new SubmitRecord.Label();
    label.label = name;
    label.status = SubmitRecord.Label.Status.valueOf(status);
    label.appliedBy = appliedBy;
    return label;
  }

  /** Builds a Comment on the given patch set/file, attributed to {@code commenter}. */
  protected Comment newComment(
      PatchSet.Id psId,
      String filename,
      String UUID,
      CommentRange range,
      int line,
      IdentifiedUser commenter,
      String parentUUID,
      Timestamp t,
      String message,
      short side,
      String commitSHA1,
      boolean unresolved) {
    Comment c =
        new Comment(
            new Comment.Key(UUID, filename, psId.get()),
            commenter.getAccountId(),
            t,
            side,
            message,
            serverId,
            unresolved);
    c.lineNbr = line;
    c.parentUuid = parentUUID;
    c.revId = commitSHA1;
    c.setRange(range);
    return c;
  }

  /** Truncates a timestamp to whole seconds (NoteDb stores second precision). */
  protected static Timestamp truncate(Timestamp ts) {
    return new Timestamp((ts.getTime() / 1000) * 1000);
  }

  /** Returns a timestamp {@code millis} after the change's creation time. */
  protected static Timestamp after(Change c, long millis) {
    return new Timestamp(c.getCreatedOn().getTime() + millis);
  }
}
package utilities.json;

import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.logging.Level;
import java.util.logging.Logger;

import argo.jdom.JsonNode;
import argo.jdom.JsonNodeFactories;
import argo.jdom.JsonStringNode;

import utilities.StringUtilities;

/**
 * Automatically turns a JSON into a Java object and back using reflection.
 * Only consider private fields. Inherited fields are not considered.
 *
 * <p>Notes on the contract (as implemented below):
 * <ul>
 *   <li>Only private, non-static declared fields participate, in both directions.
 *   <li>Maps are rejected outright: generic type information would be lost.
 *   <li>Iterable fields are re-created as {@link ArrayList}, regardless of the
 *       declared collection type.
 *   <li>null field values are simply omitted when serializing.
 * </ul>
 */
public class Jsonizer {

	private static final Logger LOGGER = Logger.getLogger(Jsonizer.class.getName());

	// Static utility class; never instantiated.
	private Jsonizer() {}

	/**
	 * Populates {@code dest}'s private fields from {@code node}.
	 *
	 * @param node JSON object node whose keys map (via camelCase conversion) to field names
	 * @param dest target object, mutated in place
	 * @return true on success; false if any reflective step failed (logged at WARNING)
	 */
	public static boolean parse(JsonNode node, IJsonable dest) {
		try {
			return internalParse(node, dest);
		} catch (NoSuchFieldException | SecurityException | IllegalArgumentException
				| IllegalAccessException | InstantiationException | InvocationTargetException e) {
			LOGGER.log(Level.WARNING, "Unable to parse JSON into object.", e);
			return false;
		}
	}

	/**
	 * Recursive worker for {@link #parse(JsonNode, IJsonable)}.
	 * Walks every key of the JSON object, resolves the matching declared field
	 * on {@code dest}'s class, and fills it by type: primitive/String directly,
	 * Iterable via {@link #parseIterableField}, anything else recursively.
	 *
	 * @throws NoSuchFieldException if a JSON key has no matching declared field
	 * @throws IllegalArgumentException if {@code dest} is a Map (type info unavailable)
	 */
	private static boolean internalParse(JsonNode node, Object dest)
			throws NoSuchFieldException, SecurityException, IllegalArgumentException,
			IllegalAccessException, InstantiationException, InvocationTargetException {
		Class<?> clazz = dest.getClass();
		if (isMapType(clazz)) {
			throw new IllegalArgumentException(
					"Cannot parse the following JSON node as map since type information is not available.\n"
					+ JSONUtility.jsonToString(node));
		}

		for (Entry<JsonStringNode, JsonNode> inner : node.getFields().entrySet()) {
			JsonStringNode nameNode = inner.getKey();
			JsonNode valueNode = inner.getValue();

			// JSON keys are converted to camelCase to find the Java field name.
			// NOTE(review): jsonize() below emits the raw field name; round-tripping
			// therefore assumes toCamelCase is a no-op on camelCase input — verify.
			String fieldName = StringUtilities.toCamelCase(nameNode.getStringValue());
			Field field = clazz.getDeclaredField(fieldName);
			int modifier = field.getModifiers();
			if (Modifier.isStatic(modifier) || !Modifier.isPrivate(modifier)) {
				LOGGER.warning("Skipping field " + fieldName
						+ " when parsing JSON. Field is either static or non-private.");
				continue;
			}
			field.setAccessible(true);

			if (isPrimitiveOrString(field.getType())) {
				field.set(dest, toPrimitiveOrString(valueNode, field.getType()));
				continue;
			}
			if (isIterableType(field.getType())) {
				// NOTE(review): always assigns an ArrayList; a field declared as e.g.
				// Set would throw IllegalArgumentException here at runtime.
				field.set(dest, parseIterableField(valueNode, field));
				continue;
			}
			// Nested object: instantiate via no-arg constructor, then recurse.
			Object o = getDefaultConstructor(field.getType()).newInstance();
			if (!internalParse(valueNode, o)) {
				return false;
			}
			field.set(dest, o);
		}
		return true;
	}

	/**
	 * Parses a JSON array node into a List for an Iterable-typed field.
	 * The element type is taken from the field's single generic type argument.
	 *
	 * @throws IllegalArgumentException if the node is not an array, the field does
	 *         not have exactly one type argument, or a nested element fails to parse
	 */
	private static List<Object> parseIterableField(JsonNode valueNode, Field field)
			throws IllegalArgumentException, IllegalAccessException, InstantiationException,
			InvocationTargetException, NoSuchFieldException, SecurityException {
		if (!valueNode.isArrayNode()) {
			throw new IllegalArgumentException("Expecting node to be array but is type "
					+ valueNode.getType() + ". " + JSONUtility.jsonToString(valueNode));
		}
		List<JsonNode> valueNodes = valueNode.getArrayNode();

		// Recover the element class from the field's generic signature, e.g.
		// List<Foo> -> Foo. Raw or wildcard types are not supported here.
		ParameterizedType genericType = (ParameterizedType) field.getGenericType();
		Type[] iterableTypes = genericType.getActualTypeArguments();
		if (iterableTypes.length != 1) {
			throw new IllegalArgumentException(
					"Expecting one type arguments for iterable attribute but found "
					+ iterableTypes.length);
		}
		Class<?> clazz = (Class<?>) iterableTypes[0];

		if (isPrimitiveOrString(clazz)) {
			List<Object> output = new ArrayList<>();
			for (JsonNode n : valueNodes) {
				output.add(toPrimitiveOrString(n, clazz));
			}
			return output;
		}

		// Complex element type: construct each element and parse recursively.
		Constructor<?> constructor = getDefaultConstructor(clazz);
		List<Object> output = new ArrayList<>();
		for (JsonNode n : valueNodes) {
			Object o = constructor.newInstance();
			if (!internalParse(n, o)) {
				throw new IllegalArgumentException("Unable to parse internal node.");
			}
			output.add(o);
		}
		return output;
	}

	/**
	 * Retrieves the constructor with zero parameter and set it to be accessible.
	 *
	 * @throws IllegalArgumentException if the class declares no zero-arg constructor
	 */
	private static Constructor<?> getDefaultConstructor(Class<?> clazz) {
		for (Constructor<?> constructor : clazz.getDeclaredConstructors()) {
			if (constructor.getParameterCount() != 0) {
				continue;
			}
			constructor.setAccessible(true);
			return constructor;
		}
		throw new IllegalArgumentException(
				"No constructor with zero parameter found for " + clazz.getName());
	}

	/**
	 * Serializes {@code o} to a JSON node.
	 *
	 * @return the JSON representation, or null if serialization failed (logged)
	 */
	public static JsonNode jsonize(Object o) {
		try {
			return internalJsonize(o);
		} catch (IllegalArgumentException | IllegalAccessException e) {
			LOGGER.log(Level.WARNING, "Failed to jsonize object " + o.getClass(), e);
			return null;
		}
	}

	/**
	 * Recursive worker for {@link #jsonize(Object)}: primitives/Strings become
	 * leaf nodes, Iterables become arrays, Maps are rejected, and any other
	 * object becomes a JSON object built from its private non-static fields.
	 */
	private static JsonNode internalJsonize(Object o)
			throws IllegalArgumentException, IllegalAccessException {
		Class<?> objectClass = o.getClass();
		if (isPrimitiveOrString(objectClass)) {
			return fromPrimitiveOrString(objectClass, o);
		}
		if (isIterableType(objectClass)) {
			Iterable<?> it = (Iterable<?>) o;
			List<JsonNode> nodes = new ArrayList<>();
			for (Iterator<?> i = it.iterator(); i.hasNext(); ) {
				Object next = i.next();
				JsonNode node = internalJsonize(next);
				nodes.add(node);
			}
			return JsonNodeFactories.array(nodes);
		}
		if (isMapType(objectClass)) {
			throw new IllegalArgumentException(
					"Cannot convert map to JSON since type will not be available to parse back.");
		}

		Map<JsonStringNode, JsonNode> data = new HashMap<>();
		Class<?> clazz = o.getClass();
		Field[] fields = clazz.getDeclaredFields();
		for (Field field : fields) {
			int modifier = field.getModifiers();
			// Mirror of internalParse: only private instance fields are serialized.
			if (Modifier.isStatic(modifier) || !Modifier.isPrivate(modifier)) {
				continue;
			}
			field.setAccessible(true);
			Object value = field.get(o);
			String jsonName = field.getName();
			JsonStringNode nameNode = JsonNodeFactories.string(jsonName);
			// null values are omitted from the output entirely.
			if (value == null) {
				continue;
			}
			JsonNode node = internalJsonize(value);
			data.put(nameNode, node);
		}
		return JsonNodeFactories.object(data);
	}

	/**
	 * Converts a leaf JSON node to the requested primitive/wrapper/String value.
	 * Numeric values arrive as strings from argo and are parsed here.
	 * NOTE(review): byte and short are parsed as int and narrowed by cast, so
	 * out-of-range JSON values wrap silently instead of failing — confirm intended.
	 *
	 * @throws IllegalArgumentException for any unsupported target class
	 */
	@SuppressWarnings("rawtypes")
	private static Object toPrimitiveOrString(JsonNode node, Class clazz)
			throws IllegalArgumentException, IllegalAccessException {
		if (clazz == String.class) {
			return node.getStringValue();
		} else if (clazz == Boolean.TYPE || clazz == Boolean.class) {
			return node.getBooleanValue();
		} else if (clazz == Byte.TYPE || clazz == Byte.class) {
			int value = Integer.parseInt(node.getNumberValue());
			return (byte) value;
		} else if (clazz == Character.TYPE || clazz == Character.class) {
			// A char is represented in JSON as a string; only the first char is used.
			String value = node.getStringValue();
			return value.charAt(0);
		} else if (clazz == Short.TYPE || clazz == Short.class) {
			int value = Integer.parseInt(node.getNumberValue());
			return (short) value;
		} else if (clazz == Integer.TYPE || clazz == Integer.class) {
			int value = Integer.parseInt(node.getNumberValue());
			return value;
		} else if (clazz == Long.TYPE || clazz == Long.class) {
			long value = Long.parseLong(node.getNumberValue());
			return (long) value;
		} else if (clazz == Float.TYPE || clazz == Float.class) {
			float value = Float.parseFloat(node.getNumberValue());
			return value;
		} else if (clazz == Double.TYPE || clazz == Double.class) {
			double value = Double.parseDouble(node.getNumberValue());
			return value;
		} else {
			throw new IllegalArgumentException("Unknown type " + clazz);
		}
	}

	/**
	 * Converts a boxed primitive or String to the corresponding argo leaf node.
	 * Only wrapper classes appear here because {@code Field#get} boxes primitives.
	 * char is emitted as a one-character string; float/double via string form.
	 *
	 * @throws IllegalArgumentException for any unsupported value class
	 */
	private static JsonNode fromPrimitiveOrString(Class<?> clazz, Object value) {
		if (clazz == String.class) {
			return JsonNodeFactories.string((String)value);
		} else if (clazz == Boolean.class) {
			return JsonNodeFactories.booleanNode((boolean) value);
		} else if (clazz == Byte.class) {
			return JsonNodeFactories.number((byte)value);
		} else if (clazz == Character.class) {
			return JsonNodeFactories.string(Character.toString((char)value));
		} else if (clazz == Short.class) {
			return JsonNodeFactories.number((short)value);
		} else if (clazz == Integer.class) {
			return JsonNodeFactories.number((int)value);
		} else if (clazz == Long.class) {
			return JsonNodeFactories.number((long)value);
		} else if (clazz == Float.class) {
			return JsonNodeFactories.number("" + (float)value);
		} else if (clazz == Double.class) {
			return JsonNodeFactories.number("" + (double)value);
		}
		throw new IllegalArgumentException("Unknown type " + clazz);
	}

	// True if the class can be iterated (serialized as a JSON array).
	private static boolean isIterableType(Class<?> clazz) {
		return Iterable.class.isAssignableFrom(clazz);
	}

	// True for Map types, which this (de)serializer deliberately rejects.
	private static boolean isMapType(Class<?> clazz) {
		return Map.class.isAssignableFrom(clazz);
	}

	/** True for String plus every primitive type and its wrapper class. */
	public static boolean isPrimitiveOrString(Class<?> clazz) {
		return isString(clazz)
				|| clazz == Boolean.class || clazz == Boolean.TYPE
				|| clazz == Byte.class || clazz == Byte.TYPE
				|| clazz == Character.class|| clazz == Character.TYPE
				|| clazz == Short.class || clazz == Short.TYPE
				|| clazz == Integer.class || clazz == Integer.TYPE
				|| clazz == Long.class || clazz == Long.TYPE
				|| clazz == Float.class || clazz == Float.TYPE
				|| clazz == Double.class || clazz == Double.TYPE;
	}

	private static boolean isString(Class<?> clazz) {
		return clazz == String.class;
	}
}
package org.camunda.bpm.engine.rest;

import static com.jayway.restassured.RestAssured.expect;
import static com.jayway.restassured.RestAssured.given;
import static com.jayway.restassured.path.json.JsonPath.from;
import static org.fest.assertions.Assertions.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Matchers.anySetOf;
import static org.mockito.Matchers.argThat;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response.Status;

import org.camunda.bpm.engine.ProcessEngineException;
import org.camunda.bpm.engine.impl.calendar.DateTimeUtil;
import org.camunda.bpm.engine.rest.exception.InvalidRequestException;
import org.camunda.bpm.engine.rest.helper.MockProvider;
import org.camunda.bpm.engine.rest.helper.variable.EqualsPrimitiveValue;
import org.camunda.bpm.engine.rest.util.OrderingBuilder;
import org.camunda.bpm.engine.rest.util.container.TestContainerRule;
import org.camunda.bpm.engine.runtime.ProcessInstance;
import org.camunda.bpm.engine.runtime.ProcessInstanceQuery;
import org.junit.Assert;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.mockito.InOrder;
import org.mockito.Mockito;

import com.jayway.restassured.http.ContentType;
import com.jayway.restassured.response.Response;

/**
 * REST API tests for the process-instance query resource.
 *
 * <p>Pattern used throughout: the engine's {@code ProcessInstanceQuery} is a
 * Mockito mock, a REST-assured request is fired against the test container,
 * and the test then verifies which query methods were invoked and/or inspects
 * the JSON response body.
 */
public class ProcessInstanceRestServiceQueryTest extends AbstractRestServiceTest {

  @ClassRule
  public static TestContainerRule rule = new TestContainerRule();

  protected static final String PROCESS_INSTANCE_QUERY_URL = TEST_RESOURCE_ROOT_PATH + "/process-instance";
  protected static final String PROCESS_INSTANCE_COUNT_QUERY_URL = PROCESS_INSTANCE_QUERY_URL + "/count";

  // Recreated before every test by setUpRuntimeData().
  protected ProcessInstanceQuery mockedQuery;

  @Before
  public void setUpRuntimeData() {
    mockedQuery = setUpMockInstanceQuery(createMockInstanceList());
  }

  // Stubs a ProcessInstanceQuery that returns the given instances for list()
  // and their count for count(), and wires it into the mocked RuntimeService.
  private ProcessInstanceQuery setUpMockInstanceQuery(List<ProcessInstance> mockedInstances) {
    ProcessInstanceQuery sampleInstanceQuery = mock(ProcessInstanceQuery.class);
    when(sampleInstanceQuery.list()).thenReturn(mockedInstances);
    when(sampleInstanceQuery.count()).thenReturn((long) mockedInstances.size());
    when(processEngine.getRuntimeService().createProcessInstanceQuery()).thenReturn(sampleInstanceQuery);
    return sampleInstanceQuery;
  }

  // Default fixture: a single fully-populated mock process instance.
  private List<ProcessInstance> createMockInstanceList() {
    List<ProcessInstance> mocks = new ArrayList<ProcessInstance>();
    mocks.add(MockProvider.createMockInstance());
    return mocks;
  }

  // An empty filter value must still yield HTTP 200, not an error.
  @Test
  public void testEmptyQuery() {
    String queryKey = "";
    given().queryParam("processDefinitionKey", queryKey)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(PROCESS_INSTANCE_QUERY_URL);
  }

  // Malformed "variables" parameters must produce 400 with a descriptive message.
  @Test
  public void testInvalidVariableRequests() {
    // invalid comparator
    String invalidComparator = "anInvalidComparator";
    String variableName = "varName";
    String variableValue = "varValue";
    String queryValue = variableName + "_" + invalidComparator + "_" + variableValue;
    given().queryParam("variables", queryValue)
      .then().expect().statusCode(Status.BAD_REQUEST.getStatusCode()).contentType(ContentType.JSON)
      .body("type", equalTo(InvalidRequestException.class.getSimpleName()))
      .body("message", containsString("Invalid variable comparator specified: " + invalidComparator))
      .when().get(PROCESS_INSTANCE_QUERY_URL);

    // invalid format
    queryValue = "invalidFormattedVariableQuery";
    given().queryParam("variables", queryValue)
      .then().expect().statusCode(Status.BAD_REQUEST.getStatusCode()).contentType(ContentType.JSON)
      .body("type", equalTo(InvalidRequestException.class.getSimpleName()))
      .body("message", containsString("variable query parameter has to have format KEY_OPERATOR_VALUE"))
      .when().get(PROCESS_INSTANCE_QUERY_URL);
  }

  // Unknown sortBy / sortOrder values are rejected with 400.
  @Test
  public void testInvalidSortingOptions() {
    executeAndVerifySorting("anInvalidSortByOption", "asc", Status.BAD_REQUEST);
    executeAndVerifySorting("definitionId", "anInvalidSortOrderOption", Status.BAD_REQUEST);
  }

  // Fires a GET with the given sorting parameters and asserts the status code.
  protected void executeAndVerifySorting(String sortBy, String sortOrder, Status expectedStatus) {
    given().queryParam("sortBy", sortBy).queryParam("sortOrder", sortOrder)
      .then().expect().statusCode(expectedStatus.getStatusCode())
      .when().get(PROCESS_INSTANCE_QUERY_URL);
  }

  // sortBy without sortOrder is invalid.
  @Test
  public void testSortByParameterOnly() {
    given().queryParam("sortBy", "definitionId")
      .then().expect().statusCode(Status.BAD_REQUEST.getStatusCode())
      .when().get(PROCESS_INSTANCE_QUERY_URL);
  }

  // sortOrder without sortBy is invalid.
  @Test
  public void testSortOrderParameterOnly() {
    given().queryParam("sortOrder", "asc")
      .then().expect().statusCode(Status.BAD_REQUEST.getStatusCode())
      .when().get(PROCESS_INSTANCE_QUERY_URL);
  }

  // Happy path: one instance is returned and every JSON field matches the mock.
  @Test
  public void testInstanceRetrieval() {
    String queryKey = "key";
    Response response = given().queryParam("processDefinitionKey", queryKey)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(PROCESS_INSTANCE_QUERY_URL);

    // assert query invocation
    InOrder inOrder = Mockito.inOrder(mockedQuery);
    inOrder.verify(mockedQuery).processDefinitionKey(queryKey);
    inOrder.verify(mockedQuery).list();

    String content = response.asString();
    List<String> instances = from(content).getList("");
    Assert.assertEquals("There should be one process definition returned.", 1, instances.size());
    Assert.assertNotNull("There should be one process definition returned", instances.get(0));

    String returnedInstanceId = from(content).getString("[0].id");
    Boolean returnedIsEnded = from(content).getBoolean("[0].ended");
    String returnedDefinitionId = from(content).getString("[0].definitionId");
    String returnedBusinessKey = from(content).getString("[0].businessKey");
    Boolean returnedIsSuspended = from(content).getBoolean("[0].suspended");
    String returnedCaseInstanceId = from(content).getString("[0].caseInstanceId");
    String returnedTenantId = from(content).getString("[0].tenantId");

    Assert.assertEquals(MockProvider.EXAMPLE_PROCESS_INSTANCE_ID, returnedInstanceId);
    Assert.assertEquals(MockProvider.EXAMPLE_PROCESS_INSTANCE_IS_ENDED, returnedIsEnded);
    Assert.assertEquals(MockProvider.EXAMPLE_PROCESS_DEFINITION_ID, returnedDefinitionId);
    Assert.assertEquals(MockProvider.EXAMPLE_PROCESS_INSTANCE_BUSINESS_KEY, returnedBusinessKey);
    Assert.assertEquals(MockProvider.EXAMPLE_PROCESS_INSTANCE_IS_SUSPENDED, returnedIsSuspended);
    Assert.assertEquals(MockProvider.EXAMPLE_CASE_INSTANCE_ID, returnedCaseInstanceId);
    Assert.assertEquals(MockProvider.EXAMPLE_TENANT_ID, returnedTenantId);
  }

  // An instance with unset attributes must serialize them as null, not fail.
  @Test
  public void testIncompleteProcessInstance() {
    setUpMockInstanceQuery(createIncompleteMockInstances());
    Response response = expect().statusCode(Status.OK.getStatusCode())
        .when().get(PROCESS_INSTANCE_QUERY_URL);

    String content = response.asString();
    String returnedBusinessKey = from(content).getString("[0].businessKey");
    Assert.assertNull("Should be null, as it is also null in the original process instance on the server.",
        returnedBusinessKey);
  }

  // Fixture for the incomplete-instance test: only the id is stubbed.
  private List<ProcessInstance> createIncompleteMockInstances() {
    List<ProcessInstance> mocks = new ArrayList<ProcessInstance>();
    ProcessInstance mockInstance = mock(ProcessInstance.class);
    when(mockInstance.getId()).thenReturn(MockProvider.EXAMPLE_PROCESS_INSTANCE_ID);
    mocks.add(mockInstance);
    return mocks;
  }

  // With no parameters, only list() may be invoked on the query.
  @Test
  public void testNoParametersQuery() {
    expect().statusCode(Status.OK.getStatusCode()).when().get(PROCESS_INSTANCE_QUERY_URL);
    verify(mockedQuery).list();
    verifyNoMoreInteractions(mockedQuery);
  }

  // Every simple (non-variable) GET filter parameter maps to its query method.
  @Test
  public void testAdditionalParametersExcludingVariables() {
    Map<String, String> queryParameters = getCompleteQueryParameters();

    given().queryParams(queryParameters)
      .expect().statusCode(Status.OK.getStatusCode())
      .when().get(PROCESS_INSTANCE_QUERY_URL);

    verify(mockedQuery).caseInstanceId(queryParameters.get("caseInstanceId"));
    verify(mockedQuery).processInstanceBusinessKey(queryParameters.get("businessKey"));
    verify(mockedQuery).processInstanceBusinessKeyLike(queryParameters.get("businessKeyLike"));
    verify(mockedQuery).processDefinitionKey(queryParameters.get("processDefinitionKey"));
    verify(mockedQuery).processDefinitionId(queryParameters.get("processDefinitionId"));
    verify(mockedQuery).deploymentId(queryParameters.get("deploymentId"));
    verify(mockedQuery).superProcessInstanceId(queryParameters.get("superProcessInstance"));
    verify(mockedQuery).subProcessInstanceId(queryParameters.get("subProcessInstance"));
    verify(mockedQuery).superCaseInstanceId(queryParameters.get("superCaseInstance"));
    verify(mockedQuery).subCaseInstanceId(queryParameters.get("subCaseInstance"));
    verify(mockedQuery).suspended();
    verify(mockedQuery).active();
    verify(mockedQuery).incidentId(queryParameters.get("incidentId"));
    verify(mockedQuery).incidentMessage(queryParameters.get("incidentMessage"));
    verify(mockedQuery).incidentMessageLike(queryParameters.get("incidentMessageLike"));
    verify(mockedQuery).incidentType(queryParameters.get("incidentType"));
    verify(mockedQuery).list();
  }

  // One example value for every supported simple filter parameter.
  private Map<String, String> getCompleteQueryParameters() {
    Map<String, String> parameters = new HashMap<String, String>();

    parameters.put("businessKey", "aBusinessKey");
    parameters.put("businessKeyLike", "aKeyLike");
    parameters.put("processDefinitionKey", "aProcDefKey");
    parameters.put("processDefinitionId", "aProcDefId");
    parameters.put("deploymentId", "deploymentId");
    parameters.put("superProcessInstance", "aSuperProcInstId");
    parameters.put("subProcessInstance", "aSubProcInstId");
    parameters.put("superCaseInstance", "aSuperCaseInstId");
    parameters.put("subCaseInstance", "aSubCaseInstId");
    parameters.put("suspended", "true");
    parameters.put("active", "true");
    parameters.put("incidentId", "incId");
    parameters.put("incidentMessage", "incMessage");
    parameters.put("incidentMessageLike", "incMessageLike");
    parameters.put("incidentType", "incType");
    parameters.put("caseInstanceId", "aCaseInstanceId");

    return parameters;
  }

  // Each KEY_OPERATOR_VALUE comparator maps to the matching variableValue* method.
  @Test
  public void testVariableParameters() {
    String variableName = "varName";
    String variableValue = "varValue";
    String queryValue = variableName + "_eq_" + variableValue;
    given().queryParam("variables", queryValue)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(PROCESS_INSTANCE_QUERY_URL);
    verify(mockedQuery).variableValueEquals(variableName, variableValue);

    queryValue = variableName + "_gt_" + variableValue;
    given().queryParam("variables", queryValue)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(PROCESS_INSTANCE_QUERY_URL);
    verify(mockedQuery).variableValueGreaterThan(variableName, variableValue);

    queryValue = variableName + "_gteq_" + variableValue;
    given().queryParam("variables", queryValue)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(PROCESS_INSTANCE_QUERY_URL);
    verify(mockedQuery).variableValueGreaterThanOrEqual(variableName, variableValue);

    queryValue = variableName + "_lt_" + variableValue;
    given().queryParam("variables", queryValue)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(PROCESS_INSTANCE_QUERY_URL);
    verify(mockedQuery).variableValueLessThan(variableName, variableValue);

    queryValue = variableName + "_lteq_" + variableValue;
    given().queryParam("variables", queryValue)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(PROCESS_INSTANCE_QUERY_URL);
    verify(mockedQuery).variableValueLessThanOrEqual(variableName, variableValue);

    queryValue = variableName + "_like_" + variableValue;
    given().queryParam("variables", queryValue)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(PROCESS_INSTANCE_QUERY_URL);
    verify(mockedQuery).variableValueLike(variableName, variableValue);

    queryValue = variableName + "_neq_" + variableValue;
    given().queryParam("variables", queryValue)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(PROCESS_INSTANCE_QUERY_URL);
    verify(mockedQuery).variableValueNotEquals(variableName, variableValue);
  }

  // Comma-separated variable expressions are all applied to the query.
  @Test
  public void testMultipleVariableParameters() {
    String variableName1 = "varName";
    String variableValue1 = "varValue";
    String variableParameter1 = variableName1 + "_eq_" + variableValue1;

    String variableName2 = "anotherVarName";
    String variableValue2 = "anotherVarValue";
    String variableParameter2 = variableName2 + "_neq_" + variableValue2;

    String queryValue = variableParameter1 + "," + variableParameter2;

    given().queryParam("variables", queryValue)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(PROCESS_INSTANCE_QUERY_URL);

    verify(mockedQuery).variableValueEquals(variableName1, variableValue1);
    verify(mockedQuery).variableValueNotEquals(variableName2, variableValue2);
  }

  // POST body variant; the numeric value must arrive as a typed primitive value.
  @Test
  public void testMultipleVariableParametersAsPost() {
    String variableName = "varName";
    String variableValue = "varValue";
    String anotherVariableName = "anotherVarName";
    Integer anotherVariableValue = 30;

    Map<String, Object> variableJson = new HashMap<String, Object>();
    variableJson.put("name", variableName);
    variableJson.put("operator", "eq");
    variableJson.put("value", variableValue);

    Map<String, Object> anotherVariableJson = new HashMap<String, Object>();
    anotherVariableJson.put("name", anotherVariableName);
    anotherVariableJson.put("operator", "neq");
    anotherVariableJson.put("value", anotherVariableValue);

    List<Map<String, Object>> variables = new ArrayList<Map<String, Object>>();
    variables.add(variableJson);
    variables.add(anotherVariableJson);

    Map<String, Object> json = new HashMap<String, Object>();
    json.put("variables", variables);

    given().contentType(POST_JSON_CONTENT_TYPE).body(json)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().post(PROCESS_INSTANCE_QUERY_URL);

    verify(mockedQuery).variableValueEquals(variableName, variableValue);
    verify(mockedQuery).variableValueNotEquals(eq(anotherVariableName),
        argThat(EqualsPrimitiveValue.numberValue(anotherVariableValue)));
  }

  // ISO date strings in variable filters are converted to java.util.Date.
  @Test
  public void testDateVariableParameter() {
    String variableName = "varName";
    String variableValue = "2014-06-16T10:00:00";
    String queryValue = variableName + "_eq_" + variableValue;

    given()
      .queryParam("variables", queryValue)
    .then()
      .expect()
        .statusCode(Status.OK.getStatusCode())
      .when()
        .get(PROCESS_INSTANCE_QUERY_URL);

    Date date = DateTimeUtil.parseDate(variableValue);
    verify(mockedQuery).variableValueEquals(variableName, date);
  }

  // Same date conversion via the POST body.
  @Test
  public void testDateVariableParameterAsPost() {
    String variableName = "varName";
    String variableValue = "2014-06-16T10:00:00";

    Map<String, Object> variableJson = new HashMap<String, Object>();
    variableJson.put("name", variableName);
    variableJson.put("operator", "eq");
    variableJson.put("value", variableValue);

    List<Map<String, Object>> variables = new ArrayList<Map<String, Object>>();
    variables.add(variableJson);

    Map<String, Object> json = new HashMap<String, Object>();
    json.put("variables", variables);

    given()
      .contentType(POST_JSON_CONTENT_TYPE)
      .body(json)
    .then()
      .expect()
        .statusCode(Status.OK.getStatusCode())
      .when()
        .post(PROCESS_INSTANCE_QUERY_URL);

    Date date = DateTimeUtil.parseDate(variableValue);
    verify(mockedQuery).variableValueEquals(variableName, date);
  }

  // POST twin of testAdditionalParametersExcludingVariables.
  @Test
  public void testCompletePostParameters() {
    Map<String, String> queryParameters = getCompleteQueryParameters();

    given().contentType(POST_JSON_CONTENT_TYPE).body(queryParameters)
      .expect().statusCode(Status.OK.getStatusCode())
      .when().post(PROCESS_INSTANCE_QUERY_URL);

    verify(mockedQuery).processDefinitionKey(queryParameters.get("processDefinitionKey"));
    verify(mockedQuery).processDefinitionId(queryParameters.get("processDefinitionId"));
    verify(mockedQuery).deploymentId(queryParameters.get("deploymentId"));
    verify(mockedQuery).superProcessInstanceId(queryParameters.get("superProcessInstance"));
    verify(mockedQuery).subProcessInstanceId(queryParameters.get("subProcessInstance"));
    verify(mockedQuery).superCaseInstanceId(queryParameters.get("superCaseInstance"));
    verify(mockedQuery).subCaseInstanceId(queryParameters.get("subCaseInstance"));
    verify(mockedQuery).suspended();
    verify(mockedQuery).active();
    verify(mockedQuery).incidentId(queryParameters.get("incidentId"));
    verify(mockedQuery).incidentMessage(queryParameters.get("incidentMessage"));
    verify(mockedQuery).incidentMessageLike(queryParameters.get("incidentMessageLike"));
    verify(mockedQuery).incidentType(queryParameters.get("incidentType"));
    verify(mockedQuery).list();
  }

  // tenantIdIn (GET): filter is applied and both tenants come back in order.
  @Test
  public void testTenantIdListParameter() {
    mockedQuery = setUpMockInstanceQuery(createMockProcessInstancesTwoTenants());

    Response response = given()
      .queryParam("tenantIdIn", MockProvider.EXAMPLE_TENANT_ID_LIST)
    .then().expect()
      .statusCode(Status.OK.getStatusCode())
    .when()
      .get(PROCESS_INSTANCE_QUERY_URL);

    verify(mockedQuery).tenantIdIn(MockProvider.EXAMPLE_TENANT_ID, MockProvider.ANOTHER_EXAMPLE_TENANT_ID);
    verify(mockedQuery).list();

    String content = response.asString();
    List<String> instances = from(content).getList("");
    assertThat(instances).hasSize(2);

    String returnedTenantId1 = from(content).getString("[0].tenantId");
    String returnedTenantId2 = from(content).getString("[1].tenantId");

    assertThat(returnedTenantId1).isEqualTo(MockProvider.EXAMPLE_TENANT_ID);
    assertThat(returnedTenantId2).isEqualTo(MockProvider.ANOTHER_EXAMPLE_TENANT_ID);
  }

  // withoutTenantId (GET): instances without a tenant are returned with null tenantId.
  @Test
  public void testWithoutTenantIdParameter() {
    mockedQuery = setUpMockInstanceQuery(Arrays.asList(MockProvider.createMockInstance(null)));

    Response response = given()
      .queryParam("withoutTenantId", true)
    .then().expect()
      .statusCode(Status.OK.getStatusCode())
    .when()
      .get(PROCESS_INSTANCE_QUERY_URL);

    verify(mockedQuery).withoutTenantId();
    verify(mockedQuery).list();

    String content = response.asString();
    List<String> definitions = from(content).getList("");
    assertThat(definitions).hasSize(1);

    String returnedTenantId1 = from(content).getString("[0].tenantId");
    assertThat(returnedTenantId1).isEqualTo(null);
  }

  // tenantIdIn (POST): same expectations as the GET variant.
  @Test
  public void testTenantIdListPostParameter() {
    mockedQuery = setUpMockInstanceQuery(createMockProcessInstancesTwoTenants());

    Map<String, Object> queryParameters = new HashMap<String, Object>();
    queryParameters.put("tenantIdIn", MockProvider.EXAMPLE_TENANT_ID_LIST.split(","));

    Response response = given()
        .contentType(POST_JSON_CONTENT_TYPE)
        .body(queryParameters)
      .expect()
        .statusCode(Status.OK.getStatusCode())
      .when()
        .post(PROCESS_INSTANCE_QUERY_URL);

    verify(mockedQuery).tenantIdIn(MockProvider.EXAMPLE_TENANT_ID, MockProvider.ANOTHER_EXAMPLE_TENANT_ID);
    verify(mockedQuery).list();

    String content = response.asString();
    List<String> executions = from(content).getList("");
    assertThat(executions).hasSize(2);

    String returnedTenantId1 = from(content).getString("[0].tenantId");
    String returnedTenantId2 = from(content).getString("[1].tenantId");

    assertThat(returnedTenantId1).isEqualTo(MockProvider.EXAMPLE_TENANT_ID);
    assertThat(returnedTenantId2).isEqualTo(MockProvider.ANOTHER_EXAMPLE_TENANT_ID);
  }

  // withoutTenantId (POST): same expectations as the GET variant.
  @Test
  public void testWithoutTenantIdPostParameter() {
    mockedQuery = setUpMockInstanceQuery(Arrays.asList(MockProvider.createMockInstance(null)));

    Map<String, Object> queryParameters = new HashMap<String, Object>();
    queryParameters.put("withoutTenantId", true);

    Response response = given()
        .contentType(POST_JSON_CONTENT_TYPE)
        .body(queryParameters)
      .expect()
        .statusCode(Status.OK.getStatusCode())
      .when()
        .post(PROCESS_INSTANCE_QUERY_URL);

    verify(mockedQuery).withoutTenantId();
    verify(mockedQuery).list();

    String content = response.asString();
    List<String> definitions = from(content).getList("");
    assertThat(definitions).hasSize(1);

    String returnedTenantId1 = from(content).getString("[0].tenantId");
    assertThat(returnedTenantId1).isEqualTo(null);
  }

  // Fixture: one instance per example tenant, in list order.
  private List<ProcessInstance> createMockProcessInstancesTwoTenants() {
    return Arrays.asList(
        MockProvider.createMockInstance(MockProvider.EXAMPLE_TENANT_ID),
        MockProvider.createMockInstance(MockProvider.ANOTHER_EXAMPLE_TENANT_ID));
  }

  // activityIdIn (GET) fans out to the varargs query method.
  @Test
  public void testActivityIdListParameter() {
    given()
      .queryParam("activityIdIn", MockProvider.EXAMPLE_ACTIVITY_ID_LIST)
    .then().expect()
      .statusCode(Status.OK.getStatusCode())
    .when()
      .get(PROCESS_INSTANCE_QUERY_URL);

    verify(mockedQuery).activityIdIn(MockProvider.EXAMPLE_ACTIVITY_ID, MockProvider.ANOTHER_EXAMPLE_ACTIVITY_ID);
    verify(mockedQuery).list();
  }

  // activityIdIn (POST) variant.
  @Test
  public void testActivityIdListPostParameter() {
    Map<String, Object> queryParameters = new HashMap<String, Object>();
    queryParameters.put("activityIdIn", MockProvider.EXAMPLE_ACTIVITY_ID_LIST.split(","));

    given()
      .contentType(POST_JSON_CONTENT_TYPE)
      .body(queryParameters)
    .then().expect()
      .statusCode(Status.OK.getStatusCode())
    .when()
      .post(PROCESS_INSTANCE_QUERY_URL);

    verify(mockedQuery).activityIdIn(MockProvider.EXAMPLE_ACTIVITY_ID, MockProvider.ANOTHER_EXAMPLE_ACTIVITY_ID);
    verify(mockedQuery).list();
  }

  // Each sortBy value must call its orderBy* method followed by asc()/desc(),
  // in that order (hence the per-case InOrder verifiers).
  @Test
  public void testSortingParameters() {
    InOrder inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("instanceId", "asc", Status.OK);
    inOrder.verify(mockedQuery).orderByProcessInstanceId();
    inOrder.verify(mockedQuery).asc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("definitionKey", "desc", Status.OK);
    inOrder.verify(mockedQuery).orderByProcessDefinitionKey();
    inOrder.verify(mockedQuery).desc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("definitionId", "asc", Status.OK);
    inOrder.verify(mockedQuery).orderByProcessDefinitionId();
    inOrder.verify(mockedQuery).asc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("tenantId", "asc", Status.OK);
    inOrder.verify(mockedQuery).orderByTenantId();
    inOrder.verify(mockedQuery).asc();

    inOrder = Mockito.inOrder(mockedQuery);
    executeAndVerifySorting("businessKey", "asc", Status.OK);
    inOrder.verify(mockedQuery).orderByBusinessKey();
    inOrder.verify(mockedQuery).asc();
  }

  // Multiple sort criteria in one POST body are applied in declaration order.
  @Test
  public void testSecondarySortingAsPost() {
    InOrder inOrder = Mockito.inOrder(mockedQuery);
    Map<String, Object> json = new HashMap<String, Object>();
    json.put("sorting", OrderingBuilder.create()
      .orderBy("definitionKey").desc()
      .orderBy("definitionId").asc()
      .getJson());
    given().contentType(POST_JSON_CONTENT_TYPE).body(json)
      .header("accept", MediaType.APPLICATION_JSON)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().post(PROCESS_INSTANCE_QUERY_URL);

    inOrder.verify(mockedQuery).orderByProcessDefinitionKey();
    inOrder.verify(mockedQuery).desc();
    inOrder.verify(mockedQuery).orderByProcessDefinitionId();
    inOrder.verify(mockedQuery).asc();
  }

  // firstResult + maxResults translate to listPage(first, max).
  @Test
  public void testSuccessfulPagination() {
    int firstResult = 0;
    int maxResults = 10;
    given().queryParam("firstResult", firstResult).queryParam("maxResults", maxResults)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(PROCESS_INSTANCE_QUERY_URL);

    verify(mockedQuery).listPage(firstResult, maxResults);
  }

  /**
   * If parameter "firstResult" is missing, we expect 0 as default.
   */
  @Test
  public void testMissingFirstResultParameter() {
    int maxResults = 10;
    given().queryParam("maxResults", maxResults)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(PROCESS_INSTANCE_QUERY_URL);

    verify(mockedQuery).listPage(0, maxResults);
  }

  /**
   * If parameter "maxResults" is missing, we expect Integer.MAX_VALUE as default.
   */
  @Test
  public void testMissingMaxResultsParameter() {
    int firstResult = 10;
    given().queryParam("firstResult", firstResult)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(PROCESS_INSTANCE_QUERY_URL);

    verify(mockedQuery).listPage(firstResult, Integer.MAX_VALUE);
  }

  // /count (GET) returns the stubbed count and invokes count(), not list().
  @Test
  public void testQueryCount() {
    expect().statusCode(Status.OK.getStatusCode())
      .body("count", equalTo(1))
      .when().get(PROCESS_INSTANCE_COUNT_QUERY_URL);

    verify(mockedQuery).count();
  }

  // /count (POST) with an empty JSON body behaves the same.
  @Test
  public void testQueryCountForPost() {
    given().contentType(POST_JSON_CONTENT_TYPE).body(EMPTY_JSON_OBJECT)
      .expect().statusCode(Status.OK.getStatusCode())
      .body("count", equalTo(1))
      .when().post(PROCESS_INSTANCE_COUNT_QUERY_URL);

    verify(mockedQuery).count();
  }

  // processInstanceIds (GET, comma-separated) is passed to the query as a Set.
  @Test
  public void testInstanceRetrievalByList() {
    List<ProcessInstance> mockProcessInstanceList = new ArrayList<ProcessInstance>();
    mockProcessInstanceList.add(MockProvider.createMockInstance());
    mockProcessInstanceList.add(MockProvider.createAnotherMockInstance());

    ProcessInstanceQuery instanceQuery = mock(ProcessInstanceQuery.class);
    when(processEngine.getRuntimeService().createProcessInstanceQuery()).thenReturn(instanceQuery);
    when(instanceQuery.list()).thenReturn(mockProcessInstanceList);

    Response response = given()
        .queryParam("processInstanceIds", MockProvider.EXAMPLE_PROCESS_INSTANCE_ID_LIST)
      .then()
        .expect()
          .statusCode(Status.OK.getStatusCode())
      .when()
        .get(PROCESS_INSTANCE_QUERY_URL);

    // assert query invocation
    InOrder inOrder = Mockito.inOrder(instanceQuery);
    Set<String> expectedSet = MockProvider.createMockSetFromList(MockProvider.EXAMPLE_PROCESS_INSTANCE_ID_LIST);
    inOrder.verify(instanceQuery).processInstanceIds(expectedSet);
    inOrder.verify(instanceQuery).list();

    String content = response.asString();
    List<String> instances = from(content).getList("");
    Assert.assertEquals("There should be two process definitions returned.", 2, instances.size());

    String returnedInstanceId1 = from(content).getString("[0].id");
    String returnedInstanceId2 = from(content).getString("[1].id");

    Assert.assertEquals(MockProvider.EXAMPLE_PROCESS_INSTANCE_ID, returnedInstanceId1);
    Assert.assertEquals(MockProvider.ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID, returnedInstanceId2);
  }

  // processInstanceIds via POST body (already a Set in JSON form).
  @Test
  public void testInstanceRetrievalByListAsPost() {
    List<ProcessInstance> mockProcessInstanceList = new ArrayList<ProcessInstance>();
    mockProcessInstanceList.add(MockProvider.createMockInstance());
    mockProcessInstanceList.add(MockProvider.createAnotherMockInstance());

    ProcessInstanceQuery instanceQuery = mock(ProcessInstanceQuery.class);
    when(processEngine.getRuntimeService().createProcessInstanceQuery()).thenReturn(instanceQuery);
    when(instanceQuery.list()).thenReturn(mockProcessInstanceList);

    Map<String, Set<String>> params = new HashMap<String, Set<String>>();
    Set<String> processInstanceIds = MockProvider.createMockSetFromList(MockProvider.EXAMPLE_PROCESS_INSTANCE_ID_LIST);
    params.put("processInstanceIds", processInstanceIds);

    Response response = given()
        .contentType(POST_JSON_CONTENT_TYPE)
        .body(params)
      .then()
        .expect()
          .statusCode(Status.OK.getStatusCode())
      .when()
        .post(PROCESS_INSTANCE_QUERY_URL);

    // assert query invocation
    InOrder inOrder = Mockito.inOrder(instanceQuery);
    inOrder.verify(instanceQuery).processInstanceIds(processInstanceIds);
    inOrder.verify(instanceQuery).list();

    String content = response.asString();
    List<String> instances = from(content).getList("");
    Assert.assertEquals("There should be two process definitions returned.", 2, instances.size());

    String returnedInstanceId1 = from(content).getString("[0].id");
    String returnedInstanceId2 = from(content).getString("[1].id");

    Assert.assertEquals(MockProvider.EXAMPLE_PROCESS_INSTANCE_ID, returnedInstanceId1);
    Assert.assertEquals(MockProvider.ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID, returnedInstanceId2);
  }

  // NOTE(review): this method continues beyond the visible excerpt.
  @Test
  public void testInstanceRetrievalByListWithDuplicate() {
    List<ProcessInstance> mockProcessInstanceList = new ArrayList<ProcessInstance>();
mockProcessInstanceList.add(MockProvider.createMockInstance()); mockProcessInstanceList.add(MockProvider.createAnotherMockInstance()); ProcessInstanceQuery instanceQuery = mock(ProcessInstanceQuery.class); when(instanceQuery.list()).thenReturn(mockProcessInstanceList); when(processEngine.getRuntimeService().createProcessInstanceQuery()).thenReturn(instanceQuery); Response response = given() .queryParam("processInstanceIds", MockProvider.EXAMPLE_PROCESS_INSTANCE_ID_LIST_WITH_DUP) .then() .expect() .statusCode(Status.OK.getStatusCode()) .when() .get(PROCESS_INSTANCE_QUERY_URL); // assert query invocation InOrder inOrder = Mockito.inOrder(instanceQuery); Set<String> expectedSet = MockProvider.createMockSetFromList(MockProvider.EXAMPLE_PROCESS_INSTANCE_ID_LIST); inOrder.verify(instanceQuery).processInstanceIds(expectedSet); inOrder.verify(instanceQuery).list(); String content = response.asString(); List<String> instances = from(content).getList(""); Assert.assertEquals("There should be two process definitions returned.", 2, instances.size()); String returnedInstanceId1 = from(content).getString("[0].id"); String returnedInstanceId2 = from(content).getString("[1].id"); Assert.assertEquals(MockProvider.EXAMPLE_PROCESS_INSTANCE_ID, returnedInstanceId1); Assert.assertEquals(MockProvider.ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID, returnedInstanceId2); } @Test public void testInstanceRetrievalByListWithDuplicateAsPost() { List<ProcessInstance> mockProcessInstanceList = new ArrayList<ProcessInstance>(); mockProcessInstanceList.add(MockProvider.createMockInstance()); mockProcessInstanceList.add(MockProvider.createAnotherMockInstance()); ProcessInstanceQuery instanceQuery = mock(ProcessInstanceQuery.class); when(instanceQuery.list()).thenReturn(mockProcessInstanceList); when(processEngine.getRuntimeService().createProcessInstanceQuery()).thenReturn(instanceQuery); Map<String, Set<String>> params = new HashMap<String, Set<String>>(); Set<String> processInstanceIds = 
MockProvider.createMockSetFromList(MockProvider.EXAMPLE_PROCESS_INSTANCE_ID_LIST); params.put("processInstanceIds", processInstanceIds); Response response = given() .contentType(POST_JSON_CONTENT_TYPE) .body(params) .then() .expect() .statusCode(Status.OK.getStatusCode()) .when() .post(PROCESS_INSTANCE_QUERY_URL); // assert query invocation InOrder inOrder = Mockito.inOrder(instanceQuery); inOrder.verify(instanceQuery).processInstanceIds(processInstanceIds); inOrder.verify(instanceQuery).list(); String content = response.asString(); List<String> instances = from(content).getList(""); Assert.assertEquals("There should be two process definitions returned.", 2, instances.size()); String returnedInstanceId1 = from(content).getString("[0].id"); String returnedInstanceId2 = from(content).getString("[1].id"); Assert.assertEquals(MockProvider.EXAMPLE_PROCESS_INSTANCE_ID, returnedInstanceId1); Assert.assertEquals(MockProvider.ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID, returnedInstanceId2); } @Test public void testInstanceRetrievalByListWithEmpty() { ProcessInstanceQuery instanceQuery = mock(ProcessInstanceQuery.class); when(instanceQuery.list()).thenReturn(null); String expectedExceptionMessage = "Set of process instance ids is empty"; doThrow(new ProcessEngineException(expectedExceptionMessage)).when(instanceQuery).processInstanceIds(anySetOf(String.class)); when(processEngine.getRuntimeService().createProcessInstanceQuery()).thenReturn(instanceQuery); String emptyList = ""; given() .queryParam("processInstanceIds", emptyList) .then() .expect() .statusCode(Status.INTERNAL_SERVER_ERROR.getStatusCode()) .body("type", equalTo(ProcessEngineException.class.getSimpleName())) .body("message", equalTo(expectedExceptionMessage)) .when() .get(PROCESS_INSTANCE_QUERY_URL); } @Test public void testInstanceRetrievalByListWithEmptyAsPost() { ProcessInstanceQuery instanceQuery = mock(ProcessInstanceQuery.class); when(instanceQuery.list()).thenReturn(null); String expectedExceptionMessage = "Set 
of process instance ids is empty"; doThrow(new ProcessEngineException(expectedExceptionMessage)).when(instanceQuery).processInstanceIds(anySetOf(String.class)); when(processEngine.getRuntimeService().createProcessInstanceQuery()).thenReturn(instanceQuery); Map<String, Set<String>> params = new HashMap<String, Set<String>>(); params.put("processInstanceIds", new HashSet<String>()); given() .contentType(POST_JSON_CONTENT_TYPE) .body(params) .then() .expect() .statusCode(Status.INTERNAL_SERVER_ERROR.getStatusCode()) .body("type", equalTo(ProcessEngineException.class.getSimpleName())) .body("message", equalTo(expectedExceptionMessage)) .when() .post(PROCESS_INSTANCE_QUERY_URL); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.language;

import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;

import org.apache.camel.Component;
import org.apache.camel.Consumer;
import org.apache.camel.Expression;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.component.ResourceEndpoint;
import org.apache.camel.spi.Language;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriPath;
import org.apache.camel.support.ResourceHelper;
import org.apache.camel.util.IOHelper;
import org.apache.camel.util.ObjectHelper;

/**
 * The language component allows you to send a message to an endpoint which executes a script by any of the supported Languages in Camel.
 *
 * By having a component to execute language scripts, it allows more dynamic routing capabilities.
 * For example by using the Routing Slip or Dynamic Router EIPs you can send messages to language endpoints
 * where the script is dynamic defined as well.
 */
@UriEndpoint(firstVersion = "2.5.0", scheme = "language", title = "Language", syntax = "language:languageName:resourceUri",
        producerOnly = true, label = "core,script")
public class LanguageEndpoint extends ResourceEndpoint {

    /** Resolved language implementation; lazily looked up from {@link #languageName} in {@link #createProducer()}. */
    private Language language;
    /** Compiled expression; pre-compiled and cached here when {@link #cacheScript} is enabled. */
    private Expression expression;
    /** Whether the script content was loaded from a resource (affects cache invalidation in {@link #getExpression()}). */
    private boolean contentResolvedFromResource;

    // FIX: the original concatenation produced "ognl,,ref" (a double comma), which
    // injected an empty value into the documented enum list.
    @UriPath(enums = "bean,constant,exchangeProperty,file,groovy,header,javascript,jsonpath,mvel,ognl,"
            + "ref,simple,spel,sql,terser,tokenize,xpath,xquery,xtokenize")
    @Metadata(required = true)
    private String languageName;
    // resourceUri is optional in the language endpoint
    @UriPath(description = "Path to the resource, or a reference to lookup a bean in the Registry to use as the resource")
    @Metadata(required = false)
    private String resourceUri;
    @UriParam
    private String script;
    @UriParam(defaultValue = "true")
    private boolean transform = true;
    @UriParam
    private boolean binary;
    @UriParam
    private boolean cacheScript;
    // field exists so the URI-documentation tooling picks up the option; the actual
    // state lives in the ResourceEndpoint superclass (see setContentCache calls below)
    @UriParam(defaultValue = "true", description = "Sets whether to use resource content cache or not")
    private boolean contentCache;

    public LanguageEndpoint() {
        // enable cache by default
        setContentCache(true);
    }

    public LanguageEndpoint(String endpointUri, Component component, Language language, Expression expression, String resourceUri) {
        super(endpointUri, component, resourceUri);
        this.language = language;
        this.expression = expression;
        // enable cache by default
        setContentCache(true);
    }

    /**
     * Creates the producer, resolving the {@link Language} by name if it was not supplied
     * via the constructor, and pre-compiling the script when {@code cacheScript} is enabled.
     *
     * @return the producer executing the configured script
     * @throws Exception if the language cannot be resolved or the script cannot be loaded
     */
    @Override
    public Producer createProducer() throws Exception {
        ObjectHelper.notNull(getCamelContext(), "CamelContext", this);
        if (language == null && languageName != null) {
            language = getCamelContext().resolveLanguage(languageName);
        }
        ObjectHelper.notNull(language, "language", this);
        if (cacheScript && expression == null && script != null) {
            script = resolveScript(script);
            expression = language.createExpression(script);
        }
        return new LanguageProducer(this);
    }

    /**
     * This endpoint is producer-only; consuming is not supported.
     *
     * @throws RuntimeCamelException always
     */
    @Override
    public Consumer createConsumer(Processor processor) throws Exception {
        throw new RuntimeCamelException("Cannot consume to a LanguageEndpoint: " + getEndpointUri());
    }

    /**
     * Resolves the script.
     *
     * @param script script or uri for a script to load
     * @return the script
     * @throws IOException is thrown if error loading the script
     */
    protected String resolveScript(String script) throws IOException {
        String answer;
        if (ResourceHelper.hasScheme(script)) {
            InputStream is = loadResource(script);
            answer = getCamelContext().getTypeConverter().convertTo(String.class, is);
            IOHelper.close(is);
        } else {
            answer = script;
        }
        return answer;
    }

    @Override
    protected String createEndpointUri() {
        String s = script;
        // FIX: guard against NPE — script is optional (the content may come from
        // resourceUri instead), and URLEncoder.encode rejects null input
        if (s != null) {
            try {
                s = URLEncoder.encode(s, "UTF-8");
            } catch (UnsupportedEncodingException e) {
                // UTF-8 is mandated by the JVM spec, so this cannot happen; keep the raw script
            }
        }
        return languageName + ":" + s;
    }

    public Language getLanguage() {
        return language;
    }

    /**
     * Returns the cached expression, or {@code null} when the script came from a resource
     * whose content cache has since been cleared (forcing a re-compile by the producer).
     */
    public Expression getExpression() {
        if (isContentResolvedFromResource() && isContentCacheCleared()) {
            return null;
        }
        return expression;
    }

    public void setExpression(Expression expression) {
        this.expression = expression;
    }

    public boolean isTransform() {
        return transform;
    }

    /**
     * Whether or not the result of the script should be used as message body.
     * <p/>
     * This options is default <tt>true</tt>.
     *
     * @param transform <tt>true</tt> to use result as new message body, <tt>false</tt> to keep the existing message body
     */
    public void setTransform(boolean transform) {
        this.transform = transform;
    }

    public boolean isBinary() {
        return binary;
    }

    /**
     * Whether the script is binary content or text content.
     * <p/>
     * By default the script is read as text content (eg <tt>java.lang.String</tt>)
     *
     * @param binary <tt>true</tt> to read the script as binary, instead of text based.
     */
    public void setBinary(boolean binary) {
        this.binary = binary;
    }

    /**
     * Sets the name of the language to use
     *
     * @param languageName the name of the language
     */
    public void setLanguageName(String languageName) {
        this.languageName = languageName;
    }

    /**
     * Path to the resource, or a reference to lookup a bean in the Registry to use as the resource
     *
     * @param resourceUri the resource path
     */
    @Override
    public void setResourceUri(String resourceUri) {
        super.setResourceUri(resourceUri);
    }

    @Override
    public String getResourceUri() {
        return super.getResourceUri();
    }

    /**
     * Sets the script to execute
     *
     * @param script the script
     */
    public void setScript(String script) {
        this.script = script;
    }

    public String getScript() {
        return script;
    }

    public boolean isContentResolvedFromResource() {
        return contentResolvedFromResource;
    }

    public void setContentResolvedFromResource(boolean contentResolvedFromResource) {
        this.contentResolvedFromResource = contentResolvedFromResource;
    }

    public boolean isCacheScript() {
        return cacheScript;
    }

    /**
     * Whether to cache the compiled script and reuse
     * <p/>
     * Notice reusing the script can cause side effects from processing one Camel
     * {@link org.apache.camel.Exchange} to the next {@link org.apache.camel.Exchange}.
     */
    public void setCacheScript(boolean cacheScript) {
        this.cacheScript = cacheScript;
    }

    // FIX: added missing @Override — this overrides ResourceEndpoint.clearContentCache()
    @Override
    public void clearContentCache() {
        super.clearContentCache();
        // must also clear expression and script
        expression = null;
        script = null;
    }
}
package org.openapitools.model;

import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;

/**
 * Model object describing a piece of GitHub repository content.
 * Plain mutable bean with fluent setters; (de)serialized via Jackson.
 */
@javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaInflectorServerCodegen", date = "2022-02-13T02:15:54.605692Z[Etc/UTC]")
public class GithubContent {

    @JsonProperty("name")
    private String name;

    @JsonProperty("sha")
    private String sha;

    @JsonProperty("_class")
    private String propertyClass;

    @JsonProperty("repo")
    private String repo;

    @JsonProperty("size")
    private Integer size;

    @JsonProperty("owner")
    private String owner;

    @JsonProperty("path")
    private String path;

    @JsonProperty("base64Data")
    private String base64Data;

    // ---- fluent setters ------------------------------------------------

    /** Fluent setter for {@code name}. */
    public GithubContent name(String name) {
        this.name = name;
        return this;
    }

    /** Fluent setter for {@code sha}. */
    public GithubContent sha(String sha) {
        this.sha = sha;
        return this;
    }

    /** Fluent setter for {@code _class} (exposed in Java as {@code propertyClass}). */
    public GithubContent propertyClass(String propertyClass) {
        this.propertyClass = propertyClass;
        return this;
    }

    /** Fluent setter for {@code repo}. */
    public GithubContent repo(String repo) {
        this.repo = repo;
        return this;
    }

    /** Fluent setter for {@code size}. */
    public GithubContent size(Integer size) {
        this.size = size;
        return this;
    }

    /** Fluent setter for {@code owner}. */
    public GithubContent owner(String owner) {
        this.owner = owner;
        return this;
    }

    /** Fluent setter for {@code path}. */
    public GithubContent path(String path) {
        this.path = path;
        return this;
    }

    /** Fluent setter for {@code base64Data}. */
    public GithubContent base64Data(String base64Data) {
        this.base64Data = base64Data;
        return this;
    }

    // ---- plain accessors -----------------------------------------------

    @ApiModelProperty(value = "")
    @JsonProperty("name")
    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    @ApiModelProperty(value = "")
    @JsonProperty("sha")
    public String getSha() {
        return sha;
    }

    public void setSha(String sha) {
        this.sha = sha;
    }

    @ApiModelProperty(value = "")
    @JsonProperty("_class")
    public String getPropertyClass() {
        return propertyClass;
    }

    public void setPropertyClass(String propertyClass) {
        this.propertyClass = propertyClass;
    }

    @ApiModelProperty(value = "")
    @JsonProperty("repo")
    public String getRepo() {
        return repo;
    }

    public void setRepo(String repo) {
        this.repo = repo;
    }

    @ApiModelProperty(value = "")
    @JsonProperty("size")
    public Integer getSize() {
        return size;
    }

    public void setSize(Integer size) {
        this.size = size;
    }

    @ApiModelProperty(value = "")
    @JsonProperty("owner")
    public String getOwner() {
        return owner;
    }

    public void setOwner(String owner) {
        this.owner = owner;
    }

    @ApiModelProperty(value = "")
    @JsonProperty("path")
    public String getPath() {
        return path;
    }

    public void setPath(String path) {
        this.path = path;
    }

    @ApiModelProperty(value = "")
    @JsonProperty("base64Data")
    public String getBase64Data() {
        return base64Data;
    }

    public void setBase64Data(String base64Data) {
        this.base64Data = base64Data;
    }

    // ---- identity ------------------------------------------------------

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        GithubContent other = (GithubContent) o;
        return Objects.equals(name, other.name)
                && Objects.equals(sha, other.sha)
                && Objects.equals(propertyClass, other.propertyClass)
                && Objects.equals(repo, other.repo)
                && Objects.equals(size, other.size)
                && Objects.equals(owner, other.owner)
                && Objects.equals(path, other.path)
                && Objects.equals(base64Data, other.base64Data);
    }

    @Override
    public int hashCode() {
        return Objects.hash(name, sha, propertyClass, repo, size, owner, path, base64Data);
    }

    @Override
    public String toString() {
        StringBuilder out = new StringBuilder();
        out.append("class GithubContent {\n");
        out.append("    name: ").append(toIndentedString(name)).append("\n");
        out.append("    sha: ").append(toIndentedString(sha)).append("\n");
        out.append("    propertyClass: ").append(toIndentedString(propertyClass)).append("\n");
        out.append("    repo: ").append(toIndentedString(repo)).append("\n");
        out.append("    size: ").append(toIndentedString(size)).append("\n");
        out.append("    owner: ").append(toIndentedString(owner)).append("\n");
        out.append("    path: ").append(toIndentedString(path)).append("\n");
        out.append("    base64Data: ").append(toIndentedString(base64Data)).append("\n");
        out.append("}");
        return out.toString();
    }

    /**
     * Convert the given object to string with each line indented by 4 spaces
     * (except the first line).
     */
    private String toIndentedString(Object o) {
        if (o == null) {
            return "null";
        }
        return o.toString().replace("\n", "\n    ");
    }
}
/**
 * <copyright>
 * </copyright>
 *
 * $Id$
 */
package net.opengis.gml.provider;

import java.util.Collection;
import java.util.List;

import net.opengis.citygml.building.provider.CityGMLEditPlugin;
import net.opengis.gml.GmlFactory;
import net.opengis.gml.GmlPackage;
import net.opengis.gml.ImageCRSRefType;

import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.util.ResourceLocator;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.edit.provider.ComposeableAdapterFactory;
import org.eclipse.emf.edit.provider.IEditingDomainItemProvider;
import org.eclipse.emf.edit.provider.IItemLabelProvider;
import org.eclipse.emf.edit.provider.IItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.IItemPropertySource;
import org.eclipse.emf.edit.provider.IStructuredItemContentProvider;
import org.eclipse.emf.edit.provider.ITreeItemContentProvider;
import org.eclipse.emf.edit.provider.ItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.ItemProviderAdapter;
import org.eclipse.emf.edit.provider.ViewerNotification;

import org.w3._1999.xlink.ActuateType;

// NOTE(review): EMF @generated code — do not hand-edit method bodies; any manual
// change must be marked "@generated NOT" or it will be overwritten on regeneration.
/**
 * This is the item provider adapter for a {@link net.opengis.gml.ImageCRSRefType} object.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public class ImageCRSRefTypeItemProvider
    extends ItemProviderAdapter
    implements
        IEditingDomainItemProvider,
        IStructuredItemContentProvider,
        ITreeItemContentProvider,
        IItemLabelProvider,
        IItemPropertySource {
    /**
     * This constructs an instance from a factory and a notifier.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public ImageCRSRefTypeItemProvider(AdapterFactory adapterFactory) {
        super(adapterFactory);
    }

    /**
     * This returns the property descriptors for the adapted class.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) {
        // descriptors are built once and cached in itemPropertyDescriptors (inherited field)
        if (itemPropertyDescriptors == null) {
            super.getPropertyDescriptors(object);

            addActuatePropertyDescriptor(object);
            addArcrolePropertyDescriptor(object);
            addHrefPropertyDescriptor(object);
            addRemoteSchemaPropertyDescriptor(object);
            addRolePropertyDescriptor(object);
            addShowPropertyDescriptor(object);
            addTitlePropertyDescriptor(object);
            addTypePropertyDescriptor(object);
        }
        return itemPropertyDescriptors;
    }

    /**
     * This adds a property descriptor for the Actuate feature.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected void addActuatePropertyDescriptor(Object object) {
        itemPropertyDescriptors.add
            (createItemPropertyDescriptor
                (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
                 getResourceLocator(),
                 getString("_UI_ImageCRSRefType_actuate_feature"),
                 getString("_UI_PropertyDescriptor_description", "_UI_ImageCRSRefType_actuate_feature", "_UI_ImageCRSRefType_type"),
                 GmlPackage.eINSTANCE.getImageCRSRefType_Actuate(),
                 true,
                 false,
                 false,
                 ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
                 null,
                 null));
    }

    /**
     * This adds a property descriptor for the Arcrole feature.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected void addArcrolePropertyDescriptor(Object object) {
        itemPropertyDescriptors.add
            (createItemPropertyDescriptor
                (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
                 getResourceLocator(),
                 getString("_UI_ImageCRSRefType_arcrole_feature"),
                 getString("_UI_PropertyDescriptor_description", "_UI_ImageCRSRefType_arcrole_feature", "_UI_ImageCRSRefType_type"),
                 GmlPackage.eINSTANCE.getImageCRSRefType_Arcrole(),
                 true,
                 false,
                 false,
                 ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
                 null,
                 null));
    }

    /**
     * This adds a property descriptor for the Href feature.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected void addHrefPropertyDescriptor(Object object) {
        itemPropertyDescriptors.add
            (createItemPropertyDescriptor
                (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
                 getResourceLocator(),
                 getString("_UI_ImageCRSRefType_href_feature"),
                 getString("_UI_PropertyDescriptor_description", "_UI_ImageCRSRefType_href_feature", "_UI_ImageCRSRefType_type"),
                 GmlPackage.eINSTANCE.getImageCRSRefType_Href(),
                 true,
                 false,
                 false,
                 ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
                 null,
                 null));
    }

    /**
     * This adds a property descriptor for the Remote Schema feature.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected void addRemoteSchemaPropertyDescriptor(Object object) {
        itemPropertyDescriptors.add
            (createItemPropertyDescriptor
                (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
                 getResourceLocator(),
                 getString("_UI_ImageCRSRefType_remoteSchema_feature"),
                 getString("_UI_PropertyDescriptor_description", "_UI_ImageCRSRefType_remoteSchema_feature", "_UI_ImageCRSRefType_type"),
                 GmlPackage.eINSTANCE.getImageCRSRefType_RemoteSchema(),
                 true,
                 false,
                 false,
                 ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
                 null,
                 null));
    }

    /**
     * This adds a property descriptor for the Role feature.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected void addRolePropertyDescriptor(Object object) {
        itemPropertyDescriptors.add
            (createItemPropertyDescriptor
                (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
                 getResourceLocator(),
                 getString("_UI_ImageCRSRefType_role_feature"),
                 getString("_UI_PropertyDescriptor_description", "_UI_ImageCRSRefType_role_feature", "_UI_ImageCRSRefType_type"),
                 GmlPackage.eINSTANCE.getImageCRSRefType_Role(),
                 true,
                 false,
                 false,
                 ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
                 null,
                 null));
    }

    /**
     * This adds a property descriptor for the Show feature.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected void addShowPropertyDescriptor(Object object) {
        itemPropertyDescriptors.add
            (createItemPropertyDescriptor
                (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
                 getResourceLocator(),
                 getString("_UI_ImageCRSRefType_show_feature"),
                 getString("_UI_PropertyDescriptor_description", "_UI_ImageCRSRefType_show_feature", "_UI_ImageCRSRefType_type"),
                 GmlPackage.eINSTANCE.getImageCRSRefType_Show(),
                 true,
                 false,
                 false,
                 ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
                 null,
                 null));
    }

    /**
     * This adds a property descriptor for the Title feature.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected void addTitlePropertyDescriptor(Object object) {
        itemPropertyDescriptors.add
            (createItemPropertyDescriptor
                (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
                 getResourceLocator(),
                 getString("_UI_ImageCRSRefType_title_feature"),
                 getString("_UI_PropertyDescriptor_description", "_UI_ImageCRSRefType_title_feature", "_UI_ImageCRSRefType_type"),
                 GmlPackage.eINSTANCE.getImageCRSRefType_Title(),
                 true,
                 false,
                 false,
                 ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
                 null,
                 null));
    }

    /**
     * This adds a property descriptor for the Type feature.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected void addTypePropertyDescriptor(Object object) {
        itemPropertyDescriptors.add
            (createItemPropertyDescriptor
                (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
                 getResourceLocator(),
                 getString("_UI_ImageCRSRefType_type_feature"),
                 getString("_UI_PropertyDescriptor_description", "_UI_ImageCRSRefType_type_feature", "_UI_ImageCRSRefType_type"),
                 GmlPackage.eINSTANCE.getImageCRSRefType_Type(),
                 true,
                 false,
                 false,
                 ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
                 null,
                 null));
    }

    /**
     * This specifies how to implement {@link #getChildren} and is used to deduce an appropriate feature for an
     * {@link org.eclipse.emf.edit.command.AddCommand}, {@link org.eclipse.emf.edit.command.RemoveCommand} or
     * {@link org.eclipse.emf.edit.command.MoveCommand} in {@link #createCommand}.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Collection<? extends EStructuralFeature> getChildrenFeatures(Object object) {
        // childrenFeatures is an inherited cache; populated once on first call
        if (childrenFeatures == null) {
            super.getChildrenFeatures(object);
            childrenFeatures.add(GmlPackage.eINSTANCE.getImageCRSRefType_ImageCRS());
        }
        return childrenFeatures;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EStructuralFeature getChildFeature(Object object, Object child) {
        // Check the type of the specified child object and return the proper feature to use for
        // adding (see {@link AddCommand}) it as a child.

        return super.getChildFeature(object, child);
    }

    /**
     * This returns ImageCRSRefType.gif.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object getImage(Object object) {
        return overlayImage(object, getResourceLocator().getImage("full/obj16/ImageCRSRefType"));
    }

    /**
     * This returns the label text for the adapted class.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String getText(Object object) {
        // label is the type name, optionally suffixed with the "actuate" attribute value
        ActuateType labelValue = ((ImageCRSRefType)object).getActuate();
        String label = labelValue == null ? null : labelValue.toString();
        return label == null || label.length() == 0 ?
            getString("_UI_ImageCRSRefType_type") :
            getString("_UI_ImageCRSRefType_type") + " " + label;
    }

    /**
     * This handles model notifications by calling {@link #updateChildren} to update any cached
     * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void notifyChanged(Notification notification) {
        updateChildren(notification);

        switch (notification.getFeatureID(ImageCRSRefType.class)) {
            // attribute changes: refresh the label only (ViewerNotification(..., false, true))
            case GmlPackage.IMAGE_CRS_REF_TYPE__ACTUATE:
            case GmlPackage.IMAGE_CRS_REF_TYPE__ARCROLE:
            case GmlPackage.IMAGE_CRS_REF_TYPE__HREF:
            case GmlPackage.IMAGE_CRS_REF_TYPE__REMOTE_SCHEMA:
            case GmlPackage.IMAGE_CRS_REF_TYPE__ROLE:
            case GmlPackage.IMAGE_CRS_REF_TYPE__SHOW:
            case GmlPackage.IMAGE_CRS_REF_TYPE__TITLE:
            case GmlPackage.IMAGE_CRS_REF_TYPE__TYPE:
                fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), false, true));
                return;
            // child-reference change: refresh the content tree (ViewerNotification(..., true, false))
            case GmlPackage.IMAGE_CRS_REF_TYPE__IMAGE_CRS:
                fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), true, false));
                return;
        }
        super.notifyChanged(notification);
    }

    /**
     * This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
     * that can be created under this object.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected void collectNewChildDescriptors(Collection<Object> newChildDescriptors, Object object) {
        super.collectNewChildDescriptors(newChildDescriptors, object);

        newChildDescriptors.add
            (createChildParameter
                (GmlPackage.eINSTANCE.getImageCRSRefType_ImageCRS(),
                 GmlFactory.eINSTANCE.createImageCRSType()));
    }

    /**
     * Return the resource locator for this item provider's resources.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public ResourceLocator getResourceLocator() {
        return CityGMLEditPlugin.INSTANCE;
    }
}
/* Copyright (c) 2020 W.T.J. Riezebos
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.riezebos.thoth.util;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.junit.Test;

/**
 * Unit tests for the static helper methods in {@code ThothUtil}.
 */
public class ThothUtilTest {

  @Test
  public void testConstruct() {
    // Just covers the implicit constructor of the utility class.
    new ThothUtil();
  }

  @Test
  public void testGetArgumentsMap() {
    Map<String, String> argumentsMap = ThothUtil.getArgumentsMap(new String[] {"-file", "filename", "-flag", "true", "-toggle", "-toggle2"});
    assertEquals("filename", argumentsMap.get("file"));
    assertEquals("true", argumentsMap.get("flag"));
    // Value-less flags are present in the map but mapped to null.
    assertNull(argumentsMap.get("toggle"));
    assertTrue(argumentsMap.containsKey("toggle"));
    assertTrue(argumentsMap.containsKey("toggle2"));
  }

  @Test
  public void testGetArgumentsMixed() {
    String[] args = new String[] {"file", "filename", "-flag", "true", "somethingelse", "-toggle"};
    Map<String, String> argumentsMap = ThothUtil.getArgumentsMap(args);
    // Plain (non-dashed) tokens are not treated as named arguments.
    assertNull(argumentsMap.get("file"));
    assertEquals("true", argumentsMap.get("flag"));
    assertNull(argumentsMap.get("toggle"));
    assertTrue(argumentsMap.containsKey("toggle"));

    List<String> argumentsList = ThothUtil.getArgumentsList(args);
    assertEquals(3, argumentsList.size());
    assertTrue(argumentsList.contains("file"));
    assertTrue(argumentsList.contains("filename"));
    assertTrue(argumentsList.contains("somethingelse"));
  }

  @Test
  public void testTidyRelativePath() {
    assertNull(ThothUtil.tidyRelativePath(null));
    assertEquals("", ThothUtil.tidyRelativePath(""));
    assertEquals("", ThothUtil.tidyRelativePath("/"));
    assertEquals("some/other/path", ThothUtil.tidyRelativePath("/some/other/path"));
  }

  @Test
  public void testNormalSlashes() {
    assertNull(ThothUtil.normalSlashes(null));
    assertEquals("/some/other/path", ThothUtil.normalSlashes("/some/other/path"));
    assertEquals("/some/other/path", ThothUtil.normalSlashes("\\some\\other\\path"));
  }

  @Test
  public void testGetNameOnly() {
    assertNull(ThothUtil.getNameOnly(null));
    assertEquals("path", ThothUtil.getNameOnly("/some/other/path"));
    assertEquals("", ThothUtil.getNameOnly("/some/other/path/"));
    assertEquals("file", ThothUtil.getNameOnly("/some/other/path/file"));
    // Extension is stripped as well.
    assertEquals("file", ThothUtil.getNameOnly("/some/other/path/file.txt"));
  }

  @Test
  public void testGetFileName() {
    assertNull(ThothUtil.getFileName(null));
    assertEquals("path", ThothUtil.getFileName("/some/other/path"));
    assertEquals("", ThothUtil.getFileName("/some/other/path/"));
    assertEquals("file", ThothUtil.getFileName("/some/other/path/file"));
    // Unlike getNameOnly, the extension is kept.
    assertEquals("file.txt", ThothUtil.getFileName("/some/other/path/file.txt"));
    assertEquals("file.txt", ThothUtil.getFileName("file.txt"));
  }

  @Test
  public void testWrapWithNewLines() {
    assertNull(ThothUtil.wrapWithNewLines(null));
    assertArrayEquals("\na\n".toCharArray(), ThothUtil.wrapWithNewLines("a".toCharArray()));
  }

  @Test
  public void testGetFolder() {
    assertNull(ThothUtil.getFolder(null));
    assertEquals("path", ThothUtil.getFolder("path"));
    assertEquals("/some/other", ThothUtil.getFolder("/some/other/path"));
    assertEquals("/some/other/path", ThothUtil.getFolder("/some/other/path/"));
    assertEquals("/some/other/path", ThothUtil.getFolder("/some/other/path/file"));
    assertEquals("/some/other/path", ThothUtil.getFolder("/some/other/path/file.txt"));
  }

  @Test
  public void testStripSuffix() {
    assertNull(ThothUtil.stripSuffix(null, ".txt"));
    assertNull(ThothUtil.stripSuffix(null, null));
    assertEquals("/some/other/path/file", ThothUtil.stripSuffix("/some/other/path/file", null));
    assertEquals("/some/other/path/file", ThothUtil.stripSuffix("/some/other/path/file", ".txt"));
    assertEquals("/some/other/path/file", ThothUtil.stripSuffix("/some/other/path/file.txt", ".txt"));
  }

  @Test
  public void testEncodeBookmark() {
    assertNull(ThothUtil.encodeBookmark(null, true));
    assertEquals("", ThothUtil.encodeBookmark("", true));
    assertEquals("a1bc", ThothUtil.encodeBookmark("a-1 #B C", true));
    assertEquals("a1BC", ThothUtil.encodeBookmark("a-1 #B C", false));
  }

  @Test
  public void testReplaceKeywords() {
    Map<String, Object> args = new HashMap<String, Object>();
    args.put("one", 1);
    args.put("two", "2");
    args.put("three", "$1");
    args.put("four", null);
    assertNull(ThothUtil.replaceKeywords(null, args));
    assertEquals("test 1 and 2", ThothUtil.replaceKeywords("test ${one} and ${two}", args));
    assertEquals("test 1 and 22", ThothUtil.replaceKeywords("test ${one} and ${two}${two}", args));
    // A replacement value containing '$' must not be re-interpreted as a regex group reference.
    assertEquals("test $1", ThothUtil.replaceKeywords("test ${three}", args));
    // Null values are replaced by the empty string.
    assertEquals("test ", ThothUtil.replaceKeywords("test ${four}", args));
  }

  @Test
  public void testRegExpescapeString() {
    assertEquals("\\[\\^a-9\\]\\.\\*\\?", ThothUtil.regExpescape("[^a-9].*?"));
  }

  @Test
  public void testExtractKeyswords() {
    List<String> keywords = ThothUtil.extractKeyswords("There should be ${1} and ${another} keyword");
    assertArrayEquals(new String[] {"1", "another"}, keywords.toArray(new String[0]));
    keywords = ThothUtil.extractKeyswords("There should be no keyword");
    assertArrayEquals(new String[0], keywords.toArray(new String[0]));
    assertArrayEquals(new String[0], ThothUtil.extractKeyswords(null).toArray(new String[0]));
  }

  @Test
  public void testSpecAsRegExp() {
    assertEquals("(.*?)\\.(.*?)", ThothUtil.specAsRegExp("*.*").toString());
    assertEquals("(.*?)\\.png", ThothUtil.specAsRegExp("*.png").toString());
  }

  @Test
  public void testTokenize() {
    assertArrayEquals(new String[0], ThothUtil.tokenize(null).toArray(new String[0]));
    assertArrayEquals(new String[0], ThothUtil.tokenize("").toArray(new String[0]));
    assertArrayEquals(new String[] {"one", "two", "three"}, ThothUtil.tokenize("one, two, three").toArray(new String[0]));
    // A trailing separator does not produce an empty token.
    assertArrayEquals(new String[] {"one", "two", "three"}, ThothUtil.tokenize("one, two, three,").toArray(new String[0]));
  }

  @Test
  public void testSort() {
    List<String> lst = new ArrayList<String>();
    lst.add("4");
    lst.add("1");
    lst.add("3");
    lst.add("2");
    assertArrayEquals(new String[] {"1", "2", "3", "4"}, ThothUtil.sort(lst).toArray(new String[0]));
  }

  @Test
  public void testStripNumericPrefix() {
    assertNull(ThothUtil.stripNumericPrefix(null));
    assertEquals("abc", ThothUtil.stripNumericPrefix("abc"));
    assertEquals("abc", ThothUtil.stripNumericPrefix("12abc"));
    assertEquals("abc", ThothUtil.stripNumericPrefix("12 abc"));
    assertEquals("", ThothUtil.stripNumericPrefix("12"));
  }

  @Test
  public void testGetPartBeforeFirst() {
    assertNull(ThothUtil.getPartBeforeFirst(null, null));
    assertNull(ThothUtil.getPartBeforeFirst(null, "/"));
    assertEquals("some", ThothUtil.getPartBeforeFirst("some/other/part", "/"));
    // When the separator does not occur the whole string is returned.
    assertEquals("some/other/part", ThothUtil.getPartBeforeFirst("some/other/part", "%"));
  }

  @Test
  public void testGetPartAfterFirst() {
    assertNull(ThothUtil.getPartAfterFirst(null, null));
    assertNull(ThothUtil.getPartAfterFirst(null, "/"));
    assertEquals("other/part", ThothUtil.getPartAfterFirst("some/other/part", "/"));
    // When the separator does not occur the result is empty.
    assertEquals("", ThothUtil.getPartAfterFirst("some/other/part", "%"));
  }

  @Test
  public void testGetPartAfterLast() {
    assertNull(ThothUtil.getPartAfterLast(null, null));
    assertNull(ThothUtil.getPartAfterLast(null, "/"));
    assertEquals("part", ThothUtil.getPartAfterLast("some/other/part", "/"));
    assertEquals("some/other/part", ThothUtil.getPartAfterLast("some/other/part", "%"));
  }

  @Test
  public void testGetPartBeforeLast() {
    assertNull(ThothUtil.getPartBeforeLast(null, null));
    assertNull(ThothUtil.getPartBeforeLast(null, "/"));
    assertEquals("some/other", ThothUtil.getPartBeforeLast("some/other/part", "/"));
    assertEquals("some/other/part", ThothUtil.getPartBeforeLast("some/other/part", "%"));
  }

  @Test
  public void testStripPrefix() {
    assertNull(ThothUtil.stripPrefix(null, null));
    assertNull(ThothUtil.stripPrefix(null, "a"));
    assertEquals("b", ThothUtil.stripPrefix("b", "a"));
    assertEquals("/other/part", ThothUtil.stripPrefix("some/other/part", "some"));
  }

  @Test
  public void testGetExtension() {
    assertNull(ThothUtil.getExtension(null));
    assertNull(ThothUtil.getExtension("/some/folder/file"));
    assertEquals("txt", ThothUtil.getExtension("/some/folder/file.txt"));
  }

  @Test
  public void testPrefix() {
    assertEquals("/", ThothUtil.prefix(null, "/"));
    assertEquals("/", ThothUtil.prefix("", "/"));
    assertEquals("/", ThothUtil.prefix("/", "/"));
    // Already-prefixed values are left untouched.
    assertEquals("/a", ThothUtil.prefix("/a", "/"));
    assertEquals("/a", ThothUtil.prefix("a", "/"));
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache.version;

import org.apache.ignite.*;
import org.apache.ignite.internal.*;
import org.apache.ignite.internal.processors.cache.*;
import org.apache.ignite.internal.processors.datastreamer.*;
import org.apache.ignite.internal.util.typedef.internal.*;
import org.apache.ignite.marshaller.*;
import org.apache.ignite.plugin.extensions.communication.*;
import org.jetbrains.annotations.*;

import java.io.*;
import java.nio.*;

/**
 * Raw versioned entry: a {@link DataStreamerEntry} that additionally carries TTL, expire time and a
 * {@link GridCacheVersion}. Key and value may be held either as deserialized objects or as raw
 * marshalled bytes; the {@code unmarshal*}/{@code marshal} methods convert between the two forms.
 */
public class GridCacheRawVersionedEntry<K, V> extends DataStreamerEntry implements
    GridCacheVersionedEntry<K, V>, GridCacheVersionable, Externalizable {
    /** */
    private static final long serialVersionUID = 0L;

    /** Key bytes. Not sent via direct marshalling (see {@code writeTo}); only the deserialized key is. */
    @GridDirectTransient
    private byte[] keyBytes;

    /** Value bytes. */
    private byte[] valBytes;

    /** TTL. */
    private long ttl;

    /** Expire time. */
    private long expireTime;

    /** Version. */
    private GridCacheVersion ver;

    /**
     * {@code Externalizable} support.
     */
    public GridCacheRawVersionedEntry() {
        // No-op.
    }

    /**
     * Constructor used for local store load when key and value are available.
     *
     * @param key Key.
     * @param val Value.
     * @param expireTime Expire time.
     * @param ttl TTL.
     * @param ver Version.
     */
    public GridCacheRawVersionedEntry(KeyCacheObject key,
        @Nullable CacheObject val,
        long ttl,
        long expireTime,
        GridCacheVersion ver) {
        assert key != null;

        this.key = key;
        this.val = val;
        this.ttl = ttl;
        this.expireTime = expireTime;
        this.ver = ver;
    }

    /**
     * Constructor used in receiver hub where marshalled key and value are available and we do not want to
     * unmarshal value.
     *
     * @param keyBytes Key.
     * @param valBytes Value bytes.
     * @param expireTime Expire time.
     * @param ttl TTL.
     * @param ver Version.
     */
    public GridCacheRawVersionedEntry(byte[] keyBytes,
        byte[] valBytes,
        long ttl,
        long expireTime,
        GridCacheVersion ver) {
        this.keyBytes = keyBytes;
        this.valBytes = valBytes;
        this.ttl = ttl;
        this.expireTime = expireTime;
        this.ver = ver;
    }

    /** {@inheritDoc} */
    @Override public K key() {
        // Requires unmarshalKey() to have been called first when this entry was built from bytes.
        assert key != null : "Entry is being improperly processed.";

        return key.value(null, false);
    }

    /**
     * @param key Key.
     */
    public void key(KeyCacheObject key) {
        this.key = key;
    }

    /**
     * @return Key bytes.
     */
    public byte[] keyBytes() {
        return keyBytes;
    }

    /** {@inheritDoc} */
    @Override public V value() {
        // Null until unmarshal() is performed when only valBytes are present.
        return val != null ? val.<V>value(null, false) : null;
    }

    /**
     * @return Value bytes.
     */
    public byte[] valueBytes() {
        return valBytes;
    }

    /** {@inheritDoc} */
    @Override public long ttl() {
        return ttl;
    }

    /** {@inheritDoc} */
    @Override public long expireTime() {
        return expireTime;
    }

    /** {@inheritDoc} */
    @Override public byte dataCenterId() {
        return ver.dataCenterId();
    }

    /** {@inheritDoc} */
    @Override public int topologyVersion() {
        return ver.topologyVersion();
    }

    /** {@inheritDoc} */
    @Override public long order() {
        return ver.order();
    }

    /** {@inheritDoc} */
    @Override public long globalTime() {
        return ver.globalTime();
    }

    /** {@inheritDoc} */
    @Override public GridCacheVersion version() {
        return ver;
    }

    /**
     * Perform internal unmarshal of this entry. It must be performed after entry is deserialized and before
     * its restored key/value are needed.
     *
     * @param ctx Context.
     * @param marsh Marshaller.
     * @throws IgniteCheckedException If failed.
     */
    public void unmarshal(CacheObjectContext ctx, Marshaller marsh) throws IgniteCheckedException {
        unmarshalKey(ctx, marsh);

        // Value is restored lazily: only when it is still in byte form.
        if (val == null && valBytes != null) {
            val = marsh.unmarshal(valBytes, null);

            val.finishUnmarshal(ctx, null);
        }
    }

    /**
     * Finish unmarshalling of an already deserialized key/value pair (no marshaller needed).
     *
     * @param ctx Context.
     * @throws IgniteCheckedException If failed.
     */
    public void unmarshal(CacheObjectContext ctx) throws IgniteCheckedException {
        assert key != null;

        key.finishUnmarshal(ctx, null);

        if (val != null)
            val.finishUnmarshal(ctx, null);
    }

    /**
     * Perform internal key unmarshal of this entry. It must be performed after entry is deserialized and before
     * its restored key/value are needed.
     *
     * @param ctx Context.
     * @param marsh Marshaller.
     * @throws IgniteCheckedException If failed.
     */
    public void unmarshalKey(CacheObjectContext ctx, Marshaller marsh) throws IgniteCheckedException {
        if (key == null) {
            assert keyBytes != null;

            key = marsh.unmarshal(keyBytes, null);

            key.finishUnmarshal(ctx, null);
        }
    }

    /**
     * Perform internal marshal of this entry before it will be serialized.
     *
     * @param ctx Context.
     * @param marsh Marshaller.
     * @throws IgniteCheckedException If failed.
     */
    public void marshal(CacheObjectContext ctx, Marshaller marsh) throws IgniteCheckedException {
        // Marshal only what is not already in byte form.
        if (keyBytes == null) {
            key.prepareMarshal(ctx);

            keyBytes = marsh.marshal(key);
        }

        if (valBytes == null && val != null) {
            val.prepareMarshal(ctx);

            valBytes = marsh.marshal(val);
        }
    }

    /**
     * Prepare key/value cache objects for direct (message-based) marshalling.
     *
     * @param ctx Context.
     * @throws IgniteCheckedException If failed.
     */
    public void prepareDirectMarshal(CacheObjectContext ctx) throws IgniteCheckedException {
        key.prepareMarshal(ctx);

        if (val != null)
            val.prepareMarshal(ctx);
    }

    /** {@inheritDoc} */
    @Override public byte directType() {
        return 103;
    }

    /** {@inheritDoc} */
    @Override public boolean readFrom(ByteBuffer buf, MessageReader reader) {
        reader.setBuffer(buf);

        if (!reader.beforeMessageRead())
            return false;

        if (!super.readFrom(buf, reader))
            return false;

        // Intentional fall-through: reader.state() resumes at the field where a previous
        // partial read stopped and continues through the remaining fields.
        switch (reader.state()) {
            case 2:
                expireTime = reader.readLong("expireTime");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 3:
                ttl = reader.readLong("ttl");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 4:
                valBytes = reader.readByteArray("valBytes");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 5:
                ver = reader.readMessage("ver");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

        }

        assert key != null;
        // Value must be in exactly one form (object or bytes) after a direct read.
        assert !(val != null && valBytes != null);

        return true;
    }

    /** {@inheritDoc} */
    @Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) {
        assert key != null;
        assert !(val != null && valBytes != null);

        writer.setBuffer(buf);

        if (!super.writeTo(buf, writer))
            return false;

        if (!writer.isHeaderWritten()) {
            if (!writer.writeHeader(directType(), fieldsCount()))
                return false;

            writer.onHeaderWritten();
        }

        // Intentional fall-through, mirroring readFrom(): resume at the last incomplete field.
        switch (writer.state()) {
            case 2:
                if (!writer.writeLong("expireTime", expireTime))
                    return false;

                writer.incrementState();

            case 3:
                if (!writer.writeLong("ttl", ttl))
                    return false;

                writer.incrementState();

            case 4:
                if (!writer.writeByteArray("valBytes", valBytes))
                    return false;

                writer.incrementState();

            case 5:
                if (!writer.writeMessage("ver", ver))
                    return false;

                writer.incrementState();

        }

        return true;
    }

    /** {@inheritDoc} */
    @Override public byte fieldsCount() {
        return 6;
    }

    /** {@inheritDoc} */
    @Override public void writeExternal(ObjectOutput out) throws IOException {
        // This message is serialized via direct marshalling only; Externalizable must not be used.
        assert false;
    }

    /** {@inheritDoc} */
    @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        // This message is serialized via direct marshalling only; Externalizable must not be used.
        assert false;
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(GridCacheRawVersionedEntry.class, this,
            "keyBytesLen", keyBytes != null ? keyBytes.length : "n/a",
            "valBytesLen", valBytes != null ? valBytes.length : "n/a");
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.query;

import akka.actor.ActorRef;
import akka.actor.ActorSystem;
import akka.dispatch.Futures;
import akka.dispatch.Mapper;
import akka.dispatch.Recover;
import akka.pattern.Patterns;
import org.apache.flink.api.common.JobID;
import org.apache.flink.runtime.akka.AkkaUtils;
import org.apache.flink.runtime.instance.ActorGateway;
import org.apache.flink.runtime.instance.AkkaActorGateway;
import org.apache.flink.runtime.leaderretrieval.LeaderRetrievalListener;
import org.apache.flink.runtime.leaderretrieval.LeaderRetrievalService;
import org.apache.flink.util.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.concurrent.Future;
import scala.concurrent.duration.FiniteDuration;
import scala.reflect.ClassTag$;

import java.util.UUID;
import java.util.concurrent.Callable;

/**
 * Akka-based {@link KvStateLocationLookupService} that retrieves the current
 * JobManager address and uses it for lookups.
 */
class AkkaKvStateLocationLookupService implements KvStateLocationLookupService, LeaderRetrievalListener {

	// Logger must be registered under the declaring class, not the interface,
	// so that log output is attributed to this implementation.
	private static final Logger LOG = LoggerFactory.getLogger(AkkaKvStateLocationLookupService.class);

	/** Future returned when no JobManager is available. */
	private static final Future<ActorGateway> UNKNOWN_JOB_MANAGER = Futures.failed(new UnknownJobManager());

	/** Leader retrieval service to retrieve the current job manager. */
	private final LeaderRetrievalService leaderRetrievalService;

	/** The actor system used to resolve the JobManager address. */
	private final ActorSystem actorSystem;

	/** Timeout for JobManager ask-requests. */
	private final FiniteDuration askTimeout;

	/** Retry strategy factory on future failures. */
	private final LookupRetryStrategyFactory retryStrategyFactory;

	/** Current job manager future. Swapped atomically on every leader change notification. */
	private volatile Future<ActorGateway> jobManagerFuture = UNKNOWN_JOB_MANAGER;

	/**
	 * Creates the Akka-based {@link KvStateLocationLookupService}.
	 *
	 * @param leaderRetrievalService Leader retrieval service to use.
	 * @param actorSystem            Actor system to use.
	 * @param askTimeout             Timeout for JobManager ask-requests.
	 * @param retryStrategyFactory   Retry strategy if no JobManager available.
	 */
	AkkaKvStateLocationLookupService(
			LeaderRetrievalService leaderRetrievalService,
			ActorSystem actorSystem,
			FiniteDuration askTimeout,
			LookupRetryStrategyFactory retryStrategyFactory) {

		this.leaderRetrievalService = Preconditions.checkNotNull(leaderRetrievalService, "Leader retrieval service");
		this.actorSystem = Preconditions.checkNotNull(actorSystem, "Actor system");
		this.askTimeout = Preconditions.checkNotNull(askTimeout, "Ask Timeout");
		this.retryStrategyFactory = Preconditions.checkNotNull(retryStrategyFactory, "Retry strategy factory");
	}

	/**
	 * Starts leader retrieval; this service registers itself as the listener.
	 */
	public void start() {
		try {
			leaderRetrievalService.start(this);
		} catch (Exception e) {
			LOG.error("Failed to start leader retrieval service", e);
			throw new RuntimeException(e);
		}
	}

	/**
	 * Stops leader retrieval.
	 */
	public void shutDown() {
		try {
			leaderRetrievalService.stop();
		} catch (Exception e) {
			LOG.error("Failed to stop leader retrieval service", e);
			throw new RuntimeException(e);
		}
	}

	@Override
	public Future<KvStateLocation> getKvStateLookupInfo(final JobID jobId, final String registrationName) {
		// Delegate with a freshly created retry strategy so each lookup gets its own retry budget.
		return getKvStateLookupInfo(jobId, registrationName, retryStrategyFactory.createRetryStrategy());
	}

	/**
	 * Returns a future holding the {@link KvStateLocation} for the given job
	 * and KvState registration name.
	 *
	 * <p>If there is currently no JobManager registered with the service, the
	 * request is retried. The retry behaviour is specified by the
	 * {@link LookupRetryStrategy} of the lookup service.
	 *
	 * @param jobId               JobID the KvState instance belongs to
	 * @param registrationName    Name under which the KvState has been registered
	 * @param lookupRetryStrategy Retry strategy to use for retries on UnknownJobManager failures.
	 * @return Future holding the {@link KvStateLocation}
	 */
	@SuppressWarnings("unchecked")
	private Future<KvStateLocation> getKvStateLookupInfo(
			final JobID jobId,
			final String registrationName,
			final LookupRetryStrategy lookupRetryStrategy) {

		return jobManagerFuture
				.flatMap(new Mapper<ActorGateway, Future<Object>>() {
					@Override
					public Future<Object> apply(ActorGateway jobManager) {
						// Lookup the KvStateLocation
						Object msg = new KvStateMessage.LookupKvStateLocation(jobId, registrationName);
						return jobManager.ask(msg, askTimeout);
					}
				}, actorSystem.dispatcher())
				.mapTo(ClassTag$.MODULE$.<KvStateLocation>apply(KvStateLocation.class))
				.recoverWith(new Recover<Future<KvStateLocation>>() {
					@Override
					public Future<KvStateLocation> recover(Throwable failure) throws Throwable {
						// If the Future fails with UnknownJobManager, retry
						// the request. Otherwise all Futures will be failed
						// during the start up phase, when the JobManager did
						// not notify this service yet or leadership is lost
						// intermittently.
						if (failure instanceof UnknownJobManager && lookupRetryStrategy.tryRetry()) {
							// Schedule the recursive retry after the strategy's delay.
							return Patterns.after(
									lookupRetryStrategy.getRetryDelay(),
									actorSystem.scheduler(),
									actorSystem.dispatcher(),
									new Callable<Future<KvStateLocation>>() {
										@Override
										public Future<KvStateLocation> call() throws Exception {
											return getKvStateLookupInfo(
													jobId,
													registrationName,
													lookupRetryStrategy);
										}
									});
						} else {
							return Futures.failed(failure);
						}
					}
				}, actorSystem.dispatcher());
	}

	@Override
	public void notifyLeaderAddress(String leaderAddress, final UUID leaderSessionID) {
		if (LOG.isDebugEnabled()) {
			LOG.debug("Received leader address notification {}:{}", leaderAddress, leaderSessionID);
		}

		if (leaderAddress == null) {
			// No leader known; all in-flight lookups will fail with UnknownJobManager and retry.
			jobManagerFuture = UNKNOWN_JOB_MANAGER;
		} else {
			jobManagerFuture = AkkaUtils.getActorRefFuture(leaderAddress, actorSystem, askTimeout)
					.map(new Mapper<ActorRef, ActorGateway>() {
						@Override
						public ActorGateway apply(ActorRef actorRef) {
							return new AkkaActorGateway(actorRef, leaderSessionID);
						}
					}, actorSystem.dispatcher());
		}
	}

	@Override
	public void handleError(Exception exception) {
		jobManagerFuture = Futures.failed(exception);
	}

	// ------------------------------------------------------------------------

	/**
	 * Retry strategy for failed lookups.
	 *
	 * <p>Usage:
	 * <pre>
	 * LookupRetryStrategy retryStrategy = LookupRetryStrategyFactory.create();
	 *
	 * if (retryStrategy.tryRetry()) {
	 *     // OK to retry
	 *     FiniteDuration retryDelay = retryStrategy.getRetryDelay();
	 * }
	 * </pre>
	 */
	interface LookupRetryStrategy {

		/**
		 * Returns the current retry delay.
		 *
		 * @return Current retry delay.
		 */
		FiniteDuration getRetryDelay();

		/**
		 * Tries another retry and returns whether it is allowed or not.
		 *
		 * @return Whether it is allowed to do another restart or not.
		 */
		boolean tryRetry();

	}

	/**
	 * Factory for retry strategies.
	 */
	interface LookupRetryStrategyFactory {

		/**
		 * Creates a new retry strategy.
		 *
		 * @return The retry strategy.
		 */
		LookupRetryStrategy createRetryStrategy();

	}

	/**
	 * Factory for disabled retries.
	 */
	static class DisabledLookupRetryStrategyFactory implements LookupRetryStrategyFactory {

		private static final DisabledLookupRetryStrategy RETRY_STRATEGY = new DisabledLookupRetryStrategy();

		@Override
		public LookupRetryStrategy createRetryStrategy() {
			return RETRY_STRATEGY;
		}

		private static class DisabledLookupRetryStrategy implements LookupRetryStrategy {

			@Override
			public FiniteDuration getRetryDelay() {
				return FiniteDuration.Zero();
			}

			@Override
			public boolean tryRetry() {
				return false;
			}

		}

	}

	/**
	 * Factory for fixed delay retries.
	 */
	static class FixedDelayLookupRetryStrategyFactory implements LookupRetryStrategyFactory {

		private final int maxRetries;
		private final FiniteDuration retryDelay;

		FixedDelayLookupRetryStrategyFactory(int maxRetries, FiniteDuration retryDelay) {
			this.maxRetries = maxRetries;
			this.retryDelay = retryDelay;
		}

		@Override
		public LookupRetryStrategy createRetryStrategy() {
			return new FixedDelayLookupRetryStrategy(maxRetries, retryDelay);
		}

		private static class FixedDelayLookupRetryStrategy implements LookupRetryStrategy {

			private final Object retryLock = new Object();

			private final int maxRetries;

			private final FiniteDuration retryDelay;

			private int numRetries;

			public FixedDelayLookupRetryStrategy(int maxRetries, FiniteDuration retryDelay) {
				Preconditions.checkArgument(maxRetries >= 0, "Negative number maximum retries");
				this.maxRetries = maxRetries;
				this.retryDelay = Preconditions.checkNotNull(retryDelay, "Retry delay");
			}

			@Override
			public FiniteDuration getRetryDelay() {
				synchronized (retryLock) {
					return retryDelay;
				}
			}

			@Override
			public boolean tryRetry() {
				synchronized (retryLock) {
					if (numRetries < maxRetries) {
						numRetries++;
						return true;
					} else {
						return false;
					}
				}
			}

		}

	}

}
/* ***** BEGIN LICENSE BLOCK ***** Version: Apache 2.0/GPL 3.0/LGPL 3.0 CCT - Computational Chemistry Tools Jamberoo - Java Molecules Editor Copyright 2008-2015 Dr. Vladislav Vasilyev Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Contributor(s): Dr. Vladislav Vasilyev <vvv900@gmail.com> (original author) Alternatively, the contents of this file may be used under the terms of either the GNU General Public License Version 2 or later (the "GPL"), or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), in which case the provisions of the GPL or the LGPL are applicable instead of those above. If you wish to allow use of your version of this file only under the terms of either the GPL or the LGPL, and not to allow others to use your version of this file under the terms of the Apache 2.0, indicate your decision by deleting the provisions above and replace them with the notice and other provisions required by the GPL or the LGPL. If you do not delete the provisions above, a recipient may use your version of this file under the terms of any one of the Apache 2.0, the GPL or the LGPL. 
***** END LICENSE BLOCK *****/ package cct.j3d; import java.awt.Color; import java.util.ArrayList; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import java.util.logging.Logger; import java.util.prefs.Preferences; import org.scijava.java3d.Material; import org.scijava.vecmath.Color3f; import cct.modelling.ChemicalElements; /** * <p>Title: </p> * * <p>Description: </p> * * <p>Copyright: Copyright (c) 2004</p> * * <p>Company: ANU</p> * * @author not attributable * @version 1.0 */ public class ChemicalElementsColors extends ChemicalElements { static final String DEFAULT_ATOM_COLOR_SCHEME = "Sybyl Scheme"; static final String ATOM_COLOR_SCHEME_2 = "GaussView Scheme"; static final String ATOM_COLOR_SCHEME_JMOL = "Jmol Scheme"; static final String ATOM_COLOR_SCHEME_RASMOL = "Rasmol Scheme"; static final String ATOM_COLOR_SCHEME_RASMOL_NEW = "Rasmol CPKnew Scheme"; private static String currentColorSchemeName = DEFAULT_ATOM_COLOR_SCHEME; private static List currentColorScheme; private static String atomColorSchemeKey = "atomColorScheme"; private static Preferences prefs; // = Preferences.userNodeForPackage(getClass()); static final Logger logger = Logger.getLogger(ChemicalElementsColors.class.getCanonicalName()); static TreeMap atomColours = new TreeMap(); static List elementMaterial = new ArrayList(); static List elementColors = new ArrayList(); static List gaussianElementColors = new ArrayList(); static List rasmolElementColors = new ArrayList(); static List rasmolNewElementColors = new ArrayList(); static List jmolElementColors = new ArrayList(); static Map colorScheme = new LinkedHashMap(); static Material highlightMaterial = new Material( new Color3f(0.333f, 0.777f, 0.222f), new Color3f(0.777f, 0.222f, 0.555f), new Color3f(0.777f, 0.222f, 0.555f), new Color3f(1.0f, 1.0f, 1.0f), 15.0f); static Color3f highlightColor = new Color3f(Color.MAGENTA); static { 
elementColors.add(new Color3f(Color.DARK_GRAY)); // 0 - Dummy elementColors.add(new Color3f(Color.CYAN)); // 1 - H elementColors.add(new Color3f(Color.MAGENTA)); // 2 - He elementColors.add(new Color3f(Color.MAGENTA)); // 3 - Li elementColors.add(new Color3f(Color.MAGENTA)); // 4 - Be elementColors.add(new Color3f(Color.MAGENTA)); // 5 - B elementColors.add(new Color3f(Color.WHITE)); // 6 - C elementColors.add(new Color3f(Color.BLUE)); // 7 - N elementColors.add(new Color3f(Color.RED)); // 8 - O elementColors.add(new Color3f(Color.GREEN)); // 9 - F elementColors.add(new Color3f(Color.MAGENTA)); // 10 - Ne elementColors.add(new Color3f(Color.MAGENTA)); // 11 - Na elementColors.add(new Color3f(Color.MAGENTA)); // 12 - Mg elementColors.add(new Color3f(Color.CYAN)); // 13 - Al elementColors.add(new Color3f(Color.YELLOW)); // 14 - Si elementColors.add(new Color3f(Color.ORANGE)); // 15 - P elementColors.add(new Color3f(Color.YELLOW)); // 16 - S elementColors.add(new Color3f(Color.GREEN)); // 17 - Cl elementColors.add(new Color3f(Color.MAGENTA)); // 18 - Ar elementColors.add(new Color3f(Color.MAGENTA)); // 19 - K elementColors.add(new Color3f(Color.MAGENTA)); // 20 - Ca elementColors.add(new Color3f(Color.MAGENTA)); // 21 - Sc elementColors.add(new Color3f(Color.MAGENTA)); // 22 - Ti elementColors.add(new Color3f(Color.MAGENTA)); // 23 - V elementColors.add(new Color3f(Color.MAGENTA)); // 24 - Cr elementColors.add(new Color3f(Color.MAGENTA)); // 25 - Mn elementColors.add(new Color3f(Color.MAGENTA)); // 26 - Fe elementColors.add(new Color3f(Color.MAGENTA)); // 27 - Co elementColors.add(new Color3f(Color.MAGENTA)); // 28 - Ni elementColors.add(new Color3f(Color.MAGENTA)); // 29 - Cu elementColors.add(new Color3f(Color.MAGENTA)); // 30 - Zn elementColors.add(new Color3f(Color.MAGENTA)); // 31 - Ga elementColors.add(new Color3f(Color.MAGENTA)); // 32 - Ge elementColors.add(new Color3f(Color.MAGENTA)); // 33 - As elementColors.add(new Color3f(Color.MAGENTA)); // 34 - Se 
elementColors.add(new Color3f(Color.GREEN)); // 35 - Br elementColors.add(new Color3f(Color.MAGENTA)); // 36 - Kr elementColors.add(new Color3f(Color.MAGENTA)); // 37 - Rb elementColors.add(new Color3f(Color.MAGENTA)); // 38 - Sr elementColors.add(new Color3f(Color.MAGENTA)); // 39 - Y elementColors.add(new Color3f(Color.MAGENTA)); // 40 - Zr elementColors.add(new Color3f(Color.MAGENTA)); // 41 - Nb elementColors.add(new Color3f(Color.MAGENTA)); // 42 - Mo elementColors.add(new Color3f(Color.MAGENTA)); // 43 - Tc elementColors.add(new Color3f(Color.MAGENTA)); // 44 - Ru elementColors.add(new Color3f(Color.MAGENTA)); // 45 - Rh elementColors.add(new Color3f(Color.MAGENTA)); // 46 - Pd elementColors.add(new Color3f(Color.MAGENTA)); // 47 - Ag elementColors.add(new Color3f(Color.MAGENTA)); // 48 - Cd elementColors.add(new Color3f(Color.MAGENTA)); // 49 - In elementColors.add(new Color3f(Color.MAGENTA)); // 50 - Sn elementColors.add(new Color3f(Color.MAGENTA)); // 51 - Sb elementColors.add(new Color3f(Color.MAGENTA)); // 52 - Te elementColors.add(new Color3f(Color.PINK)); // 53 - I elementColors.add(new Color3f(Color.MAGENTA)); // 54 - Xe elementColors.add(new Color3f(Color.MAGENTA)); // 55 - Cs elementColors.add(new Color3f(Color.MAGENTA)); // 56 - Ba elementColors.add(new Color3f(Color.MAGENTA)); // 57 - La elementColors.add(new Color3f(Color.MAGENTA)); // 58 - Ce elementColors.add(new Color3f(Color.MAGENTA)); // 59 - Pr elementColors.add(new Color3f(Color.MAGENTA)); // 60 - Nd elementColors.add(new Color3f(Color.MAGENTA)); // 61 - Pm elementColors.add(new Color3f(Color.MAGENTA)); // 62 - Sm elementColors.add(new Color3f(Color.MAGENTA)); // 63 - Eu elementColors.add(new Color3f(Color.MAGENTA)); // 64 - Gd elementColors.add(new Color3f(Color.MAGENTA)); // 65 - Tb elementColors.add(new Color3f(Color.MAGENTA)); // 66 - Dy elementColors.add(new Color3f(Color.MAGENTA)); // 67 - Ho elementColors.add(new Color3f(Color.MAGENTA)); // 68 - Er elementColors.add(new 
Color3f(Color.MAGENTA)); // 69 - Tm elementColors.add(new Color3f(Color.MAGENTA)); // 70 - Yb elementColors.add(new Color3f(Color.MAGENTA)); // 71 - Lu elementColors.add(new Color3f(Color.MAGENTA)); // 72 - Hf elementColors.add(new Color3f(Color.MAGENTA)); // 73 - Ta elementColors.add(new Color3f(Color.MAGENTA)); // 74 - W elementColors.add(new Color3f(Color.MAGENTA)); // 75 - Re elementColors.add(new Color3f(Color.MAGENTA)); // 76 - Os elementColors.add(new Color3f(Color.MAGENTA)); // 77 - Ir elementColors.add(new Color3f(Color.MAGENTA)); // 78 - Pt elementColors.add(new Color3f(Color.MAGENTA)); // 79 - Au elementColors.add(new Color3f(Color.MAGENTA)); // 80 - Hg elementColors.add(new Color3f(Color.MAGENTA)); // 81 - Tl elementColors.add(new Color3f(Color.MAGENTA)); // 82 - Pb elementColors.add(new Color3f(Color.MAGENTA)); // 83 - Bi elementColors.add(new Color3f(Color.MAGENTA)); // 84 - Po elementColors.add(new Color3f(Color.MAGENTA)); // 85 - At elementColors.add(new Color3f(Color.MAGENTA)); // 86 - Rn elementColors.add(new Color3f(Color.MAGENTA)); // 87 - Fr elementColors.add(new Color3f(Color.MAGENTA)); // 88 - Ra elementColors.add(new Color3f(Color.MAGENTA)); // 89 - Ac elementColors.add(new Color3f(Color.MAGENTA)); // 90 - Th elementColors.add(new Color3f(Color.MAGENTA)); // 91 - Pa elementColors.add(new Color3f(Color.MAGENTA)); // 92 - U elementColors.add(new Color3f(Color.MAGENTA)); // 93 - Np elementColors.add(new Color3f(Color.MAGENTA)); // 94 - Pu elementColors.add(new Color3f(Color.MAGENTA)); // 95 - Am elementColors.add(new Color3f(Color.MAGENTA)); // 96 - Cm elementColors.add(new Color3f(Color.MAGENTA)); // 97 - Bk elementColors.add(new Color3f(Color.MAGENTA)); // 98 - Cf elementColors.add(new Color3f(Color.MAGENTA)); // 99 - Es elementColors.add(new Color3f(Color.MAGENTA)); // 100 - Fm elementColors.add(new Color3f(Color.MAGENTA)); // 101 - Md elementColors.add(new Color3f(Color.MAGENTA)); // 102 - No elementColors.add(new Color3f(Color.MAGENTA)); 
// 103 - Lr elementColors.add(new Color3f(Color.MAGENTA)); // 104 - Db elementColors.add(new Color3f(Color.MAGENTA)); // 105 - Jl elementColors.add(new Color3f(Color.MAGENTA)); // 106 - Rf elementColors.add(new Color3f(Color.MAGENTA)); // 107 - Bh elementColors.add(new Color3f(Color.MAGENTA)); // 108 - Hn elementColors.add(new Color3f(Color.MAGENTA)); // 109 - Mt gaussianElementColors.add(new Color3f(Color.DARK_GRAY)); // 0 - Dummy gaussianElementColors.add(new Color3f(0.80f, 0.80f, 0.80f)); // 1 - H gaussianElementColors.add(new Color3f(0.85f, 1.00f, 1.00f)); // 2 - He gaussianElementColors.add(new Color3f(0.80f, 0.49f, 1.00f)); // 3 - Li gaussianElementColors.add(new Color3f(0.80f, 1.00f, 0.00f)); // 4 - Be gaussianElementColors.add(new Color3f(1.00f, 0.71f, 0.71f)); // 5 - B gaussianElementColors.add(new Color3f(0.56f, 0.56f, 0.56f)); // 6 - C gaussianElementColors.add(new Color3f(0.10f, 0.10f, 0.90f)); // 7 - N gaussianElementColors.add(new Color3f(0.90f, 0.00f, 0.00f)); // 8 - O gaussianElementColors.add(new Color3f(0.70f, 1.00f, 1.00f)); // 9 - F gaussianElementColors.add(new Color3f(0.69f, 0.89f, 0.96f)); // 10 - Ne gaussianElementColors.add(new Color3f(0.67f, 0.36f, 0.95f)); // 11 - Na gaussianElementColors.add(new Color3f(0.70f, 0.80f, 0.00f)); // 12 - Mg gaussianElementColors.add(new Color3f(0.82f, 0.65f, 0.65f)); // 13 - Al gaussianElementColors.add(new Color3f(0.50f, 0.60f, 0.60f)); // 14 - Si gaussianElementColors.add(new Color3f(1.00f, 0.50f, 0.00f)); // 15 - P gaussianElementColors.add(new Color3f(1.00f, 0.78f, 0.16f)); // 16 - S gaussianElementColors.add(new Color3f(0.10f, 0.94f, 0.10f)); // 17 - Cl gaussianElementColors.add(new Color3f(0.50f, 0.82f, 0.89f)); // 18 - Ar gaussianElementColors.add(new Color3f(0.56f, 0.25f, 0.83f)); // 19 - K gaussianElementColors.add(new Color3f(0.60f, 0.60f, 0.00f)); // 20 - Ca gaussianElementColors.add(new Color3f(0.90f, 0.90f, 0.89f)); // 21 - Sc gaussianElementColors.add(new Color3f(0.75f, 0.76f, 0.78f)); // 22 - 
Ti gaussianElementColors.add(new Color3f(0.65f, 0.65f, 0.67f)); // 23 - V gaussianElementColors.add(new Color3f(0.54f, 0.60f, 0.78f)); // 24 - Cr gaussianElementColors.add(new Color3f(0.61f, 0.48f, 0.78f)); // 25 - Mn gaussianElementColors.add(new Color3f(0.50f, 0.48f, 0.78f)); // 26 - Fe gaussianElementColors.add(new Color3f(0.36f, 0.43f, 1.00f)); // 27 - Co gaussianElementColors.add(new Color3f(0.36f, 0.48f, 0.76f)); // 28 - Ni gaussianElementColors.add(new Color3f(1.00f, 0.48f, 0.38f)); // 29 - Cu gaussianElementColors.add(new Color3f(0.49f, 0.50f, 0.69f)); // 30 - Zn gaussianElementColors.add(new Color3f(0.76f, 0.56f, 0.56f)); // 31 - Ga gaussianElementColors.add(new Color3f(0.40f, 0.56f, 0.56f)); // 32 - Ge gaussianElementColors.add(new Color3f(0.74f, 0.50f, 0.89f)); // 33 - As gaussianElementColors.add(new Color3f(1.00f, 0.63f, 0.00f)); // 34 - Se gaussianElementColors.add(new Color3f(0.65f, 0.13f, 0.13f)); // 35 - Br gaussianElementColors.add(new Color3f(0.36f, 0.73f, 0.82f)); // 36 - Kr gaussianElementColors.add(new Color3f(0.44f, 0.18f, 0.69f)); // 37 - Rb gaussianElementColors.add(new Color3f(0.50f, 0.40f, 0.00f)); // 38 - Sr gaussianElementColors.add(new Color3f(0.58f, 0.99f, 1.00f)); // 39 - Y gaussianElementColors.add(new Color3f(0.58f, 0.88f, 0.88f)); // 40 - Zr gaussianElementColors.add(new Color3f(0.45f, 0.76f, 0.79f)); // 41 - Nb gaussianElementColors.add(new Color3f(0.33f, 0.71f, 0.71f)); // 42 - Mo gaussianElementColors.add(new Color3f(0.23f, 0.62f, 0.66f)); // 43 - Tc gaussianElementColors.add(new Color3f(0.14f, 0.56f, 0.59f)); // 44 - Ru gaussianElementColors.add(new Color3f(0.04f, 0.49f, 0.55f)); // 45 - Rh gaussianElementColors.add(new Color3f(0.00f, 0.41f, 0.52f)); // 46 - Pd gaussianElementColors.add(new Color3f(0.60f, 0.78f, 1.00f)); // 47 - Ag gaussianElementColors.add(new Color3f(1.00f, 0.85f, 0.56f)); // 48 - Cd gaussianElementColors.add(new Color3f(0.65f, 0.46f, 0.45f)); // 49 - In gaussianElementColors.add(new Color3f(0.40f, 0.50f, 
0.50f)); // 50 - Sn gaussianElementColors.add(new Color3f(0.62f, 0.39f, 0.71f)); // 51 - Sb gaussianElementColors.add(new Color3f(0.83f, 0.48f, 0.00f)); // 52 - Te gaussianElementColors.add(new Color3f(0.58f, 0.00f, 0.58f)); // 53 - I gaussianElementColors.add(new Color3f(0.26f, 0.62f, 0.69f)); // 54 - Xe gaussianElementColors.add(new Color3f(0.34f, 0.09f, 0.56f)); // 55 - Cs gaussianElementColors.add(new Color3f(0.40f, 0.20f, 0.00f)); // 56 - Ba gaussianElementColors.add(new Color3f(0.44f, 0.87f, 1.00f)); // 57 - La gaussianElementColors.add(new Color3f(1.00f, 1.00f, 0.78f)); // 58 - Ce gaussianElementColors.add(new Color3f(0.85f, 1.00f, 0.78f)); // 59 - Pr gaussianElementColors.add(new Color3f(0.78f, 1.00f, 0.78f)); // 60 - Nd gaussianElementColors.add(new Color3f(0.64f, 1.00f, 0.78f)); // 61 - Pm gaussianElementColors.add(new Color3f(0.56f, 1.00f, 0.78f)); // 62 - Sm gaussianElementColors.add(new Color3f(0.38f, 1.00f, 0.78f)); // 63 - Eu gaussianElementColors.add(new Color3f(0.27f, 1.00f, 0.78f)); // 64 - Gd gaussianElementColors.add(new Color3f(0.19f, 1.00f, 0.78f)); // 65 - Tb gaussianElementColors.add(new Color3f(0.12f, 1.00f, 0.71f)); // 66 - Dy gaussianElementColors.add(new Color3f(0.00f, 1.00f, 0.71f)); // 67 - Ho gaussianElementColors.add(new Color3f(0.00f, 0.90f, 0.46f)); // 68 - Er gaussianElementColors.add(new Color3f(0.00f, 0.83f, 0.32f)); // 69 - Tm gaussianElementColors.add(new Color3f(0.00f, 0.75f, 0.22f)); // 70 - Yb gaussianElementColors.add(new Color3f(0.00f, 0.67f, 0.14f)); // 71 - Lu gaussianElementColors.add(new Color3f(0.30f, 0.76f, 1.00f)); // 72 - Hf gaussianElementColors.add(new Color3f(0.30f, 0.65f, 1.00f)); // 73 - Ta gaussianElementColors.add(new Color3f(0.15f, 0.58f, 0.84f)); // 74 - W gaussianElementColors.add(new Color3f(0.15f, 0.49f, 0.67f)); // 75 - Re gaussianElementColors.add(new Color3f(0.15f, 0.40f, 0.59f)); // 76 - Os gaussianElementColors.add(new Color3f(0.09f, 0.33f, 0.53f)); // 77 - Ir gaussianElementColors.add(new 
Color3f(0.09f, 0.36f, 0.56f)); // 78 - Pt gaussianElementColors.add(new Color3f(1.00f, 0.82f, 0.14f)); // 79 - Au gaussianElementColors.add(new Color3f(0.71f, 0.71f, 0.76f)); // 80 - Hg gaussianElementColors.add(new Color3f(0.65f, 0.33f, 0.30f)); // 81 - Tl gaussianElementColors.add(new Color3f(0.34f, 0.35f, 0.38f)); // 82 - Pb gaussianElementColors.add(new Color3f(0.62f, 0.31f, 0.71f)); // 83 - Bi gaussianElementColors.add(new Color3f(0.67f, 0.36f, 0.00f)); // 84 - Po gaussianElementColors.add(new Color3f(0.46f, 0.31f, 0.27f)); // 85 - At gaussianElementColors.add(new Color3f(0.26f, 0.51f, 0.59f)); // 86 - Rn gaussianElementColors.add(new Color3f(0.26f, 0.00f, 0.40f)); // 87 - Fr gaussianElementColors.add(new Color3f(0.30f, 0.10f, 0.00f)); // 88 - Ra gaussianElementColors.add(new Color3f(0.44f, 0.67f, 0.98f)); // 89 - Ac gaussianElementColors.add(new Color3f(0.00f, 0.73f, 1.00f)); // 90 - Th gaussianElementColors.add(new Color3f(0.00f, 0.63f, 1.00f)); // 91 - Pa gaussianElementColors.add(new Color3f(0.00f, 0.56f, 1.00f)); // 92 - U gaussianElementColors.add(new Color3f(0.00f, 0.50f, 0.95f)); // 93 - Np gaussianElementColors.add(new Color3f(0.00f, 0.42f, 0.95f)); // 94 - Pu gaussianElementColors.add(new Color3f(0.33f, 0.36f, 0.95f)); // 95 - Am gaussianElementColors.add(new Color3f(0.47f, 0.36f, 0.89f)); // 96 - Cm gaussianElementColors.add(new Color3f(0.54f, 0.37f, 0.89f)); // 97 - Bk gaussianElementColors.add(new Color3f(0.63f, 0.21f, 0.83f)); // 98 - Cf gaussianElementColors.add(new Color3f(0.66f, 0.17f, 0.78f)); // 99 - Es gaussianElementColors.add(new Color3f(0.70f, 0.12f, 0.73f)); // 100 - Fm gaussianElementColors.add(new Color3f(0.70f, 0.05f, 0.65f)); // 101 - Md gaussianElementColors.add(new Color3f(0.74f, 0.05f, 0.53f)); // 102 - No gaussianElementColors.add(new Color3f(0.78f, 0.00f, 0.40f)); // 103 - Lr gaussianElementColors.add(new Color3f(1.00f, 0.50f, 0.50f)); // 104 - Db gaussianElementColors.add(new Color3f(0.90f, 0.40f, 0.40f)); // 105 - Jl 
gaussianElementColors.add(new Color3f(0.80f, 0.30f, 0.30f)); // 106 - Rf gaussianElementColors.add(new Color3f(0.70f, 0.20f, 0.20f)); // 107 - Bh gaussianElementColors.add(new Color3f(0.60f, 0.10f, 0.10f)); // 108 - Hn gaussianElementColors.add(new Color3f(0.50f, 0.00f, 0.00f)); // 109 - Mt jmolElementColors.add(new Color3f(new Color(0xFA1691))); // 0 - Dummy jmolElementColors.add(new Color3f(new Color(255, 255, 255))); // 1 - H jmolElementColors.add(new Color3f(new Color(217, 255, 255))); // 2 - He jmolElementColors.add(new Color3f(new Color(204, 128, 255))); // 3 - Li jmolElementColors.add(new Color3f(new Color(194, 255, 0))); // 4 - Be jmolElementColors.add(new Color3f(new Color(255, 181, 181))); // 5 - B jmolElementColors.add(new Color3f(new Color(144, 144, 144))); // 6 - C jmolElementColors.add(new Color3f(new Color(48, 80, 248))); // 7 - N jmolElementColors.add(new Color3f(new Color(255, 13, 13))); // 8 - O jmolElementColors.add(new Color3f(new Color(144, 224, 80))); // 9 - F jmolElementColors.add(new Color3f(new Color(179, 227, 245))); // 10 - Ne jmolElementColors.add(new Color3f(new Color(171, 92, 242))); // 11 - Na jmolElementColors.add(new Color3f(new Color(138, 255, 0))); // 12 - Mg jmolElementColors.add(new Color3f(new Color(191, 166, 166))); // 13 - Al jmolElementColors.add(new Color3f(new Color(240, 200, 160))); // 14 - Si jmolElementColors.add(new Color3f(new Color(255, 128, 0))); // 15 - P jmolElementColors.add(new Color3f(new Color(255, 255, 48))); // 16 - S jmolElementColors.add(new Color3f(new Color(31, 240, 31))); // 17 - Cl jmolElementColors.add(new Color3f(new Color(128, 209, 227))); // 18 - Ar jmolElementColors.add(new Color3f(new Color(143, 64, 212))); // 19 - K jmolElementColors.add(new Color3f(new Color(61, 255, 0))); // 20 - Ca jmolElementColors.add(new Color3f(new Color(230, 230, 230))); // 21 - Sc jmolElementColors.add(new Color3f(new Color(191, 194, 199))); // 22 - Ti jmolElementColors.add(new Color3f(new Color(166, 166, 171))); // 23 
- V jmolElementColors.add(new Color3f(new Color(138, 153, 199))); // 24 - Cr jmolElementColors.add(new Color3f(new Color(156, 122, 199))); // 25 - Mn jmolElementColors.add(new Color3f(new Color(224, 102, 51))); // 26 - Fe jmolElementColors.add(new Color3f(new Color(240, 144, 160))); // 27 - Co jmolElementColors.add(new Color3f(new Color(80, 208, 80))); // 28 - Ni jmolElementColors.add(new Color3f(new Color(200, 128, 51))); // 29 - Cu jmolElementColors.add(new Color3f(new Color(125, 128, 176))); // 30 - Zn jmolElementColors.add(new Color3f(new Color(194, 143, 143))); // 31 - Ga jmolElementColors.add(new Color3f(new Color(102, 143, 143))); // 32 - Ge jmolElementColors.add(new Color3f(new Color(189, 128, 227))); // 33 - As jmolElementColors.add(new Color3f(new Color(255, 161, 0))); // 34 - Se jmolElementColors.add(new Color3f(new Color(166, 41, 41))); // 35 - Br jmolElementColors.add(new Color3f(new Color(92, 184, 209))); // 36 - Kr jmolElementColors.add(new Color3f(new Color(112, 46, 176))); // 37 - Rb jmolElementColors.add(new Color3f(new Color(0, 255, 0))); // 38 - Sr jmolElementColors.add(new Color3f(new Color(148, 255, 255))); // 39 - Y jmolElementColors.add(new Color3f(new Color(148, 224, 224))); // 40 - Zr jmolElementColors.add(new Color3f(new Color(115, 194, 201))); // 41 - Nb jmolElementColors.add(new Color3f(new Color(84, 181, 181))); // 42 - Mo jmolElementColors.add(new Color3f(new Color(59, 158, 158))); // 43 - Tc jmolElementColors.add(new Color3f(new Color(36, 143, 143))); // 44 - Ru jmolElementColors.add(new Color3f(new Color(10, 125, 140))); // 45 - Rh jmolElementColors.add(new Color3f(new Color(0, 105, 133))); // 46 - Pd jmolElementColors.add(new Color3f(new Color(192, 192, 192))); // 47 - Ag jmolElementColors.add(new Color3f(new Color(255, 217, 143))); // 48 - Cd jmolElementColors.add(new Color3f(new Color(166, 117, 115))); // 49 - In jmolElementColors.add(new Color3f(new Color(102, 128, 128))); // 50 - Sn jmolElementColors.add(new Color3f(new 
Color(158, 99, 181))); // 51 - Sb jmolElementColors.add(new Color3f(new Color(212, 122, 0))); // 52 - Te jmolElementColors.add(new Color3f(new Color(148, 0, 148))); // 53 - I jmolElementColors.add(new Color3f(new Color(66, 158, 176))); // 54 - Xe jmolElementColors.add(new Color3f(new Color(87, 23, 143))); // 55 - Cs jmolElementColors.add(new Color3f(new Color(0, 201, 0))); // 56 - Ba jmolElementColors.add(new Color3f(new Color(112, 212, 255))); // 57 - La jmolElementColors.add(new Color3f(new Color(255, 255, 199))); // 58 - Ce jmolElementColors.add(new Color3f(new Color(217, 255, 199))); // 59 - Pr jmolElementColors.add(new Color3f(new Color(199, 255, 199))); // 60 - Nd jmolElementColors.add(new Color3f(new Color(163, 255, 199))); // 61 - Pm jmolElementColors.add(new Color3f(new Color(143, 255, 199))); // 62 - Sm jmolElementColors.add(new Color3f(new Color(97, 255, 199))); // 63 - Eu jmolElementColors.add(new Color3f(new Color(69, 255, 199))); // 64 - Gd jmolElementColors.add(new Color3f(new Color(48, 255, 199))); // 65 - Tb jmolElementColors.add(new Color3f(new Color(31, 255, 199))); // 66 - Dy jmolElementColors.add(new Color3f(new Color(0, 255, 156))); // 67 - Ho jmolElementColors.add(new Color3f(new Color(0, 230, 117))); // 68 - Er jmolElementColors.add(new Color3f(new Color(0, 212, 82))); // 69 - Tm jmolElementColors.add(new Color3f(new Color(0, 191, 56))); // 70 - Yb jmolElementColors.add(new Color3f(new Color(0, 171, 36))); // 71 - Lu jmolElementColors.add(new Color3f(new Color(77, 194, 255))); // 72 - Hf jmolElementColors.add(new Color3f(new Color(77, 166, 255))); // 73 - Ta jmolElementColors.add(new Color3f(new Color(33, 148, 214))); // 74 - W jmolElementColors.add(new Color3f(new Color(38, 125, 171))); // 75 - Re jmolElementColors.add(new Color3f(new Color(38, 102, 150))); // 76 - Os jmolElementColors.add(new Color3f(new Color(23, 84, 135))); // 77 - Ir jmolElementColors.add(new Color3f(new Color(208, 208, 224))); // 78 - Pt jmolElementColors.add(new 
Color3f(new Color(255, 209, 35))); // 79 - Au jmolElementColors.add(new Color3f(new Color(184, 184, 208))); // 80 - Hg jmolElementColors.add(new Color3f(new Color(166, 84, 77))); // 81 - Tl jmolElementColors.add(new Color3f(new Color(87, 89, 97))); // 82 - Pb jmolElementColors.add(new Color3f(new Color(158, 79, 181))); // 83 - Bi jmolElementColors.add(new Color3f(new Color(171, 92, 0))); // 84 - Po jmolElementColors.add(new Color3f(new Color(117, 79, 69))); // 85 - At jmolElementColors.add(new Color3f(new Color(66, 130, 150))); // 86 - Rn jmolElementColors.add(new Color3f(new Color(66, 0, 102))); // 87 - Fr jmolElementColors.add(new Color3f(new Color(0, 125, 0))); // 88 - Ra jmolElementColors.add(new Color3f(new Color(112, 171, 250))); // 89 - Ac jmolElementColors.add(new Color3f(new Color(0, 186, 255))); // 90 - Th jmolElementColors.add(new Color3f(new Color(0, 161, 255))); // 91 - Pa jmolElementColors.add(new Color3f(new Color(0, 143, 255))); // 92 - U jmolElementColors.add(new Color3f(new Color(0, 128, 255))); // 93 - Np jmolElementColors.add(new Color3f(new Color(0, 107, 255))); // 94 - Pu jmolElementColors.add(new Color3f(new Color(84, 92, 242))); // 95 - Am jmolElementColors.add(new Color3f(new Color(120, 92, 227))); // 96 - Cm jmolElementColors.add(new Color3f(new Color(138, 79, 227))); // 97 - Bk jmolElementColors.add(new Color3f(new Color(161, 54, 212))); // 98 - Cf jmolElementColors.add(new Color3f(new Color(179, 31, 212))); // 99 - Es jmolElementColors.add(new Color3f(new Color(179, 31, 186))); // 100 - Fm jmolElementColors.add(new Color3f(new Color(179, 13, 166))); // 101 - Md jmolElementColors.add(new Color3f(new Color(189, 13, 135))); // 102 - No jmolElementColors.add(new Color3f(new Color(199, 0, 102))); // 103 - Lr jmolElementColors.add(new Color3f(new Color(204, 0, 89))); // 104 - Db jmolElementColors.add(new Color3f(new Color(209, 0, 79))); // 105 - Jl jmolElementColors.add(new Color3f(new Color(217, 0, 69))); // 106 - Rf jmolElementColors.add(new 
Color3f(new Color(224, 0, 56))); // 107 - Bh jmolElementColors.add(new Color3f(new Color(230, 0, 46))); // 108 - Hn jmolElementColors.add(new Color3f(new Color(235, 0, 38))); // 109 - Mt rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 0 - Dummy rasmolElementColors.add(new Color3f(new Color(0xFFFFFF))); // 1 - H rasmolElementColors.add(new Color3f(new Color(0xFFC0CB))); // 2 - He rasmolElementColors.add(new Color3f(new Color(0xB22222))); // 3 - Li rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 4 - Be rasmolElementColors.add(new Color3f(new Color(0x00FF00))); // 5 - B rasmolElementColors.add(new Color3f(new Color(0xC8C8C8))); // 6 - C rasmolElementColors.add(new Color3f(new Color(0x8F8FFF))); // 7 - N rasmolElementColors.add(new Color3f(new Color(0xF00000))); // 8 - O rasmolElementColors.add(new Color3f(new Color(0xDAA520))); // 9 - F rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 10 - Ne rasmolElementColors.add(new Color3f(new Color(0x0000FF))); // 11 - Na rasmolElementColors.add(new Color3f(new Color(0x228B22))); // 12 - Mg rasmolElementColors.add(new Color3f(new Color(0x808090))); // 13 - Al rasmolElementColors.add(new Color3f(new Color(0xDAA520))); // 14 - Si rasmolElementColors.add(new Color3f(new Color(0xFFA500))); // 15 - P rasmolElementColors.add(new Color3f(new Color(0xFFC832))); // 16 - S rasmolElementColors.add(new Color3f(new Color(0x00FF00))); // 17 - Cl rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 18 - Ar rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 19 - K rasmolElementColors.add(new Color3f(new Color(0x808090))); // 20 - Ca rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 21 - Sc rasmolElementColors.add(new Color3f(new Color(0x808090))); // 22 - Ti rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 23 - V rasmolElementColors.add(new Color3f(new Color(0x808090))); // 24 - Cr rasmolElementColors.add(new Color3f(new Color(0x808090))); // 25 - Mn 
rasmolElementColors.add(new Color3f(new Color(0xFFA500))); // 26 - Fe rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 27 - Co rasmolElementColors.add(new Color3f(new Color(0xA52A2A))); // 28 - Ni rasmolElementColors.add(new Color3f(new Color(0xA52A2A))); // 29 - Cu rasmolElementColors.add(new Color3f(new Color(0xA52A2A))); // 30 - Zn rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 31 - Ga rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 32 - Ge rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 33 - As rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 34 - Se rasmolElementColors.add(new Color3f(new Color(0xA52A2A))); // 35 - Br rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 36 - Kr rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 37 - Rb rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 38 - Sr rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 39 - Y rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 40 - Zr rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 41 - Nb rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 42 - Mo rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 43 - Tc rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 44 - Ru rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 45 - Rh rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 46 - Pd rasmolElementColors.add(new Color3f(new Color(0x808090))); // 47 - Ag rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 48 - Cd rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 49 - In rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 50 - Sn rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 51 - Sb rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 52 - Te rasmolElementColors.add(new Color3f(new Color(0xA020F0))); // 53 - I rasmolElementColors.add(new Color3f(new 
Color(0xFF1493))); // 54 - Xe rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 55 - Cs rasmolElementColors.add(new Color3f(new Color(0xFFA500))); // 56 - Ba rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 57 - La rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 58 - Ce rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 59 - Pr rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 60 - Nd rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 61 - Pm rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 62 - Sm rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 63 - Eu rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 64 - Gd rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 65 - Tb rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 66 - Dy rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 67 - Ho rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 68 - Er rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 69 - Tm rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 70 - Yb rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 71 - Lu rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 72 - Hf rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 73 - Ta rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 74 - W rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 75 - Re rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 76 - Os rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 77 - Ir rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 78 - Pt rasmolElementColors.add(new Color3f(new Color(0xDAA520))); // 79 - Au rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 80 - Hg rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 81 - Tl rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 82 - Pb 
rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 83 - Bi rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 84 - Po rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 85 - At rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 86 - Rn rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 87 - Fr rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 88 - Ra rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 89 - Ac rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 90 - Th rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 91 - Pa rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 92 - U rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 93 - Np rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 94 - Pu rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 95 - Am rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 96 - Cm rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 97 - Bk rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 98 - Cf rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 99 - Es rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 100 - Fm rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 101 - Md rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 102 - No rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 103 - Lr rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 104 - Db rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 105 - Jl rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 106 - Rf rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 107 - Bh rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 108 - Hn rasmolElementColors.add(new Color3f(new Color(0xFF1493))); // 109 - Mt rasmolNewElementColors.addAll(rasmolElementColors); rasmolNewElementColors.set(0, new Color3f(new 
Color(0xFA1691))); // 0 - Unknown rasmolNewElementColors.set(3, new Color3f(new Color(0xB22121))); // 3 - Li rasmolNewElementColors.set(6, new Color3f(new Color(0xD3D3D3))); // 6 - C rasmolNewElementColors.set(7, new Color3f(new Color(0x87CEE6))); // 7 - N rasmolNewElementColors.set(8, new Color3f(new Color(0xFF0000))); // 8 - O rasmolNewElementColors.set(13, new Color3f(new Color(0x696969))); // 13 - Al rasmolNewElementColors.set(15, new Color3f(new Color(0xFFAA00))); // 15 - P rasmolNewElementColors.set(16, new Color3f(new Color(0xFFFF00))); // 16 - S rasmolNewElementColors.set(20, new Color3f(new Color(0x696969))); // 20 - Ca rasmolNewElementColors.set(22, new Color3f(new Color(0x696969))); // 22 - Ti rasmolNewElementColors.set(24, new Color3f(new Color(0x696969))); // 24 - Cr rasmolNewElementColors.set(25, new Color3f(new Color(0x696969))); // 25 - Mn rasmolNewElementColors.set(26, new Color3f(new Color(0xFFAA00))); // 26 - Fe rasmolNewElementColors.set(28, new Color3f(new Color(0x802828))); // 28 - Ni rasmolNewElementColors.set(29, new Color3f(new Color(0x802828))); // 29 - Cu rasmolNewElementColors.set(30, new Color3f(new Color(0x802828))); // 30 - Zn rasmolNewElementColors.set(35, new Color3f(new Color(0x802828))); // 35 - Br rasmolNewElementColors.set(47, new Color3f(new Color(0x696969))); // 47 - Ag rasmolNewElementColors.set(56, new Color3f(new Color(0xFFAA00))); // 56 - Ba colorScheme.put(DEFAULT_ATOM_COLOR_SCHEME, elementColors); colorScheme.put(ATOM_COLOR_SCHEME_2, gaussianElementColors); colorScheme.put(ATOM_COLOR_SCHEME_JMOL, jmolElementColors); colorScheme.put(ATOM_COLOR_SCHEME_RASMOL, rasmolElementColors); colorScheme.put(ATOM_COLOR_SCHEME_RASMOL_NEW, rasmolNewElementColors); currentColorScheme = elementColors; } public static Color3f getElementColor(int atomNumber) { Color3f color = new Color3f(Color.LIGHT_GRAY); if (atomNumber < 0 || atomNumber >= elementColors.size()) { return color; } try { //color.set( (Color3f) elementColors.get(atomNumber)); 
color.set( (Color3f) currentColorScheme.get(atomNumber)); } catch (IndexOutOfBoundsException e) { // } return color; } public static Material createMaterial(Color3f color) { if (color == null) { return null; } Material material = new Material(); // --- Diffuse Color material.setDiffuseColor(color); // --- Ambient Color material.setAmbientColor(0.2f * color.x, 0.2f * color.y, 0.2f * color.z); // --- Emissive Color material.setEmissiveColor(0.0f, 0.0f, 0.0f); // --- Specular Color material.setSpecularColor(1.0f, 1.0f, 1.0f); // --- Set Shininess material.setShininess(15.0f); material.setCapability(Material.ALLOW_COMPONENT_READ); material.setCapability(Material.ALLOW_COMPONENT_WRITE); return material; } public static Material getElementMaterial(int atomNumber) { Material material = new Material(); if (atomNumber < 0 || atomNumber >= elementColors.size()) { return material; } Color3f color = new Color3f(); try { color.set( (Color3f) currentColorScheme.get(atomNumber)); //color.set( (Color3f) elementColors.get(atomNumber)); } catch (IndexOutOfBoundsException e) { // } return createMaterial(color); } /** * Returns number of available atom color schemes * @return int */ public static int getAtomColorSchemeNumber() { return colorScheme.size(); } public static String getCurrentAtomColorScheme() { return currentColorSchemeName; } public static void setCurrentAtomColorScheme(String scheme) { if (colorScheme.containsKey(scheme)) { currentColorSchemeName = scheme; currentColorScheme = (List) colorScheme.get(scheme); return; } System.err.println("No such atom color scheme: " + scheme + " Ignored..."); } public static String[] getAtomColorSchemeNames() { if (colorScheme == null || colorScheme.size() < 1) { return null; } String[] schemes = new String[colorScheme.size()]; Set set = colorScheme.entrySet(); Iterator iter = set.iterator(); int count = 0; while (iter.hasNext()) { Map.Entry me = (Map.Entry) iter.next(); schemes[count] = me.getKey().toString(); ++count; } return 
schemes; } public static void retrieveAtomColorSchemePrefs(Class c) { try { prefs = Preferences.userNodeForPackage(c); } catch (Exception ex) { System.err.println("Error retrieving Atom Color Scheme Preferences: " + ex.getMessage() + " Ignored..."); return; } String scheme = prefs.get(atomColorSchemeKey, currentColorSchemeName); if (colorScheme.containsKey(scheme)) { currentColorSchemeName = scheme; currentColorScheme = (List) colorScheme.get(scheme); return; } else { logger.info("Retrieving Preferences: There is no such Atom Color Scheme: " + scheme); currentColorSchemeName = ATOM_COLOR_SCHEME_2; currentColorScheme = (List) colorScheme.get(currentColorSchemeName); } } public static void saveAtomColorSchemePrefs(Class c) { try { prefs = Preferences.userNodeForPackage(c); } catch (Exception ex) { System.err.println("Error saving Atom Color Scheme Preferences: " + ex.getMessage()); return; } try { prefs.put(atomColorSchemeKey, currentColorSchemeName); } catch (Exception ex) { System.err.println("Cannot save Atom Color Scheme Preferences: " + ex.getMessage()); } } public static Material getHighlightMaterial() { return highlightMaterial; } public static Color3f getHighlightColor3f() { return highlightColor; } }
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInsight.completion;

import com.intellij.codeInsight.ExpectedTypeInfo;
import com.intellij.codeInsight.ExpectedTypesProvider;
import com.intellij.codeInsight.TailType;
import com.intellij.codeInsight.completion.scope.JavaCompletionProcessor;
import com.intellij.codeInsight.daemon.impl.quickfix.ImportClassFix;
import com.intellij.codeInsight.lookup.*;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.lang.LangBundle;
import com.intellij.lang.java.JavaLanguage;
import com.intellij.openapi.actionSystem.IdeActions;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.highlighter.HighlighterIterator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.patterns.ElementPattern;
import com.intellij.patterns.PatternCondition;
import com.intellij.patterns.PsiJavaElementPattern;
import com.intellij.patterns.PsiNameValuePairPattern;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.filters.*;
import com.intellij.psi.filters.classes.AnnotationTypeFilter;
import com.intellij.psi.filters.classes.AssignableFromContextFilter;
import com.intellij.psi.filters.element.ExcludeDeclaredFilter;
import com.intellij.psi.filters.element.ModifierFilter;
import com.intellij.psi.filters.getters.ExpectedTypesGetter;
import com.intellij.psi.impl.source.PsiJavaCodeReferenceElementImpl;
import com.intellij.psi.impl.source.PsiLabelReference;
import com.intellij.psi.impl.source.tree.ElementType;
import com.intellij.psi.scope.ElementClassFilter;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.psi.util.TypeConversionUtil;
import com.intellij.util.Consumer;
import com.intellij.util.DocumentUtil;
import com.intellij.util.PairConsumer;
import com.intellij.util.ProcessingContext;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.*;

import static com.intellij.patterns.PsiJavaPatterns.*;
import static com.intellij.util.ObjectUtils.assertNotNull;

/**
 * Main entry point for BASIC-type Java code completion: decides which PSI elements may be
 * suggested at the caret position (references, keywords, classes, annotation attributes),
 * prepares the file before completion runs ({@link #beforeCompletion}), and produces the
 * "no suggestions" / advertisement texts.
 *
 * @author peter
 */
public class JavaCompletionContributor extends CompletionContributor {
  private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.completion.JavaCompletionContributor");

  // Caret right after '@', i.e. the name position of an annotation.
  public static final ElementPattern<PsiElement> ANNOTATION_NAME = psiElement().
    withParents(PsiJavaCodeReferenceElement.class, PsiAnnotation.class).afterLeaf("@");
  // A reference right after '.' that starts an expression statement — malformed, completion is suppressed there.
  private static final PsiJavaElementPattern.Capture<PsiElement> UNEXPECTED_REFERENCE_AFTER_DOT =
    psiElement().afterLeaf(".").insideStarting(psiExpressionStatement());
  private static final PsiNameValuePairPattern NAME_VALUE_PAIR =
    psiNameValuePair().withSuperParent(2, psiElement(PsiAnnotation.class));
  // Attribute-name position inside an annotation's parameter list, either as identifier or right after '('.
  private static final ElementPattern<PsiElement> ANNOTATION_ATTRIBUTE_NAME =
    or(psiElement(PsiIdentifier.class).withParent(NAME_VALUE_PAIR),
       psiElement().afterLeaf("(").withParent(psiReferenceExpression().withParent(NAME_VALUE_PAIR)));
  // A case label of a switch whose selector expression has an enum type.
  private static final ElementPattern SWITCH_LABEL =
    psiElement().withSuperParent(2, psiElement(PsiSwitchLabelStatement.class).withSuperParent(2,
      psiElement(PsiSwitchStatement.class).with(new PatternCondition<PsiSwitchStatement>("enumExpressionType") {
        @Override
        public boolean accepts(@NotNull PsiSwitchStatement psiSwitchStatement, ProcessingContext context) {
          final PsiExpression expression = psiSwitchStatement.getExpression();
          if(expression == null) return false;
          PsiClass aClass = PsiUtil.resolveClassInClassTypeOnly(expression.getType());
          return aClass != null && aClass.isEnum();
        }
      })));
  // Caret immediately after a numeric literal token (e.g. typing '1.' — '.' must not trigger member completion).
  private static final ElementPattern<PsiElement> AFTER_NUMBER_LITERAL =
    psiElement().afterLeaf(psiElement().withElementType(
      elementType().oneOf(JavaTokenType.DOUBLE_LITERAL, JavaTokenType.LONG_LITERAL, JavaTokenType.FLOAT_LITERAL, JavaTokenType.INTEGER_LITERAL)));
  // Reference inside an import statement (used to offer the '*' element).
  private static final ElementPattern<PsiElement> IMPORT_REFERENCE =
    psiElement().withParent(psiElement(PsiJavaCodeReferenceElement.class).withParent(PsiImportStatementBase.class));

  /**
   * Chooses an {@link ElementFilter} constraining what a reference at {@code position} may
   * complete to, based on a prioritized cascade of syntactic contexts (the order of the checks
   * below is significant). Returns {@code null} to indicate that reference completion should be
   * suppressed entirely at this position; falls through to {@link TrueFilter} (anything goes)
   * when no special context matches.
   */
  @Nullable
  public static ElementFilter getReferenceFilter(PsiElement position) {
    // Completion after extends in interface, type parameter and implements in class
    final PsiClass containingClass = PsiTreeUtil.getParentOfType(position, PsiClass.class, false,
                                                                 PsiCodeBlock.class, PsiMethod.class,
                                                                 PsiExpressionList.class, PsiVariable.class, PsiAnnotation.class);
    if (containingClass != null && psiElement().afterLeaf(PsiKeyword.EXTENDS, PsiKeyword.IMPLEMENTS, ",", "&").accepts(position)) {
      // Only classes, and not ones already assignable from the context (avoids suggesting the class itself / subclasses).
      return new AndFilter(ElementClassFilter.CLASS, new NotFilter(new AssignableFromContextFilter()));
    }

    if (ANNOTATION_NAME.accepts(position)) {
      return new AnnotationTypeFilter();
    }

    if (JavaKeywordCompletion.DECLARATION_START.getValue().accepts(position) ||
        JavaKeywordCompletion.isInsideParameterList(position) ||
        isInsideAnnotationName(position)) {
      return new OrFilter(ElementClassFilter.CLASS, ElementClassFilter.PACKAGE_FILTER);
    }

    if (psiElement().afterLeaf(PsiKeyword.INSTANCEOF).accepts(position)) {
      return new ElementExtractorFilter(ElementClassFilter.CLASS);
    }

    if (JavaKeywordCompletion.VARIABLE_AFTER_FINAL.accepts(position)) {
      return ElementClassFilter.CLASS;
    }

    // Positions where reference completion makes no sense at all.
    if (isCatchFinallyPosition(position) ||
        JavaKeywordCompletion.START_SWITCH.accepts(position) ||
        JavaKeywordCompletion.isInstanceofPlace(position) ||
        JavaKeywordCompletion.isAfterPrimitiveOrArrayType(position)) {
      return null;
    }

    if (JavaKeywordCompletion.START_FOR.accepts(position)) {
      return new OrFilter(ElementClassFilter.CLASS, ElementClassFilter.VARIABLE);
    }

    if (JavaSmartCompletionContributor.AFTER_NEW.accepts(position)) {
      return ElementClassFilter.CLASS;
    }

    if (psiElement().inside(PsiReferenceParameterList.class).accepts(position)) {
      return ElementClassFilter.CLASS;
    }

    if (psiElement().inside(PsiAnnotationParameterList.class).accepts(position)) {
      return createAnnotationFilter(position);
    }

    // In a variable initializer, don't suggest the variable being declared.
    PsiVariable var = PsiTreeUtil.getParentOfType(position, PsiVariable.class, false, PsiClass.class);
    if (var != null && PsiTreeUtil.isAncestor(var.getInitializer(), position, false)) {
      return new ExcludeDeclaredFilter(new ClassFilter(PsiVariable.class));
    }

    if (SWITCH_LABEL.accepts(position)) {
      // Only the enum's constants are valid case labels.
      return new ClassFilter(PsiField.class) {
        @Override
        public boolean isAcceptable(Object element, PsiElement context) {
          return element instanceof PsiEnumConstant;
        }
      };
    }

    return TrueFilter.INSTANCE;
  }

  /**
   * True when the caret follows the closing brace of a try-block body (without a resource list),
   * i.e. where only {@code catch}/{@code finally} keywords are appropriate.
   */
  private static boolean isCatchFinallyPosition(PsiElement position) {
    PsiElement leaf = PsiTreeUtil.prevVisibleLeaf(position);
    return leaf != null &&
           leaf.textMatches("}") &&
           leaf.getParent() instanceof PsiCodeBlock &&
           leaf.getParent().getParent() instanceof PsiTryStatement &&
           ((PsiTryStatement)leaf.getParent().getParent()).getResourceList() == null;
  }

  /** True when the caret is inside the name reference of an annotation (but not the annotation as a whole). */
  private static boolean isInsideAnnotationName(PsiElement position) {
    PsiAnnotation anno = PsiTreeUtil.getParentOfType(position, PsiAnnotation.class, true, PsiMember.class);
    return anno != null && PsiTreeUtil.isAncestor(anno.getNameReferenceElement(), position, true);
  }

  /**
   * Filter for annotation parameter values: classes, packages, static final fields, and —
   * at the start of a name-value pair — the annotation's own attribute methods.
   */
  private static ElementFilter createAnnotationFilter(PsiElement position) {
    OrFilter orFilter = new OrFilter(ElementClassFilter.CLASS, ElementClassFilter.PACKAGE_FILTER,
                                     new AndFilter(new ClassFilter(PsiField.class),
                                                   new ModifierFilter(PsiModifier.STATIC, PsiModifier.FINAL)));
    if (psiElement().insideStarting(psiNameValuePair()).accepts(position)) {
      orFilter.addFilter(new ClassFilter(PsiAnnotationMethod.class) {
        @Override
        public boolean isAcceptable(Object element, PsiElement context) {
          return element instanceof PsiAnnotationMethod && PsiUtil.isAnnotationMethod((PsiElement)element);
        }
      });
    }
    return orFilter;
  }

  /**
   * Orchestrates BASIC completion for Java: bails out of non-Java / malformed positions, then
   * layers the individual providers (annotation attributes, type arguments, lambdas, constructor
   * inheritors, return types, casts, collect-conversions, keywords, expression members, reference
   * variants, word completion inside literals, generate-member items, all-classes, and finally
   * global static members on extended completion). Calls {@code result.stopHere()} at the end so
   * later contributors don't run.
   */
  @Override
  public void fillCompletionVariants(@NotNull final CompletionParameters parameters, @NotNull final CompletionResultSet _result) {
    if (parameters.getCompletionType() != CompletionType.BASIC) {
      return;
    }

    final PsiElement position = parameters.getPosition();
    if (!isInJavaContext(position)) {
      return;
    }

    // No suggestions after a number literal or after a dangling '.' starting a statement.
    if (AFTER_NUMBER_LITERAL.accepts(position) || UNEXPECTED_REFERENCE_AFTER_DOT.accepts(position)) {
      _result.stopHere();
      return;
    }

    final CompletionResultSet result = JavaCompletionSorting.addJavaSorting(parameters, _result);

    if (ANNOTATION_ATTRIBUTE_NAME.accepts(position) && !JavaKeywordCompletion.isAfterPrimitiveOrArrayType(position)) {
      JavaKeywordCompletion.addExpectedTypeMembers(parameters, result);
      completeAnnotationAttributeName(result, position, parameters);
      result.stopHere();
      return;
    }

    // Tracks classes already suggested so the all-classes pass doesn't duplicate them.
    final InheritorsHolder inheritors = new InheritorsHolder(result);
    if (TypeArgumentCompletionProvider.IN_TYPE_ARGS.accepts(position)) {
      new TypeArgumentCompletionProvider(false, inheritors).addCompletions(parameters, new ProcessingContext(), result);
    }

    result.addAllElements(FunctionalExpressionCompletionProvider.getLambdaVariants(parameters, true));

    PrefixMatcher matcher = result.getPrefixMatcher();
    if (JavaSmartCompletionContributor.AFTER_NEW.accepts(position)) {
      new JavaInheritorsGetter(ConstructorInsertHandler.BASIC_INSTANCE).generateVariants(parameters, matcher, inheritors);
    }

    if (MethodReturnTypeProvider.IN_METHOD_RETURN_TYPE.accepts(position)) {
      MethodReturnTypeProvider.addProbableReturnTypes(parameters, new Consumer<LookupElement>() {
        @Override
        public void consume(LookupElement element) {
          registerClassFromTypeElement(element, inheritors);
          result.addElement(element);
        }
      });
    }

    if (SmartCastProvider.shouldSuggestCast(parameters)) {
      SmartCastProvider.addCastVariants(parameters, new Consumer<LookupElement>() {
        @Override
        public void consume(LookupElement element) {
          registerClassFromTypeElement(element, inheritors);
          // Casts get a slight priority boost.
          result.addElement(PrioritizedLookupElement.withPriority(element, 1));
        }
      });
    }

    PsiElement parent = position.getParent();
    if (parent instanceof PsiReferenceExpression) {
      final List<ExpectedTypeInfo> expected = Arrays.asList(ExpectedTypesProvider.getExpectedTypes((PsiExpression)parent, true));
      CollectConversion.addCollectConversion((PsiReferenceExpression)parent, expected,
                                             JavaSmartCompletionContributor.decorateWithoutTypeCheck(result, expected));
    }

    if (IMPORT_REFERENCE.accepts(position)) {
      result.addElement(LookupElementBuilder.create("*"));
    }

    addKeywords(parameters, result);

    addExpressionVariants(parameters, position, result);

    Set<String> usedWords = addReferenceVariants(parameters, result, inheritors);

    // Inside string literals fall back to plain word completion, unless a hard (non-soft) reference owns the spot.
    if (psiElement().inside(PsiLiteralExpression.class).accepts(position)) {
      PsiReference reference = position.getContainingFile().findReferenceAt(parameters.getOffset());
      if (reference == null || reference.isSoft()) {
        WordCompletionContributor.addWordCompletionVariants(result, parameters, usedWords);
      }
    }

    JavaGenerateMemberCompletionContributor.fillCompletionVariants(parameters, result);

    addAllClasses(parameters, result, inheritors);

    // Extended (second) completion on an unqualified reference also offers global static members.
    if (parent instanceof PsiReferenceExpression && !((PsiReferenceExpression)parent).isQualified() &&
        parameters.isExtendedCompletion() && StringUtil.isNotEmpty(matcher.getPrefix())) {
      new JavaStaticMemberProcessor(parameters).processStaticMethodsGlobally(matcher, result);
    }

    result.stopHere();
  }

  /** Registers the class behind a type lookup item (only raw class types) so it is not suggested twice. */
  private static void registerClassFromTypeElement(LookupElement element, InheritorsHolder inheritors) {
    PsiType type = assertNotNull(element.as(PsiTypeLookupItem.CLASS_CONDITION_KEY)).getType();
    PsiClass aClass = type instanceof PsiClassType && ((PsiClassType)type).getParameterCount() == 0
                      ? ((PsiClassType)type).resolve() : null;
    if (aClass != null) {
      inheritors.registerClass(aClass);
    }
  }

  /** Adds expected-type members (and same-signature call parameters) when the caret is in expression position. */
  private static void addExpressionVariants(@NotNull CompletionParameters parameters, PsiElement position, CompletionResultSet result) {
    if (JavaSmartCompletionContributor.INSIDE_EXPRESSION.accepts(position) &&
        !JavaKeywordCompletion.AFTER_DOT.accepts(position) &&
        !SmartCastProvider.shouldSuggestCast(parameters)) {
      JavaKeywordCompletion.addExpectedTypeMembers(parameters, result);
      if (SameSignatureCallParametersProvider.IN_CALL_ARGUMENT.accepts(position)) {
        new SameSignatureCallParametersProvider().addCompletions(parameters, new ProcessingContext(), result);
      }
    }
  }

  /** True when the element's language is Java or a language derived from it. */
  public static boolean isInJavaContext(PsiElement position) {
    return PsiUtilCore.findLanguageFromElement(position).isKindOf(JavaLanguage.INSTANCE);
  }

  /**
   * On second-and-later invocations, adds all project/library classes matching the prefix
   * (skipping ones already suggested); on the first invocation only advertises that a second
   * completion would show them.
   */
  public static void addAllClasses(final CompletionParameters parameters, final CompletionResultSet result, final InheritorsHolder inheritors) {
    if (!isClassNamePossible(parameters) || !mayStartClassName(result)) {
      return;
    }

    if (parameters.getInvocationCount() >= 2) {
      // 'filterByScope' only on exactly the second invocation; the third shows everything.
      JavaClassNameCompletionContributor.addAllClasses(parameters,
                                                       parameters.getInvocationCount() <= 2,
                                                       result.getPrefixMatcher(),
                                                       new Consumer<LookupElement>() {
        @Override
        public void consume(LookupElement element) {
          if (!inheritors.alreadyProcessed(element)) {
            result.addElement(JavaClassNameCompletionContributor.highlightIfNeeded((JavaPsiClassReferenceElement)element, parameters));
          }
        }
      });
    }
    else {
      advertiseSecondCompletion(parameters.getPosition().getProject(), result);
    }
  }

  /** Shows the "press <shortcut> to see non-imported classes" advertisement when the feature tracker allows it. */
  public static void advertiseSecondCompletion(Project project, CompletionResultSet result) {
    if (FeatureUsageTracker.getInstance().isToBeAdvertisedInLookup(CodeCompletionFeatures.SECOND_BASIC_COMPLETION, project)) {
      result.addLookupAdvertisement("Press " + getActionShortcut(IdeActions.ACTION_CODE_COMPLETION) + " to see non-imported classes");
    }
  }

  /**
   * Processes all references at the caret and adds their completion variants, applying
   * {@link #getReferenceFilter} for Java references, label completion for labels, and the
   * reference's own {@code getVariants()} otherwise. Returns the lookup strings that were added,
   * so word completion can avoid duplicating them.
   */
  private static Set<String> addReferenceVariants(final CompletionParameters parameters, CompletionResultSet result, final InheritorsHolder inheritors) {
    final Set<String> usedWords = new HashSet<String>();
    final PsiElement position = parameters.getPosition();
    final boolean first = parameters.getInvocationCount() <= 1;
    final boolean isSwitchLabel = SWITCH_LABEL.accepts(position);
    final boolean isAfterNew = JavaClassNameCompletionContributor.AFTER_NEW.accepts(position);
    final boolean pkgContext = JavaCompletionUtil.inSomePackage(position);
    final PsiType[] expectedTypes = ExpectedTypesGetter.getExpectedTypes(parameters.getPosition(), true);
    LegacyCompletionContributor.processReferences(parameters, result, new PairConsumer<PsiReference, CompletionResultSet>() {
      @Override
      public void consume(final PsiReference reference, final CompletionResultSet result) {
        if (reference instanceof PsiJavaReference) {
          final ElementFilter filter = getReferenceFilter(position);
          if (filter != null) {
            final PsiFile originalFile = parameters.getOriginalFile();
            // First invocation is strict (access checks, static-after-instance filtering); repeats are lenient.
            JavaCompletionProcessor.Options options = JavaCompletionProcessor.Options.DEFAULT_OPTIONS
              .withCheckAccess(first)
              .withFilterStaticAfterInstance(first)
              .withShowInstanceInStaticContext(!first);
            for (LookupElement element : JavaCompletionUtil.processJavaReference(position,
                                                                                 (PsiJavaReference)reference,
                                                                                 new ElementExtractorFilter(filter),
                                                                                 options,
                                                                                 result.getPrefixMatcher(), parameters)) {
              if (inheritors.alreadyProcessed(element)) {
                continue;
              }

              if (isSwitchLabel) {
                // Enum case labels get a trailing ':' plus re-indentation on insert.
                result.addElement(new IndentingDecorator(TailTypeDecorator.withTail(element, TailType.createSimpleTailType(':'))));
              }
              else {
                final LookupItem item = element.as(LookupItem.CLASS_CONDITION_KEY);
                if (originalFile instanceof PsiJavaCodeReferenceCodeFragment &&
                    !((PsiJavaCodeReferenceCodeFragment)originalFile).isClassesAccepted() && item != null) {
                  item.setTailType(TailType.NONE);
                }
                if (item instanceof JavaMethodCallElement) {
                  JavaMethodCallElement call = (JavaMethodCallElement)item;
                  final PsiMethod method = call.getObject();
                  if (method.getTypeParameters().length > 0) {
                    // Pre-bind a generic method's type parameters from the expected type, when one matches.
                    final PsiType returned = TypeConversionUtil.erasure(method.getReturnType());
                    PsiType matchingExpectation = returned == null ? null : ContainerUtil.find(expectedTypes, new Condition<PsiType>() {
                      @Override
                      public boolean value(PsiType type) {
                        return type.isAssignableFrom(returned);
                      }
                    });
                    if (matchingExpectation != null && SmartCompletionDecorator.hasUnboundTypeParams(method, matchingExpectation)) {
                      call.setInferenceSubstitutor(SmartCompletionDecorator.calculateMethodReturnTypeSubstitutor(method, matchingExpectation), position);
                    }
                  }
                }

                result.addElement(element);
              }
            }
          }
          return;
        }
        if (reference instanceof PsiLabelReference) {
          LabelReferenceCompletion.processLabelReference(result, (PsiLabelReference)reference);
          return;
        }

        final Object[] variants = reference.getVariants();
        //noinspection ConstantConditions
        if (variants == null) {
          LOG.error("Reference=" + reference);
        }
        for (Object completion : variants) {
          if (completion == null) {
            LOG.error("Position=" + position + "\n;Reference=" + reference + "\n;variants=" + Arrays.toString(variants));
          }
          if (completion instanceof LookupElement && !inheritors.alreadyProcessed((LookupElement)completion)) {
            usedWords.add(((LookupElement)completion).getLookupString());
            result.addElement((LookupElement)completion);
          }
          else if (completion instanceof PsiClass) {
            for (JavaPsiClassReferenceElement item : JavaClassNameCompletionContributor.createClassLookupItems(
              (PsiClass)completion, isAfterNew, JavaClassNameInsertHandler.JAVA_CLASS_INSERT_HANDLER,
              new Condition<PsiClass>() {
                @Override
                public boolean value(PsiClass psiClass) {
                  return !inheritors.alreadyProcessed(psiClass) &&
                         JavaCompletionUtil.isSourceLevelAccessible(position, psiClass, pkgContext);
                }
              })) {
              usedWords.add(item.getLookupString());
              result.addElement(item);
            }
          }
          else {
            //noinspection deprecation
            LookupElement element = LookupItemUtil.objectToLookupItem(completion);
            usedWords.add(element.getLookupString());
            result.addElement(element);
          }
        }
      }
    });
    return usedWords;
  }

  /** Adds keyword suggestions, but only ones whose lookup string starts with the typed prefix (no middle matching). */
  private static void addKeywords(CompletionParameters parameters, final CompletionResultSet result) {
    Consumer<LookupElement> noMiddleMatches = new Consumer<LookupElement>() {
      @Override
      public void consume(LookupElement element) {
        if (element.getLookupString().startsWith(result.getPrefixMatcher().getPrefix())) {
          result.addElement(element);
        }
      }
    };
    JavaKeywordCompletion.addKeywords(parameters, noMiddleMatches);
  }

  /**
   * Whether class-name completion may run at this position. Qualified references, package names,
   * switch labels, qualified 'new', and after-primitive positions never allow it; some contexts
   * (non-reference parents, imports) allow it only on a second invocation.
   */
  static boolean isClassNamePossible(CompletionParameters parameters) {
    boolean isSecondCompletion = parameters.getInvocationCount() >= 2;

    PsiElement position = parameters.getPosition();
    if (JavaKeywordCompletion.isInstanceofPlace(position)) return false;

    final PsiElement parent = position.getParent();
    if (!(parent instanceof PsiJavaCodeReferenceElement)) return isSecondCompletion;
    if (((PsiJavaCodeReferenceElement)parent).getQualifier() != null) return isSecondCompletion;

    if (parent instanceof PsiJavaCodeReferenceElementImpl &&
        ((PsiJavaCodeReferenceElementImpl)parent).getKind(parent.getContainingFile()) == PsiJavaCodeReferenceElementImpl.PACKAGE_NAME_KIND) {
      return false;
    }

    PsiElement grand = parent.getParent();
    if (grand instanceof PsiSwitchLabelStatement) {
      return false;
    }

    if (psiElement().inside(PsiImportStatement.class).accepts(parent)) {
      return isSecondCompletion;
    }

    if (grand instanceof PsiAnonymousClass) {
      grand = grand.getParent();
    }
    if (grand instanceof PsiNewExpression && ((PsiNewExpression)grand).getQualifier() != null) {
      return false;
    }

    if (JavaKeywordCompletion.isAfterPrimitiveOrArrayType(position)) {
      return false;
    }

    return true;
  }

  /** Class-name completion needs at least a non-empty prefix to avoid dumping every class. */
  public static boolean mayStartClassName(CompletionResultSet result) {
    return StringUtil.isNotEmpty(result.getPrefixMatcher().getPrefix());
  }

  /**
   * Completes attribute names inside an annotation's parameter list. Also offers classes/constants
   * when the annotation has no 'value' attribute (so the position may actually hold a value), and
   * skips attributes already present. The insert handler appends ' = ', and if an anonymous value
   * already follows, names it with the default 'value' attribute first.
   */
  private static void completeAnnotationAttributeName(CompletionResultSet result,
                                                      PsiElement insertedElement,
                                                      CompletionParameters parameters) {
    PsiNameValuePair pair = PsiTreeUtil.getParentOfType(insertedElement, PsiNameValuePair.class);
    PsiAnnotationParameterList parameterList = (PsiAnnotationParameterList)assertNotNull(pair).getParent();
    PsiAnnotation anno = (PsiAnnotation)parameterList.getParent();
    boolean showClasses = psiElement().afterLeaf("(").accepts(insertedElement);
    PsiClass annoClass = null;
    final PsiJavaCodeReferenceElement referenceElement = anno.getNameReferenceElement();
    if (referenceElement != null) {
      final PsiElement element = referenceElement.resolve();
      if (element instanceof PsiClass) {
        annoClass = (PsiClass)element;
        if (annoClass.findMethodsByName("value", false).length == 0) {
          showClasses = false;
        }
      }
    }

    if (showClasses && insertedElement.getParent() instanceof PsiReferenceExpression) {
      final Set<LookupElement> set = JavaCompletionUtil.processJavaReference(
        insertedElement,
        (PsiJavaReference)insertedElement.getParent(),
        new ElementExtractorFilter(createAnnotationFilter(insertedElement)),
        JavaCompletionProcessor.Options.DEFAULT_OPTIONS,
        result.getPrefixMatcher(), parameters);
      for (final LookupElement element : set) {
        result.addElement(element);
      }
      addAllClasses(parameters, result, new InheritorsHolder(result));
    }

    if (annoClass != null) {
      final PsiNameValuePair[] existingPairs = parameterList.getAttributes();

      methods: for (PsiMethod method : annoClass.getMethods()) {
        if (!(method instanceof PsiAnnotationMethod)) continue;

        final String attrName = method.getName();
        for (PsiNameValuePair existingAttr : existingPairs) {
          if (PsiTreeUtil.isAncestor(existingAttr, insertedElement, false)) break;
          // Skip attributes already specified (the unnamed value counts as 'value').
          if (Comparing.equal(existingAttr.getName(), attrName) ||
              PsiAnnotation.DEFAULT_REFERENCED_METHOD_NAME.equals(attrName) && existingAttr.getName() == null) continue methods;
        }
        LookupElementBuilder element = LookupElementBuilder.createWithIcon(method).withInsertHandler(new InsertHandler<LookupElement>() {
          @Override
          public void handleInsert(InsertionContext context, LookupElement item) {
            final Editor editor = context.getEditor();
            TailType.EQ.processTail(editor, editor.getCaretModel().getOffset());
            context.setAddCompletionChar(false);

            context.commitDocument();
            PsiAnnotationParameterList paramList =
              PsiTreeUtil.findElementOfClassAtOffset(context.getFile(), context.getStartOffset(), PsiAnnotationParameterList.class, false);
            if (paramList != null && paramList.getAttributes().length > 0 && paramList.getAttributes()[0].getName() == null) {
              // An anonymous value follows — make it an explicit 'value = ...' pair.
              int valueOffset = paramList.getAttributes()[0].getTextRange().getStartOffset();
              context.getDocument().insertString(valueOffset, PsiAnnotation.DEFAULT_REFERENCED_METHOD_NAME);
              TailType.EQ.processTail(editor, valueOffset + PsiAnnotation.DEFAULT_REFERENCED_METHOD_NAME.length());
            }
          }
        });

        PsiAnnotationMemberValue defaultValue = ((PsiAnnotationMethod)method).getDefaultValue();
        if (defaultValue != null) {
          element = element.withTailText(" default " + defaultValue.getText(), true);
        }

        result.addElement(element);
      }
    }
  }

  /**
   * Produces the advertisement line for the lookup: global static-member completion, smart
   * completion, or the second-smart-completion toArray/asList/chain hints, depending on context.
   */
  @Override
  public String advertise(@NotNull final CompletionParameters parameters) {
    if (!(parameters.getOriginalFile() instanceof PsiJavaFile)) return null;

    if (parameters.getCompletionType() == CompletionType.BASIC && parameters.getInvocationCount() > 0) {
      PsiElement position = parameters.getPosition();
      if (psiElement().withParent(psiReferenceExpression().withFirstChild(psiReferenceExpression().referencing(psiClass()))).accepts(position)) {
        if (CompletionUtil.shouldShowFeature(parameters, JavaCompletionFeatures.GLOBAL_MEMBER_NAME)) {
          final String shortcut = getActionShortcut(IdeActions.ACTION_CODE_COMPLETION);
          if (StringUtil.isNotEmpty(shortcut)) {
            return "Pressing " + shortcut + " twice without a class qualifier would show all accessible static methods";
          }
        }
      }
    }

    if (parameters.getCompletionType() != CompletionType.SMART && shouldSuggestSmartCompletion(parameters.getPosition())) {
      if (CompletionUtil.shouldShowFeature(parameters, CodeCompletionFeatures.EDITING_COMPLETION_SMARTTYPE_GENERAL)) {
        final String shortcut = getActionShortcut(IdeActions.ACTION_SMART_TYPE_COMPLETION);
        if (StringUtil.isNotEmpty(shortcut)) {
          return CompletionBundle.message("completion.smart.hint", shortcut);
        }
      }
    }

    if (parameters.getCompletionType() == CompletionType.SMART && parameters.getInvocationCount() == 1) {
      final PsiType[] psiTypes = ExpectedTypesGetter.getExpectedTypes(parameters.getPosition(), true);
      if (psiTypes.length > 0) {
        if (CompletionUtil.shouldShowFeature(parameters, JavaCompletionFeatures.SECOND_SMART_COMPLETION_TOAR)) {
          final String shortcut = getActionShortcut(IdeActions.ACTION_SMART_TYPE_COMPLETION);
          if (StringUtil.isNotEmpty(shortcut)) {
            for (final PsiType psiType : psiTypes) {
              final PsiType type = PsiUtil.extractIterableTypeParameter(psiType, false);
              if (type != null) {
                return CompletionBundle.message("completion.smart.aslist.hint", shortcut, type.getPresentableText());
              }
            }
          }
        }
        if (CompletionUtil.shouldShowFeature(parameters, JavaCompletionFeatures.SECOND_SMART_COMPLETION_ASLIST)) {
          final String shortcut = getActionShortcut(IdeActions.ACTION_SMART_TYPE_COMPLETION);
          if (StringUtil.isNotEmpty(shortcut)) {
            for (final PsiType psiType : psiTypes) {
              if (psiType instanceof PsiArrayType) {
                final PsiType componentType = ((PsiArrayType)psiType).getComponentType();
                if (!(componentType instanceof PsiPrimitiveType)) {
                  return CompletionBundle.message("completion.smart.toar.hint", shortcut, componentType.getPresentableText());
                }
              }
            }
          }
        }

        if (CompletionUtil.shouldShowFeature(parameters, JavaCompletionFeatures.SECOND_SMART_COMPLETION_CHAIN)) {
          final String shortcut = getActionShortcut(IdeActions.ACTION_SMART_TYPE_COMPLETION);
          if (StringUtil.isNotEmpty(shortcut)) {
            return CompletionBundle.message("completion.smart.chain.hint", shortcut);
          }
        }
      }
    }

    return null;
  }

  /**
   * Builds the "no suggestions" message (optionally followed by the advertisement). For SMART
   * completion, explains why nothing was found in terms of the expected type when possible.
   */
  @Override
  public String handleEmptyLookup(@NotNull final CompletionParameters parameters, final Editor editor) {
    if (!(parameters.getOriginalFile() instanceof PsiJavaFile)) return null;

    final String ad = advertise(parameters);
    final String suffix = ad == null ? "" : "; " + StringUtil.decapitalize(ad);
    if (parameters.getCompletionType() == CompletionType.SMART) {
      PsiExpression expression = PsiTreeUtil.getContextOfType(parameters.getPosition(), PsiExpression.class, true);
      if (expression instanceof PsiLiteralExpression) {
        return LangBundle.message("completion.no.suggestions") + suffix;
      }

      if (expression instanceof PsiInstanceOfExpression) {
        final PsiInstanceOfExpression instanceOfExpression = (PsiInstanceOfExpression)expression;
        if (PsiTreeUtil.isAncestor(instanceOfExpression.getCheckType(), parameters.getPosition(), false)) {
          return LangBundle.message("completion.no.suggestions") + suffix;
        }
      }

      final Set<PsiType> expectedTypes = JavaCompletionUtil.getExpectedTypes(parameters);
      if (expectedTypes != null) {
        PsiType type = expectedTypes.size() == 1 ? expectedTypes.iterator().next() : null;
        if (type != null) {
          final PsiType deepComponentType = type.getDeepComponentType();
          String expectedType = type.getPresentableText();
          if (expectedType.contains(CompletionUtil.DUMMY_IDENTIFIER_TRIMMED)) {
            return null;
          }

          if (deepComponentType instanceof PsiClassType) {
            if (((PsiClassType)deepComponentType).resolve() != null) {
              return CompletionBundle.message("completion.no.suggestions.of.type", expectedType) + suffix;
            }
            return CompletionBundle.message("completion.unknown.type", expectedType) + suffix;
          }
          if (!PsiType.NULL.equals(type)) {
            return CompletionBundle.message("completion.no.suggestions.of.type", expectedType) + suffix;
          }
        }
      }
    }
    return LangBundle.message("completion.no.suggestions") + suffix;
  }

  /** Typing ':' at a colon token auto-pops completion (used for case labels / labels). */
  @Override
  public boolean invokeAutoPopup(@NotNull PsiElement position, char typeChar) {
    return typeChar == ':' && JavaTokenType.COLON == position.getNode().getElementType();
  }

  /** Whether it makes sense to advertise smart-type completion at this position. */
  private static boolean shouldSuggestSmartCompletion(final PsiElement element) {
    if (shouldSuggestClassNameCompletion(element)) return false;

    final PsiElement parent = element.getParent();
    if (parent instanceof PsiReferenceExpression && ((PsiReferenceExpression)parent).getQualifier() != null) return false;
    if (parent instanceof PsiReferenceExpression && parent.getParent() instanceof PsiReferenceExpression) return true;

    return ExpectedTypesGetter.getExpectedTypes(element, false).length > 0;
  }

  /** Whether the position is one where class-name completion would be the natural choice instead. */
  private static boolean shouldSuggestClassNameCompletion(final PsiElement element) {
    if (element == null) return false;
    final PsiElement parent = element.getParent();
    if (parent == null) return false;
    return parent.getParent() instanceof PsiTypeElement ||
           parent.getParent() instanceof PsiExpressionStatement ||
           parent.getParent() instanceof PsiReferenceList;
  }

  /**
   * Pre-completion setup: auto-imports unresolved qualifiers near the caret (on explicit
   * invocations), and chooses the dummy identifier that is spliced into the file copy — appending
   * ';' when the caret looks like the start of a declaration, trimming it inside non-expression
   * references and annotations so the reference text is not perturbed.
   */
  @Override
  public void beforeCompletion(@NotNull final CompletionInitializationContext context) {
    final PsiFile file = context.getFile();

    if (file instanceof PsiJavaFile) {
      if (context.getInvocationCount() > 0) {
        autoImport(file, context.getStartOffset() - 1, context.getEditor());

        PsiElement leaf = file.findElementAt(context.getStartOffset() - 1);
        if (leaf != null) leaf = PsiTreeUtil.prevVisibleLeaf(leaf);

        PsiVariable variable = PsiTreeUtil.getParentOfType(leaf, PsiVariable.class);
        if (variable != null) {
          PsiTypeElement typeElement = variable.getTypeElement();
          if (typeElement != null) {
            PsiType type = typeElement.getType();
            if (type instanceof PsiClassType && ((PsiClassType)type).resolve() == null) {
              // The declared type is unresolved — try to import it too.
              autoImportReference(file, context.getEditor(), typeElement.getInnermostComponentReferenceElement());
            }
          }
        }
      }

      if (context.getCompletionType() == CompletionType.BASIC) {
        if (semicolonNeeded(context.getEditor(), file, context.getStartOffset())) {
          context.setDummyIdentifier(CompletionInitializationContext.DUMMY_IDENTIFIER.trim() + ";");
          return;
        }

        final PsiJavaCodeReferenceElement ref = PsiTreeUtil.findElementOfClassAtOffset(file, context.getStartOffset(), PsiJavaCodeReferenceElement.class, false);
        if (ref != null && !(ref instanceof PsiReferenceExpression)) {
          if (JavaSmartCompletionContributor.AFTER_NEW.accepts(ref)) {
            final PsiReferenceParameterList paramList = ref.getParameterList();
            if (paramList != null && paramList.getTextLength() > 0) {
              // Remember the type-argument list bounds so the constructor insert handler can reuse them.
              context.getOffsetMap().addOffset(ConstructorInsertHandler.PARAM_LIST_START, paramList.getTextRange().getStartOffset());
              context.getOffsetMap().addOffset(ConstructorInsertHandler.PARAM_LIST_END, paramList.getTextRange().getEndOffset());
            }
          }

          return;
        }

        final PsiElement element = file.findElementAt(context.getStartOffset());

        if (psiElement().inside(PsiAnnotation.class).accepts(element)) {
          return;
        }

        context.setDummyIdentifier(CompletionInitializationContext.DUMMY_IDENTIFIER_TRIMMED);
      }
    }
  }

  /**
   * Heuristic: should the dummy identifier carry a trailing ';' so the fragment parses as a
   * declaration start? Inspects the lexer tokens after the caret via the editor highlighter
   * (faster than re-parsing): a following '(', a non-ternary ':', or 'identifier =' all indicate
   * declaration/label-like positions.
   */
  public static boolean semicolonNeeded(final Editor editor, PsiFile file, final int startOffset) {
    final PsiJavaCodeReferenceElement ref = PsiTreeUtil.findElementOfClassAtOffset(file, startOffset, PsiJavaCodeReferenceElement.class, false);
    if (ref != null && !(ref instanceof PsiReferenceExpression)) {
      if (ref.getParent() instanceof PsiTypeElement) {
        return true;
      }
    }

    HighlighterIterator iterator = ((EditorEx)editor).getHighlighter().createIterator(startOffset);
    if (iterator.atEnd()) return false;

    if (iterator.getTokenType() == JavaTokenType.IDENTIFIER) {
      iterator.advance();
    }

    while (!iterator.atEnd() && ElementType.JAVA_COMMENT_OR_WHITESPACE_BIT_SET.contains(iterator.getTokenType())) {
      iterator.advance();
    }

    if (!iterator.atEnd() && (iterator.getTokenType() == JavaTokenType.LPARENTH)) {
      return true;
    }

    // ':' not belonging to a conditional expression → looks like a label or case.
    if (!iterator.atEnd() && (iterator.getTokenType() == JavaTokenType.COLON) &&
        null == PsiTreeUtil.findElementOfClassAtOffset(file, startOffset, PsiConditionalExpression.class, false)) {
      return true;
    }

    while (!iterator.atEnd() && ElementType.JAVA_COMMENT_OR_WHITESPACE_BIT_SET.contains(iterator.getTokenType())) {
      iterator.advance();
    }

    if (iterator.atEnd() || iterator.getTokenType() != JavaTokenType.IDENTIFIER) return false;
    iterator.advance();

    while (!iterator.atEnd() && ElementType.JAVA_COMMENT_OR_WHITESPACE_BIT_SET.contains(iterator.getTokenType())) {
      iterator.advance();
    }
    if (iterator.atEnd()) return false;

    return iterator.getTokenType() == JavaTokenType.EQ; // <caret> foo = something, we don't want the reference to be treated as a type
  }

  /**
   * Walks backwards through the document text from {@code offset} to find the qualifier
   * expression before a '.', and attempts to import its unresolved class.
   */
  private static void autoImport(@NotNull final PsiFile file, int offset, @NotNull final Editor editor) {
    final CharSequence text = editor.getDocument().getCharsSequence();
    while (offset > 0 && Character.isJavaIdentifierPart(text.charAt(offset))) offset--;
    if (offset <= 0) return;

    while (offset > 0 && Character.isWhitespace(text.charAt(offset))) offset--;
    if (offset <= 0 || text.charAt(offset) != '.') return;

    offset--;

    while (offset > 0 && Character.isWhitespace(text.charAt(offset))) offset--;
    if (offset <= 0) return;

    autoImportReference(file, editor, extractReference(PsiTreeUtil.findElementOfClassAtOffset(file, offset, PsiExpression.class, false)));
  }

  /**
   * Runs {@link ImportClassFix} on the outermost qualifier of {@code element} when it resolves to
   * nothing (and is not a method call), then commits the document.
   */
  private static void autoImportReference(@NotNull PsiFile file, @NotNull Editor editor, @Nullable PsiJavaCodeReferenceElement element) {
    if (element == null) return;

    while (true) {
      final PsiJavaCodeReferenceElement qualifier = extractReference(element.getQualifier());
      if (qualifier == null) break;

      element = qualifier;
    }
    if (!(element.getParent() instanceof PsiMethodCallExpression) && element.multiResolve(true).length == 0) {
      new ImportClassFix(element).doFix(editor, false, false);
      PsiDocumentManager.getInstance(file.getProject()).commitDocument(editor.getDocument());
    }
  }

  /** Unwraps an expression to the code reference it is (or, for a call, its method expression). */
  @Nullable
  private static PsiJavaCodeReferenceElement extractReference(@Nullable PsiElement expression) {
    if (expression instanceof PsiJavaCodeReferenceElement) {
      return (PsiJavaCodeReferenceElement)expression;
    }
    if (expression instanceof PsiMethodCallExpression) {
      return ((PsiMethodCallExpression)expression).getMethodExpression();
    }
    return null;
  }

  /**
   * Lookup decorator that re-indents the inserted item's line after insertion — used for enum
   * 'case' labels so the label lands at the code-style indent.
   */
  private static class IndentingDecorator extends LookupElementDecorator<LookupElement> {
    public IndentingDecorator(LookupElement delegate) {
      super(delegate);
    }

    @Override
    public void handleInsert(InsertionContext context) {
      super.handleInsert(context);
      Project project = context.getProject();
      Document document = context.getDocument();
      int lineStartOffset = DocumentUtil.getLineStartOffset(context.getStartOffset(), document);
      PsiDocumentManager.getInstance(project).commitDocument(document);
      CodeStyleManager.getInstance(project).adjustLineIndent(context.getFile(), lineStartOffset);
    }
  }
}
package de.mycrobase.ssim.ed.app; import org.apache.log4j.Logger; import com.jme3.app.Application; import com.jme3.app.state.AppState; import com.jme3.app.state.AppStateManager; import com.jme3.material.Material; import com.jme3.material.RenderState.BlendMode; import com.jme3.math.ColorRGBA; import com.jme3.math.FastMath; import com.jme3.math.Vector2f; import com.jme3.math.Vector3f; import com.jme3.renderer.queue.RenderQueue.Bucket; import com.jme3.scene.Geometry; import de.mycrobase.ssim.ed.mesh.CloudPlane; import de.mycrobase.ssim.ed.sky.CPUCloudProcessor; import de.mycrobase.ssim.ed.sky.CloudProcessor; import de.mycrobase.ssim.ed.sky.GPUCloudProcessor; import de.mycrobase.ssim.ed.util.TempVars; import de.mycrobase.ssim.ed.weather.Weather; /** * <b>Higher layer</b> {@link AppState} responsible for cloud rendering. * * @author cn */ public class CloudAppState extends BasicAppState { private static final Logger logger = Logger.getLogger(CloudAppState.class); private static final float UpdateInterval = 10f; // in seconds private static final boolean UseGPU = true; private static final Vector3f CloudPlaneTranslation = new Vector3f(0, 7000, 0); private static final float CloudPlaneSize = 20000f; // in m private static final float CloudPlaneHeightScale = 0f; // in m /** * x: scale the wind/displacement component of cloudShift * y: scale the time component of cloudShift */ private static final Vector2f ShiftScale = new Vector2f(0.001f, 0.003f); // exists only while AppState is attached private CloudProcessor cloudProcessor; private Geometry geom; private int texSize; private int cloudZoom; private Vector3f cloudShift; public CloudAppState() { super(UpdateInterval); } @Override public void initialize(AppStateManager stateManager, Application baseApp) { super.initialize(stateManager, baseApp); evalSettings(); logger.info(String.format("Cloud texture size: %d", texSize)); if(UseGPU) { cloudProcessor = new GPUCloudProcessor(getApp().getAssetManager(), texSize, 
UpdateInterval, getApp().getExecutor()); } else { cloudProcessor = new CPUCloudProcessor(texSize, UpdateInterval, getApp().getExecutor()); } getApp().getViewPort().addProcessor(cloudProcessor); cloudProcessor.setZoom(cloudZoom); //Quad cloudQuad = new Quad(10, 10); CloudPlane cloudQuad = new CloudPlane(CloudPlaneSize, CloudPlaneHeightScale, CloudPlaneTranslation); geom = new Geometry("CloudPlane", cloudQuad); //Material mat = new Material(getApp().getAssetManager(), "Common/MatDefs/Misc/Unshaded.j3md"); //mat.setColor("Color", ColorRGBA.Orange); //mat.getAdditionalRenderState().setWireframe(true); //mat.getAdditionalRenderState().setFaceCullMode(FaceCullMode.Off); Material mat = new Material(getApp().getAssetManager(), "shaders/CloudFinal.j3md"); mat.setTexture("ColorMap", cloudProcessor.getCloudTex()); //mat.getAdditionalRenderState().setFaceCullMode(FaceCullMode.Off); mat.getAdditionalRenderState().setBlendMode(BlendMode.Alpha); geom.setMaterial(mat); geom.setQueueBucket(Bucket.Transparent); // update before attaching since all SceneProcessors are initialized // then (and the CP requires some variables set) updateCloudParameters(0f); getSkyAppState().getSkyNode().attachChild(geom); } @Override protected void intervalUpdate(float dt) { updateCloudParameters(dt); } @Override public void cleanup() { super.cleanup(); if(getSkyAppState() != null && getSkyAppState().getSkyNode() != null) { getSkyAppState().getSkyNode().detachChild(geom); } getApp().getViewPort().removeProcessor(cloudProcessor); cloudProcessor = null; geom = null; } private void updateCloudParameters(float dt) { cloudProcessor.setCloudCover(getWeather().getFloat("cloud.cover")); cloudProcessor.setCloudSharpness(getWeather().getFloat("cloud.sharpness")); cloudProcessor.setWayFactor(getWeather().getFloat("cloud.way-factor")); TempVars vars = TempVars.get(); // Simply pass the sun light's current color to the cloudProcessor who // will pass it to the renderer that uses it to give the clouds color // some 
touch of the sun light color and not just pure white as base ColorRGBA sunLightColor = getSkyAppState().getSkyGradient().getSunLightColor(vars.color1); //ColorRGBA sunLightColor = ColorRGBA.White; //System.out.println(sunLightColor); cloudProcessor.setSunLightColor(sunLightColor); // The following calculation determines the position of the sun in the // virtual cloud heightfield grid (in pixels) - this position has only // the purpose to produce good looking results during render and highly // depends on the renderer implementation, there are no relations to // physics etc so the formula below might be tweaked { Vector3f vToSun = getSkyAppState().getSun().getSunPosition(vars.vect1); vToSun.set(vToSun.x, -vToSun.z, vToSun.y); // from J3D to Sky float x = vToSun.x; // from 1f to -1f float y = vToSun.y; // from 1f to -1f final float convFactor = 4f; Vector3f sunPosition = vars.vect2.set(x * texSize * convFactor, y * texSize * convFactor, 5000); // add the virtual origin for the cloud heightfield (center) sunPosition.addLocal(.5f * texSize, .5f * texSize, 0); //System.out.println(vToSun+" "+x+" "+y); cloudProcessor.setSunPosition(sunPosition); } // To reflect the impact of wind (shift) and change of the cloud face // and structure over time (permutation) we use this 3D (x,y: shift, // z: permutation) "shift" vector since it's all implemented as a shift // of the noise parameters if(cloudShift == null) { cloudShift = new Vector3f(Vector3f.ZERO); } cloudProcessor.setShift(cloudShift); { float direction = getWeather().getFloat("wind.direction"); float strength = getWeather().getFloat("wind.strength"); // windVelo will be: direction into which wind is blowing and magnitude // reflects strength of wind Vector3f windVelo = vars.vect1.set( (float) Math.sin(direction * FastMath.DEG_TO_RAD), 0, -(float) Math.cos(direction * FastMath.DEG_TO_RAD)); // We do not negate here since e.g. 
an x++ in cloudShift will create // an animation looking like an x--, so we would have to double negate //windVelo.negateLocal(); windVelo.multLocal(strength*0.514f); // in m/s // derive a shift from the windVelo, z component is 1.0 to reflect // the change over time Vector3f cloudShiftAdd = vars.vect2.set(windVelo.x, -windVelo.z, 1f); cloudShiftAdd.multLocal(dt); cloudShiftAdd.multLocal(ShiftScale.x, ShiftScale.x, ShiftScale.y); cloudShift.addLocal(cloudShiftAdd); } vars.release(); } private void evalSettings() { int detailLevel = getApp().getSettingsManager().getInteger("engine.detail.level"); switch(detailLevel) { case 0: { texSize = 256; cloudZoom = 40; break; } case 1: { texSize = 512; cloudZoom = 80; break; } case 2: { texSize = 1024; cloudZoom = 160; break; } } } private Weather getWeather() { return getState(WeatherAppState.class).getWeather(); } private SkyAppState getSkyAppState() { return getState(SkyAppState.class); } }
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.security.authc.pki;

import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.client.security.AuthenticateResponse;
import org.elasticsearch.client.security.PutRoleMappingRequest;
import org.elasticsearch.client.security.RefreshPolicy;
import org.elasticsearch.client.security.AuthenticateResponse.RealmInfo;
import org.elasticsearch.client.security.DeleteRoleMappingRequest;
import org.elasticsearch.client.security.support.expressiondsl.fields.FieldRoleMapperExpression;
import org.elasticsearch.client.security.DelegatePkiAuthenticationRequest;
import org.elasticsearch.client.security.DelegatePkiAuthenticationResponse;
import org.elasticsearch.client.security.InvalidateTokenRequest;
import org.elasticsearch.client.security.InvalidateTokenResponse;
import org.elasticsearch.client.security.user.User;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.SecurityIntegTestCase;
import org.elasticsearch.test.SecuritySettingsSourceField;
import org.elasticsearch.xpack.core.XPackSettings;
import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheRequestBuilder;
import org.elasticsearch.xpack.core.security.authc.support.Hasher;
import org.junit.Before;
import org.elasticsearch.test.SecuritySettingsSource;

import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.cert.CertificateFactory;
import java.security.cert.X509Certificate;
import java.util.Collections;
import java.util.Arrays;

import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.emptyCollectionOf;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.startsWith;

/**
 * Integration tests for PKI authentication delegation: a user holding the
 * delegate_pki privilege submits a client certificate chain and receives an
 * access token on behalf of the certificate's subject. Three PKI realms are
 * configured below; only pki3 both allows delegation and has a matching
 * username pattern, so successful delegation is expected to land there.
 */
public class PkiAuthDelegationIntegTests extends SecurityIntegTestCase {

    @Override
    public Settings nodeSettings(int nodeOrdinal) {
        return Settings.builder()
            .put(super.nodeSettings(nodeOrdinal))
            // delegation issues tokens, so the token service must be on
            .put(XPackSettings.TOKEN_SERVICE_ENABLED_SETTING.getKey(), true)
            // pki1 does not allow delegation
            .put("xpack.security.authc.realms.pki.pki1.order", "2")
            .putList("xpack.security.authc.realms.pki.pki1.certificate_authorities",
                getDataPath("/org/elasticsearch/xpack/security/action/pki_delegation/testRootCA.crt").toString())
            .put("xpack.security.authc.realms.pki.pki1.files.role_mapping", getDataPath("role_mapping.yml"))
            // pki2 allows delegation but has a non-matching username pattern
            .put("xpack.security.authc.realms.pki.pki2.order", "3")
            .putList("xpack.security.authc.realms.pki.pki2.certificate_authorities",
                getDataPath("/org/elasticsearch/xpack/security/action/pki_delegation/testRootCA.crt").toString())
            .put("xpack.security.authc.realms.pki.pki2.username_pattern", "CN=MISMATCH(.*?)(?:,|$)")
            .put("xpack.security.authc.realms.pki.pki2.delegation.enabled", true)
            .put("xpack.security.authc.realms.pki.pki2.files.role_mapping", getDataPath("role_mapping.yml"))
            // pki3 allows delegation and the username pattern (default) matches
            .put("xpack.security.authc.realms.pki.pki3.order", "4")
            .putList("xpack.security.authc.realms.pki.pki3.certificate_authorities",
                getDataPath("/org/elasticsearch/xpack/security/action/pki_delegation/testRootCA.crt").toString())
            .put("xpack.security.authc.realms.pki.pki3.delegation.enabled", true)
            .put("xpack.security.authc.realms.pki.pki3.files.role_mapping", getDataPath("role_mapping.yml"))
            .build();
    }

    @Override
// NOTE(review): interior of PkiAuthDelegationIntegTests; tokens unchanged, comments/formatting added.
    // All test users share one password, hashed with a randomly chosen hasher.
    protected String configUsers() {
        final String usersPasswdHashed = new String(Hasher.resolve(
            randomFrom("pbkdf2", "pbkdf2_1000", "bcrypt", "bcrypt9")).hash(SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING));
        return super.configUsers() +
            "user_manage:" + usersPasswdHashed + "\n" +
            "user_manage_security:" + usersPasswdHashed + "\n" +
            "user_delegate_pki:" + usersPasswdHashed + "\n" +
            "user_all:" + usersPasswdHashed + "\n" +
            "my_kibana_system:" + usersPasswdHashed + "\n";
    }

    @Override
    protected String configRoles() {
        // one role per cluster privilege under test; only delegate_pki/all should permit delegation
        return super.configRoles() + "\n" +
            "role_manage:\n" +
            "  cluster: [ manage ]\n" +
            "\n" +
            "role_manage_security:\n" +
            "  cluster: [ manage_security ]\n" +
            "\n" +
            "role_delegate_pki:\n" +
            "  cluster: [ delegate_pki ]\n" +
            "\n" +
            "role_all:\n" +
            "  cluster: [ all ]\n";
    }

    @Override
    protected String configUsersRoles() {
        return super.configUsersRoles() + "\n" +
            "role_manage:user_manage\n" +
            "role_manage_security:user_manage_security\n" +
            "role_delegate_pki:user_delegate_pki\n" +
            "role_all:user_all\n" +
            "kibana_system:my_kibana_system\n";
    }

    @Override
    protected boolean transportSSLEnabled() {
        return true;
    }

    @Override
    protected boolean addMockHttpTransport() {
        return false; // enable http
    }

    // start each test with cold realm caches so authentications exercise the full path
    @Before
    void clearRealmCache() {
        new ClearRealmCacheRequestBuilder(client()).get();
    }

    /**
     * Happy path: each authorized delegatee obtains a token for the client
     * certificate's subject, and authenticating with that token lands in the
     * pki3 realm (the only one that allows delegation and matches the DN).
     */
    public void testDelegateThenAuthenticate() throws Exception {
        final X509Certificate clientCertificate = readCertForPkiDelegation("testClient.crt");
        final X509Certificate intermediateCA = readCertForPkiDelegation("testIntermediateCA.crt");
        final X509Certificate rootCA = readCertForPkiDelegation("testRootCA.crt");
        DelegatePkiAuthenticationRequest delegatePkiRequest;
        // trust root is optional
        if (randomBoolean()) {
            delegatePkiRequest = new DelegatePkiAuthenticationRequest(Arrays.asList(clientCertificate, intermediateCA));
        } else {
            delegatePkiRequest = new DelegatePkiAuthenticationRequest(Arrays.asList(clientCertificate, intermediateCA, rootCA));
        }
        try (RestHighLevelClient restClient = new TestRestHighLevelClient()) {
            for (String delegateeUsername : Arrays.asList("user_all", "user_delegate_pki", "my_kibana_system")) {
                // delegate
                RequestOptions.Builder optionsBuilder = RequestOptions.DEFAULT.toBuilder();
                optionsBuilder.addHeader("Authorization", basicAuthHeaderValue(delegateeUsername,
                    SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING));
                DelegatePkiAuthenticationResponse delegatePkiResponse =
                    restClient.security().delegatePkiAuthentication(delegatePkiRequest, optionsBuilder.build());
                String token = delegatePkiResponse.getAccessToken();
                assertThat(token, is(notNullValue()));
                // authenticate
                optionsBuilder = RequestOptions.DEFAULT.toBuilder();
                optionsBuilder.addHeader("Authorization", "Bearer " + token);
                AuthenticateResponse resp = restClient.security().authenticate(optionsBuilder.build());
                User user = resp.getUser();
                assertThat(user, is(notNullValue()));
                assertThat(user.getUsername(), is("Elasticsearch Test Client"));
                RealmInfo authnRealm = resp.getAuthenticationRealm();
                assertThat(authnRealm, is(notNullValue()));
                assertThat(authnRealm.getName(), is("pki3"));
                assertThat(authnRealm.getType(), is("pki"));
                assertThat(resp.getAuthenticationType(), is("token"));
            }
        }
    }

    /**
     * A delegated token carries the expected pki_* metadata, can be invalidated,
     * and is rejected ("token expired") once invalidated.
     */
    public void testTokenInvalidate() throws Exception {
        final X509Certificate clientCertificate = readCertForPkiDelegation("testClient.crt");
        final X509Certificate intermediateCA = readCertForPkiDelegation("testIntermediateCA.crt");
        final X509Certificate rootCA = readCertForPkiDelegation("testRootCA.crt");
        DelegatePkiAuthenticationRequest delegatePkiRequest;
        // trust root is optional
        if (randomBoolean()) {
            delegatePkiRequest = new DelegatePkiAuthenticationRequest(Arrays.asList(clientCertificate, intermediateCA));
        } else {
            delegatePkiRequest = new DelegatePkiAuthenticationRequest(Arrays.asList(clientCertificate, intermediateCA, rootCA));
        }
        try (RestHighLevelClient restClient = new TestRestHighLevelClient()) {
            String delegateeUsername = randomFrom("user_all", "user_delegate_pki", "my_kibana_system");
            // delegate
            RequestOptions.Builder optionsBuilder = RequestOptions.DEFAULT.toBuilder();
            optionsBuilder.addHeader("Authorization", basicAuthHeaderValue(delegateeUsername,
                SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING));
            DelegatePkiAuthenticationResponse delegatePkiResponse =
                restClient.security().delegatePkiAuthentication(delegatePkiRequest, optionsBuilder.build());
            String token = delegatePkiResponse.getAccessToken();
            assertThat(token, is(notNullValue()));
            // authenticate
            optionsBuilder = RequestOptions.DEFAULT.toBuilder();
            optionsBuilder.addHeader("Authorization", "Bearer " + token);
            AuthenticateResponse resp = restClient.security().authenticate(optionsBuilder.build());
            User user = resp.getUser();
            assertThat(user, is(notNullValue()));
            assertThat(user.getUsername(), is("Elasticsearch Test Client"));
            assertThat(user.getMetadata().get("pki_dn"), is(notNullValue()));
            assertThat(user.getMetadata().get("pki_dn"), is("O=org, OU=Elasticsearch, CN=Elasticsearch Test Client"));
            assertThat(user.getMetadata().get("pki_delegated_by_user"), is(notNullValue()));
            assertThat(user.getMetadata().get("pki_delegated_by_user"), is(delegateeUsername));
            assertThat(user.getMetadata().get("pki_delegated_by_realm"), is(notNullValue()));
            assertThat(user.getMetadata().get("pki_delegated_by_realm"), is("file"));
            // no roles because no role mappings
            assertThat(user.getRoles(), is(emptyCollectionOf(String.class)));
            RealmInfo authnRealm = resp.getAuthenticationRealm();
            assertThat(authnRealm, is(notNullValue()));
            assertThat(authnRealm.getName(), is("pki3"));
            assertThat(authnRealm.getType(), is("pki"));
            assertThat(resp.getAuthenticationType(), is("token"));
            // invalidate
            InvalidateTokenRequest invalidateRequest = new InvalidateTokenRequest(token, null, null, null);
            optionsBuilder = RequestOptions.DEFAULT.toBuilder();
            optionsBuilder.addHeader("Authorization", basicAuthHeaderValue(delegateeUsername,
                SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING));
            InvalidateTokenResponse invalidateResponse =
                restClient.security().invalidateToken(invalidateRequest, optionsBuilder.build());
            assertThat(invalidateResponse.getInvalidatedTokens(), is(1));
            assertThat(invalidateResponse.getErrorsCount(), is(0));
            // failed authenticate
            ElasticsearchStatusException e1 = expectThrows(ElasticsearchStatusException.class, () -> restClient.security()
                .authenticate(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "Bearer " + token).build()));
            assertThat(e1.getMessage(), is("Elasticsearch exception [type=security_exception, reason=token expired]"));
        }
    }

    /**
     * Users lacking the delegate_pki privilege (manage, manage_security) are
     * rejected with a security_exception.
     */
    public void testDelegateUnauthorized() throws Exception {
        final X509Certificate clientCertificate = readCertForPkiDelegation("testClient.crt");
        final X509Certificate intermediateCA = readCertForPkiDelegation("testIntermediateCA.crt");
        final X509Certificate rootCA = readCertForPkiDelegation("testRootCA.crt");
        DelegatePkiAuthenticationRequest delegatePkiRequest;
        // trust root is optional
        if (randomBoolean()) {
            delegatePkiRequest = new DelegatePkiAuthenticationRequest(Arrays.asList(clientCertificate, intermediateCA));
        } else {
            delegatePkiRequest = new DelegatePkiAuthenticationRequest(Arrays.asList(clientCertificate, intermediateCA, rootCA));
        }
        try (RestHighLevelClient restClient = new TestRestHighLevelClient()) {
            for (String delegateeUsername : Arrays.asList("user_manage", "user_manage_security")) {
                RequestOptions.Builder optionsBuilder = RequestOptions.DEFAULT.toBuilder();
                optionsBuilder.addHeader("Authorization", basicAuthHeaderValue(delegateeUsername,
                    SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING));
                ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> {
                    restClient.security().delegatePkiAuthentication(delegatePkiRequest, optionsBuilder.build());
                });
                assertThat(e.getMessage(), startsWith("Elasticsearch exception [type=security_exception, reason=action" +
                    " [cluster:admin/xpack/security/delegate_pki] is unauthorized for user"));
            }
        }
    }
// NOTE(review): interior + closing brace of PkiAuthDelegationIntegTests; tokens unchanged, comments/formatting added.
    /**
     * Role mappings keyed on the delegated-by metadata (user and realm) are
     * applied to the delegated authentication, so the resulting token carries
     * the mapped roles.
     */
    public void testDelegatePkiWithRoleMapping() throws Exception {
        X509Certificate clientCertificate = readCertForPkiDelegation("testClient.crt");
        X509Certificate intermediateCA = readCertForPkiDelegation("testIntermediateCA.crt");
        X509Certificate rootCA = readCertForPkiDelegation("testRootCA.crt");
        DelegatePkiAuthenticationRequest delegatePkiRequest;
        // trust root is optional
        if (randomBoolean()) {
            delegatePkiRequest = new DelegatePkiAuthenticationRequest(Arrays.asList(clientCertificate, intermediateCA));
        } else {
            delegatePkiRequest = new DelegatePkiAuthenticationRequest(Arrays.asList(clientCertificate, intermediateCA, rootCA));
        }
        final RequestOptions testUserOptions = RequestOptions.DEFAULT.toBuilder()
            .addHeader("Authorization", basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME,
                new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray())))
            .build();
        try (RestHighLevelClient restClient = new TestRestHighLevelClient()) {
            // put role mappings for delegated PKI
            PutRoleMappingRequest request = new PutRoleMappingRequest("role_by_delegated_user", true,
                Collections.singletonList("role_by_delegated_user"), Collections.emptyList(),
                new FieldRoleMapperExpression("metadata.pki_delegated_by_user", "test_user"), null, RefreshPolicy.IMMEDIATE);
            restClient.security().putRoleMapping(request, testUserOptions);
            request = new PutRoleMappingRequest("role_by_delegated_realm", true,
                Collections.singletonList("role_by_delegated_realm"), Collections.emptyList(),
                new FieldRoleMapperExpression("metadata.pki_delegated_by_realm", "file"), null, RefreshPolicy.IMMEDIATE);
            restClient.security().putRoleMapping(request, testUserOptions);
            // delegate
            DelegatePkiAuthenticationResponse delegatePkiResponse =
                restClient.security().delegatePkiAuthentication(delegatePkiRequest, testUserOptions);
            // authenticate
            AuthenticateResponse resp = restClient.security().authenticate(RequestOptions.DEFAULT.toBuilder()
                .addHeader("Authorization", "Bearer " + delegatePkiResponse.getAccessToken()).build());
            User user = resp.getUser();
            assertThat(user, is(notNullValue()));
            assertThat(user.getUsername(), is("Elasticsearch Test Client"));
            assertThat(user.getMetadata().get("pki_dn"), is(notNullValue()));
            assertThat(user.getMetadata().get("pki_dn"), is("O=org, OU=Elasticsearch, CN=Elasticsearch Test Client"));
            assertThat(user.getMetadata().get("pki_delegated_by_user"), is(notNullValue()));
            assertThat(user.getMetadata().get("pki_delegated_by_user"), is("test_user"));
            assertThat(user.getMetadata().get("pki_delegated_by_realm"), is(notNullValue()));
            assertThat(user.getMetadata().get("pki_delegated_by_realm"), is("file"));
            // assert roles
            assertThat(user.getRoles(), containsInAnyOrder("role_by_delegated_user", "role_by_delegated_realm"));
            RealmInfo authnRealm = resp.getAuthenticationRealm();
            assertThat(authnRealm, is(notNullValue()));
            assertThat(authnRealm.getName(), is("pki3"));
            assertThat(authnRealm.getType(), is("pki"));
            assertThat(resp.getAuthenticationType(), is("token"));
            // delete role mappings for delegated PKI
            restClient.security().deleteRoleMapping(new DeleteRoleMappingRequest("role_by_delegated_user", RefreshPolicy.IMMEDIATE),
                testUserOptions);
            restClient.security().deleteRoleMapping(new DeleteRoleMappingRequest("role_by_delegated_realm", RefreshPolicy.IMMEDIATE),
                testUserOptions);
        }
    }

    /**
     * Malformed chains are rejected: incomplete chain and unknown ("bogus")
     * certificates fail authentication server-side; an out-of-order chain is
     * rejected by client-side request validation.
     */
    public void testIncorrectCertChain() throws Exception {
        X509Certificate clientCertificate = readCertForPkiDelegation("testClient.crt");
        X509Certificate intermediateCA = readCertForPkiDelegation("testIntermediateCA.crt");
        X509Certificate bogusCertificate = readCertForPkiDelegation("bogus.crt");
        RequestOptions.Builder optionsBuilder = RequestOptions.DEFAULT.toBuilder();
        optionsBuilder.addHeader("Authorization", basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME,
            new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray())));
        try (RestHighLevelClient restClient = new TestRestHighLevelClient()) {
            // incomplete cert chain
            DelegatePkiAuthenticationRequest delegatePkiRequest1 = new DelegatePkiAuthenticationRequest(Arrays.asList(clientCertificate));
            ElasticsearchStatusException e1 = expectThrows(ElasticsearchStatusException.class,
                () -> restClient.security().delegatePkiAuthentication(delegatePkiRequest1, optionsBuilder.build()));
            assertThat(e1.getMessage(), is("Elasticsearch exception [type=security_exception, reason=unable to authenticate user" +
                " [O=org, OU=Elasticsearch, CN=Elasticsearch Test Client] for action [cluster:admin/xpack/security/delegate_pki]]"));
            // swapped order
            DelegatePkiAuthenticationRequest delegatePkiRequest2 = new DelegatePkiAuthenticationRequest(
                Arrays.asList(intermediateCA, clientCertificate));
            ValidationException e2 = expectThrows(ValidationException.class,
                () -> restClient.security().delegatePkiAuthentication(delegatePkiRequest2, optionsBuilder.build()));
            assertThat(e2.getMessage(), is("Validation Failed: 1: certificates chain must be an ordered chain;"));
            // bogus certificate
            DelegatePkiAuthenticationRequest delegatePkiRequest3 = new DelegatePkiAuthenticationRequest(Arrays.asList(bogusCertificate));
            ElasticsearchStatusException e3 = expectThrows(ElasticsearchStatusException.class,
                () -> restClient.security().delegatePkiAuthentication(delegatePkiRequest3, optionsBuilder.build()));
            assertThat(e3.getMessage(), startsWith("Elasticsearch exception [type=security_exception, reason=unable to authenticate user"));
        }
    }

    /**
     * Loads an X.509 certificate from the pki_delegation test-resources directory.
     */
    private X509Certificate readCertForPkiDelegation(String certName) throws Exception {
        Path path = getDataPath("/org/elasticsearch/xpack/security/action/pki_delegation/" + certName);
        try (InputStream in = Files.newInputStream(path)) {
            CertificateFactory factory = CertificateFactory.getInstance("X.509");
            return (X509Certificate) factory.generateCertificate(in);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.ml.math.primitives.vector; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.io.Serializable; import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.NoSuchElementException; import java.util.Spliterator; import java.util.function.Consumer; import java.util.function.IntToDoubleFunction; import org.apache.ignite.lang.IgniteUuid; import org.apache.ignite.ml.math.exceptions.CardinalityException; import org.apache.ignite.ml.math.exceptions.IndexException; import org.apache.ignite.ml.math.exceptions.UnsupportedOperationException; import org.apache.ignite.ml.math.functions.Functions; import org.apache.ignite.ml.math.functions.IgniteBiFunction; import org.apache.ignite.ml.math.functions.IgniteDoubleFunction; import org.apache.ignite.ml.math.functions.IgniteIntDoubleToDoubleBiFunction; import org.apache.ignite.ml.math.primitives.matrix.Matrix; import org.apache.ignite.ml.math.primitives.matrix.impl.ViewMatrix; import org.apache.ignite.ml.math.primitives.vector.impl.VectorView; import org.jetbrains.annotations.NotNull; /** * This class provides a helper implementation of the 
{@link Vector} * interface to minimize the effort required to implement it. * Subclasses may override some of the implemented methods if a more * specific or optimized implementation is desirable. */ public abstract class AbstractVector implements Vector { /** Vector storage implementation. */ private VectorStorage sto; /** Meta attribute storage. */ private Map<String, Object> meta = new HashMap<>(); /** Vector's GUID. */ private IgniteUuid guid = IgniteUuid.randomUuid(); /** Cached value for length squared. */ private double lenSq = 0.0; /** Maximum cached element. */ private Element maxElm = null; /** Minimum cached element. */ private Element minElm = null; /** Readonly flag (false by default). */ private boolean readOnly = false; /** Read-only error message. */ private static final String RO_MSG = "Vector is read-only."; /** */ private void ensureReadOnly() { if (readOnly) throw new UnsupportedOperationException(RO_MSG); } /** * @param sto Storage. */ public AbstractVector(VectorStorage sto) { this(false, sto); } /** * @param readOnly Is read only. * @param sto Storage. */ public AbstractVector(boolean readOnly, VectorStorage sto) { assert sto != null; this.readOnly = readOnly; this.sto = sto; } /** * */ public AbstractVector() { // No-op. } /** * Set storage. * * @param sto Storage. */ protected void setStorage(VectorStorage sto) { this.sto = sto; } /** * @param i Index. * @param v Value. */ protected void storageSet(int i, double v) { ensureReadOnly(); sto.set(i, v); // Reset cached values. lenSq = 0.0; maxElm = minElm = null; } /** * Sets serializable value. * * @param i Index. * @param v Value. */ protected void storageSetRaw(int i, Serializable v) { ensureReadOnly(); sto.setRaw(i, v); // Reset cached values. lenSq = 0.0; maxElm = minElm = null; } /** * @param i Index. * @return Value. */ protected double storageGet(int i) { return sto.get(i); } /** * Gets serializable value from storage and casts it to targe type T. * * @param i Index. * @return Value. 
*/
    protected <T extends Serializable> T storageGetRaw(int i) {
        return sto.getRaw(i);
    }

    /** {@inheritDoc} */
    @Override public int size() {
        return sto.size();
    }

    /**
     * Check index bounds.
     *
     * @param idx Index to check.
     */
    protected void checkIndex(int idx) {
        if (idx < 0 || idx >= sto.size())
            throw new IndexException(idx);
    }

    /** {@inheritDoc} */
    @Override public double get(int idx) {
        checkIndex(idx);

        return storageGet(idx);
    }

    /** {@inheritDoc} Unchecked variant of {@code get}: no bounds check. */
    @Override public double getX(int idx) {
        return storageGet(idx);
    }

    /** {@inheritDoc} */
    @Override public <T extends Serializable> T getRaw(int idx) {
        checkIndex(idx);

        return sto.getRaw(idx);
    }

    /** {@inheritDoc} Unchecked variant of {@code getRaw}: no bounds check. */
    @Override public <T extends Serializable> T getRawX(int idx) {
        return sto.getRaw(idx);
    }

    /** {@inheritDoc} */
    @Override public boolean isArrayBased() {
        return sto.isArrayBased();
    }

    /**
     * {@inheritDoc}
     * In-place ascending sort of the backing array; only supported for array-based storage.
     */
    @Override public Vector sort() {
        if (isArrayBased())
            Arrays.parallelSort(sto.data());
        else
            throw new UnsupportedOperationException();

        return this;
    }

    /** {@inheritDoc} Mutates this vector in place (array fast path when available). */
    @Override public Vector map(IgniteDoubleFunction<Double> fun) {
        if (sto.isArrayBased()) {
            double[] data = sto.data();

            Arrays.setAll(data, (idx) -> fun.apply(data[idx]));
        }
        else {
            int len = size();

            for (int i = 0; i < len; i++)
                storageSet(i, fun.apply(storageGet(i)));
        }

        return this;
    }

    /** {@inheritDoc} Element-wise combine with {@code vec}, mutating this vector. */
    @Override public Vector map(Vector vec, IgniteBiFunction<Double, Double, Double> fun) {
        checkCardinality(vec);

        int len = size();

        for (int i = 0; i < len; i++)
            storageSet(i, fun.apply(storageGet(i), vec.get(i)));

        return this;
    }

    /** {@inheritDoc} Applies {@code fun(element, y)} to every element, mutating this vector. */
    @Override public Vector map(IgniteBiFunction<Double, Double, Double> fun, double y) {
        int len = size();

        for (int i = 0; i < len; i++)
            storageSet(i, fun.apply(storageGet(i), y));

        return this;
    }

    /**
     * Creates a live {@link Element} view over position {@code idx} (reads/writes pass
     * through to storage).
     *
     * @param idx Index.
     * @return Value.
     */
    protected Element makeElement(int idx) {
        checkIndex(idx);

        return new Element() {
            /** {@inheritDoc} */
            @Override public double get() {
                return storageGet(idx);
            }

            /** {@inheritDoc} */
            @Override public int index() {
                return idx;
            }

            /** {@inheritDoc} */
            @Override public void set(double val) {
                storageSet(idx, val);
            }

            /** {@inheritDoc} */
            @Override public void setRaw(Serializable val) {
                storageSetRaw(idx, val);
            }

            /** {@inheritDoc} */
            @Override public <T extends Serializable> T getRaw() {
                return storageGetRaw(idx);
            }
        };
    }

    /**
     * {@inheritDoc}
     * NOTE(review): result is cached in {@code minElm} and only {@code compute()} clears that
     * cache — {@code set()}/{@code assign()} can leave a stale minimum. Also throws (via
     * {@code makeElement(0)} bounds check) for an empty vector.
     */
    @Override public Element minElement() {
        if (minElm == null) {
            int minIdx = 0;
            int len = size();

            for (int i = 0; i < len; i++)
                if (storageGet(i) < storageGet(minIdx))
                    minIdx = i;

            minElm = makeElement(minIdx);
        }

        return minElm;
    }

    /**
     * {@inheritDoc}
     * NOTE(review): same caching caveat as {@code minElement()} — cache is cleared only by
     * {@code compute()}.
     */
    @Override public Element maxElement() {
        if (maxElm == null) {
            int maxIdx = 0;
            int len = size();

            for (int i = 0; i < len; i++)
                if (storageGet(i) > storageGet(maxIdx))
                    maxIdx = i;

            maxElm = makeElement(maxIdx);
        }

        return maxElm;
    }

    /** {@inheritDoc} */
    @Override public double minValue() {
        return minElement().get();
    }

    /** {@inheritDoc} */
    @Override public double maxValue() {
        return maxElement().get();
    }

    /** {@inheritDoc} */
    @Override public Vector set(int idx, double val) {
        checkIndex(idx);

        storageSet(idx, val);

        return this;
    }

    /** {@inheritDoc} Unchecked variant of {@code set}: no bounds check. */
    @Override public Vector setX(int idx, double val) {
        storageSet(idx, val);

        return this;
    }

    /** {@inheritDoc} */
    @Override public Vector setRaw(int idx, Serializable val) {
        checkIndex(idx);

        storageSetRaw(idx, val);

        return this;
    }

    /** {@inheritDoc} Unchecked variant of {@code setRaw}: no bounds check. */
    @Override public Vector setRawX(int idx, Serializable val) {
        storageSetRaw(idx, val);

        return this;
    }

    /** {@inheritDoc} Adds {@code val} to the element at {@code idx} (bounds-checked). */
    @Override public Vector increment(int idx, double val) {
        checkIndex(idx);

        storageSet(idx, storageGet(idx) + val);

        return this;
    }

    /** {@inheritDoc} Unchecked variant of {@code increment}. */
    @Override public Vector incrementX(int idx, double val) {
        storageSet(idx, storageGet(idx) + val);

        return this;
    }

    /**
     * Tests if given value is considered a zero value (exact comparison, no epsilon).
     *
     * @param val Value to check.
     */
    protected boolean isZero(double val) {
        return val == 0.0;
    }

    /** {@inheritDoc} */
    @Override public double sum() {
        double sum = 0;
        int len = size();

        for (int i = 0; i < len; i++)
            sum += storageGet(i);

        return sum;
    }

    /** {@inheritDoc} */
    @Override public IgniteUuid guid() {
        return guid;
    }

    /**
     * {@inheritDoc}
     * NOTE(review): the cursor {@code idx} lives on the {@link Iterable}, not the
     * {@link Iterator} — repeated {@code iterator()} calls share one cursor.
     */
    @Override public Iterable<Element> all() {
        return new Iterable<Element>() {
            private int idx = 0;

            /** {@inheritDoc} */
            @NotNull
            @Override public Iterator<Element> iterator() {
                return new Iterator<Element>() {
                    /** {@inheritDoc} */
                    @Override public boolean hasNext() {
                        return size() > 0 && idx < size();
                    }

                    /** {@inheritDoc} */
                    @Override public Element next() {
                        if (hasNext())
                            return getElement(idx++);

                        throw new NoSuchElementException();
                    }
                };
            }
        };
    }

    /** {@inheritDoc} Counts by fully iterating {@code nonZeroes()} — O(n). */
    @Override public int nonZeroElements() {
        int cnt = 0;

        for (Element ignored : nonZeroes())
            cnt++;

        return cnt;
    }

    /** {@inheritDoc} Left fold of {@code mapFun(element)} over the whole vector. */
    @Override public <T> T foldMap(IgniteBiFunction<T, Double, T> foldFun, IgniteDoubleFunction<Double> mapFun,
        T zeroVal) {
        T res = zeroVal;
        int len = size();

        for (int i = 0; i < len; i++)
            res = foldFun.apply(res, mapFun.apply(storageGet(i)));

        return res;
    }

    /** {@inheritDoc} Left fold of {@code combFun(this[i], vec[i])}; cardinalities must match. */
    @Override public <T> T foldMap(Vector vec, IgniteBiFunction<T, Double, T> foldFun,
        IgniteBiFunction<Double, Double, Double> combFun, T zeroVal) {
        checkCardinality(vec);

        T res = zeroVal;
        int len = size();

        for (int i = 0; i < len; i++)
            res = foldFun.apply(res, combFun.apply(storageGet(i), vec.getX(i)));

        return res;
    }

    /**
     * {@inheritDoc}
     * Lazy view over non-zero elements; state ({@code idx}, {@code idxNext}) is shared by all
     * iterators obtained from the returned {@link Iterable}.
     */
    @Override public Iterable<Element> nonZeroes() {
        return new Iterable<Element>() {
            private int idx = 0;
            private int idxNext = -1;

            /** {@inheritDoc} */
            @NotNull
            @Override public Iterator<Element> iterator() {
                return new Iterator<Element>() {
                    @Override public boolean hasNext() {
                        findNext();

                        return !over();
                    }

                    @Override public Element next() {
                        if (hasNext()) {
                            idx = idxNext;

                            return getElement(idxNext);
                        }

                        throw new NoSuchElementException();
                    }

                    // Advances idxNext to the next non-zero position; no-op while a found
                    // element is still pending (idx != idxNext).
                    private void findNext() {
                        if (over())
                            return;

                        if (idxNextInitialized() && idx != idxNext)
                            return;

                        if (idxNextInitialized())
                            idx = idxNext + 1;

                        while (idx < size() && isZero(get(idx)))
                            idx++;

                        idxNext = idx++;
                    }

                    private boolean over() {
                        return idxNext >= size();
                    }

                    private boolean idxNextInitialized() {
                        return idxNext != -1;
                    }
                };
            }
        };
    }

    /** {@inheritDoc} */
    @Override public Map<String, Object> getMetaStorage() {
        return meta;
    }

    /** {@inheritDoc} Fills every element with {@code val}, in place. */
    @Override public Vector assign(double val) {
        if (sto.isArrayBased()) {
            // Presumably guards against mutating a read-only vector — defined outside this
            // fragment; TODO confirm.
            ensureReadOnly();

            Arrays.fill(sto.data(), val);
        }
        else {
            int len = size();

            for (int i = 0; i < len; i++)
                storageSet(i, val);
        }

        return this;
    }

    /**
     * {@inheritDoc}
     * NOTE(review): the {@code lenSq} cache is reset only on the array-based path here, and not
     * at all in the other {@code assign} overloads — stale length-squared is possible.
     */
    @Override public Vector assign(double[] vals) {
        checkCardinality(vals);

        if (sto.isArrayBased()) {
            ensureReadOnly();

            System.arraycopy(vals, 0, sto.data(), 0, vals.length);

            lenSq = 0.0;
        }
        else {
            int len = size();

            for (int i = 0; i < len; i++)
                storageSet(i, vals[i]);
        }

        return this;
    }

    /** {@inheritDoc} Copies all elements of {@code vec} into this vector. */
    @Override public Vector assign(Vector vec) {
        checkCardinality(vec);

        for (Vector.Element x : vec.all())
            storageSet(x.index(), x.get());

        return this;
    }

    /** {@inheritDoc} Fills element {@code i} with {@code fun(i)}. */
    @Override public Vector assign(IntToDoubleFunction fun) {
        assert fun != null;

        if (sto.isArrayBased()) {
            ensureReadOnly();

            Arrays.setAll(sto.data(), fun);
        }
        else {
            int len = size();

            for (int i = 0; i < len; i++)
                storageSet(i, fun.applyAsDouble(i));
        }

        return this;
    }

    /**
     * {@inheritDoc}
     * NOTE(review): {@code tryAdvance} consumes ALL elements in one call and always returns
     * {@code true} — this differs from the {@link Spliterator#tryAdvance} contract (one element
     * per call, {@code false} when exhausted); confirm intended.
     */
    @Override public Spliterator<Double> allSpliterator() {
        return new Spliterator<Double>() {
            /** {@inheritDoc} */
            @Override public boolean tryAdvance(Consumer<? super Double> act) {
                int len = size();

                for (int i = 0; i < len; i++)
                    act.accept(storageGet(i));

                return true;
            }

            /** {@inheritDoc} */
            @Override public Spliterator<Double> trySplit() {
                return null; // No Splitting.
            }

            /** {@inheritDoc} */
            @Override public long estimateSize() {
                return size();
            }

            /** {@inheritDoc} */
            @Override public int characteristics() {
                return ORDERED | SIZED;
            }
        };
    }

    /**
     * {@inheritDoc}
     * NOTE(review): same single-shot {@code tryAdvance} caveat as {@code allSpliterator()}.
     */
    @Override public Spliterator<Double> nonZeroSpliterator() {
        return new Spliterator<Double>() {
            /** {@inheritDoc} */
            @Override public boolean tryAdvance(Consumer<? super Double> act) {
                int len = size();

                for (int i = 0; i < len; i++) {
                    double val = storageGet(i);

                    if (!isZero(val))
                        act.accept(val);
                }

                return true;
            }

            /** {@inheritDoc} */
            @Override public Spliterator<Double> trySplit() {
                return null; // No Splitting.
            }

            /** {@inheritDoc} */
            @Override public long estimateSize() {
                return nonZeroElements();
            }

            /** {@inheritDoc} */
            @Override public int characteristics() {
                return ORDERED | SIZED;
            }
        };
    }

    /** {@inheritDoc} Dot product; cardinalities must match. */
    @Override public double dot(Vector vec) {
        checkCardinality(vec);

        double sum = 0.0;
        int len = size();

        for (int i = 0; i < len; i++)
            sum += storageGet(i) * vec.getX(i);

        return sum;
    }

    /**
     * {@inheritDoc}
     * NOTE(review): {@code lenSq == 0.0} doubles as the "not cached" sentinel, so a true zero
     * vector is recomputed on every call, and element mutation via {@code set()} does not
     * invalidate the cache (only {@code compute()} does).
     */
    @Override public double getLengthSquared() {
        if (lenSq == 0.0)
            lenSq = dotSelf();

        return lenSq;
    }

    /** {@inheritDoc} */
    @Override public boolean isDense() {
        return sto.isDense();
    }

    /** {@inheritDoc} */
    @Override public boolean isDistributed() {
        return sto.isDistributed();
    }

    /** {@inheritDoc} */
    @Override public boolean isNumeric() {
        return sto.isNumeric();
    }

    /** {@inheritDoc} */
    @Override public VectorStorage getStorage() {
        return sto;
    }

    /** {@inheritDoc} Live view — writes through to this vector. */
    @Override public Vector viewPart(int off, int len) {
        return new VectorView(this, off, len);
    }

    /** {@inheritDoc} Outer product: row i of the result is {@code vec * this[i]} (zero rows skipped). */
    @Override public Matrix cross(Vector vec) {
        Matrix res = likeMatrix(size(), vec.size());

        if (res == null)
            return null;

        for (Element e : nonZeroes()) {
            int row = e.index();

            res.assignRow(row, vec.times(getX(row)));
        }

        return res;
    }

    /** {@inheritDoc} Converts to a 1-row or 1-column matrix. */
    @Override public Matrix toMatrix(boolean rowLike) {
        Matrix res = likeMatrix(rowLike ? 1 : size(), rowLike ? size() : 1);

        if (res == null)
            return null;

        if (rowLike)
            res.assignRow(0, this);
        else
            res.assignColumn(0, this);

        return res;
    }

    /** {@inheritDoc} Like {@code toMatrix} but prepends {@code zeroVal} as element (0,0). */
    @Override public Matrix toMatrixPlusOne(boolean rowLike, double zeroVal) {
        Matrix res = likeMatrix(rowLike ? 1 : size() + 1, rowLike ? size() + 1 : 1);

        if (res == null)
            return null;

        res.set(0, 0, zeroVal);

        if (rowLike)
            new ViewMatrix(res, 0, 1, 1, size()).assignRow(0, this);
        else
            new ViewMatrix(res, 1, 0, size(), 1).assignColumn(0, this);

        return res;
    }

    /** {@inheritDoc} Uses |a|² + |b|² − 2a·b when numerically safe, exact fold otherwise. */
    @Override public double getDistanceSquared(Vector vec) {
        checkCardinality(vec);

        double thisLenSq = getLengthSquared();
        double thatLenSq = vec.getLengthSquared();
        double dot = dot(vec);
        double distEst = thisLenSq + thatLenSq - 2 * dot;

        if (distEst > 1.0e-3 * (thisLenSq + thatLenSq))
            // The vectors are far enough from each other that the formula is accurate.
            return Math.max(distEst, 0);
        else
            return foldMap(vec, Functions.PLUS, Functions.MINUS_SQUARED, 0d);
    }

    /**
     * @param vec Vector to check for valid cardinality.
     */
    protected void checkCardinality(Vector vec) {
        if (vec.size() != size())
            throw new CardinalityException(size(), vec.size());
    }

    /**
     * @param vec Array to check for valid cardinality.
     */
    protected void checkCardinality(double[] vec) {
        if (vec.length != size())
            throw new CardinalityException(size(), vec.length);
    }

    /**
     * @param arr Array to check for valid cardinality.
     */
    protected void checkCardinality(int[] arr) {
        if (arr.length != size())
            throw new CardinalityException(size(), arr.length);
    }

    /** {@inheritDoc} Returns a new vector; this vector is not modified. */
    @Override public Vector minus(Vector vec) {
        checkCardinality(vec);

        Vector cp = copy();

        return cp.map(vec, Functions.MINUS);
    }

    /** {@inheritDoc} Returns a new vector; adding 0 short-circuits to a plain copy. */
    @Override public Vector plus(double x) {
        Vector cp = copy();

        return x != 0.0 ? cp.map(Functions.plus(x)) : cp;
    }

    /** {@inheritDoc} Returns a new vector; dividing by 1 short-circuits to a plain copy. */
    @Override public Vector divide(double x) {
        Vector cp = copy();

        if (x != 1.0)
            for (Element element : cp.all())
                element.set(element.get() / x);

        return cp;
    }

    /** {@inheritDoc} Multiplying by 0 returns a fresh empty-like vector. */
    @Override public Vector times(double x) {
        if (x == 0.0)
            return like(size());
        else
            return copy().map(Functions.mult(x));
    }

    /** {@inheritDoc} Element-wise product into a new vector. */
    @Override public Vector times(Vector vec) {
        checkCardinality(vec);

        return copy().map(vec, Functions.MULT);
    }

    /** {@inheritDoc} Element-wise sum into a new vector. */
    @Override public Vector plus(Vector vec) {
        checkCardinality(vec);

        Vector cp = copy();

        return cp.map(vec, Functions.PLUS);
    }

    /** {@inheritDoc} Log-normalizes with base 2 and the Euclidean norm. */
    @Override public Vector logNormalize() {
        return logNormalize(2.0, Math.sqrt(getLengthSquared()));
    }

    /** {@inheritDoc} */
    @Override public Vector logNormalize(double power) {
        return logNormalize(power, kNorm(power));
    }

    /**
     * Shared implementation: element := log1p(element) / (normLen * ln(power)).
     *
     * @param power Power (must be finite and &gt; 1 — enforced by assert only).
     * @param normLen Normalized length.
     * @return logNormalized value.
     */
    private Vector logNormalize(double power, double normLen) {
        assert !(Double.isInfinite(power) || power <= 1.0);

        double denominator = normLen * Math.log(power);

        Vector cp = copy();

        for (Element element : cp.all())
            element.set(Math.log1p(element.get()) / denominator);

        return cp;
    }

    /** {@inheritDoc} p-norm with fast paths for p = ∞, 2, 1 and 0 (count of non-zeroes). */
    @Override public double kNorm(double power) {
        assert power >= 0.0;

        // Special cases.
        if (Double.isInfinite(power))
            return foldMap(Math::max, Math::abs, 0d);
        else if (power == 2.0)
            return Math.sqrt(getLengthSquared());
        else if (power == 1.0)
            return foldMap(Functions.PLUS, Math::abs, 0d);
        else if (power == 0.0)
            return nonZeroElements();
        else
            // Default case.
            return Math.pow(foldMap(Functions.PLUS, Functions.pow(power), 0d), 1.0 / power);
    }

    /** {@inheritDoc} Scales to unit Euclidean length (new vector). */
    @Override public Vector normalize() {
        return divide(Math.sqrt(getLengthSquared()));
    }

    /** {@inheritDoc} Scales to unit p-norm length (new vector). */
    @Override public Vector normalize(double power) {
        return divide(kNorm(power));
    }

    /** {@inheritDoc} */
    @Override public Vector copy() {
        return like(size()).assign(this);
    }

    /** {@inheritDoc} Half-open range [from, to) copied into a new vector. */
    @Override public Vector copyOfRange(int from, int to) {
        Vector copiedVector = like(to - from);

        for (int i = from, j = 0; i < to; i++, j++)
            copiedVector.set(j, this.get(i));

        return copiedVector;
    }

    /**
     * @return Result of dot with self.
     */
    protected double dotSelf() {
        double sum = 0.0;
        int len = size();

        for (int i = 0; i < len; i++) {
            double v = storageGet(i);

            sum += v * v;
        }

        return sum;
    }

    /** {@inheritDoc} */
    @Override public Element getElement(int idx) {
        return makeElement(idx);
    }

    /** {@inheritDoc} Serializes storage, metadata, guid and the read-only flag, in that order. */
    @Override public void writeExternal(ObjectOutput out) throws IOException {
        out.writeObject(sto);
        out.writeObject(meta);
        out.writeObject(guid);
        out.writeBoolean(readOnly);
    }

    /** {@inheritDoc} Must read fields in the same order {@code writeExternal} wrote them. */
    @SuppressWarnings("unchecked")
    @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        sto = (VectorStorage)in.readObject();
        meta = (Map<String, Object>)in.readObject();
        guid = (IgniteUuid)in.readObject();
        readOnly = in.readBoolean();
    }

    /** {@inheritDoc} */
    @Override public void destroy() {
        sto.destroy();
    }

    /**
     * {@inheritDoc}
     * NOTE(review): mixes {@code guid} into the hash while {@code equals} compares only
     * {@code sto} — two equal vectors with different guids hash differently, violating the
     * equals/hashCode contract.
     */
    @Override public int hashCode() {
        int res = 1;

        res += res * 37 + guid.hashCode();
        res += sto == null ? 0 : res * 37 + sto.hashCode();

        return res;
    }

    /** {@inheritDoc} Equality is defined by storage contents only (guid ignored). */
    @Override public boolean equals(Object obj) {
        if (this == obj)
            return true;

        if (obj == null || getClass() != obj.getClass())
            return false;

        AbstractVector that = (AbstractVector)obj;

        return (sto != null ? sto.equals(that.sto) : that.sto == null);
    }

    /** {@inheritDoc} In-place update of one element; invalidates lenSq and min/max caches. */
    @Override public void compute(int idx, IgniteIntDoubleToDoubleBiFunction f) {
        storageSet(idx, f.apply(idx, storageGet(idx)));

        lenSq = 0.0;

        maxElm = minElm = null;
    }
}
/* * Copyright 2004, 2005, 2006 Acegi Technology Pty Limited * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.security.web.access.channel; import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.mock; import java.io.IOException; import java.util.Collection; import javax.servlet.FilterChain; import javax.servlet.ServletException; import org.junit.Test; import org.springframework.mock.web.MockHttpServletRequest; import org.springframework.mock.web.MockHttpServletResponse; import org.springframework.security.access.ConfigAttribute; import org.springframework.security.access.SecurityConfig; import org.springframework.security.web.FilterInvocation; import org.springframework.security.web.access.intercept.FilterInvocationSecurityMetadataSource; /** * Tests {@link ChannelProcessingFilter}. 
*
 * @author Ben Alex
 */
public class ChannelProcessingFilterTests {
	// ~ Methods
	// ========================================================================================================

	/** Builds a request with the fixed query string used by all doFilter tests. */
	private MockHttpServletRequest createRequest(String servletPath) {
		MockHttpServletRequest request = new MockHttpServletRequest();
		request.setQueryString("info=now");
		request.setServletPath(servletPath);
		return request;
	}

	@Test(expected = IllegalArgumentException.class)
	public void testDetectsMissingChannelDecisionManager() throws Exception {
		// Metadata source alone is not enough — validation must reject the filter.
		ChannelProcessingFilter filter = new ChannelProcessingFilter();
		filter.setSecurityMetadataSource(new MockFilterInvocationDefinitionMap("/path",
				true, "MOCK"));
		filter.afterPropertiesSet();
	}

	@Test(expected = IllegalArgumentException.class)
	public void testDetectsMissingFilterInvocationSecurityMetadataSource()
			throws Exception {
		// Decision manager alone is not enough either.
		ChannelProcessingFilter filter = new ChannelProcessingFilter();
		filter.setChannelDecisionManager(new MockChannelDecisionManager(false, "MOCK"));
		filter.afterPropertiesSet();
	}

	@Test
	public void testDetectsSupportedConfigAttribute() throws Exception {
		// An attribute the decision manager supports passes validation.
		ChannelProcessingFilter filter = new ChannelProcessingFilter();
		filter.setChannelDecisionManager(new MockChannelDecisionManager(false,
				"SUPPORTS_MOCK_ONLY"));
		filter.setSecurityMetadataSource(new MockFilterInvocationDefinitionMap("/path",
				true, "SUPPORTS_MOCK_ONLY"));
		filter.afterPropertiesSet();
	}

	@Test(expected = IllegalArgumentException.class)
	public void testDetectsUnsupportedConfigAttribute() throws Exception {
		// One unsupported attribute in the metadata source fails validation.
		ChannelProcessingFilter filter = new ChannelProcessingFilter();
		filter.setChannelDecisionManager(new MockChannelDecisionManager(false,
				"SUPPORTS_MOCK_ONLY"));
		filter.setSecurityMetadataSource(new MockFilterInvocationDefinitionMap("/path",
				true, "SUPPORTS_MOCK_ONLY", "INVALID_ATTRIBUTE"));
		filter.afterPropertiesSet();
	}

	@Test
	public void testDoFilterWhenManagerDoesCommitResponse() throws Exception {
		// Decision manager commits (redirects) the response itself.
		ChannelProcessingFilter filter = new ChannelProcessingFilter();
		filter.setChannelDecisionManager(new MockChannelDecisionManager(true,
				"SOME_ATTRIBUTE"));
		filter.setSecurityMetadataSource(new MockFilterInvocationDefinitionMap("/path",
				true, "SOME_ATTRIBUTE"));

		filter.doFilter(createRequest("/path"), new MockHttpServletResponse(),
				mock(FilterChain.class));
	}

	@Test
	public void testDoFilterWhenManagerDoesNotCommitResponse() throws Exception {
		// Decision manager leaves the response open; the chain proceeds.
		ChannelProcessingFilter filter = new ChannelProcessingFilter();
		filter.setChannelDecisionManager(new MockChannelDecisionManager(false,
				"SOME_ATTRIBUTE"));
		filter.setSecurityMetadataSource(new MockFilterInvocationDefinitionMap("/path",
				true, "SOME_ATTRIBUTE"));

		filter.doFilter(createRequest("/path"), new MockHttpServletResponse(),
				mock(FilterChain.class));
	}

	@Test
	public void testDoFilterWhenNullConfigAttributeReturned() throws Exception {
		// Path does not match the metadata source, so no attributes apply.
		ChannelProcessingFilter filter = new ChannelProcessingFilter();
		filter.setChannelDecisionManager(new MockChannelDecisionManager(false,
				"NOT_USED"));
		filter.setSecurityMetadataSource(new MockFilterInvocationDefinitionMap("/path",
				true, "NOT_USED"));

		filter.doFilter(createRequest("/PATH_NOT_MATCHING_CONFIG_ATTRIBUTE"),
				new MockHttpServletResponse(), mock(FilterChain.class));
	}

	@Test
	public void testGetterSetters() throws Exception {
		ChannelProcessingFilter filter = new ChannelProcessingFilter();
		filter.setChannelDecisionManager(new MockChannelDecisionManager(false, "MOCK"));
		assertThat(filter.getChannelDecisionManager() != null).isTrue();

		MockFilterInvocationDefinitionMap fids = new MockFilterInvocationDefinitionMap(
				"/path", false, "MOCK");
		filter.setSecurityMetadataSource(fids);
		assertThat(filter.getSecurityMetadataSource()).isSameAs(fids);

		filter.afterPropertiesSet();
	}

	// ~ Inner Classes
	// ==================================================================================================

	/** Decision manager stub supporting exactly one attribute, optionally committing the response. */
	private class MockChannelDecisionManager implements ChannelDecisionManager {
		private final String supportAttribute;
		private final boolean commitAResponse;

		MockChannelDecisionManager(boolean commitAResponse, String supportAttribute) {
			this.commitAResponse = commitAResponse;
			this.supportAttribute = supportAttribute;
		}

		public void decide(FilterInvocation invocation,
				Collection<ConfigAttribute> config) throws IOException, ServletException {
			if (commitAResponse) {
				invocation.getHttpResponse().sendRedirect("/redirected");
			}
		}

		public boolean supports(ConfigAttribute attribute) {
			return attribute.getAttribute().equals(supportAttribute);
		}
	}

	/** Metadata source stub keyed on a single servlet path. */
	private class MockFilterInvocationDefinitionMap implements
			FilterInvocationSecurityMetadataSource {
		private final String servletPath;
		private final boolean provideIterator;
		private final Collection<ConfigAttribute> toReturn;

		MockFilterInvocationDefinitionMap(String servletPath, boolean provideIterator,
				String... toReturn) {
			this.servletPath = servletPath;
			this.toReturn = SecurityConfig.createList(toReturn);
			this.provideIterator = provideIterator;
		}

		public Collection<ConfigAttribute> getAttributes(Object object)
				throws IllegalArgumentException {
			FilterInvocation invocation = (FilterInvocation) object;
			return servletPath.equals(invocation.getHttpRequest().getServletPath())
					? toReturn : null;
		}

		public Collection<ConfigAttribute> getAllConfigAttributes() {
			return provideIterator ? toReturn : null;
		}

		public boolean supports(Class<?> clazz) {
			return true;
		}
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.undertow.server.handlers; import io.undertow.UndertowLogger; import io.undertow.server.HandlerWrapper; import io.undertow.server.HttpHandler; import io.undertow.server.HttpServerExchange; import io.undertow.server.handlers.builder.HandlerBuilder; import io.undertow.util.WorkerUtils; import org.xnio.XnioExecutor; import org.xnio.XnioIoThread; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.List; import java.util.Map; import java.util.Queue; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; /** * This valve allows to detect requests that take a long time to process, which might * indicate that the thread that is processing it is stuck. * Based on code proposed by TomLu in Bugzilla entry #50306 * * @author slaurent * */ public class StuckThreadDetectionHandler implements HttpHandler { public static final int DEFAULT_THRESHOLD = 600; /** * Keeps count of the number of stuck threads detected */ private final AtomicInteger stuckCount = new AtomicInteger(0); /** * In seconds. 
Default 600 (10 minutes). */
    private final int threshold;

    /**
     * The only references we keep to actual running Thread objects are in
     * this Map (which is automatically cleaned in invoke()s finally clause).
     * That way, Threads can be GC'ed, eventhough the Valve still thinks they
     * are stuck (caused by a long monitor interval)
     */
    private final ConcurrentHashMap<Long, MonitoredThread> activeThreads =
            new ConcurrentHashMap<Long, MonitoredThread>();

    /** Threads that finished while marked STUCK; drained by the timer task to log recovery. */
    private final Queue<CompletedStuckThread> completedStuckThreadsQueue =
            new ConcurrentLinkedQueue<>();

    private final HttpHandler next;

    // Periodic (1s) task: flags long-running monitored threads as stuck, logs completions,
    // and reschedules itself while any request is still in flight.
    private final Runnable stuckThreadTask = new Runnable() {
        @Override
        public void run() {
            // NOTE(review): clearing timerKey here, before the synchronized block below
            // re-assigns it, opens a window where a racing handleRequest() sees null and
            // schedules a second task — confirm this double-scheduling is acceptable.
            timerKey = null;
            long thresholdInMillis = threshold * 1000L;

            // Check monitored threads, being careful that the request might have
            // completed by the time we examine it
            for (MonitoredThread monitoredThread : activeThreads.values()) {
                long activeTime = monitoredThread.getActiveTimeInMillis();

                // markAsStuckIfStillRunning is a CAS, so each thread is reported at most once.
                if (activeTime >= thresholdInMillis && monitoredThread.markAsStuckIfStillRunning()) {
                    int numStuckThreads = stuckCount.incrementAndGet();
                    notifyStuckThreadDetected(monitoredThread, activeTime, numStuckThreads);
                }
            }
            // Check if any threads previously reported as stuck, have finished.
            for (CompletedStuckThread completedStuckThread = completedStuckThreadsQueue.poll();
                 completedStuckThread != null;
                 completedStuckThread = completedStuckThreadsQueue.poll()) {

                int numStuckThreads = stuckCount.decrementAndGet();
                notifyStuckThreadCompleted(completedStuckThread, numStuckThreads);
            }
            // Reschedule only while there is still work to watch.
            synchronized (StuckThreadDetectionHandler.this) {
                if(activeThreads.isEmpty()) {
                    timerKey = null;
                } else {
                    // Runs on an XNIO IO thread, so the cast is how the task re-arms itself.
                    timerKey = WorkerUtils.executeAfter(((XnioIoThread)Thread.currentThread()), stuckThreadTask, 1, TimeUnit.SECONDS);
                }
            }
        }
    };

    // Handle for the pending timer task; null when no task is scheduled.
    private volatile XnioExecutor.Key timerKey;

    public StuckThreadDetectionHandler(HttpHandler next) {
        this(DEFAULT_THRESHOLD, next);
    }

    public StuckThreadDetectionHandler(int threshold, HttpHandler next) {
        this.threshold = threshold;
        this.next = next;
    }

    /**
     * @return The current threshold in seconds
     */
    public int getThreshold() {
        return threshold;
    }

    // Logs a detected stuck thread together with a synthetic Throwable carrying its
    // current stack trace.
    private void notifyStuckThreadDetected(MonitoredThread monitoredThread,
                                           long activeTime, int numStuckThreads) {
        Throwable th = new Throwable();
        th.setStackTrace(monitoredThread.getThread().getStackTrace());
        UndertowLogger.REQUEST_LOGGER.stuckThreadDetected
                (monitoredThread.getThread().getName(), monitoredThread.getThread().getId(),
                        activeTime, monitoredThread.getStartTime(), monitoredThread.getRequestUri(),
                        threshold, numStuckThreads, th);
    }

    // Logs that a previously-stuck thread has completed its request.
    private void notifyStuckThreadCompleted(CompletedStuckThread thread, int numStuckThreads) {
        UndertowLogger.REQUEST_LOGGER.stuckThreadCompleted
                (thread.getName(), thread.getId(), thread.getTotalActiveTime(), numStuckThreads);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void handleRequest(HttpServerExchange exchange) throws Exception {
        // Save the thread/runnable
        // Keeping a reference to the thread object here does not prevent
        // GC'ing, as the reference is removed from the Map in the finally clause
        Long key = Thread.currentThread().getId();
        MonitoredThread monitoredThread = new MonitoredThread(Thread.currentThread(),
                exchange.getRequestURI() + exchange.getQueryString());
        activeThreads.put(key, monitoredThread);

        // Double-checked locking: start the watchdog timer lazily, once.
        if(timerKey == null) {
            synchronized (this) {
                if(timerKey == null) {
                    timerKey = exchange.getIoThread().executeAfter(stuckThreadTask, 1, TimeUnit.SECONDS);
                }
            }
        }

        try {
            next.handleRequest(exchange);
        } finally {
            activeThreads.remove(key);
            // If the watchdog flagged us STUCK while running, queue the completion
            // so the timer task can log the recovery and decrement the counter.
            if (monitoredThread.markAsDone() == MonitoredThreadState.STUCK) {
                completedStuckThreadsQueue.add(
                        new CompletedStuckThread(monitoredThread.getThread(),
                                monitoredThread.getActiveTimeInMillis()));
            }
        }
    }

    /** @return ids of all currently-active threads marked as stuck (snapshot, best effort). */
    public long[] getStuckThreadIds() {
        List<Long> idList = new ArrayList<>();
        for (MonitoredThread monitoredThread : activeThreads.values()) {
            if (monitoredThread.isMarkedAsStuck()) {
                idList.add(Long.valueOf(monitoredThread.getThread().getId()));
            }
        }

        long[] result = new long[idList.size()];
        for (int i = 0; i < result.length; i++) {
            result[i] = idList.get(i).longValue();
        }
        return result;
    }

    /** Per-request record: thread, URI, start time and a RUNNING/STUCK/DONE state machine. */
    private static class MonitoredThread {

        /**
         * Reference to the thread to get a stack trace from background task
         */
        private final Thread thread;
        private final String requestUri;
        private final long start;
        // State stored as the ordinal of MonitoredThreadState; transitions via CAS/getAndSet.
        private final AtomicInteger state = new AtomicInteger(
                MonitoredThreadState.RUNNING.ordinal());

        MonitoredThread(Thread thread, String requestUri) {
            this.thread = thread;
            this.requestUri = requestUri;
            this.start = System.currentTimeMillis();
        }

        public Thread getThread() {
            return this.thread;
        }

        public String getRequestUri() {
            return requestUri;
        }

        public long getActiveTimeInMillis() {
            return System.currentTimeMillis() - start;
        }

        public Date getStartTime() {
            return new Date(start);
        }

        // CAS RUNNING -> STUCK; false if the request already finished (DONE) or was
        // flagged before, so each thread is reported stuck at most once.
        public boolean markAsStuckIfStillRunning() {
            return this.state.compareAndSet(MonitoredThreadState.RUNNING.ordinal(),
                    MonitoredThreadState.STUCK.ordinal());
        }

        // Unconditionally moves to DONE and returns the state it was in before.
        public MonitoredThreadState markAsDone() {
            int val = this.state.getAndSet(MonitoredThreadState.DONE.ordinal());
            return MonitoredThreadState.values()[val];
        }

        boolean isMarkedAsStuck() {
            return this.state.get() == MonitoredThreadState.STUCK.ordinal();
        }
    }

    /** Immutable snapshot of a stuck thread that finished, queued for completion logging. */
    private static class CompletedStuckThread {

        private final String threadName;
        private final long threadId;
        private final long totalActiveTime;

        CompletedStuckThread(Thread thread, long totalActiveTime) {
            this.threadName = thread.getName();
            this.threadId = thread.getId();
            this.totalActiveTime = totalActiveTime;
        }

        public String getName() {
            return this.threadName;
        }

        public long getId() {
            return this.threadId;
        }

        public long getTotalActiveTime() {
            return this.totalActiveTime;
        }
    }

    private enum MonitoredThreadState {
        RUNNING, STUCK, DONE;
    }

    /** HandlerWrapper carrying the configured threshold (field name kept: public API). */
    public static final class Wrapper implements HandlerWrapper {

        // NOTE(review): "threshhold" is misspelled but is also the externally-visible
        // handler-builder parameter name below — renaming would break existing configs.
        private final int threshhold;

        public Wrapper(int threshhold) {
            this.threshhold = threshhold;
        }

        public Wrapper() {
            this.threshhold = DEFAULT_THRESHOLD;
        }

        @Override
        public HttpHandler wrap(HttpHandler handler) {
            return new StuckThreadDetectionHandler(threshhold, handler);
        }
    }

    /** Registers this handler as "stuck-thread-detector" with an optional "threshhold" parameter. */
    public static class Builder implements HandlerBuilder {

        @Override
        public String name() {
            return "stuck-thread-detector";
        }

        @Override
        public Map<String, Class<?>> parameters() {
            return Collections.<String, Class<?>>singletonMap("threshhold", Integer.class);
        }

        @Override
        public Set<String> requiredParameters() {
            return Collections.emptySet();
        }

        @Override
        public String defaultParameter() {
            return "threshhold";
        }

        @Override
        public HandlerWrapper build(Map<String, Object> config) {
            Integer threshhold = (Integer) config.get("threshhold");
            if(threshhold == null) {
                return new Wrapper();
            } else {
                return new Wrapper(threshhold);
            }
        }
    }
}
/* * Copyright (c) 2004-2022, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.hisp.dhis.webapi.json.domain; import org.hisp.dhis.jsontree.JsonArray; import org.hisp.dhis.jsontree.JsonList; import org.hisp.dhis.jsontree.JsonMap; import org.hisp.dhis.jsontree.JsonMultiMap; import org.hisp.dhis.jsontree.JsonObject; import org.hisp.dhis.jsontree.JsonString; /** * JSON equivalent of the * {@link org.hisp.dhis.dataintegrity.FlattenedDataIntegrityReport}. 
*
 * @author Jan Bernitt
 */
public interface JsonDataIntegrityReport extends JsonObject
{
    // ---- Data elements -------------------------------------------------------------------

    default JsonList<JsonString> getDataElementsWithoutDataSet()
    {
        return getList( "dataElementsWithoutDataSet", JsonString.class );
    }

    default JsonList<JsonString> getDataElementsWithoutGroups()
    {
        return getList( "dataElementsWithoutGroups", JsonString.class );
    }

    default JsonMultiMap<JsonString> getDataElementsAssignedToDataSetsWithDifferentPeriodTypes()
    {
        return getMultiMap( "dataElementsAssignedToDataSetsWithDifferentPeriodTypes", JsonString.class );
    }

    default JsonMultiMap<JsonString> getDataElementsViolatingExclusiveGroupSets()
    {
        return getMultiMap( "dataElementsViolatingExclusiveGroupSets", JsonString.class );
    }

    default JsonMultiMap<JsonString> getDataElementsInDataSetNotInForm()
    {
        return getMultiMap( "dataElementsInDataSetNotInForm", JsonString.class );
    }

    // ---- Category combos and data sets ---------------------------------------------------

    default JsonList<JsonString> getInvalidCategoryCombos()
    {
        return getList( "invalidCategoryCombos", JsonString.class );
    }

    default JsonList<JsonString> getDataSetsNotAssignedToOrganisationUnits()
    {
        return getList( "dataSetsNotAssignedToOrganisationUnits", JsonString.class );
    }

    // ---- Indicators ----------------------------------------------------------------------

    // Each inner array groups indicators sharing an identical formula.
    default JsonList<JsonArray> getIndicatorsWithIdenticalFormulas()
    {
        return getList( "indicatorsWithIdenticalFormulas", JsonArray.class );
    }

    default JsonList<JsonString> getIndicatorsWithoutGroups()
    {
        return getList( "indicatorsWithoutGroups", JsonString.class );
    }

    default JsonMap<JsonString> getInvalidIndicatorNumerators()
    {
        return getMap( "invalidIndicatorNumerators", JsonString.class );
    }

    default JsonMap<JsonString> getInvalidIndicatorDenominators()
    {
        return getMap( "invalidIndicatorDenominators", JsonString.class );
    }

    default JsonMultiMap<JsonString> getIndicatorsViolatingExclusiveGroupSets()
    {
        return getMultiMap( "indicatorsViolatingExclusiveGroupSets", JsonString.class );
    }

    // ---- Periods -------------------------------------------------------------------------

    default JsonList<JsonString> getDuplicatePeriods()
    {
        return getList( "duplicatePeriods", JsonString.class );
    }

    // ---- Organisation units --------------------------------------------------------------

    default JsonList<JsonString> getOrganisationUnitsWithCyclicReferences()
    {
        return getList( "organisationUnitsWithCyclicReferences", JsonString.class );
    }

    default JsonList<JsonString> getOrphanedOrganisationUnits()
    {
        return getList( "orphanedOrganisationUnits", JsonString.class );
    }

    default JsonList<JsonString> getOrganisationUnitsWithoutGroups()
    {
        return getList( "organisationUnitsWithoutGroups", JsonString.class );
    }

    default JsonMultiMap<JsonString> getOrganisationUnitsViolatingExclusiveGroupSets()
    {
        return getMultiMap( "organisationUnitsViolatingExclusiveGroupSets", JsonString.class );
    }

    default JsonList<JsonString> getOrganisationUnitGroupsWithoutGroupSets()
    {
        return getList( "organisationUnitGroupsWithoutGroupSets", JsonString.class );
    }

    // ---- Validation rules ----------------------------------------------------------------

    default JsonList<JsonString> getValidationRulesWithoutGroups()
    {
        return getList( "validationRulesWithoutGroups", JsonString.class );
    }

    default JsonMap<JsonString> getInvalidValidationRuleLeftSideExpressions()
    {
        return getMap( "invalidValidationRuleLeftSideExpressions", JsonString.class );
    }

    default JsonMap<JsonString> getInvalidValidationRuleRightSideExpressions()
    {
        return getMap( "invalidValidationRuleRightSideExpressions", JsonString.class );
    }

    // ---- Program indicators --------------------------------------------------------------

    default JsonMap<JsonString> getInvalidProgramIndicatorExpressions()
    {
        return getMap( "invalidProgramIndicatorExpressions", JsonString.class );
    }

    default JsonList<JsonString> getProgramIndicatorsWithNoExpression()
    {
        return getList( "programIndicatorsWithNoExpression", JsonString.class );
    }

    default JsonMap<JsonString> getInvalidProgramIndicatorFilters()
    {
        return getMap( "invalidProgramIndicatorFilters", JsonString.class );
    }

    // ---- Program rules, variables and actions --------------------------------------------

    default JsonMultiMap<JsonString> getProgramRulesWithNoCondition()
    {
        return getMultiMap( "programRulesWithNoCondition", JsonString.class );
    }

    default JsonMultiMap<JsonString> getProgramRulesWithNoPriority()
    {
        return getMultiMap( "programRulesWithNoPriority", JsonString.class );
    }

    default JsonMultiMap<JsonString> getProgramRulesWithNoAction()
    {
        return getMultiMap( "programRulesWithNoAction", JsonString.class );
    }

    default JsonMultiMap<JsonString> getProgramRuleVariablesWithNoDataElement()
    {
        return getMultiMap( "programRuleVariablesWithNoDataElement", JsonString.class );
    }

    default JsonMultiMap<JsonString> getProgramRuleVariablesWithNoAttribute()
    {
        return getMultiMap( "programRuleVariablesWithNoAttribute", JsonString.class );
    }

    default JsonMultiMap<JsonString> getProgramRuleActionsWithNoDataObject()
    {
        return getMultiMap( "programRuleActionsWithNoDataObject", JsonString.class );
    }

    default JsonMultiMap<JsonString> getProgramRuleActionsWithNoNotification()
    {
        return getMultiMap( "programRuleActionsWithNoNotification", JsonString.class );
    }

    default JsonMultiMap<JsonString> getProgramRuleActionsWithNoSectionId()
    {
        return getMultiMap( "programRuleActionsWithNoSectionId", JsonString.class );
    }

    default JsonMultiMap<JsonString> getProgramRuleActionsWithNoStageId()
    {
        return getMultiMap( "programRuleActionsWithNoStageId", JsonString.class );
    }
}
/** * Copyright (C) 2014-2015 LinkedIn Corp. (pinot-core@linkedin.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.linkedin.pinot.segments.v1.creator; import com.linkedin.pinot.common.data.DimensionFieldSpec; import com.linkedin.pinot.common.data.FieldSpec; import com.linkedin.pinot.common.data.FieldSpec.DataType; import com.linkedin.pinot.common.data.Schema; import com.linkedin.pinot.common.segment.ReadMode; import com.linkedin.pinot.core.indexsegment.columnar.ColumnarSegmentLoader; import com.linkedin.pinot.core.indexsegment.generator.SegmentGeneratorConfig; import com.linkedin.pinot.core.indexsegment.utils.AvroUtils; import com.linkedin.pinot.core.segment.creator.AbstractColumnStatisticsCollector; import com.linkedin.pinot.core.segment.creator.SegmentIndexCreationDriver; import com.linkedin.pinot.core.segment.creator.impl.SegmentCreationDriverFactory; import com.linkedin.pinot.core.segment.creator.impl.SegmentDictionaryCreator; import com.linkedin.pinot.core.segment.creator.impl.V1Constants; import com.linkedin.pinot.core.segment.creator.impl.stats.DoubleColumnPreIndexStatsCollector; import com.linkedin.pinot.core.segment.creator.impl.stats.FloatColumnPreIndexStatsCollector; import com.linkedin.pinot.core.segment.creator.impl.stats.IntColumnPreIndexStatsCollector; import com.linkedin.pinot.core.segment.creator.impl.stats.LongColumnPreIndexStatsCollector; import com.linkedin.pinot.core.segment.creator.impl.stats.StringColumnPreIndexStatsCollector; import 
com.linkedin.pinot.core.segment.index.ColumnMetadata;
import com.linkedin.pinot.core.segment.index.IndexSegmentImpl;
import com.linkedin.pinot.core.segment.index.SegmentMetadataImpl;
import com.linkedin.pinot.core.segment.index.readers.DoubleDictionary;
import com.linkedin.pinot.core.segment.index.readers.FloatDictionary;
import com.linkedin.pinot.core.segment.index.readers.ImmutableDictionaryReader;
import com.linkedin.pinot.core.segment.index.readers.IntDictionary;
import com.linkedin.pinot.core.segment.index.readers.LongDictionary;
import com.linkedin.pinot.core.segment.index.readers.StringDictionary;
import com.linkedin.pinot.util.TestUtils;
import java.io.File;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.apache.avro.Schema.Field;
import org.apache.avro.file.DataFileStream;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.util.Utf8;
import org.apache.commons.io.FileUtils;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;


/**
 * Tests segment dictionaries: heap vs. mmap dictionary equivalence for a segment
 * built from sample Avro data, the pre-index statistics collectors for each data
 * type, and {@link SegmentDictionaryCreator} string-padding behavior.
 */
public class DictionariesTest {
  private static final String AVRO_DATA = "data/test_sample_data.avro";
  private static final File INDEX_DIR = new File(DictionariesTest.class.toString());

  // Column name -> every unique value observed in the raw Avro data
  // (nulls replaced by the per-type sentinel, see getAppropriateType).
  static Map<String, Set<Object>> uniqueEntries;

  @AfterClass
  public static void cleanup() {
    FileUtils.deleteQuietly(INDEX_DIR);
  }

  /**
   * Builds a segment from the sample Avro file and records, per column, the set
   * of unique values in the source data so tests can compare them against the
   * dictionaries of the built segment.
   */
  @BeforeClass
  public static void before() throws Exception {
    final String filePath =
        TestUtils.getFileFromResourceUrl(DictionariesTest.class.getClassLoader().getResource(AVRO_DATA));
    if (INDEX_DIR.exists()) {
      FileUtils.deleteQuietly(INDEX_DIR);
    }

    // Build the segment under INDEX_DIR.
    final SegmentGeneratorConfig config = SegmentTestUtils
        .getSegmentGenSpecWithSchemAndProjectedColumns(new File(filePath), INDEX_DIR, "time_day", TimeUnit.DAYS, "test");
    final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
    driver.init(config);
    driver.build();

    // Collect the unique values of every column from the raw Avro data.
    final Schema schema = AvroUtils.extractSchemaFromAvro(new File(filePath));
    final DataFileStream<GenericRecord> avroReader = AvroUtils.getAvroReader(new File(filePath));
    final org.apache.avro.Schema avroSchema = avroReader.getSchema();
    final String[] columns = new String[avroSchema.getFields().size()];
    int i = 0;
    for (final Field f : avroSchema.getFields()) {
      columns[i] = f.name();
      i++;
    }

    uniqueEntries = new HashMap<String, Set<Object>>();
    for (final String column : columns) {
      uniqueEntries.put(column, new HashSet<Object>());
    }

    while (avroReader.hasNext()) {
      final GenericRecord rec = avroReader.next();
      for (final String column : columns) {
        Object val = rec.get(column);
        if (val instanceof Utf8) {
          // Avro strings arrive as Utf8; normalize to java.lang.String.
          val = ((Utf8) val).toString();
        }
        uniqueEntries.get(column).add(getAppropriateType(schema.getFieldSpecFor(column).getDataType(), val));
      }
    }
  }

  /**
   * Maps a null raw value to the per-type null sentinel used by the segment
   * format; non-null values are returned unchanged.
   */
  private static Object getAppropriateType(DataType spec, Object val) {
    if (val == null) {
      switch (spec) {
        case DOUBLE:
          return V1Constants.Numbers.NULL_DOUBLE;
        case FLOAT:
          return V1Constants.Numbers.NULL_FLOAT;
        case INT:
          return V1Constants.Numbers.NULL_INT;
        case LONG:
          return V1Constants.Numbers.NULL_LONG;
        default:
          return V1Constants.Str.NULL_STRING;
      }
    }
    return val;
  }

  /**
   * Verifies that heap-loaded and mmap-loaded segments expose dictionaries of the
   * expected concrete type per column, with identical length and contents.
   */
  @Test
  public void test1() throws Exception {
    final IndexSegmentImpl heapSegment = (IndexSegmentImpl) ColumnarSegmentLoader.load(INDEX_DIR, ReadMode.heap);
    final IndexSegmentImpl mmapSegment = (IndexSegmentImpl) ColumnarSegmentLoader.load(INDEX_DIR, ReadMode.mmap);

    for (final String column : ((SegmentMetadataImpl) mmapSegment.getSegmentMetadata()).getColumnMetadataMap().keySet()) {
      final ImmutableDictionaryReader heapDictionary = heapSegment.getDictionaryFor(column);
      final ImmutableDictionaryReader mmapDictionary = mmapSegment.getDictionaryFor(column);

      switch (((SegmentMetadataImpl) mmapSegment.getSegmentMetadata()).getColumnMetadataMap().get(column).getDataType()) {
        case BOOLEAN:
        case STRING:
          // booleans are stored as strings in the dictionary
          Assert.assertTrue(heapDictionary instanceof StringDictionary);
          Assert.assertTrue(mmapDictionary instanceof StringDictionary);
          break;
        case DOUBLE:
          Assert.assertTrue(heapDictionary instanceof DoubleDictionary);
          Assert.assertTrue(mmapDictionary instanceof DoubleDictionary);
          break;
        case FLOAT:
          Assert.assertTrue(heapDictionary instanceof FloatDictionary);
          Assert.assertTrue(mmapDictionary instanceof FloatDictionary);
          break;
        case LONG:
          Assert.assertTrue(heapDictionary instanceof LongDictionary);
          Assert.assertTrue(mmapDictionary instanceof LongDictionary);
          break;
        case INT:
          Assert.assertTrue(heapDictionary instanceof IntDictionary);
          Assert.assertTrue(mmapDictionary instanceof IntDictionary);
          break;
      }

      // Both read modes must yield byte-identical dictionary contents.
      Assert.assertEquals(mmapDictionary.length(), heapDictionary.length());
      for (int i = 0; i < heapDictionary.length(); i++) {
        Assert.assertEquals(mmapDictionary.get(i), heapDictionary.get(i));
      }
    }
  }

  /**
   * Verifies that every unique raw value resolves to the same dictionary index in
   * heap and mmap mode, and (except for the "pageKey" column) is actually present.
   */
  @Test
  public void test2() throws Exception {
    final IndexSegmentImpl heapSegment = (IndexSegmentImpl) ColumnarSegmentLoader.load(INDEX_DIR, ReadMode.heap);
    final IndexSegmentImpl mmapSegment = (IndexSegmentImpl) ColumnarSegmentLoader.load(INDEX_DIR, ReadMode.mmap);

    final Map<String, ColumnMetadata> metadataMap =
        ((SegmentMetadataImpl) mmapSegment.getSegmentMetadata()).getColumnMetadataMap();
    for (final String column : metadataMap.keySet()) {
      final ImmutableDictionaryReader heapDictionary = heapSegment.getDictionaryFor(column);
      final ImmutableDictionaryReader mmapDictionary = mmapSegment.getDictionaryFor(column);

      final Set<Object> uniques = uniqueEntries.get(column);
      final List<Object> list = Arrays.asList(uniques.toArray());
      Collections.shuffle(list); // lookup order must not matter
      for (final Object entry : list) {
        Assert.assertEquals(mmapDictionary.indexOf(entry), heapDictionary.indexOf(entry));
        if (!column.equals("pageKey")) {
          Assert.assertFalse(heapDictionary.indexOf(entry) < 0);
          Assert.assertFalse(mmapDictionary.indexOf(entry) < 0);
        }
      }
    }
  }

  /**
   * Shared scenario for all numeric stats collectors: feeds the sequence
   * 1, 2, 3, 4, 4, 2, 40, 20 (mixed boxed types), checking that the collector
   * reports sorted input until the first out-of-order value, then cardinality 6
   * and min/max 1/40 after sealing.
   */
  private static void checkNumericStatsCollector(AbstractColumnStatisticsCollector statsCollector) {
    statsCollector.collect(1);
    Assert.assertTrue(statsCollector.isSorted());
    statsCollector.collect(2f);
    Assert.assertTrue(statsCollector.isSorted());
    statsCollector.collect(3L);
    Assert.assertTrue(statsCollector.isSorted());
    statsCollector.collect(4d);
    Assert.assertTrue(statsCollector.isSorted());
    statsCollector.collect(4);
    Assert.assertTrue(statsCollector.isSorted());
    statsCollector.collect(2f); // first out-of-order value
    Assert.assertFalse(statsCollector.isSorted());
    statsCollector.collect(40d);
    Assert.assertFalse(statsCollector.isSorted());
    statsCollector.collect(20d);
    Assert.assertFalse(statsCollector.isSorted());
    statsCollector.seal();
    Assert.assertEquals(statsCollector.getCardinality(), 6);
    Assert.assertEquals(((Number) statsCollector.getMinValue()).intValue(), 1);
    Assert.assertEquals(((Number) statsCollector.getMaxValue()).intValue(), 40);
    Assert.assertFalse(statsCollector.isSorted());
  }

  @Test
  public void testIntColumnPreIndexStatsCollector() throws Exception {
    FieldSpec spec = new DimensionFieldSpec("column1", DataType.INT, true);
    checkNumericStatsCollector(new IntColumnPreIndexStatsCollector(spec));
  }

  @Test
  public void testFloatColumnPreIndexStatsCollector() throws Exception {
    FieldSpec spec = new DimensionFieldSpec("column1", DataType.FLOAT, true);
    checkNumericStatsCollector(new FloatColumnPreIndexStatsCollector(spec));
  }

  @Test
  public void testLongColumnPreIndexStatsCollector() throws Exception {
    FieldSpec spec = new DimensionFieldSpec("column1", DataType.LONG, true);
    checkNumericStatsCollector(new LongColumnPreIndexStatsCollector(spec));
  }

  @Test
  public void testDoubleColumnPreIndexStatsCollector() throws Exception {
    FieldSpec spec = new DimensionFieldSpec("column1", DataType.DOUBLE, true);
    checkNumericStatsCollector(new DoubleColumnPreIndexStatsCollector(spec));
  }

  @Test
  public void testStringColumnPreIndexStatsCollectorForRandomString() throws Exception {
    FieldSpec spec = new DimensionFieldSpec("column1", DataType.STRING, true);
    AbstractColumnStatisticsCollector statsCollector = new StringColumnPreIndexStatsCollector(spec);
    statsCollector.collect("a");
    Assert.assertTrue(statsCollector.isSorted());
    statsCollector.collect("b");
    Assert.assertTrue(statsCollector.isSorted());
    statsCollector.collect("c");
    Assert.assertTrue(statsCollector.isSorted());
    statsCollector.collect("d");
    Assert.assertTrue(statsCollector.isSorted());
    statsCollector.collect("d");
    Assert.assertTrue(statsCollector.isSorted());
    statsCollector.collect("b"); // first out-of-order value
    Assert.assertFalse(statsCollector.isSorted());
    statsCollector.collect("z");
    Assert.assertFalse(statsCollector.isSorted());
    statsCollector.collect("u");
    Assert.assertFalse(statsCollector.isSorted());
    statsCollector.seal();
    Assert.assertEquals(statsCollector.getCardinality(), 6);
    Assert.assertEquals((statsCollector.getMinValue()).toString(), "a");
    Assert.assertEquals((statsCollector.getMaxValue()).toString(), "z");
    Assert.assertFalse(statsCollector.isSorted());
  }

  @Test
  public void testStringColumnPreIndexStatsCollectorForBoolean() throws Exception {
    FieldSpec spec = new DimensionFieldSpec("column1", DataType.BOOLEAN, true);
    AbstractColumnStatisticsCollector statsCollector = new StringColumnPreIndexStatsCollector(spec);
    statsCollector.collect("false");
    Assert.assertTrue(statsCollector.isSorted());
    statsCollector.collect("false");
    Assert.assertTrue(statsCollector.isSorted());
    statsCollector.collect("false");
    Assert.assertTrue(statsCollector.isSorted());
    statsCollector.collect("true");
    Assert.assertTrue(statsCollector.isSorted());
    statsCollector.collect("true");
    Assert.assertTrue(statsCollector.isSorted());
    statsCollector.collect("false"); // first out-of-order value
    Assert.assertFalse(statsCollector.isSorted());
    statsCollector.collect("false");
    Assert.assertFalse(statsCollector.isSorted());
    statsCollector.collect("true");
    Assert.assertFalse(statsCollector.isSorted());
    statsCollector.seal();
    Assert.assertEquals(statsCollector.getCardinality(), 2);
    Assert.assertEquals((statsCollector.getMinValue()).toString(), "false");
    Assert.assertEquals((statsCollector.getMaxValue()).toString(), "true");
    Assert.assertFalse(statsCollector.isSorted());
  }

  /**
   * Tests DictionaryCreator for case when one value is a substring of another.
   * For example, in case of sorted values {"abc", "abc def"} after padding,
   * the sorted order would change to {"abc def%%%%", "abc%%%%%%%"}
   *
   * This test asserts that DictionaryCreator.indexOfSV("abc") returns 1 (ie index of "abc%%%%%%%"
   * in actual padded dictionary), and not 0.
   *
   * @throws Exception
   */
  @Test
  public void testStringsValuesWithPadding() throws Exception {
    File indexDir = new File("/tmp/dict.test");
    FieldSpec fieldSpec = new DimensionFieldSpec("test", DataType.STRING, true, "\t");

    String[] inputStrings = new String[2];
    String[] paddedStrings = new String[2];
    inputStrings[0] = "abc def";
    inputStrings[1] = "abc";
    Arrays.sort(inputStrings); // Sorted order: {"abc", "abc def"}

    try {
      SegmentDictionaryCreator dictionaryCreator =
          new SegmentDictionaryCreator(false, inputStrings, fieldSpec, indexDir);
      boolean[] isSorted = new boolean[1];
      isSorted[0] = true;
      dictionaryCreator.build(isSorted);
      // Padding re-orders the values, so the input can no longer be considered sorted.
      Assert.assertFalse(isSorted[0]);

      // Get the padded strings as stored in the dictionary.
      int targetPaddedLength = dictionaryCreator.getStringColumnMaxLength();
      for (int i = 0; i < inputStrings.length; i++) {
        paddedStrings[i] = SegmentDictionaryCreator.getPaddedString(inputStrings[i], targetPaddedLength);
      }
      Arrays.sort(paddedStrings); // Sorted Order: {"abc def%%%%", "abc%%%%%%%"}

      // Assert that indexOfSV for un-padded string returns the index of the corresponding padded string.
      for (int i = 0; i < inputStrings.length; i++) {
        int paddedIndex = dictionaryCreator.indexOfSV(inputStrings[i]);
        Assert.assertEquals(paddedStrings[paddedIndex],
            SegmentDictionaryCreator.getPaddedString(inputStrings[i], targetPaddedLength));
      }
      dictionaryCreator.close();
    } finally {
      FileUtils.deleteQuietly(indexDir);
    }
  }

  /**
   * Tests SegmentDictionaryCreator for case when there is only one string
   * and it is empty.
   *
   * This test asserts that the padded length of the empty string is 1
   * in actual padded dictionary), and not 0.
   *
   * @throws Exception
   */
  @Test
  public void testSingleEmptyString() throws Exception {
    File indexDir = new File("/tmp/dict.test");
    FieldSpec fieldSpec = new DimensionFieldSpec("test", DataType.STRING, true, "\t");

    String[] inputStrings = new String[1];
    String[] paddedStrings = new String[1];
    try {
      inputStrings[0] = "";
      Arrays.sort(inputStrings); // Sorted order: {""}

      SegmentDictionaryCreator dictionaryCreator =
          new SegmentDictionaryCreator(false, inputStrings, fieldSpec, indexDir);
      boolean[] isSorted = new boolean[1];
      isSorted[0] = true;
      dictionaryCreator.build(isSorted);

      // The empty string must be padded to length 1, not 0.
      int targetPaddedLength = dictionaryCreator.getStringColumnMaxLength();
      Assert.assertEquals(targetPaddedLength, 1);
      for (int i = 0; i < inputStrings.length; i++) {
        paddedStrings[i] = SegmentDictionaryCreator.getPaddedString(inputStrings[i], targetPaddedLength);
      }
      Arrays.sort(paddedStrings); // Sorted Order: {"%"}

      // Assert that indexOfSV for un-padded string returns the index of the corresponding padded string.
      for (int i = 0; i < inputStrings.length; i++) {
        int paddedIndex = dictionaryCreator.indexOfSV(inputStrings[i]);
        Assert.assertEquals(paddedStrings[paddedIndex],
            SegmentDictionaryCreator.getPaddedString(inputStrings[i], targetPaddedLength));
      }

      // Verify that the empty string got padded.
      Assert.assertEquals(paddedStrings[0], "%");
      dictionaryCreator.close();
    } finally {
      FileUtils.deleteQuietly(indexDir);
    }
  }

  /**
   * Tests SegmentDictionaryCreator for case when there is one empty string
   * and a string with a single padding character: after padding both collapse
   * to "%", which the creator must report as an error.
   *
   * @throws Exception
   */
  @Test
  public void testPaddedConflict() throws Exception {
    File indexDir = new File("/tmp/dict.test");
    FieldSpec fieldSpec = new DimensionFieldSpec("test", DataType.STRING, true, "\t");

    String[] inputStrings = new String[2];
    try {
      inputStrings[0] = "";
      inputStrings[1] = "%";
      Arrays.sort(inputStrings); // Sorted order: {"", "%"}

      SegmentDictionaryCreator dictionaryCreator =
          new SegmentDictionaryCreator(false, inputStrings, fieldSpec, indexDir);
      boolean[] isSorted = new boolean[1];
      isSorted[0] = true;
      dictionaryCreator.build(isSorted);
      // "" and "%" pad to the same dictionary entry, so build() must fail.
      Assert.fail("Expected build() to fail on conflicting padded values");
    } catch (Exception e) {
      Assert.assertEquals(e.getMessage(),
          "Number of entries in dictionary != number of unique values in the data in column test");
    } finally {
      FileUtils.deleteQuietly(indexDir);
    }
  }

  /**
   * Tests SegmentDictionaryCreator for case when there is only one string
   * and it is "null".
   *
   * This test asserts that the padded length of the null string is 4.
   *
   * @throws Exception
   */
  @Test
  public void testSingleNullString() throws Exception {
    File indexDir = new File("/tmp/dict.test");
    FieldSpec fieldSpec = new DimensionFieldSpec("test", DataType.STRING, true, "\t");

    String[] inputStrings = new String[1];
    String[] paddedStrings = new String[1];
    inputStrings[0] = "null";
    Arrays.sort(inputStrings); // Sorted order: {"null"}

    try {
      SegmentDictionaryCreator dictionaryCreator =
          new SegmentDictionaryCreator(false, inputStrings, fieldSpec, indexDir);
      boolean[] isSorted = new boolean[1];
      isSorted[0] = true;
      dictionaryCreator.build(isSorted);

      // The literal string "null" needs no padding: max length stays 4.
      int targetPaddedLength = dictionaryCreator.getStringColumnMaxLength();
      Assert.assertEquals(targetPaddedLength, 4);
      for (int i = 0; i < inputStrings.length; i++) {
        paddedStrings[i] = SegmentDictionaryCreator.getPaddedString(inputStrings[i], targetPaddedLength);
      }
      Arrays.sort(paddedStrings); // Sorted Order: {"null"}

      // Assert that indexOfSV for un-padded string returns the index of the corresponding padded string.
      for (int i = 0; i < inputStrings.length; i++) {
        int paddedIndex = dictionaryCreator.indexOfSV(inputStrings[i]);
        Assert.assertEquals(paddedStrings[paddedIndex],
            SegmentDictionaryCreator.getPaddedString(inputStrings[i], targetPaddedLength));
      }

      // Verify that the string "null" did not get changed.
      Assert.assertEquals(paddedStrings[0], "null");
      dictionaryCreator.close();
    } finally {
      FileUtils.deleteQuietly(indexDir);
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.exec.planner; import java.util.ArrayList; import java.util.Arrays; import java.util.BitSet; import java.util.Collection; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import org.apache.drill.common.util.GuavaUtils; import org.apache.drill.shaded.guava.com.google.common.base.Charsets; import org.apache.drill.shaded.guava.com.google.common.base.Preconditions; import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableList; import org.apache.drill.shaded.guava.com.google.common.collect.Lists; import org.apache.drill.shaded.guava.com.google.common.collect.Maps; import org.apache.calcite.adapter.enumerable.EnumerableTableScan; import org.apache.calcite.prepare.RelOptTableImpl; import org.apache.calcite.rel.core.TableScan; import org.apache.calcite.util.BitSets; import org.apache.calcite.util.Pair; import org.apache.drill.common.expression.SchemaPath; import org.apache.drill.common.types.TypeProtos; import org.apache.drill.common.types.Types; import org.apache.drill.exec.physical.base.FileGroupScan; import org.apache.drill.exec.planner.logical.DirPrunedEnumerableTableScan; import 
org.apache.drill.exec.planner.logical.DrillRel;
import org.apache.drill.exec.planner.logical.DrillScanRel;
import org.apache.drill.exec.planner.logical.DrillTable;
import org.apache.drill.exec.planner.logical.DrillTranslatableTable;
import org.apache.drill.exec.planner.logical.DynamicDrillTable;
import org.apache.drill.exec.planner.physical.PlannerSettings;
import org.apache.drill.exec.store.dfs.FileSelection;
import org.apache.drill.exec.store.dfs.FormatSelection;
import org.apache.drill.exec.store.dfs.MetadataContext;
import org.apache.drill.exec.vector.NullableVarCharVector;
import org.apache.drill.exec.vector.ValueVector;

/**
 * Partition descriptor for file system based tables: partitions are the nested
 * sub-directories (named "&lt;label&gt;0" .. "&lt;label&gt;9", typically dir0..dir9)
 * under the table's selection root.
 */
public class FileSystemPartitionDescriptor extends AbstractPartitionDescriptor {

  // allow up to 10 nested sub-directories
  static final int MAX_NESTED_SUBDIRS = 10;

  private final String partitionLabel;
  private final int partitionLabelLength;
  // recognized partition column name -> hierarchy level (e.g. "dir1" -> 1)
  private final Map<String, Integer> partitions = Maps.newHashMap();
  private final TableScan scanRel;
  private final DrillTable table;

  public FileSystemPartitionDescriptor(PlannerSettings settings, TableScan scanRel) {
    Preconditions.checkArgument(scanRel instanceof DrillScanRel || scanRel instanceof EnumerableTableScan);
    this.partitionLabel = settings.getFsPartitionColumnLabel();
    this.partitionLabelLength = partitionLabel.length();
    this.scanRel = scanRel;
    DrillTable unwrap;
    unwrap = scanRel.getTable().unwrap(DrillTable.class);
    if (unwrap == null) {
      unwrap = scanRel.getTable().unwrap(DrillTranslatableTable.class).getDrillTable();
    }
    table = unwrap;

    // Register the recognized partition column names (dir0 .. dir9).
    // Was a hard-coded literal 10; use MAX_NESTED_SUBDIRS so the two stay in sync.
    for (int i = 0; i < MAX_NESTED_SUBDIRS; i++) {
      partitions.put(partitionLabel + i, i);
    }
  }

  @Override
  public int getPartitionHierarchyIndex(String partitionName) {
    String suffix = partitionName.substring(partitionLabelLength); // get the numeric suffix from 'dir<N>'
    return Integer.parseInt(suffix);
  }

  @Override
  public boolean isPartitionName(String name) {
    return partitions.containsKey(name);
  }

  /** Returns the hierarchy level for a partition column name, or null if unknown. */
  @Override
  public Integer getIdIfValid(String name) {
    return partitions.get(name);
  }

  @Override
  public int getMaxHierarchyLevel() {
    return MAX_NESTED_SUBDIRS;
  }

  public DrillTable getTable() {
    return table;
  }

  /**
   * Fills one VARCHAR vector per selected partition column with each location's
   * directory value; null when the location has no directory at that depth.
   * NOTE: the {@code partitions} parameter (locations) shadows the field of the
   * same name (column map) — they are unrelated.
   */
  @Override
  public void populatePartitionVectors(ValueVector[] vectors, List<PartitionLocation> partitions,
                                       BitSet partitionColumnBitSet, Map<Integer, String> fieldNameMap) {
    int record = 0;
    for (PartitionLocation partitionLocation : partitions) {
      for (int partitionColumnIndex : BitSets.toIter(partitionColumnBitSet)) {
        if (partitionLocation.getPartitionValue(partitionColumnIndex) == null) {
          // set null if dirX does not exist for the location.
          ((NullableVarCharVector) vectors[partitionColumnIndex]).getMutator().setNull(record);
        } else {
          byte[] bytes = (partitionLocation.getPartitionValue(partitionColumnIndex)).getBytes(Charsets.UTF_8);
          ((NullableVarCharVector) vectors[partitionColumnIndex]).getMutator().setSafe(record, bytes, 0, bytes.length);
        }
      }
      record++;
    }

    for (ValueVector v : vectors) {
      if (v == null) {
        continue;
      }
      v.getMutator().setValueCount(partitions.size());
    }
  }

  @Override
  public TypeProtos.MajorType getVectorType(SchemaPath column, PlannerSettings plannerSettings) {
    // partition (directory) values are always nullable VARCHAR
    return Types.optional(TypeProtos.MinorType.VARCHAR);
  }

  public String getName(int index) {
    return partitionLabel + index;
  }

  @Override
  public String getBaseTableLocation() {
    final FormatSelection origSelection = (FormatSelection) table.getSelection();
    return origSelection.getSelection().selectionRoot;
  }

  @Override
  protected void createPartitionSublists() {
    final Pair<Collection<String>, Boolean> fileLocationsAndStatus = getFileLocationsAndStatus();
    List<PartitionLocation> locations = new LinkedList<>();
    boolean hasDirsOnly = fileLocationsAndStatus.right;

    final String selectionRoot = getBaseTableLocation();

    // Map the partition keys (dir0, dir1, ..) to the list of partitions that share
    // the same keys. For example,
    //   1990/Q1/1.parquet, 2.parquet
    // would have <1990, Q1> as key, and the partition locations for 1.parquet and
    // 2.parquet as value.
    HashMap<List<String>, List<PartitionLocation>> dirToFileMap = new HashMap<>();

    // Figure out the list of leaf subdirectories. For each leaf subdirectory, find
    // the list of files (DFSFilePartitionLocation) it contains.
    for (String file : fileLocationsAndStatus.left) {
      DFSFilePartitionLocation dfsFilePartitionLocation =
          new DFSFilePartitionLocation(MAX_NESTED_SUBDIRS, selectionRoot, file, hasDirsOnly);

      final String[] dirs = dfsFilePartitionLocation.getDirs();
      final List<String> dirList = Arrays.asList(dirs);

      if (!dirToFileMap.containsKey(dirList)) {
        dirToFileMap.put(dirList, new ArrayList<PartitionLocation>());
      }
      dirToFileMap.get(dirList).add(dfsFilePartitionLocation);
    }

    // Build one DFSDirPartitionLocation per group of shared partition keys.
    // toArray(new String[...]) instead of a cast of toArray(): List.toArray()
    // returns Object[], so the old (String[]) cast fails on Java 9+.
    for (final Map.Entry<List<String>, List<PartitionLocation>> entry : dirToFileMap.entrySet()) {
      final List<String> dirs = entry.getKey();
      locations.add(new DFSDirPartitionLocation(dirs.toArray(new String[dirs.size()]), entry.getValue()));
    }

    locationSuperList = Lists.partition(locations, PartitionDescriptor.PARTITION_BATCH_SIZE);
    sublistsCreated = true;
  }

  /**
   * Returns the scan's file locations together with a flag telling whether the
   * underlying selection is only partially expanded (directories, not files).
   */
  protected Pair<Collection<String>, Boolean> getFileLocationsAndStatus() {
    Collection<String> fileLocations = null;
    boolean isExpandedPartial = false;
    if (scanRel instanceof DrillScanRel) {
      // If a particular GroupScan provides files, get the list of files from there
      // rather than from DrillTable, because the GroupScan has the updated version
      // of the selection.
      final DrillScanRel drillScan = (DrillScanRel) scanRel;
      if (drillScan.getGroupScan().hasFiles()) {
        fileLocations = drillScan.getGroupScan().getFiles();
        isExpandedPartial = false;
      } else {
        FileSelection selection = ((FormatSelection) table.getSelection()).getSelection();
        fileLocations = selection.getFiles();
        isExpandedPartial = selection.isExpandedPartial();
      }
    } else if (scanRel instanceof EnumerableTableScan) {
      FileSelection selection = ((FormatSelection) table.getSelection()).getSelection();
      fileLocations = selection.getFiles();
      isExpandedPartial = selection.isExpandedPartial();
    }
    return Pair.of(fileLocations, isExpandedPartial);
  }

  /**
   * Creates a new scan limited to the given (pruned) partition locations,
   * expanding composite partitions into their individual files.
   */
  @Override
  public TableScan createTableScan(List<PartitionLocation> newPartitionLocation, String cacheFileRoot,
                                   boolean wasAllPartitionsPruned, MetadataContext metaContext) throws Exception {
    List<String> newFiles = Lists.newArrayList();
    for (final PartitionLocation location : newPartitionLocation) {
      if (!location.isCompositePartition()) {
        newFiles.add(location.getEntirePartitionLocation());
      } else {
        final Collection<SimplePartitionLocation> subPartitions = location.getPartitionLocationRecursive();
        for (final PartitionLocation subPart : subPartitions) {
          newFiles.add(subPart.getEntirePartitionLocation());
        }
      }
    }

    if (scanRel instanceof DrillScanRel) {
      final FormatSelection formatSelection = (FormatSelection) table.getSelection();
      final FileSelection newFileSelection = new FileSelection(null, newFiles, getBaseTableLocation(),
          cacheFileRoot, wasAllPartitionsPruned, formatSelection.getSelection().getDirStatus());
      newFileSelection.setMetaContext(metaContext);
      final FileGroupScan newGroupScan =
          ((FileGroupScan) ((DrillScanRel) scanRel).getGroupScan()).clone(newFileSelection);
      return new DrillScanRel(scanRel.getCluster(),
          scanRel.getTraitSet().plus(DrillRel.DRILL_LOGICAL),
          scanRel.getTable(),
          newGroupScan,
          scanRel.getRowType(),
          ((DrillScanRel) scanRel).getColumns(),
          true /*filter pushdown*/);
    } else if (scanRel instanceof EnumerableTableScan) {
      return createNewTableScanFromSelection((EnumerableTableScan) scanRel, newFiles, cacheFileRoot,
          wasAllPartitionsPruned, metaContext);
    } else {
      throw new UnsupportedOperationException("Only DrillScanRel and EnumerableTableScan is allowed!");
    }
  }

  private TableScan createNewTableScanFromSelection(EnumerableTableScan oldScan, List<String> newFiles,
      String cacheFileRoot, boolean wasAllPartitionsPruned, MetadataContext metaContext) {
    final RelOptTableImpl t = (RelOptTableImpl) oldScan.getTable();
    final FormatSelection formatSelection = (FormatSelection) table.getSelection();
    final FileSelection newFileSelection = new FileSelection(null, newFiles, getBaseTableLocation(),
        cacheFileRoot, wasAllPartitionsPruned, formatSelection.getSelection().getDirStatus());
    newFileSelection.setMetaContext(metaContext);
    final FormatSelection newFormatSelection = new FormatSelection(formatSelection.getFormat(), newFileSelection);
    final DrillTranslatableTable newTable = new DrillTranslatableTable(
        new DynamicDrillTable(table.getPlugin(), table.getStorageEngineName(),
            table.getUserName(), newFormatSelection));
    final RelOptTableImpl newOptTableImpl = RelOptTableImpl.create(t.getRelOptSchema(), t.getRowType(), newTable,
        GuavaUtils.convertToUnshadedImmutableList(ImmutableList.of()));

    // return an EnumerableTableScan with fileSelection being part of digest of TableScan node.
    return DirPrunedEnumerableTableScan.create(oldScan.getCluster(), newOptTableImpl, newFileSelection.toString());
  }

  @Override
  public TableScan createTableScan(List<PartitionLocation> newPartitionLocation,
                                   boolean wasAllPartitionsPruned) throws Exception {
    return createTableScan(newPartitionLocation, null, wasAllPartitionsPruned, null);
  }

  @Override
  public boolean supportsMetadataCachePruning() {
    final Object selection = this.table.getSelection();
    return selection instanceof FormatSelection
        && ((FormatSelection) selection).getSelection().getCacheFileRoot() != null;
  }
}
package de.fhg.fokus.ims.core; import gov.nist.core.CommonLogger; import gov.nist.core.StackLogger; import java.net.InetAddress; import java.util.Timer; import javax.ims.ConnectionState; import javax.ims.ImsException; import javax.ims.ServiceClosedException; import javax.ims.core.Capabilities; import javax.ims.core.CoreServiceListener; import javax.ims.core.Message; import javax.ims.core.PageMessage; import javax.ims.core.Publication; import javax.ims.core.Reference; import javax.ims.core.Session; import javax.ims.core.Subscription; import javax.sip.address.SipURI; import javax.sip.header.CallIdHeader; import javax.sip.header.EventHeader; import javax.sip.header.FromHeader; import javax.sip.message.Request; import javax.sip.message.Response; import org.vertx.java.core.logging.Logger; import org.vertx.java.core.logging.impl.LoggerFactory; import de.fhg.fokus.ims.ServiceImpl; import de.fhg.fokus.ims.core.media.BasicReliableMediaImpl; import de.fhg.fokus.ims.core.media.BasicUnreliableMediaImpl; import de.fhg.fokus.ims.core.media.FramedMediaImpl; import de.fhg.fokus.ims.core.media.MediaImpl; import de.fhg.fokus.ims.core.utils.SIPUtils; /** * Implementation of the CoreService Interface. * * @version JSR281-PUBLIC-REVIEW (subject to change). 
* * @author Cheambe Alice <alice.cheambe@fraunhofer.fokus.de> * @author Andreas Bachmann (andreas.bachmann@fokus.fraunhofer.de) */ public class CoreServiceImpl extends ServiceImpl implements CoreService2, IMSManagerListener { private static Logger LOGGER = LoggerFactory.getLogger(CoreServiceImpl.class); /** * Reference to the manager, responsible for this service */ private IMSManager manager; /** * The listener assigned to this service */ private CoreServiceListener listener; private String localUserId; private String id; /* Service session containers */ private SessionContainer sessionContainer = new SessionContainer(); private PageMessageContainer messageContainer = new PageMessageContainer(); private SubscriptionContainer subscriptionsContainer = new SubscriptionContainer(); private PublicationContainer publicationContainer = new PublicationContainer(); private CapabilityContainer capabilityContainer = new CapabilityContainer(); private StreamMediaFactoryBase streamMediaFactory; private boolean closed = false; private ReferenceContainer referenceContainer = new ReferenceContainer(); private FeatureTagSet featureSet; private boolean defaultCoreService = false; public boolean isDefaultCoreService() { return defaultCoreService; } public void setDefaultCoreService(boolean defaultCoreService) { this.defaultCoreService = defaultCoreService; } /** * Gets a timer instance, that can be used for scheduling the * refreshing/retry/terminating tasks. 
*/ public Timer getTimer() { return manager.getTimer(); } public CoreServiceImpl(IMSManager manager, String coreServiceId, String appId, String userId) { super(appId, "imscore"); this.manager = manager; this.localUserId = userId; this.id = coreServiceId; } public String getId() { return id; } public FeatureTagSet getFeatureSet() { return featureSet; } public void setFeatureTagSet(FeatureTagSet featureSet) { this.featureSet = featureSet; } public IMSManager getManager() { return manager; } public boolean isConnected() { return ConnectionState.getConnectionState().isConnected(); } /** * Computes the public IP address of the device * * Mechanism; opens a socket to the pCSCF and if that fails, it tries to * open the socket to the xdms In case that fails, then it returns the local * host */ public InetAddress getLocalEndpoint() { return manager.getLocalAddress(); } public String getServiceRoute() { StringBuffer buffer = new StringBuffer(); // TODO: get service route from manager return buffer.toString(); } public ReferenceContainer getReferences() { return referenceContainer; } public SubscriptionContainer getSubscriptions() { return subscriptionsContainer; } public void close() { if (closed) return; try { sessionContainer.close(); sessionContainer = null; publicationContainer.close(); publicationContainer = null; Unsubscriber unsubscriber = new Unsubscriber(subscriptionsContainer); unsubscriber.run(); } catch (Exception e) { LOGGER.error(e.getMessage(), e); } finally { closed = true; manager.closed(this); if (listener != null) listener.serviceClosed(this); } } /***************** core service implementation ********************** */ public Capabilities createCapabilities(String fromUserId, String toUserId) throws IllegalArgumentException, ServiceClosedException, ImsException { if (closed) throw new ServiceClosedException("Service is closed"); try { CapabilitiesImpl cap = new CapabilitiesImpl(this, capabilityContainer, toUserId, fromUserId); return cap; } catch 
(ImsException e) { LOGGER.error("there has been an error in creating the capability object"); LOGGER.error(e.getMessage(), e); throw e; } } public PageMessage createPageMessage(String fromUserId, String toUserId) throws ServiceClosedException { if (closed) throw new ServiceClosedException("Service is closed"); if (!ConnectionState.getConnectionState().isConnected()) throw new IllegalStateException("The core is not connected to the IMS network"); try { return new PageMessageImpl(this, messageContainer, toUserId, fromUserId); } catch (ImsException e) { LOGGER.error(e.getMessage(), e); return null; } } public Publication createPublication(String fromUserId, String toUserId, String event) throws IllegalArgumentException, ServiceClosedException, ImsException { if (closed) throw new ServiceClosedException("Service is closed"); Publication pub = (Publication) publicationContainer.get(event); if (pub == null) { try { pub = new PublicationImpl(this, publicationContainer, fromUserId, toUserId, event); return pub; } catch (ImsException e) { LOGGER.error("there has been an error in creating the publication object"); LOGGER.error(e.getMessage(), e); throw e; } } else return pub; } public Reference createReference(String fromUserId, String toUserId, String referToUserId, String referMethod) throws ServiceClosedException { if (closed) throw new ServiceClosedException("Service is closed"); try { return new ReferenceImpl(this, referenceContainer, toUserId, referMethod, referToUserId, fromUserId); } catch (ImsException e) { LOGGER.error(e.getMessage(), e); return null; } } public Session createSession(String fromUserId, String toUserId) throws ServiceClosedException, ImsException { if (closed) throw new ServiceClosedException("Service is closed"); if (!ConnectionState.getConnectionState().isConnected()) throw new IllegalStateException("The core is not connected to the IMS network"); if (toUserId == null) throw new IllegalArgumentException("CoreServiceImpl.createSession(): The remote 
uri cannot be null"); return new SessionImpl(this, sessionContainer, toUserId, fromUserId); } public Subscription createSubscription(String fromUserId, String toUserId, String event) throws ServiceClosedException, ImsException { if (closed) throw new ServiceClosedException("Service is closed"); if (!ConnectionState.getConnectionState().isConnected()) throw new IllegalStateException("The core is not connected to the IMS network"); if (event == null) throw new IllegalArgumentException("Event must not be null"); //XXX this prevented creating more than one subscription per user+event // SubscriptionImpl subscription = subscriptionsContainer.get(toUserId, event); // if (subscription != null) // return subscription; LOGGER.info("Creating subscription for: {}" + toUserId); SubscriptionImpl subscription = new SubscriptionImpl(this, subscriptionsContainer, fromUserId, toUserId, event); return subscription; } public String getLocalUserId() throws IllegalStateException { return localUserId; } public void setListener(CoreServiceListener listener) { if (listener == null) this.listener = null; else this.listener = listener; } public CoreServiceListener getListener() { return listener; } public void setStreamMediaFactory(StreamMediaFactoryBase streamMediaFactory) { this.streamMediaFactory = streamMediaFactory; } public void request(Request request) { String method = request.getMethod(); LOGGER.info("CoreServiceImpl: Incoming Request\r\n{"+request+"}"); LOGGER.info("> Incoming request: {"+method+"} {"+request.getRequestURI()+"} {"+ request.getHeader("From")+"}"); if (method.equalsIgnoreCase(Request.MESSAGE)) { handleIncomingMessage(request); } else if (method.equalsIgnoreCase(Request.NOTIFY)) { handleIncomingNotify(request); } else if (method.equalsIgnoreCase(Request.INVITE) || method.equalsIgnoreCase(Request.ACK) || method.equalsIgnoreCase(Request.BYE) || method.equalsIgnoreCase(Request.CANCEL) || method.equalsIgnoreCase(Request.PRACK)) { handleIncomingSessionResquest(request); } 
else if (method.equals(Request.REFER)) { handleIncomingReferRequest(request); } else if (Request.OPTIONS.equals(method)) { handleIncomingOptionsRequest(request); } else if (method.equalsIgnoreCase(Request.INFO)) { LOGGER.info(method + " requests are not handled by ims client core."); handleIncomingInfoRequest(request); } else if (method.equals(Request.SUBSCRIBE)) { handleIncomingSubscribeRequest(request); } else { LOGGER.info(method + " requests are not handled by ims client core."); sendErrorResponse(request, Response.NOT_IMPLEMENTED); } } public void provisionalResponse(Response response) { Request request = manager.getRequest(response); if (request == null) { LOGGER.info("Received response but not request exists!"); return; } String method = request.getMethod(); if (method.equalsIgnoreCase(Request.INVITE) || method.equalsIgnoreCase(Request.ACK) || method.equalsIgnoreCase(Request.PRACK) || method.equalsIgnoreCase(Request.BYE)) { sessionContainer.dispatch(response, request); } } public void finalResponse(Response response) { Request request = manager.getRequest(response); if (request == null) { LOGGER.info("Received response but not request exists!"); return; } String method = request.getMethod(); if (method.equalsIgnoreCase(Request.MESSAGE)) { messageContainer.dispatch(response, request); } else if (method.equalsIgnoreCase(Request.PUBLISH)) { publicationContainer.dispatch(response, request); } else if (method.equalsIgnoreCase(Request.SUBSCRIBE) || method.equals(Request.NOTIFY)) { SessionImpl session = null; if (sessionContainer != null) session = sessionContainer.get(request); if (session != null) { if (((EventHeader) request.getHeader("Event")).getEventType().equals("refer")) { } else { session.getSubscriptions().dispatch(response, request); } } else subscriptionsContainer.dispatch(response, request); } else if (method.equalsIgnoreCase(Request.INVITE) || method.equalsIgnoreCase(Request.ACK) || method.equalsIgnoreCase(Request.PRACK) || 
method.equalsIgnoreCase(Request.BYE)) { sessionContainer.dispatch(response, request); } else if (method.equalsIgnoreCase(Request.INFO)) LOGGER.info("INFO not yet implemented"); else if (method.equalsIgnoreCase(Request.REFER)) { SessionImpl session = sessionContainer.get(request); if (session == null) referenceContainer.dispatch(response, request); else session.getOutgoingReference().notifyResponse(response); } else if (method.equalsIgnoreCase(Request.OPTIONS)) { CapabilitiesImpl capabilities = capabilityContainer.get(request); if(capabilities == null) capabilityContainer.dispatch(response, request); // else // capabilities.getListener().capabilityQueryDelivered(capabilities); } else LOGGER.info("Responses for " + request.getMethod() + " are not handled." + response.getStatusCode() + " " + response.getReasonPhrase() + " received."); } public void timeout(Request request) { LOGGER.debug("Timeout for request: {"+request.getMethod()+"} {"+request.getRequestURI()+"}"); String method = request.getMethod(); if (method.equals(Request.PUBLISH)) publicationContainer.timeout(request); else if (method.equals(Request.SUBSCRIBE)) subscriptionsContainer.timeout(request); else if (method.equals(Request.MESSAGE)) messageContainer.timeout(request); else if (method.equals(Request.INVITE) || method.equals(Request.BYE) || method.equals(Request.CANCEL) || method.equals(Request.UPDATE)) sessionContainer.timeout(request); else if (method.equals(Request.REFER)) referenceContainer.timeout(request); } private void sendErrorResponse(Request request, int statusCode) { Response response; try { response = manager.createResponse(request, statusCode); manager.sendResponse(this, request, response); } catch (Exception e) { LOGGER.error(e.getMessage()); } } /* ********* handlers for incoming messages ************************** */ /** * Handles incoming page message request * @param request */ private void handleIncomingMessage(Request request) { LOGGER.debug("Handling incoming page message request"); 
PageMessageImpl pageMessage = null; try { pageMessage = new PageMessageImpl(this, null, request, null); pageMessage.sendResponse(Response.OK, "OK", null, null); } catch (Exception ex) { LOGGER.error(ex.getMessage(), ex); return; } if (listener != null) listener.pageMessageReceived(this, pageMessage); } private void handleIncomingNotify(Request request) { LOGGER.debug("Handling incoming notify request"); FromHeader fromHeader = (FromHeader) request.getHeader("From"); String toUserId = SIPUtils.getIdentity((SipURI) fromHeader.getAddress().getURI()); EventHeader eh = (EventHeader) request.getHeader(EventHeader.NAME); if (eh == null) { sendErrorResponse(request, 400); return; } String event = eh.getEventType().toString(); int i = event.indexOf(';'); if (i > -1) event = event.substring(0, i); if (event.equals("refer")) { LOGGER.debug("Handling NOTIFY for REFER request"); ReferenceImpl reference = referenceContainer.get(manager.getDialog(request)); if (reference == null) { SessionImpl session = sessionContainer.get(request); if (session == null || session.getOutgoingReference() == null) sendErrorResponse(request, Response.GONE); else { LOGGER.debug("Handling NOTIFY for session REFER"); session.getOutgoingReference().notifyRequest(request); } } else { LOGGER.debug("Handling NOTIFY for standalone REFER"); reference.notifyRequest(request); } } else { LOGGER.debug("Handling NOTIFY for SUBSCRIBE request"); SubscriptionImpl sub = null; if (sessionContainer != null) { SessionImpl session = sessionContainer.get(request); if (session != null) { sub = session.getSubscriptions().get(((CallIdHeader)request.getHeader("Call-ID")).getCallId()); if (sub != null) LOGGER.debug("Handling NOTIFY for session SUBSCRIBE"); else{ LOGGER.debug("could not get subscription from sessions"); } } else{ LOGGER.debug("Could not find session for subscription"); } } if (sub == null) { // XXX // sub = subscriptionsContainer.get(toUserId, event); sub = 
subscriptionsContainer.get(((CallIdHeader)request.getHeader("Call-ID")).getCallId()); if (sub != null) LOGGER.debug("Handling NOTIFY for session SUBSCRIBE"); } if (sub == null) { LOGGER.info("Received a NOTIFY for " + toUserId + " but I couldn't retrieve the subscription,"); sendErrorResponse(request, Response.GONE); return; } sub.notifyRequest(request); } } private void handleIncomingReferRequest(Request request) { try { ReferenceImpl reference = new ReferenceImpl(this, referenceContainer, request, null); SessionImpl session = sessionContainer.get(request); if (session != null) session.reference(reference); else if (listener != null) listener.referRequestReceived(this, reference); } catch (ImsException e) { LOGGER.error(e.getMessage(), e); } } /** * Handles incoming BYE, CANCEL, UPDATE, and INVITE requests * * @param request * - the incoming request * @param serverTransaction * - the server transaction of the request */ private void handleIncomingSessionResquest(Request request) { LOGGER.info("Handling incoming session request "+request.getMethod()); String method = request.getMethod(); SessionImpl session = sessionContainer.get(request); if (Request.INVITE.equalsIgnoreCase(method)) { try { if (session == null) { LOGGER.info("Creating new session object for incoming request"); session = new SessionImpl(this, sessionContainer, request, null); if (listener != null) listener.sessionInvitationReceived(this, session); return; } } catch (ImsException e) { LOGGER.error(e.getMessage(), e); } } if (session == null) { if (!Request.ACK.equals(method)) sendErrorResponse(request, Response.CALL_OR_TRANSACTION_DOES_NOT_EXIST); return; } LOGGER.info("Forwarding request to existing session"); session.notifyRequest(request); } private void handleIncomingOptionsRequest(Request request) { try { CapabilitiesImpl capabilities= null; if(capabilityContainer == null || (capabilities = capabilityContainer.get(request)) == null) { capabilities = new CapabilitiesImpl(this, 
capabilityContainer, request, null); capabilities.sendResponse(200, "OK", null, null); // if (listener != null && listener instanceof CoreServiceListenerAdapter) // ((CoreServiceListenerAdapter) listener).capabilityReceived(this, capabilities); return; } } catch (ImsException e) { LOGGER.error(e.getMessage(), e); } } private void handleIncomingSubscribeRequest(Request request) { try { SessionImpl session = null; if (sessionContainer == null || (session = sessionContainer.get(request)) == null) { SubscriptionImpl subscription = new SubscriptionImpl(this, this.subscriptionsContainer, request, null); if (listener != null && listener instanceof CoreServiceListener2) ((CoreServiceListener2) listener).subscriptionReceived(this, subscription); } else if (session != null) { SubscriptionImpl subscription = new SubscriptionImpl(this, session.getSubscriptions(), request, null); session.subscription(subscription); } } catch (ImsException e) { LOGGER.error(e.getMessage(), e); } } private void handleIncomingInfoRequest(Request request) { try { ServiceMethodImpl smethod = new ServiceMethodImpl(this, null, request, null) { protected MessageImpl createResponseMessage(Response response) { return new MessageImpl(Message.INFOMESSAGE_SEND, response); } protected MessageImpl createRequestMessage(Request request) { return null; } }; smethod.sendResponse(Response.OK, "OK", null, null); } catch (Exception ex) { LOGGER.error(ex.getMessage(), ex); return; } } /** * Creates new media object * * @param session * @param type * @return */ protected MediaImpl createMedia(SessionImpl session, String type) { if ("StreamMedia".equals(type)) { if(streamMediaFactory == null) return null; return this.streamMediaFactory.createMedia(session); } else if ("FramedMedia".equals(type)) return new FramedMediaImpl(session); else if ("BasicReliableMedia".equals(type)) return new BasicReliableMediaImpl(session); else if ("BasicUnreliableMedia".equals(type)) return new BasicUnreliableMediaImpl(session); return 
null; } }
/*
 * Copyright (C) 2004, 2005 Joe Walnes.
 * Copyright (C) 2006, 2007, 2009, 2010, 2011, 2014 XStream Committers.
 * All rights reserved.
 *
 * The software in this package is published under the terms of the BSD
 * style license a copy of which has been included with this distribution in
 * the LICENSE.txt file.
 *
 * Created on 30. July 2011 by Joerg Schaible by merging AbstractCircularReferenceTest,
 * AbstractDuplicateReferenceTest, AbstractNestedCircularReferenceTest and
 * AbstractReplacedReferenceTest.
 */
package com.thoughtworks.acceptance;

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

import com.thoughtworks.acceptance.objects.StandardObject;
import com.thoughtworks.acceptance.someobjects.WithNamedList;
import com.thoughtworks.xstream.converters.ConversionException;
import com.thoughtworks.xstream.core.AbstractReferenceMarshaller;

/**
 * Acceptance tests for XStream's object-reference handling: duplicate references,
 * circular references (including self-reference and deeply nested cycles), references
 * into implicit collections/arrays, and replaced references via writeReplace.
 * Concrete subclasses configure a specific reference mode; the {@code xstream}
 * fixture is inherited from AbstractAcceptanceTest.
 */
public abstract class AbstractReferenceTest extends AbstractAcceptanceTest {
    protected void setUp() throws Exception {
        super.setUp();
        xstream.alias("person", Person.class);
        xstream.alias("thing", Thing.class);
        // Security framework: allow all nested test types of this class.
        xstream.allowTypesByWildcard(new String[]{AbstractReferenceTest.class.getName()+"$*"});
    }

    // Two references to the same instance must round-trip as equal lists.
    public void testReferencesAreWorking() {
        Thing sameThing = new Thing("hello");
        Thing anotherThing = new Thing("hello");

        List list = new ArrayList();
        list.add(sameThing);
        list.add(sameThing);
        list.add(anotherThing);

        String xml = xstream.toXML(list);
        List result = (List)xstream.fromXML(xml);

        assertEquals(list, result);
    }

    // Identity (not just equality) must survive the round trip: mutating one
    // deserialized alias must be visible through the other.
    public void testReferencesAreTheSameObjectWhenDeserialized() {
        Thing sameThing = new Thing("hello");
        Thing anotherThing = new Thing("hello");

        List list = new ArrayList();
        list.add(sameThing);
        list.add(sameThing);
        list.add(anotherThing);

        String xml = xstream.toXML(list);
        List result = (List)xstream.fromXML(xml);

        Thing t0 = (Thing)result.get(0);
        Thing t1 = (Thing)result.get(1);
        Thing t2 = (Thing)result.get(2);

        t0.field = "bye";

        assertEquals("bye", t0.field);
        assertEquals("bye", t1.field);
        assertEquals("hello", t2.field);
    }

    // Simple mutable value holder used by the duplicate-reference tests.
    public static class Thing extends StandardObject {
        public String field;

        public Thing() {
        }

        public Thing(String field) {
            this.field = field;
        }
    }

    // Two fields referencing the same object instance.
    public static class MultRef {
        public Object s1 = new Object();
        public Object s2 = s1;
    }

    public void testMultipleReferencesToObjectsWithNoChildren() {
        MultRef in = new MultRef();
        assertSame(in.s1, in.s2);

        String xml = xstream.toXML(in);
        MultRef out = (MultRef)xstream.fromXML(xml);

        assertSame(out.s1, out.s2);
    }

    // Immutable types (Integer) are serialized by value, so identity is NOT preserved.
    public void testReferencesNotUsedForImmutableValueTypes() {
        MultRef in = new MultRef();
        in.s1 = new Integer(4);
        in.s2 = in.s1;

        String xml = xstream.toXML(in);
        MultRef out = (MultRef)xstream.fromXML(xml);

        assertEquals(out.s1, out.s2);
        assertNotSame(out.s1, out.s2);
    }

    // Mutable value types (StringBuffer) keep identity across references.
    public void testReferencesUsedForMutableValueTypes() {
        MultRef in = new MultRef();
        in.s1 = new StringBuffer("hi");
        in.s2 = in.s1;

        String xml = xstream.toXML(in);
        MultRef out = (MultRef)xstream.fromXML(xml);

        StringBuffer buffer = (StringBuffer)out.s2;
        buffer.append("bye");

        assertEquals("hibye", out.s1.toString());
        assertSame(out.s1, out.s2);
    }

    // Referencing a whole implicit collection has no XML representation -> must fail.
    public void testReferencesToImplicitCollectionIsNotPossible() {
        xstream.alias("strings", WithNamedList.class);
        xstream.addImplicitCollection(WithNamedList.class, "things");
        WithNamedList[] wls = new WithNamedList[]{ new WithNamedList("foo"), new WithNamedList("bar")};
        wls[0].things.add("Hello");
        wls[0].things.add("Daniel");
        wls[1].things = wls[0].things;

        try {
            xstream.toXML(wls);
            fail("Thrown "
                + AbstractReferenceMarshaller.ReferencedImplicitElementException.class
                    .getName() + " expected");
        } catch (final AbstractReferenceMarshaller.ReferencedImplicitElementException e) {
            // OK
        }
    }

    // Referencing an ELEMENT of an implicit collection is fine.
    public void testReferencesToElementsOfImplicitCollectionIsPossible() {
        xstream.alias("strings", WithNamedList.class);
        xstream.addImplicitCollection(WithNamedList.class, "things");
        WithNamedList[] wls = new WithNamedList[]{ new WithNamedList("foo"), new WithNamedList("bar")};
        wls[0].things.add("Hello");
        wls[0].things.add("Daniel");
        wls[1].things.add(wls[0]);

        String xml = xstream.toXML(wls);
        WithNamedList[] out = (WithNamedList[])xstream.fromXML(xml);

        assertSame(out[0], out[1].things.get(0));
    }

    // Same as above, but the referenced element lives in the n-th (not first) holder.
    public void testReferencesToElementsOfNthImplicitCollectionIsPossible() {
        xstream.alias("strings", WithNamedList.class);
        xstream.addImplicitCollection(WithNamedList.class, "things");
        WithNamedList[] wls = new WithNamedList[]{
            new WithNamedList("foo"), new WithNamedList("bar"), new WithNamedList("foobar")};
        wls[1].things.add("Hello");
        wls[1].things.add("Daniel");
        wls[2].things.add(wls[1]);

        String xml = xstream.toXML(wls);
        WithNamedList[] out = (WithNamedList[])xstream.fromXML(xml);

        assertSame(out[1], out[2].things.get(0));
    }

    // A dangling reference id must surface as a ConversionException carrying the id.
    public void testThrowsForInvalidReference() {
        String xml = "" //
            + "<list>\n"
            + " <thing>\n"
            + " <field>Hello</field>\n"
            + " </thing>\n"
            + " <thing reference=\"foo\">\n"
            + "</list>";

        try {
            xstream.fromXML(xml);
            fail("Thrown " + ConversionException.class.getName() + " expected");
        } catch (final ConversionException e) {
            assertEquals("foo", e.get("reference"));
        }
    }

    // Graph node for the circular-reference tests (likes/loathes edges may form cycles).
    public static class Person {
        public String firstname;
        public Person likes;
        public Person loathes;

        public Person() {
        }

        public Person(String name) {
            this.firstname = name;
        }
    }

    static class LinkedElement {
        String name;
        LinkedElement next;

        LinkedElement(String name) {
            this.name = name;
        }
    }

    static class TreeElement {
        StringBuffer name;
        TreeElement left;
        TreeElement right;

        TreeElement(StringBuffer name) {
            this.name = name;
        }

        TreeElement(String name) {
            this.name = new StringBuffer(name);
        }
    }

    public void testCircularReference() {
        Person bob = new Person("bob");
        Person jane = new Person("jane");
        bob.likes = jane;
        jane.likes = bob;

        String xml = xstream.toXML(bob);

        Person bobOut = (Person)xstream.fromXML(xml);
        assertEquals("bob", bobOut.firstname);
        Person janeOut = bobOut.likes;

        assertEquals("jane", janeOut.firstname);

        assertSame(bobOut.likes, janeOut);
        assertSame(bobOut, janeOut.likes);
    }

    public void testCircularReferenceToSelf() {
        Person bob = new Person("bob");
        bob.likes = bob;

        String xml = xstream.toXML(bob);

        Person bobOut = (Person)xstream.fromXML(xml);
        assertEquals("bob", bobOut.firstname);
        assertSame(bobOut, bobOut.likes);
    }

    // Dense cycle graph: every identity relation must be preserved after round trip.
    public void testDeepCircularReferences() {
        Person bob = new Person("bob");
        Person jane = new Person("jane");
        Person ann = new Person("ann");
        Person poo = new Person("poo");

        bob.likes = jane;
        bob.loathes = ann;
        ann.likes = jane;
        ann.loathes = poo;
        poo.likes = jane;
        poo.loathes = ann;
        jane.likes = jane;
        jane.loathes = bob;

        String xml = xstream.toXML(bob);
        Person bobOut = (Person)xstream.fromXML(xml);
        Person janeOut = bobOut.likes;
        Person annOut = bobOut.loathes;
        Person pooOut = annOut.loathes;

        assertEquals("bob", bobOut.firstname);
        assertEquals("jane", janeOut.firstname);
        assertEquals("ann", annOut.firstname);
        assertEquals("poo", pooOut.firstname);

        assertSame(janeOut, bobOut.likes);
        assertSame(annOut, bobOut.loathes);
        assertSame(janeOut, annOut.likes);
        assertSame(pooOut, annOut.loathes);
        assertSame(janeOut, pooOut.likes);
        assertSame(annOut, pooOut.loathes);
        assertSame(janeOut, janeOut.likes);
        assertSame(bobOut, janeOut.loathes);
    }

    // NOTE: NestedThing is deliberately a NON-static inner class - the hidden
    // outer-instance reference is part of what this scenario exercises.
    public static class WeirdThing implements Serializable {
        public transient Object anotherObject;
        private NestedThing nestedThing = new NestedThing();

        private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
            in.defaultReadObject();
            anotherObject = in.readObject();
        }

        private void writeObject(ObjectOutputStream out) throws IOException {
            out.defaultWriteObject();
            out.writeObject(anotherObject);
        }

        private class NestedThing implements Serializable {
            private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
                in.defaultReadObject();
            }

            private void writeObject(ObjectOutputStream out) throws IOException {
                out.defaultWriteObject();
            }
        }
    }

    public void testWeirdCircularReference() {
        // I cannot fully explain what's special about WeirdThing, however without ensuring that
        // a reference is only
        // put in the references map once, this fails.

        // This case was first noticed when serializing JComboBox, deserializing it and then
        // serializing it again.
        // Upon the second serialization, it would cause the Sun 1.4.1 JVM to crash:
        // Object in = new javax.swing.JComboBox();
        // Object out = xstream.fromXML(xstream.toXML(in));
        // xstream.toXML(out); ....causes JVM crash on 1.4.1

        // WeirdThing is the least possible code I can create to reproduce the problem.

        // This also fails for JRockit 1.4.2 deeply nested, when it tries to set the final field
        // AbstractNestedCircularReferenceTest$WeirdThing$NestedThing$this$1.

        // setup
        WeirdThing in = new WeirdThing();
        in.anotherObject = in;

        String xml = xstream.toXML(in);
        // System.out.println(xml + "\n");

        // execute
        WeirdThing out = (WeirdThing)xstream.fromXML(xml);

        // verify
        assertSame(out, out.anotherObject);
    }

    // Node type with writeReplace: anonymous subclasses are replaced by plain TreeData
    // copies during serialization (see writeReplace below).
    public static class TreeData implements Serializable {
        String data;
        TreeData parent;
        List children;

        public TreeData(String data) {
            this.data = data;
            children = new ArrayList();
        }

        private TreeData(TreeData clone) {
            data = clone.data;
            parent = clone.parent;
            children = clone.children;
        }

        public void add(TreeData child) {
            child.parent = this;
            children.add(child);
        }

        public int hashCode() {
            final int prime = 31;
            int result = 1;
            result = prime * result + ((this.children == null) ? 0 : this.children.hashCode());
            result = prime * result + ((this.data == null) ? 0 : this.data.hashCode());
            return result;
        }

        public boolean equals(Object obj) {
            if (this == obj) return true;
            if (obj == null) return false;
            if (!(obj instanceof TreeData)) return false;
            TreeData other = (TreeData)obj;
            if (this.children == null) {
                if (other.children != null) return false;
            } else if (!this.children.equals(other.children)) return false;
            if (this.data == null) {
                if (other.data != null) return false;
            } else if (!this.data.equals(other.data)) return false;
            return true;
        }

        private Object writeReplace() {
            if (getClass() == TreeData.class) {
                return this;
            }
            // Subclasses (e.g. anonymous ones) are replaced by a plain TreeData copy.
            return new TreeData(this);
        }
    }

    // Expected XML differs per reference mode, so each subclass supplies it.
    public abstract void testReplacedReference();

    public void replacedReference(String expectedXml) {
        TreeData parent = new TreeData("parent");
        parent.add(new TreeData("child") {
            // anonymous type
        });

        xstream.alias("element", TreeData.class);
        xstream.alias("anonymous-element", parent.children.get(0).getClass());

        assertEquals(expectedXml, xstream.toXML(parent));
        TreeData clone = (TreeData)xstream.fromXML(expectedXml);
        assertEquals(parent, clone);
    }

    static class Email extends StandardObject {
        String email;
        private final Email alias;

        Email(String email) {
            this(email, null);
        }

        Email(String email, Email alias) {
            this.email = email;
            this.alias = alias;
        }
    }

    static class EmailList extends StandardObject {
        List addresses = new ArrayList();
        Email main;
    }

    // A non-implicit field referencing an element inside an implicit collection.
    public void testReferenceElementInImplicitCollection() {
        EmailList emails = new EmailList();
        emails.addresses.add(new Email("private@joewalnes.com"));
        emails.addresses.add(new Email("joe@joewalnes.com"));
        emails.addresses.add(new Email("joe.walnes@thoughtworks.com"));
        emails.addresses.add(new Email("joe@thoughtworks.com", (Email)emails.addresses.get(2)));
        emails.main = (Email)emails.addresses.get(1);

        xstream.addImplicitCollection(EmailList.class, "addresses", "address", Email.class);
        String xml = xstream.toXML(emails);
        assertEquals(emails, xstream.fromXML(xml));
    }

    static class EmailArray extends StandardObject {
        Email[] addresses;
        Email main;
    }

    // Same scenario as above with an implicit array instead of a collection.
    public void testReferenceElementInImplicitArrays() {
        EmailArray emails = new EmailArray();
        Email alias = new Email("joe.walnes@thoughtworks.com");
        emails.addresses = new Email[]{
            new Email("private@joewalnes.com"),
            new Email("joe@joewalnes.com"),
            alias,
            new Email("joe@thoughtworks.com", alias)
        };
        emails.main = emails.addresses[1];

        xstream.addImplicitArray(EmailArray.class, "addresses", "address");
        String xml = xstream.toXML(emails);
        assertEquals(emails, xstream.fromXML(xml));
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.mongodb; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.StreamSupport; import com.fasterxml.jackson.databind.ObjectMapper; import com.mongodb.ReadPreference; import com.mongodb.WriteConcern; import com.mongodb.client.MongoClient; import com.mongodb.client.MongoCollection; import com.mongodb.client.MongoDatabase; import org.apache.camel.Category; import org.apache.camel.Consumer; import org.apache.camel.Processor; import org.apache.camel.Producer; import org.apache.camel.spi.Metadata; import org.apache.camel.spi.UriEndpoint; import org.apache.camel.spi.UriParam; import org.apache.camel.spi.UriPath; import org.apache.camel.support.CamelContextHelper; import org.apache.camel.support.DefaultEndpoint; import org.apache.camel.util.ObjectHelper; import org.bson.Document; import org.bson.conversions.Bson; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.apache.camel.component.mongodb.MongoDbOperation.command; import static org.apache.camel.component.mongodb.MongoDbOperation.findAll; import static 
org.apache.camel.component.mongodb.MongoDbOperation.getDbStats; import static org.apache.camel.component.mongodb.MongoDbOperation.valueOf; import static org.apache.camel.component.mongodb.MongoDbOutputType.Document; import static org.apache.camel.component.mongodb.MongoDbOutputType.DocumentList; import static org.apache.camel.component.mongodb.MongoDbOutputType.MongoIterable; /** * Perform operations on MongoDB documents and collections. */ @UriEndpoint(firstVersion = "2.19.0", scheme = "mongodb", title = "MongoDB", syntax = "mongodb:connectionBean", category = { Category.DATABASE, Category.NOSQL }) public class MongoDbEndpoint extends DefaultEndpoint { private static final Logger LOG = LoggerFactory.getLogger(MongoDbEndpoint.class); @UriParam(description = "Sets the connection bean used as a client for connecting to a database.") private MongoClient mongoConnection; @UriPath(description = "Sets the connection bean reference used to lookup a client for connecting to a database.") @Metadata(required = true) private String connectionBean; @UriParam private String database; @UriParam private String collection; @UriParam private String collectionIndex; @UriParam private MongoDbOperation operation; @UriParam(defaultValue = "true") private boolean createCollection = true; @UriParam(label = "advanced") private boolean dynamicity; @UriParam(label = "advanced", defaultValue = "ACKNOWLEDGED", enums = "ACKNOWLEDGED,W1,W2,W3,UNACKNOWLEDGED,JOURNALED,MAJORITY") private String writeConcern = "ACKNOWLEDGED"; @UriParam(label = "advanced", defaultValue = "PRIMARY", enums = "PRIMARY,PRIMARY_PREFERRED,SECONDARY,SECONDARY_PREFERRED,NEAREST") private String readPreference = "PRIMARY"; @UriParam(label = "advanced") private boolean writeResultAsHeader; @UriParam(label = "consumer") private String consumerType; @UriParam(label = "advanced", defaultValue = "1000", javaType = "java.time.Duration") private long cursorRegenerationDelay = 1000L; @UriParam(label = "tail") private String 
tailTrackIncreasingField; @UriParam(label = "changeStream") private String streamFilter; // persistent tail tracking @UriParam(label = "tail") private boolean persistentTailTracking; @UriParam(label = "tail") private String persistentId; @UriParam(label = "tail") private String tailTrackDb; @UriParam(label = "tail") private String tailTrackCollection; @UriParam(label = "tail") private String tailTrackField; @UriParam(label = "common") private MongoDbOutputType outputType; // tailable cursor consumer by default private MongoDbConsumerType dbConsumerType; private MongoDbTailTrackingConfig tailTrackingConfig; private MongoDatabase mongoDatabase; private MongoCollection<Document> mongoCollection; public MongoDbEndpoint() { } public MongoDbEndpoint(String uri, MongoDbComponent component) { super(uri, component); } @Override public Producer createProducer() { validateProducerOptions(); initializeConnection(); return new MongoDbProducer(this); } @Override public Consumer createConsumer(Processor processor) throws Exception { validateConsumerOptions(); // we never create the collection createCollection = false; initializeConnection(); // select right consumer type try { dbConsumerType = ObjectHelper.isEmpty(consumerType) ? MongoDbConsumerType.tailable : MongoDbConsumerType.valueOf(consumerType); } catch (Exception e) { throw new CamelMongoDbException("Consumer type not supported: " + consumerType, e); } Consumer consumer; switch (dbConsumerType) { case tailable: consumer = new MongoDbTailableCursorConsumer(this, processor); break; case changeStreams: consumer = new MongoDbChangeStreamsConsumer(this, processor); break; default: throw new CamelMongoDbException("Consumer type not supported: " + dbConsumerType); } configureConsumer(consumer); return consumer; } /** * Check if outputType is compatible with operation. DbCursor and DocumentList applies to findAll. Document applies * to others. */ @SuppressWarnings("unused") // TODO: validate Output on createProducer method. 
private void validateOutputType() { if (!ObjectHelper.isEmpty(outputType)) { if (DocumentList.equals(outputType) && !(findAll.equals(operation))) { throw new IllegalArgumentException("outputType DocumentList is only compatible with operation findAll"); } if (MongoIterable.equals(outputType) && !(findAll.equals(operation))) { throw new IllegalArgumentException("outputType MongoIterable is only compatible with operation findAll"); } if (Document.equals(outputType) && (findAll.equals(operation))) { throw new IllegalArgumentException("outputType Document is not compatible with operation findAll"); } } } private void validateProducerOptions() throws IllegalArgumentException { // make our best effort to validate, options with defaults are checked // against their defaults, which is not always a guarantee that // they haven't been explicitly set, but it is enough if (!ObjectHelper.isEmpty(dbConsumerType) || persistentTailTracking || !ObjectHelper.isEmpty(tailTrackDb) || !ObjectHelper.isEmpty(tailTrackCollection) || !ObjectHelper.isEmpty(tailTrackField) || cursorRegenerationDelay != 1000L) { throw new IllegalArgumentException( "dbConsumerType, tailTracking, cursorRegenerationDelay options cannot appear on a producer endpoint"); } } private void validateConsumerOptions() throws IllegalArgumentException { // make our best effort to validate, options with defaults are checked // against their defaults, which is not always a guarantee that // they haven't been explicitly set, but it is enough if (!ObjectHelper.isEmpty(operation) || dynamicity || outputType != null) { throw new IllegalArgumentException( "operation, dynamicity, outputType " + "options cannot appear on a consumer endpoint"); } if (dbConsumerType == MongoDbConsumerType.tailable) { if (tailTrackIncreasingField == null) { throw new IllegalArgumentException( "tailTrackIncreasingField option must be set for tailable cursor MongoDB consumer endpoint"); } if (persistentTailTracking && 
(ObjectHelper.isEmpty(persistentId))) { throw new IllegalArgumentException("persistentId is compulsory for persistent tail tracking"); } } } /** * Initialises the MongoDB connection using the Mongo object provided to the endpoint * * @throws CamelMongoDbException */ public void initializeConnection() throws CamelMongoDbException { LOG.info("Initialising MongoDb endpoint: {}", this); if (database == null || (collection == null && !(getDbStats.equals(operation) || command.equals(operation)))) { throw new CamelMongoDbException("Missing required endpoint configuration: database and/or collection"); } if (mongoConnection == null) { mongoConnection = resolveMongoConnection(); if (mongoConnection == null) { throw new CamelMongoDbException( "Could not initialise MongoDbComponent. Could not resolve the mongo connection."); } } mongoDatabase = mongoConnection.getDatabase(database); if (mongoDatabase == null) { throw new CamelMongoDbException("Could not initialise MongoDbComponent. Database " + database + " does not exist."); } if (collection != null) { if (!createCollection && !databaseContainsCollection(collection)) { throw new CamelMongoDbException( "Could not initialise MongoDbComponent. Collection " + collection + " does not exist on the database and createCollection is false."); } mongoCollection = mongoDatabase.getCollection(collection, Document.class); LOG.debug("MongoDb component initialised and endpoint bound to MongoDB collection with the following parameters. 
" + "Cluster description: {}, Db: {}, Collection: {}", new Object[] { mongoConnection.getClusterDescription(), mongoDatabase.getName(), collection }); try { if (ObjectHelper.isNotEmpty(collectionIndex)) { ensureIndex(mongoCollection, createIndex()); } } catch (Exception e) { throw new CamelMongoDbException("Error creating index", e); } } } private boolean databaseContainsCollection(String collectionName) { return StreamSupport.stream(mongoDatabase.listCollectionNames().spliterator(), false).anyMatch(collectionName::equals); } /** * Add Index * * @param aCollection */ public void ensureIndex(MongoCollection<Document> aCollection, List<Bson> dynamicIndex) { if (dynamicIndex != null && !dynamicIndex.isEmpty()) { for (Bson index : dynamicIndex) { LOG.debug("create Document Index {}", index); aCollection.createIndex(index); } } } /** * Create technical list index * * @return technical list index */ @SuppressWarnings("unchecked") public List<Bson> createIndex() { try { List<Bson> indexList = new ArrayList<>(); if (ObjectHelper.isNotEmpty(collectionIndex)) { HashMap<String, String> indexMap = new ObjectMapper().readValue(collectionIndex, HashMap.class); for (Map.Entry<String, String> set : indexMap.entrySet()) { Document index = new Document(); // MongoDB 2.4 upwards is restrictive about the type of the // 'single field index' being // in use below (set.getValue())) as only an integer value // type is accepted, otherwise // server will throw an exception, see more details: // http://docs.mongodb.org/manual/release-notes/2.4/#improved-validation-of-index-types index.put(set.getKey(), set.getValue()); indexList.add(index); } } return indexList; } catch (IOException e) { throw new CamelMongoDbException("createIndex failed", e); } } @Override protected void doStart() throws Exception { if (mongoConnection == null) { mongoConnection = resolveMongoConnection(); } else { LOG.debug("Resolved the connection provided by mongoConnection property parameter as {}", mongoConnection); } 
super.doStart(); } private MongoClient resolveMongoConnection() { MongoClient mongoClient = CamelContextHelper.mandatoryLookup(getCamelContext(), connectionBean, MongoClient.class); LOG.debug("Resolved the connection provided by {} context reference as {}", connectionBean, mongoConnection); return mongoClient; } public String getConnectionBean() { return connectionBean; } /** * Name of {@link com.mongodb.client.MongoClient} to use. */ public void setConnectionBean(String connectionBean) { this.connectionBean = connectionBean; } /** * Sets the name of the MongoDB collection to bind to this endpoint * * @param collection collection name */ public void setCollection(String collection) { this.collection = collection; } public String getCollection() { return collection; } /** * Sets the collection index (JSON FORMAT : { "field1" : order1, "field2" : order2}) */ public void setCollectionIndex(String collectionIndex) { this.collectionIndex = collectionIndex; } public String getCollectionIndex() { return collectionIndex; } /** * Sets the operation this endpoint will execute against MongoDB. */ public void setOperation(String operation) throws CamelMongoDbException { try { this.operation = valueOf(operation); } catch (IllegalArgumentException e) { throw new CamelMongoDbException("Operation not supported", e); } } /** * Sets the operation this endpoint will execute against MongoDB. */ public void setOperation(MongoDbOperation operation) { this.operation = operation; } public MongoDbOperation getOperation() { return operation; } /** * Sets the name of the MongoDB database to target * * @param database name of the MongoDB database */ public void setDatabase(String database) { this.database = database; } public String getDatabase() { return database; } /** * Create collection during initialisation if it doesn't exist. Default is true. 
* * @param createCollection true or false */ public void setCreateCollection(boolean createCollection) { this.createCollection = createCollection; } public boolean isCreateCollection() { return createCollection; } /** * Sets the Mongo instance that represents the backing connection * * @param mongoConnection the connection to the database */ public void setMongoConnection(MongoClient mongoConnection) { this.mongoConnection = mongoConnection; } public MongoClient getMongoConnection() { return mongoConnection; } /** * Sets whether this endpoint will attempt to dynamically resolve the target database and collection from the * incoming Exchange properties. Can be used to override at runtime the database and collection specified on the * otherwise static endpoint URI. It is disabled by default to boost performance. Enabling it will take a minimal * performance hit. * * @see MongoDbConstants#DATABASE * @see MongoDbConstants#COLLECTION * @param dynamicity true or false indicated whether target database and collection should be calculated dynamically * based on Exchange properties. */ public void setDynamicity(boolean dynamicity) { this.dynamicity = dynamicity; } public boolean isDynamicity() { return dynamicity; } /** * Reserved for future use, when more consumer types are supported. * * @param dbConsumerType key of the consumer type * @throws CamelMongoDbException if consumer type is not supported */ public void setDbConsumerType(String dbConsumerType) throws CamelMongoDbException { try { this.dbConsumerType = MongoDbConsumerType.valueOf(dbConsumerType); } catch (IllegalArgumentException e) { throw new CamelMongoDbException("Consumer type not supported", e); } } public MongoDbConsumerType getDbConsumerType() { return dbConsumerType; } public String getConsumerType() { return consumerType; } /** * Consumer type. 
*/ public void setConsumerType(String consumerType) { this.consumerType = consumerType; } public String getTailTrackDb() { return tailTrackDb; } /** * Indicates what database the tail tracking mechanism will persist to. If not specified, the current database will * be picked by default. Dynamicity will not be taken into account even if enabled, i.e. the tail tracking database * will not vary past endpoint initialisation. * * @param tailTrackDb database name */ public void setTailTrackDb(String tailTrackDb) { this.tailTrackDb = tailTrackDb; } public String getTailTrackCollection() { return tailTrackCollection; } /** * Collection where tail tracking information will be persisted. If not specified, * {@link MongoDbTailTrackingConfig#DEFAULT_COLLECTION} will be used by default. * * @param tailTrackCollection collection name */ public void setTailTrackCollection(String tailTrackCollection) { this.tailTrackCollection = tailTrackCollection; } public String getTailTrackField() { return tailTrackField; } /** * Field where the last tracked value will be placed. If not specified, * {@link MongoDbTailTrackingConfig#DEFAULT_FIELD} will be used by default. * * @param tailTrackField field name */ public void setTailTrackField(String tailTrackField) { this.tailTrackField = tailTrackField; } /** * Enable persistent tail tracking, which is a mechanism to keep track of the last consumed message across system * restarts. The next time the system is up, the endpoint will recover the cursor from the point where it last * stopped slurping records. * * @param persistentTailTracking true or false */ public void setPersistentTailTracking(boolean persistentTailTracking) { this.persistentTailTracking = persistentTailTracking; } public boolean isPersistentTailTracking() { return persistentTailTracking; } /** * Correlation field in the incoming record which is of increasing nature and will be used to position the tailing * cursor every time it is generated. 
The cursor will be (re)created with a query of type: tailTrackIncreasingField * greater than lastValue (possibly recovered from persistent tail tracking). Can be of type Integer, Date, String, * etc. NOTE: No support for dot notation at the current time, so the field should be at the top level of the * document. * * @param tailTrackIncreasingField */ public void setTailTrackIncreasingField(String tailTrackIncreasingField) { this.tailTrackIncreasingField = tailTrackIncreasingField; } public String getTailTrackIncreasingField() { return tailTrackIncreasingField; } public MongoDbTailTrackingConfig getTailTrackingConfig() { if (tailTrackingConfig == null) { tailTrackingConfig = new MongoDbTailTrackingConfig( persistentTailTracking, tailTrackIncreasingField, tailTrackDb == null ? database : tailTrackDb, tailTrackCollection, tailTrackField, getPersistentId()); } return tailTrackingConfig; } /** * MongoDB tailable cursors will block until new data arrives. If no new data is inserted, after some time the * cursor will be automatically freed and closed by the MongoDB server. The client is expected to regenerate the * cursor if needed. This value specifies the time to wait before attempting to fetch a new cursor, and if the * attempt fails, how long before the next attempt is made. Default value is 1000ms. * * @param cursorRegenerationDelay delay specified in milliseconds */ public void setCursorRegenerationDelay(long cursorRegenerationDelay) { this.cursorRegenerationDelay = cursorRegenerationDelay; } public long getCursorRegenerationDelay() { return cursorRegenerationDelay; } /** * One tail tracking collection can host many trackers for several tailable consumers. To keep them separate, each * tracker should have its own unique persistentId. 
* * @param persistentId the value of the persistent ID to use for this tailable consumer */ public void setPersistentId(String persistentId) { this.persistentId = persistentId; } public String getPersistentId() { return persistentId; } public boolean isWriteResultAsHeader() { return writeResultAsHeader; } /** * In write operations, it determines whether instead of returning WriteResult as the body of the OUT message, we * transfer the IN message to the OUT and attach the WriteResult as a header. * * @param writeResultAsHeader flag to indicate if this option is enabled */ public void setWriteResultAsHeader(boolean writeResultAsHeader) { this.writeResultAsHeader = writeResultAsHeader; } public MongoDbOutputType getOutputType() { return outputType; } /** * Convert the output of the producer to the selected type : DocumentList Document or MongoIterable. DocumentList or * MongoIterable applies to findAll and aggregate. Document applies to all other operations. * * @param outputType */ public void setOutputType(MongoDbOutputType outputType) { this.outputType = outputType; } public MongoDatabase getMongoDatabase() { return mongoDatabase; } public MongoCollection<Document> getMongoCollection() { return mongoCollection; } public String getStreamFilter() { return streamFilter; } /** * Filter condition for change streams consumer. */ public void setStreamFilter(String streamFilter) { this.streamFilter = streamFilter; } /** * Configure the connection bean with the level of acknowledgment requested from MongoDB for write operations to a * standalone mongod, replicaset or cluster. Possible values are ACKNOWLEDGED, W1, W2, W3, UNACKNOWLEDGED, JOURNALED * or MAJORITY. 
* * @param writeConcern */ public void setWriteConcern(String writeConcern) { this.writeConcern = writeConcern; } public String getWriteConcern() { return this.writeConcern; } public WriteConcern getWriteConcernBean() { WriteConcern writeConcernBean = WriteConcern.valueOf(getWriteConcern()); if (writeConcernBean == null) { throw new IllegalArgumentException(String.format("Unknown WriteConcern configuration %s", getWriteConcern())); } return writeConcernBean; } /** * Configure how MongoDB clients route read operations to the members of a replica set. Possible values are PRIMARY, * PRIMARY_PREFERRED, SECONDARY, SECONDARY_PREFERRED or NEAREST * * @param readPreference */ public void setReadPreference(String readPreference) { this.readPreference = readPreference; } public String getReadPreference() { return this.readPreference; } public ReadPreference getReadPreferenceBean() { // will throw an IllegalArgumentException if the input is incorrect return ReadPreference.valueOf(getReadPreference()); } }
// Copyright 2017 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.exec.local; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.fail; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import com.google.common.collect.ImmutableList; import com.google.common.io.ByteStreams; import com.google.devtools.build.lib.actions.ActionInput; import com.google.devtools.build.lib.actions.ActionInputFileCache; import com.google.devtools.build.lib.actions.Artifact.ArtifactExpander; import com.google.devtools.build.lib.actions.ExecutionRequirements; import com.google.devtools.build.lib.actions.ResourceManager; import com.google.devtools.build.lib.actions.ResourceSet; import com.google.devtools.build.lib.actions.Spawn; import com.google.devtools.build.lib.actions.SpawnResult; import com.google.devtools.build.lib.exec.SpawnRunner.ProgressStatus; import com.google.devtools.build.lib.exec.SpawnRunner.SpawnExecutionPolicy; import com.google.devtools.build.lib.exec.util.SpawnBuilder; import com.google.devtools.build.lib.shell.JavaSubprocessFactory; import com.google.devtools.build.lib.shell.Subprocess; import com.google.devtools.build.lib.shell.SubprocessBuilder; import com.google.devtools.build.lib.shell.SubprocessFactory; import 
com.google.devtools.build.lib.util.NetUtil; import com.google.devtools.build.lib.util.OS; import com.google.devtools.build.lib.util.io.FileOutErr; import com.google.devtools.build.lib.vfs.FileSystem; import com.google.devtools.build.lib.vfs.FileSystemUtils; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.inmemoryfs.InMemoryFileSystem; import com.google.devtools.common.options.Options; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.charset.StandardCharsets; import java.time.Duration; import java.util.ArrayList; import java.util.List; import java.util.SortedMap; import java.util.TreeMap; import java.util.logging.Filter; import java.util.logging.LogRecord; import java.util.logging.Logger; import java.util.regex.Pattern; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import org.mockito.ArgumentCaptor; /** * Unit tests for {@link LocalSpawnRunner}. */ @RunWith(JUnit4.class) public class LocalSpawnRunnerTest { private static final boolean USE_WRAPPER = true; private static final boolean NO_WRAPPER = false; private static class FinishedSubprocess implements Subprocess { private final int exitCode; public FinishedSubprocess(int exitCode) { this.exitCode = exitCode; } @Override public boolean destroy() { return false; } @Override public int exitValue() { return exitCode; } @Override public boolean finished() { return true; } @Override public boolean timedout() { return false; } @Override public void waitFor() throws InterruptedException { // Do nothing. 
} @Override public OutputStream getOutputStream() { return ByteStreams.nullOutputStream(); } @Override public InputStream getInputStream() { return new ByteArrayInputStream(new byte[0]); } @Override public InputStream getErrorStream() { return new ByteArrayInputStream(new byte[0]); } @Override public void close() { // Do nothing. } } private static final Spawn SIMPLE_SPAWN = new SpawnBuilder("/bin/echo", "Hi!").withEnvironment("VARIABLE", "value").build(); private static final class SubprocessInterceptor implements SubprocessFactory { @Override public Subprocess create(SubprocessBuilder params) throws IOException { throw new UnsupportedOperationException(); } } private final class SpawnExecutionPolicyForTesting implements SpawnExecutionPolicy { private final List<ProgressStatus> reportedStatus = new ArrayList<>(); private final TreeMap<PathFragment, ActionInput> inputMapping = new TreeMap<>(); private long timeoutMillis; private boolean prefetchCalled; private boolean lockOutputFilesCalled; @Override public int getId() { return 0; } @Override public void prefetchInputs() throws IOException { prefetchCalled = true; } @Override public void lockOutputFiles() throws InterruptedException { lockOutputFilesCalled = true; } @Override public boolean speculating() { return false; } @Override public ActionInputFileCache getActionInputFileCache() { return mockFileCache; } @Override public ArtifactExpander getArtifactExpander() { throw new UnsupportedOperationException(); } @Override public Duration getTimeout() { return Duration.ofMillis(timeoutMillis); } @Override public FileOutErr getFileOutErr() { return outErr; } @Override public SortedMap<PathFragment, ActionInput> getInputMapping() { return inputMapping; } @Override public void report(ProgressStatus state, String name) { reportedStatus.add(state); } } private FileSystem fs; private final ActionInputFileCache mockFileCache = mock(ActionInputFileCache.class); private final ResourceManager resourceManager = 
ResourceManager.instanceForTestingOnly(); private Logger logger; private FileOutErr outErr; private final SpawnExecutionPolicyForTesting policy = new SpawnExecutionPolicyForTesting(); @Before public final void suppressLogging() { logger = Logger.getLogger(LocalSpawnRunner.class.getName()); logger.setFilter(new Filter() { @Override public boolean isLoggable(LogRecord record) { return false; } }); } @Before public final void setup() throws Exception { fs = new InMemoryFileSystem(); // Prevent any subprocess execution at all. SubprocessBuilder.setSubprocessFactory(new SubprocessInterceptor()); resourceManager.setAvailableResources( ResourceSet.create(/*memoryMb=*/1, /*cpuUsage=*/1, /*ioUsage=*/1, /*localTestCount=*/1)); } @After public final void tearDown() { SubprocessBuilder.setSubprocessFactory(JavaSubprocessFactory.INSTANCE); } @Test public void vanillaZeroExit() throws Exception { if (OS.getCurrent() == OS.WINDOWS) { // TODO(#3536): Make this test work on Windows. // The Command API implicitly absolutizes the path, and we get weird paths on Windows: // T:\execroot\execroot\_bin\process-wrapper return; } SubprocessFactory factory = mock(SubprocessFactory.class); ArgumentCaptor<SubprocessBuilder> captor = ArgumentCaptor.forClass(SubprocessBuilder.class); when(factory.create(captor.capture())).thenReturn(new FinishedSubprocess(0)); SubprocessBuilder.setSubprocessFactory(factory); LocalExecutionOptions options = Options.getDefaults(LocalExecutionOptions.class); options.localSigkillGraceSeconds = 456; LocalSpawnRunner runner = new LocalSpawnRunner( fs.getPath("/execroot"), options, resourceManager, USE_WRAPPER, OS.LINUX, "product-name", LocalEnvProvider.UNMODIFIED); policy.timeoutMillis = 123 * 1000L; outErr = new FileOutErr(fs.getPath("/out/stdout"), fs.getPath("/out/stderr")); assertThat(fs.getPath("/execroot").createDirectory()).isTrue(); SpawnResult result = runner.exec(SIMPLE_SPAWN, policy); verify(factory).create(any(SubprocessBuilder.class)); 
assertThat(result.status()).isEqualTo(SpawnResult.Status.SUCCESS); assertThat(result.exitCode()).isEqualTo(0); assertThat(result.setupSuccess()).isTrue(); assertThat(result.getExecutorHostName()).isEqualTo(NetUtil.getCachedShortHostName()); assertThat(captor.getValue().getArgv()) .containsExactlyElementsIn( ImmutableList.of( "/execroot/_bin/process-wrapper", "--timeout=123", "--kill_delay=456", "--stdout=/out/stdout", "--stderr=/out/stderr", "/bin/echo", "Hi!")); assertThat(captor.getValue().getEnv()).containsExactly("VARIABLE", "value"); assertThat(captor.getValue().getTimeoutMillis()).isEqualTo(0); assertThat(policy.lockOutputFilesCalled).isTrue(); assertThat(policy.reportedStatus) .containsExactly(ProgressStatus.SCHEDULING, ProgressStatus.EXECUTING).inOrder(); } @Test public void noProcessWrapper() throws Exception { if (OS.getCurrent() == OS.WINDOWS) { // TODO(#3536): Make this test work on Windows. // The Command API implicitly absolutizes the path, and we get weird paths on Windows: // T:\execroot\bin\echo return; } SubprocessFactory factory = mock(SubprocessFactory.class); ArgumentCaptor<SubprocessBuilder> captor = ArgumentCaptor.forClass(SubprocessBuilder.class); when(factory.create(captor.capture())).thenReturn(new FinishedSubprocess(0)); SubprocessBuilder.setSubprocessFactory(factory); LocalExecutionOptions options = Options.getDefaults(LocalExecutionOptions.class); options.localSigkillGraceSeconds = 456; LocalSpawnRunner runner = new LocalSpawnRunner( fs.getPath("/execroot"), options, resourceManager, NO_WRAPPER, OS.LINUX, "product-name", LocalEnvProvider.UNMODIFIED); policy.timeoutMillis = 123 * 1000L; outErr = new FileOutErr(fs.getPath("/out/stdout"), fs.getPath("/out/stderr")); assertThat(fs.getPath("/execroot").createDirectory()).isTrue(); SpawnResult result = runner.exec(SIMPLE_SPAWN, policy); verify(factory).create(any()); assertThat(result.status()).isEqualTo(SpawnResult.Status.SUCCESS); assertThat(result.exitCode()).isEqualTo(0); 
// NOTE(review): this span begins inside a test method whose opening lines are above this chunk;
// the assertions below verify a spawn that ran WITHOUT the process wrapper.
  assertThat(result.setupSuccess()).isTrue();
  assertThat(result.getExecutorHostName()).isEqualTo(NetUtil.getCachedShortHostName());
  // The argv is the raw command — no process-wrapper prefix.
  assertThat(captor.getValue().getArgv())
      .containsExactlyElementsIn(ImmutableList.of("/bin/echo", "Hi!"));
  assertThat(captor.getValue().getEnv()).containsExactly("VARIABLE", "value");
  // Without the process wrapper, we use the Command API to enforce the timeout.
  assertThat(captor.getValue().getTimeoutMillis()).isEqualTo(policy.timeoutMillis);
  assertThat(policy.lockOutputFilesCalled).isTrue();
}

/**
 * A non-zero exit code from the subprocess must still be reported as a SUCCESS status
 * (the spawn executed) with the exit code surfaced on the SpawnResult.
 */
@Test
public void nonZeroExit() throws Exception {
  if (OS.getCurrent() == OS.WINDOWS) {
    // TODO(#3536): Make this test work on Windows.
    // The Command API implicitly absolutizes the path, and we get weird paths on Windows:
    // T:\execroot\execroot\_bin\process-wrapper
    return;
  }
  SubprocessFactory factory = mock(SubprocessFactory.class);
  ArgumentCaptor<SubprocessBuilder> captor = ArgumentCaptor.forClass(SubprocessBuilder.class);
  // The fake subprocess immediately finishes with exit code 3.
  when(factory.create(captor.capture())).thenReturn(new FinishedSubprocess(3));
  SubprocessBuilder.setSubprocessFactory(factory);
  LocalExecutionOptions options = Options.getDefaults(LocalExecutionOptions.class);
  LocalSpawnRunner runner = new LocalSpawnRunner(
      fs.getPath("/execroot"), options, resourceManager, USE_WRAPPER, OS.LINUX,
      "product-name", LocalEnvProvider.UNMODIFIED);
  outErr = new FileOutErr(fs.getPath("/out/stdout"), fs.getPath("/out/stderr"));
  assertThat(fs.getPath("/execroot").createDirectory()).isTrue();
  SpawnResult result = runner.exec(SIMPLE_SPAWN, policy);
  verify(factory).create(any(SubprocessBuilder.class));
  // "Success" here means the spawn ran; the subprocess's own failure is the exit code.
  assertThat(result.status()).isEqualTo(SpawnResult.Status.SUCCESS);
  assertThat(result.exitCode()).isEqualTo(3);
  assertThat(result.setupSuccess()).isTrue();
  assertThat(result.getExecutorHostName()).isEqualTo(NetUtil.getCachedShortHostName());
  assertThat(captor.getValue().getArgv())
      .containsExactlyElementsIn(
          ImmutableList.of(
              // process-wrapper timeout grace_time stdout stderr
              "/execroot/_bin/process-wrapper",
              "--timeout=0",
              "--kill_delay=15",
              "--stdout=/out/stdout",
              "--stderr=/out/stderr",
              "/bin/echo",
              "Hi!"));
  assertThat(captor.getValue().getEnv()).containsExactly("VARIABLE", "value");
  assertThat(policy.lockOutputFilesCalled).isTrue();
}

/**
 * An IOException while starting the subprocess maps to EXECUTION_FAILED, setupSuccess=false,
 * and the exception text is written to the action's stderr file.
 */
@Test
public void processStartupThrows() throws Exception {
  SubprocessFactory factory = mock(SubprocessFactory.class);
  ArgumentCaptor<SubprocessBuilder> captor = ArgumentCaptor.forClass(SubprocessBuilder.class);
  when(factory.create(captor.capture())).thenThrow(new IOException("I'm sorry, Dave"));
  SubprocessBuilder.setSubprocessFactory(factory);
  LocalExecutionOptions options = Options.getDefaults(LocalExecutionOptions.class);
  LocalSpawnRunner runner = new LocalSpawnRunner(
      fs.getPath("/execroot"), options, resourceManager, USE_WRAPPER, OS.LINUX,
      "product-name", LocalEnvProvider.UNMODIFIED);
  // /out must exist so the runner can write the error message into /out/stderr below.
  assertThat(fs.getPath("/out").createDirectory()).isTrue();
  outErr = new FileOutErr(fs.getPath("/out/stdout"), fs.getPath("/out/stderr"));
  assertThat(fs.getPath("/execroot").createDirectory()).isTrue();
  SpawnResult result = runner.exec(SIMPLE_SPAWN, policy);
  verify(factory).create(any(SubprocessBuilder.class));
  assertThat(result.status()).isEqualTo(SpawnResult.Status.EXECUTION_FAILED);
  assertThat(result.exitCode()).isEqualTo(-1);
  assertThat(result.setupSuccess()).isFalse();
  assertThat(result.getWallTimeMillis()).isEqualTo(0);
  assertThat(result.getExecutorHostName()).isEqualTo(NetUtil.getCachedShortHostName());
  assertThat(FileSystemUtils.readContent(fs.getPath("/out/stderr"), StandardCharsets.UTF_8))
      .isEqualTo("Action failed to execute: java.io.IOException: I'm sorry, Dave\n");
  assertThat(policy.lockOutputFilesCalled).isTrue();
}

/**
 * When the allowed-local-action pattern matches nothing, execution is refused with
 * LOCAL_ACTION_NOT_ALLOWED before any subprocess is created.
 */
@Test
public void disallowLocalExecution() throws Exception {
  LocalExecutionOptions options = Options.getDefaults(LocalExecutionOptions.class);
  options.allowedLocalAction = Pattern.compile("none");
  LocalSpawnRunner runner = new LocalSpawnRunner(
      fs.getPath("/execroot"), options, resourceManager, USE_WRAPPER, OS.LINUX,
      "product-name", LocalEnvProvider.UNMODIFIED);
  outErr = new FileOutErr();
  assertThat(fs.getPath("/execroot").createDirectory()).isTrue();
  SpawnResult reply = runner.exec(SIMPLE_SPAWN, policy);
  assertThat(reply.status()).isEqualTo(SpawnResult.Status.LOCAL_ACTION_NOT_ALLOWED);
  assertThat(reply.exitCode()).isEqualTo(-1);
  assertThat(reply.setupSuccess()).isFalse();
  assertThat(reply.getWallTimeMillis()).isEqualTo(0);
  assertThat(reply.getExecutorHostName()).isEqualTo(NetUtil.getCachedShortHostName());
  // TODO(ulfjack): Maybe we should only lock after checking?
  assertThat(policy.lockOutputFilesCalled).isTrue();
}

/**
 * Interrupting the runner while waiting on the subprocess must destroy the subprocess and
 * propagate InterruptedException, while still locking the output files.
 */
@Test
public void interruptedException() throws Exception {
  SubprocessFactory factory = mock(SubprocessFactory.class);
  ArgumentCaptor<SubprocessBuilder> captor = ArgumentCaptor.forClass(SubprocessBuilder.class);
  // Fake subprocess: waitFor() throws InterruptedException until destroy() has been called,
  // forcing the runner through its interrupt-handling path.
  when(factory.create(captor.capture())).thenReturn(new FinishedSubprocess(3) {
    private boolean destroyed;

    @Override
    public boolean destroy() {
      destroyed = true;
      return true;
    }

    @Override
    public void waitFor() throws InterruptedException {
      if (!destroyed) {
        throw new InterruptedException();
      }
    }
  });
  SubprocessBuilder.setSubprocessFactory(factory);
  LocalExecutionOptions options = Options.getDefaults(LocalExecutionOptions.class);
  LocalSpawnRunner runner = new LocalSpawnRunner(
      fs.getPath("/execroot"), options, resourceManager, USE_WRAPPER, OS.LINUX,
      "product-name", LocalEnvProvider.UNMODIFIED);
  outErr = new FileOutErr(fs.getPath("/out/stdout"), fs.getPath("/out/stderr"));
  assertThat(fs.getPath("/execroot").createDirectory()).isTrue();
  try {
    runner.exec(SIMPLE_SPAWN, policy);
    fail();
  } catch (InterruptedException expected) {
    // Clear the interrupted status or subsequent tests in the same process will fail.
    Thread.interrupted();
  }
  assertThat(policy.lockOutputFilesCalled).isTrue();
}

/** By default, executing a spawn prefetches its inputs. */
@Test
public void checkPrefetchCalled() throws Exception {
  SubprocessFactory factory = mock(SubprocessFactory.class);
  when(factory.create(any())).thenReturn(new FinishedSubprocess(0));
  SubprocessBuilder.setSubprocessFactory(factory);
  LocalExecutionOptions options = Options.getDefaults(LocalExecutionOptions.class);
  LocalSpawnRunner runner = new LocalSpawnRunner(
      fs.getPath("/execroot"), options, resourceManager, USE_WRAPPER, OS.LINUX,
      "product-name", LocalEnvProvider.UNMODIFIED);
  policy.timeoutMillis = 123 * 1000L;
  outErr = new FileOutErr(fs.getPath("/out/stdout"), fs.getPath("/out/stderr"));
  assertThat(fs.getPath("/execroot").createDirectory()).isTrue();
  runner.exec(SIMPLE_SPAWN, policy);
  assertThat(policy.prefetchCalled).isTrue();
}

/** The DISABLE_LOCAL_PREFETCH execution requirement suppresses input prefetching. */
@Test
public void checkNoPrefetchCalled() throws Exception {
  SubprocessFactory factory = mock(SubprocessFactory.class);
  when(factory.create(any())).thenReturn(new FinishedSubprocess(0));
  SubprocessBuilder.setSubprocessFactory(factory);
  LocalExecutionOptions options = Options.getDefaults(LocalExecutionOptions.class);
  LocalSpawnRunner runner = new LocalSpawnRunner(
      fs.getPath("/execroot"), options, resourceManager, USE_WRAPPER, OS.LINUX,
      "product-name", LocalEnvProvider.UNMODIFIED);
  policy.timeoutMillis = 123 * 1000L;
  outErr = new FileOutErr(fs.getPath("/out/stdout"), fs.getPath("/out/stderr"));
  Spawn spawn = new SpawnBuilder("/bin/echo", "Hi!")
      .withExecutionInfo(ExecutionRequirements.DISABLE_LOCAL_PREFETCH, "").build();
  assertThat(fs.getPath("/execroot").createDirectory()).isTrue();
  runner.exec(spawn, policy);
  assertThat(policy.prefetchCalled).isFalse();
}

/** The injected LocalEnvProvider must be consulted to rewrite the spawn's environment. */
@Test
public void checkLocalEnvProviderCalled() throws Exception {
  SubprocessFactory factory = mock(SubprocessFactory.class);
  when(factory.create(any())).thenReturn(new FinishedSubprocess(0));
  SubprocessBuilder.setSubprocessFactory(factory);
  LocalEnvProvider localEnvProvider = mock(LocalEnvProvider.class);
  LocalExecutionOptions options = Options.getDefaults(LocalExecutionOptions.class);
  LocalSpawnRunner runner = new LocalSpawnRunner(
      fs.getPath("/execroot"), options, resourceManager, USE_WRAPPER, OS.LINUX,
      "product-name", localEnvProvider);
  policy.timeoutMillis = 123 * 1000L;
  outErr = new FileOutErr(fs.getPath("/out/stdout"), fs.getPath("/out/stderr"));
  assertThat(fs.getPath("/execroot").createDirectory()).isTrue();
  runner.exec(SIMPLE_SPAWN, policy);
  verify(localEnvProvider)
      .rewriteLocalEnv(
          any(), eq(fs.getPath("/execroot")), eq(fs.getPath("/execroot/tmp1")), eq("product-name"));
}

/**
 * When the runner targets Windows it must invoke process-wrapper with the ".exe" suffix, and
 * must pass the configured timeout and localSigkillGraceSeconds as wrapper flags.
 */
@Test
public void useCorrectExtensionOnWindows() throws Exception {
  if (OS.getCurrent() == OS.WINDOWS) {
    // TODO(#3536): Make this test work on Windows.
    // The Command API implicitly absolutizes the path, and we get weird paths on Windows:
    // T:\execroot\execroot\_bin\process-wrapper.exe
    return;
  }
  SubprocessFactory factory = mock(SubprocessFactory.class);
  ArgumentCaptor<SubprocessBuilder> captor = ArgumentCaptor.forClass(SubprocessBuilder.class);
  when(factory.create(captor.capture())).thenReturn(new FinishedSubprocess(0));
  SubprocessBuilder.setSubprocessFactory(factory);
  LocalExecutionOptions options = Options.getDefaults(LocalExecutionOptions.class);
  options.localSigkillGraceSeconds = 654;
  LocalSpawnRunner runner = new LocalSpawnRunner(
      fs.getPath("/execroot"), options, resourceManager, USE_WRAPPER, OS.WINDOWS,
      "product-name", LocalEnvProvider.UNMODIFIED);
  policy.timeoutMillis = 321 * 1000L;
  outErr = new FileOutErr(fs.getPath("/out/stdout"), fs.getPath("/out/stderr"));
  assertThat(fs.getPath("/execroot").createDirectory()).isTrue();
  SpawnResult result = runner.exec(SIMPLE_SPAWN, policy);
  verify(factory).create(any(SubprocessBuilder.class));
  assertThat(result.status()).isEqualTo(SpawnResult.Status.SUCCESS);
  assertThat(captor.getValue().getArgv())
      .containsExactlyElementsIn(
          ImmutableList.of(
              // process-wrapper timeout grace_time stdout stderr
              "/execroot/_bin/process-wrapper.exe",
              "--timeout=321",
              "--kill_delay=654",
              "--stdout=/out/stdout",
              "--stderr=/out/stderr",
              "/bin/echo",
              "Hi!"));
}
}
/*
 * Copyright 2005-2018 Dozer Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.dozermapper.core.classmap;

import java.beans.PropertyDescriptor;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;

import com.github.dozermapper.core.Mapping;
import com.github.dozermapper.core.OptionValue;
import com.github.dozermapper.core.classmap.generator.BeanMappingGenerator;
import com.github.dozermapper.core.classmap.generator.ClassLevelFieldMappingGenerator;
import com.github.dozermapper.core.classmap.generator.GeneratorUtils;
import com.github.dozermapper.core.classmap.generator.MappingType;
import com.github.dozermapper.core.config.BeanContainer;
import com.github.dozermapper.core.factory.DestBeanCreator;
import com.github.dozermapper.core.fieldmap.DozerField;
import com.github.dozermapper.core.fieldmap.FieldMap;
import com.github.dozermapper.core.fieldmap.GenericFieldMap;
import com.github.dozermapper.core.fieldmap.MapFieldMap;
import com.github.dozermapper.core.propertydescriptor.PropertyDescriptorFactory;
import com.github.dozermapper.core.util.DozerConstants;
import com.github.dozermapper.core.util.MappingOptions;
import com.github.dozermapper.core.util.MappingUtils;
import com.github.dozermapper.core.util.ReflectionUtils;

import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Internal class for adding implicit field mappings to a ClassMap. Also, builds implicit ClassMap for class mappings
 * that don't have an explicit custom xml mapping. Only intended for internal use.
 */
public final class ClassMapBuilder {

    private static final Logger log = LoggerFactory.getLogger(ClassMapBuilder.class);

    // Generators applied when a brand-new ClassMap is created (includes CollectionMappingGenerator).
    private final List<ClassMappingGenerator> buildTimeGenerators = new ArrayList<>();
    // Generators re-applied to already-registered ClassMaps; note that unlike the build-time list
    // this one deliberately omits CollectionMappingGenerator.
    private final List<ClassMappingGenerator> runTimeGenerators = new ArrayList<>();

    private final BeanContainer beanContainer;

    /**
     * Wires up both generator chains. Order matters: the first generator that accepts a
     * ClassMap and whose apply() returns true terminates the chain (see generateMapping).
     *
     * @param beanContainer             container of registered beans
     * @param destBeanCreator           factory for destination bean instances
     * @param beanMappingGenerator      shared generator for plain bean-to-bean mappings
     * @param propertyDescriptorFactory factory for property descriptors
     */
    public ClassMapBuilder(BeanContainer beanContainer, DestBeanCreator destBeanCreator, BeanMappingGenerator beanMappingGenerator,
                           PropertyDescriptorFactory propertyDescriptorFactory) {
        this.beanContainer = beanContainer;

        buildTimeGenerators.add(new ClassLevelFieldMappingGenerator(beanContainer, destBeanCreator, propertyDescriptorFactory));
        buildTimeGenerators.add(new AnnotationPropertiesGenerator(beanContainer, destBeanCreator, propertyDescriptorFactory));
        buildTimeGenerators.add(new AnnotationFieldsGenerator(beanContainer, destBeanCreator, propertyDescriptorFactory));
        buildTimeGenerators.add(new AnnotationClassesGenerator());
        buildTimeGenerators.add(new MapMappingGenerator(beanContainer, destBeanCreator, propertyDescriptorFactory));
        buildTimeGenerators.add(beanMappingGenerator);
        buildTimeGenerators.add(new CollectionMappingGenerator(beanContainer, destBeanCreator, propertyDescriptorFactory));

        runTimeGenerators.add(new ClassLevelFieldMappingGenerator(beanContainer, destBeanCreator, propertyDescriptorFactory));
        runTimeGenerators.add(new AnnotationPropertiesGenerator(beanContainer, destBeanCreator, propertyDescriptorFactory));
        runTimeGenerators.add(new AnnotationFieldsGenerator(beanContainer, destBeanCreator, propertyDescriptorFactory));
        runTimeGenerators.add(new AnnotationClassesGenerator());
        runTimeGenerators.add(new MapMappingGenerator(beanContainer, destBeanCreator, propertyDescriptorFactory));
        runTimeGenerators.add(beanMappingGenerator);
    }

    // TODO Cover with test cases
    // TODO Remove duplication
    // TODO Use Dozer Builder if possible ?
    // TODO Add Exclude Annotation process by separate generator
    // TODO Add Pluggable Builders
    // TODO Add field matcher based builder
    // TODO Add annotation based builder

    /**
     * Builds new default mapping on-the-fly for previously unknown mapped class pairs.
     *
     * @param globalConfiguration configuration of Dozer
     * @param srcClass type to convert from
     * @param destClass type to convert to
     * @return information about the classes being mapped
     */
    public ClassMap createDefaultClassMap(Configuration globalConfiguration, Class<?> srcClass, Class<?> destClass) {
        return createDefaultClassMap(globalConfiguration, srcClass, destClass, true);
    }

    /**
     * Builds a default ClassMap for the pair, optionally running the build-time generator chain.
     *
     * @param globalConfiguration   configuration of Dozer
     * @param srcClass              type to convert from
     * @param destClass             type to convert to
     * @param shouldGenerateMapping when true, field mappings are generated immediately
     * @return information about the classes being mapped
     */
    public ClassMap createDefaultClassMap(Configuration globalConfiguration, Class<?> srcClass, Class<?> destClass, Boolean shouldGenerateMapping) {
        // Both sides inherit mapNull/mapEmptyString from the global configuration.
        DozerClass srcDozerClass = new DozerClass(srcClass.getName(), srcClass, globalConfiguration.getBeanFactory(), null, null, null, null,
                                                  globalConfiguration.getMapNull(), globalConfiguration.getMapEmptyString(), false, null, beanContainer);
        DozerClass destDozerClass = new DozerClass(destClass.getName(), destClass, globalConfiguration.getBeanFactory(), null, null, null, null,
                                                   globalConfiguration.getMapNull(), globalConfiguration.getMapEmptyString(), false, null, beanContainer);
        ClassMap classMap = new ClassMap(globalConfiguration);
        classMap.setSrcClass(srcDozerClass);
        classMap.setDestClass(destDozerClass);
        if (shouldGenerateMapping) {
            generateMapping(classMap, globalConfiguration, buildTimeGenerators);
        }
        return classMap;
    }

    /**
     * Prepares default mappings based on provided mapping definition
     *
     * @param classMappings information about the classes being mapped
     * @param globalConfiguration configuration of Dozer
     */
    public void addDefaultFieldMappings(ClassMappings classMappings, Configuration globalConfiguration) {
        Set<Entry<String, ClassMap>> entries = classMappings.getAll().entrySet();
        for (Entry<String, ClassMap> entry : entries) {
            ClassMap classMap = entry.getValue();
            generateMapping(classMap, globalConfiguration, runTimeGenerators);
        }
    }

    /**
     * Runs the generator chain over a wildcard ClassMap; the chain stops at the first
     * generator whose apply() returns true. Non-wildcard maps are left untouched.
     */
    private void generateMapping(ClassMap classMap, Configuration configuration, List<ClassMappingGenerator> mappingGenerators) {
        if (!classMap.isWildcard()) {
            return;
        }
        for (ClassMappingGenerator generator : mappingGenerators) {
            if (generator.accepts(classMap)) {
                if (generator.apply(classMap, configuration)) {
                    return;
                }
            }
        }
    }

    /** Strategy interface for a single step in the implicit-mapping generator chain. */
    public interface ClassMappingGenerator {

        boolean accepts(ClassMap classMap);

        /**
         * true if we should stop after applied
         *
         * @param classMap information about the classes being mapped
         * @param configuration configuration of the mapping
         * @return true if we should stop after applied
         */
        boolean apply(ClassMap classMap, Configuration configuration);
    }

    /**
     * Generates field mappings for pairs where one side is a java.util.Map (or uses custom
     * map-get/map-set accessor methods). Always terminates the chain (apply returns true).
     */
    public static class MapMappingGenerator implements ClassMappingGenerator {

        private final BeanContainer beanContainer;
        private final DestBeanCreator destBeanCreator;
        private final PropertyDescriptorFactory propertyDescriptorFactory;

        public MapMappingGenerator(BeanContainer beanContainer, DestBeanCreator destBeanCreator, PropertyDescriptorFactory propertyDescriptorFactory) {
            this.beanContainer = beanContainer;
            this.destBeanCreator = destBeanCreator;
            this.propertyDescriptorFactory = propertyDescriptorFactory;
        }

        public boolean accepts(ClassMap classMap) {
            Class<?> srcClass = classMap.getSrcClassToMap();
            Class<?> destClass = classMap.getDestClassToMap();

            return MappingUtils.isSupportedMap(srcClass) || classMap.getSrcClassMapGetMethod() != null
                   || MappingUtils.isSupportedMap(destClass) || classMap.getDestClassMapGetMethod() != null;
        }

        public boolean apply(ClassMap classMap, Configuration configuration) {
            Class<?> srcClass = classMap.getSrcClassToMap();
            Class<?> destClass = classMap.getDestClassToMap();
            PropertyDescriptor[] properties;
            boolean destinationIsMap = false;

            // Enumerate properties of the NON-map side; the map side is addressed by key.
            if (MappingUtils.isSupportedMap(srcClass) || classMap.getSrcClassMapGetMethod() != null) {
                properties = ReflectionUtils.getPropertyDescriptors(destClass);
            } else {
                properties = ReflectionUtils.getPropertyDescriptors(srcClass);
                destinationIsMap = true;
            }

            for (PropertyDescriptor property : properties) {
                String fieldName = property.getName();

                if (GeneratorUtils.shouldIgnoreField(fieldName, srcClass, destClass, beanContainer)) {
                    continue;
                }

                // already mapped
                if (destinationIsMap && classMap.getFieldMapUsingSrc(fieldName) != null) {
                    continue;
                }

                // already mapped
                if (!destinationIsMap && classMap.getFieldMapUsingDest(fieldName, true) != null) {
                    continue;
                }

                FieldMap fieldMap = new MapFieldMap(classMap, beanContainer, destBeanCreator, propertyDescriptorFactory);
                // The map side is addressed via SELF_KEYWORD + key; the bean side via the property name.
                DozerField srcField = new DozerField(MappingUtils.isSupportedMap(srcClass) ? DozerConstants.SELF_KEYWORD : fieldName, null);
                srcField.setKey(fieldName);

                if (StringUtils.isNotEmpty(classMap.getSrcClassMapGetMethod())
                    || StringUtils.isNotEmpty(classMap.getSrcClassMapSetMethod())) {
                    srcField.setMapGetMethod(classMap.getSrcClassMapGetMethod());
                    srcField.setMapSetMethod(classMap.getSrcClassMapSetMethod());
                    srcField.setName(DozerConstants.SELF_KEYWORD);
                }

                DozerField destField = new DozerField(MappingUtils.isSupportedMap(destClass) ? DozerConstants.SELF_KEYWORD : fieldName, null);
                // NOTE(review): srcField.setKey(fieldName) is invoked a second time here, right after
                // destField is created; destField.setKey(fieldName) looks like the intended call
                // (copy-paste suspicion). Left unchanged because MapFieldMap's key-resolution
                // fallback is not visible from this file — confirm before fixing.
                srcField.setKey(fieldName);

                if (StringUtils.isNotEmpty(classMap.getDestClassMapGetMethod())
                    || StringUtils.isNotEmpty(classMap.getDestClassMapSetMethod())) {
                    destField.setMapGetMethod(classMap.getDestClassMapGetMethod());
                    destField.setMapSetMethod(classMap.getDestClassMapSetMethod());
                    destField.setName(DozerConstants.SELF_KEYWORD);
                }

                fieldMap.setSrcField(srcField);
                fieldMap.setDestField(destField);
                classMap.addFieldMapping(fieldMap);
            }
            return true;
        }
    }

    /**
     * Maps one supported collection type directly onto another via a single self-to-self
     * field mapping. Always terminates the chain.
     */
    public static class CollectionMappingGenerator implements ClassMappingGenerator {

        private final BeanContainer beanContainer;
        private final DestBeanCreator destBeanCreator;
        private final PropertyDescriptorFactory propertyDescriptorFactory;

        public CollectionMappingGenerator(BeanContainer beanContainer, DestBeanCreator destBeanCreator, PropertyDescriptorFactory propertyDescriptorFactory) {
            this.beanContainer = beanContainer;
            this.destBeanCreator = destBeanCreator;
            this.propertyDescriptorFactory = propertyDescriptorFactory;
        }

        public boolean accepts(ClassMap classMap) {
            Class<?> srcClass = classMap.getSrcClassToMap();
            Class<?> destClass = classMap.getDestClassToMap();
            return MappingUtils.isSupportedCollection(srcClass) && MappingUtils.isSupportedCollection(destClass);
        }

        public boolean apply(ClassMap classMap, Configuration configuration) {
            FieldMap fieldMap = new GenericFieldMap(classMap, beanContainer, destBeanCreator, propertyDescriptorFactory);
            DozerField selfReference = new DozerField(DozerConstants.SELF_KEYWORD, null);
            fieldMap.setSrcField(selfReference);
            fieldMap.setDestField(selfReference);
            classMap.addFieldMapping(fieldMap);
            return true;
        }
    }

    /**
     * Applies class-level {@code @MappingOptions} annotations found on either side of the pair,
     * reconciling conflicting values (INHERITED / empty values defer to the other side;
     * hard conflicts are logged and resolved in favour of the destination class).
     * Never terminates the chain (apply returns false).
     */
    public static class AnnotationClassesGenerator implements ClassMappingGenerator {

        public boolean accepts(ClassMap classMap) {
            return true;
        }

        public boolean apply(ClassMap classMap, Configuration configuration) {
            Class<?> srcType = classMap.getSrcClassToMap();
            Class<?> dstType = classMap.getDestClassToMap();
            applyClassMappingOptions(classMap, reconcileOptions(srcType, dstType));
            return false;
        }

        /**
         * Merges the MappingOptions annotations of both classes into one synthetic view.
         * If only one side is annotated, that side wins outright.
         */
        private static MappingOptions reconcileOptions(final Class<?> srcClass, final Class<?> dstClass) {
            final MappingOptions srcOpts = srcClass.getAnnotation(MappingOptions.class);
            final MappingOptions dstOpts = dstClass.getAnnotation(MappingOptions.class);
            if (srcOpts == null) {
                return dstOpts;
            }
            if (dstOpts == null) {
                return srcOpts;
            }

            // Both sides are annotated: build an anonymous merged annotation.
            return new MappingOptions() {

                // INHERITED defers to the other side; a genuine conflict logs and picks dst.
                private OptionValue reconcile(String fieldName, OptionValue srcOption, OptionValue dstOption) {
                    if (srcOption == dstOption) {
                        return srcOption;
                    }
                    if (srcOption == OptionValue.INHERITED) {
                        return dstOption;
                    }
                    if (dstOption == OptionValue.INHERITED) {
                        return srcOption;
                    }
                    log.info("Conflicting class annotations for " + fieldName + " on src class " + srcClass.getCanonicalName()
                             + " and dst class " + dstClass.getCanonicalName());
                    return dstOption;
                }

                // Same policy for String options, with "" playing the role of INHERITED.
                private String reconcile(String fieldName, String srcOption, String dstOption) {
                    if (srcOption.equals(dstOption)) {
                        return srcOption;
                    }
                    if (srcOption.isEmpty()) {
                        return dstOption;
                    }
                    if (dstOption.isEmpty()) {
                        return srcOption;
                    }
                    log.info("Conflicting class annotations for " + fieldName + " on src class " + srcClass.getCanonicalName()
                             + " and dst class " + dstClass.getCanonicalName());
                    return dstOption;
                }

                @Override
                public OptionValue wildCard() {
                    return reconcile("wildCard", srcOpts.wildCard(), dstOpts.wildCard());
                }

                @Override
                public OptionValue wildCardCaseInsensitive() {
                    return reconcile("wildCardCaseInsensitive", srcOpts.wildCardCaseInsensitive(), dstOpts.wildCardCaseInsensitive());
                }

                @Override
                public OptionValue stopOnErrors() {
                    return reconcile("stopOnErrors", srcOpts.stopOnErrors(), dstOpts.stopOnErrors());
                }

                @Override
                public OptionValue mapNull() {
                    return reconcile("mapNull", srcOpts.mapNull(), dstOpts.mapNull());
                }

                @Override
                public OptionValue mapEmptyString() {
                    return reconcile("mapEmptyString", srcOpts.mapEmptyString(), dstOpts.mapEmptyString());
                }

                @Override
                public String dateFormat() {
                    return reconcile("dateFormat", srcOpts.dateFormat(), dstOpts.dateFormat());
                }

                @Override
                public Class<? extends Annotation> annotationType() {
                    return MappingOptions.class;
                }
            };
        }

        /** Copies the reconciled option values onto the ClassMap; no-op when there is no annotation. */
        private static void applyClassMappingOptions(ClassMap classMap, MappingOptions mappingOptions) {
            if (mappingOptions != null) {
                classMap.setWildcard(mappingOptions.wildCard().toBoolean());
                classMap.setWildcardCaseInsensitive(mappingOptions.wildCardCaseInsensitive().toBoolean());
                classMap.setStopOnErrors(mappingOptions.stopOnErrors().toBoolean());

                // mapNull / mapEmptyString are applied symmetrically to both sides.
                Boolean mapNull = mappingOptions.mapNull().toBoolean();
                classMap.getDestClass().setMapNull(mapNull);
                classMap.getSrcClass().setMapNull(mapNull);

                Boolean mapEmptyString = mappingOptions.mapEmptyString().toBoolean();
                classMap.getDestClass().setMapEmptyString(mapEmptyString);
                classMap.getSrcClass().setMapEmptyString(mapEmptyString);

                String dateFormat = mappingOptions.dateFormat();
                if (!dateFormat.isEmpty()) {
                    classMap.setDateFormat(dateFormat);
                }
            }
        }
    }

    /**
     * Adds field mappings for getters annotated with {@code @Mapping} on either class.
     * Never terminates the chain.
     */
    public static class AnnotationPropertiesGenerator implements ClassMappingGenerator {

        private final BeanContainer beanContainer;
        private final DestBeanCreator destBeanCreator;
        private final PropertyDescriptorFactory propertyDescriptorFactory;

        public AnnotationPropertiesGenerator(BeanContainer beanContainer, DestBeanCreator destBeanCreator, PropertyDescriptorFactory propertyDescriptorFactory) {
            this.beanContainer = beanContainer;
            this.destBeanCreator = destBeanCreator;
            this.propertyDescriptorFactory = propertyDescriptorFactory;
        }

        public boolean accepts(ClassMap classMap) {
            return true;
        }

        public boolean apply(ClassMap classMap, Configuration configuration) {
            // Source side: skip properties that already have a mapping.
            Class<?> srcType = classMap.getSrcClassToMap();
            PropertyDescriptor[] srcProperties = ReflectionUtils.getPropertyDescriptors(srcType);
            for (PropertyDescriptor property : srcProperties) {
                Method readMethod = property.getReadMethod();
                if (readMethod != null) {
                    Mapping mapping = readMethod.getAnnotation(Mapping.class);
                    if (mapping != null) {
                        String propertyName = property.getName();
                        String pairName = mapping.value().trim();
                        if (requireMapping(mapping, classMap.getDestClassToMap(), propertyName, pairName)
                            && classMap.getFieldMapUsingSrc(propertyName) == null) {
                            GeneratorUtils.addGenericMapping(MappingType.GETTER_TO_SETTER, classMap, configuration,
                                                             propertyName, pairName.isEmpty() ? propertyName : pairName,
                                                             beanContainer, destBeanCreator, propertyDescriptorFactory);
                        }
                    }
                }
            }

            // Destination side.
            // NOTE(review): unlike the source loop, there is no getFieldMapUsingDest duplicate
            // check here — an annotation on both getters may add two mappings; confirm intent.
            Class<?> destType = classMap.getDestClassToMap();
            PropertyDescriptor[] destProperties = ReflectionUtils.getPropertyDescriptors(destType);
            for (PropertyDescriptor property : destProperties) {
                Method readMethod = property.getReadMethod();
                if (readMethod != null) {
                    Mapping mapping = readMethod.getAnnotation(Mapping.class);
                    if (mapping != null) {
                        String propertyName = property.getName();
                        String pairName = mapping.value().trim();
                        if (requireMapping(mapping, classMap.getSrcClassToMap(), propertyName, pairName)) {
                            GeneratorUtils.addGenericMapping(MappingType.GETTER_TO_SETTER, classMap, configuration,
                                                             pairName.isEmpty() ? propertyName : pairName, propertyName,
                                                             beanContainer, destBeanCreator, propertyDescriptorFactory);
                        }
                    }
                }
            }

            return false;
        }
    }

    /**
     * Adds field mappings for fields annotated with {@code @Mapping}, walking each class's
     * full superclass chain. Never terminates the chain.
     */
    public static class AnnotationFieldsGenerator implements ClassMappingGenerator {

        private final BeanContainer beanContainer;
        private final DestBeanCreator destBeanCreator;
        private final PropertyDescriptorFactory propertyDescriptorFactory;

        public AnnotationFieldsGenerator(BeanContainer beanContainer, DestBeanCreator destBeanCreator, PropertyDescriptorFactory propertyDescriptorFactory) {
            this.beanContainer = beanContainer;
            this.destBeanCreator = destBeanCreator;
            this.propertyDescriptorFactory = propertyDescriptorFactory;
        }

        public boolean accepts(ClassMap classMap) {
            return true;
        }

        public boolean apply(ClassMap classMap, Configuration configuration) {
            Class<?> srcType = classMap.getSrcClassToMap();
            do {
                for (Field field : srcType.getDeclaredFields()) {
                    Mapping mapping = field.getAnnotation(Mapping.class);
                    String fieldName = field.getName();
                    if (mapping != null) {
                        String pairName = mapping.value().trim();
                        if (requireMapping(mapping, classMap.getDestClassToMap(), fieldName, pairName)) {
                            GeneratorUtils.addGenericMapping(MappingType.FIELD_TO_FIELD, classMap, configuration,
                                                             fieldName, pairName.isEmpty() ? fieldName : pairName,
                                                             beanContainer, destBeanCreator, propertyDescriptorFactory);
                        }
                    }
                }
                srcType = srcType.getSuperclass();
            } while (srcType != null);

            Class<?> destType = classMap.getDestClassToMap();
            do {
                for (Field field : destType.getDeclaredFields()) {
                    Mapping mapping = field.getAnnotation(Mapping.class);
                    String fieldName = field.getName();
                    if (mapping != null) {
                        String pairName = mapping.value().trim();
                        if (requireMapping(mapping, classMap.getSrcClassToMap(), fieldName, pairName)) {
                            GeneratorUtils.addGenericMapping(MappingType.FIELD_TO_FIELD, classMap, configuration,
                                                             pairName.isEmpty() ? fieldName : pairName, fieldName,
                                                             beanContainer, destBeanCreator, propertyDescriptorFactory);
                        }
                    }
                }
                destType = destType.getSuperclass();
            } while (destType != null);

            return false;
        }
    }

    /**
     * A mapping is required unless it is marked optional AND the counterpart field is absent.
     * NOTE(review): getDeclaredField only inspects the class itself, not its superclasses —
     * an optional mapping to an inherited field is treated as absent; confirm this is intended.
     */
    private static boolean requireMapping(Mapping mapping, Class<?> clazz, String fieldName, String pairName) {
        try {
            return !mapping.optional()
                   || (mapping.optional() && clazz.getDeclaredField(pairName.isEmpty() ? fieldName : pairName) != null);
        } catch (NoSuchFieldException e) {
            // Optional mapping whose counterpart field does not exist: skip it.
            return false;
        }
    }
}
/** ========================================================================= * * Copyright (C) 2009, 2014 IBM Corporation ( http://www.ibm.com/ ) * * All rights reserved. * * * * @author David King <dlking@us.ibm.com> * * @author Stephan H. Wissel <st.wissel@sg.ibm.com> * * * * @version 1.0 * * ========================================================================== * * * * Licensed under the Apache License, Version 2.0 (the "License"). You may * * not use this file except in compliance with the License. You may obtain a * * copy of the License at <http://www.apache.org/licenses/LICENSE-2.0>. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * License for the specific language governing permissions and limitations * * under the License. * * * * ========================================================================== * * SmartFile - SwiftFile for the Rest Of Us. * <p> * SmartFile emulates the vector space model analysis performed by SwiftFile to * populate three SwiftFile folder name fields in the Lotus Notes e-mail Message * form with recommendations for filing e-mail messages. * <p> * The vector space model used here is based on the example provided by Dr E Garcia: * http://www.miislita.com/term-vector/term-vector-3.html. 
 * <p>
 * Additional inspiration came from papers written by the SwiftFile authors, Richard
 * Segal and Jeffrey Kephart:
 * <br />http://www.research.ibm.com/swiftfile/dynlearn.pdf
 * <br />http://www.research.ibm.com/swiftfile/mailcat.pdf
 * ==========================================================================
 */
package com.ibm.notes.smartfile;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Scanner;
import java.util.Vector;

import lotus.domino.Database;
import lotus.domino.Document;
import lotus.domino.NotesException;
import lotus.domino.Session;
import lotus.domino.View;
import lotus.domino.ViewEntry;
import lotus.domino.ViewNavigator;

import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;

/**
 * The SmartFile Engine that learns the words and folder distribution of a
 * mail file. It maintains a TF-IDF vector-space model (one vector per folder)
 * that is used elsewhere in this class to recommend filing targets for
 * documents.
 *
 * @author stw
 */
public class Engine {

    // The folders in the database including their UNID.
    // Key = FolderName, Value = UNID
    private HashMap<String, String> folderList = new HashMap<String, String>();

    // Reverse lookup of folderList. Key = UNID, Value = FolderName
    private HashMap<String, String> folderRef = new HashMap<String, String>();

    // A two dimensional HashMap where word -> (folder name -> tf)
    // "tf" = term frequency = word count
    private HashMap<String, HashMap<String, Double>> wordCounts = new HashMap<String, HashMap<String, Double>>();

    // A one dimensional HashMap where word -> idf
    // "idf" = inverse document frequency
    //       = log(total_number_of_folders / number_of_folders_that_contain_this_word)
    private HashMap<String, Double> wordIDFs = new HashMap<String, Double>();

    // A two dimensional HashMap where word -> (folder name -> tf * idf)
    private HashMap<String, HashMap<String, Double>> wordTFIDFs = new HashMap<String, HashMap<String, Double>>();

    // A one dimensional HashMap where folder name -> vector length
    // "vector length" = sqrt( tf*idf[1]^2 + tf*idf[2]^2 + ... + tf*idf[n]^2 )
    // for each folder ("^2" means "squared")
    private HashMap<String, Double> wordVectorLengths = new HashMap<String, Double>();

    /**
     * Track if the model database has been loaded
     */
    private boolean modelLoaded = false;

    /**
     * The settings from the preferences
     */
    private Configuration config = null;

    /**
     * The Engine can only be initialized when we have a configuration.
     *
     * @param config the plug-in settings/preferences this engine works with
     */
    public Engine(Configuration config) {
        this.config = config;
    }

    /**
     * Persists the vector model (folder list, raw word counts, IDFs, TF-IDFs
     * and folder vector lengths — in that order, mirrored by load()) to the
     * configured persistence file via Java serialization. The file goes into
     * the workspace directory.
     * <p>
     * NOTE(review): folderRef is not written here (and not read in load());
     * it is only populated by rebuildModel — confirm this is intended.
     * NOTE(review): the streams are not closed if writeObject throws —
     * consider try-with-resources / finally.
     */
    public void save() {
        OutputStream out;
        try {
            out = new FileOutputStream(
                    this.config.getSmartfilePersistenceFile());
            // Persist the vector model, and other values
            ObjectOutputStream oos = new ObjectOutputStream(out);
            oos.writeObject(folderList);
            oos.writeObject(wordCounts);
            oos.writeObject(wordIDFs);
            oos.writeObject(wordTFIDFs);
            oos.writeObject(wordVectorLengths);
            oos.close();
            out.close();
        } catch (IOException e) {
            Utils.logError(e.getMessage(), e);
            // We don't consider the model to be loaded if something fails here
            this.modelLoaded = false;
        }
        return;
    }

    /**
     * The scheduled processing routine is triggered by startup, replication (or
     * any other event when we suspect the folders need update). It kicks off all
     * work necessary by the engine: loading (or rebuilding) the model, labeling
     * Inbox/Drafts documents, learning from re-filed documents and refreshing
     * the derived values when something changed.
     *
     * @param s       the Notes session to work with
     * @param monitor progress reporting
     * @return Status.OK_STATUS on success, Status.CANCEL_STATUS on error or
     *         when SmartFile is disabled
     */
    public IStatus scheduledProcessing(Session s, IProgressMonitor monitor) {
        IStatus result = null;
        Database mail = null;
        View v = null;
        ViewEntry ve = null;
        ViewEntry ven = null;
        Document doc = null;
        ViewNavigator n = null;
        Configuration config = Activator.getDefault().getConfig();
        // point to the current mail file
        String mailFileName =
                config.getMailFileName(s);
        if (mailFileName == null || mailFileName.equals("")
                || !config.isEnabled()) {
            Utils.logError("Mailfile came back empty!");
            result = Status.CANCEL_STATUS;
            return result; // Early exit
        }
        Engine engine = Activator.getDefault().getEngine();
        try {
            mail = s.getDatabase("", mailFileName, true);
            // Make sure we have everything in place
            engine.checkDatabaseConditions(mail);
            // Load the existing persistence file
            try {
                engine.load();
            } catch (Exception e) {
                Utils.logError(e);
                // Loading didn't work, so we need to start from scratch
                // true means: save the model
                engine.rebuildModel(mail, monitor, true);
            }
            // Now process the folders and work on documents that
            // have been Changed to update the model
            List<String> foldersToProcess = new ArrayList<String>();
            foldersToProcess.add("($Inbox)");
            foldersToProcess.add("($Drafts)");
            this.setSFLabels(mail, foldersToProcess);
            // Now check all documents that have been filed elsewhere
            // and not been captured in the model yet
            // and "learn" from those files
            boolean learned = false;
            v = mail.getView(Configuration.SMARTFILE_VIEW);
            n = v.createViewNav();
            ve = n.getFirstDocument();
            while (ve != null) {
                // Fetch the successor before processing the current entry
                ven = n.getNextDocument();
                doc = ve.getDocument();
                learned = this.checkOneDocumentForChanges(doc, learned);
                doc.recycle();
                ve.recycle();
                ve = ven;
            }
            boolean foldersChanged = false;
            if (!learned) {
                // We check folders only if we don't have to rebuild yet.
                // Check the folder structure. If a folder has been deleted,
                // delete it from wordCounts
                HashMap<String, String> newFolderList = new HashMap<String, String>();
                HashMap<String, String> newFolderRef = new HashMap<String, String>();
                List<View> allViews = this.getFoldersFromDB(mail,
                        newFolderList, newFolderRef);
                // We don't need the views, we are only interested in the
                // newFolderList
                for (View v2 : allViews) {
                    Utils.shred(v2);
                }
                for (Map.Entry<String, String> oldFolder : this.folderList
                        .entrySet()) {
                    if (newFolderList.containsKey(oldFolder.getKey())) {
                        newFolderList.remove(oldFolder.getKey());
                    } else {
                        // A folder we knew about has disappeared
                        foldersChanged = true;
                        break;
                    }
                }
                if (newFolderList.size() > 0) {
                    // Leftover entries are newly added folders
                    foldersChanged = true;
                }
            }
            if (learned || foldersChanged) {
                // Build the sums again, rumbles through a lot
                // of linked lists
                this.refeshWords(monitor);
                this.save();
            }
            // If we got here everything worked
            result = Status.OK_STATUS;
        } catch (NotesException ne) {
            Utils.logError(ne);
            result = Status.CANCEL_STATUS;
        } finally {
            // Cleanup
            Utils.shred(v, mail, ve, ven, doc, n);
        }
        return result;
    }

    /**
     * Adds the values of the second hash map to the first one: if a key
     * exists the value gets incremented, if a key does not exist it gets
     * added.
     *
     * @param result  the merged/added hashmap (modified in place)
     * @param newVals the hashmap with values to be added
     */
    private void addHashMapValues(HashMap<String, Double> result,
            HashMap<String, Double> newVals) {
        for (Map.Entry<String, Double> newEntry : newVals.entrySet()) {
            String curKey = newEntry.getKey();
            Double curVal = newEntry.getValue();
            if (result.containsKey(curKey)) {
                Double oldVal = result.get(curKey);
                Double newVal = new Double(curVal.doubleValue()
                        + oldVal.doubleValue());
                result.put(curKey, newVal);
            } else {
                result.put(curKey, curVal);
            }
        }
    }

    /*************************************************************************************************
     * c a l c u l a t e I D F s
     **************************************************************************************************/
    // Do the term weighting calculations for
    // each word and return them as a one-dimensional HashMap: word -> idf
    private HashMap<String, Double> calculateIDFs(
            HashMap<String, HashMap<String, Double>> wordCounts,
            IProgressMonitor monitor) {
        monitor.subTask("calcuate IDFs");
        HashMap<String, Double> idfmap = new HashMap<String, Double>();
        for (Map.Entry<String, HashMap<String, Double>> me : wordCounts
                .entrySet()) {
            String curKey = me.getKey();
            HashMap<String, Double> tmpmap = me.getValue();
            // Get the number of folders that contain this word
            int df = tmpmap.size();
            // TODO: how does the folderlist get populated?
            // Calculate the IDF (inverse document frequency) for this word:
            // log(total number of folders / folders containing the word)
            double idf = Math
                    .log((double) this.folderList.size() / (double) df);
            // Store it in the HashMap
            idfmap.put(curKey, new Double(idf));
        }
        // TODO: better process monitor
        monitor.internalWorked(10);
        return idfmap;
    }

    /*************************************************************************************************
     * c a l c u l a t e T F I D F s
     **************************************************************************************************/
    // For each word, in each folder, calculate tf*idf and return a two
    // dimensional HashMap where word -> (folder -> tf*idf)
    private HashMap<String, HashMap<String, Double>> calculateTFIDFs(
            HashMap<String, HashMap<String, Double>> wordCounts,
            HashMap<String, Double> wordIDFs, IProgressMonitor monitor) {
        HashMap<String, HashMap<String, Double>> tfidfmap = new HashMap<String, HashMap<String, Double>>();
        monitor.subTask("calculate TFIDs");
        for (Map.Entry<String, HashMap<String, Double>> me : wordCounts
                .entrySet()) {
            // Map Entries
            String curKey = me.getKey();
            HashMap<String, Double> tmpmap = me.getValue();
            // X-REF: the idf for this word, as computed by calculateIDFs
            Double idf = wordIDFs.get(curKey);
            HashMap<String, Double> newmap = new HashMap<String, Double>();
            for (Map.Entry<String, Double> me2 : tmpmap.entrySet()) {
                String key2 = me2.getKey();
                Double count = me2.getValue();
                // tf * idf: the weight of this word in this folder
                double tfidf = count.doubleValue() * idf.doubleValue();
                newmap.put(key2, new Double(tfidf));
            }
            tfidfmap.put(curKey, newmap);
        }
        // TODO: better process monitor
        monitor.internalWorked(10);
        return tfidfmap;
    }

    /*************************************************************************************************
     * c a l c u l a t e V e c t o r L e n g t h s
     **************************************************************************************************/
    // Calculate the vector length for each folder:
    // sqrt of the sum of the squared tf*idf weights of its words
    private HashMap<String, Double> calculateVectorLengths(
            HashMap<String, HashMap<String, Double>> wordTFIDFs,
            IProgressMonitor monitor) {
        monitor.subTask("calculate Vector length");
        HashMap<String, Double> vectorLengths = new HashMap<String, Double>();
        for (Map.Entry<String, HashMap<String, Double>> me : wordTFIDFs
                .entrySet()) {
            HashMap<String, Double> tmpmap = me.getValue();
            for (Map.Entry<String, Double> me2 : tmpmap.entrySet()) {
                String key2 = me2.getKey();
                double tfidf = me2.getValue().doubleValue();
                // Accumulate the sum of squares per folder
                if (vectorLengths.containsKey(key2)) {
                    double accum = vectorLengths.get(key2).doubleValue();
                    accum += (tfidf * tfidf);
                    vectorLengths.put(key2, new Double(accum));
                } else {
                    vectorLengths.put(key2, new Double(tfidf * tfidf));
                }
            }
        }
        // Calculate the square root of the sum of the squares for each folder,
        // this is the vector length for the folder.
        HashMap<String, Double> newmap = new HashMap<String, Double>();
        for (Map.Entry<String, Double> me : vectorLengths.entrySet()) {
            String curKey = me.getKey();
            double curVal = me.getValue().doubleValue();
            newmap.put(curKey, new Double(Math.sqrt(curVal)));
        }
        // TODO: better process monitor
        monitor.internalWorked(10);
        return newmap;
    }

    /**
     * Checks the preconditions for a given database. To work and for
     * performance we watch out for:
     * <ul>
     * <li>Folder References enabled</li>
     * <li>a view that compares the FolderRef with the SmartFile entries</li>
     * </ul>
     * This saves the need to scan all of the database and is more efficient
     * than going through $All.
     *
     * @param db the mail database to check
     */
    private void checkDatabaseConditions(Database db) {
        checkforFolderReference(db);
        checkforSmartFileView(db);
    }

    // If FolderReferencesEnabled is not turned on in the database, turn it on
    private void checkforFolderReference(Database db) {
        try {
            if (!db.getFolderReferencesEnabled()) {
                db.setFolderReferencesEnabled(true);
                Utils.logInfo("\tTurned on FolderReferences in the database");
            }
        } catch (NotesException e) {
            Utils.logError(e);
        }
    }

    // Make sure the SmartFile helper view exists and uses the expected
    // selection formula; create it from DXL when it is missing
    private void checkforSmartFileView(Database db) {
        View v = null;
        try {
            v = db.getView(Configuration.SMARTFILE_VIEW);
            if (v != null) {
                // Repair the selection formula if someone changed it
                String selectionFormula = v.getSelectionFormula();
                if (!selectionFormula.equals(this
                        .getSmartFileSelectionFormula())) {
                    v.setSelectionFormula(this.getSmartFileSelectionFormula());
                }
            } else {
                // We need to create a view from DXL since we need one that is
                // FLAT and does not show categories
                Utils.createFlatViewFromDXL(db, Configuration.SMARTFILE_VIEW,
                        this.getSmartFileSelectionFormula());
            }
        } catch (NotesException e) {
            Utils.logError(e);
        } finally {
            Utils.shred(v);
        }
    }

    // Learn from unprocessed files located in other folders (probably
    // recently moved into those folders). Returns true if this document — or
    // any earlier one, via the trap-door oldStatus flag — triggered learning.
    private boolean checkOneDocumentForChanges(Document doc, boolean oldStatus) {
        boolean result = false;
        try {
            // Leave deleted documents alone
            if (doc.isDeleted()) {
                return oldStatus;
            }
            // Get list of folders this document belongs to
            @SuppressWarnings("rawtypes")
            Vector refs = doc.getFolderReferences();
            // If it's a document we've seen before
            if (doc.hasItem(Configuration.SMARTFILE_ITEMNAME)) {
                // Get the list of folders we think it's supposed to be in
                @SuppressWarnings("rawtypes")
                Vector myFlag = doc
                        .getItemValue(Configuration.SMARTFILE_ITEMNAME);
                // If it still has its SFLabels field set or
                // if it isn't where we think it is
                if ((doc.getItemValueString(Configuration.SFLABELS_FIELD)
                        .length() != 0) || (!myFlag.equals(refs))) {
                    // Learn from this document
                    result = this.learn(doc);
                    if (result) {
                        Utils.logInfo("\tLearning from document: \""
                                + doc.getUniversalID() + " - "
                                + doc.getItemValueString("Subject") + "\"");
                        // Clear the SFLabel_ fields to flag the fact that
                        // we've processed this file
                        doc.replaceItemValue(Configuration.SFLABELS_FIELD, "");
                        // TODO: externalise string
                        doc.replaceItemValue("SFLabel1", "");
                        doc.replaceItemValue("SFLabel2", "");
                        doc.replaceItemValue("SFLabel3", "");
                        // Set SmartFile field to the current folder(s) so
                        // that we can recognize changes later.
                        doc.replaceItemValue("SmartFile", refs);
                        doc.save();
                    }
                }
            } else {
                // Record where the document is currently located
                doc.replaceItemValue(Configuration.SMARTFILE_ITEMNAME, refs);
                doc.save();
            }
        } catch (NotesException e) {
            // TODO: more detailed error handling - see original code
            Utils.logError(e);
        }
        // Trap door boolean: if oldStatus was true, it must be true in any case
        if (oldStatus) {
            return true;
        }
        return result;
    }

    /*************************************************************************************************
     * c o u n t W o r d s
     **************************************************************************************************/
    // Count all the words in each of the folders we care about and return those
    // the folders are provided as a List, so selection of folders needs to
    // happen outside this function!
    // counts in a two dimensional HashMap
    // where word -> (folder name -> tf)
    // tf = term frequency = word count
    private HashMap<String, HashMap<String, Double>> countWordsInDatabase(
            List<View> views, Database db, IProgressMonitor monitor) {
        // Holds the result for the word count
        HashMap<String, HashMap<String, Double>> totalCounts = new HashMap<String, HashMap<String, Double>>();
        // Holds all the documents that don't have a folder reference yet
        // (should be empty after the first run and the folder reference
        // activation). UNID -> Foldernames
        HashMap<String, List<String>> docsWithoutFolderRef = new HashMap<String, List<String>>();
        Document doc = null;
        Document nextDoc = null;
        for (View v : views) {
            try {
                monitor.subTask("Processing " + v.getName());
                // Word counts aggregated over all documents of this folder
                HashMap<String, Double> folderCount = new HashMap<String, Double>();
                // We need the docs, so we can skip the viewentrycollection
                doc = v.getFirstDocument();
                while (doc != null) {
                    nextDoc = v.getNextDocument(doc);
                    HashMap<String, Double> docCount = this
                            .extractWordsFromDocument(doc);
                    this.addHashMapValues(folderCount, docCount);
                    // We need to process this document later on.
                    // We only can do that after all the folders have
                    // been processed since we would not catch if it was
                    // in a second folder
                    if (!doc.hasItem(Configuration.FOLDER_REF)) {
                        List<String> docFolders;
                        String unid = doc.getUniversalID();
                        if (!docsWithoutFolderRef.containsKey(unid)) {
                            docFolders = new ArrayList<String>();
                        } else {
                            docFolders = docsWithoutFolderRef.get(unid);
                        }
                        docFolders.add(v.getName());
                        docsWithoutFolderRef.put(unid, docFolders);
                    }
                    Utils.shred(doc);
                    doc = nextDoc;
                }
                // Merge this folder's counts into the word -> (folder -> tf)
                // result map
                for (Map.Entry<String, Double> folderEntry : folderCount
                        .entrySet()) {
                    String curKey = folderEntry.getKey();
                    HashMap<String, Double> curMap = null;
                    if (totalCounts.containsKey(curKey)) {
                        curMap = totalCounts.get(curKey);
                    } else {
                        curMap = new HashMap<String, Double>();
                    }
                    // Capture that this folder contains this word
                    curMap.put(v.getName(), folderEntry.getValue());
                    totalCounts.put(curKey, curMap);
                }
            } catch (NotesException e) {
                Utils.logError(e.id + " " + e.text, e);
            }
            // Update the process monitor
            monitor.internalWorked(1);
        }
        // We have 20 working units for the move to folder
        // so we report when dCount / processChunks = 0
        int dCount = docsWithoutFolderRef.size();
        dCount = dCount - (dCount % 20);
        int processChunks = dCount / 20;
        // Now we need to move all documents into the folders to update the
        // $FolderRef for the missing documents
        for (Map.Entry<String, List<String>> curDocEntry : docsWithoutFolderRef
                .entrySet()) {
            String unid = curDocEntry.getKey();
            List<String> folders2Move = curDocEntry.getValue();
            try {
                doc = db.getDocumentByUNID(unid);
                for (String f : folders2Move) {
                    doc.putInFolder(f);
                }
            } catch (NotesException e) {
                Utils.logError(e);
            } finally {
                Utils.shred(doc);
            }
            // Monitor update
            if (processChunks == 0) {
                // We have less than 20 documents here
                monitor.internalWorked(1);
            } else if (dCount > 0 && (dCount % processChunks == 0)) {
                // We report in respective chunks
                monitor.internalWorked(1);
            }
            dCount--;
        }
        return totalCounts;
    }

    /**
     * Takes a document and counts all the words except the words in the
     * Stopword list. Returns a hashmap with the words as keys and the count of
     * each word as value.
     * This is the base to compute proximity.
     *
     * @param doc the document to be processed
     * @return HashMap with Word -> CountInDocument
     */
    private HashMap<String, Double> extractWordsFromDocument(Document doc) {
        // Count the words in this document,
        // adding those to the wordCounts HashMap
        HashMap<String, Double> tf = new HashMap<String, Double>();
        String language = this.getLanguageFromDocument(doc);
        Scanner s = getScannerFromDocument(doc);
        while (s.hasNext()) {
            String w = s.next().toLowerCase();
            w = w.replaceAll("\\W*$", ""); // remove trailing non-word characters
            w = w.replaceAll("^\\W*", ""); // remove leading non-word characters
            w = w.trim(); // remove leading and trailing whitespace
            // Skip one-letter words and stop words of the detected language
            if ((w.length() > 1) && (!config.isStopWord(w, language))) {
                if (tf.containsKey(w)) {
                    Double ctr = tf.get(w);
                    ctr = new Double(ctr.doubleValue() + 1);
                    tf.put(w, ctr);
                } else {
                    tf.put(w, new Double(1));
                }
            }
        }
        return tf;
    }

    /**
     * public ArrayList<String> getFolderList() { return folderList; }
     */

    // Provides only folders that are not excluded; optionally fills the
    // passed name->UNID (folderNames) and UNID->name (folderRef) maps.
    // Excluded folders are shredded right away; the caller owns the returned
    // views and must shred them.
    private List<View> getFoldersFromDB(Database db,
            HashMap<String, String> folderNames,
            HashMap<String, String> folderRef) {
        List<View> result = null;
        String unid = null;
        String vName = null;
        try {
            @SuppressWarnings("rawtypes")
            Vector allViews = db.getViews();
            result = new ArrayList<View>(allViews.size());
            for (Object x : allViews) {
                View v = (View) x;
                // IsExcludedFolder returns true for any VIEW too
                if (this.config.isExcludedFolder(v.getName())) {
                    Utils.shred(v);
                } else {
                    result.add(v);
                    if (folderNames != null) {
                        vName = v.getName();
                        unid = v.getUniversalID();
                        folderNames.put(vName, unid);
                        folderRef.put(unid, vName);
                    }
                }
            }
        } catch (NotesException e) {
            Utils.logError(e);
        }
        return result;
    }

    // Maps a zero-based index to the item names "SFLabel1".."SFLabel3"
    private String getLabelWithOffset(int base) {
        return "SFLabel" + String.valueOf(base + 1);
    }

    /**
     * Determines the language of a document to pick the right Stopword list
     * for processing.
     *
     * @param doc the document to inspect
     * @return the language key to use for stop word lookup
     */
    private String getLanguageFromDocument(Document doc) {
        // TODO check with development how to figure the language
        // for now just support the default language
        return config.getDefaultLanguage();
    }

    // Builds a Scanner over the text of all configured fields of a document.
    // Values of the "no spaces" fields (addresses etc.) get their blanks
    // replaced by underscores so each value stays a single token.
    private Scanner getScannerFromDocument(Document doc) {
        StringBuilder builder = new StringBuilder();
        // First all fields that can't have spaces like From, To etc
        for (String curFieldNoSpaces : config.getFieldsToProcessNoSpaces()) {
            try {
                if (doc.hasItem(curFieldNoSpaces)) {
                    @SuppressWarnings("rawtypes")
                    Vector values = doc.getItemValue(curFieldNoSpaces);
                    for (int i = 0; i < values.size(); i++) {
                        builder.append(values.elementAt(i).toString()
                                .replace(" ", "_"));
                        builder.append(" ");
                    }
                }
            } catch (NotesException e) {
                // We don't care if that doesn't work for one element
            }
        }
        // Now the as-is fields
        for (String curFieldNoSpaces : config.getFieldsToProcess()) {
            try {
                if (doc.hasItem(curFieldNoSpaces)) {
                    builder.append(doc.getItemValueString(curFieldNoSpaces));
                    builder.append(" ");
                }
            } catch (NotesException e) {
                // We don't care if that doesn't work for one element
            }
        }
        Scanner s = new Scanner(builder.toString());
        return s;
    }

    /**
     * The formula that shows all documents that have been moved around since
     * we worked on them the last time SmartFile ran.
     *
     * @return The formula to select all the unprocessed documents
     */
    private String getSmartFileSelectionFormula() {
        return "@Trim(@Replace(@Text(" + Configuration.FOLDER_REF + ");"
                + Configuration.SMARTFILE_REFNAME + ");\"\")) != \"\"";
    }

    /*************************************************************************************************
     * l e a r n
     **************************************************************************************************
     *
     * Processes one document and adds to the Engine's knowledge: subtracts
     * the document's word counts from the folders we previously recorded it
     * in and adds them to the folders it currently lives in.
     */
    private boolean learn(Document doc) {
        boolean learned = false;
        try {
            HashMap<String, Double> tf = extractWordsFromDocument(doc);
            @SuppressWarnings("rawtypes")
            Vector refs = doc.getFolderReferences();
            @SuppressWarnings("rawtypes")
            Vector myFlag = doc.getItemValue("SmartFile");
            // If this document has been processed before but it isn't where
            // we think it should be
            if ((doc.getItemValueString("SFLabels").length() == 0)
                    && (!myFlag.equals(refs))) {
                // Subtract this document's word counts from the wordCount
                // HashMap for the "SFLabels" folders
                for (int i = 0; i < myFlag.size(); i++) {
                    String folder = (String) myFlag.elementAt(i);
                    if (!config.isExcludedFolder(folder)) {
                        learned = true;
                        for (Map.Entry<String, Double> me : tf.entrySet()) {
                            String curWord = me.getKey();
                            Double value = me.getValue();
                            HashMap<String, Double> tmpmap = null;
                            if (this.wordCounts.containsKey(curWord)) {
                                tmpmap = this.wordCounts.get(curWord);
                            } else {
                                tmpmap = new HashMap<String, Double>();
                            }
                            // If that HashMap has an entry for this folder
                            if (tmpmap.containsKey(folder)) {
                                // Subtract the word's count from it
                                Double accum = tmpmap.get(folder);
                                accum = new Double(accum.doubleValue()
                                        - value.doubleValue());
                                // If that leaves anything
                                if (accum.doubleValue() > 0) {
                                    // Put the new count into the map
                                    tmpmap.put(folder, accum);
                                } else {
                                    // Otherwise remove the entry for this
                                    // folder from the map
                                    tmpmap.remove(folder);
                                }
                            }
                            // If the entire contents of the folder map have
                            // been deleted
                            if (tmpmap.isEmpty()) {
                                // Remove this word from wordCounts
                                this.wordCounts.remove(curWord);
                            } else {
                                // Otherwise, store the updated map
                                this.wordCounts.put(curWord, tmpmap);
                            }
                        }
                    }
                }
            }
            // Add the counts for this document to the wordCounts HashMap for
            // the "refs" folders
            for (int i = 0; i < refs.size(); i++) {
                String folder = (String) refs.elementAt(i);
                if (!config.isExcludedFolder(folder)) {
                    learned = true;
                    for (Map.Entry<String, Double> me : tf.entrySet()) {
                        String curWord = me.getKey();
                        Double curValue = me.getValue();
                        HashMap<String, Double> tmpmap = null;
                        if (this.wordCounts.containsKey(curWord)) {
                            tmpmap = this.wordCounts.get(curWord);
                        } else {
                            tmpmap = new HashMap<String, Double>();
                        }
                        double accum = curValue.doubleValue();
                        if
                        (tmpmap.containsKey(folder)) {
                            accum += tmpmap.get(folder).doubleValue();
                        }
                        tmpmap.put(folder, new Double(accum));
                        this.wordCounts.put(curWord, tmpmap);
                    }
                }
            }
        } catch (NotesException e) {
            System.out.println("NotesException: " + e.id + " " + e.text);
            Utils.logError(e.id + " " + e.text, e);
        } catch (Exception e) {
            Utils.logError("Java Exception in learn:", e);
        }
        return learned;
    }

    /**
     * Loads the persisted vector model from the persistence file; counterpart
     * of save() (same objects in the same order). Skipped when the model is
     * already in memory.
     * NOTE(review): the streams are not closed if readObject throws —
     * consider try-with-resources / finally.
     *
     * @throws IOException            if the persistence file cannot be read
     * @throws ClassNotFoundException if a serialized class is unknown
     */
    @SuppressWarnings("unchecked")
    private void load() throws IOException, ClassNotFoundException {
        // Loading the model is slow, so we avoid it if possible
        if (!this.modelLoaded) {
            File inFile = new File(this.config.getSmartfilePersistenceFile());
            InputStream in = new FileInputStream(inFile);
            // Read the vector model, and other values, from a file
            ObjectInputStream ois = new ObjectInputStream(in);
            this.folderList = (HashMap<String, String>) ois.readObject();
            this.wordCounts = (HashMap<String, HashMap<String, Double>>) ois
                    .readObject();
            this.wordIDFs = (HashMap<String, Double>) ois.readObject();
            this.wordTFIDFs = (HashMap<String, HashMap<String, Double>>) ois
                    .readObject();
            this.wordVectorLengths = (HashMap<String, Double>) ois.readObject();
            ois.close();
            this.modelLoaded = true;
        }
        return;
    }

    /*************************************************************************************************
     * p r o c e s s D o c u m e n t
     **************************************************************************************************/
    // Analyze a document and set the three SwiftFile fields to the folders
    // that most closely match (cosine similarity in the TF-IDF vector space)
    private void processDocument(Document doc,
            HashMap<String, Double> wordIDFs,
            HashMap<String, HashMap<String, Double>> wordTFIDFs,
            HashMap<String, Double> wordVectorLengths) {
        try {
            // Parse out the individual words and accumulate their counts (tf)
            // in a HashMap
            HashMap<String, Double> tf = this.extractWordsFromDocument(doc);
            // Calculate tf * idf for each word in the document and save those
            // in a HashMap.
            // Also sum their squares and calculate the vector length for the
            // document
            HashMap<String, Double> tfidfMap = new HashMap<String, Double>();
            double accum = 0;
            // For each word in the document ...
            for (Map.Entry<String, Double> me : tf.entrySet()) {
                String curWord = me.getKey();
                Double curCount = me.getValue();
                Double idf = null;
                // Get the matching IDF from the wordIDFs HashMap ...
                if (wordIDFs.containsKey(curWord)) {
                    idf = wordIDFs.get(curWord);
                } else {
                    // Words unknown to the model carry no weight
                    idf = new Double(0);
                }
                // Calculate tf * idf and save it in a HashMap for later ...
                Double tfidf = new Double(curCount.doubleValue()
                        * idf.doubleValue());
                tfidfMap.put(curWord, tfidf);
                // Sum the square ...
                accum += tfidf.doubleValue() * tfidf.doubleValue();
            }
            double docVectorLength = Math.sqrt(accum);
            // Calculate the dot products for each folder
            HashMap<String, Double> dotProductsByFolder = new HashMap<String, Double>();
            // For each word in the document ...
            for (Map.Entry<String, Double> me : tfidfMap.entrySet()) {
                String curKey = me.getKey();
                Double docTFIDF = me.getValue();
                // Get the matching folderName -> tf*idf HashMap from wordTFIDFs
                if (wordTFIDFs.containsKey(curKey)) {
                    HashMap<String, Double> tmpmap = wordTFIDFs.get(curKey);
                    // For each Folder in that map ...
                    for (Map.Entry<String, Double> me2 : tmpmap.entrySet()) {
                        String key2 = me2.getKey();
                        Double folderTFIDF = me2.getValue();
                        if (dotProductsByFolder.containsKey(key2)) {
                            accum = dotProductsByFolder.get(key2).doubleValue();
                        } else {
                            accum = 0;
                        }
                        dotProductsByFolder.put(
                                key2,
                                new Double(accum
                                        + (folderTFIDF.doubleValue() * docTFIDF
                                                .doubleValue())));
                    }
                }
            }
            // Calculate the similarity values for each folder. Find the top
            // three. These will be the recommended folders.
            double[] sim = { 0, 0, 0 };
            String[] folder = { "", "", "" };
            // For each folder
            for (Map.Entry<String, Double> me : dotProductsByFolder.entrySet()) {
                String curKey = me.getKey();
                double dotProduct = me.getValue().doubleValue();
                double folderVectorLenght = wordVectorLengths.get(curKey)
                        .doubleValue();
                // This is the key!
double simValue = dotProduct / docVectorLength * folderVectorLenght; // Final comparison if (simValue > sim[2]) { sim[2] = simValue; folder[2] = curKey; if (simValue > sim[1]) { sim[2] = sim[1]; folder[2] = folder[1]; sim[1] = simValue; folder[1] = curKey; if (simValue > sim[0]) { sim[1] = sim[0]; folder[1] = folder[0]; sim[0] = simValue; folder[0] = curKey; } } } } // Set the document's SwiftFile fields Vector<String> sflabels = new Vector<String>(); for (int i = 0; i < 3; i++) { if (folder[i] != null && folder[i].length() != 0) { sflabels.add(folder[i]); doc.replaceItemValue(this.getLabelWithOffset(i), folder[i]); } } if (sflabels != null && sflabels.size() != 0) { doc.replaceItemValue("SFLabels", sflabels); } // Now bring Folder references and our recording of them into // the SmartFile fields - this @SuppressWarnings("rawtypes") Vector refs = doc.getFolderReferences(); Vector<String> unidrefs = new Vector<String>(refs.size()); for (int i = 0; i < refs.size(); i++) { // We don't use empty references if (!"".equals(refs.get(i))) { // Add the UNID of the folder design to the field // since the native field $FolderRefs stores the UNID // of the Folder design element, so we need the UNID to // be able to compare them in a view to make things faster unidrefs.add(this.folderList.get((String) refs.get(i))); } } doc.replaceItemValue(Configuration.SMARTFILE_ITEMNAME, refs); doc.replaceItemValue(Configuration.SMARTFILE_REFNAME, unidrefs); doc.save(); } catch (NotesException e) { Utils.logError(e); } catch (Exception e) { Utils.logError(e); } } private void rebuildModel(Database db, IProgressMonitor monitor, boolean saveModelAfterRebuild) { this.folderList = new HashMap<String, String>(); this.folderRef = new HashMap<String, String>(); List<View> views = this.getFoldersFromDB(db, this.folderList, this.folderRef); // We progress per view monitor.beginTask("Rebuilding vector model from scratch", views.size() + 30); // Rebuild the vector model Utils.logInfo("\tRebuilding 
vector model from scratch"); // Count all the words in all the folders (tf) 20 items for moving docs this.wordCounts = this.countWordsInDatabase(views, db, monitor); // 30 items for refeshing words this.refeshWords(monitor); if (saveModelAfterRebuild) { this.save(); } for (View v : views) { Utils.shred(v); } } private void refeshWords(IProgressMonitor monitor) { // Calculate the idf (inverse document frequency) for each word this.wordIDFs = this.calculateIDFs(wordCounts, monitor); monitor.worked(10); // Calculate ( tf * idf ) for each word in each folder this.wordTFIDFs = this.calculateTFIDFs(wordCounts, wordIDFs, monitor); monitor.worked(10); // Calculate vector length for each folder this.wordVectorLengths = this.calculateVectorLengths(wordTFIDFs, monitor); monitor.worked(10); } /************************************************************************************************* * s e t S F L a b e l s Set the SFLables fields in all the documents in the * specified views **************************************************************************************************/ private void setSFLabels(Database db, List<String> viewNames) { View v = null; Document doc = null; Document nextDoc = null; for (String viewName : viewNames) { try { v = db.getView(viewName); doc = v.getFirstDocument(); while (doc != null) { nextDoc = v.getNextDocument(doc); if (!doc.hasItem(Configuration.SFLABELS_FIELD) || doc.getItemValueString( Configuration.SFLABELS_FIELD).equals("")) { this.processDocument(doc, this.wordIDFs, this.wordTFIDFs, this.wordVectorLengths); } Utils.logInfo("\tSetting SwiftFile fields in " + viewName + " document: \"" + doc.getUniversalID() + " - " + doc.getItemValueString("Subject") + "\""); if (doc.getItemValueString(Configuration.SFLABELS_FIELD) .length() == 0) { Utils.logWarning("\t\tNo recommended folders. Index is empty? 
Rebuild your index?"); } else { Utils.logInfo("\t\tRecommending folders:\t\t\t1: " + doc.getItemValueString("SFLabel1") + "\t\t\t2: " + doc.getItemValueString("SFLabel2") + "\t\t\t3: " + doc.getItemValueString("SFLabel3")); } doc.recycle(); doc = nextDoc; } } catch (NotesException e) { Utils.logError( "Unexpected exception during setSFLabels processing:" + e.id + " " + e.text, e); } finally { Utils.shred(v, doc, nextDoc); } } } }
/*
 * Zed Attack Proxy (ZAP) and its related class files.
 *
 * ZAP is an HTTP/HTTPS proxy for assessing web application security.
 *
 * Copyright 2016 The ZAP Development Team
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.zaproxy.zap.extension.pscanrules;

import com.shapesecurity.salvation.ParserWithLocation;
import com.shapesecurity.salvation.data.Notice;
import com.shapesecurity.salvation.data.Origin;
import com.shapesecurity.salvation.data.Policy;
import com.shapesecurity.salvation.data.URI;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import net.htmlparser.jericho.Source;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.parosproxy.paros.Constant;
import org.parosproxy.paros.core.scanner.Alert;
import org.parosproxy.paros.core.scanner.Plugin.AlertThreshold;
import org.parosproxy.paros.network.HttpMessage;
import org.zaproxy.zap.extension.pscan.PassiveScanThread;
import org.zaproxy.zap.extension.pscan.PluginPassiveScanner;

/**
 * Content Security Policy Header passive scan rule https://github.com/zaproxy/zaproxy/issues/527
 * Meant to complement the CSP Header Missing passive scan rule
 *
 * <p>TODO: Add handling for multiple CSP headers TODO: Add handling for CSP via META tag See
 * https://github.com/shapesecurity/salvation/issues/149 for info on combining CSP policies
 *
 * @author kingthorin+owaspzap@gmail.com
 */
public class ContentSecurityPolicyScanner extends PluginPassiveScanner {

    private static final String MESSAGE_PREFIX = "pscanrules.cspscanner.";
    private static final int PLUGIN_ID = 10055;

    private static final Logger LOGGER = Logger.getLogger(ContentSecurityPolicyScanner.class);

    private static final String HTTP_HEADER_CSP = "Content-Security-Policy";
    private static final String HTTP_HEADER_XCSP = "X-Content-Security-Policy";
    private static final String HTTP_HEADER_WEBKIT_CSP = "X-WebKit-CSP";

    // A wildcard origin used to probe whether the policy permits loads from anywhere.
    private static final String WILDCARD_URI = "http://*";
    private static final URI PARSED_WILDCARD_URI = URI.parse(WILDCARD_URI);

    private PassiveScanThread parent = null;

    @Override
    public void setParent(PassiveScanThread parent) {
        this.parent = parent;
    }

    @Override
    public void scanHttpRequestSend(HttpMessage msg, int id) {
        // Only checking the response for this plugin
    }

    @Override
    public void scanHttpResponseReceive(HttpMessage msg, int id, Source source) {
        boolean cspHeaderFound = false;
        int noticesRisk = Alert.RISK_INFO;
        // LOGGER.setLevel(Level.DEBUG); //Enable for debugging

        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Start " + id + " : " + msg.getRequestHeader().getURI().toString());
        }
        long start = System.currentTimeMillis();

        if (!msg.getResponseHeader().isHtml()
                && !AlertThreshold.LOW.equals(this.getAlertThreshold())) {
            // Only really applies to HTML responses, but also check everything on Low threshold
            return;
        }

        // Content-Security-Policy is supported by Chrome 25+, Firefox 23+,
        // Safari 7+, Edge but not Internet Explorer
        List<String> cspOptions = msg.getResponseHeader().getHeaderValues(HTTP_HEADER_CSP);
        if (!cspOptions.isEmpty()) {
            cspHeaderFound = true;
        }

        // X-Content-Security-Policy is an older header, supported by Firefox
        // 4.0+, and IE 10+ (in a limited fashion)
        List<String> xcspOptions = msg.getResponseHeader().getHeaderValues(HTTP_HEADER_XCSP);
        if (!xcspOptions.isEmpty()) {
            // Deprecated header: informational if a real CSP header is also present, low otherwise
            raiseAlert(
                    msg,
                    Constant.messages.getString(MESSAGE_PREFIX + "xcsp.name"),
                    id,
                    Constant.messages.getString(MESSAGE_PREFIX + "xcsp.desc"),
                    getHeaderField(msg, HTTP_HEADER_XCSP).get(0),
                    cspHeaderFound ? Alert.RISK_INFO : Alert.RISK_LOW,
                    xcspOptions.get(0));
        }

        // X-WebKit-CSP is supported by Chrome 14+, and Safari 6+
        List<String> xwkcspOptions =
                msg.getResponseHeader().getHeaderValues(HTTP_HEADER_WEBKIT_CSP);
        if (!xwkcspOptions.isEmpty()) {
            raiseAlert(
                    msg,
                    Constant.messages.getString(MESSAGE_PREFIX + "xwkcsp.name"),
                    id,
                    Constant.messages.getString(MESSAGE_PREFIX + "xwkcsp.desc"),
                    getHeaderField(msg, HTTP_HEADER_WEBKIT_CSP).get(0),
                    cspHeaderFound ? Alert.RISK_INFO : Alert.RISK_LOW,
                    xwkcspOptions.get(0));
        }

        if (cspHeaderFound) {
            ArrayList<Notice> notices = new ArrayList<>();
            Origin origin = URI.parse(msg.getRequestHeader().getURI().toString());
            // Flatten the header value list into the raw policy text
            String policyText = cspOptions.toString().replace("[", "").replace("]", "");
            Policy pol = ParserWithLocation.parse(policyText, origin, notices); // Populate notices

            if (!notices.isEmpty()) {
                String cspNoticesString = getCSPNoticesString(notices);
                // Errors/warnings from the parser are a stronger signal than plain info notices
                if (cspNoticesString.contains(
                                Constant.messages.getString(MESSAGE_PREFIX + "notices.errors"))
                        || cspNoticesString.contains(
                                Constant.messages.getString(
                                        MESSAGE_PREFIX + "notices.warnings"))) {
                    noticesRisk = Alert.RISK_LOW;
                } else {
                    noticesRisk = Alert.RISK_INFO;
                }
                raiseAlert(
                        msg,
                        Constant.messages.getString(MESSAGE_PREFIX + "notices.name"),
                        id,
                        cspNoticesString,
                        getHeaderField(msg, HTTP_HEADER_CSP).get(0),
                        noticesRisk,
                        cspOptions.get(0));
            }

            List<String> allowedWildcardSources = getAllowedWildcardSources(policyText, origin);
            if (!allowedWildcardSources.isEmpty()) {
                String allowedWildcardSrcs =
                        allowedWildcardSources.toString().replace("[", "").replace("]", "");
                String wildcardSrcDesc =
                        Constant.messages.getString(
                                MESSAGE_PREFIX + "wildcard.desc", allowedWildcardSrcs);
                raiseAlert(
                        msg,
                        Constant.messages.getString(MESSAGE_PREFIX + "wildcard.name"),
                        id,
                        wildcardSrcDesc,
                        getHeaderField(msg, HTTP_HEADER_CSP).get(0),
                        Alert.RISK_MEDIUM,
                        cspOptions.get(0));
            }

            if (pol.allowsUnsafeInlineScript()) {
                raiseAlert(
                        msg,
                        Constant.messages.getString(MESSAGE_PREFIX + "scriptsrc.unsafe.name"),
                        id,
                        Constant.messages.getString(MESSAGE_PREFIX + "scriptsrc.unsafe.desc"),
                        getHeaderField(msg, HTTP_HEADER_CSP).get(0),
                        Alert.RISK_MEDIUM,
                        cspOptions.get(0));
            }

            if (pol.allowsUnsafeInlineStyle()) {
                raiseAlert(
                        msg,
                        Constant.messages.getString(MESSAGE_PREFIX + "stylesrc.unsafe.name"),
                        id,
                        Constant.messages.getString(MESSAGE_PREFIX + "stylesrc.unsafe.desc"),
                        getHeaderField(msg, HTTP_HEADER_CSP).get(0),
                        Alert.RISK_MEDIUM,
                        cspOptions.get(0));
            }
        }

        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug(
                    "\tScan of record "
                            + id
                            + " took "
                            + (System.currentTimeMillis() - start)
                            + " ms");
        }
    }

    /**
     * Renders the parser notices grouped into errors, warnings and info items, one per line,
     * each group preceded by its localized heading.
     */
    private String getCSPNoticesString(ArrayList<Notice> notices) {
        final char NEWLINE = '\n';
        StringBuilder returnSb = new StringBuilder();

        ArrayList<Notice> errorsList = Notice.getAllErrors(notices);
        if (!errorsList.isEmpty()) {
            returnSb.append(Constant.messages.getString(MESSAGE_PREFIX + "notices.errors"))
                    .append(NEWLINE);
            for (Notice notice : errorsList) {
                returnSb.append(notice.show()).append(NEWLINE);
                // Ex: 1:1: Unrecognised directive-name: "image-src".
            }
        }

        ArrayList<Notice> warnList = Notice.getAllWarnings(notices);
        if (!warnList.isEmpty()) {
            returnSb.append(Constant.messages.getString(MESSAGE_PREFIX + "notices.warnings"))
                    .append(NEWLINE);
            for (Notice notice : warnList) {
                returnSb.append(notice.show()).append(NEWLINE);
                // Ex: 1:25: This host name is unusual, and likely meant to be a
                // keyword that is missing the required quotes: 'none'.
            }
        }

        ArrayList<Notice> infoList = Notice.getAllInfos(notices);
        if (!infoList.isEmpty()) {
            returnSb.append(Constant.messages.getString(MESSAGE_PREFIX + "notices.infoitems"))
                    .append(NEWLINE);
            for (Notice notice : infoList) {
                returnSb.append(notice.show()).append(NEWLINE);
                // Ex: 1:31: A draft of the next version of CSP deprecates
                // report-uri in favour of a new report-to directive.
            }
        }
        return returnSb.toString();
    }

    /**
     * Extracts a list of headers, and returns them without changing their cases.
     *
     * @param msg HTTP Response message
     * @param header The header field(s) to be found
     * @return list of the matched headers
     */
    private List<String> getHeaderField(HttpMessage msg, String header) {
        List<String> matchedHeaders = new ArrayList<>();
        String headers = msg.getResponseHeader().toString();
        String[] headerElements = headers.split("\\r\\n");
        // Quote the header name so it is matched literally, not as a regex fragment
        Pattern pattern = Pattern.compile("^" + Pattern.quote(header), Pattern.CASE_INSENSITIVE);
        for (String hdr : headerElements) {
            Matcher matcher = pattern.matcher(hdr);
            if (matcher.find()) {
                matchedHeaders.add(matcher.group());
            }
        }
        return matchedHeaders;
    }

    /**
     * Probes the parsed policy with a wildcard source and returns the directive names that would
     * allow a load from any http origin.
     */
    private List<String> getAllowedWildcardSources(String policyText, Origin origin) {
        List<String> allowedSources = new ArrayList<>();
        Policy pol = ParserWithLocation.parse(policyText, origin);

        if (pol.allowsScriptFromSource(PARSED_WILDCARD_URI)) {
            allowedSources.add("script-src");
            allowedSources.add("script-src-elem");
            allowedSources.add("script-src-attr");
        }
        if (pol.allowsStyleFromSource(PARSED_WILDCARD_URI)) {
            allowedSources.add("style-src");
            allowedSources.add("style-src-elem");
            allowedSources.add("style-src-attr");
        }
        if (pol.allowsImgFromSource(PARSED_WILDCARD_URI)) {
            allowedSources.add("img-src");
        }
        if (pol.allowsConnectTo(PARSED_WILDCARD_URI)) {
            allowedSources.add("connect-src");
        }
        if (pol.allowsFrameFromSource(PARSED_WILDCARD_URI)) {
            allowedSources.add("frame-src");
        }
        if (pol.allowsFrameAncestor(PARSED_WILDCARD_URI)) {
            // FIX: was "frame-ancestor"; the CSP directive is named "frame-ancestors"
            allowedSources.add("frame-ancestors");
        }
        if (pol.allowsFontFromSource(PARSED_WILDCARD_URI)) {
            allowedSources.add("font-src");
        }
        if (pol.allowsMediaFromSource(PARSED_WILDCARD_URI)) {
            allowedSources.add("media-src");
        }
        if (pol.allowsObjectFromSource(PARSED_WILDCARD_URI)) {
            allowedSources.add("object-src");
        }
        if (pol.allowsManifestFromSource(PARSED_WILDCARD_URI)) {
            allowedSources.add("manifest-src");
        }
        if (pol.allowsWorkerFromSource(PARSED_WILDCARD_URI)) {
            allowedSources.add("worker-src");
        }
        if (pol.allowsPrefetchFromSource(PARSED_WILDCARD_URI)) {
            allowedSources.add("prefetch-src");
        }
        return allowedSources;
    }

    @Override
    public int getPluginId() {
        return PLUGIN_ID;
    }

    @Override
    public String getName() {
        return Constant.messages.getString(MESSAGE_PREFIX + "name");
    }

    private String getSolution() {
        return Constant.messages.getString(MESSAGE_PREFIX + "soln");
    }

    private String getReference() {
        return Constant.messages.getString(MESSAGE_PREFIX + "refs");
    }

    /**
     * Raises a passive-scan alert through the parent thread.
     *
     * @param msg the message the alert is raised against
     * @param name sub-rule name appended to the scanner name (may be empty)
     * @param id history reference id
     * @param description alert description
     * @param param the matched header field
     * @param risk one of the {@code Alert.RISK_*} constants
     * @param evidence the offending header value
     */
    private void raiseAlert(
            HttpMessage msg,
            String name,
            int id,
            String description,
            String param,
            int risk,
            String evidence) {
        String alertName = StringUtils.isEmpty(name) ? getName() : getName() + ": " + name;

        Alert alert =
                new Alert(
                        getPluginId(),
                        risk,
                        Alert.CONFIDENCE_MEDIUM, // PluginID, Risk, Reliability
                        alertName);
        alert.setDetail(
                description, // Description
                msg.getRequestHeader().getURI().toString(), // URI
                param, // Param
                "", // Attack
                "", // Other info
                getSolution(), // Solution
                getReference(), // References
                evidence, // Evidence
                16, // CWE-16: Configuration
                15, // WASC-15: Application Misconfiguration
                msg); // HttpMessage
        parent.raiseAlert(id, alert);
    }
}
package com.inmobi.messaging.consumer.databus;

/*
 * #%L
 * messaging-client-databus
 * %%
 * Copyright (C) 2012 - 2014 InMobi
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.io.IOException;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.ThreadedMapBenchmark;
import org.testng.annotations.AfterTest;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;

import com.inmobi.databus.readers.CollectorStreamReader;
import com.inmobi.messaging.ClientConfig;
import com.inmobi.messaging.consumer.AbstractMessageConsumer;
import com.inmobi.messaging.consumer.MessageConsumerFactory;
import com.inmobi.messaging.consumer.util.ConsumerUtil;
import com.inmobi.messaging.consumer.util.TestUtil;

/**
 * Databus consumer tests driven against a single collector. Each test builds a
 * {@link ClientConfig} with a dedicated checkpoint directory (ck1..ck15 from the
 * base class) so tests do not interfere with each other.
 */
public class TestDatabusConsumer extends TestAbstractDatabusConsumer {

  /** Loads the base consumer configuration from the classpath. */
  ClientConfig loadConfig() {
    return ClientConfig.loadFromClasspath(
        MessageConsumerFactory.MESSAGE_CLIENT_CONF_FILE);
  }

  /**
   * Builds a config pre-populated with the options shared by most tests.
   *
   * @param rootDirsValue comma-separated databus root dirs value
   * @param checkpointDir checkpoint dir to use, or null to leave unset
   * @param withRelativeStartTime whether to set the relative start time option
   * @return the populated config; callers may set further options on it
   */
  private ClientConfig createConfig(String rootDirsValue, String checkpointDir,
      boolean withRelativeStartTime) {
    ClientConfig config = loadConfig();
    config.set(DatabusConsumerConfig.databusRootDirsConfig, rootDirsValue);
    if (checkpointDir != null) {
      config.set(DatabusConsumerConfig.checkpointDirConfig, checkpointDir);
    }
    if (withRelativeStartTime) {
      config.set(MessagingConsumerConfig.relativeStartTimeConfig,
          relativeStartTime);
    }
    return config;
  }

  @BeforeTest
  public void setup() throws Exception {
    consumerName = "c1";
    collectors = new String[] {COLLECTOR_PREFIX + "1"};
    dataFiles = new String[] {TestUtil.files[0], TestUtil.files[1],
        TestUtil.files[2]};
    super.setup(1);
  }

  @Test
  public void testTimeoutStats() throws Exception {
    ClientConfig config =
        createConfig(rootDirs[0].toUri().toString(), ck1, false);
    ConsumerUtil.testTimeoutStats(config, testStream, consumerName,
        CollectorStreamReader.getDateFromCollectorFile(dataFiles[0]), false,
        300);
  }

  @Test
  public void testMarkAndReset() throws Exception {
    ClientConfig config =
        createConfig(rootDirs[0].toUri().toString(), ck5, true);
    ConsumerUtil.testMarkAndReset(config, testStream, consumerName, false);
  }

  @Test
  public void testDynamicCollector() throws Exception {
    ClientConfig config =
        createConfig(rootDirs[0].toUri().toString(), ck14, true);
    config.set(DatabusConsumerConfig.frequencyForDiscoverer, "1");
    ConsumerUtil.testDynamicCollector(config, testStream, consumerName, false,
        rootDirs, conf, testStream, COLLECTOR_PREFIX);
  }

  @Test
  public void testMarkAndResetWithStartTime() throws Exception {
    ClientConfig config =
        createConfig(rootDirs[0].toUri().toString(), ck2, false);
    ConsumerUtil.testMarkAndResetWithStartTime(config, testStream,
        consumerName,
        CollectorStreamReader.getDateFromCollectorFile(dataFiles[1]), false);
  }

  @Test
  public void testMultipleClusters() throws Exception {
    // Two clusters: root dirs passed as a comma-separated list.
    ClientConfig config = createConfig(
        rootDirs[0].toUri().toString() + "," + rootDirs[1].toUri().toString(),
        ck3, true);
    assertMessages(config, 2, 1);
  }

  @Test
  public void testMultipleClusters2() throws Exception {
    // Three clusters; note this test intentionally uses Path.toString()
    // rather than toUri().toString().
    ClientConfig config = createConfig(
        rootDirs[0].toString() + "," + rootDirs[1].toString() + ","
            + rootDirs[2].toString(),
        ck4, true);
    assertMessages(config, 3, 1);
  }

  @Test
  public void testConsumerStartUp() throws Exception {
    ClientConfig config =
        createConfig(rootDirs[0].toUri().toString(), ck6, true);
    ConsumerUtil.testConsumerStartUp(config, testStream, consumerName, false,
        CollectorStreamReader.getDateFromCollectorFile(dataFiles[1]),
        rootDirs[0], chkpointPathPrefix);
  }

  @Test
  public void testConsumerWithConfiguredStartTime() throws Exception {
    ClientConfig config =
        createConfig(rootDirs[0].toUri().toString(), ck7, false);
    Date absoluteStartTime =
        CollectorStreamReader.getDateFromCollectorFile(dataFiles[1]);
    config.set(MessageConsumerFactory.ABSOLUTE_START_TIME,
        AbstractMessageConsumer.minDirFormat.get().format(absoluteStartTime));
    ConsumerUtil.testConsumerWithConfiguredStartTime(config, false);
  }

  @Test
  public void testConsumerWithFutureStartTime() throws Exception {
    ClientConfig config =
        createConfig(rootDirs[0].toUri().toString(), null, false);
    Date absoluteStartTime =
        CollectorStreamReader.getDateFromCollectorFile(dataFiles[1]);
    // created a future time stamp
    Calendar cal = new GregorianCalendar();
    cal.setTime(absoluteStartTime);
    cal.add(Calendar.HOUR, 2);
    config.set(MessageConsumerFactory.ABSOLUTE_START_TIME,
        AbstractMessageConsumer.minDirFormat.get().format(cal.getTime()));
    ConsumerUtil.testConsumerWithFutureStartTime(config);
  }

  @Test
  public void testConsumerWithoutConfiguredOptions() throws Exception {
    ClientConfig config =
        createConfig(rootDirs[0].toUri().toString(), ck8, false);
    ConsumerUtil.testConsumerWithoutConfiguredOptions(config);
  }

  @Test
  public void testConsumerWithRetentionPeriod() throws Exception {
    ClientConfig config =
        createConfig(rootDirs[0].toUri().toString(), ck9, false);
    config.set(MessagingConsumerConfig.retentionConfig, "1");
    ConsumerUtil.testConsumerWithRetentionPeriod(config, testStream,
        consumerName, false);
  }

  /*
   * setting retention period as 0 hours and relative time is 30 minutes.
   * Consumer should start consume the messages from 30 minutes beyond the
   * current time
   */
  @Test
  public void testConsumerWithRelativeAndRetention() throws Exception {
    ClientConfig config =
        createConfig(rootDirs[0].toUri().toString(), ck10, true);
    config.set(DatabusConsumerConfig.retentionConfig, "0");
    Date absoluteStartTime =
        CollectorStreamReader.getDateFromCollectorFile(dataFiles[1]);
    config.set(MessageConsumerFactory.ABSOLUTE_START_TIME,
        AbstractMessageConsumer.minDirFormat.get().format(absoluteStartTime));
    ConsumerUtil.testConsumerWithRelativeAndRetention(config, testStream,
        consumerName, absoluteStartTime, false);
  }

  @Test
  public void testConsumerWithAbsoluteStartTimeAndRetention()
      throws Exception {
    ClientConfig config =
        createConfig(rootDirs[0].toUri().toString(), ck11, false);
    config.set(DatabusConsumerConfig.retentionConfig, "1");
    Date absoluteStartTime =
        CollectorStreamReader.getDateFromCollectorFile(dataFiles[1]);
    config.set(MessageConsumerFactory.ABSOLUTE_START_TIME,
        AbstractMessageConsumer.minDirFormat.get().format(absoluteStartTime));
    ConsumerUtil.testConsumerWithAbsoluteStartTimeAndRetention(config,
        testStream, consumerName, absoluteStartTime, false);
  }

  @Test
  public void testConsumerWithStopTimeBeyondCheckpoint() throws Exception {
    ClientConfig config =
        createConfig(rootDirs[0].toUri().toString(), ck12, false);
    FileSystem fs = rootDirs[0].getFileSystem(conf);
    try {
      // Deleting the dummy collector(COLLECTOR_PREFIX i.e. which does not have
      // any files to read).
      // Collector won't have any checkpoint if there are no files to read.
      // In this test, we wanted to test whether consumer is stopped if the
      // stop time is beyond the checkpoint.
      // If checkpoint is not present then consumer won't be closed completely.
      fs.delete(new Path(rootDirs[0].toUri().toString(),
          "data/" + testStream + "/" + COLLECTOR_PREFIX));
      Date absoluteStartTime =
          CollectorStreamReader.getDateFromCollectorFile(dataFiles[0]);
      config.set(MessageConsumerFactory.ABSOLUTE_START_TIME,
          AbstractMessageConsumer.minDirFormat.get().format(absoluteStartTime));
      Date stopDate =
          CollectorStreamReader.getDateFromCollectorFile(dataFiles[1]);
      Date stopDateForCheckpoint =
          CollectorStreamReader.getDateFromCollectorFile(dataFiles[0]);
      config.set(DatabusConsumerConfig.stopDateConfig,
          AbstractMessageConsumer.minDirFormat.get().format(stopDate));
      ConsumerUtil.testConsumerWithStopTimeBeyondCheckpoint(config, testStream,
          consumerName, absoluteStartTime, false, stopDateForCheckpoint);
    } finally {
      // create a dummy collector directory back
      fs.mkdirs(new Path(rootDirs[0].toUri().toString(),
          "data/" + testStream + "/" + COLLECTOR_PREFIX));
    }
  }

  @Test
  public void testConsumerWithStartOfStream() throws Exception {
    ClientConfig config =
        createConfig(rootDirs[0].toUri().toString(), ck13, false);
    config.set(MessagingConsumerConfig.startOfStreamConfig, "true");
    ConsumerUtil.testConsumerWithStartOfStream(config, testStream,
        consumerName, false);
  }

  @Test
  public void testDatabusConsumerBacklogOnlyCollector() throws Exception {
    ClientConfig config =
        createConfig(rootDirs[2].toUri().toString(), ck15, true);
    config.set(DatabusConsumerConfig.frequencyForDiscoverer, "1");
    ConsumerUtil.testConsumerBacklogOnlyCollector(config, testStream,
        consumerName, false, rootDirs, conf, testStream, COLLECTOR_PREFIX);
  }

  @Test
  public void testDatabusConsumerBacklog() throws Exception {
    ClientConfig config =
        createConfig(rootDirs[0].toUri().toString(), ck15, true);
    config.set(DatabusConsumerConfig.frequencyForDiscoverer, "1");
    ConsumerUtil.testConsumerBacklog(config, testStream, consumerName, false,
        rootDirs, conf, testStream, COLLECTOR_PREFIX);
  }

  @Test
  public void testDatabusConsumerBacklog2() throws Exception {
    ClientConfig config =
        createConfig(rootDirs[0].toUri().toString(), ck15, true);
    config.set(DatabusConsumerConfig.frequencyForDiscoverer, "1");
    ConsumerUtil.testConsumerBacklogMoreCollectors(config, testStream,
        consumerName, false, rootDirs, conf, testStream, COLLECTOR_PREFIX);
  }

  @AfterTest
  public void cleanup() throws IOException {
    super.cleanup();
  }
}
/*******************************************************************************
 * Copyright (c) 2015-2018 Skymind, Inc.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Apache License, Version 2.0 which is available at
 * https://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 ******************************************************************************/

package org.deeplearning4j.arbiter.server;

import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import org.apache.commons.io.FileUtils;
import org.deeplearning4j.arbiter.ComputationGraphSpace;
import org.deeplearning4j.arbiter.MultiLayerSpace;
import org.deeplearning4j.arbiter.optimize.api.CandidateGenerator;
import org.deeplearning4j.arbiter.optimize.api.data.DataProvider;
import org.deeplearning4j.arbiter.optimize.api.data.DataSetIteratorFactoryProvider;
import org.deeplearning4j.arbiter.optimize.api.score.ScoreFunction;
import org.deeplearning4j.arbiter.optimize.api.termination.MaxCandidatesCondition;
import org.deeplearning4j.arbiter.optimize.api.termination.MaxTimeCondition;
import org.deeplearning4j.arbiter.optimize.api.termination.TerminationCondition;
import org.deeplearning4j.arbiter.optimize.generator.GridSearchCandidateGenerator;
import org.deeplearning4j.arbiter.optimize.generator.RandomSearchGenerator;
import org.deeplearning4j.arbiter.optimize.config.OptimizationConfiguration;
import org.deeplearning4j.arbiter.saver.local.FileModelSaver;
import org.deeplearning4j.arbiter.scoring.RegressionValue;
import org.deeplearning4j.arbiter.scoring.ScoreFunctions;

import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

/**
 * Generate an {@link OptimizationConfiguration}
 * via the command line interface.
 * You can then use this configuration json file from
 * {@link ArbiterCliRunner}
 *
 * @author Adam Gibson
 */
public class ArbiterCliGenerator {
    @Parameter(names = {"--searchSpacePath"})
    private String searchSpacePath = null;
    @Parameter(names = {"--candidateType"},required = true)
    private String candidateType = null;
    @Parameter(names = {"--discretizationCount"})
    private int discretizationCount = 5;
    @Parameter(names = {"--gridSearchOrder"})
    private String gridSearchOrder = null;
    @Parameter(names = {"--neuralNetType"},required = true)
    private String neuralNetType = null;
    @Parameter(names = {"--dataSetIteratorClass"},required = true)
    private String dataSetIteratorClass = null;
    @Parameter(names = {"--modelOutputPath"},required = true)
    private String modelOutputPath = null;
    @Parameter(names = {"--score"},required = true)
    private String score = null;
    @Parameter(names = {"--problemType"},required = true)
    private String problemType = CLASSIFICIATION;
    @Parameter(names = {"--configSavePath"},required = true)
    private String configSavePath = null;
    @Parameter(names = {"--duration"},description = "The number of minutes to run for. Default is -1 which means run till convergence.")
    private long duration = -1;
    @Parameter(names = {"--numCandidates"},description = "The number of candidates to generate. Default is 1.")
    private int numCandidates = 1;

    // NOTE(review): REGRESSION_MULTI and REGRESSION share the same value ("regression"),
    // so the *_MULTI regression variant is indistinguishable on the command line.
    // Left unchanged here as changing the value would alter the CLI contract.
    public final static String REGRESSION_MULTI = "regression";
    public final static String REGRESSION = "regression";
    public final static String CLASSIFICIATION = "classification";

    public final static String RANDOM_CANDIDATE = "random";
    public final static String GRID_SEARCH_CANDIDATE = "gridsearch";

    public final static String SEQUENTIAL_ORDER = "sequence";
    public final static String RANDOM_ORDER = "random";

    public final static String COMP_GRAPH = "compgraph";
    public final static String MULTI_LAYER = "multilayer";

    public final static String ACCURACY = "accuracy";
    public final static String F1 = "f1";
    public final static String ACCURACY_MULTI = "accuracy_multi";
    public final static String F1_MULTI = "f1_multi";

    public final static String REGRESSION_SCORE = "regression_score";
    public final static String REGRESSION_SCORE_MULTI = "regression_score_multi";

    /**
     * Parses the CLI arguments, builds the {@link OptimizationConfiguration}
     * and writes it as json to {@code --configSavePath}.
     *
     * @param args command line arguments (see the {@code @Parameter} fields)
     * @throws Exception on parse/IO failure
     */
    public void runMain(String... args) throws Exception {
        JCommander jcmdr = new JCommander(this);

        try {
            jcmdr.parse(args);
        } catch(ParameterException e) {
            System.err.println(e.getMessage());
            //User provides invalid input -> print the usage info
            jcmdr.usage();
            try{ Thread.sleep(500); } catch(Exception e2){ }
            System.exit(1);
        }

        DataProvider dataProvider = new DataSetIteratorFactoryProvider();
        Map<String,Object> commands = new HashMap<>();
        commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, dataSetIteratorClass);

        if(neuralNetType.equals(MULTI_LAYER)) {
            MultiLayerSpace multiLayerSpace = loadMultiLayer();
            CandidateGenerator candidateGenerator;
            if(candidateType.equals(GRID_SEARCH_CANDIDATE)) {
                // FIX: this branch previously built a RandomSearchGenerator, silently
                // ignoring --candidateType gridsearch, --discretizationCount and
                // --gridSearchOrder.
                candidateGenerator = new GridSearchCandidateGenerator(
                        multiLayerSpace, discretizationCount, getMode(), commands);
            } else if(candidateType.equals(RANDOM_CANDIDATE)) {
                candidateGenerator = new RandomSearchGenerator(multiLayerSpace, commands);
            } else {
                // Fail fast instead of NPE-ing later on a null generator
                throw new IllegalArgumentException("Illegal candidate type " + candidateType);
            }

            if(problemType.equals(CLASSIFICIATION) || problemType.equals(REGRESSION)) {
                // Both problem types produce the same configuration; the score function
                // itself dispatches on problemType.
                OptimizationConfiguration configuration =
                        new OptimizationConfiguration.Builder()
                                .candidateGenerator(candidateGenerator)
                                .dataProvider(dataProvider)
                                .modelSaver(new FileModelSaver(modelOutputPath))
                                .scoreFunction(scoreFunctionMultiLayerNetwork())
                                .terminationConditions(getConditions())
                                .build();
                FileUtils.writeStringToFile(new File(configSavePath), configuration.toJson());
            }
        } else if(neuralNetType.equals(COMP_GRAPH)) {
            ComputationGraphSpace computationGraphSpace = loadCompGraph();
            CandidateGenerator candidateGenerator;
            if(candidateType.equals(GRID_SEARCH_CANDIDATE)) {
                // FIX: same as above - honour grid search for computation graphs too.
                candidateGenerator = new GridSearchCandidateGenerator(
                        computationGraphSpace, discretizationCount, getMode(), commands);
            } else if(candidateType.equals(RANDOM_CANDIDATE)) {
                candidateGenerator = new RandomSearchGenerator(computationGraphSpace, commands);
            } else {
                throw new IllegalArgumentException("Illegal candidate type " + candidateType);
            }

            // The original classification and non-classification branches were identical;
            // build the configuration once.
            OptimizationConfiguration configuration =
                    new OptimizationConfiguration.Builder()
                            .candidateGenerator(candidateGenerator)
                            .dataProvider(dataProvider)
                            .modelSaver(new FileModelSaver(modelOutputPath))
                            .scoreFunction(scoreFunctionCompGraph())
                            .terminationConditions(getConditions())
                            .build();
            FileUtils.writeStringToFile(new File(configSavePath), configuration.toJson());
        }
    }

    public static void main(String... args) throws Exception {
        new ArbiterCliGenerator().runMain(args);
    }

    /**
     * Termination conditions from --duration / --numCandidates; guarantees at
     * least one condition (a single candidate) so the search always terminates.
     */
    private List<TerminationCondition> getConditions() {
        List<TerminationCondition> ret = new ArrayList<>();
        if(duration > 0) {
            ret.add(new MaxTimeCondition(duration, TimeUnit.MINUTES));
        }
        if(numCandidates > 0) {
            ret.add(new MaxCandidatesCondition(numCandidates));
        }
        if(ret.isEmpty()) {
            ret.add(new MaxCandidatesCondition(1));
        }
        return ret;
    }

    /**
     * Maps --gridSearchOrder to a {@link GridSearchCandidateGenerator.Mode};
     * defaults to Sequential when the option is omitted.
     */
    private GridSearchCandidateGenerator.Mode getMode() {
        if(gridSearchOrder == null || gridSearchOrder.equals(SEQUENTIAL_ORDER)) {
            return GridSearchCandidateGenerator.Mode.Sequential;
        } else if(gridSearchOrder.equals(RANDOM_ORDER)) {
            return GridSearchCandidateGenerator.Mode.RandomOrder;
        }
        throw new IllegalArgumentException("Illegal mode " + gridSearchOrder);
    }

    /** Score function for computation-graph candidates, chosen by --score. */
    private ScoreFunction scoreFunctionCompGraph() {
        if(problemType.equals(CLASSIFICIATION)) {
            switch(score) {
                case ACCURACY: return ScoreFunctions.testSetAccuracy();
                case F1: return ScoreFunctions.testSetF1();
                case F1_MULTI: return ScoreFunctions.testSetF1();
                case ACCURACY_MULTI: return ScoreFunctions.testSetAccuracy();
                default: throw new IllegalArgumentException("Score " + score + " not valid for type " + problemType);
            }
        } else if(problemType.equals(REGRESSION)) {
            switch(score) {
                // NOTE(review): RegressionValue.valueOf(score) is called with
                // score == "regression_score"/"regression_score_multi", which does
                // not look like a RegressionValue enum constant - verify against
                // the RegressionValue enum; likely needs a separate --regressionValue
                // style option. Left unchanged to preserve behavior.
                case REGRESSION_SCORE: return ScoreFunctions.testSetRegression(RegressionValue.valueOf(score));
                case REGRESSION_SCORE_MULTI: return ScoreFunctions.testSetRegression(RegressionValue.valueOf(score));
                default: throw new IllegalArgumentException("Score " + score + " not valid for type " + problemType);
            }
        }
        throw new IllegalStateException("Illegal problem type " + problemType);
    }

    /** Score function for multi-layer-network candidates, chosen by --score. */
    private ScoreFunction scoreFunctionMultiLayerNetwork() {
        if(problemType.equals(CLASSIFICIATION)) {
            switch(score) {
                case ACCURACY: return ScoreFunctions.testSetAccuracy();
                case F1: return ScoreFunctions.testSetF1();
                default: throw new IllegalArgumentException("Score " + score + " not valid for type " + problemType);
            }
        } else if(problemType.equals(REGRESSION)) {
            switch(score) {
                // NOTE(review): see scoreFunctionCompGraph() - same RegressionValue concern.
                case REGRESSION_SCORE: return ScoreFunctions.testSetRegression(RegressionValue.valueOf(score));
                default: throw new IllegalArgumentException("Score " + score + " not valid for type " + problemType);
            }
        }
        throw new IllegalStateException("Illegal problem type " + problemType);
    }

    /** Loads the computation-graph search space json from --searchSpacePath. */
    private ComputationGraphSpace loadCompGraph() throws Exception {
        return ComputationGraphSpace.fromJson(
                FileUtils.readFileToString(new File(searchSpacePath)));
    }

    /** Loads the multi-layer search space json from --searchSpacePath. */
    private MultiLayerSpace loadMultiLayer() throws Exception {
        return MultiLayerSpace.fromJson(
                FileUtils.readFileToString(new File(searchSpacePath)));
    }
}
/** * Copyright (c) 2005-2007, Paul Tuckey * All rights reserved. * ==================================================================== * Licensed under the BSD License. Text as follows. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * - Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * - Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. * - Neither the name tuckey.org nor the names of its contributors * may be used to endorse or promote products derived from this * software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
* ==================================================================== */ package org.tuckey.web.filters.urlrewriteviacontainer; import org.tuckey.web.filters.urlrewrite.extend.RewriteMatch; import javax.servlet.FilterChain; import javax.servlet.ServletConfig; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.PrintWriter; /** * RunObject that can be used for testing. Included here and not in test folder as it needs to be deployed with the * main library. * * @author Paul Tuckey * @version $Revision: 33 $ $Date: 2006-09-12 16:41:56 +1200 (Tue, 12 Sep 2006) $ */ public class TestRunObj { private static boolean runCalled; private static boolean destroyCalled; private static boolean initCalled; private static boolean nonDefaultRunCalled; private static ServletConfig servletConfig; private static int createdCount = 0; private static String paramStr = null; private static long runWithChainParamAfterDoFilter = 0; public TestRunObj() { createdCount++; } public void run(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse) throws ServletException, IOException { runCalled = true; PrintWriter sos = httpServletResponse.getWriter(); if (sos == null) return; sos.print("this is " + TestRunObj.class.getName()); sos.close(); } public MockRewriteMatch runWithReturnedObj(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse) throws ServletException, IOException { return new MockRewriteMatch(); } public void nonDefaultRun(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse) throws ServletException, IOException { nonDefaultRunCalled = true; } public String runThatReturns(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse) throws ServletException, IOException { return "aaabbb"; } public String runWithParam(int i) throws ServletException, IOException { paramStr 
= "" + i; return paramStr; } public String runWithNoParams() throws ServletException, IOException { paramStr = "[no params]"; return paramStr; } public String runWithChainParam(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse, FilterChain c) throws ServletException, IOException { paramStr = "" + c; c.doFilter(httpServletRequest, httpServletResponse); try { Thread.sleep(10); } catch (InterruptedException e) { // } runWithChainParamAfterDoFilter = System.currentTimeMillis(); return paramStr; } public static long getRunWithChainParamAfterDoFilter() { return runWithChainParamAfterDoFilter; } public String runWithPrimitiveParam(int i, char c, double d, float f, short s, byte b, boolean b2, String s2) throws ServletException, IOException { paramStr = i + "," + c + "," + d + "," + f + "," + s + "," + b + "," + b2 + "," + s2; return paramStr; } public String runWithObjParam(Integer i, Character c, Double d, Float f, Short s, Byte b, Boolean b2, String s2) throws ServletException, IOException { paramStr = i + "," + c + "," + d + "," + f + "," + s + "," + b + "," + b2 + "," + s2; return paramStr; } public static String getParamStr() { return paramStr; } public void runNullPointerException(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse) throws ServletException, IOException { exceptionGenerator.doNullPointer(); } public void runRuntiumeException(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse) throws ServletException, IOException { exceptionGenerator.doRuntime(); } public void runServletException(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse) throws ServletException, IOException { exceptionGenerator.doServlet(); } public void runIOException(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse) throws ServletException, IOException { exceptionGenerator.doIO(); } public void runCustomException(HttpServletRequest 
httpServletRequest, HttpServletResponse httpServletResponse) throws TestExceptionGenerator.CustomException { exceptionGenerator.doCustom(); } TestExceptionGenerator exceptionGenerator = new TestExceptionGenerator(); private class TestExceptionGenerator { public void doNullPointer() { String aaa = null; // YES we WANT a null pointer here aaa.toLowerCase(); } public void doRuntime() { throw new RuntimeException("shit!"); } public void doServlet() throws ServletException { throw new ServletException("serv"); } public void doIO() throws IOException { throw new IOException("me i.o. has gone crazy"); } public void doCustom() throws CustomException { throw new CustomException(); } public class CustomException extends Exception { } } public RewriteMatch trialException(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse, ClassNotFoundException e) { return new MockRewriteMatch(); } /** * Do not delete! used in RunTest. */ private void privateRun(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse) { // do nothing } public void destroy() { destroyCalled = true; } public void init(ServletConfig config) throws ServletException { servletConfig = config; initCalled = true; } public static boolean isRunCalled() { return runCalled; } public static int getCreatedCount() { return createdCount; } public static void resetTestFlags() { createdCount = 0; runCalled = false; destroyCalled = false; initCalled = false; nonDefaultRunCalled = false; servletConfig = null; } public static ServletConfig getTestServletConfig() { return servletConfig; } public static boolean isDestroyCalled() { return destroyCalled; } public static boolean isInitCalled() { return initCalled; } public static boolean isNonDefaultRunCalled() { return nonDefaultRunCalled; } }
package com.orientechnologies.orient.core.index.hashindex.local.cache;

import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.zip.CRC32;

import com.orientechnologies.orient.core.storage.cache.OCachePointer;
import com.orientechnologies.orient.core.storage.cache.local.OWOWCache;
import com.orientechnologies.orient.core.storage.cache.OWriteCache;
import com.orientechnologies.orient.core.storage.impl.local.paginated.wal.ODiskWriteAheadLog;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.orientechnologies.common.serialization.types.OIntegerSerializer;
import com.orientechnologies.common.serialization.types.OLongSerializer;
import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.storage.fs.OFileClassic;
import com.orientechnologies.orient.core.storage.impl.local.paginated.OLocalPaginatedStorage;
import com.orientechnologies.orient.core.storage.impl.local.paginated.wal.OLogSequenceNumber;
import com.orientechnologies.orient.core.storage.impl.local.paginated.wal.OWALRecordsFactory;
import com.orientechnologies.orient.core.storage.impl.local.paginated.wal.WriteAheadLogTest;

/**
 * Tests the write-only write cache (OWOWCache): load/store round-trips, in-place
 * updates, background flushing, and the on-disk page layout (magic number, CRC,
 * LSN header) written by the cache.
 *
 * @author Andrey Lomakin
 * @since 26.07.13
 */
@Test
public class WOWCacheTest {
  // Per-page header: magic number + CRC plus segment + position of the LSN
  // (two int/long pairs) — page payload starts after this offset.
  private int systemOffset = 2 * (OIntegerSerializer.INT_SIZE + OLongSerializer.LONG_SIZE);
  // Each test page carries an 8-byte payload after the header.
  private int pageSize = systemOffset + 8;

  private OLocalPaginatedStorage storageLocal;
  private String fileName;

  private ODiskWriteAheadLog writeAheadLog;

  private OWriteCache wowCache;

  @BeforeClass
  public void beforeClass() throws IOException {
    OGlobalConfiguration.FILE_LOCK.setValue(Boolean.FALSE);
    String buildDirectory = System.getProperty("buildDirectory");
    if (buildDirectory == null)
      buildDirectory = ".";

    storageLocal = (OLocalPaginatedStorage) Orient.instance().loadStorage("plocal:" + buildDirectory + "/WOWCacheTest");
    storageLocal.create(null);

    fileName = "wowCacheTest.tst";

    OWALRecordsFactory.INSTANCE.registerNewRecord((byte) 128, WriteAheadLogTest.TestRecord.class);
  }

  @BeforeMethod
  public void beforeMethod() throws IOException {
    // Start every test from a fresh cache over a fresh file.
    closeCacheAndDeleteFile();
    initBuffer();
  }

  /**
   * Closes the cache/WAL (if open) and removes the test file plus the name-id map
   * so that the next test starts from a clean storage directory.
   */
  private void closeCacheAndDeleteFile() throws IOException {
    if (wowCache != null) {
      wowCache.close();
      wowCache = null;
    }

    if (writeAheadLog != null) {
      writeAheadLog.delete();
      writeAheadLog = null;
    }

    storageLocal.delete();

    File testFile = new File(storageLocal.getConfiguration().getDirectory() + File.separator + fileName);
    if (testFile.exists()) {
      Assert.assertTrue(testFile.delete());
    }

    File nameIdMapFile = new File(storageLocal.getConfiguration().getDirectory() + File.separator + "name_id_map.cm");
    if (nameIdMapFile.exists()) {
      Assert.assertTrue(nameIdMapFile.delete());
    }
  }

  @AfterClass
  public void afterClass() throws IOException {
    closeCacheAndDeleteFile();

    File file = new File(storageLocal.getConfiguration().getDirectory());
    Assert.assertTrue(file.delete());
  }

  private void initBuffer() throws IOException {
    wowCache = new OWOWCache(true, pageSize, 10000, writeAheadLog, 10, 100, 100, storageLocal, false, 1);
  }

  /**
   * Stores 200 random pages, re-reads them through the cache, flushes, and then
   * verifies the on-disk content and header of every page.
   */
  public void testLoadStore() throws IOException {
    Random random = new Random();

    byte[][] pageData = new byte[200][];
    long fileId = wowCache.addFile(fileName);

    for (int i = 0; i < pageData.length; i++) {
      byte[] data = new byte[8];
      random.nextBytes(data);

      pageData[i] = data;

      final OCachePointer cachePointer = wowCache.load(fileId, i, true);
      cachePointer.acquireExclusiveLock();
      cachePointer.getDataPointer().set(systemOffset + OWOWCache.PAGE_PADDING, data, 0, data.length);
      cachePointer.releaseExclusiveLock();

      wowCache.store(fileId, i, cachePointer);
      cachePointer.decrementReferrer();
    }

    for (int i = 0; i < pageData.length; i++) {
      byte[] dataOne = pageData[i];

      OCachePointer cachePointer = wowCache.load(fileId, i, false);
      byte[] dataTwo = cachePointer.getDataPointer().get(systemOffset + OWOWCache.PAGE_PADDING, 8);
      cachePointer.decrementReferrer();

      Assert.assertEquals(dataTwo, dataOne);
    }

    wowCache.flush();

    for (int i = 0; i < pageData.length; i++) {
      byte[] dataContent = pageData[i];
      assertFile(i, dataContent, new OLogSequenceNumber(0, 0));
    }
  }

  /**
   * Writes random pages at random indexes, then overwrites a subset of them, and
   * checks that both the cached and the flushed content reflect the latest data.
   */
  public void testDataUpdate() throws Exception {
    final NavigableMap<Long, byte[]> pageIndexDataMap = new TreeMap<Long, byte[]>();
    long fileId = wowCache.addFile(fileName);

    Random random = new Random();

    for (int i = 0; i < 600; i++) {
      long pageIndex = random.nextInt(2048);

      byte[] data = new byte[8];
      random.nextBytes(data);

      pageIndexDataMap.put(pageIndex, data);

      final OCachePointer cachePointer = wowCache.load(fileId, pageIndex, true);
      cachePointer.acquireExclusiveLock();
      cachePointer.getDataPointer().set(systemOffset + OWOWCache.PAGE_PADDING, data, 0, data.length);
      cachePointer.releaseExclusiveLock();

      wowCache.store(fileId, pageIndex, cachePointer);
      cachePointer.decrementReferrer();
    }

    for (Map.Entry<Long, byte[]> entry : pageIndexDataMap.entrySet()) {
      long pageIndex = entry.getKey();
      byte[] dataOne = entry.getValue();

      OCachePointer cachePointer = wowCache.load(fileId, pageIndex, false);
      byte[] dataTwo = cachePointer.getDataPointer().get(systemOffset + OWOWCache.PAGE_PADDING, 8);
      cachePointer.decrementReferrer();

      Assert.assertEquals(dataTwo, dataOne);
    }

    for (int i = 0; i < 300; i++) {
      long desiredIndex = random.nextInt(2048);

      // Pick an existing page index near the random target so we always update
      // a page that was written in the first phase.
      Long pageIndex = pageIndexDataMap.ceilingKey(desiredIndex);
      if (pageIndex == null)
        pageIndex = pageIndexDataMap.floorKey(desiredIndex);

      byte[] data = new byte[8];
      random.nextBytes(data);
      pageIndexDataMap.put(pageIndex, data);

      final OCachePointer cachePointer = wowCache.load(fileId, pageIndex, true);
      cachePointer.acquireExclusiveLock();
      cachePointer.getDataPointer().set(systemOffset + OWOWCache.PAGE_PADDING, data, 0, data.length);
      cachePointer.releaseExclusiveLock();

      wowCache.store(fileId, pageIndex, cachePointer);
      cachePointer.decrementReferrer();
    }

    for (Map.Entry<Long, byte[]> entry : pageIndexDataMap.entrySet()) {
      long pageIndex = entry.getKey();
      byte[] dataOne = entry.getValue();

      OCachePointer cachePointer = wowCache.load(fileId, pageIndex, false);
      byte[] dataTwo = cachePointer.getDataPointer().get(systemOffset + OWOWCache.PAGE_PADDING, 8);
      cachePointer.decrementReferrer();

      Assert.assertEquals(dataTwo, dataOne);
    }

    wowCache.flush();

    for (Map.Entry<Long, byte[]> entry : pageIndexDataMap.entrySet()) {
      assertFile(entry.getKey(), entry.getValue(), new OLogSequenceNumber(0, 0));
    }
  }

  /**
   * Verifies that without an explicit flush the background writer eventually
   * persists all dirty pages (waits a fixed 10s before checking the file).
   */
  public void testFlushAllContentEventually() throws Exception {
    Random random = new Random();

    byte[][] pageData = new byte[200][];
    long fileId = wowCache.addFile(fileName);

    for (int i = 0; i < pageData.length; i++) {
      byte[] data = new byte[8];
      random.nextBytes(data);

      pageData[i] = data;

      final OCachePointer cachePointer = wowCache.load(fileId, i, true);
      cachePointer.acquireExclusiveLock();
      cachePointer.getDataPointer().set(systemOffset + OWOWCache.PAGE_PADDING, data, 0, data.length);
      cachePointer.releaseExclusiveLock();

      wowCache.store(fileId, i, cachePointer);
      cachePointer.decrementReferrer();
    }

    for (int i = 0; i < pageData.length; i++) {
      byte[] dataOne = pageData[i];

      OCachePointer cachePointer = wowCache.load(fileId, i, false);
      byte[] dataTwo = cachePointer.getDataPointer().get(systemOffset + OWOWCache.PAGE_PADDING, 8);
      cachePointer.decrementReferrer();

      Assert.assertEquals(dataTwo, dataOne);
    }

    // Give the background flush thread time to write everything out.
    Thread.sleep(10000);

    for (int i = 0; i < pageData.length; i++) {
      byte[] dataContent = pageData[i];
      assertFile(i, dataContent, new OLogSequenceNumber(0, 0));
    }
  }

  /**
   * Reads the raw page at {@code pageIndex} directly from disk and verifies the
   * payload, the magic number, the CRC32 over the page body, and the stored LSN.
   * The file handle is now closed in a finally block so a failing assertion no
   * longer leaks the open OFileClassic.
   */
  private void assertFile(long pageIndex, byte[] value, OLogSequenceNumber lsn) throws IOException {
    String path = storageLocal.getConfiguration().getDirectory() + File.separator + fileName;

    OFileClassic fileClassic = new OFileClassic(path, "r");
    fileClassic.open();
    try {
      byte[] content = new byte[8 + systemOffset];
      fileClassic.read(pageIndex * (8 + systemOffset), content, 8 + systemOffset);

      Assert.assertEquals(Arrays.copyOfRange(content, systemOffset, 8 + systemOffset), value);

      long magicNumber = OLongSerializer.INSTANCE.deserializeNative(content, 0);
      Assert.assertEquals(magicNumber, OWOWCache.MAGIC_NUMBER);

      CRC32 crc32 = new CRC32();
      crc32.update(content, OIntegerSerializer.INT_SIZE + OLongSerializer.LONG_SIZE, content.length
          - OIntegerSerializer.INT_SIZE - OLongSerializer.LONG_SIZE);
      int crc = OIntegerSerializer.INSTANCE.deserializeNative(content, OLongSerializer.LONG_SIZE);
      Assert.assertEquals(crc, (int) crc32.getValue());

      int segment = OIntegerSerializer.INSTANCE.deserializeNative(content, OLongSerializer.LONG_SIZE
          + OIntegerSerializer.INT_SIZE);
      long position = OLongSerializer.INSTANCE
          .deserializeNative(content, OLongSerializer.LONG_SIZE + 2 * OIntegerSerializer.INT_SIZE);

      OLogSequenceNumber readLsn = new OLogSequenceNumber(segment, position);
      Assert.assertEquals(readLsn, lsn);
    } finally {
      fileClassic.close();
    }
  }
}
/* * ==================================================================== * ======== The Apache Software License, Version 1.1 * ================== * ========================================================== * Copyright (C) 2002 The Apache Software Foundation. All rights * reserved. Redistribution and use in source and binary forms, with * or without modifica- tion, are permitted provided that the * following conditions are met: 1. Redistributions of source code * must retain the above copyright notice, this list of conditions and * the following disclaimer. 2. Redistributions in binary form must * reproduce the above copyright notice, this list of conditions and * the following disclaimer in the documentation and/or other * materials provided with the distribution. 3. The end-user * documentation included with the redistribution, if any, must * include the following acknowledgment: "This product includes * software developed by SuperBonBon Industries * (http://www.sbbi.net/)." Alternately, this acknowledgment may * appear in the software itself, if and wherever such third-party * acknowledgments normally appear. 4. The names "UPNPLib" and * "SuperBonBon Industries" must not be used to endorse or promote * products derived from this software without prior written * permission. For written permission, please contact info@sbbi.net. * 5. Products derived from this software may not be called * "SuperBonBon Industries", nor may "SBBI" appear in their name, * without prior written permission of SuperBonBon Industries. THIS * SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. 
IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR ITS
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLU- DING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
 * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE. This software consists of voluntary contributions made
 * by many individuals on behalf of SuperBonBon Industries. For more
 * information on SuperBonBon Industries, please see
 * <http://www.sbbi.net/>.
 */
package net.tomp2p.upnp;

import java.io.IOException;
import java.net.DatagramPacket;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.SocketTimeoutException;
import java.net.URL;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import net.tomp2p.utils.Timings;

/**
 * This class can be used to listen for UPNP devices responses when a search
 * message is sent by a control point ( using the
 * net.sbbi.upnp.Discovery.sendSearchMessage() method ).
 * <p>
 * Singleton: a single background thread receives SSDP responses on a shared
 * multicast socket and dispatches them to handlers registered per search target
 * ("st" header). The listener thread is started lazily on first registration
 * and stopped when the last handler is unregistered.
 *
 * @author <a href="mailto:superbonbon@sbbi.net">SuperBonBon</a>
 * @version 1.0
 */
public class DiscoveryListener implements Runnable {

    // Socket receive timeout (ms); short so the run loop can notice inService == false promptly.
    private static final int DEFAULT_TIMEOUT = 250;

    // Search target ("st" header value) -> handlers interested in that target.
    // Guarded by REGISTRATION_PROCESS for both registration and dispatch.
    private Map<String, Set<DiscoveryResultsHandler>> registeredHandlers =
            new HashMap<String, Set<DiscoveryResultsHandler>>();

    // Lock protecting registeredHandlers and the start/stop of the listener thread.
    private final Object REGISTRATION_PROCESS = new Object();

    private final static DiscoveryListener singleton = new DiscoveryListener();

    // True while the listener thread is running; written under `singleton` lock,
    // polled by the run loop.
    private boolean inService = false;

    // Whether the listener thread should be created as a daemon thread.
    private boolean daemon = true;

    private java.net.MulticastSocket skt;

    private DatagramPacket input;

    private DiscoveryListener() {
    }

    final static DiscoveryListener getInstance() {
        return singleton;
    }

    /**
     * Sets the listener as a daemon thread
     *
     * @param daemon
     *            daemon thread
     */
    public void setDaemon(boolean daemon) {
        this.daemon = daemon;
    }

    /**
     * Registers an SSDP response message handler
     *
     * @param resultsHandler
     *            the SSDP response message handler
     * @param searchTarget
     *            the search target
     * @throws IOException
     *             if some errors occurs during SSDP search response messages
     *             listener thread startup
     */
    public void registerResultsHandler(DiscoveryResultsHandler resultsHandler, String searchTarget)
            throws IOException {
        synchronized (REGISTRATION_PROCESS) {
            // Lazily start the listener thread on first registration.
            if (!inService) {
                startDevicesListenerThread();
            }
            Set<DiscoveryResultsHandler> handlers = registeredHandlers.get(searchTarget);
            if (handlers == null) {
                handlers = new HashSet<DiscoveryResultsHandler>();
                registeredHandlers.put(searchTarget, handlers);
            }
            handlers.add(resultsHandler);
        }
    }

    /**
     * Unregisters an SSDP response message handler
     *
     * @param resultsHandler
     *            the SSDP response message handler
     * @param searchTarget
     *            the search target
     */
    public void unRegisterResultsHandler(DiscoveryResultsHandler resultsHandler, String searchTarget) {
        synchronized (REGISTRATION_PROCESS) {
            Set<DiscoveryResultsHandler> handlers = registeredHandlers.get(searchTarget);
            if (handlers != null) {
                handlers.remove(resultsHandler);
                if (handlers.size() == 0) {
                    registeredHandlers.remove(searchTarget);
                }
            }
            // Stop the listener thread once no handler is interested anymore.
            if (registeredHandlers.size() == 0) {
                stopDevicesListenerThread();
            }
        }
    }

    // Opens the multicast socket and spawns the listener thread, then busy-waits
    // (2 ms sleeps) until run() has flipped inService to true.
    private void startDevicesListenerThread() throws IOException {
        synchronized (singleton) {
            if (!inService) {
                startMultiCastSocket();
                Thread deamon = new Thread(this, "DiscoveryListener daemon");
                deamon.setDaemon(daemon);
                deamon.start();
                while (!inService) {
                    // wait for the thread to be started let's wait a few
                    // ms
                    try {
                        Timings.sleep(2);
                    } catch (InterruptedException ex) {
                        // don t care
                    }
                }
            }
        }
    }

    // Signals the run loop to exit; the loop notices within DEFAULT_TIMEOUT ms.
    private void stopDevicesListenerThread() {
        synchronized (singleton) {
            inService = false;
        }
    }

    // Binds a multicast socket on the SSDP search port, joins the SSDP group and
    // prepares the reusable receive packet (2 KB buffer).
    private void startMultiCastSocket() throws IOException {
        int bindPort = Discovery.DEFAULT_SSDP_SEARCH_PORT;
        skt = new java.net.MulticastSocket(null);
        skt.bind(new InetSocketAddress(InetAddress.getByName("0.0.0.0"), bindPort));
        skt.setTimeToLive(Discovery.DEFAULT_TTL);
        skt.setSoTimeout(DEFAULT_TIMEOUT);
        skt.joinGroup(InetAddress.getByName(Discovery.SSDP_IP));

        byte[] buf = new byte[2048];
        input = new DatagramPacket(buf, buf.length);
    }

    /**
     * Receive loop: repeatedly reads SSDP responses until inService is cleared,
     * then leaves the multicast group and closes the socket. Only callable from
     * the thread created in startDevicesListenerThread (checked by name).
     */
    @Override
    public void run() {
        if (!Thread.currentThread().getName().equals("DiscoveryListener daemon")) {
            throw new RuntimeException("No right to call this method");
        }
        inService = true;
        while (inService) {
            try {
                listenBroadCast();
            } catch (SocketTimeoutException ex) {
                // ignoring — timeout is expected; it lets us re-check inService
            } catch (IOException ioEx) {
                ioEx.printStackTrace();
            } catch (Exception ex) {
                // any unexpected error stops the listener
                ex.printStackTrace();
                inService = false;
            }
        }
        try {
            skt.leaveGroup(InetAddress.getByName(Discovery.SSDP_IP));
            skt.close();
        } catch (Exception ex) {
            // ignoring
        }
    }

    // Receives one datagram, parses it as an HTTP response and — if it looks like
    // a valid SSDP search response (200 OK with an "st" header) — validates the
    // required header fields and dispatches to the handlers registered for that
    // search target.
    // NOTE(review): the datagram bytes are decoded with the platform default
    // charset — presumably ASCII/UTF-8 SSDP payloads; confirm before changing.
    private void listenBroadCast() throws IOException {
        skt.receive(input);
        InetAddress from = input.getAddress();
        String received = new String(input.getData(), input.getOffset(), input.getLength());
        HttpResponse msg = null;
        try {
            msg = new HttpResponse(received);
        } catch (IllegalArgumentException ex) {
            // crappy http sent
            // log.debug( "Skipping uncompliant HTTP message " + received
            // );
            return;
        }
        String header = msg.getHeader();
        if (header != null && header.startsWith("HTTP/1.1 200 OK") && msg.getHTTPHeaderField("st") != null) {
            // probably a search repsonse !
            String deviceDescrLoc = msg.getHTTPHeaderField("location");
            if (deviceDescrLoc == null || deviceDescrLoc.trim().length() == 0) {
                // log.debug(
                // "Skipping SSDP message, missing HTTP header 'location' field"
                // );
                return;
            }
            URL loc = new URL(deviceDescrLoc);
            // InetAddress locHost = InetAddress.getByName( loc.getHost() );
            String st = msg.getHTTPHeaderField("st");
            if (st == null || st.trim().length() == 0) {
                // log.debug(
                // "Skipping SSDP message, missing HTTP header 'st' field"
                // );
                return;
            }
            String usn = msg.getHTTPHeaderField("usn");
            if (usn == null || usn.trim().length() == 0) {
                // log.debug(
                // "Skipping SSDP message, missing HTTP header 'usn' field"
                // );
                return;
            }
            String maxAge = msg.getHTTPFieldElement("Cache-Control", "max-age");
            if (maxAge == null || maxAge.trim().length() == 0) {
                // log.debug(
                // "Skipping SSDP message, missing HTTP header 'max-age' field"
                // );
                return;
            }
            String server = msg.getHTTPHeaderField("server");
            if (server == null || server.trim().length() == 0) {
                // log.debug(
                // "Skipping SSDP message, missing HTTP header 'server' field"
                // );
                return;
            }
            // The USN is "<udn>::<extra>"; strip everything after "::" to get the UDN.
            String udn = usn;
            int index = udn.indexOf("::");
            if (index != -1) {
                udn = udn.substring(0, index);
            }
            synchronized (REGISTRATION_PROCESS) {
                Set<DiscoveryResultsHandler> handlers = registeredHandlers.get(st);
                if (handlers != null) {
                    for (DiscoveryResultsHandler handler : handlers) {
                        handler.discoveredDevice(usn, udn, st, maxAge, loc, server, from);
                    }
                }
            }
        } else {
            // log.debug( "Skipping uncompliant HTTP message " + received
            // );
        }
    }
}