gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/**
* Copyright (c) 2013-2019 Contributors to the Eclipse Foundation
*
* <p> See the NOTICE file distributed with this work for additional information regarding copyright
* ownership. All rights reserved. This program and the accompanying materials are made available
* under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
* available at http://www.apache.org/licenses/LICENSE-2.0.txt
*/
package org.locationtech.geowave.adapter.raster.plugin;
import java.awt.Color;
import java.awt.Rectangle;
import java.awt.geom.Rectangle2D;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.imageio.ImageReadParam;
import javax.media.jai.Histogram;
import javax.media.jai.ImageLayout;
import javax.media.jai.Interpolation;
import org.geotools.coverage.grid.GridCoverage2D;
import org.geotools.coverage.grid.GridEnvelope2D;
import org.geotools.coverage.grid.GridGeometry2D;
import org.geotools.coverage.grid.io.AbstractGridCoverage2DReader;
import org.geotools.coverage.grid.io.AbstractGridFormat;
import org.geotools.coverage.grid.io.GridCoverage2DReader;
import org.geotools.coverage.grid.io.OverviewPolicy;
import org.geotools.data.DataSourceException;
import org.geotools.factory.Hints;
import org.geotools.geometry.GeneralEnvelope;
import org.geotools.parameter.Parameter;
import org.geotools.referencing.CRS;
import org.geotools.referencing.operation.BufferedCoordinateOperationFactory;
import org.geotools.util.Utilities;
import org.locationtech.geowave.adapter.auth.AuthorizationSPI;
import org.locationtech.geowave.adapter.raster.RasterUtils;
import org.locationtech.geowave.adapter.raster.Resolution;
import org.locationtech.geowave.adapter.raster.adapter.RasterDataAdapter;
import org.locationtech.geowave.adapter.raster.stats.HistogramStatistics;
import org.locationtech.geowave.adapter.raster.stats.OverviewStatistics;
import org.locationtech.geowave.adapter.raster.stats.RasterBoundingBoxStatistics;
import org.locationtech.geowave.core.geotime.ingest.SpatialDimensionalityTypeProvider;
import org.locationtech.geowave.core.geotime.store.query.IndexOnlySpatialQuery;
import org.locationtech.geowave.core.geotime.store.statistics.BoundingBoxDataStatistics;
import org.locationtech.geowave.core.geotime.util.GeometryUtils;
import org.locationtech.geowave.core.store.AdapterToIndexMapping;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.CloseableIterator.Wrapper;
import org.locationtech.geowave.core.store.adapter.AdapterIndexMappingStore;
import org.locationtech.geowave.core.store.adapter.InternalAdapterStore;
import org.locationtech.geowave.core.store.adapter.InternalDataAdapter;
import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore;
import org.locationtech.geowave.core.store.adapter.statistics.DataStatisticsStore;
import org.locationtech.geowave.core.store.adapter.statistics.InternalDataStatistics;
import org.locationtech.geowave.core.store.api.DataStore;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
import org.locationtech.geowave.core.store.api.Index;
import org.locationtech.geowave.core.store.api.QueryBuilder;
import org.locationtech.geowave.core.store.index.IndexStore;
import org.locationtech.geowave.core.store.query.constraints.QueryConstraints;
import org.locationtech.geowave.core.store.util.DataStoreUtils;
import org.locationtech.jts.geom.Envelope;
import org.locationtech.jts.geom.GeometryFactory;
import org.opengis.coverage.grid.Format;
import org.opengis.coverage.grid.GridCoverage;
import org.opengis.coverage.grid.GridEnvelope;
import org.opengis.parameter.GeneralParameterValue;
import org.opengis.parameter.ParameterDescriptor;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.opengis.referencing.cs.AxisDirection;
import org.opengis.referencing.cs.CoordinateSystem;
import org.opengis.referencing.cs.CoordinateSystemAxis;
import org.opengis.referencing.datum.PixelInCell;
import org.opengis.referencing.operation.CoordinateOperationFactory;
import org.opengis.referencing.operation.MathTransform;
import org.opengis.referencing.operation.TransformException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A {@link GridCoverage2DReader} that obtains its connection information from a GeoWave store
 * configuration and can produce a grid coverage for every registered raster data adapter
 * (each adapter corresponds to one named coverage).
 */
public class GeoWaveRasterReader extends AbstractGridCoverage2DReader implements
GridCoverage2DReader {
private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveRasterReader.class);
// parsed connection/configuration info for the underlying GeoWave stores
private GeoWaveRasterConfig config;
// stores resolved from the config in init(); one per GeoWave subsystem
private PersistentAdapterStore geowaveAdapterStore;
private InternalAdapterStore geowaveInternalAdapterStore;
private DataStatisticsStore geowaveStatisticsStore;
private DataStore geowaveDataStore;
private IndexStore geowaveIndexStore;
private AdapterIndexMappingStore geowaveAdapterIndexMappingStore;
// per-coverage CRS cache keyed by coverage name; populated lazily in getCrsForCoverage
protected Map<String, CoordinateReferenceSystem> crsCache = new HashMap<>();
// lazily-resolved fallback CRS used when no coverage name is supplied
protected CoordinateReferenceSystem defaultCrs;
// supplies the authorization strings passed to every store query
private AuthorizationSPI authorizationSPI;
// shared coordinate-operation factory; lenient datum shift avoids failures on
// transforms that lack explicit datum-shift parameters
protected static final CoordinateOperationFactory OPERATION_FACTORY =
new BufferedCoordinateOperationFactory(new Hints(Hints.LENIENT_DATUM_SHIFT, Boolean.TRUE));
// axis-direction sets used to locate the Y ("up") and X ("left/east") dimensions of a CRS
private static Set<AxisDirection> UPDirections;
private static Set<AxisDirection> LEFTDirections;
// class initializer
static {
LEFTDirections = new HashSet<>();
LEFTDirections.add(AxisDirection.DISPLAY_LEFT);
LEFTDirections.add(AxisDirection.EAST);
LEFTDirections.add(AxisDirection.GEOCENTRIC_X);
LEFTDirections.add(AxisDirection.COLUMN_POSITIVE);
UPDirections = new HashSet<>();
UPDirections.add(AxisDirection.DISPLAY_UP);
UPDirections.add(AxisDirection.NORTH);
UPDirections.add(AxisDirection.GEOCENTRIC_Y);
UPDirections.add(AxisDirection.ROW_POSITIVE);
}
/**
 * Creates a reader from either a GeoWave config parameter list or a source resolvable to a URL
 * pointing at a GeoWave raster configuration.
 *
 * @param source the source object (config parameter list or URL-like source)
 * @param uHints hints passed through to the superclass reader
 * @throws MalformedURLException if the source cannot be interpreted as config params or a URL
 * @throws IOException if the configuration cannot be read from the URL
 */
public GeoWaveRasterReader(final Object source, final Hints uHints) throws IOException {
  super(source, uHints);
  this.source = source;
  if (GeoWaveGTRasterFormat.isParamList(source)) {
    try {
      config = GeoWaveRasterConfig.readFromConfigParams(source.toString());
    } catch (final Exception e) {
      // preserve the parse failure as the cause instead of silently discarding it
      // (MalformedURLException has no (String, Throwable) constructor, so use initCause)
      final MalformedURLException mue = new MalformedURLException(source.toString());
      mue.initCause(e);
      throw mue;
    }
  } else {
    final URL url = GeoWaveGTRasterFormat.getURLFromSource(source);
    if (url == null) {
      throw new MalformedURLException(source.toString());
    }
    try {
      config = GeoWaveRasterConfig.readFromURL(url);
    } catch (final Exception e) {
      LOGGER.error("Cannot read config", e);
      throw new IOException(e);
    }
  }
  init(config);
}
/**
 * Creates a reader directly from an already-parsed GeoWave raster configuration.
 *
 * @param config the connection/configuration info for the underlying GeoWave stores
 * @throws DataSourceException declared for API compatibility with the superclass constructor
 */
public GeoWaveRasterReader(final GeoWaveRasterConfig config) throws DataSourceException {
super(new Object(), new Hints());
this.config = config;
init(config);
}
/** Resolves every GeoWave store and the authorization provider from the configuration. */
private void init(final GeoWaveRasterConfig config) {
geowaveDataStore = config.getDataStore();
geowaveAdapterStore = config.getAdapterStore();
geowaveStatisticsStore = config.getDataStatisticsStore();
geowaveIndexStore = config.getIndexStore();
geowaveAdapterIndexMappingStore = config.getAdapterIndexMappingStore();
geowaveInternalAdapterStore = config.getInternalAdapterStore();
// the factory builds an AuthorizationSPI from the configured authorization URL
authorizationSPI = config.getAuthorizationFactory().create(config.getAuthorizationURL());
}
/**
 * Convenience constructor with no hints.
 *
 * @param source The source object (config parameter list or URL-like source).
 * @throws IOException if the configuration cannot be read
 */
public GeoWaveRasterReader(final Object source) throws IOException {
this(source, null);
}
/**
 * Returns a best-guess default CRS: the cached value if one exists, otherwise the first cached
 * coverage CRS, otherwise the CRS of the first coverage that has one, and finally the GeoWave
 * default CRS when nothing has been ingested with a CRS yet.
 */
protected CoordinateReferenceSystem getDefaultCrs() {
  if (defaultCrs == null) {
    if (crsCache.isEmpty()) {
      // probe each known coverage until one yields a CRS
      for (final String name : getGridCoverageNames()) {
        final CoordinateReferenceSystem candidate = getCrsForCoverage(name);
        if (candidate != null) {
          defaultCrs = candidate;
          break;
        }
      }
    } else {
      defaultCrs = crsCache.values().iterator().next();
    }
  }
  if (defaultCrs != null) {
    return defaultCrs;
  }
  // if no data has been ingested yet with a CRS, this is the best guess we can make
  return GeometryUtils.getDefaultCRS();
}
/**
 * Resolves (and caches) the CRS for the named coverage from the first index the coverage's
 * adapter is mapped to; returns null when the adapter has no mapped indices.
 */
protected CoordinateReferenceSystem getCrsForCoverage(final String coverageName) {
  final CoordinateReferenceSystem cached = crsCache.get(coverageName);
  if (cached != null) {
    return cached;
  }
  final AdapterToIndexMapping mapping =
      geowaveAdapterIndexMappingStore.getIndicesForAdapter(getAdapterId(coverageName));
  final Index[] mappedIndices = mapping.getIndices(geowaveIndexStore);
  if ((mappedIndices == null) || (mappedIndices.length == 0)) {
    return null;
  }
  final CoordinateReferenceSystem resolved = GeometryUtils.getIndexCrs(mappedIndices[0]);
  crsCache.put(coverageName, resolved);
  return resolved;
}
/** Returns a new format descriptor for this reader. */
@Override
public Format getFormat() {
return new GeoWaveGTRasterFormat();
}
/** Lists the coverage name of every raster data adapter registered in the adapter store. */
@Override
public String[] getGridCoverageNames() {
  final List<String> names = new ArrayList<>();
  try (final CloseableIterator<InternalDataAdapter<?>> adapters =
      geowaveAdapterStore.getAdapters()) {
    while (adapters.hasNext()) {
      final DataTypeAdapter<?> candidate = adapters.next().getAdapter();
      if (candidate instanceof RasterDataAdapter) {
        names.add(((RasterDataAdapter) candidate).getCoverageName());
      }
    }
  }
  return names.toArray(new String[names.size()]);
}
/** Counts the raster data adapters registered in the adapter store. */
@Override
public int getGridCoverageCount() {
  int count = 0;
  try (final CloseableIterator<InternalDataAdapter<?>> adapters =
      geowaveAdapterStore.getAdapters()) {
    while (adapters.hasNext()) {
      if (adapters.next().getAdapter() instanceof RasterDataAdapter) {
        count++;
      }
    }
  }
  return count;
}
/** A coverage name is required; the no-argument overload is unsupported. */
@Override
public String[] getMetadataNames() {
throw new UnsupportedOperationException(
"A coverage name must be provided, there is no support for a default coverage");
}
/**
 * Returns the metadata keys of the named coverage's raster adapter, or null (with a warning)
 * when no raster adapter exists for that name.
 */
@Override
public String[] getMetadataNames(final String coverageName) {
  if (!checkName(coverageName)) {
    LOGGER.warn("Unable to find data adapter for '" + coverageName + "'");
    return null;
  }
  final RasterDataAdapter rasterAdapter =
      (RasterDataAdapter) geowaveAdapterStore.getAdapter(
          getAdapterId(coverageName)).getAdapter();
  final Set<String> keys = rasterAdapter.getMetadata().keySet();
  return keys.toArray(new String[keys.size()]);
}
/** A coverage name is required; the single-argument overload is unsupported. */
@Override
public String getMetadataValue(final String name) {
throw new UnsupportedOperationException(
"A coverage name must be provided, there is no support for a default coverage");
}
/**
 * Looks up a single metadata value on the named coverage's raster adapter, or returns null
 * (with a warning) when no raster adapter exists for that name.
 */
@Override
public String getMetadataValue(final String coverageName, final String name) {
  if (!checkName(coverageName)) {
    LOGGER.warn("Unable to find data adapter for '" + coverageName + "'");
    return null;
  }
  final RasterDataAdapter rasterAdapter =
      (RasterDataAdapter) geowaveAdapterStore.getAdapter(
          getAdapterId(coverageName)).getAdapter();
  return rasterAdapter.getMetadata().get(name);
}
/**
 * Checks that the given coverage name resolves to a raster data adapter.
 *
 * @param coverageName the coverage to look up; must not be null
 * @return true if a {@link RasterDataAdapter} is registered under this name
 */
@Override
protected boolean checkName(final String coverageName) {
  Utilities.ensureNonNull("coverageName", coverageName);
  final InternalDataAdapter<?> internalAdapter =
      geowaveAdapterStore.getAdapter(getAdapterId(coverageName));
  // guard against a missing store entry: the previous code called getAdapter() on the
  // wrapper unconditionally and would NPE for an unknown coverage name
  if (internalAdapter == null) {
    return false;
  }
  // instanceof is false for null, so no separate null check is needed
  return internalAdapter.getAdapter() instanceof RasterDataAdapter;
}
/** A coverage name is required; the no-argument overload is unsupported. */
@Override
public GeneralEnvelope getOriginalEnvelope() {
throw new UnsupportedOperationException(
"A coverage name must be provided, there is no support for a default coverage");
}
/**
 * Returns the envelope of the named coverage from its bounding-box statistics, or — when no
 * statistics exist yet — the full valid extent of the coverage's CRS.
 */
@Override
public GeneralEnvelope getOriginalEnvelope(final String coverageName) {
  final Envelope bounds =
      geowaveDataStore.aggregateStatistics(
          RasterBoundingBoxStatistics.STATS_TYPE.newBuilder().setAuthorizations(
              authorizationSPI.getAuthorizations()).dataType(coverageName).build());
  final CoordinateReferenceSystem crs = getCoordinateReferenceSystem(coverageName);
  final Rectangle2D.Double rect;
  if (bounds == null) {
    // no bounding-box statistics yet: fall back to the CRS axis extents
    final CoordinateSystemAxis xAxis = crs.getCoordinateSystem().getAxis(0);
    final CoordinateSystemAxis yAxis = crs.getCoordinateSystem().getAxis(1);
    rect =
        new Rectangle2D.Double(
            xAxis.getMinimumValue(),
            yAxis.getMinimumValue(),
            xAxis.getMaximumValue() - xAxis.getMinimumValue(),
            yAxis.getMaximumValue() - yAxis.getMinimumValue());
  } else {
    rect =
        new Rectangle2D.Double(
            bounds.getMinX(),
            bounds.getMinY(),
            bounds.getWidth(),
            bounds.getHeight());
  }
  final GeneralEnvelope env = new GeneralEnvelope(rect);
  env.setCoordinateReferenceSystem(crs);
  return env;
}
/** Returns the best-guess default CRS (see {@link #getDefaultCrs()}). */
@Override
public CoordinateReferenceSystem getCoordinateReferenceSystem() {
return getDefaultCrs();
}
/** Returns the CRS of the named coverage, resolved from its mapped index. */
@Override
public CoordinateReferenceSystem getCoordinateReferenceSystem(final String coverageName) {
return getCrsForCoverage(coverageName);
}
/** A coverage name is required; the no-argument overload is unsupported. */
@Override
public GridEnvelope getOriginalGridRange() {
throw new UnsupportedOperationException(
"A coverage name must be provided, there is no support for a default coverage");
}
/**
 * Computes the grid range (pixel width/height at the highest resolution) for the named coverage
 * by combining its bounding-box statistics with its overview (resolution) statistics; returns a
 * zero-sized range when either statistic is unavailable.
 */
@Override
public GridEnvelope getOriginalGridRange(final String coverageName) {
try (CloseableIterator<InternalDataStatistics<?, ?, ?>> statisticsIt =
geowaveStatisticsStore.getDataStatistics(
getAdapterId(coverageName),
RasterBoundingBoxStatistics.STATS_TYPE,
authorizationSPI.getAuthorizations())) {
int width = 0;
int height = 0;
// try to use both the bounding box and the overview statistics to
// determine the width and height at the highest resolution
InternalDataStatistics<?, ?, ?> statistics = null;
if (statisticsIt.hasNext()) {
statistics = statisticsIt.next();
}
if ((statistics != null) && (statistics instanceof BoundingBoxDataStatistics)) {
final BoundingBoxDataStatistics<?, ?> bboxStats =
(BoundingBoxDataStatistics<?, ?>) statistics;
try (CloseableIterator<InternalDataStatistics<?, ?, ?>> overviewStatisticsIt =
geowaveStatisticsStore.getDataStatistics(
getAdapterId(coverageName),
OverviewStatistics.STATS_TYPE,
authorizationSPI.getAuthorizations())) {
// reuse the same local for the overview statistics lookup
statistics = null;
if (overviewStatisticsIt.hasNext()) {
statistics = overviewStatisticsIt.next();
}
if ((statistics != null) && (statistics instanceof OverviewStatistics)) {
final OverviewStatistics overviewStats = (OverviewStatistics) statistics;
// resolutions[0] is the finest level; dividing the bbox span by the
// per-dimension resolution yields the pixel count at that level
width =
(int) Math.ceil(
((bboxStats.getMaxX() - bboxStats.getMinX())
/ overviewStats.getResolutions()[0].getResolution(0)));
height =
(int) Math.ceil(
((bboxStats.getMaxY() - bboxStats.getMinY())
/ overviewStats.getResolutions()[0].getResolution(1)));
}
}
}
return new GridEnvelope2D(0, 0, width, height);
}
}
/** A coverage name is required; the single-argument overload is unsupported. */
@Override
public MathTransform getOriginalGridToWorld(final PixelInCell pixInCell) {
throw new UnsupportedOperationException(
"A coverage name must be provided, there is no support for a default coverage");
}
/**
 * Returns the grid-to-world transform for the named coverage, forcing the superclass to
 * recompute rather than reuse a transform cached for a different coverage.
 */
@Override
public MathTransform getOriginalGridToWorld(
final String coverageName,
final PixelInCell pixInCell) {
// just reuse super class implementation but ensure that we do not use a
// cached raster2model
synchronized (this) {
raster2Model = null;
return super.getOriginalGridToWorld(coverageName, pixInCell);
}
}
/** A coverage name is required; the parameters-only overload is unsupported. */
@Override
public GridCoverage2D read(final GeneralParameterValue[] parameters)
throws IllegalArgumentException, IOException {
throw new UnsupportedOperationException(
"A coverage name must be provided, there is no support for a default coverage");
}
/**
 * Reads a coverage by name, honoring the supported GeoTools read parameters (requested grid
 * geometry, background color, output transparent color, interpolation).
 *
 * @param coverageName the coverage to read
 * @param params optional GeoTools read parameters; unrecognized entries are ignored
 * @return the rendered coverage, or null when no adapter exists for the name
 * @throws IOException if tile loading fails
 * @see org.opengis.coverage.grid.GridCoverageReader#read(org.opengis.parameter.GeneralParameterValue[])
 */
@Override
public GridCoverage2D read(final String coverageName, final GeneralParameterValue[] params)
    throws IOException {
  if (!checkName(coverageName)) {
    LOGGER.warn("Unable to find data adapter for '" + coverageName + "'");
    return null;
  }
  final Date start = new Date();
  // extract the supported read parameters
  Color outputTransparentColor = null;
  Color backgroundColor = null;
  Interpolation interpolation = null;
  Rectangle dim = null;
  GeneralEnvelope requestedEnvelope = null;
  if (params != null) {
    for (final GeneralParameterValue generalParameterValue : params) {
      @SuppressWarnings("unchecked")
      final Parameter<Object> param = (Parameter<Object>) generalParameterValue;
      // hoist the descriptor code so it is computed once per parameter instead of
      // once per comparison
      final String code = param.getDescriptor().getName().getCode();
      if (code.equals(AbstractGridFormat.READ_GRIDGEOMETRY2D.getName().toString())) {
        final GridGeometry2D gg = (GridGeometry2D) param.getValue();
        requestedEnvelope = (GeneralEnvelope) gg.getEnvelope();
        dim = gg.getGridRange2D().getBounds();
      } else if (code.equals(
          GeoWaveGTRasterFormat.OUTPUT_TRANSPARENT_COLOR.getName().toString())) {
        outputTransparentColor = (Color) param.getValue();
      } else if (code.equals(AbstractGridFormat.BACKGROUND_COLOR.getName().toString())) {
        backgroundColor = (Color) param.getValue();
      } else if (code.equals(AbstractGridFormat.INTERPOLATION.getName().toString())) {
        interpolation = (Interpolation) param.getValue();
      }
    }
  }
  final GridCoverage2D coverage =
      renderGridCoverage(
          coverageName,
          dim,
          requestedEnvelope,
          backgroundColor,
          outputTransparentColor,
          interpolation);
  // parameterized logging avoids building the message when INFO is disabled
  LOGGER.info(
      "GeoWave Raster Reader needs : {} millisecs",
      (new Date()).getTime() - start.getTime());
  return coverage;
}
/**
 * Renders a grid coverage for the requested envelope and pixel dimension, substituting format
 * defaults for any colors that were not supplied.
 *
 * @param coverageName the coverage to render
 * @param dim requested pixel dimension (may be null)
 * @param generalEnvelope requested envelope (may be null)
 * @param backgroundColor background color, or null for the format default
 * @param outputTransparentColor transparent color, or null for the format default
 * @param interpolation interpolation to apply, or null
 * @return the mosaicked coverage, or null when nothing intersects the request
 * @throws IOException if tile loading fails
 */
public GridCoverage2D renderGridCoverage(
    final String coverageName,
    final Rectangle dim,
    final GeneralEnvelope generalEnvelope,
    Color backgroundColor,
    Color outputTransparentColor,
    final Interpolation interpolation) throws IOException {
  // fall back to the format defaults for unspecified colors
  final Color bg =
      (backgroundColor != null) ? backgroundColor
          : AbstractGridFormat.BACKGROUND_COLOR.getDefaultValue();
  final Color transparent =
      (outputTransparentColor != null) ? outputTransparentColor
          : GeoWaveGTRasterFormat.OUTPUT_TRANSPARENT_COLOR.getDefaultValue();
  final GeoWaveRasterReaderState state = new GeoWaveRasterReaderState(coverageName);
  state.setRequestedEnvelope(generalEnvelope);
  // load tiles, trying to optimize as much as possible
  return loadTiles(
      coverageName,
      bg,
      transparent,
      interpolation,
      dim,
      state,
      getCoordinateReferenceSystem(coverageName),
      getOriginalEnvelope(coverageName));
}
/**
 * Loads and mosaics the tiles intersecting the request into a single coverage.
 *
 * @param coverageName the coverage to load
 * @param backgroundColor the background color for empty areas
 * @param outputTransparentColor the transparent color for the output
 * @param interpolation interpolation to apply, or null to use config/adapter defaults
 * @param pixelDimension requested output dimension; null means use the finest resolution
 * @param state per-request state (requested/transformed envelopes, axis-swap flag)
 * @param crs the coverage's CRS used to transform the requested envelope
 * @param originalEnvelope the full envelope of the coverage, used for intersection checks
 * @return the mosaicked grid coverage, an empty image on setup failure, or null when the
 *     request does not intersect the coverage
 * @throws IOException if querying or reading tiles fails
 */
private GridCoverage2D loadTiles(
final String coverageName,
final Color backgroundColor,
final Color outputTransparentColor,
Interpolation interpolation,
final Rectangle pixelDimension,
final GeoWaveRasterReaderState state,
final CoordinateReferenceSystem crs,
final GeneralEnvelope originalEnvelope) throws IOException {
// reproject the requested envelope into the coverage CRS (no-op when they match)
transformRequestEnvelope(state, crs);
// /////////////////////////////////////////////////////////////////////
//
// Check if we have something to load by intersecting the requested
// envelope with the bounds of the data set. If not, give warning
//
// /////////////////////////////////////////////////////////////////////
if (!state.getRequestEnvelopeXformed().intersects(originalEnvelope, true)) {
LOGGER.warn("The requested envelope does not intersect the envelope of this mosaic");
LOGGER.warn(state.getRequestEnvelopeXformed().toString());
LOGGER.warn(originalEnvelope.toString());
return null;
}
final ImageReadParam readP = new ImageReadParam();
final Integer imageChoice;
final RasterDataAdapter adapter =
(RasterDataAdapter) geowaveAdapterStore.getAdapter(getAdapterId(coverageName)).getAdapter();
if (pixelDimension != null) {
try {
// synchronized because setupResolutions/setReadParams mutate inherited
// single-coverage state on this reader
synchronized (this) {
if (!setupResolutions(coverageName)) {
LOGGER.warn("Cannot find the overview statistics for the requested coverage name");
// no resolution info: return an empty image matching the request
return coverageFactory.create(
coverageName,
RasterUtils.getEmptyImage(
(int) pixelDimension.getWidth(),
(int) pixelDimension.getHeight(),
backgroundColor,
outputTransparentColor,
adapter.getColorModel()),
state.getRequestedEnvelope());
}
// pick the overview level best matching the requested resolution
imageChoice =
setReadParams(
state.getCoverageName(),
OverviewPolicy.getDefaultPolicy(),
readP,
state.getRequestEnvelopeXformed(),
pixelDimension);
}
readP.setSourceSubsampling(1, 1, 0, 0);
} catch (final TransformException e) {
LOGGER.error(e.getLocalizedMessage(), e);
// transform failure: degrade to an empty image rather than propagating
return coverageFactory.create(
coverageName,
RasterUtils.getEmptyImage(
(int) pixelDimension.getWidth(),
(int) pixelDimension.getHeight(),
backgroundColor,
outputTransparentColor,
adapter.getColorModel()),
state.getRequestedEnvelope());
}
} else {
// no pixel dimension requested: use the finest resolution level
imageChoice = Integer.valueOf(0);
}
final double[][] resolutionLevels = getResolutionLevels(coverageName);
final Histogram histogram;
boolean equalizeHistogram;
// the config override, when set, wins over the adapter's own setting
if (config.isEqualizeHistogramOverrideSet()) {
equalizeHistogram = config.isEqualizeHistogramOverride();
} else {
equalizeHistogram = adapter.isEqualizeHistogram();
}
if (equalizeHistogram) {
histogram =
getHistogram(
coverageName,
resolutionLevels[imageChoice.intValue()][0],
resolutionLevels[imageChoice.intValue()][1]);
} else {
histogram = null;
}
boolean scaleTo8Bit = true; // default to always scale to 8-bit
final boolean scaleTo8BitSet = config.isScaleTo8BitSet();
if (scaleTo8BitSet) {
scaleTo8Bit = config.isScaleTo8Bit();
}
try (final CloseableIterator<GridCoverage> gridCoverageIt =
queryForTiles(
pixelDimension,
state.getRequestEnvelopeXformed(),
resolutionLevels[imageChoice.intValue()][0],
resolutionLevels[imageChoice.intValue()][1],
adapter)) {
// allow the config to override the WMS request
if (config.isInterpolationOverrideSet()) {
interpolation = config.getInterpolationOverride();
}
// but don't allow the default adapter interpolation to override the
// WMS request
else if (interpolation == null) {
interpolation = adapter.getInterpolation();
}
final GridCoverage2D result =
RasterUtils.mosaicGridCoverages(
gridCoverageIt,
backgroundColor,
outputTransparentColor,
pixelDimension,
state.getRequestEnvelopeXformed(),
resolutionLevels[imageChoice.intValue()][0],
resolutionLevels[imageChoice.intValue()][1],
adapter.getNoDataValuesPerBand(),
state.isAxisSwapped(),
coverageFactory,
state.getCoverageName(),
interpolation,
histogram,
scaleTo8BitSet,
scaleTo8Bit,
adapter.getColorModel());
// reproject back to the originally requested CRS if necessary
return transformResult(result, pixelDimension, state);
}
}
/**
 * Populates the inherited single-coverage resolution fields (numOverviews, highestRes,
 * overViewResolutions, coverageName) for the given coverage.
 *
 * @return false when no resolution levels exist for the coverage
 * @throws IOException if the resolution levels cannot be read
 */
private boolean setupResolutions(final String coverageName) throws IOException {
  // this is a bit of a hack to avoid copy and pasting large portions of the
  // inherited class, which does not handle multiple coverage names
  final double[][] levels = getResolutionLevels(coverageName);
  if ((levels == null) || (levels.length == 0)) {
    return false;
  }
  highestRes = levels[0];
  numOverviews = levels.length - 1;
  if (numOverviews == 0) {
    overViewResolutions = new double[][] {};
  } else {
    // everything after the finest level is an overview
    overViewResolutions = new double[numOverviews][];
    System.arraycopy(levels, 1, overViewResolutions, 0, numOverviews);
  }
  this.coverageName = coverageName;
  return true;
}
/**
 * Builds a spatial query from the transformed request envelope and delegates to the
 * adapter/constraints overload with the target resolution per dimension.
 *
 * @param pixelDimension requested pixel dimension (currently unused here; kept for signature
 *     compatibility)
 * @param requestEnvelope the request envelope in the coverage CRS
 * @param levelResX per-pixel resolution in X at the chosen level
 * @param levelResY per-pixel resolution in Y at the chosen level
 * @param adapter the raster adapter being queried
 * @return an iterator over the intersecting tiles
 * @throws IOException declared for API symmetry with callers
 */
private CloseableIterator<GridCoverage> queryForTiles(
    final Rectangle pixelDimension,
    final GeneralEnvelope requestEnvelope,
    final double levelResX,
    final double levelResY,
    final RasterDataAdapter adapter) throws IOException {
  // build the query geometry once; previously this construction was duplicated in
  // both branches below
  final Envelope bounds =
      new Envelope(
          requestEnvelope.getMinimum(0),
          requestEnvelope.getMaximum(0),
          requestEnvelope.getMinimum(1),
          requestEnvelope.getMaximum(1));
  final GeometryFactory geometryFactory = new GeometryFactory();
  final CoordinateReferenceSystem requestCrs = requestEnvelope.getCoordinateReferenceSystem();
  // qualify the query with a CRS code only when the envelope carries one
  final QueryConstraints query =
      (requestCrs == null)
          ? new IndexOnlySpatialQuery(geometryFactory.toGeometry(bounds))
          : new IndexOnlySpatialQuery(
              geometryFactory.toGeometry(bounds),
              GeometryUtils.getCrsCode(requestCrs));
  return queryForTiles(
      adapter,
      query,
      new double[] {levelResX * adapter.getTileSize(), levelResY * adapter.getTileSize()});
}
/**
 * Queries the data store for tiles using the first spatial index the adapter is mapped to.
 *
 * @param adapter the raster adapter to query
 * @param query spatial constraints for the request
 * @param targetResolutionPerDimension resolution hint used to choose a level in a
 *     hierarchical index
 * @return an iterator of matching grid coverages, or an empty iterator when the adapter has
 *     no spatial index
 */
private CloseableIterator<GridCoverage> queryForTiles(
final RasterDataAdapter adapter,
final QueryConstraints query,
final double[] targetResolutionPerDimension) {
final AdapterToIndexMapping adapterIndexMapping =
geowaveAdapterIndexMappingStore.getIndicesForAdapter(getAdapterId(adapter.getTypeName()));
final Index[] indices = adapterIndexMapping.getIndices(geowaveIndexStore);
// just work on the first spatial only index that contains this adapter
// ID
// TODO consider the best strategy for handling temporal queries here
for (final Index rasterIndex : indices) {
if (SpatialDimensionalityTypeProvider.isSpatial(rasterIndex)) {
return (CloseableIterator) geowaveDataStore.query(
QueryBuilder.newBuilder().setAuthorizations(
authorizationSPI.getAuthorizations()).addTypeName(
adapter.getTypeName()).constraints(query).addHint(
DataStoreUtils.TARGET_RESOLUTION_PER_DIMENSION_FOR_HIERARCHICAL_INDEX,
targetResolutionPerDimension).build());
}
}
// no spatial index mapped to this adapter: nothing to iterate
return new Wrapper(Collections.emptyIterator());
}
/**
 * Reprojects the mosaicked coverage back to the originally requested CRS when the request was
 * transformed; returns the coverage unchanged when no transform was applied.
 */
private GridCoverage2D transformResult(
    final GridCoverage2D coverage,
    final Rectangle pixelDimension,
    final GeoWaveRasterReaderState state) {
  // reference equality: transformRequestEnvelope stores the same object when no
  // reprojection was needed
  if (state.getRequestEnvelopeXformed() == state.getRequestedEnvelope()) {
    return coverage;
  }
  LOGGER.info("Image reprojection necessary");
  final GridCoverage2D resampled =
      (GridCoverage2D) RasterUtils.getCoverageOperations().resample(
          coverage,
          state.getRequestedEnvelope().getCoordinateReferenceSystem());
  return coverageFactory.create(
      resampled.getName(),
      resampled.getRenderedImage(),
      resampled.getEnvelope());
}
/**
 * Transforms (if necessary) the requested envelope into the CRS used by this reader, storing
 * the result on the state object and correcting a swapped X/Y axis order between the two
 * coordinate systems.
 *
 * @param state holds the requested envelope and receives the transformed envelope and the
 *     axis-swap flag
 * @param crs the target (reader) CRS
 * @throws DataSourceException if the axis order cannot be reconciled or the transform fails
 */
public static void transformRequestEnvelope(
final GeoWaveRasterReaderState state,
final CoordinateReferenceSystem crs) throws DataSourceException {
if (CRS.equalsIgnoreMetadata(
state.getRequestedEnvelope().getCoordinateReferenceSystem(),
crs)) {
// same CRS: reuse the requested envelope object (callers detect this by identity)
state.setRequestEnvelopeXformed(state.getRequestedEnvelope());
return; // and finish
}
try {
/** Buffered factory for coordinate operations. */
// transforming the envelope back to the dataset crs in
final MathTransform transform =
OPERATION_FACTORY.createOperation(
state.getRequestedEnvelope().getCoordinateReferenceSystem(),
crs).getMathTransform();
if (transform.isIdentity()) { // Identity Transform ?
state.setRequestEnvelopeXformed(state.getRequestedEnvelope());
return; // and finish
}
state.setRequestEnvelopeXformed(CRS.transform(transform, state.getRequestedEnvelope()));
state.getRequestEnvelopeXformed().setCoordinateReferenceSystem(crs);
// if (config.getIgnoreAxisOrder() == false) { // check for axis
// order
// required
// compare the X/Y axis positions of the two coordinate systems
final int indexX = indexOfX(crs);
final int indexY = indexOfY(crs);
final int indexRequestedX =
indexOfX(state.getRequestedEnvelope().getCoordinateReferenceSystem());
final int indexRequestedY =
indexOfY(state.getRequestedEnvelope().getCoordinateReferenceSystem());
// x Axis problem ???
if ((indexX == indexRequestedY) && (indexY == indexRequestedX)) {
// the axis order is swapped between the two CRSs: swap the envelope ordinates
state.setAxisSwap(true);
final Rectangle2D tmp =
new Rectangle2D.Double(
state.getRequestEnvelopeXformed().getMinimum(1),
state.getRequestEnvelopeXformed().getMinimum(0),
state.getRequestEnvelopeXformed().getSpan(1),
state.getRequestEnvelopeXformed().getSpan(0));
state.setRequestEnvelopeXformed(new GeneralEnvelope(tmp));
state.getRequestEnvelopeXformed().setCoordinateReferenceSystem(crs);
} else if ((indexX == indexRequestedX) && (indexY == indexRequestedY)) {
// everything is fine
} else {
throw new DataSourceException("Unable to resolve the X Axis problem");
}
// }
} catch (final Exception e) {
throw new DataSourceException("Unable to create a coverage for this source", e);
}
}
/** A coverage name is required; the no-argument overload is unsupported. */
@Override
public Set<ParameterDescriptor<List>> getDynamicParameters() throws IOException {
throw new UnsupportedOperationException(
"A coverage name must be provided, there is no support for a default coverage");
}
/** This reader exposes no dynamic read parameters for any coverage. */
@Override
public Set<ParameterDescriptor<List>> getDynamicParameters(final String coverageName)
throws IOException {
return Collections.emptySet();
}
/** A coverage name is required; this overload is unsupported. */
@Override
public double[] getReadingResolutions(
final OverviewPolicy policy,
final double[] requestedResolution) throws IOException {
throw new UnsupportedOperationException(
"A coverage name must be provided, there is no support for a default coverage");
}
/**
 * Sets up this reader's resolution state for the named coverage, then delegates to the
 * superclass to pick reading resolutions for the given overview policy.
 *
 * @return the chosen resolutions, or null when no overview statistics exist
 */
@Override
public double[] getReadingResolutions(
final String coverageName,
final OverviewPolicy policy,
final double[] requestedResolution) throws IOException {
// synchronized because setupResolutions mutates inherited single-coverage state
synchronized (this) {
if (!setupResolutions(coverageName)) {
LOGGER.warn("Cannot find the overview statistics for the requested coverage name");
return null;
}
return super.getReadingResolutions(coverageName, policy, requestedResolution);
}
}
/** A coverage name is required; the no-argument overload is unsupported. */
@Override
public int getNumOverviews() {
throw new UnsupportedOperationException(
"A coverage name must be provided, there is no support for a default coverage");
}
/**
 * Returns the number of overview levels for the named coverage (resolution levels beyond the
 * finest one), or 0 when the levels cannot be determined.
 */
@Override
public int getNumOverviews(final String coverageName) {
  try {
    final double[][] resolutionLevels = getResolutionLevels(coverageName);
    // getResolutionLevels returns null when no overview statistics exist; the
    // previous code dereferenced it unconditionally and threw NullPointerException
    if (resolutionLevels == null) {
      return 0;
    }
    return Math.max(0, resolutionLevels.length - 1);
  } catch (final IOException e) {
    LOGGER.warn("Unable to read resolution levels", e);
  }
  return 0;
}
/** A coverage name is required; the no-argument overload is unsupported. */
@Override
public ImageLayout getImageLayout() throws IOException {
throw new UnsupportedOperationException(
"A coverage name must be provided, there is no support for a default coverage");
}
/**
 * Builds an ImageLayout describing the named coverage's tiling, sample model and color model,
 * or returns null (with a warning) when no raster adapter exists for that name.
 */
@Override
public ImageLayout getImageLayout(final String coverageName) throws IOException {
  if (!checkName(coverageName)) {
    LOGGER.warn("Unable to find data adapter for '" + coverageName + "'");
    return null;
  }
  // unwrap the internal adapter before casting; the previous code cast the
  // InternalDataAdapter wrapper itself, which always threw ClassCastException
  // (compare the pattern used in getMetadataNames/loadTiles)
  final RasterDataAdapter adapter =
      (RasterDataAdapter) geowaveAdapterStore.getAdapter(
          getAdapterId(coverageName)).getAdapter();
  // use the per-coverage grid range; the no-argument overload unconditionally throws
  final GridEnvelope gridEnvelope = getOriginalGridRange(coverageName);
  // NOTE(review): width/height are set from getHigh(), the inclusive upper index
  // (span - 1); confirm whether getSpan() was intended here
  return new ImageLayout().setMinX(gridEnvelope.getLow(0)).setMinY(
      gridEnvelope.getLow(1)).setTileWidth(adapter.getTileSize()).setTileHeight(
          adapter.getTileSize()).setSampleModel(adapter.getSampleModel()).setColorModel(
              adapter.getColorModel()).setWidth(gridEnvelope.getHigh(0)).setHeight(
                  gridEnvelope.getHigh(1));
}
/** A coverage name is required; the no-argument overload is unsupported. */
@Override
public double[][] getResolutionLevels() throws IOException {
throw new UnsupportedOperationException(
"A coverage name must be provided, there is no support for a default coverage");
}
/**
 * Returns the per-dimension resolutions of every level for the named coverage (index 0 is the
 * finest level), or null (with a warning) when no overview statistics exist.
 */
@Override
public double[][] getResolutionLevels(final String coverageName) throws IOException {
  final Resolution[] resolutions =
      geowaveDataStore.aggregateStatistics(
          OverviewStatistics.STATS_TYPE.newBuilder().setAuthorizations(
              authorizationSPI.getAuthorizations()).dataType(coverageName).build());
  if (resolutions == null) {
    LOGGER.warn("Cannot find resolutions for coverage '" + coverageName + "'");
    return null;
  }
  final double[][] levels = new double[resolutions.length][];
  for (int level = 0; level < resolutions.length; level++) {
    levels[level] = resolutions[level].getResolutionPerDimension();
  }
  return levels;
}
/**
 * Looks up the histogram for the named coverage at the given resolution, or returns null
 * (with a warning when no histogram statistics exist at all).
 */
private Histogram getHistogram(final String coverageName, final double resX, final double resY)
    throws IOException {
  final Map<Resolution, Histogram> histograms =
      geowaveDataStore.aggregateStatistics(
          HistogramStatistics.STATS_TYPE.newBuilder().setAuthorizations(
              authorizationSPI.getAuthorizations()).dataType(coverageName).build());
  if (histograms == null) {
    LOGGER.warn("Cannot find histogram for coverage '" + coverageName + "'");
    return null;
  }
  // keyed by exact resolution pair; may still be null if this level has no histogram
  return histograms.get(new Resolution(new double[] {resX, resY}));
}
/**
 * @param crs CoordinateReference System
 * @return dimension index of the Y ("up") axis in the crs, or -1 if none matches
 */
private static int indexOfY(final CoordinateReferenceSystem crs) {
return indexOf(crs, UPDirections);
}
/**
 * @param crs CoordinateReference System
 * @return dimension index of the X ("left/east") axis in the crs, or -1 if none matches
 */
private static int indexOfX(final CoordinateReferenceSystem crs) {
return indexOf(crs, LEFTDirections);
}
/**
 * Scans the axes of the CRS's coordinate system and returns the index of the first axis whose
 * direction is one of the requested directions.
 *
 * @param crs the coordinate reference system whose axes are scanned
 * @param direction the set of acceptable axis directions
 * @return the index of the first matching axis, or -1 when none match
 */
private static int indexOf(
    final CoordinateReferenceSystem crs,
    final Set<AxisDirection> direction) {
  final CoordinateSystem coordinateSystem = crs.getCoordinateSystem();
  final int dimensions = coordinateSystem.getDimension();
  int dim = 0;
  while (dim < dimensions) {
    if (direction.contains(coordinateSystem.getAxis(dim).getDirection())) {
      return dim;
    }
    dim++;
  }
  return -1;
}
/**
 * Resolves the internal short adapter ID registered for the given coverage name via the internal
 * adapter store.
 *
 * @param coverageName the coverage (data type) name to resolve
 * @return the internal adapter ID for the coverage
 */
private short getAdapterId(final String coverageName) {
  return geowaveInternalAdapterStore.getAdapterId(coverageName);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.core.client.mapreduce;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import org.apache.accumulo.core.client.ClientSideIteratorScanner;
import org.apache.accumulo.core.client.IsolatedScanner;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.client.ScannerBase;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.client.impl.TabletLocator;
import org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator;
import org.apache.accumulo.core.client.sample.SamplerConfiguration;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.util.Pair;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
/**
* This abstract {@link InputFormat} class allows MapReduce jobs to use Accumulo as the source of K,V pairs.
* <p>
* Subclasses must implement a {@link #createRecordReader(InputSplit, TaskAttemptContext)} to provide a {@link RecordReader} for K,V.
* <p>
* A static base class, RecordReaderBase, is provided to retrieve Accumulo {@link Key}/{@link Value} pairs, but one must implement its
* {@link RecordReaderBase#nextKeyValue()} to transform them to the desired generic types K,V.
* <p>
* See {@link AccumuloInputFormat} for an example implementation.
*/
public abstract class InputFormatBase<K,V> extends AbstractInputFormat<K,V> {

  /**
   * Gets the table name from the configuration.
   *
   * @param context
   *          the Hadoop context for the configured job
   * @return the table name
   * @since 1.5.0
   * @see #setInputTableName(Job, String)
   */
  protected static String getInputTableName(JobContext context) {
    return InputConfigurator.getInputTableName(CLASS, context.getConfiguration());
  }

  /**
   * Sets the name of the input table, over which this job will scan.
   *
   * @param job
   *          the Hadoop job instance to be configured
   * @param tableName
   *          the name of the input table
   * @since 1.5.0
   */
  public static void setInputTableName(Job job, String tableName) {
    InputConfigurator.setInputTableName(CLASS, job.getConfiguration(), tableName);
  }

  /**
   * Sets the input ranges to scan for the single input table associated with this job.
   *
   * @param job
   *          the Hadoop job instance to be configured
   * @param ranges
   *          the ranges that will be mapped over
   * @since 1.5.0
   */
  public static void setRanges(Job job, Collection<Range> ranges) {
    InputConfigurator.setRanges(CLASS, job.getConfiguration(), ranges);
  }

  /**
   * Gets the ranges to scan over from a job.
   *
   * @param context
   *          the Hadoop context for the configured job
   * @return the ranges
   * @since 1.5.0
   * @see #setRanges(Job, Collection)
   */
  protected static List<Range> getRanges(JobContext context) throws IOException {
    return InputConfigurator.getRanges(CLASS, context.getConfiguration());
  }

  /**
   * Restricts the columns that will be mapped over for this job for the default input table.
   *
   * @param job
   *          the Hadoop job instance to be configured
   * @param columnFamilyColumnQualifierPairs
   *          a pair of {@link Text} objects corresponding to column family and column qualifier. If the column qualifier is null, the entire column family is
   *          selected. An empty set is the default and is equivalent to scanning all of the columns.
   * @since 1.5.0
   */
  public static void fetchColumns(Job job, Collection<Pair<Text,Text>> columnFamilyColumnQualifierPairs) {
    InputConfigurator.fetchColumns(CLASS, job.getConfiguration(), columnFamilyColumnQualifierPairs);
  }

  /**
   * Gets the columns to be mapped over from this job.
   *
   * @param context
   *          the Hadoop context for the configured job
   * @return a set of columns
   * @since 1.5.0
   * @see #fetchColumns(Job, Collection)
   */
  protected static Set<Pair<Text,Text>> getFetchedColumns(JobContext context) {
    return InputConfigurator.getFetchedColumns(CLASS, context.getConfiguration());
  }

  /**
   * Encode an iterator on the single input table for this job.
   *
   * @param job
   *          the Hadoop job instance to be configured
   * @param cfg
   *          the configuration of the iterator
   * @since 1.5.0
   */
  public static void addIterator(Job job, IteratorSetting cfg) {
    InputConfigurator.addIterator(CLASS, job.getConfiguration(), cfg);
  }

  /**
   * Gets a list of the iterator settings (for iterators to apply to a scanner) from this configuration.
   *
   * @param context
   *          the Hadoop context for the configured job
   * @return a list of iterators
   * @since 1.5.0
   * @see #addIterator(Job, IteratorSetting)
   */
  protected static List<IteratorSetting> getIterators(JobContext context) {
    return InputConfigurator.getIterators(CLASS, context.getConfiguration());
  }

  /**
   * Controls the automatic adjustment of ranges for this job. This feature merges overlapping ranges, then splits them to align with tablet boundaries.
   * Disabling this feature will cause exactly one Map task to be created for each specified range.
   *
   * <p>
   * By default, this feature is <b>enabled</b>.
   *
   * @param job
   *          the Hadoop job instance to be configured
   * @param enableFeature
   *          the feature is enabled if true, disabled otherwise
   * @see #setRanges(Job, Collection)
   * @since 1.5.0
   */
  public static void setAutoAdjustRanges(Job job, boolean enableFeature) {
    InputConfigurator.setAutoAdjustRanges(CLASS, job.getConfiguration(), enableFeature);
  }

  /**
   * Determines whether a configuration has auto-adjust ranges enabled. Must be enabled when {@link #setBatchScan(Job, boolean)} is true.
   *
   * @param context
   *          the Hadoop context for the configured job
   * @return false if the feature is disabled, true otherwise
   * @since 1.5.0
   * @see #setAutoAdjustRanges(Job, boolean)
   */
  protected static boolean getAutoAdjustRanges(JobContext context) {
    return InputConfigurator.getAutoAdjustRanges(CLASS, context.getConfiguration());
  }

  /**
   * Controls the use of the {@link IsolatedScanner} in this job.
   *
   * <p>
   * By default, this feature is <b>disabled</b>.
   *
   * @param job
   *          the Hadoop job instance to be configured
   * @param enableFeature
   *          the feature is enabled if true, disabled otherwise
   * @since 1.5.0
   */
  public static void setScanIsolation(Job job, boolean enableFeature) {
    InputConfigurator.setScanIsolation(CLASS, job.getConfiguration(), enableFeature);
  }

  /**
   * Determines whether a configuration has isolation enabled.
   *
   * @param context
   *          the Hadoop context for the configured job
   * @return true if the feature is enabled, false otherwise
   * @since 1.5.0
   * @see #setScanIsolation(Job, boolean)
   */
  protected static boolean isIsolated(JobContext context) {
    return InputConfigurator.isIsolated(CLASS, context.getConfiguration());
  }

  /**
   * Controls the use of the {@link ClientSideIteratorScanner} in this job. Enabling this feature will cause the iterator stack to be constructed within the Map
   * task, rather than within the Accumulo TServer. To use this feature, all classes needed for those iterators must be available on the classpath for the task.
   *
   * <p>
   * By default, this feature is <b>disabled</b>.
   *
   * @param job
   *          the Hadoop job instance to be configured
   * @param enableFeature
   *          the feature is enabled if true, disabled otherwise
   * @since 1.5.0
   */
  public static void setLocalIterators(Job job, boolean enableFeature) {
    InputConfigurator.setLocalIterators(CLASS, job.getConfiguration(), enableFeature);
  }

  /**
   * Determines whether a configuration uses local iterators.
   *
   * @param context
   *          the Hadoop context for the configured job
   * @return true if the feature is enabled, false otherwise
   * @since 1.5.0
   * @see #setLocalIterators(Job, boolean)
   */
  protected static boolean usesLocalIterators(JobContext context) {
    return InputConfigurator.usesLocalIterators(CLASS, context.getConfiguration());
  }

  /**
   * Enable reading offline tables. By default, this feature is disabled and only online tables are scanned. This will make the map reduce job directly read the
   * table's files. If the table is not offline, then the job will fail. If the table comes online during the map reduce job, it is likely that the job will
   * fail.
   *
   * <p>
   * To use this option, the map reduce user will need access to read the Accumulo directory in HDFS.
   *
   * <p>
   * Reading the offline table will create the scan time iterator stack in the map process. So any iterators that are configured for the table will need to be
   * on the mapper's classpath.
   *
   * <p>
   * One way to use this feature is to clone a table, take the clone offline, and use the clone as the input table for a map reduce job. If you plan to map
   * reduce over the data many times, it may be better to compact the table, clone it, take it offline, and use the clone for all map reduce jobs. The
   * reason to do this is that compaction will reduce each tablet in the table to one file, and it is faster to read from one file.
   *
   * <p>
   * There are two possible advantages to reading a table's files directly out of HDFS. First, you may see better read performance. Second, it will support
   * speculative execution better. When reading an online table, speculative execution can put more load on an already slow tablet server.
   *
   * <p>
   * By default, this feature is <b>disabled</b>.
   *
   * @param job
   *          the Hadoop job instance to be configured
   * @param enableFeature
   *          the feature is enabled if true, disabled otherwise
   * @since 1.5.0
   */
  public static void setOfflineTableScan(Job job, boolean enableFeature) {
    InputConfigurator.setOfflineTableScan(CLASS, job.getConfiguration(), enableFeature);
  }

  /**
   * Determines whether a configuration has the offline table scan feature enabled.
   *
   * @param context
   *          the Hadoop context for the configured job
   * @return true if the feature is enabled, false otherwise
   * @since 1.5.0
   * @see #setOfflineTableScan(Job, boolean)
   */
  protected static boolean isOfflineScan(JobContext context) {
    return InputConfigurator.isOfflineScan(CLASS, context.getConfiguration());
  }

  /**
   * Controls the use of the {@link org.apache.accumulo.core.client.BatchScanner} in this job. Using this feature will group Ranges by their source tablet,
   * producing an InputSplit per tablet rather than per Range. This batching helps to reduce overhead when querying a large number of small ranges. (ex: when
   * doing quad-tree decomposition for spatial queries)
   * <p>
   * In order to achieve good locality of InputSplits this option always clips the input Ranges to tablet boundaries. This may result in one input Range
   * contributing to several InputSplits.
   * <p>
   * Note: that the value of {@link #setAutoAdjustRanges(Job, boolean)} is ignored and is assumed to be true when BatchScan option is enabled.
   * <p>
   * This configuration is incompatible with:
   * <ul>
   * <li>{@link #setOfflineTableScan(org.apache.hadoop.mapreduce.Job, boolean)}</li>
   * <li>{@link #setLocalIterators(org.apache.hadoop.mapreduce.Job, boolean)}</li>
   * <li>{@link #setScanIsolation(org.apache.hadoop.mapreduce.Job, boolean)}</li>
   * </ul>
   * <p>
   * By default, this feature is <b>disabled</b>.
   *
   * @param job
   *          the Hadoop job instance to be configured
   * @param enableFeature
   *          the feature is enabled if true, disabled otherwise
   * @since 1.7.0
   */
  public static void setBatchScan(Job job, boolean enableFeature) {
    InputConfigurator.setBatchScan(CLASS, job.getConfiguration(), enableFeature);
  }

  /**
   * Determines whether a configuration has the {@link org.apache.accumulo.core.client.BatchScanner} feature enabled.
   *
   * @param context
   *          the Hadoop context for the configured job
   * @return true if the feature is enabled, false otherwise
   * @since 1.7.0
   * @see #setBatchScan(Job, boolean)
   */
  public static boolean isBatchScan(JobContext context) {
    return InputConfigurator.isBatchScan(CLASS, context.getConfiguration());
  }

  /**
   * Causes input format to read sample data. If sample data was created using a different configuration or a table's sampler configuration changes while reading
   * data, then the input format will throw an error.
   *
   * @param job
   *          the Hadoop job instance to be configured
   * @param samplerConfig
   *          The sampler configuration that the sample must have been created with in order for reading sample data to succeed.
   *
   * @since 1.8.0
   * @see ScannerBase#setSamplerConfiguration(SamplerConfiguration)
   */
  public static void setSamplerConfiguration(Job job, SamplerConfiguration samplerConfig) {
    InputConfigurator.setSamplerConfiguration(CLASS, job.getConfiguration(), samplerConfig);
  }

  /**
   * Initializes an Accumulo {@link org.apache.accumulo.core.client.impl.TabletLocator} based on the configuration.
   *
   * @param context
   *          the Hadoop context for the configured job
   * @return an Accumulo tablet locator
   * @throws org.apache.accumulo.core.client.TableNotFoundException
   *           if the table name set on the configuration doesn't exist
   * @since 1.5.0
   * @deprecated since 1.6.0
   */
  @Deprecated
  protected static TabletLocator getTabletLocator(JobContext context) throws TableNotFoundException {
    return InputConfigurator.getTabletLocator(CLASS, context.getConfiguration(), InputConfigurator.getInputTableName(CLASS, context.getConfiguration()));
  }

  protected abstract static class RecordReaderBase<K,V> extends AbstractRecordReader<K,V> {

    /**
     * Returns the iterators configured on the job. The {@code tableName} argument is ignored,
     * since this input format operates over a single input table.
     */
    @Override
    protected List<IteratorSetting> contextIterators(TaskAttemptContext context, String tableName) {
      return getIterators(context);
    }

    /**
     * Apply the configured iterators from the configuration to the scanner.
     *
     * @param context
     *          the Hadoop context for the configured job
     * @param scanner
     *          the scanner to configure
     * @deprecated since 1.7.0; Use {@link #contextIterators} instead.
     */
    @Deprecated
    protected void setupIterators(TaskAttemptContext context, Scanner scanner) {
      // tableName is given as null as it will be ignored in eventual call to #contextIterators
      setupIterators(context, scanner, null, null);
    }

    /**
     * Initialize a scanner over the given input split using this task attempt configuration.
     *
     * @deprecated since 1.7.0; Use {@link #contextIterators} instead.
     */
    @Deprecated
    protected void setupIterators(TaskAttemptContext context, Scanner scanner, org.apache.accumulo.core.client.mapreduce.RangeInputSplit split) {
      setupIterators(context, scanner, null, split);
    }
  }

  /**
   * @deprecated since 1.5.2; Use {@link org.apache.accumulo.core.client.mapreduce.RangeInputSplit} instead.
   * @see org.apache.accumulo.core.client.mapreduce.RangeInputSplit
   */
  @Deprecated
  public static class RangeInputSplit extends org.apache.accumulo.core.client.mapreduce.RangeInputSplit {

    public RangeInputSplit() {
      super();
    }

    public RangeInputSplit(RangeInputSplit other) throws IOException {
      super(other);
    }

    // no table id is available here, so delegate with an empty id
    protected RangeInputSplit(String table, Range range, String[] locations) {
      super(table, "", range, locations);
    }

    public RangeInputSplit(String table, String tableId, Range range, String[] locations) {
      super(table, tableId, range, locations);
    }
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.metadata;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRunnable;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest;
import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.metadata.IndexMetaData.Custom;
import org.elasticsearch.cluster.metadata.IndexMetaData.State;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.ValidationException;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.indices.IndexAlreadyExistsException;
import org.elasticsearch.indices.IndexCreationException;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.InvalidIndexNameException;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.threadpool.ThreadPool;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_CREATION_DATE;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_INDEX_UUID;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_CREATED;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
/**
* Service responsible for submitting create index requests
*/
public class MetaDataCreateIndexService extends AbstractComponent {
public final static int MAX_INDEX_NAME_BYTES = 255;
private static final DefaultIndexTemplateFilter DEFAULT_INDEX_TEMPLATE_FILTER = new DefaultIndexTemplateFilter();
private final ThreadPool threadPool;
private final ClusterService clusterService;
private final IndicesService indicesService;
private final AllocationService allocationService;
private final MetaDataService metaDataService;
private final Version version;
private final AliasValidator aliasValidator;
private final IndexTemplateFilter indexTemplateFilter;
private final NodeEnvironment nodeEnv;
private final Environment env;
/**
 * Wires up the create-index service. If any additional {@link IndexTemplateFilter}s are bound,
 * they are combined with the default filter (which always runs first); otherwise the default
 * filter is used on its own.
 */
@Inject
public MetaDataCreateIndexService(Settings settings, ThreadPool threadPool, ClusterService clusterService,
                                  IndicesService indicesService, AllocationService allocationService, MetaDataService metaDataService,
                                  Version version, AliasValidator aliasValidator,
                                  Set<IndexTemplateFilter> indexTemplateFilters, Environment env,
                                  NodeEnvironment nodeEnv) {
    super(settings);
    this.threadPool = threadPool;
    this.clusterService = clusterService;
    this.indicesService = indicesService;
    this.allocationService = allocationService;
    this.metaDataService = metaDataService;
    this.version = version;
    this.aliasValidator = aliasValidator;
    this.nodeEnv = nodeEnv;
    this.env = env;
    if (indexTemplateFilters.isEmpty()) {
        this.indexTemplateFilter = DEFAULT_INDEX_TEMPLATE_FILTER;
    } else {
        // default filter first, then the injected filters in iteration order
        List<IndexTemplateFilter> filters = new ArrayList<>(indexTemplateFilters.size() + 1);
        filters.add(DEFAULT_INDEX_TEMPLATE_FILTER);
        filters.addAll(indexTemplateFilters);
        this.indexTemplateFilter =
                new IndexTemplateFilter.Compound(filters.toArray(new IndexTemplateFilter[filters.size()]));
    }
}
/**
 * Entry point for creating an index: acquires the per-index metadata lock, then delegates to the
 * private overload that submits the cluster state update task.
 *
 * <p>If the lock is immediately available it is taken on the calling thread; otherwise the
 * blocking acquire is pushed onto the MANAGEMENT thread pool so the caller is never blocked.
 * NOTE(review): the lock is handed off to the private {@code createIndex} overload, which is
 * presumably responsible for releasing it via the update task's callbacks — confirm there.
 *
 * @param request the create-index request
 * @param listener notified with the cluster state update response, or with a failure (including
 *                 a timeout while waiting for the metadata lock)
 */
public void createIndex(final CreateIndexClusterStateUpdateRequest request, final ActionListener<ClusterStateUpdateResponse> listener) {
    // we lock here, and not within the cluster service callback since we don't want to
    // block the whole cluster state handling
    final Semaphore mdLock = metaDataService.indexMetaDataLock(request.index());
    // quick check to see if we can acquire a lock, otherwise spawn to a thread pool
    if (mdLock.tryAcquire()) {
        createIndex(request, listener, mdLock);
        return;
    }
    threadPool.executor(ThreadPool.Names.MANAGEMENT).execute(new ActionRunnable(listener) {
        @Override
        public void doRun() throws InterruptedException {
            // bounded wait: give up after the request's master-node timeout
            if (!mdLock.tryAcquire(request.masterNodeTimeout().nanos(), TimeUnit.NANOSECONDS)) {
                listener.onFailure(new ProcessClusterEventTimeoutException(request.masterNodeTimeout(), "acquire index lock"));
                return;
            }
            createIndex(request, listener, mdLock);
        }
    });
}
/**
 * Validates that an index with the given name may be created in the given cluster state.
 *
 * @param index the candidate index name
 * @param state the current cluster state, used to detect collisions with existing indices and
 *              aliases
 * @throws IndexAlreadyExistsException if an index with this name already exists in the routing
 *                                     table or the metadata
 * @throws InvalidIndexNameException if the name violates any naming constraint (illegal
 *                                   characters, leading '_', upper-case letters, too long,
 *                                   clashes with an alias, or is '.', '..', or empty)
 */
public void validateIndexName(String index, ClusterState state) {
    if (state.routingTable().hasIndex(index)) {
        throw new IndexAlreadyExistsException(new Index(index));
    }
    if (state.metaData().hasIndex(index)) {
        throw new IndexAlreadyExistsException(new Index(index));
    }
    if (index.isEmpty()) {
        // checked before charAt(0) below; an empty name previously escaped as a raw
        // StringIndexOutOfBoundsException instead of a proper validation error
        throw new InvalidIndexNameException(new Index(index), index, "must not be empty");
    }
    if (!Strings.validFileName(index)) {
        throw new InvalidIndexNameException(new Index(index), index, "must not contain the following characters " + Strings.INVALID_FILENAME_CHARS);
    }
    if (index.contains("#")) {
        throw new InvalidIndexNameException(new Index(index), index, "must not contain '#'");
    }
    if (index.charAt(0) == '_') {
        throw new InvalidIndexNameException(new Index(index), index, "must not start with '_'");
    }
    if (!index.toLowerCase(Locale.ROOT).equals(index)) {
        throw new InvalidIndexNameException(new Index(index), index, "must be lowercase");
    }
    // the length limit is in UTF-8 bytes, not characters; getBytes(Charset) cannot throw the
    // checked UnsupportedEncodingException that getBytes(String) forced us to catch before
    final int byteCount = index.getBytes(StandardCharsets.UTF_8).length;
    if (byteCount > MAX_INDEX_NAME_BYTES) {
        throw new InvalidIndexNameException(new Index(index), index,
                "index name is too long, (" + byteCount +
                        " > " + MAX_INDEX_NAME_BYTES + ")");
    }
    if (state.metaData().hasAlias(index)) {
        throw new InvalidIndexNameException(new Index(index), index, "already exists as alias");
    }
    if (index.equals(".") || index.equals("..")) {
        throw new InvalidIndexNameException(new Index(index), index, "must not be '.' or '..'");
    }
}
private void createIndex(final CreateIndexClusterStateUpdateRequest request, final ActionListener<ClusterStateUpdateResponse> listener, final Semaphore mdLock) {
Settings.Builder updatedSettingsBuilder = Settings.settingsBuilder();
updatedSettingsBuilder.put(request.settings()).normalizePrefix(IndexMetaData.INDEX_SETTING_PREFIX);
request.settings(updatedSettingsBuilder.build());
clusterService.submitStateUpdateTask("create-index [" + request.index() + "], cause [" + request.cause() + "]", Priority.URGENT, new AckedClusterStateUpdateTask<ClusterStateUpdateResponse>(request, listener) {
@Override
protected ClusterStateUpdateResponse newResponse(boolean acknowledged) {
return new ClusterStateUpdateResponse(acknowledged);
}
@Override
public void onAllNodesAcked(@Nullable Throwable t) {
mdLock.release();
super.onAllNodesAcked(t);
}
@Override
public void onAckTimeout() {
mdLock.release();
super.onAckTimeout();
}
@Override
public void onFailure(String source, Throwable t) {
mdLock.release();
super.onFailure(source, t);
}
@Override
public ClusterState execute(ClusterState currentState) throws Exception {
boolean indexCreated = false;
String removalReason = null;
try {
validate(request, currentState);
for (Alias alias : request.aliases()) {
aliasValidator.validateAlias(alias, request.index(), currentState.metaData());
}
// we only find a template when its an API call (a new index)
// find templates, highest order are better matching
List<IndexTemplateMetaData> templates = findTemplates(request, currentState, indexTemplateFilter);
Map<String, Custom> customs = new HashMap<>();
// add the request mapping
Map<String, Map<String, Object>> mappings = new HashMap<>();
Map<String, AliasMetaData> templatesAliases = new HashMap<>();
List<String> templateNames = new ArrayList<>();
for (Map.Entry<String, String> entry : request.mappings().entrySet()) {
mappings.put(entry.getKey(), parseMapping(entry.getValue()));
}
for (Map.Entry<String, Custom> entry : request.customs().entrySet()) {
customs.put(entry.getKey(), entry.getValue());
}
// apply templates, merging the mappings into the request mapping if exists
for (IndexTemplateMetaData template : templates) {
templateNames.add(template.getName());
for (ObjectObjectCursor<String, CompressedXContent> cursor : template.mappings()) {
if (mappings.containsKey(cursor.key)) {
XContentHelper.mergeDefaults(mappings.get(cursor.key), parseMapping(cursor.value.string()));
} else {
mappings.put(cursor.key, parseMapping(cursor.value.string()));
}
}
// handle custom
for (ObjectObjectCursor<String, Custom> cursor : template.customs()) {
String type = cursor.key;
IndexMetaData.Custom custom = cursor.value;
IndexMetaData.Custom existing = customs.get(type);
if (existing == null) {
customs.put(type, custom);
} else {
IndexMetaData.Custom merged = existing.mergeWith(custom);
customs.put(type, merged);
}
}
//handle aliases
for (ObjectObjectCursor<String, AliasMetaData> cursor : template.aliases()) {
AliasMetaData aliasMetaData = cursor.value;
//if an alias with same name came with the create index request itself,
// ignore this one taken from the index template
if (request.aliases().contains(new Alias(aliasMetaData.alias()))) {
continue;
}
//if an alias with same name was already processed, ignore this one
if (templatesAliases.containsKey(cursor.key)) {
continue;
}
//Allow templatesAliases to be templated by replacing a token with the name of the index that we are applying it to
if (aliasMetaData.alias().contains("{index}")) {
String templatedAlias = aliasMetaData.alias().replace("{index}", request.index());
aliasMetaData = AliasMetaData.newAliasMetaData(aliasMetaData, templatedAlias);
}
aliasValidator.validateAliasMetaData(aliasMetaData, request.index(), currentState.metaData());
templatesAliases.put(aliasMetaData.alias(), aliasMetaData);
}
}
Settings.Builder indexSettingsBuilder = settingsBuilder();
// apply templates, here, in reverse order, since first ones are better matching
for (int i = templates.size() - 1; i >= 0; i--) {
indexSettingsBuilder.put(templates.get(i).settings());
}
// now, put the request settings, so they override templates
indexSettingsBuilder.put(request.settings());
if (request.index().equals(ScriptService.SCRIPT_INDEX)) {
indexSettingsBuilder.put(SETTING_NUMBER_OF_SHARDS, settings.getAsInt(SETTING_NUMBER_OF_SHARDS, 1));
} else {
if (indexSettingsBuilder.get(SETTING_NUMBER_OF_SHARDS) == null) {
indexSettingsBuilder.put(SETTING_NUMBER_OF_SHARDS, settings.getAsInt(SETTING_NUMBER_OF_SHARDS, 5));
}
}
if (request.index().equals(ScriptService.SCRIPT_INDEX)) {
indexSettingsBuilder.put(SETTING_NUMBER_OF_REPLICAS, settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, 0));
indexSettingsBuilder.put(SETTING_AUTO_EXPAND_REPLICAS, "0-all");
} else {
if (indexSettingsBuilder.get(SETTING_NUMBER_OF_REPLICAS) == null) {
indexSettingsBuilder.put(SETTING_NUMBER_OF_REPLICAS, settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, 1));
}
}
if (settings.get(SETTING_AUTO_EXPAND_REPLICAS) != null && indexSettingsBuilder.get(SETTING_AUTO_EXPAND_REPLICAS) == null) {
indexSettingsBuilder.put(SETTING_AUTO_EXPAND_REPLICAS, settings.get(SETTING_AUTO_EXPAND_REPLICAS));
}
if (indexSettingsBuilder.get(SETTING_VERSION_CREATED) == null) {
DiscoveryNodes nodes = currentState.nodes();
final Version createdVersion = Version.smallest(version, nodes.smallestNonClientNodeVersion());
indexSettingsBuilder.put(SETTING_VERSION_CREATED, createdVersion);
}
if (indexSettingsBuilder.get(SETTING_CREATION_DATE) == null) {
indexSettingsBuilder.put(SETTING_CREATION_DATE, new DateTime(DateTimeZone.UTC).getMillis());
}
indexSettingsBuilder.put(SETTING_INDEX_UUID, Strings.randomBase64UUID());
Settings actualIndexSettings = indexSettingsBuilder.build();
// Set up everything, now locally create the index to see that things are ok, and apply
final IndexMetaData tmpImd = IndexMetaData.builder(request.index()).settings(actualIndexSettings).build();
// create the index here (on the master) to validate it can be created, as well as adding the mapping
indicesService.createIndex(tmpImd);
indexCreated = true;
// now add the mappings
IndexService indexService = indicesService.indexServiceSafe(request.index());
MapperService mapperService = indexService.mapperService();
// first, add the default mapping
if (mappings.containsKey(MapperService.DEFAULT_MAPPING)) {
try {
mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(XContentFactory.jsonBuilder().map(mappings.get(MapperService.DEFAULT_MAPPING)).string()), false, request.updateAllTypes());
} catch (Exception e) {
removalReason = "failed on parsing default mapping on index creation";
throw new MapperParsingException("mapping [" + MapperService.DEFAULT_MAPPING + "]", e);
}
}
for (Map.Entry<String, Map<String, Object>> entry : mappings.entrySet()) {
if (entry.getKey().equals(MapperService.DEFAULT_MAPPING)) {
continue;
}
try {
// apply the default here, its the first time we parse it
mapperService.merge(entry.getKey(), new CompressedXContent(XContentFactory.jsonBuilder().map(entry.getValue()).string()), true, request.updateAllTypes());
} catch (Exception e) {
removalReason = "failed on parsing mappings on index creation";
throw new MapperParsingException("mapping [" + entry.getKey() + "]", e);
}
}
IndexQueryParserService indexQueryParserService = indexService.queryParserService();
for (Alias alias : request.aliases()) {
if (Strings.hasLength(alias.filter())) {
aliasValidator.validateAliasFilter(alias.name(), alias.filter(), indexQueryParserService);
}
}
for (AliasMetaData aliasMetaData : templatesAliases.values()) {
if (aliasMetaData.filter() != null) {
aliasValidator.validateAliasFilter(aliasMetaData.alias(), aliasMetaData.filter().uncompressed(), indexQueryParserService);
}
}
// now, update the mappings with the actual source
Map<String, MappingMetaData> mappingsMetaData = new HashMap<>();
for (DocumentMapper mapper : mapperService.docMappers(true)) {
MappingMetaData mappingMd = new MappingMetaData(mapper);
mappingsMetaData.put(mapper.type(), mappingMd);
}
final IndexMetaData.Builder indexMetaDataBuilder = IndexMetaData.builder(request.index()).settings(actualIndexSettings);
for (MappingMetaData mappingMd : mappingsMetaData.values()) {
indexMetaDataBuilder.putMapping(mappingMd);
}
for (AliasMetaData aliasMetaData : templatesAliases.values()) {
indexMetaDataBuilder.putAlias(aliasMetaData);
}
for (Alias alias : request.aliases()) {
AliasMetaData aliasMetaData = AliasMetaData.builder(alias.name()).filter(alias.filter())
.indexRouting(alias.indexRouting()).searchRouting(alias.searchRouting()).build();
indexMetaDataBuilder.putAlias(aliasMetaData);
}
for (Map.Entry<String, Custom> customEntry : customs.entrySet()) {
indexMetaDataBuilder.putCustom(customEntry.getKey(), customEntry.getValue());
}
indexMetaDataBuilder.state(request.state());
final IndexMetaData indexMetaData;
try {
indexMetaData = indexMetaDataBuilder.build();
} catch (Exception e) {
removalReason = "failed to build index metadata";
throw e;
}
indexService.indicesLifecycle().beforeIndexAddedToCluster(new Index(request.index()),
indexMetaData.getSettings());
MetaData newMetaData = MetaData.builder(currentState.metaData())
.put(indexMetaData, false)
.build();
String maybeShadowIndicator = IndexMetaData.isIndexUsingShadowReplicas(indexMetaData.getSettings()) ? "s" : "";
logger.info("[{}] creating index, cause [{}], templates {}, shards [{}]/[{}{}], mappings {}",
request.index(), request.cause(), templateNames, indexMetaData.getNumberOfShards(),
indexMetaData.getNumberOfReplicas(), maybeShadowIndicator, mappings.keySet());
ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks());
if (!request.blocks().isEmpty()) {
for (ClusterBlock block : request.blocks()) {
blocks.addIndexBlock(request.index(), block);
}
}
blocks.updateBlocks(indexMetaData);
ClusterState updatedState = ClusterState.builder(currentState).blocks(blocks).metaData(newMetaData).build();
if (request.state() == State.OPEN) {
RoutingTable.Builder routingTableBuilder = RoutingTable.builder(updatedState.routingTable())
.addAsNew(updatedState.metaData().index(request.index()));
RoutingAllocation.Result routingResult = allocationService.reroute(ClusterState.builder(updatedState).routingTable(routingTableBuilder.build()).build());
updatedState = ClusterState.builder(updatedState).routingResult(routingResult).build();
}
removalReason = "cleaning up after validating index on master";
return updatedState;
} finally {
if (indexCreated) {
// Index was already partially created - need to clean up
indicesService.removeIndex(request.index(), removalReason != null ? removalReason : "failed to create index");
}
}
}
});
}
/**
 * Parses a mapping source string (JSON or another XContent format, auto-detected
 * from the content) into its nested {@code Map} representation.
 *
 * @param mappingSource raw mapping definition as a string
 * @return the parsed mapping as a map of field names to values/sub-maps
 * @throws Exception if the source cannot be parsed
 */
private Map<String, Object> parseMapping(String mappingSource) throws Exception {
    // try-with-resources ensures the parser is closed even when parsing fails
    try (XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource)) {
        return parser.map();
    }
}
/**
 * Loads mapping definitions from the files in {@code mappingsDir} and merges them
 * into {@code mappings}, keyed by mapping type (the file name without its extension).
 *
 * <p>Hidden files are skipped. A file that fails to read or parse is logged and
 * ignored (best-effort), so a single malformed mapping file does not abort index
 * creation.
 *
 * @param mappings    type name to mapping-definition map, mutated in place
 * @param mappingsDir directory containing one mapping file per type
 * @throws IOException if the directory itself cannot be listed
 */
private void addMappings(Map<String, Map<String, Object>> mappings, Path mappingsDir) throws IOException {
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(mappingsDir)) {
        for (Path mappingFile : stream) {
            if (FileSystemUtils.isHidden(mappingFile)) {
                continue;
            }
            // Reuse the file name computed once; the type is the name minus its extension.
            final String fileName = mappingFile.getFileName().toString();
            int lastDotIndex = fileName.lastIndexOf('.');
            String mappingType = lastDotIndex != -1 ? fileName.substring(0, lastDotIndex) : fileName;
            try (BufferedReader reader = Files.newBufferedReader(mappingFile, StandardCharsets.UTF_8)) {
                String mappingSource = Streams.copyToString(reader);
                if (mappings.containsKey(mappingType)) {
                    // A mapping for this type already exists (e.g. from a template):
                    // fill in defaults rather than overwrite.
                    XContentHelper.mergeDefaults(mappings.get(mappingType), parseMapping(mappingSource));
                } else {
                    mappings.put(mappingType, parseMapping(mappingSource));
                }
            } catch (Exception e) {
                // Deliberate best-effort: log and continue with the remaining files.
                logger.warn("failed to read / parse mapping [" + mappingType + "] from location [" + mappingFile + "], ignoring...", e);
            }
        }
    }
}
/**
 * Collects all index templates in the cluster state that match the create-index
 * request (per the given filter), sorted by descending template order so that
 * higher-order templates take precedence.
 *
 * @param request             the create-index request being processed
 * @param state               current cluster state holding the registered templates
 * @param indexTemplateFilter decides whether a template applies to the request
 * @return matching templates, highest {@code order()} first
 * @throws IOException declared for filter implementations that may perform I/O
 */
private List<IndexTemplateMetaData> findTemplates(CreateIndexClusterStateUpdateRequest request, ClusterState state, IndexTemplateFilter indexTemplateFilter) throws IOException {
    List<IndexTemplateMetaData> templates = new ArrayList<>();
    for (ObjectCursor<IndexTemplateMetaData> cursor : state.metaData().templates().values()) {
        IndexTemplateMetaData template = cursor.value;
        if (indexTemplateFilter.apply(request, template)) {
            templates.add(template);
        }
    }
    CollectionUtil.timSort(templates, new Comparator<IndexTemplateMetaData>() {
        @Override
        public int compare(IndexTemplateMetaData o1, IndexTemplateMetaData o2) {
            // Integer.compare avoids the overflow possible with "o2.order() - o1.order()"
            // when orders are near Integer.MIN_VALUE/MAX_VALUE; descending order.
            return Integer.compare(o2.order(), o1.order());
        }
    });
    return templates;
}
/**
 * Validates a create-index request against the current cluster state: first the
 * index name, then the requested index settings. Each check throws on failure.
 */
private void validate(CreateIndexClusterStateUpdateRequest request, ClusterState state) {
    validateIndexName(request.index(), state);
    validateIndexSettings(request.index(), request.settings());
}
/**
 * Validates the given index settings, collecting all validation errors and throwing
 * a single {@link IndexCreationException} (wrapping a {@link ValidationException})
 * if any were found.
 *
 * @param indexName name of the index being created, used in the exception
 * @param settings  the settings to validate
 * @throws IndexCreationException if at least one validation error exists
 */
public void validateIndexSettings(String indexName, Settings settings) throws IndexCreationException {
    final List<String> errors = getIndexSettingsValidationErrors(settings);
    if (!errors.isEmpty()) {
        final ValidationException cause = new ValidationException();
        cause.addValidationErrors(errors);
        throw new IndexCreationException(new Index(indexName), cause);
    }
}
/**
 * Collects human-readable validation error messages for the given index settings.
 * An empty result means the settings are valid.
 *
 * <p>Checks performed: a custom data path requires {@code path.shared_data} to be
 * configured and must resolve under it; shard counts must be positive; replica
 * counts must be non-negative.
 *
 * @param settings the index settings to check
 * @return all validation errors found (possibly empty, never {@code null})
 */
List<String> getIndexSettingsValidationErrors(Settings settings) {
    final List<String> errors = new ArrayList<>();
    final String customPath = settings.get(IndexMetaData.SETTING_DATA_PATH, null);
    if (customPath != null) {
        if (env.sharedDataFile() == null) {
            errors.add("path.shared_data must be set in order to use custom data paths");
        } else if (PathUtils.get(new Path[]{env.sharedDataFile()}, customPath) == null) {
            // resolution failed -> the custom path escapes the shared data directory
            errors.add("custom path [" + customPath + "] is not a sub-path of path.shared_data [" + env.sharedDataFile() + "]");
        }
    }
    final Integer numberOfPrimaries = settings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_SHARDS, null);
    if (numberOfPrimaries != null && numberOfPrimaries <= 0) {
        errors.add("index must have 1 or more primary shards");
    }
    final Integer numberOfReplicas = settings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, null);
    if (numberOfReplicas != null && numberOfReplicas < 0) {
        errors.add("index must have 0 or more replica shards");
    }
    return errors;
}
/**
 * Default template filter: a template applies to a request when the request's index
 * name matches the template's (possibly wildcarded) pattern.
 */
private static class DefaultIndexTemplateFilter implements IndexTemplateFilter {
    @Override
    public boolean apply(CreateIndexClusterStateUpdateRequest request, IndexTemplateMetaData template) {
        return Regex.simpleMatch(template.template(), request.index());
    }
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterators;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.*;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.env.ShardLock;
import org.elasticsearch.index.aliases.IndexAliasesService;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.cache.filter.ShardFilterCacheModule;
import org.elasticsearch.index.cache.fixedbitset.FixedBitSetFilterCache;
import org.elasticsearch.index.cache.query.ShardQueryCacheModule;
import org.elasticsearch.index.deletionpolicy.DeletionPolicyModule;
import org.elasticsearch.index.engine.IndexEngine;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.fielddata.ShardFieldDataModule;
import org.elasticsearch.index.gateway.IndexGateway;
import org.elasticsearch.index.gateway.IndexShardGatewayModule;
import org.elasticsearch.index.gateway.IndexShardGatewayService;
import org.elasticsearch.index.get.ShardGetModule;
import org.elasticsearch.index.indexing.ShardIndexingModule;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.merge.policy.MergePolicyModule;
import org.elasticsearch.index.merge.policy.MergePolicyProvider;
import org.elasticsearch.index.merge.scheduler.MergeSchedulerModule;
import org.elasticsearch.index.merge.scheduler.MergeSchedulerProvider;
import org.elasticsearch.index.percolator.PercolatorQueriesRegistry;
import org.elasticsearch.index.percolator.PercolatorShardModule;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.search.stats.ShardSearchModule;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.index.settings.IndexSettingsService;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardCreationException;
import org.elasticsearch.index.shard.IndexShardModule;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.index.snapshots.IndexShardSnapshotModule;
import org.elasticsearch.index.store.IndexStore;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.store.StoreModule;
import org.elasticsearch.index.suggest.SuggestShardModule;
import org.elasticsearch.index.termvectors.ShardTermVectorModule;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.index.translog.TranslogModule;
import org.elasticsearch.index.translog.TranslogService;
import org.elasticsearch.indices.IndicesLifecycle;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.InternalIndicesLifecycle;
import org.elasticsearch.plugins.PluginsService;
import org.elasticsearch.plugins.ShardsPluginsModule;
import java.io.Closeable;
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import static com.google.common.collect.Maps.newHashMap;
import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder;
/**
*
*/
/**
 * Per-index container on a node: wires together all index-level services (analysis,
 * mapping, query parsing, similarity, aliases, caches, field data, engine, gateway,
 * store) and manages the lifecycle of this index's shards.
 *
 * <p>The shard map is a {@code volatile} immutable snapshot that is replaced wholesale
 * on every shard add/remove, so reads are lock-free while mutations go through the
 * {@code synchronized} {@link #createShard(int, boolean)} and
 * {@link #removeShard(int, String)} methods.
 */
public class IndexService extends AbstractIndexComponent implements IndexComponent, Iterable<IndexShard> {
    private final Injector injector;
    private final Settings indexSettings;
    private final PluginsService pluginsService;
    private final InternalIndicesLifecycle indicesLifecycle;
    private final AnalysisService analysisService;
    private final MapperService mapperService;
    private final IndexQueryParserService queryParserService;
    private final SimilarityService similarityService;
    private final IndexAliasesService aliasesService;
    private final IndexCache indexCache;
    private final IndexFieldDataService indexFieldData;
    private final FixedBitSetFilterCache fixedBitSetFilterCache;
    private final IndexEngine indexEngine;
    private final IndexGateway indexGateway;
    private final IndexStore indexStore;
    private final IndexSettingsService settingsService;
    private final NodeEnvironment nodeEnv;
    private final IndicesService indicesServices;
    // Immutable snapshot of shard id -> (shard, its child injector); replaced on mutation.
    private volatile ImmutableMap<Integer, Tuple<IndexShard, Injector>> shards = ImmutableMap.of();
    // Flipped once in close(); guards createShard() against use-after-close.
    private final AtomicBoolean closed = new AtomicBoolean(false);
    // Set when closed with delete=true; triggers shard-store deletion in onShardClose().
    private final AtomicBoolean deleted = new AtomicBoolean(false);
    @Inject
    public IndexService(Injector injector, Index index, @IndexSettings Settings indexSettings, NodeEnvironment nodeEnv,
                        AnalysisService analysisService, MapperService mapperService, IndexQueryParserService queryParserService,
                        SimilarityService similarityService, IndexAliasesService aliasesService, IndexCache indexCache, IndexEngine indexEngine,
                        IndexGateway indexGateway, IndexStore indexStore, IndexSettingsService settingsService, IndexFieldDataService indexFieldData,
                        FixedBitSetFilterCache fixedBitSetFilterCache, IndicesService indicesServices) {
        super(index, indexSettings);
        this.injector = injector;
        this.indexSettings = indexSettings;
        this.analysisService = analysisService;
        this.mapperService = mapperService;
        this.queryParserService = queryParserService;
        this.similarityService = similarityService;
        this.aliasesService = aliasesService;
        this.indexCache = indexCache;
        this.indexFieldData = indexFieldData;
        this.indexEngine = indexEngine;
        this.indexGateway = indexGateway;
        this.indexStore = indexStore;
        this.settingsService = settingsService;
        this.fixedBitSetFilterCache = fixedBitSetFilterCache;
        this.pluginsService = injector.getInstance(PluginsService.class);
        this.indicesServices = indicesServices;
        this.indicesLifecycle = (InternalIndicesLifecycle) injector.getInstance(IndicesLifecycle.class);
        // inject workarounds for cyclic dep: these services need a back-reference to
        // this IndexService, which cannot be expressed through constructor injection.
        indexCache.filter().setIndexService(this);
        indexFieldData.setIndexService(this);
        fixedBitSetFilterCache.setIndexService(this);
        this.nodeEnv = nodeEnv;
    }
    /** Number of shards of this index currently allocated on this node. */
    public int numberOfShards() {
        return shards.size();
    }
    public InternalIndicesLifecycle indicesLifecycle() {
        return this.indicesLifecycle;
    }
    /** Iterates over a point-in-time snapshot of this node's shards of the index. */
    @Override
    public Iterator<IndexShard> iterator() {
        return Iterators.transform(shards.values().iterator(), new Function<Tuple<IndexShard, Injector>, IndexShard>() {
            @Override
            public IndexShard apply(Tuple<IndexShard, Injector> input) {
                return input.v1();
            }
        });
    }
    public boolean hasShard(int shardId) {
        return shards.containsKey(shardId);
    }
    /**
     * Return the shard with the provided id, or null if there is no such shard.
     */
    @Nullable
    public IndexShard shard(int shardId) {
        Tuple<IndexShard, Injector> indexShardInjectorTuple = shards.get(shardId);
        if (indexShardInjectorTuple != null) {
            return indexShardInjectorTuple.v1();
        }
        return null;
    }
    /**
     * Return the shard with the provided id, or throw an exception if it doesn't exist.
     */
    public IndexShard shardSafe(int shardId) throws IndexShardMissingException {
        IndexShard indexShard = shard(shardId);
        if (indexShard == null) {
            throw new IndexShardMissingException(new ShardId(index, shardId));
        }
        return indexShard;
    }
    public Set<Integer> shardIds() {
        return shards.keySet();
    }
    public Injector injector() {
        return injector;
    }
    public IndexGateway gateway() {
        return indexGateway;
    }
    public IndexSettingsService settingsService() {
        return this.settingsService;
    }
    public IndexStore store() {
        return indexStore;
    }
    public IndexCache cache() {
        return indexCache;
    }
    public IndexFieldDataService fieldData() {
        return indexFieldData;
    }
    public FixedBitSetFilterCache fixedBitSetFilterCache() {
        return fixedBitSetFilterCache;
    }
    public AnalysisService analysisService() {
        return this.analysisService;
    }
    public MapperService mapperService() {
        return mapperService;
    }
    public IndexQueryParserService queryParserService() {
        return queryParserService;
    }
    public SimilarityService similarityService() {
        return similarityService;
    }
    public IndexAliasesService aliasesService() {
        return aliasesService;
    }
    /**
     * Closes this index service and removes all of its shards. Idempotent: only the
     * first call performs the close. When {@code delete} is true, shard contents are
     * deleted once their stores are released (see {@link #onShardClose}).
     */
    public synchronized void close(final String reason, boolean delete) {
        if (closed.compareAndSet(false, true)) {
            deleted.compareAndSet(false, delete);
            final Set<Integer> shardIds = shardIds();
            for (final int shardId : shardIds) {
                try {
                    removeShard(shardId, reason);
                } catch (Throwable t) {
                    // keep closing the remaining shards even if one fails
                    logger.warn("failed to close shard", t);
                }
            }
        }
    }
    /**
     * Return the shard injector for the provided id, or throw an exception if there is no such shard.
     */
    public Injector shardInjectorSafe(int shardId) throws IndexShardMissingException {
        Tuple<IndexShard, Injector> tuple = shards.get(shardId);
        if (tuple == null) {
            throw new IndexShardMissingException(new ShardId(index, shardId));
        }
        return tuple.v2();
    }
    public String indexUUID() {
        return indexSettings.get(IndexMetaData.SETTING_UUID, IndexMetaData.INDEX_UUID_NA_VALUE);
    }
    /**
     * Creates a new shard of this index on this node, acquiring the on-disk shard lock,
     * building the shard's child injector from the per-shard modules, and publishing it
     * in the shard map. On any failure, the lock and any partially-built shard are
     * cleaned up in the finally block.
     */
    public synchronized IndexShard createShard(int sShardId, boolean primary) throws ElasticsearchException {
        /*
         * TODO: we execute this in parallel but it's a synced method. Yet, we might
         * be able to serialize the execution via the cluster state in the future. for now we just
         * keep it synced.
         */
        if (closed.get()) {
            throw new ElasticsearchIllegalStateException("Can't create shard [" + index.name() + "][" + sShardId + "], closed");
        }
        final ShardId shardId = new ShardId(index, sShardId);
        ShardLock lock = null;
        boolean success = false;
        Injector shardInjector = null;
        try {
            lock = nodeEnv.shardLock(shardId, TimeUnit.SECONDS.toMillis(5));
            if (shards.containsKey(shardId.id())) {
                throw new IndexShardAlreadyExistsException(shardId + " already exists");
            }
            indicesLifecycle.beforeIndexShardCreated(shardId, indexSettings);
            logger.debug("creating shard_id {}", shardId);
            // if we are on a shared FS we only own the shard (ie. we can safely delete it) if we are the primary.
            final boolean canDeleteShardContent = IndexMetaData.isOnSharedFilesystem(indexSettings) == false ||
                    (primary && IndexMetaData.isOnSharedFilesystem(indexSettings));
            ModulesBuilder modules = new ModulesBuilder();
            modules.add(new ShardsPluginsModule(indexSettings, pluginsService));
            modules.add(new IndexShardModule(shardId, primary, indexSettings));
            modules.add(new ShardIndexingModule());
            modules.add(new ShardSearchModule());
            modules.add(new ShardGetModule());
            modules.add(new StoreModule(indexSettings, injector.getInstance(IndexStore.class), lock,
                    new StoreCloseListener(shardId, canDeleteShardContent)));
            modules.add(new DeletionPolicyModule(indexSettings));
            modules.add(new MergePolicyModule(indexSettings));
            modules.add(new MergeSchedulerModule(indexSettings));
            modules.add(new ShardFilterCacheModule());
            modules.add(new ShardQueryCacheModule());
            modules.add(new ShardFieldDataModule());
            modules.add(new TranslogModule(indexSettings));
            modules.add(new IndexShardGatewayModule(injector.getInstance(IndexGateway.class)));
            modules.add(new PercolatorShardModule());
            modules.add(new ShardTermVectorModule());
            modules.add(new IndexShardSnapshotModule());
            modules.add(new SuggestShardModule());
            try {
                shardInjector = modules.createChildInjector(injector);
            } catch (CreationException e) {
                // unwrap Guice's aggregate so the root cause is reported
                throw new IndexShardCreationException(shardId, Injectors.getFirstErrorFailure(e));
            } catch (Throwable e) {
                throw new IndexShardCreationException(shardId, e);
            }
            IndexShard indexShard = shardInjector.getInstance(IndexShard.class);
            indicesLifecycle.indexShardStateChanged(indexShard, null, "shard created");
            indicesLifecycle.afterIndexShardCreated(indexShard);
            // publish the new shard by swapping in a new immutable map snapshot
            shards = newMapBuilder(shards).put(shardId.id(), new Tuple<>(indexShard, shardInjector)).immutableMap();
            success = true;
            return indexShard;
        } catch (IOException ex) {
            throw new IndexShardCreationException(shardId, ex);
        } finally {
            if (success == false) {
                // roll back: release the shard lock and tear down whatever was built
                IOUtils.closeWhileHandlingException(lock);
                if (shardInjector != null) {
                    IndexShard indexShard = shardInjector.getInstance(IndexShard.class);
                    closeShardInjector("initialization failed", shardId, shardInjector, indexShard);
                }
            }
        }
    }
    /**
     * Removes and closes the shard with the given id; a no-op if the shard is not
     * present on this node.
     */
    public synchronized void removeShard(int shardId, String reason) throws ElasticsearchException {
        final ShardId sId = new ShardId(index, shardId);
        final Injector shardInjector;
        final IndexShard indexShard;
        if (shards.containsKey(shardId) == false) {
            return;
        }
        logger.debug("[{}] closing... (reason: [{}])", shardId, reason);
        // mutate a copy, then swap in the new immutable snapshot before closing
        HashMap<Integer, Tuple<IndexShard, Injector>> tmpShardsMap = newHashMap(shards);
        Tuple<IndexShard, Injector> tuple = tmpShardsMap.remove(shardId);
        indexShard = tuple.v1();
        shardInjector = tuple.v2();
        shards = ImmutableMap.copyOf(tmpShardsMap);
        closeShardInjector(reason, sId, shardInjector, indexShard);
        logger.debug("[{}] closed (reason: [{}])", shardId, reason);
    }
    /**
     * Tears down a shard and its injector in a fixed order: lifecycle callbacks,
     * plugin shard services, translog service, the shard/engine itself, the remaining
     * per-shard components, and finally the store. The ordering matters: the store is
     * closed last so earlier steps can still release resources that reference it.
     */
    private void closeShardInjector(String reason, ShardId sId, Injector shardInjector, IndexShard indexShard) {
        final int shardId = sId.id();
        try {
            try {
                indicesLifecycle.beforeIndexShardClosed(sId, indexShard, indexSettings);
            } finally {
                // close everything else even if the beforeIndexShardClosed threw an exception
                for (Class<? extends CloseableIndexComponent> closeable : pluginsService.shardServices()) {
                    try {
                        shardInjector.getInstance(closeable).close();
                    } catch (Throwable e) {
                        logger.debug("[{}] failed to clean plugin shard service [{}]", e, shardId, closeable);
                    }
                }
                // now we can close the translog service, we need to close it before the we close the shard
                closeInjectorResource(sId, shardInjector, TranslogService.class);
                // this logic is tricky, we want to close the engine so we rollback the changes done to it
                // and close the shard so no operations are allowed to it
                if (indexShard != null) {
                    try {
                        final boolean flushEngine = deleted.get() == false && closed.get(); // only flush we are we closed (closed index or shutdown) and if we are not deleted
                        indexShard.close(reason, flushEngine);
                    } catch (Throwable e) {
                        logger.debug("[{}] failed to close index shard", e, shardId);
                        // ignore
                    }
                }
                closeInjectorResource(sId, shardInjector,
                        MergeSchedulerProvider.class,
                        MergePolicyProvider.class,
                        IndexShardGatewayService.class,
                        Translog.class,
                        PercolatorQueriesRegistry.class);
                // call this before we close the store, so we can release resources for it
                indicesLifecycle.afterIndexShardClosed(sId, indexShard, indexSettings);
            }
        } finally {
            try {
                shardInjector.getInstance(Store.class).close();
            } catch (Throwable e) {
                logger.warn("[{}] failed to close store on shard removal (reason: [{}])", e, shardId, reason);
            }
        }
    }
    /**
     * This method gets an instance for each of the given classes passed and calls #close() on the returned instance.
     * NOTE: this method swallows all exceptions thrown from the close method of the injector and logs them as debug log
     */
    private void closeInjectorResource(ShardId shardId, Injector shardInjector, Class<? extends Closeable>... toClose) {
        for (Class<? extends Closeable> closeable : toClose) {
            try {
                final Closeable instance = shardInjector.getInstance(closeable);
                if (instance == null) {
                    throw new NullPointerException("No instance available for " + closeable.getName());
                }
                IOUtils.close(instance);
            } catch (Throwable t) {
                logger.debug("{} failed to close {}", t, shardId, Strings.toUnderscoreCase(closeable.getSimpleName()));
            }
        }
    }
    /**
     * Invoked when a shard's store lock is released. If this index was deleted and we
     * own the shard content, the on-disk shard data is removed; on I/O failure the
     * delete is re-queued as a pending delete for retry.
     */
    private void onShardClose(ShardLock lock, boolean ownsShard) {
        if (deleted.get()) { // we remove that shards content if this index has been deleted
            try {
                if (ownsShard) {
                    indicesServices.deleteShardStore("delete index", lock, indexSettings);
                }
            } catch (IOException e) {
                indicesServices.addPendingDelete(lock.getShardId(), indexSettings);
                logger.debug("{} failed to delete shard content - scheduled a retry", e, lock.getShardId().id());
            }
        }
    }
    /** Bridges a store-close event for a specific shard to {@link #onShardClose}. */
    private class StoreCloseListener implements Store.OnClose {
        private final ShardId shardId;
        private final boolean ownsShard;
        public StoreCloseListener(ShardId shardId, boolean ownsShard) {
            this.shardId = shardId;
            this.ownsShard = ownsShard;
        }
        @Override
        public void handle(ShardLock lock) {
            assert lock.getShardId().equals(shardId) : "shard Id mismatch, expected: " + shardId + " but got: " + lock.getShardId();
            onShardClose(lock, ownsShard);
        }
    }
    public Settings getIndexSettings() {
        return indexSettings;
    }
}
| |
/*
* Copyright (C) 2009 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.cache;
import static com.google.common.base.Objects.firstNonNull;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import com.google.common.annotations.GwtCompatible;
import com.google.common.base.Ascii;
import com.google.common.base.Equivalence;
import com.google.common.base.Objects;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.base.Ticker;
import com.google.common.cache.AbstractCache.SimpleStatsCounter;
import com.google.common.cache.AbstractCache.StatsCounter;
import com.google.common.cache.LocalCache.Strength;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.CheckReturnValue;
/**
* <p>A builder of {@link LoadingCache} and {@link Cache} instances having any combination of the
* following features:
*
* <ul>
* <li>automatic loading of entries into the cache
* <li>least-recently-used eviction when a maximum size is exceeded
* <li>time-based expiration of entries, measured since last access or last write
* <li>keys automatically wrapped in {@linkplain WeakReference weak} references
* <li>values automatically wrapped in {@linkplain WeakReference weak} or
* {@linkplain SoftReference soft} references
* <li>notification of evicted (or otherwise removed) entries
* <li>accumulation of cache access statistics
* </ul>
*
* <p>These features are all optional; caches can be created using all or none of them. By default
* cache instances created by {@code CacheBuilder} will not perform any type of eviction.
*
* <p>Usage example: <pre> {@code
*
* LoadingCache<Key, Graph> graphs = CacheBuilder.newBuilder()
* .maximumSize(10000)
* .expireAfterWrite(10, TimeUnit.MINUTES)
* .removalListener(MY_LISTENER)
* .build(
* new CacheLoader<Key, Graph>() {
* public Graph load(Key key) throws AnyException {
* return createExpensiveGraph(key);
* }
* });}</pre>
*
* <p>Or equivalently, <pre> {@code
*
* // In real life this would come from a command-line flag or config file
* String spec = "maximumSize=10000,expireAfterWrite=10m";
*
* LoadingCache<Key, Graph> graphs = CacheBuilder.from(spec)
* .removalListener(MY_LISTENER)
* .build(
* new CacheLoader<Key, Graph>() {
* public Graph load(Key key) throws AnyException {
* return createExpensiveGraph(key);
* }
* });}</pre>
*
* <p>The returned cache is implemented as a hash table with similar performance characteristics to
* {@link ConcurrentHashMap}. It implements all optional operations of the {@link LoadingCache} and
* {@link Cache} interfaces. The {@code asMap} view (and its collection views) have <i>weakly
* consistent iterators</i>. This means that they are safe for concurrent use, but if other threads
* modify the cache after the iterator is created, it is undefined which of these changes, if any,
* are reflected in that iterator. These iterators never throw {@link
* ConcurrentModificationException}.
*
* <p><b>Note:</b> by default, the returned cache uses equality comparisons (the
* {@link Object#equals equals} method) to determine equality for keys or values. However, if
* {@link #weakKeys} was specified, the cache uses identity ({@code ==})
* comparisons instead for keys. Likewise, if {@link #weakValues} or {@link #softValues} was
* specified, the cache uses identity comparisons for values.
*
* <p>Entries are automatically evicted from the cache when any of
* {@linkplain #maximumSize(long) maximumSize}, {@linkplain #maximumWeight(long) maximumWeight},
* {@linkplain #expireAfterWrite expireAfterWrite},
* {@linkplain #expireAfterAccess expireAfterAccess}, {@linkplain #weakKeys weakKeys},
* {@linkplain #weakValues weakValues}, or {@linkplain #softValues softValues} are requested.
*
* <p>If {@linkplain #maximumSize(long) maximumSize} or
* {@linkplain #maximumWeight(long) maximumWeight} is requested entries may be evicted on each cache
* modification.
*
* <p>If {@linkplain #expireAfterWrite expireAfterWrite} or
* {@linkplain #expireAfterAccess expireAfterAccess} is requested entries may be evicted on each
* cache modification, on occasional cache accesses, or on calls to {@link Cache#cleanUp}. Expired
* entries may be counted by {@link Cache#size}, but will never be visible to read or write
* operations.
*
* <p>If {@linkplain #weakKeys weakKeys}, {@linkplain #weakValues weakValues}, or
* {@linkplain #softValues softValues} are requested, it is possible for a key or value present in
* the cache to be reclaimed by the garbage collector. Entries with reclaimed keys or values may be
* removed from the cache on each cache modification, on occasional cache accesses, or on calls to
* {@link Cache#cleanUp}; such entries may be counted in {@link Cache#size}, but will never be
* visible to read or write operations.
*
* <p>Certain cache configurations will result in the accrual of periodic maintenance tasks which
* will be performed during write operations, or during occasional read operations in the absence of
* writes. The {@link Cache#cleanUp} method of the returned cache will also perform maintenance, but
* calling it should not be necessary with a high throughput cache. Only caches built with
* {@linkplain #removalListener removalListener}, {@linkplain #expireAfterWrite expireAfterWrite},
* {@linkplain #expireAfterAccess expireAfterAccess}, {@linkplain #weakKeys weakKeys},
* {@linkplain #weakValues weakValues}, or {@linkplain #softValues softValues} perform periodic
* maintenance.
*
* <p>The caches produced by {@code CacheBuilder} are serializable, and the deserialized caches
* retain all the configuration properties of the original cache. Note that the serialized form does
* <i>not</i> include cache contents, but only configuration.
*
* <p>See the Guava User Guide article on <a href=
* "http://code.google.com/p/guava-libraries/wiki/CachesExplained">caching</a> for a higher-level
* explanation.
*
* @param <K> the base key type for all caches created by this builder
* @param <V> the base value type for all caches created by this builder
* @author Charles Fry
* @author Kevin Bourrillion
* @since 10.0
*/
@GwtCompatible(emulated = true)
public final class CacheBuilder<K, V> {
// Defaults applied when the corresponding builder field is left at UNSET_INT
// (presumably consumed by the cache implementation — confirm in LocalCache).
private static final int DEFAULT_INITIAL_CAPACITY = 16;
private static final int DEFAULT_CONCURRENCY_LEVEL = 4;
// Zero nanos means "no expiration" / "no refresh" by default.
private static final int DEFAULT_EXPIRATION_NANOS = 0;
private static final int DEFAULT_REFRESH_NANOS = 0;
// No-op stats counter used when stats recording is disabled: every record* call is
// discarded and snapshot() always returns the shared all-zero EMPTY_STATS.
static final Supplier<? extends StatsCounter> NULL_STATS_COUNTER = Suppliers.ofInstance(
    new StatsCounter() {
      @Override
      public void recordHits(int count) {}
      @Override
      public void recordMisses(int count) {}
      @Override
      public void recordLoadSuccess(long loadTime) {}
      @Override
      public void recordLoadException(long loadTime) {}
      @Override
      public void recordEviction() {}
      @Override
      public CacheStats snapshot() {
        return EMPTY_STATS;
      }
    });
// Immutable all-zero statistics snapshot (hits, misses, load successes, load
// exceptions, total load time, evictions).
static final CacheStats EMPTY_STATS = new CacheStats(0, 0, 0, 0, 0, 0);
static final Supplier<StatsCounter> CACHE_STATS_COUNTER =
new Supplier<StatsCounter>() {
@Override
public StatsCounter get() {
return new SimpleStatsCounter();
}
};
enum NullListener implements RemovalListener<Object, Object> {
INSTANCE;
@Override
public void onRemoval(RemovalNotification<Object, Object> notification) {}
}
enum OneWeigher implements Weigher<Object, Object> {
INSTANCE;
@Override
public int weigh(Object key, Object value) {
return 1;
}
}
static final Ticker NULL_TICKER = new Ticker() {
@Override
public long read() {
return 0;
}
};
private static final Logger logger = Logger.getLogger(CacheBuilder.class.getName());
static final int UNSET_INT = -1;
boolean strictParsing = true;
int initialCapacity = UNSET_INT;
int concurrencyLevel = UNSET_INT;
long maximumSize = UNSET_INT;
long maximumWeight = UNSET_INT;
Weigher<? super K, ? super V> weigher;
Strength keyStrength;
Strength valueStrength;
long expireAfterWriteNanos = UNSET_INT;
long expireAfterAccessNanos = UNSET_INT;
long refreshNanos = UNSET_INT;
Equivalence<Object> keyEquivalence;
Equivalence<Object> valueEquivalence;
RemovalListener<? super K, ? super V> removalListener;
Ticker ticker;
Supplier<? extends StatsCounter> statsCounterSupplier = NULL_STATS_COUNTER;
// TODO(fry): make constructor private and update tests to use newBuilder
CacheBuilder() {}
/**
* Constructs a new {@code CacheBuilder} instance with default settings, including strong keys,
* strong values, and no automatic eviction of any kind.
*/
public static CacheBuilder<Object, Object> newBuilder() {
return new CacheBuilder<Object, Object>();
}
Equivalence<Object> getKeyEquivalence() {
return firstNonNull(keyEquivalence, getKeyStrength().defaultEquivalence());
}
Equivalence<Object> getValueEquivalence() {
return firstNonNull(valueEquivalence, getValueStrength().defaultEquivalence());
}
/**
* Sets the minimum total size for the internal hash tables. For example, if the initial capacity
* is {@code 60}, and the concurrency level is {@code 8}, then eight segments are created, each
* having a hash table of size eight. Providing a large enough estimate at construction time
* avoids the need for expensive resizing operations later, but setting this value unnecessarily
* high wastes memory.
*
* @throws IllegalArgumentException if {@code initialCapacity} is negative
* @throws IllegalStateException if an initial capacity was already set
*/
public CacheBuilder<K, V> initialCapacity(int initialCapacity) {
checkState(this.initialCapacity == UNSET_INT, "initial capacity was already set to %s",
this.initialCapacity);
checkArgument(initialCapacity >= 0);
this.initialCapacity = initialCapacity;
return this;
}
int getInitialCapacity() {
return (initialCapacity == UNSET_INT) ? DEFAULT_INITIAL_CAPACITY : initialCapacity;
}
/**
* Guides the allowed concurrency among update operations. Used as a hint for internal sizing. The
* table is internally partitioned to try to permit the indicated number of concurrent updates
* without contention. Because assignment of entries to these partitions is not necessarily
* uniform, the actual concurrency observed may vary. Ideally, you should choose a value to
* accommodate as many threads as will ever concurrently modify the table. Using a significantly
* higher value than you need can waste space and time, and a significantly lower value can lead
* to thread contention. But overestimates and underestimates within an order of magnitude do not
* usually have much noticeable impact. A value of one permits only one thread to modify the cache
* at a time, but since read operations and cache loading computations can proceed concurrently,
* this still yields higher concurrency than full synchronization.
*
* <p> Defaults to 4. <b>Note:</b>The default may change in the future. If you care about this
* value, you should always choose it explicitly.
*
* <p>The current implementation uses the concurrency level to create a fixed number of hashtable
* segments, each governed by its own write lock. The segment lock is taken once for each explicit
* write, and twice for each cache loading computation (once prior to loading the new value,
* and once after loading completes). Much internal cache management is performed at the segment
* granularity. For example, access queues and write queues are kept per segment when they are
* required by the selected eviction algorithm. As such, when writing unit tests it is not
* uncommon to specify {@code concurrencyLevel(1)} in order to achieve more deterministic eviction
* behavior.
*
* <p>Note that future implementations may abandon segment locking in favor of more advanced
* concurrency controls.
*
* @throws IllegalArgumentException if {@code concurrencyLevel} is nonpositive
* @throws IllegalStateException if a concurrency level was already set
*/
public CacheBuilder<K, V> concurrencyLevel(int concurrencyLevel) {
checkState(this.concurrencyLevel == UNSET_INT, "concurrency level was already set to %s",
this.concurrencyLevel);
checkArgument(concurrencyLevel > 0);
this.concurrencyLevel = concurrencyLevel;
return this;
}
int getConcurrencyLevel() {
return (concurrencyLevel == UNSET_INT) ? DEFAULT_CONCURRENCY_LEVEL : concurrencyLevel;
}
/**
* Specifies the maximum number of entries the cache may contain. Note that the cache <b>may evict
* an entry before this limit is exceeded</b>. As the cache size grows close to the maximum, the
* cache evicts entries that are less likely to be used again. For example, the cache may evict an
* entry because it hasn't been used recently or very often.
*
* <p>When {@code size} is zero, elements will be evicted immediately after being loaded into the
* cache. This can be useful in testing, or to disable caching temporarily without a code change.
*
* <p>This feature cannot be used in conjunction with {@link #maximumWeight}.
*
* @param size the maximum size of the cache
* @throws IllegalArgumentException if {@code size} is negative
* @throws IllegalStateException if a maximum size or weight was already set
*/
public CacheBuilder<K, V> maximumSize(long size) {
checkState(this.maximumSize == UNSET_INT, "maximum size was already set to %s",
this.maximumSize);
checkState(this.maximumWeight == UNSET_INT, "maximum weight was already set to %s",
this.maximumWeight);
checkState(this.weigher == null, "maximum size can not be combined with weigher");
checkArgument(size >= 0, "maximum size must not be negative");
this.maximumSize = size;
return this;
}
long getMaximumWeight() {
if (expireAfterWriteNanos == 0 || expireAfterAccessNanos == 0) {
return 0;
}
return (weigher == null) ? maximumSize : maximumWeight;
}
// Make a safe contravariant cast now so we don't have to do it over and over.
@SuppressWarnings("unchecked")
<K1 extends K, V1 extends V> Weigher<K1, V1> getWeigher() {
return (Weigher<K1, V1>) Objects.firstNonNull(weigher, OneWeigher.INSTANCE);
}
CacheBuilder<K, V> setKeyStrength(Strength strength) {
checkState(keyStrength == null, "Key strength was already set to %s", keyStrength);
keyStrength = checkNotNull(strength);
return this;
}
Strength getKeyStrength() {
return firstNonNull(keyStrength, Strength.STRONG);
}
CacheBuilder<K, V> setValueStrength(Strength strength) {
checkState(valueStrength == null, "Value strength was already set to %s", valueStrength);
valueStrength = checkNotNull(strength);
return this;
}
Strength getValueStrength() {
return firstNonNull(valueStrength, Strength.STRONG);
}
/**
* Specifies that each entry should be automatically removed from the cache once a fixed duration
* has elapsed after the entry's creation, or the most recent replacement of its value.
*
* <p>When {@code duration} is zero, this method hands off to
* {@link #maximumSize(long) maximumSize}{@code (0)}, ignoring any otherwise-specificed maximum
* size or weight. This can be useful in testing, or to disable caching temporarily without a code
* change.
*
* <p>Expired entries may be counted in {@link Cache#size}, but will never be visible to read or
* write operations. Expired entries are cleaned up as part of the routine maintenance described
* in the class javadoc.
*
* @param duration the length of time after an entry is created that it should be automatically
* removed
* @param unit the unit that {@code duration} is expressed in
* @throws IllegalArgumentException if {@code duration} is negative
* @throws IllegalStateException if the time to live or time to idle was already set
*/
public CacheBuilder<K, V> expireAfterWrite(long duration, TimeUnit unit) {
checkState(expireAfterWriteNanos == UNSET_INT, "expireAfterWrite was already set to %s ns",
expireAfterWriteNanos);
checkArgument(duration >= 0, "duration cannot be negative: %s %s", duration, unit);
this.expireAfterWriteNanos = unit.toNanos(duration);
return this;
}
long getExpireAfterWriteNanos() {
return (expireAfterWriteNanos == UNSET_INT) ? DEFAULT_EXPIRATION_NANOS : expireAfterWriteNanos;
}
/**
* Specifies that each entry should be automatically removed from the cache once a fixed duration
* has elapsed after the entry's creation, the most recent replacement of its value, or its last
* access. Access time is reset by all cache read and write operations (including
* {@code Cache.asMap().get(Object)} and {@code Cache.asMap().put(K, V)}), but not by operations
* on the collection-views of {@link Cache#asMap}.
*
* <p>When {@code duration} is zero, this method hands off to
* {@link #maximumSize(long) maximumSize}{@code (0)}, ignoring any otherwise-specificed maximum
* size or weight. This can be useful in testing, or to disable caching temporarily without a code
* change.
*
* <p>Expired entries may be counted in {@link Cache#size}, but will never be visible to read or
* write operations. Expired entries are cleaned up as part of the routine maintenance described
* in the class javadoc.
*
* @param duration the length of time after an entry is last accessed that it should be
* automatically removed
* @param unit the unit that {@code duration} is expressed in
* @throws IllegalArgumentException if {@code duration} is negative
* @throws IllegalStateException if the time to idle or time to live was already set
*/
public CacheBuilder<K, V> expireAfterAccess(long duration, TimeUnit unit) {
checkState(expireAfterAccessNanos == UNSET_INT, "expireAfterAccess was already set to %s ns",
expireAfterAccessNanos);
checkArgument(duration >= 0, "duration cannot be negative: %s %s", duration, unit);
this.expireAfterAccessNanos = unit.toNanos(duration);
return this;
}
long getExpireAfterAccessNanos() {
return (expireAfterAccessNanos == UNSET_INT)
? DEFAULT_EXPIRATION_NANOS : expireAfterAccessNanos;
}
long getRefreshNanos() {
return (refreshNanos == UNSET_INT) ? DEFAULT_REFRESH_NANOS : refreshNanos;
}
/**
* Specifies a nanosecond-precision time source for use in determining when entries should be
* expired. By default, {@link System#nanoTime} is used.
*
* <p>The primary intent of this method is to facilitate testing of caches which have been
* configured with {@link #expireAfterWrite} or {@link #expireAfterAccess}.
*
* @throws IllegalStateException if a ticker was already set
*/
public CacheBuilder<K, V> ticker(Ticker ticker) {
checkState(this.ticker == null);
this.ticker = checkNotNull(ticker);
return this;
}
Ticker getTicker(boolean recordsTime) {
if (ticker != null) {
return ticker;
}
return recordsTime ? Ticker.systemTicker() : NULL_TICKER;
}
/**
* Specifies a listener instance that caches should notify each time an entry is removed for any
* {@linkplain RemovalCause reason}. Each cache created by this builder will invoke this listener
* as part of the routine maintenance described in the class documentation above.
*
* <p><b>Warning:</b> after invoking this method, do not continue to use <i>this</i> cache
* builder reference; instead use the reference this method <i>returns</i>. At runtime, these
* point to the same instance, but only the returned reference has the correct generic type
* information so as to ensure type safety. For best results, use the standard method-chaining
* idiom illustrated in the class documentation above, configuring a builder and building your
* cache in a single statement. Failure to heed this advice can result in a {@link
* ClassCastException} being thrown by a cache operation at some <i>undefined</i> point in the
* future.
*
* <p><b>Warning:</b> any exception thrown by {@code listener} will <i>not</i> be propagated to
* the {@code Cache} user, only logged via a {@link Logger}.
*
* @return the cache builder reference that should be used instead of {@code this} for any
* remaining configuration and cache building
* @throws IllegalStateException if a removal listener was already set
*/
@CheckReturnValue
public <K1 extends K, V1 extends V> CacheBuilder<K1, V1> removalListener(
RemovalListener<? super K1, ? super V1> listener) {
checkState(this.removalListener == null);
// safely limiting the kinds of caches this can produce
@SuppressWarnings("unchecked")
CacheBuilder<K1, V1> me = (CacheBuilder<K1, V1>) this;
me.removalListener = checkNotNull(listener);
return me;
}
// Make a safe contravariant cast now so we don't have to do it over and over.
@SuppressWarnings("unchecked")
<K1 extends K, V1 extends V> RemovalListener<K1, V1> getRemovalListener() {
return (RemovalListener<K1, V1>) Objects.firstNonNull(removalListener, NullListener.INSTANCE);
}
/**
* Enable the accumulation of {@link CacheStats} during the operation of the cache. Without this
* {@link Cache#stats} will return zero for all statistics. Note that recording stats requires
* bookkeeping to be performed with each operation, and thus imposes a performance penalty on
* cache operation.
*
* @since 12.0 (previously, stats collection was automatic)
*/
public CacheBuilder<K, V> recordStats() {
statsCounterSupplier = CACHE_STATS_COUNTER;
return this;
}
boolean isRecordingStats() {
return statsCounterSupplier == CACHE_STATS_COUNTER;
}
Supplier<? extends StatsCounter> getStatsCounterSupplier() {
return statsCounterSupplier;
}
/**
* Builds a cache, which either returns an already-loaded value for a given key or atomically
* computes or retrieves it using the supplied {@code CacheLoader}. If another thread is currently
* loading the value for this key, simply waits for that thread to finish and returns its
* loaded value. Note that multiple threads can concurrently load values for distinct keys.
*
* <p>This method does not alter the state of this {@code CacheBuilder} instance, so it can be
* invoked again to create multiple independent caches.
*
* @param loader the cache loader used to obtain new values
* @return a cache having the requested features
*/
public <K1 extends K, V1 extends V> LoadingCache<K1, V1> build(
CacheLoader<? super K1, V1> loader) {
checkWeightWithWeigher();
return new LocalCache.LocalLoadingCache<K1, V1>(this, loader);
}
/**
* Builds a cache which does not automatically load values when keys are requested.
*
* <p>Consider {@link #build(CacheLoader)} instead, if it is feasible to implement a
* {@code CacheLoader}.
*
* <p>This method does not alter the state of this {@code CacheBuilder} instance, so it can be
* invoked again to create multiple independent caches.
*
* @return a cache having the requested features
* @since 11.0
*/
public <K1 extends K, V1 extends V> Cache<K1, V1> build() {
checkWeightWithWeigher();
checkNonLoadingCache();
return new LocalCache.LocalManualCache<K1, V1>(this);
}
private void checkNonLoadingCache() {
checkState(refreshNanos == UNSET_INT, "refreshAfterWrite requires a LoadingCache");
}
private void checkWeightWithWeigher() {
if (weigher == null) {
checkState(maximumWeight == UNSET_INT, "maximumWeight requires weigher");
} else {
if (strictParsing) {
checkState(maximumWeight != UNSET_INT, "weigher requires maximumWeight");
} else {
if (maximumWeight == UNSET_INT) {
logger.log(Level.WARNING, "ignoring weigher specified without maximumWeight");
}
}
}
}
/**
* Returns a string representation for this CacheBuilder instance. The exact form of the returned
* string is not specified.
*/
@Override
public String toString() {
Objects.ToStringHelper s = Objects.toStringHelper(this);
if (initialCapacity != UNSET_INT) {
s.add("initialCapacity", initialCapacity);
}
if (concurrencyLevel != UNSET_INT) {
s.add("concurrencyLevel", concurrencyLevel);
}
if (maximumSize != UNSET_INT) {
s.add("maximumSize", maximumSize);
}
if (maximumWeight != UNSET_INT) {
s.add("maximumWeight", maximumWeight);
}
if (expireAfterWriteNanos != UNSET_INT) {
s.add("expireAfterWrite", expireAfterWriteNanos + "ns");
}
if (expireAfterAccessNanos != UNSET_INT) {
s.add("expireAfterAccess", expireAfterAccessNanos + "ns");
}
if (keyStrength != null) {
s.add("keyStrength", Ascii.toLowerCase(keyStrength.toString()));
}
if (valueStrength != null) {
s.add("valueStrength", Ascii.toLowerCase(valueStrength.toString()));
}
if (keyEquivalence != null) {
s.addValue("keyEquivalence");
}
if (valueEquivalence != null) {
s.addValue("valueEquivalence");
}
if (removalListener != null) {
s.addValue("removalListener");
}
return s.toString();
}
}
| |
/*
* Created By: Abhinav Kumar Mishra
* Copyright © 2014. Abhinav Kumar Mishra.
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jmeter.alfresco.utils;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.commons.httpclient.methods.multipart.FilePart;
import org.apache.commons.httpclient.methods.multipart.MultipartRequestEntity;
import org.apache.commons.httpclient.methods.multipart.Part;
import org.apache.commons.httpclient.methods.multipart.StringPart;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.ParseException;
/**
* The Class HttpUtils.
*
* @author Abhinav Kumar Mishra
* @since 2014
*/
public final class HttpUtils {
/** The Constant logger. */
private static final Log LOG = LogFactory.getLog(HttpUtils.class);
/**
* Gets the login response.
*
* @param authURI the path
* @param username the username
* @param password the password
* @return the login response
* @throws ParseException the parse exception
* @throws IOException Signals that an I/O exception has occurred.
*/
public Map<String, String> getAuthResponse(final String authURI,
final String username, final String password)
throws ParseException, IOException {
LOG.debug("Authenticating request..");
final Map<String, String> responseMap = new ConcurrentHashMap<String, String>();
GetMethod getRequest = null;
try {
final HttpClient httpclient = new HttpClient();
getRequest = new GetMethod(getAuthURL(authURI, username, password));
final int statusCode = httpclient.executeMethod(getRequest);
LOG.debug("Auth Response Status: "+ statusCode+"|"+ getRequest.getStatusText());
responseMap.put(Constants.RESP_BODY, getRequest.getResponseBodyAsString());
responseMap.put(Constants.CONTENT_TYPE, getRequest.getResponseHeader(Constants.CONTENT_TYPE_HDR).getValue());
responseMap.put(Constants.STATUS_CODE, String.valueOf(statusCode));
} finally {
if(getRequest!=null){
getRequest.releaseConnection();
}
}
return responseMap;
}
/**
* Gets the auth ticket.
*
* @param authURI the auth uri
* @param username the username
* @param password the password
* @return the auth ticket
* @throws IOException Signals that an I/O exception has occurred.
*/
public String getAuthTicket(final String authURI,
final String username, final String password) throws IOException {
final Map<String, String> responseMap = getAuthResponse(authURI, username, password);
final String ticketFrmResponse = responseMap.get(Constants.RESP_BODY);
final int startindex = ticketFrmResponse.indexOf("TICKET");
final int endindex = ticketFrmResponse.indexOf("</");
return ticketFrmResponse.substring(startindex, endindex);
}
/**
* Document upload.
*
* @param docFileObj the doc file obj
* @param authTicket the auth ticket
* @param uploadURI the upload uri
* @param siteID the site id
* @param uploadDir the upload dir
* @return the string
* @throws IOException Signals that an I/O exception has occurred.
*/
public String documentUpload(final File docFileObj,
final String authTicket, final String uploadURI,
final String siteID, final String uploadDir) throws IOException {
String uploadResponse = Constants.EMPTY;
PostMethod postRequest = null;
try {
final String uploadURL = getFileUploadURL(uploadURI,authTicket);
LOG.info("documentUpload() | Upload URL: " + uploadURL);
final HttpClient httpClient = new HttpClient();
postRequest = new PostMethod(uploadURL);
final String mimeType = getMimeType(docFileObj);
final String docName = docFileObj.getName();
LOG.debug("documentUpload() | Uploading document: "+docName+" , content-type: "+mimeType);
final Part[] parts = {
new FilePart("filedata", docName, docFileObj, mimeType,null),
new StringPart("filename", docName),
new StringPart("overwrite", "true"),
new StringPart("siteid",siteID),
new StringPart("containerid",ConfigReader.getProperty(Constants.CONTAINER_ID)),
new StringPart("uploaddirectory",uploadDir)
};
postRequest.setRequestEntity(new MultipartRequestEntity(parts, postRequest.getParams()));
final int statusCode = httpClient.executeMethod(postRequest);
uploadResponse = postRequest.getResponseBodyAsString();
LOG.info("documentUpload() | Upload status: "+statusCode);
LOG.debug("documentUpload() | Upload response: "+uploadResponse);
} finally{
if(postRequest!=null){
//releaseConnection http connection
postRequest.releaseConnection();
}
}
return uploadResponse;
}
/**
* Gets the auth url.
*
* @param path the path
* @param username the username
* @param password the password
* @return the url
*/
private String getAuthURL(final String path, final String username,
final String password) {
final StringBuffer urlStrb = new StringBuffer(path);
urlStrb.append(Constants.QUES);
urlStrb.append(Constants.U);
urlStrb.append(Constants.EQL);
urlStrb.append(username);
urlStrb.append(Constants.AMPERSND);
urlStrb.append(Constants.PW);
urlStrb.append(Constants.EQL);
urlStrb.append(password);
return urlStrb.toString();
}
/**
* Url file upload.
*
* @param path the path
* @param ticket the ticket
* @return the string
*/
private String getFileUploadURL(final String path, final String ticket) {
final StringBuffer urlStrb = new StringBuffer(path);
urlStrb.append(Constants.QUES);
urlStrb.append(Constants.TICKET_QRY);
urlStrb.append(Constants.EQL);
urlStrb.append(ticket);
return urlStrb.toString();
}
/**
* Gets the mime type.
*
* @param fileObj the file obj
* @return the mime type
* @throws IOException Signals that an I/O exception has occurred.
*/
public String getMimeType(final File fileObj) throws IOException {
final Path source = Paths.get(fileObj.getPath());
return Files.probeContentType(source);
}
}
| |
package org.opencb.bionetdb.server.rest;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.module.jsonSchema.JsonSchema;
import com.fasterxml.jackson.module.jsonSchema.factories.SchemaFactoryWrapper;
import com.google.common.base.Splitter;
import io.swagger.annotations.ApiParam;
import org.apache.commons.lang3.StringUtils;
import org.glassfish.jersey.server.ParamException;
import org.opencb.bionetdb.core.config.BioNetDBConfiguration;
import org.opencb.bionetdb.core.exceptions.BioNetDBException;
import org.opencb.bionetdb.core.response.BioNetDBResult;
import org.opencb.bionetdb.core.response.RestResponse;
import org.opencb.bionetdb.lib.BioNetDbManager;
import org.opencb.bionetdb.lib.api.NetworkDBAdaptor;
import org.opencb.bionetdb.server.exception.VersionException;
import org.opencb.commons.datastore.core.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.*;
import javax.ws.rs.core.*;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* Created by imedina on 01/10/15.
*/
@ApplicationPath("/")
@Path("/{apiVersion}")
@Produces(MediaType.APPLICATION_JSON)
public class GenericRestWSServer {
    /** API version taken from the URI path (e.g. "v1"). */
    @DefaultValue("")
    @PathParam("apiVersion")
    @ApiParam(name = "apiVersion", value = "Use 'latest' for last stable apiVersion", allowableValues = "v1", defaultValue = "v1")
    protected String apiVersion;
    // Target database; populated from the 'database' query parameter in parseQueryParams().
    protected String database;
    // protected String include;
    // protected String exclude;
    //
    // protected int limit;
    // protected int skip;
    // protected boolean count;
    // Query filters and options rebuilt from the URL parameters on every request.
    protected Query query;
    protected QueryOptions queryOptions;
    // protected QueryResponse queryResponse;
    protected UriInfo uriInfo;
    protected HttpServletRequest httpServletRequest;
    protected ObjectMap params;
    // Description of the current request, reported when building error responses.
    private String requestDescription;
    // Shared Jackson mapper/writer, configured once in the static initializer below.
    protected static ObjectMapper jsonObjectMapper;
    protected static ObjectWriter jsonObjectWriter;
    // Request start time in epoch millis; used to report elapsed time in responses.
    protected long startTime;
    protected static Logger logger;
    /**
     * Loading properties file just one time to be more efficient. All methods
     * will check parameters so to avoid extra operations this config can load
     * versions and database
     */
    protected static BioNetDBConfiguration bioNetDBConfiguration;
    @Deprecated
    protected static NetworkDBAdaptor networkDBAdaptor;
    protected static BioNetDbManager bioNetDBManager;
    // Guards the one-time initialization of the static BioNetDB objects (see init()).
    protected static AtomicBoolean initialized;
    // NOTE(review): two pairs of limit constants coexist; only DEFAULT_LIMIT/MAX_LIMIT are
    // referenced by parseQueryParams() in this part of the file — confirm whether
    // LIMIT_DEFAULT/LIMIT_MAX are still used elsewhere before removing either pair.
    private static final int LIMIT_DEFAULT = 1000;
    private static final int LIMIT_MAX = 5000;
    private static final int DEFAULT_LIMIT = 2000;
    private static final int MAX_LIMIT = 5000;
    static {
        // Runs once per classloader: set up logging and the shared Jackson mapper/writer.
        initialized = new AtomicBoolean(false);
        logger = LoggerFactory.getLogger("org.opencb.bionetdb.server.rest.GenericRestWSServer");
        jsonObjectMapper = new ObjectMapper();
        // Omit null fields from every serialized response.
        jsonObjectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
        jsonObjectWriter = jsonObjectMapper.writer();
        logger.info("End of Static block");
    }
    /**
     * Creates a server instance, resolving the API version from the request path parameters.
     *
     * @throws VersionException if no valid API version can be resolved
     */
    public GenericRestWSServer(@Context UriInfo uriInfo, @Context HttpServletRequest httpServletRequest) throws VersionException {
        this(uriInfo.getPathParameters().getFirst("apiVersion"), uriInfo, httpServletRequest);
    }
    /**
     * Creates a server instance for an explicit API version.
     *
     * @param apiVersion API version from the URI path (e.g. "v1")
     * @param uriInfo request URI information, used to parse query parameters
     * @param hsr underlying servlet request
     * @throws VersionException if no valid API version can be resolved
     */
    public GenericRestWSServer(@PathParam("apiVersion") String apiVersion, @Context UriInfo uriInfo, @Context HttpServletRequest hsr)
            throws VersionException {
        this.apiVersion = apiVersion;
        this.uriInfo = uriInfo;
        this.httpServletRequest = hsr;
        init();
    }
    /**
     * Per-request initialization: bootstraps the shared BioNetDB objects exactly once
     * (guarded by the {@code initialized} flag), then resets the query state and the
     * start time for this request.
     *
     * @throws VersionException if the API version cannot be resolved
     */
    private void init() throws VersionException {
        // This must be only executed once, this method loads the configuration and create the BioNetDBManagers
        if (initialized.compareAndSet(false, true)) {
            initBioNetDBObjects();
        }
        query = new Query();
        queryOptions = new QueryOptions();
        parseQueryParams();
        startTime = System.currentTimeMillis();
    }
/**
* This method loads the configuration and create the BioNetDB mangers, must be called once.
*/
private void initBioNetDBObjects() {
try {
if (System.getenv("BIONETDB_HOME") != null) {
logger.info("Loading configuration from '{}'", System.getenv("BIONETDB_HOME") + "/configuration.yml");
bioNetDBConfiguration = BioNetDBConfiguration
.load(new FileInputStream(new File(System.getenv("BIONETDB_HOME") + "/configuration.yml")));
} else {
// We read 'BIONETDB_HOME' parameter from the web.xml file
ServletContext context = httpServletRequest.getServletContext();
String bionetdbHome = context.getInitParameter("BIONETDB_HOME");
if (StringUtils.isNotEmpty(bionetdbHome)) {
logger.info("Loading configuration from '{}'", bionetdbHome + "/configuration.yml");
bioNetDBConfiguration = BioNetDBConfiguration
.load(new FileInputStream(new File(bionetdbHome + "/configuration.yml")));
} else {
logger.info("Loading configuration from '{}'",
BioNetDBConfiguration.class.getClassLoader().getResourceAsStream("configuration.yml").toString());
bioNetDBConfiguration = BioNetDBConfiguration
.load(BioNetDBConfiguration.class.getClassLoader().getResourceAsStream("configuration.yml"));
}
}
} catch (IOException e) {
e.printStackTrace();
}
// Init the manager map with the managers, this will allow methods to query the right database
// bioNetDBManagers = new HashMap<>();
if (bioNetDBConfiguration != null) {
try {
bioNetDBManager = new BioNetDbManager(bioNetDBConfiguration);
} catch (BioNetDBException e) {
e.printStackTrace();
}
}
}
private void parseQueryParams() throws VersionException {
// If by any reason 'apiVersion' is null we try to read it from the URI path, if not present an Exception is thrown
if (this.apiVersion == null) {
if (uriInfo.getPathParameters().containsKey("apiVersion")) {
logger.warn("Setting 'apiVersion' from UriInfo object");
this.apiVersion = uriInfo.getPathParameters().getFirst("apiVersion");
} else {
throw new VersionException("Version not valid: '" + apiVersion + "'");
}
}
// Default database is the first one in the configuration file
// if (bioNetDBConfiguration != null && bioNetDBConfiguration.getDatabases().size() > 0) {
// this.database = bioNetDBConfiguration.getDatabases().get(0).getId();
// }
// We parse the rest of URL params
MultivaluedMap<String, String> multivaluedMap = uriInfo.getQueryParameters();
for (Map.Entry<String, List<String>> entry : multivaluedMap.entrySet()) {
String value = entry.getValue().get(0);
switch (entry.getKey()) {
case QueryOptions.INCLUDE:
case QueryOptions.EXCLUDE:
queryOptions.put(entry.getKey(), new LinkedList<>(Splitter.on(",").splitToList(value)));
break;
case QueryOptions.LIMIT:
int limit = Integer.parseInt(value);
queryOptions.put(QueryOptions.LIMIT, (limit > 0) ? Math.min(limit, MAX_LIMIT) : DEFAULT_LIMIT);
break;
case QueryOptions.SKIP:
int skip = Integer.parseInt(value);
queryOptions.put(entry.getKey(), (skip >= 0) ? skip : -1);
break;
case QueryOptions.TIMEOUT:
queryOptions.put(entry.getKey(), Integer.parseInt(value));
break;
case QueryOptions.SORT:
case QueryOptions.ORDER:
queryOptions.put(entry.getKey(), value);
break;
case QueryOptions.COUNT:
queryOptions.put(entry.getKey(), Boolean.parseBoolean(value));
break;
case QueryOptions.SKIP_COUNT:
queryOptions.put(QueryOptions.SKIP_COUNT, Boolean.parseBoolean(value));
break;
case "database":
this.database = value;
break;
default:
// Query
query.put(entry.getKey(), value);
break;
}
}
}
// @GET
// @Path("/help")
// public Response help() {
// return createOkResponse("No help available");
// }
// @GET
// public Response defaultMethod() {
// switch (database) {
// case "echo":
// return createStringResponse("Status active");
// default:
// break;
// }
// return createOkResponse("Not valid option");
// }
protected Response createModelResponse(Class clazz) {
try {
ObjectMapper mapper = new ObjectMapper();
SchemaFactoryWrapper visitor = new SchemaFactoryWrapper();
mapper.acceptJsonFormatVisitor(mapper.constructType(clazz), visitor);
JsonSchema jsonSchema = visitor.finalSchema();
return createOkResponse(jsonSchema);
} catch (Exception e) {
return createErrorResponse(e);
}
}
// protected Response createErrorResponse(Exception e) {
// // First we print the exception in Server logs
// e.printStackTrace();
//
// // Now we prepare the response to client
// QueryResponse queryResponse = new QueryResponse();
// queryResponse.setTime(new Long(System.currentTimeMillis() - startTime).intValue());
// queryResponse.setApiVersion(apiVersion);
// queryResponse.setQueryOptions(queryOptions);
// queryResponse.setError(e.toString());
//
// QueryResult<ObjectMap> result = new QueryResult<>();
// result.setWarningMsg("Future errors will ONLY be shown in the QueryResponse body");
// result.setErrorMsg("DEPRECATED: " + e.toString());
// queryResponse.setResponse(Collections.singletonList(result));
//
// return Response
// .fromResponse(createJsonResponse(queryResponse))
// .status(Response.Status.INTERNAL_SERVER_ERROR)
// .build();
// }
//
// protected Response createErrorResponse(String method, String errorMessage) {
// try {
// return buildResponse(Response.ok(jsonObjectWriter.writeValueAsString(
// new HashMap<>().put("[ERROR] " + method, errorMessage)), MediaType.APPLICATION_JSON_TYPE));
// } catch (Exception e) {
// return createErrorResponse(e);
// }
// }
/**
 * Builds the standard error response for a throwable using this request's timing,
 * API version, description and parameters (delegates to the static overload).
 */
protected Response createErrorResponse(Throwable e) {
    return createErrorResponse(e, startTime, apiVersion, requestDescription, params);
}
/**
 * Builds the standard error {@link Response} for an exception: logs it, wraps it in a
 * {@link RestResponse} carrying an ERROR event and an empty result, and chooses the HTTP
 * status (the status already carried by a {@link WebApplicationException}, otherwise 500).
 *
 * @param e                  error to report
 * @param startTime          request start time (ms since epoch) used to compute elapsed time
 * @param apiVersion         API version echoed back to the client
 * @param requestDescription human-readable request description (currently only consumed by
 *                           the disabled logResponse call below)
 * @param params             request parameters echoed back to the client
 * @return JSON response carrying the error payload
 */
public static Response createErrorResponse(Throwable e, long startTime, String apiVersion, String requestDescription,
                                           ObjectMap params) {
    // First we print the exception in Server logs
    logger.error("Catch error: " + e.getMessage(), e);

    // Now we prepare the response to client
    RestResponse<ObjectMap> queryResponse = new RestResponse<>();
    // Plain (int) cast instead of the deprecated new Long(...).intValue() boxing dance.
    queryResponse.setTime((int) (System.currentTimeMillis() - startTime));
    queryResponse.setApiVersion(apiVersion);
    queryResponse.setParams(params);
    addErrorEvent(queryResponse, e);

    BioNetDBResult<ObjectMap> result = BioNetDBResult.empty();
    queryResponse.setResponses(Arrays.asList(result));

    Response.StatusType errorStatus;
    if (e instanceof WebApplicationException
            && ((WebApplicationException) e).getResponse() != null
            && ((WebApplicationException) e).getResponse().getStatusInfo() != null) {
        // Preserve the status the JAX-RS exception already carries (404, 403, ...).
        errorStatus = ((WebApplicationException) e).getResponse().getStatusInfo();
    } else {
        errorStatus = Response.Status.INTERNAL_SERVER_ERROR;
    }

    Response response = Response.fromResponse(createJsonResponse(queryResponse)).status(errorStatus).build();
//    logResponse(response.getStatusInfo(), queryResponse, startTime, requestDescription);
    return response;
}
/**
 * Builds a 500 response around an already-computed result, attaching the given message
 * as an ERROR event on the envelope.
 *
 * NOTE(review): unlike the Throwable overload, this one never calls setTime on the
 * RestResponse, so the elapsed time is absent — confirm whether that is intentional.
 */
protected Response createErrorResponse(String errorMessage, BioNetDBResult result) {
    RestResponse<ObjectMap> dataResponse = new RestResponse<>();
    dataResponse.setApiVersion(apiVersion);
    dataResponse.setParams(params);
    addErrorEvent(dataResponse, errorMessage);
    dataResponse.setResponses(Arrays.asList(result));
    Response response = Response.fromResponse(createJsonResponse(dataResponse)).status(Response.Status.INTERNAL_SERVER_ERROR).build();
//    logResponse(response.getStatusInfo(), dataResponse);
    return response;
}
/**
 * Builds a response whose body is {"error": errorMessage}.
 *
 * NOTE(review): the 'method' parameter is never used in the payload (the commented-out
 * predecessor of this method did include it) — confirm whether it should be part of the
 * message. Also note the HTTP status is 200 even though an error is being reported.
 */
protected Response createErrorResponse(String method, String errorMessage) {
    try {
        Response response = buildResponse(Response.ok(jsonObjectWriter.writeValueAsString(new ObjectMap("error", errorMessage)),
                MediaType.APPLICATION_JSON_TYPE));
//        logResponse(response.getStatusInfo());
        return response;
    } catch (JsonProcessingException e) {
//        e.printStackTrace();
        logger.error("Error creating error response", e);
    }
    // Fallback: hand-written JSON for when even the error message cannot be serialized.
    return buildResponse(Response.ok("{\"error\":\"Error parsing json error\"}", MediaType.APPLICATION_JSON_TYPE));
}
/** Appends an ERROR event with the given message, lazily creating the event list. */
static <T> void addErrorEvent(RestResponse<T> response, String message) {
    List<Event> events = response.getEvents();
    if (events == null) {
        events = new ArrayList<>();
        response.setEvents(events);
    }
    events.add(new Event(Event.Type.ERROR, message));
}
/**
 * Appends an ERROR event derived from a throwable. For JAX-RS query-parameter conversion
 * failures the cause's message is reported, since that is the informative one.
 */
private static <T> void addErrorEvent(RestResponse<T> response, Throwable e) {
    if (response.getEvents() == null) {
        response.setEvents(new ArrayList<>());
    }
    boolean unwrapCause = e instanceof ParamException.QueryParamException && e.getCause() != null;
    String message = unwrapCause ? e.getCause().getMessage() : e.getMessage();
    response.getEvents().add(
            new Event(Event.Type.ERROR, 0, e.getClass().getName(), e.getClass().getSimpleName(), message));
}
// TODO: Change signature
//  protected <T> Response createOkResponse(BioNetDBResult<T> result)
//  protected <T> Response createOkResponse(List<BioNetDBResult<T>> results)
/** Shorthand for {@link #createOkResponse(Object, List)} with no events attached. */
protected Response createOkResponse(Object obj) {
    return createOkResponse(obj, Collections.emptyList());
}
/**
 * Wraps an arbitrary result object into the standard {@link RestResponse} envelope.
 *
 * The envelope always carries a list of BioNetDBResult: a list of BioNetDBResult is used
 * as-is, a list of DataResult is wrapped element-wise, and any other object or list is
 * packed into a single synthetic result.
 *
 * @param obj    payload: a BioNetDBResult, a DataResult, a List of either, or any POJO
 * @param events events (warnings, infos, ...) to attach to the envelope
 * @return JSON response carrying the envelope
 */
protected Response createOkResponse(Object obj, List<Event> events) {
    RestResponse queryResponse = new RestResponse();
    // Plain (int) cast instead of the deprecated new Long(...).intValue() boxing dance.
    queryResponse.setTime((int) (System.currentTimeMillis() - startTime));
    queryResponse.setApiVersion(apiVersion);
    queryResponse.setParams(params);
    queryResponse.setEvents(events);

    // Guarantee that the RestResponse object contains a list of results
    List<BioNetDBResult<?>> list = new ArrayList<>();
    if (obj instanceof List) {
        if (!((List) obj).isEmpty()) {
            // Inspect the first element to decide how the list must be wrapped.
            Object firstObject = ((List) obj).get(0);
            if (firstObject instanceof BioNetDBResult) {
                list = (List) obj;
            } else if (firstObject instanceof DataResult) {
                List<DataResult> results = (List) obj;
                // We will cast each of the DataResults to OpenCGAResult
                for (DataResult result : results) {
                    list.add(new BioNetDBResult<>(result));
                }
            } else {
                // Plain list of POJOs: expose it as one single-result response.
                list = Collections.singletonList(new BioNetDBResult<>(0, Collections.emptyList(), 1, (List) obj, 1));
            }
        }
    } else {
        if (obj instanceof BioNetDBResult) {
            list.add(((BioNetDBResult) obj));
        } else if (obj instanceof DataResult) {
            list.add(new BioNetDBResult<>((DataResult) obj));
        } else {
            // Single POJO: wrap in a one-element synthetic result.
            list.add(new BioNetDBResult<>(0, Collections.emptyList(), 1, Collections.singletonList(obj), 1));
        }
    }
    queryResponse.setResponses(list);

    Response response = createJsonResponse(queryResponse);
//    logResponse(response.getStatusInfo(), queryResponse);
    return response;
}
// protected Response createOkResponse(Object obj) {
// QueryResponse queryResponse = new QueryResponse();
// queryResponse.setTime(new Long(System.currentTimeMillis() - startTime).intValue());
// queryResponse.setApiVersion(apiVersion);
// queryResponse.setQueryOptions(queryOptions);
//
// // Guarantee that the QueryResponse object contains a list of results
// List list;
// if (obj instanceof List) {
// list = (List) obj;
// } else {
// list = new ArrayList(1);
// list.add(obj);
// }
// queryResponse.setResponse(list);
// return createJsonResponse(queryResponse);
// }
/** Returns the object as-is under the given media type (no RestResponse envelope). */
protected Response createOkResponse(Object obj, MediaType mediaType) {
    return buildResponse(Response.ok(obj, mediaType));
}
/**
 * Returns the object as a downloadable attachment with the suggested file name.
 *
 * @param obj       response entity (typically a stream or byte array)
 * @param mediaType content type of the attachment
 * @param fileName  file name suggested to the client
 */
protected Response createOkResponse(Object obj, MediaType mediaType, String fileName) {
    // Fixed: removed the stray space before the value — "filename =x" is not a valid
    // Content-Disposition parameter per RFC 6266 and confuses some clients.
    return buildResponse(Response.ok(obj, mediaType).header("content-disposition", "attachment; filename=" + fileName));
}
/** Returns the raw string as the response body (default media type), with CORS headers. */
protected Response createStringResponse(String str) {
    return buildResponse(Response.ok(str));
}
/**
 * Serializes the envelope to JSON and wraps it in an OK response with CORS headers.
 *
 * @throws WebApplicationException if the envelope cannot be serialized
 */
static Response createJsonResponse(RestResponse queryResponse) {
    String json;
    try {
        json = jsonObjectWriter.writeValueAsString(queryResponse);
    } catch (JsonProcessingException e) {
        logger.error("Error parsing queryResponse object", e);
        throw new WebApplicationException("Error parsing queryResponse object", e);
    }
    return buildResponse(Response.ok(json, MediaType.APPLICATION_JSON_TYPE));
}
/** Finalizes a response, attaching the permissive CORS headers shared by every endpoint. */
protected static Response buildResponse(Response.ResponseBuilder responseBuilder) {
    responseBuilder.header("Access-Control-Allow-Origin", "*");
    responseBuilder.header("Access-Control-Allow-Headers", "x-requested-with, content-type, authorization");
    responseBuilder.header("Access-Control-Allow-Credentials", "true");
    responseBuilder.header("Access-Control-Allow-Methods", "GET, POST, OPTIONS");
    return responseBuilder.build();
}
// protected Response createJsonResponse(QueryResponse queryResponse) {
// try {
// return buildResponse(Response.ok(jsonObjectWriter.writeValueAsString(queryResponse), MediaType.APPLICATION_JSON_TYPE));
// } catch (JsonProcessingException e) {
// e.printStackTrace();
// logger.error("Error parsing queryResponse object");
// return createErrorResponse("", "Error parsing QueryResponse object:\n" + Arrays.toString(e.getStackTrace()));
// }
// }
//
// private Response buildResponse(Response.ResponseBuilder responseBuilder) {
// return responseBuilder
// .header("Access-Control-Allow-Origin", "*")
// .header("Access-Control-Allow-Headers", "x-requested-with, content-type, authorization")
// .header("Access-Control-Allow-Credentials", "true")
// .header("Access-Control-Allow-Methods", "GET, POST, OPTIONS")
// .build();
// }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v9/services/ad_group_bid_modifier_service.proto
package com.google.ads.googleads.v9.services;
/**
* <pre>
* A single operation (create, remove, update) on an ad group bid modifier.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v9.services.AdGroupBidModifierOperation}
*/
public final class AdGroupBidModifierOperation extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v9.services.AdGroupBidModifierOperation)
AdGroupBidModifierOperationOrBuilder {
private static final long serialVersionUID = 0L;
// Use AdGroupBidModifierOperation.newBuilder() to construct.
// NOTE(review): protobuf-generated code ("DO NOT EDIT") — hand edits are lost on regeneration.
// Use AdGroupBidModifierOperation.newBuilder() to construct.
private AdGroupBidModifierOperation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// Default-instance constructor; all fields keep their proto3 defaults.
private AdGroupBidModifierOperation() {
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
    UnusedPrivateParameter unused) {
  return new AdGroupBidModifierOperation();
}

@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
  return this.unknownFields;
}
// Generated wire-format parser (pre-2022 protobuf codegen style). Reads tag/value pairs
// until end of stream; unknown fields are preserved in unknownFields.
private AdGroupBidModifierOperation(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // Tag 0 marks end of stream.
          done = true;
          break;
        case 10: {
          // Field 1 (create): if the oneof already holds a create value, merge into it.
          com.google.ads.googleads.v9.resources.AdGroupBidModifier.Builder subBuilder = null;
          if (operationCase_ == 1) {
            subBuilder = ((com.google.ads.googleads.v9.resources.AdGroupBidModifier) operation_).toBuilder();
          }
          operation_ =
              input.readMessage(com.google.ads.googleads.v9.resources.AdGroupBidModifier.parser(), extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom((com.google.ads.googleads.v9.resources.AdGroupBidModifier) operation_);
            operation_ = subBuilder.buildPartial();
          }
          operationCase_ = 1;
          break;
        }
        case 18: {
          // Field 2 (update): same merge pattern as create.
          com.google.ads.googleads.v9.resources.AdGroupBidModifier.Builder subBuilder = null;
          if (operationCase_ == 2) {
            subBuilder = ((com.google.ads.googleads.v9.resources.AdGroupBidModifier) operation_).toBuilder();
          }
          operation_ =
              input.readMessage(com.google.ads.googleads.v9.resources.AdGroupBidModifier.parser(), extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom((com.google.ads.googleads.v9.resources.AdGroupBidModifier) operation_);
            operation_ = subBuilder.buildPartial();
          }
          operationCase_ = 2;
          break;
        }
        case 26: {
          // Field 3 (remove): a UTF-8 resource name string.
          java.lang.String s = input.readStringRequireUtf8();
          operationCase_ = 3;
          operation_ = s;
          break;
        }
        case 34: {
          // Field 4 (update_mask): singular message, merged if already present.
          com.google.protobuf.FieldMask.Builder subBuilder = null;
          if (updateMask_ != null) {
            subBuilder = updateMask_.toBuilder();
          }
          updateMask_ = input.readMessage(com.google.protobuf.FieldMask.parser(), extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(updateMask_);
            updateMask_ = subBuilder.buildPartial();
          }
          break;
        }
        default: {
          // Unknown field: keep its bytes unless parseUnknownField signals end of group.
          if (!parseUnknownField(
              input, unknownFields, extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
// Descriptor / reflection plumbing generated from the .proto file.
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return com.google.ads.googleads.v9.services.AdGroupBidModifierServiceProto.internal_static_google_ads_googleads_v9_services_AdGroupBidModifierOperation_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.ads.googleads.v9.services.AdGroupBidModifierServiceProto.internal_static_google_ads_googleads_v9_services_AdGroupBidModifierOperation_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.ads.googleads.v9.services.AdGroupBidModifierOperation.class, com.google.ads.googleads.v9.services.AdGroupBidModifierOperation.Builder.class);
}
// Oneof discriminator: which of create/update/remove is set (0 = none).
private int operationCase_ = 0;
// Active oneof value: AdGroupBidModifier for create/update, String/ByteString for remove.
private java.lang.Object operation_;
/** Java-side enum mirroring the proto 'operation' oneof cases. */
public enum OperationCase
    implements com.google.protobuf.Internal.EnumLite,
        com.google.protobuf.AbstractMessage.InternalOneOfEnum {
  CREATE(1),
  UPDATE(2),
  REMOVE(3),
  OPERATION_NOT_SET(0);
  private final int value;
  private OperationCase(int value) {
    this.value = value;
  }
  /**
   * @param value The number of the enum to look for.
   * @return The enum associated with the given number.
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static OperationCase valueOf(int value) {
    return forNumber(value);
  }

  // Returns null (not OPERATION_NOT_SET) for unrecognized numbers, per protobuf convention.
  public static OperationCase forNumber(int value) {
    switch (value) {
      case 1: return CREATE;
      case 2: return UPDATE;
      case 3: return REMOVE;
      case 0: return OPERATION_NOT_SET;
      default: return null;
    }
  }
  public int getNumber() {
    return this.value;
  }
};

public OperationCase
getOperationCase() {
  return OperationCase.forNumber(
      operationCase_);
}
public static final int UPDATE_MASK_FIELD_NUMBER = 4;
// Singular message field; null means unset (proto3 message-presence semantics).
private com.google.protobuf.FieldMask updateMask_;
/**
 * <pre>
 * FieldMask that determines which resource fields are modified in an update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 4;</code>
 * @return Whether the updateMask field is set.
 */
@java.lang.Override
public boolean hasUpdateMask() {
  return updateMask_ != null;
}
/**
 * <pre>
 * FieldMask that determines which resource fields are modified in an update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 4;</code>
 * @return The updateMask, or the default instance when unset.
 */
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
  return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
 * <pre>
 * FieldMask that determines which resource fields are modified in an update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 4;</code>
 */
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
  return getUpdateMask();
}
public static final int CREATE_FIELD_NUMBER = 1;
/**
 * <pre>
 * Create operation: No resource name is expected for the new ad group bid
 * modifier.
 * </pre>
 *
 * <code>.google.ads.googleads.v9.resources.AdGroupBidModifier create = 1;</code>
 * @return Whether the create field is set.
 */
@java.lang.Override
public boolean hasCreate() {
  return operationCase_ == 1;
}
/**
 * <pre>
 * Create operation: No resource name is expected for the new ad group bid
 * modifier.
 * </pre>
 *
 * <code>.google.ads.googleads.v9.resources.AdGroupBidModifier create = 1;</code>
 * @return The create, or the default instance if another oneof case is active.
 */
@java.lang.Override
public com.google.ads.googleads.v9.resources.AdGroupBidModifier getCreate() {
  if (operationCase_ == 1) {
    return (com.google.ads.googleads.v9.resources.AdGroupBidModifier) operation_;
  }
  return com.google.ads.googleads.v9.resources.AdGroupBidModifier.getDefaultInstance();
}
/**
 * <pre>
 * Create operation: No resource name is expected for the new ad group bid
 * modifier.
 * </pre>
 *
 * <code>.google.ads.googleads.v9.resources.AdGroupBidModifier create = 1;</code>
 */
@java.lang.Override
public com.google.ads.googleads.v9.resources.AdGroupBidModifierOrBuilder getCreateOrBuilder() {
  if (operationCase_ == 1) {
    return (com.google.ads.googleads.v9.resources.AdGroupBidModifier) operation_;
  }
  return com.google.ads.googleads.v9.resources.AdGroupBidModifier.getDefaultInstance();
}
public static final int UPDATE_FIELD_NUMBER = 2;
/**
 * <pre>
 * Update operation: The ad group bid modifier is expected to have a valid
 * resource name.
 * </pre>
 *
 * <code>.google.ads.googleads.v9.resources.AdGroupBidModifier update = 2;</code>
 * @return Whether the update field is set.
 */
@java.lang.Override
public boolean hasUpdate() {
  return operationCase_ == 2;
}
/**
 * <pre>
 * Update operation: The ad group bid modifier is expected to have a valid
 * resource name.
 * </pre>
 *
 * <code>.google.ads.googleads.v9.resources.AdGroupBidModifier update = 2;</code>
 * @return The update, or the default instance if another oneof case is active.
 */
@java.lang.Override
public com.google.ads.googleads.v9.resources.AdGroupBidModifier getUpdate() {
  if (operationCase_ == 2) {
    return (com.google.ads.googleads.v9.resources.AdGroupBidModifier) operation_;
  }
  return com.google.ads.googleads.v9.resources.AdGroupBidModifier.getDefaultInstance();
}
/**
 * <pre>
 * Update operation: The ad group bid modifier is expected to have a valid
 * resource name.
 * </pre>
 *
 * <code>.google.ads.googleads.v9.resources.AdGroupBidModifier update = 2;</code>
 */
@java.lang.Override
public com.google.ads.googleads.v9.resources.AdGroupBidModifierOrBuilder getUpdateOrBuilder() {
  if (operationCase_ == 2) {
    return (com.google.ads.googleads.v9.resources.AdGroupBidModifier) operation_;
  }
  return com.google.ads.googleads.v9.resources.AdGroupBidModifier.getDefaultInstance();
}
public static final int REMOVE_FIELD_NUMBER = 3;
/**
 * <pre>
 * Remove operation: A resource name for the removed ad group bid modifier
 * is expected, in this format:
 * `customers/{customer_id}/adGroupBidModifiers/{ad_group_id}~{criterion_id}`
 * </pre>
 *
 * <code>string remove = 3;</code>
 * @return Whether the remove field is set.
 */
public boolean hasRemove() {
  return operationCase_ == 3;
}
/**
 * <pre>
 * Remove operation: A resource name for the removed ad group bid modifier
 * is expected, in this format:
 * `customers/{customer_id}/adGroupBidModifiers/{ad_group_id}~{criterion_id}`
 * </pre>
 *
 * <code>string remove = 3;</code>
 * @return The remove.
 */
public java.lang.String getRemove() {
  java.lang.Object ref = "";
  if (operationCase_ == 3) {
    ref = operation_;
  }
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Lazily decode the ByteString form and cache the String back into the oneof slot.
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    if (operationCase_ == 3) {
      operation_ = s;
    }
    return s;
  }
}
/**
 * <pre>
 * Remove operation: A resource name for the removed ad group bid modifier
 * is expected, in this format:
 * `customers/{customer_id}/adGroupBidModifiers/{ad_group_id}~{criterion_id}`
 * </pre>
 *
 * <code>string remove = 3;</code>
 * @return The bytes for remove.
 */
public com.google.protobuf.ByteString
    getRemoveBytes() {
  java.lang.Object ref = "";
  if (operationCase_ == 3) {
    ref = operation_;
  }
  if (ref instanceof java.lang.String) {
    // Lazily encode the String form and cache the ByteString back into the oneof slot.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    if (operationCase_ == 3) {
      operation_ = b;
    }
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized initialization check: -1 unknown, 0 false, 1 true. Always true here
// (no required fields in proto3).
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
// Serializes set fields in field-number order; the oneof contributes at most one field.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  if (operationCase_ == 1) {
    output.writeMessage(1, (com.google.ads.googleads.v9.resources.AdGroupBidModifier) operation_);
  }
  if (operationCase_ == 2) {
    output.writeMessage(2, (com.google.ads.googleads.v9.resources.AdGroupBidModifier) operation_);
  }
  if (operationCase_ == 3) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, operation_);
  }
  if (updateMask_ != null) {
    output.writeMessage(4, getUpdateMask());
  }
  unknownFields.writeTo(output);
}
// Computes and memoizes the serialized byte size; mirrors writeTo field-by-field.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (operationCase_ == 1) {
    size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, (com.google.ads.googleads.v9.resources.AdGroupBidModifier) operation_);
  }
  if (operationCase_ == 2) {
    size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(2, (com.google.ads.googleads.v9.resources.AdGroupBidModifier) operation_);
  }
  if (operationCase_ == 3) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, operation_);
  }
  if (updateMask_ != null) {
    size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(4, getUpdateMask());
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
// Value equality over update_mask, the active oneof case/value, and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.ads.googleads.v9.services.AdGroupBidModifierOperation)) {
    return super.equals(obj);
  }
  com.google.ads.googleads.v9.services.AdGroupBidModifierOperation other = (com.google.ads.googleads.v9.services.AdGroupBidModifierOperation) obj;

  if (hasUpdateMask() != other.hasUpdateMask()) return false;
  if (hasUpdateMask()) {
    if (!getUpdateMask()
        .equals(other.getUpdateMask())) return false;
  }
  if (!getOperationCase().equals(other.getOperationCase())) return false;
  switch (operationCase_) {
    case 1:
      if (!getCreate()
          .equals(other.getCreate())) return false;
      break;
    case 2:
      if (!getUpdate()
          .equals(other.getUpdate())) return false;
      break;
    case 3:
      if (!getRemove()
          .equals(other.getRemove())) return false;
      break;
    case 0:
    default:
      // OPERATION_NOT_SET: nothing further to compare.
  }
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}
// Memoized hash consistent with equals(): folds in update_mask, the active oneof
// field, and unknown fields using the standard 37/53 generated-code scheme.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasUpdateMask()) {
    hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
    hash = (53 * hash) + getUpdateMask().hashCode();
  }
  switch (operationCase_) {
    case 1:
      hash = (37 * hash) + CREATE_FIELD_NUMBER;
      hash = (53 * hash) + getCreate().hashCode();
      break;
    case 2:
      hash = (37 * hash) + UPDATE_FIELD_NUMBER;
      hash = (53 * hash) + getUpdate().hashCode();
      break;
    case 3:
      hash = (37 * hash) + REMOVE_FIELD_NUMBER;
      hash = (53 * hash) + getRemove().hashCode();
      break;
    case 0:
    default:
      // OPERATION_NOT_SET contributes nothing.
  }
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points, one per input source type. All delegate to
// PARSER (byte sources) or GeneratedMessageV3 helpers (stream sources).
public static com.google.ads.googleads.v9.services.AdGroupBidModifierOperation parseFrom(
    java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v9.services.AdGroupBidModifierOperation parseFrom(
    java.nio.ByteBuffer data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v9.services.AdGroupBidModifierOperation parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v9.services.AdGroupBidModifierOperation parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v9.services.AdGroupBidModifierOperation parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v9.services.AdGroupBidModifierOperation parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v9.services.AdGroupBidModifierOperation parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v9.services.AdGroupBidModifierOperation parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a length-prefixed message (writeDelimitedTo counterpart).
public static com.google.ads.googleads.v9.services.AdGroupBidModifierOperation parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v9.services.AdGroupBidModifierOperation parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v9.services.AdGroupBidModifierOperation parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v9.services.AdGroupBidModifierOperation parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods: builders are created from (or merged with) the default instance.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v9.services.AdGroupBidModifierOperation prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // Avoid a needless mergeFrom when this is already the (empty) default instance.
  return this == DEFAULT_INSTANCE
      ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
* <pre>
* A single operation (create, remove, update) on an ad group bid modifier.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v9.services.AdGroupBidModifierOperation}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v9.services.AdGroupBidModifierOperation)
com.google.ads.googleads.v9.services.AdGroupBidModifierOperationOrBuilder {
// Builder descriptor plumbing and constructors (generated).
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return com.google.ads.googleads.v9.services.AdGroupBidModifierServiceProto.internal_static_google_ads_googleads_v9_services_AdGroupBidModifierOperation_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.ads.googleads.v9.services.AdGroupBidModifierServiceProto.internal_static_google_ads_googleads_v9_services_AdGroupBidModifierOperation_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.ads.googleads.v9.services.AdGroupBidModifierOperation.class, com.google.ads.googleads.v9.services.AdGroupBidModifierOperation.Builder.class);
}

// Construct using com.google.ads.googleads.v9.services.AdGroupBidModifierOperation.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}

private Builder(
    com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}
// No-op here: no nested-message field builders need eager initialization.
private void maybeForceBuilderInitialization() {
  if (com.google.protobuf.GeneratedMessageV3
      .alwaysUseFieldBuilders) {
  }
}
// Resets all fields (update_mask and the operation oneof) to their defaults.
@java.lang.Override
public Builder clear() {
  super.clear();
  if (updateMaskBuilder_ == null) {
    updateMask_ = null;
  } else {
    updateMask_ = null;
    updateMaskBuilder_ = null;
  }
  operationCase_ = 0;
  operation_ = null;
  return this;
}

@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
    getDescriptorForType() {
  return com.google.ads.googleads.v9.services.AdGroupBidModifierServiceProto.internal_static_google_ads_googleads_v9_services_AdGroupBidModifierOperation_descriptor;
}

@java.lang.Override
public com.google.ads.googleads.v9.services.AdGroupBidModifierOperation getDefaultInstanceForType() {
  return com.google.ads.googleads.v9.services.AdGroupBidModifierOperation.getDefaultInstance();
}

// build() enforces isInitialized(); always true for proto3, so this never throws here.
@java.lang.Override
public com.google.ads.googleads.v9.services.AdGroupBidModifierOperation build() {
  com.google.ads.googleads.v9.services.AdGroupBidModifierOperation result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}
// Copies builder state into a new message: update_mask from its field/builder, then the
// single active oneof value (message from its sub-builder if present, string directly).
@java.lang.Override
public com.google.ads.googleads.v9.services.AdGroupBidModifierOperation buildPartial() {
  com.google.ads.googleads.v9.services.AdGroupBidModifierOperation result = new com.google.ads.googleads.v9.services.AdGroupBidModifierOperation(this);
  if (updateMaskBuilder_ == null) {
    result.updateMask_ = updateMask_;
  } else {
    result.updateMask_ = updateMaskBuilder_.build();
  }
  if (operationCase_ == 1) {
    if (createBuilder_ == null) {
      result.operation_ = operation_;
    } else {
      result.operation_ = createBuilder_.build();
    }
  }
  if (operationCase_ == 2) {
    if (updateBuilder_ == null) {
      result.operation_ = operation_;
    } else {
      result.operation_ = updateBuilder_.build();
    }
  }
  if (operationCase_ == 3) {
    result.operation_ = operation_;
  }
  result.operationCase_ = operationCase_;
  onBuilt();
  return result;
}
// Reflection-based field mutators: generated pass-throughs to GeneratedMessageV3.Builder.
@java.lang.Override
public Builder clone() {
  return super.clone();
}
@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field,
    java.lang.Object value) {
  return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
    com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
    com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field,
    int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field,
    java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
// Typed merge dispatch: use the specific overload when possible, otherwise fall back
// to the reflective generic merge.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.ads.googleads.v9.services.AdGroupBidModifierOperation) {
    return mergeFrom((com.google.ads.googleads.v9.services.AdGroupBidModifierOperation)other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

public Builder mergeFrom(com.google.ads.googleads.v9.services.AdGroupBidModifierOperation other) {
  if (other == com.google.ads.googleads.v9.services.AdGroupBidModifierOperation.getDefaultInstance()) return this;
  if (other.hasUpdateMask()) {
    mergeUpdateMask(other.getUpdateMask());
  }
  // Only the oneof case that is set in 'other' is merged over.
  switch (other.getOperationCase()) {
    case CREATE: {
      mergeCreate(other.getCreate());
      break;
    }
    case UPDATE: {
      mergeUpdate(other.getUpdate());
      break;
    }
    case REMOVE: {
      operationCase_ = 3;
      operation_ = other.operation_;
      onChanged();
      break;
    }
    case OPERATION_NOT_SET: {
      break;
    }
  }
  this.mergeUnknownFields(other.unknownFields);
  onChanged();
  return this;
}

@java.lang.Override
public final boolean isInitialized() {
  return true;
}
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  // Parses a serialized message from the wire and merges it into this builder.
  com.google.ads.googleads.v9.services.AdGroupBidModifierOperation parsedMessage = null;
  try {
    parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    // Keep whatever was parsed before the failure so partial data is merged
    // in the finally block, then propagate the underlying IOException.
    parsedMessage = (com.google.ads.googleads.v9.services.AdGroupBidModifierOperation) e.getUnfinishedMessage();
    throw e.unwrapIOException();
  } finally {
    if (parsedMessage != null) {
      mergeFrom(parsedMessage);
    }
  }
  return this;
}
// Oneof discriminator: 0 = OPERATION_NOT_SET, 1 = create, 2 = update, 3 = remove.
private int operationCase_ = 0;
// Storage for the active oneof value: a message for create/update, or a
// String/ByteString for remove.
private java.lang.Object operation_;
/** Identifies which {@code operation} oneof field is currently populated. */
public OperationCase getOperationCase() {
  return OperationCase.forNumber(operationCase_);
}
// Resets the oneof to OPERATION_NOT_SET and drops any held value.
public Builder clearOperation() {
  operationCase_ = 0;
  operation_ = null;
  onChanged();
  return this;
}
// Plain storage for update_mask; used until a field builder is requested.
private com.google.protobuf.FieldMask updateMask_;
// Lazily created sub-builder; once non-null it owns the field value.
private com.google.protobuf.SingleFieldBuilderV3<
    com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_;
/**
 * <pre>
 * FieldMask that determines which resource fields are modified in an update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 4;</code>
 * @return Whether the updateMask field is set.
 */
public boolean hasUpdateMask() {
  // Present when either the plain field or its sub-builder holds a value.
  return (updateMask_ != null) || (updateMaskBuilder_ != null);
}
/**
 * <pre>
 * FieldMask that determines which resource fields are modified in an update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 4;</code>
 * @return The updateMask, or the default instance when unset.
 */
public com.google.protobuf.FieldMask getUpdateMask() {
  if (updateMaskBuilder_ != null) {
    // Sub-builder owns the value once created.
    return updateMaskBuilder_.getMessage();
  }
  return updateMask_ != null
      ? updateMask_
      : com.google.protobuf.FieldMask.getDefaultInstance();
}
/**
* <pre>
* FieldMask that determines which resource fields are modified in an update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 4;</code>
*/
// Replaces update_mask with the given message. Rejects null.
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
  if (updateMaskBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    updateMask_ = value;
    onChanged();
  } else {
    // Builder owns the field; route the set through it.
    updateMaskBuilder_.setMessage(value);
  }
  return this;
}
/**
* <pre>
* FieldMask that determines which resource fields are modified in an update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 4;</code>
*/
// Replaces update_mask with the built value of the given sub-builder.
public Builder setUpdateMask(
    com.google.protobuf.FieldMask.Builder builderForValue) {
  if (updateMaskBuilder_ == null) {
    updateMask_ = builderForValue.build();
    onChanged();
  } else {
    updateMaskBuilder_.setMessage(builderForValue.build());
  }
  return this;
}
/**
* <pre>
* FieldMask that determines which resource fields are modified in an update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 4;</code>
*/
// Merges the given FieldMask into the current update_mask (field-wise merge
// when one already exists, plain assignment otherwise).
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
  if (updateMaskBuilder_ == null) {
    if (updateMask_ != null) {
      updateMask_ =
        com.google.protobuf.FieldMask.newBuilder(updateMask_).mergeFrom(value).buildPartial();
    } else {
      updateMask_ = value;
    }
    onChanged();
  } else {
    updateMaskBuilder_.mergeFrom(value);
  }
  return this;
}
/**
* <pre>
* FieldMask that determines which resource fields are modified in an update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 4;</code>
*/
// Clears update_mask; also discards the sub-builder if one was created.
public Builder clearUpdateMask() {
  if (updateMaskBuilder_ == null) {
    updateMask_ = null;
    onChanged();
  } else {
    updateMask_ = null;
    updateMaskBuilder_ = null;
  }
  return this;
}
/**
* <pre>
* FieldMask that determines which resource fields are modified in an update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 4;</code>
*/
// Returns a mutable sub-builder for update_mask, creating it on first use.
// Marks this builder dirty because the caller may mutate through it.
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
  onChanged();
  return getUpdateMaskFieldBuilder().getBuilder();
}
/**
* <pre>
* FieldMask that determines which resource fields are modified in an update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 4;</code>
*/
/** Read-only view of update_mask; never null (default instance when unset). */
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
  if (updateMaskBuilder_ != null) {
    return updateMaskBuilder_.getMessageOrBuilder();
  }
  return updateMask_ != null
      ? updateMask_
      : com.google.protobuf.FieldMask.getDefaultInstance();
}
/**
* <pre>
* FieldMask that determines which resource fields are modified in an update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 4;</code>
*/
// Lazily creates the SingleFieldBuilderV3 for update_mask and hands the
// current value over to it (plain field is nulled once the builder owns it).
private com.google.protobuf.SingleFieldBuilderV3<
    com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>
    getUpdateMaskFieldBuilder() {
  if (updateMaskBuilder_ == null) {
    updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>(
            getUpdateMask(),
            getParentForChildren(),
            isClean());
    updateMask_ = null;
  }
  return updateMaskBuilder_;
}
// Lazily created sub-builder for the `create` oneof case; while non-null it
// owns the value instead of operation_.
private com.google.protobuf.SingleFieldBuilderV3<
    com.google.ads.googleads.v9.resources.AdGroupBidModifier, com.google.ads.googleads.v9.resources.AdGroupBidModifier.Builder, com.google.ads.googleads.v9.resources.AdGroupBidModifierOrBuilder> createBuilder_;
/**
* <pre>
* Create operation: No resource name is expected for the new ad group bid
* modifier.
* </pre>
*
* <code>.google.ads.googleads.v9.resources.AdGroupBidModifier create = 1;</code>
* @return Whether the create field is set.
*/
@java.lang.Override
public boolean hasCreate() {
  // True iff the oneof discriminator selects the `create` case (field 1).
  return operationCase_ == 1;
}
/**
* <pre>
* Create operation: No resource name is expected for the new ad group bid
* modifier.
* </pre>
*
* <code>.google.ads.googleads.v9.resources.AdGroupBidModifier create = 1;</code>
* @return The create.
*/
@java.lang.Override
public com.google.ads.googleads.v9.resources.AdGroupBidModifier getCreate() {
  // Default instance unless the oneof currently holds the `create` case.
  if (operationCase_ != 1) {
    return com.google.ads.googleads.v9.resources.AdGroupBidModifier.getDefaultInstance();
  }
  return createBuilder_ == null
      ? (com.google.ads.googleads.v9.resources.AdGroupBidModifier) operation_
      : createBuilder_.getMessage();
}
/**
* <pre>
* Create operation: No resource name is expected for the new ad group bid
* modifier.
* </pre>
*
* <code>.google.ads.googleads.v9.resources.AdGroupBidModifier create = 1;</code>
*/
// Sets the `create` oneof case to the given message. Rejects null.
public Builder setCreate(com.google.ads.googleads.v9.resources.AdGroupBidModifier value) {
  if (createBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    operation_ = value;
    onChanged();
  } else {
    createBuilder_.setMessage(value);
  }
  // Switching the discriminator implicitly clears any other oneof case.
  operationCase_ = 1;
  return this;
}
/**
* <pre>
* Create operation: No resource name is expected for the new ad group bid
* modifier.
* </pre>
*
* <code>.google.ads.googleads.v9.resources.AdGroupBidModifier create = 1;</code>
*/
// Sets the `create` oneof case from a sub-builder's built value.
public Builder setCreate(
    com.google.ads.googleads.v9.resources.AdGroupBidModifier.Builder builderForValue) {
  if (createBuilder_ == null) {
    operation_ = builderForValue.build();
    onChanged();
  } else {
    createBuilder_.setMessage(builderForValue.build());
  }
  operationCase_ = 1;
  return this;
}
/**
* <pre>
* Create operation: No resource name is expected for the new ad group bid
* modifier.
* </pre>
*
* <code>.google.ads.googleads.v9.resources.AdGroupBidModifier create = 1;</code>
*/
// Merges the given message into the `create` oneof case: field-wise merge when
// `create` is already set, plain assignment otherwise.
public Builder mergeCreate(com.google.ads.googleads.v9.resources.AdGroupBidModifier value) {
  if (createBuilder_ == null) {
    if (operationCase_ == 1 &&
        operation_ != com.google.ads.googleads.v9.resources.AdGroupBidModifier.getDefaultInstance()) {
      operation_ = com.google.ads.googleads.v9.resources.AdGroupBidModifier.newBuilder((com.google.ads.googleads.v9.resources.AdGroupBidModifier) operation_)
          .mergeFrom(value).buildPartial();
    } else {
      operation_ = value;
    }
    onChanged();
  } else {
    if (operationCase_ == 1) {
      // BUGFIX: the generated code called setMessage(value) unconditionally
      // after mergeFrom, discarding the merged result; mirror the upstream
      // protobuf generator fix by making set the else-branch only.
      createBuilder_.mergeFrom(value);
    } else {
      createBuilder_.setMessage(value);
    }
  }
  operationCase_ = 1;
  return this;
}
/**
* <pre>
* Create operation: No resource name is expected for the new ad group bid
* modifier.
* </pre>
*
* <code>.google.ads.googleads.v9.resources.AdGroupBidModifier create = 1;</code>
*/
// Clears the `create` case; no-op on the discriminator if another case is set.
public Builder clearCreate() {
  if (createBuilder_ == null) {
    if (operationCase_ == 1) {
      operationCase_ = 0;
      operation_ = null;
      onChanged();
    }
  } else {
    if (operationCase_ == 1) {
      operationCase_ = 0;
      operation_ = null;
    }
    // Builder path: clear() performs its own change notification.
    createBuilder_.clear();
  }
  return this;
}
/**
* <pre>
* Create operation: No resource name is expected for the new ad group bid
* modifier.
* </pre>
*
* <code>.google.ads.googleads.v9.resources.AdGroupBidModifier create = 1;</code>
*/
// Returns a mutable sub-builder for `create`, forcing the oneof to that case.
public com.google.ads.googleads.v9.resources.AdGroupBidModifier.Builder getCreateBuilder() {
  return getCreateFieldBuilder().getBuilder();
}
/**
* <pre>
* Create operation: No resource name is expected for the new ad group bid
* modifier.
* </pre>
*
* <code>.google.ads.googleads.v9.resources.AdGroupBidModifier create = 1;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v9.resources.AdGroupBidModifierOrBuilder getCreateOrBuilder() {
  // Read-only view; default instance when the `create` case is not active.
  if (operationCase_ != 1) {
    return com.google.ads.googleads.v9.resources.AdGroupBidModifier.getDefaultInstance();
  }
  return createBuilder_ != null
      ? createBuilder_.getMessageOrBuilder()
      : (com.google.ads.googleads.v9.resources.AdGroupBidModifier) operation_;
}
/**
* <pre>
* Create operation: No resource name is expected for the new ad group bid
* modifier.
* </pre>
*
* <code>.google.ads.googleads.v9.resources.AdGroupBidModifier create = 1;</code>
*/
// Lazily creates the SingleFieldBuilderV3 for the `create` case, seeding it
// with the current value (or the default instance when another case is
// active), and switches the oneof to `create`.
private com.google.protobuf.SingleFieldBuilderV3<
    com.google.ads.googleads.v9.resources.AdGroupBidModifier, com.google.ads.googleads.v9.resources.AdGroupBidModifier.Builder, com.google.ads.googleads.v9.resources.AdGroupBidModifierOrBuilder>
    getCreateFieldBuilder() {
  if (createBuilder_ == null) {
    if (!(operationCase_ == 1)) {
      operation_ = com.google.ads.googleads.v9.resources.AdGroupBidModifier.getDefaultInstance();
    }
    createBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
        com.google.ads.googleads.v9.resources.AdGroupBidModifier, com.google.ads.googleads.v9.resources.AdGroupBidModifier.Builder, com.google.ads.googleads.v9.resources.AdGroupBidModifierOrBuilder>(
            (com.google.ads.googleads.v9.resources.AdGroupBidModifier) operation_,
            getParentForChildren(),
            isClean());
    operation_ = null;  // ownership transferred to the builder
  }
  operationCase_ = 1;
  onChanged();  // removed stray empty statement (";;")
  return createBuilder_;
}
// Lazily created sub-builder for the `update` oneof case; while non-null it
// owns the value instead of operation_.
private com.google.protobuf.SingleFieldBuilderV3<
    com.google.ads.googleads.v9.resources.AdGroupBidModifier, com.google.ads.googleads.v9.resources.AdGroupBidModifier.Builder, com.google.ads.googleads.v9.resources.AdGroupBidModifierOrBuilder> updateBuilder_;
/**
* <pre>
* Update operation: The ad group bid modifier is expected to have a valid
* resource name.
* </pre>
*
* <code>.google.ads.googleads.v9.resources.AdGroupBidModifier update = 2;</code>
* @return Whether the update field is set.
*/
@java.lang.Override
public boolean hasUpdate() {
  // True iff the oneof discriminator selects the `update` case (field 2).
  return operationCase_ == 2;
}
/**
* <pre>
* Update operation: The ad group bid modifier is expected to have a valid
* resource name.
* </pre>
*
* <code>.google.ads.googleads.v9.resources.AdGroupBidModifier update = 2;</code>
* @return The update.
*/
@java.lang.Override
public com.google.ads.googleads.v9.resources.AdGroupBidModifier getUpdate() {
  // Default instance unless the oneof currently holds the `update` case.
  if (operationCase_ != 2) {
    return com.google.ads.googleads.v9.resources.AdGroupBidModifier.getDefaultInstance();
  }
  return updateBuilder_ == null
      ? (com.google.ads.googleads.v9.resources.AdGroupBidModifier) operation_
      : updateBuilder_.getMessage();
}
/**
* <pre>
* Update operation: The ad group bid modifier is expected to have a valid
* resource name.
* </pre>
*
* <code>.google.ads.googleads.v9.resources.AdGroupBidModifier update = 2;</code>
*/
// Sets the `update` oneof case to the given message. Rejects null.
public Builder setUpdate(com.google.ads.googleads.v9.resources.AdGroupBidModifier value) {
  if (updateBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    operation_ = value;
    onChanged();
  } else {
    updateBuilder_.setMessage(value);
  }
  // Switching the discriminator implicitly clears any other oneof case.
  operationCase_ = 2;
  return this;
}
/**
* <pre>
* Update operation: The ad group bid modifier is expected to have a valid
* resource name.
* </pre>
*
* <code>.google.ads.googleads.v9.resources.AdGroupBidModifier update = 2;</code>
*/
// Sets the `update` oneof case from a sub-builder's built value.
public Builder setUpdate(
    com.google.ads.googleads.v9.resources.AdGroupBidModifier.Builder builderForValue) {
  if (updateBuilder_ == null) {
    operation_ = builderForValue.build();
    onChanged();
  } else {
    updateBuilder_.setMessage(builderForValue.build());
  }
  operationCase_ = 2;
  return this;
}
/**
* <pre>
* Update operation: The ad group bid modifier is expected to have a valid
* resource name.
* </pre>
*
* <code>.google.ads.googleads.v9.resources.AdGroupBidModifier update = 2;</code>
*/
// Merges the given message into the `update` oneof case: field-wise merge when
// `update` is already set, plain assignment otherwise.
public Builder mergeUpdate(com.google.ads.googleads.v9.resources.AdGroupBidModifier value) {
  if (updateBuilder_ == null) {
    if (operationCase_ == 2 &&
        operation_ != com.google.ads.googleads.v9.resources.AdGroupBidModifier.getDefaultInstance()) {
      operation_ = com.google.ads.googleads.v9.resources.AdGroupBidModifier.newBuilder((com.google.ads.googleads.v9.resources.AdGroupBidModifier) operation_)
          .mergeFrom(value).buildPartial();
    } else {
      operation_ = value;
    }
    onChanged();
  } else {
    if (operationCase_ == 2) {
      // BUGFIX: the generated code called setMessage(value) unconditionally
      // after mergeFrom, discarding the merged result; mirror the upstream
      // protobuf generator fix by making set the else-branch only.
      updateBuilder_.mergeFrom(value);
    } else {
      updateBuilder_.setMessage(value);
    }
  }
  operationCase_ = 2;
  return this;
}
/**
* <pre>
* Update operation: The ad group bid modifier is expected to have a valid
* resource name.
* </pre>
*
* <code>.google.ads.googleads.v9.resources.AdGroupBidModifier update = 2;</code>
*/
// Clears the `update` case; no-op on the discriminator if another case is set.
public Builder clearUpdate() {
  if (updateBuilder_ == null) {
    if (operationCase_ == 2) {
      operationCase_ = 0;
      operation_ = null;
      onChanged();
    }
  } else {
    if (operationCase_ == 2) {
      operationCase_ = 0;
      operation_ = null;
    }
    // Builder path: clear() performs its own change notification.
    updateBuilder_.clear();
  }
  return this;
}
/**
* <pre>
* Update operation: The ad group bid modifier is expected to have a valid
* resource name.
* </pre>
*
* <code>.google.ads.googleads.v9.resources.AdGroupBidModifier update = 2;</code>
*/
// Returns a mutable sub-builder for `update`, forcing the oneof to that case.
public com.google.ads.googleads.v9.resources.AdGroupBidModifier.Builder getUpdateBuilder() {
  return getUpdateFieldBuilder().getBuilder();
}
/**
* <pre>
* Update operation: The ad group bid modifier is expected to have a valid
* resource name.
* </pre>
*
* <code>.google.ads.googleads.v9.resources.AdGroupBidModifier update = 2;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v9.resources.AdGroupBidModifierOrBuilder getUpdateOrBuilder() {
  // Read-only view; default instance when the `update` case is not active.
  if (operationCase_ != 2) {
    return com.google.ads.googleads.v9.resources.AdGroupBidModifier.getDefaultInstance();
  }
  return updateBuilder_ != null
      ? updateBuilder_.getMessageOrBuilder()
      : (com.google.ads.googleads.v9.resources.AdGroupBidModifier) operation_;
}
/**
* <pre>
* Update operation: The ad group bid modifier is expected to have a valid
* resource name.
* </pre>
*
* <code>.google.ads.googleads.v9.resources.AdGroupBidModifier update = 2;</code>
*/
// Lazily creates the SingleFieldBuilderV3 for the `update` case, seeding it
// with the current value (or the default instance when another case is
// active), and switches the oneof to `update`.
private com.google.protobuf.SingleFieldBuilderV3<
    com.google.ads.googleads.v9.resources.AdGroupBidModifier, com.google.ads.googleads.v9.resources.AdGroupBidModifier.Builder, com.google.ads.googleads.v9.resources.AdGroupBidModifierOrBuilder>
    getUpdateFieldBuilder() {
  if (updateBuilder_ == null) {
    if (!(operationCase_ == 2)) {
      operation_ = com.google.ads.googleads.v9.resources.AdGroupBidModifier.getDefaultInstance();
    }
    updateBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
        com.google.ads.googleads.v9.resources.AdGroupBidModifier, com.google.ads.googleads.v9.resources.AdGroupBidModifier.Builder, com.google.ads.googleads.v9.resources.AdGroupBidModifierOrBuilder>(
            (com.google.ads.googleads.v9.resources.AdGroupBidModifier) operation_,
            getParentForChildren(),
            isClean());
    operation_ = null;  // ownership transferred to the builder
  }
  operationCase_ = 2;
  onChanged();  // removed stray empty statement (";;")
  return updateBuilder_;
}
/**
* <pre>
* Remove operation: A resource name for the removed ad group bid modifier
* is expected, in this format:
* `customers/{customer_id}/adGroupBidModifiers/{ad_group_id}~{criterion_id}`
* </pre>
*
* <code>string remove = 3;</code>
* @return Whether the remove field is set.
*/
@java.lang.Override
public boolean hasRemove() {
  // True iff the oneof discriminator selects the `remove` case (field 3).
  return operationCase_ == 3;
}
/**
* <pre>
* Remove operation: A resource name for the removed ad group bid modifier
* is expected, in this format:
* `customers/{customer_id}/adGroupBidModifiers/{ad_group_id}~{criterion_id}`
* </pre>
*
* <code>string remove = 3;</code>
* @return The remove.
*/
@java.lang.Override
public java.lang.String getRemove() {
  // Empty string when the `remove` case is not active.
  java.lang.Object ref = "";
  if (operationCase_ == 3) {
    ref = operation_;
  }
  if (!(ref instanceof java.lang.String)) {
    // Wire value was kept as a ByteString: decode once and cache the String
    // back into the oneof slot so later reads skip the UTF-8 conversion.
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    if (operationCase_ == 3) {
      operation_ = s;
    }
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
* <pre>
* Remove operation: A resource name for the removed ad group bid modifier
* is expected, in this format:
* `customers/{customer_id}/adGroupBidModifiers/{ad_group_id}~{criterion_id}`
* </pre>
*
* <code>string remove = 3;</code>
* @return The bytes for remove.
*/
@java.lang.Override
public com.google.protobuf.ByteString
    getRemoveBytes() {
  // Empty string when the `remove` case is not active.
  java.lang.Object ref = "";
  if (operationCase_ == 3) {
    ref = operation_;
  }
  if (ref instanceof String) {
    // Value is cached as a String: encode once and cache the ByteString back
    // into the oneof slot so later byte reads skip re-encoding.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    if (operationCase_ == 3) {
      operation_ = b;
    }
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
* <pre>
* Remove operation: A resource name for the removed ad group bid modifier
* is expected, in this format:
* `customers/{customer_id}/adGroupBidModifiers/{ad_group_id}~{criterion_id}`
* </pre>
*
* <code>string remove = 3;</code>
* @param value The remove to set.
* @return This builder for chaining.
*/
// Sets the `remove` oneof case to the given resource name. Rejects null.
public Builder setRemove(
    java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  operationCase_ = 3;
  operation_ = value;
  onChanged();
  return this;
}
/**
* <pre>
* Remove operation: A resource name for the removed ad group bid modifier
* is expected, in this format:
* `customers/{customer_id}/adGroupBidModifiers/{ad_group_id}~{criterion_id}`
* </pre>
*
* <code>string remove = 3;</code>
* @return This builder for chaining.
*/
// Clears the `remove` case; leaves the oneof untouched if another case is set.
public Builder clearRemove() {
  if (operationCase_ == 3) {
    operationCase_ = 0;
    operation_ = null;
    onChanged();
  }
  return this;
}
/**
* <pre>
* Remove operation: A resource name for the removed ad group bid modifier
* is expected, in this format:
* `customers/{customer_id}/adGroupBidModifiers/{ad_group_id}~{criterion_id}`
* </pre>
*
* <code>string remove = 3;</code>
* @param value The bytes for remove to set.
* @return This builder for chaining.
*/
// Sets the `remove` oneof case from raw bytes; validates UTF-8 first
// (proto3 string fields must carry valid UTF-8).
public Builder setRemoveBytes(
    com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  operationCase_ = 3;
  operation_ = value;
  onChanged();
  return this;
}
@java.lang.Override
public final Builder setUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Replaces the unknown-field set wholesale; delegates to the superclass.
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Merges unrecognized wire fields; delegates to the superclass.
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v9.services.AdGroupBidModifierOperation)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v9.services.AdGroupBidModifierOperation)
// Canonical immutable default instance, shared by all callers.
private static final com.google.ads.googleads.v9.services.AdGroupBidModifierOperation DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.ads.googleads.v9.services.AdGroupBidModifierOperation();
}
// Returns the shared default (all-fields-unset) instance.
public static com.google.ads.googleads.v9.services.AdGroupBidModifierOperation getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser; each call constructs a message directly from the stream.
private static final com.google.protobuf.Parser<AdGroupBidModifierOperation>
    PARSER = new com.google.protobuf.AbstractParser<AdGroupBidModifierOperation>() {
  @java.lang.Override
  public AdGroupBidModifierOperation parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new AdGroupBidModifierOperation(input, extensionRegistry);
  }
};
// Static accessor for the shared parser instance.
public static com.google.protobuf.Parser<AdGroupBidModifierOperation> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<AdGroupBidModifierOperation> getParserForType() {
  // Instance-level accessor required by the Message interface.
  return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v9.services.AdGroupBidModifierOperation getDefaultInstanceForType() {
  // Instance-level accessor required by the Message interface.
  return DEFAULT_INSTANCE;
}
}
| |
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* ProductScope.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.adwords.axis.v201809.cm;
/**
* Scope of products. Contains a set of product dimensions, all of
* which a product has to match to
* be included in the campaign. These product dimensions
* must have a value; the "everything else"
* case without a value is not allowed.
*
* <p>If there is no {@code ProductScope}, all products are
* included in the campaign. If a campaign
* has more than one {@code ProductScope}, products are included
* as long as they match any.
* Campaigns of {@link AdvertisingChannelType#SHOPPING} can
* have at most one {@code ProductScope}.
* <span class="constraint AdxEnabled">This is disabled for
* AdX when it is contained within Operators: ADD, SET.</span>
*/
public class ProductScope extends com.google.api.ads.adwords.axis.v201809.cm.Criterion implements java.io.Serializable {
    /* <span class="constraint NotEmptyForOperators">This field must
     * contain at least one element when it is contained within {@link Operator}s:
     * ADD.</span>
     * <span class="constraint Required">This field is
     * required and should not be {@code null} when it is contained within
     * {@link Operator}s : ADD.</span> */
    private com.google.api.ads.adwords.axis.v201809.cm.ProductDimension[] dimensions;

    public ProductScope() {
    }

    public ProductScope(
           java.lang.Long id,
           com.google.api.ads.adwords.axis.v201809.cm.CriterionType type,
           java.lang.String criterionType,
           com.google.api.ads.adwords.axis.v201809.cm.ProductDimension[] dimensions) {
        super(
            id,
            type,
            criterionType);
        this.dimensions = dimensions;
    }

    @Override
    public String toString() {
        return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
            .omitNullValues()
            .add("criterionType", getCriterionType())
            .add("dimensions", getDimensions())
            .add("id", getId())
            .add("type", getType())
            .toString();
    }

    /**
     * Gets the dimensions value for this ProductScope.
     *
     * @return dimensions * <span class="constraint NotEmptyForOperators">This field must
     * contain at least one element when it is contained within {@link Operator}s:
     * ADD.</span>
     * <span class="constraint Required">This field is
     * required and should not be {@code null} when it is contained within
     * {@link Operator}s : ADD.</span>
     */
    public com.google.api.ads.adwords.axis.v201809.cm.ProductDimension[] getDimensions() {
        return dimensions;
    }

    /**
     * Sets the dimensions value for this ProductScope.
     *
     * @param dimensions * <span class="constraint NotEmptyForOperators">This field must
     * contain at least one element when it is contained within {@link Operator}s:
     * ADD.</span>
     * <span class="constraint Required">This field is
     * required and should not be {@code null} when it is contained within
     * {@link Operator}s : ADD.</span>
     */
    public void setDimensions(com.google.api.ads.adwords.axis.v201809.cm.ProductDimension[] dimensions) {
        this.dimensions = dimensions;
    }

    /** Indexed accessor for a single element of {@code dimensions}. */
    public com.google.api.ads.adwords.axis.v201809.cm.ProductDimension getDimensions(int i) {
        return this.dimensions[i];
    }

    /** Indexed mutator for a single element of {@code dimensions}. */
    public void setDimensions(int i, com.google.api.ads.adwords.axis.v201809.cm.ProductDimension _value) {
        this.dimensions[i] = _value;
    }

    // Cycle guard used by the Axis-generated equals() below.
    private java.lang.Object __equalsCalc = null;

    public synchronized boolean equals(java.lang.Object obj) {
        // FIX: the generated code cast obj before its null check, leaving the
        // null check unreachable/dead; do identity and type checks up front.
        if (this == obj) return true;
        if (!(obj instanceof ProductScope)) return false;  // also rejects null
        ProductScope other = (ProductScope) obj;
        if (__equalsCalc != null) {
            // Already inside a comparison involving this object (cyclic object
            // graph); equal only if it is the very same comparison target.
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = super.equals(obj) &&
            ((this.dimensions==null && other.getDimensions()==null) ||
             (this.dimensions!=null &&
              java.util.Arrays.equals(this.dimensions, other.getDimensions())));
        __equalsCalc = null;
        return _equals;
    }

    // Cycle guard used by the Axis-generated hashCode() below.
    private boolean __hashCodeCalc = false;

    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            // Re-entered via a cyclic object graph; contribute nothing.
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = super.hashCode();
        if (getDimensions() != null) {
            for (int i=0;
                 i<java.lang.reflect.Array.getLength(getDimensions());
                 i++) {
                java.lang.Object obj = java.lang.reflect.Array.get(getDimensions(), i);
                if (obj != null &&
                    !obj.getClass().isArray()) {
                    _hashCode += obj.hashCode();
                }
            }
        }
        __hashCodeCalc = false;
        return _hashCode;
    }

    // Type metadata describing the XML binding for Axis (de)serialization.
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(ProductScope.class, true);

    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "ProductScope"));
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("dimensions");
        elemField.setXmlName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "dimensions"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "ProductDimension"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        elemField.setMaxOccursUnbounded(true);
        typeDesc.addFieldDesc(elemField);
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }

    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }
}
| |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2017.11.30 at 08:24:17 PM JST
//
package uk.org.siri.siri;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
/**
* Type for Cancellation of an earlier Stop Visit.
*
* <p>Java class for MonitoredStopVisitCancellationStructure complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="MonitoredStopVisitCancellationStructure">
* <complexContent>
* <extension base="{http://www.siri.org.uk/siri}AbstractReferencingItemStructure">
* <sequence>
* <group ref="{http://www.siri.org.uk/siri}StopVisitCancellationIdentityGroup" minOccurs="0"/>
* <element name="ClearDownRef" type="{http://www.siri.org.uk/siri}ClearDownRefStructure" minOccurs="0"/>
* <group ref="{http://www.siri.org.uk/siri}JourneyPatternInfoGroup"/>
* <element name="Reason" type="{http://www.siri.org.uk/siri}NaturalLanguageStringStructure" maxOccurs="unbounded" minOccurs="0"/>
* <element ref="{http://www.siri.org.uk/siri}Extensions" minOccurs="0"/>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "MonitoredStopVisitCancellationStructure", propOrder = {
"monitoringRef",
"visitNumber",
"lineRef",
"directionRef",
"vehicleJourneyRef",
"clearDownRef",
"journeyPatternRef",
"journeyPatternName",
"vehicleMode",
"routeRef",
"publishedLineName",
"groupOfLinesRef",
"directionName",
"externalLineRef",
"reason",
"extensions"
})
public class MonitoredStopVisitCancellationStructure
extends AbstractReferencingItemStructure
{
// JAXB-mapped fields; element order must match the propOrder declared on the
// class-level @XmlType annotation. Fields are null when the element is absent.
@XmlElement(name = "MonitoringRef")
protected MonitoringRefStructure monitoringRef;
@XmlElement(name = "VisitNumber")
@XmlSchemaType(name = "positiveInteger")
protected BigInteger visitNumber;
@XmlElement(name = "LineRef")
protected LineRefStructure lineRef;
@XmlElement(name = "DirectionRef")
protected DirectionRefStructure directionRef;
@XmlElement(name = "VehicleJourneyRef")
protected FramedVehicleJourneyRefStructure vehicleJourneyRef;
@XmlElement(name = "ClearDownRef")
protected ClearDownRefStructure clearDownRef;
@XmlElement(name = "JourneyPatternRef")
protected JourneyPatternRefStructure journeyPatternRef;
@XmlElement(name = "JourneyPatternName")
protected NaturalLanguageStringStructure journeyPatternName;
@XmlElement(name = "VehicleMode")
@XmlSchemaType(name = "NMTOKEN")
protected List<VehicleModesEnumeration> vehicleMode;
@XmlElement(name = "RouteRef")
protected RouteRefStructure routeRef;
@XmlElement(name = "PublishedLineName")
protected List<NaturalLanguageStringStructure> publishedLineName;
@XmlElement(name = "GroupOfLinesRef")
protected GroupOfLinesRefStructure groupOfLinesRef;
@XmlElement(name = "DirectionName")
protected List<NaturalLanguageStringStructure> directionName;
@XmlElement(name = "ExternalLineRef")
protected LineRefStructure externalLineRef;
@XmlElement(name = "Reason")
protected List<NaturalLanguageStringStructure> reason;
@XmlElement(name = "Extensions")
protected ExtensionsStructure extensions;
/**
* Gets the value of the monitoringRef property.
*
* @return
* possible object is
* {@link MonitoringRefStructure }
*
*/
public MonitoringRefStructure getMonitoringRef() {
    return monitoringRef;  // null when the XML element was absent
}
/**
* Sets the value of the monitoringRef property.
*
* @param value
* allowed object is
* {@link MonitoringRefStructure }
*
*/
public void setMonitoringRef(MonitoringRefStructure value) {
    this.monitoringRef = value;  // stores the reference as-is; null clears it
}
/**
* Gets the value of the visitNumber property.
*
* @return
* possible object is
* {@link BigInteger }
*
*/
public BigInteger getVisitNumber() {
    return visitNumber;  // null when the XML element was absent
}
/**
* Sets the value of the visitNumber property.
*
* @param value
* allowed object is
* {@link BigInteger }
*
*/
public void setVisitNumber(BigInteger value) {
    this.visitNumber = value;  // schema type positiveInteger; not validated here
}
/**
* Gets the value of the lineRef property.
*
* @return
* possible object is
* {@link LineRefStructure }
*
*/
public LineRefStructure getLineRef() {
    return lineRef;  // null when the XML element was absent
}
/**
* Sets the value of the lineRef property.
*
* @param value
* allowed object is
* {@link LineRefStructure }
*
*/
public void setLineRef(LineRefStructure value) {
    this.lineRef = value;  // stores the reference as-is; null clears it
}
/**
 * Returns the directionRef property.
 *
 * @return the current {@link DirectionRefStructure}, or {@code null} if unset
 */
public DirectionRefStructure getDirectionRef() {
    return this.directionRef;
}
/**
 * Sets the directionRef property.
 *
 * @param value the new {@link DirectionRefStructure}; may be {@code null}
 */
public void setDirectionRef(DirectionRefStructure value) {
    this.directionRef = value;
}
/**
 * Returns the vehicleJourneyRef property.
 *
 * @return the current {@link FramedVehicleJourneyRefStructure}, or {@code null} if unset
 */
public FramedVehicleJourneyRefStructure getVehicleJourneyRef() {
    return this.vehicleJourneyRef;
}
/**
 * Sets the vehicleJourneyRef property.
 *
 * @param value the new {@link FramedVehicleJourneyRefStructure}; may be {@code null}
 */
public void setVehicleJourneyRef(FramedVehicleJourneyRefStructure value) {
    this.vehicleJourneyRef = value;
}
/**
 * Returns the clearDownRef property.
 *
 * @return the current {@link ClearDownRefStructure}, or {@code null} if unset
 */
public ClearDownRefStructure getClearDownRef() {
    return this.clearDownRef;
}
/**
 * Sets the clearDownRef property.
 *
 * @param value the new {@link ClearDownRefStructure}; may be {@code null}
 */
public void setClearDownRef(ClearDownRefStructure value) {
    this.clearDownRef = value;
}
/**
 * Returns the journeyPatternRef property.
 *
 * @return the current {@link JourneyPatternRefStructure}, or {@code null} if unset
 */
public JourneyPatternRefStructure getJourneyPatternRef() {
    return this.journeyPatternRef;
}
/**
 * Sets the journeyPatternRef property.
 *
 * @param value the new {@link JourneyPatternRefStructure}; may be {@code null}
 */
public void setJourneyPatternRef(JourneyPatternRefStructure value) {
    this.journeyPatternRef = value;
}
/**
 * Returns the journeyPatternName property.
 *
 * @return the current {@link NaturalLanguageStringStructure}, or {@code null} if unset
 */
public NaturalLanguageStringStructure getJourneyPatternName() {
    return this.journeyPatternName;
}
/**
 * Sets the journeyPatternName property.
 *
 * @param value the new {@link NaturalLanguageStringStructure}; may be {@code null}
 */
public void setJourneyPatternName(NaturalLanguageStringStructure value) {
    this.journeyPatternName = value;
}
/**
 * Returns the live, lazily created list of vehicle modes.
 *
 * <p>The returned list is the internal JAXB collection: changes the caller
 * makes to it are reflected in this object, which is why no corresponding
 * {@code set} method exists. Permitted element type:
 * {@link VehicleModesEnumeration}.
 *
 * <p>For example, to add a new item: {@code getVehicleMode().add(newItem);}
 */
public List<VehicleModesEnumeration> getVehicleMode() {
    if (this.vehicleMode == null) {
        this.vehicleMode = new ArrayList<VehicleModesEnumeration>();
    }
    return this.vehicleMode;
}
/**
 * Returns the routeRef property.
 *
 * @return the current {@link RouteRefStructure}, or {@code null} if unset
 */
public RouteRefStructure getRouteRef() {
    return this.routeRef;
}
/**
 * Sets the routeRef property.
 *
 * @param value the new {@link RouteRefStructure}; may be {@code null}
 */
public void setRouteRef(RouteRefStructure value) {
    this.routeRef = value;
}
/**
 * Name or Number by which the LINE is known to the public. (Unbounded since
 * SIRI 2.0.) Returns the live, lazily created list backing the
 * publishedLineName property.
 *
 * <p>The returned list is the internal JAXB collection: changes the caller
 * makes to it are reflected in this object, which is why no corresponding
 * {@code set} method exists. Permitted element type:
 * {@link NaturalLanguageStringStructure}.
 *
 * <p>For example, to add a new item: {@code getPublishedLineName().add(newItem);}
 */
public List<NaturalLanguageStringStructure> getPublishedLineName() {
    if (this.publishedLineName == null) {
        this.publishedLineName = new ArrayList<NaturalLanguageStringStructure>();
    }
    return this.publishedLineName;
}
/**
 * Returns the groupOfLinesRef property.
 *
 * @return the current {@link GroupOfLinesRefStructure}, or {@code null} if unset
 */
public GroupOfLinesRefStructure getGroupOfLinesRef() {
    return this.groupOfLinesRef;
}
/**
 * Sets the groupOfLinesRef property.
 *
 * @param value the new {@link GroupOfLinesRefStructure}; may be {@code null}
 */
public void setGroupOfLinesRef(GroupOfLinesRefStructure value) {
    this.groupOfLinesRef = value;
}
/**
 * Returns the live, lazily created list backing the directionName property.
 *
 * <p>The returned list is the internal JAXB collection: changes the caller
 * makes to it are reflected in this object, which is why no corresponding
 * {@code set} method exists. Permitted element type:
 * {@link NaturalLanguageStringStructure}.
 *
 * <p>For example, to add a new item: {@code getDirectionName().add(newItem);}
 */
public List<NaturalLanguageStringStructure> getDirectionName() {
    if (this.directionName == null) {
        this.directionName = new ArrayList<NaturalLanguageStringStructure>();
    }
    return this.directionName;
}
/**
 * Returns the externalLineRef property.
 *
 * @return the current {@link LineRefStructure}, or {@code null} if unset
 */
public LineRefStructure getExternalLineRef() {
    return this.externalLineRef;
}
/**
 * Sets the externalLineRef property.
 *
 * @param value the new {@link LineRefStructure}; may be {@code null}
 */
public void setExternalLineRef(LineRefStructure value) {
    this.externalLineRef = value;
}
/**
 * Returns the live, lazily created list backing the reason property.
 *
 * <p>The returned list is the internal JAXB collection: changes the caller
 * makes to it are reflected in this object, which is why no corresponding
 * {@code set} method exists. Permitted element type:
 * {@link NaturalLanguageStringStructure}.
 *
 * <p>For example, to add a new item: {@code getReason().add(newItem);}
 */
public List<NaturalLanguageStringStructure> getReason() {
    if (this.reason == null) {
        this.reason = new ArrayList<NaturalLanguageStringStructure>();
    }
    return this.reason;
}
/**
 * Returns the extensions property.
 *
 * @return the current {@link ExtensionsStructure}, or {@code null} if unset
 */
public ExtensionsStructure getExtensions() {
    return this.extensions;
}
/**
 * Sets the extensions property.
 *
 * @param value the new {@link ExtensionsStructure}; may be {@code null}
 */
public void setExtensions(ExtensionsStructure value) {
    this.extensions = value;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.prepare;
import org.apache.calcite.DataContext;
import org.apache.calcite.adapter.enumerable.EnumerableBindable;
import org.apache.calcite.adapter.enumerable.EnumerableConvention;
import org.apache.calcite.adapter.enumerable.EnumerableInterpretable;
import org.apache.calcite.adapter.enumerable.EnumerableInterpreterRule;
import org.apache.calcite.adapter.enumerable.EnumerableRel;
import org.apache.calcite.adapter.enumerable.EnumerableRules;
import org.apache.calcite.adapter.enumerable.RexToLixTranslator;
import org.apache.calcite.adapter.java.JavaTypeFactory;
import org.apache.calcite.avatica.AvaticaParameter;
import org.apache.calcite.avatica.ColumnMetaData;
import org.apache.calcite.avatica.Meta;
import org.apache.calcite.config.CalciteConnectionConfig;
import org.apache.calcite.interpreter.BindableConvention;
import org.apache.calcite.interpreter.Bindables;
import org.apache.calcite.interpreter.Interpreters;
import org.apache.calcite.jdbc.CalcitePrepare;
import org.apache.calcite.jdbc.CalciteSchema;
import org.apache.calcite.linq4j.Enumerable;
import org.apache.calcite.linq4j.Linq4j;
import org.apache.calcite.linq4j.Ord;
import org.apache.calcite.linq4j.Queryable;
import org.apache.calcite.linq4j.function.Function1;
import org.apache.calcite.linq4j.tree.BinaryExpression;
import org.apache.calcite.linq4j.tree.BlockStatement;
import org.apache.calcite.linq4j.tree.Blocks;
import org.apache.calcite.linq4j.tree.ConstantExpression;
import org.apache.calcite.linq4j.tree.Expression;
import org.apache.calcite.linq4j.tree.Expressions;
import org.apache.calcite.linq4j.tree.MemberExpression;
import org.apache.calcite.linq4j.tree.MethodCallExpression;
import org.apache.calcite.linq4j.tree.NewExpression;
import org.apache.calcite.linq4j.tree.ParameterExpression;
import org.apache.calcite.materialize.MaterializationService;
import org.apache.calcite.plan.Contexts;
import org.apache.calcite.plan.Convention;
import org.apache.calcite.plan.ConventionTraitDef;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptCostFactory;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.RelOptRule;
import org.apache.calcite.plan.RelOptTable;
import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.plan.hep.HepPlanner;
import org.apache.calcite.plan.hep.HepProgramBuilder;
import org.apache.calcite.plan.volcano.VolcanoPlanner;
import org.apache.calcite.rel.RelCollationTraitDef;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.Filter;
import org.apache.calcite.rel.core.Project;
import org.apache.calcite.rel.core.TableScan;
import org.apache.calcite.rel.rules.AggregateExpandDistinctAggregatesRule;
import org.apache.calcite.rel.rules.AggregateReduceFunctionsRule;
import org.apache.calcite.rel.rules.AggregateStarTableRule;
import org.apache.calcite.rel.rules.FilterAggregateTransposeRule;
import org.apache.calcite.rel.rules.FilterJoinRule;
import org.apache.calcite.rel.rules.FilterProjectTransposeRule;
import org.apache.calcite.rel.rules.FilterTableScanRule;
import org.apache.calcite.rel.rules.JoinAssociateRule;
import org.apache.calcite.rel.rules.JoinCommuteRule;
import org.apache.calcite.rel.rules.JoinPushThroughJoinRule;
import org.apache.calcite.rel.rules.ProjectFilterTransposeRule;
import org.apache.calcite.rel.rules.ProjectMergeRule;
import org.apache.calcite.rel.rules.ProjectTableScanRule;
import org.apache.calcite.rel.rules.ReduceExpressionsRule;
import org.apache.calcite.rel.rules.SortProjectTransposeRule;
import org.apache.calcite.rel.rules.TableScanRule;
import org.apache.calcite.rel.rules.ValuesReduceRule;
import org.apache.calcite.rel.stream.StreamRules;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeFactoryImpl;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexInputRef;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.runtime.Bindable;
import org.apache.calcite.runtime.Hook;
import org.apache.calcite.runtime.Typed;
import org.apache.calcite.schema.Schemas;
import org.apache.calcite.schema.Table;
import org.apache.calcite.server.CalciteServerStatement;
import org.apache.calcite.sql.SqlBinaryOperator;
import org.apache.calcite.sql.SqlExplainLevel;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.sql.util.ChainedSqlOperatorTable;
import org.apache.calcite.sql.validate.SqlConformance;
import org.apache.calcite.sql.validate.SqlValidator;
import org.apache.calcite.sql.validate.SqlValidatorImpl;
import org.apache.calcite.sql2rel.SqlToRelConverter;
import org.apache.calcite.sql2rel.StandardConvertletTable;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.util.ImmutableIntList;
import org.apache.calcite.util.Util;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import java.lang.reflect.Type;
import java.math.BigDecimal;
import java.sql.DatabaseMetaData;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.apache.calcite.util.Static.RESOURCE;
/**
 * Main implementation of {@link org.apache.calcite.jdbc.CalcitePrepare}:
 * parses, validates, optimizes and binds SQL statements.
 *
 * <p>This class is public so that projects that create their own JDBC driver
 * and server can fine-tune preferences. However, this class and its methods are
 * subject to change without notice.</p>
 */
public class CalcitePrepareImpl implements CalcitePrepare {
// Debug tracing, toggled via the system property -Dcalcite.debug=true.
public static final boolean DEBUG =
    "true".equals(System.getProperties().getProperty("calcite.debug"));
// Whether to explore join commutation, toggled via
// -Dcalcite.enable.join.commute=true; also switches DEFAULT_RULES below.
public static final boolean COMMUTE =
    "true".equals(
        System.getProperties().getProperty("calcite.enable.join.commute"));
/** Whether to enable the collation trait. Some extra optimizations are
 * possible if enabled, but queries should work either way. At some point
 * this will become a preference, or we will run multiple phases: first
 * disabled, then enabled. */
private static final boolean ENABLE_COLLATION_TRAIT = true;
/** Whether the bindable convention should be the root convention of any
 * plan. If not, enumerable convention is the default. */
public static final boolean ENABLE_BINDABLE = false;
/** Whether the enumerable convention is enabled. */
public static final boolean ENABLE_ENUMERABLE = true;
/** Whether the streaming is enabled. */
public static final boolean ENABLE_STREAM = true;
// Statements answered by simplePrepare() without planning; membership is by
// exact string match, hence both upper- and lower-case spellings.
private static final Set<String> SIMPLE_SQLS =
    ImmutableSet.of(
        "SELECT 1",
        "select 1",
        "SELECT 1 FROM DUAL",
        "select 1 from dual",
        "values 1",
        "VALUES 1");
// Rules that convert logical relational expressions to enumerable convention.
private static final List<RelOptRule> ENUMERABLE_RULES =
    ImmutableList.of(
        EnumerableRules.ENUMERABLE_JOIN_RULE,
        EnumerableRules.ENUMERABLE_MERGE_JOIN_RULE,
        EnumerableRules.ENUMERABLE_SEMI_JOIN_RULE,
        EnumerableRules.ENUMERABLE_CORRELATE_RULE,
        EnumerableRules.ENUMERABLE_PROJECT_RULE,
        EnumerableRules.ENUMERABLE_FILTER_RULE,
        EnumerableRules.ENUMERABLE_AGGREGATE_RULE,
        EnumerableRules.ENUMERABLE_SORT_RULE,
        EnumerableRules.ENUMERABLE_LIMIT_RULE,
        EnumerableRules.ENUMERABLE_COLLECT_RULE,
        EnumerableRules.ENUMERABLE_UNCOLLECT_RULE,
        EnumerableRules.ENUMERABLE_UNION_RULE,
        EnumerableRules.ENUMERABLE_INTERSECT_RULE,
        EnumerableRules.ENUMERABLE_MINUS_RULE,
        EnumerableRules.ENUMERABLE_TABLE_MODIFICATION_RULE,
        EnumerableRules.ENUMERABLE_VALUES_RULE,
        EnumerableRules.ENUMERABLE_WINDOW_RULE,
        EnumerableRules.ENUMERABLE_TABLE_SCAN_RULE,
        EnumerableRules.ENUMERABLE_TABLE_FUNCTION_SCAN_RULE);
// Logical rewrite rules registered on every planner created by
// createPlanner(); the COMMUTE flag picks join association over project merge.
private static final List<RelOptRule> DEFAULT_RULES =
    ImmutableList.of(
        AggregateStarTableRule.INSTANCE,
        AggregateStarTableRule.INSTANCE2,
        TableScanRule.INSTANCE,
        COMMUTE
            ? JoinAssociateRule.INSTANCE
            : ProjectMergeRule.INSTANCE,
        FilterTableScanRule.INSTANCE,
        ProjectFilterTransposeRule.INSTANCE,
        FilterProjectTransposeRule.INSTANCE,
        FilterJoinRule.FILTER_ON_JOIN,
        AggregateExpandDistinctAggregatesRule.INSTANCE,
        AggregateReduceFunctionsRule.INSTANCE,
        FilterAggregateTransposeRule.INSTANCE,
        JoinCommuteRule.INSTANCE,
        JoinPushThroughJoinRule.RIGHT,
        JoinPushThroughJoinRule.LEFT,
        SortProjectTransposeRule.INSTANCE);
// Constant-folding rules; currently registered only when the "if (false)"
// branch in createPlanner() is flipped on.
private static final List<RelOptRule> CONSTANT_REDUCTION_RULES =
    ImmutableList.of(
        ReduceExpressionsRule.PROJECT_INSTANCE,
        ReduceExpressionsRule.FILTER_INSTANCE,
        ReduceExpressionsRule.CALC_INSTANCE,
        ReduceExpressionsRule.JOIN_INSTANCE,
        ValuesReduceRule.FILTER_INSTANCE,
        ValuesReduceRule.PROJECT_FILTER_INSTANCE,
        ValuesReduceRule.PROJECT_INSTANCE);
/** Creates a CalcitePrepareImpl with default settings. */
public CalcitePrepareImpl() {
}
/** Parses a SQL statement and validates it, without converting it to
 * relational algebra. */
public ParseResult parse(Context context, String sql) {
    return parse_(context, sql, false, false, false);
}
/** Parses, validates, and converts a SQL statement to relational algebra. */
public ConvertResult convert(Context context, String sql) {
    return (ConvertResult) parse_(context, sql, true, false, false);
}
/** Analyzes a view definition; if {@code fail}, throws a validation error
 * when the view is not modifiable. */
public AnalyzeViewResult analyzeView(Context context, String sql, boolean fail) {
    return (AnalyzeViewResult) parse_(context, sql, true, true, fail);
}
/** Shared implementation for {@link #parse}, {@link #convert} and
 * {@link #analyzeView}: parse, validate, then optionally convert/analyze. */
private ParseResult parse_(Context context, String sql, boolean convert,
    boolean analyze, boolean fail) {
    final JavaTypeFactory typeFactory = context.getTypeFactory();
    final CalciteCatalogReader catalogReader =
        new CalciteCatalogReader(context.getRootSchema(),
            context.config().caseSensitive(), context.getDefaultSchemaPath(),
            typeFactory);
    final SqlParser parser = createParser(sql);
    final SqlNode parsed;
    try {
        parsed = parser.parseStmt();
    } catch (SqlParseException e) {
        // Wrap the checked parser exception; message must stay "parse failed".
        throw new RuntimeException("parse failed", e);
    }
    final SqlValidator validator =
        new CalciteSqlValidator(SqlStdOperatorTable.instance(), catalogReader,
            typeFactory);
    final SqlNode validated = validator.validate(parsed);
    if (convert) {
        return convert_(context, sql, analyze, fail, catalogReader, validator,
            validated);
    }
    return new ParseResult(this, validator, sql, validated,
        validator.getValidatedNodeType(validated));
}
/** Converts a validated parse tree to relational algebra, and optionally
 * continues with view analysis via {@link #analyze_}. */
private ParseResult convert_(Context context, String sql, boolean analyze,
    boolean fail, CalciteCatalogReader catalogReader, SqlValidator validator,
    SqlNode validatedNode) {
    final JavaTypeFactory typeFactory = context.getTypeFactory();
    // Bindable convention wins when enabled; otherwise plans are enumerable.
    final Convention resultConvention = ENABLE_BINDABLE
        ? BindableConvention.INSTANCE
        : EnumerableConvention.INSTANCE;
    final HepPlanner hepPlanner = new HepPlanner(new HepProgramBuilder().build());
    hepPlanner.addRelTraitDef(ConventionTraitDef.INSTANCE);
    final CalcitePreparingStmt preparingStmt =
        new CalcitePreparingStmt(this, context, catalogReader, typeFactory,
            context.getRootSchema(), null, hepPlanner, resultConvention);
    final SqlToRelConverter converter =
        preparingStmt.getSqlToRelConverter(validator, catalogReader);
    if (analyze) {
        // Keep raw table accesses visible so the view analysis can see them.
        converter.enableTableAccessConversion(false);
    }
    final RelNode rel = converter.convertQuery(validatedNode, false, true);
    if (analyze) {
        return analyze_(validator, sql, validatedNode, rel, fail);
    }
    return new ConvertResult(this, validator, sql, validatedNode,
        validator.getValidatedNodeType(validatedNode), rel);
}
/**
 * Analyzes whether a view is modifiable. A modifiable view must be an
 * (optional) Project over an (optional) Filter over a single TableScan, with
 * no column mapped more than once and every non-nullable, unprojected target
 * column constrained to a constant.
 *
 * @param validator Validator that validated {@code sqlNode}
 * @param sql       Original SQL text of the view
 * @param sqlNode   Validated parse tree (used for error positions)
 * @param rel       Relational expression of the view
 * @param fail      If true, throw a validation error on a non-modifiable
 *                  view; if false, return a result with null table/mapping
 */
private AnalyzeViewResult analyze_(SqlValidator validator, String sql,
    SqlNode sqlNode, RelNode rel, boolean fail) {
    final RexBuilder rexBuilder = rel.getCluster().getRexBuilder();
    final RelNode viewRel = rel;
    // Peel an optional Project off the top of the tree.
    Project project;
    if (rel instanceof Project) {
        project = (Project) rel;
        rel = project.getInput();
    } else {
        project = null;
    }
    // Then an optional Filter.
    Filter filter;
    if (rel instanceof Filter) {
        filter = (Filter) rel;
        rel = filter.getInput();
    } else {
        filter = null;
    }
    // What remains must be a bare TableScan for the view to be modifiable.
    TableScan scan;
    if (rel instanceof TableScan) {
        scan = (TableScan) rel;
    } else {
        scan = null;
    }
    if (scan == null) {
        if (fail) {
            throw validator.newValidationError(sqlNode,
                RESOURCE.modifiableViewMustBeBasedOnSingleTable());
        }
        // Non-modifiable: return a result with null table/path/constraint.
        return new AnalyzeViewResult(this, validator, sql, sqlNode,
            validator.getValidatedNodeType(sqlNode), rel, null, null, null,
            null);
    }
    final RelOptTable targetRelTable = scan.getTable();
    final RelDataType targetRowType = targetRelTable.getRowType();
    final Table table = targetRelTable.unwrap(Table.class);
    final List<String> tablePath = targetRelTable.getQualifiedName();
    assert table != null;
    // columnMapping[i] = target-table column index for view column i, or -1
    // when the projection is not a simple input reference.
    List<Integer> columnMapping;
    final Map<Integer, RexNode> projectMap = new HashMap<>();
    if (project == null) {
        // No Project: identity mapping over the full target row.
        columnMapping = ImmutableIntList.range(0, targetRowType.getFieldCount());
    } else {
        columnMapping = new ArrayList<>();
        for (Ord<RexNode> node : Ord.zip(project.getProjects())) {
            if (node.e instanceof RexInputRef) {
                RexInputRef rexInputRef = (RexInputRef) node.e;
                int index = rexInputRef.getIndex();
                if (projectMap.get(index) != null) {
                    // Same target column referenced twice: not modifiable.
                    if (fail) {
                        throw validator.newValidationError(sqlNode,
                            RESOURCE.moreThanOneMappedColumn(
                                targetRowType.getFieldList().get(index).getName(),
                                Util.last(tablePath)));
                    }
                    return new AnalyzeViewResult(this, validator, sql, sqlNode,
                        validator.getValidatedNodeType(sqlNode), rel, null, null, null,
                        null);
                }
                projectMap.put(index, rexBuilder.makeInputRef(viewRel, node.i));
                columnMapping.add(index);
            } else {
                columnMapping.add(-1);
            }
        }
    }
    // The view constraint is the filter condition, or literal TRUE if none.
    final RexNode constraint;
    if (filter != null) {
        constraint = filter.getCondition();
    } else {
        constraint = rexBuilder.makeLiteral(true);
    }
    final List<RexNode> filters = new ArrayList<>();
    // Derives constant expressions for columns pinned by the constraint.
    RelOptUtil.inferViewPredicates(projectMap, filters, constraint);
    // Check that all columns that are not projected have a constant value
    for (RelDataTypeField field : targetRowType.getFieldList()) {
        final int x = columnMapping.indexOf(field.getIndex());
        if (x >= 0) {
            assert Util.skip(columnMapping, x + 1).indexOf(field.getIndex()) < 0
                : "column projected more than once; should have checked above";
            continue; // target column is projected
        }
        if (projectMap.get(field.getIndex()) != null) {
            continue; // constant expression
        }
        if (field.getType().isNullable()) {
            continue; // don't need expression for nullable columns; NULL suffices
        }
        if (fail) {
            throw validator.newValidationError(sqlNode,
                RESOURCE.noValueSuppliedForViewColumn(field.getName(),
                    Util.last(tablePath)));
        }
        return new AnalyzeViewResult(this, validator, sql, sqlNode,
            validator.getValidatedNodeType(sqlNode), rel, null, null, null,
            null);
    }
    // Modifiable: report the table, its path, the constraint and the mapping.
    return new AnalyzeViewResult(this, validator, sql, sqlNode,
        validator.getValidatedNodeType(sqlNode), rel, table,
        ImmutableList.copyOf(tablePath),
        constraint, ImmutableIntList.copyOf(columnMapping));
}
/** Factory method for the default SQL parser, using the default
 * configuration from {@link #createParserConfig()}. */
protected SqlParser createParser(String sql) {
    return createParser(sql, createParserConfig());
}
/** Factory method for a SQL parser with a given configuration builder. */
protected SqlParser createParser(String sql,
    SqlParser.ConfigBuilder parserConfig) {
    return SqlParser.create(sql, parserConfig.build());
}
/** Factory method for the SQL parser configuration builder; override to
 * customize parsing defaults. */
protected SqlParser.ConfigBuilder createParserConfig() {
    return SqlParser.configBuilder();
}
/** Factory method for the planning cluster. */
protected RelOptCluster createCluster(RelOptPlanner planner,
    RexBuilder rexBuilder) {
    return RelOptCluster.create(planner, rexBuilder);
}
/** Creates a collection of planner factories.
 *
 * <p>The collection must have at least one factory, and each factory must
 * create a planner. If the collection has more than one planner, Calcite will
 * try each planner in turn.</p>
 *
 * <p>One of the things you can do with this mechanism is to try a simpler,
 * faster, planner with a smaller rule set first, then fall back to a more
 * complex planner for complex and costly queries.</p>
 *
 * <p>The default implementation returns a factory that calls
 * {@link #createPlanner(org.apache.calcite.jdbc.CalcitePrepare.Context)}.</p>
 */
protected List<Function1<Context, RelOptPlanner>> createPlannerFactories() {
    return Collections.<Function1<Context, RelOptPlanner>>singletonList(
        new Function1<Context, RelOptPlanner>() {
            public RelOptPlanner apply(Context context) {
                // Delegate to the 3-arg overload with no external context or
                // custom cost factory.
                return createPlanner(context, null, null);
            }
        });
}
/** Creates a query planner initialized with the default rule set, using no
 * external context and the default cost factory. */
protected RelOptPlanner createPlanner(CalcitePrepare.Context prepareContext) {
    return createPlanner(prepareContext, null, null);
}
/** Creates a query planner and initializes it with a default set of
 * rules.
 *
 * @param prepareContext  Prepare context supplying configuration and schema
 * @param externalContext Optional planner context; defaults to one wrapping
 *                        the connection config
 * @param costFactory     Optional cost factory; null for the planner default
 */
protected RelOptPlanner createPlanner(
    final CalcitePrepare.Context prepareContext,
    org.apache.calcite.plan.Context externalContext,
    RelOptCostFactory costFactory) {
    if (externalContext == null) {
        externalContext = Contexts.of(prepareContext.config());
    }
    final VolcanoPlanner planner =
        new VolcanoPlanner(costFactory, externalContext);
    planner.addRelTraitDef(ConventionTraitDef.INSTANCE);
    if (ENABLE_COLLATION_TRAIT) {
        planner.addRelTraitDef(RelCollationTraitDef.INSTANCE);
        planner.registerAbstractRelationalRules();
    }
    RelOptUtil.registerAbstractRels(planner);
    // Logical rewrites, then convention-specific conversion rules below.
    for (RelOptRule rule : DEFAULT_RULES) {
        planner.addRule(rule);
    }
    if (ENABLE_BINDABLE) {
        for (RelOptRule rule : Bindables.RULES) {
            planner.addRule(rule);
        }
    }
    // Table-scan rules are registered regardless of the root convention.
    planner.addRule(Bindables.BINDABLE_TABLE_SCAN_RULE);
    planner.addRule(ProjectTableScanRule.INSTANCE);
    planner.addRule(ProjectTableScanRule.INTERPRETER);
    if (ENABLE_ENUMERABLE) {
        for (RelOptRule rule : ENUMERABLE_RULES) {
            planner.addRule(rule);
        }
        planner.addRule(EnumerableInterpreterRule.INSTANCE);
    }
    if (ENABLE_BINDABLE && ENABLE_ENUMERABLE) {
        planner.addRule(
            EnumerableBindable.EnumerableToBindableConverterRule.INSTANCE);
    }
    if (ENABLE_STREAM) {
        for (RelOptRule rule : StreamRules.RULES) {
            planner.addRule(rule);
        }
    }
    // Change the below to enable constant-reduction.
    if (false) {
        for (RelOptRule rule : CONSTANT_REDUCTION_RULES) {
            planner.addRule(rule);
        }
    }
    // Give an attached Spark handler the chance to manage the rule set.
    final SparkHandler spark = prepareContext.spark();
    if (spark.enabled()) {
        spark.registerRules(
            new SparkHandler.RuleSetBuilder() {
                public void addRule(RelOptRule rule) {
                    // TODO:
                }
                public void removeRule(RelOptRule rule) {
                    // TODO:
                }
            });
    }
    return planner;
}
/** Prepares a statement expressed as a {@link Queryable}; no SQL text and
 * no row limit. */
public <T> CalciteSignature<T> prepareQueryable(Context context,
    Queryable<T> queryable) {
    return prepare_(context, null, queryable, queryable.getElementType(), -1);
}
/** Prepares a statement expressed as SQL text. */
public <T> CalciteSignature<T> prepareSql(Context context, String sql,
    Queryable<T> expression, Type elementType, long maxRowCount) {
    return prepare_(context, sql, expression, elementType, maxRowCount);
}
/**
 * Shared preparation entry point for {@link #prepareQueryable} and
 * {@link #prepareSql}. Short-circuits trivial statements, then tries each
 * planner factory in turn until one can plan the statement.
 *
 * <p>Fix: the fallback exception previously was a bare
 * {@code new RuntimeException()} with no message; it now carries a
 * diagnostic message in the (normally unreachable) case that it escapes.
 */
<T> CalciteSignature<T> prepare_(
    Context context,
    String sql,
    Queryable<T> queryable,
    Type elementType,
    long maxRowCount) {
    // Fast path for trivial statements such as "SELECT 1".
    if (SIMPLE_SQLS.contains(sql)) {
        return simplePrepare(context, sql);
    }
    final JavaTypeFactory typeFactory = context.getTypeFactory();
    CalciteCatalogReader catalogReader =
        new CalciteCatalogReader(
            context.getRootSchema(),
            context.config().caseSensitive(),
            context.getDefaultSchemaPath(),
            typeFactory);
    final List<Function1<Context, RelOptPlanner>> plannerFactories =
        createPlannerFactories();
    if (plannerFactories.isEmpty()) {
        throw new AssertionError("no planner factories");
    }
    // Try each planner in turn; remember the last CannotPlanException so we
    // can rethrow it if every planner gives up.
    RuntimeException exception =
        new RuntimeException("no planner could prepare the statement");
    for (Function1<Context, RelOptPlanner> plannerFactory : plannerFactories) {
        final RelOptPlanner planner = plannerFactory.apply(context);
        if (planner == null) {
            throw new AssertionError("factory returned null planner");
        }
        try {
            return prepare2_(context, sql, queryable, elementType, maxRowCount,
                catalogReader, planner);
        } catch (RelOptPlanner.CannotPlanException e) {
            exception = e;
        }
    }
    throw exception;
}
/** Quickly prepares a simple SQL statement, circumventing the usual
 * preparation process. */
private <T> CalciteSignature<T> simplePrepare(Context context, String sql) {
    final JavaTypeFactory typeFactory = context.getTypeFactory();
    // Single INTEGER column named EXPR$0 holding the constant value 1.
    final RelDataType rowType =
        typeFactory.builder().add("EXPR$0", SqlTypeName.INTEGER).build();
    @SuppressWarnings("unchecked")
    final List<T> rows = (List) ImmutableList.of(1);
    // No field origins for a synthesized row type.
    final List<List<String>> origins =
        Collections.nCopies(rowType.getFieldCount(), (List<String>) null);
    final List<ColumnMetaData> columns =
        getColumnMetaDataList(typeFactory, rowType, rowType, origins);
    final Meta.CursorFactory cursorFactory =
        Meta.CursorFactory.deduce(columns, null);
    return new CalciteSignature<T>(
        sql,
        ImmutableList.<AvaticaParameter>of(),
        ImmutableMap.<String, Object>of(),
        rowType,
        columns,
        cursorFactory,
        -1,
        new Bindable<T>() {
            public Enumerable<T> bind(DataContext dataContext) {
                return Linq4j.asEnumerable(rows);
            }
        });
}
/**
 * Prepares a statement with a specific planner: parses and validates SQL (or
 * types a queryable), plans it, and packages the result as a
 * {@link CalciteSignature} with parameter and column metadata.
 *
 * <p>Exactly one of {@code sql} and {@code queryable} must be non-null.
 */
<T> CalciteSignature<T> prepare2_(
    Context context,
    String sql,
    Queryable<T> queryable,
    Type elementType,
    long maxRowCount,
    CalciteCatalogReader catalogReader,
    RelOptPlanner planner) {
    final JavaTypeFactory typeFactory = context.getTypeFactory();
    // Object[] element type means the caller wants rows as arrays.
    final EnumerableRel.Prefer prefer;
    if (elementType == Object[].class) {
        prefer = EnumerableRel.Prefer.ARRAY;
    } else {
        prefer = EnumerableRel.Prefer.CUSTOM;
    }
    final Convention resultConvention =
        ENABLE_BINDABLE ? BindableConvention.INSTANCE
            : EnumerableConvention.INSTANCE;
    final CalcitePreparingStmt preparingStmt =
        new CalcitePreparingStmt(this, context, catalogReader, typeFactory,
            context.getRootSchema(), prefer, planner, resultConvention);
    final RelDataType x;    // row type of the prepared statement
    final Prepare.PreparedResult preparedResult;
    if (sql != null) {
        assert queryable == null;
        final CalciteConnectionConfig config = context.config();
        // Parser honors the connection's casing and quoting configuration.
        SqlParser parser = createParser(sql,
            createParserConfig()
                .setQuotedCasing(config.quotedCasing())
                .setUnquotedCasing(config.unquotedCasing())
                .setQuoting(config.quoting()));
        SqlNode sqlNode;
        try {
            sqlNode = parser.parseStmt();
        } catch (SqlParseException e) {
            throw new RuntimeException(
                "parse failed: " + e.getMessage(), e);
        }
        // Notify test/debug hooks of the parse tree.
        Hook.PARSE_TREE.run(new Object[] {sql, sqlNode});
        final CalciteSchema rootSchema = context.getRootSchema();
        // Operator table = standard operators plus schema-defined functions.
        final ChainedSqlOperatorTable opTab =
            new ChainedSqlOperatorTable(
                ImmutableList.of(SqlStdOperatorTable.instance(), catalogReader));
        final SqlValidator validator =
            new CalciteSqlValidator(opTab, catalogReader, typeFactory);
        validator.setIdentifierExpansion(true);
        final List<Prepare.Materialization> materializations =
            config.materializationsEnabled()
                ? MaterializationService.instance().query(rootSchema)
                : ImmutableList.<Prepare.Materialization>of();
        for (Prepare.Materialization materialization : materializations) {
            populateMaterializations(context, planner, materialization);
        }
        final List<CalciteSchema.LatticeEntry> lattices =
            Schemas.getLatticeEntries(rootSchema);
        preparedResult = preparingStmt.prepareSql(
            sqlNode, Object.class, validator, true, materializations, lattices);
        switch (sqlNode.getKind()) {
        case INSERT:
        case EXPLAIN:
            // FIXME: getValidatedNodeType is wrong for DML
            x = RelOptUtil.createDmlRowType(sqlNode.getKind(), typeFactory);
            break;
        default:
            x = validator.getValidatedNodeType(sqlNode);
        }
    } else {
        assert queryable != null;
        x = context.getTypeFactory().createType(elementType);
        preparedResult =
            preparingStmt.prepareQueryable(queryable, x);
    }
    // Build JDBC parameter metadata from the inferred parameter row type.
    final List<AvaticaParameter> parameters = new ArrayList<AvaticaParameter>();
    final RelDataType parameterRowType = preparedResult.getParameterRowType();
    for (RelDataTypeField field : parameterRowType.getFieldList()) {
        RelDataType type = field.getType();
        parameters.add(
            new AvaticaParameter(
                false,
                getPrecision(type),
                getScale(type),
                getTypeOrdinal(type),
                getTypeName(type),
                getClassName(type),
                field.getName()));
    }
    RelDataType jdbcType = makeStruct(typeFactory, x);
    final List<List<String>> originList = preparedResult.getFieldOrigins();
    final List<ColumnMetaData> columns =
        getColumnMetaDataList(typeFactory, x, jdbcType, originList);
    Class resultClazz = null;
    if (preparedResult instanceof Typed) {
        resultClazz = (Class) ((Typed) preparedResult).getElementType();
    }
    //noinspection unchecked
    final Bindable<T> bindable = preparedResult.getBindable();
    return new CalciteSignature<>(
        sql,
        parameters,
        preparingStmt.internalParameters,
        jdbcType,
        columns,
        // Bindable plans always yield Object[] rows.
        preparingStmt.resultConvention == BindableConvention.INSTANCE
            ? Meta.CursorFactory.ARRAY
            : Meta.CursorFactory.deduce(columns, resultClazz),
        maxRowCount,
        bindable);
}
/** Builds JDBC column metadata for every field of {@code jdbcType}, pairing
 * each with its logical field type from {@code x} and its origin path. */
private List<ColumnMetaData> getColumnMetaDataList(
    JavaTypeFactory typeFactory, RelDataType x, RelDataType jdbcType,
    List<List<String>> originList) {
    final List<ColumnMetaData> columns = new ArrayList<ColumnMetaData>();
    int i = 0;
    for (RelDataTypeField field : jdbcType.getFieldList()) {
        final RelDataType type = field.getType();
        // When x is a struct, use its parallel field type; otherwise reuse type.
        final RelDataType fieldType =
            x.isStruct() ? x.getFieldList().get(i).getType() : type;
        columns.add(
            metaData(typeFactory, columns.size(), field.getName(), type,
                fieldType, originList.get(i)));
        ++i;
    }
    return columns;
}
  /** Creates JDBC metadata for a single result column.
   *
   * <p>The arguments follow Avatica's positional {@code ColumnMetaData}
   * constructor; trailing comments name each parameter. */
  private ColumnMetaData metaData(JavaTypeFactory typeFactory, int ordinal,
      String fieldName, RelDataType type, RelDataType fieldType,
      List<String> origins) {
    final ColumnMetaData.AvaticaType avaticaType =
        avaticaType(typeFactory, type, fieldType);
    return new ColumnMetaData(
        ordinal,
        false, // autoIncrement
        true, // caseSensitive
        false, // searchable
        false, // currency
        type.isNullable()
            ? DatabaseMetaData.columnNullable
            : DatabaseMetaData.columnNoNulls, // nullable
        true, // signed
        type.getPrecision(), // displaySize
        fieldName, // label
        origin(origins, 0), // columnName (last element of origin path)
        origin(origins, 2), // schemaName
        getPrecision(type), // precision
        getScale(type), // scale
        origin(origins, 1), // tableName
        null, // catalogName
        avaticaType,
        true, // readOnly
        false, // writable
        false, // definitelyWritable
        avaticaType.columnClassName());
  }
private ColumnMetaData.AvaticaType avaticaType(JavaTypeFactory typeFactory,
RelDataType type, RelDataType fieldType) {
final Type clazz = typeFactory.getJavaClass(Util.first(fieldType, type));
final ColumnMetaData.Rep rep = ColumnMetaData.Rep.of(clazz);
assert rep != null;
final String typeName = getTypeName(type);
if (type.getComponentType() != null) {
final ColumnMetaData.AvaticaType componentType =
avaticaType(typeFactory, type.getComponentType(), null);
return ColumnMetaData.array(componentType, typeName, rep);
} else {
return ColumnMetaData.scalar(getTypeOrdinal(type), typeName, rep);
}
}
private static String origin(List<String> origins, int offsetFromEnd) {
return origins == null || offsetFromEnd >= origins.size()
? null
: origins.get(origins.size() - 1 - offsetFromEnd);
}
  /** Returns the JDBC type code (cf. {@link java.sql.Types}) for a type. */
  private int getTypeOrdinal(RelDataType type) {
    return type.getSqlTypeName().getJdbcOrdinal();
  }
  /** Returns the column class name to report for a type.
   *
   * <p>Deliberately always null here — the {@code columnClassName()} of the
   * Avatica type is used instead where column metadata is built. The
   * parameter is currently unused. */
  private static String getClassName(RelDataType type) {
    return null;
  }
private static int getScale(RelDataType type) {
return type.getScale() == RelDataType.SCALE_NOT_SPECIFIED
? 0
: type.getScale();
}
private static int getPrecision(RelDataType type) {
return type.getPrecision() == RelDataType.PRECISION_NOT_SPECIFIED
? 0
: type.getPrecision();
}
  /** Returns the SQL type name to report for a type.
   *
   * <p>Java types report the plain SQL type name; interval types are
   * rendered from their qualifier; all other types use their own string
   * form (which may include precision, e.g. "VARCHAR(10)"). */
  private static String getTypeName(RelDataType type) {
    SqlTypeName sqlTypeName = type.getSqlTypeName();
    if (type instanceof RelDataTypeFactoryImpl.JavaType) {
      // We'd rather print "INTEGER" than "JavaType(int)".
      return sqlTypeName.getName();
    }
    switch (sqlTypeName) {
    case INTERVAL_YEAR_MONTH:
    case INTERVAL_DAY_TIME:
      // e.g. "INTERVAL_MONTH" or "INTERVAL_YEAR_MONTH"
      return "INTERVAL_"
          + type.getIntervalQualifier().toString().replace(' ', '_');
    default:
      return type.toString(); // e.g. "VARCHAR(10)", "INTEGER ARRAY"
    }
  }
  /** Populates a materialization record, converting its table and SQL text
   * into relational expressions via a {@code CalciteMaterializer}.
   *
   * <p>Any failure is wrapped in a {@link RuntimeException} that names the
   * materialized table's path, preserving the cause. */
  protected void populateMaterializations(Context context,
      RelOptPlanner planner, Prepare.Materialization materialization) {
    // REVIEW: initialize queryRel and tableRel inside MaterializationService,
    // not here?
    try {
      final CalciteSchema schema = materialization.materializedTable.schema;
      // Name resolution happens relative to the schema that owns the
      // materialized table, not the current connection's schema.
      CalciteCatalogReader catalogReader =
          new CalciteCatalogReader(
              schema.root(),
              context.config().caseSensitive(),
              Util.skipLast(materialization.materializedTable.path()),
              context.getTypeFactory());
      final CalciteMaterializer materializer =
          new CalciteMaterializer(this, context, catalogReader, schema, planner);
      materializer.populate(materialization);
    } catch (Exception e) {
      throw new RuntimeException("While populating materialization "
          + materialization.materializedTable.path(), e);
    }
  }
private static RelDataType makeStruct(
RelDataTypeFactory typeFactory,
RelDataType type) {
if (type.isStruct()) {
return type;
}
return typeFactory.builder().add("$0", type).build();
}
  /** Executes a prepare action.
   *
   * <p>Builds a fresh planning environment (catalog reader, planner,
   * cluster) from the statement's prepare context and hands it to the
   * action. */
  public <R> R perform(CalciteServerStatement statement,
      Frameworks.PrepareAction<R> action) {
    final CalcitePrepare.Context prepareContext =
        statement.createPrepareContext();
    final JavaTypeFactory typeFactory = prepareContext.getTypeFactory();
    // Prefer the action's configured default schema; fall back to the
    // connection's root schema.
    final CalciteSchema schema =
        action.getConfig().getDefaultSchema() != null
            ? CalciteSchema.from(action.getConfig().getDefaultSchema())
            : prepareContext.getRootSchema();
    CalciteCatalogReader catalogReader =
        new CalciteCatalogReader(schema.root(),
            prepareContext.config().caseSensitive(),
            schema.path(null),
            typeFactory);
    final RexBuilder rexBuilder = new RexBuilder(typeFactory);
    final RelOptPlanner planner =
        createPlanner(prepareContext,
            action.getConfig().getContext(),
            action.getConfig().getCostFactory());
    final RelOptCluster cluster = createCluster(planner, rexBuilder);
    return action.apply(cluster, catalogReader,
        prepareContext.getRootSchema().plus(), statement);
  }
  /** Holds state for the process of preparing a SQL statement. */
  static class CalcitePreparingStmt extends Prepare
      implements RelOptTable.ViewExpander {
    protected final RelOptPlanner planner;
    protected final RexBuilder rexBuilder;
    protected final CalcitePrepareImpl prepare;
    protected final CalciteSchema schema;
    protected final RelDataTypeFactory typeFactory;
    private final EnumerableRel.Prefer prefer;
    // Insertion-ordered so parameters keep the order in which code
    // generation registered them.
    private final Map<String, Object> internalParameters =
        Maps.newLinkedHashMap();
    // Depth of nested view expansion; maintained by expandView.
    private int expansionDepth;
    // Lazily created; see getSqlValidator().
    private SqlValidator sqlValidator;

    public CalcitePreparingStmt(CalcitePrepareImpl prepare,
        Context context,
        CatalogReader catalogReader,
        RelDataTypeFactory typeFactory,
        CalciteSchema schema,
        EnumerableRel.Prefer prefer,
        RelOptPlanner planner,
        Convention resultConvention) {
      super(context, catalogReader, resultConvention);
      this.prepare = prepare;
      this.schema = schema;
      this.prefer = prefer;
      this.planner = planner;
      this.typeFactory = typeFactory;
      this.rexBuilder = new RexBuilder(typeFactory);
    }

    @Override protected void init(Class runtimeContextClass) {
    }

    /** Prepares a statement from a linq4j {@code Queryable} rather than
     * from SQL text; no parsing or validation is involved. */
    public PreparedResult prepareQueryable(
        Queryable queryable,
        RelDataType resultType) {
      queryString = null;
      Class runtimeContextClass = Object.class;
      init(runtimeContextClass);
      final RelOptCluster cluster = prepare.createCluster(planner, rexBuilder);
      // Translate the queryable's expression tree directly to relational
      // algebra.
      RelNode rootRel =
          new LixToRelTranslator(cluster, CalcitePreparingStmt.this)
              .translate(queryable);
      if (timingTracer != null) {
        timingTracer.traceTime("end sql2rel");
      }
      final RelDataType jdbcType =
          makeStruct(rexBuilder.getTypeFactory(), resultType);
      // Queryables carry no origin information and no parameters.
      fieldOrigins = Collections.nCopies(jdbcType.getFieldCount(), null);
      parameterRowType = rexBuilder.getTypeFactory().builder().build();
      // Structured type flattening, view expansion, and plugging in
      // physical storage.
      rootRel = flattenTypes(rootRel, true);
      // Trim unused fields.
      rootRel = trimUnusedFields(rootRel);
      final List<Materialization> materializations = ImmutableList.of();
      final List<CalciteSchema.LatticeEntry> lattices = ImmutableList.of();
      rootRel = optimize(rootRel, materializations, lattices);
      if (timingTracer != null) {
        timingTracer.traceTime("end optimization");
      }
      return implement(
          resultType,
          rootRel,
          SqlKind.SELECT);
    }

    @Override protected SqlToRelConverter getSqlToRelConverter(
        SqlValidator validator,
        CatalogReader catalogReader) {
      final RelOptCluster cluster = prepare.createCluster(planner, rexBuilder);
      SqlToRelConverter sqlToRelConverter =
          new SqlToRelConverter(this, validator, catalogReader, cluster,
              StandardConvertletTable.INSTANCE);
      sqlToRelConverter.setTrimUnusedFields(true);
      return sqlToRelConverter;
    }

    @Override public RelNode flattenTypes(
        RelNode rootRel,
        boolean restructure) {
      // Delegated to the Spark handler when enabled; otherwise a no-op.
      final SparkHandler spark = context.spark();
      if (spark.enabled()) {
        return spark.flattenTypes(planner, rootRel, restructure);
      }
      return rootRel;
    }

    @Override protected RelNode decorrelate(SqlToRelConverter sqlToRelConverter,
        SqlNode query, RelNode rootRel) {
      return sqlToRelConverter.decorrelate(query, rootRel);
    }

    /** Parses, validates and converts a view's SQL body, resolving names
     * against the view's own schema path. */
    @Override public RelNode expandView(
        RelDataType rowType,
        String queryString,
        List<String> schemaPath) {
      expansionDepth++;
      SqlParser parser = prepare.createParser(queryString);
      SqlNode sqlNode;
      try {
        sqlNode = parser.parseQuery();
      } catch (SqlParseException e) {
        throw new RuntimeException("parse failed", e);
      }
      // View may have different schema path than current connection.
      final CatalogReader catalogReader =
          this.catalogReader.withSchemaPath(schemaPath);
      SqlValidator validator = createSqlValidator(catalogReader);
      SqlNode sqlNode1 = validator.validate(sqlNode);
      SqlToRelConverter sqlToRelConverter =
          getSqlToRelConverter(validator, catalogReader);
      RelNode relNode =
          sqlToRelConverter.convertQuery(sqlNode1, true, false);
      --expansionDepth;
      return relNode;
    }

    private SqlValidatorImpl createSqlValidator(CatalogReader catalogReader) {
      return new SqlValidatorImpl(
          SqlStdOperatorTable.instance(), catalogReader,
          rexBuilder.getTypeFactory(), SqlConformance.DEFAULT) { };
    }

    @Override protected SqlValidator getSqlValidator() {
      if (sqlValidator == null) {
        sqlValidator = createSqlValidator(catalogReader);
      }
      return sqlValidator;
    }

    @Override protected PreparedResult createPreparedExplanation(
        RelDataType resultType,
        RelDataType parameterRowType,
        RelNode rootRel,
        boolean explainAsXml,
        SqlExplainLevel detailLevel) {
      return new CalcitePreparedExplain(
          resultType, parameterRowType, rootRel, explainAsXml, detailLevel);
    }

    /** Generates executable code for the optimized plan and wraps it in a
     * {@code PreparedResult}.
     *
     * <p>Note: the reported result type is taken from {@code rootRel}, not
     * from the {@code rowType} argument. */
    @Override protected PreparedResult implement(
        RelDataType rowType,
        RelNode rootRel,
        SqlKind sqlKind) {
      RelDataType resultType = rootRel.getRowType();
      boolean isDml = sqlKind.belongsTo(SqlKind.DML);
      final Bindable bindable;
      if (resultConvention == BindableConvention.INSTANCE) {
        // Run the plan in the interpreter.
        bindable = Interpreters.bindable(rootRel);
      } else {
        // Compile the plan to Java code.
        bindable = EnumerableInterpretable.toBindable(internalParameters,
            context.spark(), (EnumerableRel) rootRel, prefer);
      }
      if (timingTracer != null) {
        timingTracer.traceTime("end codegen");
      }
      if (timingTracer != null) {
        timingTracer.traceTime("end compilation");
      }
      return new PreparedResultImpl(
          resultType,
          parameterRowType,
          fieldOrigins,
          rootRel,
          mapTableModOp(isDml, sqlKind),
          isDml) {
        public String getCode() {
          throw new UnsupportedOperationException();
        }

        public Bindable getBindable() {
          return bindable;
        }

        public Type getElementType() {
          return ((Typed) bindable).getElementType();
        }
      };
    }
  }
/** An {@code EXPLAIN} statement, prepared and ready to execute. */
private static class CalcitePreparedExplain extends Prepare.PreparedExplain {
public CalcitePreparedExplain(
RelDataType resultType,
RelDataType parameterRowType,
RelNode rootRel,
boolean explainAsXml,
SqlExplainLevel detailLevel) {
super(resultType, parameterRowType, rootRel, explainAsXml, detailLevel);
}
public Bindable getBindable() {
final String explanation = getCode();
return new Bindable() {
public Enumerable bind(DataContext dataContext) {
return Linq4j.singletonEnumerable(explanation);
}
};
}
}
  /** Translator from Java AST to {@link RexNode}. */
  interface ScalarTranslator {
    /** Translates the result expression of a block to a row expression. */
    RexNode toRex(BlockStatement statement);
    /** Translates a block to a list of row expressions (one per
     * constructor argument when the result is a {@code new} expression,
     * otherwise a single element). */
    List<RexNode> toRexList(BlockStatement statement);
    /** Translates a single Java expression to a row expression. */
    RexNode toRex(Expression expression);
    /** Returns a translator that resolves the given parameter expressions
     * to the given row-expression values. */
    ScalarTranslator bind(List<ParameterExpression> parameterList,
        List<RexNode> values);
  }
/** Basic translator. */
static class EmptyScalarTranslator implements ScalarTranslator {
private final RexBuilder rexBuilder;
public EmptyScalarTranslator(RexBuilder rexBuilder) {
this.rexBuilder = rexBuilder;
}
public static ScalarTranslator empty(RexBuilder builder) {
return new EmptyScalarTranslator(builder);
}
public List<RexNode> toRexList(BlockStatement statement) {
final List<Expression> simpleList = simpleList(statement);
final List<RexNode> list = new ArrayList<RexNode>();
for (Expression expression1 : simpleList) {
list.add(toRex(expression1));
}
return list;
}
public RexNode toRex(BlockStatement statement) {
return toRex(Blocks.simple(statement));
}
private static List<Expression> simpleList(BlockStatement statement) {
Expression simple = Blocks.simple(statement);
if (simple instanceof NewExpression) {
NewExpression newExpression = (NewExpression) simple;
return newExpression.arguments;
} else {
return Collections.singletonList(simple);
}
}
public RexNode toRex(Expression expression) {
switch (expression.getNodeType()) {
case MemberAccess:
// Case-sensitive name match because name was previously resolved.
return rexBuilder.makeFieldAccess(
toRex(
((MemberExpression) expression).expression),
((MemberExpression) expression).field.getName(),
true);
case GreaterThan:
return binary(expression, SqlStdOperatorTable.GREATER_THAN);
case LessThan:
return binary(expression, SqlStdOperatorTable.LESS_THAN);
case Parameter:
return parameter((ParameterExpression) expression);
case Call:
MethodCallExpression call = (MethodCallExpression) expression;
SqlOperator operator =
RexToLixTranslator.JAVA_TO_SQL_METHOD_MAP.get(call.method);
if (operator != null) {
return rexBuilder.makeCall(
type(call),
operator,
toRex(
Expressions.<Expression>list()
.appendIfNotNull(call.targetExpression)
.appendAll(call.expressions)));
}
throw new RuntimeException(
"Could translate call to method " + call.method);
case Constant:
final ConstantExpression constant =
(ConstantExpression) expression;
Object value = constant.value;
if (value instanceof Number) {
Number number = (Number) value;
if (value instanceof Double || value instanceof Float) {
return rexBuilder.makeApproxLiteral(
BigDecimal.valueOf(number.doubleValue()));
} else if (value instanceof BigDecimal) {
return rexBuilder.makeExactLiteral((BigDecimal) value);
} else {
return rexBuilder.makeExactLiteral(
BigDecimal.valueOf(number.longValue()));
}
} else if (value instanceof Boolean) {
return rexBuilder.makeLiteral((Boolean) value);
} else {
return rexBuilder.makeLiteral(constant.toString());
}
default:
throw new UnsupportedOperationException(
"unknown expression type " + expression.getNodeType() + " "
+ expression);
}
}
private RexNode binary(Expression expression, SqlBinaryOperator op) {
BinaryExpression call = (BinaryExpression) expression;
return rexBuilder.makeCall(type(call), op,
toRex(ImmutableList.of(call.expression0, call.expression1)));
}
private List<RexNode> toRex(List<Expression> expressions) {
ArrayList<RexNode> list = new ArrayList<RexNode>();
for (Expression expression : expressions) {
list.add(toRex(expression));
}
return list;
}
protected RelDataType type(Expression expression) {
final Type type = expression.getType();
return ((JavaTypeFactory) rexBuilder.getTypeFactory()).createType(type);
}
public ScalarTranslator bind(
List<ParameterExpression> parameterList, List<RexNode> values) {
return new LambdaScalarTranslator(
rexBuilder, parameterList, values);
}
public RexNode parameter(ParameterExpression param) {
throw new RuntimeException("unknown parameter " + param);
}
}
/** Translator that looks for parameters. */
private static class LambdaScalarTranslator extends EmptyScalarTranslator {
private final List<ParameterExpression> parameterList;
private final List<RexNode> values;
public LambdaScalarTranslator(
RexBuilder rexBuilder,
List<ParameterExpression> parameterList,
List<RexNode> values) {
super(rexBuilder);
this.parameterList = parameterList;
this.values = values;
}
public RexNode parameter(ParameterExpression param) {
int i = parameterList.indexOf(param);
if (i >= 0) {
return values.get(i);
}
throw new RuntimeException("unknown parameter " + param);
}
}
}
// End CalcitePrepareImpl.java
| |
/*
* Copyright (C) 2014-2015 Hippo Seven
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hippo.androidchinesestring;
import org.mozilla.universalchardet.UniversalDetector;

import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.util.List;
import java.util.Locale;
import java.util.zip.GZIPInputStream;
/**
 * Helper for making HTTP requests with cookie handling, redirect
 * following and automatic retry.
 *
 * <p>It is not thread safe.
 */
public class HttpHelper {

    /** Maximum number of attempts per request. */
    private static final int MAX_RETRY = 3;
    /** Maximum number of redirects followed within one attempt. */
    private static final int MAX_REDIRECTS = 3;
    private static final int CONNECT_TIMEOUT = 5000;
    private static final int READ_TIMEOUT = 5000;

    public static final String DEFAULT_USER_AGENT =
            "Mozilla/5.0 (Windows NT 6.1; WOW64) " +
            "AppleWebKit/537.36 (KHTML, like Gecko) " +
            "Chrome/39.0.2171.95 Safari/537.36";
    public static final String USER_AGENT =
            System.getProperty("http.agent", DEFAULT_USER_AGENT);

    // 307 has no constant in HttpURLConnection.
    private static final int HTTP_TEMP_REDIRECT = 307;
    // Fallback when no charset is declared and detection fails.
    private static final String DEFAULT_CHARSET = "utf-8";
    private static final String CHARSET_KEY = "charset=";

    // Response code of the most recent request; -1 before any request.
    private int mResponseCode = -1;

    /** Clears the recorded response code. */
    public void reset() {
        mResponseCode = -1;
    }

    /** Returns the response code of the most recent request, or -1. */
    public int getResponseCode() {
        return mResponseCode;
    }

    /**
     * Get cookie for the url
     *
     * @param url the URL
     * @return the cookie for the URL, or null for none
     */
    @SuppressWarnings("UnusedParameters")
    protected String getCookie(URL url) {
        return null;
    }

    /**
     * Store cookie for the url
     *
     * @param url the URL
     * @param value the cookie for the URL
     */
    @SuppressWarnings("UnusedParameters")
    protected void storeCookie(URL url, String value) {
        // Empty
    }

    /**
     * Prepare before connecting
     *
     * @param conn the connection
     */
    protected void onBeforeConnect(HttpURLConnection conn) {
        conn.setInstanceFollowRedirects(true);
        conn.setRequestProperty("User-Agent", USER_AGENT);
        conn.setConnectTimeout(CONNECT_TIMEOUT);
        conn.setReadTimeout(READ_TIMEOUT);
    }

    /**
     * Performs a single attempt, following up to {@link #MAX_REDIRECTS}
     * redirects.
     *
     * <p>Renamed from {@code doRequst} (typo); the method is private, so
     * callers are unaffected.
     *
     * @throws RedirectionException if the redirect limit is exceeded
     */
    private Object doRequest(RequestHelper rh) throws Exception {
        URL url;
        HttpURLConnection conn = null;
        int redirectionCount = 0;
        try {
            url = rh.getUrl();
            while (redirectionCount++ < MAX_REDIRECTS) {
                Log.d("Request: " + url.toString());
                conn = (HttpURLConnection) url.openConnection();
                // Prepare before connecting
                onBeforeConnect(conn);
                // Set cookie
                String cookie = getCookie(url);
                if (cookie != null) {
                    conn.setRequestProperty("Cookie", cookie);
                }
                // Let the helper add its own headers, body, etc.
                rh.onBeforeConnect(conn);
                conn.connect();
                // Store cookie
                List<String> cookieList = conn.getHeaderFields().get("Set-Cookie");
                if (cookieList != null) {
                    for (String cookieTemp : cookieList) {
                        if (cookieTemp != null) {
                            storeCookie(url, cookieTemp);
                        }
                    }
                }
                final int responseCode = conn.getResponseCode();
                mResponseCode = responseCode;
                Log.d("Response code: " + responseCode);
                switch (responseCode) {
                case HttpURLConnection.HTTP_MOVED_PERM:
                case HttpURLConnection.HTTP_MOVED_TEMP:
                case HttpURLConnection.HTTP_SEE_OTHER:
                case HTTP_TEMP_REDIRECT:
                    final String location = conn.getHeaderField("Location");
                    Log.d("New location: " + location);
                    conn.disconnect();
                    // Resolve relative redirects against the current URL.
                    url = new URL(url, location);
                    break;
                default:
                    return rh.onAfterConnect(conn);
                }
            }
        } finally {
            if (conn != null) {
                conn.disconnect();
            }
        }
        throw new RedirectionException();
    }

    /**
     * Runs a request with up to {@link #MAX_RETRY} attempts; the helper
     * decides via {@link RequestHelper#onRetry} whether to keep trying.
     *
     * <p>Renamed from {@code requst} (typo); private, so callers are
     * unaffected.
     */
    private Object request(RequestHelper rh) throws Exception {
        Exception exception = null;
        for (int times = 0;
                times < MAX_RETRY && (times == 0 || rh.onRetry(exception));
                times++) {
            try {
                return doRequest(rh);
            } catch (Exception e) {
                exception = e;
            }
        }
        // exception is non-null here: the loop body always runs at least
        // once and only falls through after catching.
        rh.onRequestFailed(exception);
        throw exception;
    }

    public interface RequestHelper {
        /**
         * Get the URL to connect
         *
         * @return the URL to connect
         */
        URL getUrl() throws MalformedURLException;

        /**
         * Add header or do something else for HttpURLConnection before connect
         *
         * @param conn the connection
         * @throws Exception on any preparation error
         */
        void onBeforeConnect(HttpURLConnection conn) throws Exception;

        /**
         * Get what do you need from HttpURLConnection after connect
         * Return null means get error
         *
         * @param conn the connection
         * @return what you want to return
         * @throws Exception on any read error
         */
        Object onAfterConnect(HttpURLConnection conn) throws Exception;

        /**
         * Retry http connecting, or stop
         *
         * @param previousException previous thrown
         * @return true for retry, false for stop
         */
        boolean onRetry(Exception previousException);

        /**
         * Called when request failed by exception
         */
        void onRequestFailed(Exception exception);
    }

    /** Base helper that reads the response body as a decoded string. */
    public static abstract class GetStringHelper implements RequestHelper {

        private final String mUrl;

        public GetStringHelper(String url) {
            mUrl = url;
        }

        @Override
        public URL getUrl() throws MalformedURLException {
            return new URL(mUrl);
        }

        @Override
        public void onBeforeConnect(HttpURLConnection conn)
                throws Exception {
            conn.addRequestProperty("Accept-Encoding", "gzip");
        }

        /**
         * Extracts the charset parameter from a Content-Type header value.
         *
         * @return the declared charset, or null if absent
         */
        private String getCharsetFromContentType(String contentType) {
            if (contentType == null) {
                return null;
            }
            String[] values = contentType.split(";");
            for (String value : values) {
                value = value.trim();
                // Locale.ROOT avoids locale-dependent case mapping (e.g. the
                // Turkish dotless i) when matching the "charset=" key.
                if (value.toLowerCase(Locale.ROOT).startsWith(CHARSET_KEY)) {
                    return value.substring(CHARSET_KEY.length());
                }
            }
            return null;
        }

        /** Reads and decodes the response (or error) body. */
        private String getBody(HttpURLConnection conn)
                throws Exception {
            String body = null;
            InputStream is = null;
            FastByteArrayOutputStream fbaos = null;
            try {
                try {
                    // First try to get input stream
                    is = conn.getInputStream();
                } catch (Exception e) {
                    // If we get error, get error stream
                    is = conn.getErrorStream();
                }
                if (is == null) {
                    // getErrorStream() may return null; fail with a clear
                    // message instead of an NPE below.
                    throw new IOException("No response body available");
                }
                String encoding = conn.getContentEncoding();
                if (encoding != null && encoding.equalsIgnoreCase("gzip")) {
                    is = new GZIPInputStream(is);
                }
                int length = conn.getContentLength();
                if (length >= 0) {
                    fbaos = new FastByteArrayOutputStream(length);
                } else {
                    fbaos = new FastByteArrayOutputStream();
                }
                Utils.copy(is, fbaos);
                // Get charset
                String charset = getCharsetFromContentType(conn.getContentType());
                if (charset == null) {
                    // No declared charset; detect it from the raw bytes.
                    UniversalDetector ud = new UniversalDetector(null);
                    byte[] bs = fbaos.getBuffer();
                    ud.handleData(bs, 0, bs.length);
                    ud.dataEnd();
                    charset = ud.getDetectedCharset();
                    if (charset == null) {
                        // Detection failed; fall back to UTF-8.
                        charset = DEFAULT_CHARSET;
                    }
                }
                body = fbaos.toString(charset);
            } finally {
                Utils.closeQuietly(is);
                Utils.closeQuietly(fbaos);
            }
            return body;
        }

        @Override
        public Object onAfterConnect(HttpURLConnection conn)
                throws Exception {
            return getBody(conn);
        }

        @Override
        public boolean onRetry(Exception previousException) {
            // Do not care about exception, just retry
            return true;
        }

        @Override
        public void onRequestFailed(Exception exception) {
            // Empty
        }
    }

    /**
     * RequstHelper for GET method
     */
    public static class GetHelper extends GetStringHelper {

        public GetHelper(String url) {
            super(url);
        }

        @Override
        public void onBeforeConnect(HttpURLConnection conn)
                throws Exception {
            super.onBeforeConnect(conn);
            conn.setRequestMethod("GET");
        }
    }

    /**
     * RequstHelper for post form data, use POST method
     */
    public static class PostFormHelper extends GetStringHelper {

        private final String[][] mArgs;

        public PostFormHelper(String url, String[][] args) {
            super(url);
            mArgs = args;
        }

        @Override
        public void onBeforeConnect(HttpURLConnection conn)
                throws Exception {
            super.onBeforeConnect(conn);
            conn.setDoOutput(true);
            conn.setDoInput(true);
            conn.setUseCaches(false);
            conn.setRequestMethod("POST");
            conn.setRequestProperty("Content-Type",
                    "application/x-www-form-urlencoded");
            // Build the url-encoded form body.
            StringBuilder sb = new StringBuilder();
            int i = 0;
            for (String[] arg : mArgs) {
                if (i != 0) {
                    sb.append("&");
                }
                sb.append(URLEncoder.encode(arg[0], "UTF-8"));
                sb.append("=");
                sb.append(URLEncoder.encode(arg[1], "UTF-8"));
                i++;
            }
            // try-with-resources closes the stream even if writing fails.
            try (DataOutputStream out =
                    new DataOutputStream(conn.getOutputStream())) {
                out.writeBytes(sb.toString());
                out.flush();
            }
        }
    }

    /**
     * Http GET method
     * @param url the url to get
     * @return body
     */
    public String get(String url) throws Exception {
        return (String) request(new GetHelper(url));
    }

    /**
     * Post form data
     * @param url the url to post
     * @param args the form to post
     * @return body
     */
    public String postForm(String url, String[][] args) throws Exception {
        return (String) request(new PostFormHelper(url, args));
    }
}
| |
/*
* Copyright 2015 Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.net.device;
import org.onosproject.net.DeviceId;
/**
 * Default implementation of immutable port statistics.
 */
public final class DefaultPortStatistics implements PortStatistics {

    private final DeviceId deviceId;
    private final int port;
    private final long packetsReceived;
    private final long packetsSent;
    private final long bytesReceived;
    private final long bytesSent;
    private final long packetsRxDropped;
    private final long packetsTxDropped;
    private final long packetsRxErrors;
    private final long packetsTxErrors;
    private final long durationSec;
    private final long durationNano;

    private DefaultPortStatistics(DeviceId deviceId,
                                  int port,
                                  long packetsReceived,
                                  long packetsSent,
                                  long bytesReceived,
                                  long bytesSent,
                                  long packetsRxDropped,
                                  long packetsTxDropped,
                                  long packetsRxErrors,
                                  long packetsTxErrors,
                                  long durationSec,
                                  long durationNano) {
        this.deviceId = deviceId;
        this.port = port;
        this.packetsReceived = packetsReceived;
        this.packetsSent = packetsSent;
        this.bytesReceived = bytesReceived;
        this.bytesSent = bytesSent;
        this.packetsRxDropped = packetsRxDropped;
        this.packetsTxDropped = packetsTxDropped;
        this.packetsRxErrors = packetsRxErrors;
        this.packetsTxErrors = packetsTxErrors;
        this.durationSec = durationSec;
        this.durationNano = durationNano;
    }

    // Constructor for serializer: all counters zero, no device.
    private DefaultPortStatistics() {
        this.deviceId = null;
        this.port = 0;
        this.packetsReceived = 0;
        this.packetsSent = 0;
        this.bytesReceived = 0;
        this.bytesSent = 0;
        this.packetsRxDropped = 0;
        this.packetsTxDropped = 0;
        this.packetsRxErrors = 0;
        this.packetsTxErrors = 0;
        this.durationSec = 0;
        this.durationNano = 0;
    }

    /**
     * Creates a builder for DefaultPortStatistics object.
     *
     * @return builder object for DefaultPortStatistics object
     */
    public static DefaultPortStatistics.Builder builder() {
        return new Builder();
    }

    @Override
    public int port() {
        return this.port;
    }

    @Override
    public long packetsReceived() {
        return this.packetsReceived;
    }

    @Override
    public long packetsSent() {
        return this.packetsSent;
    }

    @Override
    public long bytesReceived() {
        return this.bytesReceived;
    }

    @Override
    public long bytesSent() {
        return this.bytesSent;
    }

    @Override
    public long packetsRxDropped() {
        return this.packetsRxDropped;
    }

    @Override
    public long packetsTxDropped() {
        return this.packetsTxDropped;
    }

    @Override
    public long packetsRxErrors() {
        return this.packetsRxErrors;
    }

    @Override
    public long packetsTxErrors() {
        return this.packetsTxErrors;
    }

    @Override
    public long durationSec() {
        return this.durationSec;
    }

    @Override
    public long durationNano() {
        return this.durationNano;
    }

    @Override
    public String toString() {
        // Chained appends instead of string concatenation inside append();
        // output is unchanged. Durations are not included (as before).
        StringBuilder sb = new StringBuilder();
        sb.append("device: ").append(deviceId).append(", ")
                .append("port: ").append(this.port).append(", ")
                .append("pktRx: ").append(this.packetsReceived).append(", ")
                .append("pktTx: ").append(this.packetsSent).append(", ")
                .append("byteRx: ").append(this.bytesReceived).append(", ")
                .append("byteTx: ").append(this.bytesSent).append(", ")
                .append("pktRxErr: ").append(this.packetsRxErrors).append(", ")
                .append("pktTxErr: ").append(this.packetsTxErrors).append(", ")
                .append("pktRxDrp: ").append(this.packetsRxDropped).append(", ")
                .append("pktTxDrp: ").append(this.packetsTxDropped);
        return sb.toString();
    }

    /** Mutable builder; all counters default to zero and the device to null. */
    public static final class Builder {
        DeviceId deviceId;
        int port;
        long packetsReceived;
        long packetsSent;
        long bytesReceived;
        long bytesSent;
        long packetsRxDropped;
        long packetsTxDropped;
        long packetsRxErrors;
        long packetsTxErrors;
        long durationSec;
        long durationNano;

        private Builder() {
        }

        /**
         * Sets port number.
         *
         * @param port port number
         * @return builder object
         */
        public Builder setPort(int port) {
            this.port = port;
            return this;
        }

        /**
         * Sets the device identifier.
         *
         * @param deviceId device identifier
         * @return builder object
         */
        public Builder setDeviceId(DeviceId deviceId) {
            this.deviceId = deviceId;
            return this;
        }

        /**
         * Sets the number of packet received.
         *
         * @param packets number of packets received
         * @return builder object
         */
        public Builder setPacketsReceived(long packets) {
            packetsReceived = packets;
            return this;
        }

        /**
         * Sets the number of packets sent.
         *
         * @param packets number of packets sent
         * @return builder object
         */
        public Builder setPacketsSent(long packets) {
            packetsSent = packets;
            return this;
        }

        /**
         * Sets the number of received bytes.
         *
         * @param bytes number of received bytes.
         * @return builder object
         */
        public Builder setBytesReceived(long bytes) {
            bytesReceived = bytes;
            return this;
        }

        /**
         * Sets the number of sent bytes.
         *
         * @param bytes number of sent bytes
         * @return builder object
         */
        public Builder setBytesSent(long bytes) {
            bytesSent = bytes;
            return this;
        }

        /**
         * Sets the number of packets dropped by RX.
         *
         * @param packets number of packets dropped by RX
         * @return builder object
         */
        public Builder setPacketsRxDropped(long packets) {
            packetsRxDropped = packets;
            return this;
        }

        /**
         * Sets the number of packets dropped by TX.
         *
         * @param packets number of packets
         * @return builder object
         */
        public Builder setPacketsTxDropped(long packets) {
            packetsTxDropped = packets;
            return this;
        }

        /**
         * Sets the number of receive errors.
         *
         * @param packets number of receive errors
         * @return builder object
         */
        public Builder setPacketsRxErrors(long packets) {
            packetsRxErrors = packets;
            return this;
        }

        /**
         * Sets the number of transmit errors.
         *
         * @param packets number of transmit errors
         * @return builder object
         */
        public Builder setPacketsTxErrors(long packets) {
            packetsTxErrors = packets;
            return this;
        }

        /**
         * Sets the time port has been alive in seconds.
         *
         * @param sec time port has been alive in seconds
         * @return builder object
         */
        public Builder setDurationSec(long sec) {
            durationSec = sec;
            return this;
        }

        /**
         * Sets the time port has been alive in nano seconds.
         *
         * @param nano time port has been alive in nano seconds
         * @return builder object
         */
        public Builder setDurationNano(long nano) {
            durationNano = nano;
            return this;
        }

        /**
         * Creates a PortStatistics object.
         *
         * @return DefaultPortStatistics object
         */
        public DefaultPortStatistics build() {
            return new DefaultPortStatistics(
                    deviceId,
                    port,
                    packetsReceived,
                    packetsSent,
                    bytesReceived,
                    bytesSent,
                    packetsRxDropped,
                    packetsTxDropped,
                    packetsRxErrors,
                    packetsTxErrors,
                    durationSec,
                    durationNano);
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.support.replication;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.TestShardRouting;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.index.shard.ShardId;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_CREATION_DATE;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_CREATED;
import static org.elasticsearch.test.ESTestCase.randomFrom;
import static org.elasticsearch.test.ESTestCase.randomInt;
import static org.elasticsearch.test.ESTestCase.randomIntBetween;
/**
 * Helper methods for generating {@link ClusterState} instances for use in unit tests.
 */
public class ClusterStateCreationUtils {

    /**
     * Creates cluster state with an index that has one shard and #(replicaStates) replicas
     *
     * @param index              name of the index
     * @param activePrimaryLocal if active primary should coincide with the local node in the cluster state
     * @param primaryState       state of primary
     * @param replicaStates      states of the replicas. length of this array determines also the number of replicas
     */
    public static ClusterState state(String index, boolean activePrimaryLocal, ShardRoutingState primaryState,
                                     ShardRoutingState... replicaStates) {
        final int numberOfReplicas = replicaStates.length;

        // One node per shard copy, plus an extra target node for every relocating shard.
        int numberOfNodes = numberOfReplicas + 1;
        if (primaryState == ShardRoutingState.RELOCATING) {
            numberOfNodes++;
        }
        for (ShardRoutingState state : replicaStates) {
            if (state == ShardRoutingState.RELOCATING) {
                numberOfNodes++;
            }
        }
        numberOfNodes = Math.max(2, numberOfNodes); // we need a non-local master to test shard failures
        final ShardId shardId = new ShardId(index, "_na_", 0);
        DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder();
        Set<String> unassignedNodes = new HashSet<>();
        // Register one spare node beyond the computed minimum and remember which nodes
        // do not yet host a shard copy.
        for (int i = 0; i < numberOfNodes + 1; i++) {
            final DiscoveryNode node = newNode(i);
            discoBuilder = discoBuilder.put(node);
            unassignedNodes.add(node.getId());
        }
        discoBuilder.localNodeId(newNode(0).getId());
        discoBuilder.masterNodeId(newNode(1).getId()); // we need a non-local master to test shard failures
        final int primaryTerm = 1 + randomInt(200);
        IndexMetaData indexMetaData = IndexMetaData.builder(index).settings(Settings.builder()
                .put(SETTING_VERSION_CREATED, Version.CURRENT)
                .put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, numberOfReplicas)
                .put(SETTING_CREATION_DATE, System.currentTimeMillis())).primaryTerm(0, primaryTerm).build();

        RoutingTable.Builder routing = new RoutingTable.Builder();
        routing.addAsNew(indexMetaData);

        IndexShardRoutingTable.Builder indexShardRoutingBuilder = new IndexShardRoutingTable.Builder(shardId);

        // Place the primary according to the requested state.
        String primaryNode = null;
        String relocatingNode = null;
        UnassignedInfo unassignedInfo = null;
        if (primaryState != ShardRoutingState.UNASSIGNED) {
            if (activePrimaryLocal) {
                primaryNode = newNode(0).getId();
                unassignedNodes.remove(primaryNode);
            } else {
                // Pick any node except the local one (node 0) to host the primary.
                Set<String> unassignedNodesExcludingPrimary = new HashSet<>(unassignedNodes);
                unassignedNodesExcludingPrimary.remove(newNode(0).getId());
                primaryNode = selectAndRemove(unassignedNodesExcludingPrimary);
                unassignedNodes.remove(primaryNode);
            }
            if (primaryState == ShardRoutingState.RELOCATING) {
                relocatingNode = selectAndRemove(unassignedNodes);
            }
        } else {
            unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null);
        }
        indexShardRoutingBuilder.addShard(TestShardRouting.newShardRouting(index, 0, primaryNode, relocatingNode, null, true,
                primaryState, unassignedInfo));

        // Place each replica on a distinct remaining node.
        for (ShardRoutingState replicaState : replicaStates) {
            String replicaNode = null;
            relocatingNode = null;
            unassignedInfo = null;
            if (replicaState != ShardRoutingState.UNASSIGNED) {
                assert primaryNode != null : "a replica is assigned but the primary isn't";
                replicaNode = selectAndRemove(unassignedNodes);
                if (replicaState == ShardRoutingState.RELOCATING) {
                    relocatingNode = selectAndRemove(unassignedNodes);
                }
            } else {
                unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null);
            }
            indexShardRoutingBuilder.addShard(
                    TestShardRouting.newShardRouting(index, shardId.id(), replicaNode, relocatingNode, null, false, replicaState,
                            unassignedInfo));
        }

        ClusterState.Builder state = ClusterState.builder(new ClusterName("test"));
        state.nodes(discoBuilder);
        state.metaData(MetaData.builder().put(indexMetaData, false).generateClusterUuidIfNeeded());
        state.routingTable(RoutingTable.builder().add(IndexRoutingTable.builder(indexMetaData.getIndex())
                .addIndexShard(indexShardRoutingBuilder.build())).build());
        return state.build();
    }

    /**
     * Creates cluster state with several shards and one replica and all shards STARTED.
     */
    public static ClusterState stateWithAssignedPrimariesAndOneReplica(String index, int numberOfShards) {
        int numberOfNodes = 2; // we need a non-local master to test shard failures
        DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder();
        for (int i = 0; i < numberOfNodes + 1; i++) {
            final DiscoveryNode node = newNode(i);
            discoBuilder = discoBuilder.put(node);
        }
        discoBuilder.localNodeId(newNode(0).getId());
        discoBuilder.masterNodeId(newNode(1).getId()); // we need a non-local master to test shard failures
        IndexMetaData indexMetaData = IndexMetaData.builder(index).settings(Settings.builder()
                .put(SETTING_VERSION_CREATED, Version.CURRENT)
                .put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1)
                .put(SETTING_CREATION_DATE, System.currentTimeMillis())).build();
        ClusterState.Builder state = ClusterState.builder(new ClusterName("test"));
        state.nodes(discoBuilder);
        state.metaData(MetaData.builder().put(indexMetaData, false).generateClusterUuidIfNeeded());
        IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(indexMetaData.getIndex());
        for (int i = 0; i < numberOfShards; i++) {
            RoutingTable.Builder routing = new RoutingTable.Builder();
            routing.addAsNew(indexMetaData);
            final ShardId shardId = new ShardId(index, "_na_", i);
            IndexShardRoutingTable.Builder indexShardRoutingBuilder = new IndexShardRoutingTable.Builder(shardId);
            // Primary on node 0 (the local node), replica on node 1 (the master); both STARTED.
            indexShardRoutingBuilder.addShard(TestShardRouting.newShardRouting(index, i, newNode(0).getId(), null, null, true,
                    ShardRoutingState.STARTED, null));
            indexShardRoutingBuilder.addShard(TestShardRouting.newShardRouting(index, i, newNode(1).getId(), null, null, false,
                    ShardRoutingState.STARTED, null));
            indexRoutingTableBuilder.addIndexShard(indexShardRoutingBuilder.build());
        }
        state.routingTable(RoutingTable.builder().add(indexRoutingTableBuilder.build()).build());
        return state.build();
    }

    /**
     * Creates cluster state with an index that has one shard and as many replicas as numberOfReplicas.
     * Primary will be STARTED in cluster state but replicas will be one of UNASSIGNED, INITIALIZING, STARTED or RELOCATING.
     *
     * @param index              name of the index
     * @param activePrimaryLocal if active primary should coincide with the local node in the cluster state
     * @param numberOfReplicas   number of replicas
     */
    public static ClusterState stateWithActivePrimary(String index, boolean activePrimaryLocal, int numberOfReplicas) {
        int assignedReplicas = randomIntBetween(0, numberOfReplicas);
        return stateWithActivePrimary(index, activePrimaryLocal, assignedReplicas, numberOfReplicas - assignedReplicas);
    }

    /**
     * Creates cluster state with an index that has one shard and as many replicas as numberOfReplicas.
     * Primary will be STARTED in cluster state. Some (unassignedReplicas) will be UNASSIGNED and
     * some (assignedReplicas) will be one of INITIALIZING, STARTED or RELOCATING.
     *
     * @param index              name of the index
     * @param activePrimaryLocal if active primary should coincide with the local node in the cluster state
     * @param assignedReplicas   number of replicas that should have INITIALIZING, STARTED or RELOCATING state
     * @param unassignedReplicas number of replicas that should be unassigned
     */
    public static ClusterState stateWithActivePrimary(String index, boolean activePrimaryLocal,
                                                      int assignedReplicas, int unassignedReplicas) {
        ShardRoutingState[] replicaStates = new ShardRoutingState[assignedReplicas + unassignedReplicas];
        // no point in randomizing - node assignment later on does it too.
        for (int i = 0; i < assignedReplicas; i++) {
            replicaStates[i] = randomFrom(ShardRoutingState.INITIALIZING, ShardRoutingState.STARTED, ShardRoutingState.RELOCATING);
        }
        for (int i = assignedReplicas; i < replicaStates.length; i++) {
            replicaStates[i] = ShardRoutingState.UNASSIGNED;
        }
        return state(index, activePrimaryLocal, randomFrom(ShardRoutingState.STARTED, ShardRoutingState.RELOCATING), replicaStates);
    }

    /**
     * Creates a cluster state with no index
     */
    public static ClusterState stateWithNoShard() {
        DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder();
        discoBuilder.localNodeId(newNode(0).getId());
        discoBuilder.masterNodeId(newNode(1).getId());
        ClusterState.Builder state = ClusterState.builder(new ClusterName("test"));
        state.nodes(discoBuilder);
        state.metaData(MetaData.builder().generateClusterUuidIfNeeded());
        state.routingTable(RoutingTable.builder().build());
        return state.build();
    }

    /**
     * Creates a cluster state where local node and master node can be specified
     *
     * @param localNode  node in allNodes that is the local node
     * @param masterNode node in allNodes that is the master node. Can be null if no master exists
     * @param allNodes   all nodes in the cluster
     * @return cluster state
     */
    public static ClusterState state(DiscoveryNode localNode, DiscoveryNode masterNode, DiscoveryNode... allNodes) {
        DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder();
        for (DiscoveryNode node : allNodes) {
            discoBuilder.put(node);
        }
        if (masterNode != null) {
            discoBuilder.masterNodeId(masterNode.getId());
        }
        discoBuilder.localNodeId(localNode.getId());
        ClusterState.Builder state = ClusterState.builder(new ClusterName("test"));
        state.nodes(discoBuilder);
        state.metaData(MetaData.builder().generateClusterUuidIfNeeded());
        return state.build();
    }

    /** Creates a node named {@code node_<nodeId>} holding every {@link DiscoveryNode.Role}. */
    private static DiscoveryNode newNode(int nodeId) {
        return new DiscoveryNode("node_" + nodeId, LocalTransportAddress.buildUnique(), Collections.emptyMap(),
                new HashSet<>(Arrays.asList(DiscoveryNode.Role.values())), Version.CURRENT);
    }

    /** Picks a random element from the set, removes it, and returns it. */
    private static String selectAndRemove(Set<String> strings) {
        String selection = randomFrom(strings.toArray(new String[strings.size()]));
        strings.remove(selection);
        return selection;
    }
}
| |
/*===========================================================================
* Licensed Materials - Property of IBM
* "Restricted Materials of IBM"
*
* IBM SDK, Java(tm) Technology Edition, v8
* (C) Copyright IBM Corp. 1998, 2013. All Rights Reserved
*
* US Government Users Restricted Rights - Use, duplication or disclosure
* restricted by GSA ADP Schedule Contract with IBM Corp.
*===========================================================================
*/
/*
* Copyright (c) 1998, 2013, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package javax.swing.colorchooser;
import javax.swing.*;
import javax.swing.border.*;
import javax.swing.event.*;
import javax.swing.text.*;
import java.awt.*;
import java.awt.image.*;
import java.awt.event.*;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.Serializable;
import sun.swing.SwingUtilities2;
/**
 * The standard preview panel for the color chooser.
 * <p>
 * <strong>Warning:</strong>
 * Serialized objects of this class will not be compatible with
 * future Swing releases. The current serialization support is
 * appropriate for short term storage or RMI between applications running
 * the same version of Swing. As of 1.4, support for long term storage
 * of all JavaBeans™
 * has been added to the <code>java.beans</code> package.
 * Please see {@link java.beans.XMLEncoder}.
 *
 * @author Steve Wilson
 * @see JColorChooser
 */
class DefaultPreviewPanel extends JPanel {

    // Layout constants, in pixels.
    private int squareSize = 25;   // edge length of one checker square
    private int squareGap = 5;     // gap between adjacent squares
    private int innerGap = 5;      // inset of the nested squares
    private int textGap = 5;       // padding around the sample text
    // NOTE(review): this field is never read within the class; kept to preserve
    // the serialized form of this Serializable JPanel subclass.
    private Font font = new Font(Font.DIALOG, Font.PLAIN, 12);
    private String sampleText;     // lazily fetched from the UIManager; see getSampleText()
    private int swatchWidth = 50;  // width of the old/current color swatch
    private Color oldColor = null; // foreground captured on first paint, shown as the "old" color

    /** Returns the enclosing {@link JColorChooser}, or {@code null} if this panel has none. */
    private JColorChooser getColorChooser() {
        return (JColorChooser)SwingUtilities.getAncestorOfClass(
                JColorChooser.class, this);
    }

    /**
     * Computes the preferred size from the sample-text metrics plus the space
     * needed for the three square columns and the swatch.
     */
    @Override
    public Dimension getPreferredSize() {
        JComponent host = getColorChooser();
        if (host == null) {
            host = this;
        }
        FontMetrics fm = host.getFontMetrics(getFont());
        int height = fm.getHeight();
        int width = SwingUtilities2.stringWidth(host, fm, getSampleText());
        int y = height*3 + textGap*3;
        int x = squareSize * 3 + squareGap*2 + swatchWidth + width + textGap*3;
        return new Dimension( x,y );
    }

    /**
     * Paints the three preview regions (squares, sample text, swatch), ordered
     * left-to-right or right-to-left according to the component orientation.
     */
    @Override
    public void paintComponent(Graphics g) {
        // Remember the color in effect the first time we paint, as the "old" color.
        if (oldColor == null)
            oldColor = getForeground();

        g.setColor(getBackground());
        g.fillRect(0,0,getWidth(),getHeight());

        if (this.getComponentOrientation().isLeftToRight()) {
            int squareWidth = paintSquares(g, 0);
            int textWidth = paintText(g, squareWidth);
            paintSwatch(g, squareWidth + textWidth);
        } else {
            int swatchWidth = paintSwatch(g, 0);
            int textWidth = paintText(g, swatchWidth);
            paintSquares(g , swatchWidth + textWidth);
        }
    }

    /**
     * Paints the old color above the current foreground color as two stacked bars.
     *
     * @param g       graphics to paint into
     * @param offsetX left edge of the swatch
     * @return the x coordinate just past the swatch
     */
    private int paintSwatch(Graphics g, int offsetX) {
        int swatchX = offsetX;
        g.setColor(oldColor);
        g.fillRect(swatchX, 0, swatchWidth, (squareSize) + (squareGap/2));
        g.setColor(getForeground());
        g.fillRect(swatchX, (squareSize) + (squareGap/2), swatchWidth, (squareSize) + (squareGap/2) );
        return (swatchX+swatchWidth);
    }

    /**
     * Paints the sample text three times: foreground on background, black on a
     * foreground-colored bar, and foreground on a white bar.
     *
     * @param g       graphics to paint into
     * @param offsetX left edge of the text column
     * @return the width consumed by the text column
     */
    private int paintText(Graphics g, int offsetX) {
        g.setFont(getFont());
        JComponent host = getColorChooser();
        if (host == null) {
            host = this;
        }
        FontMetrics fm = SwingUtilities2.getFontMetrics(host, g);

        int ascent = fm.getAscent();
        int height = fm.getHeight();
        int width = SwingUtilities2.stringWidth(host, fm, getSampleText());

        int textXOffset = offsetX + textGap;

        Color color = getForeground();

        // Row 1: text in the chosen color on the panel background.
        g.setColor(color);
        SwingUtilities2.drawString(host, g, getSampleText(),textXOffset+(textGap/2),
                ascent+2);

        // Row 2: black text on a bar filled with the chosen color.
        g.fillRect(textXOffset,
                ( height) + textGap,
                width + (textGap),
                height +2);
        g.setColor(Color.black);
        SwingUtilities2.drawString(host, g, getSampleText(),
                textXOffset+(textGap/2),
                height+ascent+textGap+2);

        // Row 3: text in the chosen color on a white bar.
        g.setColor(Color.white);
        g.fillRect(textXOffset,
                ( height + textGap) * 2,
                width + (textGap),
                height +2);
        g.setColor(color);
        SwingUtilities2.drawString(host, g, getSampleText(),
                textXOffset+(textGap/2),
                ((height+textGap) * 2)+ascent+2);

        return width + textGap*3;
    }

    /**
     * Paints a 3x2 grid of squares showing the chosen color against white and
     * black backgrounds (with nested contrast squares). Uses translate() to
     * reposition between cells, so statement order matters.
     *
     * @param g       graphics to paint into
     * @param offsetX left edge of the grid
     * @return the width consumed by the grid
     */
    private int paintSquares(Graphics g, int offsetX) {

        int squareXOffset = offsetX;
        Color color = getForeground();

        // Column 1, top: white square with nested color and white squares.
        g.setColor(Color.white);
        g.fillRect(squareXOffset,0,squareSize,squareSize);
        g.setColor(color);
        g.fillRect(squareXOffset+innerGap,
                innerGap,
                squareSize - (innerGap*2),
                squareSize - (innerGap*2));
        g.setColor(Color.white);
        g.fillRect(squareXOffset+innerGap*2,
                innerGap*2,
                squareSize - (innerGap*4),
                squareSize - (innerGap*4));

        // Column 1, bottom: solid color square.
        g.setColor(color);
        g.fillRect(squareXOffset,squareSize+squareGap,squareSize,squareSize);

        // Column 2, top: black square with nested color and white squares.
        g.translate(squareSize+squareGap, 0);
        g.setColor(Color.black);
        g.fillRect(squareXOffset,0,squareSize,squareSize);
        g.setColor(color);
        g.fillRect(squareXOffset+innerGap,
                innerGap,
                squareSize - (innerGap*2),
                squareSize - (innerGap*2));
        g.setColor(Color.white);
        g.fillRect(squareXOffset+innerGap*2,
                innerGap*2,
                squareSize - (innerGap*4),
                squareSize - (innerGap*4));
        g.translate(-(squareSize+squareGap), 0);

        // Column 2, bottom: white square with nested color square.
        g.translate(squareSize+squareGap, squareSize+squareGap);
        g.setColor(Color.white);
        g.fillRect(squareXOffset,0,squareSize,squareSize);
        g.setColor(color);
        g.fillRect(squareXOffset+innerGap,
                innerGap,
                squareSize - (innerGap*2),
                squareSize - (innerGap*2));
        g.translate(-(squareSize+squareGap), -(squareSize+squareGap));

        // Column 3, top: white square with nested color and black squares.
        g.translate((squareSize+squareGap)*2, 0);
        g.setColor(Color.white);
        g.fillRect(squareXOffset,0,squareSize,squareSize);
        g.setColor(color);
        g.fillRect(squareXOffset+innerGap,
                innerGap,
                squareSize - (innerGap*2),
                squareSize - (innerGap*2));
        g.setColor(Color.black);
        g.fillRect(squareXOffset+innerGap*2,
                innerGap*2,
                squareSize - (innerGap*4),
                squareSize - (innerGap*4));
        g.translate(-((squareSize+squareGap)*2), 0);

        // Column 3, bottom: black square with nested color square.
        g.translate((squareSize+squareGap)*2, (squareSize+squareGap));
        g.setColor(Color.black);
        g.fillRect(squareXOffset,0,squareSize,squareSize);
        g.setColor(color);
        g.fillRect(squareXOffset+innerGap,
                innerGap,
                squareSize - (innerGap*2),
                squareSize - (innerGap*2));
        g.translate(-((squareSize+squareGap)*2), -(squareSize+squareGap));

        return (squareSize*3+squareGap*2);
    }

    /** Returns the localized sample text, loading it from the UIManager on first use. */
    private String getSampleText() {
        if (this.sampleText == null) {
            this.sampleText = UIManager.getString("ColorChooser.sampleText", getLocale());
        }
        return this.sampleText;
    }
}
| |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2011.09.09 at 01:22:27 PM CEST
//
package test;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyAttribute;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlID;
import javax.xml.bind.annotation.XmlIDREF;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import javax.xml.namespace.QName;
/**
 * <p>Java class for mode.type complex type, generated from the MathML schema.
 *
 * <p>Maps the {@code mode.attlist} attribute group of
 * {@code {http://www.w3.org/1998/Math/MathML}mode.type}; any attribute not
 * bound to a typed property lands in {@link #getOtherAttributes()}.
 *
 * <pre>
 * &lt;complexType name="mode.type">
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;attGroup ref="{http://www.w3.org/1998/Math/MathML}mode.attlist"/>
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "mode.type", namespace = "http://www.w3.org/1998/Math/MathML")
public class ModeType {

    // Field names and annotations are the JAXB binding contract; do not rename.
    @XmlAttribute(name = "class")
    @XmlSchemaType(name = "NMTOKENS")
    protected List<String> clazz;
    @XmlAttribute
    protected String style;
    @XmlAttribute
    @XmlIDREF
    @XmlSchemaType(name = "IDREF")
    protected Object xref;
    @XmlAttribute
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlID
    @XmlSchemaType(name = "ID")
    protected String id;
    @XmlAttribute(namespace = "http://www.w3.org/1999/xlink")
    @XmlSchemaType(name = "anySimpleType")
    protected String href;
    @XmlAttribute
    protected String encoding;
    @XmlAttribute
    @XmlSchemaType(name = "anyURI")
    protected String definitionURL;
    @XmlAnyAttribute
    private Map<QName, String> otherAttributes = new HashMap<QName, String>();

    /**
     * Returns the live, lazily-created list backing the {@code class} attribute.
     * Mutate the returned list directly; there is deliberately no setter.
     *
     * @return the list of NMTOKEN strings, never {@code null}
     */
    public List<String> getClazz() {
        if (this.clazz == null) {
            this.clazz = new ArrayList<String>();
        }
        return this.clazz;
    }

    /** @return the {@code style} attribute, possibly {@code null} */
    public String getStyle() {
        return this.style;
    }

    /** @param value new value for the {@code style} attribute */
    public void setStyle(String value) {
        this.style = value;
    }

    /** @return the object referenced by the IDREF {@code xref} attribute, possibly {@code null} */
    public Object getXref() {
        return this.xref;
    }

    /** @param value object to reference via the {@code xref} attribute */
    public void setXref(Object value) {
        this.xref = value;
    }

    /** @return the XML ID of this element, possibly {@code null} */
    public String getId() {
        return this.id;
    }

    /** @param value new XML ID for this element */
    public void setId(String value) {
        this.id = value;
    }

    /** @return the xlink {@code href} attribute, possibly {@code null} */
    public String getHref() {
        return this.href;
    }

    /** @param value new value for the xlink {@code href} attribute */
    public void setHref(String value) {
        this.href = value;
    }

    /** @return the {@code encoding} attribute, possibly {@code null} */
    public String getEncoding() {
        return this.encoding;
    }

    /** @param value new value for the {@code encoding} attribute */
    public void setEncoding(String value) {
        this.encoding = value;
    }

    /** @return the {@code definitionURL} attribute, possibly {@code null} */
    public String getDefinitionURL() {
        return this.definitionURL;
    }

    /** @param value new value for the {@code definitionURL} attribute */
    public void setDefinitionURL(String value) {
        this.definitionURL = value;
    }

    /**
     * Returns the live map of attributes not bound to any typed property,
     * keyed by qualified attribute name. Add entries directly; by design
     * there is no setter.
     *
     * @return always non-null
     */
    public Map<QName, String> getOtherAttributes() {
        return this.otherAttributes;
    }
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2018 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.trans.step;
import org.apache.commons.vfs2.FileObject;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CTabFolder;
import org.eclipse.swt.custom.CTabItem;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.ShellAdapter;
import org.eclipse.swt.events.ShellEvent;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.ObjectLocationSpecificationMethod;
import org.pentaho.di.core.Props;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleFileException;
import org.pentaho.di.core.extension.ExtensionPointHandler;
import org.pentaho.di.core.extension.KettleExtensionPoint;
import org.pentaho.di.core.gui.Point;
import org.pentaho.di.core.plugins.PluginInterface;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.core.plugins.StepPluginType;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.variables.Variables;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.RepositoryDirectoryInterface;
import org.pentaho.di.repository.RepositoryObject;
import org.pentaho.di.repository.RepositoryObjectType;
import org.pentaho.di.shared.SharedObjects;
import org.pentaho.di.trans.StepWithMappingMeta;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDialogInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.steps.recordsfromstream.RecordsFromStreamMeta;
import org.pentaho.di.trans.steps.transexecutor.TransExecutorMeta;
import org.pentaho.di.trans.streaming.common.BaseStreamStepMeta;
import org.pentaho.di.ui.core.ConstUI;
import org.pentaho.di.ui.core.dialog.ErrorDialog;
import org.pentaho.di.ui.core.gui.GUIResource;
import org.pentaho.di.ui.core.widget.ComboVar;
import org.pentaho.di.ui.core.widget.TextVar;
import org.pentaho.di.ui.repository.dialog.SelectObjectDialog;
import org.pentaho.di.ui.spoon.MainSpoonPerspective;
import org.pentaho.di.ui.spoon.Spoon;
import org.pentaho.di.ui.spoon.dialog.NewSubtransDialog;
import org.pentaho.di.ui.util.DialogUtils;
import org.pentaho.vfs.ui.VfsFileChooserDialog;
import org.pentaho.xul.swt.tab.TabItem;
import org.pentaho.xul.swt.tab.TabSet;
import java.io.IOException;
import java.util.Arrays;
import java.util.Optional;
@SuppressWarnings( { "FieldCanBeLocal", "unused", "WeakerAccess" } )
public abstract class BaseStreamingDialog extends BaseStepDialog implements StepDialogInterface {
// Width in pixels used when sizing input fields in this dialog family.
public static final int INPUT_WIDTH = 350;
private static Class<?> PKG = BaseStreamingDialog.class;
// for i18n purposes, needed by Translator2!! $NON-NLS-1$

// Streaming step metadata being edited (cast from the generic "in" argument).
protected BaseStreamStepMeta meta;
// Sub-transformation referenced by this step; null until loaded.
protected TransMeta executorTransMeta = null;
private Spoon spoonInstance;

// Sub-transformation path row: label, path text box, Browse and Create buttons.
protected Label wlTransPath;
protected TextVar wTransPath;
protected Button wbBrowseTrans;
protected Button wbCreateSubtrans;

// Combo selecting a step inside the sub-transformation — presumably the step
// records are retrieved from; TODO confirm against subclasses.
protected Label wlSubStep;
protected ComboVar wSubStep;

// How the sub-transformation is referenced (repository object id / spec method).
protected ObjectId referenceObjectId;
protected ObjectLocationSpecificationMethod specificationMethod;

// Shared listener that flags the step metadata as changed on any edit.
protected ModifyListener lsMod;

// Batch tab widgets: number of records and duration.
protected Label wlBatchSize;
protected TextVar wBatchSize;
protected Label wlBatchDuration;
protected TextVar wBatchDuration;

// Tab folder plus the standard Setup/Batch/Results tabs and their composites.
protected CTabFolder wTabFolder;
protected CTabItem wSetupTab;
protected CTabItem wBatchTab;
protected CTabItem wResultsTab;
protected Composite wSetupComp;
protected Composite wBatchComp;
protected Composite wResultsComp;
/**
 * Creates the dialog for a streaming step.
 *
 * @param parent parent shell
 * @param in     step metadata to edit; must be a {@link BaseStreamStepMeta}
 * @param tr     transformation the step belongs to
 * @param sname  step name
 */
public BaseStreamingDialog( Shell parent, Object in, TransMeta tr, String sname ) {
  super( parent, (BaseStepMeta) in, tr, sname );
  meta = (BaseStreamStepMeta) in;
  spoonInstance = Spoon.getInstance();
}
/**
 * Builds all widgets, opens the dialog and runs the SWT event loop until the
 * user presses OK or Cancel (or closes the shell, which cancels).
 *
 * @return the step name (possibly edited via ok())
 */
public String open() {
  Shell parent = getParent();
  Display display = parent.getDisplay();

  // Resizable dialog shell with the dialog's form layout.
  shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.MIN | SWT.MAX | SWT.RESIZE );
  props.setLook( shell );
  setShellImage( shell, meta );
  shell.setMinimumSize( 527, 622 );

  // Any widget edit marks the step metadata as changed.
  lsMod = e -> meta.setChanged();
  changed = meta.hasChanged();

  FormLayout formLayout = new FormLayout();
  formLayout.marginWidth = 15;
  formLayout.marginHeight = 15;
  shell.setLayout( formLayout );
  shell.setText( getDialogTitle() );

  // Step icon in the top-right corner.
  Label wicon = new Label( shell, SWT.RIGHT );
  wicon.setImage( getImage() );
  FormData fdlicon = new FormData();
  fdlicon.top = new FormAttachment( 0, 0 );
  fdlicon.right = new FormAttachment( 100, 0 );
  wicon.setLayoutData( fdlicon );
  props.setLook( wicon );

  // Step name label and text box.
  wlStepname = new Label( shell, SWT.RIGHT );
  wlStepname.setText( BaseMessages.getString( PKG, "BaseStreamingDialog.Stepname.Label" ) );
  props.setLook( wlStepname );
  fdlStepname = new FormData();
  fdlStepname.left = new FormAttachment( 0, 0 );
  fdlStepname.top = new FormAttachment( 0, 0 );
  wlStepname.setLayoutData( fdlStepname );
  wStepname = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
  wStepname.setText( stepname );
  props.setLook( wStepname );
  wStepname.addModifyListener( lsMod );
  fdStepname = new FormData();
  fdStepname.width = 250;
  fdStepname.left = new FormAttachment( 0, 0 );
  fdStepname.top = new FormAttachment( wlStepname, 5 );
  wStepname.setLayoutData( fdStepname );

  // Horizontal separator under the step name.
  Label spacer = new Label( shell, SWT.HORIZONTAL | SWT.SEPARATOR );
  props.setLook( spacer );
  FormData fdSpacer = new FormData();
  fdSpacer.height = 2;
  fdSpacer.left = new FormAttachment( 0, 0 );
  fdSpacer.top = new FormAttachment( wStepname, 15 );
  fdSpacer.right = new FormAttachment( 100, 0 );
  fdSpacer.width = 497;
  spacer.setLayoutData( fdSpacer );

  // Sub-transformation path label and text box.
  wlTransPath = new Label( shell, SWT.LEFT );
  props.setLook( wlTransPath );
  wlTransPath.setText( BaseMessages.getString( PKG, "BaseStreamingDialog.Transformation" ) );
  FormData fdlTransPath = new FormData();
  fdlTransPath.left = new FormAttachment( 0, 0 );
  fdlTransPath.top = new FormAttachment( spacer, 15 );
  fdlTransPath.right = new FormAttachment( 50, 0 );
  wlTransPath.setLayoutData( fdlTransPath );
  wTransPath = new TextVar( transMeta, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
  props.setLook( wTransPath );
  wTransPath.addModifyListener( lsMod );
  FormData fdTransPath = new FormData();
  fdTransPath.left = new FormAttachment( 0, 0 );
  fdTransPath.top = new FormAttachment( wlTransPath, 5 );
  fdTransPath.width = 275;
  wTransPath.setLayoutData( fdTransPath );

  // Browse button: picks the subtrans from the repository when connected,
  // otherwise from the file system.
  wbBrowseTrans = new Button( shell, SWT.PUSH );
  props.setLook( wbBrowseTrans );
  wbBrowseTrans.setText( BaseMessages.getString( PKG, "BaseStreaming.Dialog.Transformation.Browse" ) );
  FormData fdBrowseTrans = new FormData();
  fdBrowseTrans.left = new FormAttachment( wTransPath, 5 );
  fdBrowseTrans.top = new FormAttachment( wlTransPath, 5 );
  wbBrowseTrans.setLayoutData( fdBrowseTrans );
  wbBrowseTrans.addSelectionListener( new SelectionAdapter() {
    public void widgetSelected( SelectionEvent e ) {
      if ( repository != null ) {
        selectRepositoryTrans();
      } else {
        Optional<String> fileName = selectFile( BaseStreamingDialog.this.wTransPath, Const.STRING_TRANS_FILTER_EXT );
        fileName.ifPresent( fn -> {
          try {
            loadFileTrans( fn );
          } catch ( KettleException ex ) {
            ex.printStackTrace();
          }
        } );
      }
    }
  } );

  // Button creating a brand-new sub-transformation.
  wbCreateSubtrans = new Button( shell, SWT.PUSH );
  props.setLook( wbCreateSubtrans );
  wbCreateSubtrans.setText( BaseMessages.getString( PKG, "BaseStreaming.Dialog.Transformation.CreateSubtrans" ) );
  FormData fdCreateSubtrans = new FormData();
  fdCreateSubtrans.left = new FormAttachment( wbBrowseTrans, 5 );
  fdCreateSubtrans.top = new FormAttachment( wbBrowseTrans, 0, SWT.TOP );
  wbCreateSubtrans.setLayoutData( fdCreateSubtrans );
  wbCreateSubtrans.addSelectionListener( new SelectionAdapter() {
    public void widgetSelected( SelectionEvent e ) {
      createNewSubtrans();
    }
  } );

  // Start of tabbed display
  wTabFolder = new CTabFolder( shell, SWT.BORDER );
  props.setLook( wTabFolder, Props.WIDGET_STYLE_TAB );
  wTabFolder.setSimple( false );
  wTabFolder.setUnselectedCloseVisible( true );

  // OK / Cancel buttons anchored to the bottom-right.
  wCancel = new Button( shell, SWT.PUSH );
  wCancel.setText( BaseMessages.getString( PKG, "System.Button.Cancel" ) );
  FormData fdCancel = new FormData();
  fdCancel.right = new FormAttachment( 100, 0 );
  fdCancel.bottom = new FormAttachment( 100, 0 );
  wCancel.setLayoutData( fdCancel );
  wOK = new Button( shell, SWT.PUSH );
  wOK.setText( BaseMessages.getString( PKG, "System.Button.OK" ) );
  FormData fdOk = new FormData();
  fdOk.right = new FormAttachment( wCancel, -5 );
  fdOk.bottom = new FormAttachment( 100, 0 );
  wOK.setLayoutData( fdOk );

  // Separator above the button row.
  Label hSpacer = new Label( shell, SWT.HORIZONTAL | SWT.SEPARATOR );
  props.setLook( hSpacer );
  FormData fdhSpacer = new FormData();
  fdhSpacer.height = 2;
  fdhSpacer.left = new FormAttachment( 0, 0 );
  fdhSpacer.bottom = new FormAttachment( wCancel, -15 );
  fdhSpacer.right = new FormAttachment( 100, 0 );
  hSpacer.setLayoutData( fdhSpacer );

  // Tab folder fills the space between the path row and the button separator.
  FormData fdTabFolder = new FormData();
  fdTabFolder.left = new FormAttachment( 0, 0 );
  fdTabFolder.top = new FormAttachment( wTransPath, 15 );
  fdTabFolder.bottom = new FormAttachment( hSpacer, -15 );
  fdTabFolder.right = new FormAttachment( 100, 0 );
  wTabFolder.setLayoutData( fdTabFolder );

  // Standard tabs plus any subclass-specific ones.
  buildSetupTab();
  buildBatchTab();
  buildResultsTab();
  createAdditionalTabs();

  // Wire up OK/Cancel, default selection (Enter in the name field) and close.
  lsCancel = e -> cancel();
  lsOK = e -> ok();
  wOK.addListener( SWT.Selection, lsOK );
  wCancel.addListener( SWT.Selection, lsCancel );
  lsDef = new SelectionAdapter() {
    public void widgetDefaultSelected( SelectionEvent e ) {
      ok();
    }
  };
  wStepname.addSelectionListener( lsDef );
  shell.addShellListener( new ShellAdapter() {
    public void shellClosed( ShellEvent e ) {
      cancel();
    }
  } );

  // Populate widgets from the metadata, then show the dialog.
  getData();
  setSize();
  wTabFolder.setSelection( 0 );
  wStepname.selectAll();
  wStepname.setFocus();
  shell.open();

  // SWT event loop; blocks until the shell is disposed by ok()/cancel().
  while ( !shell.isDisposed() ) {
    if ( !display.readAndDispatch() ) {
      display.sleep();
    }
  }
  return stepname;
}
/**
 * Returns the title shown on the dialog shell; supplied by each concrete
 * streaming step dialog.
 */
protected abstract String getDialogTitle();
/**
 * Creates the "Setup" tab and lets the subclass populate it via
 * {@link #buildSetup(Composite)}.
 */
private void buildSetupTab() {
  wSetupTab = new CTabItem( wTabFolder, SWT.NONE );
  wSetupTab.setText( BaseMessages.getString( PKG, "BaseStreamingDialog.SetupTab" ) );

  wSetupComp = new Composite( wTabFolder, SWT.NONE );
  props.setLook( wSetupComp );
  FormLayout layout = new FormLayout();
  layout.marginHeight = 15;
  layout.marginWidth = 15;
  wSetupComp.setLayout( layout );

  // Step-specific widgets are contributed by the subclass.
  buildSetup( wSetupComp );

  FormData fd = new FormData();
  fd.left = new FormAttachment( 0, 0 );
  fd.top = new FormAttachment( 0, 0 );
  fd.right = new FormAttachment( 100, 0 );
  fd.bottom = new FormAttachment( 100, 0 );
  wSetupComp.setLayoutData( fd );

  wSetupComp.layout();
  wSetupTab.setControl( wSetupComp );
}
/**
 * Populates the Setup tab with step-specific controls.
 *
 * @param wSetupComp the parent composite of the Setup tab
 */
protected abstract void buildSetup( Composite wSetupComp );
/**
 * Hook for subclasses that need tabs beyond Setup, Batch and Results.
 * The default implementation intentionally adds nothing.
 */
protected void createAdditionalTabs() {
}
/**
 * Creates a new sub-transformation skeleton, saves it (to the repository when
 * one is connected, otherwise to an XML file), writes the resulting path into
 * the path widget and opens the new transformation in Spoon. Optionally shows
 * the "new sub-transformation" popup.
 *
 * The previously duplicated error-dialog construction in the two catch blocks
 * is factored into {@link #showSaveFailDialog(Exception)}.
 */
protected void createNewSubtrans() {
  TransMeta newSubTransMeta = createSubTransMeta();
  boolean saved = false;
  String path = null;
  if ( spoonInstance.getRepository() != null ) {
    try {
      saved = spoonInstance.saveToRepository( newSubTransMeta );
      path = getRepositoryRelativePath( newSubTransMeta.getPathAndName() );
    } catch ( KettleException e ) {
      showSaveFailDialog( e );
    }
  } else {
    saved = spoonInstance.saveXMLFile( newSubTransMeta, false );
    if ( saved ) {
      try {
        path = getRelativePath( KettleVFS.getFileObject( newSubTransMeta.getFilename() ).toString() );
      } catch ( KettleFileException e ) {
        showSaveFailDialog( e );
      }
    }
  }
  if ( saved && path != null ) {
    wTransPath.setText( path );
    createSubtrans( newSubTransMeta );
    if ( props.showNewSubtransPopup() ) {
      NewSubtransDialog newSubtransDialog = new NewSubtransDialog( shell, SWT.NONE );
      // The dialog returns true when the user asks not to be shown it again.
      props.setShowNewSubtransPopup( !newSubtransDialog.open() );
    }
  }
}

/** Shows the standard "failed to save sub-transformation" error dialog. */
private void showSaveFailDialog( Exception e ) {
  new ErrorDialog( shell,
    BaseMessages.getString( PKG, "BaseStreamingDialog.File.Save.Fail.Title" ),
    BaseMessages.getString( PKG, "BaseStreamingDialog.File.Save.Fail.Message" ), e );
}
/**
 * Builds an in-memory sub-transformation containing a single
 * "Get records from stream" step configured with this step's field names
 * and types (lengths and precisions all set to -1).
 */
protected TransMeta createSubTransMeta() {
  String[] fieldNames = getFieldNames();
  int[] allMinusOne = new int[ fieldNames.length ];
  Arrays.fill( allMinusOne, -1 );

  RecordsFromStreamMeta recordsMeta = new RecordsFromStreamMeta();
  recordsMeta.setFieldname( fieldNames );
  recordsMeta.setType( getFieldTypes() );
  recordsMeta.setLength( allMinusOne );
  recordsMeta.setPrecision( allMinusOne );

  StepMeta fromStreamStep = new StepMeta( "RecordsFromStream", "Get records from stream", recordsMeta );
  fromStreamStep.setLocation( new Point( 100, 100 ) );
  fromStreamStep.setDraw( true );

  TransMeta subTransMeta = new TransMeta();
  subTransMeta.addStep( fromStreamStep );
  subTransMeta.setFilename( "" );
  return subTransMeta;
}
/**
 * Returns the value type id for each field produced by this step; used
 * together with {@link #getFieldNames()} when building the sub-transformation.
 */
protected abstract int[] getFieldTypes();

/**
 * Returns the names of the fields produced by this step; used together with
 * {@link #getFieldTypes()} when building the sub-transformation.
 */
protected abstract String[] getFieldNames();
/**
 * Opens the given transformation as a new Spoon graph tab, then restores the
 * tab that was selected before the call.
 */
private void createSubtrans( TransMeta newTransMeta ) {
  // Remember the currently selected tab so we can switch back at the end.
  TabItem previousTab = spoonInstance.getTabSet().getSelected();

  newTransMeta.setMetaStore( spoonInstance.getMetaStore() );
  try {
    newTransMeta.setSharedObjects( newTransMeta.readSharedObjects() );
    newTransMeta.importFromMetaStore();
    newTransMeta.clearChanged();
  } catch ( Exception e ) {
    log.logError( "Failed to retrieve shared objects", e );
  }

  spoonInstance.delegates.tabs.makeTabName( newTransMeta, false );
  spoonInstance.addTransGraph( newTransMeta );
  spoonInstance.applyVariables();
  if ( spoonInstance.setDesignMode() ) {
    // No refresh done yet, do so
    spoonInstance.refreshTree();
  }
  spoonInstance.loadPerspective( MainSpoonPerspective.ID );

  try {
    ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationCreateNew.id, newTransMeta );
  } catch ( KettleException e ) {
    log.logError( "Failed to call extension point", e );
  }

  // Go back to the initial tab.
  spoonInstance.getTabSet().setSelected( previousTab );
}
/**
 * Builds the "Batch" tab containing the batch duration and batch size inputs.
 */
private void buildBatchTab() {
  wBatchTab = new CTabItem( wTabFolder, SWT.NONE );
  wBatchTab.setText( BaseMessages.getString( PKG, "BaseStreamingDialog.BatchTab" ) );

  wBatchComp = new Composite( wTabFolder, SWT.NONE );
  props.setLook( wBatchComp );
  FormLayout layout = new FormLayout();
  layout.marginHeight = 15;
  layout.marginWidth = 15;
  wBatchComp.setLayout( layout );

  FormData fdComp = new FormData();
  fdComp.left = new FormAttachment( 0, 0 );
  fdComp.top = new FormAttachment( 0, 0 );
  fdComp.right = new FormAttachment( 100, 0 );
  fdComp.bottom = new FormAttachment( 100, 0 );
  wBatchComp.setLayoutData( fdComp );

  // Batch duration label and input.
  wlBatchDuration = new Label( wBatchComp, SWT.LEFT );
  props.setLook( wlBatchDuration );
  wlBatchDuration.setText( BaseMessages.getString( PKG, "BaseStreamingDialog.BatchDuration" ) );
  FormData fdDurationLabel = new FormData();
  fdDurationLabel.left = new FormAttachment( 0, 0 );
  fdDurationLabel.top = new FormAttachment( 0, 0 );
  fdDurationLabel.right = new FormAttachment( 50, 0 );
  wlBatchDuration.setLayoutData( fdDurationLabel );

  wBatchDuration = new TextVar( transMeta, wBatchComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
  props.setLook( wBatchDuration );
  wBatchDuration.addModifyListener( lsMod );
  FormData fdDurationField = new FormData();
  fdDurationField.left = new FormAttachment( 0, 0 );
  fdDurationField.top = new FormAttachment( wlBatchDuration, 5 );
  fdDurationField.width = 75;
  wBatchDuration.setLayoutData( fdDurationField );

  // Batch size label and input.
  wlBatchSize = new Label( wBatchComp, SWT.LEFT );
  props.setLook( wlBatchSize );
  wlBatchSize.setText( BaseMessages.getString( PKG, "BaseStreamingDialog.BatchSize" ) );
  FormData fdSizeLabel = new FormData();
  fdSizeLabel.left = new FormAttachment( 0, 0 );
  fdSizeLabel.top = new FormAttachment( wBatchDuration, 10 );
  fdSizeLabel.right = new FormAttachment( 50, 0 );
  wlBatchSize.setLayoutData( fdSizeLabel );

  wBatchSize = new TextVar( transMeta, wBatchComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
  props.setLook( wBatchSize );
  wBatchSize.addModifyListener( lsMod );
  FormData fdSizeField = new FormData();
  fdSizeField.left = new FormAttachment( 0, 0 );
  fdSizeField.top = new FormAttachment( wlBatchSize, 5 );
  fdSizeField.width = 75;
  wBatchSize.setLayoutData( fdSizeField );

  wBatchComp.layout();
  wBatchTab.setControl( wBatchComp );
}
/**
 * Builds the "Results" tab containing the combo used to pick the
 * sub-transformation step whose output is returned to the parent.
 */
private void buildResultsTab() {
  wResultsTab = new CTabItem( wTabFolder, SWT.NONE );
  wResultsTab.setText( BaseMessages.getString( PKG, "BaseStreamingDialog.ResultsTab" ) );

  wResultsComp = new Composite( wTabFolder, SWT.NONE );
  props.setLook( wResultsComp );
  FormLayout layout = new FormLayout();
  layout.marginHeight = 15;
  layout.marginWidth = 15;
  wResultsComp.setLayout( layout );

  FormData fdComp = new FormData();
  fdComp.left = new FormAttachment( 0, 0 );
  fdComp.top = new FormAttachment( 0, 0 );
  fdComp.right = new FormAttachment( 100, 0 );
  fdComp.bottom = new FormAttachment( 100, 0 );
  wResultsComp.setLayoutData( fdComp );

  wlSubStep = new Label( wResultsComp, SWT.LEFT );
  props.setLook( wlSubStep );
  FormData fdSubLabel = new FormData();
  fdSubLabel.left = new FormAttachment( 0, 0 );
  fdSubLabel.top = new FormAttachment( 0, 0 );
  wlSubStep.setLayoutData( fdSubLabel );
  wlSubStep.setText( BaseMessages.getString( PKG, "BaseStreaming.Dialog.Transformation.SubTransStep" ) );

  wSubStep = new ComboVar( transMeta, wResultsComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
  props.setLook( wSubStep );
  FormData fdSubCombo = new FormData();
  fdSubCombo.left = new FormAttachment( 0, 0 );
  fdSubCombo.top = new FormAttachment( wlSubStep, 5 );
  fdSubCombo.width = 250;
  wSubStep.setLayoutData( fdSubCombo );

  wResultsComp.layout();
  wResultsTab.setControl( wResultsComp );
}
/**
 * Copies the current meta values into the dialog widgets and remembers the
 * specification method (file vs. repository).
 */
protected void getData() {
  String transformationPath = meta.getTransformationPath();
  if ( transformationPath != null ) {
    wTransPath.setText( transformationPath );
  }
  String batchSize = meta.getBatchSize();
  if ( batchSize != null ) {
    wBatchSize.setText( batchSize );
  }
  String batchDuration = meta.getBatchDuration();
  if ( batchDuration != null ) {
    wBatchDuration.setText( batchDuration );
  }
  populateSubSteps();
  specificationMethod = meta.getSpecificationMethod();
}
/**
 * Fills the sub-step combo with the step names of the mapped
 * sub-transformation (sorted) and pre-selects the configured step, if any.
 * Load failures are only logged at debug level.
 */
protected void populateSubSteps() {
  try {
    TransMeta mappedTransMeta =
        TransExecutorMeta.loadMappingMeta( (StepWithMappingMeta) stepMeta.getStepMetaInterface(),
            getRepository(), getMetaStore(), new Variables() );
    mappedTransMeta.getSteps().stream()
        .map( StepMeta::getName )
        .sorted()
        .forEach( wSubStep::add );
    String configuredSubStep = meta.getSubStep();
    if ( configuredSubStep != null ) {
      wSubStep.setText( configuredSubStep );
    }
  } catch ( KettleException e ) {
    logDebug( e.getMessage(), e );
  }
}
/**
 * Resolves this step's plugin icon as a bitmap sized for the dialog.
 *
 * Returns null instead of throwing when the plugin cannot be found, exposes
 * no ids, or has no image registered for its id (the original dereferenced
 * {@code plugin.getIds()[0]} and the image-map lookup unchecked, which could
 * raise NullPointerException / ArrayIndexOutOfBoundsException).
 */
private Image getImage() {
  PluginInterface plugin =
      PluginRegistry.getInstance().getPlugin( StepPluginType.class, stepMeta.getStepMetaInterface() );
  if ( plugin == null || plugin.getIds() == null || plugin.getIds().length == 0 ) {
    return null;
  }
  String id = plugin.getIds()[ 0 ];
  if ( id != null ) {
    if ( GUIResource.getInstance().getImagesSteps().get( id ) == null ) {
      // No image registered under this plugin id.
      return null;
    }
    return GUIResource.getInstance().getImagesSteps().get( id ).getAsBitmapForSize( shell.getDisplay(),
        ConstUI.ICON_SIZE, ConstUI.ICON_SIZE );
  }
  return null;
}
/**
 * Closes the dialog without applying widget values; the meta's changed flag
 * is cleared so the step is not marked dirty.
 */
private void cancel() {
  meta.setChanged( false );
  dispose();
}
/**
 * Applies the widget values to the step meta (including the file-vs-repository
 * location details), runs subclass hooks, and closes the dialog.
 */
private void ok() {
  stepname = wStepname.getText();
  String path = wTransPath.getText();
  meta.setTransformationPath( path );
  meta.setBatchSize( wBatchSize.getText() );
  meta.setBatchDuration( wBatchDuration.getText() );
  meta.setSpecificationMethod( specificationMethod );
  meta.setSubStep( wSubStep.getText() );
  switch ( specificationMethod ) {
    case FILENAME:
      // Path is a file name; repository coordinates do not apply.
      meta.setFileName( path );
      meta.setDirectoryPath( null );
      meta.setTransName( null );
      meta.setTransObjectId( null );
      break;
    case REPOSITORY_BY_NAME:
      // Split "<directory>/<name>" at the last slash.
      String transName = path;
      String directory = "";
      int lastSlash = path.lastIndexOf( "/" );
      if ( lastSlash != -1 ) {
        transName = path.substring( lastSlash + 1 );
        directory = path.substring( 0, lastSlash );
      }
      meta.setDirectoryPath( directory );
      meta.setTransName( transName );
      meta.setFileName( null );
      meta.setTransObjectId( null );
      break;
    default:
      break;
  }
  additionalOks( meta );
  dispose();
}
/**
 * Hook for subclasses to persist extra widget values on OK. The default
 * implementation intentionally does nothing.
 *
 * @param meta the step meta being populated
 */
protected void additionalOks( BaseStreamStepMeta meta ) {
}
/**
 * Lets the user pick a transformation from the repository; on success loads
 * it, writes its repository-relative path into the path widget and switches
 * the specification method to REPOSITORY_BY_NAME.
 */
private void selectRepositoryTrans() {
  try {
    SelectObjectDialog selectDialog = new SelectObjectDialog( shell, repository );
    String selectedName = selectDialog.open();
    RepositoryDirectoryInterface selectedDir = selectDialog.getDirectory();
    if ( selectedName != null && selectedDir != null ) {
      loadRepositoryTrans( selectedName, selectedDir );
      wTransPath.setText( getRepositoryRelativePath( executorTransMeta.getPathAndName() ) );
      specificationMethod = ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME;
    }
  } catch ( KettleException e ) {
    new ErrorDialog( shell,
      BaseMessages.getString( PKG, "TransExecutorDialog.ErrorSelectingObject.DialogTitle" ),
      BaseMessages.getString( PKG, "TransExecutorDialog.ErrorSelectingObject.DialogMessage" ), e );
  }
}
/**
 * Rewrites an absolute repository path so that the current transformation's
 * repository directory is expressed through the
 * Internal.Entry.Current.Directory variable.
 *
 * Fix: only the leading prefix is substituted. The original used
 * {@code String.replace(parentPath, ...)}, which rewrites EVERY occurrence of
 * the parent path in the string, corrupting paths that happen to contain the
 * parent-path text again later on.
 *
 * @param path absolute repository path (may be null, returned unchanged)
 * @return the path with the leading parent directory replaced by the variable
 */
protected String getRepositoryRelativePath( String path ) {
  String parentPath = this.transMeta.getRepositoryDirectory().getPath();
  if ( path != null && path.startsWith( parentPath ) ) {
    // Replace only the prefix, never interior occurrences.
    path = "${" + Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY + "}" + path.substring( parentPath.length() );
  }
  return path;
}
/**
 * Rewrites a VFS file path so that the current transformation's parent folder
 * is expressed through the Internal.Entry.Current.Directory variable.
 *
 * Fix: only the leading prefix is substituted. The original used
 * {@code String.replace(parentFolder, ...)}, which rewrites EVERY occurrence
 * of the parent folder text in the path, not just the leading one.
 *
 * @param filePath absolute file path (may be null, returned unchanged)
 * @return the path with the leading parent folder replaced by the variable
 */
protected String getRelativePath( String filePath ) {
  String parentFolder = null;
  try {
    parentFolder =
      KettleVFS.getFileObject( transMeta.environmentSubstitute( transMeta.getFilename() ) ).getParent().toString();
  } catch ( Exception e ) {
    // Best effort: without a resolvable parent folder the path stays absolute.
  }
  if ( filePath != null && parentFolder != null && filePath.startsWith( parentFolder ) ) {
    // Replace only the prefix, never interior occurrences.
    filePath =
      "${" + Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY + "}" + filePath.substring( parentFolder.length() );
  }
  return filePath;
}
/**
 * Loads the named transformation from the given repository directory into
 * {@code executorTransMeta}; variable substitution is applied to the name.
 *
 * @param transName transformation name (may contain variables)
 * @param repdir repository directory to load from
 * @throws KettleException if the repository load fails
 */
private void loadRepositoryTrans( String transName, RepositoryDirectoryInterface repdir ) throws KettleException {
  // Read the transformation...
  //
  executorTransMeta =
    repository.loadTransformation( transMeta.environmentSubstitute( transName ), repdir, null, false, null );
  executorTransMeta.clearChanged();
}
/**
 * Opens the VFS file chooser seeded with the widget's current (substituted)
 * value; on selection writes the relative path back into the widget.
 *
 * @return the selected relative path, or empty when cancelled or on error
 */
protected Optional<String> selectFile( TextVar fileWidget, String[] fileFilters ) {
  String currentValue = transMeta.environmentSubstitute( fileWidget.getText() );
  try {
    // Fall back to the user's home directory when the widget is empty.
    FileObject startingFile =
      KettleVFS.getFileObject( currentValue != null ? currentValue : Const.getUserHomeDirectory() );
    VfsFileChooserDialog chooser = Spoon.getInstance().getVfsFileChooserDialog( startingFile.getParent(), startingFile );
    FileObject selected = chooser.open(
      shell, null, fileFilters, Const.getTransformationFilterNames(),
      VfsFileChooserDialog.VFS_DIALOG_OPEN_FILE );
    if ( selected == null ) {
      // User cancelled the dialog.
      return Optional.empty();
    }
    String relativePath = getRelativePath( selected.getName().toString() );
    fileWidget.setText( relativePath );
    return Optional.ofNullable( relativePath );
  } catch ( IOException | KettleException e ) {
    new ErrorDialog( shell,
      BaseMessages.getString( PKG, "TransExecutorDialog.ErrorLoadingTransformation.DialogTitle" ),
      BaseMessages.getString( PKG, "TransExecutorDialog.ErrorLoadingTransformation.DialogMessage" ), e );
  }
  return Optional.empty();
}
/**
 * Loads a transformation from a file (variables substituted) into
 * {@code executorTransMeta} and switches the specification method to FILENAME.
 *
 * @param fname transformation file name (may contain variables)
 * @throws KettleException if the file cannot be loaded
 */
private void loadFileTrans( String fname ) throws KettleException {
  executorTransMeta = new TransMeta( transMeta.environmentSubstitute( fname ), repository );
  executorTransMeta.clearChanged();
  specificationMethod = ObjectLocationSpecificationMethod.FILENAME;
}
// Method is defined as package-protected in order to be accessible by unit tests
/**
 * Loads the sub-transformation named in the path widget, from the repository
 * when one is connected, otherwise from a file. Normalizes the ".ktr"
 * extension for each mode and updates the widget to match.
 *
 * Fix: the repository branch previously used {@code replace(".ktr", "")},
 * which strips EVERY ".ktr" occurrence anywhere in the path (e.g. a directory
 * named "jobs.ktr"); only the trailing extension is removed now.
 *
 * @throws KettleException when repository coordinates are invalid or loading fails
 */
void loadTransformation() throws KettleException {
  String filename = wTransPath.getText();
  if ( repository != null ) {
    specificationMethod = ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME;
  } else {
    specificationMethod = ObjectLocationSpecificationMethod.FILENAME;
  }
  switch ( specificationMethod ) {
    case FILENAME:
      if ( Utils.isEmpty( filename ) ) {
        return;
      }
      // File mode requires the .ktr extension.
      if ( !filename.endsWith( ".ktr" ) ) {
        filename = filename + ".ktr";
        wTransPath.setText( filename );
      }
      loadFileTrans( filename );
      break;
    case REPOSITORY_BY_NAME:
      if ( Utils.isEmpty( filename ) ) {
        return;
      }
      // Repository mode stores the name without an extension: strip only the
      // trailing ".ktr", never occurrences elsewhere in the path.
      if ( filename.endsWith( ".ktr" ) ) {
        filename = filename.substring( 0, filename.length() - ".ktr".length() );
        wTransPath.setText( filename );
      }
      String transPath = transMeta.environmentSubstitute( filename );
      String realTransname = transPath;
      String realDirectory = "";
      int index = transPath.lastIndexOf( "/" );
      if ( index != -1 ) {
        realTransname = transPath.substring( index + 1 );
        realDirectory = transPath.substring( 0, index );
      }
      if ( Utils.isEmpty( realDirectory ) || Utils.isEmpty( realTransname ) ) {
        throw new KettleException(
          BaseMessages.getString( PKG, "TransExecutorDialog.Exception.NoValidMappingDetailsFound" ) );
      }
      RepositoryDirectoryInterface repdir = repository.findDirectory( realDirectory );
      if ( repdir == null ) {
        throw new KettleException( BaseMessages.getString(
          PKG, "TransExecutorDialog.Exception.UnableToFindRepositoryDirectory" ) );
      }
      loadRepositoryTrans( realTransname, repdir );
      break;
    default:
      break;
  }
}
/**
 * Resolves a repository object id to a "directory/name" display path and
 * writes it into the path widget; failures are reported via an error dialog.
 */
private void getByReferenceData( ObjectId transObjectId ) {
  try {
    RepositoryObject transInfo =
      repository.getObjectInformation( transObjectId, RepositoryObjectType.TRANSFORMATION );
    String directoryPath = DialogUtils.getPath(
      transMeta.getRepositoryDirectory().getPath(), transInfo.getRepositoryDirectory().getPath() );
    wTransPath.setText( Const.NVL( directoryPath, "" ) + "/" + Const.NVL( transInfo.getName(), "" ) );
  } catch ( KettleException e ) {
    new ErrorDialog( shell,
      BaseMessages.getString( PKG, "JobEntryTransDialog.Exception.UnableToReferenceObjectId.Title" ),
      BaseMessages.getString( PKG, "JobEntryTransDialog.Exception.UnableToReferenceObjectId.Message" ), e );
  }
}
}
| |
package model;
// The model layer: talks to the database.
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import util.ConnectionFactory;
/**
 * Model for the {@code users} table: authentication, registration, online
 * status ("logados") tracking and simple lookups.
 *
 * NOTE(review): passwords ({@code senha}, column {@code pwd}) are stored and
 * compared in plain text; hash them (bcrypt/scrypt/argon2) before any
 * production use.
 *
 * Fix: the JDBC {@link Connection} used to live in an instance field that
 * every method silently reassigned; it is now local to each method, so calls
 * can no longer observe another call's (possibly already closed) connection.
 */
public class user {

    private String login;
    private boolean status;  // true while the user is marked logged-in in the DB
    private String senha;    // password -- plain text, see class note
    private String email;
    private int id;          // DB primary key, set after a successful login

    /** No-arg constructor; populate via setters. */
    public user() {
    }

    /** Convenience constructor for authentication with login and password. */
    public user(String login, String senha) {
        this.login = login;
        this.senha = senha;
    }

    public String getLogin() {
        return login;
    }

    public void setLogin(String login) {
        this.login = login;
    }

    public boolean isStatus() {
        return status;
    }

    public void setStatus(boolean status) {
        this.status = status;
    }

    public String getSenha() {
        return senha;
    }

    public void setSenha(String senha) {
        this.senha = senha;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    /**
     * Authenticates with the current {@code login}/{@code senha}. On success,
     * stores the matching row's id on this object and sets the row's
     * {@code status} flag to true (user marked online).
     *
     * @return true when a matching user exists, false otherwise or on error
     */
    public boolean verificaLogin() {
        Connection conn = null;
        PreparedStatement pst = null;
        PreparedStatement pst2 = null;
        ResultSet rs = null;
        try {
            conn = ConnectionFactory.getConnection();
            pst = conn.prepareStatement("select id from users where login=? and pwd=?");
            pst.setString(1, login);
            pst.setString(2, senha);
            rs = pst.executeQuery();
            if (rs.next()) {
                setId(rs.getInt(1));
                // Mark the user as logged in.
                pst2 = conn.prepareStatement("update users set status=true where id=?");
                pst2.setInt(1, id);
                pst2.executeUpdate();
                return true;
            }
            return false;
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            ConnectionFactory.closeConnection(conn, pst, rs);
            ConnectionFactory.closeConnection(null, pst2);
        }
        return false;
    }

    /**
     * Checks whether a user with the given login already exists (used to
     * reject duplicate registrations).
     *
     * @param cara login to look up
     * @return true when the login is taken, false otherwise or on error
     */
    public boolean verificaLogin(String cara) {
        Connection conn = null;
        PreparedStatement pst = null;
        ResultSet rs = null;
        try {
            conn = ConnectionFactory.getConnection();
            pst = conn.prepareStatement("select id from users where login=?");
            pst.setString(1, cara);
            rs = pst.executeQuery();
            return rs.next();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            ConnectionFactory.closeConnection(conn, pst, rs);
        }
        return false;
    }

    /**
     * Finds the login of the author of message {@code num} (join of
     * {@code users} and {@code msg} on {@code msg.uid}); the login is also
     * stored on this object.
     *
     * @param num message id
     * @return the author's login, or null when not found or on error
     */
    public String buscaMsg(int num) {
        Connection conn = null;
        PreparedStatement pst = null;
        ResultSet rs = null;
        try {
            conn = ConnectionFactory.getConnection();
            pst = conn.prepareStatement("select login from users,msg where msg.uid=users.id and msg.id=?");
            pst.setInt(1, num);
            rs = pst.executeQuery();
            if (rs.next()) {
                setLogin(rs.getString(1));
                return login;
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            ConnectionFactory.closeConnection(conn, pst, rs);
        }
        return null;
    }

    /**
     * Inserts a new row into {@code users} from this object's current
     * login/senha/email/status values.
     */
    public void cadastrarUsuario() {
        Connection conn = null;
        PreparedStatement pst = null;
        try {
            conn = ConnectionFactory.getConnection();
            pst = conn.prepareStatement("insert into users(login,pwd,email,status) values (?,?,?,?)");
            pst.setString(1, login);
            pst.setString(2, senha);
            pst.setString(3, email);
            pst.setBoolean(4, status);
            pst.executeUpdate();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            ConnectionFactory.closeConnection(conn, pst);
        }
    }

    /**
     * Calls the {@code logados()} stored procedure and returns the first
     * column of its first row (the list of logged-in users), or null when the
     * procedure returns no rows or on error.
     */
    public String buscarLogados() {
        Connection conn = null;
        CallableStatement cs = null;
        ResultSet rs = null;
        try {
            conn = ConnectionFactory.getConnection();
            cs = conn.prepareCall("{call logados()}");
            rs = cs.executeQuery();
            if (rs.next()) {
                return rs.getString(1);
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            ConnectionFactory.closeConnection(conn, cs, rs);
        }
        return null;
    }

    /**
     * Clears the {@code status} flag for this user's id (user marked offline).
     */
    public void logout() {
        Connection conn = null;
        PreparedStatement pst = null;
        try {
            conn = ConnectionFactory.getConnection();
            pst = conn.prepareStatement("update users set status=false where id = ?");
            pst.setInt(1, id);
            pst.executeUpdate();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            ConnectionFactory.closeConnection(conn, pst);
        }
    }
}
| |
/**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
package org.openoffice.extensions.util.datamodel;
import junit.framework.*;
import java.beans.PropertyChangeListener;
import java.beans.PropertyEditor;
import java.util.Arrays;
import java.util.Vector;
import javax.swing.Action;
import org.openide.nodes.Node;
import org.openide.nodes.Sheet;
import org.openide.util.NbBundle;
import org.openoffice.extensions.config.ConfigurationSettings;
import org.openoffice.extensions.projecttemplates.component.datamodel.types.node.ComponentInterfaceTypePropertyEditor;
import org.openoffice.extensions.util.LogWriter;
import org.openoffice.extensions.util.datamodel.actions.BaseAction;
import org.openoffice.extensions.util.datamodel.properties.SimpleOpenOfficeOrgProperty;
import org.openoffice.extensions.util.datamodel.properties.UnknownOpenOfficeOrgPropertyException;
/**
*
* @author sg128468
*/
/**
 * Generated JUnit 3 test skeleton for
 * org.openoffice.extensions.util.datamodel.Service.
 *
 * Every test is still an unimplemented prototype. Fix: the generated code
 * dereferenced a null {@code instance} before reaching {@code fail(...)}, so
 * each test died with a misleading NullPointerException instead of the
 * intended "prototype" failure message. {@code fail(...)} is now the first
 * assertion in each test; the generated template code is kept below it
 * (never reached until the fail() call is removed) as a starting point for a
 * real implementation.
 *
 * @author sg128468
 */
public class ServiceTest extends TestCase {

    public ServiceTest(String testName) {
        super(testName);
    }

    protected void setUp() throws Exception {
    }

    protected void tearDown() throws Exception {
    }

    /** Builds the suite containing all tests of this class. */
    public static Test suite() {
        TestSuite suite = new TestSuite(ServiceTest.class);
        return suite;
    }

    /** Test of addInterface method, of class Service. */
    public void testAddInterface() {
        System.out.println("addInterface");
        // TODO implement; remove the fail() call once done.
        fail("The test case is a prototype.");
        Service instance = null;
        Interface expResult = null;
        Interface result = instance.addInterface();
        assertEquals(expResult, result);
    }

    /** Test of removeInterface method, of class Service. */
    public void testRemoveInterface() {
        System.out.println("removeInterface");
        // TODO implement; remove the fail() call once done.
        fail("The test case is a prototype.");
        Interface ifc = null;
        Service instance = null;
        instance.removeInterface(ifc);
    }

    /** Test of getType method, of class Service. */
    public void testGetType() {
        System.out.println("getType");
        // TODO implement; remove the fail() call once done.
        fail("The test case is a prototype.");
        Service instance = null;
        int expResult = 0;
        int result = instance.getType();
        assertEquals(expResult, result);
    }

    /** Test of getAllSetObjectNames method, of class Service. */
    public void testGetAllSetObjectNames() {
        System.out.println("getAllSetObjectNames");
        // TODO implement; remove the fail() call once done.
        fail("The test case is a prototype.");
        Service instance = null;
        String[] expResult = null;
        String[] result = instance.getAllSetObjectNames();
        // NOTE(review): assertEquals on arrays compares references; use
        // Arrays.equals when implementing this for real.
        assertEquals(expResult, result);
    }

    /** Test of getSetObject method, of class Service. */
    public void testGetSetObject() {
        System.out.println("getSetObject");
        // TODO implement; remove the fail() call once done.
        fail("The test case is a prototype.");
        String name = "";
        Service instance = null;
        Object expResult = null;
        Object result = instance.getSetObject(name);
        assertEquals(expResult, result);
    }

    /** Test of addSetObject method, of class Service. */
    public void testAddSetObject() {
        System.out.println("addSetObject");
        // TODO implement; remove the fail() call once done.
        fail("The test case is a prototype.");
        String name = "";
        Object setObject = null;
        Service instance = null;
        instance.addSetObject(name, setObject);
    }

    /** Test of addLanguage method, of class Service. */
    public void testAddLanguage() {
        System.out.println("addLanguage");
        // TODO implement; remove the fail() call once done.
        fail("The test case is a prototype.");
        int languageID = 0;
        String defaultText = "";
        Service instance = null;
        instance.addLanguage(languageID, defaultText);
    }

    /** Test of removeLanguage method, of class Service. */
    public void testRemoveLanguage() {
        System.out.println("removeLanguage");
        // TODO implement; remove the fail() call once done.
        fail("The test case is a prototype.");
        int languageID = 0;
        Service instance = null;
        instance.removeLanguage(languageID);
    }

    /** Test of getParent method, of class Service. */
    public void testGetParent() {
        System.out.println("getParent");
        // TODO implement; remove the fail() call once done.
        fail("The test case is a prototype.");
        Service instance = null;
        NbNodeObject expResult = null;
        NbNodeObject result = instance.getParent();
        assertEquals(expResult, result);
    }

    /** Test of getDisplayName method, of class Service. */
    public void testGetDisplayName() {
        System.out.println("getDisplayName");
        // TODO implement; remove the fail() call once done.
        fail("The test case is a prototype.");
        Service instance = null;
        String expResult = "";
        String result = instance.getDisplayName();
        assertEquals(expResult, result);
    }

    /** Test of getAllSubObjects method, of class Service. */
    public void testGetAllSubObjects() {
        System.out.println("getAllSubObjects");
        // TODO implement; remove the fail() call once done.
        fail("The test case is a prototype.");
        Service instance = null;
        NbNodeObject[] expResult = null;
        NbNodeObject[] result = instance.getAllSubObjects();
        // NOTE(review): assertEquals on arrays compares references; use
        // Arrays.equals when implementing this for real.
        assertEquals(expResult, result);
    }

    /** Test of createProperties method, of class Service. */
    public void testCreateProperties() {
        System.out.println("createProperties");
        // TODO implement; remove the fail() call once done.
        fail("The test case is a prototype.");
        Sheet sheet = null;
        PropertyChangeListener listener = null;
        Service instance = null;
        Node.Property[] expResult = null;
        Node.Property[] result = instance.createProperties(sheet, listener);
        assertEquals(expResult, result);
    }

    /** Test of hasActions method, of class Service. */
    public void testHasActions() {
        System.out.println("hasActions");
        // TODO implement; remove the fail() call once done.
        fail("The test case is a prototype.");
        int type = 0;
        Service instance = null;
        boolean expResult = true;
        boolean result = instance.hasActions(type);
        assertEquals(expResult, result);
    }

    /** Test of getActions method, of class Service. */
    public void testGetActions() {
        System.out.println("getActions");
        // TODO implement; remove the fail() call once done.
        fail("The test case is a prototype.");
        boolean context = true;
        Service instance = null;
        Action[] expResult = null;
        Action[] result = instance.getActions(context);
        assertEquals(expResult, result);
    }

    /** Test of setActions method, of class Service. */
    public void testSetActions() {
        System.out.println("setActions");
        // TODO implement; remove the fail() call once done.
        fail("The test case is a prototype.");
        BaseAction actions = null;
        Service instance = null;
        instance.setActions(actions);
    }
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.network.router;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import javax.naming.ConfigurationException;
import com.cloud.agent.api.Answer;
import com.cloud.agent.api.Command;
import com.cloud.agent.api.Command.OnError;
import com.cloud.agent.api.NetworkUsageCommand;
import com.cloud.agent.api.PlugNicCommand;
import com.cloud.agent.api.SetupGuestNetworkCommand;
import com.cloud.agent.api.routing.AggregationControlCommand;
import com.cloud.agent.api.routing.AggregationControlCommand.Action;
import com.cloud.agent.manager.Commands;
import com.cloud.dc.DataCenter;
import com.cloud.deploy.DeployDestination;
import com.cloud.exception.AgentUnavailableException;
import com.cloud.exception.ConcurrentOperationException;
import com.cloud.exception.InsufficientCapacityException;
import com.cloud.exception.OperationTimedoutException;
import com.cloud.exception.ResourceUnavailableException;
import com.cloud.network.IpAddress;
import com.cloud.network.Network;
import com.cloud.network.Network.Provider;
import com.cloud.network.Network.Service;
import com.cloud.network.Networks.BroadcastDomainType;
import com.cloud.network.Networks.TrafficType;
import com.cloud.network.PublicIpAddress;
import com.cloud.network.RemoteAccessVpn;
import com.cloud.network.Site2SiteVpnConnection;
import com.cloud.network.VirtualRouterProvider;
import com.cloud.network.addr.PublicIp;
import com.cloud.network.dao.IPAddressVO;
import com.cloud.network.dao.RemoteAccessVpnVO;
import com.cloud.network.vpc.NetworkACLItemDao;
import com.cloud.network.vpc.NetworkACLItemVO;
import com.cloud.network.vpc.NetworkACLManager;
import com.cloud.network.vpc.PrivateGateway;
import com.cloud.network.vpc.PrivateIpAddress;
import com.cloud.network.vpc.PrivateIpVO;
import com.cloud.network.vpc.StaticRoute;
import com.cloud.network.vpc.StaticRouteProfile;
import com.cloud.network.vpc.Vpc;
import com.cloud.network.vpc.VpcGateway;
import com.cloud.network.vpc.VpcGatewayVO;
import com.cloud.network.vpc.VpcManager;
import com.cloud.network.vpc.VpcVO;
import com.cloud.network.vpc.dao.PrivateIpDao;
import com.cloud.network.vpc.dao.StaticRouteDao;
import com.cloud.network.vpc.dao.VpcGatewayDao;
import com.cloud.network.vpn.Site2SiteVpnManager;
import com.cloud.user.UserStatisticsVO;
import com.cloud.utils.Pair;
import com.cloud.utils.db.EntityManager;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.utils.fsm.StateMachine2;
import com.cloud.utils.net.NetUtils;
import com.cloud.vm.DomainRouterVO;
import com.cloud.vm.Nic;
import com.cloud.vm.NicProfile;
import com.cloud.vm.NicVO;
import com.cloud.vm.ReservationContext;
import com.cloud.vm.VirtualMachine;
import com.cloud.vm.VirtualMachine.State;
import com.cloud.vm.VirtualMachineProfile;
import com.cloud.vm.VirtualMachineProfile.Param;
import com.cloud.vm.dao.VMInstanceDao;
import com.cloud.agent.api.to.VirtualMachineTO;
import com.cloud.hypervisor.Hypervisor;
import com.cloud.hypervisor.HypervisorGuru;
import com.cloud.hypervisor.HypervisorGuruManager;
import org.apache.log4j.Logger;
import org.springframework.stereotype.Component;
@Component
public class VpcVirtualNetworkApplianceManagerImpl extends VirtualNetworkApplianceManagerImpl implements VpcVirtualNetworkApplianceManager {
private static final Logger s_logger = Logger.getLogger(VpcVirtualNetworkApplianceManagerImpl.class);

// Collaborators injected by Spring; names indicate the subsystem each covers
// (ACLs, VM instances, static routes, VPCs, private IPs, site-to-site VPN,
// VPC gateways, ACL items, generic entities, hypervisor gurus).
@Inject
private NetworkACLManager _networkACLMgr;
@Inject
private VMInstanceDao _vmDao;
@Inject
private StaticRouteDao _staticRouteDao;
@Inject
private VpcManager _vpcMgr;
@Inject
private PrivateIpDao _privateIpDao;
@Inject
private Site2SiteVpnManager _s2sVpnMgr;
@Inject
private VpcGatewayDao _vpcGatewayDao;
@Inject
private NetworkACLItemDao _networkACLItemDao;
@Inject
private EntityManager _entityMgr;
@Inject
protected HypervisorGuruManager _hvGuruMgr;
@Override
public boolean configure(final String name, final Map<String, Object> params) throws ConfigurationException {
    // Register this manager as the guru for DomainRouter VMs *before* running the
    // parent configuration, so VPC routers are orchestrated by this subclass.
    _itMgr.registerGuru(VirtualMachine.Type.DomainRouter, this);
    return super.configure(name, params);
}
/**
 * Plugs a VPC router into a guest network: records the router/network mapping,
 * plugs a nic, sets up the guest network on the router and, when requested via
 * {@link Param#ReProgramGuestNetworks}, re-applies the networking rules.
 * On any failure the router is removed from the network again as cleanup.
 *
 * @param router  the VPC virtual router to plug in
 * @param network the guest network; anything but {@link TrafficType#Guest} is rejected
 * @param params  deployment parameters; only {@code Param.ReProgramGuestNetworks} is read here
 * @return true when the router was added (and rules applied, if requested)
 */
@Override
public boolean addVpcRouterToGuestNetwork(final VirtualRouter router, final Network network, final Map<VirtualMachineProfile.Param, Object> params)
        throws ConcurrentOperationException, ResourceUnavailableException, InsufficientCapacityException {
    if (network.getTrafficType() != TrafficType.Guest) {
        s_logger.warn("Network " + network + " is not of type " + TrafficType.Guest);
        return false;
    }
    // Add router to the Guest network
    boolean result = true;
    try {
        // 1) add nic to the router
        _routerDao.addRouterToGuestNetwork(router, network);
        final NicProfile guestNic = _itMgr.addVmToNetwork(router, network, null);
        // 2) setup guest network
        if (guestNic != null) {
            result = setupVpcGuestNetwork(network, router, true, guestNic);
        } else {
            s_logger.warn("Failed to add router " + router + " to guest network " + network);
            result = false;
        }
        // 3) apply networking rules, only when explicitly requested by the caller.
        // Boolean.TRUE.equals() covers both the missing-key and null-value cases
        // in a single lookup, without the unboxing of "(Boolean) ... == true".
        if (result && Boolean.TRUE.equals(params.get(Param.ReProgramGuestNetworks))) {
            sendNetworkRulesToRouter(router.getId(), network.getId());
        }
    } catch (final Exception ex) {
        s_logger.warn("Failed to add router " + router + " to network " + network + " due to ", ex);
        result = false;
    } finally {
        // Best-effort rollback: undo the partial plug if anything above failed.
        if (!result) {
            s_logger.debug("Removing the router " + router + " from network " + network + " as a part of cleanup");
            if (removeVpcRouterFromGuestNetwork(router, network)) {
                s_logger.debug("Removed the router " + router + " from network " + network + " as a part of cleanup");
            } else {
                s_logger.warn("Failed to remove the router " + router + " from network " + network + " as a part of cleanup");
            }
        } else {
            s_logger.debug("Successfully added router " + router + " to guest network " + network);
        }
    }
    return result;
}
/**
 * Unplugs a VPC router from a guest network: tears down the guest network
 * config on the router, removes the nic, and finally clears the
 * router/network mapping in the DB (only when everything succeeded).
 */
@Override
public boolean removeVpcRouterFromGuestNetwork(final VirtualRouter router, final Network network) throws ConcurrentOperationException,
        ResourceUnavailableException {
    if (network.getTrafficType() != TrafficType.Guest) {
        s_logger.warn("Network " + network + " is not of type " + TrafficType.Guest);
        return false;
    }
    boolean result = true;
    try {
        // Check if router is a part of the Guest network
        if (!_networkModel.isVmPartOfNetwork(router.getId(), network.getId())) {
            s_logger.debug("Router " + router + " is not a part of the Guest network " + network);
            // Nothing to remove; treated as success (result is still true here).
            return result;
        }
        result = setupVpcGuestNetwork(network, router, false, _networkModel.getNicProfile(router, network.getId(), null));
        if (!result) {
            s_logger.warn("Failed to destroy guest network config " + network + " on router " + router);
            // NOTE: returning here still runs the finally block, but result is
            // false so the DB mapping is intentionally left in place.
            return false;
        }
        result = result && _itMgr.removeVmFromNetwork(router, network, null);
    } finally {
        // Only drop the router<->network mapping once teardown fully succeeded.
        if (result) {
            _routerDao.removeRouterFromGuestNetwork(router.getId(), network.getId());
        }
    }
    return result;
}
/**
 * Sends the SetupGuestNetworkCommand to a running router to set up (add=true)
 * or destroy (add=false) the guest network config. A Stopped/Stopping router
 * is treated as success (command not needed); any other non-running state is
 * an error and raises ResourceUnavailableException.
 */
protected boolean setupVpcGuestNetwork(final Network network, final VirtualRouter router, final boolean add, final NicProfile guestNic) throws ConcurrentOperationException,
        ResourceUnavailableException {
    final State routerState = router.getState();
    if (routerState == State.Stopped || routerState == State.Stopping) {
        s_logger.debug("Router " + router.getInstanceName() + " is in " + routerState + ", so not sending setup guest network command to the backend");
        return true;
    }
    if (routerState != State.Running) {
        s_logger.warn("Unable to setup guest network on virtual router " + router + " is not in the right state " + routerState);
        throw new ResourceUnavailableException("Unable to setup guest network on the backend," + " virtual router " + router + " is not in the right state", DataCenter.class,
                router.getDataCenterId());
    }
    // Router is running: push the setup/destroy command and inspect the answer.
    final SetupGuestNetworkCommand setupCmd = _commandSetupHelper.createSetupGuestNetworkCommand((DomainRouterVO) router, add, guestNic);
    final Commands cmds = new Commands(Command.OnError.Stop);
    cmds.addCommand("setupguestnetwork", setupCmd);
    _nwHelper.sendCommandsToRouter(router, cmds);
    final Answer setupAnswer = cmds.getAnswer("setupguestnetwork");
    if (setupAnswer == null || !setupAnswer.getResult()) {
        final String setup = add ? "set" : "destroy";
        s_logger.warn("Unable to " + setup + " guest network on router " + router);
        return false;
    }
    return true;
}
/**
 * For VPC routers that are Starting/Running, strips the public/guest nics
 * from the start profile (they are plugged later by finalizeCommandsOnStart)
 * and appends VPC cidr/dns/domain and private-gateway info to the boot args.
 * Non-VPC routers go straight to the parent implementation.
 */
@Override
public boolean finalizeVirtualMachineProfile(final VirtualMachineProfile profile, final DeployDestination dest, final ReservationContext context) {
    final DomainRouterVO domainRouterVO = _routerDao.findById(profile.getId());
    final Long vpcId = domainRouterVO.getVpcId();
    if (vpcId != null) {
        if (domainRouterVO.getState() == State.Starting || domainRouterVO.getState() == State.Running) {
            String defaultDns1 = null;
            String defaultDns2 = null;
            // remove public and guest nics as we will plug them later
            final Iterator<NicProfile> it = profile.getNics().iterator();
            while (it.hasNext()) {
                final NicProfile nic = it.next();
                if (nic.getTrafficType() == TrafficType.Public || nic.getTrafficType() == TrafficType.Guest) {
                    // save dns information
                    // (taken from the public nic before it is removed below)
                    if (nic.getTrafficType() == TrafficType.Public) {
                        defaultDns1 = nic.getIPv4Dns1();
                        defaultDns2 = nic.getIPv4Dns2();
                    }
                    s_logger.debug("Removing nic " + nic + " of type " + nic.getTrafficType() + " from the nics passed on vm start. " + "The nic will be plugged later");
                    it.remove();
                }
            }
            // add vpc cidr/dns/networkdomain to the boot load args
            final StringBuilder buf = profile.getBootArgsBuilder();
            final Vpc vpc = _entityMgr.findById(Vpc.class, vpcId);
            buf.append(" vpccidr=" + vpc.getCidr() + " domain=" + vpc.getNetworkDomain());
            // NOTE: dns1 is appended unconditionally, so it may literally be
            // the string "null" when no public nic was present — TODO confirm
            // the router-side cmd_line.json handling tolerates this.
            buf.append(" dns1=").append(defaultDns1);
            if (defaultDns2 != null) {
                buf.append(" dns2=").append(defaultDns2);
            }
            // Advertise the VPC's private gateway ip (if configured) so the
            // router can pick it up from cmd_line.json on boot.
            VpcGatewayVO privateGatewayForVpc = _vpcGatewayDao.getPrivateGatewayForVpc(domainRouterVO.getVpcId());
            if (privateGatewayForVpc != null) {
                String ip4Address = privateGatewayForVpc.getIp4Address();
                buf.append(" privategateway=").append(ip4Address);
                s_logger.debug("Set privategateway field in cmd_line.json to " + ip4Address);
            } else {
                buf.append(" privategateway=None");
            }
        }
    }
    return super.finalizeVirtualMachineProfile(profile, dest, context);
}
/**
 * Builds the full command batch sent to a VPC router on (re)start:
 * ssh check, plug-nic commands for public/private/guest nics, ip
 * association, network ACLs, static routes, remote access VPN, and —
 * wrapped in aggregation start/finish markers — the per-guest-network
 * rule reprogramming. Non-VPC routers delegate to the parent class.
 * Returns false when the control nic is missing or nic preparation fails.
 */
@Override
public boolean finalizeCommandsOnStart(final Commands cmds, final VirtualMachineProfile profile) {
    final DomainRouterVO domainRouterVO = _routerDao.findById(profile.getId());
    Map<String, String> details = new HashMap<String, String>();
    // VMware needs the hypervisor-specific VM details attached to PlugNicCommands.
    if(profile.getHypervisorType() == Hypervisor.HypervisorType.VMware){
        HypervisorGuru hvGuru = _hvGuruMgr.getGuru(profile.getHypervisorType());
        VirtualMachineTO vmTO = hvGuru.implement(profile);
        if(vmTO.getDetails() != null){
            details = vmTO.getDetails();
        }
    }
    final boolean isVpc = domainRouterVO.getVpcId() != null;
    if (!isVpc) {
        return super.finalizeCommandsOnStart(cmds, profile);
    }
    if (domainRouterVO.getState() == State.Starting || domainRouterVO.getState() == State.Running) {
        // 1) FORM SSH CHECK COMMAND
        final NicProfile controlNic = getControlNic(profile);
        if (controlNic == null) {
            s_logger.error("Control network doesn't exist for the router " + domainRouterVO);
            return false;
        }
        finalizeSshAndVersionAndNetworkUsageOnStart(cmds, profile, domainRouterVO, controlNic);
        // 2) FORM PLUG NIC COMMANDS
        final List<Pair<Nic, Network>> guestNics = new ArrayList<Pair<Nic, Network>>();
        final List<Pair<Nic, Network>> publicNics = new ArrayList<Pair<Nic, Network>>();
        final Map<String, String> vlanMacAddress = new HashMap<String, String>();
        final List<? extends Nic> routerNics = _nicDao.listByVmId(profile.getId());
        // Partition the router's nics into guest and public; remember the MAC
        // per public vlan tag for the later ip-association commands.
        for (final Nic routerNic : routerNics) {
            final Network network = _networkModel.getNetwork(routerNic.getNetworkId());
            if (network.getTrafficType() == TrafficType.Guest) {
                final Pair<Nic, Network> guestNic = new Pair<Nic, Network>(routerNic, network);
                guestNics.add(guestNic);
            } else if (network.getTrafficType() == TrafficType.Public) {
                final Pair<Nic, Network> publicNic = new Pair<Nic, Network>(routerNic, network);
                publicNics.add(publicNic);
                final String vlanTag = BroadcastDomainType.getValue(routerNic.getBroadcastUri());
                vlanMacAddress.put(vlanTag, routerNic.getMacAddress());
            }
        }
        final List<Command> usageCmds = new ArrayList<Command>();
        // 3) PREPARE PLUG NIC COMMANDS
        try {
            // add VPC router to public networks
            final List<PublicIp> sourceNat = new ArrayList<PublicIp>(1);
            for (final Pair<Nic, Network> nicNtwk : publicNics) {
                final Nic publicNic = nicNtwk.first();
                final Network publicNtwk = nicNtwk.second();
                final IPAddressVO userIp = _ipAddressDao.findByIpAndSourceNetworkId(publicNtwk.getId(), publicNic.getIPv4Address());
                if (userIp.isSourceNat()) {
                    final PublicIp publicIp = PublicIp.createFromAddrAndVlan(userIp, _vlanDao.findById(userIp.getVlanId()));
                    sourceNat.add(publicIp);
                    // Backfill the router's public ip/netmask/MAC columns the
                    // first time a source-NAT nic is seen.
                    if (domainRouterVO.getPublicIpAddress() == null) {
                        final DomainRouterVO routerVO = _routerDao.findById(domainRouterVO.getId());
                        routerVO.setPublicIpAddress(publicNic.getIPv4Address());
                        routerVO.setPublicNetmask(publicNic.getIPv4Netmask());
                        routerVO.setPublicMacAddress(publicNic.getMacAddress());
                        _routerDao.update(routerVO.getId(), routerVO);
                    }
                }
                final PlugNicCommand plugNicCmd = new PlugNicCommand(_nwHelper.getNicTO(domainRouterVO, publicNic.getNetworkId(), publicNic.getBroadcastUri().toString()),
                        domainRouterVO.getInstanceName(), domainRouterVO.getType(), details);
                cmds.addCommand(plugNicCmd);
                final VpcVO vpc = _vpcDao.findById(domainRouterVO.getVpcId());
                final NetworkUsageCommand netUsageCmd = new NetworkUsageCommand(domainRouterVO.getPrivateIpAddress(), domainRouterVO.getInstanceName(), true, publicNic.getIPv4Address(), vpc.getCidr());
                usageCmds.add(netUsageCmd);
                // Ensure a usage-stats row exists for this public ip.
                UserStatisticsVO stats = _userStatsDao.findBy(domainRouterVO.getAccountId(), domainRouterVO.getDataCenterId(), publicNtwk.getId(), publicNic.getIPv4Address(), domainRouterVO.getId(),
                        domainRouterVO.getType().toString());
                if (stats == null) {
                    stats = new UserStatisticsVO(domainRouterVO.getAccountId(), domainRouterVO.getDataCenterId(), publicNic.getIPv4Address(), domainRouterVO.getId(), domainRouterVO.getType().toString(),
                            publicNtwk.getId());
                    _userStatsDao.persist(stats);
                }
            }
            // create ip assoc for source nat
            if (!sourceNat.isEmpty()) {
                _commandSetupHelper.createVpcAssociatePublicIPCommands(domainRouterVO, sourceNat, cmds, vlanMacAddress);
            }
            // add VPC router to guest networks
            for (final Pair<Nic, Network> nicNtwk : guestNics) {
                final Nic guestNic = nicNtwk.first();
                // plug guest nic
                final PlugNicCommand plugNicCmd = new PlugNicCommand(_nwHelper.getNicTO(domainRouterVO, guestNic.getNetworkId(), null), domainRouterVO.getInstanceName(), domainRouterVO.getType(), details);
                cmds.addCommand(plugNicCmd);
                if (!_networkModel.isPrivateGateway(guestNic.getNetworkId())) {
                    // set guest network
                    final VirtualMachine vm = _vmDao.findById(domainRouterVO.getId());
                    final NicProfile nicProfile = _networkModel.getNicProfile(vm, guestNic.getNetworkId(), null);
                    final SetupGuestNetworkCommand setupCmd = _commandSetupHelper.createSetupGuestNetworkCommand(domainRouterVO, true, nicProfile);
                    cmds.addCommand(setupCmd);
                } else {
                    // set private network
                    final PrivateIpVO ipVO = _privateIpDao.findByIpAndSourceNetworkId(guestNic.getNetworkId(), guestNic.getIPv4Address());
                    final Network network = _networkDao.findById(guestNic.getNetworkId());
                    // NOTE(review): return value discarded — looks like a leftover
                    // validation/side-effect-free call; confirm before removing.
                    BroadcastDomainType.getValue(network.getBroadcastUri());
                    final String netmask = NetUtils.getCidrNetmask(network.getCidr());
                    final PrivateIpAddress ip = new PrivateIpAddress(ipVO, network.getBroadcastUri().toString(), network.getGateway(), netmask, guestNic.getMacAddress());
                    final List<PrivateIpAddress> privateIps = new ArrayList<PrivateIpAddress>(1);
                    privateIps.add(ip);
                    _commandSetupHelper.createVpcAssociatePrivateIPCommands(domainRouterVO, privateIps, cmds, true);
                    final Long privateGwAclId = _vpcGatewayDao.getNetworkAclIdForPrivateIp(ipVO.getVpcId(), ipVO.getNetworkId(), ipVO.getIpAddress());
                    if (privateGwAclId != null) {
                        // set network acl on private gateway
                        final List<NetworkACLItemVO> networkACLs = _networkACLItemDao.listByACL(privateGwAclId);
                        s_logger.debug("Found " + networkACLs.size() + " network ACLs to apply as a part of VPC VR " + domainRouterVO + " start for private gateway ip = "
                                + ipVO.getIpAddress());
                        _commandSetupHelper.createNetworkACLsCommands(networkACLs, domainRouterVO, cmds, ipVO.getNetworkId(), true);
                    }
                }
            }
        } catch (final Exception ex) {
            s_logger.warn("Failed to add router " + domainRouterVO + " to network due to exception ", ex);
            return false;
        }
        // 4) RE-APPLY ALL STATIC ROUTE RULES
        final List<? extends StaticRoute> routes = _staticRouteDao.listByVpcId(domainRouterVO.getVpcId());
        final List<StaticRouteProfile> staticRouteProfiles = new ArrayList<StaticRouteProfile>(routes.size());
        final Map<Long, VpcGateway> gatewayMap = new HashMap<Long, VpcGateway>();
        // gatewayMap caches gateway lookups across routes sharing a gateway.
        for (final StaticRoute route : routes) {
            VpcGateway gateway = gatewayMap.get(route.getVpcGatewayId());
            if (gateway == null) {
                gateway = _entityMgr.findById(VpcGateway.class, route.getVpcGatewayId());
                gatewayMap.put(gateway.getId(), gateway);
            }
            staticRouteProfiles.add(new StaticRouteProfile(route, gateway));
        }
        s_logger.debug("Found " + staticRouteProfiles.size() + " static routes to apply as a part of vpc route " + domainRouterVO + " start");
        if (!staticRouteProfiles.isEmpty()) {
            _commandSetupHelper.createStaticRouteCommands(staticRouteProfiles, domainRouterVO, cmds);
        }
        // 5) RE-APPLY ALL REMOTE ACCESS VPNs
        final RemoteAccessVpnVO vpn = _vpnDao.findByAccountAndVpc(domainRouterVO.getAccountId(), domainRouterVO.getVpcId());
        if (vpn != null) {
            _commandSetupHelper.createApplyVpnCommands(true, vpn, domainRouterVO, cmds);
        }
        // 6) REPROGRAM GUEST NETWORK
        // Defaults to true; only an explicit Boolean FALSE parameter disables it.
        boolean reprogramGuestNtwks = true;
        if (profile.getParameter(Param.ReProgramGuestNetworks) != null && (Boolean) profile.getParameter(Param.ReProgramGuestNetworks) == false) {
            reprogramGuestNtwks = false;
        }
        final VirtualRouterProvider vrProvider = _vrProviderDao.findById(domainRouterVO.getElementId());
        if (vrProvider == null) {
            throw new CloudRuntimeException("Cannot find related virtual router provider of router: " + domainRouterVO.getHostName());
        }
        final Provider provider = Network.Provider.getProvider(vrProvider.getType().toString());
        if (provider == null) {
            throw new CloudRuntimeException("Cannot find related provider of virtual router provider: " + vrProvider.getType().toString());
        }
        // Per guest network: bracket ip-assoc/rule/dhcp commands between
        // aggregation Start/Finish markers so the router applies them as a batch.
        for (final Pair<Nic, Network> nicNtwk : guestNics) {
            final Nic guestNic = nicNtwk.first();
            final AggregationControlCommand startCmd = new AggregationControlCommand(Action.Start, domainRouterVO.getInstanceName(), controlNic.getIPv4Address(), _routerControlHelper.getRouterIpInNetwork(
                    guestNic.getNetworkId(), domainRouterVO.getId()));
            cmds.addCommand(startCmd);
            if (reprogramGuestNtwks) {
                finalizeIpAssocForNetwork(cmds, domainRouterVO, provider, guestNic.getNetworkId(), vlanMacAddress);
                finalizeNetworkRulesForNetwork(cmds, domainRouterVO, provider, guestNic.getNetworkId());
            }
            finalizeUserDataAndDhcpOnStart(cmds, domainRouterVO, provider, guestNic.getNetworkId());
            final AggregationControlCommand finishCmd = new AggregationControlCommand(Action.Finish, domainRouterVO.getInstanceName(), controlNic.getIPv4Address(), _routerControlHelper.getRouterIpInNetwork(
                    guestNic.getNetworkId(), domainRouterVO.getId()));
            cmds.addCommand(finishCmd);
        }
        // Add network usage commands
        cmds.addCommands(usageCmds);
    }
    return true;
}
/**
 * Extends the base rule finalization: for a Starting/Running VPC router on a
 * network whose ACL service is backed by the VPC virtual router, also queues
 * the network ACL commands for that guest network.
 */
@Override
protected void finalizeNetworkRulesForNetwork(final Commands cmds, final DomainRouterVO domainRouterVO, final Provider provider, final Long guestNetworkId) {
    super.finalizeNetworkRulesForNetwork(cmds, domainRouterVO, provider, guestNetworkId);
    if (domainRouterVO.getVpcId() == null) {
        return;
    }
    final State routerState = domainRouterVO.getState();
    if (routerState != State.Starting && routerState != State.Running) {
        return;
    }
    if (!_networkModel.isProviderSupportServiceInNetwork(guestNetworkId, Service.NetworkACL, Provider.VPCVirtualRouter)) {
        return;
    }
    final List<NetworkACLItemVO> networkACLs = _networkACLMgr.listNetworkACLItems(guestNetworkId);
    if (networkACLs != null && !networkACLs.isEmpty()) {
        s_logger.debug("Found " + networkACLs.size() + " network ACLs to apply as a part of VPC VR " + domainRouterVO + " start for guest network id=" + guestNetworkId);
        _commandSetupHelper.createNetworkACLsCommands(networkACLs, domainRouterVO, cmds, guestNetworkId, false);
    }
}
/**
 * Resolves the router and its provider, builds the network-rule commands for
 * the given network and ships them to the router in one batch.
 *
 * @throws CloudRuntimeException when the router's provider cannot be resolved
 */
protected boolean sendNetworkRulesToRouter(final long routerId, final long networkId) throws ResourceUnavailableException {
    final DomainRouterVO router = _routerDao.findById(routerId);
    final Commands commands = new Commands(OnError.Continue);
    final VirtualRouterProvider routerProvider = _vrProviderDao.findById(router.getElementId());
    if (routerProvider == null) {
        throw new CloudRuntimeException("Cannot find related virtual router provider of router: " + router.getHostName());
    }
    final Provider networkProvider = Network.Provider.getProvider(routerProvider.getType().toString());
    if (networkProvider == null) {
        throw new CloudRuntimeException("Cannot find related provider of virtual router provider: " + routerProvider.getType().toString());
    }
    finalizeNetworkRulesForNetwork(commands, router, networkProvider, networkId);
    return _nwHelper.sendCommandsToRouter(router, commands);
}
/**
 * Associates (add=true) or disassociates (add=false) the private-gateway ip
 * of the given nic on a running router. A Stopped/Stopping router is a no-op
 * success; any other non-running state raises ResourceUnavailableException.
 *
 * @param router     the VPC router to configure
 * @param add        true to associate the private ip, false to release it
 * @param privateNic nic on the private-gateway network
 * @return true on success (or when the router is stopped/stopping)
 * @throws ResourceUnavailableException when the router is in an unusable state
 */
protected boolean setupVpcPrivateNetwork(final VirtualRouter router, final boolean add, final NicProfile privateNic) throws ResourceUnavailableException {
    if (router.getState() == State.Running) {
        final PrivateIpVO ipVO = _privateIpDao.findByIpAndSourceNetworkId(privateNic.getNetworkId(), privateNic.getIPv4Address());
        final Network network = _networkDao.findById(privateNic.getNetworkId());
        final String netmask = NetUtils.getCidrNetmask(network.getCidr());
        final PrivateIpAddress ip = new PrivateIpAddress(ipVO, network.getBroadcastUri().toString(), network.getGateway(), netmask, privateNic.getMacAddress());
        final List<PrivateIpAddress> privateIps = new ArrayList<PrivateIpAddress>(1);
        privateIps.add(ip);
        final Commands cmds = new Commands(Command.OnError.Stop);
        _commandSetupHelper.createVpcAssociatePrivateIPCommands(router, privateIps, cmds, add);
        try {
            if (_nwHelper.sendCommandsToRouter(router, cmds)) {
                s_logger.debug("Successfully applied ip association for ip " + ip + " in vpc network " + network);
                return true;
            } else {
                s_logger.warn("Failed to associate ip address " + ip + " in vpc network " + network);
                return false;
            }
        } catch (final Exception ex) {
            // Fixes: previously the caught exception was dropped (message-only
            // log) and the message misspelled "router" as "rotuer".
            s_logger.warn("Failed to send " + (add ? "add " : "delete ") + " private network " + network + " commands to router ", ex);
            return false;
        }
    } else if (router.getState() == State.Stopped || router.getState() == State.Stopping) {
        s_logger.debug("Router " + router.getInstanceName() + " is in " + router.getState() + ", so not sending setup private network command to the backend");
    } else {
        s_logger.warn("Unable to setup private gateway, virtual router " + router + " is not in the right state " + router.getState());
        throw new ResourceUnavailableException("Unable to setup Private gateway on the backend," + " virtual router " + router + " is not in the right state",
                DataCenter.class, router.getDataCenterId());
    }
    return true;
}
/**
 * Tears down a private gateway on the given router: releases the private ip,
 * revokes the gateway's ACL items and unplugs the router from the private
 * network. Returns true when the router had no nic on the gateway network
 * (nothing to do) or when the full teardown succeeded.
 */
@Override
public boolean destroyPrivateGateway(final PrivateGateway gateway, final VirtualRouter router) throws ConcurrentOperationException, ResourceUnavailableException {
    boolean result = true;
    if (!_networkModel.isVmPartOfNetwork(router.getId(), gateway.getNetworkId())) {
        // Log message typo fixed: "no need to removed it" -> "no need to remove it".
        s_logger.debug("Router doesn't have nic for gateway " + gateway + " so no need to remove it");
        return result;
    }
    final Network privateNetwork = _networkModel.getNetwork(gateway.getNetworkId());
    final NicProfile nicProfile = _networkModel.getNicProfile(router, privateNetwork.getId(), null);
    s_logger.debug("Releasing private ip for gateway " + gateway + " from " + router);
    result = setupVpcPrivateNetwork(router, false, nicProfile);
    if (!result) {
        s_logger.warn("Failed to release private ip for gateway " + gateway + " on router " + router);
        return false;
    }
    // revoke network acl on the private gateway.
    if (!_networkACLMgr.revokeACLItemsForPrivateGw(gateway)) {
        s_logger.debug("Failed to delete network acl items on " + gateway + " from router " + router);
        return false;
    }
    s_logger.debug("Removing router " + router + " from private network " + privateNetwork + " as a part of delete private gateway");
    result = result && _itMgr.removeVmFromNetwork(router, privateNetwork, null);
    // Log message typo fixed: "gateawy" -> "gateway".
    s_logger.debug("Private gateway " + gateway + " is removed from router " + router);
    return result;
}
/**
 * For VPC routers in Starting/Running state, re-applies public ip
 * associations for the guest network (must happen before PF/LB/VPN rules).
 * Non-VPC routers delegate to the parent implementation.
 */
@Override
protected void finalizeIpAssocForNetwork(final Commands cmds, final VirtualRouter domainRouterVO, final Provider provider, final Long guestNetworkId,
        final Map<String, String> vlanMacAddress) {
    if (domainRouterVO.getVpcId() == null) {
        super.finalizeIpAssocForNetwork(cmds, domainRouterVO, provider, guestNetworkId, vlanMacAddress);
        return;
    }
    final State routerState = domainRouterVO.getState();
    if (routerState != State.Starting && routerState != State.Running) {
        return;
    }
    final ArrayList<? extends PublicIpAddress> publicIps = getPublicIpsToApply(domainRouterVO, provider, guestNetworkId, IpAddress.State.Releasing);
    if (publicIps == null || publicIps.isEmpty()) {
        return;
    }
    s_logger.debug("Found " + publicIps.size() + " ip(s) to apply as a part of domR " + domainRouterVO + " start.");
    // Re-apply public ip addresses - should come before PF/LB/VPN
    _commandSetupHelper.createVpcAssociatePublicIPCommands(domainRouterVO, publicIps, cmds, vlanMacAddress);
}
/**
 * Creates a site-to-site VPN connection on the given router.
 * The router must be Running.
 */
@Override
public boolean startSite2SiteVpn(final Site2SiteVpnConnection conn, final VirtualRouter router) throws ResourceUnavailableException {
    checkRouterRunningForSite2SiteVpn(router);
    return applySite2SiteVpn(true, router, conn);
}

/**
 * Removes a site-to-site VPN connection from the given router.
 * The router must be Running.
 */
@Override
public boolean stopSite2SiteVpn(final Site2SiteVpnConnection conn, final VirtualRouter router) throws ResourceUnavailableException {
    checkRouterRunningForSite2SiteVpn(router);
    return applySite2SiteVpn(false, router, conn);
}

/**
 * Shared guard for start/stop of site-to-site VPN (previously duplicated in
 * both methods): rejects any router that is not Running.
 *
 * @throws ResourceUnavailableException when the router is not in Running state
 */
private void checkRouterRunningForSite2SiteVpn(final VirtualRouter router) throws ResourceUnavailableException {
    if (router.getState() != State.Running) {
        s_logger.warn("Unable to apply site-to-site VPN configuration, virtual router is not in the right state " + router.getState());
        throw new ResourceUnavailableException("Unable to apply site 2 site VPN configuration," + " virtual router is not in the right state", DataCenter.class,
                router.getDataCenterId());
    }
}
/**
 * Builds the site-to-site VPN configuration commands (create or delete,
 * per isCreate) and sends them to the router in a single batch.
 */
protected boolean applySite2SiteVpn(final boolean isCreate, final VirtualRouter router, final Site2SiteVpnConnection conn) throws ResourceUnavailableException {
    final Commands vpnCommands = new Commands(Command.OnError.Continue);
    _commandSetupHelper.createSite2SiteVpnCfgCommands(conn, isCreate, router, vpnCommands);
    return _nwHelper.sendCommandsToRouter(router, vpnCommands);
}
/**
 * Reconciles the router's public nics against the desired set of public ips.
 * First pass collects nics to unplug (ips in Releasing state, including ips
 * no longer allocated to the VPC); second pass collects nics to plug for
 * Allocated/Allocating ips, and where a nic for the same vlan tag is already
 * marked for unplug it is reused in place (ip rewritten on the NicVO) instead.
 *
 * @param publicIps desired public ip addresses (their state may be mutated here)
 * @param router    the router whose nics are examined
 * @return pair of (vlanTag -> ip to plug, vlanTag -> ip to unplug)
 */
protected Pair<Map<String, PublicIpAddress>, Map<String, PublicIpAddress>> getNicsToChangeOnRouter(final List<? extends PublicIpAddress> publicIps, final VirtualRouter router) {
    // 1) check which nics need to be plugged/unplugged and plug/unplug them
    final Map<String, PublicIpAddress> nicsToPlug = new HashMap<String, PublicIpAddress>();
    final Map<String, PublicIpAddress> nicsToUnplug = new HashMap<String, PublicIpAddress>();
    // find out nics to unplug
    for (final PublicIpAddress ip : publicIps) {
        final long publicNtwkId = ip.getNetworkId();
        // if ip is not associated to any network, and there are no firewall
        // rules, release it on the backend
        if (!_vpcMgr.isIpAllocatedToVpc(ip)) {
            ip.setState(IpAddress.State.Releasing);
        }
        if (ip.getState() == IpAddress.State.Releasing) {
            final Nic nic = _nicDao.findByIp4AddressAndNetworkIdAndInstanceId(publicNtwkId, router.getId(), ip.getAddress().addr());
            if (nic != null) {
                nicsToUnplug.put(ip.getVlanTag(), ip);
                s_logger.debug("Need to unplug the nic for ip=" + ip + "; vlan=" + ip.getVlanTag() + " in public network id =" + publicNtwkId);
            }
        }
    }
    // find out nics to plug
    for (final PublicIpAddress ip : publicIps) {
        final URI broadcastUri = BroadcastDomainType.Vlan.toUri(ip.getVlanTag());
        final long publicNtwkId = ip.getNetworkId();
        // if ip is not associated to any network, and there are no firewall
        // rules, release it on the backend
        if (!_vpcMgr.isIpAllocatedToVpc(ip)) {
            ip.setState(IpAddress.State.Releasing);
        }
        if (ip.getState() == IpAddress.State.Allocated || ip.getState() == IpAddress.State.Allocating) {
            // nic has to be plugged only when there are no nics for this
            // vlan tag exist on VR
            final Nic nic = _nicDao.findByNetworkIdInstanceIdAndBroadcastUri(publicNtwkId, router.getId(), broadcastUri.toString());
            if (nic == null && nicsToPlug.get(ip.getVlanTag()) == null) {
                nicsToPlug.put(ip.getVlanTag(), ip);
                s_logger.debug("Need to plug the nic for ip=" + ip + "; vlan=" + ip.getVlanTag() + " in public network id =" + publicNtwkId);
            } else {
                // A nic already exists (or is queued) for this vlan; if that
                // same vlan was queued for unplug, rewrite its nic's ip and
                // cancel the unplug instead of replug.
                final PublicIpAddress nicToUnplug = nicsToUnplug.get(ip.getVlanTag());
                if (nicToUnplug != null) {
                    final NicVO nicVO = _nicDao.findByIp4AddressAndNetworkIdAndInstanceId(publicNtwkId, router.getId(), nicToUnplug.getAddress().addr());
                    nicVO.setIPv4Address(ip.getAddress().addr());
                    _nicDao.update(nicVO.getId(), nicVO);
                    s_logger.debug("Updated the nic " + nicVO + " with the new ip address " + ip.getAddress().addr());
                    nicsToUnplug.remove(ip.getVlanTag());
                }
            }
        }
    }
    final Pair<Map<String, PublicIpAddress>, Map<String, PublicIpAddress>> nicsToChange = new Pair<Map<String, PublicIpAddress>, Map<String, PublicIpAddress>>(nicsToPlug,
            nicsToUnplug);
    return nicsToChange;
}
/**
 * Extends the base stop handling: when the stopped router belongs to a VPC,
 * its site-to-site VPN connections are marked as Disconnected.
 */
@Override
public void finalizeStop(final VirtualMachineProfile profile, final Answer answer) {
    super.finalizeStop(profile, answer);
    // Mark VPN connections as Disconnected
    final DomainRouterVO stoppedRouter = _routerDao.findById(profile.getId());
    final Long routerVpcId = stoppedRouter.getVpcId();
    if (routerVpcId == null) {
        return;
    }
    _s2sVpnMgr.markDisconnectVpnConnByVpc(routerVpcId);
}
/**
 * Lists all virtual routers belonging to the given VPC.
 *
 * @param vpcId id of the VPC
 * @return the routers associated with the VPC, as returned by the DAO
 */
@Override
public List<DomainRouterVO> getVpcRouters(final long vpcId) {
    return _routerDao.listByVpcId(vpcId);
}
/** Lifecycle hook; this manager needs no additional startup work. */
@Override
public boolean start() {
    return true;
}
/** Lifecycle hook; this manager needs no additional shutdown work. */
@Override
public boolean stop() {
    return true;
}
/**
 * Applies a remote access VPN on a running router: sends the apply-VPN
 * command batch, then verifies both the "users" answer (user provisioning)
 * and the "startVpn" answer, raising ResourceUnavailableException with the
 * backend details when either fails.
 */
@Override
public boolean startRemoteAccessVpn(final RemoteAccessVpn vpn, final VirtualRouter router) throws ResourceUnavailableException {
    if (router.getState() != State.Running) {
        s_logger.warn("Unable to apply remote access VPN configuration, virtual router is not in the right state " + router.getState());
        throw new ResourceUnavailableException("Unable to apply remote access VPN configuration," + " virtual router is not in the right state", DataCenter.class,
                router.getDataCenterId());
    }
    final Commands cmds = new Commands(Command.OnError.Stop);
    _commandSetupHelper.createApplyVpnCommands(true, vpn, router, cmds);
    try {
        _agentMgr.send(router.getHostId(), cmds);
    } catch (final OperationTimedoutException e) {
        s_logger.debug("Failed to start remote access VPN: ", e);
        throw new AgentUnavailableException("Unable to send commands to virtual router ", router.getHostId(), e);
    }
    // First check the user-provisioning answer...
    Answer answer = cmds.getAnswer("users");
    if (!answer.getResult()) {
        s_logger.error("Unable to start vpn: unable add users to vpn in zone " + router.getDataCenterId() + " for account " + vpn.getAccountId() + " on domR: "
                + router.getInstanceName() + " due to " + answer.getDetails());
        throw new ResourceUnavailableException("Unable to start vpn: Unable to add users to vpn in zone " + router.getDataCenterId() + " for account " + vpn.getAccountId()
                + " on domR: " + router.getInstanceName() + " due to " + answer.getDetails(), DataCenter.class, router.getDataCenterId());
    }
    // ...then the VPN start answer itself.
    answer = cmds.getAnswer("startVpn");
    if (!answer.getResult()) {
        s_logger.error("Unable to start vpn in zone " + router.getDataCenterId() + " for account " + vpn.getAccountId() + " on domR: " + router.getInstanceName() + " due to "
                + answer.getDetails());
        throw new ResourceUnavailableException("Unable to start vpn in zone " + router.getDataCenterId() + " for account " + vpn.getAccountId() + " on domR: "
                + router.getInstanceName() + " due to " + answer.getDetails(), DataCenter.class, router.getDataCenterId());
    }
    return true;
}
/**
 * Removes a remote access VPN from the router. A Stopped router is a no-op
 * success; a Running router gets the delete-VPN command batch; any other
 * state raises ResourceUnavailableException.
 *
 * @return whether the delete commands were accepted by the router. (Bug fix:
 *         this method previously always returned {@code true}, discarding the
 *         computed {@code result} and hiding command failures from callers.)
 */
@Override
public boolean stopRemoteAccessVpn(final RemoteAccessVpn vpn, final VirtualRouter router) throws ResourceUnavailableException {
    boolean result = true;
    if (router.getState() == State.Running) {
        final Commands cmds = new Commands(Command.OnError.Continue);
        _commandSetupHelper.createApplyVpnCommands(false, vpn, router, cmds);
        result = result && _nwHelper.sendCommandsToRouter(router, cmds);
    } else if (router.getState() == State.Stopped) {
        s_logger.debug("Router " + router + " is in Stopped state, not sending deleteRemoteAccessVpn command to it");
    } else {
        s_logger.warn("Failed to delete remote access VPN: domR " + router + " is not in right state " + router.getState());
        throw new ResourceUnavailableException("Failed to delete remote access VPN: domR is not in right state " + router.getState(), DataCenter.class,
                router.getDataCenterId());
    }
    return result;
}
/**
 * Intentional no-op override; see the inline comment for why the parent's
 * handling must not run again from this listener.
 */
@Override
public boolean postStateTransitionEvent(final StateMachine2.Transition<State, VirtualMachine.Event> transition, final VirtualMachine vo, final boolean status, final Object opaque) {
    // Without this VirtualNetworkApplianceManagerImpl.postStateTransitionEvent() gets called twice as part of listeners -
    // once from VpcVirtualNetworkApplianceManagerImpl and once from VirtualNetworkApplianceManagerImpl itself
    return true;
}
}
| |
/**
* Copyright 2005-2013 Restlet S.A.S.
*
* The contents of this file are subject to the terms of one of the following
* open source licenses: Apache 2.0 or LGPL 3.0 or LGPL 2.1 or CDDL 1.0 or EPL
* 1.0 (the "Licenses"). You can select the license that you prefer but you may
* not use this file except in compliance with one of these Licenses.
*
* You can obtain a copy of the Apache 2.0 license at
* http://www.opensource.org/licenses/apache-2.0
*
* You can obtain a copy of the LGPL 3.0 license at
* http://www.opensource.org/licenses/lgpl-3.0
*
* You can obtain a copy of the LGPL 2.1 license at
* http://www.opensource.org/licenses/lgpl-2.1
*
* You can obtain a copy of the CDDL 1.0 license at
* http://www.opensource.org/licenses/cddl1
*
* You can obtain a copy of the EPL 1.0 license at
* http://www.opensource.org/licenses/eclipse-1.0
*
* See the Licenses for the specific language governing permissions and
* limitations under the Licenses.
*
* Alternatively, you can obtain a royalty free commercial license with less
* limitations, transferable or non-transferable, directly at
* http://www.restlet.com/products/restlet-framework
*
* Restlet is a registered trademark of Restlet S.A.S.
*/
package org.restlet.util;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import org.restlet.Context;
/**
* Modifiable list of entries with many helper methods. Note that this class
* uses the Parameter class as the template type. This allows you to use an
* instance of this class as any other java.util.List, in particular all the
* helper methods in java.util.Collections.
*
* @author Jerome Louvel
* @param <T>
* The contained type
* @see org.restlet.data.Parameter
* @see java.util.Collections
* @see java.util.List
*/
public class Series<T extends NamedValue<String>> extends WrapperList<T> {
/**
* A marker for empty values to differentiate from non existing values
* (null).
*/
public static final Object EMPTY_VALUE = new Object();
/**
 * Returns an unmodifiable view of the specified series. Attempts to call a
 * modification method will throw an UnsupportedOperationException.
 *
 * @param series
 *            The series for which an unmodifiable view should be returned.
 * @return The unmodifiable view of the specified series.
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public static Series<? extends NamedValue> unmodifiableSeries(
        final Series<? extends NamedValue> series) {
    // Wrap the delegate list so every mutator throws, then expose it through
    // a new Series sharing the same entry class.
    final List readOnlyDelegate = java.util.Collections
            .unmodifiableList(series.getDelegate());
    return new Series(series.entryClass, readOnlyDelegate);
}
/** The entry class. */
private final Class<T> entryClass;
/**
* Constructor.
*/
public Series(Class<T> entryClass) {
super();
this.entryClass = entryClass;
}
/**
* Constructor.
*
* @param initialCapacity
* The initial list capacity.
*/
public Series(Class<T> entryClass, int initialCapacity) {
super(initialCapacity);
this.entryClass = entryClass;
}
/**
* Constructor.
*
* @param delegate
* The delegate list.
*/
public Series(Class<T> entryClass, List<T> delegate) {
super(delegate);
this.entryClass = entryClass;
}
/**
* Creates then adds a parameter at the end of the list.
*
* @param name
* The parameter name.
* @param value
* The parameter value.
* @return True (as per the general contract of the Collection.add method).
*/
public boolean add(String name, String value) {
return add(createEntry(name, value));
}
/**
* Copies the parameters whose name is a key in the given map.<br>
* If a matching parameter is found, its value is put in the map.<br>
* If multiple values are found, a list is created and set in the map.
*
* @param params
* The map controlling the copy.
*/
@SuppressWarnings("unchecked")
public void copyTo(Map<String, Object> params) {
NamedValue<String> param;
Object currentValue = null;
for (Iterator<T> iter = iterator(); iter.hasNext();) {
param = iter.next();
if (params.containsKey(param.getName())) {
currentValue = params.get(param.getName());
if (currentValue != null) {
List<Object> values = null;
if (currentValue instanceof List) {
// Multiple values already found for this entry
values = (List<Object>) currentValue;
} else {
// Second value found for this entry
// Create a list of values
values = new ArrayList<Object>();
values.add(currentValue);
params.put(param.getName(), values);
}
if (param.getValue() == null) {
values.add(Series.EMPTY_VALUE);
} else {
values.add(param.getValue());
}
} else {
if (param.getValue() == null) {
params.put(param.getName(), Series.EMPTY_VALUE);
} else {
params.put(param.getName(), param.getValue());
}
}
}
}
}
/**
* Creates a new entry.
*
* @param name
* The name of the entry.
* @param value
* The value of the entry.
* @return A new entry.
*/
public T createEntry(String name, String value) {
try {
return this.entryClass.getConstructor(String.class, String.class)
.newInstance(name, value);
} catch (Exception e) {
Context.getCurrentLogger().log(Level.WARNING,
"Unable to create a series entry", e);
return null;
}
}
/**
* Creates a new series.
*
* @param delegate
* Optional delegate series.
* @return A new series.
* @deprecated Use {@link Series#Series(Class, List)} constructor instead.
*/
@Deprecated
public Series<T> createSeries(List<T> delegate) {
return new Series<T>(this.entryClass, delegate);
}
/**
* Tests the equality of two string, potentially null, which a case
* sensitivity flag.
*
* @param value1
* The first value.
* @param value2
* The second value.
* @param ignoreCase
* Indicates if the test should be case insensitive.
* @return True if both values are equal.
*/
private boolean equals(String value1, String value2, boolean ignoreCase) {
boolean result = (value1 == value2);
if (!result) {
if ((value1 != null) && (value2 != null)) {
if (ignoreCase) {
result = value1.equalsIgnoreCase(value2);
} else {
result = value1.equals(value2);
}
}
}
return result;
}
/**
* Returns the first parameter found with the given name.
*
* @param name
* The parameter name (case sensitive).
* @return The first parameter found with the given name.
*/
public T getFirst(String name) {
return getFirst(name, false);
}
/**
* Returns the first parameter found with the given name.
*
* @param name
* The parameter name.
* @param ignoreCase
* Indicates if the name comparison is case insensitive.
* @return The first parameter found with the given name.
*/
public T getFirst(String name, boolean ignoreCase) {
for (T param : this) {
if (equals(param.getName(), name, ignoreCase)) {
return param;
}
}
return null;
}
/**
* Returns the value of the first parameter found with the given name.
*
* @param name
* The parameter name (case sensitive).
* @return The value of the first parameter found with the given name.
*/
public String getFirstValue(String name) {
return getFirstValue(name, false);
}
/**
* Returns the value of the first parameter found with the given name.
*
* @param name
* The parameter name.
* @param ignoreCase
* Indicates if the name comparison is case sensitive.
* @return The value of the first parameter found with the given name.
*/
public String getFirstValue(String name, boolean ignoreCase) {
return getFirstValue(name, ignoreCase, null);
}
/**
* Returns the value of the first parameter found with the given name.
*
* @param name
* The parameter name.
* @param ignoreCase
* Indicates if the name comparison is case sensitive.
* @param defaultValue
* The default value to return if no matching parameter found or
* if the parameter has a null value.
* @return The value of the first parameter found with the given name or the
* default value.
*/
public String getFirstValue(String name, boolean ignoreCase,
String defaultValue) {
String result = defaultValue;
NamedValue<String> param = getFirst(name, ignoreCase);
if ((param != null) && (param.getValue() != null)) {
result = param.getValue();
}
return result;
}
/**
* Returns the value of the first parameter found with the given name.
*
* @param name
* The parameter name (case sensitive).
* @param defaultValue
* The default value to return if no matching parameter found or
* if the parameter has a null value.
* @return The value of the first parameter found with the given name or the
* default value.
*/
public String getFirstValue(String name, String defaultValue) {
return getFirstValue(name, false, defaultValue);
}
/**
* Returns the set of parameter names (case sensitive).
*
* @return The set of parameter names.
*/
public Set<String> getNames() {
Set<String> result = new HashSet<String>();
for (NamedValue<String> param : this) {
result.add(param.getName());
}
return result;
}
/**
* Returns the values of the parameters with a given name. If multiple
* parameters with the same name are found, all values are concatenated and
* separated by a comma (like for HTTP message headers).
*
* @param name
* The parameter name (case insensitive).
* @return The values of the parameters with a given name.
*/
public String getValues(String name) {
return getValues(name, ",", true);
}
/**
* Returns the parameter values with a given name. If multiple parameters
* with the same name are found, all values are concatenated and separated
* by the given separator.
*
* @param name
* The parameter name.
* @param separator
* The separator character.
* @param ignoreCase
* Indicates if the name comparison is case sensitive.
* @return The sequence of values.
*/
public String getValues(String name, String separator, boolean ignoreCase) {
String result = null;
StringBuilder sb = null;
for (final T param : this) {
if ((ignoreCase && param.getName().equalsIgnoreCase(name))
|| param.getName().equals(name)) {
if (sb == null) {
if (result == null) {
result = param.getValue();
} else {
sb = new StringBuilder();
sb.append(result).append(separator)
.append(param.getValue());
}
} else {
sb.append(separator).append(param.getValue());
}
}
}
if (sb != null) {
result = sb.toString();
}
return result;
}
/**
* Returns an array of all the values associated to the given parameter
* name.
*
* @param name
* The parameter name to match.
* @return The array of values.
*/
public String[] getValuesArray(String name) {
return getValuesArray(name, false);
}
/**
* Returns an array of all the values associated to the given parameter
* name.
*
* @param name
* The parameter name to match.
* @param ignoreCase
* Indicates if the name comparison is case sensitive.
* @return The array of values.
*/
public String[] getValuesArray(String name, boolean ignoreCase) {
return getValuesArray(name, ignoreCase, null);
}
/**
* Returns an array of all the values associated to the given parameter
* name.
*
* @param name
* The parameter name to match.
* @param ignoreCase
* Indicates if the name comparison is case sensitive.
* @param defaultValue
* The default value to return if no matching parameter found or
* if the parameter has a null value.
* @return The array of values.
*/
public String[] getValuesArray(String name, boolean ignoreCase,
String defaultValue) {
String[] result = null;
List<T> params = subList(name, ignoreCase);
if ((params.size() == 0) && (defaultValue != null)) {
result = new String[1];
result[0] = defaultValue;
} else {
result = new String[params.size()];
for (int i = 0; i < params.size(); i++) {
result[i] = params.get(i).getValue();
}
}
return result;
}
/**
* Returns an array of all the values associated to the given parameter
* name.
*
* @param name
* The parameter name to match.
* @param defaultValue
* The default value to return if no matching parameter found or
* if the parameter has a null value.
* @return The array of values.
*/
public String[] getValuesArray(String name, String defaultValue) {
return getValuesArray(name, false, defaultValue);
}
/**
* Returns a map of name, value pairs. The order of the map keys is
* respected based on the series order. When a name has multiple values,
* only the first one is put in the map.
*
* @return The map of name, value pairs.
*/
public Map<String, String> getValuesMap() {
Map<String, String> result = new LinkedHashMap<String, String>();
for (NamedValue<String> param : this) {
if (!result.containsKey(param.getName())) {
result.put(param.getName(), param.getValue());
}
}
return result;
}
/**
* Removes all the parameters with a given name.
*
* @param name
* The parameter name (case sensitive).
* @return True if the list changed.
*/
public boolean removeAll(String name) {
return removeAll(name, false);
}
/**
* Removes all the parameters with a given name.
*
* @param name
* The parameter name.
* @param ignoreCase
* Indicates if the name comparison is case insensitive.
* @return True if the list changed.
*/
public boolean removeAll(String name, boolean ignoreCase) {
boolean changed = false;
NamedValue<String> param = null;
for (Iterator<T> iter = iterator(); iter.hasNext();) {
param = iter.next();
if (equals(param.getName(), name, ignoreCase)) {
iter.remove();
changed = true;
}
}
return changed;
}
/**
* Removes from this list the first entry whose name equals the specified
* name ignoring the case.
*
* @param name
* The name of the entries to be removed (case sensitive).
* @return false if no entry has been removed, true otherwise.
*/
public boolean removeFirst(String name) {
return removeFirst(name, false);
}
/**
* Removes from this list the first entry whose name equals the specified
* name ignoring the case or not.
*
* @param name
* The name of the entries to be removed.
* @param ignoreCase
* Indicates if the name comparison is case insensitive.
* @return false if no entry has been removed, true otherwise.
*/
public boolean removeFirst(String name, boolean ignoreCase) {
boolean changed = false;
NamedValue<String> param = null;
for (final Iterator<T> iter = iterator(); iter.hasNext() && !changed;) {
param = iter.next();
if (equals(param.getName(), name, ignoreCase)) {
iter.remove();
changed = true;
}
}
return changed;
}
/**
* Replaces the value of the first parameter with the given name and removes
* all other parameters with the same name. The name matching is case
* sensitive.
*
* @param name
* The parameter name.
* @param value
* The value to set.
* @return The parameter set or added.
*/
public T set(String name, String value) {
return set(name, value, false);
}
/**
* Replaces the value of the first parameter with the given name and removes
* all other parameters with the same name.
*
* @param name
* The parameter name.
* @param value
* The value to set.
* @param ignoreCase
* Indicates if the name comparison is case insensitive.
* @return The parameter set or added.
*/
public T set(String name, String value, boolean ignoreCase) {
T result = null;
T param = null;
boolean found = false;
for (final Iterator<T> iter = iterator(); iter.hasNext();) {
param = iter.next();
if (equals(param.getName(), name, ignoreCase)) {
if (found) {
// Remove other entries with the same name
iter.remove();
} else {
// Change the value of the first matching entry
found = true;
param.setValue(value);
result = param;
}
}
}
if (!found) {
add(name, value);
}
return result;
}
/**
* Returns a view of the portion of this list between the specified
* fromIndex, inclusive, and toIndex, exclusive.
*
* @param fromIndex
* The start position.
* @param toIndex
* The end position (exclusive).
* @return The sub-list.
*/
@Override
public Series<T> subList(int fromIndex, int toIndex) {
return new Series<T>(this.entryClass, getDelegate().subList(fromIndex,
toIndex));
}
/**
* Returns a list of all the values associated to the parameter name.
*
* @param name
* The parameter name (case sensitive).
* @return The list of values.
*/
public Series<T> subList(String name) {
return subList(name, false);
}
/**
* Returns a list of all the values associated to the parameter name.
*
* @param name
* The parameter name.
* @param ignoreCase
* Indicates if the name comparison is case insensitive.
* @return The list of values.
*/
public Series<T> subList(String name, boolean ignoreCase) {
Series<T> result = new Series<T>(this.entryClass);
for (T param : this) {
if (equals(param.getName(), name, ignoreCase)) {
result.add(param);
}
}
return result;
}
}
| |
package openblocks.common;
import java.util.Map;
import java.util.Random;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityList;
import net.minecraft.entity.EntityLiving;
import net.minecraft.entity.item.EntityItem;
import net.minecraft.entity.passive.EntityOcelot;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Items;
import net.minecraft.item.ItemStack;
import net.minecraft.world.World;
import net.minecraftforge.event.entity.living.LivingDropsEvent;
import openblocks.Config;
import openblocks.OpenBlocks;
import openblocks.common.item.ItemTrophyBlock;
import openblocks.common.tileentity.TileEntityTrophy;
import openblocks.trophy.*;
import openmods.Log;
import openmods.reflection.ReflectionHelper;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import cpw.mods.fml.common.eventhandler.SubscribeEvent;
/**
 * Handles trophy blocks: keeps a cache of dummy display entities per trophy
 * type, defines the {@link Trophy} enum of supported mobs, and listens for
 * living entity drops to occasionally drop a matching trophy item.
 */
public class TrophyHandler {
    /** RNG used for the trophy drop roll in {@link #onLivingDrops}. */
    private static final Random DROP_RAND = new Random();
    /**
     * Cache of dummy entities used for rendering/sound. A trophy whose entity
     * creation failed is cached with a null value (negative caching) so the
     * failing constructor is not retried every query.
     */
    private static final Map<Trophy, Entity> ENTITY_CACHE = Maps.newHashMap();
    public static Entity getEntityFromCache(Trophy trophy) {
        Entity entity = ENTITY_CACHE.get(trophy);
        if (entity == null) {
            // containsKey distinguishes "never tried" from "tried and failed"
            if (!ENTITY_CACHE.containsKey(trophy)) {
                try {
                    entity = trophy.createEntity();
                } catch (Throwable t) {
                    Log.severe(t, "Failed to create dummy entity for trophy %s", trophy);
                }
            }
            // May re-store null for a failed trophy; harmless but redundant.
            ENTITY_CACHE.put(trophy, entity);
        }
        return entity;
    }
    /**
     * Sets the size of a slime-like entity via reflection (obfuscated name
     * func_70799_a / deobfuscated setSlimeSize). Failures are logged only.
     */
    private static Entity setSlimeSize(Entity entity, int size) {
        try {
            ReflectionHelper.call(entity, new String[] { "func_70799_a", "setSlimeSize" }, ReflectionHelper.primitive(size));
        } catch (Exception e) {
            Log.warn(e, "Can't update slime size");
        }
        return entity;
    }
    /**
     * Supported trophy types. Enum constant names must match vanilla entity
     * IDs (see {@link EntityList}), since {@link #createEntity()} creates the
     * dummy entity by name and {@link TrophyHandler#onLivingDrops} looks up
     * trophies by the killed entity's registered name.
     */
    public enum Trophy {
        Wolf(),
        Chicken(new ItemDropBehavior(10000, new ItemStack(Items.egg), "mob.chicken.plop")),
        Cow(new ItemDropBehavior(20000, new ItemStack(Items.leather))),
        Creeper(new CreeperBehavior()),
        Skeleton(new SkeletonBehavior()),
        PigZombie(new ItemDropBehavior(20000, new ItemStack(Items.gold_nugget))),
        Bat(1.0, -0.3),
        Zombie(),
        Witch(0.35, new WitchBehavior()),
        Villager(),
        Ozelot() {
            @Override
            protected Entity createEntity() {
                Entity entity = super.createEntity();
                try {
                    // Render the cat as tamed for display purposes
                    ((EntityOcelot)entity).setTamed(true);
                } catch (ClassCastException e) {
                    Log.warn("Invalid cat entity class: %s", entity.getClass());
                }
                return entity;
            }
        },
        Sheep(),
        Blaze(new BlazeBehavior()),
        Silverfish(),
        Spider(),
        CaveSpider(new CaveSpiderBehavior()),
        Slime(0.6) {
            @Override
            protected Entity createEntity() {
                // Force the smallest slime size for display
                return setSlimeSize(super.createEntity(), 1);
            }
        },
        Ghast(0.1, 0.3),
        Enderman(0.3, new EndermanBehavior()),
        LavaSlime(0.6) {
            @Override
            protected Entity createEntity() {
                return setSlimeSize(super.createEntity(), 1);
            }
        },
        Squid(0.3, 0.5, new SquidBehavior()),
        MushroomCow(new MooshroomBehavior()),
        VillagerGolem(0.3),
        SnowMan(new SnowmanBehavior()),
        Pig(new ItemDropBehavior(20000, new ItemStack(Items.porkchop)));
        // Render scale of the dummy entity on the trophy base.
        private double scale = 0.4;
        // Vertical render offset of the dummy entity.
        private double verticalOffset = 0.0;
        // Optional per-trophy activate/tick behavior; may be null.
        private ITrophyBehavior behavior;
        Trophy() {}
        Trophy(ITrophyBehavior behavior) {
            this.behavior = behavior;
        }
        Trophy(double scale) {
            this.scale = scale;
        }
        Trophy(double scale, ITrophyBehavior behavior) {
            this.scale = scale;
            this.behavior = behavior;
        }
        Trophy(double scale, double verticalOffset) {
            this(scale);
            this.verticalOffset = verticalOffset;
        }
        Trophy(double scale, double verticalOffset, ITrophyBehavior behavior) {
            this(scale, verticalOffset);
            this.behavior = behavior;
        }
        public double getVerticalOffset() {
            return verticalOffset;
        }
        public double getScale() {
            return scale;
        }
        /** Returns the shared cached dummy entity; may be null on creation failure. */
        public Entity getEntity() {
            return getEntityFromCache(this);
        }
        /** Creates a trophy block ItemStack tagged with this trophy type. */
        public ItemStack getItemStack() {
            return ItemTrophyBlock.putMetadata(new ItemStack(OpenBlocks.Blocks.trophy), this);
        }
        /**
         * Plays the mob's living sound at the given position. The shared dummy
         * entity is temporarily attached to the world to do so; access is
         * synchronized since the entity instance is cached and shared.
         */
        public void playSound(World world, double x, double y, double z) {
            if (world == null) return;
            Entity e = getEntity();
            if (e instanceof EntityLiving) {
                e.posX = x;
                e.posY = y;
                e.posZ = z;
                synchronized (e) {
                    e.worldObj = world;
                    ((EntityLiving)e).playLivingSound();
                    e.worldObj = null;
                }
            }
        }
        /** Delegates activation to the trophy behavior, if any; returns its cooldown (0 otherwise). */
        public int executeActivateBehavior(TileEntityTrophy tile, EntityPlayer player) {
            if (behavior != null) return behavior.executeActivateBehavior(tile, player);
            return 0;
        }
        /** Delegates a tick to the trophy behavior, if any. */
        public void executeTickBehavior(TileEntityTrophy tile) {
            if (behavior != null) behavior.executeTickBehavior(tile);
        }
        /** Creates the dummy entity by enum constant name (= vanilla entity ID). */
        protected Entity createEntity() {
            return EntityList.createEntityByName(toString(), null);
        }
        static {
            // Index trophies by name for fast lookup in onLivingDrops.
            ImmutableMap.Builder<String, Trophy> builder = ImmutableMap.builder();
            for (Trophy t : values())
                builder.put(t.name(), t);
            TYPES = builder.build();
        }
        public final static Map<String, Trophy> TYPES;
        public final static Trophy[] VALUES = values();
    }
    /**
     * Drop handler: when a player-killed mob dies, rolls a chance scaled by
     * the looting level. NOTE(review): with lootingLevel == 0 the chance is 0,
     * so trophies only ever drop with a Looting weapon — confirm intended.
     */
    @SubscribeEvent
    public void onLivingDrops(LivingDropsEvent event) {
        if (event.recentlyHit && DROP_RAND.nextDouble() < Config.trophyDropChance * event.lootingLevel) {
            final Entity entity = event.entity;
            String entityName = EntityList.getEntityString(entity);
            if (!Strings.isNullOrEmpty(entityName)) {
                Trophy mobTrophy = Trophy.TYPES.get(entityName);
                if (mobTrophy != null) {
                    EntityItem drop = new EntityItem(entity.worldObj, entity.posX, entity.posY, entity.posZ, mobTrophy.getItemStack());
                    drop.delayBeforeCanPickup = 10;
                    event.drops.add(drop);
                }
            }
        }
    }
}
| |
/*
* Created on May 22, 2005
* Author: TomHornson(at)hotmail.com
*/
package com.fruits.nestle.cache.maps;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jgroups.*;
import org.jgroups.blocks.*;
import org.jgroups.persistence.CannotPersistException;
import org.jgroups.persistence.CannotRemoveException;
import org.jgroups.persistence.PersistenceFactory;
import org.jgroups.persistence.PersistenceManager;
import org.jgroups.util.Promise;
import org.jgroups.util.Util;
import java.io.Serializable;
import java.util.*;
//$Id: DistributedHashtable.java,v 1.18 2005/02/19 13:23:34 belaban Exp $
/**
* Provides the abstraction of a java.util.Hashtable that is replicated at
* several locations. Any change to the hashtable (clear, put, remove etc) will
* transparently be propagated to all replicas in the group. All read-only
* methods will always access the local replica.
* <p>
* Both keys and values added to the hashtable <em>must be serializable</em>,
* the reason being that they will be sent across the network to all replicas of
* the group. Having said this, it is now for example possible to add RMI remote
* objects to the hashtable as they are derived from
* <code>java.rmi.server.RemoteObject</code> which in turn is serializable. This
* allows to lookup shared distributed objects by their name and invoke methods
 * on them, regardless of one's own location. A
* <code>DistributedHashtable</code> thus allows to implement a distributed
* naming service in just a couple of lines.
* <p>
* An instance of this class will contact an existing member of the group to
* fetch its initial state (using the state exchange funclet
* <code>StateExchangeFunclet</code>.
*
* @author Bela Ban
* @author <a href="mailto:aolias@yahoo.com">Alfonso Olias-Sanz</a>
* @version $Id: DistributedHashtable.java,v 1.18 2005/02/19 13:23:34 belaban
* Exp $
* @deprecated This class is unsupported; use JBossCache instead:
* http://www.jboss.com/products/jbosscache
*/
public class DistributedHashMap extends HashMap implements MessageListener, MembershipListener {
    /** Callback interface for observers of map content and membership changes. */
    public interface Notification {
        /** Called after a key/value pair was set locally. */
        void entrySet(Object key, Object value);
        /** Called after a key was removed locally. */
        void entryRemoved(Object key);
        /** Called when the group membership changes. */
        void viewChange(Vector new_mbrs, Vector old_mbrs);
        /** Called after a bulk putAll was applied locally. */
        void contentsSet(Map new_entries);
        /** Called after the map was cleared locally. */
        void contentsCleared();
    }
    // Group communication channel; null after stop().
    private transient Channel channel;
    // RPC dispatcher used to invoke _put/_putAll/_clear/_remove on all replicas.
    protected transient RpcDispatcher disp = null;
    private transient String groupname = null;
    private final transient Vector notifs = new Vector(); // to be notified when
    // mbrship changes
    private final transient Vector members = new Vector(); // keeps track of all
    // DHTs
    // Cached reflection signatures for the remote callback methods.
    private transient Class[] put_signature = null;
    private transient Class[] putAll_signature = null;
    private transient Class[] clear_signature = null;
    private transient Class[] remove_signature = null;
    private transient boolean persistent = false; // whether to use
    // PersistenceManager to save
    // state
    private transient PersistenceManager persistence_mgr = null;
    /**
     * Determines when the updates have to be sent across the network, avoids
     * sending unnecessary messages when there are no member in the group
     */
    private transient boolean send_message = false;
    // Completed when setState() has been applied after a successful getState().
    protected final transient Promise state_promise = new Promise();
    protected final Log log = LogFactory.getLog(this.getClass());
/**
* Creates a DistributedHashtable
*
* @param groupname
* The name of the group to join
* @param factory
* The ChannelFactory which will be used to create a channel
* @param properties
* The property string to be used to define the channel
* @param state_timeout
* The time to wait until state is retrieved in milliseconds. A value
* of 0 means wait forever.
*/
public DistributedHashMap(String groupname, ChannelFactory factory, String properties, long state_timeout) throws ChannelException {
this.groupname = groupname;
initSignatures();
channel = factory != null ? factory.createChannel(properties) : new JChannel(properties);
disp = new RpcDispatcher(channel, this, this, this);
channel.setOpt(Channel.GET_STATE_EVENTS, Boolean.TRUE);
channel.connect(groupname);
start(state_timeout);
}
    /**
     * Creates a DistributedHashtable. Optionally the contents can be saved to
     * persistent storage using the {@link PersistenceManager}.
     *
     * @param groupname
     *            Name of the group to join
     * @param factory
     *            Instance of a ChannelFactory to create the channel (may be
     *            null, in which case a plain JChannel is created)
     * @param properties
     *            Protocol stack properties
     * @param persistent
     *            Whether the contents should be persisted
     * @param state_timeout
     *            Max number of milliseconds to wait until state is retrieved
     */
    public DistributedHashMap(String groupname, ChannelFactory factory, String properties, boolean persistent, long state_timeout) throws ChannelException {
        this.groupname = groupname;
        this.persistent = persistent;
        initSignatures();
        channel = factory != null ? factory.createChannel(properties) : new JChannel(properties);
        disp = new RpcDispatcher(channel, this, this, this);
        channel.setOpt(Channel.GET_STATE_EVENTS, Boolean.TRUE);
        channel.connect(groupname);
        start(state_timeout);
    }
    /**
     * Creates a non-persistent DistributedHashtable on a user-provided channel.
     * Note: the channel must be connected and {@link #start(long)} called by
     * the caller (see {@link #init(long)}).
     */
    public DistributedHashMap(JChannel channel, long state_timeout) throws ChannelNotConnectedException, ChannelClosedException {
        this(channel, false, state_timeout);
    }
    /**
     * Creates a DistributedHashtable on a user-provided channel, optionally
     * persistent. Only initializes the dispatcher; the caller is responsible
     * for connecting the channel and then calling {@link #start(long)}.
     */
    public DistributedHashMap(JChannel channel, boolean persistent, long state_timeout) throws ChannelNotConnectedException, ChannelClosedException {
        this.groupname = channel.getChannelName();
        this.channel = channel;
        this.persistent = persistent;
        init(state_timeout);
    }
    /**
     * Uses a user-provided PullPushAdapter to create the dispatcher rather than a
     * Channel. If id is non-null, it will be used to register under that id. This
     * is typically used when another building block is already using
     * PullPushAdapter, and we want to add this building block in addition. The id
     * is the used to discriminate between messages for the various blocks on top
     * of PullPushAdapter. If null, we will assume we are the first block created
     * on PullPushAdapter.
     *
     * @param adapter
     *            The PullPushAdapter which to use as underlying transport
     * @param id
     *            A serializable object (e.g. an Integer) used to discriminate
     *            (multiplex/demultiplex) between requests/responses for different
     *            building blocks on top of PullPushAdapter.
     * @param state_timeout
     *            Max number of milliseconds to wait until state is retrieved
     */
    public DistributedHashMap(PullPushAdapter adapter, Serializable id, long state_timeout) throws ChannelNotConnectedException, ChannelClosedException {
        initSignatures();
        // The adapter's transport is assumed to be an already-connected Channel.
        this.channel = (Channel) adapter.getTransport();
        this.groupname = this.channel.getChannelName();
        disp = new RpcDispatcher(adapter, id, this, this, this);
        channel.setOpt(Channel.GET_STATE_EVENTS, Boolean.TRUE);
        start(state_timeout);
    }
    /**
     * Same as {@link #DistributedHashMap(PullPushAdapter, Serializable, long)}
     * but does NOT fetch the initial state — the caller must do so if needed.
     */
    public DistributedHashMap(PullPushAdapter adapter, Serializable id) {
        initSignatures();
        this.channel = (Channel) adapter.getTransport();
        this.groupname = this.channel.getChannelName();
        disp = new RpcDispatcher(adapter, id, this, this, this);
        channel.setOpt(Channel.GET_STATE_EVENTS, Boolean.TRUE);
    }
    /**
     * Initializes signatures and the dispatcher on an externally supplied
     * channel. Deliberately does NOT call start(): with a user-provided
     * channel, Channel.connect() must happen first, then start().
     */
    protected void init(long state_timeout) throws ChannelClosedException, ChannelNotConnectedException {
        initSignatures();
        channel.setOpt(Channel.GET_STATE_EVENTS, Boolean.TRUE);
        disp = new RpcDispatcher(channel, this, this, this);
        // Changed by bela (jan 20 2003): start() has to be called by user (only
        // when providing
        // own channel). First, Channel.connect() has to be called, then start().
        // start(state_timeout);
    }
    /**
     * Fetches the state. If the group state cannot be retrieved (we are the
     * first member) and persistence is enabled, the initial contents are
     * loaded from the PersistenceManager instead and re-replicated via put().
     *
     * @param state_timeout
     *            Max number of milliseconds to wait for getState()/setState()
     * @throws ChannelClosedException
     * @throws ChannelNotConnectedException
     */
    public void start(long state_timeout) throws ChannelClosedException, ChannelNotConnectedException {
        boolean rc;
        if (persistent) {
            if (log.isInfoEnabled())
                log.info("fetching state from database");
            try {
                persistence_mgr = PersistenceFactory.getInstance().createManager();
            } catch (Throwable ex) {
                // Persistence is best-effort: fall back to non-persistent mode.
                if (log.isErrorEnabled())
                    log.error("failed creating PersistenceManager, " + "turning persistency off. Exception: " + Util.printStackTrace(ex));
                persistent = false;
            }
        }
        state_promise.reset();
        rc = channel.getState(null, state_timeout);
        if (rc) {
            if (log.isInfoEnabled())
                log.info("state was retrieved successfully, waiting for setState()");
            // Wait until setState() signals the state has actually been applied.
            Boolean result = (Boolean) state_promise.getResult(state_timeout);
            if (result == null) {
                if (log.isErrorEnabled())
                    log.error("setState() never got called");
            } else {
                if (log.isInfoEnabled())
                    log.info("setState() was called");
            }
        } else {
            if (log.isInfoEnabled())
                log.info("state could not be retrieved (first member)");
            if (persistent) {
                if (log.isInfoEnabled())
                    log.info("fetching state from database");
                try {
                    Map m = persistence_mgr.retrieveAll();
                    if (m != null) {
                        Map.Entry entry;
                        Object key, val;
                        for (Iterator it = m.entrySet().iterator(); it.hasNext();) {
                            entry = (Map.Entry) it.next();
                            key = entry.getKey();
                            val = entry.getValue();
                            if (log.isInfoEnabled())
                                log.info("inserting " + key + " --> " + val);
                            put(key, val); // will replicate key and value
                        }
                    }
                } catch (Throwable ex) {
                    if (log.isErrorEnabled())
                        log.error("failed creating PersistenceManager, " + "turning persistency off. Exception: " + Util.printStackTrace(ex));
                    persistent = false;
                }
            }
        }
    }
public Address getLocalAddress() {
return channel != null ? channel.getLocalAddress() : null;
}
    /** Returns the name of the group this replica belongs to. */
    public String getGroupName() {
        return groupname;
    }
    /** Returns the underlying channel; null after {@link #stop()}. */
    public Channel getChannel() {
        return channel;
    }
    /** Returns whether contents are saved through the PersistenceManager. */
    public boolean getPersistent() {
        return persistent;
    }
    /** Enables or disables persistence for subsequent modifications. */
    public void setPersistent(boolean p) {
        persistent = p;
    }
    /** Registers a notification listener, ignoring duplicates. */
    public void addNotifier(Notification n) {
        if (!notifs.contains(n))
            notifs.addElement(n);
    }
public void removeNotifier(Notification n) {
if (notifs.contains(n))
notifs.removeElement(n);
}
    /**
     * Stops the dispatcher and closes the channel, releasing both references.
     * Safe to call multiple times.
     */
    public void stop() {
        if (disp != null) {
            disp.stop();
            disp = null;
        }
        if (channel != null) {
            channel.close();
            channel = null;
        }
    }
    /**
     * Maps the specified key to the specified value in the hashtable. Neither of
     * both parameters can be null. If the group has other members
     * (send_message), the update is replicated synchronously to all of them
     * via the _put() callback; otherwise it is applied locally only.
     *
     * @param key
     *            - the hashtable key
     * @param value
     *            - the value
     * @return the previous value of the specified key in this hashtable, or null
     *         if it did not have one
     */
    public Object put(Object key, Object value) {
        Object prev_val = get(key);
        // Changes done by <aos>
        // if true, propagate action to the group
        if (send_message == true) {
            try {
                disp.callRemoteMethods(null, "_put", new Object[] { key, value }, put_signature, GroupRequest.GET_ALL, 0);
            } catch (Exception e) {
                // NOTE(review): replication failures are silently swallowed.
                // return null;
            }
        } else {
            _put(key, value);
            // don't have to do prev_val = super.put(..) as is done at the beginning
        }
        return prev_val;
    }
    /**
     * Copies all of the mappings from the specified Map to this Hashtable These
     * mappings will replace any mappings that this Hashtable had for any of the
     * keys currently in the specified Map. Replicated to the group through the
     * _putAll() callback when other members exist.
     *
     * @param m
     *            - Mappings to be stored in this map
     */
    public void putAll(Map m) {
        // Changes done by <aos>
        // if true, propagate action to the group
        if (send_message == true) {
            try {
                disp.callRemoteMethods(null, "_putAll", new Object[] { m }, putAll_signature, GroupRequest.GET_ALL, 0);
            } catch (Throwable t) {
                // NOTE(review): replication failures are silently swallowed.
            }
        } else {
            _putAll(m);
        }
    }
    /**
     * Clears this hashtable so that it contains no keys. Replicated to the
     * group through the _clear() callback when other members exist.
     */
    public synchronized void clear() {
        // Changes done by <aos>
        // if true, propagate action to the group
        if (send_message == true) {
            try {
                disp.callRemoteMethods(null, "_clear", null, clear_signature, GroupRequest.GET_ALL, 0);
            } catch (Exception e) {
                if (log.isErrorEnabled())
                    log.error("exception=" + e);
            }
        } else {
            _clear();
        }
    }
    /**
     * Removes the key (and its corresponding value) from the Hashtable.
     * Replicated to the group through the _remove() callback when other
     * members exist.
     *
     * @param key
     *            - the key to be removed.
     * @return the value to which the key had been mapped in this hashtable, or
     *         null if the key did not have a mapping.
     */
    public Object remove(Object key) {
        Object retval = get(key);
        // Changes done by <aos>
        // if true, propagate action to the group
        if (send_message == true) {
            try {
                disp.callRemoteMethods(null, "_remove", new Object[] { key }, remove_signature, GroupRequest.GET_ALL, 0);
                // return retval;
            } catch (Exception e) {
                // NOTE(review): replication failures are silently swallowed.
                // return null;
            }
        } else {
            _remove(key);
            // don't have to do retval = super.remove(..) as is done at the beginning
        }
        return retval;
    }
    /*------------------------ Callbacks -----------------------*/
    /**
     * Remote callback: applies a put locally, persists it when enabled (errors
     * are logged, not rethrown), and fires entrySet() on all notifiers.
     */
    public Object _put(Object key, Object value) {
        Object retval = super.put(key, value);
        if (persistent) {
            try {
                persistence_mgr.save((Serializable) key, (Serializable) value);
            } catch (CannotPersistException cannot_persist_ex) {
                if (log.isErrorEnabled())
                    log.error("failed persisting " + key + " + " + value + ", exception=" + cannot_persist_ex);
            } catch (Throwable t) {
                if (log.isErrorEnabled())
                    log.error("failed persisting " + key + " + " + value + ", exception=" + Util.printStackTrace(t));
            }
        }
        for (int i = 0; i < notifs.size(); i++)
            ((Notification) notifs.elementAt(i)).entrySet(key, value);
        return retval;
    }
    /**
     * Remote callback: applies a bulk put locally, persists when enabled, and
     * fires contentsSet() on all notifiers.
     *
     * @see java.util.Map#putAll(java.util.Map)
     */
    public void _putAll(Map m) {
        if (m == null)
            return;
        // Calling the method below seems okay, but would result in ... deadlock !
        // The reason is that Map.putAll() calls put(), which we override, which
        // results in
        // lock contention for the map.
        // ---> super.putAll(m); <--- CULPRIT !!!@#$%$
        // That said let's do it the stupid way:
        Map.Entry entry;
        for (Iterator it = m.entrySet().iterator(); it.hasNext();) {
            entry = (Map.Entry) it.next();
            super.put(entry.getKey(), entry.getValue());
        }
        if (persistent) {
            try {
                persistence_mgr.saveAll(m);
            } catch (CannotPersistException persist_ex) {
                if (log.isErrorEnabled())
                    log.error("failed persisting contents: " + persist_ex);
            } catch (Throwable t) {
                if (log.isErrorEnabled())
                    log.error("failed persisting contents: " + t);
            }
        }
        for (int i = 0; i < notifs.size(); i++)
            ((Notification) notifs.elementAt(i)).contentsSet(m);
    }
/**
 * Callback: clears the table locally, clears the persistence store if
 * persistence is enabled, and notifies all registered listeners.
 */
public void _clear() {
    super.clear();
    if (persistent) {
        try {
            persistence_mgr.clear();
        } catch (CannotRemoveException cannot_remove_ex) {
            if (log.isErrorEnabled())
                log.error("failed clearing contents, exception=" + cannot_remove_ex);
        } catch (Throwable t) {
            if (log.isErrorEnabled())
                log.error("failed clearing contents, exception=" + t);
        }
    }
    int idx = 0;
    while (idx < notifs.size()) {
        ((Notification) notifs.elementAt(idx)).contentsCleared();
        idx++;
    }
}
/**
 * Callback: removes the key locally, removes it from the persistence store
 * if persistence is enabled, and notifies all registered listeners.
 *
 * @return the value previously mapped to the key, or null if there was none.
 */
public Object _remove(Object key) {
    Object retval = super.remove(key);
    if (persistent) {
        try {
            persistence_mgr.remove((Serializable) key);
        } catch (CannotRemoveException cannot_remove_ex) {
            // Message fixed: previously said "failed clearing contents" (copy-paste).
            if (log.isErrorEnabled())
                log.error("failed removing " + key + ", exception=" + cannot_remove_ex);
        } catch (Throwable t) {
            if (log.isErrorEnabled())
                log.error("failed removing " + key + ", exception=" + t);
        }
    }
    for (int i = 0; i < notifs.size(); i++)
        ((Notification) notifs.elementAt(i)).entryRemoved(key);
    return retval;
}
/*----------------------------------------------------------*/
/*-------------------- State Exchange ----------------------*/
/** No-op: regular channel messages are not processed by this implementation. */
public void receive(Message msg) {
}
/**
 * Serializes a snapshot copy of the current contents for state transfer.
 *
 * @return the marshalled table contents, or null if marshalling fails.
 */
public byte[] getState() {
    Hashtable snapshot = new Hashtable();
    for (Iterator iter = keySet().iterator(); iter.hasNext();) {
        Object k = iter.next();
        snapshot.put(k, get(k));
    }
    try {
        return Util.objectToByteBuffer(snapshot);
    } catch (Throwable ex) {
        if (log.isErrorEnabled())
            log.error("exception marshalling state: " + ex);
        return null;
    }
}
/**
 * Applies transferred state: unmarshals the received table and merges it
 * into this one, then signals completion via the state promise.
 */
public void setState(byte[] new_state) {
    Hashtable incoming;
    try {
        incoming = (Hashtable) Util.objectFromByteBuffer(new_state);
    } catch (Throwable ex) {
        if (log.isErrorEnabled())
            log.error("exception unmarshalling state: " + ex);
        return;
    }
    if (incoming == null)
        return;
    _putAll(incoming);
    state_promise.setResult(Boolean.TRUE);
}
/*------------------- Membership Changes ----------------------*/
/**
 * Handles a membership change: notifies observers of joined/left members
 * and refreshes the local membership list. Group propagation of updates is
 * enabled only when more than one member is present.
 */
public void viewAccepted(View new_view) {
    Vector new_mbrs = new_view.getMembers();
    if (new_mbrs != null) {
        sendViewChangeNotifications(new_mbrs, members); // notifies observers (joined, left)
        members.removeAllElements();
        for (int i = 0; i < new_mbrs.size(); i++)
            members.addElement(new_mbrs.elementAt(i));
    }
    // More than one member in the group => propagate actions to the group;
    // single server => apply locally only.
    send_message = members.size() > 1;
}
/** Called when a member is suspected; this implementation takes no action. */
public void suspect(Address suspected_mbr) {
}
/** Block sending and receiving of messages until viewAccepted() is called; no local action needed. */
public void block() {
}
/**
 * Computes which members joined and which left between the old and new
 * membership lists and notifies every registered Notification listener.
 * Does nothing if there are no listeners or either list is null/empty.
 */
void sendViewChangeNotifications(Vector new_mbrs, Vector old_mbrs) {
    if (notifs.size() == 0 || old_mbrs == null || new_mbrs == null || old_mbrs.size() == 0 || new_mbrs.size() == 0)
        return;
    // Joined: present in new_mbrs but absent from old_mbrs.
    Vector joined = new Vector();
    for (int i = 0; i < new_mbrs.size(); i++) {
        Object member = new_mbrs.elementAt(i);
        if (!old_mbrs.contains(member))
            joined.addElement(member);
    }
    // Left: present in old_mbrs but absent from new_mbrs.
    Vector left = new Vector();
    for (int i = 0; i < old_mbrs.size(); i++) {
        Object member = old_mbrs.elementAt(i);
        if (!new_mbrs.contains(member))
            left.addElement(member);
    }
    for (int i = 0; i < notifs.size(); i++)
        ((Notification) notifs.elementAt(i)).viewChange(joined, left);
}
/**
 * Lazily initializes the method signatures used for the group RPC calls
 * (_put, _putAll, _clear, _remove); each is created only once.
 */
void initSignatures() {
    try {
        if (put_signature == null)
            put_signature = new Class[] { Object.class, Object.class };
        if (putAll_signature == null)
            putAll_signature = new Class[] { Map.class };
        if (clear_signature == null)
            clear_signature = new Class[0];
        if (remove_signature == null)
            remove_signature = new Class[] { Object.class };
    } catch (Throwable ex) {
        if (log.isErrorEnabled())
            log.error("exception=" + ex);
    }
}
/**
 * Demo driver: creates a channel, attaches a DistributedHashtable to it,
 * connects, and exercises put/remove/putAll.
 */
public static void main(String[] args) {
    try {
        // Setup order matters: the channel must be connected (so the table
        // receives a VIEW_CHANGE) before the table is started. A shorter
        // alternative is the DistributedHashtable(groupname, ...) constructor.
        JChannel channel = new JChannel("file:/c:/JGroups-2.0/conf/state_transfer.xml");
        channel.setOpt(Channel.GET_STATE_EVENTS, Boolean.TRUE);
        DistributedHashtable table = new DistributedHashtable(channel, false, 5000);
        channel.connect("demo");
        table.start(5000);
        table.put("name", "Michelle Ban");
        Object old_key = table.remove("name");
        System.out.println("old key was " + old_key);
        table.put("newkey", "newvalue");
        Map m = new HashMap();
        m.put("k1", "v1");
        m.put("k2", "v2");
        table.putAll(m);
        System.out.println("hashmap is " + table);
    } catch (Throwable t) {
        t.printStackTrace();
    }
}
}
| |
/*
* Copyright 2015 Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.app.impl;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.onosproject.app.ApplicationEvent;
import org.onosproject.app.ApplicationListener;
import org.onosproject.app.ApplicationState;
import org.onosproject.app.ApplicationStoreAdapter;
import org.onosproject.common.app.ApplicationArchive;
import org.onosproject.common.event.impl.TestEventDispatcher;
import org.onosproject.core.Application;
import org.onosproject.core.ApplicationId;
import org.onosproject.core.DefaultApplication;
import org.onosproject.core.DefaultApplicationId;
import java.io.InputStream;
import java.net.URI;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
import static org.junit.Assert.*;
import static org.onosproject.app.ApplicationEvent.Type.*;
import static org.onosproject.app.ApplicationState.ACTIVE;
import static org.onosproject.app.ApplicationState.INSTALLED;
import static org.onosproject.app.DefaultApplicationDescriptionTest.*;
import static org.onosproject.net.NetTestTools.injectEventDispatcher;
/**
* Test of the application manager implementation.
*/
public class ApplicationManagerTest {
// Application id shared by all tests; APP_NAME comes from DefaultApplicationDescriptionTest.
public static final DefaultApplicationId APP_ID = new DefaultApplicationId(1, APP_NAME);
private ApplicationManager mgr = new ApplicationManager();
private ApplicationListener listener = new TestListener();
// Set by deactivateHook(); asserted by the activate()/deactivate() tests.
private boolean deactivated = false;
// Wires test doubles (event dispatcher, features service, store) into the
// manager, activates it, and registers the listener.
@Before
public void setUp() {
injectEventDispatcher(mgr, new TestEventDispatcher());
mgr.featuresService = new TestFeaturesService();
mgr.store = new TestStore();
mgr.activate();
mgr.addListener(listener);
}
@After
public void tearDown() {
mgr.removeListener(listener);
mgr.deactivate();
}
// Asserts that the app's descriptor fields match the shared test constants.
private void validate(Application app) {
assertEquals("incorrect name", APP_NAME, app.id().name());
assertEquals("incorrect version", VER, app.version());
assertEquals("incorrect origin", ORIGIN, app.origin());
assertEquals("incorrect description", DESC, app.description());
assertEquals("incorrect features URI", FURL, app.featuresRepo().get());
assertEquals("incorrect features", FEATURES, app.features());
}
// Installs app.zip and verifies descriptor, state, and the features repo URI
// forwarded to the features service; also registers the deactivate hook.
@Test
public void install() {
InputStream stream = ApplicationArchive.class.getResourceAsStream("app.zip");
Application app = mgr.install(stream);
validate(app);
assertEquals("incorrect features URI used", app.featuresRepo().get(),
((TestFeaturesService) mgr.featuresService).uri);
assertEquals("incorrect app count", 1, mgr.getApplications().size());
assertEquals("incorrect app", app, mgr.getApplication(APP_ID));
assertEquals("incorrect app state", INSTALLED, mgr.getState(APP_ID));
mgr.registerDeactivateHook(app.id(), this::deactivateHook);
}
// Pre-deactivate hook registered in install(); records that it ran.
private void deactivateHook() {
deactivated = true;
}
@Test
public void uninstall() {
install();
mgr.uninstall(APP_ID);
assertEquals("incorrect app count", 0, mgr.getApplications().size());
}
// Activation must not trigger the pre-deactivate hook.
@Test
public void activate() {
install();
mgr.activate(APP_ID);
assertEquals("incorrect app state", ACTIVE, mgr.getState(APP_ID));
assertFalse("preDeactivate hook wrongly called", deactivated);
}
// Deactivation must run the pre-deactivate hook before returning to INSTALLED.
@Test
public void deactivate() {
activate();
mgr.deactivate(APP_ID);
assertEquals("incorrect app state", INSTALLED, mgr.getState(APP_ID));
assertTrue("preDeactivate hook not called", deactivated);
}
// Captures the last event delivered; currently only used to verify that
// listener registration does not break the manager (event is never asserted).
private class TestListener implements ApplicationListener {
private ApplicationEvent event;
@Override
public void event(ApplicationEvent event) {
this.event = event;
}
}
// In-memory store double holding a single app; fires the corresponding
// ApplicationEvent through the inherited delegate on every state change.
private class TestStore extends ApplicationStoreAdapter {
private Application app;
private ApplicationState state;
@Override
public Application create(InputStream appDescStream) {
app = new DefaultApplication(APP_ID, VER, DESC, ORIGIN, CATEGORY,
URL, README, ICON, ROLE, PERMS,
Optional.of(FURL), FEATURES, ImmutableList.of());
state = INSTALLED;
delegate.notify(new ApplicationEvent(APP_INSTALLED, app));
return app;
}
@Override
public Set<Application> getApplications() {
return app != null ? ImmutableSet.of(app) : ImmutableSet.of();
}
@Override
public Application getApplication(ApplicationId appId) {
return app;
}
@Override
public void remove(ApplicationId appId) {
delegate.notify(new ApplicationEvent(APP_UNINSTALLED, app));
app = null;
state = null;
}
@Override
public ApplicationState getState(ApplicationId appId) {
return state;
}
@Override
public void activate(ApplicationId appId) {
state = ApplicationState.ACTIVE;
delegate.notify(new ApplicationEvent(APP_ACTIVATED, app));
}
@Override
public void deactivate(ApplicationId appId) {
state = INSTALLED;
delegate.notify(new ApplicationEvent(APP_DEACTIVATED, app));
}
@Override
public ApplicationId getId(String name) {
return new DefaultApplicationId(0, name);
}
}
// Features-service double recording the repository URI and installed features.
private class TestFeaturesService extends FeaturesServiceAdapter {
private URI uri;
private Set<String> features = new HashSet<>();
@Override
public void addRepository(URI uri) throws Exception {
this.uri = uri;
}
@Override
public void removeRepository(URI uri) throws Exception {
this.uri = null;
}
@Override
public void installFeature(String name) throws Exception {
features.add(name);
}
@Override
public void uninstallFeature(String name) throws Exception {
features.remove(name);
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.triggers;
import java.util.*;
import org.junit.BeforeClass;
import org.junit.Test;
import org.apache.cassandra.Util;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.rows.*;
import org.apache.cassandra.db.marshal.UTF8Type;
import org.apache.cassandra.db.partitions.Partition;
import org.apache.cassandra.db.partitions.PartitionUpdate;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.exceptions.InvalidRequestException;
import org.apache.cassandra.schema.TriggerMetadata;
import org.apache.cassandra.utils.FBUtilities;
import static org.apache.cassandra.utils.ByteBufferUtil.bytes;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
/**
 * Tests for TriggerExecutor: verifies how trigger-generated mutations are
 * merged with (or appended to) the original mutation, depending on whether
 * the trigger targets the same key/table/keyspace or a different one.
 */
public class TriggerExecutorTest
{
@BeforeClass
public static void setupDD()
{
DatabaseDescriptor.daemonInitialization();
}
// Trigger augments the same partition: original and trigger columns are
// merged into a single row of the same update.
@Test
public void sameKeySameCfColumnFamilies() throws ConfigurationException, InvalidRequestException
{
CFMetaData metadata = makeCfMetaData("ks1", "cf1", TriggerMetadata.create("test", SameKeySameCfTrigger.class.getName()));
// origin column 'c1' = "v1", augment extra column 'c2' = "trigger"
PartitionUpdate mutated = TriggerExecutor.instance.execute(makeCf(metadata, "k1", "v1", null));
List<Row> rows = new ArrayList<>();
try (RowIterator iterator = UnfilteredRowIterators.filter(mutated.unfilteredIterator(),
FBUtilities.nowInSeconds()))
{
iterator.forEachRemaining(rows::add);
}
// only 1 row
assertEquals(1, rows.size());
List<Cell> cells = new ArrayList<>();
rows.get(0).cells().forEach(cells::add);
// 2 columns
assertEquals(2, cells.size());
// check column 'c1'
assertEquals(bytes("v1"), cells.get(0).value());
// check column 'c2'
assertEquals(bytes("trigger"), cells.get(1).value());
}
// Single-partition execute() cannot absorb a trigger mutation for a
// different table: it must be rejected.
@Test(expected = InvalidRequestException.class)
public void sameKeyDifferentCfColumnFamilies() throws ConfigurationException, InvalidRequestException
{
CFMetaData metadata = makeCfMetaData("ks1", "cf1", TriggerMetadata.create("test", SameKeyDifferentCfTrigger.class.getName()));
TriggerExecutor.instance.execute(makeCf(metadata, "k1", "v1", null));
}
// Single-partition execute() cannot absorb a trigger mutation for a
// different partition key: it must be rejected.
@Test(expected = InvalidRequestException.class)
public void differentKeyColumnFamilies() throws ConfigurationException, InvalidRequestException
{
CFMetaData metadata = makeCfMetaData("ks1", "cf1", TriggerMetadata.create("test", DifferentKeyTrigger.class.getName()));
TriggerExecutor.instance.execute(makeCf(metadata, "k1", "v1", null));
}
// A trigger returning null produces no augmented mutations.
@Test
public void noTriggerMutations() throws ConfigurationException, InvalidRequestException
{
CFMetaData metadata = makeCfMetaData("ks1", "cf1", TriggerMetadata.create("test", NoOpTrigger.class.getName()));
Mutation rm = new Mutation(makeCf(metadata, "k1", "v1", null));
assertNull(TriggerExecutor.instance.execute(Collections.singletonList(rm)));
}
// Trigger fires for every mutation; each result keeps its original column
// plus the trigger-added 'c2'.
@Test
public void sameKeySameCfRowMutations() throws ConfigurationException, InvalidRequestException
{
CFMetaData metadata = makeCfMetaData("ks1", "cf1", TriggerMetadata.create("test", SameKeySameCfTrigger.class.getName()));
PartitionUpdate cf1 = makeCf(metadata, "k1", "k1v1", null);
PartitionUpdate cf2 = makeCf(metadata, "k2", "k2v1", null);
Mutation rm1 = new Mutation("ks1", cf1.partitionKey()).add(cf1);
Mutation rm2 = new Mutation("ks1", cf2.partitionKey()).add(cf2);
List<? extends IMutation> tmutations = new ArrayList<>(TriggerExecutor.instance.execute(Arrays.asList(rm1, rm2)));
assertEquals(2, tmutations.size());
Collections.sort(tmutations, new RmComparator());
List<PartitionUpdate> mutatedCFs = new ArrayList<>(tmutations.get(0).getPartitionUpdates());
assertEquals(1, mutatedCFs.size());
Row row = mutatedCFs.get(0).iterator().next();
assertEquals(bytes("k1v1"), row.getCell(metadata.getColumnDefinition(bytes("c1"))).value());
assertEquals(bytes("trigger"), row.getCell(metadata.getColumnDefinition(bytes("c2"))).value());
mutatedCFs = new ArrayList<>(tmutations.get(1).getPartitionUpdates());
assertEquals(1, mutatedCFs.size());
row = mutatedCFs.get(0).iterator().next();
assertEquals(bytes("k2v1"), row.getCell(metadata.getColumnDefinition(bytes("c1"))).value());
assertEquals(bytes("trigger"), row.getCell(metadata.getColumnDefinition(bytes("c2"))).value());
}
// Trigger only augments partition "k2": "k1" keeps its original columns,
// "k2" additionally gains 'c2'.
@Test
public void sameKeySameCfPartialRowMutations() throws ConfigurationException, InvalidRequestException
{
CFMetaData metadata = makeCfMetaData("ks1", "cf1", TriggerMetadata.create("test", SameKeySameCfPartialTrigger.class.getName()));
PartitionUpdate cf1 = makeCf(metadata, "k1", "k1v1", null);
PartitionUpdate cf2 = makeCf(metadata, "k2", "k2v1", null);
Mutation rm1 = new Mutation("ks1", cf1.partitionKey()).add(cf1);
Mutation rm2 = new Mutation("ks1", cf2.partitionKey()).add(cf2);
List<? extends IMutation> tmutations = new ArrayList<>(TriggerExecutor.instance.execute(Arrays.asList(rm1, rm2)));
assertEquals(2, tmutations.size());
Collections.sort(tmutations, new RmComparator());
List<PartitionUpdate> mutatedCFs = new ArrayList<>(tmutations.get(0).getPartitionUpdates());
assertEquals(1, mutatedCFs.size());
Row row = mutatedCFs.get(0).iterator().next();
assertEquals(bytes("k1v1"), row.getCell(metadata.getColumnDefinition(bytes("c1"))).value());
assertNull(row.getCell(metadata.getColumnDefinition(bytes("c2"))));
mutatedCFs = new ArrayList<>(tmutations.get(1).getPartitionUpdates());
assertEquals(1, mutatedCFs.size());
row = mutatedCFs.get(0).iterator().next();
assertEquals(bytes("k2v1"), row.getCell(metadata.getColumnDefinition(bytes("c1"))).value());
assertEquals(bytes("trigger"), row.getCell(metadata.getColumnDefinition(bytes("c2"))).value());
}
// Trigger writes to a different table in the same keyspace: each mutation
// ends up with two partition updates (original cf1 plus the trigger's table).
@Test
public void sameKeyDifferentCfRowMutations() throws ConfigurationException, InvalidRequestException
{
CFMetaData metadata = makeCfMetaData("ks1", "cf1", TriggerMetadata.create("test", SameKeyDifferentCfTrigger.class.getName()));
PartitionUpdate cf1 = makeCf(metadata, "k1", "k1v1", null);
PartitionUpdate cf2 = makeCf(metadata, "k2", "k2v1", null);
Mutation rm1 = new Mutation("ks1", cf1.partitionKey()).add(cf1);
Mutation rm2 = new Mutation("ks1", cf2.partitionKey()).add(cf2);
List<? extends IMutation> tmutations = new ArrayList<>(TriggerExecutor.instance.execute(Arrays.asList(rm1, rm2)));
assertEquals(2, tmutations.size());
Collections.sort(tmutations, new RmComparator());
List<PartitionUpdate> mutatedCFs = new ArrayList<>(tmutations.get(0).getPartitionUpdates());
assertEquals(2, mutatedCFs.size());
for (PartitionUpdate update : mutatedCFs)
{
if (update.metadata().cfName.equals("cf1"))
{
Row row = update.iterator().next();
assertEquals(bytes("k1v1"), row.getCell(metadata.getColumnDefinition(bytes("c1"))).value());
assertNull(row.getCell(metadata.getColumnDefinition(bytes("c2"))));
}
else
{
Row row = update.iterator().next();
assertNull(row.getCell(metadata.getColumnDefinition(bytes("c1"))));
assertEquals(bytes("trigger"), row.getCell(metadata.getColumnDefinition(bytes("c2"))).value());
}
}
mutatedCFs = new ArrayList<>(tmutations.get(1).getPartitionUpdates());
assertEquals(2, mutatedCFs.size());
for (PartitionUpdate update : mutatedCFs)
{
if (update.metadata().cfName.equals("cf1"))
{
Row row = update.iterator().next();
assertEquals(bytes("k2v1"), row.getCell(metadata.getColumnDefinition(bytes("c1"))).value());
assertNull(row.getCell(metadata.getColumnDefinition(bytes("c2"))));
}
else
{
Row row = update.iterator().next();
assertNull(row.getCell(metadata.getColumnDefinition(bytes("c1"))));
assertEquals(bytes("trigger"), row.getCell(metadata.getColumnDefinition(bytes("c2"))).value());
}
}
}
// Trigger writes to a different keyspace: the trigger output becomes
// separate mutations, giving 4 total (2 original + 2 augmented).
@Test
public void sameKeyDifferentKsRowMutations() throws ConfigurationException, InvalidRequestException
{
CFMetaData metadata = makeCfMetaData("ks1", "cf1", TriggerMetadata.create("test", SameKeyDifferentKsTrigger.class.getName()));
PartitionUpdate cf1 = makeCf(metadata, "k1", "k1v1", null);
PartitionUpdate cf2 = makeCf(metadata, "k2", "k2v1", null);
Mutation rm1 = new Mutation("ks1", cf1.partitionKey()).add(cf1);
Mutation rm2 = new Mutation("ks1", cf2.partitionKey()).add(cf2);
List<? extends IMutation> tmutations = new ArrayList<>(TriggerExecutor.instance.execute(Arrays.asList(rm1, rm2)));
assertEquals(4, tmutations.size());
Collections.sort(tmutations, new RmComparator());
List<PartitionUpdate> mutatedCFs = new ArrayList<>(tmutations.get(0).getPartitionUpdates());
assertEquals(1, mutatedCFs.size());
Row row = mutatedCFs.get(0).iterator().next();
assertEquals(bytes("k1v1"), row.getCell(metadata.getColumnDefinition(bytes("c1"))).value());
assertNull(row.getCell(metadata.getColumnDefinition(bytes("c2"))));
mutatedCFs = new ArrayList<>(tmutations.get(1).getPartitionUpdates());
assertEquals(1, mutatedCFs.size());
row = mutatedCFs.get(0).iterator().next();
assertEquals(bytes("k2v1"), row.getCell(metadata.getColumnDefinition(bytes("c1"))).value());
assertNull(row.getCell(metadata.getColumnDefinition(bytes("c2"))));
mutatedCFs = new ArrayList<>(tmutations.get(2).getPartitionUpdates());
assertEquals(1, mutatedCFs.size());
row = mutatedCFs.get(0).iterator().next();
assertNull(row.getCell(metadata.getColumnDefinition(bytes("c1"))));
assertEquals(bytes("trigger"), row.getCell(metadata.getColumnDefinition(bytes("c2"))).value());
mutatedCFs = new ArrayList<>(tmutations.get(3).getPartitionUpdates());
assertEquals(1, mutatedCFs.size());
row = mutatedCFs.get(0).iterator().next();
assertNull(row.getCell(metadata.getColumnDefinition(bytes("c1"))));
assertEquals(bytes("trigger"), row.getCell(metadata.getColumnDefinition(bytes("c2"))).value());
}
// Trigger writes to a different partition key: the original "k1" mutation
// and the trigger's "otherKey" mutation both survive, sorted by key.
@Test
public void differentKeyRowMutations() throws ConfigurationException, InvalidRequestException
{
CFMetaData metadata = makeCfMetaData("ks1", "cf1", TriggerMetadata.create("test", DifferentKeyTrigger.class.getName()));
PartitionUpdate cf1 = makeCf(metadata, "k1", "v1", null);
Mutation rm = new Mutation("ks1", cf1.partitionKey()).add(cf1);
List<? extends IMutation> tmutations = new ArrayList<>(TriggerExecutor.instance.execute(Arrays.asList(rm)));
assertEquals(2, tmutations.size());
Collections.sort(tmutations, new RmComparator());
assertEquals(bytes("k1"), tmutations.get(0).key().getKey());
assertEquals(bytes("otherKey"), tmutations.get(1).key().getKey());
List<PartitionUpdate> mutatedCFs = new ArrayList<>(tmutations.get(0).getPartitionUpdates());
assertEquals(1, mutatedCFs.size());
Row row = mutatedCFs.get(0).iterator().next();
assertEquals(bytes("v1"), row.getCell(metadata.getColumnDefinition(bytes("c1"))).value());
assertNull(row.getCell(metadata.getColumnDefinition(bytes("c2"))));
mutatedCFs = new ArrayList<>(tmutations.get(1).getPartitionUpdates());
assertEquals(1, mutatedCFs.size());
row = mutatedCFs.get(0).iterator().next();
assertEquals(bytes("trigger"), row.getCell(metadata.getColumnDefinition(bytes("c2"))).value());
assertNull(row.getCell(metadata.getColumnDefinition(bytes("c1"))));
}
// Builds table metadata with one text partition key and two text columns,
// optionally attaching the given trigger.
private static CFMetaData makeCfMetaData(String ks, String cf, TriggerMetadata trigger)
{
CFMetaData metadata = CFMetaData.Builder.create(ks, cf)
.addPartitionKey("pkey", UTF8Type.instance)
.addRegularColumn("c1", UTF8Type.instance)
.addRegularColumn("c2", UTF8Type.instance)
.build();
try
{
if (trigger != null)
metadata.triggers(metadata.getTriggers().with(trigger));
}
catch (InvalidRequestException e)
{
throw new AssertionError(e);
}
return metadata;
}
// Builds a single-row partition update with the given (nullable) values for
// columns c1 and c2.
private static PartitionUpdate makeCf(CFMetaData metadata, String key, String columnValue1, String columnValue2)
{
Row.Builder builder = BTreeRow.unsortedBuilder(FBUtilities.nowInSeconds());
builder.newRow(Clustering.EMPTY);
long ts = FBUtilities.timestampMicros();
if (columnValue1 != null)
builder.addCell(BufferCell.live(metadata.getColumnDefinition(bytes("c1")), ts, bytes(columnValue1)));
if (columnValue2 != null)
builder.addCell(BufferCell.live(metadata.getColumnDefinition(bytes("c2")), ts, bytes(columnValue2)));
return PartitionUpdate.singleRowUpdate(metadata, Util.dk(key), builder.build());
}
// Trigger that produces no augmented mutations.
public static class NoOpTrigger implements ITrigger
{
public Collection<Mutation> augment(Partition partition)
{
return null;
}
}
// Trigger that adds column c2 to the same partition of the same table.
public static class SameKeySameCfTrigger implements ITrigger
{
public Collection<Mutation> augment(Partition partition)
{
RowUpdateBuilder builder = new RowUpdateBuilder(partition.metadata(), FBUtilities.timestampMicros(), partition.partitionKey().getKey());
builder.add("c2", bytes("trigger"));
return Collections.singletonList(builder.build());
}
}
// Same as SameKeySameCfTrigger but only fires for partition key "k2".
public static class SameKeySameCfPartialTrigger implements ITrigger
{
public Collection<Mutation> augment(Partition partition)
{
if (!partition.partitionKey().getKey().equals(bytes("k2")))
return null;
RowUpdateBuilder builder = new RowUpdateBuilder(partition.metadata(), FBUtilities.timestampMicros(), partition.partitionKey().getKey());
builder.add("c2", bytes("trigger"));
return Collections.singletonList(builder.build());
}
}
// Trigger that writes to a different table ("otherCf") in the same keyspace.
public static class SameKeyDifferentCfTrigger implements ITrigger
{
public Collection<Mutation> augment(Partition partition)
{
RowUpdateBuilder builder = new RowUpdateBuilder(makeCfMetaData(partition.metadata().ksName, "otherCf", null), FBUtilities.timestampMicros(), partition.partitionKey().getKey());
builder.add("c2", bytes("trigger"));
return Collections.singletonList(builder.build());
}
}
// Trigger that writes to a different keyspace ("otherKs").
public static class SameKeyDifferentKsTrigger implements ITrigger
{
public Collection<Mutation> augment(Partition partition)
{
RowUpdateBuilder builder = new RowUpdateBuilder(makeCfMetaData("otherKs", "otherCf", null), FBUtilities.timestampMicros(), partition.partitionKey().getKey());
builder.add("c2", bytes("trigger"));
return Collections.singletonList(builder.build());
}
}
// Trigger that writes to a different partition key ("otherKey").
public static class DifferentKeyTrigger implements ITrigger
{
public Collection<Mutation> augment(Partition partition)
{
RowUpdateBuilder builder = new RowUpdateBuilder(makeCfMetaData("otherKs", "otherCf", null), FBUtilities.timestampMicros(), "otherKey");
builder.add("c2", bytes("trigger"));
return Collections.singletonList(builder.build());
}
}
// Orders mutations by keyspace name, then partition key, so tests can make
// positional assertions after sorting.
private static class RmComparator implements Comparator<IMutation>
{
public int compare(IMutation m1, IMutation m2)
{
int cmp = m1.getKeyspaceName().compareTo(m2.getKeyspaceName());
return cmp != 0 ? cmp : m1.key().compareTo(m2.key());
}
}
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package flex2.tools;
import java.io.File;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import flash.util.Trace;
import flex2.compiler.CompilationUnit;
import flex2.compiler.CompilerSwcContext;
import flex2.compiler.Source;
import flex2.compiler.config.ConfigurationBuffer;
import flex2.compiler.io.LocalFile;
import flex2.compiler.io.VirtualFile;
import flex2.compiler.swc.SwcScript;
import flex2.compiler.util.QName;
/**
* A value object used to store checksums during an incremental
* compilation.
*/
public class SwcChecksums
{
/**
 * Used for incremental builds to verify the stored cache.
 * checksums[0] - aggregate checksum (computed after load/save)
 * checksums[1] = cmdChecksum - must be set with expected value before loading cache
 * checksums[2] = linkChecksum
 * checksums[3] = swcChecksum
 */
int checksums[];
/**
 * Map definition to signature checksum; null when swc checksums are disabled.
 */
private Map<QName, Long> swcDefSignatureChecksums = new HashMap<QName, Long>(); // Map<QName, Long>
/**
 * Map swc file name to checksum (modification timestamp).
 */
private Map<String, Long> swcFileChecksums = new HashMap<String, Long>(); // Map<String, Long>
/**
 * Map archive file name to checksum (modification timestamp).
 */
private Map<String, Long> archiveFileChecksums = new HashMap<String, Long>(); // Map<String, Long>
private CompilerSwcContext swcContext;
private ConfigurationBuffer cfgbuf;
/**
 * If true, checksum the signatures in the swc rather than the mod time on
 * the entire swc file. This can save recompiles if there is a change but
 * the signature doesn't change.
 */
private boolean isSwcChecksumEnabled;
/**
 * Creates checksum state for an incremental compile, seeding the
 * cache-verification array from the configuration and swc context.
 */
protected SwcChecksums(CompilerSwcContext swcContext, ConfigurationBuffer cfgbuf,
                       ToolsConfiguration configuration)
{
    this.swcContext = swcContext;
    this.cfgbuf = cfgbuf;
    this.isSwcChecksumEnabled = configuration.isSwcChecksumEnabled();
    // This array is used to load and persist the incremental-compile cache;
    // slot 0 (the aggregate checksum) is filled in later.
    checksums = new int[] { 0, cfgbuf.checksum_ts(), cfgbuf.link_checksum_ts(), swcContext.checksum() };
}
/**
 * Save the compilation units' signature checksums. The next compilation
 * compares these to decide between an incremental compile and a full
 * recompilation. When swc checksums are disabled the map is nulled out.
 *
 * @param units - compilation units, may be null
 */
protected void saveSignatureChecksums(List<CompilationUnit> units)
{
    if (!isSwcChecksumEnabled)
    {
        swcDefSignatureChecksums = null;
        return;
    }
    if (units == null)
        return;
    swcDefSignatureChecksums = new HashMap<QName, Long>();
    for (CompilationUnit unit : units)
    {
        if (unit == null)
            continue;
        Source source = unit.getSource();
        if (source != null && source.isSwcScriptOwner() && !source.isInternal())
        {
            addSignatureChecksumToData(unit);
        }
    }
}
/**
 * Save the swc file checksums (modification timestamps), including the
 * theme style sheets. When swc checksums are disabled the map is nulled out.
 */
protected void saveSwcFileChecksums()
{
    if (!isSwcChecksumEnabled)
    {
        swcFileChecksums = null;
        return;
    }
    for (Map.Entry<String, VirtualFile> entry : swcContext.getFiles().entrySet())
    {
        String filename = entry.getKey();
        VirtualFile file = entry.getValue();
        // Long.valueOf instead of the deprecated Long constructor.
        swcFileChecksums.put(filename, Long.valueOf(file.getLastModified()));
    }
    for (VirtualFile themeStyleSheet : swcContext.getThemeStyleSheets())
    {
        swcFileChecksums.put(themeStyleSheet.getName(),
                             Long.valueOf(themeStyleSheet.getLastModified()));
    }
}
/**
 * Save the checksums (modification timestamps) for the resources that get
 * written to the swc. These may not have compile dependencies, so
 * modifications would not be detected during the compile phase.
 *
 * @param m - Map(filename, VirtualFile) of resources
 */
protected void saveArchiveFilesChecksums(Map<String, VirtualFile> m)
{
    if (!isSwcChecksumEnabled)
    {
        // Fixed copy-paste bug: this method manages archiveFileChecksums,
        // but previously nulled swcFileChecksums here.
        archiveFileChecksums = null;
        return;
    }
    for (Map.Entry<String, VirtualFile> entry : m.entrySet())
    {
        VirtualFile file = entry.getValue();
        archiveFileChecksums.put(file.getName(), Long.valueOf(file.getLastModified()));
    }
}
/**
 * Capture all checksums that are going to be persisted to the
 * incremental-compile store file.
 *
 * @param units - compilation units
 */
protected void saveChecksums(List<CompilationUnit> units)
{
    saveSignatureChecksums(units);
    saveSwcFileChecksums();
    // Recompute the aggregate checksum so the persisted value is current.
    updateChecksum();
}
/**
 * Loop thru the saved signature and file checksums in the persisted data
 * and compare them with the signature and file checksums in the swc context.
 *
 * @param loadedChecksums the checksum array read back from the cache
 * @return true if a full recompilation is required, i.e. the cached
 *         checksums do NOT all match the checksums in the swc context;
 *         false when an incremental compile is sufficient.
 */
protected boolean isRecompilationNeeded(int[] loadedChecksums)
{
    // Now that the load is done, which can change swcDefSignatureChecksums,
    // recalculate the checksum and see if it matches the loaded one from the
    // cache.
    this.checksums[0] = calculateChecksum();

    // If the checksum from last time and the current checksum do not match,
    // then we need to recompile.
    if (this.checksums[0] != loadedChecksums[0])
    {
        if (Trace.swcChecksum)
        {
            Trace.trace("isRecompilationNeeded: calculated checksum differs from last checksum, recompile");
        }
        return true;
    }

    // If we got here and swc checksums are disabled, then the checksums are
    // equal and a recompilation is not needed. Otherwise continue on and
    // compare the swc checksums.
    if (!isSwcChecksumEnabled)
    {
        if (Trace.swcChecksum)
        {
            Trace.trace("isRecompilationNeeded: checksums equal, swc-checksum disabled, incremental compile");
        }
        return false;
    }

    Map<QName, Long> signatureChecksums = swcDefSignatureChecksums;
    if (signatureChecksums == null)
    {
        if (Trace.swcChecksum)
        {
            Trace.trace("isRecompilationNeeded: checksums equal, signatureChecksums is null, incremental compile");
        }
    }
    else
    {
        for (Map.Entry<QName, Long> entry : signatureChecksums.entrySet())
        {
            // The map is already parameterized, so the old (QName)/(Long)
            // casts were redundant.
            QName qName = entry.getKey();
            Long dataSignatureChecksum = entry.getValue();

            // Look the definition up in the swc context; when it carries no
            // signature checksum, fall back to the source's last-modified time.
            Long swcSignatureChecksum = swcContext.getChecksum(qName);
            if (swcSignatureChecksum == null && qName != null)
            {
                Source source = swcContext.getSource(qName.getNamespace(), qName.getLocalPart());
                if (source != null)
                {
                    swcSignatureChecksum = Long.valueOf(source.getLastModified());
                }
            }

            if (Trace.swcChecksum && dataSignatureChecksum == null)
            {
                throw new IllegalStateException("dataSignatureChecksum should never be null");
            }

            // dataSignatureChecksum should never be null, but if it is, or if
            // the cached and current checksums differ, a recompile is needed.
            // (equals() replaces the original null-check + longValue() pair.)
            if (dataSignatureChecksum == null || !dataSignatureChecksum.equals(swcSignatureChecksum))
            {
                if (Trace.swcChecksum)
                {
                    Trace.trace("isRecompilationNeeded: signature checksums not equal, recompile");
                    Trace.trace("compare " + qName);
                    Trace.trace("data = " + dataSignatureChecksum);
                    Trace.trace("swc = " + swcSignatureChecksum);
                }
                return true;
            }
        }
    }

    // Finally, compare the swc file timestamps themselves.
    boolean result = !areSwcFileChecksumsEqual();
    if (Trace.swcChecksum)
    {
        Trace.trace("isRecompilationNeeded: " + (result ? "recompile" : "incremental compile"));
    }
    return result;
}
/**
 * If the link timestamp is different, or the resources in the swc have been
 * updated, a relink is necessary.
 *
 * @param loadedChecksums the checksum array read back from the cache
 * @return true if relink is needed.
 */
protected boolean isRelinkNeeded(int[] loadedChecksums)
{
    // If the link checksum from last time and the current checksum do not match,
    // then we need to relink.
    if (this.checksums[2] != loadedChecksums[2])
    {
        if (Trace.swcChecksum)
        {
            // Fixed: this trace message previously said "isRecompilationNeeded",
            // which made traces from this method impossible to tell apart.
            Trace.trace("isRelinkNeeded: calculated checksum differs from last checksum, relink");
        }
        return true;
    }

    // Verify that the timestamps on the archive files haven't changed.
    boolean result = !areArchiveFileChecksumsEqual();
    if (Trace.swcChecksum)
    {
        Trace.trace("isRelinkNeeded: " + result);
    }
    return result;
}
/**
 * Record the signature checksum for each of the unit's top level definitions
 * in swcDefSignatureChecksums (no-op when that map is null).
 *
 * @param unit the compilation unit whose definitions should be recorded
 */
private void addSignatureChecksumToData(CompilationUnit unit)
{
    Long signatureChecksum = unit.getSignatureChecksum();
    if (signatureChecksum == null)
    {
        // No signature checksum available: fall back to the owning swc
        // script's last-modified time. Long.valueOf replaces the deprecated
        // new Long(long) constructor.
        SwcScript script = (SwcScript) unit.getSource().getOwner();
        signatureChecksum = Long.valueOf(script.getLastModified());
    }

    if (swcDefSignatureChecksums != null)
    {
        // Enhanced for-loop replaces the explicit Iterator idiom.
        for (QName qname : unit.topLevelDefinitions)
        {
            swcDefSignatureChecksums.put(qname, signatureChecksum);
        }
    }
}
/**
 * Test if the files in the compiler swc context have changed since the last compile.
 *
 * @return true if the swc files compiled with last time are the same as this time.
 */
private boolean areSwcFileChecksumsEqual()
{
    if (swcFileChecksums == null)
    {
        if (Trace.swcChecksum)
        {
            Trace.trace("areSwcFileChecksumsEqual: no file checksum map, not equal");
        }
        return false;
    }

    // NOTE(review): this adds the theme style sheets into the map returned by
    // swcContext.getFiles(); if getFiles() returns shared state rather than a
    // fresh copy, this mutates it — confirm getFiles() returns a copy.
    Map<String, VirtualFile> swcFiles = swcContext.getFiles();
    for (VirtualFile themeStyleSheet : swcContext.getThemeStyleSheets())
    {
        swcFiles.put(themeStyleSheet.getName(), themeStyleSheet);
    }

    Set<Map.Entry<String, Long>> dataSet = swcFileChecksums.entrySet();

    // Fewer current files than cached checksums means files were removed.
    if (swcFiles.size() < dataSet.size())
    {
        if (Trace.swcChecksum)
        {
            Trace.trace("areSwcFileChecksumsEqual: less files than before, not equal");
        }
        return false;
    }

    for (Map.Entry<String, Long> entry : dataSet)
    {
        String filename = entry.getKey();
        Long dataFileLastModified = entry.getValue();

        // Long.valueOf replaces the deprecated new Long(long) constructor; a
        // file missing from the context leaves the checksum null, which fails
        // the equals() test below.
        VirtualFile swcFile = swcFiles.get(filename);
        Long swcFileLastModified = (swcFile == null) ? null : Long.valueOf(swcFile.getLastModified());

        if (!dataFileLastModified.equals(swcFileLastModified))
        {
            if (Trace.swcChecksum)
            {
                Trace.trace("areSwcFileChecksumsEqual: not equal");
                Trace.trace("filename = " + filename);
                Trace.trace("last modified1 = " + dataFileLastModified);
                Trace.trace("last modified2 = " + swcFileLastModified);
            }
            return false;
        }
    }

    if (Trace.swcChecksum)
    {
        Trace.trace("areSwcFileChecksumsEqual: equal");
    }
    return true;
}
/**
 * Test if the archive files in the swc have changed since the last compile.
 *
 * @return true if the archive files compiled with last time are the same as this time.
 */
private boolean areArchiveFileChecksumsEqual()
{
    // NOTE(review): this guards on swcFileChecksums rather than
    // archiveFileChecksums. Presumably swcFileChecksums == null is the shared
    // "checksums disabled / no cached data" sentinel (it is what
    // saveArchiveFilesChecksums nulls out when disabled) — confirm.
    if (swcFileChecksums == null)
    {
        if (Trace.swcChecksum)
        {
            Trace.trace("areArchiveFileChecksumsEqual: no file checksum map, not equal");
        }
        return false;
    }

    for (Map.Entry<String, Long> entry : archiveFileChecksums.entrySet())
    {
        String filename = entry.getKey();
        Long dataFileLastModified = entry.getValue();

        // Autoboxing replaces the deprecated new Long(long) constructor.
        Long localFileLastModified = new LocalFile(new File(filename)).getLastModified();

        if (!dataFileLastModified.equals(localFileLastModified))
        {
            if (Trace.swcChecksum)
            {
                Trace.trace("areArchiveFileChecksumsEqual: not equal");
                Trace.trace("filename = " + filename);
                Trace.trace("last modified1 = " + dataFileLastModified);
                Trace.trace("last modified2 = " + localFileLastModified);
            }
            return false;
        }
    }

    if (Trace.swcChecksum)
    {
        Trace.trace("areArchiveFileChecksumsEqual: equal");
    }
    return true;
}
/**
 * Calculate the data checksum on the configuration buffer and the
 * swcContext. If the configuration changes the checksum changes.
 *
 * @return checksum
 */
protected int calculateChecksum()
{
    // Checksum on the configuration buffer.
    int checksum = cfgbuf.checksum_ts();

    // If swc checksums are disabled or there are no checksums to compare,
    // then include the swc timestamp as part of the checksum.
    // (isEmpty() replaces the size() == 0 comparison.)
    if (!isSwcChecksumEnabled
        || swcDefSignatureChecksums == null
        || swcDefSignatureChecksums.isEmpty())
    {
        checksum += swcContext.checksum();
    }
    return checksum;
}
// protected int getChecksum()
// {
// return checksums[0];
// }
/**
 * Set the checksum with the recalculated value.
 * Recomputes calculateChecksum() and stores it in slot 0 of the checksum array.
 */
protected void updateChecksum()
{
    checksums[0] = calculateChecksum();
}
/**
 * @return a defensive copy of the stored checksum array
 */
protected int[] copy()
{
    // Array clone() is covariant in Java, so it already returns int[];
    // the old (int[]) cast and intermediate local were redundant.
    return this.checksums.clone();
}
/**
 * @return the live array of checksums (not a copy; see copy() for a snapshot)
 */
protected int[] getChecksums()
{
    return checksums;
}
/**
 * @return map of top-level definition QName to its signature checksum,
 *         or null when signature checksums are not tracked
 */
protected Map<QName, Long> getSwcDefSignatureChecksums()
{
    return swcDefSignatureChecksums;
}
/**
 * @return map of swc file name to its last-modified timestamp,
 *         or null when swc checksums are disabled
 */
protected Map<String, Long> getSwcFileChecksums()
{
    return swcFileChecksums;
}
/**
 * @return map of archive file name to its last-modified timestamp
 */
protected Map<String, Long> getArchiveFileChecksums()
{
    return archiveFileChecksums;
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.security.authc.saml;
import org.junit.Before;
import org.opensaml.saml.common.xml.SAMLConstants;
import org.opensaml.saml.saml2.core.AuthnRequest;
import org.opensaml.saml.saml2.core.NameID;
import org.opensaml.saml.saml2.metadata.EntityDescriptor;
import java.time.Clock;
import java.time.Instant;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.opensaml.saml.saml2.core.AuthnContext.KERBEROS_AUTHN_CTX;
import static org.opensaml.saml.saml2.core.AuthnContext.SMARTCARD_AUTHN_CTX;
/**
 * Tests for building SAML {@link AuthnRequest} messages via
 * {@code SamlAuthnRequestBuilder}: NameID policy handling, force-authn flags,
 * and requested authentication contexts. The repeated builder/SP-configuration
 * construction from the original tests is factored into private helpers.
 */
public class SamlAuthnRequestBuilderTests extends SamlTestCase {

    private static final String SP_ENTITY_ID = "https://sp.example.com/";
    private static final String IDP_ENTITY_ID = "https://idp.example.net/";

    private static final String ACS_URL = "https://sp.example.com/saml/acs";
    private static final String IDP_URL = "https://idp.example.net/saml/sso/redirect";

    private EntityDescriptor idpDescriptor;

    @Before
    public void init() throws Exception {
        SamlUtils.initialize(logger);
        idpDescriptor = buildIdPDescriptor(IDP_URL, IDP_ENTITY_ID);
    }

    /** Builds the SP configuration used by every test, varying only the requested authn context class refs. */
    private static SpConfiguration spConfig(List<String> reqAuthnCtxClassRefs) {
        return new SpConfiguration(SP_ENTITY_ID, ACS_URL, null, null, null, reqAuthnCtxClassRefs);
    }

    /** Creates the request builder (POST protocol binding, redirect IdP binding, system UTC clock). */
    private SamlAuthnRequestBuilder requestBuilder(SpConfiguration sp) {
        return new SamlAuthnRequestBuilder(
            sp,
            SAMLConstants.SAML2_POST_BINDING_URI,
            idpDescriptor,
            SAMLConstants.SAML2_REDIRECT_BINDING_URI,
            Clock.systemUTC()
        );
    }

    /** Asserts the issuer/binding/ACS fields plus the persistent-NameID, no-force-authn combination shared by several tests. */
    private void assertPersistentNameIdAndNoForceAuthn(AuthnRequest request) {
        assertThat(request.getIssuer().getValue(), equalTo(SP_ENTITY_ID));
        assertThat(request.getProtocolBinding(), equalTo(SAMLConstants.SAML2_POST_BINDING_URI));
        assertThat(request.getAssertionConsumerServiceURL(), equalTo(ACS_URL));
        assertThat(request.getNameIDPolicy(), notNullValue());
        assertThat(request.getNameIDPolicy().getFormat(), equalTo(NameID.PERSISTENT));
        assertThat(request.getNameIDPolicy().getSPNameQualifier(), equalTo(SP_ENTITY_ID));
        assertThat(request.getNameIDPolicy().getAllowCreate(), equalTo(Boolean.FALSE));
        assertThat(request.isForceAuthn(), equalTo(Boolean.FALSE));
    }

    public void testBuildRequestWithDefaultSettingsHasNoNameIdPolicy() {
        final SamlAuthnRequestBuilder builder = requestBuilder(spConfig(Collections.emptyList()));

        final AuthnRequest request = buildAndValidateAuthnRequest(builder);
        assertThat(request.getIssuer().getValue(), equalTo(SP_ENTITY_ID));
        assertThat(request.getProtocolBinding(), equalTo(SAMLConstants.SAML2_POST_BINDING_URI));
        assertThat(request.getAssertionConsumerServiceURL(), equalTo(ACS_URL));
        // A NameIDPolicy element is present, but with no format/qualifier and allowCreate=false.
        assertThat(request.getNameIDPolicy(), notNullValue());
        assertThat(request.getNameIDPolicy().getFormat(), nullValue());
        assertThat(request.getNameIDPolicy().getSPNameQualifier(), nullValue());
        assertThat(request.getNameIDPolicy().getAllowCreate(), equalTo(Boolean.FALSE));
    }

    public void testBuildRequestWithPersistentNameAndNoForceAuth() throws Exception {
        final SamlAuthnRequestBuilder builder = requestBuilder(spConfig(Collections.emptyList()));
        builder.nameIDPolicy(new SamlAuthnRequestBuilder.NameIDPolicySettings(NameID.PERSISTENT, false, SP_ENTITY_ID));
        builder.forceAuthn(null);

        final AuthnRequest request = buildAndValidateAuthnRequest(builder);
        assertPersistentNameIdAndNoForceAuthn(request);
        // No authn contexts were requested, so the element must be absent.
        assertThat(request.getRequestedAuthnContext(), equalTo(null));
    }

    public void testBuildRequestWithTransientNameAndForceAuthTrue() throws Exception {
        final SamlAuthnRequestBuilder builder = requestBuilder(spConfig(Collections.emptyList()));

        // An empty string SPNameQualifier must be treated the same as null.
        final String noSpNameQualifier = randomBoolean() ? "" : null;
        builder.nameIDPolicy(new SamlAuthnRequestBuilder.NameIDPolicySettings(NameID.TRANSIENT, true, noSpNameQualifier));
        builder.forceAuthn(Boolean.TRUE);

        final AuthnRequest request = buildAndValidateAuthnRequest(builder);
        assertThat(request.getIssuer().getValue(), equalTo(SP_ENTITY_ID));
        assertThat(request.getProtocolBinding(), equalTo(SAMLConstants.SAML2_POST_BINDING_URI));
        assertThat(request.getAssertionConsumerServiceURL(), equalTo(ACS_URL));
        assertThat(request.getNameIDPolicy(), notNullValue());
        assertThat(request.getNameIDPolicy().getFormat(), equalTo(NameID.TRANSIENT));
        assertThat(request.getNameIDPolicy().getSPNameQualifier(), nullValue());
        assertThat(request.getNameIDPolicy().getAllowCreate(), equalTo(Boolean.TRUE));
        assertThat(request.isForceAuthn(), equalTo(Boolean.TRUE));
        assertThat(request.getRequestedAuthnContext(), equalTo(null));
    }

    public void testBuildRequestWithRequestedAuthnContext() throws Exception {
        final SamlAuthnRequestBuilder builder = requestBuilder(spConfig(Collections.singletonList(KERBEROS_AUTHN_CTX)));
        builder.nameIDPolicy(new SamlAuthnRequestBuilder.NameIDPolicySettings(NameID.PERSISTENT, false, SP_ENTITY_ID));
        builder.forceAuthn(null);

        final AuthnRequest request = buildAndValidateAuthnRequest(builder);
        assertPersistentNameIdAndNoForceAuthn(request);
        assertThat(request.getRequestedAuthnContext().getAuthnContextClassRefs(), hasSize(1));
        assertThat(request.getRequestedAuthnContext().getAuthnContextClassRefs().get(0).getURI(), equalTo(KERBEROS_AUTHN_CTX));
    }

    public void testBuildRequestWithRequestedAuthnContexts() throws Exception {
        List<String> reqAuthnCtxClassRef = Arrays.asList(KERBEROS_AUTHN_CTX, SMARTCARD_AUTHN_CTX, "http://an.arbitrary/mfa-profile");
        final SamlAuthnRequestBuilder builder = requestBuilder(spConfig(reqAuthnCtxClassRef));
        builder.nameIDPolicy(new SamlAuthnRequestBuilder.NameIDPolicySettings(NameID.PERSISTENT, false, SP_ENTITY_ID));
        builder.forceAuthn(null);

        final AuthnRequest request = buildAndValidateAuthnRequest(builder);
        assertPersistentNameIdAndNoForceAuthn(request);
        // The requested contexts must be preserved in order.
        assertThat(request.getRequestedAuthnContext().getAuthnContextClassRefs(), hasSize(3));
        assertThat(request.getRequestedAuthnContext().getAuthnContextClassRefs().get(0).getURI(), equalTo(KERBEROS_AUTHN_CTX));
        assertThat(request.getRequestedAuthnContext().getAuthnContextClassRefs().get(1).getURI(), equalTo(SMARTCARD_AUTHN_CTX));
        assertThat(
            request.getRequestedAuthnContext().getAuthnContextClassRefs().get(2).getURI(),
            equalTo("http://an.arbitrary/mfa-profile")
        );
    }

    /**
     * Builds the request and checks invariants every AuthnRequest must satisfy:
     * non-null ID of reasonable length, an issuer, an issue instant bracketed
     * by the wall clock around the build call, and the IdP SSO destination.
     */
    private AuthnRequest buildAndValidateAuthnRequest(SamlAuthnRequestBuilder builder) {
        Instant before = Instant.now();
        final AuthnRequest request = builder.build();
        Instant after = Instant.now();
        assertThat(request, notNullValue());
        assertThat(request.getID(), notNullValue());
        assertThat(request.getID().length(), greaterThan(20)); // IDs are generated from SecureRandom
        assertThat(request.getIssuer(), notNullValue());
        assertThat(request.getIssueInstant(), notNullValue());
        assertThat(request.getIssueInstant().isBefore(before), equalTo(false));
        assertThat(request.getIssueInstant().isAfter(after), equalTo(false));
        assertThat(request.getDestination(), equalTo(IDP_URL));
        return request;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.schema;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.*;
import java.lang.management.ManagementFactory;
import java.util.function.LongSupplier;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.util.concurrent.Futures;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.db.*;
import org.apache.cassandra.exceptions.AlreadyExistsException;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.gms.*;
import org.apache.cassandra.io.IVersionedSerializer;
import org.apache.cassandra.io.util.DataInputPlus;
import org.apache.cassandra.io.util.DataOutputPlus;
import org.apache.cassandra.locator.InetAddressAndPort;
import org.apache.cassandra.net.Message;
import org.apache.cassandra.net.MessagingService;
import org.apache.cassandra.schema.Keyspaces.KeyspacesDiff;
import org.apache.cassandra.utils.FBUtilities;
import static org.apache.cassandra.concurrent.Stage.MIGRATION;
import static org.apache.cassandra.net.Verb.SCHEMA_PUSH_REQ;
public class MigrationManager
{
private static final Logger logger = LoggerFactory.getLogger(MigrationManager.class);
public static final MigrationManager instance = new MigrationManager();
private static LongSupplier getUptimeFn = () -> ManagementFactory.getRuntimeMXBean().getUptime();
@VisibleForTesting
public static void setUptimeFn(LongSupplier supplier)
{
getUptimeFn = supplier;
}
private static final int MIGRATION_DELAY_IN_MS = 60000;
private static final int MIGRATION_TASK_WAIT_IN_SECONDS = Integer.parseInt(System.getProperty("cassandra.migration_task_wait_in_seconds", "1"));
private MigrationManager() {}
private static boolean shouldPushSchemaTo(InetAddressAndPort endpoint)
{
// only push schema to nodes with known and equal versions
return !endpoint.equals(FBUtilities.getBroadcastAddressAndPort())
&& MessagingService.instance().versions.knows(endpoint)
&& MessagingService.instance().versions.getRaw(endpoint) == MessagingService.current_version;
}
public static void announceNewKeyspace(KeyspaceMetadata ksm) throws ConfigurationException
{
announceNewKeyspace(ksm, false);
}
public static void announceNewKeyspace(KeyspaceMetadata ksm, boolean announceLocally) throws ConfigurationException
{
announceNewKeyspace(ksm, FBUtilities.timestampMicros(), announceLocally);
}
public static void announceNewKeyspace(KeyspaceMetadata ksm, long timestamp, boolean announceLocally) throws ConfigurationException
{
ksm.validate();
if (Schema.instance.getKeyspaceMetadata(ksm.name) != null)
throw new AlreadyExistsException(ksm.name);
logger.info("Create new Keyspace: {}", ksm);
announce(SchemaKeyspace.makeCreateKeyspaceMutation(ksm, timestamp), announceLocally);
}
public static void announceNewTable(TableMetadata cfm)
{
announceNewTable(cfm, true, FBUtilities.timestampMicros());
}
private static void announceNewTable(TableMetadata cfm, boolean throwOnDuplicate, long timestamp)
{
cfm.validate();
KeyspaceMetadata ksm = Schema.instance.getKeyspaceMetadata(cfm.keyspace);
if (ksm == null)
throw new ConfigurationException(String.format("Cannot add table '%s' to non existing keyspace '%s'.", cfm.name, cfm.keyspace));
// If we have a table or a view which has the same name, we can't add a new one
else if (throwOnDuplicate && ksm.getTableOrViewNullable(cfm.name) != null)
throw new AlreadyExistsException(cfm.keyspace, cfm.name);
logger.info("Create new table: {}", cfm);
announce(SchemaKeyspace.makeCreateTableMutation(ksm, cfm, timestamp), false);
}
static void announceKeyspaceUpdate(KeyspaceMetadata ksm)
{
ksm.validate();
KeyspaceMetadata oldKsm = Schema.instance.getKeyspaceMetadata(ksm.name);
if (oldKsm == null)
throw new ConfigurationException(String.format("Cannot update non existing keyspace '%s'.", ksm.name));
logger.info("Update Keyspace '{}' From {} To {}", ksm.name, oldKsm, ksm);
announce(SchemaKeyspace.makeCreateKeyspaceMutation(ksm.name, ksm.params, FBUtilities.timestampMicros()), false);
}
public static void announceTableUpdate(TableMetadata tm)
{
announceTableUpdate(tm, false);
}
public static void announceTableUpdate(TableMetadata updated, boolean announceLocally)
{
updated.validate();
TableMetadata current = Schema.instance.getTableMetadata(updated.keyspace, updated.name);
if (current == null)
throw new ConfigurationException(String.format("Cannot update non existing table '%s' in keyspace '%s'.", updated.name, updated.keyspace));
KeyspaceMetadata ksm = Schema.instance.getKeyspaceMetadata(current.keyspace);
updated.validateCompatibility(current);
long timestamp = FBUtilities.timestampMicros();
logger.info("Update table '{}/{}' From {} To {}", current.keyspace, current.name, current, updated);
Mutation.SimpleBuilder builder = SchemaKeyspace.makeUpdateTableMutation(ksm, current, updated, timestamp);
announce(builder, announceLocally);
}
static void announceKeyspaceDrop(String ksName)
{
KeyspaceMetadata oldKsm = Schema.instance.getKeyspaceMetadata(ksName);
if (oldKsm == null)
throw new ConfigurationException(String.format("Cannot drop non existing keyspace '%s'.", ksName));
logger.info("Drop Keyspace '{}'", oldKsm.name);
announce(SchemaKeyspace.makeDropKeyspaceMutation(oldKsm, FBUtilities.timestampMicros()), false);
}
public static void announceTableDrop(String ksName, String cfName, boolean announceLocally)
{
TableMetadata tm = Schema.instance.getTableMetadata(ksName, cfName);
if (tm == null)
throw new ConfigurationException(String.format("Cannot drop non existing table '%s' in keyspace '%s'.", cfName, ksName));
KeyspaceMetadata ksm = Schema.instance.getKeyspaceMetadata(ksName);
logger.info("Drop table '{}/{}'", tm.keyspace, tm.name);
announce(SchemaKeyspace.makeDropTableMutation(ksm, tm, FBUtilities.timestampMicros()), announceLocally);
}
/**
* actively announce a new version to active hosts via rpc
* @param schema The schema mutation to be applied
*/
private static void announce(Mutation.SimpleBuilder schema, boolean announceLocally)
{
List<Mutation> mutations = Collections.singletonList(schema.build());
if (announceLocally)
Schema.instance.merge(mutations);
else
announce(mutations);
}
public static void announce(Mutation change)
{
announce(Collections.singleton(change));
}
public static void announce(Collection<Mutation> schema)
{
Future<?> f = announceWithoutPush(schema);
Set<InetAddressAndPort> schemaDestinationEndpoints = new HashSet<>();
Set<InetAddressAndPort> schemaEndpointsIgnored = new HashSet<>();
Message<Collection<Mutation>> message = Message.out(SCHEMA_PUSH_REQ, schema);
for (InetAddressAndPort endpoint : Gossiper.instance.getLiveMembers())
{
if (shouldPushSchemaTo(endpoint))
{
MessagingService.instance().send(message, endpoint);
schemaDestinationEndpoints.add(endpoint);
}
else
{
schemaEndpointsIgnored.add(endpoint);
}
}
SchemaAnnouncementDiagnostics.schemaMutationsAnnounced(schemaDestinationEndpoints, schemaEndpointsIgnored);
FBUtilities.waitOnFuture(f);
}
public static Future<?> announceWithoutPush(Collection<Mutation> schema)
{
return MIGRATION.submit(() -> Schema.instance.mergeAndAnnounceVersion(schema));
}
public static KeyspacesDiff announce(SchemaTransformation transformation, boolean locally)
{
long now = FBUtilities.timestampMicros();
Future<Schema.TransformationResult> future =
MIGRATION.submit(() -> Schema.instance.transform(transformation, locally, now));
Schema.TransformationResult result = Futures.getUnchecked(future);
if (!result.success)
throw result.exception;
if (locally || result.diff.isEmpty())
return result.diff;
Set<InetAddressAndPort> schemaDestinationEndpoints = new HashSet<>();
Set<InetAddressAndPort> schemaEndpointsIgnored = new HashSet<>();
Message<Collection<Mutation>> message = Message.out(SCHEMA_PUSH_REQ, result.mutations);
for (InetAddressAndPort endpoint : Gossiper.instance.getLiveMembers())
{
if (shouldPushSchemaTo(endpoint))
{
MessagingService.instance().send(message, endpoint);
schemaDestinationEndpoints.add(endpoint);
}
else
{
schemaEndpointsIgnored.add(endpoint);
}
}
SchemaAnnouncementDiagnostics.schemaTransformationAnnounced(schemaDestinationEndpoints, schemaEndpointsIgnored,
transformation);
return result.diff;
}
/**
* Clear all locally stored schema information and reset schema to initial state.
* Called by user (via JMX) who wants to get rid of schema disagreement.
*/
public static void resetLocalSchema()
{
logger.info("Starting local schema reset...");
logger.debug("Truncating schema tables...");
SchemaMigrationDiagnostics.resetLocalSchema();
SchemaKeyspace.truncate();
logger.debug("Clearing local schema keyspace definitions...");
Schema.instance.clear();
Set<InetAddressAndPort> liveEndpoints = Gossiper.instance.getLiveMembers();
liveEndpoints.remove(FBUtilities.getBroadcastAddressAndPort());
// force migration if there are nodes around
for (InetAddressAndPort node : liveEndpoints)
{
EndpointState state = Gossiper.instance.getEndpointStateForEndpoint(node);
Future<Void> pull = MigrationCoordinator.instance.reportEndpointVersion(node, state);
if (pull != null)
FBUtilities.waitOnFuture(pull);
}
logger.info("Local schema reset is complete.");
}
/**
* We have a set of non-local, distributed system keyspaces, e.g. system_traces, system_auth, etc.
* (see {@link SchemaConstants#REPLICATED_SYSTEM_KEYSPACE_NAMES}), that need to be created on cluster initialisation,
* and later evolved on major upgrades (sometimes minor too). This method compares the current known definitions
* of the tables (if the keyspace exists) to the expected, most modern ones expected by the running version of C*;
* if any changes have been detected, a schema Mutation will be created which, when applied, should make
* cluster's view of that keyspace aligned with the expected modern definition.
*
* @param keyspace the expected modern definition of the keyspace
* @param generation timestamp to use for the table changes in the schema mutation
*
* @return empty Optional if the current definition is up to date, or an Optional with the Mutation that would
* bring the schema in line with the expected definition.
*/
public static Optional<Mutation> evolveSystemKeyspace(KeyspaceMetadata keyspace, long generation)
{
Mutation.SimpleBuilder builder = null;
KeyspaceMetadata definedKeyspace = Schema.instance.getKeyspaceMetadata(keyspace.name);
Tables definedTables = null == definedKeyspace ? Tables.none() : definedKeyspace.tables;
for (TableMetadata table : keyspace.tables)
{
if (table.equals(definedTables.getNullable(table.name)))
continue;
if (null == builder)
{
// for the keyspace definition itself (name, replication, durability) always use generation 0;
// this ensures that any changes made to replication by the user will never be overwritten.
builder = SchemaKeyspace.makeCreateKeyspaceMutation(keyspace.name, keyspace.params, 0);
// now set the timestamp to generation, so the tables have the expected timestamp
builder.timestamp(generation);
}
// for table definitions always use the provided generation; these tables, unlike their containing
// keyspaces, are *NOT* meant to be altered by the user; if their definitions need to change,
// the schema must be updated in code, and the appropriate generation must be bumped.
SchemaKeyspace.addTableToSchemaMutation(table, true, builder);
}
return builder == null ? Optional.empty() : Optional.of(builder.build());
}
public static class MigrationsSerializer implements IVersionedSerializer<Collection<Mutation>>
{
public static MigrationsSerializer instance = new MigrationsSerializer();
public void serialize(Collection<Mutation> schema, DataOutputPlus out, int version) throws IOException
{
out.writeInt(schema.size());
for (Mutation mutation : schema)
Mutation.serializer.serialize(mutation, out, version);
}
public Collection<Mutation> deserialize(DataInputPlus in, int version) throws IOException
{
int count = in.readInt();
Collection<Mutation> schema = new ArrayList<>(count);
for (int i = 0; i < count; i++)
schema.add(Mutation.serializer.deserialize(in, version));
return schema;
}
public long serializedSize(Collection<Mutation> schema, int version)
{
int size = TypeSizes.sizeof(schema.size());
for (Mutation mutation : schema)
size += mutation.serializedSize(version);
return size;
}
}
}
| |
// ========================================================================
// $Id: AbstractSessionManager.java,v 1.53 2006/11/22 20:01:10 gregwilkins Exp $
// Copyright 199-2004 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ========================================================================
package org.openqa.jetty.jetty.servlet;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.EventListener;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import javax.servlet.ServletContext;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import javax.servlet.http.HttpSessionAttributeListener;
import javax.servlet.http.HttpSessionBindingEvent;
import javax.servlet.http.HttpSessionBindingListener;
import javax.servlet.http.HttpSessionContext;
import javax.servlet.http.HttpSessionEvent;
import javax.servlet.http.HttpSessionListener;
import org.apache.commons.logging.Log;
import org.openqa.jetty.log.LogFactory;
import org.openqa.jetty.http.HttpOnlyCookie;
import org.openqa.jetty.util.LazyList;
import org.openqa.jetty.util.LogSupport;
import org.openqa.jetty.util.MultiMap;
/* ------------------------------------------------------------ */
/** An Abstract implementation of SessionManager.
* The partial implementation of SessionManager interface provides
* the majority of the handling required to implement a
* SessionManager. Concrete implementations of SessionManager based
* on AbstractSessionManager need only implement the newSession method
* to return a specialized version of the Session inner class that
* provides an attribute Map.
* <p>
* If the property
* org.openqa.jetty.jetty.servlet.AbstractSessionManager.23Notifications is set to
* true, the 2.3 servlet spec notification style will be used.
* <p>
* @version $Id: AbstractSessionManager.java,v 1.53 2006/11/22 20:01:10 gregwilkins Exp $
* @author Greg Wilkins (gregw)
*/
public abstract class AbstractSessionManager implements SessionManager
{
private static Log log = LogFactory.getLog(AbstractSessionManager.class);
/* ------------------------------------------------------------ */
// 60*60*24*7*52*20 seconds = roughly 20 years of 52 weeks; an
// effectively-infinite expiry value.
public final static int __distantFuture = 60*60*24*7*52*20;
private final static String __NEW_SESSION_ID="org.openqa.jetty.jetty.newSessionId";
/* ------------------------------------------------------------ */
/* global Map of ID to session */
protected static MultiMap __allSessions=new MultiMap();
/* ------------------------------------------------------------ */
// Setting of max inactive interval for new sessions
// -1 means no timeout
private int _dftMaxIdleSecs = -1;
// Period of the session scavenger, in milliseconds (by field name; the
// scavenger itself is not visible in this chunk).
private int _scavengePeriodMs = 30000;
// NOTE(review): usage not visible in this chunk — presumably identifies this
// worker in clustered session IDs; confirm.
private String _workerName ;
// Registered HttpSessionListener / HttpSessionAttributeListener instances.
protected transient ArrayList _sessionListeners=new ArrayList();
protected transient ArrayList _sessionAttributeListeners=new ArrayList();
// Per-manager map of session ID to session (initialized outside this chunk).
protected transient Map _sessions;
// Source of randomness for session ID generation; _weakRandom presumably
// flags a non-secure fallback — confirm where it is set to true.
protected transient Random _random;
protected transient boolean _weakRandom;
protected transient ServletHandler _handler;
// NOTE(review): appear to track min/max concurrent session counts — confirm.
protected int _minSessions = 0;
protected int _maxSessions = 0;
// When true, a requested session ID may be reused for a new session
// (see get/setUseRequestedId, which alias this field).
protected boolean _crossContextSessionIDs=false;
// Flags applied to the session cookie.
protected boolean _secureCookies=false;
protected boolean _httpOnly=false;
// When true, invalidation is presumably propagated to all contexts sharing
// the session ID — confirm against the invalidate implementation.
protected boolean _invalidateGlobal=true;
private transient SessionScavenger _scavenger = null;
/* ------------------------------------------------------------ */
public AbstractSessionManager()
{
this(null);
}
/* ------------------------------------------------------------ */
public AbstractSessionManager(Random random)
{
_random=random;
_weakRandom=false;
}
/* ------------------------------------------------------------ */
/**
* @return True if requested session ID are first considered for new
* @deprecated use getCrossContextSessionIDs
* session IDs
*/
@Deprecated
public boolean getUseRequestedId()
{
return _crossContextSessionIDs;
}
/* ------------------------------------------------------------ */
/** Set Use Requested ID.
* @param useRequestedId True if requested session ID are first considered for new
* @deprecated use setCrossContextSessionIDs
* session IDs
*/
@Deprecated
public void setUseRequestedId(boolean useRequestedId)
{
_crossContextSessionIDs = useRequestedId;
}
/* ------------------------------------------------------------ */
/**
* @return True if cross context session IDs are first considered for new
* session IDs
*/
public boolean getCrossContextSessionIDs()
{
return _crossContextSessionIDs;
}
/* ------------------------------------------------------------ */
/** Set Cross Context sessions IDs
* This option activates a mode where a requested session ID can be used to create a
* new session. This facilitates the sharing of session cookies when cross context
* dispatches use sessions.
*
* @param useRequestedId True if cross context session ID are first considered for new
* session IDs
*/
public void setCrossContextSessionIDs(boolean useRequestedId)
{
_crossContextSessionIDs = useRequestedId;
}
/* ------------------------------------------------------------ */
public void initialize(ServletHandler handler)
{
_handler=handler;
}
/* ------------------------------------------------------------ */
public Map getSessionMap()
{
return Collections.unmodifiableMap(_sessions);
}
/* ------------------------------------------------------------ */
public int getSessions ()
{
return _sessions.size ();
}
/* ------------------------------------------------------------ */
public int getMinSessions ()
{
return _minSessions;
}
/* ------------------------------------------------------------ */
public int getMaxSessions ()
{
return _maxSessions;
}
/* ------------------------------------------------------------ */
public void resetStats ()
{
_minSessions = _sessions.size ();
_maxSessions = _sessions.size ();
}
/* ------------------------------------------------------------ */
/* new Session ID.
* If the request has a requestedSessionID which is unique, that is used.
* The session ID is created as a unique random long base 36.
* If the request has a jvmRoute attribute, that is appended as a
* worker tag, else any worker tag set on the manager is appended.
* @param request
* @param created
* @return Session ID.
*/
private String newSessionId(HttpServletRequest request,long created)
{
synchronized(__allSessions)
{
// A requested session ID can only be used if it is in the global map of
// ID but not in this contexts map. Ie it is an ID in use by another context
// in this server and thus we are doing a cross context dispatch.
if (_crossContextSessionIDs)
{
String requested_id=(String)request.getAttribute(__NEW_SESSION_ID);
if (requested_id==null)
requested_id=request.getRequestedSessionId();
if (requested_id !=null &&
requested_id!=null && __allSessions.containsKey(requested_id) && !_sessions.containsKey(requested_id))
return requested_id;
}
// pick a new unique ID!
String id=null;
while (id==null || id.length()==0 || __allSessions.containsKey(id))
{
long r=_weakRandom
?(hashCode()^Runtime.getRuntime().freeMemory()^_random.nextInt()^(((long)request.hashCode())<<32))
:_random.nextLong();
r^=created;
if (request!=null && request.getRemoteAddr()!=null)
r^=request.getRemoteAddr().hashCode();
if (r<0)
r=-r;
id=Long.toString(r,36);
String worker = (String)request.getAttribute("org.openqa.jetty.http.ajp.JVMRoute");
if (worker!=null)
id+="."+worker;
else if (_workerName!=null)
id+="."+_workerName;
}
return id;
}
}
/* ------------------------------------------------------------ */
public HttpSession getHttpSession(String id)
{
synchronized(this)
{
return (HttpSession)_sessions.get(id);
}
}
/* ------------------------------------------------------------ */
public HttpSession newHttpSession(HttpServletRequest request)
{
Session session = newSession(request);
session.setMaxInactiveInterval(_dftMaxIdleSecs);
synchronized(__allSessions)
{
synchronized(this)
{
_sessions.put(session.getId(),session);
__allSessions.add(session.getId(), session);
if (_sessions.size() > this._maxSessions)
this._maxSessions = _sessions.size ();
}
}
HttpSessionEvent event=new HttpSessionEvent(session);
for(int i=0;i<_sessionListeners.size();i++)
((HttpSessionListener)_sessionListeners.get(i))
.sessionCreated(event);
if (getCrossContextSessionIDs())
request.setAttribute(__NEW_SESSION_ID, session.getId());
return session;
}
/* ------------------------------------------------------------ */
public Cookie getSessionCookie(HttpSession session,boolean requestIsSecure)
{
if (_handler.isUsingCookies())
{
Cookie cookie = _handler.getSessionManager().getHttpOnly()
?new HttpOnlyCookie(SessionManager.__SessionCookie,session.getId())
:new Cookie(SessionManager.__SessionCookie,session.getId());
String domain=_handler.getServletContext().getInitParameter(SessionManager.__SessionDomain);
String maxAge=_handler.getServletContext().getInitParameter(SessionManager.__MaxAge);
String path=_handler.getServletContext().getInitParameter(SessionManager.__SessionPath);
if (path==null)
path=getCrossContextSessionIDs()?"/":_handler.getHttpContext().getContextPath();
if (path==null || path.length()==0)
path="/";
if (domain!=null)
cookie.setDomain(domain);
if (maxAge!=null)
cookie.setMaxAge(Integer.parseInt(maxAge));
else
cookie.setMaxAge(-1);
cookie.setSecure(requestIsSecure && getSecureCookies());
cookie.setPath(path);
return cookie;
}
return null;
}
/* ------------------------------------------------------------ */
protected abstract Session newSession(HttpServletRequest request);
/* ------------------------------------------------------------ */
/** Get the workname.
* If set, the workername is dot appended to the session ID
* and can be used to assist session affinity in a load balancer.
* @return String or null
*/
public String getWorkerName()
{
return _workerName;
}
/* ------------------------------------------------------------ */
/** Set the workname.
* If set, the workername is dot appended to the session ID
* and can be used to assist session affinity in a load balancer.
* @param workerName
*/
public void setWorkerName(String workerName)
{
_workerName = workerName;
}
/* ------------------------------------------------------------ */
/**
* @return seconds
*/
public int getMaxInactiveInterval()
{
return _dftMaxIdleSecs;
}
/* ------------------------------------------------------------ */
/**
* @param seconds
*/
public void setMaxInactiveInterval(int seconds)
{
_dftMaxIdleSecs = seconds;
if (_dftMaxIdleSecs>0 && _scavengePeriodMs>_dftMaxIdleSecs*100)
setScavengePeriod((_dftMaxIdleSecs+9)/10);
}
/* ------------------------------------------------------------ */
/**
* @return seconds
*/
public int getScavengePeriod()
{
return _scavengePeriodMs/1000;
}
/* ------------------------------------------------------------ */
/**
* @param seconds
*/
public void setScavengePeriod(int seconds)
{
if (seconds==0)
seconds=60;
int old_period=_scavengePeriodMs;
int period = seconds*1000;
if (period>60000)
period=60000;
if (period<1000)
period=1000;
if (period!=old_period)
{
synchronized(this)
{
_scavengePeriodMs=period;
if (_scavenger!=null)
_scavenger.interrupt();
}
}
}
/* ------------------------------------------------------------ */
/**
* @return Returns the httpOnly.
*/
public boolean getHttpOnly()
{
return _httpOnly;
}
/* ------------------------------------------------------------ */
/**
* @param httpOnly The httpOnly to set.
*/
public void setHttpOnly(boolean httpOnly)
{
_httpOnly = httpOnly;
}
/* ------------------------------------------------------------ */
/**
* @return Returns the secureCookies.
*/
public boolean getSecureCookies()
{
return _secureCookies;
}
/* ------------------------------------------------------------ */
/**
* @param secureCookies The secureCookies to set.
*/
public void setSecureCookies(boolean secureCookies)
{
_secureCookies = secureCookies;
}
/* ------------------------------------------------------------ */
public boolean isInvalidateGlobal()
{
return _invalidateGlobal;
}
/* ------------------------------------------------------------ */
/**
* @param global True if session invalidation should be global.
* ie Sessions in other contexts with the same ID (linked by cross context dispatch
* or shared session cookie) are invalidated as a group.
*/
public void setInvalidateGlobal(boolean global)
{
_invalidateGlobal=global;
}
/* ------------------------------------------------------------ */
public void addEventListener(EventListener listener)
throws IllegalArgumentException
{
if (listener instanceof HttpSessionAttributeListener)
_sessionAttributeListeners.add(listener);
if (listener instanceof HttpSessionListener)
_sessionListeners.add(listener);
}
/* ------------------------------------------------------------ */
public void removeEventListener(EventListener listener)
{
if (listener instanceof HttpSessionAttributeListener)
_sessionAttributeListeners.remove(listener);
if (listener instanceof HttpSessionListener)
_sessionListeners.remove(listener);
}
/* ------------------------------------------------------------ */
public boolean isStarted()
{
return _scavenger!=null;
}
/* ------------------------------------------------------------ */
public void start()
throws Exception
{
if (_random==null)
{
log.debug("New random session seed");
try
{
_random=SecureRandom.getInstance("SHA1PRNG");
}
catch (NoSuchAlgorithmException e)
{
log.warn("Could not generate SecureRandom for session-id randomness",e);
_random=new Random();
_weakRandom=true;
}
_random.setSeed(_random.nextLong()^System.currentTimeMillis()^hashCode()^Runtime.getRuntime().freeMemory());
}
if (_sessions==null)
_sessions=new HashMap();
// Start the session scavenger if we haven't already
if (_scavenger == null)
{
_scavenger = new SessionScavenger();
_scavenger.start();
}
}
/* ------------------------------------------------------------ */
public void stop()
{
// Invalidate all sessions to cause unbind events
ArrayList sessions = new ArrayList(_sessions.values());
for (Iterator i = sessions.iterator(); i.hasNext(); )
{
Session session = (Session)i.next();
session.invalidate();
}
_sessions.clear();
// stop the scavenger
SessionScavenger scavenger = _scavenger;
_scavenger=null;
if (scavenger!=null)
scavenger.interrupt();
}
/* -------------------------------------------------------------- */
/** Find sessions that have timed out and invalidate them.
* This runs in the SessionScavenger thread.
*/
private void scavenge()
{
Thread thread = Thread.currentThread();
ClassLoader old_loader = thread.getContextClassLoader();
try
{
if (_handler==null)
return;
ClassLoader loader = _handler.getClassLoader();
if (loader!=null)
thread.setContextClassLoader(loader);
long now = System.currentTimeMillis();
// Since Hashtable enumeration is not safe over deletes,
// we build a list of stale sessions, then go back and invalidate them
Object stale=null;
synchronized(AbstractSessionManager.this)
{
// For each session
for (Iterator i = _sessions.values().iterator(); i.hasNext(); )
{
Session session = (Session)i.next();
long idleTime = session._maxIdleMs;
if (idleTime > 0 && session._accessed + idleTime < now) {
// Found a stale session, add it to the list
stale=LazyList.add(stale,session);
}
}
}
// Remove the stale sessions
for (int i = LazyList.size(stale); i-->0;)
{
// check it has not been accessed in the meantime
Session session=(Session)LazyList.get(stale,i);
long idleTime = session._maxIdleMs;
if (idleTime > 0 && session._accessed + idleTime < System.currentTimeMillis())
{
session.invalidate();
int nbsess = this._sessions.size();
if (nbsess < this._minSessions)
this._minSessions = nbsess;
}
}
}
finally
{
thread.setContextClassLoader(old_loader);
}
}
/* ------------------------------------------------------------ */
public Random getRandom()
{
return _random;
}
/* ------------------------------------------------------------ */
public void setRandom(Random random)
{
_random=random;
}
/* ------------------------------------------------------------ */
/* ------------------------------------------------------------ */
/* -------------------------------------------------------------- */
/** SessionScavenger is a background thread that kills off old sessions */
class SessionScavenger extends Thread
{
public void run()
{
int period=-1;
try{
while (isStarted())
{
try {
if (period!=_scavengePeriodMs)
{
if(log.isDebugEnabled())log.debug("Session scavenger period = "+_scavengePeriodMs/1000+"s");
period=_scavengePeriodMs;
}
sleep(period>1000?period:1000);
AbstractSessionManager.this.scavenge();
}
catch (InterruptedException ex){continue;}
catch (Error e) {log.warn(LogSupport.EXCEPTION,e);}
catch (Exception e) {log.warn(LogSupport.EXCEPTION,e);}
}
}
finally
{
AbstractSessionManager.this._scavenger=null;
log.debug("Session scavenger exited");
}
}
SessionScavenger()
{
super("SessionScavenger");
setDaemon(true);
}
} // SessionScavenger
/* ------------------------------------------------------------ */
/* ------------------------------------------------------------ */
/* ------------------------------------------------------------ */
public abstract class Session implements SessionManager.Session
{
Map _values;
boolean _invalid=false;
boolean _newSession=true;
long _created=System.currentTimeMillis();
long _accessed=_created;
long _maxIdleMs = _dftMaxIdleSecs*1000;
String _id;
/* ------------------------------------------------------------- */
protected Session(HttpServletRequest request)
{
_id=newSessionId(request,_created);
if (_dftMaxIdleSecs>=0)
_maxIdleMs=_dftMaxIdleSecs*1000;
}
/* ------------------------------------------------------------ */
protected abstract Map newAttributeMap();
/* ------------------------------------------------------------ */
public void access()
{
_newSession=false;
_accessed=System.currentTimeMillis();
}
/* ------------------------------------------------------------ */
public boolean isValid()
{
return !_invalid;
}
/* ------------------------------------------------------------ */
public ServletContext getServletContext()
{
return _handler.getServletContext();
}
/* ------------------------------------------------------------- */
public String getId()
throws IllegalStateException
{
return _id;
}
/* ------------------------------------------------------------- */
public long getCreationTime()
throws IllegalStateException
{
if (_invalid) throw new IllegalStateException();
return _created;
}
/* ------------------------------------------------------------- */
public long getLastAccessedTime()
throws IllegalStateException
{
if (_invalid) throw new IllegalStateException();
return _accessed;
}
/* ------------------------------------------------------------- */
public int getMaxInactiveInterval()
{
if (_invalid) throw new IllegalStateException();
return (int)(_maxIdleMs / 1000);
}
/* ------------------------------------------------------------- */
/**
* @deprecated
*/
@Deprecated
public HttpSessionContext getSessionContext()
throws IllegalStateException
{
if (_invalid) throw new IllegalStateException();
return SessionContext.NULL_IMPL;
}
/* ------------------------------------------------------------- */
public void setMaxInactiveInterval(int secs)
{
_maxIdleMs = (long)secs * 1000;
if (_maxIdleMs>0 && (_maxIdleMs/10)<_scavengePeriodMs)
AbstractSessionManager.this.setScavengePeriod((secs+9)/10);
}
/* ------------------------------------------------------------- */
public void invalidate() throws IllegalStateException
{
if (log.isDebugEnabled()) log.debug("Invalidate session "+getId()+" in "+_handler.getHttpContext());
try
{
// Notify listeners and unbind values
synchronized (this)
{
if (_invalid)
throw new IllegalStateException();
if (_sessionListeners!=null)
{
HttpSessionEvent event=new HttpSessionEvent(this);
for (int i=_sessionListeners.size(); i-->0;)
((HttpSessionListener)_sessionListeners.get(i)).sessionDestroyed(event);
}
if (_values!=null)
{
Iterator iter=_values.keySet().iterator();
while (iter.hasNext())
{
String key=(String)iter.next();
Object value=_values.get(key);
iter.remove();
unbindValue(key,value);
if (_sessionAttributeListeners.size()>0)
{
HttpSessionBindingEvent event=new HttpSessionBindingEvent(this,key,value);
for (int i=0; i<_sessionAttributeListeners.size(); i++)
{
((HttpSessionAttributeListener)_sessionAttributeListeners.get(i)).attributeRemoved(event);
}
}
}
}
}
}
finally
{
// Remove session from context and global maps
synchronized (__allSessions)
{
synchronized (_sessions)
{
_invalid=true;
_sessions.remove(getId());
__allSessions.removeValue(getId(), this);
if (isInvalidateGlobal())
{
// Don't iterate as other sessions may also be globally invalidating
while(__allSessions.containsKey(getId()))
{
Session session=(Session)__allSessions.getValue(getId(),0);
session.invalidate();
}
}
}
}
}
}
/* ------------------------------------------------------------- */
public boolean isNew()
throws IllegalStateException
{
if (_invalid) throw new IllegalStateException();
return _newSession;
}
/* ------------------------------------------------------------ */
public synchronized Object getAttribute(String name)
{
if (_invalid) throw new IllegalStateException();
if (_values==null)
return null;
return _values.get(name);
}
/* ------------------------------------------------------------ */
public synchronized Enumeration getAttributeNames()
{
if (_invalid) throw new IllegalStateException();
List names = _values==null?Collections.EMPTY_LIST:new ArrayList(_values.keySet());
return Collections.enumeration(names);
}
/* ------------------------------------------------------------ */
public synchronized void setAttribute(String name, Object value)
{
if (_invalid) throw new IllegalStateException();
if (_values==null)
_values=newAttributeMap();
Object oldValue = _values.put(name,value);
if (value==null || !value.equals(oldValue))
{
unbindValue(name, oldValue);
bindValue(name, value);
if (_sessionAttributeListeners.size()>0)
{
HttpSessionBindingEvent event =
new HttpSessionBindingEvent(this,name,
oldValue==null?value:oldValue);
for(int i=0;i<_sessionAttributeListeners.size();i++)
{
HttpSessionAttributeListener l =
(HttpSessionAttributeListener)
_sessionAttributeListeners.get(i);
if (oldValue==null)
l.attributeAdded(event);
else if (value==null)
l.attributeRemoved(event);
else
l.attributeReplaced(event);
}
}
}
}
/* ------------------------------------------------------------ */
public synchronized void removeAttribute(String name)
{
if (_invalid) throw new IllegalStateException();
if (_values==null)
return;
Object old=_values.remove(name);
if (old!=null)
{
unbindValue(name, old);
if (_sessionAttributeListeners.size()>0)
{
HttpSessionBindingEvent event =
new HttpSessionBindingEvent(this,name,old);
for(int i=0;i<_sessionAttributeListeners.size();i++)
{
HttpSessionAttributeListener l =
(HttpSessionAttributeListener)
_sessionAttributeListeners.get(i);
l.attributeRemoved(event);
}
}
}
}
/* ------------------------------------------------------------- */
/**
* @deprecated As of Version 2.2, this method is
* replaced by {@link #getAttribute}
*/
@Deprecated
public Object getValue(String name)
throws IllegalStateException
{
return getAttribute(name);
}
/* ------------------------------------------------------------- */
/**
* @deprecated As of Version 2.2, this method is
* replaced by {@link #getAttributeNames}
*/
@Deprecated
public synchronized String[] getValueNames()
throws IllegalStateException
{
if (_invalid) throw new IllegalStateException();
if (_values==null)
return new String[0];
String[] a = new String[_values.size()];
return (String[])_values.keySet().toArray(a);
}
/* ------------------------------------------------------------- */
/**
* @deprecated As of Version 2.2, this method is
* replaced by {@link #setAttribute}
*/
@Deprecated
public void putValue(java.lang.String name,
java.lang.Object value)
throws IllegalStateException
{
setAttribute(name,value);
}
/* ------------------------------------------------------------- */
/**
* @deprecated As of Version 2.2, this method is
* replaced by {@link #removeAttribute}
*/
@Deprecated
public void removeValue(java.lang.String name)
throws IllegalStateException
{
removeAttribute(name);
}
/* ------------------------------------------------------------- */
/** If value implements HttpSessionBindingListener, call valueBound() */
private void bindValue(java.lang.String name, Object value)
{
if (value!=null && value instanceof HttpSessionBindingListener)
((HttpSessionBindingListener)value)
.valueBound(new HttpSessionBindingEvent(this,name));
}
/* ------------------------------------------------------------- */
/** If value implements HttpSessionBindingListener, call valueUnbound() */
private void unbindValue(java.lang.String name, Object value)
{
if (value!=null && value instanceof HttpSessionBindingListener)
((HttpSessionBindingListener)value)
.valueUnbound(new HttpSessionBindingEvent(this,name));
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.metrics.prometheus;
import org.apache.flink.api.common.JobID;
import org.apache.flink.metrics.Counter;
import org.apache.flink.metrics.Gauge;
import org.apache.flink.metrics.Histogram;
import org.apache.flink.metrics.Meter;
import org.apache.flink.metrics.SimpleCounter;
import org.apache.flink.metrics.util.TestHistogram;
import org.apache.flink.metrics.util.TestMeter;
import org.apache.flink.runtime.clusterframework.types.ResourceID;
import org.apache.flink.runtime.executiongraph.ExecutionAttemptID;
import org.apache.flink.runtime.jobgraph.JobVertexID;
import org.apache.flink.runtime.metrics.MetricRegistryImpl;
import org.apache.flink.runtime.metrics.MetricRegistryTestUtils;
import org.apache.flink.runtime.metrics.groups.TaskManagerMetricGroup;
import org.apache.flink.runtime.metrics.groups.TaskMetricGroup;
import com.mashape.unirest.http.exceptions.UnirestException;
import io.prometheus.client.CollectorRegistry;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.util.Arrays;
import java.util.Collections;
import static org.apache.flink.metrics.prometheus.PrometheusReporterTest.createReporterSetup;
import static org.apache.flink.metrics.prometheus.PrometheusReporterTest.pollMetrics;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
import static org.junit.Assert.assertThat;
/**
* Test for {@link PrometheusReporter} that registers several instances of the same metric for
* different subtasks.
*/
public class PrometheusReporterTaskScopeTest {
    /** Label names attached by the reporter to every task-scoped metric. */
    private static final String[] LABEL_NAMES = {
        "job_id",
        "task_id",
        "task_attempt_id",
        "host",
        "task_name",
        "task_attempt_num",
        "job_name",
        "tm_id",
        "subtask_index"
    };
    private static final String TASK_MANAGER_HOST = "taskManagerHostName";
    private static final String TASK_MANAGER_ID = "taskManagerId";
    private static final String JOB_NAME = "jobName";
    private static final String TASK_NAME = "taskName";
    private static final int ATTEMPT_NUMBER = 0;
    private static final int SUBTASK_INDEX_1 = 0;
    private static final int SUBTASK_INDEX_2 = 1;

    private final JobID jobId = new JobID();

    // Two subtasks of the same job; their metrics differ only in label values.
    private final JobVertexID taskId1 = new JobVertexID();
    private final ExecutionAttemptID taskAttemptId1 = new ExecutionAttemptID();
    private final String[] labelValues1 = {
        jobId.toString(),
        taskId1.toString(),
        taskAttemptId1.toString(),
        TASK_MANAGER_HOST,
        TASK_NAME,
        "" + ATTEMPT_NUMBER,
        JOB_NAME,
        TASK_MANAGER_ID,
        "" + SUBTASK_INDEX_1
    };

    private final JobVertexID taskId2 = new JobVertexID();
    private final ExecutionAttemptID taskAttemptId2 = new ExecutionAttemptID();
    private final String[] labelValues2 = {
        jobId.toString(),
        taskId2.toString(),
        taskAttemptId2.toString(),
        TASK_MANAGER_HOST,
        TASK_NAME,
        "" + ATTEMPT_NUMBER,
        JOB_NAME,
        TASK_MANAGER_ID,
        "" + SUBTASK_INDEX_2
    };

    private TaskMetricGroup taskMetricGroup1;
    private TaskMetricGroup taskMetricGroup2;

    private MetricRegistryImpl registry;
    private PrometheusReporter reporter;

    /** Build a registry with a PrometheusReporter and two task metric groups. */
    @Before
    public void setupReporter() {
        registry =
                new MetricRegistryImpl(
                        MetricRegistryTestUtils.defaultMetricRegistryConfiguration(),
                        Collections.singletonList(createReporterSetup("test1", "9400-9500")));
        reporter = (PrometheusReporter) registry.getReporters().get(0);

        TaskManagerMetricGroup tmMetricGroup =
                TaskManagerMetricGroup.createTaskManagerMetricGroup(
                        registry, TASK_MANAGER_HOST, new ResourceID(TASK_MANAGER_ID));
        taskMetricGroup1 =
                tmMetricGroup
                        .addJob(jobId, JOB_NAME)
                        .addTask(
                                taskId1,
                                taskAttemptId1,
                                TASK_NAME,
                                SUBTASK_INDEX_1,
                                ATTEMPT_NUMBER);
        taskMetricGroup2 =
                tmMetricGroup
                        .addJob(jobId, JOB_NAME)
                        .addTask(
                                taskId2,
                                taskAttemptId2,
                                TASK_NAME,
                                SUBTASK_INDEX_2,
                                ATTEMPT_NUMBER);
    }

    @After
    public void shutdownRegistry() throws Exception {
        if (registry != null) {
            registry.shutdown().get();
        }
    }

    @Test
    public void countersCanBeAddedSeveralTimesIfTheyDifferInLabels() throws UnirestException {
        Counter counter1 = new SimpleCounter();
        counter1.inc(1);
        Counter counter2 = new SimpleCounter();
        counter2.inc(2);

        taskMetricGroup1.counter("my_counter", counter1);
        taskMetricGroup2.counter("my_counter", counter2);

        assertThat(
                CollectorRegistry.defaultRegistry.getSampleValue(
                        "flink_taskmanager_job_task_my_counter", LABEL_NAMES, labelValues1),
                equalTo(1.));
        assertThat(
                CollectorRegistry.defaultRegistry.getSampleValue(
                        "flink_taskmanager_job_task_my_counter", LABEL_NAMES, labelValues2),
                equalTo(2.));
    }

    @Test
    public void gaugesCanBeAddedSeveralTimesIfTheyDifferInLabels() throws UnirestException {
        Gauge<Integer> gauge1 =
                new Gauge<Integer>() {
                    @Override
                    public Integer getValue() {
                        return 3;
                    }
                };
        Gauge<Integer> gauge2 =
                new Gauge<Integer>() {
                    @Override
                    public Integer getValue() {
                        return 4;
                    }
                };

        taskMetricGroup1.gauge("my_gauge", gauge1);
        taskMetricGroup2.gauge("my_gauge", gauge2);

        assertThat(
                CollectorRegistry.defaultRegistry.getSampleValue(
                        "flink_taskmanager_job_task_my_gauge", LABEL_NAMES, labelValues1),
                equalTo(3.));
        assertThat(
                CollectorRegistry.defaultRegistry.getSampleValue(
                        "flink_taskmanager_job_task_my_gauge", LABEL_NAMES, labelValues2),
                equalTo(4.));
    }

    @Test
    public void metersCanBeAddedSeveralTimesIfTheyDifferInLabels() throws UnirestException {
        // The same Meter instance is registered under both groups; both series
        // report the TestMeter's fixed rate.
        Meter meter = new TestMeter();

        taskMetricGroup1.meter("my_meter", meter);
        taskMetricGroup2.meter("my_meter", meter);

        assertThat(
                CollectorRegistry.defaultRegistry.getSampleValue(
                        "flink_taskmanager_job_task_my_meter", LABEL_NAMES, labelValues1),
                equalTo(5.));
        assertThat(
                CollectorRegistry.defaultRegistry.getSampleValue(
                        "flink_taskmanager_job_task_my_meter", LABEL_NAMES, labelValues2),
                equalTo(5.));
    }

    @Test
    public void histogramsCanBeAddedSeveralTimesIfTheyDifferInLabels() throws UnirestException {
        Histogram histogram = new TestHistogram();

        taskMetricGroup1.histogram("my_histogram", histogram);
        taskMetricGroup2.histogram("my_histogram", histogram);

        final String exportedMetrics = pollMetrics(reporter.getPort()).getBody();
        assertThat(
                exportedMetrics,
                containsString("subtask_index=\"0\",quantile=\"0.5\",} 0.5")); // histogram
        assertThat(
                exportedMetrics,
                containsString("subtask_index=\"1\",quantile=\"0.5\",} 0.5")); // histogram

        final String[] labelNamesWithQuantile = addToArray(LABEL_NAMES, "quantile");
        for (Double quantile : PrometheusReporter.HistogramSummaryProxy.QUANTILES) {
            assertThat(
                    CollectorRegistry.defaultRegistry.getSampleValue(
                            "flink_taskmanager_job_task_my_histogram",
                            labelNamesWithQuantile,
                            addToArray(labelValues1, "" + quantile)),
                    equalTo(quantile));
            assertThat(
                    CollectorRegistry.defaultRegistry.getSampleValue(
                            "flink_taskmanager_job_task_my_histogram",
                            labelNamesWithQuantile,
                            addToArray(labelValues2, "" + quantile)),
                    equalTo(quantile));
        }
    }

    @Test
    public void removingSingleInstanceOfMetricDoesNotBreakOtherInstances() throws UnirestException {
        Counter counter1 = new SimpleCounter();
        counter1.inc(1);
        Counter counter2 = new SimpleCounter();
        counter2.inc(2);

        taskMetricGroup1.counter("my_counter", counter1);
        taskMetricGroup2.counter("my_counter", counter2);

        assertThat(
                CollectorRegistry.defaultRegistry.getSampleValue(
                        "flink_taskmanager_job_task_my_counter", LABEL_NAMES, labelValues1),
                equalTo(1.));
        assertThat(
                CollectorRegistry.defaultRegistry.getSampleValue(
                        "flink_taskmanager_job_task_my_counter", LABEL_NAMES, labelValues2),
                equalTo(2.));

        taskMetricGroup2.close();
        assertThat(
                CollectorRegistry.defaultRegistry.getSampleValue(
                        "flink_taskmanager_job_task_my_counter", LABEL_NAMES, labelValues1),
                equalTo(1.));

        taskMetricGroup1.close();
        assertThat(
                CollectorRegistry.defaultRegistry.getSampleValue(
                        "flink_taskmanager_job_task_my_counter", LABEL_NAMES, labelValues1),
                nullValue());
    }

    /**
     * Returns a copy of {@code array} with {@code element} appended.
     *
     * <p>Sized from the input array itself (the original version used
     * {@code LABEL_NAMES.length}, which only worked because every caller
     * happened to pass an array of exactly that length).
     */
    private String[] addToArray(String[] array, String element) {
        final String[] result = Arrays.copyOf(array, array.length + 1);
        result[array.length] = element;
        return result;
    }
}
| |
/*
* Copyright 2014 The gRPC Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.grpc.okhttp;
import static com.google.common.base.Preconditions.checkNotNull;
import static io.grpc.internal.GrpcUtil.DEFAULT_KEEPALIVE_TIMEOUT_NANOS;
import static io.grpc.internal.GrpcUtil.DEFAULT_KEEPALIVE_TIME_NANOS;
import static io.grpc.internal.GrpcUtil.KEEPALIVE_TIME_NANOS_DISABLED;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import io.grpc.Attributes;
import io.grpc.ExperimentalApi;
import io.grpc.Internal;
import io.grpc.NameResolver;
import io.grpc.internal.AbstractManagedChannelImplBuilder;
import io.grpc.internal.AtomicBackoff;
import io.grpc.internal.ClientTransportFactory;
import io.grpc.internal.ConnectionClientTransport;
import io.grpc.internal.GrpcUtil;
import io.grpc.internal.KeepAliveManager;
import io.grpc.internal.SharedResourceHolder;
import io.grpc.internal.SharedResourceHolder.Resource;
import io.grpc.internal.TransportTracer;
import io.grpc.okhttp.internal.CipherSuite;
import io.grpc.okhttp.internal.ConnectionSpec;
import io.grpc.okhttp.internal.Platform;
import io.grpc.okhttp.internal.TlsVersion;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.security.GeneralSecurityException;
import java.security.KeyStore;
import java.security.SecureRandom;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.annotation.Nullable;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSocketFactory;
import javax.net.ssl.TrustManagerFactory;
/** Convenience class for building channels with the OkHttp transport. */
@ExperimentalApi("https://github.com/grpc/grpc-java/issues/1785")
public class OkHttpChannelBuilder extends
    AbstractManagedChannelImplBuilder<OkHttpChannelBuilder> {
  /** Identifies the negotiation used for starting up HTTP/2. */
  private enum NegotiationType {
    /** Uses TLS ALPN/NPN negotiation, assumes an SSL connection. */
    TLS,
    /**
     * Just assume the connection is plaintext (non-SSL) and the remote endpoint supports HTTP/2
     * directly without an upgrade.
     */
    PLAINTEXT
  }
  /**
   * ConnectionSpec closely matching the default configuration that could be used as a basis for
   * modification.
   *
   * <p>Since this field is the only reference in gRPC to ConnectionSpec that may not be ProGuarded,
   * we are removing the field to reduce method count. We've been unable to find any existing users
   * of the field, and any such user would highly likely at least be changing the cipher suites,
   * which is sort of the only part that's non-obvious. Any existing user should instead create
   * their own spec from scratch or base it off ConnectionSpec.MODERN_TLS if believed to be
   * necessary. If this was providing you with value and don't want to see it removed, open a GitHub
   * issue to discuss keeping it.
   *
   * @deprecated Deemed of little benefit and users weren't using it. Just define one yourself
   */
  @Deprecated
  public static final com.squareup.okhttp.ConnectionSpec DEFAULT_CONNECTION_SPEC =
      new com.squareup.okhttp.ConnectionSpec.Builder(com.squareup.okhttp.ConnectionSpec.MODERN_TLS)
          .cipherSuites(
              // The following items should be sync with Netty's Http2SecurityUtil.CIPHERS.
              com.squareup.okhttp.CipherSuite.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
              com.squareup.okhttp.CipherSuite.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
              com.squareup.okhttp.CipherSuite.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
              com.squareup.okhttp.CipherSuite.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
              com.squareup.okhttp.CipherSuite.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256,
              com.squareup.okhttp.CipherSuite.TLS_DHE_DSS_WITH_AES_128_GCM_SHA256,
              com.squareup.okhttp.CipherSuite.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384,
              com.squareup.okhttp.CipherSuite.TLS_DHE_DSS_WITH_AES_256_GCM_SHA384)
          .tlsVersions(com.squareup.okhttp.TlsVersion.TLS_1_2)
          .supportsTlsExtensions(true)
          .build();
  // Internal (ProGuard-safe) mirror of the spec above; used as the default connectionSpec.
  @VisibleForTesting
  static final ConnectionSpec INTERNAL_DEFAULT_CONNECTION_SPEC =
      new ConnectionSpec.Builder(ConnectionSpec.MODERN_TLS)
          .cipherSuites(
              // The following items should be sync with Netty's Http2SecurityUtil.CIPHERS.
              CipherSuite.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
              CipherSuite.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
              CipherSuite.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
              CipherSuite.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
              CipherSuite.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256,
              CipherSuite.TLS_DHE_DSS_WITH_AES_128_GCM_SHA256,
              CipherSuite.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384,
              CipherSuite.TLS_DHE_DSS_WITH_AES_256_GCM_SHA384)
          .tlsVersions(TlsVersion.TLS_1_2)
          .supportsTlsExtensions(true)
          .build();
  // Keepalive times at or above this threshold are treated as "infinite" and disable keepalive.
  private static final long AS_LARGE_AS_INFINITE = TimeUnit.DAYS.toNanos(1000L);
  // Daemon-threaded executor shared by all channels that did not supply their own executor.
  private static final Resource<ExecutorService> SHARED_EXECUTOR =
      new Resource<ExecutorService>() {
        @Override
        public ExecutorService create() {
          return Executors.newCachedThreadPool(GrpcUtil.getThreadFactory("grpc-okhttp-%d", true));
        }
        @Override
        public void close(ExecutorService executor) {
          executor.shutdown();
        }
      };
  /** Creates a new builder for the given server host and port. */
  public static OkHttpChannelBuilder forAddress(String host, int port) {
    return new OkHttpChannelBuilder(host, port);
  }
  /**
   * Creates a new builder for the given target that will be resolved by
   * {@link io.grpc.NameResolver}.
   */
  public static OkHttpChannelBuilder forTarget(String target) {
    return new OkHttpChannelBuilder(target);
  }
  // Builder state; mutated by the configuration methods below and read once in
  // buildTransportFactory(). Null executor/scheduler mean "use the shared resource".
  private Executor transportExecutor;
  private ScheduledExecutorService scheduledExecutorService;
  private SSLSocketFactory sslSocketFactory;
  private HostnameVerifier hostnameVerifier;
  private ConnectionSpec connectionSpec = INTERNAL_DEFAULT_CONNECTION_SPEC;
  private NegotiationType negotiationType = NegotiationType.TLS;
  private long keepAliveTimeNanos = KEEPALIVE_TIME_NANOS_DISABLED;
  private long keepAliveTimeoutNanos = DEFAULT_KEEPALIVE_TIMEOUT_NANOS;
  private boolean keepAliveWithoutCalls;
  /** Creates a builder for a channel to {@code host:port}. */
  protected OkHttpChannelBuilder(String host, int port) {
    this(GrpcUtil.authorityFromHostAndPort(host, port));
  }
  /** Creates a builder for the given target string, resolved via a {@link NameResolver}. */
  private OkHttpChannelBuilder(String target) {
    super(target);
  }
  // Test-only hook for injecting a TransportTracer factory.
  @VisibleForTesting
  final OkHttpChannelBuilder setTransportTracerFactory(
      TransportTracer.Factory transportTracerFactory) {
    this.transportTracerFactory = transportTracerFactory;
    return this;
  }
  /**
   * Override the default executor necessary for internal transport use.
   *
   * <p>The channel does not take ownership of the given executor. It is the caller' responsibility
   * to shutdown the executor when appropriate.
   */
  public final OkHttpChannelBuilder transportExecutor(@Nullable Executor transportExecutor) {
    this.transportExecutor = transportExecutor;
    return this;
  }
  /**
   * Sets the negotiation type for the HTTP/2 connection.
   *
   * <p>If TLS is enabled a default {@link SSLSocketFactory} is created using the best
   * {@link java.security.Provider} available and is NOT based on
   * {@link SSLSocketFactory#getDefault}. To more precisely control the TLS configuration call
   * {@link #sslSocketFactory} to override the socket factory used.
   *
   * <p>Default: <code>TLS</code>
   *
   * @deprecated use {@link #usePlaintext()} or {@link #useTransportSecurity()} instead.
   */
  @Deprecated
  public final OkHttpChannelBuilder negotiationType(io.grpc.okhttp.NegotiationType type) {
    Preconditions.checkNotNull(type, "type");
    // Translate the public (deprecated) enum to the private one used internally.
    switch (type) {
      case TLS:
        negotiationType = NegotiationType.TLS;
        break;
      case PLAINTEXT:
        negotiationType = NegotiationType.PLAINTEXT;
        break;
      default:
        throw new AssertionError("Unknown negotiation type: " + type);
    }
    return this;
  }
  /**
   * Enable keepalive with default delay and timeout.
   *
   * @deprecated Use {@link #keepAliveTime} instead
   */
  @Deprecated
  public final OkHttpChannelBuilder enableKeepAlive(boolean enable) {
    if (enable) {
      return keepAliveTime(DEFAULT_KEEPALIVE_TIME_NANOS, TimeUnit.NANOSECONDS);
    } else {
      return keepAliveTime(KEEPALIVE_TIME_NANOS_DISABLED, TimeUnit.NANOSECONDS);
    }
  }
  /**
   * Enable keepalive with custom delay and timeout.
   *
   * @deprecated Use {@link #keepAliveTime} and {@link #keepAliveTimeout} instead
   */
  @Deprecated
  public final OkHttpChannelBuilder enableKeepAlive(boolean enable, long keepAliveTime,
      TimeUnit delayUnit, long keepAliveTimeout, TimeUnit timeoutUnit) {
    if (enable) {
      return keepAliveTime(keepAliveTime, delayUnit)
          .keepAliveTimeout(keepAliveTimeout, timeoutUnit);
    } else {
      return keepAliveTime(KEEPALIVE_TIME_NANOS_DISABLED, TimeUnit.NANOSECONDS);
    }
  }
  /**
   * {@inheritDoc}
   *
   * @since 1.3.0
   */
  @Override
  public OkHttpChannelBuilder keepAliveTime(long keepAliveTime, TimeUnit timeUnit) {
    Preconditions.checkArgument(keepAliveTime > 0L, "keepalive time must be positive");
    keepAliveTimeNanos = timeUnit.toNanos(keepAliveTime);
    keepAliveTimeNanos = KeepAliveManager.clampKeepAliveTimeInNanos(keepAliveTimeNanos);
    if (keepAliveTimeNanos >= AS_LARGE_AS_INFINITE) {
      // Bump keepalive time to infinite. This disables keepalive.
      keepAliveTimeNanos = KEEPALIVE_TIME_NANOS_DISABLED;
    }
    return this;
  }
  /**
   * {@inheritDoc}
   *
   * @since 1.3.0
   */
  @Override
  public OkHttpChannelBuilder keepAliveTimeout(long keepAliveTimeout, TimeUnit timeUnit) {
    Preconditions.checkArgument(keepAliveTimeout > 0L, "keepalive timeout must be positive");
    keepAliveTimeoutNanos = timeUnit.toNanos(keepAliveTimeout);
    keepAliveTimeoutNanos = KeepAliveManager.clampKeepAliveTimeoutInNanos(keepAliveTimeoutNanos);
    return this;
  }
  /**
   * {@inheritDoc}
   *
   * @since 1.3.0
   * @see #keepAliveTime(long, TimeUnit)
   */
  @Override
  public OkHttpChannelBuilder keepAliveWithoutCalls(boolean enable) {
    keepAliveWithoutCalls = enable;
    return this;
  }
  /**
   * Override the default {@link SSLSocketFactory} and enable TLS negotiation.
   */
  public final OkHttpChannelBuilder sslSocketFactory(SSLSocketFactory factory) {
    this.sslSocketFactory = factory;
    // Supplying a socket factory implies TLS.
    negotiationType = NegotiationType.TLS;
    return this;
  }
  /**
   * Set the hostname verifier to use when using TLS negotiation. The hostnameVerifier is only used
   * if using TLS negotiation. If the hostname verifier is not set, a default hostname verifier is
   * used.
   *
   * <p>Be careful when setting a custom hostname verifier! By setting a non-null value, you are
   * replacing all default verification behavior. If the hostname verifier you supply does not
   * effectively supply the same checks, you may be removing the security assurances that TLS aims
   * to provide.</p>
   *
   * <p>This method should not be used to avoid hostname verification, even during testing, since
   * {@link #overrideAuthority} is a safer alternative as it does not disable any security checks.
   * </p>
   *
   * @see io.grpc.okhttp.internal.OkHostnameVerifier
   *
   * @since 1.6.0
   * @return this
   *
   */
  public final OkHttpChannelBuilder hostnameVerifier(@Nullable HostnameVerifier hostnameVerifier) {
    this.hostnameVerifier = hostnameVerifier;
    return this;
  }
  /**
   * For secure connection, provides a ConnectionSpec to specify Cipher suite and
   * TLS versions.
   *
   * <p>By default a modern, HTTP/2-compatible spec will be used.
   *
   * <p>This method is only used when building a secure connection. For plaintext
   * connection, use {@link #usePlaintext()} instead.
   *
   * @throws IllegalArgumentException
   *         If {@code connectionSpec} is not with TLS
   */
  public final OkHttpChannelBuilder connectionSpec(
      com.squareup.okhttp.ConnectionSpec connectionSpec) {
    Preconditions.checkArgument(connectionSpec.isTls(), "plaintext ConnectionSpec is not accepted");
    this.connectionSpec = Utils.convertSpec(connectionSpec);
    return this;
  }
  /**
   * Equivalent to using {@link #negotiationType} with {@code PLAINTEXT}.
   *
   * @deprecated use {@link #usePlaintext()} instead.
   */
  @Override
  @Deprecated
  public final OkHttpChannelBuilder usePlaintext(boolean skipNegotiation) {
    if (skipNegotiation) {
      negotiationType(io.grpc.okhttp.NegotiationType.PLAINTEXT);
    } else {
      throw new IllegalArgumentException("Plaintext negotiation not currently supported");
    }
    return this;
  }
  /** Sets the negotiation type for the HTTP/2 connection to plaintext. */
  @Override
  public final OkHttpChannelBuilder usePlaintext() {
    negotiationType = NegotiationType.PLAINTEXT;
    return this;
  }
  /**
   * Sets the negotiation type for the HTTP/2 connection to TLS (this is the default).
   *
   * <p>With TLS enabled, a default {@link SSLSocketFactory} is created using the best {@link
   * java.security.Provider} available and is NOT based on {@link SSLSocketFactory#getDefault}. To
   * more precisely control the TLS configuration call {@link #sslSocketFactory} to override the
   * socket factory used.
   */
  @Override
  public final OkHttpChannelBuilder useTransportSecurity() {
    negotiationType = NegotiationType.TLS;
    return this;
  }
  /**
   * Provides a custom scheduled executor service.
   *
   * <p>It's an optional parameter. If the user has not provided a scheduled executor service when
   * the channel is built, the builder will use a static cached thread pool.
   *
   * @return this
   *
   * @since 1.11.0
   */
  public final OkHttpChannelBuilder scheduledExecutorService(
      ScheduledExecutorService scheduledExecutorService) {
    this.scheduledExecutorService =
        checkNotNull(scheduledExecutorService, "scheduledExecutorService");
    return this;
  }
  @Override
  @Internal
  protected final ClientTransportFactory buildTransportFactory() {
    // Keepalive is enabled unless the time was set (or clamped) to the disabled sentinel.
    boolean enableKeepAlive = keepAliveTimeNanos != KEEPALIVE_TIME_NANOS_DISABLED;
    return new OkHttpTransportFactory(transportExecutor, scheduledExecutorService,
        createSocketFactory(), hostnameVerifier, connectionSpec, maxInboundMessageSize(),
        enableKeepAlive, keepAliveTimeNanos, keepAliveTimeoutNanos, keepAliveWithoutCalls,
        transportTracerFactory);
  }
  // Supplies the default port for name resolution, based on the negotiation type.
  @Override
  protected Attributes getNameResolverParams() {
    int defaultPort;
    switch (negotiationType) {
      case PLAINTEXT:
        defaultPort = GrpcUtil.DEFAULT_PORT_PLAINTEXT;
        break;
      case TLS:
        defaultPort = GrpcUtil.DEFAULT_PORT_SSL;
        break;
      default:
        throw new AssertionError(negotiationType + " not handled");
    }
    return Attributes.newBuilder()
        .set(NameResolver.Factory.PARAMS_DEFAULT_PORT, defaultPort).build();
  }
  // Returns the socket factory to use for new transports, creating (and caching) a default
  // TLS factory on first use; returns null for plaintext.
  @VisibleForTesting
  @Nullable
  SSLSocketFactory createSocketFactory() {
    switch (negotiationType) {
      case TLS:
        try {
          if (sslSocketFactory == null) {
            SSLContext sslContext;
            if (GrpcUtil.IS_RESTRICTED_APPENGINE) {
              // The following auth code circumvents the following AccessControlException:
              // access denied ("java.util.PropertyPermission" "javax.net.ssl.keyStore" "read")
              // Conscrypt will attempt to load the default KeyStore if a trust manager is not
              // provided, which is forbidden on AppEngine
              sslContext = SSLContext.getInstance("TLS", Platform.get().getProvider());
              TrustManagerFactory trustManagerFactory =
                  TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
              trustManagerFactory.init((KeyStore) null);
              sslContext.init(
                  null,
                  trustManagerFactory.getTrustManagers(),
                  // Use an algorithm that doesn't need /dev/urandom
                  SecureRandom.getInstance("SHA1PRNG", Platform.get().getProvider()));
            } else {
              sslContext = SSLContext.getInstance("Default", Platform.get().getProvider());
            }
            sslSocketFactory = sslContext.getSocketFactory();
          }
          return sslSocketFactory;
        } catch (GeneralSecurityException gse) {
          throw new RuntimeException("TLS Provider failure", gse);
        }
      case PLAINTEXT:
        return null;
      default:
        throw new RuntimeException("Unknown negotiation type: " + negotiationType);
    }
  }
  /**
   * Creates OkHttp transports. Exposed for internal use, as it should be private.
   */
  @Internal
  static final class OkHttpTransportFactory implements ClientTransportFactory {
    private final Executor executor;
    private final boolean usingSharedExecutor;
    private final boolean usingSharedScheduler;
    private final TransportTracer.Factory transportTracerFactory;
    @Nullable
    private final SSLSocketFactory socketFactory;
    @Nullable
    private final HostnameVerifier hostnameVerifier;
    private final ConnectionSpec connectionSpec;
    private final int maxMessageSize;
    private final boolean enableKeepAlive;
    private final AtomicBackoff keepAliveTimeNanos;
    private final long keepAliveTimeoutNanos;
    private final boolean keepAliveWithoutCalls;
    private final ScheduledExecutorService timeoutService;
    private boolean closed;
    private OkHttpTransportFactory(Executor executor,
        @Nullable ScheduledExecutorService timeoutService,
        @Nullable SSLSocketFactory socketFactory,
        @Nullable HostnameVerifier hostnameVerifier,
        ConnectionSpec connectionSpec,
        int maxMessageSize,
        boolean enableKeepAlive,
        long keepAliveTimeNanos,
        long keepAliveTimeoutNanos,
        boolean keepAliveWithoutCalls,
        TransportTracer.Factory transportTracerFactory) {
      // Fall back to the process-wide shared timer when the caller supplied no scheduler;
      // the usingShared* flags control what close() releases.
      usingSharedScheduler = timeoutService == null;
      this.timeoutService = usingSharedScheduler
          ? SharedResourceHolder.get(GrpcUtil.TIMER_SERVICE) : timeoutService;
      this.socketFactory = socketFactory;
      this.hostnameVerifier = hostnameVerifier;
      this.connectionSpec = connectionSpec;
      this.maxMessageSize = maxMessageSize;
      this.enableKeepAlive = enableKeepAlive;
      this.keepAliveTimeNanos = new AtomicBackoff("keepalive time nanos", keepAliveTimeNanos);
      this.keepAliveTimeoutNanos = keepAliveTimeoutNanos;
      this.keepAliveWithoutCalls = keepAliveWithoutCalls;
      usingSharedExecutor = executor == null;
      this.transportTracerFactory =
          Preconditions.checkNotNull(transportTracerFactory, "transportTracerFactory");
      if (usingSharedExecutor) {
        // The executor was unspecified, using the shared executor.
        this.executor = SharedResourceHolder.get(SHARED_EXECUTOR);
      } else {
        this.executor = executor;
      }
    }
    @Override
    public ConnectionClientTransport newClientTransport(
        SocketAddress addr, ClientTransportOptions options) {
      if (closed) {
        throw new IllegalStateException("The transport factory is closed.");
      }
      // Backs off the shared keepalive time whenever the transport runs this callback.
      final AtomicBackoff.State keepAliveTimeNanosState = keepAliveTimeNanos.getState();
      Runnable tooManyPingsRunnable = new Runnable() {
        @Override
        public void run() {
          keepAliveTimeNanosState.backoff();
        }
      };
      InetSocketAddress inetSocketAddr = (InetSocketAddress) addr;
      OkHttpClientTransport transport = new OkHttpClientTransport(
          inetSocketAddr,
          options.getAuthority(),
          options.getUserAgent(),
          executor,
          socketFactory,
          hostnameVerifier,
          connectionSpec,
          maxMessageSize,
          options.getProxyParameters(),
          tooManyPingsRunnable,
          transportTracerFactory.create());
      if (enableKeepAlive) {
        transport.enableKeepAlive(
            true, keepAliveTimeNanosState.get(), keepAliveTimeoutNanos, keepAliveWithoutCalls);
      }
      return transport;
    }
    @Override
    public ScheduledExecutorService getScheduledExecutorService() {
      return timeoutService;
    }
    @Override
    public void close() {
      if (closed) {
        return;
      }
      closed = true;
      // Release only the resources this factory acquired from the shared holders.
      if (usingSharedScheduler) {
        SharedResourceHolder.release(GrpcUtil.TIMER_SERVICE, timeoutService);
      }
      if (usingSharedExecutor) {
        SharedResourceHolder.release(SHARED_EXECUTOR, (ExecutorService) executor);
      }
    }
  }
}
| |
/*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.apis.app;
import com.example.android.apis.R;
import android.app.Activity;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.ComponentName;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.RemoteViews;
/**
 * Demonstrates adding notifications to the status bar: simple mood
 * notifications (optionally with ticker text), notifications with a custom
 * {@link RemoteViews} layout, and notifications using the default
 * sound/vibrate settings. Also shows how to cancel a posted notification.
 */
public class StatusBarNotifications extends com.example.android.apis.stub.Base_Activity {
    private NotificationManager mNotificationManager;

    // Use our layout id for a unique identifier
    private static final int MOOD_NOTIFICATIONS = R.layout.status_bar_notifications;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        setContentView(R.layout.status_bar_notifications);

        // Get the notification manager service.
        mNotificationManager = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);

        // Mood buttons without ticker text.
        bindMoodButton(R.id.happy, R.drawable.stat_happy,
                R.string.status_bar_notifications_happy_message, false);
        bindMoodButton(R.id.neutral, R.drawable.stat_neutral,
                R.string.status_bar_notifications_ok_message, false);
        bindMoodButton(R.id.sad, R.drawable.stat_sad,
                R.string.status_bar_notifications_sad_message, false);

        // Mood buttons with scrolling ticker text.
        bindMoodButton(R.id.happyMarquee, R.drawable.stat_happy,
                R.string.status_bar_notifications_happy_message, true);
        bindMoodButton(R.id.neutralMarquee, R.drawable.stat_neutral,
                R.string.status_bar_notifications_ok_message, true);
        bindMoodButton(R.id.sadMarquee, R.drawable.stat_sad,
                R.string.status_bar_notifications_sad_message, true);

        // Mood buttons using a custom RemoteViews layout.
        bindMoodViewButton(R.id.happyViews, R.drawable.stat_happy,
                R.string.status_bar_notifications_happy_message);
        bindMoodViewButton(R.id.neutralViews, R.drawable.stat_neutral,
                R.string.status_bar_notifications_ok_message);
        bindMoodViewButton(R.id.sadViews, R.drawable.stat_sad,
                R.string.status_bar_notifications_sad_message);

        // Buttons demonstrating the notification "defaults" flags.
        bindDefaultButton(R.id.defaultSound, Notification.DEFAULT_SOUND);
        bindDefaultButton(R.id.defaultVibrate, Notification.DEFAULT_VIBRATE);
        bindDefaultButton(R.id.defaultAll, Notification.DEFAULT_ALL);

        // Button canceling the currently posted mood notification.
        Button button = (Button) findViewById(R.id.clear);
        button.setOnClickListener(new Button.OnClickListener() {
            public void onClick(View v) {
                mNotificationManager.cancel(MOOD_NOTIFICATIONS);
            }
        });
    }

    /** Wires a button that posts a standard mood notification on click. */
    private void bindMoodButton(int buttonId, final int moodId, final int textId,
            final boolean showTicker) {
        Button button = (Button) findViewById(buttonId);
        button.setOnClickListener(new Button.OnClickListener() {
            public void onClick(View v) {
                setMood(moodId, textId, showTicker);
            }
        });
    }

    /** Wires a button that posts a custom-view mood notification on click. */
    private void bindMoodViewButton(int buttonId, final int moodId, final int textId) {
        Button button = (Button) findViewById(buttonId);
        button.setOnClickListener(new Button.OnClickListener() {
            public void onClick(View v) {
                setMoodView(moodId, textId);
            }
        });
    }

    /** Wires a button that posts a notification with the given defaults flags on click. */
    private void bindDefaultButton(int buttonId, final int defaults) {
        Button button = (Button) findViewById(buttonId);
        button.setOnClickListener(new Button.OnClickListener() {
            public void onClick(View v) {
                setDefault(defaults);
            }
        });
    }

    /** Builds the PendingIntent launched when the user selects a mood notification. */
    private PendingIntent makeMoodIntent(int moodId) {
        // The PendingIntent to launch our activity if the user selects this
        // notification. Note the use of FLAG_UPDATE_CURRENT so that if there
        // is already an active matching pending intent, we will update its
        // extras (and other Intents in the array) to be the ones passed in here.
        PendingIntent contentIntent = PendingIntent.getActivity(this, 0,
                new Intent(this, NotificationDisplay.class).putExtra("moodimg", moodId),
                PendingIntent.FLAG_UPDATE_CURRENT);
        return contentIntent;
    }

    /** Builds a PendingIntent that opens this activity with a synthesized back stack. */
    private PendingIntent makeDefaultIntent() {
        // A typical convention for notifications is to launch the user deeply
        // into an application representing the data in the notification; to
        // accomplish this, we can build an array of intents to insert the back
        // stack history above the item being displayed.
        Intent[] intents = new Intent[4];

        // First: root activity of ApiDemos.
        // This is a convenient way to make the proper Intent to launch and
        // reset an application's task.
        intents[0] = Intent.makeRestartActivityTask(new ComponentName(this,
                com.example.android.apis.ApiDemos.class));

        // "App"
        intents[1] = new Intent(this, com.example.android.apis.ApiDemos.class);
        intents[1].putExtra("com.example.android.apis.Path", "App");
        // "App/Notification"
        intents[2] = new Intent(this, com.example.android.apis.ApiDemos.class);
        intents[2].putExtra("com.example.android.apis.Path", "App/Notification");

        // Now the activity to display to the user.
        intents[3] = new Intent(this, StatusBarNotifications.class);

        // The PendingIntent to launch our activity if the user selects this
        // notification. Note the use of FLAG_UPDATE_CURRENT so that if there
        // is already an active matching pending intent, we will update its
        // extras (and other Intents in the array) to be the ones passed in here.
        PendingIntent contentIntent = PendingIntent.getActivities(this, 0,
                intents, PendingIntent.FLAG_UPDATE_CURRENT);
        return contentIntent;
    }

    /** Posts (or replaces) the mood notification using the standard layout. */
    private void setMood(int moodId, int textId, boolean showTicker) {
        // In this sample, we'll use the same text for the ticker and the expanded notification
        CharSequence text = getText(textId);

        // choose the ticker text
        String tickerText = showTicker ? getString(textId) : null;

        // Set the icon, scrolling text and timestamp
        Notification notification = new Notification(moodId, tickerText,
                System.currentTimeMillis());

        // Set the info for the views that show in the notification panel.
        notification.setLatestEventInfo(this, getText(R.string.status_bar_notifications_mood_title),
                text, makeMoodIntent(moodId));

        // Send the notification.
        // We use a layout id because it is a unique number. We use it later to cancel.
        mNotificationManager.notify(MOOD_NOTIFICATIONS, notification);
    }

    /** Posts (or replaces) the mood notification using a custom RemoteViews layout. */
    private void setMoodView(int moodId, int textId) {
        // Instead of the normal constructor, we're going to use the one with no args and fill
        // in all of the data ourselves. The normal one uses the default layout for notifications.
        // You probably want that in most cases, but if you want to do something custom, you
        // can set the contentView field to your own RemoteViews object.
        Notification notif = new Notification();

        // This is who should be launched if the user selects our notification.
        notif.contentIntent = makeMoodIntent(moodId);

        // In this sample, we'll use the same text for the ticker and the expanded notification
        CharSequence text = getText(textId);
        notif.tickerText = text;

        // the icon for the status bar
        notif.icon = moodId;

        // our custom view
        RemoteViews contentView = new RemoteViews(getPackageName(), R.layout.status_bar_balloon);
        contentView.setTextViewText(R.id.text, text);
        contentView.setImageViewResource(R.id.icon, moodId);
        notif.contentView = contentView;

        // we use a layout id because it is a unique number. we use it later to cancel the
        // notification
        mNotificationManager.notify(MOOD_NOTIFICATIONS, notif);
    }

    /** Posts a notification configured with the given {@link Notification#defaults} flags. */
    private void setDefault(int defaults) {
        // This method sets the defaults on the notification before posting it.

        // This is who should be launched if the user selects our notification.
        PendingIntent contentIntent = makeDefaultIntent();

        // In this sample, we'll use the same text for the ticker and the expanded notification
        CharSequence text = getText(R.string.status_bar_notifications_happy_message);

        final Notification notification = new Notification(
                R.drawable.stat_happy,       // the icon for the status bar
                text,                        // the text to display in the ticker
                System.currentTimeMillis()); // the timestamp for the notification

        notification.setLatestEventInfo(
                this,                        // the context to use
                getText(R.string.status_bar_notifications_mood_title),
                                             // the title for the notification
                text,                        // the details to display in the notification
                contentIntent);              // the contentIntent (see above)

        notification.defaults = defaults;

        mNotificationManager.notify(
                MOOD_NOTIFICATIONS,          // we use a layout id because it is a unique
                                             // number. we use it later to cancel the notification
                notification);
    }
}
| |
/* $Id: 9aa95f266f44246a9109c47c32971f2658d49c57 $
*
* @license
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package io.coala.json;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.IOException;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
import java.util.TreeMap;
import javax.inject.Provider;
import org.aeonbits.owner.Accessible;
import org.aeonbits.owner.Config;
import org.aeonbits.owner.ConfigFactory;
import org.aeonbits.owner.Mutable;
import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.core.TreeNode;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.jsontype.TypeDeserializer;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.fasterxml.jackson.databind.node.ValueNode;
import io.coala.exception.ExceptionFactory;
import io.coala.exception.Thrower;
import io.coala.log.LogUtil;
import io.coala.util.ReflectUtil;
import io.coala.util.TypeArguments;
/**
* {@link DynaBean} implements a dynamic bean, ready for JSON de/serialization
*
* <p>
* NOT THREAD-SAFE
*
* @version $Id: 9aa95f266f44246a9109c47c32971f2658d49c57 $
* @author Rick van Krevelen
*/
@SuppressWarnings( "rawtypes" )
@JsonInclude( Include.NON_NULL )
public final class DynaBean implements Cloneable, Comparable
{
/**
* {@link BeanProxy} is a annotation used to recognize {@link DynaBean}
* entities/tags during de/serialization and specify the property to use for
* {@link Comparable}s
*
* @version $Id: 9aa95f266f44246a9109c47c32971f2658d49c57 $
* @author Rick van Krevelen
*/
	@Documented
	@Retention( RetentionPolicy.RUNTIME )
	@Target( ElementType.TYPE )
	public @interface BeanProxy
	{
		/**
		 * @return the name of the property to compare on when the annotated
		 *         type is {@link Comparable}, or an empty string for none
		 */
		String comparableOn() default "";
	}
	/** class logger */
	private static final Logger LOG = LogUtil.getLogger( DynaBean.class );
	/** leave null as long as possible, i.e. properties are created lazily */
	@JsonIgnore
	private Map<String, Object> dynamicProperties = null;
	/**
	 * {@link DynaBean} zero-arg bean constructor for (de)serialization
	 */
	@JsonCreator
	protected DynaBean()
	{
		// empty; the property map is created lazily on first set(..)
	}
protected void lock()
{
if( this.dynamicProperties != null )
this.dynamicProperties = Collections
.unmodifiableMap( this.dynamicProperties );
}
/**
* @return the map of property values
*/
@JsonAnyGetter
protected Map<String, Object> any()
{
return this.dynamicProperties == null ? Collections.emptyMap()
: this.dynamicProperties;
}
/**
* @param key
* @return
*/
public boolean has( final String key )
{
return any().containsKey( key );
}
/**
* @param key
* @return
*/
public boolean hasNonNull( final String key )
{
final Object value = get( key );
return value != null;
}
/**
* @param key
* @param value
* @return {@code true} iff this bean contains the specified {@code value}
* at specified {@code key}, i.e. both null/empty or both equal
*/
public boolean match( final String key, final Object value )
{
final Object v = get( key );
return value == null ? v == null : value.equals( v );
}
/**
* @param key
* @return
*/
public Object get( final String key )
{
return any().get( key );
}
/**
* helper-method
*
* @param key
* @param defaultValue
* @return the dynamically set value, or {@code defaultValue} if not set
*/
@SuppressWarnings( "unchecked" )
protected <T> T get( final String key, final T defaultValue )
{
final Object result = get( key );
return result == null ? defaultValue : (T) result;
}
/**
* helper-method
*
* @param key
* @param returnType
* @return the currently set value, or {@code null} if not set
*/
@SuppressWarnings( "unchecked" )
protected <T> T get( final String key, final Class<T> returnType )
{
return (T) get( key );
}
private Map<String, Object> getOrCreateMap()
{
if( this.dynamicProperties == null )
this.dynamicProperties = new TreeMap<String, Object>();
return this.dynamicProperties;
}
protected void set( final Map<String, ?> values )
{
Map<String, Object> map = getOrCreateMap();
// synchronized( map )
// {
map.putAll( values );
// }
}
@JsonAnySetter
public Object set( final String key, final Object value )
{
Map<String, Object> map = getOrCreateMap();
// synchronized( map )
// {
return map.put( key, value );
// }
}
protected Object remove( final String key )
{
Map<String, Object> map = getOrCreateMap();
synchronized( map )
{
return map.remove( key );
}
}
@SuppressWarnings( "unchecked" )
@Override
protected DynaBean clone()
{
final Map<String, Object> values = any();
final DynaBean result = new DynaBean();
result.set( JsonUtil.valueOf( JsonUtil.toTree( values ),
values.getClass() ) );
return result;
}
@Override
public int hashCode()
{
return any().hashCode();
}
@Override
public boolean equals( final Object other )
{
return any().equals( other );
}
@Override
public int compareTo( final Object o )
{
return Thrower.throwNew( IllegalStateException::new,
() -> "Invocation should be intercepted" );
}
@Override
public String toString()
{
try
{
return JsonUtil.getJOM()
.disable( SerializationFeature.FAIL_ON_EMPTY_BEANS )
.writeValueAsString( any() );
} catch( final IOException e )
{
LOG.warn( "Problem serializing " + getClass().getName(), e );
return super.toString();
}
}
/** cache of type arguments for known {@link Identifier} sub-types */
// static final Map<Class<?>, Provider<?>> DYNABEAN_PROVIDER_CACHE = new
// WeakHashMap<>();
/**
* {@link DynaBeanInvocationHandler}
*
* @version $Id: 9aa95f266f44246a9109c47c32971f2658d49c57 $
* @author Rick van Krevelen
*/
static class DynaBeanInvocationHandler implements InvocationHandler
{
/** */
private static final Logger LOG = LogUtil
.getLogger( DynaBeanInvocationHandler.class );
/** */
private final Class<?> type;
/** */
private final Config config;
/** */
protected final DynaBean bean;
/**
* {@link DynaBeanInvocationHandler} constructor
*/
@SafeVarargs
public DynaBeanInvocationHandler( final ObjectMapper om,
final Class<?> type, final DynaBean bean,
final Map<String, ?>... imports )
{
this.type = type;
this.bean = bean;
// LOG.trace("Using imports: " + Arrays.asList(imports));
Config config = null;
if( Config.class.isAssignableFrom( type ) )
{
// always create fresh, never from cache
config = ConfigFactory.create( type.asSubclass( Config.class ),
imports );
if( Mutable.class.isAssignableFrom( type ) )
{
final Mutable mutable = (Mutable) config;
mutable.addPropertyChangeListener(
new PropertyChangeListener()
{
@Override
public void propertyChange(
final PropertyChangeEvent change )
{
LOG.trace( "{} changed: {} = {} (was {})",
type.getSimpleName(),
change.getPropertyName(),
change.getNewValue(),
change.getOldValue() );
// remove bean property in favor of changed
// default config
// bean.remove(change.getPropertyName());
/*
* TODO parse actual value into bean try {
* final Method method =
* type.getMethod(change
* .getPropertyName()); final Object
* newValue = om.readValue(
* change.getNewValue().toString(),
* JsonUtil.checkRegistered(om,
* method.getReturnType(), imports));
* bean.set(change.getPropertyName(),
* newValue); } catch (final Throwable t) {
* LOG.warn(
* "Could not deserialize property: " +
* change.getPropertyName(), t); }
*/
}
} );
}
} else if( imports != null ) for( Map<String, ?> imp : imports )
this.bean.set( imp );
this.config = config;
// TODO use event listeners of Mutable interface to dynamically add
// Converters at runtime
}
// @SuppressWarnings( "rawtypes" )
@Override
public Object invoke( final Object proxy, final Method method,
final Object[] args ) throws Throwable
{
if( method.isDefault() )
return ReflectUtil.invokeDefaultMethod( proxy, method, args );
final String beanProp = method.getName();
// LOG.trace( "Calling <{}> {}::{}({})",
// method.getReturnType().getSimpleName(), this.type,
// method.getName(), args == null ? "" : args );
switch( args == null ? 0 : args.length )
{
case 0:
if( beanProp.equals( Wrapper.UNWRAP_PROPERTY ) )
return this.bean.get( Wrapper.WRAP_PROPERTY );
if( beanProp.equals( "toString" ) )
{
if( Wrapper.class.isAssignableFrom( this.type ) )
return this.bean.get( Wrapper.WRAP_PROPERTY )
.toString();
JsonUtil.checkRegistered( JsonUtil.getJOM(), this.type );
return this.bean.toString();
}
if( beanProp.equals( "hashCode" ) ) return this.bean.hashCode();
// ! can't intercept call to native method
// if (method.getName().equals("getClass"))
// return this.type;
Object result = this.bean.any().get( beanProp );
if( result == null ) // no value currently
{
if( this.config != null ) // obtain value from config
{
// cache (immutable) result
result = method.invoke( this.config, args );
if( this.config instanceof Mutable == false )
this.bean.any().put( beanProp, result );
} else
try
{
return ReflectUtil.invokeAsBean(
this.bean.dynamicProperties, this.type,
method, args );
} catch( final Exception e )
{
// ignoring non-bean method
}
}
return result;
case 1:
if( beanProp.equals( "equals" ) )
return this.bean.equals( args[0] );
final DynaBean.BeanProxy annot = this.type
.getAnnotation( DynaBean.BeanProxy.class );
if( beanProp.equals( "compareTo" ) && annot != null )
return DynaBean.getComparator( annot ).compare(
(Comparable) this.bean, (Comparable) args[0] );
// assume setter method, e.g. void setVal()
if( method.getParameterTypes().length == 1
&& method.getParameterTypes()[0]
.isAssignableFrom( args[0].getClass() )
//&& method.getName().startsWith( "set" ) )
//&& method.getReturnType().equals( Void.TYPE )
) try
{
return ReflectUtil.invokeAsBean(
this.bean.dynamicProperties, this.type, method,
args[0] );
} catch( final Exception e )
{
// non-bean method, assume setter, e.g. val(..), withVal(..)
return this.bean.set( beanProp, args[0] );
}
LOG.warn( "{} ({}) unknown: {}#{}({})",
DynaBean.class.getSimpleName(),
method.getReturnType().isPrimitive() ? "primitive"
: "Object",
this.type, beanProp, Arrays.asList( args ) );
break;
}
if( this.config != null )
{
// LOG.trace("Passing call to Config");
return method.invoke( this.config, args );
}
// if( method.getReturnType().equals( Void.TYPE ) )
// {
// LOG.warn( "Ignoring call to: void " + this.type.getSimpleName()
// + "#" + beanProp + "(" + Arrays.asList( args ) + ")" );
// return null;
// }
throw ExceptionFactory.createUnchecked(
"{} ({}) value not set: {}#{}({})",
DynaBean.class.getSimpleName(),
method.getReturnType().isPrimitive() ? "primitive"
: "Object",
this.type.getSimpleName(), beanProp,
Arrays.asList( args ) );
}
}
/**
* @param <T>
* @param wrapperType
* @return
*/
static final <T> JsonSerializer<T>
createJsonSerializer( final Class<T> type )
{
return new JsonSerializer<T>()
{
@Override
public void serialize( final T value, final JsonGenerator jgen,
final SerializerProvider serializers )
throws IOException, JsonProcessingException
{
// non-Proxy objects get default treatment
if( !Proxy.isProxyClass( value.getClass() ) )
{
@SuppressWarnings( "unchecked" )
final JsonSerializer<T> ser = (JsonSerializer<T>) serializers
.findValueSerializer( value.getClass() );
if( ser != this )
ser.serialize( value, jgen, serializers );
else
LOG.warn( "Problem serializing: {}", value );
return;
}
// BeanWrapper gets special treatment
if( DynaBeanInvocationHandler.class
.isInstance( Proxy.getInvocationHandler( value ) ) )
{
final DynaBeanInvocationHandler handler = (DynaBeanInvocationHandler) Proxy
.getInvocationHandler( value );
// Wrapper extensions get special treatment
if( Wrapper.class.isAssignableFrom( handler.type ) )
{
final Object wrap = handler.bean.get( "wrap" );
serializers.findValueSerializer( wrap.getClass(), null )
.serialize( wrap, jgen, serializers );
return;
}
// Config (Accessible) extensions get special treatment
else if( Accessible.class.isAssignableFrom( handler.type ) )
{
final Map<String, Object> copy = new HashMap<>(
handler.bean.any() );
final Accessible config = (Accessible) handler.config;
for( String key : config.propertyNames() )
copy.put( key, config.getProperty( key ) );
serializers.findValueSerializer( copy.getClass(), null )
.serialize( copy, jgen, serializers );
return;
} else if( Config.class.isAssignableFrom( handler.type ) )
throw new JsonGenerationException(
"BeanWrapper should extend "
+ Accessible.class.getName()
+ " required for serialization: "
+ Arrays.asList(
handler.type.getInterfaces() ),
jgen );
// BeanWrappers that do not extend OWNER API's Config
serializers
.findValueSerializer( handler.bean.getClass(),
null )
.serialize( handler.bean, jgen, serializers );
return;
}
// Config (Accessible) gets special treatment
if( Accessible.class.isInstance( value ) )
{
final Accessible config = (Accessible) value;
final Properties entries = new Properties();
for( String key : config.propertyNames() )
entries.put( key, config.getProperty( key ) );
serializers.findValueSerializer( entries.getClass(), null )
.serialize( entries, jgen, serializers );
return;
}
if( Config.class.isInstance( value ) )
throw new JsonGenerationException(
"Config should extend " + Accessible.class.getName()
+ " required for serialization: "
+ Arrays.asList(
value.getClass().getInterfaces() ),
jgen );
throw new JsonGenerationException(
"No serializer found for proxy of: " + Arrays
.asList( value.getClass().getInterfaces() ),
jgen );
}
};
}
/**
* @param referenceType
* @param <S>
* @param <T>
* @return
*/
@SafeVarargs
static final <S, T> JsonDeserializer<T> createJsonDeserializer(
final ObjectMapper om, final Class<T> resultType,
final Map<String, ?>... imports )
{
return new JsonDeserializer<T>()
{
@Override
public T deserializeWithType( final JsonParser jp,
final DeserializationContext ctxt,
final TypeDeserializer typeDeserializer )
throws IOException, JsonProcessingException
{
return deserialize( jp, ctxt );
}
@Override
public T deserialize( final JsonParser jp,
final DeserializationContext ctxt )
throws IOException, JsonProcessingException
{
if( jp.getCurrentToken() == JsonToken.VALUE_NULL ) return null;
// if( Wrapper.class.isAssignableFrom( resultType ) )
// {
// // FIXME
// LOG.trace( "deser wrapper intf of {}", jp.getText() );
// return (T) Wrapper.Util.valueOf( jp.getText(),
// resultType.asSubclass( Wrapper.class ) );
// }
if( Config.class.isAssignableFrom( resultType ) )
{
final Map<String, Object> entries = jp.readValueAs(
new TypeReference<Map<String, Object>>()
{
} );
final Iterator<Entry<String, Object>> it = entries
.entrySet().iterator();
for( Entry<String, Object> next = null; it
.hasNext(); next = it.next() )
if( next != null && next.getValue() == null )
{
LOG.trace( "Ignoring null value: {}", next );
it.remove();
}
return resultType.cast( ConfigFactory.create(
resultType.asSubclass( Config.class ), entries ) );
}
// else if (Config.class.isAssignableFrom(resultType))
// throw new JsonGenerationException(
// "Config does not extend "+Mutable.class.getName()+" required for deserialization: "
// + Arrays.asList(resultType
// .getInterfaces()));
// can't parse directly to interface type
final DynaBean bean = new DynaBean();
final TreeNode tree = jp.readValueAsTree();
// override attributes as defined in interface getters
final Set<String> attributes = new HashSet<>();
for( Method method : resultType.getMethods() )
{
if( method.getReturnType().equals( Void.TYPE )
|| method.getParameterTypes().length != 0 )
continue;
final String attribute = method.getName();
if( attribute.equals( "toString" )
|| attribute.equals( "hashCode" ) )
continue;
attributes.add( attribute );
final TreeNode value = tree.get( attribute );// bean.any().get(attributeName);
if( value == null ) continue;
bean.set( method.getName(),
om.treeToValue( value, JsonUtil.checkRegistered( om,
method.getReturnType(), imports ) ) );
}
if( tree.isObject() )
{
// keep superfluous properties as TreeNodes, just in case
final Iterator<String> fieldNames = tree.fieldNames();
while( fieldNames.hasNext() )
{
final String fieldName = fieldNames.next();
if( !attributes.contains( fieldName ) )
bean.set( fieldName, tree.get( fieldName ) );
}
} else if( tree.isValueNode() )
{
for( Class<?> type : resultType.getInterfaces() )
for( Method method : type.getDeclaredMethods() )
{
// LOG.trace( "Scanning {}", method );
if( method
.isAnnotationPresent( JsonProperty.class ) )
{
final String property = method
.getAnnotation( JsonProperty.class )
.value();
// LOG.trace( "Setting {}: {}", property,
// ((ValueNode) tree).textValue() );
bean.set( property,
((ValueNode) tree).textValue() );
}
}
} else
throw ExceptionFactory.createUnchecked(
"Expected {} but parsed: {}", resultType,
tree.getClass() );
return DynaBean.proxyOf( om, resultType, bean, imports );
}
};
}
/** */
@SafeVarargs
public static <T> void registerType( final ObjectMapper om,
final Class<T> type, final Map<String, ?>... imports )
{
// TODO implement dynamic generic Converter(s) for JSON bean
// properties ?
// if (Config.class.isAssignableFrom(type))
// {
// final Class<?> editorType = new
// JsonPropertyEditor<T>().getClass();
// PropertyEditorManager.registerEditor(type, editorType);
// LOG.trace("Registered " + editorType + " - "
// + PropertyEditorManager.findEditor(type));
// }
om.registerModule( new SimpleModule()
.addSerializer( type, createJsonSerializer( type ) )
.addDeserializer( type,
createJsonDeserializer( om, type, imports ) ) );
}
/** */
private static final Map<BeanProxy, Comparator<?>> COMPARATOR_CACHE = new TreeMap<>();
/**
* @param annot the {@link BeanProxy} instance for the type of wrapper of
* {@link DynaBean}s containing the {@link Comparable} value type
* in the annotated property key
* @return a (cached) comparator
*/
@SuppressWarnings( { "unchecked"/* , "rawtypes" */ } )
public static <S extends Comparable> Comparator<S>
getComparator( final BeanProxy annot )
{
if( annot.comparableOn().isEmpty() ) return null;
synchronized( COMPARATOR_CACHE )
{
Comparator<S> result = (Comparator<S>) COMPARATOR_CACHE
.get( annot );
if( result == null )
{
result = new Comparator<S>()
{
@Override
public int compare( final S o1, final S o2 )
{
final S key1 = (S) ((DynaBeanInvocationHandler) Proxy
.getInvocationHandler( o1 )).bean.any()
.get( annot.comparableOn() );
final S key2 = (S) ((DynaBeanInvocationHandler) Proxy
.getInvocationHandler( o2 )).bean.any()
.get( annot.comparableOn() );
return key1.compareTo( key2 );
}
};
LOG.trace( "Created comparator for " + annot );
COMPARATOR_CACHE.put( annot, result );
}
return result;
}
}
/**
* @param type the type of {@link Proxy} to generate
* @param imports default value {@link Properties} of the bean
* @return a {@link Proxy} instance backed by an empty {@link DynaBean}
*/
@SafeVarargs
public static <T> T proxyOf( final Class<T> type,
final Map<String, ?>... imports )
{
return proxyOf( type, new DynaBean(), imports );
}
/**
* @param type the type of {@link Proxy} to generate
* @param bean the (prepared) {@link DynaBean} for proxied getters/setters
* @param imports default value {@link Properties} of the bean
* @return a {@link Proxy} instance backed by an empty {@link DynaBean}
*/
@SafeVarargs
protected static <T> T proxyOf( final Class<T> type, final DynaBean bean,
final Map<String, ?>... imports )
{
return proxyOf( JsonUtil.getJOM(), type, bean, imports );
}
/**
* @param om the {@link ObjectMapper} for get and set de/serialization
* @param type the type of {@link Proxy} to generate
* @param bean the (prepared) {@link DynaBean} for proxied getters/setters
* @param imports default value {@link Properties} of the bean
* @return a {@link Proxy} instance backed by an empty {@link DynaBean}
*/
@SuppressWarnings( "unchecked" )
protected static <T> T proxyOf( final ObjectMapper om, final Class<T> type,
final DynaBean bean, final Map<String, ?>... imports )
{
// if( !type.isAnnotationPresent( BeanProxy.class ) )
// throw ExceptionFactory.createUnchecked( "{} is not a @{}", type,
// BeanProxy.class.getSimpleName() );
return (T) Proxy.newProxyInstance( type.getClassLoader(),
new Class[]
{ type }, new DynaBeanInvocationHandler( om, type, bean, imports ) );
}
public static Class<?> typeOf( final Object proxy )
{
return ((DynaBeanInvocationHandler) Proxy
.getInvocationHandler( proxy )).type;
}
/**
* {@link ProxyProvider}
*
* @param <T>
* @version $Id: 9aa95f266f44246a9109c47c32971f2658d49c57 $
* @author Rick van Krevelen
*/
public static class ProxyProvider<T> implements Provider<T>
{
/** cache of type arguments for known {@link Proxy} sub-types */
private static final Map<Class<?>, List<Class<?>>> PROXY_TYPE_ARGUMENT_CACHE = new HashMap<>();
/**
* @param proxyType should be a non-abstract concrete {@link Class} that
* has a public zero-arg constructor
* @return the new {@link ProxyProvider} instance
*/
@SafeVarargs
public static <T> ProxyProvider<T> of( final Class<T> proxyType,
final Map<String, ?>... imports )
{
return of( JsonUtil.getJOM(), proxyType, imports );
}
/**
* @param om the {@link ObjectMapper} for get and set de/serialization
* @param proxyType should be a non-abstract concrete {@link Class} that
* has a public zero-arg constructor
* @return the new {@link ProxyProvider} instance
*/
@SafeVarargs
public static <T> ProxyProvider<T> of( final ObjectMapper om,
final Class<T> proxyType, final Map<String, ?>... imports )
{
return new ProxyProvider<T>( om, proxyType, new DynaBean(),
imports );
}
/**
* @param om the {@link ObjectMapper} for get and set de/serialization
* @param beanType should be a non-abstract concrete {@link Class} that
* has a public zero-arg constructor
* @param cache the {@link Map} of previously created instances
* @return the cached (new) {@link ProxyProvider} instance
*/
@SafeVarargs
public static <T> ProxyProvider<T> of( final ObjectMapper om,
final Class<T> beanType,
final Map<Class<?>, ProxyProvider<?>> cache,
final Map<String, ?>... imports )
{
if( cache == null ) return of( om, beanType, imports );
synchronized( cache )
{
@SuppressWarnings( "unchecked" )
ProxyProvider<T> result = (ProxyProvider<T>) cache
.get( beanType );
if( result == null )
{
result = of( om, beanType, imports );
cache.put( beanType, result );
}
return result;
}
}
/** */
private final ObjectMapper om;
/** */
private final Class<T> proxyType;
/** */
private final DynaBean bean;
/** */
private final Map<String, ?>[] imports;
/**
* {@link ProxyProvider} constructor
*
* @param om
* @param proxyType
* @param bean the (possibly prepared) {@link DynaBean}
* @param imports
*/
@SafeVarargs
public ProxyProvider( final ObjectMapper om, final Class<T> proxyType,
final DynaBean bean, final Map<String, ?>... imports )
{
this.om = om;
this.proxyType = proxyType;
this.bean = bean;
this.imports = imports;
}
@Override
public T get()
{
try
{
@SuppressWarnings( "unchecked" )
final Class<T> proxyType = this.proxyType == null
? (Class<T>) TypeArguments.of( ProxyProvider.class,
getClass(), PROXY_TYPE_ARGUMENT_CACHE ).get( 0 )
: this.proxyType;
return DynaBean.proxyOf( this.om, proxyType, this.bean,
this.imports );
} catch( final Throwable e )
{
Thrower.rethrowUnchecked( e );
return null;
}
}
}
// /**
// * {@link Builder}
// *
// * @param <T> the result type
// * @param <THIS> the builder type
// * @version $Id: 9aa95f266f44246a9109c47c32971f2658d49c57 $
// * @author Rick van Krevelen
// */
// public static class Builder<T, THIS extends Builder<T, THIS>>
// extends ProxyProvider<T>
// {
//
// /** */
// private final DynaBean bean;
//
// /**
// * {@link Builder} constructor, to be extended by a public zero-arg
// * constructor in concrete sub-types
// */
// protected Builder( final Properties... imports )
// {
// this( JsonUtil.getJOM(), new DynaBean(), imports );
// }
//
// /**
// * {@link Builder} constructor, to be extended by a public zero-arg
// * constructor in concrete sub-types
// */
// protected Builder( final ObjectMapper om, final Properties... imports )
// {
// this( om, new DynaBean(), imports );
// }
//
// /**
// * {@link Builder} constructor, to be extended by a public zero-arg
// * constructor in concrete sub-types
// */
// protected Builder( final ObjectMapper om, final DynaBean bean,
// final Properties... imports )
// {
// super( om, null, bean, imports );
// this.bean = bean;
// }
//
// /**
// * helper-method
// *
// * @param key
// * @param returnType
// * @return the currently set value, or {@code null} if not set
// */
// protected <S> S get( final String key, final Class<S> returnType )
// {
// return returnType.cast( this.bean.get( key ) );
// }
//
// /**
// * @param key
// * @param value
// * @return
// */
// @SuppressWarnings( "unchecked" )
// public THIS with( final String key, final Object value )
// {
// this.bean.set( key, value );
// return (THIS) this;
// }
//
// public THIS with( final String key, final TreeNode value,
// final Class<?> valueType )
// {
// return (THIS) with( key, JsonUtil.valueOf( value, valueType ) );
// }
//
// /**
// * @return this Builder with the immutable bean
// */
// @SuppressWarnings( "unchecked" )
// public THIS lock()
// {
// this.bean.lock();
// return (THIS) this;
// }
//
// /**
// * @return the provided instance of <T>
// */
// public T build()
// {
// return get();
// }
// }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.databricks.fluent.models;
import com.azure.core.annotation.Fluent;
import com.azure.core.annotation.JsonFlatten;
import com.azure.core.management.ProxyResource;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.databricks.models.AddressSpace;
import com.azure.resourcemanager.databricks.models.PeeringProvisioningState;
import com.azure.resourcemanager.databricks.models.PeeringState;
import com.azure.resourcemanager.databricks.models.VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork;
import com.azure.resourcemanager.databricks.models.VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Peerings in a VirtualNetwork resource.
 *
 * <p>AutoRest-generated inner model. The {@code @JsonFlatten} annotation maps
 * the {@code properties.*} JSON property names declared below onto a nested
 * {@code "properties"} object in the wire format. The only required property
 * is {@code remoteVirtualNetwork} (enforced by {@link #validate()});
 * {@code peeringState} and {@code provisioningState} are read-only
 * (service-populated, never serialized).
 */
@JsonFlatten
@Fluent
public class VirtualNetworkPeeringInner extends ProxyResource {
    // excluded from serialization; used only to raise validation errors
    @JsonIgnore private final ClientLogger logger = new ClientLogger(VirtualNetworkPeeringInner.class);

    /*
     * Whether the VMs in the local virtual network space would be able to
     * access the VMs in remote virtual network space.
     */
    @JsonProperty(value = "properties.allowVirtualNetworkAccess")
    private Boolean allowVirtualNetworkAccess;

    /*
     * Whether the forwarded traffic from the VMs in the local virtual network
     * will be allowed/disallowed in remote virtual network.
     */
    @JsonProperty(value = "properties.allowForwardedTraffic")
    private Boolean allowForwardedTraffic;

    /*
     * If gateway links can be used in remote virtual networking to link to
     * this virtual network.
     */
    @JsonProperty(value = "properties.allowGatewayTransit")
    private Boolean allowGatewayTransit;

    /*
     * If remote gateways can be used on this virtual network. If the flag is
     * set to true, and allowGatewayTransit on remote peering is also true,
     * virtual network will use gateways of remote virtual network for transit.
     * Only one peering can have this flag set to true. This flag cannot be set
     * if virtual network already has a gateway.
     */
    @JsonProperty(value = "properties.useRemoteGateways")
    private Boolean useRemoteGateways;

    /*
     * The remote virtual network should be in the same region. See here to
     * learn more
     * (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering).
     */
    @JsonProperty(value = "properties.databricksVirtualNetwork")
    private VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork databricksVirtualNetwork;

    /*
     * The reference to the databricks virtual network address space.
     */
    @JsonProperty(value = "properties.databricksAddressSpace")
    private AddressSpace databricksAddressSpace;

    /*
     * The remote virtual network should be in the same region. See here to
     * learn more
     * (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering).
     */
    @JsonProperty(value = "properties.remoteVirtualNetwork", required = true)
    private VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork remoteVirtualNetwork;

    /*
     * The reference to the remote virtual network address space.
     */
    @JsonProperty(value = "properties.remoteAddressSpace")
    private AddressSpace remoteAddressSpace;

    /*
     * The status of the virtual network peering.
     */
    @JsonProperty(value = "properties.peeringState", access = JsonProperty.Access.WRITE_ONLY)
    private PeeringState peeringState;

    /*
     * The provisioning state of the virtual network peering resource.
     */
    @JsonProperty(value = "properties.provisioningState", access = JsonProperty.Access.WRITE_ONLY)
    private PeeringProvisioningState provisioningState;

    /**
     * Get the allowVirtualNetworkAccess property: Whether the VMs in the local virtual network space would be able to
     * access the VMs in remote virtual network space.
     *
     * @return the allowVirtualNetworkAccess value.
     */
    public Boolean allowVirtualNetworkAccess() {
        return this.allowVirtualNetworkAccess;
    }

    /**
     * Set the allowVirtualNetworkAccess property: Whether the VMs in the local virtual network space would be able to
     * access the VMs in remote virtual network space.
     *
     * @param allowVirtualNetworkAccess the allowVirtualNetworkAccess value to set.
     * @return the VirtualNetworkPeeringInner object itself.
     */
    public VirtualNetworkPeeringInner withAllowVirtualNetworkAccess(Boolean allowVirtualNetworkAccess) {
        this.allowVirtualNetworkAccess = allowVirtualNetworkAccess;
        return this;
    }

    /**
     * Get the allowForwardedTraffic property: Whether the forwarded traffic from the VMs in the local virtual network
     * will be allowed/disallowed in remote virtual network.
     *
     * @return the allowForwardedTraffic value.
     */
    public Boolean allowForwardedTraffic() {
        return this.allowForwardedTraffic;
    }

    /**
     * Set the allowForwardedTraffic property: Whether the forwarded traffic from the VMs in the local virtual network
     * will be allowed/disallowed in remote virtual network.
     *
     * @param allowForwardedTraffic the allowForwardedTraffic value to set.
     * @return the VirtualNetworkPeeringInner object itself.
     */
    public VirtualNetworkPeeringInner withAllowForwardedTraffic(Boolean allowForwardedTraffic) {
        this.allowForwardedTraffic = allowForwardedTraffic;
        return this;
    }

    /**
     * Get the allowGatewayTransit property: If gateway links can be used in remote virtual networking to link to this
     * virtual network.
     *
     * @return the allowGatewayTransit value.
     */
    public Boolean allowGatewayTransit() {
        return this.allowGatewayTransit;
    }

    /**
     * Set the allowGatewayTransit property: If gateway links can be used in remote virtual networking to link to this
     * virtual network.
     *
     * @param allowGatewayTransit the allowGatewayTransit value to set.
     * @return the VirtualNetworkPeeringInner object itself.
     */
    public VirtualNetworkPeeringInner withAllowGatewayTransit(Boolean allowGatewayTransit) {
        this.allowGatewayTransit = allowGatewayTransit;
        return this;
    }

    /**
     * Get the useRemoteGateways property: If remote gateways can be used on this virtual network. If the flag is set to
     * true, and allowGatewayTransit on remote peering is also true, virtual network will use gateways of remote virtual
     * network for transit. Only one peering can have this flag set to true. This flag cannot be set if virtual network
     * already has a gateway.
     *
     * @return the useRemoteGateways value.
     */
    public Boolean useRemoteGateways() {
        return this.useRemoteGateways;
    }

    /**
     * Set the useRemoteGateways property: If remote gateways can be used on this virtual network. If the flag is set to
     * true, and allowGatewayTransit on remote peering is also true, virtual network will use gateways of remote virtual
     * network for transit. Only one peering can have this flag set to true. This flag cannot be set if virtual network
     * already has a gateway.
     *
     * @param useRemoteGateways the useRemoteGateways value to set.
     * @return the VirtualNetworkPeeringInner object itself.
     */
    public VirtualNetworkPeeringInner withUseRemoteGateways(Boolean useRemoteGateways) {
        this.useRemoteGateways = useRemoteGateways;
        return this;
    }

    /**
     * Get the databricksVirtualNetwork property: The remote virtual network should be in the same region. See here to
     * learn more
     * (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering).
     *
     * @return the databricksVirtualNetwork value.
     */
    public VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork databricksVirtualNetwork() {
        return this.databricksVirtualNetwork;
    }

    /**
     * Set the databricksVirtualNetwork property: The remote virtual network should be in the same region. See here to
     * learn more
     * (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering).
     *
     * @param databricksVirtualNetwork the databricksVirtualNetwork value to set.
     * @return the VirtualNetworkPeeringInner object itself.
     */
    public VirtualNetworkPeeringInner withDatabricksVirtualNetwork(
        VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork databricksVirtualNetwork) {
        this.databricksVirtualNetwork = databricksVirtualNetwork;
        return this;
    }

    /**
     * Get the databricksAddressSpace property: The reference to the databricks virtual network address space.
     *
     * @return the databricksAddressSpace value.
     */
    public AddressSpace databricksAddressSpace() {
        return this.databricksAddressSpace;
    }

    /**
     * Set the databricksAddressSpace property: The reference to the databricks virtual network address space.
     *
     * @param databricksAddressSpace the databricksAddressSpace value to set.
     * @return the VirtualNetworkPeeringInner object itself.
     */
    public VirtualNetworkPeeringInner withDatabricksAddressSpace(AddressSpace databricksAddressSpace) {
        this.databricksAddressSpace = databricksAddressSpace;
        return this;
    }

    /**
     * Get the remoteVirtualNetwork property: The remote virtual network should be in the same region. See here to learn
     * more
     * (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering).
     *
     * @return the remoteVirtualNetwork value.
     */
    public VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork remoteVirtualNetwork() {
        return this.remoteVirtualNetwork;
    }

    /**
     * Set the remoteVirtualNetwork property: The remote virtual network should be in the same region. See here to learn
     * more
     * (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering).
     *
     * @param remoteVirtualNetwork the remoteVirtualNetwork value to set.
     * @return the VirtualNetworkPeeringInner object itself.
     */
    public VirtualNetworkPeeringInner withRemoteVirtualNetwork(
        VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork remoteVirtualNetwork) {
        this.remoteVirtualNetwork = remoteVirtualNetwork;
        return this;
    }

    /**
     * Get the remoteAddressSpace property: The reference to the remote virtual network address space.
     *
     * @return the remoteAddressSpace value.
     */
    public AddressSpace remoteAddressSpace() {
        return this.remoteAddressSpace;
    }

    /**
     * Set the remoteAddressSpace property: The reference to the remote virtual network address space.
     *
     * @param remoteAddressSpace the remoteAddressSpace value to set.
     * @return the VirtualNetworkPeeringInner object itself.
     */
    public VirtualNetworkPeeringInner withRemoteAddressSpace(AddressSpace remoteAddressSpace) {
        this.remoteAddressSpace = remoteAddressSpace;
        return this;
    }

    /**
     * Get the peeringState property: The status of the virtual network peering.
     *
     * @return the peeringState value.
     */
    public PeeringState peeringState() {
        return this.peeringState;
    }

    /**
     * Get the provisioningState property: The provisioning state of the virtual network peering resource.
     *
     * @return the provisioningState value.
     */
    public PeeringProvisioningState provisioningState() {
        return this.provisioningState;
    }

    /**
     * Validates the instance.
     *
     * <p>Recursively validates all nested model properties that are set, and
     * rejects an unset {@code remoteVirtualNetwork} (the only required
     * property of this model).
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    public void validate() {
        if (databricksVirtualNetwork() != null) {
            databricksVirtualNetwork().validate();
        }
        if (databricksAddressSpace() != null) {
            databricksAddressSpace().validate();
        }
        if (remoteVirtualNetwork() == null) {
            // required property: fail fast with a logged error
            throw logger
                .logExceptionAsError(
                    new IllegalArgumentException(
                        "Missing required property remoteVirtualNetwork in model VirtualNetworkPeeringInner"));
        } else {
            remoteVirtualNetwork().validate();
        }
        if (remoteAddressSpace() != null) {
            remoteAddressSpace().validate();
        }
    }
}
| |
/******************************************************************************
* Confidential Proprietary *
* (c) Copyright Haifeng Li 2011, All Rights Reserved *
******************************************************************************/
package smile.classification;
import java.util.Arrays;
import smile.math.Math;
import smile.data.Attribute;
import smile.data.NumericAttribute;
import smile.util.SmileUtils;
import smile.validation.Accuracy;
import smile.validation.ClassificationMeasure;
/**
 * AdaBoost (Adaptive Boosting) classifier with decision trees. In principle,
 * AdaBoost is a meta-algorithm, and can be used in conjunction with many other
 * learning algorithms to improve their performance. In practice, AdaBoost with
 * decision trees is probably the most popular combination. AdaBoost is adaptive
 * in the sense that subsequent classifiers built are tweaked in favor of those
 * instances misclassified by previous classifiers. AdaBoost is sensitive to
 * noisy data and outliers. However in some problems it can be less susceptible
 * to the over-fitting problem than most learning algorithms.
 * <p>
 * AdaBoost calls a weak classifier repeatedly in a series of rounds from
 * total T classifiers. For each call a distribution of weights is updated
 * that indicates the importance of examples in the data set for the
 * classification. On each round, the weights of each incorrectly classified
 * example are increased (or alternatively, the weights of each correctly
 * classified example are decreased), so that the new classifier focuses more
 * on those examples.
 * <p>
 * The basic AdaBoost algorithm is only for binary classification problems.
 * For multi-class classification, a common approach is reducing the
 * multi-class classification problem to multiple two-class problems.
 * This implementation is a multi-class AdaBoost (the tree weight carries the
 * log(k - 1) bias of Zhu et al.'s multi-class formulation) without such reductions.
 *
 * <h2>References</h2>
 * <ol>
 * <li> Yoav Freund, Robert E. Schapire. A Decision-Theoretic Generalization of on-Line Learning and an Application to Boosting, 1995.</li>
 * <li> Ji Zhu, Hui Zhou, Saharon Rosset and Trevor Hastie. Multi-class Adaboost, 2009.</li>
 * </ol>
 *
 * @author Haifeng Li
 */
public class AdaBoost implements Classifier<double[]> {
    /**
     * The number of classes.
     */
    private int k;
    /**
     * Forest of decision trees.
     */
    private DecisionTree[] trees;
    /**
     * The weight of each decision tree.
     */
    private double[] alpha;
    /**
     * The weighted error of each decision tree during training.
     */
    private double[] error;
    /**
     * Variable importance. Every time a split of a node is made on variable
     * the (GINI, information gain, etc.) impurity criterion for the two
     * descendent nodes is less than the parent node. Adding up the decreases
     * for each individual variable over all trees in the forest gives a fast
     * variable importance that is often very consistent with the permutation
     * importance measure.
     */
    private double[] importance;

    /**
     * Trainer for AdaBoost classifiers.
     */
    public static class Trainer extends ClassifierTrainer<double[]> {
        /**
         * The number of trees.
         */
        private int T = 500;
        /**
         * The maximum number of leaf nodes in the tree.
         */
        private int J = 2;

        /**
         * Constructor.
         *
         * @param T the number of trees.
         */
        public Trainer(int T) {
            if (T < 1) {
                throw new IllegalArgumentException("Invalid number of trees: " + T);
            }
            this.T = T;
        }

        /**
         * Constructor.
         *
         * @param attributes the attributes of independent variable.
         * @param T the number of trees.
         */
        public Trainer(Attribute[] attributes, int T) {
            super(attributes);
            if (T < 1) {
                throw new IllegalArgumentException("Invalid number of trees: " + T);
            }
            this.T = T;
        }

        /**
         * Sets the number of trees in the ensemble.
         * @param T the number of trees.
         */
        public void setNumTrees(int T) {
            if (T < 1) {
                throw new IllegalArgumentException("Invalid number of trees: " + T);
            }
            this.T = T;
        }

        /**
         * Sets the maximum number of leaf nodes in the tree.
         * @param J the maximum number of leaf nodes in the tree.
         */
        public void setMaximumLeafNodes(int J) {
            if (J < 2) {
                throw new IllegalArgumentException("Invalid number of leaf nodes: " + J);
            }
            this.J = J;
        }

        @Override
        public AdaBoost train(double[][] x, int[] y) {
            return new AdaBoost(attributes, x, y, T, J);
        }
    }

    /**
     * Constructor. Learns AdaBoost with decision stumps.
     *
     * @param x the training instances.
     * @param y the response variable.
     * @param T the number of trees.
     */
    public AdaBoost(double[][] x, int[] y, int T) {
        this(null, x, y, T);
    }

    /**
     * Constructor. Learns AdaBoost with decision trees.
     *
     * @param x the training instances.
     * @param y the response variable.
     * @param T the number of trees.
     * @param J the maximum number of leaf nodes in the trees.
     */
    public AdaBoost(double[][] x, int[] y, int T, int J) {
        this(null, x, y, T, J);
    }

    /**
     * Constructor. Learns AdaBoost with decision stumps.
     *
     * @param attributes the attribute properties.
     * @param x the training instances.
     * @param y the response variable.
     * @param T the number of trees.
     */
    public AdaBoost(Attribute[] attributes, double[][] x, int[] y, int T) {
        this(attributes, x, y, T, 2);
    }

    /**
     * Constructor.
     *
     * @param attributes the attribute properties.
     * @param x the training instances.
     * @param y the response variable in [0, k), where k is the number of classes.
     * @param T the number of trees.
     * @param J the maximum number of leaf nodes in the trees.
     */
    public AdaBoost(Attribute[] attributes, double[][] x, int[] y, int T, int J) {
        if (x.length != y.length) {
            throw new IllegalArgumentException(String.format("The sizes of X and Y don't match: %d != %d", x.length, y.length));
        }

        if (T < 1) {
            throw new IllegalArgumentException("Invalid number of trees: " + T);
        }

        if (J < 2) {
            throw new IllegalArgumentException("Invalid maximum leaves: " + J);
        }

        // Class labels must be exactly 0, 1, ..., k-1 with no gaps.
        int[] labels = Math.unique(y);
        Arrays.sort(labels);

        for (int i = 0; i < labels.length; i++) {
            if (labels[i] < 0) {
                throw new IllegalArgumentException("Negative class label: " + labels[i]);
            }

            if (i > 0 && labels[i] - labels[i-1] > 1) {
                // Report the first absent label in the gap. (The original message used
                // unparenthesized "labels[i]+1", which string-concatenated the digit '1'
                // instead of adding 1, and named a label above the gap.)
                throw new IllegalArgumentException("Missing class: " + (labels[i-1] + 1));
            }
        }

        k = labels.length;
        if (k < 2) {
            throw new IllegalArgumentException("Only one class.");
        }

        if (attributes == null) {
            int p = x[0].length;
            attributes = new Attribute[p];
            for (int i = 0; i < p; i++) {
                attributes[i] = new NumericAttribute("V" + (i + 1));
            }
        }

        // Pre-sorted column order, shared by every tree to speed up split search.
        int[][] order = SmileUtils.sort(attributes, x);

        int n = x.length;
        int[] samples = new int[n];
        double[] w = new double[n];
        boolean[] err = new boolean[n];
        for (int i = 0; i < n; i++) {
            w[i] = 1.0;
        }

        double guess = 1.0 / k; // accuracy of random guess.
        double b = Math.log(k - 1); // the bias to tree weight in the multi-class case.

        trees = new DecisionTree[T];
        alpha = new double[T];
        error = new double[T];
        for (int t = 0; t < T; t++) {
            // Normalize the sample weights into a probability distribution.
            double W = Math.sum(w);
            for (int i = 0; i < n; i++) {
                w[i] /= W;
            }

            // Draw a weighted bootstrap sample and fit the weak learner on it.
            Arrays.fill(samples, 0);
            int[] rand = Math.random(w, n);
            for (int s : rand) {
                samples[s]++;
            }

            trees[t] = new DecisionTree(attributes, x, y, J, samples, order, DecisionTree.SplitRule.GINI);

            for (int i = 0; i < n; i++) {
                err[i] = trees[t].predict(x[i]) != y[i];
            }

            double e = 0.0; // weighted error
            for (int i = 0; i < n; i++) {
                if (err[i]) {
                    e += w[i];
                }
            }

            // Stop early when the learner is no better than random guessing;
            // truncate the ensemble to the trees fitted so far.
            if (1 - e <= guess) {
                System.err.format("Weak classifier %d makes %.2f%% weighted error\n", t, 100*e);
                trees = Arrays.copyOf(trees, t);
                alpha = Arrays.copyOf(alpha, t);
                error = Arrays.copyOf(error, t);
                break;
            }

            error[t] = e;
            alpha[t] = Math.log((1-e)/Math.max(1E-10,e)) + b;
            // Up-weight misclassified samples for the next boosting round.
            double a = Math.exp(alpha[t]);
            for (int i = 0; i < n; i++) {
                if (err[i]) {
                    w[i] *= a;
                }
            }
        }

        importance = new double[attributes.length];
        for (DecisionTree tree : trees) {
            double[] imp = tree.importance();
            for (int i = 0; i < imp.length; i++) {
                importance[i] += imp[i];
            }
        }
    }

    /**
     * Returns the variable importance. Every time a split of a node is made
     * on variable the (GINI, information gain, etc.) impurity criterion for
     * the two descendent nodes is less than the parent node. Adding up the
     * decreases for each individual variable over all trees in the forest
     * gives a simple measure of variable importance.
     *
     * @return the variable importance
     */
    public double[] importance() {
        return importance;
    }

    /**
     * Returns the number of trees in the model.
     *
     * @return the number of trees in the model
     */
    public int size() {
        return trees.length;
    }

    /**
     * Trims the tree model set to a smaller size in case of over-fitting.
     * Or if extra decision trees in the model don't improve the performance,
     * we may remove them to reduce the model size and also improve the speed of
     * prediction.
     *
     * @param T the new (smaller) size of tree model set.
     */
    public void trim(int T) {
        if (T > trees.length) {
            throw new IllegalArgumentException("The new model size is larger than the current size.");
        }

        if (T <= 0) {
            throw new IllegalArgumentException("Invalid new model size: " + T);
        }

        if (T < trees.length) {
            trees = Arrays.copyOf(trees, T);
            alpha = Arrays.copyOf(alpha, T);
            error = Arrays.copyOf(error, T);
        }
    }

    @Override
    public int predict(double[] x) {
        if (k == 2) {
            // Binary case: accumulate a signed margin. Tree outputs {0, 1} are mapped
            // to {-1, +1} so each tree votes for or against class 1 with weight alpha.
            // (The original summed alpha * predict(x) directly; since every surviving
            // tree has e < 0.5 and hence alpha > 0, a single vote for class 1 forced
            // the prediction to 1 regardless of all other trees.)
            double y = 0.0;
            for (int i = 0; i < trees.length; i++) {
                y += alpha[i] * (2 * trees[i].predict(x) - 1);
            }

            return y > 0 ? 1 : 0;
        } else {
            // Multi-class: weighted plurality vote.
            double[] y = new double[k];
            for (int i = 0; i < trees.length; i++) {
                y[trees[i].predict(x)] += alpha[i];
            }

            return Math.whichMax(y);
        }
    }

    /**
     * Predicts the class label of an instance and also calculate a posteriori
     * probabilities. Not supported.
     */
    @Override
    public int predict(double[] x, double[] posteriori) {
        throw new UnsupportedOperationException("Not supported.");
    }

    /**
     * Test the model on a validation dataset.
     *
     * @param x the test data set.
     * @param y the test data response values.
     * @return accuracies with first 1, 2, ..., decision trees.
     */
    public double[] test(double[][] x, int[] y) {
        int T = trees.length;
        double[] accuracy = new double[T];

        int n = x.length;
        int[] label = new int[n];

        Accuracy measure = new Accuracy();

        if (k == 2) {
            // Same signed-margin vote as predict(double[]).
            double[] prediction = new double[n];
            for (int i = 0; i < T; i++) {
                for (int j = 0; j < n; j++) {
                    prediction[j] += alpha[i] * (2 * trees[i].predict(x[j]) - 1);
                    label[j] = prediction[j] > 0 ? 1 : 0;
                }
                accuracy[i] = measure.measure(y, label);
            }
        } else {
            double[][] prediction = new double[n][k];
            for (int i = 0; i < T; i++) {
                for (int j = 0; j < n; j++) {
                    prediction[j][trees[i].predict(x[j])] += alpha[i];
                    label[j] = Math.whichMax(prediction[j]);
                }
                accuracy[i] = measure.measure(y, label);
            }
        }

        return accuracy;
    }

    /**
     * Test the model on a validation dataset.
     *
     * @param x the test data set.
     * @param y the test data labels.
     * @param measures the performance measures of classification.
     * @return performance measures with first 1, 2, ..., decision trees.
     */
    public double[][] test(double[][] x, int[] y, ClassificationMeasure[] measures) {
        int T = trees.length;
        int m = measures.length;
        double[][] results = new double[T][m];

        int n = x.length;
        int[] label = new int[n];

        if (k == 2) {
            // Same signed-margin vote as predict(double[]).
            double[] prediction = new double[n];
            for (int i = 0; i < T; i++) {
                for (int j = 0; j < n; j++) {
                    prediction[j] += alpha[i] * (2 * trees[i].predict(x[j]) - 1);
                    label[j] = prediction[j] > 0 ? 1 : 0;
                }

                for (int j = 0; j < m; j++) {
                    results[i][j] = measures[j].measure(y, label);
                }
            }
        } else {
            double[][] prediction = new double[n][k];
            for (int i = 0; i < T; i++) {
                for (int j = 0; j < n; j++) {
                    prediction[j][trees[i].predict(x[j])] += alpha[i];
                    label[j] = Math.whichMax(prediction[j]);
                }

                for (int j = 0; j < m; j++) {
                    results[i][j] = measures[j].measure(y, label);
                }
            }
        }

        return results;
    }
}
| |
package com.xtremelabs.robolectric.bytecode;
import android.net.Uri;
import com.xtremelabs.robolectric.internal.DoNotStrip;
import javassist.*;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
@SuppressWarnings({"UnusedDeclaration"})
public class AndroidTranslator implements Translator {
    /**
     * IMPORTANT -- increment this number when the bytecode generated for modified classes changes
     * so the cache file can be invalidated.
     */
    public static final int CACHE_VERSION = 19;
    // Global registry of handlers; the patched static initializer of RobolectricInternals
    // looks its handler up here by index (see injectClassHandlerToInstrumentedClasses).
    private static final List<ClassHandler> CLASS_HANDLERS = new ArrayList<ClassHandler>();
    // Handler that instruments classes loaded through this translator.
    private ClassHandler classHandler;
    // Cache of instrumented bytecode, keyed by class name.
    private ClassCache classCache;
    public AndroidTranslator(ClassHandler classHandler, ClassCache classCache) {
        this.classHandler = classHandler;
        this.classCache = classCache;
    }
    // Called from generated bytecode; must stay public and static.
    public static ClassHandler getClassHandler(int index) {
        return CLASS_HANDLERS.get(index);
    }
    @Override
    public void start(ClassPool classPool) throws NotFoundException, CannotCompileException {
        injectClassHandlerToInstrumentedClasses(classPool);
    }
    // Registers this translator's handler and patches RobolectricInternals' static
    // initializer so the class fetches that handler (by registry index) when loaded.
    private void injectClassHandlerToInstrumentedClasses(ClassPool classPool) throws NotFoundException, CannotCompileException {
        int index;
        synchronized (CLASS_HANDLERS) {
            CLASS_HANDLERS.add(classHandler);
            index = CLASS_HANDLERS.size() - 1;
        }
        CtClass robolectricInternalsCtClass = classPool.get(RobolectricInternals.class.getName());
        robolectricInternalsCtClass.setModifiers(Modifier.PUBLIC);
        robolectricInternalsCtClass.getClassInitializer().insertBefore("{\n" +
                "classHandler = " + AndroidTranslator.class.getName() + ".getClassHandler(" + index + ");\n" +
                "}");
    }
    @Override
    public void onLoad(ClassPool classPool, String className) throws NotFoundException, CannotCompileException {
        // Instrumentation must be finished before the cache starts writing to disk.
        if (classCache.isWriting()) {
            throw new IllegalStateException("shouldn't be modifying bytecode after we've started writing cache! class=" + className);
        }
        // Some classes (android.net.Uri and its inner classes) have a hand-written
        // "__FromAndroid" substitute; load that replacement instead of instrumenting.
        if (classHasFromAndroidEquivalent(className)) {
            replaceClassWithFromAndroidEquivalent(classPool, className);
            return;
        }
        boolean needsStripping =
                className.startsWith("android.")
                || className.startsWith("com.google.android.maps")
                || className.equals("org.apache.http.impl.client.DefaultRequestDirector");
        CtClass ctClass = classPool.get(className);
        if (needsStripping && !ctClass.hasAnnotation(DoNotStrip.class)) {
            int modifiers = ctClass.getModifiers();
            // Un-final the class so shadows/subclasses can stand in for it.
            if (Modifier.isFinal(modifiers)) {
                ctClass.setModifiers(modifiers & ~Modifier.FINAL);
            }
            if (ctClass.isInterface()) return;
            classHandler.instrument(ctClass);
            fixConstructors(ctClass);
            fixMethods(ctClass);
            try {
                classCache.addClass(className, ctClass.toBytecode());
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    }
    private boolean classHasFromAndroidEquivalent(String className) {
        return className.startsWith(Uri.class.getName());
    }
    // Loads the "__FromAndroid" variant under the requested name and rewrites every
    // internal reference so the substitute is self-consistent.
    private void replaceClassWithFromAndroidEquivalent(ClassPool classPool, String className) throws NotFoundException {
        FromAndroidClassNameParts classNameParts = new FromAndroidClassNameParts(className);
        if (classNameParts.isFromAndroid()) return;
        String from = classNameParts.getNameWithFromAndroid();
        CtClass ctClass = classPool.getAndRename(from, className);
        ClassMap map = new ClassMap() {
            @Override
            public Object get(Object jvmClassName) {
                FromAndroidClassNameParts classNameParts = new FromAndroidClassNameParts(jvmClassName.toString());
                if (classNameParts.isFromAndroid()) {
                    return classNameParts.getNameWithoutFromAndroid();
                } else {
                    return jvmClassName;
                }
            }
        };
        ctClass.replaceClassName(map);
    }
    // Splits a class name into its outer-class prefix and "$Inner" suffix so the
    // "__FromAndroid" token can be added to / removed from the outer name only.
    class FromAndroidClassNameParts {
        private static final String TOKEN = "__FromAndroid";
        private String prefix;
        private String suffix;
        FromAndroidClassNameParts(String name) {
            int dollarIndex = name.indexOf("$");
            prefix = name;
            suffix = "";
            if (dollarIndex > -1) {
                prefix = name.substring(0, dollarIndex);
                suffix = name.substring(dollarIndex);
            }
        }
        public boolean isFromAndroid() {
            return prefix.endsWith(TOKEN);
        }
        public String getNameWithFromAndroid() {
            return prefix + TOKEN + suffix;
        }
        public String getNameWithoutFromAndroid() {
            return prefix.replace(TOKEN, "") + suffix;
        }
    }
    // Adds a public static boolean field if absent. NOTE(review): not called from any
    // code visible in this file; presumably used reflectively or kept for tooling.
    private void addBypassShadowField(CtClass ctClass, String fieldName) {
        try {
            try {
                ctClass.getField(fieldName);
            } catch (NotFoundException e) {
                CtField field = new CtField(CtClass.booleanType, fieldName, ctClass);
                field.setModifiers(java.lang.reflect.Modifier.PUBLIC | java.lang.reflect.Modifier.STATIC);
                ctClass.addField(field);
            }
        } catch (CannotCompileException e) {
            throw new RuntimeException(e);
        }
    }
    // Replaces every constructor body with the generated interception stub, and adds a
    // default no-arg constructor when the class does not declare one.
    private void fixConstructors(CtClass ctClass) throws CannotCompileException, NotFoundException {
        boolean hasDefault = false;
        for (CtConstructor ctConstructor : ctClass.getConstructors()) {
            try {
                fixConstructor(ctClass, hasDefault, ctConstructor);
                if (ctConstructor.getParameterTypes().length == 0) {
                    hasDefault = true;
                }
            } catch (Exception e) {
                throw new RuntimeException("problem instrumenting " + ctConstructor, e);
            }
        }
        if (!hasDefault) {
            String methodBody = generateConstructorBody(ctClass, new CtClass[0]);
            ctClass.addConstructor(CtNewConstructor.make(new CtClass[0], new CtClass[0], "{\n" + methodBody + "}\n", ctClass));
        }
    }
    // Overwrites one constructor body. NOTE(review): returns its needsDefault argument
    // unchanged; the return value is ignored by the caller above.
    private boolean fixConstructor(CtClass ctClass, boolean needsDefault, CtConstructor ctConstructor) throws NotFoundException, CannotCompileException {
        String methodBody = generateConstructorBody(ctClass, ctConstructor.getParameterTypes());
        ctConstructor.setBody("{\n" + methodBody + "}\n");
        return needsDefault;
    }
    // Constructor bodies reuse the method-interception template with a synthetic
    // void "<init>" method.
    private String generateConstructorBody(CtClass ctClass, CtClass[] parameterTypes) throws NotFoundException {
        return generateMethodBody(ctClass,
                new CtMethod(CtClass.voidType, "<init>", parameterTypes, ctClass),
                CtClass.voidType,
                Type.VOID,
                false,
                false);
    }
    // Instruments all declared methods, plus the (possibly inherited) equals/hashCode/
    // toString so they can be intercepted as well.
    private void fixMethods(CtClass ctClass) throws NotFoundException, CannotCompileException {
        for (CtMethod ctMethod : ctClass.getDeclaredMethods()) {
            fixMethod(ctClass, ctMethod, true);
        }
        CtMethod equalsMethod = ctClass.getMethod("equals", "(Ljava/lang/Object;)Z");
        CtMethod hashCodeMethod = ctClass.getMethod("hashCode", "()I");
        CtMethod toStringMethod = ctClass.getMethod("toString", "()Ljava/lang/String;");
        fixMethod(ctClass, equalsMethod, false);
        fixMethod(ctClass, hashCodeMethod, false);
        fixMethod(ctClass, toStringMethod, false);
    }
    private String describe(CtMethod ctMethod) throws NotFoundException {
        return Modifier.toString(ctMethod.getModifiers()) + " " + ctMethod.getReturnType().getSimpleName() + " " + ctMethod.getLongName();
    }
    // Rewrites a single method so calls are routed through RobolectricInternals. The
    // strategy depends on where the method comes from and whether it had a body:
    //  - declared here and concrete: prepend the interception preamble;
    //  - abstract or native: replace the body entirely (there is none to keep);
    //  - inherited (equals/hashCode/toString): add an override that intercepts and
    //    otherwise delegates to super.
    private void fixMethod(CtClass ctClass, CtMethod ctMethod, boolean wasFoundInClass) throws NotFoundException {
        String describeBefore = describe(ctMethod);
        try {
            CtClass declaringClass = ctMethod.getDeclaringClass();
            int originalModifiers = ctMethod.getModifiers();
            boolean wasNative = Modifier.isNative(originalModifiers);
            boolean wasFinal = Modifier.isFinal(originalModifiers);
            boolean wasAbstract = Modifier.isAbstract(originalModifiers);
            boolean wasDeclaredInClass = ctClass == declaringClass;
            // Enum values rely on final methods; leave them untouched.
            if (wasFinal && ctClass.isEnum()) {
                return;
            }
            int newModifiers = originalModifiers;
            if (wasNative) {
                newModifiers = Modifier.clear(newModifiers, Modifier.NATIVE);
            }
            if (wasFinal) {
                newModifiers = Modifier.clear(newModifiers, Modifier.FINAL);
            }
            if (wasFoundInClass) {
                ctMethod.setModifiers(newModifiers);
            }
            CtClass returnCtClass = ctMethod.getReturnType();
            Type returnType = Type.find(returnCtClass);
            String methodName = ctMethod.getName();
            CtClass[] paramTypes = ctMethod.getParameterTypes();
//            if (!isAbstract) {
//                if (methodName.startsWith("set") && paramTypes.length == 1) {
//                    String fieldName = "__" + methodName.substring(3);
//                    if (declareField(ctClass, fieldName, paramTypes[0])) {
//                        methodBody = fieldName + " = $1;\n" + methodBody;
//                    }
//                } else if (methodName.startsWith("get") && paramTypes.length == 0) {
//                    String fieldName = "__" + methodName.substring(3);
//                    if (declareField(ctClass, fieldName, returnType)) {
//                        methodBody = "return " + fieldName + ";\n";
//                    }
//                }
//            }
            boolean isStatic = Modifier.isStatic(originalModifiers);
            String methodBody = generateMethodBody(ctClass, ctMethod, wasNative, wasAbstract, returnCtClass, returnType, isStatic, !wasFoundInClass);
            if (!wasFoundInClass) {
                CtMethod newMethod = makeNewMethod(ctClass, ctMethod, returnCtClass, methodName, paramTypes, "{\n" + methodBody + generateCallToSuper(methodName, paramTypes) + "\n}");
                newMethod.setModifiers(newModifiers);
                if (wasDeclaredInClass) {
                    ctMethod.insertBefore("{\n" + methodBody + "}\n");
                } else {
                    ctClass.addMethod(newMethod);
                }
            } else if (wasAbstract || wasNative) {
                CtMethod newMethod = makeNewMethod(ctClass, ctMethod, returnCtClass, methodName, paramTypes, "{\n" + methodBody + "\n}");
                ctMethod.setBody(newMethod, null);
            } else {
                ctMethod.insertBefore("{\n" + methodBody + "}\n");
            }
        } catch (Exception e) {
            throw new RuntimeException("problem instrumenting " + describeBefore, e);
        }
    }
    private CtMethod makeNewMethod(CtClass ctClass, CtMethod ctMethod, CtClass returnCtClass, String methodName, CtClass[] paramTypes, String methodBody) throws CannotCompileException, NotFoundException {
        return CtNewMethod.make(
                ctMethod.getModifiers(),
                returnCtClass,
                methodName,
                paramTypes,
                ctMethod.getExceptionTypes(),
                methodBody,
                ctClass);
    }
    public String generateCallToSuper(String methodName, CtClass[] paramTypes) {
        return "return super." + methodName + "(" + makeParameterReplacementList(paramTypes.length) + ");";
    }
    // Builds "$1, $2, ..." -- Javassist's positional parameter placeholders.
    public String makeParameterReplacementList(int length) {
        if (length == 0) {
            return "";
        }
        String parameterReplacementList = "$1";
        for (int i = 2; i <= length; ++i) {
            parameterReplacementList += ", $" + i;
        }
        return parameterReplacementList;
    }
    // Wraps the core template: abstract methods get only a default return; native
    // methods get a trailing default return after the interception code.
    private String generateMethodBody(CtClass ctClass, CtMethod ctMethod, boolean wasNative, boolean wasAbstract, CtClass returnCtClass, Type returnType, boolean aStatic, boolean shouldGenerateCallToSuper) throws NotFoundException {
        String methodBody;
        if (wasAbstract) {
            methodBody = returnType.isVoid() ? "" : "return " + returnType.defaultReturnString() + ";";
        } else {
            methodBody = generateMethodBody(ctClass, ctMethod, returnCtClass, returnType, aStatic, shouldGenerateCallToSuper);
        }
        if (wasNative) {
            methodBody += returnType.isVoid() ? "" : "return " + returnType.defaultReturnString() + ";";
        }
        return methodBody;
    }
    // Emits the interception preamble: unless RobolectricInternals says to call the
    // real code directly, forward the invocation to methodInvoked() and return its
    // (unboxed) result -- or the default value / a super call when it returns null.
    public String generateMethodBody(CtClass ctClass, CtMethod ctMethod, CtClass returnCtClass, Type returnType, boolean isStatic, boolean shouldGenerateCallToSuper) throws NotFoundException {
        boolean returnsVoid = returnType.isVoid();
        String className = ctClass.getName();
        String methodBody;
        StringBuilder buf = new StringBuilder();
        buf.append("if (!");
        buf.append(RobolectricInternals.class.getName());
        buf.append(".shouldCallDirectly(");
        buf.append(isStatic ? className + ".class" : "this");
        buf.append(")) {\n");
        if (!returnsVoid) {
            buf.append("Object x = ");
        }
        buf.append(RobolectricInternals.class.getName());
        buf.append(".methodInvoked(\n  ");
        buf.append(className);
        buf.append(".class, \"");
        buf.append(ctMethod.getName());
        buf.append("\", ");
        if (!isStatic) {
            buf.append("this");
        } else {
            buf.append("null");
        }
        buf.append(", ");
        appendParamTypeArray(buf, ctMethod);
        buf.append(", ");
        appendParamArray(buf, ctMethod);
        buf.append(")");
        buf.append(";\n");
        if (!returnsVoid) {
            buf.append("if (x != null) return ((");
            buf.append(returnType.nonPrimitiveClassName(returnCtClass));
            buf.append(") x)");
            buf.append(returnType.unboxString());
            buf.append(";\n");
            if (shouldGenerateCallToSuper) {
                buf.append(generateCallToSuper(ctMethod.getName(), ctMethod.getParameterTypes()));
            } else {
                buf.append("return ");
                buf.append(returnType.defaultReturnString());
                buf.append(";\n");
            }
        } else {
            buf.append("return;\n");
        }
        buf.append("}\n");
        methodBody = buf.toString();
        return methodBody;
    }
    // Emits a String[] literal holding the parameter type names.
    private void appendParamTypeArray(StringBuilder buf, CtMethod ctMethod) throws NotFoundException {
        CtClass[] parameterTypes = ctMethod.getParameterTypes();
        if (parameterTypes.length == 0) {
            buf.append("new String[0]");
        } else {
            buf.append("new String[] {");
            for (int i = 0; i < parameterTypes.length; i++) {
                if (i > 0) buf.append(", ");
                buf.append("\"");
                CtClass parameterType = parameterTypes[i];
                buf.append(parameterType.getName());
                buf.append("\"");
            }
            buf.append("}");
        }
    }
    // Emits an Object[] literal of the actual arguments, autoboxing primitives.
    private void appendParamArray(StringBuilder buf, CtMethod ctMethod) throws NotFoundException {
        int parameterCount = ctMethod.getParameterTypes().length;
        if (parameterCount == 0) {
            buf.append("new Object[0]");
        } else {
            buf.append("new Object[] {");
            for (int i = 0; i < parameterCount; i++) {
                if (i > 0) buf.append(", ");
                buf.append(RobolectricInternals.class.getName());
                buf.append(".autobox(");
                buf.append("$").append(i + 1);
                buf.append(")");
            }
            buf.append("}");
        }
    }
    // Declares a backing field for a getter/setter pair when the getter's return type
    // matches. Only referenced by the commented-out experiment in fixMethod above.
    private boolean declareField(CtClass ctClass, String fieldName, CtClass fieldType) throws CannotCompileException, NotFoundException {
        CtMethod ctMethod = getMethod(ctClass, "get" + fieldName, "");
        if (ctMethod == null) {
            return false;
        }
        CtClass getterFieldType = ctMethod.getReturnType();
        if (!getterFieldType.equals(fieldType)) {
            return false;
        }
        if (getField(ctClass, fieldName) == null) {
            CtField field = new CtField(fieldType, fieldName, ctClass);
            field.setModifiers(Modifier.PRIVATE);
            ctClass.addField(field);
        }
        return true;
    }
    // Lookup helpers that translate NotFoundException into null.
    private CtField getField(CtClass ctClass, String fieldName) {
        try {
            return ctClass.getField(fieldName);
        } catch (NotFoundException e) {
            return null;
        }
    }
    private CtMethod getMethod(CtClass ctClass, String methodName, String desc) {
        try {
            return ctClass.getMethod(methodName, desc);
        } catch (NotFoundException e) {
            return null;
        }
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.execution.testframework.sm.runner.ui.statistics;
import com.intellij.execution.runners.ExecutionEnvironment;
import com.intellij.execution.testframework.TestConsoleProperties;
import com.intellij.execution.testframework.sm.Marker;
import com.intellij.execution.testframework.sm.runner.BaseSMTRunnerTestCase;
import com.intellij.execution.testframework.sm.runner.SMTRunnerEventsListener;
import com.intellij.execution.testframework.sm.runner.SMTestProxy;
import com.intellij.execution.testframework.sm.runner.ui.PropagateSelectionHandler;
import com.intellij.execution.testframework.sm.runner.ui.SMTestRunnerResultsForm;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Ref;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.List;
/**
* @author Roman Chernyatchik
*/
public class SMTRunnerStatisticsPanelTest extends BaseSMTRunnerTestCase {
    // Panel under test; created fresh for every test in setUp().
    private StatisticsPanel myStatisticsPanel;
    // Listener produced by the panel (see setUp); routes test run events into it.
    private SMTRunnerEventsListener myTestEventsListener;
    // Root suite proxy created in setUp; individual tests build their own trees.
    private SMTestProxy myRootSuite;
    // Results form the panel is wired to; disposed in tearDown().
    private SMTestRunnerResultsForm myResultsForm;
    @Override
    protected void setUp() throws Exception {
        // Builds the fixture: a results form wired into the StatisticsPanel under test.
        super.setUp();
        myRootSuite = createSuiteProxy("root");
        final TestConsoleProperties consoleProperties = createConsoleProperties();
        final ExecutionEnvironment environment = new ExecutionEnvironment();
        myResultsForm = new SMTestRunnerResultsForm(consoleProperties.getConfiguration(),
                                                    new JLabel(),
                                                    consoleProperties,
                                                    environment.getRunnerSettings(),
                                                    environment.getConfigurationSettings());
        myStatisticsPanel = new StatisticsPanel(getProject(), myResultsForm);
        // Listener that forwards test lifecycle events to the statistics panel.
        myTestEventsListener = myStatisticsPanel.createTestEventsListener();
    }
    @Override
    protected void tearDown() throws Exception {
        // Dispose the form first (presumably the Disposable parent of the panel --
        // confirm against StatisticsPanel's constructor), then the base fixture.
        Disposer.dispose(myResultsForm);
        super.tearDown();
    }
public void testGotoSuite_OnTest() {
// create test sturcure
final SMTestProxy rootSuite = createSuiteProxy("rootSuite");
final SMTestProxy suite1 = createSuiteProxy("suite1", rootSuite);
final SMTestProxy test1 = createTestProxy("test1", suite1);
// show suite in table
myStatisticsPanel.selectProxy(suite1);
// selects row that corresponds to test1
myStatisticsPanel.selectRow(1);
// Check that necessary row is selected
assertEquals(test1, myStatisticsPanel.getSelectedItem());
// Perform action on test
myStatisticsPanel.createGotoSuiteOrParentAction().run();
// Check that current suite in table wasn't changed.
// For it let's select Total row and check selected object
myStatisticsPanel.selectRow(0);
assertEquals(suite1, myStatisticsPanel.getSelectedItem());
}
public void testGotoSuite_OnSuite() {
// create test sturcure
final SMTestProxy rootSuite = createSuiteProxy("rootSuite");
final SMTestProxy suite1 = createSuiteProxy("suite1", rootSuite);
// show root suite in table
myStatisticsPanel.selectProxy(rootSuite);
// selects row that corresponds to suite1
myStatisticsPanel.selectRow(1);
// Check that necessary row is selected
assertEquals(suite1, myStatisticsPanel.getSelectedItem());
// Perform action on suite
myStatisticsPanel.createGotoSuiteOrParentAction().run();
// Check that current suite in table was changed.
// For it let's select Total row and check selected object
myStatisticsPanel.selectRow(0);
assertEquals(suite1, myStatisticsPanel.getSelectedItem());
}
public void testGotoParentSuite_Total() {
// create test sturcure
final SMTestProxy rootSuite = createSuiteProxy("rootSuite");
final SMTestProxy suite1 = createSuiteProxy("suite1", rootSuite);
// show suite in table
myStatisticsPanel.selectProxy(suite1);
// selects Total row
myStatisticsPanel.selectRow(0);
// Check that necessary row is selected
assertEquals(suite1, myStatisticsPanel.getSelectedItem());
// Perform action on suite
myStatisticsPanel.createGotoSuiteOrParentAction().run();
// Check that current suite in table was changed.
// For it let's select Total row and check selected object
myStatisticsPanel.selectRow(0);
assertEquals(rootSuite, myStatisticsPanel.getSelectedItem());
}
public void testGotoParentSuite_TotalRoot() {
// create test sturcure
final SMTestProxy rootSuite = createSuiteProxy("rootSuite");
createSuiteProxy("suite1", rootSuite);
// show root suite in table
myStatisticsPanel.selectProxy(rootSuite);
// selects Total row
myStatisticsPanel.selectRow(0);
// Check that necessary row is selected
assertEquals(rootSuite, myStatisticsPanel.getSelectedItem());
// Perform action on suite
myStatisticsPanel.createGotoSuiteOrParentAction().run();
// Check that current suite in table wasn't changed.
// For it let's select Total row and check selected object
myStatisticsPanel.selectRow(0);
assertEquals(rootSuite, myStatisticsPanel.getSelectedItem());
}
public void testChangeSelectionListener() {
    // Fixture: rootSuite -> suite1 -> test1.
    final SMTestProxy root = createSuiteProxy("rootSuite");
    final SMTestProxy childSuite = createSuiteProxy("suite1", root);
    final SMTestProxy leafTest = createTestProxy("test1", childSuite);
    // Selecting a test proxy makes it the current table selection.
    myStatisticsPanel.selectProxy(leafTest);
    assertEquals(leafTest, myStatisticsPanel.getSelectedItem());
    // Selecting a suite proxy behaves the same way.
    myStatisticsPanel.selectProxy(childSuite);
    assertEquals(childSuite, myStatisticsPanel.getSelectedItem());
}
public void testChangeSelectionAction() {
    final Marker selectionPropagated = new Marker();
    final Ref<SMTestProxy> propagatedProxy = new Ref<SMTestProxy>();
    final Ref<Boolean> focusWasRequested = new Ref<Boolean>();
    // Record every propagate-selection request emitted by the panel.
    myStatisticsPanel.addPropagateSelectionListener(new PropagateSelectionHandler() {
        public void handlePropagateSelectionRequest(@Nullable final SMTestProxy selectedTestProxy, @NotNull final Object sender,
                                                    final boolean requestFocus) {
            selectionPropagated.set();
            propagatedProxy.set(selectedTestProxy);
            focusWasRequested.set(requestFocus);
        }
    });
    // Fixture: rootSuite -> suite1 -> test1.
    final SMTestProxy root = createSuiteProxy("rootSuite");
    final SMTestProxy childSuite = createSuiteProxy("suite1", root);
    final SMTestProxy leafTest = createTestProxy("test1", childSuite);
    // Case 1: a test row is selected.
    myStatisticsPanel.selectProxy(childSuite);
    myStatisticsPanel.selectRow(1);
    assertEquals(leafTest, myStatisticsPanel.getSelectedItem());
    myStatisticsPanel.showSelectedProxyInTestsTree();
    assertTrue(selectionPropagated.isSet());
    assertEquals(leafTest, propagatedProxy.get());
    assertTrue(focusWasRequested.get());
    // Case 2: a suite row is selected. Reset the recorded state first.
    selectionPropagated.reset();
    propagatedProxy.set(null);
    focusWasRequested.set(null);
    myStatisticsPanel.selectProxy(root);
    myStatisticsPanel.selectRow(1);
    assertEquals(childSuite, myStatisticsPanel.getSelectedItem());
    myStatisticsPanel.showSelectedProxyInTestsTree();
    assertTrue(selectionPropagated.isSet());
    assertEquals(childSuite, propagatedProxy.get());
    assertTrue(focusWasRequested.get());
    // Case 3: the "Total" row is selected. Reset again.
    selectionPropagated.reset();
    propagatedProxy.set(null);
    focusWasRequested.set(null);
    myStatisticsPanel.selectProxy(root);
    myStatisticsPanel.selectRow(0);
    assertEquals(root, myStatisticsPanel.getSelectedItem());
    myStatisticsPanel.showSelectedProxyInTestsTree();
    assertTrue(selectionPropagated.isSet());
    assertEquals(root, propagatedProxy.get());
    assertTrue(focusWasRequested.get());
}
public void testOnSuiteStarted_NoCurrent() {
    // Nothing is currently shown in the table.
    myStatisticsPanel.selectProxy(null);
    final SMTestProxy suite = createSuiteProxy("suite1", myRootSuite);
    createTestProxy("test1", suite);
    createTestProxy("test2", suite);
    // A suite-started event with no current selection must not populate the table.
    myTestEventsListener.onSuiteStarted(suite);
    assertEmpty(getItems());
}
public void testOnSuiteStarted_Current() {
    final SMTestProxy shownSuite = createSuiteProxy("suite1", myRootSuite);
    myStatisticsPanel.selectProxy(shownSuite);
    assertSameElements(getItems(), shownSuite);
    // Children added after the suite was shown...
    final SMTestProxy test1 = createTestProxy("test1", shownSuite);
    final SMTestProxy test2 = createTestProxy("test2", shownSuite);
    // ...must appear when the started event for the shown suite itself arrives.
    myTestEventsListener.onSuiteStarted(shownSuite);
    assertSameElements(getItems(), shownSuite, test1, test2);
}
public void testOnSuiteStarted_Child() {
    final SMTestProxy shownSuite = createSuiteProxy("suite1", myRootSuite);
    myStatisticsPanel.selectProxy(shownSuite);
    assertSameElements(getItems(), shownSuite);
    final SMTestProxy test1 = createTestProxy("test1", shownSuite);
    final SMTestProxy test2 = createTestProxy("test2", shownSuite);
    // A started event for a child of the shown suite must also refresh the table.
    myTestEventsListener.onSuiteStarted(test1);
    assertSameElements(getItems(), shownSuite, test1, test2);
}
public void testOnSuiteStarted_Other() {
    final SMTestProxy shownSuite = createSuiteProxy("suite", myRootSuite);
    final SMTestProxy unrelatedSuite = createSuiteProxy("other_suite", myRootSuite);
    myStatisticsPanel.selectProxy(shownSuite);
    assertSameElements(getItems(), shownSuite);
    createTestProxy("test1", shownSuite);
    createTestProxy("test2", shownSuite);
    // Events for an unrelated suite must not refresh the shown suite's table.
    myTestEventsListener.onSuiteStarted(unrelatedSuite);
    assertSameElements(getItems(), shownSuite);
}
public void testOnSuiteFinished_NoCurrent() {
    // Nothing is currently shown in the table.
    myStatisticsPanel.selectProxy(null);
    final SMTestProxy suite = createSuiteProxy("suite1", myRootSuite);
    createTestProxy("test1", suite);
    createTestProxy("test2", suite);
    // A suite-finished event with no current selection must not populate the table.
    myTestEventsListener.onSuiteFinished(suite);
    assertEmpty(getItems());
}
public void testOnSuiteFinished_Current() {
    final SMTestProxy shownSuite = createSuiteProxy("suite1", myRootSuite);
    myStatisticsPanel.selectProxy(shownSuite);
    assertSameElements(getItems(), shownSuite);
    final SMTestProxy test1 = createTestProxy("test1", shownSuite);
    final SMTestProxy test2 = createTestProxy("test2", shownSuite);
    // Finishing the shown suite refreshes the table with its current children.
    myTestEventsListener.onSuiteFinished(shownSuite);
    assertSameElements(getItems(), shownSuite, test1, test2);
}
public void testOnSuiteFinished_Child() {
    final SMTestProxy shownSuite = createSuiteProxy("suite1", myRootSuite);
    myStatisticsPanel.selectProxy(shownSuite);
    assertSameElements(getItems(), shownSuite);
    final SMTestProxy test1 = createTestProxy("test1", shownSuite);
    final SMTestProxy test2 = createTestProxy("test2", shownSuite);
    // A finished event for a child of the shown suite must also refresh the table.
    myTestEventsListener.onSuiteFinished(test1);
    assertSameElements(getItems(), shownSuite, test1, test2);
}
public void testOnSuiteFinished_Other() {
    final SMTestProxy shownSuite = createSuiteProxy("suite", myRootSuite);
    final SMTestProxy unrelatedSuite = createSuiteProxy("other_suite", myRootSuite);
    myStatisticsPanel.selectProxy(shownSuite);
    assertSameElements(getItems(), shownSuite);
    createTestProxy("test1", shownSuite);
    createTestProxy("test2", shownSuite);
    // A finished event for an unrelated suite must not refresh the shown table.
    myTestEventsListener.onSuiteFinished(unrelatedSuite);
    assertSameElements(getItems(), shownSuite);
}
public void testOnTestStarted_NoCurrent() {
    // Nothing is currently shown in the table.
    myStatisticsPanel.selectProxy(null);
    final SMTestProxy suite = createSuiteProxy("suite1", myRootSuite);
    final SMTestProxy startedTest = createTestProxy("test1", suite);
    createTestProxy("test2", suite);
    // A test-started event with no current selection must not populate the table.
    myTestEventsListener.onTestStarted(startedTest);
    assertEmpty(getItems());
}
public void testOnTestStarted_Child() {
    final SMTestProxy shownTest = createTestProxy("test1", myRootSuite);
    myStatisticsPanel.selectProxy(shownTest);
    assertSameElements(getItems(), myRootSuite, shownTest);
    final SMTestProxy sibling = createTestProxy("test2", myRootSuite);
    // A started event for a test inside the shown suite refreshes the table.
    myTestEventsListener.onTestStarted(shownTest);
    assertSameElements(getItems(), myRootSuite, shownTest, sibling);
}
public void testOnTestStarted_Other() {
    final SMTestProxy shownTest = createTestProxy("test1", myRootSuite);
    final SMTestProxy otherSuite = createSuiteProxy("suite1", myRootSuite);
    final SMTestProxy foreignTest = createTestProxy("other_test", otherSuite);
    myStatisticsPanel.selectProxy(shownTest);
    assertSameElements(getItems(), myRootSuite, shownTest, otherSuite);
    createTestProxy("test2", myRootSuite);
    // A started event for a test outside the shown suite must not refresh the table.
    myTestEventsListener.onTestStarted(foreignTest);
    assertSameElements(getItems(), myRootSuite, shownTest, otherSuite);
}
public void testOnTestFinished_NoCurrent() {
    // Nothing is currently shown in the table.
    myStatisticsPanel.selectProxy(null);
    final SMTestProxy suite = createSuiteProxy("suite1", myRootSuite);
    final SMTestProxy finishedTest = createTestProxy("test1", suite);
    createTestProxy("test2", suite);
    // A test-finished event with no current selection must not populate the table.
    myTestEventsListener.onTestFinished(finishedTest);
    assertEmpty(getItems());
}
public void testOnTestFinished_Child() {
    final SMTestProxy shownTest = createTestProxy("test1", myRootSuite);
    myStatisticsPanel.selectProxy(shownTest);
    assertSameElements(getItems(), myRootSuite, shownTest);
    final SMTestProxy sibling = createTestProxy("test2", myRootSuite);
    // A finished event for a test inside the shown suite refreshes the table.
    myTestEventsListener.onTestFinished(shownTest);
    assertSameElements(getItems(), myRootSuite, shownTest, sibling);
}
public void testOnTestFinished_Other() {
    final SMTestProxy shownTest = createTestProxy("test1", myRootSuite);
    final SMTestProxy otherSuite = createSuiteProxy("suite1", myRootSuite);
    final SMTestProxy foreignTest = createTestProxy("other_test", otherSuite);
    myStatisticsPanel.selectProxy(shownTest);
    assertSameElements(getItems(), myRootSuite, shownTest, otherSuite);
    createTestProxy("test2", myRootSuite);
    // A finished event for a test outside the shown suite must not refresh the table.
    myTestEventsListener.onTestFinished(foreignTest);
    assertSameElements(getItems(), myRootSuite, shownTest, otherSuite);
}
public void testSelectionRestoring_ForTest() {
    final SMTestProxy suite = createSuiteProxy("suite1", myRootSuite);
    final SMTestProxy selectedTest = createTestProxy("test1", suite);
    myStatisticsPanel.selectProxy(selectedTest);
    // A model refresh triggered by another test starting...
    final SMTestProxy newTest = createTestProxy("test2", suite);
    myTestEventsListener.onTestStarted(newTest);
    // ...must keep the previous test selection intact.
    assertEquals(selectedTest, myStatisticsPanel.getSelectedItem());
}
public void testSelectionRestoring_ForSuite() {
    myStatisticsPanel.selectProxy(myRootSuite);
    // Another suite is added, so the model must update...
    final SMTestProxy newSuite = createSuiteProxy("suite1", myRootSuite);
    myTestEventsListener.onSuiteStarted(newSuite);
    // ...without losing the previous suite selection.
    assertEquals(myRootSuite, myStatisticsPanel.getSelectedItem());
}
/** Shortcut: the rows currently displayed in the statistics table. */
private List<SMTestProxy> getItems() {
    return myStatisticsPanel.getTableItems();
}
}
| |
/*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.collection.impl.list;
import com.hazelcast.collection.impl.collection.CollectionContainer;
import com.hazelcast.collection.impl.collection.CollectionDataSerializerHook;
import com.hazelcast.collection.impl.collection.CollectionItem;
import com.hazelcast.collection.impl.collection.TxCollectionItem;
import com.hazelcast.config.ListConfig;
import com.hazelcast.nio.serialization.Data;
import com.hazelcast.spi.NodeEngine;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
/**
 * Partition-side container backing a Hazelcast IList.
 *
 * <p>Items are kept either as an ordered {@code itemList} (declared here) or as the
 * id-keyed {@code itemMap} inherited from {@link CollectionContainer}; the container
 * lazily converts between the two in {@link #getCollection()} and {@link #getMap()},
 * because positional list operations need order while backup/transaction operations
 * need id lookup. At most one representation is live at any time.
 */
public class ListContainer extends CollectionContainer {

    /** Initial capacity used whenever a fresh (empty) backing list/map is created. */
    private static final int INITIAL_CAPACITY = 1000;

    // Ordered representation; null while itemMap is the active representation.
    private List<CollectionItem> itemList;
    // Lazily resolved list configuration; see getConfig().
    private ListConfig config;

    public ListContainer() {
    }

    public ListContainer(String name, NodeEngine nodeEngine) {
        super(name, nodeEngine);
    }

    /** Resolves this list's configuration on first use and caches it. */
    @Override
    public ListConfig getConfig() {
        if (config == null) {
            config = nodeEngine.getConfig().findListConfig(name);
        }
        return config;
    }

    /**
     * Rolls back a transactional remove: looks the removed item up in the
     * transaction log ({@code txMap}, inherited) and restores it into the list
     * at its id-ordered position. Logs and bails out if the log entry is gone.
     */
    @Override
    public void rollbackRemove(long itemId) {
        TxCollectionItem txItem = txMap.remove(itemId);
        if (txItem == null) {
            logger.warning("Transaction log cannot be found for rolling back 'remove()' operation."
                    + " Missing log item id: " + itemId);
            return;
        }
        CollectionItem item = new CollectionItem(itemId, txItem.getValue());
        addTxItemOrdered(item);
    }

    /**
     * Inserts {@code item} before the first existing item with a larger id,
     * keeping the list sorted by item id. Since ids are handed out by
     * {@code nextId()}, id order reflects original insertion order.
     */
    private void addTxItemOrdered(CollectionItem item) {
        ListIterator<CollectionItem> iterator = getCollection().listIterator();
        while (iterator.hasNext()) {
            CollectionItem collectionItem = iterator.next();
            if (item.getItemId() < collectionItem.getItemId()) {
                // Step back so iterator.add() inserts before the larger-id item.
                iterator.previous();
                break;
            }
        }
        iterator.add(item);
    }

    /**
     * Adds a new value at {@code index}; a negative index means "append".
     *
     * @return the created item, or {@code null} if the append was rejected
     */
    public CollectionItem add(int index, Data value) {
        final CollectionItem item = new CollectionItem(nextId(), value);
        if (index < 0) {
            return getCollection().add(item) ? item : null;
        } else {
            getCollection().add(index, item);
            return item;
        }
    }

    /** Returns the item at the given position. */
    public CollectionItem get(int index) {
        return getCollection().get(index);
    }

    /**
     * Replaces the element at {@code index} with a new item carrying the
     * supplied id and value.
     *
     * @return the item previously at that position
     */
    public CollectionItem set(int index, long itemId, Data value) {
        return getCollection().set(index, new CollectionItem(itemId, value));
    }

    /** Backup of set(): swap the old item id for the new id/value in the id map. */
    public void setBackup(long oldItemId, long itemId, Data value) {
        getMap().remove(oldItemId);
        getMap().put(itemId, new CollectionItem(itemId, value));
    }

    /** Removes and returns the item at the given position. */
    public CollectionItem remove(int index) {
        return getCollection().remove(index);
    }

    /**
     * Returns the index of the first (or, when {@code last} is true, the last)
     * item whose value equals {@code value}, or -1 when no item matches.
     */
    public int indexOf(boolean last, Data value) {
        final List<CollectionItem> list = getCollection();
        if (last) {
            // Walk backwards from the end for lastIndexOf semantics.
            int index = list.size();
            final ListIterator<CollectionItem> iterator = list.listIterator(index);
            while (iterator.hasPrevious()) {
                final CollectionItem item = iterator.previous();
                index--;
                if (value.equals(item.getValue())) {
                    return index;
                }
            }
        } else {
            int index = -1;
            for (CollectionItem item : list) {
                index++;
                if (value.equals(item.getValue())) {
                    return index;
                }
            }
        }
        return -1;
    }

    /**
     * Inserts all values at {@code index} and returns the generated
     * item-id -> value mapping (used to replicate the ids to backups).
     */
    public Map<Long, Data> addAll(int index, List<Data> valueList) {
        final int size = valueList.size();
        final Map<Long, Data> map = new HashMap<Long, Data>(size);
        List<CollectionItem> list = new ArrayList<CollectionItem>(size);
        for (Data value : valueList) {
            final long itemId = nextId();
            list.add(new CollectionItem(itemId, value));
            map.put(itemId, value);
        }
        getCollection().addAll(index, list);
        return map;
    }

    /**
     * Returns the values in [from, to). {@code from == -1 && to == -1} selects
     * the whole list; {@code to == -1} alone selects [from, size).
     */
    public List<Data> sub(int from, int to) {
        final List<CollectionItem> list;
        if (from == -1 && to == -1) {
            list = getCollection();
        } else if (to == -1) {
            List<CollectionItem> collection = getCollection();
            list = collection.subList(from, collection.size());
        } else {
            list = getCollection().subList(from, to);
        }
        final ArrayList<Data> sub = new ArrayList<Data>(list.size());
        for (CollectionItem item : list) {
            sub.add((Data) item.getValue());
        }
        return sub;
    }

    /**
     * Ordered representation of the items. If the id map was the active
     * representation, its items are sorted into a fresh list and the map is
     * cleared and dropped, so exactly one representation stays live.
     */
    @Override
    public List<CollectionItem> getCollection() {
        if (itemList == null) {
            if (itemMap != null && !itemMap.isEmpty()) {
                itemList = new ArrayList<CollectionItem>(itemMap.values());
                // CollectionItem sorts by item id, restoring insertion order.
                Collections.sort(itemList);
                itemMap.clear();
            } else {
                itemList = new ArrayList<CollectionItem>(INITIAL_CAPACITY);
            }
            itemMap = null;
        }
        return itemList;
    }

    /**
     * Id-keyed representation of the items; the inverse conversion of
     * {@link #getCollection()} (list is drained into a map and dropped).
     */
    @Override
    protected Map<Long, CollectionItem> getMap() {
        if (itemMap == null) {
            if (itemList != null && !itemList.isEmpty()) {
                itemMap = new HashMap<Long, CollectionItem>(itemList.size());
                for (CollectionItem item : itemList) {
                    itemMap.put(item.getItemId(), item);
                }
                itemList.clear();
            } else {
                itemMap = new HashMap<Long, CollectionItem>(INITIAL_CAPACITY);
            }
            itemList = null;
        }
        return itemMap;
    }

    /** Drops all items when the container is destroyed. */
    @Override
    protected void onDestroy() {
        if (itemList != null) {
            itemList.clear();
        }
        if (itemMap != null) {
            itemMap.clear();
        }
    }

    /** Serialization type id for this container. */
    @Override
    public int getId() {
        return CollectionDataSerializerHook.LIST_CONTAINER;
    }
}
| |
/*
Copyright (c) 2013, Colorado State University
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
This software is provided by the copyright holders and contributors "as is" and
any express or implied warranties, including, but not limited to, the implied
warranties of merchantability and fitness for a particular purpose are
disclaimed. In no event shall the copyright holder or contributors be liable for
any direct, indirect, incidental, special, exemplary, or consequential damages
(including, but not limited to, procurement of substitute goods or services;
loss of use, data, or profits; or business interruption) however caused and on
any theory of liability, whether in contract, strict liability, or tort
(including negligence or otherwise) arising in any way out of the use of this
software, even if advised of the possibility of such damage.
*/
package mendel.network;
import java.io.IOException;
import java.net.Socket;
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.nio.channels.CancelledKeyException;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Provides an abstract implementation for consuming and publishing messages on
* both the server and client side.
*
* @author malensek
*/
public abstract class MessageRouter implements Runnable {

    protected static final Logger logger = Logger.getLogger("mendel");

    /**
     * The size (in bytes) of the message prefix used in the system.
     */
    public static final int PREFIX_SZ = Integer.SIZE / Byte.SIZE;

    /**
     * The default read buffer size is 8 MB.
     */
    public static final int DEFAULT_READ_BUFFER_SIZE = 8388608;

    /**
     * The default write queue allows 100 items to be inserted before it
     * starts blocking. This prevents situations where the MessageRouter is
     * overwhelmed by an extreme number of write requests, exhausting available
     * resources.
     */
    public static final int DEFAULT_WRITE_QUEUE_SIZE = 100;

    /**
     * System property that overrides the read buffer size.
     */
    public static final String READ_BUFFER_PROPERTY
            = "mendel.network.MessageRouter.readBufferSize";

    /**
     * System property that overrides the write queue maximum size.
     */
    public static final String WRITE_QUEUE_PROPERTY
            = "mendel.network.MessageRouter.writeQueueSize";

    /**
     * Flag used to determine whether the Selector thread should run
     */
    protected boolean online;

    // Consumers notified of incoming messages, connects, and disconnects.
    private List<MessageListener> listeners = new ArrayList<>();

    protected Selector selector;

    protected int readBufferSize;
    protected int writeQueueSize;

    // Single direct buffer reused for all channel reads. Only the selector
    // thread touches it, so no synchronization is needed.
    private ByteBuffer readBuffer;

    // Interest-set changes queued by other threads; applied by the selector
    // thread in updateInterestOps().
    protected ConcurrentHashMap<SelectionKey, Integer> changeInterest
            = new ConcurrentHashMap<>();

    public MessageRouter() {
        this(DEFAULT_READ_BUFFER_SIZE, DEFAULT_WRITE_QUEUE_SIZE);
    }

    public MessageRouter(int readBufferSize, int maxWriteQueueSize) {
        // System properties, when set, take precedence over constructor args.
        String readSz = System.getProperty(READ_BUFFER_PROPERTY);
        if (readSz == null) {
            this.readBufferSize = readBufferSize;
        } else {
            this.readBufferSize = Integer.parseInt(readSz);
        }
        String queueSz = System.getProperty(WRITE_QUEUE_PROPERTY);
        if (queueSz == null) {
            this.writeQueueSize = maxWriteQueueSize;
        } else {
            this.writeQueueSize = Integer.parseInt(queueSz);
        }
        readBuffer = ByteBuffer.allocateDirect(this.readBufferSize);
    }

    /**
     * As long as the MessageRouter is online, monitor connection operations
     * through the Selector instance.
     */
    @Override
    public void run() {
        while (online) {
            try {
                updateInterestOps();
                processSelectionKeys();
            } catch (IOException e) {
                logger.log(Level.WARNING, "Error in selector thread", e);
            }
        }
    }

    /**
     * Updates interest sets for any SelectionKey instances that require
     * changes. This allows external threads to queue up changes to the
     * interest sets that will be fulfilled by the selector thread.
     */
    protected void updateInterestOps() {
        Iterator<SelectionKey> it = changeInterest.keySet().iterator();
        while (it.hasNext()) {
            SelectionKey key = it.next();
            if (key.isValid()) {
                SocketChannel channel = (SocketChannel) key.channel();
                if (channel.isConnected() == false
                        || channel.isRegistered() == false) {
                    // Channel not ready yet: the 'continue' skips the remove
                    // below, leaving the request queued for a later pass.
                    // NOTE(review): presumably intentional deferral — confirm
                    // entries cannot linger forever for channels that never
                    // finish connecting.
                    continue;
                }
                key.interestOps(changeInterest.get(key));
            }
            // Safe during iteration: ConcurrentHashMap iterators are weakly
            // consistent. Invalid keys are dropped without being applied.
            changeInterest.remove(key);
        }
    }

    /**
     * Performs a select operation, and then processes the resulting
     * SelectionKey set based on interest ops.
     */
    protected void processSelectionKeys()
            throws IOException {
        // Blocks until at least one channel is ready or wakeup() is called.
        selector.select();
        Iterator<SelectionKey> keys = selector.selectedKeys().iterator();
        while (keys.hasNext()) {
            SelectionKey key = keys.next();
            // Must remove manually; the selector never clears the selected set.
            keys.remove();
            if (key.isValid() == false) {
                continue;
            }
            try {
                if (key.isAcceptable()) {
                    accept(key);
                    continue;
                }
                if (key.isConnectable()) {
                    connect(key);
                    continue;
                }
                if (key.isWritable()) {
                    write(key);
                }
                if (key.isReadable()) {
                    read(key);
                }
            } catch (CancelledKeyException e) {
                /* SelectionKey was cancelled by another thread. */
                continue;
            }
        }
    }

    /**
     * Accepts new connections.
     *
     * @param key The SelectionKey for the connecting client.
     */
    protected void accept(SelectionKey key)
            throws IOException {
        ServerSocketChannel servSocket = (ServerSocketChannel) key.channel();
        SocketChannel channel = servSocket.accept();
        logger.info("Accepted connection: " + getClientString(channel));
        TransmissionTracker tracker = new TransmissionTracker(writeQueueSize);
        // Non-blocking mode is required for selector-based registration; the
        // tracker rides along as the key's attachment.
        channel.configureBlocking(false);
        channel.register(selector, SelectionKey.OP_READ, tracker);
        dispatchConnect(getDestination(channel));
    }

    /**
     * Finishes setting up a connection on a SocketChannel.
     *
     * @param key SelectionKey for the SocketChannel.
     */
    protected void connect(SelectionKey key) {
        try {
            SocketChannel channel = (SocketChannel) key.channel();
            if (channel.finishConnect()) {
                TransmissionTracker tracker = TransmissionTracker.fromKey(key);
                if (tracker.hasPendingData() == false) {
                    changeInterest.put(key, SelectionKey.OP_READ);
                } else {
                    /* Data has already been queued up; start writing */
                    changeInterest.put(key,
                            SelectionKey.OP_READ | SelectionKey.OP_WRITE);
                }
            }
            // NOTE(review): dispatchConnect() runs even when finishConnect()
            // returned false (connection still pending) — confirm listeners
            // tolerate an early or repeated notification.
            dispatchConnect(getDestination(channel));
        } catch (IOException e) {
            logger.log(Level.INFO, "Connection finalization failed", e);
            disconnect(key);
        }
    }

    /**
     * Read data from a SocketChannel.
     *
     * @param key SelectionKey for the SocketChannel.
     */
    protected void read(SelectionKey key) {
        SocketChannel channel = (SocketChannel) key.channel();
        // The shared buffer is cleared once per readiness event.
        readBuffer.clear();
        int bytesRead = 0;
        try {
            /* Read data from the channel */
            while ((bytesRead = channel.read(readBuffer)) > 0) {
                readBuffer.flip();
                // NOTE(review): after flip() the buffer's limit stays at the
                // end of this read, and processIncomingMessage() is assumed to
                // consume it fully; subsequent reads in this loop can only
                // fill up to that limit — confirm this is intended.
                processIncomingMessage(key);
            }
        } catch (IOException e) {
            logger.log(Level.FINE, "Abnormal remote termination", e);
            disconnect(key);
            return;
        } catch (BufferUnderflowException e) {
            /* Incoming packets lied about their size! */
            logger.log(Level.WARNING, "Incoming packet size mismatch", e);
        }
        if (bytesRead == -1) {
            /* Connection was terminated by the client. */
            logger.fine("Reached EOF in channel input stream");
            disconnect(key);
            return;
        }
    }

    /**
     * Process data received from a client SocketChannel. This method is
     * chiefly concerned with processing incoming data streams into
     * MendelMessage packets to be consumed by the system.
     *
     * @param key SelectionKey for the client.
     */
    protected void processIncomingMessage(SelectionKey key) {
        TransmissionTracker transmission = TransmissionTracker.fromKey(key);
        if (transmission.expectedBytes == 0) {
            /* We don't know how much data the client is sending yet.
             * Read the message prefix to determine the payload size. */
            boolean ready = readPrefix(readBuffer, transmission);
            /* Check if we have read the payload size prefix yet. If
             * not, then we're done for now. */
            if (ready == false) {
                return;
            }
        }
        // Copy as much of the remaining payload as this buffer holds; a
        // partial payload carries over to the next read via readPointer.
        int readSize = transmission.expectedBytes - transmission.readPointer;
        if (readSize > readBuffer.remaining()) {
            readSize = readBuffer.remaining();
        }
        readBuffer.get(transmission.payload,
                transmission.readPointer, readSize);
        transmission.readPointer += readSize;
        if (transmission.readPointer == transmission.expectedBytes) {
            /* The payload has been read */
            MendelMessage msg = new MendelMessage(
                    transmission.payload,
                    new MessageContext(this, key));
            dispatchMessage(msg);
            transmission.resetCounters();
            if (readBuffer.hasRemaining()) {
                /* There is another payload to read */
                processIncomingMessage(key);
                /* Note: this process continues until we reach the end of the
                 * buffer. Not doing so would cause us to lose data. */
            }
        }
    }

    /**
     * Read the payload size prefix from a channel.
     * Each message in Mendel is prefixed with a payload size field; this is
     * read to allocate buffers for the incoming message.
     *
     * @return true if the payload size has been determined; false otherwise.
     */
    protected static boolean readPrefix(ByteBuffer buffer,
                                        TransmissionTracker transmission) {
        /* Make sure the prefix hasn't already been read. */
        if (transmission.expectedBytes != 0) {
            return true;
        }
        /* Can we determine the payload size in one shot? (we must read at least
         * PREFIX_SZ bytes) */
        if (transmission.prefixPointer == 0
                && buffer.remaining() >= PREFIX_SZ) {
            transmission.expectedBytes = buffer.getInt();
            transmission.allocatePayload();
            return true;
        } else {
            /* Keep reading until we have at least PREFIX_SZ bytes to determine
             * the payload size. */
            int prefixLeft = PREFIX_SZ - transmission.prefixPointer;
            if (buffer.remaining() < prefixLeft) {
                prefixLeft = buffer.remaining();
            }
            buffer.get(transmission.prefix,
                    transmission.prefixPointer, prefixLeft);
            transmission.prefixPointer += prefixLeft;
            if (transmission.prefixPointer >= PREFIX_SZ) {
                // Prefix assembled across multiple reads; decode it now.
                ByteBuffer buf = ByteBuffer.wrap(transmission.prefix);
                transmission.expectedBytes = buf.getInt();
                transmission.allocatePayload();
                return true;
            }
        }
        return false;
    }

    /**
     * Wraps a given message in a {@link java.nio.ByteBuffer}, including the payload size
     * prefix. Data produced by this method will be subsequently read by the
     * readPrefix() method.
     */
    protected static ByteBuffer wrapWithPrefix(MendelMessage message) {
        int messageSize = message.getPayload().length;
        // NOTE(review): the literal 4 duplicates PREFIX_SZ — consider using
        // the constant so the two cannot drift apart.
        ByteBuffer buffer = ByteBuffer.allocate(messageSize + 4);
        buffer.putInt(messageSize);
        buffer.put(message.getPayload());
        buffer.flip();
        return buffer;
    }

    /**
     * Adds a message to the pending write queue for a particular SelectionKey
     * and submits a change request for its interest set. Pending data is placed
     * in a blocking queue, so this function may block to prevent queueing an
     * excessive amount of data.
     * <p/>
     * The system property <em>mendel.net.MessageRouter.writeQueueSize</em>
     * tunes the maximum amount of data that can be queued.
     *
     * @param key SelectionKey for the channel.
     * @param message MendelMessage to publish on the channel.
     * @return {@link mendel.network.Transmission} instance representing the send operation.
     */
    public Transmission sendMessage(SelectionKey key, MendelMessage message)
            throws IOException {
        //TODO reduce the visibility of this method to protected
        if (this.isOnline() == false) {
            throw new IOException("MessageRouter is not online.");
        }
        TransmissionTracker tracker = TransmissionTracker.fromKey(key);
        ByteBuffer payload = wrapWithPrefix(message);
        Transmission trans = null;
        try {
            tracker.queueOutgoingData(payload);
        } catch (InterruptedException e) {
            // Restore the interrupt flag before converting to IOException.
            Thread.currentThread().interrupt();
            throw new IOException("Interrupted while waiting to queue data");
        }
        // NOTE(review): 'trans' is never assigned, so this method always
        // returns null despite the documented @return contract — confirm
        // whether queueOutgoingData() should yield the Transmission handle.
        changeInterest.put(key, SelectionKey.OP_READ | SelectionKey.OP_WRITE);
        // Wake the selector so the interest change takes effect promptly.
        selector.wakeup();
        return trans;
    }

    /**
     * When a {@link java.nio.channels.SelectionKey} is writable, push as much pending data
     * out on the channel as possible.
     *
     * @param key {@link java.nio.channels.SelectionKey} of the channel to write to.
     */
    private void write(SelectionKey key) {
        TransmissionTracker tracker = TransmissionTracker.fromKey(key);
        SocketChannel channel = (SocketChannel) key.channel();
        while (tracker.hasPendingData() == true) {
            Transmission trans = tracker.getNextTransmission();
            ByteBuffer buffer = trans.getPayload();
            if (buffer == null) {
                break;
            }
            int written = 0;
            while (buffer.hasRemaining()) {
                try {
                    written = channel.write(buffer);
                } catch (IOException e) {
                    /* Broken pipe */
                    disconnect(key);
                    return;
                }
                if (buffer.hasRemaining() == false) {
                    /* Done writing */
                    tracker.transmissionFinished();
                }
                if (written == 0) {
                    /* Return now, to keep our OP_WRITE interest op set. */
                    return;
                }
            }
        }
        /* At this point, the queue is empty. */
        key.interestOps(SelectionKey.OP_READ);
        return;
    }

    /**
     * Handle termination of connections.
     *
     * @param key The SelectionKey of the SocketChannel that has disconnected.
     */
    protected void disconnect(SelectionKey key) {
        if (key.isValid() == false) {
            return;
        }
        SocketChannel channel = (SocketChannel) key.channel();
        NetworkDestination destination = getDestination(channel);
        logger.info("Terminating connection: " + destination.toString());
        try {
            key.cancel();
            key.channel().close();
        } catch (IOException e) {
            logger.log(Level.WARNING, "Failed to disconnect channel", e);
        }
        dispatchDisconnect(destination);
    }

    /**
     * Adds a message listener (consumer) to this MessageRouter. Listeners
     * receive messages that are published by this MessageRouter.
     *
     * @param listener {@link mendel.network.MessageListener} that will consume messages
     * published by this MessageRouter.
     */
    public void addListener(MessageListener listener) {
        listeners.add(listener);
    }

    /**
     * Dispatches a message to all listening consumers.
     *
     * @param message {@link mendel.network.MendelMessage} to dispatch.
     */
    protected void dispatchMessage(MendelMessage message) {
        for (MessageListener listener : listeners) {
            listener.onMessage(message);
        }
    }

    /**
     * Informs all listening consumers that a connection to a remote endpoint
     * has been made.
     */
    protected void dispatchConnect(NetworkDestination endpoint) {
        for (MessageListener listener : listeners) {
            listener.onConnect(endpoint);
        }
    }

    /**
     * Informs all listening consumers that a connection to a remote endpoint
     * has been terminated.
     */
    protected void dispatchDisconnect(NetworkDestination endpoint) {
        for (MessageListener listener : listeners) {
            listener.onDisconnect(endpoint);
        }
    }

    /**
     * Determines whether or not this MessageRouter is online. As long as the
     * router is online, the selector thread will continue to run.
     *
     * @return true if the MessageRouter instance is online and running.
     */
    public boolean isOnline() {
        return this.online;
    }

    /**
     * Determines a connection's hostname and port, then concatenates the two
     * values, separated by a colon (:).
     *
     * @param channel Channel to get client information about.
     */
    protected static String getClientString(SocketChannel channel) {
        Socket socket = channel.socket();
        return socket.getInetAddress().getHostName() + ":" + socket.getPort();
    }

    /**
     * Determines a connection's endpoint information (hostname and port) and
     * encapsulates them in a {@link mendel.network.NetworkDestination}.
     *
     * @param channel The SocketChannel of the network endpoint.
     * @return NetworkDestination representation of the endpoint.
     */
    protected static NetworkDestination getDestination(SocketChannel channel) {
        Socket socket = channel.socket();
        return new NetworkDestination(
                socket.getInetAddress().getHostName(),
                socket.getPort());
    }
}
| |
// Copyright 2017 The Bazel Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.syntax;
import static com.google.common.truth.Truth.assertThat;
import static com.google.devtools.build.lib.syntax.Parser.ParsingLevel.LOCAL_LEVEL;
import static com.google.devtools.build.lib.syntax.Parser.ParsingLevel.TOP_LEVEL;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap;
import com.google.devtools.build.lib.syntax.util.EvaluationTestCase;
import java.io.IOException;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests the {@code toString} and pretty printing methods for {@link ASTNode} subclasses. */
@RunWith(JUnit4.class)
public class ASTPrettyPrintTest extends EvaluationTestCase {
private String join(String... lines) {
    // Glue the given lines together with '\n' separators (no trailing newline).
    final Joiner newlineJoiner = Joiner.on("\n");
    return newlineJoiner.join(lines);
}
/**
 * Asserts that the given node's pretty print at a given indent level matches the given string.
 */
private void assertPrettyMatches(ASTNode node, int indentLevel, String expected) {
    StringBuilder out = new StringBuilder();
    try {
        node.prettyPrint(out, indentLevel);
    } catch (IOException e) {
        // StringBuilder's Appendable methods never actually throw.
        throw new AssertionError(e);
    }
    assertThat(out.toString()).isEqualTo(expected);
}
/** Asserts that the given node's pretty print with no indent (level 0) matches the given string. */
private void assertPrettyMatches(ASTNode node, String expected) {
    assertPrettyMatches(node, 0, expected);
}
/** Asserts that the given node's pretty print with one indent level matches the given string. */
private void assertIndentedPrettyMatches(ASTNode node, String expected) {
    assertPrettyMatches(node, 1, expected);
}
/** Asserts that the given node's {@code toString} (not its pretty print) matches the given string. */
private void assertTostringMatches(ASTNode node, String expected) {
    assertThat(node.toString()).isEqualTo(expected);
}
/**
 * Parses the given string as an expression, and asserts that its pretty print matches the given
 * string.
 */
private void assertExprPrettyMatches(String source, String expected) {
    assertPrettyMatches(parseExpression(source), expected);
}
/**
* Parses the given string as an expression, and asserts that its {@code toString} matches the
* given string.
*/
private void assertExprTostringMatches(String source, String expected) {
Expression node = parseExpression(source);
assertThat(node.toString()).isEqualTo(expected);
}
/**
* Parses the given string as an expression, and asserts that both its pretty print and {@code
* toString} return the original string.
*/
private void assertExprBothRoundTrip(String source) {
assertExprPrettyMatches(source, source);
assertExprTostringMatches(source, source);
}
/**
* Parses the given string as a statement, and asserts that its pretty print with one indent
* matches the given string.
*/
private void assertStmtIndentedPrettyMatches(
Parser.ParsingLevel parsingLevel, String source, String expected) {
Statement node = parseStatement(parsingLevel, source);
assertIndentedPrettyMatches(node, expected);
}
/**
* Parses the given string as an statement, and asserts that its {@code toString} matches the
* given string.
*/
private void assertStmtTostringMatches(
Parser.ParsingLevel parsingLevel, String source, String expected) {
Statement node = parseStatement(parsingLevel, source);
assertThat(node.toString()).isEqualTo(expected);
}
// Expressions.
@Test
public void abstractComprehension() {
// Covers DictComprehension and ListComprehension.
assertExprBothRoundTrip("[z for y in x if True for z in y]");
assertExprBothRoundTrip("{z: x for y in x if True for z in y}");
}
@Test
public void binaryOperatorExpression() {
assertExprPrettyMatches("1 + 2", "(1 + 2)");
assertExprTostringMatches("1 + 2", "1 + 2");
assertExprPrettyMatches("1 + (2 * 3)", "(1 + (2 * 3))");
assertExprTostringMatches("1 + (2 * 3)", "1 + 2 * 3");
}
@Test
public void conditionalExpression() {
assertExprBothRoundTrip("1 if True else 2");
}
@Test
public void dictionaryLiteral() {
assertExprBothRoundTrip("{1: \"a\", 2: \"b\"}");
}
@Test
public void dotExpression() {
assertExprBothRoundTrip("o.f");
}
@Test
public void funcallExpression() {
assertExprBothRoundTrip("f()");
assertExprBothRoundTrip("f(a)");
assertExprBothRoundTrip("f(a, b = B, *c, d = D, **e)");
assertExprBothRoundTrip("o.f()");
}
@Test
public void identifier() {
assertExprBothRoundTrip("foo");
}
@Test
public void indexExpression() {
assertExprBothRoundTrip("a[i]");
}
@Test
public void integerLiteral() {
assertExprBothRoundTrip("5");
}
@Test
public void listLiteralShort() {
assertExprBothRoundTrip("[]");
assertExprBothRoundTrip("[5]");
assertExprBothRoundTrip("[5, 6]");
assertExprBothRoundTrip("()");
assertExprBothRoundTrip("(5,)");
assertExprBothRoundTrip("(5, 6)");
}
@Test
public void listLiteralLong() {
// List literals with enough elements to trigger the abbreviated toString() format.
assertExprPrettyMatches("[1, 2, 3, 4, 5, 6]", "[1, 2, 3, 4, 5, 6]");
assertExprTostringMatches("[1, 2, 3, 4, 5, 6]", "[1, 2, 3, 4, <2 more arguments>]");
assertExprPrettyMatches("(1, 2, 3, 4, 5, 6)", "(1, 2, 3, 4, 5, 6)");
assertExprTostringMatches("(1, 2, 3, 4, 5, 6)", "(1, 2, 3, 4, <2 more arguments>)");
}
@Test
public void listLiteralNested() {
// Make sure that the inner list doesn't get abbreviated when the outer list is printed using
// prettyPrint().
assertExprPrettyMatches(
"[1, 2, 3, [10, 20, 30, 40, 50, 60], 4, 5, 6]",
"[1, 2, 3, [10, 20, 30, 40, 50, 60], 4, 5, 6]");
// It doesn't matter as much what toString does. This case demonstrates an apparent bug in how
// Printer#printList abbreviates the nested contents. We can keep this test around to help
// monitor changes in the buggy behavior or eventually fix it.
assertExprTostringMatches(
"[1, 2, 3, [10, 20, 30, 40, 50, 60], 4, 5, 6]",
"[1, 2, 3, [10, 20, 30, 40, <2 more argu...<2 more arguments>], <3 more arguments>]");
}
@Test
public void sliceExpression() {
assertExprBothRoundTrip("a[b:c:d]");
assertExprBothRoundTrip("a[b:c]");
assertExprBothRoundTrip("a[b:]");
assertExprBothRoundTrip("a[:c:d]");
assertExprBothRoundTrip("a[:c]");
assertExprBothRoundTrip("a[::d]");
assertExprBothRoundTrip("a[:]");
}
@Test
public void stringLiteral() {
assertExprBothRoundTrip("\"foo\"");
assertExprBothRoundTrip("\"quo\\\"ted\"");
}
@Test
public void unaryOperatorExpression() {
assertExprPrettyMatches("not True", "not (True)");
assertExprTostringMatches("not True", "not True");
assertExprPrettyMatches("-5", "-(5)");
assertExprTostringMatches("-5", "-5");
}
// Statements.
@Test
public void assignmentStatement() {
assertStmtIndentedPrettyMatches(LOCAL_LEVEL, "x = y", " x = y\n");
assertStmtTostringMatches(LOCAL_LEVEL, "x = y", "x = y\n");
}
@Test
public void augmentedAssignmentStatement() {
assertStmtIndentedPrettyMatches(LOCAL_LEVEL, "x += y", " x += y\n");
assertStmtTostringMatches(LOCAL_LEVEL, "x += y", "x += y\n");
}
@Test
public void expressionStatement() {
assertStmtIndentedPrettyMatches(LOCAL_LEVEL, "5", " 5\n");
assertStmtTostringMatches(LOCAL_LEVEL, "5", "5\n");
}
@Test
public void functionDefStatement() {
assertStmtIndentedPrettyMatches(
TOP_LEVEL,
join("def f(x):",
" print(x)"),
join(" def f(x):",
" print(x)",
""));
assertStmtTostringMatches(
TOP_LEVEL,
join("def f(x):",
" print(x)"),
"def f(x): ...\n");
assertStmtIndentedPrettyMatches(
TOP_LEVEL,
join("def f(a, b=B, *c, d=D, **e):",
" print(x)"),
join(" def f(a, b=B, *c, d=D, **e):",
" print(x)",
""));
assertStmtTostringMatches(
TOP_LEVEL,
join("def f(a, b=B, *c, d=D, **e):",
" print(x)"),
"def f(a, b = B, *c, d = D, **e): ...\n");
assertStmtIndentedPrettyMatches(
TOP_LEVEL,
join("def f():",
" pass"),
join(" def f():",
" pass",
""));
assertStmtTostringMatches(
TOP_LEVEL,
join("def f():",
" pass"),
"def f(): ...\n");
}
@Test
public void flowStatement() {
// The parser would complain if we tried to construct them from source.
ASTNode breakNode = new FlowStatement(FlowStatement.Kind.BREAK);
assertIndentedPrettyMatches(breakNode, " break\n");
assertTostringMatches(breakNode, "break\n");
ASTNode continueNode = new FlowStatement(FlowStatement.Kind.CONTINUE);
assertIndentedPrettyMatches(continueNode, " continue\n");
assertTostringMatches(continueNode, "continue\n");
}
@Test
public void forStatement() {
assertStmtIndentedPrettyMatches(
LOCAL_LEVEL,
join("for x in y:",
" print(x)"),
join(" for x in y:",
" print(x)",
""));
assertStmtTostringMatches(
LOCAL_LEVEL,
join("for x in y:",
" print(x)"),
"for x in y: ...\n");
assertStmtIndentedPrettyMatches(
LOCAL_LEVEL,
join("for x in y:",
" pass"),
join(" for x in y:",
" pass",
""));
assertStmtTostringMatches(
LOCAL_LEVEL,
join("for x in y:",
" pass"),
"for x in y: ...\n");
}
@Test
public void ifStatement() {
assertStmtIndentedPrettyMatches(
LOCAL_LEVEL,
join("if True:",
" print(x)"),
join(" if True:",
" print(x)",
""));
assertStmtTostringMatches(
LOCAL_LEVEL,
join("if True:",
" print(x)"),
"if True: ...\n");
assertStmtIndentedPrettyMatches(
LOCAL_LEVEL,
join("if True:",
" print(x)",
"elif False:",
" print(y)",
"else:",
" print(z)"),
join(" if True:",
" print(x)",
" elif False:",
" print(y)",
" else:",
" print(z)",
""));
assertStmtTostringMatches(
LOCAL_LEVEL,
join("if True:",
" print(x)",
"elif False:",
" print(y)",
"else:",
" print(z)"),
"if True: ...\n");
}
@Test
public void loadStatement() {
// load("foo.bzl", a="A", "B")
ASTNode loadStatement = new LoadStatement(
new StringLiteral("foo.bzl"),
ImmutableMap.of(new Identifier("a"), "A", new Identifier("B"), "B"));
assertIndentedPrettyMatches(
loadStatement,
" load(\"foo.bzl\", a=\"A\", \"B\")\n");
assertTostringMatches(
loadStatement,
"load(\"foo.bzl\", a=\"A\", \"B\")\n");
}
@Test
public void returnStatement() {
assertIndentedPrettyMatches(
new ReturnStatement(new StringLiteral("foo")),
" return \"foo\"\n");
assertTostringMatches(
new ReturnStatement(new StringLiteral("foo")),
"return \"foo\"\n");
assertIndentedPrettyMatches(
new ReturnStatement(new Identifier("None")),
" return\n");
assertTostringMatches(
new ReturnStatement(new Identifier("None")),
"return\n");
}
// Miscellaneous.
@Test
public void buildFileAST() {
ASTNode node = parseBuildFileASTWithoutValidation("print(x)\nprint(y)");
assertIndentedPrettyMatches(
node,
join(" print(x)",
" print(y)",
""));
assertTostringMatches(
node,
"<BuildFileAST with 2 statements>");
}
@Test
public void comment() {
Comment node = new Comment("foo");
assertIndentedPrettyMatches(node, " # foo");
assertTostringMatches(node, "foo");
}
/* Not tested explicitly because they're covered implicitly by tests for other nodes:
* - LValue
* - DictionaryEntryLiteral
* - passed arguments / formal parameters
* - ConditionalStatements
*/
}
| |
//============================================================================
//
// Copyright (C) 2006-2022 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
//============================================================================
package org.talend.components.salesforce.runtime;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.avro.Schema;
import org.apache.avro.generic.IndexedRecord;
import org.junit.Assert;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.rules.TestRule;
import org.talend.components.api.component.runtime.Reader;
import org.talend.components.api.container.DefaultComponentRuntimeContainerImpl;
import org.talend.components.api.container.RuntimeContainer;
import org.talend.components.api.exception.DataRejectException;
import org.talend.components.salesforce.test.SalesforceRuntimeTestUtil;
import org.talend.components.salesforce.test.SalesforceTestBase;
import org.talend.components.salesforce.tsalesforcebulkexec.TSalesforceBulkExecDefinition;
import org.talend.components.salesforce.tsalesforcebulkexec.TSalesforceBulkExecProperties;
import org.talend.daikon.avro.AvroRegistry;
import org.talend.daikon.avro.converter.IndexedRecordConverter;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
 * Integration tests for the Salesforce bulk-exec component using the JSON content type.
 * Each test writes a JSON data file, runs the bulk operation against it, and verifies the
 * per-record success/created flags exposed through the component's output schema.
 */
public class SalesforceBulkLoadJSONTestIT extends SalesforceTestBase {

    @ClassRule
    public static final TestRule DISABLE_IF_NEEDED = new DisableIfMissingConfig();

    private SalesforceRuntimeTestUtil util = new SalesforceRuntimeTestUtil();

    @Rule
    public TemporaryFolder tempFolder = new TemporaryFolder();

    /** Runs a driver-side bulk INSERT and checks the global success counter. */
    @Test
    public void testInsert() throws Throwable {
        String data_file = tempFolder.newFile("Insert.json").getAbsolutePath();
        new ObjectMapper().writeValue(new File(data_file), util.getTestData());

        // bulkexec part
        TSalesforceBulkExecDefinition defin = (TSalesforceBulkExecDefinition) getComponentService()
                .getComponentDefinition(TSalesforceBulkExecDefinition.COMPONENT_NAME);
        TSalesforceBulkExecProperties modelProps = (TSalesforceBulkExecProperties) defin.createRuntimeProperties();
        SalesforceBulkExecRuntime bulk =
                util.initBulk(defin, data_file, modelProps, util.getTestSchema1(), util.getTestSchema5());
        modelProps.outputAction.setValue(TSalesforceBulkExecProperties.OutputAction.INSERT);
        modelProps.contentType.setValue(TSalesforceBulkExecProperties.ContentType.JSON);

        RuntimeContainer container = new DefaultComponentRuntimeContainerImpl() {

            @Override
            public String getCurrentComponentId() {
                return "tSalesforceBulkExec";
            }
        };
        bulk.runAtDriver(container);
        Assert.assertEquals(3, container.getGlobalData("tSalesforceBulkExecNB_SUCCESS"));
    }

    /** Runs a reader-based bulk INSERT and checks each record's Success column. */
    @Test
    public void testBulkReaderInsert() throws Throwable {
        String data_file = tempFolder.newFile("Insert.json").getAbsolutePath();
        new ObjectMapper().writeValue(new File(data_file), util.getTestData());

        // bulkexec part
        TSalesforceBulkExecDefinition defin = (TSalesforceBulkExecDefinition) getComponentService()
                .getComponentDefinition(TSalesforceBulkExecDefinition.COMPONENT_NAME);
        TSalesforceBulkExecProperties modelProps = (TSalesforceBulkExecProperties) defin.createRuntimeProperties();
        Reader reader = util.initReader(defin, data_file, modelProps, util.getTestSchema1(), util.getTestSchema5());
        modelProps.outputAction.setValue(TSalesforceBulkExecProperties.OutputAction.INSERT);
        modelProps.contentType.setValue(TSalesforceBulkExecProperties.ContentType.JSON);

        try {
            for (boolean available = reader.start(); available; available = reader.advance()) {
                IndexedRecord data = IndexedRecord.class.cast(reader.getCurrent());
                Assert.assertTrue("true".equals(data.get(5)));// schema column 5 -> Success
            }
        } finally {
            reader.close();
        }
    }

    /** Creates test records, bulk-DELETEs them, and verifies the returned record ids. */
    @Test
    public void testDelete() throws Throwable {
        List<String> ids = util.createTestData();

        final List<Map<String, String>> testData = new ArrayList<Map<String, String>>();
        for (String id : ids) {
            Map<String, String> row = new HashMap<String, String>();
            row.put("Id", id);
            testData.add(row);
        }

        String data_file = tempFolder.newFile("delete.json").getAbsolutePath();
        ObjectMapper mapper = new ObjectMapper();
        mapper.writeValue(new File(data_file), testData);

        // bulkexec part
        TSalesforceBulkExecDefinition defin = (TSalesforceBulkExecDefinition) getComponentService()
                .getComponentDefinition(TSalesforceBulkExecDefinition.COMPONENT_NAME);
        TSalesforceBulkExecProperties modelProps = (TSalesforceBulkExecProperties) defin.createRuntimeProperties();
        Reader reader = util.initReader(defin, data_file, modelProps, util.getTestSchema1(), util.getTestSchema5());
        modelProps.contentType.setValue(TSalesforceBulkExecProperties.ContentType.JSON);
        modelProps.outputAction.setValue(TSalesforceBulkExecProperties.OutputAction.DELETE);

        try {
            IndexedRecordConverter<Object, ? extends IndexedRecord> factory = null;
            List<String> resultIds = new ArrayList<String>();
            for (boolean available = reader.start(); available; available = reader.advance()) {
                try {
                    Object data = reader.getCurrent();
                    factory = initCurrentData(factory, data);
                    IndexedRecord record = factory.convertToAvro(data);
                    String id = (String) record.get(4);// record ids
                    resultIds.add(id);
                } catch (Exception e) {
                    Assert.fail(e.getMessage());
                }
            }
            Assert.assertEquals(ids, resultIds);
        } finally {
            try {
                reader.close();
            } finally {
                try {
                    // Best effort: the records were just deleted, so cleanup may legitimately fail.
                    util.deleteTestData(ids);
                } catch (AssertionError e) {
                    // do nothing
                }
            }
        }
    }

    /**
     * Bulk-UPDATEs one existing record and one non-existent record; the former must succeed
     * with the new phone number, the latter must be rejected with its original field values.
     */
    @Test
    public void testUpdate() throws Throwable {
        List<String> ids = util.createTestData();
        String id = ids.get(0);

        final List<Map<String, String>> testData = new ArrayList<Map<String, String>>();
        Map<String, String> datarow = new HashMap<String, String>();
        datarow.put("Id", id);
        datarow.put("FirstName", "Wei");
        datarow.put("LastName", "Wang");
        datarow.put("Phone", "010-89492686");// update the field
        testData.add(datarow);
        datarow = new HashMap<String, String>();
        datarow.put("Id", "not_exist");// should reject
        datarow.put("FirstName", "Who");
        datarow.put("LastName", "Who");
        datarow.put("Phone", "010-89492686");
        testData.add(datarow);

        String data_file = tempFolder.newFile("update.json").getAbsolutePath();
        ObjectMapper mapper = new ObjectMapper();
        mapper.writeValue(new File(data_file), testData);

        // bulkexec part
        TSalesforceBulkExecDefinition defin = (TSalesforceBulkExecDefinition) getComponentService()
                .getComponentDefinition(TSalesforceBulkExecDefinition.COMPONENT_NAME);
        Schema testSchema4 = util.getTestSchema6();
        testSchema4.addProp("include-all-fields", "true");
        TSalesforceBulkExecProperties modelProps = (TSalesforceBulkExecProperties) defin.createRuntimeProperties();
        Reader reader = util.initReader(defin, data_file, modelProps, testSchema4, testSchema4);
        modelProps.contentType.setValue(TSalesforceBulkExecProperties.ContentType.JSON);
        modelProps.outputAction.setValue(TSalesforceBulkExecProperties.OutputAction.UPDATE);

        try {
            IndexedRecordConverter<Object, ? extends IndexedRecord> factory = null;
            for (boolean available = reader.start(); available; available = reader.advance()) {
                try {
                    Object data = reader.getCurrent();
                    factory = initCurrentData(factory, data);
                    IndexedRecord record = factory.convertToAvro(data);
                    String phone = String.valueOf(record.get(4));
                    Assert.assertTrue("true".equals(record.get(5)));// schema column 6 -> Success
                    Assert.assertEquals("010-89492686", phone);
                } catch (DataRejectException e) {
                    Map<String, Object> info = e.getRejectInfo();
                    Object data = info.get("talend_record");
                    String err = (String) info.get("error");
                    factory = initCurrentData(factory, data);
                    IndexedRecord record = factory.convertToAvro(data);
                    String resultid = (String) record.get(1);
                    String firstname = (String) record.get(2);
                    String lastname = (String) record.get(3);
                    String phone = (String) record.get(4);
                    // id should not null, it should be keep as in bulk file.
                    Assert.assertEquals("not_exist", resultid);
                    Assert.assertEquals("Who", firstname);
                    Assert.assertEquals("Who", lastname);
                    Assert.assertEquals("010-89492686", phone);
                    Assert.assertTrue("false".equals(record.get(5)));// schema column 6 -> Success
                    Assert.assertTrue(err != null);
                }
            }
        } finally {
            try {
                reader.close();
            } finally {
                util.deleteTestData(ids);
            }
        }
    }

    /**
     * Bulk-UPSERTs one existing record (expected update) and one record without an Id
     * (expected insert), checking the salesforce_created flag for each.
     */
    @Test
    public void testUpsert() throws Throwable {
        List<String> ids = util.createTestData();
        String id = ids.get(0);

        final List<Map<String, String>> testData = new ArrayList<Map<String, String>>();
        Map<String, String> datarow = new HashMap<String, String>();
        datarow.put("Id", id);// should update
        datarow.put("FirstName", "Wei");
        datarow.put("LastName", "Wang");
        datarow.put("Phone", "010-89492686");// update the field
        testData.add(datarow);
        datarow = new HashMap<String, String>();
        datarow.put("Id", null);// should insert
        datarow.put("FirstName", "Who");
        datarow.put("LastName", "Who");
        datarow.put("Phone", "010-89492686");
        testData.add(datarow);

        String data_file = tempFolder.newFile("upsert.json").getAbsolutePath();
        ObjectMapper mapper = new ObjectMapper();
        mapper.writeValue(new File(data_file), testData);

        // bulkexec part
        TSalesforceBulkExecDefinition defin = (TSalesforceBulkExecDefinition) getComponentService()
                .getComponentDefinition(TSalesforceBulkExecDefinition.COMPONENT_NAME);
        TSalesforceBulkExecProperties modelProps = (TSalesforceBulkExecProperties) defin.createRuntimeProperties();
        Reader reader = util.initReader(defin, data_file, modelProps, util.getTestSchema6(), util.getTestSchema6());
        modelProps.outputAction.setValue(TSalesforceBulkExecProperties.OutputAction.UPSERT);
        modelProps.upsertKeyColumn.setValue("Id");
        modelProps.contentType.setValue(TSalesforceBulkExecProperties.ContentType.JSON);

        try {
            IndexedRecordConverter<Object, ? extends IndexedRecord> factory = null;
            int index = -1;
            for (boolean available = reader.start(); available; available = reader.advance()) {
                try {
                    Object data = reader.getCurrent();
                    factory = initCurrentData(factory, data);
                    IndexedRecord record = factory.convertToAvro(data);
                    index++;
                    if (index == 0) {
                        Assert.assertTrue("false".equals(record.get(5)));// schema column 5 -> salesforce_created
                    } else if (index == 1) {
                        Assert.assertTrue("true".equals(record.get(5)));// schema column 5 -> salesforce_created
                    }
                } catch (DataRejectException e) {
                    Assert.fail(e.getMessage());
                }
            }
        } finally {
            try {
                reader.close();
            } finally {
                util.deleteTestData(ids);
            }
        }
    }

    /**
     * Lazily creates an {@link IndexedRecordConverter} for the runtime class of {@code data}.
     * The converter is created once and reused for all subsequent records.
     *
     * @param factory previously created converter, or null on the first record
     * @param data the current record read from the component
     * @return a converter able to turn {@code data} into an {@link IndexedRecord}
     */
    @SuppressWarnings("unchecked")
    private IndexedRecordConverter<Object, ? extends IndexedRecord> initCurrentData(
            IndexedRecordConverter<Object, ? extends IndexedRecord> factory, Object data) {
        if (factory == null) {
            factory = (IndexedRecordConverter<Object, ? extends IndexedRecord>) new AvroRegistry()
                    .createIndexedRecordConverter(data.getClass());
        }
        return factory;
    }
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.intellij.plugin.build;
import com.facebook.buck.intellij.plugin.config.BuckSettingsProvider;
import com.intellij.execution.ExecutionException;
import com.intellij.execution.configurations.GeneralCommandLine;
import com.intellij.execution.process.OSProcessHandler;
import com.intellij.execution.process.ProcessEvent;
import com.intellij.execution.process.ProcessHandler;
import com.intellij.execution.process.ProcessListener;
import com.intellij.execution.process.ProcessOutputTypes;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.vcs.LineHandlerHelper;
import com.intellij.openapi.vfs.CharsetToolkit;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.nio.charset.Charset;
import java.util.Iterator;
/**
 * The handler for buck commands with text outputs.
 */
public abstract class BuckCommandHandler {

  protected static final Logger LOG = Logger.getInstance(BuckCommandHandler.class);

  /** Threshold (milliseconds) above which a finished command is logged at INFO level. */
  private static final long LONG_TIME = 10 * 1000;

  protected final Project project;
  protected final BuckCommand command;

  private final File workingDirectory;
  private final GeneralCommandLine commandLine;
  private final Object processStateLock = new Object();

  @SuppressWarnings({"FieldAccessedSynchronizedAndUnsynchronized"})
  private Process process;

  @SuppressWarnings({"FieldAccessedSynchronizedAndUnsynchronized"})
  private OSProcessHandler handler;

  /**
   * Character set to use for IO.
   */
  @SuppressWarnings({"FieldAccessedSynchronizedAndUnsynchronized"})
  private Charset charset = CharsetToolkit.UTF8_CHARSET;

  /**
   * Buck execution start timestamp.
   */
  private long startTime;

  /**
   * The partial line from stderr stream.
   */
  private final StringBuilder stderrLine = new StringBuilder();

  /**
   * @param project a project
   * @param directory a process directory
   * @param command a command to execute (if empty string, the parameter is ignored)
   */
  public BuckCommandHandler(
      Project project,
      File directory,
      BuckCommand command) {
    String buckExecutable = BuckSettingsProvider.getInstance().getState().buckExecutable;
    this.project = project;
    this.command = command;
    commandLine = new GeneralCommandLine();
    commandLine.setExePath(buckExecutable);
    workingDirectory = directory;
    commandLine.withWorkDirectory(workingDirectory);
    commandLine.addParameter(command.name());
  }

  /**
   * Start process
   */
  public synchronized void start() {
    checkNotStarted();
    try {
      startTime = System.currentTimeMillis();
      process = startProcess();
      startHandlingStreams();
    } catch (ProcessCanceledException e) {
      LOG.warn(e);
    } catch (Throwable t) {
      // Suppress errors for disposed projects; the command is moot at that point.
      if (!project.isDisposed()) {
        LOG.error(t);
      }
    }
  }

  /**
   * @return true if process is started.
   */
  public final synchronized boolean isStarted() {
    return process != null;
  }

  /**
   * Check that process is not started yet.
   *
   * @throws IllegalStateException if process has been already started
   */
  private void checkNotStarted() {
    if (isStarted()) {
      throw new IllegalStateException("The process has been already started");
    }
  }

  /**
   * Check that process is started.
   *
   * @throws IllegalStateException if process has not been started
   */
  protected final void checkStarted() {
    if (!isStarted()) {
      throw new IllegalStateException("The process is not started yet");
    }
  }

  /**
   * @return the command line being executed (including the buck executable and subcommand)
   */
  public GeneralCommandLine command() {
    return commandLine;
  }

  /**
   * @return a context project
   */
  public Project project() {
    return project;
  }

  /**
   * Start the buck process.
   */
  @Nullable
  protected Process startProcess() throws ExecutionException {
    synchronized (processStateLock) {
      final ProcessHandler processHandler = createProcess(commandLine);
      handler = (OSProcessHandler) processHandler;
      return handler.getProcess();
    }
  }

  /**
   * Start handling process output streams for the handler.
   */
  protected void startHandlingStreams() {
    if (handler == null) {
      return;
    }
    handler.addProcessListener(new ProcessListener() {
      @Override
      public void startNotified(final ProcessEvent event) {
      }

      @Override
      public void processTerminated(final ProcessEvent event) {
        BuckCommandHandler.this.processTerminated();
      }

      @Override
      public void processWillTerminate(
          final ProcessEvent event,
          final boolean willBeDestroyed) {
      }

      @Override
      public void onTextAvailable(final ProcessEvent event, final Key outputType) {
        BuckCommandHandler.this.onTextAvailable(event.getText(), outputType);
      }
    });
    handler.startNotify();
  }

  /**
   * Wait for process termination.
   */
  public void waitFor() {
    checkStarted();
    if (handler != null) {
      handler.waitFor();
    }
  }

  /**
   * Creates the process handler for the given command line.
   *
   * @param commandLine the fully-configured command line to run
   * @return a handler wrapping the spawned process, using this handler's charset
   * @throws ExecutionException if the process cannot be started
   */
  public ProcessHandler createProcess(GeneralCommandLine commandLine)
      throws ExecutionException {
    // TODO(t7984081): Use ProcessExecutor to start buck process.
    Process process = commandLine.createProcess();
    return new MyOSProcessHandler(process, commandLine, getCharset());
  }

  /** An {@link OSProcessHandler} that reads process output with an explicit charset. */
  private static class MyOSProcessHandler extends OSProcessHandler {
    private final Charset myCharset;

    public MyOSProcessHandler(
        Process process,
        GeneralCommandLine commandLine,
        Charset charset) {
      super(process, commandLine.getCommandLineString());
      myCharset = charset;
    }

    @Override
    public Charset getCharset() {
      return myCharset;
    }
  }

  /**
   * @return a character set to use for IO.
   */
  public Charset getCharset() {
    return charset;
  }

  /**
   * Runs the command synchronously on the calling thread: beforeCommand(), start, optional
   * post-start action, wait for termination, afterCommand(), then timing log.
   *
   * @param postStartAction an optional action to run after the process has started
   */
  public void runInCurrentThread(@Nullable Runnable postStartAction) {
    if (!beforeCommand()) {
      return;
    }
    start();
    if (isStarted()) {
      if (postStartAction != null) {
        postStartAction.run();
      }
      waitFor();
    }
    afterCommand();
    logTime();
  }

  /** Logs how long the command took; slow commands are promoted to INFO with full details. */
  private void logTime() {
    if (startTime > 0) {
      long time = System.currentTimeMillis() - startTime;
      // When debug logging is off, only surface unusually slow commands at INFO.
      if (!LOG.isDebugEnabled() && time > LONG_TIME) {
        LOG.info(String.format("buck %s took %s ms. Command parameters: %n%s",
            command,
            time,
            commandLine.getCommandLineString()));
      } else {
        LOG.debug(String.format("buck %s took %s ms", command, time));
      }
    } else {
      LOG.debug(String.format("buck %s finished.", command));
    }
  }

  /** Flushes any buffered partial stderr line once the process has terminated. */
  protected void processTerminated() {
    if (stderrLine.length() != 0) {
      onTextAvailable("\n", ProcessOutputTypes.STDERR);
    }
  }

  /** Splits incoming process text into lines and forwards them to {@link #notifyLines}. */
  protected void onTextAvailable(final String text, final Key outputType) {
    Iterator<String> lines = LineHandlerHelper.splitText(text).iterator();
    notifyLines(outputType, lines, stderrLine);
  }

  /**
   * Notify listeners for each complete line. Note that in the case of stderr,
   * the last line is saved.
   *
   * @param outputType output type
   * @param lines line iterator
   * @param lineBuilder a line builder
   */
  protected abstract void notifyLines(
      final Key outputType,
      final Iterator<String> lines,
      final StringBuilder lineBuilder);

  /** Hook invoked before the command starts; returning false aborts the run. */
  protected abstract boolean beforeCommand();

  /** Hook invoked after the command has finished (or failed to start). */
  protected abstract void afterCommand();
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.repair;
import java.io.IOException;
import java.net.InetAddress;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.Condition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.exceptions.RepairException;
import org.apache.cassandra.gms.*;
import org.apache.cassandra.service.ActiveRepairService;
import org.apache.cassandra.utils.*;
/**
* Coordinates the (active) repair of a token range.
*
* A given RepairSession repairs a set of replicas for a given range on a list
* of column families. For each of the column family to repair, RepairSession
* creates a RepairJob that handles the repair of that CF.
*
* A given RepairJob has the 2 main phases:
 * 1. Validation phase: the job requests merkle trees from each of the replicas involved
* (RepairJob.sendTreeRequests()) and waits until all trees are received (in
* validationComplete()).
 * 2. Synchronization phase: once all trees are received, the job compares each tree with
 *    all the others using a so-called Differencer (started by submitDifferencers()). If
 *    there are differences between 2 trees, the corresponding Differencer will start streaming
 *    the differences between the 2 endpoints concerned (Differencer.performStreamingRepair).
* The job is done once all its Differencer are done (i.e. have either computed no differences
* or the streaming they started is done (syncComplete())).
*
 * A given session will execute the first phase (validation phase) of each of its jobs
* sequentially. In other words, it will start the first job and only start the next one
* once that first job validation phase is complete. This is done so that the replica only
* create one merkle tree at a time, which is our way to ensure that such creation starts
* roughly at the same time on every node (see CASSANDRA-2816). However the synchronization
* phases are allowed to run concurrently (with each other and with validation phases).
*
* A given RepairJob has 2 modes: either sequential or not (isSequential flag). If sequential,
* it will requests merkle tree creation from each replica in sequence (though in that case
* we still first send a message to each node to flush and snapshot data so each merkle tree
* creation is still done on similar data, even if the actual creation is not
 * done simultaneously). If not sequential, all merkle trees are requested in parallel.
* Similarly, if a job is sequential, it will handle one Differencer at a time, but will handle
* all of them in parallel otherwise.
*/
public class RepairSession extends WrappedRunnable implements IEndpointStateChangeSubscriber, IFailureDetectionEventListener
{
    private static Logger logger = LoggerFactory.getLogger(RepairSession.class);

    /** Repair session ID */
    private final UUID id;
    /** Keyspace whose column families are being repaired. */
    public final String keyspace;
    // Column families to repair; asserted non-empty in the constructor.
    private final String[] cfnames;
    /** True if merkle tree creation is requested from each replica in sequence rather than in parallel. */
    public final boolean isSequential;
    /** Range to repair */
    public final Range<Token> range;
    /** Neighbor replicas taking part in this session (does not include the local node). */
    public final Set<InetAddress> endpoints;
    // First error observed by a callback thread; published here so the coordinating thread
    // (blocked on 'completed' in runMayThrow) can rethrow it once the session finishes.
    private volatile Exception exception;
    // Ensures a failed/convicted endpoint triggers failedNode()/forceShutdown() at most once.
    private final AtomicBoolean isFailed = new AtomicBoolean(false);

    // First, all RepairJobs are added to this queue,
    final Queue<RepairJob> jobs = new ConcurrentLinkedQueue<>();
    // and after receiving all validation, the job is moved to
    // this map, keyed by CF name.
    final Map<String, RepairJob> syncingJobs = new ConcurrentHashMap<>();

    // Signalled when the whole session is finished (every job synced, or the session was
    // shut down early); the thread running runMayThrow() blocks on it.
    private final SimpleCondition completed = new SimpleCondition();
    // Signalled once all merkle trees have been received for every job, i.e. the
    // differencing phase of the session is done.
    public final Condition differencingDone = new SimpleCondition();

    // Identifier of the parent repair session grouping the per-range sessions together.
    public final UUID parentRepairSession;

    // Set by terminate(); lets the callbacks below distinguish "job missing because the
    // session was torn down" from a programming error (see the asserts in the callbacks).
    private volatile boolean terminated = false;

    /**
     * Create new repair session.
     *
     * @param parentRepairSession id of the parent session grouping per-range sessions
     * @param range range to repair
     * @param keyspace name of keyspace
     * @param isSequential true if performing repair on snapshots sequentially
     * @param endpoints the data centers that should be part of the repair; null for all DCs
     * @param cfnames names of columnfamilies
     */
    public RepairSession(UUID parentRepairSession, Range<Token> range, String keyspace, boolean isSequential, Set<InetAddress> endpoints, String... cfnames)
    {
        this(parentRepairSession, UUIDGen.getTimeUUID(), range, keyspace, isSequential, endpoints, cfnames);
    }

    // Variant taking an explicit session id (used when the id must be known up front).
    public RepairSession(UUID parentRepairSession, UUID id, Range<Token> range, String keyspace, boolean isSequential, Set<InetAddress> endpoints, String[] cfnames)
    {
        this.parentRepairSession = parentRepairSession;
        this.id = id;
        this.isSequential = isSequential;
        this.keyspace = keyspace;
        this.cfnames = cfnames;
        assert cfnames.length > 0 : "Repairing no column families seems pointless, doesn't it";
        this.range = range;
        this.endpoints = endpoints;
    }

    public UUID getId()
    {
        return id;
    }

    public Range<Token> getRange()
    {
        return range;
    }

    /**
     * Receive merkle tree response or failed response from {@code endpoint} for current repair job.
     *
     * @param desc repair job description
     * @param endpoint endpoint that sent merkle tree
     * @param tree calculated merkle tree, or null if validation failed
     */
    public void validationComplete(RepairJobDesc desc, InetAddress endpoint, MerkleTree tree)
    {
        RepairJob job = jobs.peek();
        if (job == null)
        {
            // Session was shut down before this response arrived; nothing left to do.
            assert terminated;
            return;
        }

        if (tree == null)
        {
            exception = new RepairException(desc, "Validation failed in " + endpoint);
            forceShutdown();
            return;
        }

        logger.info(String.format("[repair #%s] Received merkle tree for %s from %s", getId(), desc.columnFamily, endpoint));
        assert job.desc.equals(desc);
        // addTree appears to return the number of trees still outstanding, so 0 means this
        // job now has a tree from every endpoint — confirm against RepairJob.addTree.
        if (job.addTree(endpoint, tree) == 0)
        {
            logger.debug("All response received for {}/{}", getId(), desc.columnFamily);
            if (!job.isFailed())
            {
                syncingJobs.put(job.desc.columnFamily, job);
                job.submitDifferencers();
            }

            // This job is complete, switching to next in line (note that only
            // one thread can ever do this, since addTree returned 0 exactly once)
            jobs.poll();
            RepairJob nextJob = jobs.peek();
            if (nextJob == null)
                // We are done with this repair session as far as differencing
                // is considered. Just inform the session
                differencingDone.signalAll();
            else
                nextJob.sendTreeRequests(endpoints);
        }
    }

    /**
     * Notify this session that sync completed/failed with given {@code NodePair}.
     *
     * @param desc synced repair job
     * @param nodes nodes that completed sync
     * @param success true if sync succeeded
     */
    public void syncComplete(RepairJobDesc desc, NodePair nodes, boolean success)
    {
        RepairJob job = syncingJobs.get(desc.columnFamily);
        if (job == null)
        {
            // Session was shut down before this response arrived; nothing left to do.
            assert terminated;
            return;
        }

        if (!success)
        {
            exception = new RepairException(desc, String.format("Sync failed between %s and %s", nodes.endpoint1, nodes.endpoint2));
            forceShutdown();
            return;
        }

        logger.debug(String.format("[repair #%s] Repair completed between %s and %s on %s", getId(), nodes.endpoint1, nodes.endpoint2, desc.columnFamily));
        // completedSynchronization presumably returns true once every node pair of this job
        // has reported back — confirm against RepairJob.
        if (job.completedSynchronization(nodes, success))
        {
            RepairJob completedJob = syncingJobs.remove(job.desc.columnFamily);
            String remaining = syncingJobs.size() == 0 ? "" : String.format(" (%d remaining column family to sync for this session)", syncingJobs.size());
            if (completedJob != null && completedJob.isFailed())
                logger.warn(String.format("[repair #%s] %s sync failed%s", getId(), desc.columnFamily, remaining));
            else
                logger.info(String.format("[repair #%s] %s is fully synced%s", getId(), desc.columnFamily, remaining));

            if (jobs.isEmpty() && syncingJobs.isEmpty())
            {
                // this repair session is completed
                completed.signalAll();
            }
        }
    }

    /** Human-readable list of participants: the local node followed by all neighbors. */
    private String repairedNodes()
    {
        StringBuilder sb = new StringBuilder();
        sb.append(FBUtilities.getBroadcastAddress());
        for (InetAddress ep : endpoints)
            sb.append(", ").append(ep);
        return sb.toString();
    }

    // we don't care about the return value but care about it throwing exception
    public void runMayThrow() throws Exception
    {
        logger.info(String.format("[repair #%s] new session: will sync %s on range %s for %s.%s", getId(), repairedNodes(), range, keyspace, Arrays.toString(cfnames)));

        if (endpoints.isEmpty())
        {
            // Nothing to compare against: signal completion of the differencing phase and bail.
            differencingDone.signalAll();
            logger.info(String.format("[repair #%s] No neighbors to repair with on range %s: session completed", getId(), range));
            return;
        }

        // Checking all nodes are live
        for (InetAddress endpoint : endpoints)
        {
            if (!FailureDetector.instance.isAlive(endpoint))
            {
                String message = String.format("Cannot proceed on repair because a neighbor (%s) is dead: session failed", endpoint);
                differencingDone.signalAll();
                logger.error("[repair #{}] {}", getId(), message);
                throw new IOException(message);
            }
        }
        ActiveRepairService.instance.addToActiveSessions(this);
        try
        {
            // Create and queue a RepairJob for each column family
            for (String cfname : cfnames)
            {
                RepairJob job = new RepairJob(parentRepairSession, id, keyspace, cfname, range, isSequential);
                jobs.offer(job);
            }
            logger.debug("Sending tree requests to endpoints {}", endpoints);
            // Only the first job is kicked off here; validationComplete() chains the rest.
            jobs.peek().sendTreeRequests(endpoints);

            // block whatever thread started this session until all requests have been returned:
            // if this thread dies, the session will still complete in the background
            completed.await();
            if (exception == null)
            {
                logger.info(String.format("[repair #%s] session completed successfully", getId()));
            }
            else
            {
                logger.error(String.format("[repair #%s] session completed with the following error", getId()), exception);
                throw exception;
            }
        }
        catch (InterruptedException e)
        {
            throw new RuntimeException("Interrupted while waiting for repair.");
        }
        finally
        {
            // mark this session as terminated
            terminate();
            ActiveRepairService.instance.removeFromActiveSessions(this);
        }
    }

    /** Marks the session terminated and drops all job bookkeeping; idempotent. */
    public void terminate()
    {
        terminated = true;
        for (RepairJob job : jobs)
            job.terminate();
        jobs.clear();
        syncingJobs.clear();
    }

    /**
     * Unblocks any thread waiting on this session's conditions so the session can wind
     * down early (typically after a failure). Note: this does NOT clear the jobs itself;
     * the actual cleanup happens in terminate(), invoked from runMayThrow()'s finally block
     * once the awaiting thread is released.
     */
    public void forceShutdown()
    {
        differencingDone.signalAll();
        completed.signalAll();
    }

    // Records the failure of a participating endpoint and aborts the session.
    void failedNode(InetAddress remote)
    {
        String errorMsg = String.format("Endpoint %s died", remote);
        exception = new IOException(errorMsg);
        // If a node failed, we stop everything (though there could still be some activity in the background)
        forceShutdown();
    }

    // Gossip state changes we do not care about — only removal/restart/conviction matter.
    public void onJoin(InetAddress endpoint, EndpointState epState) {}
    public void beforeChange(InetAddress endpoint, EndpointState currentState, ApplicationState newStateKey, VersionedValue newValue) {}
    public void onChange(InetAddress endpoint, ApplicationState state, VersionedValue value) {}
    public void onAlive(InetAddress endpoint, EndpointState state) {}
    public void onDead(InetAddress endpoint, EndpointState state) {}

    public void onRemove(InetAddress endpoint)
    {
        // Treat removal as an unconditional conviction.
        convict(endpoint, Double.MAX_VALUE);
    }

    public void onRestart(InetAddress endpoint, EndpointState epState)
    {
        // A restarted neighbor invalidates its in-flight trees; treat as conviction.
        convict(endpoint, Double.MAX_VALUE);
    }

    public void convict(InetAddress endpoint, double phi)
    {
        if (!endpoints.contains(endpoint))
            return;

        // We want a higher confidence in the failure detection than usual because failing a repair wrongly has a high cost.
        if (phi < 2 * DatabaseDescriptor.getPhiConvictThreshold())
            return;

        // Though unlikely, it is possible to arrive here multiple time and we
        // want to avoid print an error message twice
        if (!isFailed.compareAndSet(false, true))
            return;

        failedNode(endpoint);
    }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.mariadb.implementation;
import com.azure.core.annotation.ExpectedResponses;
import com.azure.core.annotation.Get;
import com.azure.core.annotation.HeaderParam;
import com.azure.core.annotation.Headers;
import com.azure.core.annotation.Host;
import com.azure.core.annotation.HostParam;
import com.azure.core.annotation.PathParam;
import com.azure.core.annotation.QueryParam;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceInterface;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.annotation.UnexpectedResponseExceptionType;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.PagedResponse;
import com.azure.core.http.rest.PagedResponseBase;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.RestProxy;
import com.azure.core.management.exception.ManagementException;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.mariadb.fluent.LogFilesClient;
import com.azure.resourcemanager.mariadb.fluent.models.LogFileInner;
import com.azure.resourcemanager.mariadb.models.LogFileListResult;
import reactor.core.publisher.Mono;
/** An instance of this class provides access to all the operations defined in LogFilesClient. */
public final class LogFilesClientImpl implements LogFilesClient {
    private final ClientLogger logger = new ClientLogger(LogFilesClientImpl.class);

    /** The proxy service used to perform REST calls. */
    private final LogFilesService service;

    /** The service client containing this operation class. */
    private final MariaDBManagementClientImpl client;

    /**
     * Initializes an instance of LogFilesClientImpl.
     *
     * @param client the instance of the service client containing this operation class.
     */
    LogFilesClientImpl(MariaDBManagementClientImpl client) {
        this.service = RestProxy.create(LogFilesService.class, client.getHttpPipeline(), client.getSerializerAdapter());
        this.client = client;
    }

    /**
     * The interface defining all the services for MariaDBManagementClientLogFiles to be used by the proxy service to
     * perform REST calls.
     *
     * <p>NOTE: generated by AutoRest; the annotation values below are the wire contract —
     * do not edit them by hand.
     */
    @Host("{$host}")
    @ServiceInterface(name = "MariaDBManagementCli")
    private interface LogFilesService {
        // GET .../servers/{serverName}/logFiles — the only log-file operation exposed by the RP.
        @Headers({"Content-Type: application/json"})
        @Get(
            "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DBForMariaDB"
                + "/servers/{serverName}/logFiles")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<LogFileListResult>> listByServer(
            @HostParam("$host") String endpoint,
            @QueryParam("api-version") String apiVersion,
            @PathParam("subscriptionId") String subscriptionId,
            @PathParam("resourceGroupName") String resourceGroupName,
            @PathParam("serverName") String serverName,
            @HeaderParam("Accept") String accept,
            Context context);
    }

    /**
     * List all the log files in a given server.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param serverName The name of the server.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a list of log files.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<LogFileInner>> listByServerSinglePageAsync(String resourceGroupName, String serverName) {
        // Fail fast (with a Mono.error, never by throwing) if client state or arguments are missing.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (serverName == null) {
            return Mono.error(new IllegalArgumentException("Parameter serverName is required and cannot be null."));
        }
        final String apiVersion = "2018-06-01";
        final String accept = "application/json";
        return FluxUtil
            .withContext(
                context ->
                    service
                        .listByServer(
                            this.client.getEndpoint(),
                            apiVersion,
                            this.client.getSubscriptionId(),
                            resourceGroupName,
                            serverName,
                            accept,
                            context))
            // The service returns the entire list in one response (no nextLink), hence the
            // null continuation token and null deserialized headers below.
            .<PagedResponse<LogFileInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), null, null))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * List all the log files in a given server.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param serverName The name of the server.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a list of log files.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<LogFileInner>> listByServerSinglePageAsync(
        String resourceGroupName, String serverName, Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (serverName == null) {
            return Mono.error(new IllegalArgumentException("Parameter serverName is required and cannot be null."));
        }
        final String apiVersion = "2018-06-01";
        final String accept = "application/json";
        // Merge the caller-supplied context with the client's default context before the call.
        context = this.client.mergeContext(context);
        return service
            .listByServer(
                this.client.getEndpoint(),
                apiVersion,
                this.client.getSubscriptionId(),
                resourceGroupName,
                serverName,
                accept,
                context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), null, null));
    }

    /**
     * List all the log files in a given server.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param serverName The name of the server.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a list of log files.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<LogFileInner> listByServerAsync(String resourceGroupName, String serverName) {
        // Single-page flux: no next-page retriever is supplied because the API is not paginated.
        return new PagedFlux<>(() -> listByServerSinglePageAsync(resourceGroupName, serverName));
    }

    /**
     * List all the log files in a given server.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param serverName The name of the server.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a list of log files.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<LogFileInner> listByServerAsync(String resourceGroupName, String serverName, Context context) {
        return new PagedFlux<>(() -> listByServerSinglePageAsync(resourceGroupName, serverName, context));
    }

    /**
     * List all the log files in a given server.
     *
     * <p>Blocking variant: iterating the returned {@link PagedIterable} performs the HTTP call.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param serverName The name of the server.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a list of log files.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<LogFileInner> listByServer(String resourceGroupName, String serverName) {
        return new PagedIterable<>(listByServerAsync(resourceGroupName, serverName));
    }

    /**
     * List all the log files in a given server.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param serverName The name of the server.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a list of log files.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<LogFileInner> listByServer(String resourceGroupName, String serverName, Context context) {
        return new PagedIterable<>(listByServerAsync(resourceGroupName, serverName, context));
    }
}
| |
/*
* Copyright (C) 2012 Google, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fr.lteconsulting.hexa.databinding.annotation.processor;
import java.util.List;
import javax.lang.model.element.Element;
import javax.lang.model.element.PackageElement;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.TypeParameterElement;
import javax.lang.model.type.ArrayType;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.type.WildcardType;
import javax.lang.model.util.SimpleTypeVisitor6;
import javax.lang.model.util.Types;
/**
* A class from the Google Auto project on Github
* Special thanks to the authors !
*/
/**
 * Renders {@link TypeMirror}s and type-parameter lists as Java source strings.
 * A class from the Google Auto project on Github — special thanks to the authors!
 */
public final class TypeSimplifier
{
    private final Types typeUtils;

    /** Shared rendering visitor; stateless apart from the enclosing typeUtils. */
    private final ToStringTypeVisitor stringVisitor = new ToStringTypeVisitor();

    public TypeSimplifier( Types typeUtils )
    {
        this.typeUtils = typeUtils;
    }

    /** Renders {@code type} as a source-level string. */
    String simplify( TypeMirror type )
    {
        StringBuilder rendered = type.accept( stringVisitor, new StringBuilder() );
        return rendered.toString();
    }

    /**
     * The formal type parameters of the given type — the angle-bracket part of
     * declarations such as {@code Foo<T extends Number>}, {@code Foo<E extends Enum<E>>}
     * or {@code Foo<K, V extends Comparable<? extends K>>}.
     *
     * <p>Bounds are printed only here, where the variables are declared, never where they
     * are referenced (we do not want the bounds of the second E in
     * {@code <E extends Enum<E>>}); that is why the "extends" handling lives in this
     * method rather than in ToStringTypeVisitor. From the Google Auto project.
     */
    public String formalTypeParametersString( TypeElement type )
    {
        List<? extends TypeParameterElement> typeParameters = type.getTypeParameters();
        if( typeParameters.isEmpty() )
        {
            return "";
        }
        StringBuilder out = new StringBuilder( "<" );
        for( int i = 0; i < typeParameters.size(); i++ )
        {
            if( i > 0 )
            {
                out.append( ", " );
            }
            appendTypeParameterWithBounds( out, typeParameters.get( i ) );
        }
        return out.append( ">" ).toString();
    }

    /**
     * The actual type parameters of the given type. Given
     * {@code class Foo<T extends Something> extends Bar<T>}, the formal list is
     * {@code <T extends Something>} while this method returns the actual list {@code <T>}.
     * From the Google Auto project.
     */
    public static String actualTypeParametersString( TypeElement type )
    {
        List<? extends TypeParameterElement> typeParameters = type.getTypeParameters();
        if( typeParameters.isEmpty() )
        {
            return "";
        }
        StringBuilder out = new StringBuilder( "<" );
        for( int i = 0; i < typeParameters.size(); i++ )
        {
            if( i > 0 )
            {
                out.append( ", " );
            }
            out.append( typeParameters.get( i ).getSimpleName() );
        }
        return out.append( ">" ).toString();
    }

    /** Appends "Name extends BoundA & BoundB"; an implicit Object bound is omitted. */
    private void appendTypeParameterWithBounds( StringBuilder sb, TypeParameterElement typeParameter )
    {
        sb.append( typeParameter.getSimpleName() );
        boolean firstBound = true;
        for( TypeMirror bound : typeParameter.getBounds() )
        {
            if( bound.toString().equals( "java.lang.Object" ) )
            {
                continue; // carries no information
            }
            sb.append( firstBound ? " extends " : " & " );
            firstBound = false;
            bound.accept( stringVisitor, sb );
        }
    }

    /**
     * Visitor that produces a string representation of a type for use in generated code.
     *
     * <p>A simpler alternative would be just to use TypeMirror.toString() and regular
     * expressions to pick the type references apart. That depends on unspecified behaviour
     * of TypeMirror.toString(), though, and is vulnerable to formatting quirks such as the
     * way it omits the space after the comma in
     * {@code java.util.Map<java.lang.String, java.lang.String>}.
     */
    private class ToStringTypeVisitor extends SimpleTypeVisitor6<StringBuilder, StringBuilder>
    {
        @Override
        protected StringBuilder defaultAction( TypeMirror type, StringBuilder sb )
        {
            // Primitives, type variables, etc.: the mirror's own rendering is fine.
            return sb.append( type );
        }

        @Override
        public StringBuilder visitArray( ArrayType type, StringBuilder sb )
        {
            return visit( type.getComponentType(), sb ).append( "[]" );
        }

        @Override
        public StringBuilder visitDeclared( DeclaredType type, StringBuilder sb )
        {
            TypeElement element = (TypeElement) typeUtils.asElement( type );
            sb.append( element.getQualifiedName().toString() );
            appendTypeArguments( type, sb );
            return sb;
        }

        void appendTypeArguments( DeclaredType type, StringBuilder sb )
        {
            List<? extends TypeMirror> arguments = type.getTypeArguments();
            if( arguments.isEmpty() )
            {
                return;
            }
            sb.append( "<" );
            String delimiter = "";
            for( TypeMirror argument : arguments )
            {
                sb.append( delimiter );
                delimiter = ", ";
                visit( argument, sb );
            }
            sb.append( ">" );
        }

        @Override
        public StringBuilder visitWildcard( WildcardType type, StringBuilder sb )
        {
            sb.append( "?" );
            // The language forbids having both bounds, so at most one branch applies.
            if( type.getSuperBound() != null )
            {
                sb.append( " super " );
                visit( type.getSuperBound(), sb );
            }
            else if( type.getExtendsBound() != null )
            {
                sb.append( " extends " );
                visit( type.getExtendsBound(), sb );
            }
            return sb;
        }
    }

    /**
     * Returns the name of the given type, including any enclosing types but not
     * the package.
     */
    static String classNameOf( TypeElement type )
    {
        String qualified = type.getQualifiedName().toString();
        String pkg = packageNameOf( type );
        if( pkg.isEmpty() )
        {
            return qualified;
        }
        return qualified.substring( pkg.length() + 1 );
    }

    /**
     * Returns the name of the package that the given type is in. If the type is
     * in the default (unnamed) package then the name is the empty string.
     */
    static String packageNameOf( TypeElement type )
    {
        TypeElement cursor = type;
        while( true )
        {
            Element enclosing = cursor.getEnclosingElement();
            if( enclosing instanceof PackageElement )
            {
                return ((PackageElement) enclosing).getQualifiedName().toString();
            }
            // Same cast as the original: anything other than a package must be another type.
            cursor = (TypeElement) enclosing;
        }
    }

    /** Strips everything up to and including the last dot; returns the input unchanged otherwise. */
    static String simpleNameOf( String s )
    {
        int lastDot = s.lastIndexOf( '.' );
        return lastDot < 0 ? s : s.substring( lastDot + 1 );
    }

    /**
     * Returns the fully qualified source name of a TypeMirror, including generic
     * arguments for declared types.
     *
     * @throws CodeGenerationIncompleteException if the type is still reported as {@code <any>}
     */
    public static String getTypeQualifiedName(TypeMirror type) throws CodeGenerationIncompleteException
    {
        if( type.toString().equals( "<any>" ) )
        {
            throw new CodeGenerationIncompleteException( "Type reported as <any> is likely a not-yet "
                    + "generated parameterized type." );
        }
        switch( type.getKind() )
        {
            case ARRAY:
                return getTypeQualifiedName( ((ArrayType) type).getComponentType() ) + "[]";
            case BOOLEAN:
                return "boolean";
            case BYTE:
                return "byte";
            case CHAR:
                return "char";
            case DOUBLE:
                return "double";
            case FLOAT:
                return "float";
            case INT:
                return "int";
            case LONG:
                return "long";
            case SHORT:
                return "short";
            case DECLARED:
                DeclaredType declared = (DeclaredType) type;
                StringBuilder qualified = new StringBuilder( ((TypeElement) declared.asElement()).getQualifiedName().toString() );
                List<? extends TypeMirror> arguments = declared.getTypeArguments();
                if( !arguments.isEmpty() )
                {
                    qualified.append( "<" );
                    String delimiter = "";
                    for( TypeMirror argument : arguments )
                    {
                        qualified.append( delimiter ).append( getTypeQualifiedName( argument ) );
                        delimiter = ", ";
                    }
                    qualified.append( ">" );
                }
                return qualified.toString();
            default:
                return type.toString();
        }
    }
}
| |
package krasa.grepconsole.utils;
import com.intellij.ide.CopyProvider;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.PlatformDataKeys;
import com.intellij.openapi.editor.Caret;
import com.intellij.openapi.editor.CaretModel;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.ide.CopyPasteManager;
import krasa.grepconsole.model.Profile;
import org.apache.commons.lang.StringUtils;
import org.jetbrains.annotations.NotNull;
import java.awt.*;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.Transferable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class Utils {
/**
 * Parses {@code text} as a decimal integer.
 *
 * @return the parsed value, or 0 when the input is null, empty, or not a number
 */
public static int safeParseInt(String text) {
	try {
		// Integer.parseInt rejects null and "" with NumberFormatException,
		// which we fold into the same "default to 0" path as any other bad input.
		return Integer.parseInt(text);
	} catch (NumberFormatException notANumber) {
		return 0;
	}
}
/**
 * Whether the action's context exposes a usable copy provider, i.e. some text
 * (or other copyable content) is currently selected and copy is both enabled
 * and visible.
 */
public static boolean isSelectedText(AnActionEvent e) {
	DataContext context = e.getDataContext();
	CopyProvider copyProvider = PlatformDataKeys.COPY_PROVIDER.getData(context);
	if (copyProvider == null) {
		return false;
	}
	return copyProvider.isCopyEnabled(context) && copyProvider.isCopyVisible(context);
}
/**
 * Copies the current selection via the context's {@link CopyProvider} and reads
 * it back from the clipboard as plain text.
 *
 * @return the copied string, or null when there is no copy provider, no clipboard
 *         content, or the content cannot be rendered as a string
 */
public static String getString(AnActionEvent e) {
	DataContext context = e.getDataContext();
	CopyProvider copyProvider = PlatformDataKeys.COPY_PROVIDER.getData(context);
	if (copyProvider == null) {
		return null;
	}
	copyProvider.performCopy(context);
	Transferable contents = CopyPasteManager.getInstance().getContents();
	if (contents == null) {
		return null;
	}
	try {
		return (String) contents.getTransferData(DataFlavor.stringFlavor);
	} catch (Exception clipboardFailure) {
		// Clipboard content may vanish or be non-textual; treat any failure as "no string".
		return null;
	}
}
/**
 * Returns the text selected by the primary caret of the context's editor,
 * or null when no editor is available (secondary carets are ignored).
 */
public static String getSelectedString(AnActionEvent e) {
	Editor editor = CommonDataKeys.EDITOR.getData(e.getDataContext());
	if (editor == null) {
		return null;
	}
	return editor.getCaretModel().getPrimaryCaret().getSelectedText();
}
/**
 * Renders {@code substring} for logging: the first 120 characters, an ellipsis
 * when truncated, and a trailing {@code [length=N]} marker with the full length.
 */
@NotNull
public static String toNiceLineForLog(String substring) {
	final int length = substring.length();
	StringBuilder line = new StringBuilder(substring.substring(0, Math.min(length, 120)));
	if (length > 120) {
		line.append("...");
	}
	return line.append(" [length=").append(length).append("]").toString();
}
/**
 * Produces a profile name unique within {@code settingsList}, treating an existing
 * trailing " (N)" suffix as a starting counter.
 *
 * <p>Fixes: the previous pattern {@code \(\d\)} only recognized single-digit
 * suffixes, so a name like "Profile (10)" was never parsed and the counter
 * restarted at 0. The commons-lang helpers were also replaced with plain
 * {@link String} operations with identical behavior for in-range indices.
 *
 * @param settingsList existing profiles whose names must not be duplicated
 * @param name desired name, optionally already carrying a " (N)" suffix
 * @return {@code name} itself if unused, otherwise "name (i)" with the first free i
 */
public static String generateName(List<Profile> settingsList, String name) {
	// One-or-more digits inside parentheses, e.g. "(1)" or "(12)".
	Pattern numberedSuffix = Pattern.compile("\\(\\d+\\)");
	int i = 0;
	int index = indexOf(numberedSuffix, name);
	if (index > 0) {
		String suffix = name.substring(index);
		try {
			i = Integer.parseInt(suffix.replace("(", "").replace(")", ""));
			name = name.substring(0, index).trim();
		} catch (NumberFormatException ignored) {
			// Best effort: if the suffix is not a clean number, keep the full name and start at 0.
		}
	}
	return generateName(settingsList, i, name, name);
}
/**
 * Finds a collision-free variant of {@code resultName} against the given profiles.
 * Whenever the current candidate matches an existing profile name, the candidate
 * becomes "name (i)" with an incremented counter, and the whole list is rechecked
 * until a full pass finds no match.
 *
 * @param settingsList existing profiles to check against
 * @param i next counter value to try
 * @param name base name used to build numbered candidates
 * @param resultName current candidate; null means "start from {@code name}"
 * @return the first candidate that matches no existing profile name
 */
public static String generateName(List<Profile> settingsList, int i, String name, String resultName) {
	if (resultName == null) {
		resultName = name;
	}
	// Iterative equivalent of the original recursion: a candidate is accepted only
	// after a complete scan of the list finds no profile with the same name.
	boolean collision = true;
	while (collision) {
		collision = false;
		for (Profile settings : settingsList) {
			if (resultName.equals(settings.getName())) {
				resultName = name + " (" + i++ + ")";
				collision = true;
			}
		}
	}
	return resultName;
}
/**
 * @return index of pattern in s or -1, if not found
 */
public static int indexOf(Pattern pattern, String s) {
	Matcher m = pattern.matcher(s);
	if (m.find()) {
		return m.start();
	}
	return -1;
}
/**
 * Returns the next color in the rotating palette.
 *
 * <p>Fixes an overflow bug: the previous form {@code colorList.get(++index % size)}
 * let {@code index} grow without bound, so after ~2^31 calls it overflowed to a
 * negative value and the negative modulus caused an IndexOutOfBoundsException.
 * The cursor is now kept wrapped inside {@code [0, size)}, producing the exact
 * same sequence of colors before the overflow point.
 */
public static Color nextColor() {
	index = (index + 1) % colorList.size();
	return colorList.get(index);
}
// Rotating palette consumed by nextColor(); populated once in the static initializer below.
public static ArrayList<Color> colorList;
// Cursor into colorList, advanced by nextColor(). Shared mutable state — not thread-safe.
static int index = 0;
//TODO find a good palette
// NOTE(review): the constant NAMES below do NOT match their hex VALUES. For example
// 0x006400 is the W3C "darkgreen", not "deepskyblue", and the real deepskyblue (0x00bfff)
// is assigned to the field named "dodgerblue". Only the values matter for the rotation in
// nextColor(), but the names are misleading — the fields cannot be safely renamed here
// because the static initializer referencing them continues beyond this section.
private static final Color deepskyblue = new Color(0x006400);
private static final Color darkturquoise = new Color(0x008000);
private static final Color aqua = new Color(0x008b8b);
private static final Color dodgerblue = new Color(0x00bfff);
private static final Color seagreen = new Color(0x00ced1);
private static final Color darkslategray = new Color(0x00fa9a);
private static final Color mediumseagreen = new Color(0x00ffff);
private static final Color cadetblue = new Color(0x191970);
private static final Color slateblue = new Color(0x1e90ff);
private static final Color olivedrab = new Color(0x20b2aa);
private static final Color blueviolet = new Color(0x32cd32);
private static final Color darkred = new Color(0x3cb371);
private static final Color darkmagenta = new Color(0x4169e1);
private static final Color saddlebrown = new Color(0x4682b4);
private static final Color darkseagreen = new Color(0x483d8b);
private static final Color yellowgreen = new Color(0x556b2f);
private static final Color lightseagreen = new Color(0x5f9ea0);
private static final Color limegreen = new Color(0x6495ed);
private static final Color turquoise = new Color(0x663399);
private static final Color skyblue = new Color(0x696969);
private static final Color lightgreen = new Color(0x6a5acd);
private static final Color palegreen = new Color(0x6b8e23);
private static final Color darkolivegreen = new Color(0x7b68ee);
private static final Color royalblue = new Color(0x7cfc00);
private static final Color steelblue = new Color(0x7fffd4);
private static final Color darkgreen = new Color(0x800000);
private static final Color cornflowerblue = new Color(0x800080);
private static final Color green = new Color(0x808000);
private static final Color teal = new Color(0x808080);
private static final Color mediumpurple = new Color(0x87ceeb);
private static final Color darkviolet = new Color(0x8a2be2);
private static final Color darkorchid = new Color(0x8b0000);
private static final Color midnightblue = new Color(0x8b008b);
private static final Color rebeccapurple = new Color(0x8b4513);
private static final Color dimgray = new Color(0x8fbc8f);
private static final Color slategray = new Color(0x90ee90);
private static final Color lightslategray = new Color(0x9370d8);
private static final Color maroon = new Color(0x9400d3);
private static final Color purple = new Color(0x98fb98);
private static final Color olive = new Color(0x9932cc);
private static final Color gray = new Color(0x9acd32);
private static final Color darkgray = new Color(0xa0522d);
private static final Color brown = new Color(0xa52a2a);
private static final Color sienna = new Color(0xa9a9a9);
private static final Color lightblue = new Color(0xadd8e6);
private static final Color greenyellow = new Color(0xadff2f);
private static final Color paleturquoise = new Color(0xafeeee);
private static final Color darkgoldenrod = new Color(0xb22222);
private static final Color firebrick = new Color(0xb8860b);
private static final Color mediumorchid = new Color(0xba55d3);
private static final Color rosybrown = new Color(0xbc8f8f);
private static final Color darkkhaki = new Color(0xbdb76b);
private static final Color silver = new Color(0xc0c0c0);
private static final Color mediumvioletred = new Color(0xc71585);
private static final Color indianred = new Color(0xcd5c5c);
private static final Color peru = new Color(0xcd853f);
private static final Color tan = new Color(0xd2691e);
private static final Color lightgray = new Color(0xd2b48c);
private static final Color thistle = new Color(0xd3d3d3);
private static final Color chocolate = new Color(0xd87093);
private static final Color palevioletred = new Color(0xd8bfd8);
private static final Color orchid = new Color(0xda70d6);
private static final Color goldenrod = new Color(0xdaa520);
private static final Color crimson = new Color(0xdc143c);
private static final Color gainsboro = new Color(0xdcdcdc);
private static final Color plum = new Color(0xdda0dd);
private static final Color burlywood = new Color(0xdeb887);
private static final Color darksalmon = new Color(0xe9967a);
private static final Color violet = new Color(0xee82ee);
private static final Color palegoldenrod = new Color(0xeee8aa);
private static final Color sandybrown = new Color(0xf08080);
private static final Color wheat = new Color(0xf4a460);
private static final Color lightcoral = new Color(0xf5deb3);
private static final Color red = new Color(0xff0000);
private static final Color fuchsia = new Color(0xff00ff);
private static final Color coral = new Color(0xff1493);
private static final Color darkorange = new Color(0xff4500);
private static final Color hotpink = new Color(0xff6347);
private static final Color deeppink = new Color(0xff69b4);
private static final Color tomato = new Color(0xff8c00);
private static final Color lightsalmon = new Color(0xffa07a);
private static final Color orange = new Color(0xffa500);
private static final Color pink = new Color(0xffc0cb);
private static final Color gold = new Color(0xffd700);
private static final Color white = new Color(0xffffff);
static {
colorList = new ArrayList<>();
// https://www.w3schools.com/colors/colors_names.asp
colorList.add(deepskyblue);
colorList.add(darkturquoise);
colorList.add(aqua);
colorList.add(dodgerblue);
colorList.add(seagreen);
colorList.add(darkslategray);
colorList.add(mediumseagreen);
colorList.add(cadetblue);
colorList.add(slateblue);
colorList.add(olivedrab);
colorList.add(blueviolet);
colorList.add(darkred);
colorList.add(darkmagenta);
colorList.add(saddlebrown);
colorList.add(darkseagreen);
colorList.add(yellowgreen);
colorList.add(lightseagreen);
colorList.add(limegreen);
colorList.add(turquoise);
colorList.add(skyblue);
colorList.add(lightgreen);
colorList.add(palegreen);
colorList.add(darkolivegreen);
colorList.add(royalblue);
colorList.add(steelblue);
colorList.add(darkgreen);
colorList.add(cornflowerblue);
colorList.add(green);
colorList.add(teal);
colorList.add(mediumpurple);
colorList.add(darkviolet);
colorList.add(darkorchid);
colorList.add(midnightblue);
colorList.add(rebeccapurple);
colorList.add(dimgray);
colorList.add(slategray);
colorList.add(lightslategray);
colorList.add(maroon);
colorList.add(purple);
colorList.add(olive);
colorList.add(gray);
colorList.add(darkgray);
colorList.add(brown);
colorList.add(sienna);
colorList.add(lightblue);
colorList.add(greenyellow);
colorList.add(paleturquoise);
colorList.add(darkgoldenrod);
colorList.add(firebrick);
colorList.add(mediumorchid);
colorList.add(rosybrown);
colorList.add(darkkhaki);
colorList.add(silver);
colorList.add(mediumvioletred);
colorList.add(indianred);
colorList.add(peru);
colorList.add(tan);
colorList.add(lightgray);
colorList.add(thistle);
colorList.add(chocolate);
colorList.add(palevioletred);
colorList.add(orchid);
colorList.add(goldenrod);
colorList.add(crimson);
colorList.add(gainsboro);
colorList.add(plum);
colorList.add(burlywood);
colorList.add(darksalmon);
colorList.add(violet);
colorList.add(palegoldenrod);
colorList.add(sandybrown);
colorList.add(wheat);
colorList.add(lightcoral);
colorList.add(red);
colorList.add(fuchsia);
colorList.add(coral);
colorList.add(darkorange);
colorList.add(hotpink);
colorList.add(deeppink);
colorList.add(tomato);
colorList.add(lightsalmon);
colorList.add(orange);
colorList.add(pink);
colorList.add(gold);
colorList.add(white);
Collections.shuffle(colorList);
}
public static int toPositiveInt(String s, String def) {
try {
int p = Integer.parseInt(s);
if (p < 0) {
p = Integer.parseInt(def);
}
return p;
} catch (NumberFormatException e) {
return Integer.parseInt(def);
}
}
public static long toNano(String ms, String def) {
return toPositiveInt(ms, def) * 1_000_000L;
}
}
| |
package me.foxaice.controller_api;
import java.io.IOException;
import me.foxaice.controller_api.bulb.BulbCommands;
import me.foxaice.controller_api.bulb.BulbGroup;
import me.foxaice.controller_api.udp.UdpAdminCommands;
import me.foxaice.controller_api.udp.UdpController;
public class ControllerApi implements IAdminControllerApi, IBulbControllerApi {
private UdpController mUdpController;
private String mIpAddress;
private int mPort;
private static final String BROADCAST_IP = "255.255.255.255";
private static final int ADMIN_PORT = 48899;
public ControllerApi(String ipAddress, int port) {
mUdpController = new UdpController();
mIpAddress = ipAddress;
mPort = port;
}
@Override
public String getIpAddress() {
return mIpAddress;
}
@Override
public void setIpAddress(String ipAddress) {
mIpAddress = ipAddress;
}
@Override
public int getPort() {
return mPort;
}
@Override
public void setPort(int port) {
mPort = port;
}
@Override
public void setBrightnessOfCurrentGroup(int brightness) throws IOException {
sendMessage(BulbCommands.getBrightnessArray(brightness));
}
@Override
public void setBrightnessOfGroup(int group, int brightness) throws IOException, InterruptedException {
setGroup(group);
Thread.sleep(100L);
setBrightnessOfCurrentGroup(brightness);
}
@Override
public void setColorOfCurrentGroup(int color) throws IOException {
sendMessage(BulbCommands.getColorArray(color));
}
@Override
public void setColorOfGroup(int group, int color) throws IOException, InterruptedException {
setGroup(group);
Thread.sleep(100L);
setColorOfCurrentGroup(color);
}
@Override
public void toggleDiscoModeOfCurrentGroup() throws IOException {
sendMessage(BulbCommands.getToggleDiscoModeArray());
}
@Override
public void toggleDiscoModeOfGroup(int group) throws IOException, InterruptedException {
setGroup(group);
Thread.sleep(100L);
toggleDiscoModeOfCurrentGroup();
}
@Override
public void speedUpDiscoModeOfCurrentGroup() throws IOException {
sendMessage(BulbCommands.getSpeedUpDiscoModeArray());
}
@Override
public void speedUpDiscoModeOfGroup(int group) throws IOException, InterruptedException {
setGroup(group);
Thread.sleep(100L);
speedUpDiscoModeOfCurrentGroup();
}
@Override
public void speedDownDiscoModeOfCurrentGroup() throws IOException {
sendMessage(BulbCommands.getSlowDownDiscoModeArray());
}
@Override
public void speedDownDiscoModeOfGroup(int group) throws IOException, InterruptedException {
setGroup(group);
Thread.sleep(100L);
speedDownDiscoModeOfCurrentGroup();
}
@Override
public void powerOnGroup(int group) throws IOException {
sendMessage(BulbCommands.getPowerOnArray(getBulbGroup(group)));
}
@Override
public void powerOffGroup(int group) throws IOException {
sendMessage(BulbCommands.getPowerOffArray(getBulbGroup(group)));
}
@Override
public void setWhiteColorOfGroup(int group) throws IOException, InterruptedException {
setGroup(group);
Thread.sleep(100L);
sendMessage(BulbCommands.getWhiteColorCurrentGroupArray(getBulbGroup(group)));
}
@Override
public void setCurrentGroup(int group) throws IOException {
setGroup(group);
}
@Override
public void sendCommandQuit() throws IOException {
sendAdminMessage(UdpAdminCommands.QUIT, mIpAddress);
}
@Override
public void sendCommandLinkWiFi() throws IOException {
sendAdminMessage(UdpAdminCommands.LINK_WIFI, BROADCAST_IP);
}
@Override
public void sendCommandGetNETP() throws IOException {
sendAdminMessage(UdpAdminCommands.GET_NETP, mIpAddress);
}
@Override
public void sendCommandOk() throws IOException {
sendAdminMessage(UdpAdminCommands.OK, mIpAddress);
}
@Override
public void sendCommandRestart() throws IOException {
sendAdminMessage(UdpAdminCommands.RESTART, mIpAddress);
}
@Override
public void sendCommandScanNetworks() throws IOException {
sendAdminMessage(UdpAdminCommands.SCAN_NETWORKS, mIpAddress);
}
@Override
public void sendCommandGetSettingsSTA() throws IOException {
sendAdminMessage(UdpAdminCommands.GET_SETTINGS_STA, mIpAddress);
}
@Override
public void sendCommandGetMode() throws IOException {
sendAdminMessage(UdpAdminCommands.GET_MODE, mIpAddress);
}
@Override
public void sendCommandSetModeSTA() throws IOException {
sendAdminMessage(UdpAdminCommands.SET_MODE_STA, mIpAddress);
}
@Override
public void sendCommandSetModeAP() throws IOException {
sendAdminMessage(UdpAdminCommands.SET_MODE_AP, mIpAddress);
}
@Override
public void sendCommandSetPasswordAP(CharSequence pass) throws IOException {
String params;
if (pass.length() == 0) {
params = "OPEN,NONE";
} else {
params = "WPA2PSK,AES,";
}
sendAdminMessage(UdpAdminCommands.SET_KEY_AP, mIpAddress, params + pass.toString());
}
@Override
public void sendCommandSetPasswordSTA(CharSequence params) throws IOException {
sendAdminMessage(UdpAdminCommands.SET_KEY_STA, mIpAddress, params.toString());
}
@Override
public void sendCommandGetPasswordSTA() throws IOException {
sendAdminMessage(UdpAdminCommands.GET_KEY_STA, mIpAddress);
}
@Override
public void sendCommandSetSSIDSTA(CharSequence params) throws IOException {
sendAdminMessage(UdpAdminCommands.SET_SSID_STA, mIpAddress, params.toString());
}
@Override
public void sendCommandGetSSIDSTA() throws IOException {
sendAdminMessage(UdpAdminCommands.GET_SSID_STA, mIpAddress);
}
@Override
public void sendCommandGetPasswordAp() throws IOException {
sendAdminMessage(UdpAdminCommands.GET_KEY_AP, mIpAddress);
}
@Override
public String receiveAdminMessage(String ipAddress) throws IOException {
return receiveMessage(ipAddress);
}
@Override
public void closeSockets() {
mUdpController.closeSocket();
}
@Override
public void initSockets() {
if (mUdpController != null) {
mUdpController.closeSocket();
}
mUdpController = new UdpController();
}
@Override
public void initSocket(int port) {
if (mUdpController != null) {
mUdpController.closeSocket();
}
mUdpController = new UdpController(port);
}
private void setGroup(int group) throws IOException {
sendMessage(BulbCommands.getGroupArray(getBulbGroup(group)));
}
private BulbGroup getBulbGroup(int group) {
if (group == 1) {
return BulbGroup.GROUP_1;
} else if (group == 2) {
return BulbGroup.GROUP_2;
} else if (group == 3) {
return BulbGroup.GROUP_3;
} else if (group == 4) {
return BulbGroup.GROUP_4;
} else {
return BulbGroup.All;
}
}
private void sendMessage(byte[] arr) throws IOException {
mUdpController.sendMessage(arr, mIpAddress, mPort);
}
private void sendAdminMessage(UdpAdminCommands command, String ipAddress, String params) throws IOException {
mUdpController.sendAdminMessage(command, ipAddress, ADMIN_PORT, params);
}
private void sendAdminMessage(UdpAdminCommands command, String ipAddress) throws IOException {
mUdpController.sendAdminMessage(command, ipAddress, ADMIN_PORT);
}
private String receiveMessage(String ipAddress) throws IOException {
return mUdpController.receiveAdminMessage(ipAddress);
}
}
| |
/*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package master.flame.danmaku.ui.widget;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.os.Build;
import android.os.HandlerThread;
import android.os.Looper;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import java.util.LinkedList;
import java.util.Locale;
import master.flame.danmaku.controller.DrawHandler;
import master.flame.danmaku.controller.DrawHandler.Callback;
import master.flame.danmaku.controller.DrawHelper;
import master.flame.danmaku.controller.IDanmakuView;
import master.flame.danmaku.controller.IDanmakuViewController;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.IDanmakus;
import master.flame.danmaku.danmaku.model.android.DanmakuContext;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.renderer.IRenderer.RenderingState;
/**
 * Danmaku (scrolling comment overlay) rendered through a plain {@link View}.
 *
 * <p>Timing and layout run on a {@link DrawHandler} attached to a worker
 * {@link Looper} (chosen in {@link #getLooper(int)}), while pixels are produced
 * on the UI thread in {@link #onDraw(Canvas)}. The two threads hand off frames
 * through {@code mDrawMonitor}: the worker calls {@link #drawDanmakus()}, which
 * posts an invalidate and blocks until the UI thread finishes drawing and
 * signals completion in {@code unlockCanvasAndPost()}.
 */
public class DanmakuView extends View implements IDanmakuView, IDanmakuViewController {
    public static final String TAG = "DanmakuView";
    private Callback mCallback; // forwarded to the DrawHandler once it exists
    private HandlerThread mHandlerThread; // backing thread for the draw Looper; null when using the main looper
    private DrawHandler handler; // null until prepare()/start(); null again after stop()
    private boolean isSurfaceCreated; // set after the first onLayout(); drawing is skipped before then
    private boolean mEnableDanmakuDrwaingCache = true; // (sic) field name typo preserved; toggled via enableDanmakuDrawingCache()
    private OnDanmakuClickListener mOnDanmakuClickListener;
    private DanmakuTouchHelper mTouchHelper; // translates touch events into danmaku clicks
    private boolean mShowFps; // when true, onDraw overlays an FPS/cache diagnostic line
    private boolean mDanmakuVisible = true; // show()/hide() flag; isShown() combines it with View visibility
    protected int mDrawingThreadType = THREAD_TYPE_NORMAL_PRIORITY;
    private Object mDrawMonitor = new Object(); // guards the worker/UI draw handshake
    private boolean mDrawFinished = false; // set by the UI thread when onDraw completes a requested frame
    private boolean mRequestRender = false; // true while a worker-requested frame is pending
    private long mUiThreadId; // captured in init(); lets clear() detect UI-thread callers

    public DanmakuView(Context context) {
        super(context);
        init();
    }

    /** Common construction: transparent background, clear-by-color drawing, touch wiring. */
    private void init() {
        mUiThreadId = Thread.currentThread().getId();
        setBackgroundColor(Color.TRANSPARENT);
        setDrawingCacheBackgroundColor(Color.TRANSPARENT);
        DrawHelper.useDrawColorToClearCanvas(true, false);
        mTouchHelper = DanmakuTouchHelper.instance(this);
    }

    public DanmakuView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init();
    }

    public DanmakuView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init();
    }

    /** Adds one danmaku item; silently ignored until prepare()/start() created the handler. */
    public void addDanmaku(BaseDanmaku item) {
        if (handler != null) {
            handler.addDanmaku(item);
        }
    }

    @Override
    public void removeAllDanmakus() {
        if (handler != null) {
            handler.removeAllDanmakus();
        }
    }

    @Override
    public void removeAllLiveDanmakus() {
        if (handler != null) {
            handler.removeAllLiveDanmakus();
        }
    }

    @Override
    public IDanmakus getCurrentVisibleDanmakus() {
        if (handler != null) {
            return handler.getCurrentVisibleDanmakus();
        }
        return null; // not prepared yet
    }

    /** Stores the callback and pushes it to the handler if one already exists. */
    public void setCallback(Callback callback) {
        mCallback = callback;
        if (handler != null) {
            handler.setCallback(callback);
        }
    }

    /** Stops drawing and drops the FPS history; the view can be start()ed again later. */
    @Override
    public void release() {
        stop();
        if(mDrawTimes!= null) mDrawTimes.clear();
    }

    @Override
    public void stop() {
        stopDraw();
    }

    // Tears down the draw handler and its thread.
    // NOTE(review): join() runs before mHandlerThread.quit(); this relies on
    // handler.quit() already ending the looper so the thread terminates —
    // confirm DrawHandler.quit() does that, otherwise join() can block.
    private void stopDraw() {
        if (handler != null) {
            handler.quit();
            handler = null;
        }
        if (mHandlerThread != null) {
            try {
                mHandlerThread.join();
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            mHandlerThread.quit();
            mHandlerThread = null;
        }
    }

    /**
     * Returns the Looper the DrawHandler should run on, creating a fresh
     * HandlerThread at the priority implied by {@code type} and quitting any
     * previous one. THREAD_TYPE_MAIN_THREAD reuses the UI looper directly.
     */
    protected Looper getLooper(int type){
        if (mHandlerThread != null) {
            mHandlerThread.quit();
            mHandlerThread = null;
        }
        int priority;
        switch (type) {
            case THREAD_TYPE_MAIN_THREAD:
                return Looper.getMainLooper();
            case THREAD_TYPE_HIGH_PRIORITY:
                priority = android.os.Process.THREAD_PRIORITY_URGENT_DISPLAY;
                break;
            case THREAD_TYPE_LOW_PRIORITY:
                priority = android.os.Process.THREAD_PRIORITY_LOWEST;
                break;
            case THREAD_TYPE_NORMAL_PRIORITY:
            default:
                priority = android.os.Process.THREAD_PRIORITY_DEFAULT;
                break;
        }
        String threadName = "DFM Handler Thread #"+priority;
        mHandlerThread = new HandlerThread(threadName, priority);
        mHandlerThread.start();
        return mHandlerThread.getLooper();
    }

    // Lazily creates the DrawHandler on the configured drawing thread.
    private void prepare() {
        if (handler == null)
            handler = new DrawHandler(getLooper(mDrawingThreadType), this, mDanmakuVisible);
    }

    /** Configures the handler with parser/config/callback and kicks off preparation. */
    @Override
    public void prepare(BaseDanmakuParser parser, DanmakuContext config) {
        prepare();
        handler.setConfig(config);
        handler.setParser(parser);
        handler.setCallback(mCallback);
        handler.prepare();
    }

    @Override
    public boolean isPrepared() {
        return handler != null && handler.isPrepared();
    }

    @Override
    public DanmakuContext getConfig() {
        if (handler == null) {
            return null;
        }
        return handler.getConfig();
    }

    /** Enables/disables the on-canvas FPS diagnostic overlay. */
    @Override
    public void showFPS(boolean show){
        mShowFps = show;
    }

    private static final int MAX_RECORD_SIZE = 50; // max frame timestamps retained for fps()
    private static final int ONE_SECOND = 1000;
    private LinkedList<Long> mDrawTimes; // sliding window of frame timestamps (lazily created in onDraw)
    private boolean mClearFlag; // next onDraw should wipe the canvas instead of drawing danmakus

    // Records the current frame time and returns the frame rate over the
    // retained window (frames * 1000 / elapsed-ms), or 0 before any time elapsed.
    private float fps() {
        long lastTime = System.currentTimeMillis();
        mDrawTimes.addLast(lastTime);
        float dtime = lastTime - mDrawTimes.getFirst();
        int frames = mDrawTimes.size();
        if (frames > MAX_RECORD_SIZE) {
            mDrawTimes.removeFirst();
        }
        return dtime > 0 ? mDrawTimes.size() * ONE_SECOND / dtime : 0.0f;
    }

    /**
     * Called from the draw thread: requests a frame and blocks until the UI
     * thread has drawn it. Returns elapsed milliseconds, 0 before the first
     * layout, or -1 while the view is hidden.
     */
    @Override
    public long drawDanmakus() {
        if (!isSurfaceCreated)
            return 0;
        if (!isShown())
            return -1;
        long stime = System.currentTimeMillis();
        lockCanvas();
        return System.currentTimeMillis() - stime;
    }

    // Marks a frame as pending and invalidates; postInvalidateOnAnimation needs API 16.
    @SuppressLint("NewApi")
    private void postInvalidateCompat() {
        mRequestRender = true;
        if(Build.VERSION.SDK_INT >= 16) {
            this.postInvalidateOnAnimation();
        } else {
            this.postInvalidate();
        }
    }

    // Worker-side half of the frame handshake: posts an invalidate, then waits in
    // 200 ms slices until onDraw sets mDrawFinished. On interrupt, gives up only
    // when hidden/stopped; otherwise restores the interrupt status and keeps waiting.
    private void lockCanvas() {
        if(mDanmakuVisible == false) {
            return;
        }
        postInvalidateCompat();
        synchronized (mDrawMonitor) {
            while ((!mDrawFinished) && (handler != null)) {
                try {
                    mDrawMonitor.wait(200);
                } catch (InterruptedException e) {
                    if (mDanmakuVisible == false || handler == null || handler.isStop()) {
                        break;
                    } else {
                        Thread.currentThread().interrupt();
                    }
                }
            }
            mDrawFinished = false;
        }
    }

    // Like lockCanvas(), but the resulting frame wipes the canvas.
    private void lockCanvasAndClear() {
        mClearFlag = true;
        lockCanvas();
    }

    // UI-side half of the handshake: releases the worker blocked in lockCanvas().
    private void unlockCanvasAndPost() {
        synchronized (mDrawMonitor) {
            mDrawFinished = true;
            mDrawMonitor.notifyAll();
        }
    }

    /**
     * Draws the pending frame: either clears the canvas (mClearFlag) or delegates
     * to the handler, optionally overlaying FPS stats, then always releases the
     * waiting draw thread.
     */
    @Override
    protected void onDraw(Canvas canvas) {
        if ((!mDanmakuVisible) && (!mRequestRender)) {
            super.onDraw(canvas);
            return;
        }
        if (mClearFlag) {
            DrawHelper.clearCanvas(canvas);
            mClearFlag = false;
        } else {
            if (handler != null) {
                RenderingState rs = handler.draw(canvas);
                if (mShowFps) {
                    if (mDrawTimes == null)
                        mDrawTimes = new LinkedList<Long>();
                    String fps = String.format(Locale.getDefault(),
                            "fps %.2f,time:%d s,cache:%d,miss:%d", fps(), getCurrentTime() / 1000,
                            rs.cacheHitCount, rs.cacheMissCount);
                    DrawHelper.drawFPS(canvas, fps);
                }
            }
        }
        mRequestRender = false;
        unlockCanvasAndPost();
    }

    // First layout marks the view ready; size changes are forwarded to the handler.
    @Override
    protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
        super.onLayout(changed, left, top, right, bottom);
        if (handler != null) {
            handler.notifyDispSizeChanged(right - left, bottom - top);
        }
        isSurfaceCreated = true;
    }

    /** Cycles start → pause → resume depending on the current handler state. */
    public void toggle() {
        if (isSurfaceCreated) {
            if (handler == null)
                start();
            else if (handler.isStop()) {
                resume();
            } else
                pause();
        }
    }

    @Override
    public void pause() {
        if (handler != null)
            handler.pause();
    }

    private int mResumeTryCount = 0;
    // Retries resume with growing delays (100ms, 200ms, ...) while waiting for the
    // view to become visible; forces the resume after 4 attempts regardless.
    private Runnable mResumeRunnable = new Runnable() {
        @Override
        public void run() {
            if (handler == null) {
                return;
            }
            mResumeTryCount++;
            if (mResumeTryCount > 4 || DanmakuView.super.isShown()) {
                handler.resume();
            } else {
                handler.postDelayed(this, 100 * mResumeTryCount);
            }
        }
    };

    @Override
    public void resume() {
        if (handler != null && handler.isPrepared()) {
            mResumeTryCount = 0;
            handler.postDelayed(mResumeRunnable, 100);
        } else if (handler == null) {
            restart();
        }
    }

    @Override
    public boolean isPaused() {
        if(handler != null) {
            return handler.isStop();
        }
        return false;
    }

    public void restart() {
        stop();
        start();
    }

    @Override
    public void start() {
        start(0);
    }

    // Note: parameter name "postion" (sic) is kept — it is part of the published API surface.
    @Override
    public void start(long postion) {
        if (handler == null) {
            prepare();
        }else{
            handler.removeCallbacksAndMessages(null);
        }
        handler.obtainMessage(DrawHandler.START, postion).sendToTarget();
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        if (null != mTouchHelper) {
            mTouchHelper.onTouchEvent(event);
        }
        return super.onTouchEvent(event);
    }

    public void seekTo(Long ms) {
        if(handler != null){
            handler.seekTo(ms);
        }
    }

    public void enableDanmakuDrawingCache(boolean enable) {
        mEnableDanmakuDrwaingCache = enable;
    }

    @Override
    public boolean isDanmakuDrawingCacheEnabled() {
        return mEnableDanmakuDrwaingCache;
    }

    @Override
    public boolean isViewReady() {
        return isSurfaceCreated;
    }

    @Override
    public View getView() {
        return this;
    }

    @Override
    public void show() {
        showAndResumeDrawTask(null);
    }

    @Override
    public void showAndResumeDrawTask(Long position) {
        mDanmakuVisible = true;
        mClearFlag = false;
        if (handler == null) {
            return;
        }
        handler.showDanmakus(position);
    }

    @Override
    public void hide() {
        mDanmakuVisible = false;
        if (handler == null) {
            return;
        }
        handler.hideDanmakus(false);
    }

    @Override
    public long hideAndPauseDrawTask() {
        mDanmakuVisible = false;
        if (handler == null) {
            return 0;
        }
        return handler.hideDanmakus(true);
    }

    /**
     * Clears the canvas. On the UI thread (or while hidden) this just flags the
     * next frame as a clear; from the draw thread it blocks like a normal frame.
     */
    @Override
    public void clear() {
        if (!isViewReady()) {
            return;
        }
        if (!mDanmakuVisible || Thread.currentThread().getId() == mUiThreadId) {
            mClearFlag = true;
            postInvalidateCompat();
        } else {
            lockCanvasAndClear();
        }
    }

    // Combines the danmaku visibility flag with the platform View visibility.
    @Override
    public boolean isShown() {
        return mDanmakuVisible && super.isShown();
    }

    @Override
    public void setDrawingThreadType(int type) {
        mDrawingThreadType = type;
    }

    @Override
    public long getCurrentTime() {
        if (handler != null) {
            return handler.getCurrentTime();
        }
        return 0;
    }

    @Override
    @SuppressLint("NewApi")
    public boolean isHardwareAccelerated() {
        // >= 3.0
        if (Build.VERSION.SDK_INT >= 11) {
            return super.isHardwareAccelerated();
        } else {
            return false;
        }
    }

    @Override
    public void clearDanmakusOnScreen() {
        if (handler != null) {
            handler.clearDanmakusOnScreen();
        }
    }

    // The view is only clickable while a click listener is installed.
    @Override
    public void setOnDanmakuClickListener(OnDanmakuClickListener listener) {
        mOnDanmakuClickListener = listener;
        setClickable(null != listener);
    }

    @Override
    public OnDanmakuClickListener getOnDanmakuClickListener() {
        return mOnDanmakuClickListener;
    }
}
| |
package com.eharmony.pho.query.criterion;
import java.util.Collection;
import com.eharmony.pho.query.criterion.expression.EqualityExpression;
import com.eharmony.pho.query.criterion.expression.NativeExpression;
import com.eharmony.pho.query.criterion.expression.RangeExpression;
import com.eharmony.pho.query.criterion.expression.SetExpression;
import com.eharmony.pho.query.criterion.expression.UnaryExpression;
import com.eharmony.pho.query.criterion.junction.Conjunction;
import com.eharmony.pho.query.criterion.junction.Disjunction;
/**
* Hibernate style Restriction expression builder
*/
public class Restrictions {
private Restrictions() {
}
/**
* Apply an "equal" constraint to the named property
*
* @param propertyName
* String
* @param value
* Object
* @return Criterion
*/
public static EqualityExpression eq(String propertyName, Object value) {
return new EqualityExpression(Operator.EQUAL, propertyName, value);
}
/**
* Apply a "not equal" constraint to the named property
*
* @param propertyName
* String
* @param value
* Object
* @return Criterion
*/
public static EqualityExpression ne(String propertyName, Object value) {
return new EqualityExpression(Operator.NOT_EQUAL, propertyName, value);
}
/**
* Apply a "less than" constraint to the named property
*
* @param propertyName
* String
* @param value
* Object
* @return Criterion
*/
public static EqualityExpression lt(String propertyName, Object value) {
return new EqualityExpression(Operator.LESS_THAN, propertyName, value);
}
/**
* Apply a "like" constraint to the named property
*
* @param propertyName
* String
* @param value
* Object
* @return Criterion
*/
public static EqualityExpression like(String propertyName, Object value) {
return new EqualityExpression(Operator.LIKE, propertyName, value);
}
/**
* Apply a "ilike" (case insensitive like) constraint to the named property
*
* @param propertyName
* String
* @param value
* Object
* @return Criterion
*/
public static EqualityExpression ilike(String propertyName, Object value) {
return new EqualityExpression(Operator.ILIKE, propertyName, value);
}
/**
* Apply a "less than or equal" constraint to the named property
*
* @param propertyName
* String
* @param value
* Object
* @return Criterion
*/
public static EqualityExpression lte(String propertyName, Object value) {
return new EqualityExpression(Operator.LESS_THAN_OR_EQUAL, propertyName, value);
}
/**
* Apply a "greater than" constraint to the named property
*
* @param propertyName
* String
* @param value
* Object
* @return Criterion
*/
public static EqualityExpression gt(String propertyName, Object value) {
return new EqualityExpression(Operator.GREATER_THAN, propertyName, value);
}
/**
* Apply a "greater than or equal" constraint to the named property
*
* @param propertyName
* String
* @param value
* Object
* @return Criterion
*/
public static EqualityExpression gte(String propertyName, Object value) {
return new EqualityExpression(Operator.GREATER_THAN_OR_EQUAL, propertyName, value);
}
/**
* Apply a "between" constraint to the named property
*
* @param propertyName
* String
* @param from
* Object
* @param to
* Object
* @return Criterion
*/
public static RangeExpression between(String propertyName, Object from, Object to) {
return new RangeExpression(Operator.BETWEEN, propertyName, from, to);
}
/**
* Apply a "between" constraint to the named integer property with a finite, discrete number of values. This is
* translated into an inclusive "in" expression.
*
* @param propertyName
* String
* @param from
* Object
* @param to
* Object
* @return Criterion
*/
public static SetExpression discreteRange(String propertyName, int from, int to) {
return new SetExpression(Operator.IN, propertyName, from <= to ? range(from, to) : range(to, from));
}
/*
* NOTE: this yields an Integer[], not an int[] to conform with the Object[] signature of the Expressions. Give
* that, nulls are still unacceptable.
*/
protected static Integer[] range(int from, int to) {
if (from > to) {
throw new IllegalArgumentException("from must be <= to (" + from + "," + to + ")");
}
int n = to - from + 1;
Integer[] range = new Integer[n];
for (int i = 0; i < n; i++) {
range[i] = from + i;
}
return range;
}
/**
* Apply an "in" constraint to the named property
*
* @param propertyName
* String
* @param values
* Object[]
* @return Criterion
*/
public static SetExpression in(String propertyName, Object[] values) {
return new SetExpression(Operator.IN, propertyName, values);
}
/**
* Apply an "in" constraint to the named property
*
* @param propertyName
* String
* @param values
* Collection
* @return Criterion
*/
public static SetExpression in(String propertyName, Collection<? extends Object> values) {
return in(propertyName, values.toArray());
}
/**
* Apply a "not in" constraint to the named property
*
* @param propertyName
* String
* @param values
* Object[]
* @return Criterion
*/
public static SetExpression notIn(String propertyName, Object[] values) {
return new SetExpression(Operator.NOT_IN, propertyName, values);
}
/**
* Apply a "not in" constraint to the named property
*
* @param propertyName
* String
* @param values
* Collection
* @return Criterion
*/
public static SetExpression notIn(String propertyName, Collection<? extends Object> values) {
return notIn(propertyName, values.toArray());
}
/**
* Apply a "contains" constraint to the named property
*
* @param propertyName
* String
* @param value
* Object
* @return SetExpression
*/
public static SetExpression contains(String propertyName, Object value) {
return contains(propertyName, new Object[] { value });
}
/**
* Apply a "contains" constraint to the named property
*
* @param propertyName
* String
* @param values
* Object[]
* @return Criterion
*/
public static SetExpression contains(String propertyName, Object[] values) {
return new SetExpression(Operator.CONTAINS, propertyName, values);
}
/**
* Apply an "is null" constraint to the named property
*
* @param propertyName
* String
* @return Criterion
*/
public static UnaryExpression isNull(String propertyName) {
return new UnaryExpression(Operator.NULL, propertyName);
}
/**
* Apply an "is not null" constraint to the named property
*
* @param propertyName
* String
* @return Criterion
*/
public static UnaryExpression isNotNull(String propertyName) {
return new UnaryExpression(Operator.NOT_NULL, propertyName);
}
/**
* Constrain a collection valued property to be empty
*
* @param propertyName
* String
* @return UnaryExpression
*
*/
public static UnaryExpression isEmpty(String propertyName) {
return new UnaryExpression(Operator.EMPTY, propertyName);
}
public static UnaryExpression isNotEmpty(String propertyName) {
return new UnaryExpression(Operator.NOT_EMPTY, propertyName);
}
public static <T> NativeExpression nativeQuery(Class<T> type, T expression) {
return new NativeExpression(type, expression);
}
/**
* Return the conjunction of two expressions
*
* @param criteria
* Criterion
* @return Conjunction
*/
public static Conjunction and(Criterion... criteria) {
return new Conjunction(criteria);
}
/**
* Return the disjunction of two expressions
*
* @param criteria
* Criterion
* @return Disjunction
*/
public static Disjunction or(Criterion... criteria) {
return new Disjunction(criteria);
}
}
| |
/*
* Copyright (C) 2013 YIXIA.COM
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.vov.vitamio;
import android.content.Context;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import io.vov.vitamio.utils.CPU;
import io.vov.vitamio.utils.ContextUtils;
import io.vov.vitamio.utils.IOUtils;
import io.vov.vitamio.utils.Log;
/**
* Inspect this class before using any other Vitamio classes.
* <p/>
* Don't modify this class, or the full Vitamio library may be broken.
*/
public class Vitamio {
  // Native codec bundles per ABI; the matching set is selected in getRequiredLibs().
  private static final String[] LIBS_ARM_CODECS = {"libvvo.7.so", "libvvo.8.so", "libffmpeg.so", "libOMX.9.so", "libOMX.11.so", "libOMX.14.so", "libOMX.18.so"};
  private static final String[] LIBS_X86_CODECS = {"libffmpeg.so", "libOMX.9.so", "libOMX.14.so", "libOMX.18.so"};
  private static final String[] LIBS_MIPS_CODECS = {"libffmpeg.so", "libOMX.14.so"};
  // Player and media-scanner entry libraries, required for every supported ABI.
  private static final String[] LIBS_PLAYER = {"libvplayer.so"};
  private static final String[] LIBS_SCANNER = {"libvscanner.so"};
  // Audio/video output plugins shared by all ABIs.
  private static final String[] LIBS_AV = {"libvao.0.so", "libvvo.0.so", "libvvo.9.so", "libvvo.j.so"};
  // Marker file in the libs dir recording the app versionCode the libs were extracted for.
  private static final String LIBS_LOCK = ".lock";
  // Platform variants; resolved once into vitamioType by the static initializer below.
  private static final int VITAMIO_NOT_SUPPORTED = -1;
  private static final int VITAMIO_MIPS = 40;
  private static final int VITAMIO_X86 = 50;
  private static final int VITAMIO_ARMV6 = 60;
  private static final int VITAMIO_ARMV6_VFP = 61;
  private static final int VITAMIO_ARMV7_VFPV3 = 70;
  private static final int VITAMIO_ARMV7_NEON = 71;
  private static final int vitamioType;
static {
int cpu = CPU.getFeature();
if ((cpu & CPU.FEATURE_ARM_NEON) > 0)
vitamioType = VITAMIO_ARMV7_NEON;
else if ((cpu & CPU.FEATURE_ARM_VFPV3) > 0 && (cpu & CPU.FEATURE_ARM_V7A) > 0)
vitamioType = VITAMIO_ARMV7_VFPV3;
else if ((cpu & CPU.FEATURE_ARM_VFP) > 0 && (cpu & CPU.FEATURE_ARM_V6) > 0)
vitamioType = VITAMIO_ARMV6_VFP;
else if ((cpu & CPU.FEATURE_ARM_V6) > 0)
vitamioType = VITAMIO_ARMV6;
else if ((cpu & CPU.FEATURE_X86) > 0)
vitamioType = VITAMIO_X86;
else if ((cpu & CPU.FEATURE_MIPS) > 0)
vitamioType = VITAMIO_MIPS;
else
vitamioType = VITAMIO_NOT_SUPPORTED;
}
  // Both are (re)assigned by isInitialized(Context); null until it has run at least once.
  private static String vitamioPackage;
  private static String vitamioLibraryPath;
/**
* Call this method before using any other Vitamio specific classes.
* <p/>
* This method will use {@link #isInitialized(Context)} to check if Vitamio is
* initialized at this device, and initialize it if not initialized.
*
* @param ctx Android Context
* @return true if the Vitamio initialized successfully.
*/
public static boolean initialize(Context ctx) {
return isInitialized(ctx) || extractLibs(ctx, R.raw.libarm);
}
/**
* Same as {@link #initialize(Context)}
*
* @param ctx Android Context
* @param rawId R.raw.libarm
* @return true if the Vitamio initialized successfully.
*/
public static boolean initialize(Context ctx, int rawId) {
return isInitialized(ctx) || extractLibs(ctx, rawId);
}
  /**
   * Check if Vitamio is initialized at this device.
   *
   * <p>Side effect: (re)computes {@link #getVitamioPackage()} and
   * {@link #getLibraryPath()} from the given context. Initialization is
   * considered complete when every required native library exists in the libs
   * directory AND the version recorded in the {@code .lock} file matches the
   * current app versionCode (so libs are re-extracted after an app upgrade).
   *
   * @param ctx Android Context
   * @return true if the Vitamio has been initialized.
   */
  public static boolean isInitialized(Context ctx) {
    vitamioPackage = ctx.getPackageName();
    vitamioLibraryPath = ContextUtils.getDataDir(ctx) + "libs/";
    File dir = new File(getLibraryPath());
    if (dir.exists() && dir.isDirectory()) {
      String[] libs = dir.list();
      if (libs != null) {
        // Sort so each required lib can be located via binary search.
        Arrays.sort(libs);
        for (String L : getRequiredLibs()) {
          if (Arrays.binarySearch(libs, L) < 0) {
            Log.e("Native libs %s not exists!", L);
            return false;
          }
        }
        // The lock file's single line holds the versionCode the libs were extracted for.
        File lock = new File(getLibraryPath() + LIBS_LOCK);
        BufferedReader buffer = null;
        try {
          buffer = new BufferedReader(new FileReader(lock));
          int appVersion = ContextUtils.getVersionCode(ctx);
          // A missing/empty line throws NumberFormatException, handled below.
          int libVersion = Integer.valueOf(buffer.readLine());
          Log.i("isNativeLibsInited, APP VERSION: %d, Vitamio Library version: %d", appVersion, libVersion);
          if (libVersion == appVersion)
            return true;
        } catch (IOException e) {
          Log.e("isNativeLibsInited", e);
        } catch (NumberFormatException e) {
          Log.e("isNativeLibsInited", e);
        } finally {
          IOUtils.closeSilently(buffer);
        }
      }
    }
    // Missing directory, missing libs, unreadable lock file, or stale version.
    return false;
  }
// Package name of the app hosting Vitamio; valid only after isInitialized() ran.
public static String getVitamioPackage() {
return vitamioPackage;
}
// CPU-specific library flavor detected at class load (one of the VITAMIO_* constants).
public static int getVitamioType() {
return vitamioType;
}
// Directory the native libraries are extracted into (trailing slash included);
// valid only after isInitialized() ran.
public static final String getLibraryPath() {
return vitamioLibraryPath;
}
/**
 * Lists the native library file names (plus the {@code .lock} marker) that must
 * exist in the library directory for the detected CPU type.
 *
 * @return required file names, or an empty list when the CPU is unsupported.
 */
private static final List<String> getRequiredLibs() {
    List<String> libs = new ArrayList<String>();
    String[][] vitamioLibs = null;
    switch (vitamioType) {
        case VITAMIO_ARMV6:
        case VITAMIO_ARMV6_VFP:
        case VITAMIO_ARMV7_VFPV3:
        case VITAMIO_ARMV7_NEON:
            vitamioLibs = new String[][]{LIBS_ARM_CODECS, LIBS_PLAYER, LIBS_SCANNER, LIBS_AV};
            break;
        case VITAMIO_X86:
            vitamioLibs = new String[][]{LIBS_X86_CODECS, LIBS_PLAYER, LIBS_SCANNER, LIBS_AV};
            break;
        case VITAMIO_MIPS:
            vitamioLibs = new String[][]{LIBS_MIPS_CODECS, LIBS_PLAYER, LIBS_SCANNER, LIBS_AV};
            break;
        default:
            // VITAMIO_NOT_SUPPORTED: no libraries can be required.
            break;
    }
    if (vitamioLibs == null) {
        return libs;
    }
    for (String[] libArray : vitamioLibs) {
        // Bulk-add instead of the hand-rolled element-by-element copy loop.
        libs.addAll(Arrays.asList(libArray));
    }
    // The lock file is checked alongside the libraries by isInitialized().
    libs.add(LIBS_LOCK);
    return libs;
}
/**
 * Extracts the compressed native libraries from the given raw resource into the
 * library directory, then writes the {@code .lock} marker containing the current
 * app version code.
 *
 * @param ctx   Android Context
 * @param rawID raw resource holding the compressed libraries
 * @return true only when the native extraction succeeded AND the lock file was
 *         written; the original unconditionally returned true after writing the
 *         lock, masking native_initializeLibs failures.
 */
private static boolean extractLibs(Context ctx, int rawID) {
    long begin = System.currentTimeMillis();
    final int version = ContextUtils.getVersionCode(ctx);
    Log.d("loadLibs start " + version);
    // Remove any stale lock so a failed extraction is not mistaken for success.
    File lock = new File(getLibraryPath() + LIBS_LOCK);
    if (lock.exists())
        lock.delete();
    String libPath = copyCompressedLib(ctx, rawID, "libarm.so");
    Log.d("copyCompressedLib time: " + (System.currentTimeMillis() - begin) / 1000.0);
    if (libPath == null) {
        // copyCompressedLib already logged the failure; nothing to extract.
        return false;
    }
    boolean inited = native_initializeLibs(libPath, getLibraryPath(), String.valueOf(Vitamio.getVitamioType()));
    // The compressed archive is no longer needed once unpacked.
    new File(libPath).delete();
    FileWriter fw = null;
    try {
        lock.createNewFile();
        fw = new FileWriter(lock);
        fw.write(String.valueOf(version));
        // Report success only if the native unpack step itself succeeded.
        return inited;
    } catch (IOException e) {
        Log.e("Error creating lock file", e);
    } finally {
        Log.d("initializeNativeLibs: " + inited);
        Log.d("loadLibs time: " + (System.currentTimeMillis() - begin) / 1000.0);
        IOUtils.closeSilently(fw);
    }
    return false;
}
/**
 * Copies the raw resource {@code rawID} into {@code <libraryPath>/<destName>},
 * creating the directory and destination file as needed.
 *
 * @return the destination path, or null when the copy failed.
 */
private static String copyCompressedLib(Context ctx, int rawID, String destName) {
    byte[] buffer = new byte[1024];
    InputStream is = null;
    BufferedInputStream bis = null;
    FileOutputStream fos = null;
    String destPath = null;
    try {
        try {
            String destDir = getLibraryPath();
            destPath = destDir + destName;
            File f = new File(destDir);
            // Replace anything occupying the directory path that is not a directory.
            if (f.exists() && !f.isDirectory())
                f.delete();
            if (!f.exists())
                f.mkdirs();
            f = new File(destPath);
            // Likewise ensure the destination is a plain, existing file.
            if (f.exists() && !f.isFile())
                f.delete();
            if (!f.exists())
                f.createNewFile();
        } catch (Exception fe) {
            Log.e("loadLib", fe);
        }
        is = ctx.getResources().openRawResource(rawID);
        bis = new BufferedInputStream(is);
        fos = new FileOutputStream(destPath);
        // BUG FIX: read() may fill only part of the buffer; writing the whole
        // buffer each time padded the output with stale bytes and corrupted the
        // copied archive. Write exactly the number of bytes read.
        int read;
        while ((read = bis.read(buffer)) != -1) {
            fos.write(buffer, 0, read);
        }
    } catch (Exception e) {
        Log.e("loadLib", e);
        return null;
    } finally {
        IOUtils.closeSilently(fos);
        IOUtils.closeSilently(bis);
        IOUtils.closeSilently(is);
    }
    return destPath;
}
static {
// libvinit provides native_initializeLibs(); it must be loaded before
// extractLibs() can unpack the compressed libraries.
System.loadLibrary("vinit");
}
// Unpacks the compressed library archive at libPath into destDir; 'prefix'
// carries the detected Vitamio CPU type as a string. Implemented in libvinit.
private native static boolean native_initializeLibs(String libPath, String destDir, String prefix);
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.daemon.impl;
import com.intellij.codeHighlighting.HighlightDisplayLevel;
import com.intellij.lang.annotation.HighlightSeverity;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.JDOMExternalizableStringList;
import com.intellij.openapi.util.ModificationTracker;
import com.intellij.openapi.util.SimpleModificationTracker;
import com.intellij.profile.codeInspection.InspectionProfileManager;
import com.intellij.util.messages.MessageBus;
import com.intellij.util.messages.Topic;
import it.unimi.dsi.fastutil.objects.Object2IntMap;
import it.unimi.dsi.fastutil.objects.Object2IntMaps;
import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.util.List;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicReference;
/**
 * Registry of {@link HighlightSeverity} instances: maps severity names to their
 * text attributes and renderer colors, maintains the (possibly user-customized)
 * severity ordering, persists it to/from JDOM, and notifies message-bus
 * subscribers when severities change. Thread-safe: state lives in concurrent
 * maps and an atomically swapped order map.
 */
public final class SeverityRegistrar implements Comparator<HighlightSeverity>, ModificationTracker {
/**
* Always first {@link HighlightDisplayLevel#DO_NOT_SHOW} must be skipped during navigation, editing settings, etc.
*/
static final int SHOWN_SEVERITIES_OFFSET = 1;
private static final Logger LOG = Logger.getInstance(SeverityRegistrar.class);
private static final Topic<Runnable> STANDARD_SEVERITIES_CHANGED_TOPIC = new Topic<>("standard severities changed", Runnable.class, Topic.BroadcastDirection.TO_DIRECT_CHILDREN);
@NonNls private static final String INFO_TAG = "info";
@NonNls private static final String COLOR_ATTRIBUTE = "color";
// Custom (non-standard) severities, keyed by severity name.
private final Map<String, SeverityBasedTextAttributes> myMap = new ConcurrentHashMap<>();
// Renderer icon colors for custom severities, keyed by severity name.
private final Map<String, Color> myRendererColors = new ConcurrentHashMap<>();
static final Topic<Runnable> SEVERITIES_CHANGED_TOPIC = new Topic<>("severities changed", Runnable.class, Topic.BroadcastDirection.TO_PARENT);
private final @NotNull MessageBus myMessageBus;
// Cached severity -> order-index map; null means "recompute lazily from the default order".
private final AtomicReference<Object2IntMap<HighlightSeverity>> orderMap = new AtomicReference<>();
// Order list exactly as read from settings, kept so it can be written back
// verbatim; set to null once the order is changed programmatically.
private JDOMExternalizableStringList myReadOrder;
// Built-in severities registered by the platform (severity name -> info type).
private static final Map<String, HighlightInfoType> STANDARD_SEVERITIES;
private final SimpleModificationTracker myModificationTracker = new SimpleModificationTracker();
public SeverityRegistrar(@NotNull MessageBus messageBus) {
myMessageBus = messageBus;
// Invalidate the cached order whenever the set of standard severities changes.
messageBus.simpleConnect().subscribe(STANDARD_SEVERITIES_CHANGED_TOPIC, () -> orderMap.set(null));
}
static {
Map<String, HighlightInfoType> map = new HashMap<>(6);
map.put(HighlightSeverity.ERROR.getName(), HighlightInfoType.ERROR);
map.put(HighlightSeverity.WARNING.getName(), HighlightInfoType.WARNING);
map.put(HighlightSeverity.INFO.getName(), HighlightInfoType.INFO);
map.put(HighlightSeverity.WEAK_WARNING.getName(), HighlightInfoType.WEAK_WARNING);
map.put(HighlightSeverity.GENERIC_SERVER_ERROR_OR_WARNING.getName(), HighlightInfoType.GENERIC_WARNINGS_OR_ERRORS_FROM_SERVER);
map.put(HighlightDisplayLevel.DO_NOT_SHOW.getName(), HighlightInfoType.INFORMATION);
STANDARD_SEVERITIES = new ConcurrentHashMap<>(map);
}
/** Registers an additional standard severity and notifies all registrar instances. */
@SuppressWarnings("unused")
public static void registerStandard(@NotNull HighlightInfoType highlightInfoType, @NotNull HighlightSeverity highlightSeverity) {
STANDARD_SEVERITIES.put(highlightSeverity.getName(), highlightInfoType);
ApplicationManager.getApplication().getMessageBus().syncPublisher(STANDARD_SEVERITIES_CHANGED_TOPIC).run();
}
/** Bulk variant of {@link #registerStandard(HighlightInfoType, HighlightSeverity)}. */
public static void registerStandard(@NotNull Map<String, HighlightInfoType> map) {
STANDARD_SEVERITIES.putAll(map);
ApplicationManager.getApplication().getMessageBus().syncPublisher(STANDARD_SEVERITIES_CHANGED_TOPIC).run();
}
// Application-level registrar when project is null, otherwise the one of the
// project's current inspection profile.
public static @NotNull SeverityRegistrar getSeverityRegistrar(@Nullable Project project) {
return project == null
? InspectionProfileManager.getInstance().getSeverityRegistrar()
: InspectionProfileManager.getInstance(project).getCurrentProfile().getProfileManager().getSeverityRegistrar();
}
@Override
public long getModificationCount() {
return myModificationTracker.getModificationCount();
}
/** Registers a custom severity with its attributes and optional renderer color, then notifies listeners. */
public void registerSeverity(@NotNull SeverityBasedTextAttributes info, Color renderColor) {
HighlightSeverity severity = info.getType().getSeverity(null);
myMap.put(severity.getName(), info);
if (renderColor != null) {
myRendererColors.put(severity.getName(), renderColor);
}
// Drop the cached order; it is rebuilt lazily to include the new severity.
orderMap.set(null);
HighlightDisplayLevel.registerSeverity(severity, getHighlightInfoTypeBySeverity(severity).getAttributesKey(), null);
severitiesChanged();
}
private void severitiesChanged() {
myModificationTracker.incModificationCount();
myMessageBus.syncPublisher(SEVERITIES_CHANGED_TOPIC).run();
}
// Called by SeverityEditorDialog (setOrder() typically follows). Note this DOES
// fire severitiesChanged(), even before the removal takes effect.
public SeverityBasedTextAttributes unregisterSeverity(@NotNull HighlightSeverity severity) {
severitiesChanged();
return myMap.remove(severity.getName());
}
// Resolution order: standard severities, then INFORMATION, then custom ones;
// falls back to WARNING for completely unknown severities.
public @NotNull HighlightInfoType.HighlightInfoTypeImpl getHighlightInfoTypeBySeverity(@NotNull HighlightSeverity severity) {
HighlightInfoType infoType = STANDARD_SEVERITIES.get(severity.getName());
if (infoType != null) {
return (HighlightInfoType.HighlightInfoTypeImpl)infoType;
}
if (severity == HighlightSeverity.INFORMATION){
return (HighlightInfoType.HighlightInfoTypeImpl)HighlightInfoType.INFORMATION;
}
SeverityBasedTextAttributes type = getAttributesBySeverity(severity);
return (HighlightInfoType.HighlightInfoTypeImpl)(type == null ? HighlightInfoType.WARNING : type.getType());
}
private SeverityBasedTextAttributes getAttributesBySeverity(@NotNull HighlightSeverity severity) {
return myMap.get(severity.getName());
}
/** @return attributes of a custom severity, or null for standard/unknown ones. */
public @Nullable TextAttributes getTextAttributesBySeverity(@NotNull HighlightSeverity severity) {
SeverityBasedTextAttributes infoType = getAttributesBySeverity(severity);
if (infoType != null) {
return infoType.getAttributes();
}
return null;
}
/** Replaces all custom severities and the severity order with the persisted state in {@code element}. */
public void readExternal(@NotNull Element element) {
myMap.clear();
myRendererColors.clear();
for (Element infoElement : element.getChildren(INFO_TAG)) {
SeverityBasedTextAttributes highlightInfo = new SeverityBasedTextAttributes(infoElement);
// Renderer color is stored as a hex RGB string (no alpha).
String colorStr = infoElement.getAttributeValue(COLOR_ATTRIBUTE);
@SuppressWarnings("UseJBColor")
Color color = colorStr == null ? null : new Color(Integer.parseInt(colorStr, 16));
registerSeverity(highlightInfo, color);
}
myReadOrder = new JDOMExternalizableStringList();
myReadOrder.readExternal(element);
// Keep only names that resolve to severities present in the default order.
List<HighlightSeverity> read = new ArrayList<>(myReadOrder.size());
List<HighlightSeverity> knownSeverities = getDefaultOrder();
for (String name : myReadOrder) {
HighlightSeverity severity = getSeverity(name);
if (severity != null && knownSeverities.contains(severity)) {
read.add(severity);
}
}
orderMap.set(ensureAllStandardIncluded(read, knownSeverities));
severitiesChanged();
}
// Merges any severities missing from the persisted order back in, inserting each
// one before the first ordered severity with a larger built-in value.
private @NotNull Object2IntMap<HighlightSeverity> ensureAllStandardIncluded(@NotNull List<HighlightSeverity> read, @NotNull List<HighlightSeverity> knownSeverities) {
Object2IntMap<HighlightSeverity> orderMap = fromList(read);
if (orderMap.isEmpty()) {
return fromList(knownSeverities);
}
// enforce include all known
List<HighlightSeverity> list = getSortedSeverities(orderMap);
for (HighlightSeverity stdSeverity : knownSeverities) {
if (!list.contains(stdSeverity)) {
for (int oIdx = 0; oIdx < list.size(); oIdx++) {
HighlightSeverity orderSeverity = list.get(oIdx);
if (orderSeverity.myVal > stdSeverity.myVal) {
list.add(oIdx, stdSeverity);
// The persisted order was incomplete; forget it so the merged order is written out.
myReadOrder = null;
break;
}
}
}
}
return fromList(list);
}
/** Persists custom severities (with renderer colors) and, when customized, the severity order. */
public void writeExternal(@NotNull Element element) {
List<HighlightSeverity> list = getAllSeverities();
for (HighlightSeverity severity : list) {
Element info = new Element(INFO_TAG);
String severityName = severity.getName();
SeverityBasedTextAttributes infoType = getAttributesBySeverity(severity);
if (infoType != null) {
infoType.writeExternal(info);
Color color = myRendererColors.get(severityName);
if (color != null) {
// Store RGB only (mask off alpha) as hex, matching readExternal.
info.setAttribute(COLOR_ATTRIBUTE, Integer.toString(color.getRGB() & 0xFFFFFF, 16));
}
element.addContent(info);
}
}
@SuppressWarnings("deprecation")
JDOMExternalizableStringList readOrder = myReadOrder;
if (readOrder != null && !readOrder.isEmpty()) {
// Unmodified since reading: write back exactly what was read.
readOrder.writeExternal(element);
}
else if (!getDefaultOrder().equals(list)) {
// Customized order: serialize names positioned by their order index.
Object2IntMap<HighlightSeverity> orderMap = getOrderMap();
@SuppressWarnings("deprecation")
JDOMExternalizableStringList ext = new JDOMExternalizableStringList(Collections.nCopies(orderMap.size(), ""));
for (Object2IntMap.Entry<HighlightSeverity> entry : getOrderMap().object2IntEntrySet()) {
ext.set(entry.getIntValue(), entry.getKey().getName());
}
ext.writeExternal(element);
}
}
/** All known severities (standard + custom) in the current order. */
public @NotNull List<HighlightSeverity> getAllSeverities() {
return getSortedSeverities(getOrderMap());
}
private static @NotNull List<HighlightSeverity> getSortedSeverities(@NotNull Object2IntMap<HighlightSeverity> map) {
List<HighlightSeverity> list = new ArrayList<>(map.keySet());
list.sort((o1, o2) -> compare(o1, o2, map));
return list;
}
int getSeveritiesCount() {
return STANDARD_SEVERITIES.size() + myMap.size();
}
/** @return the severity whose order index is {@code index}, or null if there is none. */
public @Nullable HighlightSeverity getSeverityByIndex(int index) {
for (Object2IntMap.Entry<HighlightSeverity> entry : getOrderMap().object2IntEntrySet()) {
if (entry.getIntValue() == index) {
return entry.getKey();
}
}
return null;
}
/** Resolves a severity by name, checking standard severities before custom ones. */
public @Nullable HighlightSeverity getSeverity(@NotNull String name) {
HighlightInfoType type = STANDARD_SEVERITIES.get(name);
if (type != null) return type.getSeverity(null);
SeverityBasedTextAttributes attributes = myMap.get(name);
if (attributes != null) return attributes.getSeverity();
return null;
}
// Icon for UI renderers: the display level's icon when one is registered,
// otherwise a color-mask icon built from the custom renderer color.
Icon getRendererIconByIndex(int index, boolean defaultIcon) {
HighlightSeverity severity = getSeverityByIndex(index);
HighlightDisplayLevel level = HighlightDisplayLevel.find(severity);
if (level != null) {
return defaultIcon ? level.getIcon() : level.getOutlineIcon();
}
return severity != null ? HighlightDisplayLevel.createIconByMask(myRendererColors.get(severity.getName())) : null;
}
public boolean isSeverityValid(@NotNull String severityName) {
return STANDARD_SEVERITIES.containsKey(severityName) || myMap.containsKey(severityName);
}
@Override
public int compare(@NotNull HighlightSeverity s1, @NotNull HighlightSeverity s2) {
return compare(s1, s2, getOrderMap());
}
private static int compare(@NotNull HighlightSeverity s1,
@NotNull HighlightSeverity s2,
@NotNull Object2IntMap<HighlightSeverity> orderMap) {
// Indices are small non-negative ints (or -1 for unknown), so subtraction cannot overflow here.
return orderMap.getInt(s1) - orderMap.getInt(s2);
}
// Lazily (re)builds the cached order map from the default order when it was invalidated.
private @NotNull Object2IntMap<HighlightSeverity> getOrderMap() {
Object2IntMap<HighlightSeverity> map = orderMap.get();
if (map != null) return map;
return orderMap.updateAndGet(oldMap -> oldMap == null ? fromList(getDefaultOrder()) : oldMap);
}
// Builds an unmodifiable severity -> list-index map; unknown severities map to -1.
private static @NotNull Object2IntMap<HighlightSeverity> fromList(@NotNull List<HighlightSeverity> orderList) {
if (orderList.isEmpty()) {
return Object2IntMaps.emptyMap();
}
Object2IntMap<HighlightSeverity> map = new Object2IntOpenHashMap<>(orderList.size());
map.defaultReturnValue(-1);
for (int index = 0; index < orderList.size(); index++) {
HighlightSeverity severity = orderList.get(index);
map.put(severity, index);
}
if (map.size() != orderList.size()) {
LOG.error("Severities order list must contain unique severities but got: " + orderList);
}
return Object2IntMaps.unmodifiable(map);
}
// Standard plus custom severities in their natural (HighlightSeverity) order.
private @NotNull List<HighlightSeverity> getDefaultOrder() {
List<HighlightSeverity> order = new ArrayList<>(STANDARD_SEVERITIES.size() + myMap.size());
for (HighlightInfoType type : STANDARD_SEVERITIES.values()) {
order.add(type.getSeverity(null));
}
for (SeverityBasedTextAttributes attributes : myMap.values()) {
order.add(attributes.getSeverity());
}
order.sort(null);
return order;
}
/** Replaces the severity order (missing standard severities are merged in) and notifies listeners. */
public void setOrder(@NotNull List<HighlightSeverity> orderList) {
orderMap.set(ensureAllStandardIncluded(orderList, getDefaultOrder()));
myReadOrder = null;
severitiesChanged();
}
int getSeverityIdx(@NotNull HighlightSeverity severity) {
return getOrderMap().getInt(severity);
}
public static boolean isDefaultSeverity(@NotNull HighlightSeverity severity) {
return STANDARD_SEVERITIES.containsKey(severity.myName);
}
// True when any provider enables goto-by-severity, or for anything above INFORMATION.
static boolean isGotoBySeverityEnabled(@NotNull HighlightSeverity minSeverity) {
for (SeveritiesProvider provider : SeveritiesProvider.EP_NAME.getIterable()) {
if (provider.isGotoBySeverityEnabled(minSeverity)) {
return true;
}
}
return minSeverity != HighlightSeverity.INFORMATION;
}
/** Immutable pair of a severity's text attributes and its highlight info type. */
public static final class SeverityBasedTextAttributes {
private final TextAttributes myAttributes;
private final HighlightInfoType.HighlightInfoTypeImpl myType;
//read external
SeverityBasedTextAttributes(@NotNull Element element) {
this(new TextAttributes(element), new HighlightInfoType.HighlightInfoTypeImpl(element));
}
public SeverityBasedTextAttributes(@NotNull TextAttributes attributes, @NotNull HighlightInfoType.HighlightInfoTypeImpl type) {
myAttributes = attributes;
myType = type;
}
public @NotNull TextAttributes getAttributes() {
return myAttributes;
}
public @NotNull HighlightInfoType.HighlightInfoTypeImpl getType() {
return myType;
}
private void writeExternal(@NotNull Element element) {
myAttributes.writeExternal(element);
myType.writeExternal(element);
}
public @NotNull HighlightSeverity getSeverity() {
return myType.getSeverity(null);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
SeverityBasedTextAttributes that = (SeverityBasedTextAttributes)o;
return myAttributes.equals(that.myAttributes) && myType.equals(that.myType);
}
@Override
public int hashCode() {
return 31 * myAttributes.hashCode() + myType.hashCode();
}
}
@NotNull
Collection<SeverityBasedTextAttributes> allRegisteredAttributes() {
return Collections.unmodifiableCollection(myMap.values());
}
@NotNull
Collection<HighlightInfoType> standardSeverities() {
return STANDARD_SEVERITIES.values();
}
}
| |
package twg2.cli;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
import twg2.collections.builder.MapBuilder;
/**
 * Mutable builder for command-line {@link ParameterData} instances. Supports
 * plain, enum-mapped, and array-valued parameters.
 */
public class ParameterBuilderImpl<C extends CharSequence, T> implements ParameterBuilder<C, T> {
    private ParameterType type;
    // Maps accepted string values to typed values; null for non-enum parameters.
    private Map<String, T> enumMap;
    private boolean isArray;
    private C primaryName;
    private List<C> aliases;
    private String helpMsg;
    private String requestParamMsg;
    private boolean required;
    private Consumer<T> setter;
    private Predicate<T> validator;
    private Function<T, String> validatorMsgs;

    /**
     * Builder for an enum-mapped parameter.
     *
     * @param enumsMap mapping from accepted string names to values
     * @param dataType the parameter's (component) data type
     * @param isEnum whether this is an enum parameter (currently unused by the delegate, kept for clarity)
     * @param isArray whether the parameter accepts an array of values
     */
    ParameterBuilderImpl(Map<String, ? extends T> enumsMap, Class<T> dataType, boolean isEnum, boolean isArray) {
        this(dataType, isEnum);
        // BUG FIX: honor the explicit isArray flag. For array parameters the callers
        // pass the *component* type as 'dataType', so the delegated constructor always
        // derived isArray == false and enum-array builders silently built non-array
        // parameters.
        this.isArray = isArray;
        @SuppressWarnings("unchecked")
        Map<String, T> enumMapE = (Map<String, T>) enumsMap;
        this.enumMap = enumMapE;
    }

    /** Builder for a parameter of the given {@link ParameterType}. */
    public ParameterBuilderImpl(ParameterType type, boolean isArray) {
        this(isArray ? type.getArrayDataType() : type.getDefaultDataType(), type == ParameterType.ENUM);
    }

    // package-private
    // Derives the ParameterType (and array-ness) from a data type class.
    ParameterBuilderImpl(Class<?> classType, boolean isEnum) {
        this.isArray = classType.isArray();
        if(this.isArray) {
            classType = classType.getComponentType();
        }
        boolean foundType = false;
        for(ParameterType type : ParameterType.values()) {
            if(type.isDataTypeClass(classType)) {
                this.type = type;
                foundType = true;
                break;
            }
        }
        if(!foundType) {
            throw new IllegalArgumentException("the class '" + classType + "'" +
                    " is not a recognized parameter type, a parameter type must be one of ParameterType's values");
        }
        this.aliases = new ArrayList<>();
    }

    @Override
    public ParameterType getParameterType() {
        return type;
    }

    @Override
    public ParameterBuilder<C, T> setParameterType(ParameterType paramType) {
        this.type = paramType;
        return this;
    }

    @Override
    public boolean getIsArrayType() {
        return isArray;
    }

    @Override
    public ParameterBuilder<C, T> setIsArrayType(boolean isArrayType) {
        this.isArray = isArrayType;
        return this;
    }

    @Override
    @SuppressWarnings("unchecked")
    public Map<String, T> getEnumNameMap() {
        return enumMap;
    }

    @Override
    public C getPrimaryName() {
        return primaryName;
    }

    @Override
    public List<C> getAliases() {
        return aliases;
    }

    @Override
    @SafeVarargs
    public final ParameterBuilder<C, T> setNameAndAliases(C primaryName, C... aliases) {
        this.primaryName = primaryName;
        Collections.addAll(this.aliases, aliases);
        return this;
    }

    @Override
    public String getHelpMessage() {
        return helpMsg;
    }

    @Override
    public ParameterBuilder<C, T> setHelpMessage(CharSequence helpMessage) {
        this.helpMsg = helpMessage.toString();
        return this;
    }

    @Override
    public String getRequestParameterMessage() {
        return requestParamMsg;
    }

    @Override
    public ParameterBuilder<C, T> setRequestParameterMessage(CharSequence requestParameterMessage) {
        this.requestParamMsg = requestParameterMessage.toString();
        return this;
    }

    @Override
    public boolean isRequired() {
        return required;
    }

    @Override
    public ParameterBuilder<C, T> setRequired(boolean required) {
        this.required = required;
        return this;
    }

    @Override
    public Consumer<T> getSetter() {
        return setter;
    }

    @Override
    public ParameterBuilder<C, T> setSetter(Consumer<T> setter) {
        this.setter = setter;
        return this;
    }

    @Override
    public Predicate<T> getValidator() {
        return validator;
    }

    @Override
    public ParameterBuilder<C, T> setValidator(Predicate<T> validator) {
        this.validator = validator;
        return this;
    }

    @Override
    public Function<T, String> getValidatorMessageGenerator() {
        return validatorMsgs;
    }

    @Override
    public ParameterBuilder<C, T> setValidatorMessageGenerator(Function<T, String> validatorMessageGenerator) {
        this.validatorMsgs = validatorMessageGenerator;
        return this;
    }

    /** Creates the immutable {@link ParameterData}; the enum-map overload is used when one was supplied. */
    @Override
    public ParameterData<C, T> build() {
        ParameterData<C, T> param = null;
        if(enumMap == null) {
            param = new ParameterDataImpl<>(type, isArray, primaryName, aliases,
                    setter, validator, validatorMsgs, helpMsg, requestParamMsg, required);
        }
        else {
            param = new ParameterDataImpl<>(type, isArray, enumMap, primaryName, aliases,
                    setter, validator, validatorMsgs, helpMsg, requestParamMsg, required);
        }
        return param;
    }

    /** Provides generic type safety for enum parameter builders
     */
    public static final <C extends CharSequence, E extends Enum<E>> ParameterBuilderImpl<C, E> newEnumParameterBuilder(
            Class<E> enumClass) {
        if(enumClass.isArray()) {
            throw new IllegalArgumentException("this method creates a parameter parser for single enum values, " +
                    "use newEnumArrayParameterBuilder() for parsing arrays of enum values");
        }
        Map<String, E> enumMap = MapBuilder.mutableEnumNames(enumClass);
        return new ParameterBuilderImpl<C, E>(enumMap, enumClass, true, false);
    }

    /** Provides generic type safety for enum array parameter builders
     */
    public static final <C extends CharSequence, E extends Enum<E>> ParameterBuilderImpl<C, E> newEnumArrayParameterBuilder(
            Class<E[]> enumArrayClass) {
        if(!enumArrayClass.isArray()) {
            throw new IllegalArgumentException("this method creates a parameter parser for arrays of enum values, " +
                    "use newEnumParameterBuilder() for parsing single enum values");
        }
        @SuppressWarnings("unchecked")
        Class<E> enumType = (Class<E>)enumArrayClass.getComponentType();
        Map<String, E> enumMap = MapBuilder.mutableEnumNames(enumType);
        // BUG FIX: pass isArray = true (was false), matching newEnumArrayMapParameterBuilder();
        // otherwise the resulting builder describes a single-value parameter.
        return new ParameterBuilderImpl<C, E>(enumMap, enumType, true, true);
    }

    public static final <C extends CharSequence, T> ParameterBuilderImpl<C, T> newEnumMapParameterBuilder(
            Map<String, ? extends T> enumsMap, Class<T> dataType) {
        @SuppressWarnings("unchecked")
        Map<String, T> enumMap = (Map<String, T>)enumsMap;
        return new ParameterBuilderImpl<C, T>(enumMap, dataType, true, false);
    }

    public static final <C extends CharSequence, T> ParameterBuilderImpl<C, T> newEnumArrayMapParameterBuilder(
            Map<String, ? extends T> enumsMap, Class<T[]> dataType) {
        @SuppressWarnings("unchecked")
        Class<T> enumType = (Class<T>)dataType.getComponentType();
        @SuppressWarnings("unchecked")
        Map<String, T> enumMap = (Map<String, T>)enumsMap;
        return new ParameterBuilderImpl<C, T>(enumMap, enumType, true, true);
    }
}
| |
/**
* Copyright Pravega Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.pravega.segmentstore.storage.mocks;
import com.google.common.base.Preconditions;
import io.pravega.common.Exceptions;
import io.pravega.common.util.AsyncIterator;
import io.pravega.common.util.BufferView;
import io.pravega.common.util.ByteArraySegment;
import io.pravega.segmentstore.contracts.SegmentType;
import io.pravega.segmentstore.contracts.StreamSegmentExistsException;
import io.pravega.segmentstore.contracts.StreamSegmentNotExistsException;
import io.pravega.segmentstore.contracts.tables.BadKeyVersionException;
import io.pravega.segmentstore.contracts.tables.IteratorArgs;
import io.pravega.segmentstore.contracts.tables.IteratorItem;
import io.pravega.segmentstore.contracts.tables.KeyNotExistsException;
import io.pravega.segmentstore.contracts.tables.TableEntry;
import io.pravega.segmentstore.contracts.tables.TableKey;
import io.pravega.segmentstore.contracts.tables.TableSegmentConfig;
import io.pravega.segmentstore.contracts.tables.TableSegmentInfo;
import io.pravega.segmentstore.contracts.tables.TableStore;
import java.time.Duration;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Function;
import java.util.stream.Collectors;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.ThreadSafe;
import lombok.Data;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.val;
@RequiredArgsConstructor
@ThreadSafe
public class InMemoryTableStore implements TableStore {
@GuardedBy("tables")
private final HashMap<String, TableData> tables = new HashMap<>();
private final AtomicBoolean closed = new AtomicBoolean();
@NonNull
private final Executor executor;
//region TableStore Implementation
@Override
public CompletableFuture<Void> createSegment(String segmentName, SegmentType segmentType, TableSegmentConfig config, Duration timeout) {
Exceptions.checkNotClosed(this.closed.get(), this);
Preconditions.checkArgument(segmentType.isTableSegment(), "Expected SegmentType.isSortedSegment.");
Preconditions.checkArgument(segmentType.isSystem(), "Expected SegmentType.isSystem.");
Preconditions.checkArgument(segmentType.isCritical(), "Expected SegmentType.isCritical.");
Preconditions.checkArgument(!segmentType.isFixedKeyLengthTableSegment(), "Fixed-Key-Length Table Segments not supported in this mock.");
return CompletableFuture.runAsync(() -> {
synchronized (this.tables) {
if (this.tables.containsKey(segmentName)) {
throw new CompletionException(new StreamSegmentExistsException(segmentName));
}
this.tables.put(segmentName, new TableData(segmentName));
}
}, this.executor);
}
@Override
public CompletableFuture<Void> deleteSegment(String segmentName, boolean mustBeEmpty, Duration timeout) {
Exceptions.checkNotClosed(this.closed.get(), this);
return CompletableFuture.runAsync(() -> {
synchronized (this.tables) {
if (this.tables.remove(segmentName) == null) {
throw new CompletionException(new StreamSegmentNotExistsException(segmentName));
}
}
}, this.executor);
}
@Override
public CompletableFuture<List<Long>> put(String segmentName, List<TableEntry> entries, Duration timeout) {
Exceptions.checkNotClosed(this.closed.get(), this);
return CompletableFuture.supplyAsync(() -> getTableData(segmentName).put(entries), this.executor);
}
@Override
public CompletableFuture<List<Long>> put(String segmentName, List<TableEntry> entries, long tableSegmentOffset, Duration timeout) {
Exceptions.checkNotClosed(this.closed.get(), this);
return CompletableFuture.supplyAsync(() -> getTableData(segmentName).put(entries), this.executor);
}
@Override
public CompletableFuture<Void> remove(String segmentName, Collection<TableKey> keys, Duration timeout) {
Exceptions.checkNotClosed(this.closed.get(), this);
return CompletableFuture.runAsync(() -> getTableData(segmentName).remove(keys), this.executor);
}
@Override
public CompletableFuture<Void> remove(String segmentName, Collection<TableKey> keys, long tableSegmentOffset, Duration timeout) {
Exceptions.checkNotClosed(this.closed.get(), this);
return CompletableFuture.runAsync(() -> getTableData(segmentName).remove(keys), this.executor);
}
@Override
public CompletableFuture<List<TableEntry>> get(String segmentName, List<BufferView> keys, Duration timeout) {
Exceptions.checkNotClosed(this.closed.get(), this);
return CompletableFuture.supplyAsync(() -> getTableData(segmentName).get(keys), this.executor);
}
@Override
public CompletableFuture<AsyncIterator<IteratorItem<TableKey>>> keyIterator(String segmentName, IteratorArgs args) {
Collection<TableEntry> tableEntries = getTableEntries(segmentName);
val item = new IteratorItemImpl<>(args.getContinuationToken(), tableEntries.stream().map(TableEntry::getKey).collect(Collectors.toList()));
return CompletableFuture.completedFuture(AsyncIterator.singleton(item));
}
@Override
public CompletableFuture<AsyncIterator<IteratorItem<TableEntry>>> entryIterator(String segmentName, IteratorArgs args) {
Collection<TableEntry> tableEntries = getTableEntries(segmentName);
val item = new IteratorItemImpl<>(args.getContinuationToken(), tableEntries);
return CompletableFuture.completedFuture(AsyncIterator.singleton(item));
}
@Override
public CompletableFuture<AsyncIterator<IteratorItem<TableEntry>>> entryDeltaIterator(String segmentName, long fromPosition, Duration fetchTimeout) {
throw new UnsupportedOperationException();
}
@Override
public CompletableFuture<TableSegmentInfo> getInfo(String segmentName, Duration timeout) {
throw new UnsupportedOperationException();
}
@SneakyThrows(StreamSegmentNotExistsException.class)
private TableData getTableData(String segmentName) {
synchronized (this.tables) {
TableData result = this.tables.get(segmentName);
if (result == null) {
throw new StreamSegmentNotExistsException(segmentName);
}
return result;
}
}
private Collection<TableEntry> getTableEntries(String segmentName) {
TableData tableData = this.tables.get(segmentName);
Collection<TableEntry> tableEntries = null != tableData ?
tableData.entries.values().stream()
.filter(tableEntry -> tableEntry != null && tableEntry.getValue() != null)
.collect(Collectors.toList())
: Collections.emptyList();
return tableEntries;
}
//endregion
//region TableData
/**
 * In-memory backing data for a single table segment: a key-to-entry map plus a
 * monotonically increasing version counter. All access to {@code entries} is
 * serialized on the instance monitor.
 */
@ThreadSafe
@RequiredArgsConstructor
private static class TableData {
    // Source of per-entry versions; incremented once per entry on every put.
    private final AtomicLong nextVersion = new AtomicLong();
    @GuardedBy("this")
    private final HashMap<BufferView, TableEntry> entries = new HashMap<>();
    // Segment this data belongs to; used for exception context only.
    private final String segmentName;
    /**
     * Conditionally updates the given entries (after validating any supplied key
     * versions) and returns the new version assigned to each, in input order.
     */
    synchronized List<Long> put(List<TableEntry> entries) {
        validateKeys(entries, TableEntry::getKey);
        return entries
                .stream()
                .map(e -> {
                    long version = this.nextVersion.incrementAndGet();
                    // Deep-copy the key and value so later mutation of the caller's
                    // buffers cannot corrupt the stored state.
                    val key = new ByteArraySegment(e.getKey().getKey().getCopy());
                    this.entries.put(key,
                            TableEntry.versioned(key, new ByteArraySegment(e.getValue().getCopy()), version));
                    return version;
                })
                .collect(Collectors.toList());
    }
    /** Conditionally removes the given keys after validating any supplied versions. */
    synchronized void remove(Collection<TableKey> keys) {
        validateKeys(keys, k -> k);
        keys.forEach(k -> this.entries.remove(k.getKey()));
    }
    /** Returns the entry for each key, in order; positions with no match are null. */
    synchronized List<TableEntry> get(List<BufferView> keys) {
        return keys.stream().map(this.entries::get).collect(Collectors.toList());
    }
    /**
     * For every versioned key in {@code items}, verifies the supplied version against
     * the stored one. Failures surface as {@link CompletionException}s wrapping
     * KeyNotExistsException / BadKeyVersionException, mirroring async table semantics.
     */
    @GuardedBy("this")
    private <T> void validateKeys(Collection<T> items, Function<T, TableKey> getKey) {
        items.stream()
                .map(getKey)
                .filter(TableKey::hasVersion)
                .forEach(k -> {
                    TableEntry e = this.entries.get(k.getKey());
                    if (e == null) {
                        // Missing entry is only acceptable if the caller asserted NOT_EXISTS.
                        if (k.getVersion() != TableKey.NOT_EXISTS) {
                            throw new CompletionException(new KeyNotExistsException(this.segmentName, k.getKey()));
                        }
                    } else if (k.getVersion() != e.getKey().getVersion()) {
                        throw new CompletionException(new BadKeyVersionException(this.segmentName, Collections.emptyMap()));
                    }
                });
    }
    /** Returns a copy with the same version counter and an independent entry map. */
    synchronized TableData deepCopy() {
        TableData clone = new TableData(this.segmentName);
        clone.nextVersion.set(this.nextVersion.get());
        // TableEntry values are shared (treated as immutable); only the map is copied.
        clone.entries.putAll(this.entries);
        return clone;
    }
}
/**
 * Implementation of {@link IteratorItem} for {@link InMemoryTableStore}.
 * Pairs a continuation-state token with the batch of entries it produced.
 *
 * @param <T> Entry type.
 */
@Data
private static class IteratorItemImpl<T> implements IteratorItem<T> {
    // Opaque resumption token identifying the iterator position after this batch.
    private final BufferView state;
    // The entries produced for this iteration step.
    private final Collection<T> entries;
}
//endregion
/**
 * Creates a clone for given {@link InMemoryTableStore}. Useful to simulate zombie
 * segment container.
 *
 * @param original Metadata store to clone.
 * @return Clone of given instance.
 */
public static InMemoryTableStore clone(InMemoryTableStore original) {
    InMemoryTableStore clone = new InMemoryTableStore(original.executor);
    // Iterate under the same monitor other methods use to mutate 'tables'; the
    // unsynchronized walk risked ConcurrentModificationException and stale reads.
    synchronized (original.tables) {
        for (val e : original.tables.entrySet()) {
            clone.tables.put(e.getKey(), e.getValue().deepCopy());
        }
    }
    return clone;
}
}
| |
/*
* Copyright 2012-2014 eBay Software Foundation and selendroid committers.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package io.selendroid.server.model;
import android.app.Activity;
import android.view.ViewParent;
import io.selendroid.ServerInstrumentation;
import io.selendroid.android.AndroidWait;
import io.selendroid.android.KeySender;
import io.selendroid.android.ViewHierarchyAnalyzer;
import io.selendroid.android.internal.Dimension;
import io.selendroid.android.internal.Point;
import io.selendroid.exceptions.ElementNotVisibleException;
import io.selendroid.exceptions.NoSuchElementAttributeException;
import io.selendroid.exceptions.NoSuchElementException;
import io.selendroid.exceptions.SelendroidException;
import io.selendroid.exceptions.TimeoutException;
import io.selendroid.server.model.interactions.AndroidCoordinates;
import io.selendroid.server.model.interactions.Coordinates;
import io.selendroid.server.model.internal.AbstractNativeElementContext;
import io.selendroid.util.Function;
import io.selendroid.util.Preconditions;
import io.selendroid.util.SelendroidLogger;
import java.lang.ref.WeakReference;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
import org.json.JSONException;
import org.json.JSONObject;
import android.graphics.Rect;
import android.os.SystemClock;
import android.view.MotionEvent;
import android.view.View;
import android.webkit.JsResult;
import android.webkit.WebChromeClient;
import android.webkit.WebView;
import android.widget.CompoundButton;
import android.widget.EditText;
import android.widget.TextView;
public class AndroidNativeElement implements AndroidElement {
// TODO revisit
protected static final long DURATION_OF_LONG_PRESS = 750L;// (long)
// (ViewConfiguration.getLongPressTimeout()
// * 1.5f);
private WeakReference<View> viewRef;
private Collection<AndroidElement> children = new LinkedHashSet<AndroidElement>();
private AndroidElement parent;
private ServerInstrumentation instrumentation;
private final KeySender keys;
private SearchContext nativeElementSearchScope = null;
private Coordinates coordinates = null;
final Object syncObject = new Object();
private Boolean done = false;
private KnownElements ke;
private int hashCode;
static final long UI_TIMEOUT = 3000L;
private final String id;
public AndroidNativeElement(View view, ServerInstrumentation instrumentation, KeySender keys,
KnownElements ke) {
Preconditions.checkNotNull(view);
this.viewRef = new WeakReference<View>(view);
hashCode = view.hashCode() + 31;
this.instrumentation = instrumentation;
this.keys = keys;
this.nativeElementSearchScope = new NativeElementSearchScope(instrumentation, keys, ke);
this.ke = ke;
Random random = new Random();
this.id = new UUID(random.nextLong(), random.nextLong()).toString();
}
@Override
public AndroidElement getParent() {
return parent;
}
public boolean isDisplayed() {
View view = getView();
boolean hasWindowFocus = view.hasWindowFocus();
boolean isEnabled = view.isEnabled();
int width = view.getWidth();
int height = view.getHeight();
int visibility = view.getVisibility();
boolean isVisible = (View.VISIBLE == visibility);
// Check visibility of the view and its parents as well.
// This is more reliable when transitions between activities are in progress.
boolean isShown = view.isShown();
boolean isDisplayed =
hasWindowFocus && isEnabled && isVisible && isShown && (width > 0) && (height > 0);
if (!isDisplayed) {
Activity activity = instrumentation.getCurrentActivity();
View focusedView = activity.getCurrentFocus();
String displayCheckFailureMessage =
String.format(
"Display check failed\n" +
"for view: %s\n" +
"isVisible: %b\nvisibility: %d\nisShown: %b\nhasWindowFocus: %b\n" +
"isEnabled: %b\nwidth: %d\nheight: %d\ncurrent activity: %s\nfocused view: %s",
view, isVisible, visibility, isShown, hasWindowFocus, isEnabled,
width, height, activity, focusedView);
SelendroidLogger.debug(displayCheckFailureMessage);
if (!isShown) {
logIsShownCheckFailure(view);
}
// Check the view belongs to the same view hierarchy as the view with current window focus.
// If true, this usually means a system alert dialog is rendered on top of the view
// (typically this is an app crash dialog).
if (!hasWindowFocus) {
if (activity != null && focusedView != null) {
if (view.getRootView() == focusedView.getRootView()) {
SelendroidLogger.debug("hasWindowFocus() check failed. " +
"This usually means the view is covered by a system dialog.");
}
}
}
}
return isDisplayed;
}
/**
* If view.isShown() == false, logs why exactly this evaluates to false.
* Copied from Android's implementation of View.isShown().
*/
private void logIsShownCheckFailure(View view) {
try {
SelendroidLogger.debug("Display check failed because View.isShown() == false");
View current = view;
do {
if ((current.getVisibility()) != View.VISIBLE) {
SelendroidLogger.debug(String.format(
"isShown: View %s is not visible because its ancestor %s has visibility %d",
view, current, current.getVisibility()));
break;
}
ViewParent parent = current.getParent();
if (parent == null) {
SelendroidLogger.debug(String.format(
"isShown: View %s is not visible because its ancestor %s has no parent " +
"(it is not attached to view root): ",
view, current));
break;
}
if (!(parent instanceof View)) {
// The only case where View.isShown() returns true:
// The view needs to have an ancestor that is not a View and all ancestors on the way up have to
// be visible.
break;
}
current = (View) parent;
} while (current != null);
SelendroidLogger.debug(String.format("View %s is not visible", view));
} catch (Exception e) {
// Don't let an exception in debug printing crash the caller
SelendroidLogger.error("isShown() debug printing failed", e);
}
}
private void waitUntilIsDisplayed() {
AndroidWait wait = instrumentation.getAndroidWait();
try {
wait.until(new Function<Void, Boolean>() {
@Override
public Boolean apply(Void input) {
return isDisplayed();
}
});
} catch (TimeoutException exception) {
throw new ElementNotVisibleException(
"You may only do passive read with element not displayed");
}
}
protected void scrollIntoScreenIfNeeded() {
Point leftTopLocation = getLocation();
final int left = leftTopLocation.x;
final int top = leftTopLocation.y;
instrumentation.runOnMainSync(new Runnable() {
@Override
public void run() {
synchronized (syncObject) {
Rect r = new Rect(left, top, getView().getWidth(), getView().getHeight());
getView().requestRectangleOnScreen(r);
done = true;
syncObject.notify();
}
}
});
long end = System.currentTimeMillis() + instrumentation.getAndroidWait().getTimeoutInMillis();
synchronized (syncObject) {
while (!done && System.currentTimeMillis() < end) {
try {
syncObject.wait(AndroidWait.DEFAULT_SLEEP_INTERVAL);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
}
}
@Override
public void enterText(CharSequence... keysToSend) {
requestFocus();
StringBuilder sb = new StringBuilder();
for (CharSequence keys : keysToSend) {
sb.append(keys);
}
send(sb);
}
private void requestFocus() {
final View viewview = getView();
instrumentation.runOnMainSync(new Runnable() {
@Override
public void run() {
viewview.requestFocus();
}
});
click();
}
@Override
public String getText() {
if (getView() instanceof TextView) {
return ((TextView) getView()).getText().toString();
}
System.err.println("not supported elment for getting the text: "
+ getView().getClass().getSimpleName());
return null;
}
@Override
public void click() {
waitUntilIsDisplayed();
scrollIntoScreenIfNeeded();
try {
// is needed for recalculation of location
Thread.sleep(300);
} catch (InterruptedException e) {}
int[] xy = new int[2];
getView().getLocationOnScreen(xy);
final int viewWidth = getView().getWidth();
final int viewHeight = getView().getHeight();
final float x = xy[0] + (viewWidth / 2.0f);
float y = xy[1] + (viewHeight / 2.0f);
clickOnScreen(x, y);
}
private void clickOnScreen(float x, float y) {
final ServerInstrumentation inst = ServerInstrumentation.getInstance();
long downTime = SystemClock.uptimeMillis();
long eventTime = SystemClock.uptimeMillis();
final MotionEvent event =
MotionEvent.obtain(downTime, eventTime, MotionEvent.ACTION_DOWN, x, y, 0);
final MotionEvent event2 =
MotionEvent.obtain(downTime, eventTime, MotionEvent.ACTION_UP, x, y, 0);
try {
inst.sendPointerSync(event);
inst.sendPointerSync(event2);
try {
Thread.sleep(300);
} catch (InterruptedException ignored) {}
} catch (SecurityException e) {
SelendroidLogger.error("error while clicking element", e);
}
}
public Integer getAndroidId() {
int viewId = getView().getId();
return (viewId == View.NO_ID) ? null : viewId;
}
@Override
public AndroidElement findElement(By by) throws NoSuchElementException {
return by.findElement(nativeElementSearchScope);
}
@Override
public List<AndroidElement> findElements(By by) throws NoSuchElementException {
return by.findElements(nativeElementSearchScope);
}
@Override
public Collection<AndroidElement> getChildren() {
return children;
}
public void setParent(AndroidElement parent) {
this.parent = parent;
}
public void addChild(AndroidElement child) {
this.children.add(child);
}
public String toString() {
return new StringBuilder().append("id: ").append(getView().getId()).append("view class: ")
.append(getView().getClass()).append("view content desc: ")
.append(getView().getContentDescription()).toString();
}
protected void send(CharSequence string) {
keys.send(string);
}
public JSONObject toJson() throws JSONException {
JSONObject object = new JSONObject();
JSONObject l10n = new JSONObject();
l10n.put("matches", 0);
object.put("l10n", l10n);
CharSequence cd = getView().getContentDescription();
if (cd != null && cd.length() > 0) {
String label = String.valueOf(cd);
object.put("name", label);
} else {
object.put("name", "");
}
String id = getNativeId();
object.put("id", id.startsWith("id/") ? id.replace("id/", "") : id);
JSONObject rect = new JSONObject();
object.put("rect", rect);
JSONObject origin = new JSONObject();
Point location = getLocation();
origin.put("x", location.x);
origin.put("y", location.y);
rect.put("origin", origin);
JSONObject size = new JSONObject();
Dimension s = getSize();
size.put("height", s.getHeight());
size.put("width", s.getWidth());
rect.put("size", size);
object.put("ref", ke.getIdOfElement(this));
object.put("type", getView().getClass().getSimpleName());
String value = "";
if (getView() instanceof TextView) {
value = String.valueOf(((TextView) getView()).getText());
}
object.put("value", value);
object.put("shown", getView().isShown());
if (getView() instanceof WebView) {
final WebView webview = (WebView) getView();
final WebViewSourceClient client = new WebViewSourceClient();
instrumentation.getCurrentActivity().runOnUiThread(new Runnable() {
public void run() {
synchronized (syncObject) {
webview.getSettings().setJavaScriptEnabled(true);
webview.setWebChromeClient(client);
String script = "document.body.parentNode.innerHTML";
webview.loadUrl("javascript:alert('selendroidSource:'+" + script + ")");
}
}
});
long end = System.currentTimeMillis() + 10000;
waitForDone(end, UI_TIMEOUT, "Error while grabbing web view source code.");
object.put("source", "<html>" + client.result + "</html>");
}
return object;
}
public class WebViewSourceClient extends WebChromeClient {
public Object result = null;
/**
* Unconventional way of adding a Javascript interface but the main reason why I took this way
* is that it is working stable compared to the webview.addJavascriptInterface way.
*/
@Override
public boolean onJsAlert(WebView view, String url, String message, JsResult jsResult) {
if (message != null && message.startsWith("selendroidSource:")) {
jsResult.confirm();
synchronized (syncObject) {
result = message.replaceFirst("selendroidSource:", "");
done = true;
syncObject.notify();
}
return true;
} else {
return super.onJsAlert(view, url, message, jsResult);
}
}
}
private void waitForDone(long end, long timeout, String error) {
synchronized (syncObject) {
while (!done && System.currentTimeMillis() < end) {
try {
syncObject.wait(timeout);
} catch (InterruptedException e) {
throw new SelendroidException(error, e);
}
}
}
}
private String getNativeId() {
return ViewHierarchyAnalyzer.getNativeId(getView());
}
public View getView() {
if (viewRef.get() == null) {
throw new IllegalStateException(
"Trying to access a native element that has already been garbage collected");
}
return viewRef.get();
}
@Override
public void clear() {
final View viewview = getView();
instrumentation.runOnMainSync(new Runnable() {
@Override
public void run() {
viewview.requestFocus();
if (viewview instanceof EditText) {
((EditText) viewview).setText("");
}
}
});
}
@Override
public void submit() {
throw new UnsupportedOperationException("Submit is not supported for native elements.");
}
@Override
public boolean isSelected() {
if (getView() instanceof CompoundButton) {
return ((CompoundButton) getView()).isChecked();
}
throw new UnsupportedOperationException(
"Is selected is only available for view class CheckBox and RadioButton.");
}
@Override
public Point getLocation() {
int[] xy = new int[2];
getView().getLocationOnScreen(xy);
return new Point(xy[0], xy[1]);
}
private class NativeElementSearchScope extends AbstractNativeElementContext {
public NativeElementSearchScope(ServerInstrumentation instrumentation, KeySender keys,
KnownElements knownElements) {
super(instrumentation, keys, knownElements);
}
@Override
protected View getRootView() {
return getView();
}
protected List<View> getTopLevelViews() {
return Arrays.asList(getView());
}
}
@Override
public Coordinates getCoordinates() {
if (coordinates == null) {
coordinates =
new AndroidCoordinates(String.valueOf(getView().getId()), getCenterCoordinates());
}
return coordinates;
}
private Point getCenterCoordinates() {
int height = getView().getHeight();
int width = getView().getWidth();
Point location = getLocation();
int x = location.x + (height / 2);
int y = location.y + (width / 2);
return new Point(x, y);
}
@Override
public int hashCode() {
return hashCode;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (getClass() != obj.getClass()) return false;
AndroidNativeElement other = (AndroidNativeElement) obj;
// Not calling getView() here so inserting into a set with stale elements doesn't suddenly start
// throwing.
if (viewRef.get() == null) {
if (other.viewRef.get() != null) return false;
} else if (!getView().equals(other.viewRef.get())) return false;
return true;
}
@Override
public Dimension getSize() {
return new Dimension(getView().getWidth(), getView().getHeight());
}
@Override
public String getAttribute(String attribute) {
if (attribute.equalsIgnoreCase("nativeid")) {
return getNativeId();
}
String name = capitalizeFirstLetter(attribute);
Method method = getDeclaredMethod("get" + name);
if (method == null) {
method = getDeclaredMethod("is" + name);
if (method == null) {
throw new NoSuchElementAttributeException("The attribute with name '" + name
+ "' was not found.");
}
}
try {
Object result = method.invoke(getView());
return String.valueOf(result);
} catch (IllegalArgumentException e) {
throw new SelendroidException(e);
} catch (IllegalAccessException e) {
throw new SelendroidException(e);
} catch (InvocationTargetException e) {
throw new SelendroidException(e);
}
}
private String capitalizeFirstLetter(String name) {
return name.substring(0, 1).toUpperCase() + name.substring(1);
}
private Method getDeclaredMethod(String name) {
Preconditions.checkNotNull(name);
Method method = null;
try {
method = getView().getClass().getMethod(name);
} catch (NoSuchMethodException e) {
// can happen
}
return method;
}
@Override
public boolean isEnabled() {
return getView().isEnabled();
}
@Override
public String getTagName() {
return getView().getClass().getSimpleName();
}
@Override
public void setText(CharSequence... keysToSend) {
requestFocus();
final View viewview = getView();
StringBuilder sb = new StringBuilder();
for (CharSequence keys : keysToSend) {
sb.append(keys);
}
final String text = getText() + sb;
instrumentation.runOnMainSync(new Runnable() {
@Override
public void run() {
((EditText) viewview).setText(text);
}
});
}
@Override
public String id() {
return this.id;
}
}
| |
package csci230radomskizurnhardy;
import java.util.ArrayList;
/**
 * This class creates an object of type University complete with all attributes
 * and the ability to set and get the University's Emphases.
 *
 * @author Andrew Zurn, Wiley Radomski, Preston Hardy
 * @version 1.1
 */
public class University {

    /** The name of the school. */
    private String school;
    /** The state the school is in. */
    private String state;
    /** The setting the school is in (urban, rural, metropolitan). */
    private String location;
    /** The control of the university, public/state or private. */
    private String control;
    /** The number of students currently attending the school. */
    private int numberOfStudents;
    /** The percentage of students that are female. */
    private double percentFemales;
    /** The average SAT verbal score of those attending the school. */
    private double SATVerbal;
    /** The average SAT math score of those attending the school. */
    private double SATMath;
    /** The cost/expense of the school. */
    private double expenses;
    /** The percentage of students receiving financial aid. */
    private double percentFinancialAid;
    /** The number of applicants that apply to this school. */
    private int numberOfApplicants;
    /** The percent of those applicants that are admitted. */
    private double percentAdmitted;
    /** The percent of those students admitted that go on to enroll. */
    private double percentEnrolled;
    /** The academic standing of the school, scale 1-5. */
    private int academicsScale;
    /** The social standing of the school, scale 1-5. */
    private int socialScale;
    /** The quality of life score of the school, scale 1-5. */
    private int qualityOfLifeScale;
    /** The emphases this school is known for; never null (empty until set). */
    private ArrayList<String> emphases;

    /**
     * Constructor for University.
     *
     * @param school The name of the University
     * @param state The state where the University is located
     * @param location What setting the university is in: urban, rural, metropolitan
     * @param control Whether the school is public or private
     * @param numberOfStudents Number of students attending the University
     * @param percentFemales Percent of student body which is female
     * @param SATVerbal Average SAT verbal score
     * @param SATMath Average SAT math score
     * @param expenses Cost of attending the University
     * @param percentFinancialAid Percent of students which receive financial aid
     * @param numberOfApplicants Number of applicants
     * @param percentAdmitted Percent of potential students admitted
     * @param percentEnrolled Percent of potential students enrolled
     * @param academicsScale Scale from 1-5 of the academics
     * @param socialScale Scale from 1-5 of the social structure
     * @param qualityOfLifeScale Scale from 1-5 of the quality of life
     */
    public University(String school, String state, String location,
            String control, int numberOfStudents, double percentFemales,
            double SATVerbal, double SATMath, double expenses,
            double percentFinancialAid, int numberOfApplicants,
            double percentAdmitted, double percentEnrolled, int academicsScale,
            int socialScale, int qualityOfLifeScale) {
        this.school = school;
        this.state = state;
        this.location = location;
        this.control = control;
        this.numberOfStudents = numberOfStudents;
        this.percentFemales = percentFemales;
        this.SATVerbal = SATVerbal;
        this.SATMath = SATMath;
        this.expenses = expenses;
        this.percentFinancialAid = percentFinancialAid;
        this.numberOfApplicants = numberOfApplicants;
        this.percentAdmitted = percentAdmitted;
        this.percentEnrolled = percentEnrolled;
        this.academicsScale = academicsScale;
        this.socialScale = socialScale;
        this.qualityOfLifeScale = qualityOfLifeScale;
        // Bug fix: the original never initialized 'emphases', so getEmphases()
        // returned null and toString() printed "null" until setEmphases was called.
        this.emphases = new ArrayList<String>();
    }

    /** @return the school name */
    public String getSchool() {
        return school;
    }

    /** @param school the school name to set */
    public void setSchool(String school) {
        this.school = school;
    }

    /** @return the state */
    public String getState() {
        return state;
    }

    /** @param state the state to set */
    public void setState(String state) {
        this.state = state;
    }

    /** @return the location */
    public String getLocation() {
        return location;
    }

    /** @param location the location to set */
    public void setLocation(String location) {
        this.location = location;
    }

    /** @return the control */
    public String getControl() {
        return control;
    }

    /** @param control the control to set */
    public void setControl(String control) {
        this.control = control;
    }

    /** @return the number of students */
    public int getNumberOfStudents() {
        return numberOfStudents;
    }

    /** @param numberOfStudents the number of students to set */
    public void setNumberOfStudents(int numberOfStudents) {
        this.numberOfStudents = numberOfStudents;
    }

    /** @return the percentage of females */
    public double getPercentFemales() {
        return percentFemales;
    }

    /** @param percentFemales the percentage of females to set */
    public void setPercentFemales(double percentFemales) {
        this.percentFemales = percentFemales;
    }

    /** @return the average SAT verbal score */
    public double getSATVerbal() {
        return SATVerbal;
    }

    /** @param SATVerbal the SAT verbal score to set */
    public void setSATVerbal(double SATVerbal) {
        this.SATVerbal = SATVerbal;
    }

    /** @return the average SAT math score */
    public double getSATMath() {
        return SATMath;
    }

    /** @param SATMath the SAT math score to set */
    public void setSATMath(double SATMath) {
        this.SATMath = SATMath;
    }

    /** @return the expenses */
    public double getExpenses() {
        return expenses;
    }

    /** @param expenses the expenses to set */
    public void setExpenses(double expenses) {
        this.expenses = expenses;
    }

    /** @return the percent of students on financial aid */
    public double getPercentFinancialAid() {
        return percentFinancialAid;
    }

    /** @param percentFinancialAid the percent of financial aid to set */
    public void setPercentFinancialAid(double percentFinancialAid) {
        this.percentFinancialAid = percentFinancialAid;
    }

    /** @return the number of applicants */
    public int getNumberOfApplicants() {
        return numberOfApplicants;
    }

    /** @param numberOfApplicants the number of applicants to set */
    public void setNumberOfApplicants(int numberOfApplicants) {
        this.numberOfApplicants = numberOfApplicants;
    }

    /** @return the percent admitted */
    public double getPercentAdmitted() {
        return percentAdmitted;
    }

    /** @param percentAdmitted the percent admitted to set */
    public void setPercentAdmitted(double percentAdmitted) {
        this.percentAdmitted = percentAdmitted;
    }

    /** @return the percent enrolled */
    public double getPercentEnrolled() {
        return percentEnrolled;
    }

    /** @param percentEnrolled the percent enrolled to set */
    public void setPercentEnrolled(double percentEnrolled) {
        this.percentEnrolled = percentEnrolled;
    }

    /** @return the academics scale (1-5) */
    public int getAcademicsScale() {
        return academicsScale;
    }

    /** @param academicsScale the academics scale to set */
    public void setAcademicsScale(int academicsScale) {
        this.academicsScale = academicsScale;
    }

    /** @return the social scale (1-5) */
    public int getSocialScale() {
        return socialScale;
    }

    /** @param socialScale the social scale to set */
    public void setSocialScale(int socialScale) {
        this.socialScale = socialScale;
    }

    /** @return the quality of life scale (1-5) */
    public int getQualityOfLifeScale() {
        return qualityOfLifeScale;
    }

    /** @param qualityOfLifeScale the quality of life scale to set */
    public void setQualityOfLifeScale(int qualityOfLifeScale) {
        this.qualityOfLifeScale = qualityOfLifeScale;
    }

    /** @return the emphases relevant to the school; never null */
    public ArrayList<String> getEmphases() {
        return emphases;
    }

    /** @param emphases the emphases to set */
    public void setEmphases(ArrayList<String> emphases) {
        this.emphases = emphases;
    }

    /**
     * Will return the current values associated with University.
     * Label spacing was made consistent (the original mixed "Label: " and "Label:").
     *
     * @return a human-readable dump of all attributes
     */
    @Override
    public String toString() {
        return ("\n-University-\nSchool: " + school + ", State: "
                + state + ", Location: " + location + ", Control: "
                + control + ", NumberOfStudents: " + numberOfStudents + ", PercentFemales: "
                + percentFemales + ", SATVerbal: " + SATVerbal + ", SATMath: "
                + SATMath + ", Expenses: " + expenses + ", PercentFinancialAid: "
                + percentFinancialAid + ", NumberOfApplicants: " + numberOfApplicants
                + ", PercentAdmitted: " + percentAdmitted + ", PercentEnrolled: "
                + percentEnrolled + ", AcademicsScale: " + academicsScale + ", SocialScale: "
                + socialScale + ", QualityOfLifeScale: " + qualityOfLifeScale + ", Emphases: " + emphases);
    }

    /** Demo driver: builds a University, exercises every setter/getter, and prints. */
    public static void main(String[] args) {
        University uni = new University("Zurn University", "Zurn State", "URBAN", "PRIVATE",
                5000, 65.5, 800, 800, 25000, 85.9, 2000, 50.0, 25.0, 5, 5, 5);
        System.out.println(uni);
        System.out.println("~We now set all of the University's attributes, get them and print them out");
        uni.setSchool("NewSchoolName");
        System.out.println("The new School Name for uni is: " + uni.getSchool());
        uni.setState("NewState");
        System.out.println("The new School State for uni is: " + uni.getState());
        uni.setLocation("NewLocation");
        System.out.println("The new Location for uni is: " + uni.getLocation());
        uni.setControl("NewControl");
        System.out.println("The new Control for uni is: " + uni.getControl());
        uni.setNumberOfStudents(10000);
        System.out.println("The new number of Students for uni is: " + uni.getNumberOfStudents());
        uni.setPercentFemales(85);
        System.out.println("The new percent females for uni is: " + uni.getPercentFemales());
        uni.setSATVerbal(400);
        System.out.println("The new SATVerbal for uni is: " + uni.getSATVerbal());
        uni.setSATMath(600);
        System.out.println("The new SATMath for uni is: " + uni.getSATMath());
        uni.setExpenses(50000);
        System.out.println("The new Expenses for uni is: " + uni.getExpenses());
        uni.setPercentFinancialAid(93);
        System.out.println("The new percent financial aid for uni is: " + uni.getPercentFinancialAid());
        uni.setNumberOfApplicants(2500);
        System.out.println("The new number of applicants for uni is: " + uni.getNumberOfApplicants());
        uni.setPercentAdmitted(75);
        System.out.println("The new percent admitted for uni is: " + uni.getPercentAdmitted());
        uni.setPercentEnrolled(10);
        System.out.println("The new percent enrolled for uni is: " + uni.getPercentEnrolled());
        uni.setAcademicsScale(4);
        System.out.println("The new academic scale for uni is: " + uni.getAcademicsScale());
        uni.setSocialScale(4);
        System.out.println("The new social scale for uni is: " + uni.getSocialScale());
        uni.setQualityOfLifeScale(4);
        System.out.println("The new quality of life for uni is: " + uni.getQualityOfLifeScale());
        ArrayList<String> emphases1 = new ArrayList<String>();
        emphases1.add("LIBERAL-ARTS");
        uni.setEmphases(emphases1);
        System.out.println("The new emphases for uni is" + uni.getEmphases());
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.engine;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.SnapshotDeletionPolicy;
import org.apache.lucene.search.QueryCache;
import org.apache.lucene.search.QueryCachingPolicy;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.codec.CodecService;
import org.elasticsearch.index.indexing.ShardIndexingService;
import org.elasticsearch.index.shard.IndexSearcherWrapper;
import org.elasticsearch.index.shard.MergeSchedulerConfig;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.TranslogRecoveryPerformer;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.translog.TranslogConfig;
import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.indices.memory.IndexingMemoryController;
import org.elasticsearch.threadpool.ThreadPool;
import java.util.concurrent.TimeUnit;
/**
 * Holds all the configuration that is used to create an {@link Engine}.
 * Once an {@link Engine} has been created with this object, changes to this
 * object will affect the {@link Engine} instance.
 */
public final class EngineConfig {
    private final ShardId shardId;
    private final TranslogRecoveryPerformer translogRecoveryPerformer;
    private final Settings indexSettings;
    private volatile ByteSizeValue indexingBufferSize;
    private volatile ByteSizeValue versionMapSize;
    private volatile String versionMapSizeSetting;
    private volatile boolean compoundOnFlush = true;
    // volatile: this value is updated at runtime through setGcDeletesInMillis(...)
    // and read concurrently by engine threads (see getGcDeletesInMillis()); without
    // volatile an updated value is not guaranteed to become visible to them.
    private volatile long gcDeletesInMillis = DEFAULT_GC_DELETES.millis();
    private volatile boolean enableGcDeletes = true;
    private final String codecName;
    private final ThreadPool threadPool;
    private final ShardIndexingService indexingService;
    @Nullable
    private final IndicesWarmer warmer;
    private final Store store;
    private final SnapshotDeletionPolicy deletionPolicy;
    private final MergePolicy mergePolicy;
    private final MergeSchedulerConfig mergeSchedulerConfig;
    private final Analyzer analyzer;
    private final Similarity similarity;
    private final CodecService codecService;
    private final Engine.FailedEngineListener failedEngineListener;
    private final boolean forceNewTranslog;
    private final QueryCache queryCache;
    private final QueryCachingPolicy queryCachingPolicy;
    private final SetOnce<IndexSearcherWrapper> searcherWrapper = new SetOnce<>();
    /**
     * Index setting for compound file on flush. This setting is realtime updateable.
     */
    public static final String INDEX_COMPOUND_ON_FLUSH = "index.compound_on_flush";
    /**
     * Index setting to enable / disable deletes garbage collection.
     * This setting is realtime updateable
     */
    public static final String INDEX_GC_DELETES_SETTING = "index.gc_deletes";
    /**
     * Index setting to change the low level lucene codec used for writing new segments.
     * This setting is <b>not</b> realtime updateable.
     */
    public static final String INDEX_CODEC_SETTING = "index.codec";
    /**
     * The maximum size the version map should grow to before issuing a refresh. Can be an absolute value or a percentage of
     * the current index memory buffer (defaults to 25%)
     */
    public static final String INDEX_VERSION_MAP_SIZE = "index.version_map_size";
    /** if set to true the engine will start even if the translog id in the commit point can not be found */
    public static final String INDEX_FORCE_NEW_TRANSLOG = "index.engine.force_new_translog";
    public static final TimeValue DEFAULT_REFRESH_INTERVAL = new TimeValue(1, TimeUnit.SECONDS);
    public static final TimeValue DEFAULT_GC_DELETES = TimeValue.timeValueSeconds(60);
    public static final String DEFAULT_VERSION_MAP_SIZE = "25%";
    private static final String DEFAULT_CODEC_NAME = "default";
    private TranslogConfig translogConfig;
    private boolean create = false;
    /**
     * Creates a new {@link org.elasticsearch.index.engine.EngineConfig}
     */
    public EngineConfig(ShardId shardId, ThreadPool threadPool, ShardIndexingService indexingService,
                        Settings indexSettings, IndicesWarmer warmer, Store store, SnapshotDeletionPolicy deletionPolicy,
                        MergePolicy mergePolicy, MergeSchedulerConfig mergeSchedulerConfig, Analyzer analyzer,
                        Similarity similarity, CodecService codecService, Engine.FailedEngineListener failedEngineListener,
                        TranslogRecoveryPerformer translogRecoveryPerformer, QueryCache queryCache, QueryCachingPolicy queryCachingPolicy, TranslogConfig translogConfig) {
        this.shardId = shardId;
        this.indexSettings = indexSettings;
        this.threadPool = threadPool;
        this.indexingService = indexingService;
        this.warmer = warmer;
        this.store = store;
        this.deletionPolicy = deletionPolicy;
        this.mergePolicy = mergePolicy;
        this.mergeSchedulerConfig = mergeSchedulerConfig;
        this.analyzer = analyzer;
        this.similarity = similarity;
        this.codecService = codecService;
        this.failedEngineListener = failedEngineListener;
        this.compoundOnFlush = indexSettings.getAsBoolean(EngineConfig.INDEX_COMPOUND_ON_FLUSH, compoundOnFlush);
        codecName = indexSettings.get(EngineConfig.INDEX_CODEC_SETTING, EngineConfig.DEFAULT_CODEC_NAME);
        // We start up inactive and rely on IndexingMemoryController to give us our fair share once we start indexing:
        indexingBufferSize = IndexingMemoryController.INACTIVE_SHARD_INDEXING_BUFFER;
        gcDeletesInMillis = indexSettings.getAsTime(INDEX_GC_DELETES_SETTING, EngineConfig.DEFAULT_GC_DELETES).millis();
        versionMapSizeSetting = indexSettings.get(INDEX_VERSION_MAP_SIZE, DEFAULT_VERSION_MAP_SIZE);
        updateVersionMapSize();
        this.translogRecoveryPerformer = translogRecoveryPerformer;
        this.forceNewTranslog = indexSettings.getAsBoolean(INDEX_FORCE_NEW_TRANSLOG, false);
        this.queryCache = queryCache;
        this.queryCachingPolicy = queryCachingPolicy;
        this.translogConfig = translogConfig;
    }
    /** updates {@link #versionMapSize} based on current setting and {@link #indexingBufferSize} */
    private void updateVersionMapSize() {
        if (versionMapSizeSetting.endsWith("%")) {
            double percent = Double.parseDouble(versionMapSizeSetting.substring(0, versionMapSizeSetting.length() - 1));
            versionMapSize = new ByteSizeValue((long) ((double) indexingBufferSize.bytes() * (percent / 100)));
        } else {
            versionMapSize = ByteSizeValue.parseBytesSizeValue(versionMapSizeSetting, INDEX_VERSION_MAP_SIZE);
        }
    }
    /**
     * Sets the version map size that should trigger a refresh. See {@link #INDEX_VERSION_MAP_SIZE} for details.
     */
    public void setVersionMapSizeSetting(String versionMapSizeSetting) {
        this.versionMapSizeSetting = versionMapSizeSetting;
        updateVersionMapSize();
    }
    /**
     * current setting for the version map size that should trigger a refresh. See {@link #INDEX_VERSION_MAP_SIZE} for details.
     */
    public String getVersionMapSizeSetting() {
        return versionMapSizeSetting;
    }
    /** if true the engine will start even if the translog id in the commit point can not be found */
    public boolean forceNewTranslog() {
        return forceNewTranslog;
    }
    /**
     * returns the size of the version map that should trigger a refresh
     */
    public ByteSizeValue getVersionMapSize() {
        return versionMapSize;
    }
    /**
     * Sets the indexing buffer
     */
    public void setIndexingBufferSize(ByteSizeValue indexingBufferSize) {
        this.indexingBufferSize = indexingBufferSize;
        // the version map threshold may be expressed as a percentage of the buffer
        updateVersionMapSize();
    }
    /**
     * Enables / disables gc deletes
     *
     * @see #isEnableGcDeletes()
     */
    public void setEnableGcDeletes(boolean enableGcDeletes) {
        this.enableGcDeletes = enableGcDeletes;
    }
    /**
     * Returns the initial index buffer size. This setting is only read on startup and otherwise controlled by {@link org.elasticsearch.indices.memory.IndexingMemoryController}
     */
    public ByteSizeValue getIndexingBufferSize() {
        return indexingBufferSize;
    }
    /**
     * Returns <code>true</code> iff flushed segments should be written as compound file system. Defaults to <code>true</code>
     */
    public boolean isCompoundOnFlush() {
        return compoundOnFlush;
    }
    /**
     * Returns the GC deletes cycle in milliseconds.
     */
    public long getGcDeletesInMillis() {
        return gcDeletesInMillis;
    }
    /**
     * Returns <code>true</code> iff delete garbage collection in the engine should be enabled. This setting is updateable
     * in realtime and forces a volatile read. Consumers can safely read this value directly to fetch its latest value. The default is <code>true</code>
     * <p>
     * Engine GC deletion if enabled collects deleted documents from in-memory realtime data structures after a certain amount of
     * time ({@link #getGcDeletesInMillis()}) if enabled. Before deletes are GCed they will cause re-adding the document that was deleted
     * to fail.
     * </p>
     */
    public boolean isEnableGcDeletes() {
        return enableGcDeletes;
    }
    /**
     * Returns the {@link Codec} used in the engines {@link org.apache.lucene.index.IndexWriter}
     * <p>
     * Note: this setting is only read on startup.
     * </p>
     */
    public Codec getCodec() {
        return codecService.codec(codecName);
    }
    /**
     * Returns a thread-pool mainly used to get estimated time stamps from {@link org.elasticsearch.threadpool.ThreadPool#estimatedTimeInMillis()} and to schedule
     * async force merge calls on the {@link org.elasticsearch.threadpool.ThreadPool.Names#OPTIMIZE} thread-pool
     */
    public ThreadPool getThreadPool() {
        return threadPool;
    }
    /**
     * Returns a {@link org.elasticsearch.index.indexing.ShardIndexingService} used inside the engine to inform about
     * pre and post index. The operations are used for statistic purposes etc.
     *
     * @see org.elasticsearch.index.indexing.ShardIndexingService#postIndex(Engine.Index)
     * @see org.elasticsearch.index.indexing.ShardIndexingService#preIndex(Engine.Index)
     *
     */
    public ShardIndexingService getIndexingService() {
        return indexingService;
    }
    /**
     * Returns an {@link org.elasticsearch.indices.IndicesWarmer} used to warm new searchers before they are used for searching.
     * Note: This method might return <code>null</code>
     */
    @Nullable
    public IndicesWarmer getWarmer() {
        return warmer;
    }
    /**
     * Returns the {@link org.elasticsearch.index.store.Store} instance that provides access to the {@link org.apache.lucene.store.Directory}
     * used for the engines {@link org.apache.lucene.index.IndexWriter} to write its index files to.
     * <p>
     * Note: In order to use this instance the consumer needs to increment the stores reference before it is used the first time and hold
     * its reference until it is not needed anymore.
     * </p>
     */
    public Store getStore() {
        return store;
    }
    /**
     * Returns a {@link SnapshotDeletionPolicy} used in the engines
     * {@link org.apache.lucene.index.IndexWriter}.
     */
    public SnapshotDeletionPolicy getDeletionPolicy() {
        return deletionPolicy;
    }
    /**
     * Returns the {@link org.apache.lucene.index.MergePolicy} for the engines {@link org.apache.lucene.index.IndexWriter}
     */
    public MergePolicy getMergePolicy() {
        return mergePolicy;
    }
    /**
     * Returns the {@link MergeSchedulerConfig}
     */
    public MergeSchedulerConfig getMergeSchedulerConfig() {
        return mergeSchedulerConfig;
    }
    /**
     * Returns a listener that should be called on engine failure
     */
    public Engine.FailedEngineListener getFailedEngineListener() {
        return failedEngineListener;
    }
    /**
     * Returns the latest index settings directly from the index settings service.
     */
    public Settings getIndexSettings() {
        return indexSettings;
    }
    /**
     * Returns the engines shard ID
     */
    public ShardId getShardId() { return shardId; }
    /**
     * Returns the analyzer as the default analyzer in the engines {@link org.apache.lucene.index.IndexWriter}
     */
    public Analyzer getAnalyzer() {
        return analyzer;
    }
    /**
     * Returns the {@link org.apache.lucene.search.similarities.Similarity} used for indexing and searching.
     */
    public Similarity getSimilarity() {
        return similarity;
    }
    /**
     * Sets the GC deletes cycle in milliseconds.
     */
    public void setGcDeletesInMillis(long gcDeletesInMillis) {
        this.gcDeletesInMillis = gcDeletesInMillis;
    }
    /**
     * Sets if flushed segments should be written as compound file system. Defaults to <code>true</code>
     */
    public void setCompoundOnFlush(boolean compoundOnFlush) {
        this.compoundOnFlush = compoundOnFlush;
    }
    /**
     * Returns the {@link org.elasticsearch.index.shard.TranslogRecoveryPerformer} for this engine. This class is used
     * to apply transaction log operations to the engine. It encapsulates all the logic to transfer the translog entry into
     * an indexing operation.
     */
    public TranslogRecoveryPerformer getTranslogRecoveryPerformer() {
        return translogRecoveryPerformer;
    }
    /**
     * Return the cache to use for queries.
     */
    public QueryCache getQueryCache() {
        return queryCache;
    }
    /**
     * Return the policy to use when caching queries.
     */
    public QueryCachingPolicy getQueryCachingPolicy() {
        return queryCachingPolicy;
    }
    /**
     * Returns the translog config for this engine
     */
    public TranslogConfig getTranslogConfig() {
        return translogConfig;
    }
    /**
     * Iff set to <code>true</code> the engine will create a new lucene index when opening the engine.
     * Otherwise the lucene index writer is opened in append mode. The default is <code>false</code>
     */
    public void setCreate(boolean create) {
        this.create = create;
    }
    /**
     * Iff <code>true</code> the engine should create a new lucene index when opening the engine.
     * Otherwise the lucene index writer should be opened in append mode. The default is <code>false</code>
     */
    public boolean isCreate() {
        return create;
    }
}
| |
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.testsuite.util.saml;
import org.keycloak.testsuite.util.SamlClientBuilder;
import org.keycloak.dom.saml.v2.SAML2Object;
import org.keycloak.saml.common.constants.GeneralConstants;
import org.keycloak.saml.processing.web.util.PostBindingUtil;
import org.keycloak.saml.processing.web.util.RedirectBindingUtil;
import org.keycloak.testsuite.util.SamlClient.Binding;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import javax.ws.rs.core.Response.Status;
import org.apache.commons.io.IOUtils;
import org.apache.http.NameValuePair;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.client.utils.URLEncodedUtils;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
import org.hamcrest.Matchers;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.junit.Assert.assertThat;
import static org.keycloak.testsuite.util.Matchers.statusCodeIsHC;
/**
 * SAML client-builder step that intercepts a SAML document travelling over the
 * configured {@code binding}, runs it through the configured transformer, and
 * re-sends the (possibly modified) document to the next endpoint using
 * {@code targetBinding}. The target URI and the form/query attribute name can
 * be overridden via {@link #targetUri(URI)} and {@link #targetAttribute(String)}.
 */
public class ModifySamlResponseStepBuilder extends SamlDocumentStepBuilder<SAML2Object, ModifySamlResponseStepBuilder> {
    private final Binding binding;
    private URI targetUri;
    private String targetAttribute;
    private Binding targetBinding;
    public ModifySamlResponseStepBuilder(Binding binding, SamlClientBuilder clientBuilder) {
        super(clientBuilder);
        this.binding = binding;
        this.targetBinding = binding;
    }
    // TODO: support for signing
    @Override
    public HttpUriRequest perform(CloseableHttpClient client, URI currentURI, CloseableHttpResponse currentResponse, HttpClientContext context) throws Exception {
        switch (binding) {
            case REDIRECT:
                return handleRedirectBinding(currentResponse);
            case POST:
                return handlePostBinding(currentResponse);
        }
        throw new RuntimeException("Unknown binding for " + ModifySamlResponseStepBuilder.class.getName());
    }
    public Binding targetBinding() {
        return targetBinding;
    }
    public ModifySamlResponseStepBuilder targetBinding(Binding targetBinding) {
        this.targetBinding = targetBinding;
        return this;
    }
    public String targetAttribute() {
        return targetAttribute;
    }
    public ModifySamlResponseStepBuilder targetAttribute(String attribute) {
        targetAttribute = attribute;
        return this;
    }
    public ModifySamlResponseStepBuilder targetAttributeSamlRequest() {
        return targetAttribute(GeneralConstants.SAML_REQUEST_KEY);
    }
    public ModifySamlResponseStepBuilder targetAttributeSamlResponse() {
        return targetAttribute(GeneralConstants.SAML_RESPONSE_KEY);
    }
    public URI targetUri() {
        return targetUri;
    }
    public ModifySamlResponseStepBuilder targetUri(URI forceUri) {
        this.targetUri = forceUri;
        return this;
    }
    /**
     * Extracts the SAMLRequest/SAMLResponse query parameter from the redirect
     * {@code Location} header, transforms it, and builds the follow-up request.
     */
    protected HttpUriRequest handleRedirectBinding(CloseableHttpResponse currentResponse) throws Exception, IOException, URISyntaxException {
        NameValuePair samlParam = null;
        assertThat(currentResponse, statusCodeIsHC(Status.FOUND));
        String location = currentResponse.getFirstHeader("Location").getValue();
        URI locationUri = URI.create(location);
        List<NameValuePair> params = URLEncodedUtils.parse(locationUri, "UTF-8");
        for (Iterator<NameValuePair> it = params.iterator(); it.hasNext();) {
            NameValuePair param = it.next();
            // use the shared constants instead of repeating the literals
            if (GeneralConstants.SAML_RESPONSE_KEY.equals(param.getName()) || GeneralConstants.SAML_REQUEST_KEY.equals(param.getName())) {
                assertThat("Only one SAMLRequest/SAMLResponse check", samlParam, nullValue());
                samlParam = param;
                it.remove();
            }
        }
        assertThat(samlParam, notNullValue());
        String base64EncodedSamlDoc = samlParam.getValue();
        InputStream decoded = RedirectBindingUtil.base64DeflateDecode(base64EncodedSamlDoc);
        String samlDoc = IOUtils.toString(decoded, GeneralConstants.SAML_CHARSET);
        IOUtils.closeQuietly(decoded);
        String transformed = getTransformer().transform(samlDoc);
        if (transformed == null) {
            // transformer vetoed the document: abort this step
            return null;
        }
        final String attrName = this.targetAttribute != null ? this.targetAttribute : samlParam.getName();
        return createRequest(locationUri, attrName, transformed, params);
    }
    /**
     * Extracts the SAMLRequest/SAMLResponse input field from the auto-submit
     * HTML form, transforms it, and builds the follow-up request (including the
     * RelayState field when present).
     */
    private HttpUriRequest handlePostBinding(CloseableHttpResponse currentResponse) throws Exception {
        assertThat(currentResponse, statusCodeIsHC(Status.OK));
        final String htmlBody = EntityUtils.toString(currentResponse.getEntity());
        assertThat(htmlBody, Matchers.containsString("SAML"));
        org.jsoup.nodes.Document theResponsePage = Jsoup.parse(htmlBody);
        Elements samlResponses = theResponsePage.select("input[name=SAMLResponse]");
        Elements samlRequests = theResponsePage.select("input[name=SAMLRequest]");
        Elements forms = theResponsePage.select("form");
        Elements relayStates = theResponsePage.select("input[name=RelayState]");
        int size = samlResponses.size() + samlRequests.size();
        assertThat("Checking uniqueness of SAMLResponse/SAMLRequest input field in the page", size, is(1));
        assertThat("Checking uniqueness of forms in the page", forms, hasSize(1));
        Element respElement = samlResponses.isEmpty() ? samlRequests.first() : samlResponses.first();
        Element form = forms.first();
        String base64EncodedSamlDoc = respElement.val();
        InputStream decoded = PostBindingUtil.base64DecodeAsStream(base64EncodedSamlDoc);
        String samlDoc = IOUtils.toString(decoded, GeneralConstants.SAML_CHARSET);
        IOUtils.closeQuietly(decoded);
        String transformed = getTransformer().transform(samlDoc);
        if (transformed == null) {
            return null;
        }
        final String attributeName = this.targetAttribute != null
          ? this.targetAttribute
          : respElement.attr("name");
        List<NameValuePair> parameters = new LinkedList<>();
        if (! relayStates.isEmpty()) {
            parameters.add(new BasicNameValuePair(GeneralConstants.RELAY_STATE, relayStates.first().val()));
        }
        URI locationUri = this.targetUri != null
          ? this.targetUri
          : URI.create(form.attr("action"));
        return createRequest(locationUri, attributeName, transformed, parameters);
    }
    /**
     * Dispatches to the POST/redirect request builder according to {@link #targetBinding}.
     */
    protected HttpUriRequest createRequest(URI locationUri, String attributeName, String transformed, List<NameValuePair> parameters) throws IOException, URISyntaxException {
        // Apply an explicitly configured target URI here, for every target
        // binding. Previously only createRedirectRequest honored it, so an
        // explicit targetUri was silently ignored when the source binding was
        // REDIRECT and the target binding was POST.
        if (this.targetUri != null) {
            locationUri = this.targetUri;
        }
        switch (this.targetBinding) {
            case POST:
                return createPostRequest(locationUri, attributeName, transformed, parameters);
            case REDIRECT:
                return createRedirectRequest(locationUri, attributeName, transformed, parameters);
        }
        throw new RuntimeException("Unknown target binding for " + ModifySamlResponseStepBuilder.class.getName());
    }
    protected HttpUriRequest createRedirectRequest(URI locationUri, String attributeName, String transformed, List<NameValuePair> parameters) throws IOException, URISyntaxException {
        final byte[] responseBytes = transformed.getBytes(GeneralConstants.SAML_CHARSET);
        parameters.add(new BasicNameValuePair(attributeName, RedirectBindingUtil.deflateBase64Encode(responseBytes)));
        // kept for subclasses that call this method directly; redundant (but
        // harmless) when reached via createRequest, which already applied it
        if (this.targetUri != null) {
            locationUri = this.targetUri;
        }
        URI target = new URIBuilder(locationUri).setParameters(parameters).build();
        return new HttpGet(target);
    }
    protected HttpUriRequest createPostRequest(URI locationUri, String attributeName, String transformed, List<NameValuePair> parameters) throws IOException {
        HttpPost post = new HttpPost(locationUri);
        parameters.add(new BasicNameValuePair(attributeName, PostBindingUtil.base64Encode(transformed)));
        UrlEncodedFormEntity formEntity = new UrlEncodedFormEntity(parameters, GeneralConstants.SAML_CHARSET);
        post.setEntity(formEntity);
        return post;
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.execution.junit2;
import com.intellij.execution.Location;
import com.intellij.execution.junit2.info.TestInfo;
import com.intellij.execution.junit2.segments.InputConsumer;
import com.intellij.execution.junit2.states.Statistics;
import com.intellij.execution.junit2.states.TestState;
import com.intellij.execution.testframework.*;
import com.intellij.execution.testframework.ui.PrintableTestProxy;
import com.intellij.execution.ui.ConsoleViewContentType;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.pom.Navigatable;
import com.intellij.rt.execution.junit.states.PoolOfTestStates;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
/**
 * Mutable node in the JUnit test-run tree. Each proxy wraps a {@link TestInfo},
 * carries the current {@link TestState}, forwards console output to a
 * {@link Printer}, and propagates state/statistics change events up to its
 * parent and out to registered {@link TestProxyListener}s.
 */
public class TestProxy extends CompositePrintable implements PrintableTestProxy, InputConsumer, ChangingPrintable, TestProxyParent {
  private static final Logger LOG = Logger.getInstance("#com.intellij.execution.junit2.TestProxy");
  private final TestInfo myInfo;
  private TestState myState = TestState.DEFAULT;
  private Printer myPrinter = Printer.DEAF;
  private final TestProxyListenersNotifier myNotifier = new TestProxyListenersNotifier();
  private Statistics myStatistics = new Statistics();
  private TestEventsConsumer myEventsConsumer;
  // last magnitude propagated from a child; -1 means "never updated yet"
  private int myPreviousMagnitude = -1;
  // bumped on every state change; lets consumers detect stale snapshots
  private int myStateTimestamp = 0;
  // true once the printer has been mark()ed for the first error output
  private boolean myMarked = false;
  // private ArrayList myChildren = new ArrayList();
  private final FilterCache myChildren = new FilterCache();
  private TestProxy myParent = null;
  public static final Filter NOT_LEAF = Filter.LEAF.not();
  public TestProxy(@NotNull final TestInfo info) {
    myInfo = info;
  }
  public String toString() {
    return getInfo().getComment() + "." + getInfo().getName();
  }
  /** Records console output; marks the printer once on the first ERROR output. */
  public void onOutput(final String text, final ConsoleViewContentType contentType) {
    if (!myMarked && contentType == ConsoleViewContentType.ERROR_OUTPUT) {
      myPrinter.mark();
      myMarked = true;
    }
    final ExternalOutput printable = new ExternalOutput(text, contentType);
    addLast(printable);
  }
  public void addLast(final Printable printable) {
    super.addLast(printable);
    fireOnNewPrintable(printable);
  }
  private void fireOnNewPrintable(final Printable printable) {
    myPrinter.onNewAvailable(printable);
  }
  /** Prints own output, then all children, then the state's own printable part. */
  public void printOn(final Printer printer) {
    super.printOn(printer);
    CompositePrintable.printAllOn(myChildren.getList(), printer);
    myState.printOn(printer);
  }
  public TestState getState() {
    return myState;
  }
  public void setState(final TestState state) {
    if (myState != state) {
      myState = state;
      fireOnNewPrintable(state);
      fireStateChanged();
    }
  }
  // Bumps the timestamp, emits a StateChangedEvent, notifies the parent and
  // listeners, and refreshes statistics. Order appears deliberate — parent is
  // informed before listeners; do not reorder casually.
  private void fireStateChanged() {
    myStateTimestamp++;
    pullEvent(new StateChangedEvent(this));
    if (myParent != null)
      myParent.onChanged(this);
    fireStatisticsChanged();
    myNotifier.onChanged(this);
  }
  public int getStateTimestamp() {
    return myStateTimestamp;
  }
  public TestProxy getChildAt(final int childIndex) {
    return myChildren.getList().get(childIndex);
  }
  public int getChildCount() {
    return myChildren.getList().size();
  }
  public List<TestProxy> getChildren() {
    return myChildren.getUnmodifiableList();
  }
  public TestProxy getParent() {
    return myParent;
  }
  public Navigatable getDescriptor(final Location location) {
    return getState().getDescriptor(location);
  }
  public String getName() {
    return getInfo().getName();
  }
  public boolean isInProgress() {
    return getState().isInProgress();
  }
  public boolean isDefect() {
    return getState().isDefect();
  }
  public boolean shouldRun() {
    return getInfo().shouldRun();
  }
  public int getMagnitude() {
    return getState().getMagnitude();
  }
  public Location getLocation(final Project project) {
    return getInfo().getLocation(project);
  }
  public boolean isLeaf() {
    return getChildCount() == 0;
  }
  public boolean isPassed() {
    // magnitudes at or below PASSED_INDEX count as "passed"
    return getMagnitude() <= PoolOfTestStates.PASSED_INDEX;
  }
  /**
   * Adopts {@code child} unless it is already present or already has a parent.
   * A live printer is pushed down so the child's existing output is replayed.
   */
  public void addChild(final TestProxy child) {
    if (myChildren.getList().contains(child))
      return;
    if (child.getParent() != null)
      return;//todo throw new RuntimeException("Test: "+child + " already has parent: " + child.getParent());
    myChildren.add(child);
    child.myParent = this;
    if (myPrinter != Printer.DEAF) {
      child.setPrintLinstener(myPrinter);
      child.fireOnNewPrintable(child);
    }
    pullEvent(new NewChildEvent(this, child));
    fireStatisticsChanged();
    getState().changeStateAfterAddingChaildTo(this, child);
    myNotifier.onChildAdded(this, child);
  }
  // NOTE(review): method name is misspelled ("Linstener") but is part of the
  // public interface — renaming would break callers/overriders.
  public void setPrintLinstener(final Printer printer) {
    myPrinter = printer;
    for (Iterator iterator = myChildren.iterator(); iterator.hasNext();) {
      final TestProxy testProxy = (TestProxy) iterator.next();
      testProxy.setPrintLinstener(printer);
    }
  }
  public TestInfo getInfo() {
    return myInfo;
  }
  public void onChildAdded(final AbstractTestProxy parent, final AbstractTestProxy newChild) {
    fireStatisticsChanged();
  }
  /**
   * Called when a descendant's state changed: refreshes this node's state and
   * re-fires only when the aggregate magnitude may have changed, to avoid
   * redundant event storms up the tree.
   */
  public void onChanged(final AbstractTestProxy test) {
    myChildren.resetCache();
    final int magnitude = test.getMagnitude();
    getState().update();
    if (myPreviousMagnitude < magnitude || getState().getMagnitude() <= magnitude) {
      fireStateChanged();
      myPreviousMagnitude = getState().getMagnitude();
    }
  }
  public void onStatisticsChanged(final AbstractTestProxy testProxy) {
    myChildren.resetCache();
    fireStatisticsChanged();
  }
  // Statistics bubble up: parent first, then the event stream, then listeners.
  private void fireStatisticsChanged() {
    myChildren.resetCache();
    if (myParent != null)
      myParent.onStatisticsChanged(this);
    pullEvent(new StatisticsChanged(this));
    myNotifier.onStatisticsChanged(this);
  }
  public void addListener(final TestProxyListener listener) {
    myNotifier.addListener(listener);
  }
  public void setStatistics(final Statistics statistics) {
    myChildren.resetCache();
    if (!myState.isFinal()) {
      // statistics are only expected once the state is final; log for diagnosis
      LOG.error("" + myState.getMagnitude());
    }
    myStatistics = statistics;
    fireStatisticsChanged();
  }
  public Statistics getStatisticsImpl() {
    return myStatistics;
  }
  public boolean hasChildSuites() {
    return myChildren.detect(NOT_LEAF) != null;
  }
  public Statistics getStatistics() {
    return myState.getStatisticsFor(this);
  }
  public TestProxy[] selectChildren(final Filter filter) {
    return myChildren.select(filter);
  }
  public void setEventsConsumer(final TestEventsConsumer eventsConsumer) {
    myEventsConsumer = eventsConsumer;
  }
  // Events climb toward the root until a node with an events consumer is found.
  private void pullEvent(final TestEvent event) {
    if (myEventsConsumer != null) {
      myEventsConsumer.onEvent(event);
      return;
    }
    if (myParent != null)
      myParent.pullEvent(event);
  }
  public List<TestProxy> getAllTests() {
    return myState.getAllTestsOf(this);
  }
  public void collectAllTestsTo(final ArrayList<TestProxy> allTests) {
    allTests.add(this);
    for (Iterator iterator = myChildren.iterator(); iterator.hasNext();) {
      final TestProxy testProxy = (TestProxy) iterator.next();
      testProxy.collectAllTestsTo(allTests);
    }
  }
  /**
   * Returns the nearest proxy that is an ancestor of (or equal to) both this
   * node and {@code test}; {@code this} when {@code test} is null, or null
   * when the two nodes share no common root.
   */
  public TestProxy getCommonAncestor(final TestProxy test) {
    if (test == null) return this;
    if (test.isAncestorOf(this)) return test;
    for (TestProxy parent = this; parent != null; parent = parent.getParent())
      if (parent.isAncestorOf(test)) return parent;
    return null;
  }
  /** True when {@code test} is this node or any of its descendants. */
  public boolean isAncestorOf(final TestProxy test) {
    if (test == null) return false;
    for (TestProxy parent = test; parent != null; parent = parent.getParent())
      if (parent == this) return true;
    return false;
  }
  /** Path from the root proxy down to (and including) this node. */
  public AbstractTestProxy[] getPathFromRoot() {
    final ArrayList<TestProxy> parents = new ArrayList<TestProxy>();
    TestProxy test = this;
    do {
      parents.add(test);
    } while ((test = test.getParent()) != null);
    Collections.reverse(parents);
    return parents.toArray(new TestProxy[parents.size()]);
  }
  public boolean isRoot() {
    return getParent() == null;
  }
  // Seeds the interval from the first child, then widens it over all children.
  // NOTE(review): assumes at least one child exists — getChildAt(0) would throw
  // otherwise; presumably callers guarantee this. TODO confirm.
  public TestState.StateInterval calculateInterval(final SuiteState state) {
    final SuiteState.SuiteStateInterval result = new SuiteState.SuiteStateInterval(state, getChildAt(0).getState().getInterval());
    for (TestProxy proxy : getChildren()) {
      result.updateFrom(proxy.getState().getInterval());
    }
    return result;
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.v2.hs.webapp;
import static org.apache.hadoop.yarn.webapp.WebServicesTestUtils.assertResponseStatusCode;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.StringReader;
import java.util.List;
import java.util.Map;
import javax.ws.rs.core.MediaType;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import com.google.inject.util.Providers;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.JettyUtils;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
import org.apache.hadoop.mapreduce.v2.hs.MockHistoryContext;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.GuiceServletConfig;
import org.apache.hadoop.yarn.webapp.JerseyTestBase;
import org.apache.hadoop.yarn.webapp.WebApp;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.junit.Before;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import com.google.inject.Guice;
import com.google.inject.servlet.ServletModule;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.ClientResponse.Status;
import com.sun.jersey.api.client.UniformInterfaceException;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import com.sun.jersey.test.framework.WebAppDescriptor;
/**
* Test the history server Rest API for getting task attempts, a
* specific task attempt, and task attempt counters
*
* /ws/v1/history/mapreduce/jobs/{jobid}/tasks/{taskid}/attempts
* /ws/v1/history/mapreduce/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}
* /ws/v1/history/mapreduce/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}/
* counters
*/
public class TestHsWebServicesAttempts extends JerseyTestBase {

  private static Configuration conf = new Configuration();
  private static HistoryContext appContext;
  private static HsWebApp webApp;

  /**
   * Guice servlet module wiring the history-server web services against a
   * mocked {@link HsWebApp} and a {@link MockHistoryContext} (one app, one
   * job, two tasks, one attempt each), served through Jersey's GuiceContainer.
   */
  private static class WebServletModule extends ServletModule {
    @Override
    protected void configureServlets() {
      appContext = new MockHistoryContext(0, 1, 2, 1);
      webApp = mock(HsWebApp.class);
      when(webApp.name()).thenReturn("hsmockwebapp");

      bind(JAXBContextResolver.class);
      bind(HsWebServices.class);
      bind(GenericExceptionHandler.class);
      bind(WebApp.class).toInstance(webApp);
      bind(AppContext.class).toInstance(appContext);
      bind(HistoryContext.class).toInstance(appContext);
      bind(Configuration.class).toInstance(conf);
      // No RM client is needed for these tests; provide an explicit null.
      bind(ApplicationClientProtocol.class).toProvider(Providers.of(null));

      serve("/*").with(GuiceContainer.class);
    }
  }

  static {
    GuiceServletConfig.setInjector(
        Guice.createInjector(new WebServletModule()));
  }

  @Before
  @Override
  public void setUp() throws Exception {
    super.setUp();
    // Re-create the injector so every test starts from a fresh mock context.
    GuiceServletConfig.setInjector(
        Guice.createInjector(new WebServletModule()));
  }

  public TestHsWebServicesAttempts() {
    super(
        new WebAppDescriptor.Builder("org.apache.hadoop.mapreduce.v2.hs.webapp")
            .contextListenerClass(GuiceServletConfig.class)
            .filterClass(com.google.inject.servlet.GuiceFilter.class)
            .contextPath("jersey-guice-filter").servletPath("/").build());
  }

  /** GET .../attempts with an explicit JSON Accept header. */
  @Test
  public void testTaskAttempts() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("history")
            .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
            .path("attempts").accept(MediaType.APPLICATION_JSON)
            .get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8,
            response.getType().toString());
        JSONObject json = response.getEntity(JSONObject.class);
        verifyHsTaskAttempts(json, task);
      }
    }
  }

  /** Same as {@link #testTaskAttempts()} but with a trailing slash. */
  @Test
  public void testTaskAttemptsSlash() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("history")
            .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
            .path("attempts/").accept(MediaType.APPLICATION_JSON)
            .get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8,
            response.getType().toString());
        JSONObject json = response.getEntity(JSONObject.class);
        verifyHsTaskAttempts(json, task);
      }
    }
  }

  /** No Accept header: the service should default to JSON. */
  @Test
  public void testTaskAttemptsDefault() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("history")
            .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
            .path("attempts").get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8,
            response.getType().toString());
        JSONObject json = response.getEntity(JSONObject.class);
        verifyHsTaskAttempts(json, task);
      }
    }
  }

  /** GET .../attempts requesting XML and verifying the DOM structure. */
  @Test
  public void testTaskAttemptsXML() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("history")
            .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
            .path("attempts").accept(MediaType.APPLICATION_XML)
            .get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_XML_TYPE + "; " + JettyUtils.UTF_8,
            response.getType().toString());
        String xml = response.getEntity(String.class);
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        DocumentBuilder db = dbf.newDocumentBuilder();
        InputSource is = new InputSource();
        is.setCharacterStream(new StringReader(xml));
        Document dom = db.parse(is);
        NodeList attempts = dom.getElementsByTagName("taskAttempts");
        assertEquals("incorrect number of elements", 1, attempts.getLength());
        NodeList nodes = dom.getElementsByTagName("taskAttempt");
        verifyHsTaskAttemptsXML(nodes, task);
      }
    }
  }

  /** GET .../attempts/{attemptid} as JSON for every known attempt. */
  @Test
  public void testTaskAttemptId() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());
        for (TaskAttempt att : task.getAttempts().values()) {
          TaskAttemptId attemptid = att.getID();
          String attid = MRApps.toString(attemptid);
          ClientResponse response = r.path("ws").path("v1").path("history")
              .path("mapreduce").path("jobs").path(jobId).path("tasks")
              .path(tid).path("attempts").path(attid)
              .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
          assertEquals(MediaType.APPLICATION_JSON_TYPE + "; "
              + JettyUtils.UTF_8, response.getType().toString());
          JSONObject json = response.getEntity(JSONObject.class);
          assertEquals("incorrect number of elements", 1, json.length());
          JSONObject info = json.getJSONObject("taskAttempt");
          verifyHsTaskAttempt(info, att, task.getType());
        }
      }
    }
  }

  /** Same as {@link #testTaskAttemptId()} but with a trailing slash. */
  @Test
  public void testTaskAttemptIdSlash() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());
        for (TaskAttempt att : task.getAttempts().values()) {
          TaskAttemptId attemptid = att.getID();
          String attid = MRApps.toString(attemptid);
          ClientResponse response = r.path("ws").path("v1").path("history")
              .path("mapreduce").path("jobs").path(jobId).path("tasks")
              .path(tid).path("attempts").path(attid + "/")
              .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
          assertEquals(MediaType.APPLICATION_JSON_TYPE + "; "
              + JettyUtils.UTF_8, response.getType().toString());
          JSONObject json = response.getEntity(JSONObject.class);
          assertEquals("incorrect number of elements", 1, json.length());
          JSONObject info = json.getJSONObject("taskAttempt");
          verifyHsTaskAttempt(info, att, task.getType());
        }
      }
    }
  }

  /** No Accept header on the attempt-id endpoint: defaults to JSON. */
  @Test
  public void testTaskAttemptIdDefault() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());
        for (TaskAttempt att : task.getAttempts().values()) {
          TaskAttemptId attemptid = att.getID();
          String attid = MRApps.toString(attemptid);
          ClientResponse response = r.path("ws").path("v1").path("history")
              .path("mapreduce").path("jobs").path(jobId).path("tasks")
              .path(tid).path("attempts").path(attid).get(ClientResponse.class);
          assertEquals(MediaType.APPLICATION_JSON_TYPE + "; "
              + JettyUtils.UTF_8, response.getType().toString());
          JSONObject json = response.getEntity(JSONObject.class);
          assertEquals("incorrect number of elements", 1, json.length());
          JSONObject info = json.getJSONObject("taskAttempt");
          verifyHsTaskAttempt(info, att, task.getType());
        }
      }
    }
  }

  /** GET .../attempts/{attemptid} as XML, verifying each DOM element. */
  @Test
  public void testTaskAttemptIdXML() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());
        for (TaskAttempt att : task.getAttempts().values()) {
          TaskAttemptId attemptid = att.getID();
          String attid = MRApps.toString(attemptid);
          ClientResponse response = r.path("ws").path("v1").path("history")
              .path("mapreduce").path("jobs").path(jobId).path("tasks")
              .path(tid).path("attempts").path(attid)
              .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
          assertEquals(MediaType.APPLICATION_XML_TYPE + "; " + JettyUtils.UTF_8,
              response.getType().toString());
          String xml = response.getEntity(String.class);
          DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
          DocumentBuilder db = dbf.newDocumentBuilder();
          InputSource is = new InputSource();
          is.setCharacterStream(new StringReader(xml));
          Document dom = db.parse(is);
          NodeList nodes = dom.getElementsByTagName("taskAttempt");
          for (int i = 0; i < nodes.getLength(); i++) {
            Element element = (Element) nodes.item(i);
            verifyHsTaskAttemptXML(element, att, task.getType());
          }
        }
      }
    }
  }

  /** A completely malformed attempt id must produce a NotFoundException. */
  @Test
  public void testTaskAttemptIdBogus() throws JSONException, Exception {
    testTaskAttemptIdErrorGeneric("bogusid",
        "java.lang.Exception: TaskAttemptId string : "
            + "bogusid is not properly formed");
  }

  /** A well-formed but unknown attempt id must produce a NotFoundException. */
  @Test
  public void testTaskAttemptIdNonExist() throws JSONException, Exception {
    testTaskAttemptIdErrorGeneric(
        "attempt_0_1234_m_000000_0",
        "java.lang.Exception: Error getting info on task attempt id attempt_0_1234_m_000000_0");
  }

  /** An invalid task-type character ('d') must be rejected. */
  @Test
  public void testTaskAttemptIdInvalid() throws JSONException, Exception {
    testTaskAttemptIdErrorGeneric("attempt_0_1234_d_000000_0",
        "java.lang.Exception: Bad TaskType identifier. TaskAttemptId string : "
            + "attempt_0_1234_d_000000_0 is not properly formed.");
  }

  /** Missing cluster-timestamp segment must be rejected. */
  @Test
  public void testTaskAttemptIdInvalid2() throws JSONException, Exception {
    testTaskAttemptIdErrorGeneric("attempt_1234_m_000000_0",
        "java.lang.Exception: TaskAttemptId string : "
            + "attempt_1234_m_000000_0 is not properly formed");
  }

  /** Missing attempt-number segment must be rejected. */
  @Test
  public void testTaskAttemptIdInvalid3() throws JSONException, Exception {
    testTaskAttemptIdErrorGeneric("attempt_0_1234_m_000000",
        "java.lang.Exception: TaskAttemptId string : "
            + "attempt_0_1234_m_000000 is not properly formed");
  }

  /**
   * Issues a GET for the given (invalid) attempt id against every known task
   * and asserts a 404 whose RemoteException payload carries the expected
   * message, exception type and class name.
   *
   * @param attid the attempt-id path segment to request
   * @param error the exact expected exception message
   */
  private void testTaskAttemptIdErrorGeneric(String attid, String error)
      throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());
        try {
          r.path("ws").path("v1").path("history").path("mapreduce")
              .path("jobs").path(jobId).path("tasks").path(tid)
              .path("attempts").path(attid).accept(MediaType.APPLICATION_JSON)
              .get(JSONObject.class);
          fail("should have thrown exception on invalid uri");
        } catch (UniformInterfaceException ue) {
          ClientResponse response = ue.getResponse();
          assertResponseStatusCode(Status.NOT_FOUND, response.getStatusInfo());
          assertEquals(MediaType.APPLICATION_JSON_TYPE + "; "
              + JettyUtils.UTF_8, response.getType().toString());
          JSONObject msg = response.getEntity(JSONObject.class);
          JSONObject exception = msg.getJSONObject("RemoteException");
          assertEquals("incorrect number of elements", 3, exception.length());
          String message = exception.getString("message");
          String type = exception.getString("exception");
          String classname = exception.getString("javaClassName");
          WebServicesTestUtils.checkStringMatch("exception message", error,
              message);
          WebServicesTestUtils.checkStringMatch("exception type",
              "NotFoundException", type);
          WebServicesTestUtils.checkStringMatch("exception classname",
              "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
        }
      }
    }
  }

  /** Verifies one taskAttempt XML element against the expected attempt. */
  public void verifyHsTaskAttemptXML(Element element, TaskAttempt att,
      TaskType ttype) {
    verifyTaskAttemptGeneric(att, ttype,
        WebServicesTestUtils.getXmlString(element, "id"),
        WebServicesTestUtils.getXmlString(element, "state"),
        WebServicesTestUtils.getXmlString(element, "type"),
        WebServicesTestUtils.getXmlString(element, "rack"),
        WebServicesTestUtils.getXmlString(element, "nodeHttpAddress"),
        WebServicesTestUtils.getXmlString(element, "diagnostics"),
        WebServicesTestUtils.getXmlString(element, "assignedContainerId"),
        WebServicesTestUtils.getXmlLong(element, "startTime"),
        WebServicesTestUtils.getXmlLong(element, "finishTime"),
        WebServicesTestUtils.getXmlLong(element, "elapsedTime"),
        WebServicesTestUtils.getXmlFloat(element, "progress"));
    if (ttype == TaskType.REDUCE) {
      verifyReduceTaskAttemptGeneric(att,
          WebServicesTestUtils.getXmlLong(element, "shuffleFinishTime"),
          WebServicesTestUtils.getXmlLong(element, "mergeFinishTime"),
          WebServicesTestUtils.getXmlLong(element, "elapsedShuffleTime"),
          WebServicesTestUtils.getXmlLong(element, "elapsedMergeTime"),
          WebServicesTestUtils.getXmlLong(element, "elapsedReduceTime"));
    }
  }

  /**
   * Verifies one taskAttempt JSON object: field count (reduce attempts carry
   * five extra shuffle/merge fields) plus the generic field checks.
   */
  public void verifyHsTaskAttempt(JSONObject info, TaskAttempt att,
      TaskType ttype) throws JSONException {
    if (ttype == TaskType.REDUCE) {
      assertEquals("incorrect number of elements", 17, info.length());
    } else {
      assertEquals("incorrect number of elements", 12, info.length());
    }
    verifyTaskAttemptGeneric(att, ttype, info.getString("id"),
        info.getString("state"), info.getString("type"),
        info.getString("rack"), info.getString("nodeHttpAddress"),
        info.getString("diagnostics"), info.getString("assignedContainerId"),
        info.getLong("startTime"), info.getLong("finishTime"),
        info.getLong("elapsedTime"), (float) info.getDouble("progress"));
    if (ttype == TaskType.REDUCE) {
      verifyReduceTaskAttemptGeneric(att, info.getLong("shuffleFinishTime"),
          info.getLong("mergeFinishTime"), info.getLong("elapsedShuffleTime"),
          info.getLong("elapsedMergeTime"), info.getLong("elapsedReduceTime"));
    }
  }

  /** Verifies the taskAttempts JSON wrapper and every attempt inside it. */
  public void verifyHsTaskAttempts(JSONObject json, Task task)
      throws JSONException {
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject attempts = json.getJSONObject("taskAttempts");
    // Fixed copy-paste: the second check must inspect the nested "taskAttempts"
    // object, not re-assert the outer wrapper length again.
    assertEquals("incorrect number of elements", 1, attempts.length());
    JSONArray arr = attempts.getJSONArray("taskAttempt");
    for (TaskAttempt att : task.getAttempts().values()) {
      TaskAttemptId id = att.getID();
      String attid = MRApps.toString(id);
      boolean found = false;
      for (int i = 0; i < arr.length(); i++) {
        JSONObject info = arr.getJSONObject(i);
        // NOTE(review): String.matches treats attid as a regex; this works for
        // plain attempt ids but equals() would be the precise comparison.
        if (attid.matches(info.getString("id"))) {
          found = true;
          verifyHsTaskAttempt(info, att, task.getType());
        }
      }
      assertTrue("task attempt with id: " + attid
          + " not in web service output", found);
    }
  }

  /** Verifies the taskAttempt XML node list against every known attempt. */
  public void verifyHsTaskAttemptsXML(NodeList nodes, Task task) {
    assertEquals("incorrect number of elements", 1, nodes.getLength());
    for (TaskAttempt att : task.getAttempts().values()) {
      TaskAttemptId id = att.getID();
      String attid = MRApps.toString(id);
      boolean found = false;
      for (int i = 0; i < nodes.getLength(); i++) {
        Element element = (Element) nodes.item(i);
        if (attid.matches(WebServicesTestUtils.getXmlString(element, "id"))) {
          found = true;
          verifyHsTaskAttemptXML(element, att, task.getType());
        }
      }
      assertTrue("task with id: " + attid + " not in web service output", found);
    }
  }

  /**
   * Compares every generic attempt field reported by the web service with the
   * values held by the mock {@link TaskAttempt}.
   */
  public void verifyTaskAttemptGeneric(TaskAttempt ta, TaskType ttype,
      String id, String state, String type, String rack,
      String nodeHttpAddress, String diagnostics, String assignedContainerId,
      long startTime, long finishTime, long elapsedTime, float progress) {
    TaskAttemptId attid = ta.getID();
    String attemptId = MRApps.toString(attid);
    WebServicesTestUtils.checkStringMatch("id", attemptId, id);
    WebServicesTestUtils.checkStringMatch("type", ttype.toString(), type);
    WebServicesTestUtils.checkStringMatch("state", ta.getState().toString(),
        state);
    WebServicesTestUtils.checkStringMatch("rack", ta.getNodeRackName(), rack);
    WebServicesTestUtils.checkStringMatch("nodeHttpAddress",
        ta.getNodeHttpAddress(), nodeHttpAddress);
    String expectDiag = "";
    List<String> diagnosticsList = ta.getDiagnostics();
    // NOTE(review): this guards on the reported "diagnostics" string rather
    // than diagnosticsList, so an empty response string yields an empty
    // expectation even when the attempt has diagnostics — confirm intended.
    if (diagnosticsList != null && !diagnostics.isEmpty()) {
      StringBuilder b = new StringBuilder();
      for (String diag : diagnosticsList) {
        b.append(diag);
      }
      expectDiag = b.toString();
    }
    WebServicesTestUtils.checkStringMatch("diagnostics", expectDiag,
        diagnostics);
    WebServicesTestUtils.checkStringMatch("assignedContainerId",
        ta.getAssignedContainerID().toString(),
        assignedContainerId);
    assertEquals("startTime wrong", ta.getLaunchTime(), startTime);
    assertEquals("finishTime wrong", ta.getFinishTime(), finishTime);
    assertEquals("elapsedTime wrong", finishTime - startTime, elapsedTime);
    assertEquals("progress wrong", ta.getProgress() * 100, progress, 1e-3f);
  }

  /** Verifies the reduce-only shuffle/merge timing fields. */
  public void verifyReduceTaskAttemptGeneric(TaskAttempt ta,
      long shuffleFinishTime, long mergeFinishTime, long elapsedShuffleTime,
      long elapsedMergeTime, long elapsedReduceTime) {
    assertEquals("shuffleFinishTime wrong", ta.getShuffleFinishTime(),
        shuffleFinishTime);
    assertEquals("mergeFinishTime wrong", ta.getSortFinishTime(),
        mergeFinishTime);
    assertEquals("elapsedShuffleTime wrong",
        ta.getShuffleFinishTime() - ta.getLaunchTime(), elapsedShuffleTime);
    assertEquals("elapsedMergeTime wrong",
        ta.getSortFinishTime() - ta.getShuffleFinishTime(), elapsedMergeTime);
    assertEquals("elapsedReduceTime wrong",
        ta.getFinishTime() - ta.getSortFinishTime(), elapsedReduceTime);
  }

  /** GET .../attempts/{attemptid}/counters as JSON. */
  @Test
  public void testTaskAttemptIdCounters() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());
        for (TaskAttempt att : task.getAttempts().values()) {
          TaskAttemptId attemptid = att.getID();
          String attid = MRApps.toString(attemptid);
          ClientResponse response = r.path("ws").path("v1").path("history")
              .path("mapreduce").path("jobs").path(jobId).path("tasks")
              .path(tid).path("attempts").path(attid).path("counters")
              .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
          assertEquals(MediaType.APPLICATION_JSON_TYPE + "; "
              + JettyUtils.UTF_8, response.getType().toString());
          JSONObject json = response.getEntity(JSONObject.class);
          assertEquals("incorrect number of elements", 1, json.length());
          JSONObject info = json.getJSONObject("jobTaskAttemptCounters");
          verifyHsJobTaskAttemptCounters(info, att);
        }
      }
    }
  }

  /** GET .../attempts/{attemptid}/counters as XML. */
  @Test
  public void testTaskAttemptIdXMLCounters() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());
        for (TaskAttempt att : task.getAttempts().values()) {
          TaskAttemptId attemptid = att.getID();
          String attid = MRApps.toString(attemptid);
          ClientResponse response = r.path("ws").path("v1").path("history")
              .path("mapreduce").path("jobs").path(jobId).path("tasks")
              .path(tid).path("attempts").path(attid).path("counters")
              .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
          assertEquals(MediaType.APPLICATION_XML_TYPE + "; " + JettyUtils.UTF_8,
              response.getType().toString());
          String xml = response.getEntity(String.class);
          DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
          DocumentBuilder db = dbf.newDocumentBuilder();
          InputSource is = new InputSource();
          is.setCharacterStream(new StringReader(xml));
          Document dom = db.parse(is);
          NodeList nodes = dom.getElementsByTagName("jobTaskAttemptCounters");
          verifyHsTaskCountersXML(nodes, att);
        }
      }
    }
  }

  /**
   * Shallow verification of the JSON counter payload: correct id, non-empty
   * group/counter names and non-negative values (counter data itself is not
   * cross-checked).
   */
  public void verifyHsJobTaskAttemptCounters(JSONObject info, TaskAttempt att)
      throws JSONException {
    assertEquals("incorrect number of elements", 2, info.length());
    WebServicesTestUtils.checkStringMatch("id", MRApps.toString(att.getID()),
        info.getString("id"));
    // just do simple verification of fields - not data is correct
    // in the fields
    JSONArray counterGroups = info.getJSONArray("taskAttemptCounterGroup");
    for (int i = 0; i < counterGroups.length(); i++) {
      JSONObject counterGroup = counterGroups.getJSONObject(i);
      String name = counterGroup.getString("counterGroupName");
      assertTrue("name not set", (name != null && !name.isEmpty()));
      JSONArray counters = counterGroup.getJSONArray("counter");
      for (int j = 0; j < counters.length(); j++) {
        JSONObject counter = counters.getJSONObject(j);
        String counterName = counter.getString("name");
        assertTrue("name not set",
            (counterName != null && !counterName.isEmpty()));
        long value = counter.getLong("value");
        assertTrue("value >= 0", value >= 0);
      }
    }
  }

  /** Shallow verification of the XML counter payload (see JSON variant). */
  public void verifyHsTaskCountersXML(NodeList nodes, TaskAttempt att) {
    for (int i = 0; i < nodes.getLength(); i++) {
      Element element = (Element) nodes.item(i);
      WebServicesTestUtils.checkStringMatch("id", MRApps.toString(att.getID()),
          WebServicesTestUtils.getXmlString(element, "id"));
      // just do simple verification of fields - not data is correct
      // in the fields
      NodeList groups = element.getElementsByTagName("taskAttemptCounterGroup");
      for (int j = 0; j < groups.getLength(); j++) {
        Element counters = (Element) groups.item(j);
        assertNotNull("should have counters in the web service info", counters);
        String name = WebServicesTestUtils.getXmlString(counters,
            "counterGroupName");
        assertTrue("name not set", (name != null && !name.isEmpty()));
        NodeList counterArr = counters.getElementsByTagName("counter");
        for (int z = 0; z < counterArr.getLength(); z++) {
          Element counter = (Element) counterArr.item(z);
          String counterName = WebServicesTestUtils.getXmlString(counter,
              "name");
          assertTrue("counter name not set",
              (counterName != null && !counterName.isEmpty()));
          long value = WebServicesTestUtils.getXmlLong(counter, "value");
          assertTrue("value not >= 0", value >= 0);
        }
      }
    }
  }
}
| |
package rest;
import java.util.List;
import javax.servlet.ServletContext;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import tm.RotondAndesTM;
import vos.Postre;
/**
*
* @author angeloMarcetty
*
*/
@Path("restaurantesus/{idRestaurantesus: \\d+}/postres")
public class PostreService {

    /** Servlet context injected by the container; used to resolve file paths. */
    @Context
    private ServletContext context;

    /** @return the real filesystem path of the connection-data file. */
    private String getPath() {
        return context.getRealPath("WEB-INF/ConnectionData");
    }

    /** Wraps an exception message into a small JSON error payload. */
    private String doErrorMessage(Exception e){
        return "{ \"ERROR\": \""+ e.getMessage() + "\"}" ;
    }

    /**
     * Lists all desserts, provided the restaurant user exists.
     *
     * @param idRestaurantesus restaurant-user id taken from the path
     * @return 200 with the dessert list, or 500 with a JSON error body
     * @throws Exception when the restaurant user is unknown (no permission)
     */
    @GET
    @Produces({ MediaType.APPLICATION_JSON })
    public Response getPostres(@PathParam("idRestaurantesus") Long idRestaurantesus) throws Exception {
        RotondAndesTM tm = new RotondAndesTM(getPath());
        if (tm.buscarRestauranteUsPorId(idRestaurantesus) != null) {
            List<Postre> postres;
            try {
                postres = tm.darPostres();
            } catch (Exception e) {
                return Response.status(500).entity(doErrorMessage(e)).build();
            }
            return Response.status(200).entity(postres).build();
        } else {
            throw new Exception("No tiene permisos para acceder a estos recursos");
        }
    }

    /**
     * Looks up a single dessert by its numeric id.
     *
     * @param id dessert id (digits only, enforced by the path regex)
     * @param idRestaurantesus restaurant-user id taken from the path
     * @return 200 with the dessert, or 500 with a JSON error body
     * @throws Exception when the restaurant user is unknown (no permission)
     */
    @GET
    @Path( "{id: \\d+}" )
    @Produces( { MediaType.APPLICATION_JSON } )
    public Response getPostre( @PathParam( "id" ) Long id, @PathParam("idRestaurantesus") Long idRestaurantesus ) throws Exception {
        RotondAndesTM tm = new RotondAndesTM( getPath( ) );
        if (tm.buscarRestauranteUsPorId(idRestaurantesus) != null) {
            try {
                Postre e = tm.buscarPostreId(id);
                return Response.status( 200 ).entity( e ).build( );
            } catch( Exception e ) {
                return Response.status( 500 ).entity( doErrorMessage( e ) ).build( );
            }
        } else {
            throw new Exception("No tiene permisos para acceder a estos recursos");
        }
    }

    /**
     * Looks up desserts by name. The non-numeric path segment {nombre} routes
     * here (numeric segments match the id endpoint above).
     *
     * <p>Bug fix: the name was previously bound with {@code @QueryParam}, so the
     * {nombre} path segment declared in {@code @Path} was never read and the
     * lookup always failed unless a redundant query parameter was supplied.
     *
     * @param name dessert name taken from the path segment
     * @param idRestaurantesus restaurant-user id taken from the path
     * @return 200 with the matching desserts, or 500 with a JSON error body
     * @throws Exception when the restaurant user is unknown (no permission)
     */
    @GET
    @Path( "{nombre}" )
    @Produces( { MediaType.APPLICATION_JSON } )
    public Response getPostreName( @PathParam("nombre") String name, @PathParam("idRestaurantesus") Long idRestaurantesus) throws Exception {
        RotondAndesTM tm = new RotondAndesTM(getPath());
        if (tm.buscarRestauranteUsPorId(idRestaurantesus) != null) {
            List<Postre> postres;
            try {
                if (name == null || name.length() == 0)
                    throw new Exception("Nombre del Postre no valido");
                postres = tm.buscarPostrePorName(name);
            } catch (Exception e) {
                return Response.status(500).entity(doErrorMessage(e)).build();
            }
            return Response.status(200).entity(postres).build();
        } else {
            throw new Exception("No tiene permisos para acceder a estos recursos");
        }
    }

    /**
     * Creates a single dessert.
     *
     * @param postre dessert payload (JSON body)
     * @param idRestaurantesus restaurant-user id taken from the path
     * @return 200 echoing the dessert, or 500 with a JSON error body
     * @throws Exception when the restaurant user is unknown (no permission)
     */
    @POST
    @Consumes(MediaType.APPLICATION_JSON)
    @Produces(MediaType.APPLICATION_JSON)
    public Response addPostre(Postre postre, @PathParam("idRestaurantesus") Long idRestaurantesus) throws Exception {
        RotondAndesTM tm = new RotondAndesTM(getPath());
        if (tm.buscarRestauranteUsPorId(idRestaurantesus) != null) {
            try {
                tm.addPostre(postre);
            } catch (Exception e) {
                return Response.status(500).entity(doErrorMessage(e)).build();
            }
            return Response.status(200).entity(postre).build();
        } else {
            throw new Exception("No tiene permisos para acceder a estos recursos");
        }
    }

    /**
     * Creates several desserts in one call.
     *
     * @param postres dessert payloads (JSON array body)
     * @param idRestaurantesus restaurant-user id taken from the path
     * @return 200 echoing the desserts, or 500 with a JSON error body
     * @throws Exception when the restaurant user is unknown (no permission)
     */
    @POST
    @Path("/varios")
    @Consumes(MediaType.APPLICATION_JSON)
    @Produces(MediaType.APPLICATION_JSON)
    public Response addPostres(List<Postre> postres, @PathParam("idRestaurantesus") Long idRestaurantesus) throws Exception {
        RotondAndesTM tm = new RotondAndesTM(getPath());
        if (tm.buscarRestauranteUsPorId(idRestaurantesus) != null) {
            try {
                tm.addPostres(postres);
            } catch (Exception e) {
                return Response.status(500).entity(doErrorMessage(e)).build();
            }
            return Response.status(200).entity(postres).build();
        } else {
            throw new Exception("No tiene permisos para acceder a estos recursos");
        }
    }

    /**
     * Updates an existing dessert.
     *
     * @param postre dessert payload (JSON body)
     * @param idRestaurantesus restaurant-user id taken from the path
     * @return 200 echoing the dessert, or 500 with a JSON error body
     * @throws Exception when the restaurant user is unknown (no permission)
     */
    @PUT
    @Consumes(MediaType.APPLICATION_JSON)
    @Produces(MediaType.APPLICATION_JSON)
    public Response updatePostre(Postre postre, @PathParam("idRestaurantesus") Long idRestaurantesus) throws Exception {
        RotondAndesTM tm = new RotondAndesTM(getPath());
        if (tm.buscarRestauranteUsPorId(idRestaurantesus) != null) {
            try {
                tm.updatePostre(postre);
            } catch (Exception e) {
                return Response.status(500).entity(doErrorMessage(e)).build();
            }
            return Response.status(200).entity(postre).build();
        } else {
            throw new Exception("No tiene permisos para acceder a estos recursos");
        }
    }

    /**
     * Deletes a dessert. NOTE(review): DELETE with a request body is unusual
     * for REST clients/proxies; an id path parameter would be more robust —
     * kept as-is to preserve the existing API contract.
     *
     * @param postre dessert payload (JSON body) identifying what to delete
     * @param idRestaurantesus restaurant-user id taken from the path
     * @return 200 echoing the dessert, or 500 with a JSON error body
     * @throws Exception when the restaurant user is unknown (no permission)
     */
    @DELETE
    @Consumes(MediaType.APPLICATION_JSON)
    @Produces(MediaType.APPLICATION_JSON)
    public Response deletePostre(Postre postre, @PathParam("idRestaurantesus") Long idRestaurantesus) throws Exception {
        RotondAndesTM tm = new RotondAndesTM(getPath());
        if (tm.buscarRestauranteUsPorId(idRestaurantesus) != null) {
            try {
                tm.deletePostre(postre);
            } catch (Exception e) {
                return Response.status(500).entity(doErrorMessage(e)).build();
            }
            return Response.status(200).entity(postre).build();
        } else {
            throw new Exception("No tiene permisos para acceder a estos recursos");
        }
    }
}
| |
package org.docksidestage.hangar.dbflute.dtomapper.bs;
import java.io.Serializable;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.Set;
import org.dbflute.Entity;
import org.dbflute.optional.OptionalEntity;
import org.dbflute.dbmeta.DBMeta;
import org.dbflute.dbmeta.InstanceKeyEntity;
import org.dbflute.dbmeta.dtomap.DtoMapper;
import org.dbflute.dbmeta.dtomap.InstanceKeyDto;
import org.dbflute.helper.beans.DfBeanDesc;
import org.dbflute.helper.beans.DfPropertyDesc;
import org.dbflute.helper.beans.factory.DfBeanDescFactory;
import org.dbflute.jdbc.Classification;
import org.docksidestage.hangar.dbflute.exentity.*;
import org.docksidestage.hangar.simpleflute.dto.*;
import org.docksidestage.hangar.dbflute.dtomapper.*;
/**
* The DTO mapper of WHITE_ON_PARADE_REF as TABLE. <br>
* <pre>
* [primary-key]
* REF_ID
*
* [column]
* REF_ID, REF_NAME, NULLABLE_FK_ON_PARADE_ID, NULLABLE_FK_TO_MANY_ID
*
* [sequence]
*
*
* [identity]
*
*
* [version-no]
*
*
* [foreign-table]
* WHITE_ON_PARADE, WHITE_ON_PARADE_NULLABLE_TO_MANY
*
* [referrer-table]
*
*
* [foreign-property]
* whiteOnParade, whiteOnParadeNullableToMany
*
* [referrer-property]
*
* </pre>
* @author DBFlute(AutoGenerator)
*/
public abstract class BsWhiteOnParadeRefDtoMapper implements DtoMapper<WhiteOnParadeRef, WhiteOnParadeRefDto>, Serializable {
// ===================================================================================
// Definition
// ==========
    /** The serial version UID for object serialization. (Default) */
    private static final long serialVersionUID = 1L;

    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    // Shared caches so that one entity/DTO instance maps to one counterpart
    // across the whole relation graph (keys are identity-based wrappers).
    protected final Map<Entity, Object> _relationDtoMap;
    protected final Map<Object, Entity> _relationEntityMap;
    // When true, common columns (register/update audit columns) are skipped.
    protected boolean _exceptCommonColumn;
    protected boolean _reverseReference; // default: one-way reference
    protected boolean _instanceCache = true; // default: cached
    // Suppression flags: skip mapping the corresponding foreign property.
    protected boolean _suppressWhiteOnParade;
    protected boolean _suppressWhiteOnParadeNullableToMany;
// ===================================================================================
// Constructor
// ===========
public BsWhiteOnParadeRefDtoMapper() {
_relationDtoMap = new HashMap<Entity, Object>();
_relationEntityMap = new HashMap<Object, Entity>();
}
public BsWhiteOnParadeRefDtoMapper(Map<Entity, Object> relationDtoMap, Map<Object, Entity> relationEntityMap) {
_relationDtoMap = relationDtoMap;
_relationEntityMap = relationEntityMap;
}
// ===================================================================================
// Mapping
// =======
// -----------------------------------------------------
// to DTO
// ------
    /**
     * {@inheritDoc}
     * <p>Maps one entity to a DTO, reusing cached DTOs for both the local row
     * and its foreign rows so the relation graph shares instances. Foreign
     * properties are mapped unless suppressed, and (when reverse reference is
     * enabled) this DTO is also added to the relation DTO's referrer list.
     */
    public WhiteOnParadeRefDto mappingToDto(WhiteOnParadeRef entity) {
        if (entity == null) {
            return null;
        }
        boolean instanceCache = _instanceCache;
        Entity localKey = createInstanceKeyEntity(entity);
        // Cache hit: the same entity instance was already mapped; reuse it.
        Object cachedLocalDto = instanceCache ? _relationDtoMap.get(localKey) : null;
        if (cachedLocalDto != null) {
            return (WhiteOnParadeRefDto)cachedLocalDto;
        }
        boolean exceptCommonColumn = isExceptCommonColumn();
        WhiteOnParadeRefDto dto = new WhiteOnParadeRefDto();
        // Plain column copy (REF_ID, REF_NAME, nullable FK columns).
        dto.setRefId(entity.getRefId());
        dto.setRefName(entity.getRefName());
        dto.setNullableFkOnParadeId(entity.getNullableFkOnParadeId());
        dto.setNullableFkToManyId(entity.getNullableFkToManyId());
        reflectDerivedProperty(entity, dto, true);
        if (instanceCache && entity.hasPrimaryKeyValue()) { // caches only a DTO that has a primary key value
            _relationDtoMap.put(localKey, dto);
        }
        boolean reverseReference = isReverseReference();
        // Foreign property: whiteOnParade (skipped when suppressed or absent).
        if (!_suppressWhiteOnParade && entity.getWhiteOnParade().isPresent()) {
            WhiteOnParade relationEntity = entity.getWhiteOnParade().get();
            Entity relationKey = createInstanceKeyEntity(relationEntity);
            Object cachedDto = instanceCache ? _relationDtoMap.get(relationKey) : null;
            if (cachedDto != null) {
                // Relation already mapped elsewhere: link the shared instance.
                WhiteOnParadeDto relationDto = (WhiteOnParadeDto)cachedDto;
                dto.setWhiteOnParade(relationDto);
                if (reverseReference) {
                    relationDto.getWhiteOnParadeRefList().add(dto);
                }
            } else {
                // First sight: map the relation with a child mapper sharing our
                // caches; suppress its referrer list to avoid infinite recursion.
                WhiteOnParadeDtoMapper mapper = new WhiteOnParadeDtoMapper(_relationDtoMap, _relationEntityMap);
                mapper.setExceptCommonColumn(exceptCommonColumn);
                mapper.setReverseReference(reverseReference);
                if (!instanceCache) { mapper.disableInstanceCache(); }
                mapper.suppressWhiteOnParadeRefList();
                WhiteOnParadeDto relationDto = mapper.mappingToDto(relationEntity);
                dto.setWhiteOnParade(relationDto);
                if (reverseReference) {
                    relationDto.getWhiteOnParadeRefList().add(dto);
                }
                if (instanceCache && relationEntity.hasPrimaryKeyValue()) {
                    _relationDtoMap.put(relationKey, dto.getWhiteOnParade());
                }
            }
        };
        // Foreign property: whiteOnParadeNullableToMany (same pattern as above).
        if (!_suppressWhiteOnParadeNullableToMany && entity.getWhiteOnParadeNullableToMany().isPresent()) {
            WhiteOnParadeNullableToMany relationEntity = entity.getWhiteOnParadeNullableToMany().get();
            Entity relationKey = createInstanceKeyEntity(relationEntity);
            Object cachedDto = instanceCache ? _relationDtoMap.get(relationKey) : null;
            if (cachedDto != null) {
                WhiteOnParadeNullableToManyDto relationDto = (WhiteOnParadeNullableToManyDto)cachedDto;
                dto.setWhiteOnParadeNullableToMany(relationDto);
                if (reverseReference) {
                    relationDto.getWhiteOnParadeRefList().add(dto);
                }
            } else {
                WhiteOnParadeNullableToManyDtoMapper mapper = new WhiteOnParadeNullableToManyDtoMapper(_relationDtoMap, _relationEntityMap);
                mapper.setExceptCommonColumn(exceptCommonColumn);
                mapper.setReverseReference(reverseReference);
                if (!instanceCache) { mapper.disableInstanceCache(); }
                mapper.suppressWhiteOnParadeRefList();
                WhiteOnParadeNullableToManyDto relationDto = mapper.mappingToDto(relationEntity);
                dto.setWhiteOnParadeNullableToMany(relationDto);
                if (reverseReference) {
                    relationDto.getWhiteOnParadeRefList().add(dto);
                }
                if (instanceCache && relationEntity.hasPrimaryKeyValue()) {
                    _relationDtoMap.put(relationKey, dto.getWhiteOnParadeNullableToMany());
                }
            }
        };
        return dto;
    }
/**
* {@inheritDoc}
*/
public List<WhiteOnParadeRefDto> mappingToDtoList(List<WhiteOnParadeRef> entityList) {
if (entityList == null) {
throw new IllegalArgumentException("The argument 'entityList' should not be null.");
}
List<WhiteOnParadeRefDto> dtoList = new ArrayList<WhiteOnParadeRefDto>();
for (WhiteOnParadeRef entity : entityList) {
WhiteOnParadeRefDto dto = mappingToDto(entity);
if (dto != null) {
dtoList.add(dto);
} else {
if (isAcceptNullElementOnList()) {
dtoList.add(null);
}
}
}
return dtoList;
}
// -----------------------------------------------------
// to Entity
// ---------
    /**
     * {@inheritDoc}
     * <p>Maps the DTO to a new entity, honoring the modified-property tracking
     * (see {@link #needsMapping(WhiteOnParadeRefDto, Object, String)}), the
     * instance cache (to keep object identity across a graph), the suppress
     * flags for each relation, and optional reverse-reference wiring.
     */
    public WhiteOnParadeRef mappingToEntity(WhiteOnParadeRefDto dto) {
        if (dto == null) {
            return null;
        }
        boolean instanceCache = _instanceCache;
        // instance cache hit: the same DTO instance was already mapped, reuse that entity
        Object localKey = createInstanceKeyDto(dto, dto.instanceHash());
        Entity cachedLocalEntity = instanceCache ? _relationEntityMap.get(localKey) : null;
        if (cachedLocalEntity != null) {
            return (WhiteOnParadeRef)cachedLocalEntity;
        }
        boolean exceptCommonColumn = isExceptCommonColumn();
        WhiteOnParadeRef entity = new WhiteOnParadeRef();
        // column-by-column copy, each guarded by the modified-property check
        if (needsMapping(dto, dto.getRefId(), "refId")) {
            entity.setRefId(dto.getRefId());
        }
        if (needsMapping(dto, dto.getRefName(), "refName")) {
            entity.setRefName(dto.getRefName());
        }
        if (needsMapping(dto, dto.getNullableFkOnParadeId(), "nullableFkOnParadeId")) {
            entity.setNullableFkOnParadeId(dto.getNullableFkOnParadeId());
        }
        if (needsMapping(dto, dto.getNullableFkToManyId(), "nullableFkToManyId")) {
            entity.setNullableFkToManyId(dto.getNullableFkToManyId());
        }
        // copy non-column derived properties via bean reflection (toDto=false: DTO -> entity)
        reflectDerivedProperty(entity, dto, false);
        // cache must happen before the relation mapping below so cycles resolve to this instance
        if (instanceCache && entity.hasPrimaryKeyValue()) { // caches only an entity that has a primary key value
            _relationEntityMap.put(localKey, entity);
        }
        boolean reverseReference = isReverseReference();
        // relation: whiteOnParade (many-to-one)
        if (!_suppressWhiteOnParade && dto.getWhiteOnParade() != null) {
            WhiteOnParadeDto relationDto = dto.getWhiteOnParade();
            Object relationKey = createInstanceKeyDto(relationDto, relationDto.instanceHash());
            Entity cachedEntity = instanceCache ? _relationEntityMap.get(relationKey) : null;
            if (cachedEntity != null) {
                WhiteOnParade relationEntity = (WhiteOnParade)cachedEntity;
                entity.setWhiteOnParade(OptionalEntity.of(relationEntity));
                if (reverseReference) {
                    relationEntity.getWhiteOnParadeRefList().add(entity);
                }
            } else {
                // delegate to the relation's own mapper, sharing both caches;
                // suppress the back-reference list on the child mapper to avoid an infinite loop
                WhiteOnParadeDtoMapper mapper = new WhiteOnParadeDtoMapper(_relationDtoMap, _relationEntityMap);
                mapper.setExceptCommonColumn(exceptCommonColumn);
                mapper.setReverseReference(reverseReference);
                if (!instanceCache) { mapper.disableInstanceCache(); }
                mapper.suppressWhiteOnParadeRefList();
                WhiteOnParade relationEntity = mapper.mappingToEntity(relationDto);
                entity.setWhiteOnParade(OptionalEntity.of(relationEntity));
                if (reverseReference) {
                    relationEntity.getWhiteOnParadeRefList().add(entity);
                }
                if (instanceCache && entity.getWhiteOnParade().get().hasPrimaryKeyValue()) {
                    _relationEntityMap.put(relationKey, entity.getWhiteOnParade().get());
                }
            }
        };
        // relation: whiteOnParadeNullableToMany (same pattern as above)
        if (!_suppressWhiteOnParadeNullableToMany && dto.getWhiteOnParadeNullableToMany() != null) {
            WhiteOnParadeNullableToManyDto relationDto = dto.getWhiteOnParadeNullableToMany();
            Object relationKey = createInstanceKeyDto(relationDto, relationDto.instanceHash());
            Entity cachedEntity = instanceCache ? _relationEntityMap.get(relationKey) : null;
            if (cachedEntity != null) {
                WhiteOnParadeNullableToMany relationEntity = (WhiteOnParadeNullableToMany)cachedEntity;
                entity.setWhiteOnParadeNullableToMany(OptionalEntity.of(relationEntity));
                if (reverseReference) {
                    relationEntity.getWhiteOnParadeRefList().add(entity);
                }
            } else {
                WhiteOnParadeNullableToManyDtoMapper mapper = new WhiteOnParadeNullableToManyDtoMapper(_relationDtoMap, _relationEntityMap);
                mapper.setExceptCommonColumn(exceptCommonColumn);
                mapper.setReverseReference(reverseReference);
                if (!instanceCache) { mapper.disableInstanceCache(); }
                mapper.suppressWhiteOnParadeRefList();
                WhiteOnParadeNullableToMany relationEntity = mapper.mappingToEntity(relationDto);
                entity.setWhiteOnParadeNullableToMany(OptionalEntity.of(relationEntity));
                if (reverseReference) {
                    relationEntity.getWhiteOnParadeRefList().add(entity);
                }
                if (instanceCache && entity.getWhiteOnParadeNullableToMany().get().hasPrimaryKeyValue()) {
                    _relationEntityMap.put(relationKey, entity.getWhiteOnParadeNullableToMany().get());
                }
            }
        };
        return entity;
    }
/**
* Does the property need to be mapped to an entity? <br>
* If modified info of DTO has at least one property, only modified properties are mapped.
* And if no property is modified, all properties are mapped (but the other option exists).
* @param dto The instance of DTO. (NotNull)
* @param value The value of DTO's property. (NotNull)
* @param propName The property name of DTO. (NotNull)
* @return The determination, true or false.
*/
protected boolean needsMapping(WhiteOnParadeRefDto dto, Object value, String propName) {
Set<String> modifiedProperties = dto.mymodifiedProperties();
if (modifiedProperties.isEmpty()) {
return isMappingToEntityContainsNull() || value != null;
}
return modifiedProperties.contains(propName);
}
    /**
     * Does the mapping to an entity contain null values? (when no property is modified) <br>
     * Default is true, meaning a setter is called even when the DTO value is null.
     * This determination is consulted only when the DTO has no modified properties;
     * override in a subclass to skip null values instead.
     * @return The determination, true or false.
     */
    protected boolean isMappingToEntityContainsNull() { // for extension
        return true; // as default
    }
/**
* {@inheritDoc}
*/
public List<WhiteOnParadeRef> mappingToEntityList(List<WhiteOnParadeRefDto> dtoList) {
if (dtoList == null) {
throw new IllegalArgumentException("The argument 'dtoList' should not be null.");
}
List<WhiteOnParadeRef> entityList = new ArrayList<WhiteOnParadeRef>();
for (WhiteOnParadeRefDto dto : dtoList) {
WhiteOnParadeRef entity = mappingToEntity(dto);
if (entity != null) {
entityList.add(entity);
} else {
if (isAcceptNullElementOnList()) {
entityList.add(null);
}
}
}
return entityList;
}
    /**
     * Whether a null mapping result is added to the list as a null element.
     * Default is true; override to silently drop null elements instead.
     * @return The determination, true or false.
     */
    protected boolean isAcceptNullElementOnList() {
        return true; // as default
    }
    // -----------------------------------------------------
    //                                          Instance Key
    //                                          ------------
    /**
     * Creates the identity-based key used to look up a DTO in the instance cache.
     * @param dto The DTO instance to key. (NotNull)
     * @param instanceHash The identity hash of the DTO instance.
     * @return The cache key. (NotNull)
     */
    protected Object createInstanceKeyDto(final Object dto, final int instanceHash) {
        return new InstanceKeyDto(dto, instanceHash);
    }
    /**
     * Creates the identity-based key used to look up an entity in the instance cache.
     * @param entity The entity instance to key. (NotNull)
     * @return The cache key. (NotNull)
     */
    protected InstanceKeyEntity createInstanceKeyEntity(Entity entity) {
        return new InstanceKeyEntity(entity);
    }
    /**
     * Disables the instance cache so every mapping call creates fresh objects.
     */
    public void disableInstanceCache() { // internal option
        _instanceCache = false;
    }
// -----------------------------------------------------
// Derived Property
// ----------------
protected void reflectDerivedProperty(Entity entity, Object dto, boolean toDto) {
DfBeanDesc entityDesc = DfBeanDescFactory.getBeanDesc(entity.getClass());
DfBeanDesc dtoDesc = DfBeanDescFactory.getBeanDesc(dto.getClass());
DBMeta dbmeta = entity.asDBMeta();
for (String propertyName : entityDesc.getProppertyNameList()) {
if (isOutOfDerivedPropertyName(entity, dto, toDto, dbmeta, entityDesc, dtoDesc, propertyName)) {
continue;
}
DfPropertyDesc entityProp = entityDesc.getPropertyDesc(propertyName);
Class<?> propertyType = entityProp.getPropertyType();
if (isOutOfDerivedPropertyType(entity, dto, toDto, propertyName, propertyType)) {
continue;
}
if (entityProp.isReadable() && entityProp.isWritable()) {
DfPropertyDesc dtoProp = dtoDesc.getPropertyDesc(propertyName);
if (dtoProp.isReadable() && dtoProp.isWritable()) {
if (toDto) {
dtoProp.setValue(dto, entityProp.getValue(entity));
} else {
entityProp.setValue(entity, dtoProp.getValue(dto));
}
}
}
}
}
protected boolean isOutOfDerivedPropertyName(Entity entity, Object dto, boolean toDto
, DBMeta dbmeta, DfBeanDesc entityDesc, DfBeanDesc dtoDesc
, String propertyName) {
return dbmeta.hasColumn(propertyName)
|| dbmeta.hasForeign(propertyName) || dbmeta.hasReferrer(propertyName)
|| !dtoDesc.hasPropertyDesc(propertyName);
}
protected boolean isOutOfDerivedPropertyType(Entity entity, Object dto, boolean toDto
, String propertyName, Class<?> propertyType) {
return List.class.isAssignableFrom(propertyType)
|| Entity.class.isAssignableFrom(propertyType)
|| Classification.class.isAssignableFrom(propertyType);
}
    // ===================================================================================
    //                                                                   Suppress Relation
    //                                                                   =================
    // (basically) to suppress infinity loop
    /**
     * Suppresses mapping of the whiteOnParade relation.
     */
    public void suppressWhiteOnParade() {
        _suppressWhiteOnParade = true;
    }
    /**
     * Suppresses mapping of the whiteOnParadeNullableToMany relation.
     */
    public void suppressWhiteOnParadeNullableToMany() {
        _suppressWhiteOnParadeNullableToMany = true;
    }
    // turns all relation suppression on (used by base-only mapping)
    protected void doSuppressAll() { // internal
        suppressWhiteOnParade();
        suppressWhiteOnParadeNullableToMany();
    }
    // turns all relation suppression off
    protected void doSuppressClear() { // internal
        _suppressWhiteOnParade = false;
        _suppressWhiteOnParadeNullableToMany = false;
    }
    // ===================================================================================
    //                                                                      Mapping Option
    //                                                                      ==============
    /**
     * {@inheritDoc}
     * <p>When enabled, all relation mappings are suppressed; when disabled,
     * all suppression flags are cleared.
     */
    public void setBaseOnlyMapping(boolean baseOnlyMapping) {
        if (baseOnlyMapping) {
            doSuppressAll();
        } else {
            doSuppressClear();
        }
    }
    // whether common columns are excluded from the mapping
    protected boolean isExceptCommonColumn() {
        return _exceptCommonColumn;
    }
    /**
     * {@inheritDoc}
     */
    public void setExceptCommonColumn(boolean exceptCommonColumn) {
        _exceptCommonColumn = exceptCommonColumn;
    }
    // whether mapped relations also receive a back-reference to this object
    protected boolean isReverseReference() {
        return _reverseReference;
    }
    /**
     * {@inheritDoc}
     */
    public void setReverseReference(boolean reverseReference) {
        _reverseReference = reverseReference;
    }
    // -----------------------------------------------------
    //                                           Easy-to-Use
    //                                           -----------
    /**
     * Enable base-only mapping that means the mapping ignores all references.
     * @return this. (NotNull)
     */
    public WhiteOnParadeRefDtoMapper baseOnlyMapping() {
        setBaseOnlyMapping(true);
        return (WhiteOnParadeRefDtoMapper)this; // cast for fluent chaining from the base type
    }
    /**
     * Enable except common column that means the mapping excepts common column.
     * @return this. (NotNull)
     */
    public WhiteOnParadeRefDtoMapper exceptCommonColumn() {
        setExceptCommonColumn(true);
        return (WhiteOnParadeRefDtoMapper)this;
    }
    /**
     * Enable reverse reference that means the mapping contains reverse references.
     * @return this. (NotNull)
     */
    public WhiteOnParadeRefDtoMapper reverseReference() {
        setReverseReference(true);
        return (WhiteOnParadeRefDtoMapper)this;
    }
}
| |
package org.hisp.dhis.dataset;
/*
* Copyright (c) 2004-2017, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.io.Serializable;
import java.util.Date;
import java.util.Objects;

import org.hisp.dhis.common.BaseIdentifiableObject;
import org.hisp.dhis.common.DxfNamespaces;
import org.hisp.dhis.dataelement.DataElementCategoryOptionCombo;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.period.Period;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlRootElement;
import com.google.common.base.MoreObjects;
/**
 * Registration marking a data set as complete for a given period, organisation
 * unit and attribute option combination. Identity (equals/hashCode) is defined
 * by the (dataSet, period, source, attributeOptionCombo) tuple only; date,
 * storedBy and periodName do not participate.
 *
 * @author Lars Helge Overland
 */
@JacksonXmlRootElement( localName = "completeDataSetRegistration", namespace = DxfNamespaces.DXF_2_0 )
public class CompleteDataSetRegistration
    implements Serializable
{
    /**
     * Determines if a de-serialized file is compatible with this class.
     */
    private static final long serialVersionUID = 334738541365949298L;

    private DataSet dataSet;

    private Period period;

    private OrganisationUnit source;

    private DataElementCategoryOptionCombo attributeOptionCombo;

    private Date date; // TODO rename to created

    private String storedBy;

    // display name cache for the period; transient, so never serialized
    private transient String periodName;

    // -------------------------------------------------------------------------
    // Constructors
    // -------------------------------------------------------------------------

    public CompleteDataSetRegistration()
    {
    }

    public CompleteDataSetRegistration( DataSet dataSet, Period period, OrganisationUnit source,
        DataElementCategoryOptionCombo attributeOptionCombo, Date date, String storedBy )
    {
        this.dataSet = dataSet;
        this.period = period;
        this.source = source;
        this.attributeOptionCombo = attributeOptionCombo;
        this.date = date;
        this.storedBy = storedBy;
    }

    // -------------------------------------------------------------------------
    // HashCode and equals
    // -------------------------------------------------------------------------

    @Override
    public int hashCode()
    {
        // Objects.hash uses the same 31-based combination over the identity
        // fields as the previous hand-rolled implementation, so hash values
        // are unchanged.
        return Objects.hash( dataSet, period, source, attributeOptionCombo );
    }

    @Override
    public boolean equals( Object object )
    {
        if ( this == object )
        {
            return true;
        }

        if ( object == null || getClass() != object.getClass() )
        {
            return false;
        }

        final CompleteDataSetRegistration other = (CompleteDataSetRegistration) object;

        return Objects.equals( dataSet, other.dataSet )
            && Objects.equals( period, other.period )
            && Objects.equals( source, other.source )
            && Objects.equals( attributeOptionCombo, other.attributeOptionCombo );
    }

    // -------------------------------------------------------------------------
    // Getters and setters
    // -------------------------------------------------------------------------

    @JsonProperty
    @JsonSerialize( as = BaseIdentifiableObject.class )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public DataSet getDataSet()
    {
        return dataSet;
    }

    public void setDataSet( DataSet dataSet )
    {
        this.dataSet = dataSet;
    }

    @JsonProperty
    @JsonSerialize( as = BaseIdentifiableObject.class )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public Period getPeriod()
    {
        return period;
    }

    public void setPeriod( Period period )
    {
        this.period = period;
    }

    @JsonProperty( value = "organisationUnit" )
    @JsonSerialize( as = BaseIdentifiableObject.class )
    @JacksonXmlProperty( localName = "organisationUnit", namespace = DxfNamespaces.DXF_2_0 )
    public OrganisationUnit getSource()
    {
        return source;
    }

    public void setSource( OrganisationUnit source )
    {
        this.source = source;
    }

    @JsonProperty
    @JsonSerialize( as = BaseIdentifiableObject.class )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public DataElementCategoryOptionCombo getAttributeOptionCombo()
    {
        return attributeOptionCombo;
    }

    public void setAttributeOptionCombo( DataElementCategoryOptionCombo attributeOptionCombo )
    {
        this.attributeOptionCombo = attributeOptionCombo;
    }

    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public Date getDate()
    {
        return date;
    }

    public void setDate( Date date )
    {
        this.date = date;
    }

    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public String getStoredBy()
    {
        return storedBy;
    }

    public void setStoredBy( String storedBy )
    {
        this.storedBy = storedBy;
    }

    public String getPeriodName()
    {
        return periodName;
    }

    public void setPeriodName( String periodName )
    {
        this.periodName = periodName;
    }

    @Override
    public String toString()
    {
        return MoreObjects.toStringHelper( this )
            .add( "dataSet", dataSet )
            .add( "period", period )
            .add( "source", source )
            .add( "attributeOptionCombo", attributeOptionCombo )
            .add( "date", date )
            .add( "storedBy", storedBy )
            .add( "periodName", periodName )
            .toString();
    }
}
| |
/*
* Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.common.concur.lock;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantReadWriteLock;
/**
 * Manages a shared/exclusive (read/write) lock per resource id. Lock instances
 * are created lazily, reference-counted, and removed from the map once no
 * caller holds or waits on them. Creation and count bookkeeping are serialized
 * through a striped set of internal monitor objects selected by the resource's
 * hash code, so different resources rarely contend on the same monitor.
 */
public class OLockManager<RESOURCE_TYPE, REQUESTER_TYPE> {
  public enum LOCK {
    SHARED, EXCLUSIVE
  }

  private static final int DEFAULT_CONCURRENCY_LEVEL = 16;
  // default timeout (ms) used by the two-arg acquireLock; a value <= 0 blocks without timeout
  protected long acquireTimeout;
  // currently known locks keyed by resource id; an entry exists only while countLocks > 0
  protected final ConcurrentHashMap<RESOURCE_TYPE, CountableLock> map;
  private final boolean enabled;
  // shift/mask map a hash code onto an index into the striped monitor array
  private final int shift;
  private final int mask;
  private final Object[] locks;

  @SuppressWarnings("serial")
  protected static class CountableLock extends ReentrantReadWriteLock {
    // holders plus waiters for this resource; mutated only under the striped monitor
    protected int countLocks = 0;

    public CountableLock() {
      super(false); // non-fair lock
    }
  }

  public OLockManager(final boolean iEnabled, final int iAcquireTimeout) {
    this(iEnabled, iAcquireTimeout, defaultConcurrency());
  }

  /**
   * @param iEnabled          when false every operation is a no-op
   * @param iAcquireTimeout   default acquire timeout in milliseconds (<= 0 waits forever)
   * @param concurrencyLevel  requested stripe count; rounded up to a power of two
   */
  public OLockManager(final boolean iEnabled, final int iAcquireTimeout, final int concurrencyLevel) {
    // round concurrencyLevel up to the next power of two so index selection can use a mask
    int cL = 1;
    int sh = 0;
    while (cL < concurrencyLevel) {
      cL <<= 1;
      sh++;
    }
    shift = 32 - sh;
    mask = cL - 1;
    map = new ConcurrentHashMap<RESOURCE_TYPE, CountableLock>(cL);
    locks = new Object[cL];
    for (int i = 0; i < locks.length; i++) {
      locks[i] = new Object();
    }
    acquireTimeout = iAcquireTimeout;
    enabled = iEnabled;
  }

  public void acquireLock(final REQUESTER_TYPE iRequester, final RESOURCE_TYPE iResourceId, final LOCK iLockType) {
    acquireLock(iRequester, iResourceId, iLockType, acquireTimeout);
  }

  /**
   * Acquires the shared or exclusive lock for the resource, waiting up to
   * iTimeout milliseconds (<= 0 waits indefinitely). On timeout or interrupt
   * the reservation counter is rolled back and an OLockException is thrown.
   */
  public void acquireLock(final REQUESTER_TYPE iRequester, final RESOURCE_TYPE iResourceId, final LOCK iLockType, long iTimeout) {
    if (!enabled)
      return;
    CountableLock lock;
    final Object internalLock = internalLock(iResourceId);
    // register intent under the striped monitor: create the per-resource lock
    // if absent and bump its counter so a concurrent release cannot evict it
    synchronized (internalLock) {
      lock = map.get(iResourceId);
      if (lock == null) {
        final CountableLock newLock = new CountableLock();
        lock = map.putIfAbsent(getImmutableResourceId(iResourceId), newLock);
        if (lock == null)
          lock = newLock;
      }
      lock.countLocks++;
    }
    try {
      if (iTimeout <= 0) {
        if (iLockType == LOCK.SHARED)
          lock.readLock().lock();
        else
          lock.writeLock().lock();
      } else {
        try {
          if (iLockType == LOCK.SHARED) {
            if (!lock.readLock().tryLock(iTimeout, TimeUnit.MILLISECONDS))
              throw new OLockException("Timeout on acquiring resource '" + iResourceId + "' because is locked from another thread");
          } else {
            if (!lock.writeLock().tryLock(iTimeout, TimeUnit.MILLISECONDS))
              throw new OLockException("Timeout on acquiring resource '" + iResourceId + "' because is locked from another thread");
          }
        } catch (InterruptedException e) {
          // restore the interrupt status before translating to the domain exception
          Thread.currentThread().interrupt();
          throw new OLockException("Thread interrupted while waiting for resource '" + iResourceId + "'");
        }
      }
    } catch (RuntimeException e) {
      // acquisition failed: undo the reservation and drop the map entry if unused
      synchronized (internalLock) {
        lock.countLocks--;
        if (lock.countLocks == 0)
          map.remove(iResourceId);
      }
      throw e;
    }
  }

  /**
   * Non-blocking acquire; returns true on success, false when the lock is
   * currently unavailable. Never throws for contention.
   */
  public boolean tryAcquireLock(final REQUESTER_TYPE iRequester, final RESOURCE_TYPE iResourceId, final LOCK iLockType) {
    if (!enabled)
      return true;
    CountableLock lock;
    final Object internalLock = internalLock(iResourceId);
    synchronized (internalLock) {
      lock = map.get(iResourceId);
      if (lock == null) {
        final CountableLock newLock = new CountableLock();
        lock = map.putIfAbsent(getImmutableResourceId(iResourceId), newLock);
        if (lock == null)
          lock = newLock;
      }
      lock.countLocks++;
    }
    boolean result;
    try {
      if (iLockType == LOCK.SHARED)
        result = lock.readLock().tryLock();
      else
        result = lock.writeLock().tryLock();
    } catch (RuntimeException e) {
      synchronized (internalLock) {
        lock.countLocks--;
        if (lock.countLocks == 0)
          map.remove(iResourceId);
      }
      throw e;
    }
    if (!result) {
      // tryLock failed: roll back the reservation made above
      synchronized (internalLock) {
        lock.countLocks--;
        if (lock.countLocks == 0)
          map.remove(iResourceId);
      }
    }
    return result;
  }

  /**
   * Releases a previously acquired lock of the given type. Throws
   * OLockException when no lock is registered for the resource.
   */
  public void releaseLock(final REQUESTER_TYPE iRequester, final RESOURCE_TYPE iResourceId, final LOCK iLockType)
      throws OLockException {
    if (!enabled)
      return;
    final CountableLock lock;
    final Object internalLock = internalLock(iResourceId);
    synchronized (internalLock) {
      lock = map.get(iResourceId);
      if (lock == null)
        throw new OLockException("Error on releasing a non acquired lock by the requester '" + iRequester
            + "' against the resource: '" + iResourceId + "'");
      lock.countLocks--;
      if (lock.countLocks == 0)
        map.remove(iResourceId);
    }
    // the actual unlock happens outside the striped monitor; the local
    // reference keeps the lock usable even after its map entry was removed
    if (iLockType == LOCK.SHARED)
      lock.readLock().unlock();
    else
      lock.writeLock().unlock();
  }

  // drops all entries without unlocking; callers must ensure no locks are held
  public void clear() {
    map.clear();
  }

  // For tests purposes.
  public int getCountCurrentLocks() {
    return map.size();
  }

  // hook for subclasses whose resource ids are mutable and need a stable map key
  protected RESOURCE_TYPE getImmutableResourceId(final RESOURCE_TYPE iResourceId) {
    return iResourceId;
  }

  // selects the striped monitor for a resource from the top bits of its hash
  private Object internalLock(final RESOURCE_TYPE iResourceId) {
    final int hashCode = iResourceId.hashCode();
    final int index = (hashCode >>> shift) & mask;
    return locks[index];
  }

  private static int defaultConcurrency() {
    return Runtime.getRuntime().availableProcessors() > DEFAULT_CONCURRENCY_LEVEL ? Runtime.getRuntime().availableProcessors()
        : DEFAULT_CONCURRENCY_LEVEL;
  }
}
| |
// Generated from C:\Users\Tarcisio\Desktop\aaaaaaaa\new\MonitoringDSL.g4 by ANTLR 4.1
package parser;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.misc.*;
import org.antlr.v4.runtime.tree.*;
import java.util.List;
import java.util.Iterator;
import java.util.ArrayList;
import listener.MonitoringDSLListener;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class MonitoringDSLParser extends Parser {
    // NOTE: generated by ANTLR 4.1 from MonitoringDSL.g4 — regenerate from the
    // grammar instead of editing by hand.
    // DFA/prediction caches shared by the generated ATN simulator
    protected static final DFA[] _decisionToDFA;
    protected static final PredictionContextCache _sharedContextCache =
        new PredictionContextCache();
    // token type constants generated from the grammar's lexer rules
    public static final int
        GIVEN=1, WHEN=2, OR=3, RESOURCE=4, OF=5, RESOURCES=6, IS=7, THEN=8, ITS=9,
        COMMA=10, LPAREN=11, RPAREN=12, EQ=13, GT=14, LT=15, NEQ=16, ID=17, STRING=18,
        INT=19, WS=20;
    // display names for each token type, indexed by the constants above
    public static final String[] tokenNames = {
        "<INVALID>", "'Given'", "'When'", "'or'", "'resource'", "'of'", "'resource's'",
        "'is'", "'Then'", "'its'", "','", "'('", "')'", "'equals to'", "'greater than'",
        "'less than'", "NEQ", "ID", "STRING", "INT", "WS"
    };
    // parser rule indices, one per grammar rule
    public static final int
        RULE_root = 0, RULE_monitor_rule = 1, RULE_conditions = 2, RULE_condition = 3,
        RULE_metric = 4, RULE_threshold = 5, RULE_actions = 6, RULE_action = 7,
        RULE_executor = 8, RULE_params = 9, RULE_relational_operator = 10, RULE_value = 11,
        RULE_resource = 12, RULE_metric_name = 13, RULE_target = 14;
    public static final String[] ruleNames = {
        "root", "monitor_rule", "conditions", "condition", "metric", "threshold",
        "actions", "action", "executor", "params", "relational_operator", "value",
        "resource", "metric_name", "target"
    };
    @Override
    public String getGrammarFileName() { return "MonitoringDSL.g4"; }
    @Override
    public String[] getTokenNames() { return tokenNames; }
    @Override
    public String[] getRuleNames() { return ruleNames; }
    @Override
    public ATN getATN() { return _ATN; }
    // wires this parser instance to the shared ATN and caches
    public MonitoringDSLParser(TokenStream input) {
        super(input);
        _interp = new ParserATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
    }
    // parse-tree node for the 'root' rule
    public static class RootContext extends ParserRuleContext {
        public List<Monitor_ruleContext> monitor_rule() {
            return getRuleContexts(Monitor_ruleContext.class);
        }
        public Monitor_ruleContext monitor_rule(int i) {
            return getRuleContext(Monitor_ruleContext.class,i);
        }
        public RootContext(ParserRuleContext parent, int invokingState) {
            super(parent, invokingState);
        }
        @Override public int getRuleIndex() { return RULE_root; }
        @Override
        public void enterRule(ParseTreeListener listener) {
            if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).enterRoot(this);
        }
        @Override
        public void exitRule(ParseTreeListener listener) {
            if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).exitRoot(this);
        }
    }
    // entry rule: keeps parsing monitor_rule while the next token is GIVEN
    public final RootContext root() throws RecognitionException {
        RootContext _localctx = new RootContext(_ctx, getState());
        enterRule(_localctx, 0, RULE_root);
        int _la;
        try {
            enterOuterAlt(_localctx, 1);
            {
            setState(33);
            _errHandler.sync(this);
            _la = _input.LA(1);
            while (_la==GIVEN) {
                {
                {
                setState(30); monitor_rule();
                }
                }
                setState(35);
                _errHandler.sync(this);
                _la = _input.LA(1);
            }
            }
        }
        catch (RecognitionException re) {
            _localctx.exception = re;
            _errHandler.reportError(this, re);
            _errHandler.recover(this, re);
        }
        finally {
            exitRule();
        }
        return _localctx;
    }
    // parse-tree node for the 'monitor_rule' rule
    public static class Monitor_ruleContext extends ParserRuleContext {
        public ConditionsContext conditions() {
            return getRuleContext(ConditionsContext.class,0);
        }
        public ResourceContext resource() {
            return getRuleContext(ResourceContext.class,0);
        }
        public TerminalNode GIVEN() { return getToken(MonitoringDSLParser.GIVEN, 0); }
        public ActionsContext actions() {
            return getRuleContext(ActionsContext.class,0);
        }
        public Monitor_ruleContext(ParserRuleContext parent, int invokingState) {
            super(parent, invokingState);
        }
        @Override public int getRuleIndex() { return RULE_monitor_rule; }
        @Override
        public void enterRule(ParseTreeListener listener) {
            if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).enterMonitor_rule(this);
        }
        @Override
        public void exitRule(ParseTreeListener listener) {
            if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).exitMonitor_rule(this);
        }
    }
    // matches: GIVEN resource conditions actions
    public final Monitor_ruleContext monitor_rule() throws RecognitionException {
        Monitor_ruleContext _localctx = new Monitor_ruleContext(_ctx, getState());
        enterRule(_localctx, 2, RULE_monitor_rule);
        try {
            enterOuterAlt(_localctx, 1);
            {
            setState(36); match(GIVEN);
            setState(37); resource();
            setState(38); conditions();
            setState(39); actions();
            }
        }
        catch (RecognitionException re) {
            _localctx.exception = re;
            _errHandler.reportError(this, re);
            _errHandler.recover(this, re);
        }
        finally {
            exitRule();
        }
        return _localctx;
    }
    // parse-tree node for the 'conditions' rule
    public static class ConditionsContext extends ParserRuleContext {
        public List<ConditionContext> condition() {
            return getRuleContexts(ConditionContext.class);
        }
        public ConditionContext condition(int i) {
            return getRuleContext(ConditionContext.class,i);
        }
        public List<TerminalNode> OR() { return getTokens(MonitoringDSLParser.OR); }
        public TerminalNode OR(int i) {
            return getToken(MonitoringDSLParser.OR, i);
        }
        public TerminalNode WHEN() { return getToken(MonitoringDSLParser.WHEN, 0); }
        public ConditionsContext(ParserRuleContext parent, int invokingState) {
            super(parent, invokingState);
        }
        @Override public int getRuleIndex() { return RULE_conditions; }
        @Override
        public void enterRule(ParseTreeListener listener) {
            if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).enterConditions(this);
        }
        @Override
        public void exitRule(ParseTreeListener listener) {
            if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).exitConditions(this);
        }
    }
    // matches: WHEN condition (OR condition)*
    public final ConditionsContext conditions() throws RecognitionException {
        ConditionsContext _localctx = new ConditionsContext(_ctx, getState());
        enterRule(_localctx, 4, RULE_conditions);
        int _la;
        try {
            enterOuterAlt(_localctx, 1);
            {
            setState(41); match(WHEN);
            setState(42); condition();
            setState(47);
            _errHandler.sync(this);
            _la = _input.LA(1);
            while (_la==OR) {
                {
                {
                setState(43); match(OR);
                setState(44); condition();
                }
                }
                setState(49);
                _errHandler.sync(this);
                _la = _input.LA(1);
            }
            }
        }
        catch (RecognitionException re) {
            _localctx.exception = re;
            _errHandler.reportError(this, re);
            _errHandler.recover(this, re);
        }
        finally {
            exitRule();
        }
        return _localctx;
    }
    // parse-tree node for the 'condition' rule
    public static class ConditionContext extends ParserRuleContext {
        public MetricContext metric() {
            return getRuleContext(MetricContext.class,0);
        }
        public TerminalNode IS() { return getToken(MonitoringDSLParser.IS, 0); }
        public ThresholdContext threshold() {
            return getRuleContext(ThresholdContext.class,0);
        }
        public Relational_operatorContext relational_operator() {
            return getRuleContext(Relational_operatorContext.class,0);
        }
        public ConditionContext(ParserRuleContext parent, int invokingState) {
            super(parent, invokingState);
        }
        @Override public int getRuleIndex() { return RULE_condition; }
        @Override
        public void enterRule(ParseTreeListener listener) {
            if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).enterCondition(this);
        }
        @Override
        public void exitRule(ParseTreeListener listener) {
            if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).exitCondition(this);
        }
    }
    // matches: metric IS relational_operator threshold
    public final ConditionContext condition() throws RecognitionException {
        ConditionContext _localctx = new ConditionContext(_ctx, getState());
        enterRule(_localctx, 6, RULE_condition);
        try {
            enterOuterAlt(_localctx, 1);
            {
            setState(50); metric();
            setState(51); match(IS);
            setState(52); relational_operator();
            setState(53); threshold();
            }
        }
        catch (RecognitionException re) {
            _localctx.exception = re;
            _errHandler.reportError(this, re);
            _errHandler.recover(this, re);
        }
        finally {
            exitRule();
        }
        return _localctx;
    }
    // parse-tree node for the 'metric' rule
    public static class MetricContext extends ParserRuleContext {
        public TerminalNode ITS() { return getToken(MonitoringDSLParser.ITS, 0); }
        public TargetContext target() {
            return getRuleContext(TargetContext.class,0);
        }
        public TerminalNode OF() { return getToken(MonitoringDSLParser.OF, 0); }
        public TerminalNode RESOURCES() { return getToken(MonitoringDSLParser.RESOURCES, 0); }
        public Metric_nameContext metric_name() {
            return getRuleContext(Metric_nameContext.class,0);
        }
        public MetricContext(ParserRuleContext parent, int invokingState) {
            super(parent, invokingState);
        }
        @Override public int getRuleIndex() { return RULE_metric; }
        @Override
        public void enterRule(ParseTreeListener listener) {
            if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).enterMetric(this);
        }
        @Override
        public void exitRule(ParseTreeListener listener) {
            if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).exitMetric(this);
        }
    }
    // two alternatives selected by one token of lookahead:
    //   ID  -> metric_name OF RESOURCES target
    //   ITS -> ITS metric_name
    public final MetricContext metric() throws RecognitionException {
        MetricContext _localctx = new MetricContext(_ctx, getState());
        enterRule(_localctx, 8, RULE_metric);
        try {
            setState(62);
            switch (_input.LA(1)) {
            case ID:
                enterOuterAlt(_localctx, 1);
                {
                setState(55); metric_name();
                setState(56); match(OF);
                setState(57); match(RESOURCES);
                setState(58); target();
                }
                break;
            case ITS:
                enterOuterAlt(_localctx, 2);
                {
                setState(60); match(ITS);
                setState(61); metric_name();
                }
                break;
            default:
                throw new NoViableAltException(this);
            }
        }
        catch (RecognitionException re) {
            _localctx.exception = re;
            _errHandler.reportError(this, re);
            _errHandler.recover(this, re);
        }
        finally {
            exitRule();
        }
        return _localctx;
    }
public static class ThresholdContext extends ParserRuleContext {
public ValueContext value() {
return getRuleContext(ValueContext.class,0);
}
public ThresholdContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_threshold; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).enterThreshold(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).exitThreshold(this);
}
}
public final ThresholdContext threshold() throws RecognitionException {
ThresholdContext _localctx = new ThresholdContext(_ctx, getState());
enterRule(_localctx, 10, RULE_threshold);
try {
enterOuterAlt(_localctx, 1);
{
setState(64); value();
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// --- Generated by ANTLR from the MonitoringDSL grammar. Do not edit by hand;
// --- regenerate from the grammar file instead (setState numbers are tied to the serialized ATN).
// Parse-tree context for rule "actions": THEN action (COMMA action)*.
public static class ActionsContext extends ParserRuleContext {
public TerminalNode THEN() { return getToken(MonitoringDSLParser.THEN, 0); }
public List<TerminalNode> COMMA() { return getTokens(MonitoringDSLParser.COMMA); }
public List<ActionContext> action() {
return getRuleContexts(ActionContext.class);
}
public TerminalNode COMMA(int i) {
return getToken(MonitoringDSLParser.COMMA, i);
}
public ActionContext action(int i) {
return getRuleContext(ActionContext.class,i);
}
public ActionsContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_actions; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).enterActions(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).exitActions(this);
}
}
// Parses rule "actions": a THEN keyword followed by one or more comma-separated actions.
public final ActionsContext actions() throws RecognitionException {
ActionsContext _localctx = new ActionsContext(_ctx, getState());
enterRule(_localctx, 12, RULE_actions);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(66); match(THEN);
setState(67); action();
setState(72);
_errHandler.sync(this);
_la = _input.LA(1);
// Loop while the lookahead is COMMA: each iteration consumes ", action".
while (_la==COMMA) {
{
{
setState(68); match(COMMA);
setState(69); action();
}
}
setState(74);
_errHandler.sync(this);
_la = _input.LA(1);
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree context for rule "action": wraps a single "executor" sub-rule.
public static class ActionContext extends ParserRuleContext {
public ExecutorContext executor() {
return getRuleContext(ExecutorContext.class,0);
}
public ActionContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_action; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).enterAction(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).exitAction(this);
}
}
// Parses rule "action": simply delegates to the "executor" rule.
public final ActionContext action() throws RecognitionException {
ActionContext _localctx = new ActionContext(_ctx, getState());
enterRule(_localctx, 14, RULE_action);
try {
enterOuterAlt(_localctx, 1);
{
setState(75); executor();
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree context for rule "executor": ID LPAREN params RPAREN.
public static class ExecutorContext extends ParserRuleContext {
public ParamsContext params() {
return getRuleContext(ParamsContext.class,0);
}
public TerminalNode ID() { return getToken(MonitoringDSLParser.ID, 0); }
public TerminalNode RPAREN() { return getToken(MonitoringDSLParser.RPAREN, 0); }
public TerminalNode LPAREN() { return getToken(MonitoringDSLParser.LPAREN, 0); }
public ExecutorContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_executor; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).enterExecutor(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).exitExecutor(this);
}
}
// Parses rule "executor": an identifier followed by a parenthesized parameter list.
public final ExecutorContext executor() throws RecognitionException {
ExecutorContext _localctx = new ExecutorContext(_ctx, getState());
enterRule(_localctx, 16, RULE_executor);
try {
enterOuterAlt(_localctx, 1);
{
setState(77); match(ID);
setState(78); match(LPAREN);
setState(79); params();
setState(80); match(RPAREN);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// --- Generated by ANTLR from the MonitoringDSL grammar. Do not edit by hand;
// --- regenerate from the grammar file instead (setState numbers are tied to the serialized ATN).
// Parse-tree context for rule "params": value (COMMA value)*.
public static class ParamsContext extends ParserRuleContext {
public ValueContext value(int i) {
return getRuleContext(ValueContext.class,i);
}
public List<ValueContext> value() {
return getRuleContexts(ValueContext.class);
}
public List<TerminalNode> COMMA() { return getTokens(MonitoringDSLParser.COMMA); }
public TerminalNode COMMA(int i) {
return getToken(MonitoringDSLParser.COMMA, i);
}
public ParamsContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_params; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).enterParams(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).exitParams(this);
}
}
// Parses rule "params": one or more comma-separated values.
public final ParamsContext params() throws RecognitionException {
ParamsContext _localctx = new ParamsContext(_ctx, getState());
enterRule(_localctx, 18, RULE_params);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(82); value();
setState(87);
_errHandler.sync(this);
_la = _input.LA(1);
// Loop while the lookahead is COMMA: each iteration consumes ", value".
while (_la==COMMA) {
{
{
setState(83); match(COMMA);
setState(84); value();
}
}
setState(89);
_errHandler.sync(this);
_la = _input.LA(1);
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree context for rule "relational_operator": one of EQ | GT | LT | NEQ.
public static class Relational_operatorContext extends ParserRuleContext {
public TerminalNode NEQ() { return getToken(MonitoringDSLParser.NEQ, 0); }
public TerminalNode LT() { return getToken(MonitoringDSLParser.LT, 0); }
public TerminalNode GT() { return getToken(MonitoringDSLParser.GT, 0); }
public TerminalNode EQ() { return getToken(MonitoringDSLParser.EQ, 0); }
public Relational_operatorContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_relational_operator; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).enterRelational_operator(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).exitRelational_operator(this);
}
}
// Parses rule "relational_operator": matches a single token from the set
// {EQ, GT, LT, NEQ} via an ANTLR bitset membership test; anything else triggers
// inline error recovery before the token is consumed.
public final Relational_operatorContext relational_operator() throws RecognitionException {
Relational_operatorContext _localctx = new Relational_operatorContext(_ctx, getState());
enterRule(_localctx, 20, RULE_relational_operator);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(90);
_la = _input.LA(1);
if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << EQ) | (1L << GT) | (1L << LT) | (1L << NEQ))) != 0)) ) {
_errHandler.recoverInline(this);
}
consume();
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree context for rule "value": one of INT | ID | STRING.
public static class ValueContext extends ParserRuleContext {
public TerminalNode INT() { return getToken(MonitoringDSLParser.INT, 0); }
public TerminalNode ID() { return getToken(MonitoringDSLParser.ID, 0); }
public TerminalNode STRING() { return getToken(MonitoringDSLParser.STRING, 0); }
public ValueContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_value; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).enterValue(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).exitValue(this);
}
}
// Parses rule "value": matches a single token from the set {ID, STRING, INT}
// via an ANTLR bitset membership test; anything else triggers inline recovery.
public final ValueContext value() throws RecognitionException {
ValueContext _localctx = new ValueContext(_ctx, getState());
enterRule(_localctx, 22, RULE_value);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(92);
_la = _input.LA(1);
if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ID) | (1L << STRING) | (1L << INT))) != 0)) ) {
_errHandler.recoverInline(this);
}
consume();
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// --- Generated by ANTLR from the MonitoringDSL grammar. Do not edit by hand;
// --- regenerate from the grammar file instead (setState numbers are tied to the serialized ATN).
// Parse-tree context for rule "resource": RESOURCE ID.
public static class ResourceContext extends ParserRuleContext {
public TerminalNode RESOURCE() { return getToken(MonitoringDSLParser.RESOURCE, 0); }
public TerminalNode ID() { return getToken(MonitoringDSLParser.ID, 0); }
public ResourceContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_resource; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).enterResource(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).exitResource(this);
}
}
// Parses rule "resource": the RESOURCE keyword followed by an identifier.
public final ResourceContext resource() throws RecognitionException {
ResourceContext _localctx = new ResourceContext(_ctx, getState());
enterRule(_localctx, 24, RULE_resource);
try {
enterOuterAlt(_localctx, 1);
{
setState(94); match(RESOURCE);
setState(95); match(ID);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree context for rule "metric_name": a bare identifier.
public static class Metric_nameContext extends ParserRuleContext {
public TerminalNode ID() { return getToken(MonitoringDSLParser.ID, 0); }
public Metric_nameContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_metric_name; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).enterMetric_name(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).exitMetric_name(this);
}
}
// Parses rule "metric_name": matches a single ID token.
public final Metric_nameContext metric_name() throws RecognitionException {
Metric_nameContext _localctx = new Metric_nameContext(_ctx, getState());
enterRule(_localctx, 26, RULE_metric_name);
try {
enterOuterAlt(_localctx, 1);
{
setState(97); match(ID);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree context for rule "target": a bare identifier.
public static class TargetContext extends ParserRuleContext {
public TerminalNode ID() { return getToken(MonitoringDSLParser.ID, 0); }
public TargetContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_target; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).enterTarget(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof MonitoringDSLListener ) ((MonitoringDSLListener)listener).exitTarget(this);
}
}
// Parses rule "target": matches a single ID token.
public final TargetContext target() throws RecognitionException {
TargetContext _localctx = new TargetContext(_ctx, getState());
enterRule(_localctx, 28, RULE_target);
try {
enterOuterAlt(_localctx, 1);
{
setState(99); match(ID);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Serialized ATN (augmented transition network) emitted by the ANTLR tool for this
// parser. The string is machine-generated binary-like data encoding the grammar's
// state machine; it must never be edited by hand -- regenerate from the grammar.
public static final String _serializedATN =
"\3\uacf5\uee8c\u4f5d\u8b0d\u4a45\u78bd\u1b2f\u3378\3\26h\4\2\t\2\4\3\t"+
"\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4"+
"\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\3\2\7\2\"\n\2\f\2\16\2%\13"+
"\2\3\3\3\3\3\3\3\3\3\3\3\4\3\4\3\4\3\4\7\4\60\n\4\f\4\16\4\63\13\4\3\5"+
"\3\5\3\5\3\5\3\5\3\6\3\6\3\6\3\6\3\6\3\6\3\6\5\6A\n\6\3\7\3\7\3\b\3\b"+
"\3\b\3\b\7\bI\n\b\f\b\16\bL\13\b\3\t\3\t\3\n\3\n\3\n\3\n\3\n\3\13\3\13"+
"\3\13\7\13X\n\13\f\13\16\13[\13\13\3\f\3\f\3\r\3\r\3\16\3\16\3\16\3\17"+
"\3\17\3\20\3\20\3\20\2\21\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36\2\4\3"+
"\2\17\22\3\2\23\25]\2#\3\2\2\2\4&\3\2\2\2\6+\3\2\2\2\b\64\3\2\2\2\n@\3"+
"\2\2\2\fB\3\2\2\2\16D\3\2\2\2\20M\3\2\2\2\22O\3\2\2\2\24T\3\2\2\2\26\\"+
"\3\2\2\2\30^\3\2\2\2\32`\3\2\2\2\34c\3\2\2\2\36e\3\2\2\2 \"\5\4\3\2! "+
"\3\2\2\2\"%\3\2\2\2#!\3\2\2\2#$\3\2\2\2$\3\3\2\2\2%#\3\2\2\2&\'\7\3\2"+
"\2\'(\5\32\16\2()\5\6\4\2)*\5\16\b\2*\5\3\2\2\2+,\7\4\2\2,\61\5\b\5\2"+
"-.\7\5\2\2.\60\5\b\5\2/-\3\2\2\2\60\63\3\2\2\2\61/\3\2\2\2\61\62\3\2\2"+
"\2\62\7\3\2\2\2\63\61\3\2\2\2\64\65\5\n\6\2\65\66\7\t\2\2\66\67\5\26\f"+
"\2\678\5\f\7\28\t\3\2\2\29:\5\34\17\2:;\7\7\2\2;<\7\b\2\2<=\5\36\20\2"+
"=A\3\2\2\2>?\7\13\2\2?A\5\34\17\2@9\3\2\2\2@>\3\2\2\2A\13\3\2\2\2BC\5"+
"\30\r\2C\r\3\2\2\2DE\7\n\2\2EJ\5\20\t\2FG\7\f\2\2GI\5\20\t\2HF\3\2\2\2"+
"IL\3\2\2\2JH\3\2\2\2JK\3\2\2\2K\17\3\2\2\2LJ\3\2\2\2MN\5\22\n\2N\21\3"+
"\2\2\2OP\7\23\2\2PQ\7\r\2\2QR\5\24\13\2RS\7\16\2\2S\23\3\2\2\2TY\5\30"+
"\r\2UV\7\f\2\2VX\5\30\r\2WU\3\2\2\2X[\3\2\2\2YW\3\2\2\2YZ\3\2\2\2Z\25"+
"\3\2\2\2[Y\3\2\2\2\\]\t\2\2\2]\27\3\2\2\2^_\t\3\2\2_\31\3\2\2\2`a\7\6"+
"\2\2ab\7\23\2\2b\33\3\2\2\2cd\7\23\2\2d\35\3\2\2\2ef\7\23\2\2f\37\3\2"+
"\2\2\7#\61@JY";
// The deserialized state machine shared by all instances of this parser.
public static final ATN _ATN =
ATNSimulator.deserialize(_serializedATN.toCharArray());
static {
// One DFA per parser decision point; these caches are filled in lazily during parsing.
_decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
_decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
}
}
}
| |
/* Modified from https://scripting.dev.java.net/" (JSR 223 Java Scripting) */
/*
* Copyright (C) 2006 Sun Microsystems, Inc. All rights reserved.
* Use is subject to license terms.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met: Redistributions of source code
* must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of
* conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution. Neither the name of the Sun Microsystems nor the names of
* its contributors may be used to endorse or promote products derived from this software
* without specific prior written permission.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
* OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
/*
* JavaScriptEngine.java
* @author A. Sundararajan
*/
package org.kitesdk.morphline.scriptengine.java;
import java.io.Reader;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Iterator;
import java.util.Map;
import javax.script.AbstractScriptEngine;
import javax.script.Bindings;
import javax.script.Compilable;
import javax.script.CompiledScript;
import javax.script.ScriptContext;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineFactory;
import javax.script.ScriptException;
import javax.script.SimpleBindings;
/**
* A fast script engine for the Java programming language. It is a lightly modified
* version of the JSR 223 Java Scripting engine, with roughly 100x less overhead when invoking "static" methods.
*/
class FastJavaScriptEngine extends AbstractScriptEngine implements Compilable {

// In-memory Java source compiler used by parse(); class files never touch disk.
private JavaCompiler compiler;

public FastJavaScriptEngine() {
compiler = new JavaCompiler();
}

/**
 * A compiled script bound to one public static method. {@link #eval(Object...)}
 * invokes that method directly via reflection, bypassing the per-call overhead
 * of the generic JSR 223 eval path.
 */
public static final class JavaCompiledScript extends CompiledScript {
// TODO: use java.lang.invoke.MethodHandle on Java7 for better perf; e.g. see http://vanillajava.blogspot.com/2011/08/methodhandle-performance-in-java-7.html
private final Method targetMethod;

/**
 * Resolves {@code methodName} with the given parameter types on {@code clazz}.
 *
 * @throws ScriptException if the method is absent, or is not both public and static
 */
@SuppressWarnings("unchecked")
JavaCompiledScript (Class clazz, String methodName, Class[] parameterTypes) throws ScriptException {
try {
this.targetMethod = clazz.getMethod(methodName, parameterTypes);
} catch (NoSuchMethodException e) {
throw new ScriptException(e);
}
int modifiers = this.targetMethod.getModifiers();
// Must be public static so it can be invoked below with a null receiver.
if (!Modifier.isPublic(modifiers) || !Modifier.isStatic(modifiers)) {
throw new ScriptException(
"Cannot find public static method: " + methodName); }
}

/** Invokes the bound static method with the given arguments. */
public Object eval(Object... params) throws ScriptException {
try {
return targetMethod.invoke(null, params); // static method: null receiver
} catch (Exception e) {
throw new ScriptException(e);
}
}

/** Not supported: this compiled script is not tied to an engine instance. */
public ScriptEngine getEngine() {
throw new UnsupportedOperationException();
}

/** Not supported: use {@link #eval(Object...)} instead. */
public Object eval(ScriptContext ctx) throws ScriptException {
throw new UnsupportedOperationException();
}
}

/**
 * Compiles the given Java source and binds the named public static method.
 *
 * @param script Java source text to compile
 * @param methodName name of a public static method declared by the main class
 * @param parameterTypes formal parameter types of that method
 * @throws ScriptException if compilation fails or the method is absent/non-static
 */
public CompiledScript compile(String script, String methodName, Class[] parameterTypes) throws ScriptException {
Class clazz = parse(script, context);
return new JavaCompiledScript(clazz, methodName, parameterTypes);
}

/** Not supported: use {@link #compile(String, String, Class[])}. */
public CompiledScript compile(String script) throws ScriptException {
throw new UnsupportedOperationException();
}

/** Not supported: use {@link #compile(String, String, Class[])}. */
public CompiledScript compile(Reader reader) throws ScriptException {
throw new UnsupportedOperationException();
}

/** Not supported: this engine only runs precompiled static methods. */
public Object eval(String str, ScriptContext ctx) throws ScriptException {
throw new UnsupportedOperationException();
}

/** Not supported: this engine only runs precompiled static methods. */
public Object eval(Reader reader, ScriptContext ctx) throws ScriptException {
throw new UnsupportedOperationException();
}

public ScriptEngineFactory getFactory() {
throw new UnsupportedOperationException();
}

public Bindings createBindings() {
return new SimpleBindings();
}

// Internals only below this point

/**
 * Compiles {@code str} in memory and returns the "main" class: the explicitly
 * configured main class if one is set, else a compiled class exposing a public
 * static main(String[]) method, else the first compiled class (or null if none).
 *
 * @throws ScriptException on compilation failure, or if a configured main class
 *     cannot be loaded or lacks a main method
 */
private Class parse(String str, ScriptContext ctx) throws ScriptException {
String fileName = getFileName(ctx);
String sourcePath = getSourcePath(ctx);
String classPath = getClassPath(ctx);
Map<String, byte[]> classBytes = compiler.compile(fileName, str,
ctx.getErrorWriter(), sourcePath, classPath);
if (classBytes == null) {
throw new ScriptException("compilation failed");
}
// create a ClassLoader to load classes from MemoryJavaFileManager
MemoryClassLoader loader = new MemoryClassLoader(classBytes, classPath,
getParentLoader(ctx));
String mainClassName = getMainClassName(ctx);
if (mainClassName != null) {
try {
Class clazz = loader.load(mainClassName);
Method mainMethod = findMainMethod(clazz);
if (mainMethod == null) {
throw new ScriptException("no main method in " + mainClassName);
}
return clazz;
} catch (ClassNotFoundException cnfe) {
throw new ScriptException(cnfe);
}
}
// no main class configured - load all compiled classes
Iterable<Class> classes;
try {
classes = loader.loadAll();
} catch (ClassNotFoundException exp) {
throw new ScriptException(exp);
}
// search for class with main method
Class c = findMainClass(classes);
if (c != null) {
return c;
} else {
// if no class has a "main" method, fall back to the
// first compiled class (or null if there are none)
Iterator<Class> itr = classes.iterator();
if (itr.hasNext()) {
return itr.next();
} else {
return null;
}
}
}

/**
 * Returns the first class with a public static main(String[]) method, preferring
 * public classes over package-private ones; null if no class qualifies.
 */
private static Class findMainClass(Iterable<Class> classes) {
// find a public class with public static main method
for (Class clazz : classes) {
int modifiers = clazz.getModifiers();
if (Modifier.isPublic(modifiers)) {
Method mainMethod = findMainMethod(clazz);
if (mainMethod != null) {
return clazz;
}
}
}
// okay, try to find package private class that
// has public static main method
for (Class clazz : classes) {
Method mainMethod = findMainMethod(clazz);
if (mainMethod != null) {
return clazz;
}
}
// no main class found!
return null;
}

// find public static void main(String[]) method, if any; returns null when absent
@SuppressWarnings("unchecked")
private static Method findMainMethod(Class clazz) {
try {
Method mainMethod = clazz.getMethod("main", new Class[] { String[].class });
int modifiers = mainMethod.getModifiers();
if (Modifier.isPublic(modifiers) &&
Modifier.isStatic(modifiers)) {
return mainMethod;
}
} catch (NoSuchMethodException ignored) {
// absence of a main method is expected; caller treats null as "not a main class"
}
return null;
}

/** Returns the script file name from the context, or a placeholder when unset. */
private static String getFileName(ScriptContext ctx) {
int scope = ctx.getAttributesScope(ScriptEngine.FILENAME);
if (scope != -1) {
return ctx.getAttribute(ScriptEngine.FILENAME, scope).toString();
} else {
return "$unnamed.java";
}
}

// for certain variables, we look for System properties. This is
// the prefix used for such System properties
private static final String SYSPROP_PREFIX = "com.sun.script.java.";

private static final String SOURCEPATH = "sourcepath";

/** Returns the configured source path: context attribute first, then system property. */
private static String getSourcePath(ScriptContext ctx) {
int scope = ctx.getAttributesScope(SOURCEPATH);
if (scope != -1) {
// read from the scope we just located, mirroring getFileName()
return ctx.getAttribute(SOURCEPATH, scope).toString();
} else {
// look for "com.sun.script.java.sourcepath"
return System.getProperty(SYSPROP_PREFIX + SOURCEPATH);
}
}

private static final String CLASSPATH = "classpath";

/**
 * Returns the configured class path: context attribute first, then the
 * "com.sun.script.java.classpath" system property, then java.class.path.
 */
private static String getClassPath(ScriptContext ctx) {
int scope = ctx.getAttributesScope(CLASSPATH);
if (scope != -1) {
return ctx.getAttribute(CLASSPATH, scope).toString();
} else {
// look for "com.sun.script.java.classpath"
String res = System.getProperty(SYSPROP_PREFIX + CLASSPATH);
if (res == null) {
res = System.getProperty("java.class.path");
}
return res;
}
}

private static final String MAINCLASS = "mainClass";

/** Returns the configured main class name, or null when not configured anywhere. */
private static String getMainClassName(ScriptContext ctx) {
int scope = ctx.getAttributesScope(MAINCLASS);
if (scope != -1) {
return ctx.getAttribute(MAINCLASS, scope).toString();
} else {
// look for "com.sun.script.java.mainClass"
return System.getProperty(SYSPROP_PREFIX + MAINCLASS);
}
}

private static final String PARENTLOADER = "parentLoader";

/** Returns the parent ClassLoader from the context, or null to use the default. */
private static ClassLoader getParentLoader(ScriptContext ctx) {
int scope = ctx.getAttributesScope(PARENTLOADER);
if (scope != -1) {
Object loader = ctx.getAttribute(PARENTLOADER);
if (loader instanceof ClassLoader) {
return (ClassLoader) loader;
} // else fall through..
}
return null;
}
}
| |
/*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Test switch() blocks
*/
public class Main {
// TODO: This should be translated to smali tests, so it is guaranteed we have the right kind
// of switch.
// NOTE(review): per the comments below, the literal layout of each switch (contiguous
// case values, gaps, extreme bounds) is what is under test here -- presumably each shape
// steers the compiler toward packed- vs sparse-switch bytecode. Do not refactor these
// switches into other control-flow forms.
// Simple packed-switch.
public static void packedSwitch(int value) {
switch (value) {
case 0:
System.out.println("0"); break;
case 1:
System.out.println("1"); break;
case 2:
System.out.println("2"); break;
case 3:
System.out.println("3"); break;
case 4:
System.out.println("4"); break;
default:
System.out.println("default"); break;
}
}
// Simple packed-switch starting at a negative index.
public static void packedSwitch2(int value) {
switch (value) {
case -3:
System.out.println("-3"); break;
case -2:
System.out.println("-2"); break;
case -1:
System.out.println("-1"); break;
case 0:
System.out.println("0"); break;
case 1:
System.out.println("1"); break;
case 2:
System.out.println("2"); break;
default:
System.out.println("default"); break;
}
}
// Simple packed-switch starting above 0.
public static void packedSwitch3(int value) {
switch (value) {
case 2:
System.out.println("2"); break;
case 3:
System.out.println("3"); break;
case 4:
System.out.println("4"); break;
case 5:
System.out.println("5"); break;
case 6:
System.out.println("6"); break;
default:
System.out.println("default"); break;
}
}
// Simple packed-switch going up to max_int.
public static void packedSwitch4(int value) {
switch (value) {
case Integer.MAX_VALUE - 1:
System.out.println(Integer.MAX_VALUE - 1); break;
case Integer.MAX_VALUE:
System.out.println(Integer.MAX_VALUE); break;
default:
System.out.println("default"); break;
}
}
// Simple packed-switch starting at min_int.
public static void packedSwitch5(int value) {
switch (value) {
case Integer.MIN_VALUE:
System.out.println(Integer.MIN_VALUE); break;
case Integer.MIN_VALUE + 1:
System.out.println(Integer.MIN_VALUE + 1); break;
default:
System.out.println("default"); break;
}
}
// Simple (packed-)switch with only min_int.
public static void packedSwitch6(int value) {
switch (value) {
case Integer.MIN_VALUE:
System.out.println(Integer.MIN_VALUE); break;
default:
System.out.println("default"); break;
}
}
// Long packed-switch that might lead to not creating chained-ifs.
public static void packedSwitch7(int value) {
switch (value) {
case 1:
System.out.println(1); break;
case 2:
System.out.println(2); break;
case 3:
System.out.println(3); break;
case 4:
System.out.println(4); break;
case 5:
System.out.println(5); break;
case 6:
System.out.println(6); break;
case 7:
System.out.println(7); break;
case 8:
System.out.println(8); break;
case 9:
System.out.println(9); break;
case 10:
System.out.println(10); break;
case 11:
System.out.println(11); break;
case 12:
System.out.println(12); break;
case 13:
System.out.println(13); break;
case 14:
System.out.println(14); break;
case 15:
System.out.println(15); break;
default:
System.out.println("default"); break;
}
}
// NOTE(review): as with the packed-switch methods above in this file, the gapped case
// layouts below are deliberate test fixtures -- the gaps are what make these
// sparse switches -- so the literal switch statements must be preserved as written.
// Sparse switch, just leave a gap.
public static void sparseSwitch(int value) {
switch (value) {
case 0:
System.out.println("0"); break;
case 1:
System.out.println("1"); break;
case 3:
System.out.println("3"); break;
case 4:
System.out.println("4"); break;
default:
System.out.println("default"); break;
}
}
// Simple sparse-switch starting at a negative index.
public static void sparseSwitch2(int value) {
switch (value) {
case -3:
System.out.println("-3"); break;
case -2:
System.out.println("-2"); break;
case -1:
System.out.println("-1"); break;
case 0:
System.out.println("0"); break;
case 2:
System.out.println("2"); break;
default:
System.out.println("default"); break;
}
}
// Simple sparse-switch starting above 0.
public static void sparseSwitch3(int value) {
switch (value) {
case 2:
System.out.println("2"); break;
case 4:
System.out.println("4"); break;
case 5:
System.out.println("5"); break;
case 6:
System.out.println("6"); break;
default:
System.out.println("default"); break;
}
}
// Simple sparse-switch going up to max_int.
public static void sparseSwitch4(int value) {
switch (value) {
case Integer.MAX_VALUE - 2:
System.out.println(Integer.MAX_VALUE - 2); break;
case Integer.MAX_VALUE:
System.out.println(Integer.MAX_VALUE); break;
default:
System.out.println("default"); break;
}
}
// Simple sparse-switch starting at min_int.
public static void sparseSwitch5(int value) {
switch (value) {
case Integer.MIN_VALUE:
System.out.println(Integer.MIN_VALUE); break;
case Integer.MIN_VALUE + 2:
System.out.println(Integer.MIN_VALUE + 2); break;
default:
System.out.println("default"); break;
}
}
// Long sparse-switch that might lead to not creating chained-ifs.
public static void sparseSwitch7(int value) {
switch (value) {
case 1:
System.out.println(1); break;
case 2:
System.out.println(2); break;
case 4:
System.out.println(4); break;
case 5:
System.out.println(5); break;
case 6:
System.out.println(6); break;
case 7:
System.out.println(7); break;
case 8:
System.out.println(8); break;
case 9:
System.out.println(9); break;
case 10:
System.out.println(10); break;
case 11:
System.out.println(11); break;
case 12:
System.out.println(12); break;
case 13:
System.out.println(13); break;
case 14:
System.out.println(14); break;
case 15:
System.out.println(15); break;
default:
System.out.println("default"); break;
}
}
/**
 * Driver for the packed/sparse switch helpers defined elsewhere in this file.
 * Each section prints a marker line followed by the helpers' own output for a
 * range of inputs including Integer.MIN_VALUE / Integer.MAX_VALUE boundaries.
 */
public static void main(String args[]) {
    /*
     * Note: We are using for loops and calls to hopefully avoid simplifying the switch
     * structure from constant propagation. When inlining is supported, this needs to
     * be revisited.
     */
    System.out.println("packed");
    for (int i = -2; i < 3; i++) {
        packedSwitch(i);
    }
    packedSwitch(Integer.MIN_VALUE);
    packedSwitch(Integer.MAX_VALUE);
    System.out.println("packed2");
    for (int i = -2; i < 3; i++) {
        packedSwitch2(i);
    }
    packedSwitch2(Integer.MIN_VALUE);
    packedSwitch2(Integer.MAX_VALUE);
    System.out.println("packed3");
    for (int i = -2; i < 7; i++) {
        packedSwitch3(i);
    }
    packedSwitch3(Integer.MIN_VALUE);
    packedSwitch3(Integer.MAX_VALUE);
    System.out.println("packed4");
    // Deliberately relies on Java's defined int wraparound: i++ past
    // Integer.MAX_VALUE wraps to a negative value, ending the loop after the
    // values MAX-2, MAX-1, MAX have been exercised.
    for (int i = Integer.MAX_VALUE - 2; i > 0; i++) {
        packedSwitch4(i);
    }
    packedSwitch4(Integer.MIN_VALUE);
    System.out.println("packed5");
    // Exercises the lower int boundary: MIN_VALUE and MIN_VALUE + 1.
    for (int i = Integer.MIN_VALUE; i < Integer.MIN_VALUE + 2; i++) {
        packedSwitch5(i);
    }
    packedSwitch5(Integer.MAX_VALUE);
    System.out.println("packed6");
    packedSwitch6(Integer.MIN_VALUE);
    packedSwitch6(Integer.MAX_VALUE);
    System.out.println("packed7");
    for (int i = -1; i < 17; i++) {
        packedSwitch7(i);
    }
    System.out.println("sparse");
    for (int i = -2; i < 4; i++) {
        sparseSwitch(i);
    }
    sparseSwitch(Integer.MIN_VALUE);
    sparseSwitch(Integer.MAX_VALUE);
    System.out.println("sparse2");
    for (int i = -2; i < 3; i++) {
        sparseSwitch2(i);
    }
    sparseSwitch2(Integer.MIN_VALUE);
    sparseSwitch2(Integer.MAX_VALUE);
    System.out.println("sparse3");
    for (int i = -2; i < 7; i++) {
        sparseSwitch3(i);
    }
    sparseSwitch3(Integer.MIN_VALUE);
    sparseSwitch3(Integer.MAX_VALUE);
    System.out.println("sparse4");
    // Same intentional overflow-terminated loop as the "packed4" section above.
    for (int i = Integer.MAX_VALUE - 2; i > 0; i++) {
        sparseSwitch4(i);
    }
    sparseSwitch4(Integer.MIN_VALUE);
    System.out.println("sparse5");
    for (int i = Integer.MIN_VALUE; i < Integer.MIN_VALUE + 2; i++) {
        sparseSwitch5(i);
    }
    sparseSwitch5(Integer.MAX_VALUE);
    // NOTE(review): there is no "sparse6" section; whether a sparseSwitch6
    // counterpart exists elsewhere in the file is not visible from here.
    System.out.println("sparse7");
    for (int i = -1; i < 17; i++) {
        sparseSwitch7(i);
    }
    // Older tests.
    int a = 1;
    switch (a) {
        case -1: System.out.print("neg one\n"); break;
        case 0: System.out.print("zero\n"); break;
        case 1: System.out.print("CORRECT (one)\n"); break;
        case 2: System.out.print("two\n"); break;
        case 3: System.out.print("three\n"); break;
        case 4: System.out.print("four\n"); break;
        default: System.out.print("???\n"); break;
    }
    switch (a) {
        case 3: System.out.print("three\n"); break;
        case 4: System.out.print("four\n"); break;
        default: System.out.print("CORRECT (not found)\n"); break;
    }
    a = 0x12345678;
    switch (a) {
        case 0x12345678: System.out.print("CORRECT (large)\n"); break;
        case 0x12345679: System.out.print("large+1\n"); break;
        default: System.out.print("nuts\n"); break;
    }
    switch (a) {
        case 0x12345678: System.out.print("CORRECT (large2)\n"); break;
        case 0x12345700: System.out.print("large+many\n"); break;
        default: System.out.print("nuts\n"); break;
    }
    switch (a) {
        case 57: System.out.print("fifty-seven!\n"); break;
        case -6: System.out.print("neg six!\n"); break;
        case 0x12345678: System.out.print("CORRECT (large3)\n"); break;
        case 22: System.out.print("twenty-two!\n"); break;
        case 3: System.out.print("three!\n"); break;
        default: System.out.print("huh?\n"); break;
    }
    switch (a) {
        case -6: System.out.print("neg six!\n"); break;
        case 3: System.out.print("three!\n"); break;
        default: System.out.print("CORRECT (not found)\n"); break;
    }
    a = -5;
    switch (a) {
        case 12: System.out.print("twelve\n"); break;
        // NOTE(review): a == -5 DOES match this case, so the "(not found)"
        // wording in the expected-output string looks like a copy/paste from
        // the default-branch tests above — confirm against the golden output
        // before "fixing" it, since these strings ARE the expected output.
        case -5: System.out.print("CORRECT (not found)\n"); break;
        case 0: System.out.print("zero\n"); break;
        default: System.out.print("wah?\n"); break;
    }
    switch (a) {
        default: System.out.print("CORRECT (default only)\n"); break;
    }
    a = -10;
    switch (a) {
        case -10: System.out.print("CORRECT big sparse / first\n"); break;
        case -5: System.out.print("neg five\n"); break;
        case 0: System.out.print("zero\n"); break;
        case 5: System.out.print("five\n"); break;
        case 10: System.out.print("ten\n"); break;
        case 15: System.out.print("fifteen\n"); break;
        case 20: System.out.print("twenty\n"); break;
        case 50: System.out.print("fifty\n"); break;
        case 100: System.out.print("hundred\n"); break;
        default: System.out.print("blah!\n"); break;
    }
    a = 100;
    switch (a) {
        case -10: System.out.print("neg ten\n"); break;
        case -5: System.out.print("neg five\n"); break;
        case 0: System.out.print("zero\n"); break;
        case 5: System.out.print("five\n"); break;
        case 10: System.out.print("ten\n"); break;
        case 15: System.out.print("fifteen\n"); break;
        case 20: System.out.print("twenty\n"); break;
        case 50: System.out.print("fifty\n"); break;
        case 100: System.out.print("CORRECT big sparse / last\n"); break;
        default: System.out.print("blah!\n"); break;
    }
    // Sweeps across the 253..258 range so both matching (254-257) and
    // non-matching (253, 258) inputs hit this packed switch.
    for (a = 253; a <= 258; a++) {
        switch (a) {
            case 254: System.out.println("254"); break;
            case 255: System.out.println("255"); break;
            case 256: System.out.println("256"); break;
            case 257: System.out.println("257"); break;
            default: System.out.println("default"); break;
        }
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.printer;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Map;
import javax.print.DocFlavor;
import javax.print.attribute.standard.MediaSizeName;
import javax.print.attribute.standard.OrientationRequested;
import javax.print.attribute.standard.Sides;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriParams;
import org.apache.camel.spi.UriPath;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.URISupport;
@UriParams
public class PrinterConfiguration {
    private URI uri;
    private MediaSizeName mediaSizeName;
    private Sides internalSides;
    private OrientationRequested internalOrientation;
    @UriPath @Metadata(required = "true")
    private String hostname;
    @UriPath @Metadata(required = "true")
    private int port;
    @UriPath @Metadata(required = "true")
    private String printername;
    @UriParam
    private String printerPrefix;
    @UriParam(defaultValue = "1")
    private int copies = 1;
    @UriParam
    private String flavor;
    @UriParam
    private DocFlavor docFlavor;
    @UriParam
    private String mimeType;
    @UriParam(defaultValue = "na-letter")
    private String mediaSize;
    @UriParam(defaultValue = "one-sided", enums = "one-sided,duplex,tumble,two-sided-short-edge,two-sided-long-edge")
    private String sides;
    @UriParam(defaultValue = "portrait", enums = "portrait,landscape,reverse-portrait,reverse-landscape")
    private String orientation;
    @UriParam(defaultValue = "true")
    private boolean sendToPrinter = true;
    @UriParam
    private String mediaTray;

    public PrinterConfiguration() {
    }

    // NOTE(review): URISyntaxException is declared but nothing in this
    // constructor can throw it — consider dropping the throws clause in a
    // future (interface-breaking) cleanup.
    public PrinterConfiguration(URI uri) throws URISyntaxException {
        this.uri = uri;
    }

    /**
     * Populates this configuration from an endpoint URI of the form
     * {@code lpr://hostname[:port]/path/to/printer[?options]}.
     *
     * @param uri the endpoint URI to parse
     * @throws IllegalArgumentException if the URI scheme is not {@code lpr}
     * @throws Exception if an option value cannot be converted (e.g. a
     *         non-numeric {@code copies} value)
     */
    public void parseURI(URI uri) throws Exception {
        String protocol = uri.getScheme();
        if (!protocol.equalsIgnoreCase("lpr")) {
            throw new IllegalArgumentException("Unrecognized Print protocol: " + protocol + " for uri: " + uri);
        }
        setUri(uri);
        setHostname(uri.getHost());
        setPort(uri.getPort());
        // use path as printer name, but without any leading slashes
        String path = uri.getPath();
        path = ObjectHelper.removeStartingCharacters(path, '/');
        path = ObjectHelper.removeStartingCharacters(path, '\\');
        setPrintername(path);
        Map<String, Object> printSettings = URISupport.parseParameters(uri);
        setFlavor((String) printSettings.get("flavor"));
        setMimeType((String) printSettings.get("mimeType"));
        // the assign* helpers below read the raw string fields set just above
        // and translate them into the javax.print API objects
        setDocFlavor(assignDocFlavor(flavor, mimeType));
        setPrinterPrefix((String) printSettings.get("printerPrefix"));
        if (printSettings.containsKey("copies")) {
            setCopies(Integer.valueOf((String) printSettings.get("copies")));
        }
        setMediaSize((String) printSettings.get("mediaSize"));
        setSides((String) printSettings.get("sides"));
        setOrientation((String) printSettings.get("orientation"));
        setMediaSizeName(assignMediaSize(mediaSize));
        setInternalSides(assignSides(sides));
        setInternalOrientation(assignOrientation(orientation));
        if (printSettings.containsKey("sendToPrinter")) {
            if (!(Boolean.valueOf((String) printSettings.get("sendToPrinter")))) {
                setSendToPrinter(false);
            }
        }
        if (printSettings.containsKey("mediaTray")) {
            setMediaTray((String) printSettings.get("mediaTray"));
        }
    }

    /**
     * Translates the configured flavor/mimeType names into a
     * {@link DocFlavor}, case-insensitively. Defaults to
     * {@code DocFlavor.BYTE_ARRAY.AUTOSENSE} when the mime type is absent or
     * not one of the recognized names.
     *
     * @param flavor the flavor name (defaults to "DocFlavor.BYTE_ARRAY" when null)
     * @param mimeType the mime type name (defaults to "AUTOSENSE" when null)
     * @return the resolved doc flavor, never null
     */
    private DocFlavor assignDocFlavor(String flavor, String mimeType) throws Exception {
        // defaults
        if (mimeType == null) {
            mimeType = "AUTOSENSE";
        }
        if (flavor == null) {
            flavor = "DocFlavor.BYTE_ARRAY";
        }
        DocFlavor d = DocFlavor.BYTE_ARRAY.AUTOSENSE;
        DocFlavorAssigner docFlavorAssigner = new DocFlavorAssigner();
        if (mimeType.equalsIgnoreCase("AUTOSENSE")) {
            d = docFlavorAssigner.forMimeTypeAUTOSENSE(flavor);
        } else if (mimeType.equalsIgnoreCase("GIF")) {
            d = docFlavorAssigner.forMimeTypeGIF(flavor);
        } else if (mimeType.equalsIgnoreCase("JPEG")) {
            d = docFlavorAssigner.forMimeTypeJPEG(flavor);
        } else if (mimeType.equalsIgnoreCase("PDF")) {
            d = docFlavorAssigner.forMimeTypePDF(flavor);
        } else if (mimeType.equalsIgnoreCase("PCL")) {
            d = docFlavorAssigner.forMimeTypePCL(flavor);
        } else if (mimeType.equalsIgnoreCase("POSTSCRIPT")) {
            d = docFlavorAssigner.forMimeTypePOSTSCRIPT(flavor);
        } else if (mimeType.equalsIgnoreCase("TEXT_HTML_HOST")) {
            d = docFlavorAssigner.forMimeTypeHOST(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_HTML_US_ASCII")) {
            d = docFlavorAssigner.forMimeTypeUSASCII(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_HTML_UTF_16")) {
            d = docFlavorAssigner.forMimeTypeUTF16(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_HTML_UTF_16LE")) {
            d = docFlavorAssigner.forMimeTypeUTF16LE(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_HTML_UTF_16BE")) {
            d = docFlavorAssigner.forMimeTypeUTF16BE(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_HTML_UTF_8")) {
            d = docFlavorAssigner.forMimeTypeUTF8(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_PLAIN_HOST")) {
            d = docFlavorAssigner.forMimeTypeHOST(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_PLAIN_US_ASCII")) {
            d = docFlavorAssigner.forMimeTypeUSASCII(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_PLAIN_UTF_16")) {
            d = docFlavorAssigner.forMimeTypeUTF16(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_PLAIN_UTF_16LE")) {
            d = docFlavorAssigner.forMimeTypeUTF16LE(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_PLAIN_UTF_16BE")) {
            d = docFlavorAssigner.forMimeTypeUTF16BE(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_PLAIN_UTF_8")) {
            d = docFlavorAssigner.forMimeTypeUTF8(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_HTML")) {
            d = docFlavorAssigner.forMimeTypeBasic(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_PLAIN")) {
            d = docFlavorAssigner.forMimeTypeBasic(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("PAGEABLE")) {
            d = docFlavorAssigner.forMimeTypePAGEABLE(flavor);
        } else if (mimeType.equalsIgnoreCase("PRINTABLE")) {
            d = docFlavorAssigner.forMimeTypePRINTABLE(flavor);
        } else if (mimeType.equalsIgnoreCase("RENDERABLE_IMAGE")) {
            d = docFlavorAssigner.forMimeTypeRENDERABLEIMAGE(flavor);
        }
        return d;
    }

    /**
     * Resolves a media size name (e.g. "iso_a4", "na-letter") into a
     * {@link MediaSizeName}, dispatching by prefix to the matching
     * {@code MediaSizeAssigner} lookup.
     *
     * NOTE(review): only the "iso" prefix check is case-insensitive; "jis"
     * and "na" are matched case-sensitively — confirm whether that asymmetry
     * is intentional before changing it.
     *
     * @param size the configured media size name, may be null
     * @return the resolved media size, defaulting to NA letter when null
     */
    private MediaSizeName assignMediaSize(String size) {
        MediaSizeAssigner mediaSizeAssigner = new MediaSizeAssigner();
        MediaSizeName answer;
        if (size == null) {
            // default to NA letter if no size configured
            answer = MediaSizeName.NA_LETTER;
        } else if (size.toLowerCase().startsWith("iso")) {
            answer = mediaSizeAssigner.selectMediaSizeNameISO(size);
        } else if (size.startsWith("jis")) {
            answer = mediaSizeAssigner.selectMediaSizeNameJIS(size);
        } else if (size.startsWith("na")) {
            answer = mediaSizeAssigner.selectMediaSizeNameNA(size);
        } else {
            answer = mediaSizeAssigner.selectMediaSizeNameOther(size);
        }
        return answer;
    }

    /**
     * Maps the configured sides name onto the corresponding
     * {@link Sides} constant (case-insensitive). Unknown or null values fall
     * back to one-sided printing.
     *
     * @param sidesString the configured sides option, may be null
     * @return the matching {@link Sides} constant, never null
     */
    public Sides assignSides(String sidesString) {
        Sides answer;
        if (sidesString == null) {
            // default to one side if no sides configured
            answer = Sides.ONE_SIDED;
        } else if (sidesString.equalsIgnoreCase("one-sided")) {
            answer = Sides.ONE_SIDED;
        } else if (sidesString.equalsIgnoreCase("duplex")) {
            answer = Sides.DUPLEX;
        } else if (sidesString.equalsIgnoreCase("tumble")) {
            answer = Sides.TUMBLE;
        } else if (sidesString.equalsIgnoreCase("two-sided-short-edge")) {
            answer = Sides.TWO_SIDED_SHORT_EDGE;
        } else if (sidesString.equalsIgnoreCase("two-sided-long-edge")) {
            answer = Sides.TWO_SIDED_LONG_EDGE;
        } else {
            answer = Sides.ONE_SIDED;
        }
        return answer;
    }

    /**
     * Maps the configured orientation name onto the corresponding
     * {@link OrientationRequested} constant (case-insensitive). Unknown or
     * null values fall back to portrait.
     *
     * @param orientation the configured orientation option, may be null
     * @return the matching {@link OrientationRequested} constant, never null
     */
    public OrientationRequested assignOrientation(final String orientation) {
        OrientationRequested answer;
        if (orientation == null) {
            // default to portrait
            answer = OrientationRequested.PORTRAIT;
        } else if (orientation.equalsIgnoreCase("portrait")) {
            answer = OrientationRequested.PORTRAIT;
        } else if (orientation.equalsIgnoreCase("landscape")) {
            answer = OrientationRequested.LANDSCAPE;
        } else if (orientation.equalsIgnoreCase("reverse-portrait")) {
            answer = OrientationRequested.REVERSE_PORTRAIT;
        } else if (orientation.equalsIgnoreCase("reverse-landscape")) {
            answer = OrientationRequested.REVERSE_LANDSCAPE;
        } else {
            answer = OrientationRequested.PORTRAIT;
        }
        return answer;
    }

    public URI getUri() {
        return uri;
    }

    public void setUri(URI uri) {
        this.uri = uri;
    }

    public String getHostname() {
        return hostname;
    }

    /**
     * Hostname of the printer
     */
    public void setHostname(String hostname) {
        this.hostname = hostname;
    }

    public int getPort() {
        return port;
    }

    /**
     * Port number of the printer
     */
    public void setPort(int port) {
        this.port = port;
    }

    public String getPrintername() {
        return printername;
    }

    /**
     * Name of the printer
     */
    public void setPrintername(String printername) {
        this.printername = printername;
    }

    public int getCopies() {
        return copies;
    }

    /**
     * Number of copies to print
     */
    public void setCopies(int copies) {
        this.copies = copies;
    }

    public String getFlavor() {
        return flavor;
    }

    /**
     * Sets DocFlavor to use.
     */
    public void setFlavor(String flavor) {
        this.flavor = flavor;
    }

    public DocFlavor getDocFlavor() {
        return docFlavor;
    }

    /**
     * Sets DocFlavor to use.
     */
    public void setDocFlavor(DocFlavor docFlavor) {
        this.docFlavor = docFlavor;
    }

    public String getMediaSize() {
        return mediaSize;
    }

    /**
     * Sets the stationary as defined by enumeration names in the javax.print.attribute.standard.MediaSizeName API.
     * The default setting is to use North American Letter sized stationary.
     * The value's case is ignored, e.g. values of iso_a4 and ISO_A4 may be used.
     */
    public void setMediaSize(String mediaSize) {
        this.mediaSize = mediaSize;
    }

    public String getSides() {
        return sides;
    }

    /**
     * Sets one sided or two sided printing based on the javax.print.attribute.standard.Sides API
     */
    public void setSides(String sides) {
        this.sides = sides;
    }

    public MediaSizeName getMediaSizeName() {
        return mediaSizeName;
    }

    public void setMediaSizeName(MediaSizeName mediaSizeName) {
        this.mediaSizeName = mediaSizeName;
    }

    public Sides getInternalSides() {
        return internalSides;
    }

    public void setInternalSides(Sides internalSides) {
        this.internalSides = internalSides;
    }

    public OrientationRequested getInternalOrientation() {
        return internalOrientation;
    }

    public void setInternalOrientation(OrientationRequested internalOrientation) {
        this.internalOrientation = internalOrientation;
    }

    public String getOrientation() {
        return orientation;
    }

    /**
     * Sets the page orientation.
     */
    public void setOrientation(String orientation) {
        this.orientation = orientation;
    }

    public String getMimeType() {
        return mimeType;
    }

    /**
     * Sets mimeTypes supported by the javax.print.DocFlavor API
     */
    public void setMimeType(String mimeType) {
        this.mimeType = mimeType;
    }

    public boolean isSendToPrinter() {
        return sendToPrinter;
    }

    /**
     * Setting this option to false prevents sending of the print data to the printer
     */
    public void setSendToPrinter(boolean sendToPrinter) {
        this.sendToPrinter = sendToPrinter;
    }

    public String getMediaTray() {
        return mediaTray;
    }

    /**
     * Sets MediaTray supported by the javax.print.DocFlavor API, for example upper,middle etc.
     */
    public void setMediaTray(String mediaTray) {
        this.mediaTray = mediaTray;
    }

    public String getPrinterPrefix() {
        return printerPrefix;
    }

    /**
     * Sets the prefix name of the printer, it is useful when the printer name does not start with //hostname/printer
     */
    public void setPrinterPrefix(String printerPrefix) {
        this.printerPrefix = printerPrefix;
    }
}
| |
package org.basex.gui.view.explore;
import static org.basex.core.Text.*;
import java.awt.*;
import java.awt.event.*;
import javax.swing.text.*;
import org.basex.core.cmd.*;
import org.basex.data.*;
import org.basex.gui.*;
import org.basex.gui.layout.*;
import org.basex.index.name.*;
import org.basex.index.stats.*;
import org.basex.util.*;
import org.basex.util.list.*;
/**
 * This view provides standard GUI components to browse the currently opened database.
 *
 * <p>The panel lays out pairs of components: even slots hold a name combo box
 * (element/attribute names), the following odd slot holds the matching value
 * editor (text field, combo box, slider, or an empty label). Several methods
 * below rely on this even/odd pairing.</p>
 *
 * @author BaseX Team 2005-15, BSD License
 * @author Christian Gruen
 * @author Bastian Lemke
 */
final class ExploreArea extends BaseXPanel implements ActionListener {
    /** Component width. */
    private static final int COMPW = 150;
    /** Exact search pattern. */
    private static final String PATEX = "[% = \"%\"]";
    /** Substring search pattern. */
    private static final String PATSUB = "[% contains text \"%\"]";
    /** Numeric search pattern. */
    private static final String PATNUM = "[% >= % and % <= %]";
    /** Simple search pattern. */
    private static final String PATSIMPLE = "[%]";
    /** Main panel. */
    private final ExploreView main;
    /** Main panel. */
    private final BaseXBack panel;
    /** Query field. */
    private final BaseXTextField all;
    /** Last Query. */
    private String last = "";

    /**
     * Default constructor.
     * @param m main panel
     */
    ExploreArea(final ExploreView m) {
        super(m.gui);
        main = m;
        layout(new BorderLayout(0, 5)).setOpaque(false);
        all = new BaseXTextField(gui);
        all.addKeyListener(main);
        // re-run the query on every keystroke in the free-text search field
        all.addKeyListener(new KeyAdapter() {
            @Override
            public void keyReleased(final KeyEvent e) {
                query(false);
            }
        });
        add(all, BorderLayout.NORTH);
        panel = new BaseXBack(false).layout(new TableLayout(32, 2, 10, 5));
        add(panel, BorderLayout.CENTER);
    }

    /**
     * Initializes the panel.
     */
    void init() {
        panel.removeAll();
        panel.revalidate();
        panel.repaint();
    }

    @Override
    public void paintComponent(final Graphics g) {
        super.paintComponent(g);
        final Data data = gui.context.data();
        // only (re)build the key combos when visible, a database is open and
        // the panel has not been populated yet
        if(!main.visible() || data == null || panel.getComponentCount() != 0) return;
        addKeys(gui.context.data());
        panel.revalidate();
        panel.repaint();
    }

    /**
     * Adds a text field.
     * @param pos position
     */
    private void addInput(final int pos) {
        final BaseXTextField txt = new BaseXTextField(gui);
        BaseXLayout.setWidth(txt, COMPW);
        txt.setPreferredSize(new Dimension(getPreferredSize().width, txt.getFont().getSize() + 11));
        txt.setMargin(new Insets(0, 0, 0, 10));
        txt.addKeyListener(new KeyAdapter() {
            @Override
            public void keyReleased(final KeyEvent e) {
                query(false);
            }
        });
        txt.addKeyListener(main);
        panel.add(txt, pos);
    }

    /**
     * Adds a category combobox.
     * @param data data reference
     */
    private void addKeys(final Data data) {
        final TokenList tl = new TokenList();
        final int cs = panel.getComponentCount();
        // collect the element names already selected in the existing combos
        // (attributes, prefixed with '@', are skipped)
        for(int c = 0; c < cs; c += 2) {
            final BaseXCombo combo = (BaseXCombo) panel.getComponent(c);
            if(combo.getSelectedIndex() == 0) continue;
            final String elem = combo.getSelectedItem();
            if(!elem.startsWith("@")) tl.add(Token.token(elem));
        }
        final String[] entries = entries(data.paths.desc(tl, true, false));
        final BaseXCombo cm = new BaseXCombo(gui, entries);
        cm.addActionListener(this);
        cm.addKeyListener(main);
        // a single entry means there is nothing selectable below this level
        if(entries.length == 1) cm.setEnabled(false);
        panel.add(cm);
        // placeholder for the value editor that pairs with this combo
        panel.add(new BaseXLabel(""));
    }

    /**
     * Adds a combobox.
     * @param values combobox values
     * @param pos position
     */
    private void addCombo(final String[] values, final int pos) {
        final BaseXCombo cm = new BaseXCombo(gui, values);
        BaseXLayout.setWidth(cm, COMPW);
        cm.addActionListener(this);
        cm.addKeyListener(main);
        panel.add(cm, pos);
    }

    /**
     * Adds a range slider.
     * @param min minimum value
     * @param max maximum value
     * @param pos position
     * @param itr integer flag
     */
    private void addSlider(final double min, final double max, final int pos, final boolean itr) {
        final BaseXDSlider sl = new BaseXDSlider(min, max, gui, this);
        BaseXLayout.setWidth(sl, COMPW + BaseXDSlider.LABELW);
        sl.itr = itr;
        sl.addKeyListener(main);
        panel.add(sl, pos);
    }

    @Override
    public void actionPerformed(final ActionEvent e) {
        if(e != null) {
            final Object source = e.getSource();
            // find modified component
            int cp = 0;
            final int cs = panel.getComponentCount();
            for(int c = 0; c < cs; ++c) if(panel.getComponent(c) == source) cp = c;
            // even slots hold the name combos; odd slots hold value editors
            if((cp & 1) == 0) {
                // combo box with element/attribute names
                final BaseXCombo combo = (BaseXCombo) source;
                // drop the old value editor paired with this combo
                panel.remove(cp + 1);
                final Data data = gui.context.data();
                final boolean selected = combo.getSelectedIndex() != 0;
                if(selected) {
                    final String item = combo.getSelectedItem();
                    final boolean att = item.startsWith("@");
                    final Names names = att ? data.attrNames : data.elemNames;
                    final byte[] key = Token.token(att ? item.substring(1) : item);
                    final Stats stat = names.stat(names.id(key));
                    // choose a value editor that matches the key's statistics
                    switch(stat.type) {
                        case INTEGER:
                            addSlider(stat.min, stat.max, cp + 1, true);
                            break;
                        case DOUBLE:
                            addSlider(stat.min, stat.max, cp + 1, false);
                            break;
                        case CATEGORY:
                            addCombo(entries(new TokenList(stat.cats)), cp + 1);
                            break;
                        case TEXT:
                            addInput(cp + 1);
                            break;
                        case NONE:
                            panel.add(new BaseXLabel(""), cp + 1);
                            break;
                    }
                } else {
                    panel.add(new BaseXLabel(""), cp + 1);
                }
                // discard all deeper name/value pairs below the changed combo
                while(cp + 2 < panel.getComponentCount()) {
                    panel.remove(cp + 2);
                    panel.remove(cp + 2);
                }
                if(selected) addKeys(data);
                panel.revalidate();
                panel.repaint();
            }
        }
        query(false);
    }

    /**
     * Runs a query.
     * @param force force the execution of a new query
     */
    private void query(final boolean force) {
        final TokenBuilder tb = new TokenBuilder();
        final Data data = gui.context.data();
        final int cs = panel.getComponentCount();
        for(int c = 0; c < cs; c += 2) {
            final BaseXCombo com = (BaseXCombo) panel.getComponent(c);
            final int k = com.getSelectedIndex();
            if(k <= 0) continue;
            // replace any namespace prefix with a '*:' wildcard, keeping an
            // optional leading '@'
            String key = com.getSelectedItem().replaceAll("^(@?)(.*):", "$1*:");
            final boolean attr = key.startsWith("@");
            final Component comp = panel.getComponent(c + 1);
            String pattern = "";
            String val1 = null;
            String val2 = null;
            if(comp instanceof BaseXTextField) {
                val1 = ((JTextComponent) comp).getText();
                if(!val1.isEmpty()) {
                    if(val1.startsWith("\"")) {
                        // quoted input: strip the quotes, match exactly
                        val1 = val1.replaceAll("\"", "");
                        pattern = PATEX;
                    } else {
                        // substring search only if the relevant index exists
                        pattern = attr && data.meta.attrindex ||
                            !attr && data.meta.textindex ? PATSUB : PATEX;
                    }
                }
            } else if(comp instanceof BaseXCombo) {
                final BaseXCombo combo = (BaseXCombo) comp;
                if(combo.getSelectedIndex() != 0) {
                    val1 = combo.getSelectedItem();
                    pattern = PATEX;
                }
            } else if(comp instanceof BaseXDSlider) {
                final BaseXDSlider slider = (BaseXDSlider) comp;
                // only add a range predicate if the slider was narrowed
                if(slider.min != slider.totMin || slider.max != slider.totMax) {
                    final double m = slider.min;
                    final double n = slider.max;
                    // print whole numbers without a fractional part
                    val1 = (long) m == m ? Long.toString((long) m) : Double.toString(m);
                    val2 = (long) n == n ? Long.toString((long) n) : Double.toString(n);
                    pattern = PATNUM;
                }
            }
            if(attr) {
                key = "descendant-or-self::node()/" + key;
                if(tb.isEmpty()) tb.add("//*");
                if(pattern.isEmpty()) pattern = PATSIMPLE;
            } else {
                tb.add("//" + key);
                key = "text()";
            }
            // addExt substitutes the '%' placeholders of the chosen pattern
            tb.addExt(pattern, key, val1, key, val2);
        }
        String qu = tb.toString();
        final boolean root = gui.context.root();
        final boolean rt = gui.gopts.get(GUIOptions.FILTERRT);
        if(!qu.isEmpty() && !rt && !root) qu = '.' + qu;
        // combine the free-text search field with the generated predicates
        String simple = all.getText().trim();
        if(!simple.isEmpty()) {
            simple = Find.find(simple, gui.context, rt);
            qu = qu.isEmpty() ? simple : simple + " | " + qu;
        }
        if(qu.isEmpty()) qu = rt || root ? "/" : ".";
        // skip identical consecutive queries unless forced
        if(!force && last.equals(qu)) return;
        last = qu;
        gui.xquery(qu, false);
    }

    /**
     * Returns the combo box selections and the keys of the specified set.
     * @param names keys
     * @return key array
     */
    private static String[] entries(final TokenList names) {
        final StringList entries = new StringList();
        // first entry is a summary line ("n entries"); it stays at index 0
        entries.add(Util.info(ENTRIES, names.size()));
        for(final byte[] k : names) entries.add(Token.string(k));
        return entries.sort(true, true, 1).finish();
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.string;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.IndexableFieldType;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.Mapper.BuilderContext;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper.Builder;
import org.elasticsearch.index.mapper.core.StringFieldMapper.StringFieldType;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.junit.Before;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;
import static java.util.Collections.emptyMap;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
/**
*/
public class SimpleStringMappingTests extends ESSingleNodeTestCase {
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
    // InternalSettingsPlugin is required so the test index can be created
    // with an explicit (older) index version setting.
    return pluginList(InternalSettingsPlugin.class);
}
// Shared fixtures, initialized per test in before().
IndexService indexService;
DocumentMapperParser parser;

/** Creates a 2.x-versioned test index; the legacy "string" field type is rejected from 5.0 on. */
@Before
public void before() {
    indexService = createIndex("test",
        // we need 2.x since string is deprecated in 5.0
        Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build());
    parser = indexService.mapperService().documentMapperParser();
}
/**
 * Verifies {@code ignore_above}: string values longer than the configured
 * limit (5 characters here) are silently dropped instead of indexed.
 */
public void testLimit() throws Exception {
    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("properties").startObject("field").field("type", "string").field("ignore_above", 5).endObject().endObject()
        .endObject().endObject().string();
    DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
    // 4 characters: below the limit, so the field is indexed.
    ParsedDocument doc = parseSingleField(defaultMapper, "1234");
    assertThat(doc.rootDoc().getField("field"), notNullValue());
    // 5 characters: exactly at the limit, still indexed.
    doc = parseSingleField(defaultMapper, "12345");
    assertThat(doc.rootDoc().getField("field"), notNullValue());
    // 6 characters: over the limit, the value is dropped.
    doc = parseSingleField(defaultMapper, "123456");
    assertThat(doc.rootDoc().getField("field"), nullValue());
}

/** Parses a single-field document {@code {"field": value}} with the given mapper. */
private ParsedDocument parseSingleField(DocumentMapper mapper, String value) throws IOException {
    return mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
        .startObject()
        .field("field", value)
        .endObject()
        .bytes());
}
/**
 * Asserts the Lucene field-type defaults expected for an analyzed string
 * field: norms on, full postings (docs/freqs/positions), no term vectors.
 */
private void assertDefaultAnalyzedFieldType(IndexableFieldType fieldType) {
    assertThat(fieldType.omitNorms(), equalTo(false));
    assertThat(fieldType.indexOptions(), equalTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS));
    assertThat(fieldType.storeTermVectors(), equalTo(false));
    assertThat(fieldType.storeTermVectorOffsets(), equalTo(false));
    assertThat(fieldType.storeTermVectorPositions(), equalTo(false));
    assertThat(fieldType.storeTermVectorPayloads(), equalTo(false));
}
/**
 * Asserts that two Lucene field types agree on the indexing-relevant
 * properties. Also compares the individual term-vector flags, which the
 * original version omitted even though the sibling
 * {@code assertDefaultAnalyzedFieldType} checks all four of them.
 */
private void assertEquals(IndexableFieldType ft1, IndexableFieldType ft2) {
    assertEquals(ft1.tokenized(), ft2.tokenized());
    assertEquals(ft1.omitNorms(), ft2.omitNorms());
    assertEquals(ft1.indexOptions(), ft2.indexOptions());
    assertEquals(ft1.storeTermVectors(), ft2.storeTermVectors());
    assertEquals(ft1.storeTermVectorOffsets(), ft2.storeTermVectorOffsets());
    assertEquals(ft1.storeTermVectorPositions(), ft2.storeTermVectorPositions());
    assertEquals(ft1.storeTermVectorPayloads(), ft2.storeTermVectorPayloads());
    assertEquals(ft1.docValuesType(), ft2.docValuesType());
}
/**
 * Serializes the given mapper back to JSON, re-parses it, indexes a sample
 * document, and asserts the resulting field type matches {@code expected} —
 * i.e. mapping serialization round-trips without losing field-type settings.
 */
private void assertParseIdemPotent(IndexableFieldType expected, DocumentMapper mapper) throws Exception {
    String mapping = mapper.toXContent(XContentFactory.jsonBuilder().startObject(), new ToXContent.MapParams(emptyMap())).endObject().string();
    mapper = parser.parse("type", new CompressedXContent(mapping));
    ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
        .startObject()
        .field("field", "2345")
        .endObject()
        .bytes());
    assertEquals(expected, doc.rootDoc().getField("field").fieldType());
}
/**
 * A plain "string" field with no further options must get the analyzed
 * defaults, and those defaults must survive a serialization round trip.
 */
public void testDefaultsForAnalyzed() throws Exception {
    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("properties").startObject("field").field("type", "string").endObject().endObject()
        .endObject().endObject().string();
    DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
    ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
        .startObject()
        .field("field", "1234")
        .endObject()
        .bytes());
    IndexableFieldType fieldType = doc.rootDoc().getField("field").fieldType();
    assertDefaultAnalyzedFieldType(fieldType);
    assertParseIdemPotent(fieldType, defaultMapper);
}
/**
 * Checks the defaults of a {@code not_analyzed} string field (norms omitted,
 * docs-only postings, no term vectors), then verifies that explicit
 * {@code norms}/{@code index_options} settings and the deprecated
 * {@code omit_norms} option override those defaults. Each variant must also
 * survive a mapping serialization round trip.
 */
public void testDefaultsForNotAnalyzed() throws Exception {
    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("properties").startObject("field").field("type", "string").field("index", "not_analyzed").endObject().endObject()
        .endObject().endObject().string();
    DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
    ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
        .startObject()
        .field("field", "1234")
        .endObject()
        .bytes());
    IndexableFieldType fieldType = doc.rootDoc().getField("field").fieldType();
    // not_analyzed defaults: norms off, postings limited to doc IDs
    assertThat(fieldType.omitNorms(), equalTo(true));
    assertThat(fieldType.indexOptions(), equalTo(IndexOptions.DOCS));
    assertThat(fieldType.storeTermVectors(), equalTo(false));
    assertThat(fieldType.storeTermVectorOffsets(), equalTo(false));
    assertThat(fieldType.storeTermVectorPositions(), equalTo(false));
    assertThat(fieldType.storeTermVectorPayloads(), equalTo(false));
    assertParseIdemPotent(fieldType, defaultMapper);
    // now test it explicitly set
    mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("properties").startObject("field").field("type", "string").field("index", "not_analyzed").startObject("norms").field("enabled", true).endObject().field("index_options", "freqs").endObject().endObject()
        .endObject().endObject().string();
    defaultMapper = parser.parse("type", new CompressedXContent(mapping));
    doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
        .startObject()
        .field("field", "1234")
        .endObject()
        .bytes());
    fieldType = doc.rootDoc().getField("field").fieldType();
    // explicit settings win over the not_analyzed defaults
    assertThat(fieldType.omitNorms(), equalTo(false));
    assertThat(fieldType.indexOptions(), equalTo(IndexOptions.DOCS_AND_FREQS));
    assertThat(fieldType.storeTermVectors(), equalTo(false));
    assertThat(fieldType.storeTermVectorOffsets(), equalTo(false));
    assertThat(fieldType.storeTermVectorPositions(), equalTo(false));
    assertThat(fieldType.storeTermVectorPayloads(), equalTo(false));
    assertParseIdemPotent(fieldType, defaultMapper);
    // also test the deprecated omit_norms
    mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("properties").startObject("field").field("type", "string").field("index", "not_analyzed").field("omit_norms", false).endObject().endObject()
        .endObject().endObject().string();
    defaultMapper = parser.parse("type", new CompressedXContent(mapping));
    doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
        .startObject()
        .field("field", "1234")
        .endObject()
        .bytes());
    fieldType = doc.rootDoc().getField("field").fieldType();
    assertThat(fieldType.omitNorms(), equalTo(false));
    assertParseIdemPotent(fieldType, defaultMapper);
}
/**
 * Verifies when {@code search_quote_analyzer} is (not) emitted in the
 * serialized mapping: it must be omitted whenever it is absent or equal to
 * the search analyzer (fields 1-4 below), and emitted only when it actually
 * differs from the search analyzer.
 */
public void testSearchQuoteAnalyzerSerialization() throws Exception {
    // Cases where search_quote_analyzer should not be added to the mapping.
    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("properties")
        .startObject("field1")
        .field("type", "string")
        .field("position_increment_gap", 1000)
        .endObject()
        .startObject("field2")
        .field("type", "string")
        .field("position_increment_gap", 1000)
        .field("analyzer", "standard")
        .endObject()
        .startObject("field3")
        .field("type", "string")
        .field("position_increment_gap", 1000)
        .field("analyzer", "standard")
        .field("search_analyzer", "simple")
        .endObject()
        .startObject("field4")
        .field("type", "string")
        .field("position_increment_gap", 1000)
        .field("analyzer", "standard")
        .field("search_analyzer", "simple")
        // same as the search analyzer, so it must not be serialized
        .field("search_quote_analyzer", "simple")
        .endObject()
        .endObject()
        .endObject().endObject().string();
    DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
    for (String fieldName : Arrays.asList("field1", "field2", "field3", "field4")) {
        Map<String, Object> serializedMap = getSerializedMap(fieldName, mapper);
        assertFalse(fieldName, serializedMap.containsKey("search_quote_analyzer"));
    }
    // Cases where search_quote_analyzer should be present.
    mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("properties")
        .startObject("field")
        .field("type", "string")
        .field("position_increment_gap", 1000)
        .field("analyzer", "standard")
        .field("search_analyzer", "standard")
        // differs from the search analyzer, so it must be serialized
        .field("search_quote_analyzer", "simple")
        .endObject()
        .endObject()
        .endObject().endObject().string();
    mapper = parser.parse("type", new CompressedXContent(mapping));
    Map<String, Object> serializedMap = getSerializedMap("field", mapper);
    assertEquals(serializedMap.get("search_quote_analyzer"), "simple");
}
/**
 * Checks that an explicitly configured {@code search_analyzer} round-trips
 * unchanged through mapping serialization, including the special case where
 * the index analyzer is the "default" analyzer.
 */
public void testSearchAnalyzerSerialization() throws IOException {
    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties")
                .startObject("field")
                    .field("type", "string")
                    .field("analyzer", "standard")
                    .field("search_analyzer", "keyword")
                .endObject()
            .endObject().endObject().endObject().string();
    DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
    // The serialized mapping must match the input byte-for-byte.
    assertEquals(mapping, mapper.mappingSource().toString());
    // special case: default index analyzer
    mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties")
                .startObject("field")
                    .field("type", "string")
                    .field("analyzer", "default")
                    .field("search_analyzer", "keyword")
                .endObject()
            .endObject().endObject().endObject().string();
    mapper = parser.parse("type", new CompressedXContent(mapping));
    assertEquals(mapping, mapper.mappingSource().toString());
}
/**
 * Serializes the mapper for {@code fieldName} to JSON and parses it back,
 * returning the map of settings that were actually serialized for that field.
 */
private Map<String, Object> getSerializedMap(String fieldName, DocumentMapper mapper) throws Exception {
    FieldMapper target = mapper.mappers().smartNameFieldMapper(fieldName);
    // Render just this field's mapper as a standalone JSON object.
    XContentBuilder out = JsonXContent.contentBuilder().startObject();
    target.toXContent(out, ToXContent.EMPTY_PARAMS).endObject();
    out.close();
    // Parse the rendered bytes back into a generic map.
    Map<String, Object> parsed;
    try (XContentParser jsonParser = JsonXContent.jsonXContent.createParser(out.bytes())) {
        parsed = jsonParser.map();
    }
    @SuppressWarnings("unchecked")
    Map<String, Object> fieldEntry = (Map<String, Object>) parsed.get(fieldName);
    return fieldEntry;
}
/**
 * Exercises every {@code term_vector} option and asserts exactly which of
 * vectors/offsets/positions/payloads each option enables on the indexed field.
 */
public void testTermVectors() throws Exception {
    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties")
                .startObject("field1")
                    .field("type", "string")
                    .field("term_vector", "no")
                .endObject()
                .startObject("field2")
                    .field("type", "string")
                    .field("term_vector", "yes")
                .endObject()
                .startObject("field3")
                    .field("type", "string")
                    .field("term_vector", "with_offsets")
                .endObject()
                .startObject("field4")
                    .field("type", "string")
                    .field("term_vector", "with_positions")
                .endObject()
                .startObject("field5")
                    .field("type", "string")
                    .field("term_vector", "with_positions_offsets")
                .endObject()
                .startObject("field6")
                    .field("type", "string")
                    .field("term_vector", "with_positions_offsets_payloads")
                .endObject()
            .endObject()
            .endObject().endObject().string();
    DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
    ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
            .startObject()
                .field("field1", "1234")
                .field("field2", "1234")
                .field("field3", "1234")
                .field("field4", "1234")
                .field("field5", "1234")
                .field("field6", "1234")
            .endObject()
            .bytes());
    // "no": nothing stored at all.
    assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectors(), equalTo(false));
    assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorOffsets(), equalTo(false));
    assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorPositions(), equalTo(false));
    assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorPayloads(), equalTo(false));
    // "yes": vectors only.
    assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectors(), equalTo(true));
    assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorOffsets(), equalTo(false));
    assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorPositions(), equalTo(false));
    assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorPayloads(), equalTo(false));
    // "with_offsets": vectors + offsets.
    assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectors(), equalTo(true));
    assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorOffsets(), equalTo(true));
    assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorPositions(), equalTo(false));
    assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorPayloads(), equalTo(false));
    // "with_positions": vectors + positions.
    assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectors(), equalTo(true));
    assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorOffsets(), equalTo(false));
    assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorPositions(), equalTo(true));
    assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorPayloads(), equalTo(false));
    // "with_positions_offsets": vectors + offsets + positions.
    assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectors(), equalTo(true));
    assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorOffsets(), equalTo(true));
    assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorPositions(), equalTo(true));
    assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorPayloads(), equalTo(false));
    // "with_positions_offsets_payloads": everything.
    assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectors(), equalTo(true));
    assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorOffsets(), equalTo(true));
    assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPositions(), equalTo(true));
    assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPayloads(), equalTo(true));
}
/**
 * Doc values are only supported on non-analyzed string fields: the builder
 * must reject docValues(true) on an analyzed field, and parsing must produce
 * SORTED_SET doc values only for the not_analyzed field (str2).
 */
public void testDocValues() throws Exception {
    // doc values only work on non-analyzed content
    final BuilderContext ctx = new BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1));
    try {
        // analyzed (tokenized) by default, so requesting doc values must fail
        new StringFieldMapper.Builder("anything").docValues(true).build(ctx);
        fail();
    } catch (Exception e) { /* OK */ }
    assertFalse(new Builder("anything").index(false).build(ctx).fieldType().hasDocValues());
    assertTrue(new Builder("anything").index(true).tokenized(false).build(ctx).fieldType().hasDocValues());
    assertFalse(new Builder("anything").index(true).tokenized(true).build(ctx).fieldType().hasDocValues());
    assertFalse(new Builder("anything").index(false).tokenized(false).docValues(false).build(ctx).fieldType().hasDocValues());
    assertTrue(new Builder("anything").index(false).docValues(true).build(ctx).fieldType().hasDocValues());
    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties")
                .startObject("str1")
                    .field("type", "string")
                    .field("index", "no")
                .endObject()
                .startObject("str2")
                    .field("type", "string")
                    .field("index", "not_analyzed")
                .endObject()
                .startObject("str3")
                    .field("type", "string")
                    .field("index", "analyzed")
                .endObject()
                .startObject("str4")
                    .field("type", "string")
                    .field("index", "not_analyzed")
                    .field("doc_values", false)
                .endObject()
                .startObject("str5")
                    .field("type", "string")
                    .field("index", "no")
                    .field("doc_values", false)
                .endObject()
            .endObject()
            .endObject().endObject().string();
    DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
    ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
            .startObject()
                .field("str1", "1234")
                .field("str2", "1234")
                .field("str3", "1234")
                .field("str4", "1234")
                .field("str5", "1234")
            .endObject()
            .bytes());
    final Document doc = parsedDoc.rootDoc();
    // Only the not_analyzed field without doc_values:false gets doc values.
    assertEquals(DocValuesType.NONE, docValuesType(doc, "str1"));
    assertEquals(DocValuesType.SORTED_SET, docValuesType(doc, "str2"));
    assertEquals(DocValuesType.NONE, docValuesType(doc, "str3"));
    assertEquals(DocValuesType.NONE, docValuesType(doc, "str4"));
    assertEquals(DocValuesType.NONE, docValuesType(doc, "str5"));
}
/**
 * Same as {@link #testDocValues()} but on an index created with version 2.2.0:
 * on old indexes, {@code doc_values: true} on a non-indexed field (str5) is
 * honored and yields SORTED_SET doc values.
 */
public void testBwCompatDocValues() throws Exception {
    // Recreate the index with an old creation version to trigger BWC behavior.
    Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_2_0).build();
    indexService = createIndex("test_old", oldIndexSettings);
    parser = indexService.mapperService().documentMapperParser();
    // doc values only work on non-analyzed content
    final BuilderContext ctx = new BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1));
    try {
        new StringFieldMapper.Builder("anything").docValues(true).build(ctx);
        fail();
    } catch (Exception e) { /* OK */ }
    assertFalse(new Builder("anything").index(false).build(ctx).fieldType().hasDocValues());
    assertTrue(new Builder("anything").index(true).tokenized(false).build(ctx).fieldType().hasDocValues());
    assertFalse(new Builder("anything").index(true).tokenized(true).build(ctx).fieldType().hasDocValues());
    assertFalse(new Builder("anything").index(false).tokenized(false).docValues(false).build(ctx).fieldType().hasDocValues());
    assertTrue(new Builder("anything").index(false).docValues(true).build(ctx).fieldType().hasDocValues());
    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties")
                .startObject("str1")
                    .field("type", "string")
                    .field("index", "no")
                .endObject()
                .startObject("str2")
                    .field("type", "string")
                    .field("index", "not_analyzed")
                .endObject()
                .startObject("str3")
                    .field("type", "string")
                    .field("index", "analyzed")
                .endObject()
                .startObject("str4")
                    .field("type", "string")
                    .field("index", "not_analyzed")
                    .field("doc_values", false)
                .endObject()
                .startObject("str5")
                    .field("type", "string")
                    .field("index", "no")
                    .field("doc_values", true)
                .endObject()
            .endObject()
            .endObject().endObject().string();
    DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
    ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
            .startObject()
                .field("str1", "1234")
                .field("str2", "1234")
                .field("str3", "1234")
                .field("str4", "1234")
                .field("str5", "1234")
            .endObject()
            .bytes());
    final Document doc = parsedDoc.rootDoc();
    assertEquals(DocValuesType.NONE, docValuesType(doc, "str1"));
    assertEquals(DocValuesType.SORTED_SET, docValuesType(doc, "str2"));
    assertEquals(DocValuesType.NONE, docValuesType(doc, "str3"));
    assertEquals(DocValuesType.NONE, docValuesType(doc, "str4"));
    // BWC: doc_values on a non-indexed field is honored on the old index.
    assertEquals(DocValuesType.SORTED_SET, docValuesType(doc, "str5"));
}
// TODO: this function shouldn't be necessary. parsing should just add a single field that is indexed and dv
/**
 * Returns the doc-values type of the first field named {@code fieldName} in
 * {@code document} that has one, or {@link DocValuesType#NONE} if no such
 * field exists.
 */
public static DocValuesType docValuesType(Document document, String fieldName) {
    DocValuesType result = DocValuesType.NONE;
    for (IndexableField candidate : document.getFields(fieldName)) {
        DocValuesType dvType = candidate.fieldType().docValuesType();
        if (dvType != DocValuesType.NONE) {
            result = dvType;
            break;
        }
    }
    return result;
}
/**
 * Norms can be disabled through a mapping update, but re-enabling them once
 * disabled must be rejected with a "different [norms]" merge conflict.
 */
public void testDisableNorms() throws Exception {
    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("field").field("type", "string").endObject().endObject()
            .endObject().endObject().string();
    MapperService mapperService = indexService.mapperService();
    DocumentMapper defaultMapper = mapperService.merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false);
    ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
            .startObject()
                .field("field", "1234")
            .endObject()
            .bytes());
    IndexableFieldType fieldType = doc.rootDoc().getField("field").fieldType();
    // Default: norms are enabled.
    assertEquals(false, fieldType.omitNorms());
    // Update the mapping to disable norms — this direction is allowed.
    String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", false).endObject()
            .endObject().endObject().endObject().endObject().string();
    defaultMapper = mapperService.merge("type", new CompressedXContent(updatedMapping), MapperService.MergeReason.MAPPING_UPDATE, false);
    doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
            .startObject()
                .field("field", "1234")
            .endObject()
            .bytes());
    fieldType = doc.rootDoc().getField("field").fieldType();
    assertEquals(true, fieldType.omitNorms());
    // Attempting to re-enable norms must fail the merge.
    updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", true).endObject()
            .endObject().endObject().endObject().endObject().string();
    try {
        mapperService.merge("type", new CompressedXContent(updatedMapping), MapperService.MergeReason.MAPPING_UPDATE, false);
        fail();
    } catch (IllegalArgumentException e) {
        assertThat(e.getMessage(), containsString("different [norms]"));
    }
}
/**
 * Tests that the expected exception is thrown when a mapping for a newly
 * created index uses the removed {@code position_offset_gap} setting
 * (superseded by {@code position_increment_gap}).
 */
public void testPositionOffsetGapDeprecation() throws Exception {
    // test deprecation exceptions on newly created indexes
    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties")
                .startObject("field1")
                    .field("type", "string")
                    .field("position_increment_gap", 10)
                .endObject()
                .startObject("field2")
                    .field("type", "string")
                    .field("position_offset_gap", 50)
                    .field("analyzer", "standard")
                .endObject().endObject().endObject().endObject().string();
    try {
        parser.parse("type", new CompressedXContent(mapping));
        fail("Mapping definition should fail with the position_offset_gap setting");
    } catch (MapperParsingException e) {
        assertEquals(e.getMessage(), "Mapping definition for [field2] has unsupported parameters:  [position_offset_gap : 50]");
    }
}
/**
 * The legacy {@code fielddata.loading: eager_global_ordinals} setting is
 * rewritten on serialization to the flat {@code eager_global_ordinals: true}
 * field and enables eager global ordinals on the field type.
 */
public void testFielddataLoading() throws IOException {
    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("field")
                .field("type", "string")
                .startObject("fielddata")
                    .field("loading", "eager_global_ordinals")
                .endObject()
            .endObject().endObject()
            .endObject().endObject().string();
    DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
    String expectedMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("field")
                .field("type", "string")
                .field("eager_global_ordinals", true)
            .endObject().endObject()
            .endObject().endObject().string();
    assertEquals(expectedMapping, mapper.mappingSource().toString());
    assertTrue(mapper.mappers().getMapper("field").fieldType().eagerGlobalOrdinals());
}
/**
 * The legacy {@code fielddata.filter.frequency} settings are rewritten to
 * {@code fielddata_frequency_filter} on serialization (the regex filter is
 * dropped), and the min/max/segment-size values are applied to the field type.
 */
public void testFielddataFilter() throws IOException {
    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("field")
                .field("type", "string")
                .startObject("fielddata")
                    .startObject("filter")
                        .startObject("frequency")
                            .field("min", 2d)
                            .field("min_segment_size", 1000)
                        .endObject()
                        .startObject("regex")
                            .field("pattern", "^#.*")
                        .endObject()
                    .endObject()
                .endObject()
            .endObject().endObject()
            .endObject().endObject().string();
    DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
    // Expected serialized form: frequency filter only, under the new key.
    String expectedMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("field")
                .field("type", "string")
                .startObject("fielddata_frequency_filter")
                    .field("min", 2d)
                    .field("min_segment_size", 1000)
                .endObject()
            .endObject().endObject()
            .endObject().endObject().string();
    assertEquals(expectedMapping, mapper.mappingSource().toString());
    StringFieldType fieldType = (StringFieldType) mapper.mappers().getMapper("field").fieldType();
    assertThat(fieldType.fielddataMinFrequency(), equalTo(2d));
    // max was not configured, so it defaults to Integer.MAX_VALUE.
    assertThat(fieldType.fielddataMaxFrequency(), equalTo((double) Integer.MAX_VALUE));
    assertThat(fieldType.fielddataMinSegmentSize(), equalTo(1000));
}
/**
 * The legacy {@code fielddata.format: disabled} setting is rewritten to
 * {@code fielddata: false} on serialization, and building fielddata on such
 * a field throws an IllegalArgumentException.
 */
public void testDisabledFielddata() throws IOException {
    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("field")
                .field("type", "string")
                .startObject("fielddata")
                    .field("format", "disabled")
                .endObject()
            .endObject().endObject()
            .endObject().endObject().string();
    DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
    String expectedMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("field")
                .field("type", "string")
                .field("fielddata", false)
            .endObject().endObject()
            .endObject().endObject().string();
    assertEquals(expectedMapping, mapper.mappingSource().toString());
    IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
            () -> mapper.mappers().getMapper("field").fieldType().fielddataBuilder());
    assertThat(e.getMessage(), containsString("Fielddata is disabled"));
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jasper.compiler;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
import javax.el.ExpressionFactory;
import javax.servlet.jsp.tagext.TagLibraryInfo;
import org.apache.jasper.Constants;
import org.apache.jasper.JasperException;
/**
* A repository for various info about the translation unit under compilation.
*
* @author Kin-man Chung
*/
class PageInfo {
private Vector<String> imports;
private Map<String,Long> dependants;
private BeanRepository beanRepository;
private Set<String> varInfoNames;
private HashMap<String,TagLibraryInfo> taglibsMap;
private HashMap<String, String> jspPrefixMapper;
private HashMap<String, LinkedList<String>> xmlPrefixMapper;
private HashMap<String, Mark> nonCustomTagPrefixMap;
private String jspFile;
private String defaultLanguage = "java";
private String language;
private String defaultExtends = Constants.JSP_SERVLET_BASE;
private String xtends;
private String contentType = null;
private String session;
private boolean isSession = true;
private String bufferValue;
private int buffer = 8*1024; // XXX confirm
private String autoFlush;
private boolean isAutoFlush = true;
private String isThreadSafeValue;
private boolean isThreadSafe = true;
private String isErrorPageValue;
private boolean isErrorPage = false;
private String errorPage = null;
private String info;
private boolean scriptless = false;
private boolean scriptingInvalid = false;
private String isELIgnoredValue;
private boolean isELIgnored = false;
// JSP 2.1
private String deferredSyntaxAllowedAsLiteralValue;
private boolean deferredSyntaxAllowedAsLiteral = false;
private ExpressionFactory expressionFactory =
ExpressionFactory.newInstance();
private String trimDirectiveWhitespacesValue;
private boolean trimDirectiveWhitespaces = false;
private String omitXmlDecl = null;
private String doctypeName = null;
private String doctypePublic = null;
private String doctypeSystem = null;
private boolean isJspPrefixHijacked;
// Set of all element and attribute prefixes used in this translation unit
private HashSet<String> prefixes;
private boolean hasJspRoot = false;
private Vector<String> includePrelude;
private Vector<String> includeCoda;
private Vector<String> pluginDcls; // Id's for tagplugin declarations
// JSP 2.2
private boolean errorOnUndeclaredNamepsace = false;
private boolean isTagFile = false;
PageInfo(BeanRepository beanRepository, String jspFile, boolean isTagFile) {
this.isTagFile = isTagFile;
this.jspFile = jspFile;
this.beanRepository = beanRepository;
this.varInfoNames = new HashSet<String>();
this.taglibsMap = new HashMap<String, TagLibraryInfo>();
this.jspPrefixMapper = new HashMap<String, String>();
this.xmlPrefixMapper = new HashMap<String, LinkedList<String>>();
this.nonCustomTagPrefixMap = new HashMap<String, Mark>();
this.imports = new Vector<String>();
this.dependants = new HashMap<String,Long>();
this.includePrelude = new Vector<String>();
this.includeCoda = new Vector<String>();
this.pluginDcls = new Vector<String>();
this.prefixes = new HashSet<String>();
// Enter standard imports
imports.addAll(Constants.STANDARD_IMPORTS);
}
public boolean isTagFile() {
return isTagFile;
}
/**
* Check if the plugin ID has been previously declared. Make a not
* that this Id is now declared.
* @return true if Id has been declared.
*/
public boolean isPluginDeclared(String id) {
if (pluginDcls.contains(id))
return true;
pluginDcls.add(id);
return false;
}
public void addImports(List<String> imports) {
this.imports.addAll(imports);
}
public void addImport(String imp) {
this.imports.add(imp);
}
public List<String> getImports() {
return imports;
}
public String getJspFile() {
return jspFile;
}
public void addDependant(String d, Long lastModified) {
if (!dependants.containsKey(d) && !jspFile.equals(d))
dependants.put(d, lastModified);
}
public Map<String,Long> getDependants() {
return dependants;
}
public BeanRepository getBeanRepository() {
return beanRepository;
}
public void setScriptless(boolean s) {
scriptless = s;
}
public boolean isScriptless() {
return scriptless;
}
public void setScriptingInvalid(boolean s) {
scriptingInvalid = s;
}
public boolean isScriptingInvalid() {
return scriptingInvalid;
}
public List<String> getIncludePrelude() {
return includePrelude;
}
public void setIncludePrelude(Vector<String> prelude) {
includePrelude = prelude;
}
public List<String> getIncludeCoda() {
return includeCoda;
}
public void setIncludeCoda(Vector<String> coda) {
includeCoda = coda;
}
public void setHasJspRoot(boolean s) {
hasJspRoot = s;
}
public boolean hasJspRoot() {
return hasJspRoot;
}
public String getOmitXmlDecl() {
return omitXmlDecl;
}
public void setOmitXmlDecl(String omit) {
omitXmlDecl = omit;
}
public String getDoctypeName() {
return doctypeName;
}
public void setDoctypeName(String doctypeName) {
this.doctypeName = doctypeName;
}
public String getDoctypeSystem() {
return doctypeSystem;
}
public void setDoctypeSystem(String doctypeSystem) {
this.doctypeSystem = doctypeSystem;
}
public String getDoctypePublic() {
return doctypePublic;
}
public void setDoctypePublic(String doctypePublic) {
this.doctypePublic = doctypePublic;
}
/* Tag library and XML namespace management methods */
public void setIsJspPrefixHijacked(boolean isHijacked) {
isJspPrefixHijacked = isHijacked;
}
public boolean isJspPrefixHijacked() {
return isJspPrefixHijacked;
}
/*
* Adds the given prefix to the set of prefixes of this translation unit.
*
* @param prefix The prefix to add
*/
public void addPrefix(String prefix) {
prefixes.add(prefix);
}
/*
* Checks to see if this translation unit contains the given prefix.
*
* @param prefix The prefix to check
*
* @return true if this translation unit contains the given prefix, false
* otherwise
*/
public boolean containsPrefix(String prefix) {
return prefixes.contains(prefix);
}
/*
* Maps the given URI to the given tag library.
*
* @param uri The URI to map
* @param info The tag library to be associated with the given URI
*/
public void addTaglib(String uri, TagLibraryInfo info) {
taglibsMap.put(uri, info);
}
/*
* Gets the tag library corresponding to the given URI.
*
* @return Tag library corresponding to the given URI
*/
public TagLibraryInfo getTaglib(String uri) {
return taglibsMap.get(uri);
}
/*
* Gets the collection of tag libraries that are associated with a URI
*
* @return Collection of tag libraries that are associated with a URI
*/
public Collection<TagLibraryInfo> getTaglibs() {
return taglibsMap.values();
}
/*
* Checks to see if the given URI is mapped to a tag library.
*
* @param uri The URI to map
*
* @return true if the given URI is mapped to a tag library, false
* otherwise
*/
public boolean hasTaglib(String uri) {
return taglibsMap.containsKey(uri);
}
/*
* Maps the given prefix to the given URI.
*
* @param prefix The prefix to map
* @param uri The URI to be associated with the given prefix
*/
public void addPrefixMapping(String prefix, String uri) {
jspPrefixMapper.put(prefix, uri);
}
/*
* Pushes the given URI onto the stack of URIs to which the given prefix
* is mapped.
*
* @param prefix The prefix whose stack of URIs is to be pushed
* @param uri The URI to be pushed onto the stack
*/
public void pushPrefixMapping(String prefix, String uri) {
LinkedList<String> stack = xmlPrefixMapper.get(prefix);
if (stack == null) {
stack = new LinkedList<String>();
xmlPrefixMapper.put(prefix, stack);
}
stack.addFirst(uri);
}
/*
* Removes the URI at the top of the stack of URIs to which the given
* prefix is mapped.
*
* @param prefix The prefix whose stack of URIs is to be popped
*/
public void popPrefixMapping(String prefix) {
LinkedList<String> stack = xmlPrefixMapper.get(prefix);
if (stack == null || stack.size() == 0) {
// XXX throw new Exception("XXX");
}
stack.removeFirst();
}
/*
* Returns the URI to which the given prefix maps.
*
* @param prefix The prefix whose URI is sought
*
* @return The URI to which the given prefix maps
*/
public String getURI(String prefix) {
String uri = null;
LinkedList<String> stack = xmlPrefixMapper.get(prefix);
if (stack == null || stack.size() == 0) {
uri = jspPrefixMapper.get(prefix);
} else {
uri = stack.getFirst();
}
return uri;
}
/* Page/Tag directive attributes */
/*
* language
*/
public void setLanguage(String value, Node n, ErrorDispatcher err,
boolean pagedir)
throws JasperException {
if (!"java".equalsIgnoreCase(value)) {
if (pagedir)
err.jspError(n, "jsp.error.page.language.nonjava");
else
err.jspError(n, "jsp.error.tag.language.nonjava");
}
language = value;
}
public String getLanguage(boolean useDefault) {
return (language == null && useDefault ? defaultLanguage : language);
}
public String getLanguage() {
return getLanguage(true);
}
/*
* extends
*/
public void setExtends(String value) {
xtends = value;
}
/**
* @deprecated Use {@link #setExtends(String)}
*/
@Deprecated
public void setExtends(String value, @SuppressWarnings("unused") Node.PageDirective n) {
xtends = value;
}
/**
* Gets the value of the 'extends' page directive attribute.
*
* @param useDefault TRUE if the default
* (org.apache.jasper.runtime.HttpJspBase) should be returned if this
* attribute has not been set, FALSE otherwise
*
* @return The value of the 'extends' page directive attribute, or the
* default (org.apache.jasper.runtime.HttpJspBase) if this attribute has
* not been set and useDefault is TRUE
*/
public String getExtends(boolean useDefault) {
return (xtends == null && useDefault ? defaultExtends : xtends);
}
/**
* Gets the value of the 'extends' page directive attribute.
*
* @return The value of the 'extends' page directive attribute, or the
* default (org.apache.jasper.runtime.HttpJspBase) if this attribute has
* not been set
*/
public String getExtends() {
return getExtends(true);
}
/*
* contentType
*/
public void setContentType(String value) {
contentType = value;
}
public String getContentType() {
return contentType;
}
/*
* buffer
*/
public void setBufferValue(String value, Node n, ErrorDispatcher err)
throws JasperException {
if ("none".equalsIgnoreCase(value))
buffer = 0;
else {
if (value == null || !value.endsWith("kb")) {
if (n == null) {
err.jspError("jsp.error.page.invalid.buffer");
} else {
err.jspError(n, "jsp.error.page.invalid.buffer");
}
}
try {
Integer k = new Integer(value.substring(0, value.length()-2));
buffer = k.intValue() * 1024;
} catch (NumberFormatException e) {
if (n == null) {
err.jspError("jsp.error.page.invalid.buffer");
} else {
err.jspError(n, "jsp.error.page.invalid.buffer");
}
}
}
bufferValue = value;
}
public String getBufferValue() {
return bufferValue;
}
public int getBuffer() {
return buffer;
}
/*
* session
*/
public void setSession(String value, Node n, ErrorDispatcher err)
throws JasperException {
if ("true".equalsIgnoreCase(value))
isSession = true;
else if ("false".equalsIgnoreCase(value))
isSession = false;
else
err.jspError(n, "jsp.error.page.invalid.session");
session = value;
}
public String getSession() {
return session;
}
public boolean isSession() {
return isSession;
}
/*
* autoFlush
*/
public void setAutoFlush(String value, Node n, ErrorDispatcher err)
throws JasperException {
if ("true".equalsIgnoreCase(value))
isAutoFlush = true;
else if ("false".equalsIgnoreCase(value))
isAutoFlush = false;
else
err.jspError(n, "jsp.error.autoFlush.invalid");
autoFlush = value;
}
public String getAutoFlush() {
return autoFlush;
}
public boolean isAutoFlush() {
return isAutoFlush;
}
/*
* isThreadSafe
*/
public void setIsThreadSafe(String value, Node n, ErrorDispatcher err)
throws JasperException {
if ("true".equalsIgnoreCase(value))
isThreadSafe = true;
else if ("false".equalsIgnoreCase(value))
isThreadSafe = false;
else
err.jspError(n, "jsp.error.page.invalid.isthreadsafe");
isThreadSafeValue = value;
}
public String getIsThreadSafe() {
return isThreadSafeValue;
}
public boolean isThreadSafe() {
return isThreadSafe;
}
/*
* info
*/
public void setInfo(String value) {
info = value;
}
public String getInfo() {
return info;
}
/*
* errorPage
*/
public void setErrorPage(String value) {
errorPage = value;
}
public String getErrorPage() {
return errorPage;
}
/*
* isErrorPage
*/
public void setIsErrorPage(String value, Node n, ErrorDispatcher err)
throws JasperException {
if ("true".equalsIgnoreCase(value))
isErrorPage = true;
else if ("false".equalsIgnoreCase(value))
isErrorPage = false;
else
err.jspError(n, "jsp.error.page.invalid.iserrorpage");
isErrorPageValue = value;
}
public String getIsErrorPage() {
return isErrorPageValue;
}
public boolean isErrorPage() {
return isErrorPage;
}
/*
* isELIgnored
*/
public void setIsELIgnored(String value, Node n, ErrorDispatcher err,
boolean pagedir)
throws JasperException {
if ("true".equalsIgnoreCase(value))
isELIgnored = true;
else if ("false".equalsIgnoreCase(value))
isELIgnored = false;
else {
if (pagedir)
err.jspError(n, "jsp.error.page.invalid.iselignored");
else
err.jspError(n, "jsp.error.tag.invalid.iselignored");
}
isELIgnoredValue = value;
}
/*
* deferredSyntaxAllowedAsLiteral
*/
public void setDeferredSyntaxAllowedAsLiteral(String value, Node n, ErrorDispatcher err,
boolean pagedir)
throws JasperException {
if ("true".equalsIgnoreCase(value))
deferredSyntaxAllowedAsLiteral = true;
else if ("false".equalsIgnoreCase(value))
deferredSyntaxAllowedAsLiteral = false;
else {
if (pagedir)
err.jspError(n, "jsp.error.page.invalid.deferredsyntaxallowedasliteral");
else
err.jspError(n, "jsp.error.tag.invalid.deferredsyntaxallowedasliteral");
}
deferredSyntaxAllowedAsLiteralValue = value;
}
/*
* trimDirectiveWhitespaces
*/
public void setTrimDirectiveWhitespaces(String value, Node n, ErrorDispatcher err,
boolean pagedir)
throws JasperException {
if ("true".equalsIgnoreCase(value))
trimDirectiveWhitespaces = true;
else if ("false".equalsIgnoreCase(value))
trimDirectiveWhitespaces = false;
else {
if (pagedir)
err.jspError(n, "jsp.error.page.invalid.trimdirectivewhitespaces");
else
err.jspError(n, "jsp.error.tag.invalid.trimdirectivewhitespaces");
}
trimDirectiveWhitespacesValue = value;
}
    /** Programmatically overrides whether EL evaluation is ignored. */
    public void setELIgnored(boolean s) {
        isELIgnored = s;
    }
    /** @return the literal value given for the isELIgnored attribute. */
    public String getIsELIgnored() {
        return isELIgnoredValue;
    }
    /** @return the parsed boolean form of the isELIgnored attribute. */
    public boolean isELIgnored() {
        return isELIgnored;
    }
    /** Records where the given non-custom-tag prefix was seen. */
    public void putNonCustomTagPrefix(String prefix, Mark where) {
        nonCustomTagPrefixMap.put(prefix, where);
    }
    /** @return the recorded location of the given non-custom-tag prefix, or null. */
    public Mark getNonCustomTagPrefix(String prefix) {
        return nonCustomTagPrefixMap.get(prefix);
    }
    /** @return the literal value given for deferredSyntaxAllowedAsLiteral. */
    public String getDeferredSyntaxAllowedAsLiteral() {
        return deferredSyntaxAllowedAsLiteralValue;
    }
    /** @return the parsed boolean form of deferredSyntaxAllowedAsLiteral. */
    public boolean isDeferredSyntaxAllowedAsLiteral() {
        return deferredSyntaxAllowedAsLiteral;
    }
    /** Programmatically overrides deferredSyntaxAllowedAsLiteral. */
    public void setDeferredSyntaxAllowedAsLiteral(boolean isELDeferred) {
        this.deferredSyntaxAllowedAsLiteral = isELDeferred;
    }
    /** @return the associated expression factory. */
    public ExpressionFactory getExpressionFactory() {
        return expressionFactory;
    }
    /** @return the literal value given for trimDirectiveWhitespaces. */
    public String getTrimDirectiveWhitespaces() {
        return trimDirectiveWhitespacesValue;
    }
    /** @return the parsed boolean form of trimDirectiveWhitespaces. */
    public boolean isTrimDirectiveWhitespaces() {
        return trimDirectiveWhitespaces;
    }
    /** Programmatically overrides trimDirectiveWhitespaces. */
    public void setTrimDirectiveWhitespaces(boolean trimDirectiveWhitespaces) {
        this.trimDirectiveWhitespaces = trimDirectiveWhitespaces;
    }
    /** @return the collected variable-info names. */
    public Set<String> getVarInfoNames() {
        return varInfoNames;
    }
    // NOTE: the backing field name contains a historical typo ("Namepsace");
    // it is kept as-is because it is the declared field identifier.
    /** @return true if an undeclared tag namespace is treated as an error. */
    public boolean isErrorOnUndeclaredNamespace() {
        return errorOnUndeclaredNamepsace;
    }
    /** Sets whether an undeclared tag namespace is treated as an error. */
    public void setErrorOnUndeclaredNamespace(
            boolean errorOnUndeclaredNamespace) {
        this.errorOnUndeclaredNamepsace = errorOnUndeclaredNamespace;
    }
}
| |
package edu.java.cosc111.samples;
import java.awt.Component;
import java.io.IOException;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.LinkOption;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.FileTime;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import javax.swing.JLabel;
import javax.swing.JTable;
import javax.swing.JTree;
import javax.swing.filechooser.FileSystemView;
import javax.swing.table.DefaultTableCellRenderer;
import javax.swing.table.DefaultTableModel;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeCellRenderer;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreeNode;
import javax.swing.tree.TreePath;
import javax.swing.tree.TreeSelectionModel;
/**
*
* @author Russel
*/
/**
 * A small two-pane file browser: a lazily populated directory tree on the
 * left and a detail table (name, size, timestamps) for the selected
 * directory on the right.
 *
 * @author Russel
 */
public class DirectoryExplorer extends javax.swing.JFrame {
    // Binary size thresholds; limit[i] == 1024^(i+1), i.e. 1 KB .. 1 EB.
    private static final long[] limit = new long[6];
    private static final String[] unit = {"KB", "MB", "GB", "TB", "PB", "EB"};
    private static final FileSystemView fs = FileSystemView.getFileSystemView();
    private static final FileSystem fsys = FileSystems.getDefault();

    static {
        // BUGFIX: these thresholds were previously filled in the instance
        // constructor, so the static formatFileSize() divided by zero if it
        // ran before the first DirectoryExplorer was constructed. Initialize
        // them once at class-load time instead.
        long x = 1;
        for (int i = 0; i < limit.length; i++) {
            x = x << 10;
            limit[i] = x;
        }
    }

    /** Tree renderer that decorates each node with its platform file icon. */
    private static class ModifiedTreeRenderer extends DefaultTreeCellRenderer {
        @Override
        public Component getTreeCellRendererComponent(JTree tree, Object value,
                                                      boolean sel, boolean expanded,
                                                      boolean leaf, int row, boolean hasFocus) {
            super.getTreeCellRendererComponent(tree, value, sel,
                                               expanded, leaf, row, hasFocus);
            DefaultMutableTreeNode node = (DefaultMutableTreeNode) value;
            Path p = treeNodesToFilePath(node.getPath());
            setIcon(fs.getSystemIcon(p.toFile()));
            return this;
        }
    }

    /**
     * Table renderer that unwraps the cell value: JLabel cells keep their
     * icon and text, Long cells are shown as human-readable sizes, and
     * FileTime cells are shown as "MM/dd/yyyy hh:mm:ss AM/PM".
     */
    private static class ModifiedCellRenderer extends DefaultTableCellRenderer {
        @Override
        public Component getTableCellRendererComponent(JTable table,
                                                       Object value,
                                                       boolean isSelected,
                                                       boolean hasFocus,
                                                       int row, int column) {
            setIcon(null); // reset; renderers are shared across cells
            if (value instanceof JLabel) {
                JLabel val = (JLabel) value;
                String newValue = val.getText();
                setIcon(val.getIcon());
                return super.getTableCellRendererComponent(table, newValue,
                                                           isSelected, hasFocus,
                                                           row, column);
            } else if (value instanceof Long) {
                String newValue = formatFileSize((Long) value);
                return super.getTableCellRendererComponent(table, newValue,
                                                           isSelected, hasFocus,
                                                           row, column);
            } else if (value instanceof FileTime) {
                FileTime val = (FileTime) value;
                Calendar cal = Calendar.getInstance();
                cal.setTimeInMillis(val.toMillis());
                String newValue = String.format("%1$tm/%1$td/%1$tY %1$tr", cal);
                return super.getTableCellRendererComponent(table, newValue,
                                                           isSelected, hasFocus,
                                                           row, column);
            }
            return super.getTableCellRendererComponent(table, value, isSelected,
                                                       hasFocus, row, column);
        }
    }

    /**
     * Formats a byte count with binary units, e.g. 2048 -> "2.00 KB".
     * Values below 1 KB are reported as plain bytes.
     */
    static String formatFileSize(long sz) {
        int i = 0;
        for (long l : limit) {
            if (sz < l) {
                break;
            }
            i++;
        }
        if (i == 0) {
            // BUGFIX: was "bytes" glued to the number ("512bytes").
            return sz + " bytes";
        }
        // BUGFIX: i is in 1..limit.length here, so limit[i-1]/unit[i-1] are
        // always valid indices; the old decrement at i == limit.length made
        // sizes of 1 EB and above render in PB even though unit[5] == "EB".
        return String.format("%.2f %s", ((double) sz / limit[i - 1]), unit[i - 1]);
    }

    /** Converts a tree path (skipping the synthetic root) into a file-system Path. */
    private static Path treeNodesToFilePath(TreeNode[] nodes) {
        Path p = Paths.get("");
        for (int i = 1; i < nodes.length; i++) {
            p = p.resolve(nodes[i].toString());
        }
        return p;
    }

    /** Repopulates the detail table with the entries of the selected directory. */
    private void fillFileList(TreePath tPath) {
        DefaultMutableTreeNode cur = (DefaultMutableTreeNode)
                tPath.getLastPathComponent();
        if (tPath.getPathCount() > 1) { // ignore the synthetic "My Computer" root
            Path p = treeNodesToFilePath(cur.getPath());
            try {
                DefaultTableModel tblModel = (DefaultTableModel) jTableContent.getModel();
                while (tblModel.getRowCount() > 0) {
                    tblModel.removeRow(0);
                }
                for (Path o : Files.newDirectoryStream(p)) {
                    List<Object> values = new ArrayList<>();
                    JLabel lbl = new JLabel(o.getFileName().toString(),
                                            fs.getSystemIcon(o.toFile()),
                                            JLabel.LEFT);
                    values.add(lbl);
                    BasicFileAttributes attr = Files.readAttributes(o,
                                                                    BasicFileAttributes.class,
                                                                    LinkOption.NOFOLLOW_LINKS);
                    if (Files.isDirectory(o, LinkOption.NOFOLLOW_LINKS)) {
                        values.add(""); // directories show no size
                    } else {
                        values.add(attr.size());
                    }
                    values.add(attr.creationTime());
                    values.add(attr.lastAccessTime());
                    values.add(attr.lastModifiedTime());
                    // NOTE(review): the table declares an "Attributes" column but
                    // rows only supply 5 values — confirm whether it should be filled.
                    tblModel.addRow(values.toArray());
                }
            } catch (IOException ex) {
                // Best effort: an unreadable directory leaves the table as-is.
            }
        }
    }

    /**
     * Lazily expands a tree node: the placeholder child (user object null)
     * is replaced by one node per subdirectory on first expansion.
     */
    private void fillDirectoryTree(TreePath tPath) {
        DefaultMutableTreeNode cur = (DefaultMutableTreeNode)
                tPath.getLastPathComponent();
        if (tPath.getPathCount() > 1 &&
            ((DefaultMutableTreeNode) cur.getChildAt(0)).getUserObject() == null) {
            cur.remove(0); // drop the placeholder
            Path p = treeNodesToFilePath(cur.getPath());
            try {
                int i = 0;
                for (Path o : Files.newDirectoryStream(p)) {
                    if (Files.isDirectory(o, LinkOption.NOFOLLOW_LINKS)) {
                        DefaultMutableTreeNode n = new DefaultMutableTreeNode();
                        n.add(new DefaultMutableTreeNode(null)); // placeholder for lazy load
                        n.setUserObject(o.getFileName());
                        cur.insert(n, i++);
                    }
                }
            } catch (IOException ex) {
                // Best effort: an unreadable directory simply stays childless.
            }
        }
    }

    /** Configures the detail table: columns, read-only cells, custom renderer. */
    private void initFileList() {
        String[] labels = {"Filename", "Size", "Created",
                           "Accessed", "Modified", "Attributes"};
        DefaultTableModel tblModel = new DefaultTableModel();
        jTableContent.setModel(tblModel);
        for (String s : labels) {
            tblModel.addColumn(s);
        }
        jTableContent.setDefaultEditor(Object.class, null); // read-only cells
        jTableContent.setDefaultRenderer(Object.class, new ModifiedCellRenderer());
    }

    /** Builds the tree with a synthetic root plus one lazy node per FS root. */
    private void initDirectoryTree() {
        DefaultMutableTreeNode nRoot = new DefaultMutableTreeNode("My Computer");
        DefaultTreeModel treeModel = new DefaultTreeModel(nRoot);
        jTreeExplorer = new JTree(treeModel);
        jTreeExplorer.getSelectionModel().setSelectionMode(
                TreeSelectionModel.SINGLE_TREE_SELECTION);
        jTreeExplorer.setCellRenderer(new ModifiedTreeRenderer());
        for (Path root : fsys.getRootDirectories()) {
            DefaultMutableTreeNode nRootDir = new DefaultMutableTreeNode(root.toString(), true);
            nRoot.add(nRootDir);
            nRootDir.add(new DefaultMutableTreeNode(null)); // placeholder for lazy load
        }
        jTreeExplorer.expandRow(0);
    }

    /**
     * Creates new form Explorer.
     */
    public DirectoryExplorer() {
        initComponents();
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        jSplitPane1 = new javax.swing.JSplitPane();
        jScrollPane1 = new javax.swing.JScrollPane();
        jTreeExplorer = null;
        jScrollPane3 = new javax.swing.JScrollPane();
        jTableContent = new javax.swing.JTable();
        setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
        setTitle("Explorer");
        jSplitPane1.setDividerSize(2);
        initDirectoryTree();
        jTreeExplorer.addTreeWillExpandListener(new javax.swing.event.TreeWillExpandListener() {
            public void treeWillCollapse(javax.swing.event.TreeExpansionEvent evt)throws javax.swing.tree.ExpandVetoException {
            }
            public void treeWillExpand(javax.swing.event.TreeExpansionEvent evt)throws javax.swing.tree.ExpandVetoException {
                jTreeExplorerTreeWillExpand(evt);
            }
        });
        jTreeExplorer.addTreeSelectionListener(new javax.swing.event.TreeSelectionListener() {
            public void valueChanged(javax.swing.event.TreeSelectionEvent evt) {
                jTreeExplorerValueChanged(evt);
            }
        });
        jScrollPane1.setViewportView(jTreeExplorer);
        jSplitPane1.setLeftComponent(jScrollPane1);
        jTableContent.setAutoCreateRowSorter(true);
        initFileList();
        jTableContent.setFillsViewportHeight(true);
        jTableContent.setSelectionMode(javax.swing.ListSelectionModel.SINGLE_INTERVAL_SELECTION);
        jTableContent.setShowHorizontalLines(false);
        jTableContent.setShowVerticalLines(false);
        jScrollPane3.setViewportView(jTableContent);
        jSplitPane1.setRightComponent(jScrollPane3);
        getContentPane().add(jSplitPane1, java.awt.BorderLayout.CENTER);
        jSplitPane1.setDividerLocation(200);
        pack();
    }// </editor-fold>//GEN-END:initComponents

    private void jTreeExplorerTreeWillExpand(javax.swing.event.TreeExpansionEvent evt)throws javax.swing.tree.ExpandVetoException {//GEN-FIRST:event_jTreeExplorerTreeWillExpand
        fillDirectoryTree(evt.getPath());
    }//GEN-LAST:event_jTreeExplorerTreeWillExpand

    private void jTreeExplorerValueChanged(javax.swing.event.TreeSelectionEvent evt) {//GEN-FIRST:event_jTreeExplorerValueChanged
        fillFileList(evt.getPath());
    }//GEN-LAST:event_jTreeExplorerValueChanged

    /**
     * @param args the command line arguments
     */
    public static void main(String args[]) {
        /* Set the Windows look and feel if it is available; otherwise stay
         * with the default. For details see
         * http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
         */
        //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
        try {
            for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
                if ("Windows".equals(info.getName())) {
                    javax.swing.UIManager.setLookAndFeel(info.getClassName());
                    break;
                }
            }
        } catch (ClassNotFoundException ex) {
            java.util.logging.Logger.getLogger(DirectoryExplorer.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (InstantiationException ex) {
            java.util.logging.Logger.getLogger(DirectoryExplorer.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (IllegalAccessException ex) {
            java.util.logging.Logger.getLogger(DirectoryExplorer.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (javax.swing.UnsupportedLookAndFeelException ex) {
            java.util.logging.Logger.getLogger(DirectoryExplorer.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        }
        //</editor-fold>

        /* Create and display the form */
        java.awt.EventQueue.invokeLater(new Runnable() {
            public void run() {
                new DirectoryExplorer().setVisible(true);
            }
        });
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JScrollPane jScrollPane1;
    private javax.swing.JScrollPane jScrollPane3;
    private javax.swing.JSplitPane jSplitPane1;
    private javax.swing.JTable jTableContent;
    private javax.swing.JTree jTreeExplorer;
    // End of variables declaration//GEN-END:variables
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.exportimport.util;
import static org.keycloak.models.utils.ModelToRepresentation.toRepresentation;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.keycloak.authorization.AuthorizationProvider;
import org.keycloak.authorization.AuthorizationProviderFactory;
import org.keycloak.authorization.model.Policy;
import org.keycloak.authorization.model.Resource;
import org.keycloak.authorization.model.ResourceServer;
import org.keycloak.authorization.model.Scope;
import org.keycloak.authorization.store.PolicyStore;
import org.keycloak.authorization.store.StoreFactory;
import org.keycloak.common.Version;
import org.keycloak.common.util.Base64;
import org.keycloak.common.util.MultivaluedHashMap;
import org.keycloak.component.ComponentModel;
import org.keycloak.credential.CredentialModel;
import org.keycloak.models.ClientModel;
import org.keycloak.models.ClientScopeModel;
import org.keycloak.models.FederatedIdentityModel;
import org.keycloak.models.GroupModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.RealmModel;
import org.keycloak.models.RoleContainerModel;
import org.keycloak.models.RoleModel;
import org.keycloak.models.UserConsentModel;
import org.keycloak.models.UserModel;
import org.keycloak.models.utils.ModelToRepresentation;
import org.keycloak.representations.idm.ClientRepresentation;
import org.keycloak.representations.idm.ClientScopeRepresentation;
import org.keycloak.representations.idm.ComponentExportRepresentation;
import org.keycloak.representations.idm.CredentialRepresentation;
import org.keycloak.representations.idm.FederatedIdentityRepresentation;
import org.keycloak.representations.idm.RealmRepresentation;
import org.keycloak.representations.idm.RoleRepresentation;
import org.keycloak.representations.idm.RolesRepresentation;
import org.keycloak.representations.idm.ScopeMappingRepresentation;
import org.keycloak.representations.idm.UserConsentRepresentation;
import org.keycloak.representations.idm.UserRepresentation;
import org.keycloak.representations.idm.authorization.PolicyRepresentation;
import org.keycloak.representations.idm.authorization.ResourceOwnerRepresentation;
import org.keycloak.representations.idm.authorization.ResourceRepresentation;
import org.keycloak.representations.idm.authorization.ResourceServerRepresentation;
import org.keycloak.representations.idm.authorization.ScopeRepresentation;
import org.keycloak.util.JsonSerialization;
import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class ExportUtils {
public static RealmRepresentation exportRealm(KeycloakSession session, RealmModel realm, boolean includeUsers, boolean internal) {
ExportOptions opts = new ExportOptions(false, true, true);
if (includeUsers) {
opts.setUsersIncluded(true);
}
return exportRealm(session, realm, opts, internal);
}
    /**
     * Exports a full representation of the given realm, honoring the supplied
     * {@link ExportOptions}: client scopes and components are always included;
     * clients, groups/roles, and (federated) users are included only when the
     * corresponding option is set.
     *
     * @param session  active Keycloak session
     * @param realm    realm to export
     * @param options  controls which optional parts are included
     * @param internal whether internal fields are included in the representation
     * @return the populated realm representation
     */
    public static RealmRepresentation exportRealm(KeycloakSession session, RealmModel realm, ExportOptions options, boolean internal) {
        RealmRepresentation rep = ModelToRepresentation.toRepresentation(realm, internal);
        ModelToRepresentation.exportAuthenticationFlows(realm, rep);
        ModelToRepresentation.exportRequiredActions(realm, rep);
        // Project/product version
        rep.setKeycloakVersion(Version.VERSION);
        // Client Scopes
        List<ClientScopeModel> clientScopeModels = realm.getClientScopes();
        List<ClientScopeRepresentation> clientScopesReps = new ArrayList<>();
        for (ClientScopeModel app : clientScopeModels) {
            ClientScopeRepresentation clientRep = ModelToRepresentation.toRepresentation(app);
            clientScopesReps.add(clientRep);
        }
        rep.setClientScopes(clientScopesReps);
        // Names of the realm's default (true) and optional (false) client scopes
        List<String> defaultClientScopeNames = realm.getDefaultClientScopes(true).stream().map((ClientScopeModel clientScope) -> {
            return clientScope.getName();
        }).collect(Collectors.toList());
        rep.setDefaultDefaultClientScopes(defaultClientScopeNames);
        List<String> optionalClientScopeNames = realm.getDefaultClientScopes(false).stream().map((ClientScopeModel clientScope) -> {
            return clientScope.getName();
        }).collect(Collectors.toList());
        rep.setDefaultOptionalClientScopes(optionalClientScopeNames);
        // Clients
        List<ClientModel> clients = Collections.emptyList();
        if (options.isClientsIncluded()) {
            clients = realm.getClients();
            List<ClientRepresentation> clientReps = new ArrayList<>();
            for (ClientModel app : clients) {
                ClientRepresentation clientRep = exportClient(session, app);
                clientReps.add(clientRep);
            }
            rep.setClients(clientReps);
        }
        // Groups and Roles
        if (options.isGroupsAndRolesIncluded()) {
            ModelToRepresentation.exportGroups(realm, rep);
            List<RoleRepresentation> realmRoleReps = null;
            Map<String, List<RoleRepresentation>> clientRolesReps = new HashMap<>();
            Set<RoleModel> realmRoles = realm.getRoles();
            if (realmRoles != null && realmRoles.size() > 0) {
                realmRoleReps = exportRoles(realmRoles);
            }
            RolesRepresentation rolesRep = new RolesRepresentation();
            if (realmRoleReps != null) {
                rolesRep.setRealm(realmRoleReps);
            }
            // Client roles only make sense when clients themselves are exported
            if (options.isClientsIncluded()) {
                for (ClientModel client : clients) {
                    Set<RoleModel> currentAppRoles = client.getRoles();
                    List<RoleRepresentation> currentAppRoleReps = exportRoles(currentAppRoles);
                    clientRolesReps.put(client.getClientId(), currentAppRoleReps);
                }
                if (clientRolesReps.size() > 0) {
                    rolesRep.setClient(clientRolesReps);
                }
            }
            rep.setRoles(rolesRep);
        }
        // Scopes
        Map<String, List<ScopeMappingRepresentation>> clientScopeReps = new HashMap<>();
        if (options.isClientsIncluded()) {
            List<ClientModel> allClients = new ArrayList<>(clients);
            // Scopes of clients
            for (ClientModel client : allClients) {
                Set<RoleModel> clientScopes = client.getScopeMappings();
                ScopeMappingRepresentation scopeMappingRep = null;
                for (RoleModel scope : clientScopes) {
                    if (scope.getContainer() instanceof RealmModel) {
                        // Realm-level role: recorded on the realm-wide mapping, lazily created
                        if (scopeMappingRep == null) {
                            scopeMappingRep = rep.clientScopeMapping(client.getClientId());
                        }
                        scopeMappingRep.role(scope.getName());
                    } else {
                        // Client-level role: grouped by the owning client's id
                        ClientModel app = (ClientModel) scope.getContainer();
                        String appName = app.getClientId();
                        List<ScopeMappingRepresentation> currentAppScopes = clientScopeReps.get(appName);
                        if (currentAppScopes == null) {
                            currentAppScopes = new ArrayList<>();
                            clientScopeReps.put(appName, currentAppScopes);
                        }
                        ScopeMappingRepresentation currentClientScope = null;
                        for (ScopeMappingRepresentation scopeMapping : currentAppScopes) {
                            if (client.getClientId().equals(scopeMapping.getClient())) {
                                currentClientScope = scopeMapping;
                                break;
                            }
                        }
                        if (currentClientScope == null) {
                            currentClientScope = new ScopeMappingRepresentation();
                            currentClientScope.setClient(client.getClientId());
                            currentAppScopes.add(currentClientScope);
                        }
                        currentClientScope.role(scope.getName());
                    }
                }
            }
        }
        // Scopes of client scopes (always exported, like the client scopes themselves)
        for (ClientScopeModel clientScope : realm.getClientScopes()) {
            Set<RoleModel> clientScopes = clientScope.getScopeMappings();
            ScopeMappingRepresentation scopeMappingRep = null;
            for (RoleModel scope : clientScopes) {
                if (scope.getContainer() instanceof RealmModel) {
                    if (scopeMappingRep == null) {
                        scopeMappingRep = rep.clientScopeScopeMapping(clientScope.getName());
                    }
                    scopeMappingRep.role(scope.getName());
                } else {
                    ClientModel app = (ClientModel)scope.getContainer();
                    String appName = app.getClientId();
                    List<ScopeMappingRepresentation> currentAppScopes = clientScopeReps.get(appName);
                    if (currentAppScopes == null) {
                        currentAppScopes = new ArrayList<>();
                        clientScopeReps.put(appName, currentAppScopes);
                    }
                    ScopeMappingRepresentation currentClientTemplateScope = null;
                    for (ScopeMappingRepresentation scopeMapping : currentAppScopes) {
                        if (clientScope.getName().equals(scopeMapping.getClientScope())) {
                            currentClientTemplateScope = scopeMapping;
                            break;
                        }
                    }
                    if (currentClientTemplateScope == null) {
                        currentClientTemplateScope = new ScopeMappingRepresentation();
                        currentClientTemplateScope.setClientScope(clientScope.getName());
                        currentAppScopes.add(currentClientTemplateScope);
                    }
                    currentClientTemplateScope.role(scope.getName());
                }
            }
        }
        if (clientScopeReps.size() > 0) {
            rep.setClientScopeMappings(clientScopeReps);
        }
        // Finally users if needed
        if (options.isUsersIncluded()) {
            List<UserModel> allUsers = session.users().getUsers(realm, true);
            List<UserRepresentation> users = new LinkedList<>();
            for (UserModel user : allUsers) {
                UserRepresentation userRep = exportUser(session, realm, user, options);
                users.add(userRep);
            }
            if (users.size() > 0) {
                rep.setUsers(users);
            }
            // Federated-storage users are exported separately (0, -1 == no paging)
            List<UserRepresentation> federatedUsers = new LinkedList<>();
            for (String userId : session.userFederatedStorage().getStoredUsers(realm, 0, -1)) {
                UserRepresentation userRep = exportFederatedUser(session, realm, userId, options);
                federatedUsers.add(userRep);
            }
            if (federatedUsers.size() > 0) {
                rep.setFederatedUsers(federatedUsers);
            }
        }
        // components
        MultivaluedHashMap<String, ComponentExportRepresentation> components = exportComponents(realm, realm.getId());
        rep.setComponents(components);
        return rep;
    }
public static MultivaluedHashMap<String, ComponentExportRepresentation> exportComponents(RealmModel realm, String parentId) {
List<ComponentModel> componentList = realm.getComponents(parentId);
MultivaluedHashMap<String, ComponentExportRepresentation> components = new MultivaluedHashMap<>();
for (ComponentModel component : componentList) {
ComponentExportRepresentation compRep = new ComponentExportRepresentation();
compRep.setId(component.getId());
compRep.setProviderId(component.getProviderId());
compRep.setConfig(component.getConfig());
compRep.setName(component.getName());
compRep.setSubType(component.getSubType());
compRep.setSubComponents(exportComponents(realm, component.getId()));
components.add(component.getProviderType(), compRep);
}
return components;
}
/**
* Full export of application including claims and secret
* @param client
* @return full ApplicationRepresentation
*/
public static ClientRepresentation exportClient(KeycloakSession session, ClientModel client) {
ClientRepresentation clientRep = ModelToRepresentation.toRepresentation(client, session);
clientRep.setSecret(client.getSecret());
clientRep.setAuthorizationSettings(exportAuthorizationSettings(session,client));
return clientRep;
}
public static ResourceServerRepresentation exportAuthorizationSettings(KeycloakSession session, ClientModel client) {
AuthorizationProviderFactory providerFactory = (AuthorizationProviderFactory) session.getKeycloakSessionFactory().getProviderFactory(AuthorizationProvider.class);
AuthorizationProvider authorization = providerFactory.create(session, client.getRealm());
StoreFactory storeFactory = authorization.getStoreFactory();
ResourceServer settingsModel = authorization.getStoreFactory().getResourceServerStore().findById(client.getId());
if (settingsModel == null) {
return null;
}
ResourceServerRepresentation representation = toRepresentation(settingsModel, client);
representation.setId(null);
representation.setName(null);
representation.setClientId(null);
List<ResourceRepresentation> resources = storeFactory.getResourceStore().findByResourceServer(settingsModel.getId())
.stream().map(resource -> {
ResourceRepresentation rep = toRepresentation(resource, settingsModel, authorization);
if (rep.getOwner().getId().equals(settingsModel.getId())) {
rep.setOwner((ResourceOwnerRepresentation) null);
} else {
rep.getOwner().setId(null);
}
rep.getScopes().forEach(scopeRepresentation -> {
scopeRepresentation.setId(null);
scopeRepresentation.setIconUri(null);
});
return rep;
}).collect(Collectors.toList());
representation.setResources(resources);
List<PolicyRepresentation> policies = new ArrayList<>();
PolicyStore policyStore = storeFactory.getPolicyStore();
policies.addAll(policyStore.findByResourceServer(settingsModel.getId())
.stream().filter(policy -> !policy.getType().equals("resource") && !policy.getType().equals("scope") && policy.getOwner() == null)
.map(policy -> createPolicyRepresentation(authorization, policy)).collect(Collectors.toList()));
policies.addAll(policyStore.findByResourceServer(settingsModel.getId())
.stream().filter(policy -> (policy.getType().equals("resource") || policy.getType().equals("scope") && policy.getOwner() == null))
.map(policy -> createPolicyRepresentation(authorization, policy)).collect(Collectors.toList()));
representation.setPolicies(policies);
List<ScopeRepresentation> scopes = storeFactory.getScopeStore().findByResourceServer(settingsModel.getId()).stream().map(scope -> {
ScopeRepresentation rep = toRepresentation(scope);
rep.setPolicies(null);
rep.setResources(null);
return rep;
}).collect(Collectors.toList());
representation.setScopes(scopes);
return representation;
}
private static PolicyRepresentation createPolicyRepresentation(AuthorizationProvider authorizationProvider, Policy policy) {
try {
PolicyRepresentation rep = toRepresentation(policy, authorizationProvider, true, true);
Map<String, String> config = new HashMap<>(rep.getConfig());
rep.setConfig(config);
Set<Scope> scopes = policy.getScopes();
if (!scopes.isEmpty()) {
List<String> scopeNames = scopes.stream().map(Scope::getName).collect(Collectors.toList());
config.put("scopes", JsonSerialization.writeValueAsString(scopeNames));
}
Set<Resource> policyResources = policy.getResources();
if (!policyResources.isEmpty()) {
List<String> resourceNames = policyResources.stream().map(Resource::getName).collect(Collectors.toList());
config.put("resources", JsonSerialization.writeValueAsString(resourceNames));
}
Set<Policy> associatedPolicies = policy.getAssociatedPolicies();
if (!associatedPolicies.isEmpty()) {
config.put("applyPolicies", JsonSerialization.writeValueAsString(associatedPolicies.stream().map(associated -> associated.getName()).collect(Collectors.toList())));
}
return rep;
} catch (Exception e) {
throw new RuntimeException("Error while exporting policy [" + policy.getName() + "].", e);
}
}
public static List<RoleRepresentation> exportRoles(Collection<RoleModel> roles) {
List<RoleRepresentation> roleReps = new ArrayList<RoleRepresentation>();
for (RoleModel role : roles) {
RoleRepresentation roleRep = exportRole(role);
roleReps.add(roleRep);
}
return roleReps;
}
public static List<String> getRoleNames(Collection<RoleModel> roles) {
List<String> roleNames = new ArrayList<String>();
for (RoleModel role : roles) {
roleNames.add(role.getName());
}
return roleNames;
}
/**
* Full export of role including composite roles
* @param role
* @return RoleRepresentation with all stuff filled (including composite roles)
*/
public static RoleRepresentation exportRole(RoleModel role) {
RoleRepresentation roleRep = ModelToRepresentation.toRepresentation(role);
Set<RoleModel> composites = role.getComposites();
if (composites != null && composites.size() > 0) {
Set<String> compositeRealmRoles = null;
Map<String, List<String>> compositeClientRoles = null;
for (RoleModel composite : composites) {
RoleContainerModel crContainer = composite.getContainer();
if (crContainer instanceof RealmModel) {
if (compositeRealmRoles == null) {
compositeRealmRoles = new HashSet<>();
}
compositeRealmRoles.add(composite.getName());
} else {
if (compositeClientRoles == null) {
compositeClientRoles = new HashMap<>();
}
ClientModel app = (ClientModel)crContainer;
String appName = app.getClientId();
List<String> currentAppComposites = compositeClientRoles.get(appName);
if (currentAppComposites == null) {
currentAppComposites = new ArrayList<>();
compositeClientRoles.put(appName, currentAppComposites);
}
currentAppComposites.add(composite.getName());
}
}
RoleRepresentation.Composites compRep = new RoleRepresentation.Composites();
if (compositeRealmRoles != null) {
compRep.setRealm(compositeRealmRoles);
}
if (compositeClientRoles != null) {
compRep.setClient(compositeClientRoles);
}
roleRep.setComposites(compRep);
}
return roleRep;
}
    /**
     * Full export of a user: federated identities, role mappings (realm and
     * client), stored credentials, federation link, consents, not-before,
     * service-account link, and (when groups/roles are exported) group paths.
     *
     * @param session active Keycloak session
     * @param realm   realm the user belongs to
     * @param user    user to export
     * @param options controls whether group membership is included
     * @return fully exported user representation
     */
    public static UserRepresentation exportUser(KeycloakSession session, RealmModel realm, UserModel user, ExportOptions options) {
        UserRepresentation userRep = ModelToRepresentation.toRepresentation(session, realm, user);
        // Social links
        Set<FederatedIdentityModel> socialLinks = session.users().getFederatedIdentities(user, realm);
        List<FederatedIdentityRepresentation> socialLinkReps = new ArrayList<FederatedIdentityRepresentation>();
        for (FederatedIdentityModel socialLink : socialLinks) {
            FederatedIdentityRepresentation socialLinkRep = exportSocialLink(socialLink);
            socialLinkReps.add(socialLinkRep);
        }
        if (socialLinkReps.size() > 0) {
            userRep.setFederatedIdentities(socialLinkReps);
        }
        // Role mappings: realm-level roles as names, client-level roles grouped by client id
        Set<RoleModel> roles = user.getRoleMappings();
        List<String> realmRoleNames = new ArrayList<>();
        Map<String, List<String>> clientRoleNames = new HashMap<>();
        for (RoleModel role : roles) {
            if (role.getContainer() instanceof RealmModel) {
                realmRoleNames.add(role.getName());
            } else {
                ClientModel client = (ClientModel)role.getContainer();
                String clientId = client.getClientId();
                List<String> currentClientRoles = clientRoleNames.get(clientId);
                if (currentClientRoles == null) {
                    currentClientRoles = new ArrayList<>();
                    clientRoleNames.put(clientId, currentClientRoles);
                }
                currentClientRoles.add(role.getName());
            }
        }
        if (realmRoleNames.size() > 0) {
            userRep.setRealmRoles(realmRoleNames);
        }
        if (clientRoleNames.size() > 0) {
            userRep.setClientRoles(clientRoleNames);
        }
        // Credentials
        List<CredentialModel> creds = session.userCredentialManager().getStoredCredentials(realm, user);
        List<CredentialRepresentation> credReps = new ArrayList<CredentialRepresentation>();
        for (CredentialModel cred : creds) {
            CredentialRepresentation credRep = exportCredential(cred);
            credReps.add(credRep);
        }
        userRep.setCredentials(credReps);
        userRep.setFederationLink(user.getFederationLink());
        // Grants
        List<UserConsentModel> consents = session.users().getConsents(realm, user.getId());
        LinkedList<UserConsentRepresentation> consentReps = new LinkedList<UserConsentRepresentation>();
        for (UserConsentModel consent : consents) {
            UserConsentRepresentation consentRep = ModelToRepresentation.toRepresentation(consent);
            consentReps.add(consentRep);
        }
        if (consentReps.size() > 0) {
            userRep.setClientConsents(consentReps);
        }
        // Not Before
        int notBefore = session.users().getNotBeforeOfUser(realm, user);
        userRep.setNotBefore(notBefore);
        // Service account: resolve the internal client id back to the clientId
        if (user.getServiceAccountClientLink() != null) {
            String clientInternalId = user.getServiceAccountClientLink();
            ClientModel client = realm.getClientById(clientInternalId);
            if (client != null) {
                userRep.setServiceAccountClientId(client.getClientId());
            }
        }
        if (options.isGroupsAndRolesIncluded()) {
            List<String> groups = new LinkedList<>();
            for (GroupModel group : user.getGroups()) {
                groups.add(ModelToRepresentation.buildGroupPath(group));
            }
            userRep.setGroups(groups);
        }
        return userRep;
    }
public static FederatedIdentityRepresentation exportSocialLink(FederatedIdentityModel socialLink) {
FederatedIdentityRepresentation socialLinkRep = new FederatedIdentityRepresentation();
socialLinkRep.setIdentityProvider(socialLink.getIdentityProvider());
socialLinkRep.setUserId(socialLink.getUserId());
socialLinkRep.setUserName(socialLink.getUserName());
return socialLinkRep;
}
/**
 * Converts a stored credential model into its REST representation.
 *
 * @param userCred stored credential to convert
 * @return the converted representation
 */
public static CredentialRepresentation exportCredential(CredentialModel userCred) {
    // Delegate to the shared model-to-representation converter.
    CredentialRepresentation rep = ModelToRepresentation.toRepresentation(userCred);
    return rep;
}
// Streaming API
/**
 * Streams the given users of a realm to {@code os} as JSON, using default export options.
 *
 * @throws IOException on serialization or write failure
 */
public static void exportUsersToStream(KeycloakSession session, RealmModel realm, List<UserModel> usersToExport, ObjectMapper mapper, OutputStream os) throws IOException {
    ExportOptions defaults = new ExportOptions();
    exportUsersToStream(session, realm, usersToExport, mapper, os, defaults);
}
/**
 * Streams the given users of a realm to {@code os} as a JSON document of the form
 * {@code {"realm": <name>, "users": [ ... ]}}.
 *
 * @param usersToExport users serialized one by one into the {@code users} array
 * @param options controls which parts of each user (e.g. groups/roles) are exported
 * @throws IOException on serialization or write failure
 */
public static void exportUsersToStream(KeycloakSession session, RealmModel realm, List<UserModel> usersToExport, ObjectMapper mapper, OutputStream os, ExportOptions options) throws IOException {
    JsonFactory factory = mapper.getFactory();
    // try-with-resources replaces the manual try/finally: the generator (and any
    // buffered output) is closed even if serializing a user fails mid-stream.
    try (JsonGenerator generator = factory.createGenerator(os, JsonEncoding.UTF8)) {
        if (mapper.isEnabled(SerializationFeature.INDENT_OUTPUT)) {
            generator.useDefaultPrettyPrinter();
        }
        generator.writeStartObject();
        generator.writeStringField("realm", realm.getName());
        generator.writeFieldName("users");
        generator.writeStartArray();
        for (UserModel user : usersToExport) {
            UserRepresentation userRep = ExportUtils.exportUser(session, realm, user, options);
            generator.writeObject(userRep);
        }
        generator.writeEndArray();
        generator.writeEndObject();
    }
}
/**
 * Streams the given federated users of a realm to {@code os} as JSON, using default export options.
 *
 * @throws IOException on serialization or write failure
 */
public static void exportFederatedUsersToStream(KeycloakSession session, RealmModel realm, List<String> usersToExport, ObjectMapper mapper, OutputStream os) throws IOException {
    ExportOptions defaults = new ExportOptions();
    exportFederatedUsersToStream(session, realm, usersToExport, mapper, os, defaults);
}
/**
 * Streams the given federated-storage users of a realm to {@code os} as a JSON document
 * of the form {@code {"realm": <name>, "federatedUsers": [ ... ]}}.
 *
 * @param usersToExport federated storage ids of the users to serialize
 * @param options controls which parts of each user (e.g. groups/roles) are exported
 * @throws IOException on serialization or write failure
 */
public static void exportFederatedUsersToStream(KeycloakSession session, RealmModel realm, List<String> usersToExport, ObjectMapper mapper, OutputStream os, ExportOptions options) throws IOException {
    JsonFactory factory = mapper.getFactory();
    // try-with-resources replaces the manual try/finally: the generator (and any
    // buffered output) is closed even if serializing a user fails mid-stream.
    try (JsonGenerator generator = factory.createGenerator(os, JsonEncoding.UTF8)) {
        if (mapper.isEnabled(SerializationFeature.INDENT_OUTPUT)) {
            generator.useDefaultPrettyPrinter();
        }
        generator.writeStartObject();
        generator.writeStringField("realm", realm.getName());
        generator.writeFieldName("federatedUsers");
        generator.writeStartArray();
        for (String userId : usersToExport) {
            UserRepresentation userRep = ExportUtils.exportFederatedUser(session, realm, userId, options);
            generator.writeObject(userRep);
        }
        generator.writeEndArray();
        generator.writeEndObject();
    }
}
/**
 * Full export of user data stored in federated storage (including role mappings and credentials).
 *
 * @param session active session used to reach federated storage
 * @param realm realm the user belongs to
 * @param id federated storage id of the user
 * @param options when groups/roles are excluded, role mappings and group paths are skipped
 * @return fully exported user representation
 */
public static UserRepresentation exportFederatedUser(KeycloakSession session, RealmModel realm, String id, ExportOptions options) {
    UserRepresentation userRep = new UserRepresentation();
    userRep.setId(id);
    // Attributes
    MultivaluedHashMap<String, String> attributes = session.userFederatedStorage().getAttributes(realm, id);
    if (!attributes.isEmpty()) {
        Map<String, List<String>> attrs = new HashMap<>();
        attrs.putAll(attributes);
        userRep.setAttributes(attrs);
    }
    // Required actions
    Set<String> requiredActions = session.userFederatedStorage().getRequiredActions(realm, id);
    if (!requiredActions.isEmpty()) {
        List<String> actions = new LinkedList<>();
        actions.addAll(requiredActions);
        userRep.setRequiredActions(actions);
    }
    // Social links
    Set<FederatedIdentityModel> socialLinks = session.userFederatedStorage().getFederatedIdentities(id, realm);
    List<FederatedIdentityRepresentation> socialLinkReps = new ArrayList<>();
    for (FederatedIdentityModel socialLink : socialLinks) {
        socialLinkReps.add(exportSocialLink(socialLink));
    }
    if (!socialLinkReps.isEmpty()) {
        userRep.setFederatedIdentities(socialLinkReps);
    }
    // Role mappings (only when requested via options)
    if (options.isGroupsAndRolesIncluded()) {
        Set<RoleModel> roles = session.userFederatedStorage().getRoleMappings(realm, id);
        List<String> realmRoleNames = new ArrayList<>();
        Map<String, List<String>> clientRoleNames = new HashMap<>();
        for (RoleModel role : roles) {
            if (role.getContainer() instanceof RealmModel) {
                realmRoleNames.add(role.getName());
            } else {
                // Client-level role: bucket role names per client id.
                ClientModel client = (ClientModel) role.getContainer();
                String clientId = client.getClientId();
                List<String> currentClientRoles = clientRoleNames.get(clientId);
                if (currentClientRoles == null) {
                    currentClientRoles = new ArrayList<>();
                    clientRoleNames.put(clientId, currentClientRoles);
                }
                currentClientRoles.add(role.getName());
            }
        }
        if (!realmRoleNames.isEmpty()) {
            userRep.setRealmRoles(realmRoleNames);
        }
        if (!clientRoleNames.isEmpty()) {
            userRep.setClientRoles(clientRoleNames);
        }
    }
    // Credentials (set unconditionally, even when empty)
    List<CredentialModel> creds = session.userFederatedStorage().getStoredCredentials(realm, id);
    List<CredentialRepresentation> credReps = new ArrayList<>();
    for (CredentialModel cred : creds) {
        credReps.add(exportCredential(cred));
    }
    userRep.setCredentials(credReps);
    // Grants / consents
    List<UserConsentModel> consents = session.users().getConsents(realm, id);
    LinkedList<UserConsentRepresentation> consentReps = new LinkedList<>();
    for (UserConsentModel consent : consents) {
        consentReps.add(ModelToRepresentation.toRepresentation(consent));
    }
    if (!consentReps.isEmpty()) {
        userRep.setClientConsents(consentReps);
    }
    // Not Before
    int notBefore = session.userFederatedStorage().getNotBeforeOfUser(realm, userRep.getId());
    userRep.setNotBefore(notBefore);
    // Group membership paths (only when requested via options)
    if (options.isGroupsAndRolesIncluded()) {
        List<String> groups = new LinkedList<>();
        for (GroupModel group : session.userFederatedStorage().getGroups(realm, id)) {
            groups.add(ModelToRepresentation.buildGroupPath(group));
        }
        userRep.setGroups(groups);
    }
    return userRep;
}
}
| |
/**
*Licensed to the Apache Software Foundation (ASF) under one
*or more contributor license agreements. See the NOTICE file
*distributed with this work for additional information
*regarding copyright ownership. The ASF licenses this file
 *to you under the Apache License, Version 2.0 (the
 *"License"); you may not use this file except in compliance
*with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*Unless required by applicable law or agreed to in writing, software
*distributed under the License is distributed on an "AS IS" BASIS,
*WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*See the License for the specific language governing permissions and
*limitations under the License.
*/
package org.apache.gora.cassandra.example.generated.AvroSerialization;
/** This Object is created to used as Cassandra Key to test cassandra data store, Cassandra Key can be used to define partition keys, clustering keys. */
public class CassandraKey extends org.apache.gora.persistency.impl.PersistentBase implements org.apache.avro.specific.SpecificRecord, org.apache.gora.persistency.Persistent {
  /** Avro schema for this record: nullable string 'url' and long 'timestamp'. */
  public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"CassandraKey\",\"namespace\":\"org.apache.gora.cassandra.example.generated.AvroSerialization\",\"doc\":\"This Object is created to used as Cassandra Key to test cassandra data store, Cassandra Key can be used to define partition keys, clustering keys.\",\"fields\":[{\"name\":\"url\",\"type\":[\"null\",\"string\"],\"default\":null},{\"name\":\"timestamp\",\"type\":\"long\",\"default\":0}],\"default\":null}");
  private static final long serialVersionUID = -4231222814786458061L;
  /** Enum containing all data bean's fields. */
  public static enum Field {
    URL(0, "url"),
    TIMESTAMP(1, "timestamp"),
    ;
    /**
     * Field's index.
     */
    private int index;
    /**
     * Field's name.
     */
    private String name;
    /**
     * Field's constructor
     * @param index field's index.
     * @param name field's name.
     */
    Field(int index, String name) {this.index=index;this.name=name;}
    /**
     * Gets field's index.
     * @return int field's index.
     */
    public int getIndex() {return index;}
    /**
     * Gets field's name.
     * @return String field's name.
     */
    public String getName() {return name;}
    /**
     * Gets field's attributes to string.
     * @return String field's attributes to string.
     */
    public String toString() {return name;}
  };
  /** Field names in schema order; order must match the indices used by get/put. */
  public static final String[] _ALL_FIELDS = {
    "url",
    "timestamp",
  };
  /**
   * Gets the total field count.
   * @return int field count
   */
  public int getFieldsCount() {
    return CassandraKey._ALL_FIELDS.length;
  }
  private java.lang.CharSequence url;
  private long timestamp;
  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
  // Used by DatumWriter. Applications should not call.
  public java.lang.Object get(int field$) {
    switch (field$) {
    case 0: return this.url;
    case 1: return this.timestamp;
    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
    }
  }
  // Used by DatumReader. Applications should not call.
  @SuppressWarnings(value="unchecked")
  public void put(int field$, java.lang.Object value) {
    switch (field$) {
    case 0: this.url = (java.lang.CharSequence)(value); break;
    case 1: this.timestamp = (java.lang.Long)(value); break;
    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
    }
  }
  /**
   * Gets the value of the 'url' field.
   */
  public java.lang.CharSequence getUrl() {
    return url;
  }
  /**
   * Sets the value of the 'url' field.
   * @param value the value to set.
   */
  public void setUrl(java.lang.CharSequence value) {
    this.url = value;
    setDirty(0);
  }
  /**
   * Checks the dirty status of the 'url' field. A field is dirty if it represents a change that has not yet been written to the database.
   * @return true if the field has been modified since the last write.
   */
  public boolean isUrlDirty() {
    return isDirty(0);
  }
  /**
   * Gets the value of the 'timestamp' field.
   */
  public java.lang.Long getTimestamp() {
    return timestamp;
  }
  /**
   * Sets the value of the 'timestamp' field.
   * @param value the value to set.
   */
  public void setTimestamp(java.lang.Long value) {
    this.timestamp = value;
    setDirty(1);
  }
  /**
   * Checks the dirty status of the 'timestamp' field. A field is dirty if it represents a change that has not yet been written to the database.
   * @return true if the field has been modified since the last write.
   */
  public boolean isTimestampDirty() {
    return isDirty(1);
  }
  /** Creates a new CassandraKey RecordBuilder */
  public static org.apache.gora.cassandra.example.generated.AvroSerialization.CassandraKey.Builder newBuilder() {
    return new org.apache.gora.cassandra.example.generated.AvroSerialization.CassandraKey.Builder();
  }
  /** Creates a new CassandraKey RecordBuilder by copying an existing Builder */
  public static org.apache.gora.cassandra.example.generated.AvroSerialization.CassandraKey.Builder newBuilder(org.apache.gora.cassandra.example.generated.AvroSerialization.CassandraKey.Builder other) {
    return new org.apache.gora.cassandra.example.generated.AvroSerialization.CassandraKey.Builder(other);
  }
  /** Creates a new CassandraKey RecordBuilder by copying an existing CassandraKey instance */
  public static org.apache.gora.cassandra.example.generated.AvroSerialization.CassandraKey.Builder newBuilder(org.apache.gora.cassandra.example.generated.AvroSerialization.CassandraKey other) {
    return new org.apache.gora.cassandra.example.generated.AvroSerialization.CassandraKey.Builder(other);
  }
  @Override
  public org.apache.gora.cassandra.example.generated.AvroSerialization.CassandraKey clone() {
    return newBuilder(this).build();
  }
  /**
   * Copies the full capacity of {@code input} into a new read-only buffer whose
   * position and limit match {@code input}'s. The caller's buffer is never mutated.
   *
   * <p>Fix: the previous implementation called {@code input.reset()}, which throws
   * {@link java.nio.InvalidMarkException} when no mark has been set, and temporarily
   * clobbered the input's position/limit. Working on a {@code duplicate()} avoids both.
   */
  private static java.nio.ByteBuffer deepCopyToReadOnlyBuffer(
      java.nio.ByteBuffer input) {
    // duplicate() shares content but has independent position/limit/mark,
    // so we can freely rewind it without touching the caller's view.
    java.nio.ByteBuffer source = input.duplicate();
    source.rewind();
    source.limit(source.capacity());
    java.nio.ByteBuffer copy = java.nio.ByteBuffer.allocate(input.capacity());
    copy.put(source);
    copy.position(input.position());
    copy.limit(input.limit());
    return copy.asReadOnlyBuffer();
  }
  /**
   * RecordBuilder for CassandraKey instances.
   */
  public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<CassandraKey>
    implements org.apache.avro.data.RecordBuilder<CassandraKey> {
    private java.lang.CharSequence url;
    private long timestamp;
    /** Creates a new Builder */
    private Builder() {
      super(org.apache.gora.cassandra.example.generated.AvroSerialization.CassandraKey.SCHEMA$);
    }
    /** Creates a Builder by copying an existing Builder */
    private Builder(org.apache.gora.cassandra.example.generated.AvroSerialization.CassandraKey.Builder other) {
      super(other);
    }
    /** Creates a Builder by copying an existing CassandraKey instance */
    private Builder(org.apache.gora.cassandra.example.generated.AvroSerialization.CassandraKey other) {
      super(org.apache.gora.cassandra.example.generated.AvroSerialization.CassandraKey.SCHEMA$);
      if (isValidValue(fields()[0], other.url)) {
        this.url = (java.lang.CharSequence) data().deepCopy(fields()[0].schema(), other.url);
        fieldSetFlags()[0] = true;
      }
      if (isValidValue(fields()[1], other.timestamp)) {
        this.timestamp = (java.lang.Long) data().deepCopy(fields()[1].schema(), other.timestamp);
        fieldSetFlags()[1] = true;
      }
    }
    /** Gets the value of the 'url' field */
    public java.lang.CharSequence getUrl() {
      return url;
    }
    /** Sets the value of the 'url' field */
    public org.apache.gora.cassandra.example.generated.AvroSerialization.CassandraKey.Builder setUrl(java.lang.CharSequence value) {
      validate(fields()[0], value);
      this.url = value;
      fieldSetFlags()[0] = true;
      return this;
    }
    /** Checks whether the 'url' field has been set */
    public boolean hasUrl() {
      return fieldSetFlags()[0];
    }
    /** Clears the value of the 'url' field */
    public org.apache.gora.cassandra.example.generated.AvroSerialization.CassandraKey.Builder clearUrl() {
      url = null;
      fieldSetFlags()[0] = false;
      return this;
    }
    /** Gets the value of the 'timestamp' field */
    public java.lang.Long getTimestamp() {
      return timestamp;
    }
    /** Sets the value of the 'timestamp' field */
    public org.apache.gora.cassandra.example.generated.AvroSerialization.CassandraKey.Builder setTimestamp(long value) {
      validate(fields()[1], value);
      this.timestamp = value;
      fieldSetFlags()[1] = true;
      return this;
    }
    /** Checks whether the 'timestamp' field has been set */
    public boolean hasTimestamp() {
      return fieldSetFlags()[1];
    }
    /** Clears the value of the 'timestamp' field */
    public org.apache.gora.cassandra.example.generated.AvroSerialization.CassandraKey.Builder clearTimestamp() {
      fieldSetFlags()[1] = false;
      return this;
    }
    @Override
    public CassandraKey build() {
      try {
        CassandraKey record = new CassandraKey();
        // Fields that were never explicitly set fall back to the schema defaults.
        record.url = fieldSetFlags()[0] ? this.url : (java.lang.CharSequence) defaultValue(fields()[0]);
        record.timestamp = fieldSetFlags()[1] ? this.timestamp : (java.lang.Long) defaultValue(fields()[1]);
        return record;
      } catch (Exception e) {
        throw new org.apache.avro.AvroRuntimeException(e);
      }
    }
  }
  public CassandraKey.Tombstone getTombstone(){
    return TOMBSTONE;
  }
  public CassandraKey newInstance(){
    return newBuilder().build();
  }
  /** Shared tombstone instance; tombstones mark deleted records and reject access. */
  private static final Tombstone TOMBSTONE = new Tombstone();
  public static final class Tombstone extends CassandraKey implements org.apache.gora.persistency.Tombstone {
    private Tombstone() { }
    /**
     * Gets the value of the 'url' field.
     */
    public java.lang.CharSequence getUrl() {
      throw new java.lang.UnsupportedOperationException("Get is not supported on tombstones");
    }
    /**
     * Sets the value of the 'url' field.
     * @param value the value to set.
     */
    public void setUrl(java.lang.CharSequence value) {
      throw new java.lang.UnsupportedOperationException("Set is not supported on tombstones");
    }
    /**
     * Checks the dirty status of the 'url' field. A field is dirty if it represents a change that has not yet been written to the database.
     */
    public boolean isUrlDirty() {
      throw new java.lang.UnsupportedOperationException("IsDirty is not supported on tombstones");
    }
    /**
     * Gets the value of the 'timestamp' field.
     */
    public java.lang.Long getTimestamp() {
      throw new java.lang.UnsupportedOperationException("Get is not supported on tombstones");
    }
    /**
     * Sets the value of the 'timestamp' field.
     * @param value the value to set.
     */
    public void setTimestamp(java.lang.Long value) {
      throw new java.lang.UnsupportedOperationException("Set is not supported on tombstones");
    }
    /**
     * Checks the dirty status of the 'timestamp' field. A field is dirty if it represents a change that has not yet been written to the database.
     */
    public boolean isTimestampDirty() {
      throw new java.lang.UnsupportedOperationException("IsDirty is not supported on tombstones");
    }
  }
  // Typed writer/reader (the raw types previously used here produced unchecked warnings).
  private static final org.apache.avro.io.DatumWriter<CassandraKey>
    DATUM_WRITER$ = new org.apache.avro.specific.SpecificDatumWriter<CassandraKey>(SCHEMA$);
  private static final org.apache.avro.io.DatumReader<CassandraKey>
    DATUM_READER$ = new org.apache.avro.specific.SpecificDatumReader<CassandraKey>(SCHEMA$);
  /**
   * Writes AVRO data bean to output stream in the form of AVRO Binary encoding format. This will transform
   * AVRO data bean from its Java object form to its serializable form.
   *
   * @param out java.io.ObjectOutput output stream to write data bean in serializable form
   */
  @Override
  public void writeExternal(java.io.ObjectOutput out)
    throws java.io.IOException {
    // Dirty-bit bytes are written first, followed by the Avro-encoded fields.
    out.write(super.getDirtyBytes().array());
    DATUM_WRITER$.write(this, org.apache.avro.io.EncoderFactory.get()
      .directBinaryEncoder((java.io.OutputStream) out,
        null));
  }
  /**
   * Reads AVRO data bean from input stream in its AVRO Binary encoding format to Java object format.
   * This will transform AVRO data bean from its serializable form to deserialized Java object form.
   *
   * @param in java.io.ObjectInput input stream to read data bean in serializable form
   */
  @Override
  public void readExternal(java.io.ObjectInput in)
    throws java.io.IOException {
    byte[] __g__dirty = new byte[getFieldsCount()];
    // readFully guarantees the whole dirty-bit header is consumed; the previous
    // in.read(...) call ignored its return value and could accept a short read.
    in.readFully(__g__dirty);
    super.setDirtyBytes(java.nio.ByteBuffer.wrap(__g__dirty));
    DATUM_READER$.read(this, org.apache.avro.io.DecoderFactory.get()
      .directBinaryDecoder((java.io.InputStream) in,
        null));
  }
}
| |
package io.github.mthli.Bitocle.Main;
import android.app.ActionBar;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.database.SQLException;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.SparseBooleanArray;
import android.view.*;
import android.widget.*;
import com.devspark.progressfragment.ProgressFragment;
import com.oguzdev.circularfloatingactionmenu.library.FloatingActionButton;
import com.oguzdev.circularfloatingactionmenu.library.FloatingActionMenu;
import com.oguzdev.circularfloatingactionmenu.library.SubActionButton;
import io.github.mthli.Bitocle.Bookmark.BookmarkItem;
import io.github.mthli.Bitocle.Bookmark.BookmarkItemAdapter;
import io.github.mthli.Bitocle.Bookmark.BookmarkTask;
import io.github.mthli.Bitocle.Commit.CommitItem;
import io.github.mthli.Bitocle.Commit.CommitItemAdapter;
import io.github.mthli.Bitocle.Commit.CommitTask;
import io.github.mthli.Bitocle.Content.ContentItem;
import io.github.mthli.Bitocle.Content.ContentItemAdapter;
import io.github.mthli.Bitocle.Content.ContentTask;
import io.github.mthli.Bitocle.Database.Bookmark.BAction;
import io.github.mthli.Bitocle.Database.Bookmark.Bookmark;
import io.github.mthli.Bitocle.R;
import io.github.mthli.Bitocle.Repo.*;
import io.github.mthli.Bitocle.Watch.WatchItem;
import io.github.mthli.Bitocle.Watch.WatchItemAdapter;
import io.github.mthli.Bitocle.Watch.WatchTask;
import io.github.mthli.Bitocle.WebView.MimeType;
import io.github.mthli.Bitocle.WebView.WebViewActivity;
import org.eclipse.egit.github.core.RepositoryContents;
import org.eclipse.egit.github.core.client.GitHubClient;
import uk.co.senab.actionbarpulltorefresh.library.ActionBarPullToRefresh;
import uk.co.senab.actionbarpulltorefresh.library.PullToRefreshLayout;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
public class MainFragment extends ProgressFragment {
public static final int REPO_ID = 0;
public static final int BOOKMARK_ID = 1;
public static final int CONTENT_ID = 2;
public static final int HISTORY_ID = 3;
public static final int COMMIT_ID = 4;
public int CURRENT_ID = 0;
private View view;
private ListView listView;
private ActionBar actionBar;
private String titleName;
private String titlePath;
private int refreshType = 0;
private boolean refreshStatus = false;
private boolean multiChoiceStatus = false;
private boolean searchViewStatus = false;
private int location = 0;
private RepoTask repoTask;
private BookmarkTask bookmarkTask;
private ContentTask contentTask;
private AddTask addTask;
private WatchTask watchTask;
private CommitTask commitTask;
private GitHubClient gitHubClient;
private String repoOwner;
private String repoName;
private String repoPath;
private RepoItemAdapter repoItemAdapter;
private List<RepoItem> repoItemList = new ArrayList<RepoItem>();
private BookmarkItemAdapter bookmarkItemAdapter;
private List<BookmarkItem> bookmarkItemList = new ArrayList<BookmarkItem>();
private ContentItemAdapter contentItemAdapter;
private List<ContentItem> contentItemList = new ArrayList<ContentItem>();
private List<List<ContentItem>> contentItemListBuffer = new ArrayList<List<ContentItem>>();
private WatchItemAdapter watchItemAdapter;
private List<WatchItem> watchItemList = new ArrayList<WatchItem>();
private CommitItemAdapter commitItemAdapter;
private List<CommitItem> commitItemList = new ArrayList<CommitItem>();
private FloatingActionMenu actionMenu;
private PullToRefreshLayout pullToRefreshLayout;
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
setContentView(R.layout.main_fragment);
view = getContentView();
setContentShown(true);
listView = (ListView) view.findViewById(R.id.main_fragment_listview);
actionBar = getActivity().getActionBar();
ViewGroup viewGroup = (ViewGroup) view;
pullToRefreshLayout = new PullToRefreshLayout(viewGroup.getContext());
ActionBarPullToRefresh.from(getActivity())
.insertLayoutInto(viewGroup)
.setup(pullToRefreshLayout);
ImageView imageView = new ImageView(view.getContext());
imageView.setImageDrawable(getResources().getDrawable(R.drawable.ic_action_plus));
final FloatingActionButton actionButton = new FloatingActionButton.Builder(getActivity())
.setContentView(imageView)
.build();
SubActionButton.Builder builder = new SubActionButton.Builder(getActivity());
ImageView imageViewHistory = new ImageView(view.getContext());
imageViewHistory.setImageDrawable(getResources().getDrawable(R.drawable.ic_action_history));
SubActionButton buttonHistory = builder.setContentView(imageViewHistory).build();
ImageView imageViewAdd = new ImageView(view.getContext());
imageViewAdd.setImageDrawable(getResources().getDrawable(R.drawable.ic_action_add));
SubActionButton buttonAdd = builder.setContentView(imageViewAdd).build();
ImageView imageViewRefresh = new ImageView(view.getContext());
imageViewRefresh.setImageDrawable(getResources().getDrawable(R.drawable.ic_action_am_refresh));
SubActionButton buttonRefresh = builder.setContentView(imageViewRefresh).build();
actionMenu = new FloatingActionMenu.Builder(getActivity())
.addSubActionView(buttonRefresh)
.addSubActionView(buttonAdd)
.addSubActionView(buttonHistory)
.attachTo(actionButton)
.build();
buttonRefresh.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (!refreshStatus) {
switch (CURRENT_ID) {
case REPO_ID:
refreshType = RefreshType.REPO_BUTTON;
repoTask = new RepoTask(MainFragment.this);
repoTask.execute();
break;
case BOOKMARK_ID:
refreshType = RefreshType.BOOKMARK_BUTTON;
bookmarkTask = new BookmarkTask(MainFragment.this);
bookmarkTask.execute();
break;
case CONTENT_ID:
refreshType = RefreshType.CONTENT_BUTTON;
contentTask = new ContentTask(MainFragment.this);
contentTask.execute();
break;
case HISTORY_ID:
refreshType = RefreshType.WATCH_BUTTON;
watchTask = new WatchTask(MainFragment.this);
watchTask.execute();
break;
case COMMIT_ID:
refreshType = RefreshType.COMMIT_BUTTON;
commitTask = new CommitTask(MainFragment.this, repoItemList.get(location));
commitTask.execute();
break;
default:
break;
}
}
if (searchViewStatus) {
searchViewDown();
}
actionMenu.close(true);
}
});
buttonAdd.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (!refreshStatus && !multiChoiceStatus) {
searchViewUp();
}
actionMenu.close(true);
}
});
buttonHistory.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (CURRENT_ID != HISTORY_ID) {
if (repoTask != null && repoTask.getStatus() == AsyncTask.Status.RUNNING) {
repoTask.cancel(true);
}
if (bookmarkTask != null && bookmarkTask.getStatus() == AsyncTask.Status.RUNNING) {
bookmarkTask.cancel(true);
}
if (contentTask != null && contentTask.getStatus() == AsyncTask.Status.RUNNING) {
contentTask.cancel(true);
}
if (commitTask != null && commitTask.getStatus() == AsyncTask.Status.RUNNING) {
commitTask.cancel(true);
}
setContentShown(true);
actionBar.setTitle(R.string.watch_label);
actionBar.setSubtitle(null);
actionBar.setDisplayHomeAsUpEnabled(true);
listView.setAdapter(watchItemAdapter);
watchItemAdapter.notifyDataSetChanged();
refreshType = RefreshType.WATCH_FIRST;
CURRENT_ID = HISTORY_ID;
refreshStatus = false; //
watchTask = new WatchTask(MainFragment.this);
watchTask.execute();
}
actionMenu.close(true);
}
});
SharedPreferences sharedPreferences = getActivity().getSharedPreferences(getString(R.string.login_sp), Context.MODE_PRIVATE);
String oAuth = sharedPreferences.getString(getString(R.string.login_sp_oauth), null);
gitHubClient = new GitHubClient();
gitHubClient.setOAuth2Token(oAuth);
repoItemAdapter = new RepoItemAdapter(
MainFragment.this,
view.getContext(),
R.layout.repo_item,
repoItemList
);
repoItemAdapter.notifyDataSetChanged();
listView.setAdapter(repoItemAdapter);
bookmarkItemAdapter = new BookmarkItemAdapter(
view.getContext(),
R.layout.bookmark_item,
bookmarkItemList
);
bookmarkItemAdapter.notifyDataSetChanged();
contentItemAdapter = new ContentItemAdapter(
view.getContext(),
R.layout.content_item,
contentItemList
);
contentItemAdapter.notifyDataSetChanged();
watchItemAdapter = new WatchItemAdapter(
MainFragment.this,
view.getContext(),
R.layout.watch_item,
watchItemList
);
watchItemAdapter.notifyDataSetChanged();
commitItemAdapter = new CommitItemAdapter(
view.getContext(),
R.layout.commit_item,
commitItemList
);
commitItemAdapter.notifyDataSetChanged();
final Intent intent = getActivity().getIntent();
if (intent.getBooleanExtra(getString(R.string.login_intent), false)) {
refreshType = RefreshType.REPO_FIRST;
repoTask = new RepoTask(this);
repoTask.execute();
} else {
refreshType = RefreshType.REPO_ALREADY;
repoTask = new RepoTask(this);
repoTask.execute();
}
CURRENT_ID = REPO_ID;
listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
searchViewDown();
actionMenu.close(true);
switch (CURRENT_ID) {
case REPO_ID:
RepoItem repoItem = repoItemList.get(position);
repoOwner = repoItem.getOwner();
repoName = repoItem.getTitle();
repoPath = getString(R.string.repo_path_root);
titleName = repoName;
titlePath = repoName + getString(R.string.repo_path_root);
actionBar.setTitle(titleName);
actionBar.setSubtitle(titlePath);
actionBar.setDisplayHomeAsUpEnabled(true);
contentItemListBuffer.clear();
listView.setAdapter(contentItemAdapter);
contentItemAdapter.notifyDataSetChanged();
refreshType = RefreshType.CONTENT_FIRST;
CURRENT_ID = CONTENT_ID;
contentTask = new ContentTask(MainFragment.this);
contentTask.execute();
break;
case BOOKMARK_ID:
BookmarkItem bookmarkItem = bookmarkItemList.get(position);
repoOwner = bookmarkItem.getRepoOwner();
repoName = bookmarkItem.getRepoName();
if (bookmarkItem.getType().equals(RepositoryContents.TYPE_DIR)) {
repoPath = bookmarkItem.getRepoPath();
titleName = repoName;
titlePath = repoName
+ getString(R.string.repo_path_root)
+ bookmarkItem.getRepoPath()
+ getString(R.string.repo_path_root);
actionBar.setTitle(titleName);
actionBar.setSubtitle(titlePath);
actionBar.setDisplayHomeAsUpEnabled(true);
listView.setAdapter(contentItemAdapter);
contentItemAdapter.notifyDataSetChanged();
refreshType = RefreshType.CONTENT_FIRST;
CURRENT_ID = CONTENT_ID;
contentTask = new ContentTask(MainFragment.this);
contentTask.execute();
} else {
if (MimeType.isUnSupport(bookmarkItem.getTitle())) {
Toast.makeText(
view.getContext(),
R.string.content_mimetype_unsupport,
Toast.LENGTH_SHORT
).show();
} else {
Intent intentToWebView = new Intent(getActivity(), WebViewActivity.class);
intentToWebView.putExtra(getString(R.string.content_intent_repoowner), repoOwner);
intentToWebView.putExtra(getString(R.string.content_intent_reponame), repoName);
intentToWebView.putExtra(getString(R.string.content_intent_filename), bookmarkItem.getTitle());
intentToWebView.putExtra(
getString(R.string.content_intent_filepath),
repoName
+ getString(R.string.repo_path_root)
+ bookmarkItem.getRepoPath()
);
intentToWebView.putExtra(getString(R.string.content_intent_sha), bookmarkItem.getSha());
startActivity(intentToWebView);
}
}
break;
case CONTENT_ID:
ContentItem contentItem = contentItemList.get(position);
if (contentItem.getType().equals(RepositoryContents.TYPE_DIR)) {
repoPath = contentItem.getRepoPath();
titleName = contentItem.getTitle();
titlePath = repoName
+ getString(R.string.repo_path_root)
+ contentItem.getRepoPath()
+ getString(R.string.repo_path_root);
actionBar.setTitle(titleName);
actionBar.setSubtitle(titlePath);
actionBar.setDisplayHomeAsUpEnabled(true);
refreshType = RefreshType.CONTENT_FIRST;
contentTask = new ContentTask(MainFragment.this);
contentTask.execute();
} else {
if (MimeType.isUnSupport(contentItem.getTitle())) {
Toast.makeText(
view.getContext(),
R.string.content_mimetype_unsupport,
Toast.LENGTH_SHORT
).show();
} else {
Intent intentToWebView = new Intent(getActivity(), WebViewActivity.class);
intentToWebView.putExtra(getString(R.string.content_intent_repoowner), repoOwner);
intentToWebView.putExtra(getString(R.string.content_intent_reponame), repoName);
intentToWebView.putExtra(getString(R.string.content_intent_filename), contentItem.getTitle());
intentToWebView.putExtra(
getString(R.string.content_intent_filepath),
repoName
+ getString(R.string.repo_path_root)
+ contentItem.getRepoPath()
);
intentToWebView.putExtra(getString(R.string.content_intent_sha), contentItem.getSha());
startActivity(intentToWebView);
}
}
break;
default:
break;
}
}
});
listView.setChoiceMode(ListView.CHOICE_MODE_MULTIPLE_MODAL);
listView.setMultiChoiceModeListener(new AbsListView.MultiChoiceModeListener() {
@Override
public void onItemCheckedStateChanged(ActionMode mode, int position, long id, boolean checked) {
Integer integer = listView.getCheckedItemCount();
mode.setTitle(integer.toString());
actionMenu.close(true);
}
@Override
public boolean onCreateActionMode(ActionMode mode, Menu menu) {
if (CURRENT_ID == BOOKMARK_ID) {
MenuInflater menuInflater = getActivity().getMenuInflater();
menuInflater.inflate(R.menu.bookmark_choice_menu, menu);
return true;
} else if (CURRENT_ID == CONTENT_ID) {
MenuInflater menuInflater = getActivity().getMenuInflater();
menuInflater.inflate(R.menu.content_choice_menu, menu);
/* Do something */
return true;
} else {
return false;
}
}
@Override
public boolean onPrepareActionMode(ActionMode mode, Menu menu) {
searchViewDown();
actionMenu.close(true);
multiChoiceStatus = true;
return true;
}
@Override
public boolean onActionItemClicked(ActionMode mode, MenuItem menuItem) {
actionMenu.close(true);
BAction bAction = new BAction(view.getContext());
try {
bAction.openDatabase(true);
} catch (SQLException s) {
Toast.makeText(
view.getContext(),
getString(R.string.bookmark_database_open_error),
Toast.LENGTH_SHORT
).show();
bAction.closeDatabase();
return false;
}
SparseBooleanArray sparseBooleanArray = listView.getCheckedItemPositions();
switch (menuItem.getItemId()) {
case R.id.content_choice_menu_add_bookmark:
for (int i = 0; i < sparseBooleanArray.size(); i++) {
if (sparseBooleanArray.valueAt(i)) {
ContentItem contentItem = contentItemAdapter.getItem(sparseBooleanArray.keyAt(i));
if (!bAction.checkBookmark(contentItem.getSha())) {
Bookmark bookmark = new Bookmark();
bookmark.setTitle(contentItem.getTitle());
SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd");
Date date = new Date();
bookmark.setDate(simpleDateFormat.format(date));
bookmark.setType(contentItem.getType());
bookmark.setRepoOwner(repoOwner);
bookmark.setRepoName(repoName);
bookmark.setRepoPath(contentItem.getRepoPath());
bookmark.setSha(contentItem.getSha());
String key = repoOwner + getString(R.string.repo_path_root) + repoName;
bookmark.setKey(key);
bAction.addBookmark(bookmark);
}
}
}
onDestroyActionMode(mode);
Toast.makeText(
view.getContext(),
getString(R.string.bookmark_add_successful),
Toast.LENGTH_SHORT
).show();
break;
case R.id.bookmark_choice_menu_remove_bookmark:
for (int i = 0; i < sparseBooleanArray.size(); i++) {
if (sparseBooleanArray.valueAt(i)) {
BookmarkItem bookmarkItem = bookmarkItemAdapter.getItem(sparseBooleanArray.keyAt(i));
bAction.unMarkBySha(bookmarkItem.getSha());
}
}
onDestroyActionMode(mode);
Toast.makeText(
view.getContext(),
getString(R.string.bookmark_remove_successful),
Toast.LENGTH_SHORT
).show();
bookmarkTask = new BookmarkTask(MainFragment.this);
bookmarkTask.execute();
break;
default:
break;
}
bAction.closeDatabase();
return true;
}
@Override
public void onDestroyActionMode(ActionMode mode) {
SparseBooleanArray sparseBooleanArray = listView.getCheckedItemPositions();
for (int i = 0; i < sparseBooleanArray.size(); i++) {
if (sparseBooleanArray.valueAt(i)) {
listView.setItemChecked(i, false);
}
}
multiChoiceStatus = false;
actionMenu.close(true);
}
});
listView.setOnScrollListener(new AbsListView.OnScrollListener() {
@Override
public void onScrollStateChanged(AbsListView view, int scrollState) {
actionMenu.close(true);
searchViewDown();
}
@Override
public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) {
/* Do nothing */
}
});
}
// Switches the main list back to the repository (root) view: resets the
// action bar, cancels background tasks belonging to the other views so their
// callbacks cannot race the switch, then starts a fresh RepoTask.
public void changeToRepo() {
    actionBar.setTitle(getString(R.string.app_name));
    actionBar.setSubtitle(null);
    actionBar.setDisplayHomeAsUpEnabled(false);
    actionBar.setHomeButtonEnabled(false);
    // Cancel any in-flight tasks for other views (addTask is not cancelled
    // here; see cancelAllTasks() for the full set).
    if (bookmarkTask != null && bookmarkTask.getStatus() == AsyncTask.Status.RUNNING) {
        bookmarkTask.cancel(true);
    }
    if (contentTask != null && contentTask.getStatus() == AsyncTask.Status.RUNNING) {
        contentTask.cancel(true);
    }
    if (watchTask != null && watchTask.getStatus() == AsyncTask.Status.RUNNING) {
        watchTask.cancel(true);
    }
    if (commitTask != null && commitTask.getStatus() == AsyncTask.Status.RUNNING) {
        commitTask.cancel(true);
    }
    setContentShown(true);
    // Leaving content view: the directory-history stack is no longer valid.
    contentItemListBuffer.clear();
    listView.setAdapter(repoItemAdapter);
    repoItemAdapter.notifyDataSetChanged();
    refreshType = RefreshType.REPO_ALREADY;
    CURRENT_ID = REPO_ID;
    repoTask = new RepoTask(MainFragment.this);
    repoTask.execute();
}
// Switches the main list to the bookmark view. Background tasks for the
// other views are cancelled first; the actual switch (action bar, adapter,
// BookmarkTask) is skipped when the bookmark view is already active.
public void changeToBookmark() {
    if (repoTask != null && repoTask.getStatus() == AsyncTask.Status.RUNNING) {
        repoTask.cancel(true);
    }
    if (contentTask != null && contentTask.getStatus() == AsyncTask.Status.RUNNING) {
        contentTask.cancel(true);
    }
    if (watchTask != null && watchTask.getStatus() == AsyncTask.Status.RUNNING) {
        watchTask.cancel(true);
    }
    if (commitTask != null && commitTask.getStatus() == AsyncTask.Status.RUNNING) {
        commitTask.cancel(true);
    }
    setContentShown(true);
    if (CURRENT_ID != BOOKMARK_ID) {
        actionBar.setTitle(getString(R.string.bookmark_label));
        actionBar.setSubtitle(null);
        actionBar.setDisplayHomeAsUpEnabled(true);
        listView.setAdapter(bookmarkItemAdapter);
        bookmarkItemAdapter.notifyDataSetChanged();
        refreshType = RefreshType.BOOKMARK_FIRST;
        CURRENT_ID = BOOKMARK_ID;
        bookmarkTask = new BookmarkTask(MainFragment.this);
        bookmarkTask.execute();
    }
}
// Switches the main list to the repository-content (file listing) view.
// Cancels every other view's background task first so stale callbacks cannot
// touch the list after the adapter swap.
public void changeToContent() {
    if (repoTask != null && repoTask.getStatus() == AsyncTask.Status.RUNNING) {
        repoTask.cancel(true);
    }
    if (bookmarkTask != null && bookmarkTask.getStatus() == AsyncTask.Status.RUNNING) {
        bookmarkTask.cancel(true);
    }
    if (contentTask != null && contentTask.getStatus() == AsyncTask.Status.RUNNING) {
        contentTask.cancel(true);
    }
    if (watchTask != null && watchTask.getStatus() == AsyncTask.Status.RUNNING) {
        watchTask.cancel(true);
    }
    if (commitTask != null && commitTask.getStatus() == AsyncTask.Status.RUNNING) {
        commitTask.cancel(true);
    }
    actionBar.setTitle(titleName);
    actionBar.setSubtitle(titlePath);
    actionBar.setDisplayHomeAsUpEnabled(true);
    refreshType = RefreshType.CONTENT_FIRST;
    CURRENT_ID = CONTENT_ID;
    listView.setAdapter(contentItemAdapter);
    // BUG FIX: previously notified commitItemAdapter here (copy-paste error);
    // the list now displays contentItemAdapter, so that is the adapter that
    // must be told its data set changed (matches changeToRepo/changeToBookmark).
    contentItemAdapter.notifyDataSetChanged();
}
// Navigates one level up in the repository content view: recomputes the
// title/path for the parent directory, pops the deepest buffered listing,
// and redisplays the previous one.
public void backToParent() {
    final String separator = getString(R.string.repo_path_root);
    final String[] segments = titlePath.split(separator);
    // The parent directory's name is the second-to-last path segment.
    titleName = segments[segments.length - 2];
    // Rebuild the path up to (and including) the parent, with a trailing separator.
    StringBuilder parentPath = new StringBuilder(segments[0]);
    for (int i = 1; i < segments.length - 1; i++) {
        parentPath.append(separator).append(segments[i]);
    }
    parentPath.append(separator);
    titlePath = parentPath.toString();
    actionBar.setTitle(titleName);
    actionBar.setSubtitle(titlePath);
    actionBar.setDisplayHomeAsUpEnabled(true);
    // Discard the listing we are leaving and restore the previous one.
    contentItemListBuffer.remove(contentItemListBuffer.size() - 1);
    contentItemList.clear();
    contentItemList.addAll(contentItemListBuffer.get(contentItemListBuffer.size() - 1));
    contentItemAdapter.notifyDataSetChanged();
}
// Expands the activity-shared SearchView in the action bar and installs a
// query listener: submitting a query shows the refresh spinner, collapses
// the search UI, and starts an AddTask; incremental text changes are ignored.
private void searchViewUp() {
    actionBar.setDisplayHomeAsUpEnabled(true);
    searchViewStatus = true;
    MenuItem searchItem = MainActivity.searchItem;
    SearchView searchView = MainActivity.searchView;
    searchView.onActionViewExpanded();
    searchItem.setVisible(true);
    searchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() {
        @Override
        public boolean onQueryTextSubmit(String query) {
            pullToRefreshLayout.setRefreshing(true);
            actionMenu.close(true);
            searchViewDown();
            addTask = new AddTask(MainFragment.this, query);
            addTask.execute();
            return false;
        }
        @Override
        public boolean onQueryTextChange(String newText) {
            /* Do nothing */
            return false;
        }
    });
}
// Collapses the activity-shared SearchView and hides its menu item. The
// home/up affordance is removed only when the repository (root) view is
// active, since the other views still need it for navigation.
public void searchViewDown() {
    if (CURRENT_ID == REPO_ID) {
        actionBar.setDisplayHomeAsUpEnabled(false);
    }
    searchViewStatus = false;
    MenuItem searchItem = MainActivity.searchItem;
    SearchView searchView = MainActivity.searchView;
    searchView.onActionViewCollapsed();
    searchItem.setVisible(false);
}
// Cancels every background task this fragment may have started. Unlike the
// changeTo*() methods, this also covers addTask (the search task).
public void cancelAllTasks() {
    if (repoTask != null && repoTask.getStatus() == AsyncTask.Status.RUNNING) {
        repoTask.cancel(true);
    }
    if (bookmarkTask != null && bookmarkTask.getStatus() == AsyncTask.Status.RUNNING) {
        bookmarkTask.cancel(true);
    }
    if (contentTask != null && contentTask.getStatus() == AsyncTask.Status.RUNNING) {
        contentTask.cancel(true);
    }
    if (addTask != null && addTask.getStatus() == AsyncTask.Status.RUNNING) {
        addTask.cancel(true);
    }
    if (watchTask != null && watchTask.getStatus() == AsyncTask.Status.RUNNING) {
        watchTask.cancel(true);
    }
    if (commitTask != null && commitTask.getStatus() == AsyncTask.Status.RUNNING) {
        commitTask.cancel(true);
    }
}
// ---------------------------------------------------------------------------
// Simple accessors / mutators: expose the fragment's widgets, adapters,
// backing lists and task handles to the hosting activity and async tasks.
// ---------------------------------------------------------------------------
public ListView getListView() {
    return listView;
}
public ActionBar getActionBar() {
    return actionBar;
}
public void setLocation(int location) {
    this.location = location;
}
public int getRefreshType() {
    return refreshType;
}
public void setRefreshType(int refreshType) {
    this.refreshType = refreshType;
}
// True while the action-bar SearchView is expanded (see searchViewUp/Down).
public boolean getSearchViewStatus() {
    return searchViewStatus;
}
public void setRefreshStatus(boolean refreshStatus) {
    this.refreshStatus = refreshStatus;
}
public void setRepoTask(RepoTask repoTask) {
    this.repoTask = repoTask;
}
public void setCommitTask(CommitTask commitTask) {
    this.commitTask = commitTask;
}
public GitHubClient getGitHubClient() {
    return gitHubClient;
}
public String getRepoOwner() {
    return repoOwner;
}
public String getRepoName() {
    return repoName;
}
public String getRepoPath() {
    return repoPath;
}
public RepoItemAdapter getRepoItemAdapter() {
    return repoItemAdapter;
}
public List<RepoItem> getRepoItemList() {
    return repoItemList;
}
public BookmarkItemAdapter getBookmarkItemAdapter() {
    return bookmarkItemAdapter;
}
public List<BookmarkItem> getBookmarkItemList() {
    return bookmarkItemList;
}
public ContentItemAdapter getContentItemAdapter() {
    return contentItemAdapter;
}
public List<ContentItem> getContentItemList() {
    return contentItemList;
}
// History stack of previously-shown directory listings; consumed by backToParent().
public List<List<ContentItem>> getContentItemListBuffer() {
    return contentItemListBuffer;
}
public WatchItemAdapter getWatchItemAdapter() {
    return watchItemAdapter;
}
public List<WatchItem> getWatchItemList() {
    return watchItemList;
}
public CommitItemAdapter getCommitItemAdapter() {
    return commitItemAdapter;
}
public List<CommitItem> getCommitItemList() {
    return commitItemList;
}
public FloatingActionMenu getActionMenu() {
    return actionMenu;
}
public PullToRefreshLayout getPullToRefreshLayout() {
    return pullToRefreshLayout;
}
}
| |
package com.fasterxml.jackson.core.read;
import com.fasterxml.jackson.core.*;
import com.fasterxml.jackson.core.testsupport.MockDataInput;
import com.fasterxml.jackson.core.util.JsonParserDelegate;
import java.io.*;
import java.net.URL;
import java.util.*;
/**
* Set of basic unit tests for verifying that the basic parser
* functionality works as expected.
*/
@SuppressWarnings("resource")
public class JsonParserTest extends BaseTest
{
// Verifies that a parser feature can be toggled via enable()/disable() as
// well as configure(), and that isEnabled() tracks each change.
public void testConfig() throws Exception
{
    final JsonParser.Feature feature = JsonParser.Feature.AUTO_CLOSE_SOURCE;
    JsonParser p = createParserUsingReader("[ ]");
    p.enable(feature);
    assertTrue(p.isEnabled(feature));
    p.disable(feature);
    assertFalse(p.isEnabled(feature));
    // configure(f, state) must behave exactly like enable()/disable()
    p.configure(feature, true);
    assertTrue(p.isEnabled(feature));
    p.configure(feature, false);
    assertFalse(p.isEnabled(feature));
    p.close();
}
// Field-name interning must behave the same for byte-based (stream) and
// char-based (reader) parsers; distinct field names per call avoid any
// cross-call interning interference.
public void testInterningWithStreams() throws Exception
{
    _testIntern(true, true, "a");
    _testIntern(true, false, "b");
}
public void testInterningWithReaders() throws Exception
{
    _testIntern(false, true, "c");
    _testIntern(false, false, "d");
}
// Verifies that JsonFactory.Feature.INTERN_FIELD_NAMES controls whether a
// decoded field name is the interned String (identity-equal to the source
// literal) or a freshly-constructed, merely value-equal instance.
private void _testIntern(boolean useStream, boolean enableIntern, String expName) throws IOException
{
    JsonFactory f = new JsonFactory();
    f.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, enableIntern);
    assertEquals(enableIntern, f.isEnabled(JsonFactory.Feature.INTERN_FIELD_NAMES));
    final String JSON = "{ \""+expName+"\" : 1}";
    JsonParser p = useStream ?
        createParserUsingStream(f, JSON, "UTF-8") : createParserUsingReader(f, JSON);
    assertToken(JsonToken.START_OBJECT, p.nextToken());
    assertToken(JsonToken.FIELD_NAME, p.nextToken());
    // needs to be same of course
    String actName = p.currentName();
    assertEquals(expName, actName);
    if (enableIntern) {
        // interning on: must be the very same instance as the literal
        assertSame(expName, actName);
    } else {
        // interning off: equal value but a distinct instance
        assertNotSame(expName, actName);
    }
    p.close();
}
/**
 * This basic unit test verifies that the example given in the JSON
 * specification (RFC-4627 or later) is properly parsed at
 * high level, without verifying values.
 */
public void testSpecExampleSkipping() throws Exception
{
    _doTestSpec(false);
}
/**
 * Unit test that verifies that the spec example JSON is completely
 * parsed, and proper values are given for contents of all
 * events/tokens.
 */
public void testSpecExampleFully() throws Exception
{
    _doTestSpec(true);
}
/**
 * Unit test that verifies that the 3 basic keywords (null, true, false)
 * are properly parsed in various contexts.
 */
public void testKeywords() throws Exception
{
    final String DOC = "{\n"
        +"\"key1\" : null,\n"
        +"\"key2\" : true,\n"
        +"\"key3\" : false,\n"
        +"\"key4\" : [ false, null, true ]\n"
        +"}"
        ;
    // Run against all three source kinds; DataInput cannot report columns,
    // hence checkColumn=false for it.
    JsonParser p = createParserUsingStream(JSON_FACTORY, DOC, "UTF-8");
    _testKeywords(p, true);
    p.close();
    p = createParserUsingReader(JSON_FACTORY, DOC);
    _testKeywords(p, true);
    p.close();
    p = createParserForDataInput(JSON_FACTORY, new MockDataInput(DOC));
    _testKeywords(p, false);
    p.close();
}
// Walks the keyword document token by token, asserting both the tokens and
// the parsing-context bookkeeping (entry counts, indices, current names).
// The assertion order mirrors the token stream exactly; do not reorder.
private void _testKeywords(JsonParser p, boolean checkColumn) throws Exception
{
    TokenStreamContext ctxt = p.getParsingContext();
    assertEquals("/", ctxt.toString());
    assertTrue(ctxt.inRoot());
    assertFalse(ctxt.inArray());
    assertFalse(ctxt.inObject());
    assertEquals(0, ctxt.getEntryCount());
    assertEquals(0, ctxt.getCurrentIndex());
    // Before advancing to content, we should have following default state...
    assertFalse(p.hasCurrentToken());
    assertNull(p.getText());
    assertNull(p.getTextCharacters());
    assertEquals(0, p.getTextLength());
    // not sure if this is defined but:
    assertEquals(0, p.getTextOffset());
    assertToken(JsonToken.START_OBJECT, p.nextToken());
    assertEquals("/", ctxt.toString());
    assertTrue(p.hasCurrentToken());
    JsonLocation loc = p.getTokenLocation();
    assertNotNull(loc);
    assertEquals(1, loc.getLineNr());
    if (checkColumn) {
        assertEquals(1, loc.getColumnNr());
    }
    ctxt = p.getParsingContext();
    assertFalse(ctxt.inRoot());
    assertFalse(ctxt.inArray());
    assertTrue(ctxt.inObject());
    assertEquals(0, ctxt.getEntryCount());
    assertEquals(0, ctxt.getCurrentIndex());
    assertToken(JsonToken.FIELD_NAME, p.nextToken());
    verifyFieldName(p, "key1");
    assertEquals("{\"key1\"}", ctxt.toString());
    assertEquals(2, p.getTokenLocation().getLineNr());
    ctxt = p.getParsingContext();
    assertFalse(ctxt.inRoot());
    assertFalse(ctxt.inArray());
    assertTrue(ctxt.inObject());
    assertEquals(1, ctxt.getEntryCount());
    assertEquals(0, ctxt.getCurrentIndex());
    assertEquals("key1", ctxt.currentName());
    assertToken(JsonToken.VALUE_NULL, p.nextToken());
    assertEquals("key1", ctxt.currentName());
    ctxt = p.getParsingContext();
    assertEquals(1, ctxt.getEntryCount());
    assertEquals(0, ctxt.getCurrentIndex());
    assertToken(JsonToken.FIELD_NAME, p.nextToken());
    verifyFieldName(p, "key2");
    ctxt = p.getParsingContext();
    assertEquals(2, ctxt.getEntryCount());
    assertEquals(1, ctxt.getCurrentIndex());
    assertEquals("key2", ctxt.currentName());
    assertToken(JsonToken.VALUE_TRUE, p.nextToken());
    assertEquals("key2", ctxt.currentName());
    assertToken(JsonToken.FIELD_NAME, p.nextToken());
    verifyFieldName(p, "key3");
    assertToken(JsonToken.VALUE_FALSE, p.nextToken());
    assertToken(JsonToken.FIELD_NAME, p.nextToken());
    verifyFieldName(p, "key4");
    assertToken(JsonToken.START_ARRAY, p.nextToken());
    ctxt = p.getParsingContext();
    // Inside the array: no current name; "key4" is on the parent context.
    assertTrue(ctxt.inArray());
    assertNull(ctxt.currentName());
    assertEquals("key4", ctxt.getParent().currentName());
    assertToken(JsonToken.VALUE_FALSE, p.nextToken());
    assertEquals("[0]", ctxt.toString());
    assertToken(JsonToken.VALUE_NULL, p.nextToken());
    assertToken(JsonToken.VALUE_TRUE, p.nextToken());
    assertToken(JsonToken.END_ARRAY, p.nextToken());
    ctxt = p.getParsingContext();
    assertTrue(ctxt.inObject());
    assertToken(JsonToken.END_OBJECT, p.nextToken());
    ctxt = p.getParsingContext();
    assertTrue(ctxt.inRoot());
    assertNull(ctxt.currentName());
}
// Runs the skipChildren() scenario against all parser backends.
public void testSkipping() throws Exception {
    _testSkipping(MODE_INPUT_STREAM);
    _testSkipping(MODE_INPUT_STREAM_THROTTLED);
    _testSkipping(MODE_READER);
    _testSkipping(MODE_DATA_INPUT);
}
/**
 * Verifies skipChildren(): skipping the whole document, skipping on a scalar
 * (a no-op), and skipping nested arrays/objects of various shapes.
 * (Fixed: removed a stray empty statement that followed the DOC declaration.)
 */
private void _testSkipping(int mode) throws Exception
{
    // InputData has some limitations to take into consideration
    boolean isInputData = (mode == MODE_DATA_INPUT);
    String DOC = "[ 1, 3, [ true, null ], 3, { \"a\":\"b\" }, [ [ ] ], { } ]";
    JsonParser p = createParser(mode, DOC);
    // First, skipping of the whole thing
    assertToken(JsonToken.START_ARRAY, p.nextToken());
    p.skipChildren();
    assertEquals(JsonToken.END_ARRAY, p.currentToken());
    if (!isInputData) {
        // DataInput can't peek past end-of-input, so only check the trailing
        // null token for the other modes.
        JsonToken t = p.nextToken();
        if (t != null) {
            fail("Expected null at end of doc, got "+t);
        }
    }
    p.close();
    // Then individual ones
    p = createParser(mode, DOC);
    assertToken(JsonToken.START_ARRAY, p.nextToken());
    assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken());
    p.skipChildren();
    // shouldn't move: skipChildren() on a scalar is a no-op
    assertToken(JsonToken.VALUE_NUMBER_INT, p.currentToken());
    assertEquals(1, p.getIntValue());
    assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken());
    // then skip array
    assertToken(JsonToken.START_ARRAY, p.nextToken());
    p.skipChildren();
    assertToken(JsonToken.END_ARRAY, p.currentToken());
    assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken());
    assertToken(JsonToken.START_OBJECT, p.nextToken());
    p.skipChildren();
    assertToken(JsonToken.END_OBJECT, p.currentToken());
    assertToken(JsonToken.START_ARRAY, p.nextToken());
    p.skipChildren();
    assertToken(JsonToken.END_ARRAY, p.currentToken());
    assertToken(JsonToken.START_OBJECT, p.nextToken());
    p.skipChildren();
    assertToken(JsonToken.END_OBJECT, p.currentToken());
    assertToken(JsonToken.END_ARRAY, p.nextToken());
    p.close();
}
// Verifies decoding of backslash escapes inside field names, for each
// parser backend that supports it.
public void testNameEscaping() throws IOException
{
    _testNameEscaping(MODE_INPUT_STREAM);
    _testNameEscaping(MODE_READER);
    _testNameEscaping(MODE_DATA_INPUT);
}
// Table-driven check: each map entry pairs an escaped field name as it
// appears in the JSON source (key) with the decoded name expected from the
// parser (value). Insertion order is kept so failure messages can report
// which entry broke.
private void _testNameEscaping(int mode) throws IOException
{
    final Map<String,String> NAME_MAP = new LinkedHashMap<String,String>();
    NAME_MAP.put("", "");
    NAME_MAP.put("\\\"funny\\\"", "\"funny\"");
    NAME_MAP.put("\\\\", "\\");
    NAME_MAP.put("\\r", "\r");
    NAME_MAP.put("\\n", "\n");
    NAME_MAP.put("\\t", "\t");
    NAME_MAP.put("\\r\\n", "\r\n");
    NAME_MAP.put("\\\"\\\"", "\"\"");
    NAME_MAP.put("Line\\nfeed", "Line\nfeed");
    NAME_MAP.put("Yet even longer \\\"name\\\"!", "Yet even longer \"name\"!");
    int entry = 0;
    for (Map.Entry<String,String> en : NAME_MAP.entrySet()) {
        ++entry;
        String input = en.getKey();
        String expResult = en.getValue();
        final String DOC = "{ \""+input+"\":null}";
        JsonParser p = createParser(mode, DOC);
        assertToken(JsonToken.START_OBJECT, p.nextToken());
        assertToken(JsonToken.FIELD_NAME, p.nextToken());
        // first, sanity check (field name == getText())
        String act = p.currentName();
        assertEquals(act, getAndVerifyText(p));
        if (!expResult.equals(act)) {
            // report which entry failed, and whether lengths already differ
            String msg = "Failed for name #"+entry+"/"+NAME_MAP.size();
            if (expResult.length() != act.length()) {
                fail(msg+": exp length "+expResult.length()+", actual "+act.length());
            }
            assertEquals(msg, expResult, act);
        }
        assertToken(JsonToken.VALUE_NULL, p.nextToken());
        assertToken(JsonToken.END_OBJECT, p.nextToken());
        p.close();
    }
}
/**
 * Unit test that verifies that long text segments are handled
 * correctly; mostly to stress-test underlying segment-based
 * text buffer(s).
 */
public void testLongText() throws Exception {
    // lengths chosen to tease out problems with buffer allocation...
    _testLongText(310);
    _testLongText(7700);
    _testLongText(49000);
    _testLongText(96000);
}
// Generates a pseudo-random String value of (at least) LEN characters —
// seeded with LEN, so deterministic per length — writes it out with a real
// generator, then reads it back through each parser backend and compares.
private void _testLongText(int LEN) throws Exception
{
    StringBuilder sb = new StringBuilder(LEN + 100);
    Random r = new Random(LEN);
    while (sb.length() < LEN) {
        sb.append(r.nextInt());
        sb.append(" xyz foo");
        if (r.nextBoolean()) {
            sb.append(" and \"bar\"");
        } else if (r.nextBoolean()) {
            sb.append(" [whatever].... ");
        } else {
            // Let's try some more 'exotic' chars
            sb.append(" UTF-8-fu: try this {\u00E2/\u0BF8/\uA123!} (look funny?)");
        }
        if (r.nextBoolean()) {
            if (r.nextBoolean()) {
                sb.append('\n');
            } else if (r.nextBoolean()) {
                sb.append('\r');
            } else {
                sb.append("\r\n");
            }
        }
    }
    final String VALUE = sb.toString();
    // Let's use real generator to get JSON done right
    StringWriter sw = new StringWriter(LEN + (LEN >> 2));
    JsonGenerator g = JSON_FACTORY.createGenerator(ObjectWriteContext.empty(), sw);
    g.writeStartObject();
    g.writeFieldName("doc");
    g.writeString(VALUE);
    g.writeEndObject();
    g.close();
    final String DOC = sw.toString();
    for (int type = 0; type < 4; ++type) {
        JsonParser p;
        switch (type) {
        case MODE_INPUT_STREAM:
        case MODE_READER:
        case MODE_DATA_INPUT:
            p = createParser(type, DOC);
            break;
        default:
            // remaining mode: parse UTF-32BE-encoded bytes
            p = JSON_FACTORY.createParser(ObjectReadContext.empty(), encodeInUTF32BE(DOC));
        }
        assertToken(JsonToken.START_OBJECT, p.nextToken());
        assertToken(JsonToken.FIELD_NAME, p.nextToken());
        assertEquals("doc", p.currentName());
        assertToken(JsonToken.VALUE_STRING, p.nextToken());
        String act = getAndVerifyText(p);
        if (act.length() != VALUE.length()) {
            fail("Expected length "+VALUE.length()+", got "+act.length()+" (mode = "+type+")");
        }
        if (!act.equals(VALUE)) {
            fail("Long text differs");
        }
        // should still know the field name
        assertEquals("doc", p.currentName());
        assertToken(JsonToken.END_OBJECT, p.nextToken());
        // InputData somewhat special, so:
        if (type != MODE_DATA_INPUT) {
            assertNull(p.nextToken());
        }
        p.close();
    }
}
/**
 * Simple unit test that verifies that passing in a byte array
 * (with offset and length) as source works as expected: the parser
 * must honor the given window within a larger buffer.
 */
public void testBytesAsSource() throws Exception
{
    String JSON = "[ 1, 2, 3, 4 ]";
    byte[] b = JSON.getBytes("UTF-8");
    int offset = 50;
    int len = b.length;
    // Embed the document in the middle of a larger, zero-padded buffer.
    byte[] src = new byte[offset + len + offset];
    System.arraycopy(b, 0, src, offset, len);
    JsonParser p = JSON_FACTORY.createParser(ObjectReadContext.empty(), src, offset, len);
    assertToken(JsonToken.START_ARRAY, p.nextToken());
    for (int expected = 1; expected <= 4; ++expected) {
        assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken());
        assertEquals(expected, p.getIntValue());
    }
    assertToken(JsonToken.END_ARRAY, p.nextToken());
    assertNull(p.nextToken());
    p.close();
}
// Verifies that a leading UTF-8 byte-order mark is skipped, for both the
// byte-stream and DataInput backends, and documents the current offset
// reporting behavior (BOM is treated as "out of stream").
public void testUtf8BOMHandling() throws Exception
{
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    // first, write BOM:
    bytes.write(0xEF);
    bytes.write(0xBB);
    bytes.write(0xBF);
    bytes.write("[ 1 ]".getBytes("UTF-8"));
    byte[] input = bytes.toByteArray();
    JsonParser p = JSON_FACTORY.createParser(ObjectReadContext.empty(), input);
    assertEquals(JsonToken.START_ARRAY, p.nextToken());
    // should also have skipped first 3 bytes of BOM; but do we have offset available?
    /* 08-Oct-2013, tatu: Alas, due to [core#111], we have to omit BOM in calculations
     * as we do not know what the offset is due to -- may need to revisit, if this
     * discrepancy becomes an issue. For now it just means that BOM is considered
     * "out of stream" (not part of input).
     */
    JsonLocation loc = p.getTokenLocation();
    // so if BOM was consider in-stream (part of input), this should expect 3:
    assertEquals(0, loc.getByteOffset());
    assertEquals(-1, loc.getCharOffset());
    assertEquals(JsonToken.VALUE_NUMBER_INT, p.nextToken());
    assertEquals(JsonToken.END_ARRAY, p.nextToken());
    p.close();
    p = JSON_FACTORY.createParser(ObjectReadContext.empty(),
        new MockDataInput(input));
    assertEquals(JsonToken.START_ARRAY, p.nextToken());
    // same BOM, but DataInput is more restrictive so can skip but offsets
    // are not reliable...
    loc = p.getTokenLocation();
    assertNotNull(loc);
    assertEquals(JsonToken.VALUE_NUMBER_INT, p.nextToken());
    assertEquals(JsonToken.END_ARRAY, p.nextToken());
    p.close();
}
// [core#48]: file URLs containing spaces and '&' must be decoded properly
// when the parser opens the underlying file.
public void testSpacesInURL() throws Exception
{
    File f = File.createTempFile("pre fix&stuff", ".txt");
    // Don't leave the temp file behind after the test run.
    f.deleteOnExit();
    BufferedWriter w = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(f), "UTF-8"));
    try {
        w.write("{ }");
    } finally {
        // close even if the write fails, so the file isn't held open
        w.close();
    }
    URL url = f.toURI().toURL();
    JsonParser p = JSON_FACTORY.createParser(ObjectReadContext.empty(), url);
    assertToken(JsonToken.START_OBJECT, p.nextToken());
    assertToken(JsonToken.END_OBJECT, p.nextToken());
    p.close();
}
// [core#142]: a non-breaking space (0xA0) outside of String values must be
// rejected with a clear error, not silently treated as whitespace.
public void testHandlingOfInvalidSpaceByteStream() throws Exception {
    _testHandlingOfInvalidSpace(MODE_INPUT_STREAM);
    _testHandlingOfInvalidSpaceFromResource(true);
}
// [core#142]
public void testHandlingOfInvalidSpaceChars() throws Exception {
    _testHandlingOfInvalidSpace(MODE_READER);
    _testHandlingOfInvalidSpaceFromResource(false);
}
// [core#142]
public void testHandlingOfInvalidSpaceDataInput() throws Exception {
    _testHandlingOfInvalidSpace(MODE_DATA_INPUT);
}
// Parses a document with an embedded \u00A0 (non-breaking space, code 160)
// and asserts the parser fails with a message naming both the problem and
// the character code.
private void _testHandlingOfInvalidSpace(int mode) throws Exception
{
    final String JSON = "{ \u00A0 \"a\":1}";
    JsonParser p = createParser(mode, JSON);
    assertToken(JsonToken.START_OBJECT, p.nextToken());
    try {
        p.nextToken();
        fail("Should have failed");
    } catch (JsonParseException e) {
        verifyException(e, "unexpected character");
        // and correct error code
        verifyException(e, "code 160");
    }
    p.close();
}
// Same invalid-space check, but driven from a bundled resource file; reads
// until the parser hits the bad character somewhere inside the document.
// (Fixed: the resource InputStream is now closed even if parser creation or
// an unexpected assertion failure occurs before p.close().)
private void _testHandlingOfInvalidSpaceFromResource(boolean useStream) throws Exception
{
    InputStream in = getClass().getResourceAsStream("/test_0xA0.json");
    try {
        JsonParser p = useStream
            ? JSON_FACTORY.createParser(ObjectReadContext.empty(), in)
            : JSON_FACTORY.createParser(ObjectReadContext.empty(), new InputStreamReader(in, "UTF-8"));
        assertToken(JsonToken.START_OBJECT, p.nextToken());
        try {
            assertToken(JsonToken.FIELD_NAME, p.nextToken());
            assertEquals("request", p.currentName());
            assertToken(JsonToken.START_OBJECT, p.nextToken());
            assertToken(JsonToken.FIELD_NAME, p.nextToken());
            assertEquals("mac", p.currentName());
            assertToken(JsonToken.VALUE_STRING, p.nextToken());
            assertNotNull(p.getText());
            assertToken(JsonToken.FIELD_NAME, p.nextToken());
            assertEquals("data", p.currentName());
            assertToken(JsonToken.START_OBJECT, p.nextToken());
            // ... and from there on, just loop
            while (p.nextToken() != null) { }
            fail("Should have failed");
        } catch (JsonParseException e) {
            verifyException(e, "unexpected character");
            // and correct error code
            verifyException(e, "code 160");
        }
        p.close();
    } finally {
        in.close();
    }
}
// getValueAsString() coverage for each backend, both directly and through a
// JsonParserDelegate wrapper (delegate=true).
public void testGetValueAsTextBytes() throws Exception
{
    _testGetValueAsText(MODE_INPUT_STREAM, false);
    _testGetValueAsText(MODE_INPUT_STREAM, true);
}
public void testGetValueAsTextDataInput() throws Exception
{
    _testGetValueAsText(MODE_DATA_INPUT, false);
    _testGetValueAsText(MODE_DATA_INPUT, true);
}
public void testGetValueAsTextChars() throws Exception
{
    _testGetValueAsText(MODE_READER, false);
    _testGetValueAsText(MODE_READER, true);
}
// Walks one small object, asserting getValueAsString() (with and without a
// default) for every token kind: structural tokens yield null/the default,
// names and scalars yield their textual form, VALUE_NULL yields null.
private void _testGetValueAsText(int mode, boolean delegate) throws Exception
{
    String JSON = "{\"a\":1,\"b\":true,\"c\":null,\"d\":\"foo\"}";
    JsonParser p = createParser(mode, JSON);
    if (delegate) {
        p = new JsonParserDelegate(p);
    }
    assertToken(JsonToken.START_OBJECT, p.nextToken());
    assertNull(p.getValueAsString());
    assertEquals("foobar", p.getValueAsString("foobar"));
    assertToken(JsonToken.FIELD_NAME, p.nextToken());
    assertEquals("a", p.getText());
    assertEquals("a", p.getValueAsString());
    assertEquals("a", p.getValueAsString("default"));
    assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken());
    assertEquals("1", p.getValueAsString());
    assertToken(JsonToken.FIELD_NAME, p.nextToken());
    assertEquals("b", p.getValueAsString());
    assertToken(JsonToken.VALUE_TRUE, p.nextToken());
    assertEquals("true", p.getValueAsString());
    assertEquals("true", p.getValueAsString("foobar"));
    assertToken(JsonToken.FIELD_NAME, p.nextToken());
    assertEquals("c", p.getValueAsString());
    assertToken(JsonToken.VALUE_NULL, p.nextToken());
    // null token returned as Java null, as per javadoc
    assertNull(p.getValueAsString());
    assertToken(JsonToken.FIELD_NAME, p.nextToken());
    assertEquals("d", p.getValueAsString());
    assertToken(JsonToken.VALUE_STRING, p.nextToken());
    assertEquals("foo", p.getValueAsString("default"));
    assertEquals("foo", p.getValueAsString());
    assertToken(JsonToken.END_OBJECT, p.nextToken());
    assertNull(p.getValueAsString());
    // InputData can't peek into end-of-input so:
    if (mode != MODE_DATA_INPUT) {
        assertNull(p.nextToken());
    }
    p.close();
}
// getText(Writer) coverage: the streamed-out text must match the value and
// the returned length must equal the number of characters written.
public void testGetTextViaWriter() throws Exception
{
    for (int mode : ALL_MODES) {
        _testGetTextViaWriter(mode);
    }
}
private void _testGetTextViaWriter(int mode) throws Exception
{
    final String INPUT_TEXT = "this is a sample text for json parsing using readText() method";
    final String JSON = "{\"a\":\""+INPUT_TEXT+"\",\"b\":true,\"c\":null,\"d\":\"foo\"}";
    JsonParser parser = createParser(mode, JSON);
    assertToken(JsonToken.START_OBJECT, parser.nextToken());
    assertToken(JsonToken.FIELD_NAME, parser.nextToken());
    assertEquals("a", parser.currentName());
    assertToken(JsonToken.VALUE_STRING, parser.nextToken());
    Writer writer = new StringWriter();
    int len = parser.getText(writer);
    String resultString = writer.toString();
    // returned count must match what actually reached the Writer
    assertEquals(len, resultString.length());
    assertEquals(INPUT_TEXT, resultString);
    parser.close();
}
// Same check with a value long enough to span internal buffer segments.
public void testLongerReadText() throws Exception
{
    for (int mode : ALL_MODES) {
        _testLongerReadText(mode);
    }
}
// Builds a ~12k-character value and verifies getText(Writer) returns it
// intact, with a matching character count.
// BUG FIX: previously the parser was always created with MODE_READER,
// ignoring the mode parameter — so the per-mode loop in testLongerReadText()
// never actually exercised the other backends.
private void _testLongerReadText(int mode) throws Exception
{
    StringBuilder builder = new StringBuilder();
    for (int i = 0; i < 1000; i++) {
        builder.append("Sample Text").append(i);
    }
    String longText = builder.toString();
    final String JSON = "{\"a\":\""+ longText +"\",\"b\":true,\"c\":null,\"d\":\"foo\"}";
    JsonParser parser = createParser(mode, JSON);
    assertToken(JsonToken.START_OBJECT, parser.nextToken());
    assertToken(JsonToken.FIELD_NAME, parser.nextToken());
    assertEquals("a", parser.currentName());
    assertToken(JsonToken.VALUE_STRING, parser.nextToken());
    Writer writer = new StringWriter();
    int len = parser.getText(writer);
    String resultString = writer.toString();
    assertEquals(len, resultString.length());
    assertEquals(longText, resultString);
    parser.close();
}
/*
/**********************************************************
/* Helper methods
/**********************************************************
 */
// Parses the JSON-spec sample document via every supported source: a Reader,
// byte streams in UTF-8/UTF-16BE/UTF-16LE/UTF-32, and DataInput. When
// 'verify' is true, token values are checked too (not just the event stream).
private void _doTestSpec(boolean verify) throws IOException
{
    JsonParser p;
    // First, using a StringReader:
    p = createParserUsingReader(JSON_FACTORY, SAMPLE_DOC_JSON_SPEC);
    verifyJsonSpecSampleDoc(p, verify);
    p.close();
    // Then with streams using supported encodings:
    p = createParserUsingStream(JSON_FACTORY, SAMPLE_DOC_JSON_SPEC, "UTF-8");
    verifyJsonSpecSampleDoc(p, verify);
    p.close();
    p = createParserUsingStream(JSON_FACTORY, SAMPLE_DOC_JSON_SPEC, "UTF-16BE");
    verifyJsonSpecSampleDoc(p, verify);
    p.close();
    p = createParserUsingStream(JSON_FACTORY, SAMPLE_DOC_JSON_SPEC, "UTF-16LE");
    verifyJsonSpecSampleDoc(p, verify);
    p.close();
    // Hmmh. UTF-32 is harder only because JDK doesn't come with
    // a codec for it. Can't test it yet using this method
    p = createParserUsingStream(JSON_FACTORY, SAMPLE_DOC_JSON_SPEC, "UTF-32");
    verifyJsonSpecSampleDoc(p, verify);
    p.close();
    // and finally, new (as of May 2016) source, DataInput:
    p = createParserForDataInput(JSON_FACTORY, new MockDataInput(SAMPLE_DOC_JSON_SPEC));
    verifyJsonSpecSampleDoc(p, verify);
    p.close();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.ivy.core.report;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.ivy.core.cache.ResolutionCacheManager;
import org.apache.ivy.core.module.descriptor.Artifact;
import org.apache.ivy.core.module.descriptor.ModuleDescriptor;
import org.apache.ivy.core.module.id.ModuleId;
import org.apache.ivy.core.module.id.ModuleRevisionId;
import org.apache.ivy.core.resolve.IvyNode;
import org.apache.ivy.core.resolve.ResolveEngine;
import org.apache.ivy.core.resolve.ResolveOptions;
import org.apache.ivy.core.sort.SortOptions;
import org.apache.ivy.plugins.report.XmlReportParser;
import org.apache.ivy.util.Message;
/**
 * Represents a whole resolution report for a module, restricted to a single configuration.
 * <p>
 * The report aggregates, per resolved {@code IvyNode}, the download reports of its artifacts,
 * and offers various views over the dependency set (evicted, unresolved, downloaded, ...).
 * Insertion order is preserved throughout (LinkedHashMap/LinkedHashSet) so that iteration
 * reflects resolution order.
 */
public class ConfigurationResolveReport {

    /** Descriptor of the module the resolution was performed for. */
    private final ModuleDescriptor md;

    /** Name of the configuration this report covers. */
    private final String conf;

    /** Date at which the resolution was performed. */
    private final Date date;

    /** Options the resolution was run with (used to locate the cached previous report). */
    private final ResolveOptions options;

    /** Maps each resolved node to the list of download reports of its artifacts. */
    private Map/*<IvyNode, List<ArtifactDownloadReport>>*/ dependencyReports = new LinkedHashMap();

    /**
     * Maps module revision ids to their node. Each node is registered under both its declared
     * id and its resolved id (see {@link #addDependency(IvyNode)}), so values may contain
     * duplicates; {@link #getDependencies()} de-duplicates them.
     */
    private Map/*<ModuleRevisionId, IvyNode>*/ dependencies = new LinkedHashMap();

    /** Engine that performed the resolution; also provides settings and the sort engine. */
    private final ResolveEngine resolveEngine;

    /** Lazily filled by {@link #getModuleIds()}: maps a module id to its resolved nodes. */
    private Map/*<ModuleId, Collection<IvyNode>>*/ modulesIdsMap = new LinkedHashMap();

    /** Lazily computed ordered list of module ids; null until {@link #getModuleIds()} runs. */
    private List modulesIds;

    /** Result of {@link #checkIfChanged()}; null until that method has been called. */
    private Boolean hasChanged = null;

    public ConfigurationResolveReport(ResolveEngine resolveEngine, ModuleDescriptor md,
            String conf, Date date, ResolveOptions options) {
        this.resolveEngine = resolveEngine;
        this.md = md;
        this.conf = conf;
        this.date = date;
        this.options = options;
    }

    /**
     * Check if the set of dependencies has changed since the previous execution of a
     * resolution.
     * <p>
     * This method uses the report file found in the cache, so it must be called before the
     * new report is serialized there. It also relies on the internal dependencies, which must
     * already be filled. It might be 'heavy' because it may have to parse the previous
     * report. The result is stored and exposed through {@link #hasChanged()}; any parse
     * failure (or a missing previous report) is conservatively treated as "changed".
     */
    public void checkIfChanged() {
        ResolutionCacheManager cache = resolveEngine.getSettings().getResolutionCacheManager();
        String resolveId = options.getResolveId();
        File previousReportFile = cache.getConfigurationResolveReportInCache(resolveId, conf);
        if (previousReportFile.exists()) {
            try {
                XmlReportParser parser = new XmlReportParser();
                parser.parse(previousReportFile);
                List previousDeps = Arrays.asList(parser.getDependencyRevisionIds());
                HashSet previousDepSet = new HashSet(previousDeps);
                // Compare as plain sets: order changes alone do not count as a change.
                hasChanged = Boolean.valueOf(!previousDepSet.equals(getModuleRevisionIds()));
            } catch (Exception e) {
                Message.warn("Error while parsing configuration resolve report "
                        + previousReportFile.getAbsolutePath(), e);
                hasChanged = Boolean.TRUE;
            }
        } else {
            hasChanged = Boolean.TRUE;
        }
    }

    /**
     * @pre checkIfChanged has been called (otherwise this throws a NullPointerException).
     */
    public boolean hasChanged() {
        return hasChanged.booleanValue();
    }

    /**
     * Returns all non evicted and non error dependency mrids. The returned set is ordered so
     * that a dependency will always be found before its own dependencies.
     *
     * @return all non evicted and non error dependency mrids
     */
    public Set getModuleRevisionIds() {
        Set mrids = new LinkedHashSet();
        for (Iterator iter = getDependencies().iterator(); iter.hasNext();) {
            IvyNode node = (IvyNode) iter.next();
            if (!node.isEvicted(getConfiguration()) && !node.hasProblem()) {
                mrids.add(node.getResolvedId());
            }
        }
        return mrids;
    }

    /**
     * Registers a dependency node with no artifact reports yet. The node is indexed under
     * both its declared and its resolved revision id.
     */
    public void addDependency(IvyNode node) {
        dependencies.put(node.getId(), node);
        dependencies.put(node.getResolvedId(), node);
        dependencyReports.put(node, Collections.EMPTY_LIST);
    }

    /** Re-indexes (or indexes) a node under the given revision id. */
    public void updateDependency(ModuleRevisionId mrid, IvyNode node) {
        dependencies.put(mrid, node);
    }

    /**
     * Registers a dependency node together with the download reports of the artifacts it
     * declares for this configuration. Artifacts with no entry in the given report are
     * skipped (with a debug message).
     */
    public void addDependency(IvyNode node, DownloadReport report) {
        dependencies.put(node.getId(), node);
        dependencies.put(node.getResolvedId(), node);
        List adrs = new ArrayList();
        Artifact[] artifacts = node.getArtifacts(conf);
        for (int i = 0; i < artifacts.length; i++) {
            ArtifactDownloadReport artifactReport = report.getArtifactReport(artifacts[i]);
            if (artifactReport != null) {
                adrs.add(artifactReport);
            } else {
                Message.debug("no report found for " + artifacts[i]);
            }
        }
        dependencyReports.put(node, adrs);
    }

    /** @return the name of the configuration this report covers */
    public String getConfiguration() {
        return conf;
    }

    /** @return the date at which the resolution was performed */
    public Date getDate() {
        return date;
    }

    /** @return the descriptor of the module the resolution was performed for */
    public ModuleDescriptor getModuleDescriptor() {
        return md;
    }

    /** @return the nodes that could not be resolved (i.e. that have a problem) */
    public IvyNode[] getUnresolvedDependencies() {
        List unresolved = new ArrayList();
        for (Iterator iter = getDependencies().iterator(); iter.hasNext();) {
            IvyNode node = (IvyNode) iter.next();
            if (node.hasProblem()) {
                unresolved.add(node);
            }
        }
        return (IvyNode[]) unresolved.toArray(new IvyNode[unresolved.size()]);
    }

    /**
     * @return the distinct dependency nodes, in insertion order (nodes registered under two
     *         revision ids appear once)
     */
    private Collection getDependencies() {
        return new LinkedHashSet(dependencies.values());
    }

    /** @return the nodes evicted in this configuration */
    public IvyNode[] getEvictedNodes() {
        List evicted = new ArrayList();
        for (Iterator iter = getDependencies().iterator(); iter.hasNext();) {
            IvyNode node = (IvyNode) iter.next();
            if (node.isEvicted(conf)) {
                evicted.add(node);
            }
        }
        return (IvyNode[]) evicted.toArray(new IvyNode[evicted.size()]);
    }

    /** @return the declared revision ids of the evicted nodes */
    private Set/*<ModuleRevisionId>*/ getEvictedMrids() {
        Set/*<ModuleRevisionId>*/ evicted = new LinkedHashSet();
        IvyNode[] evictedNodes = getEvictedNodes();
        for (int i = 0; i < evictedNodes.length; i++) {
            evicted.add(evictedNodes[i].getId());
        }
        return evicted;
    }

    /**
     * @return the nodes whose artifacts were actually downloaded during this resolution
     *         (only "real" nodes are reported, to avoid duplicates)
     */
    public IvyNode[] getDownloadedNodes() {
        List downloaded = new ArrayList();
        for (Iterator iter = getDependencies().iterator(); iter.hasNext();) {
            IvyNode node = (IvyNode) iter.next();
            if (node.isDownloaded() && node.getRealNode() == node) {
                downloaded.add(node);
            }
        }
        return (IvyNode[]) downloaded.toArray(new IvyNode[downloaded.size()]);
    }

    /**
     * @return the nodes that required a repository search during this resolution
     *         (only "real" nodes are reported, to avoid duplicates)
     */
    public IvyNode[] getSearchedNodes() {
        List downloaded = new ArrayList();
        for (Iterator iter = getDependencies().iterator(); iter.hasNext();) {
            IvyNode node = (IvyNode) iter.next();
            if (node.isSearched() && node.getRealNode() == node) {
                downloaded.add(node);
            }
        }
        return (IvyNode[]) downloaded.toArray(new IvyNode[downloaded.size()]);
    }

    /**
     * @return the artifact download reports of the node registered under the given revision
     *         id, or an empty array if the node is unknown
     */
    public ArtifactDownloadReport[] getDownloadReports(ModuleRevisionId mrid) {
        Collection col = (Collection) dependencyReports.get(getDependency(mrid));
        if (col == null) {
            return new ArtifactDownloadReport[0];
        }
        return (ArtifactDownloadReport[]) col.toArray(new ArtifactDownloadReport[col.size()]);
    }

    /** @return the node registered under the given revision id, or null if unknown */
    public IvyNode getDependency(ModuleRevisionId mrid) {
        return (IvyNode) dependencies.get(mrid);
    }

    /**
     * gives all the modules ids concerned by this report, from the most dependent to the least one
     *
     * @return a list of ModuleId
     */
    public List/* <ModuleId> */getModuleIds() {
        if (modulesIds == null) {
            // Sort topologically then reverse, so most-dependent modules come first;
            // modulesIdsMap is filled as a side effect for getNodes(ModuleId).
            List sortedDependencies = resolveEngine.getSortEngine().sortNodes(
                getDependencies(), SortOptions.SILENT);
            Collections.reverse(sortedDependencies);
            for (Iterator iter = sortedDependencies.iterator(); iter.hasNext();) {
                IvyNode dependency = (IvyNode) iter.next();
                ModuleId mid = dependency.getResolvedId().getModuleId();
                Collection deps = (Collection) modulesIdsMap.get(mid);
                if (deps == null) {
                    deps = new LinkedHashSet();
                    modulesIdsMap.put(mid, deps);
                }
                deps.add(dependency);
            }
            modulesIds = new ArrayList(modulesIdsMap.keySet());
        }
        return Collections.unmodifiableList(modulesIds);
    }

    /** @return the nodes resolved for the given module id, or null if the id is unknown */
    public Collection/* <IvyNode> */getNodes(ModuleId mid) {
        if (modulesIds == null) {
            getModuleIds();
        }
        return (Collection) modulesIdsMap.get(mid);
    }

    /** @return the engine that performed this resolution */
    public ResolveEngine getResolveEngine() {
        return resolveEngine;
    }

    /** @return the total number of artifact download reports registered in this report */
    public int getArtifactsNumber() {
        int total = 0;
        for (Iterator iter = dependencyReports.values().iterator(); iter.hasNext();) {
            Collection reports = (Collection) iter.next();
            total += reports == null ? 0 : reports.size();
        }
        return total;
    }

    /**
     * Get every report on the download requests.
     *
     * @return the list of reports, never <code>null</code>
     */
    public ArtifactDownloadReport[] getAllArtifactsReports() {
        return getArtifactsReports(null, true);
    }

    /**
     * Get the report on the download requests. The list of download report can be restricted
     * to a specific download status, and also remove the download report for the evicted
     * modules.
     *
     * @param downloadStatus
     *            the status of download to retrieve. Set it to <code>null</code> for no
     *            restriction on the download status
     * @param withEvicted
     *            set it to <code>true</code> if the report for the evicted modules have to be
     *            retrieved.
     * @return the list of reports, never <code>null</code>
     * @see ArtifactDownloadReport
     */
    public ArtifactDownloadReport[] getArtifactsReports(
            DownloadStatus downloadStatus, boolean withEvicted) {
        Collection all = new LinkedHashSet();
        Collection evictedMrids = null;
        if (!withEvicted) {
            // Only computed when needed; the loop below never reads it when withEvicted is true.
            evictedMrids = getEvictedMrids();
        }
        for (Iterator iter = dependencyReports.values().iterator(); iter.hasNext();) {
            Collection reports = (Collection) iter.next();
            for (Iterator itReport = reports.iterator(); itReport.hasNext();) {
                ArtifactDownloadReport report = (ArtifactDownloadReport) itReport.next();
                if (downloadStatus != null && report.getDownloadStatus() != downloadStatus) {
                    continue;
                }
                if (withEvicted
                        || !evictedMrids.contains(report.getArtifact().getModuleRevisionId())) {
                    all.add(report);
                }
            }
        }
        return (ArtifactDownloadReport[]) all.toArray(new ArtifactDownloadReport[all.size()]);
    }

    /**
     * Get the report on the successful download requests with the evicted modules
     *
     * @return the list of reports, never <code>null</code>
     */
    public ArtifactDownloadReport[] getDownloadedArtifactsReports() {
        return getArtifactsReports(DownloadStatus.SUCCESSFUL, true);
    }

    /**
     * Get the report on the failed download requests with the evicted modules. Reports for
     * "merged" artifacts are filtered out.
     *
     * @return the list of reports, never <code>null</code>
     */
    public ArtifactDownloadReport[] getFailedArtifactsReports() {
        ArtifactDownloadReport[] allFailedReports
                = getArtifactsReports(DownloadStatus.FAILED, true);
        return filterOutMergedArtifacts(allFailedReports);
    }

    /** @return true if at least one dependency is unresolved or one download failed */
    public boolean hasError() {
        return getUnresolvedDependencies().length > 0 || getFailedArtifactsReports().length > 0;
    }

    /** @return the number of distinct dependency nodes in this report */
    public int getNodesNumber() {
        return getDependencies().size();
    }

    /**
     * Removes from the given reports those whose artifact carries the "ivy:merged" extra
     * attribute (artifacts inherited through descriptor merging).
     *
     * @param allFailedReports the reports to filter; not modified
     * @return a new array without the merged artifacts' reports
     */
    public static ArtifactDownloadReport[] filterOutMergedArtifacts(
            ArtifactDownloadReport[] allFailedReports) {
        Collection adrs = new ArrayList(Arrays.asList(allFailedReports));
        for (Iterator iterator = adrs.iterator(); iterator.hasNext();) {
            ArtifactDownloadReport adr = (ArtifactDownloadReport) iterator.next();
            if (adr.getArtifact().getExtraAttribute("ivy:merged") != null) {
                iterator.remove();
            }
        }
        return (ArtifactDownloadReport[]) adrs.toArray(new ArtifactDownloadReport[adrs.size()]);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.cassandra.db.commitlog;
import java.io.File;
import java.io.IOError;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.channels.FileChannel;
import java.nio.MappedByteBuffer;
import java.util.Collection;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.CRC32;
import java.util.HashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.config.Schema;
import org.apache.cassandra.db.ColumnFamily;
import org.apache.cassandra.db.RowMutation;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.net.MessagingService;
/*
 * A single commit log file on disk. Manages creation of the file and writing row mutations to disk,
 * as well as tracking the last mutation position of any "dirty" CFs covered by the segment file. Segment
 * files are initially allocated to a fixed size and can grow to accommodate a larger value if necessary.
 */
public class CommitLogSegment
{
    private static final Logger logger = LoggerFactory.getLogger(CommitLogSegment.class);

    private static final String FILENAME_PREFIX = "CommitLog-";
    private static final String FILENAME_EXTENSION = ".log";
    // final: the pattern is shared, immutable state and must never be reassigned
    private static final Pattern COMMIT_LOG_FILE_PATTERN = Pattern.compile(FILENAME_PREFIX + "(\\d+)" + FILENAME_EXTENSION);

    // The commit log entry overhead in bytes (int: length + long: head checksum + long: tail checksum)
    static final int ENTRY_OVERHEAD_SIZE = 4 + 8 + 8;

    // cache which cf is dirty in this segment to avoid having to lookup all ReplayPositions to decide if we can delete this segment
    private final HashMap<Integer, Integer> cfLastWrite = new HashMap<Integer, Integer>();

    // Unique id of this segment, also embedded in its file name.
    public final long id;

    private final File logFile;
    private RandomAccessFile logFileAccessor;

    // True when the buffer holds data that has not yet been force()d to disk.
    private boolean needsSync = false;

    private final MappedByteBuffer buffer;
    private boolean closed;

    /**
     * @return a newly minted segment file
     */
    public static CommitLogSegment freshSegment()
    {
        return new CommitLogSegment(null);
    }

    /**
     * Constructs a new segment file.
     *
     * @param filePath if not null, recycles the existing file by renaming it and truncating it to CommitLog.SEGMENT_SIZE.
     */
    CommitLogSegment(String filePath)
    {
        id = System.nanoTime();
        logFile = new File(DatabaseDescriptor.getCommitLogLocation(), FILENAME_PREFIX + id + FILENAME_EXTENSION);
        boolean isCreating = true;

        try
        {
            if (filePath != null)
            {
                File oldFile = new File(filePath);
                if (oldFile.exists())
                {
                    logger.debug("Re-using discarded CommitLog segment for " + id + " from " + filePath);
                    oldFile.renameTo(logFile);
                    isCreating = false;
                }
            }

            // Open the segment file for read/write access
            logFileAccessor = new RandomAccessFile(logFile, "rw");

            if (isCreating)
            {
                logger.debug("Creating new commit log segment " + logFile.getPath());
            }

            // Map the segment, extending or truncating it to the standard segment size
            logFileAccessor.setLength(CommitLog.SEGMENT_SIZE);

            buffer = logFileAccessor.getChannel().map(FileChannel.MapMode.READ_WRITE, 0, CommitLog.SEGMENT_SIZE);
            // Write an end-of-segment marker at the start, then rewind so writes begin at 0.
            buffer.putInt(CommitLog.END_OF_SEGMENT_MARKER);
            buffer.position(0);

            needsSync = true;
        }
        catch (IOException e)
        {
            throw new IOError(e);
        }
    }

    /**
     * Extracts the commit log ID from filename.
     *
     * @param filename the filename of the commit log file
     * @return the extracted commit log ID, or -1 if the filename does not match the expected pattern
     */
    public static long idFromFilename(String filename)
    {
        Matcher matcher = COMMIT_LOG_FILE_PATTERN.matcher(filename);
        try
        {
            if (matcher.matches())
                return Long.parseLong(matcher.group(1)); // parseLong avoids boxing a Long just to unbox it
            else
                return -1L;
        }
        catch (NumberFormatException e)
        {
            // The digits overflow a long; treat as "not a commit log file".
            return -1L;
        }
    }

    /**
     * @param filename the filename to check
     * @return true if filename could be a commit log based on its filename
     */
    public static boolean possibleCommitLogFile(String filename)
    {
        return COMMIT_LOG_FILE_PATTERN.matcher(filename).matches();
    }

    /**
     * Completely discards a segment file by deleting it. (Potentially blocking operation)
     */
    public void discard()
    {
        close();
        try
        {
            FileUtils.deleteWithConfirm(logFile);
        }
        catch (IOException e)
        {
            throw new IOError(e);
        }
    }

    /**
     * Recycle processes an unneeded segment file for reuse.
     *
     * @return a new CommitLogSegment representing the newly reusable segment.
     */
    public CommitLogSegment recycle()
    {
        // writes an end-of-segment marker at the very beginning of the file and closes it
        buffer.position(0);
        buffer.putInt(CommitLog.END_OF_SEGMENT_MARKER);
        buffer.position(0);

        try
        {
            sync();
        }
        catch (IOException e)
        {
            // This is a best effort thing anyway; pass the exception so the stack trace is logged.
            logger.warn("I/O error flushing " + this, e);
        }

        close();

        return new CommitLogSegment(getPath());
    }

    /**
     * @return true if there is room to write() @param mutation to this segment
     */
    public boolean hasCapacityFor(RowMutation mutation)
    {
        long totalSize = RowMutation.serializer().serializedSize(mutation, MessagingService.version_) + ENTRY_OVERHEAD_SIZE;
        return totalSize <= buffer.remaining();
    }

    /**
     * mark all of the column families we're modifying as dirty at this position
     */
    private void markDirty(RowMutation rowMutation, ReplayPosition repPos)
    {
        for (ColumnFamily columnFamily : rowMutation.getColumnFamilies())
        {
            // check for null cfm in case a cl write goes through after the cf is
            // defined but before a new segment is created.
            CFMetaData cfm = Schema.instance.getCFMetaData(columnFamily.id());
            if (cfm == null)
            {
                logger.error("Attempted to write commit log entry for unrecognized column family: " + columnFamily.id());
            }
            else
            {
                markCFDirty(cfm.cfId, repPos.position);
            }
        }
    }

    /**
     * Appends a row mutation onto the commit log. Requires that hasCapacityFor has already been checked.
     *
     * Entry layout: length (int), checksum of length (long), serialized mutation, checksum of
     * length+payload (long) — matching ENTRY_OVERHEAD_SIZE.
     *
     * @param rowMutation the mutation to append to the commit log.
     * @return the position of the appended mutation
     */
    public ReplayPosition write(RowMutation rowMutation) throws IOException
    {
        assert !closed;
        ReplayPosition repPos = getContext();
        markDirty(rowMutation, repPos);

        CRC32 checksum = new CRC32();
        byte[] serializedRow = rowMutation.getSerializedBuffer(MessagingService.version_);

        checksum.update(serializedRow.length);
        buffer.putInt(serializedRow.length);
        buffer.putLong(checksum.getValue());

        buffer.put(serializedRow);
        checksum.update(serializedRow);
        buffer.putLong(checksum.getValue());

        if (buffer.remaining() >= 4)
        {
            // writes end of segment marker and rewinds back to position where it starts
            buffer.putInt(CommitLog.END_OF_SEGMENT_MARKER);
            buffer.position(buffer.position() - CommitLog.END_OF_SEGMENT_MARKER_SIZE);
        }

        needsSync = true;
        return repPos;
    }

    /**
     * Forces a disk flush for this segment file. No-op when nothing was written since the last sync.
     */
    public void sync() throws IOException
    {
        if (needsSync)
        {
            buffer.force();
            needsSync = false;
        }
    }

    /**
     * @return the current ReplayPosition for this log segment
     */
    public ReplayPosition getContext()
    {
        return new ReplayPosition(id, buffer.position());
    }

    /**
     * @return the file path to this segment
     */
    public String getPath()
    {
        return logFile.getPath();
    }

    /**
     * @return the file name of this segment
     */
    public String getName()
    {
        return logFile.getName();
    }

    /**
     * Close the segment file. Idempotent: subsequent calls are no-ops.
     */
    public void close()
    {
        if (closed)
            return;

        try
        {
            logFileAccessor.close();
            closed = true;
        }
        catch (IOException e)
        {
            throw new IOError(e);
        }
    }

    /**
     * Records the CF as dirty at a certain position.
     *
     * @param cfId the column family ID that is now dirty
     * @param position the position the last write for this CF was written at
     */
    private void markCFDirty(Integer cfId, Integer position)
    {
        cfLastWrite.put(cfId, position);
    }

    /**
     * Marks the ColumnFamily specified by cfId as clean for this log segment. If the
     * given context argument is contained in this file, it will only mark the CF as
     * clean if no newer writes have taken place.
     *
     * @param cfId the column family ID that is now clean
     * @param context the optional clean offset
     */
    public void markClean(Integer cfId, ReplayPosition context)
    {
        Integer lastWritten = cfLastWrite.get(cfId);

        if (lastWritten != null && (!contains(context) || lastWritten < context.position))
        {
            cfLastWrite.remove(cfId);
        }
    }

    /**
     * @return a collection of dirty CFIDs for this segment file.
     */
    public Collection<Integer> getDirtyCFIDs()
    {
        return cfLastWrite.keySet();
    }

    /**
     * @return true if this segment is unused and safe to recycle or delete
     */
    public boolean isUnused()
    {
        return cfLastWrite.isEmpty();
    }

    /**
     * Check to see if a certain ReplayPosition is contained by this segment file.
     *
     * @param context the replay position to be checked
     * @return true if the replay position is contained by this segment file.
     */
    public boolean contains(ReplayPosition context)
    {
        return context.segment == id;
    }

    // For debugging, not fast
    public String dirtyString()
    {
        StringBuilder sb = new StringBuilder();
        for (Integer cfId : cfLastWrite.keySet())
        {
            CFMetaData m = Schema.instance.getCFMetaData(cfId);
            sb.append(m == null ? "<deleted>" : m.cfName).append(" (").append(cfId).append("), ");
        }
        return sb.toString();
    }

    @Override
    public String toString()
    {
        return "CommitLogSegment(" + getPath() + ')';
    }

    /** @return the current write position within the mapped buffer */
    public int position()
    {
        return buffer.position();
    }
}
| |
package com.wikidreams.haarcascade;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import javax.swing.JOptionPane;
import com.wikidreams.properties.PropertiesManager;
import com.wikidreams.shellcommand.ShellManager;
/**
 * Drives the OpenCV haar-cascade training pipeline on Windows: generates positive samples
 * with opencv_createsamples.exe, packs them into a .vec file, and writes a
 * opencv_traincascade.exe batch script for the user to launch.
 *
 * All work happens eagerly in the constructor (sample creation -> vector file -> cascade
 * script); the "OpenCVBin" property must point at the OpenCV bin directory.
 */
public class HaarCascadeManager {

    static {
        // Loaded once per JVM; provides the "OpenCVBin" path used everywhere below.
        PropertiesManager.loadProperties("resources/config.properties");
    }

    // Name of this training run; also the name of its workspace sub-folder.
    private String processName;
    // Source images used to synthesize positive samples.
    private File[] positiveImages;
    // Background (negatives) description file passed to the OpenCV tools.
    private File bgFile;
    // opencv_createsamples parameters (kept as strings: they are only spliced into commands).
    private String samplesByImage;
    private String maxxangle;
    private String maxyangle;
    private String maxzangle;
    private String width;
    private String height;
    // Number of generated samples; computed in createVectorFile().
    private String num;
    // opencv_traincascade parameters.
    private String numPos;
    private String numNeg;
    private String numStages;
    // Workspace folder dedicated to this process; set in createSamples().
    private File processDir;
    // Aggregated info.lst listing all generated samples; set in createSamples().
    private File infoFile;

    public HaarCascadeManager(String processName, File[] positiveImages, File bgFile,
            String samplesByImage, String maxxangle, String maxyangle, String maxzangle,
            String width, String height, String numPos, String numNeg, String numStages) {
        super();
        this.processName = processName;
        this.positiveImages = positiveImages;
        this.bgFile = bgFile;
        this.samplesByImage = samplesByImage;
        this.maxxangle = maxxangle;
        this.maxyangle = maxyangle;
        this.maxzangle = maxzangle;
        this.width = width;
        this.height = height;
        this.numPos = numPos;
        this.numNeg = numNeg;
        this.numStages = numStages;
        // Create workspace folder.
        File dir = new File(PropertiesManager.properties.get("OpenCVBin").trim() + "\\Workspace");
        if (! dir.exists()) {
            dir.mkdir();
        }
        // Create cascades folder.
        dir = new File(PropertiesManager.properties.get("OpenCVBin").trim() + "\\Workspace\\Cascades");
        if (! dir.exists()) {
            dir.mkdir();
        }
        // Create samples (kicks off the whole pipeline).
        this.createSamples();
    }

    /**
     * Generates positive samples for every input image via opencv_createsamples.exe,
     * aggregating the per-image info.lst contents and the tool output, then chains into
     * vector-file creation.
     */
    private void createSamples() {
        // Create process folder; refuse to clobber an existing run of the same name.
        this.processDir = new File(PropertiesManager.properties.get("OpenCVBin").trim() + "\\Workspace\\Cascades\\" + this.processName);
        if (this.processDir.exists()) {
            JOptionPane.showMessageDialog(null, "A process with this name already exists.");
            return;
        }
        this.processDir.mkdir();
        // Create info folder.
        File infoDir = new File(this.processDir + "\\info\\");
        infoDir.mkdir();
        // info.lst accumulates the sample descriptions produced for each image.
        this.infoFile = new File(infoDir.getAbsolutePath() + "\\info.lst");
        StringBuilder sbInfoFile = new StringBuilder();
        File reportFile = new File(this.processDir + "\\samples_report.log");
        StringBuilder sbReportFile = new StringBuilder();
        for (File f : this.positiveImages) {
            // Build and write the create_samples.bat command for this image.
            writeTextFile(new File(this.processDir.getAbsolutePath() + "\\create_samples.bat"),
                    buildCreateSamplesCommand(infoDir, f));
            // Execute samples.bat file.
            ArrayList<String> command = new ArrayList<>();
            command.add(this.processDir.getAbsolutePath() + "\\create_samples.bat");
            String output = ShellManager.executeCommand(command);
            // Give the external tool time to flush its files before we read them.
            pause(3000);
            sbReportFile.append(output);
            sbReportFile.append("\n");
            // The tool overwrites info.lst per image, so accumulate its contents now.
            if (this.infoFile.exists()) {
                try (BufferedReader br = new BufferedReader(new FileReader(this.infoFile.getAbsolutePath()))) {
                    String line;
                    while ((line = br.readLine()) != null) {
                        sbInfoFile.append(line);
                        sbInfoFile.append("\n");
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        // Save aggregated info.lst and the samples report.
        writeTextFile(infoFile, sbInfoFile.toString());
        writeTextFile(reportFile, sbReportFile.toString());
        // Create vector file.
        this.createVectorFile();
    }

    /** Builds the opencv_createsamples.exe command line for one positive image. */
    private String buildCreateSamplesCommand(File infoDir, File image) {
        StringBuilder b = new StringBuilder();
        b.append(PropertiesManager.properties.get("OpenCVBin").trim() + "\\opencv_createsamples.exe");
        b.append(" -info ");
        b.append(infoDir.getAbsolutePath() + "\\info.lst");
        b.append(" -pngoutput ");
        b.append(infoDir.getAbsolutePath());
        b.append(" -img ");
        b.append(image.getAbsolutePath());
        b.append(" -bg ");
        b.append(this.bgFile.getAbsolutePath());
        b.append(" -num ");
        b.append(this.samplesByImage);
        b.append(" -maxxangle ");
        b.append(this.maxxangle);
        b.append(" -maxyangle ");
        b.append(this.maxyangle);
        b.append(" -maxzangle ");
        b.append(this.maxzangle);
        b.append(" -w ");
        b.append(this.width);
        b.append(" -h ");
        b.append(this.height);
        return b.toString();
    }

    /**
     * Packs the generated samples into a single samples.vec file via opencv_createsamples.exe
     * and logs the tool's output, then chains into cascade-script creation.
     */
    private void createVectorFile() {
        // Create vector folder.
        File vecDir = new File(this.processDir.getAbsolutePath() + "\\vector");
        vecDir.mkdir();
        // Get number of created images (info folder holds the samples plus info.lst itself,
        // hence the "- 1").
        File infoDir = new File(this.processDir.getAbsolutePath() + "\\info");
        File[] allFiles = infoDir.listFiles();
        this.num = Integer.toString(allFiles.length - 1);
        // Create vector.bat file.
        StringBuilder b = new StringBuilder();
        b.append(PropertiesManager.properties.get("OpenCVBin").trim() + "\\opencv_createsamples.exe");
        b.append(" -info ");
        b.append(this.processDir.getAbsolutePath() + "\\info\\info.lst");
        b.append(" -num ");
        b.append(this.num);
        b.append(" -w ");
        b.append(this.width);
        b.append(" -h ");
        b.append(this.height);
        b.append(" -vec ");
        b.append(this.processDir.getAbsolutePath() + "\\vector\\samples.vec");
        writeTextFile(new File(this.processDir.getAbsolutePath() + "\\create_vector.bat"), b.toString());
        // Execute create_vector.bat file.
        ArrayList<String> command = new ArrayList<>();
        command.add(this.processDir.getAbsolutePath() + "\\create_vector.bat");
        String output = ShellManager.executeCommand(command);
        pause(2000);
        // Create vector report file.
        writeTextFile(new File(this.processDir.getAbsolutePath() + "\\vector_report.log"), output);
        // Create train cascade file.
        this.createCascade();
    }

    /**
     * Writes the opencv_traincascade.exe batch script and tells the user where it is.
     * Training itself is left to the user (it can run for hours).
     */
    private void createCascade() {
        // Create data folder.
        File dataDir = new File(this.processDir.getAbsolutePath() + "\\data");
        dataDir.mkdir();
        // Create bat file content.
        StringBuilder b = new StringBuilder();
        b.append(PropertiesManager.properties.get("OpenCVBin").trim() + "\\opencv_traincascade.exe");
        b.append(" -data " + dataDir.getAbsolutePath());
        b.append(" -vec " + this.processDir.getAbsolutePath() + "\\vector\\samples.vec");
        b.append(" -bg " + this.bgFile.getAbsolutePath());
        b.append(" -numPos " + this.numPos);
        b.append(" -numNeg " + this.numNeg);
        b.append(" -numStages " + this.numStages);
        b.append(" -w " + this.width);
        b.append(" -h " + this.height);
        b.append(" > " + this.processDir.getAbsolutePath() + "\\cascade_report.log");
        File tFile = new File(PropertiesManager.properties.get("OpenCVBin").trim() + "\\" + this.processName + "_train.bat");
        writeTextFile(tFile, b.toString());
        // Alert user to start the process.
        JOptionPane.showMessageDialog(null, "Start the process with the file: " + tFile.getAbsolutePath());
    }

    /**
     * Writes content to file, overwriting it. Errors are reported but not fatal, matching
     * the best-effort behavior of the rest of this class. try-with-resources guarantees the
     * writer is closed even on failure (the original leaked it on exception).
     */
    private static void writeTextFile(File file, String content) {
        try (BufferedWriter writer = new BufferedWriter(new FileWriter(file))) {
            writer.write(content);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Sleeps for the given delay; restores the interrupt flag if interrupted. */
    private static void pause(long millis) {
        try {
            Thread.sleep(millis);
        } catch (InterruptedException e) {
            // Re-assert the interrupt so callers up the stack can still observe it.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
    }
}
| |
package graphTheory.algorithms.shortestDistance.arcCost;
import graphTheory.algorithms.Algorithm;
import graphTheory.graph.Arc;
import graphTheory.instances.Instance;
import graphTheory.instances.shortestPath.ArcShortestPathOneDestinationInstance;
import graphTheory.instances.shortestPath.ArcShortestPathOneSourceInstance;
import graphTheory.utils.FibonacciHeap;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
/**
* Implementation of the dijkstra algorithm. This algorithm compute the shortest
* path between a source and all the nodes in a directed or undirected graph.
*
* This version is implemented with a Fibonacci Heap.
*
* @author Watel Dimitri
*
*/
public class ArcDijkstraOneSourceAlgorithm extends Algorithm<ArcShortestPathOneSourceInstance> {
/**
* If true, do not compute the shortest paths, but only the costs of the
* shortest paths
*/
protected boolean computeOnlyCosts;
/**
* For each node v, this map contains the shortest path from the source to v
*/
protected HashMap<Integer, List<Arc>> shPs;
/**
* For each node v, this map contains the cost of the shortest path from the
* source to v
*/
protected HashMap<Integer, Integer> costs;
public HashMap<Integer, List<Arc>> getShortestPaths() {
return shPs;
}
public HashMap<Integer, Integer> getCosts() {
return costs;
}
public void setComputeOnlyCosts(boolean computeOnlyCosts) {
this.computeOnlyCosts = computeOnlyCosts;
}
/**
* Map linking every node to its distance from the source
*/
private HashMap<Integer, Integer> distanceFromSource;
/**
* Map linking every node arc preceding this node in one lowest cost path.
*/
private HashMap<Integer, Arc> preceding;
/**
* Fibonacci Heap linking every node to its distance from the source.
*/
private FibonacciHeap<Integer> fibTree;
/**
* Map linking every node to its fibonacciHeapNode in the heap.
*/
private HashMap<Integer, FibonacciHeap<Integer>.FibonacciHeapNode<Integer>> nodes;
@Override
protected void computeWithoutTime() {
init();
Integer n;
while (!fibTree.isEmpty()) {
n = fibTree.removeMin().getData();
expandFrom(n);
}
costs = distanceFromSource;
if (!computeOnlyCosts)
shPs = computePaths();
}
/**
* Init the parameters.
*/
private void init() {
distanceFromSource = new HashMap<Integer, Integer>();
preceding = new HashMap<Integer, Arc>();
fibTree = new FibonacciHeap<Integer>();
nodes = new HashMap<Integer, FibonacciHeap<Integer>.FibonacciHeapNode<Integer>>();
initDistances();
}
/**
* Init the distance from the source to every node : the source is at
* distance 0 from the source and every other node is at infinite distance
* until they are reached.
*/
private void initDistances() {
FibonacciHeap<Integer>.FibonacciHeapNode<Integer> fhn;
Integer n;
Iterator<Integer> it = instance.getGraph().getVerticesIterator();
while (it.hasNext()) {
n = it.next();
double d;
if (n.equals(instance.getSource())) {
distanceFromSource.put(n, 0);
d = 0.0;
} else {
distanceFromSource.put(n, null);
d = Double.POSITIVE_INFINITY;
}
if (!computeOnlyCosts)
preceding.put(n, null);
fhn = fibTree.insert(n, d);
nodes.put(n, fhn);
}
}
/**
* Read the output arc (directed or not) (n,v) of n and see if this arc can
* reduce the distance from the source to v.
*
* @param n
*/
private void expandFrom(Integer n) {
Integer distanceFromSource = this.distanceFromSource.get(n);
if (distanceFromSource == null) // Infinite distance from the source.
return;
Integer output;
Arc a;
Iterator<Arc> it = instance.getGraph().getOutputArcsIterator(n);
while (it.hasNext()) {
a = it.next();
output = a.getOutput();
expand(distanceFromSource, a, output);
}
it = instance.getGraph().getUndirectedNeighbourEdgesIterator(n);
while (it.hasNext()) {
a = it.next();
output = instance.getGraph().getNeighbourNode(n, a);
expand(distanceFromSource, a, output);
}
}
/**
* See if using a path of weight distanceFromSource, and the arc a, we can
* reduce the distance from the source to the node output.
*
* @param distanceFromSource
* @param a
* @param output
*/
private void expand(Integer distanceFromSource, Arc a, Integer output) {
Integer dist = distanceFromSource + instance.getCost(a);
Integer nndN = this.distanceFromSource.get(output);
if ((nndN == null || nndN > dist)) {
FibonacciHeap<Integer>.FibonacciHeapNode<Integer> fhn = nodes
.get(output);
fibTree.decreaseKey(fhn, dist);
this.distanceFromSource.put(output, dist);
if (!computeOnlyCosts)
preceding.put(output, a);
}
}
/**
* Using the map preceding compute and return the lowest cost path from
* source to all nodes
*
* @return the lowest cost path from source to all nodes
*/
private HashMap<Integer, List<Arc>> computePaths() {
    // Memoized reconstruction: computePaths(v, paths) fills the map as it
    // recurses, so each vertex's path is built at most once.
    HashMap<Integer, List<Arc>> paths = new HashMap<Integer, List<Arc>>();
    for (Iterator<Integer> it = instance.getGraph().getVerticesIterator(); it.hasNext(); ) {
        computePaths(it.next(), paths);
    }
    return paths;
}
/**
* Using the map preceding compute and return the lowest cost path from
* source to v
*
* @return the lowest cost path from source to v
*/
private List<Arc> computePaths(Integer v, HashMap<Integer, List<Arc>> paths) {
    // Already reconstructed (and reachable)? Reuse the memoized path.
    List<Arc> cached = paths.get(v);
    if (cached != null) {
        return cached;
    }
    List<Arc> path;
    if (v.equals(instance.getSource())) {
        // The path from the source to itself is empty.
        path = new LinkedList<Arc>();
    } else {
        Arc last = preceding.get(v);
        if (last == null) {
            // v was never reached: no path exists.
            path = null;
        } else {
            // Path to v = path to the arc's input, followed by the arc itself.
            path = new LinkedList<Arc>(computePaths(last.getInput(), paths));
            path.add(last);
        }
    }
    paths.put(v, path);
    return path;
}
@Override
protected void setNoSolution() {
    // Clear both result structures to signal that no solution exists.
    shPs = null;
    costs = null;
}
}
| |
/*
* Copyright (c) Nmote Ltd. 2003-2014. All rights reserved.
* See LICENSE doc in a root of project folder for additional information.
*/
package com.nmote.nanohttp;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketException;
import java.net.SocketTimeoutException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
public class NanoServer {

    /**
     * Background task that accepts client connections until the executor is
     * shut down; each accepted socket is handed to a {@link Handler}.
     */
    private class Acceptor implements Runnable {
        public void run() {
            try {
                // Wake up from accept() twice a second so shutdown is noticed.
                serverSocket.setSoTimeout(500);
            } catch (SocketException e) {
                log(e);
            }
            while (!executor.isShutdown()) {
                try {
                    executor.execute(new Handler(serverSocket.accept()));
                } catch (SocketTimeoutException e) {
                    // Accept timed out; loop again to recheck the shutdown flag.
                } catch (IOException e) {
                    log(e);
                }
            }
        }
    }

    /**
     * Per-connection worker: parses the HTTP request, dispatches it to the
     * first servlet that accepts it, and writes the response.
     */
    private class Handler implements Runnable, NanoRequest {

        private Handler(Socket socket) {
            this.socket = socket;
            this.requestHeaders = new HashMap<String, String>();
            this.responseHeaders = new HashMap<String, String>();
            responseHeaders.put("server", "Nmote NanoHTTP/1.0");
        }

        /** Returns the request body stream. Valid only while a servlet is processing. */
        public InputStream getInputStream() throws IOException {
            assertProcessing();
            return socket.getInputStream();
        }

        /** Returns the HTTP method in upper case (e.g. "GET"). */
        public String getMethod() {
            return method;
        }

        /**
         * Returns the response body stream. The first call commits the status
         * line and response headers; later header changes have no effect.
         */
        public OutputStream getOutputStream() throws IOException {
            assertProcessing();
            OutputStream out = socket.getOutputStream();
            if (!responseCommited) {
                responseCommited = true;
                StringBuilder b = new StringBuilder(200);
                b.append("HTTP/1.1 ");
                b.append(response);
                b.append("\r\n");
                for (Map.Entry<String, String> entry : responseHeaders.entrySet()) {
                    b.append(entry.getKey());
                    b.append(": ");
                    b.append(entry.getValue());
                    b.append("\r\n");
                }
                // This server always closes the connection after one response.
                b.append("connection: close\r\n");
                b.append("\r\n");
                out.write(b.toString().getBytes("iso-8859-1"));
            }
            return out;
        }

        public Map<String, String> getRequestHeaders() {
            return requestHeaders;
        }

        public String getRequestPath() {
            return requestPath;
        }

        public Map<String, String> getResponseHeaders() {
            assertProcessing();
            return responseHeaders;
        }

        /**
         * Sets the HTTP status line (e.g. "200 OK").
         *
         * @throws IllegalStateException if the response was already committed
         */
        public void response(String response) {
            assertProcessing();
            if (responseCommited) {
                throw new IllegalStateException("HTTP response already generated");
            }
            this.response = response;
        }

        public void run() {
            try {
                InputStream in = socket.getInputStream();
                readHeaders(in);
                if (servlets != null) {
                    for (NanoServlet nanoServlet : servlets) {
                        if (nanoServlet.canProcess(this)) {
                            processing = true;
                            nanoServlet.process(this);
                            break;
                        }
                    }
                }
                // Commit the response (default "404 Not Found" if no servlet ran).
                OutputStream out = getOutputStream();
                out.flush();
                out.close();
            } catch (IOException e) {
                log(e);
            } finally {
                try {
                    socket.close();
                } catch (IOException e) {
                    log(e);
                }
            }
        }

        private void assertProcessing() {
            if (!processing) { throw new IllegalStateException(); }
        }

        /**
         * Reads and parses the request line and headers, filling
         * {@link #method}, {@link #requestPath} and {@link #requestHeaders}.
         *
         * @throws IOException on malformed input or premature end of stream
         */
        private void readHeaders(InputStream in) throws IOException {
            char[] buffer = new char[256];
            String line = readLine(in, buffer);
            if (line == null) {
                // Client closed the connection before sending a request line;
                // previously this fell through to a NullPointerException.
                throw new IOException("missing HTTP request line");
            }
            // Request line: METHOD SP REQUEST-PATH SP HTTP-VERSION
            int space = line.indexOf(' ');
            // BUG FIX: was line.substring(space), which kept everything AFTER
            // the method (" /path HTTP/1.1"); the method is the text BEFORE
            // the first space.
            method = line.substring(0, space).toUpperCase();
            line = line.substring(space + 1);
            space = line.indexOf(' ');
            requestPath = line.substring(0, space).trim();
            // Parse headers until an empty line or end of stream.
            for (;;) {
                line = readLine(in, buffer);
                if (line == null) break;
                line = line.trim();
                if (line.length() > 0) {
                    int colon = line.indexOf(':');
                    if (colon != -1) {
                        // Header names are case-insensitive; normalize to lower case.
                        String headerName = line.substring(0, colon).toLowerCase();
                        String headerValue = line.substring(colon + 1).trim();
                        requestHeaders.put(headerName, headerValue);
                    }
                } else {
                    break;
                }
            }
        }

        private String method;
        private boolean processing;
        private final Map<String, String> requestHeaders;
        private String requestPath;
        private String response = "404 Not Found";
        private boolean responseCommited;
        private final Map<String, String> responseHeaders;
        private final Socket socket;
    }

    /** Creates a server listening on the default port 7070. */
    public NanoServer() throws IOException {
        this(7070);
    }

    /** Creates a server listening on the given port (backlog 20). */
    public NanoServer(int port) throws IOException {
        super();
        this.serverSocket = new ServerSocket(port, 20);
    }

    /**
     * Creates a server bound to the host and port of an http:// URL.
     *
     * @throws IllegalArgumentException if the URL protocol is not http
     */
    public NanoServer(String url) throws IOException {
        super();
        URL serverUrl = new URL(url);
        if (!"http".equals(serverUrl.getProtocol())) { throw new IllegalArgumentException("unsupported protocol: "
                + url); }
        this.serverSocket = new ServerSocket(serverUrl.getPort(), 20, InetAddress.getByName(serverUrl.getHost()));
    }

    /** Registers a servlet; servlets are consulted in insertion order. */
    public void add(NanoServlet nanoServlet) {
        if (servlets == null) {
            servlets = new ArrayList<NanoServlet>();
        }
        servlets.add(nanoServlet);
    }

    public List<NanoServlet> getServlets() {
        return servlets;
    }

    public void remove(NanoServlet nanoServlet) {
        if (servlets != null) {
            servlets.remove(nanoServlet);
        }
    }

    public void setServlets(List<NanoServlet> servlets) {
        this.servlets = servlets;
    }

    /** Starts the server on a fresh cached thread pool. */
    public void start() {
        start(Executors.newCachedThreadPool());
    }

    /**
     * Starts accepting connections using the supplied executor.
     *
     * @throws IllegalStateException if already started
     */
    public void start(ExecutorService executor) {
        if (this.executor != null) { throw new IllegalStateException("already in use"); }
        if (executor == null) { throw new NullPointerException("executor == null"); }
        this.executor = executor;
        this.executor.execute(new Acceptor());
    }

    /** Stops the server, waiting up to two seconds for in-flight requests. */
    public void stop() throws InterruptedException, IOException {
        stop(2, TimeUnit.SECONDS);
    }

    public void stop(long timeout, TimeUnit timeUnit) throws InterruptedException, IOException {
        if (executor == null) { throw new IllegalStateException("not started"); }
        executor.shutdown();
        executor.awaitTermination(timeout, timeUnit);
        serverSocket.close();
    }

    /** Error reporting hook; subclasses may override to use a real logger. */
    protected void log(Throwable t) {
        t.printStackTrace();
    }

    private ExecutorService executor;
    private ServerSocket serverSocket;
    private List<NanoServlet> servlets;

    /**
     * Reads one CR-LF (or bare LF) terminated line into {@code buffer}.
     * Returns {@code null} when end of stream is hit before any character.
     */
    private static String readLine(InputStream in, char[] buffer) throws IOException {
        int room = buffer.length;
        int offset = 0;
        int c = in.read();
        loop: while (offset < room) {
            switch (c) {
            case -1:
            case '\n':
                break loop;
            case '\r':
                int c2 = in.read();
                if ((c2 != '\n') && (c2 != -1)) { throw new IOException("expected \\n"); }
                break loop;
            default:
                buffer[offset++] = (char) c;
            }
            c = in.read();
        }
        if ((c == -1) && (offset == 0)) { return null; }
        return String.copyValueOf(buffer, 0, offset);
    }
}
| |
package liquibase.change.core;
import liquibase.change.*;
import liquibase.database.Database;
import liquibase.database.core.DB2Database;
import liquibase.database.core.InformixDatabase;
import liquibase.database.core.MSSQLDatabase;
import liquibase.database.core.OracleDatabase;
import liquibase.database.core.SybaseASADatabase;
import liquibase.statement.SqlStatement;
import liquibase.statement.core.RawSqlStatement;
import liquibase.statement.core.ReorganizeTableStatement;
import liquibase.structure.core.Column;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* Extracts data from an existing column to create a lookup table.
* A foreign key is created between the old column and the new lookup table.
*/
@DatabaseChange(name="addLookupTable",
        description = "Creates a lookup table containing values stored in a column and creates a foreign key to the new table.",
        priority = ChangeMetaData.PRIORITY_DEFAULT, appliesTo = "column")
public class AddLookupTableChange extends AbstractChange {
    // Location of the column whose distinct values are extracted.
    private String existingTableCatalogName;
    private String existingTableSchemaName;
    private String existingTableName;
    private String existingColumnName;
    // Location and shape of the lookup table to create.
    private String newTableCatalogName;
    private String newTableSchemaName;
    private String newTableName;
    private String newColumnName;
    private String newColumnDataType;
    // Optional FK name; see getFinalConstraintName() for the default.
    private String constraintName;
    public String getExistingTableCatalogName() {
        return existingTableCatalogName;
    }
    public void setExistingTableCatalogName(String existingTableCatalogName) {
        this.existingTableCatalogName = existingTableCatalogName;
    }
    @DatabaseChangeProperty(mustEqualExisting ="column.relation.schema")
    public String getExistingTableSchemaName() {
        return existingTableSchemaName;
    }
    public void setExistingTableSchemaName(String existingTableSchemaName) {
        this.existingTableSchemaName = existingTableSchemaName;
    }
    @DatabaseChangeProperty(mustEqualExisting = "column.relation", description = "Name of the table containing the data to extract", exampleValue = "address")
    public String getExistingTableName() {
        return existingTableName;
    }
    public void setExistingTableName(String existingTableName) {
        this.existingTableName = existingTableName;
    }
    @DatabaseChangeProperty(mustEqualExisting = "column", description = "Name of the column containing the data to extract", exampleValue = "state")
    public String getExistingColumnName() {
        return existingColumnName;
    }
    public void setExistingColumnName(String existingColumnName) {
        this.existingColumnName = existingColumnName;
    }
    @DatabaseChangeProperty(since = "3.0")
    public String getNewTableCatalogName() {
        return newTableCatalogName;
    }
    public void setNewTableCatalogName(String newTableCatalogName) {
        this.newTableCatalogName = newTableCatalogName;
    }
    public String getNewTableSchemaName() {
        return newTableSchemaName;
    }
    public void setNewTableSchemaName(String newTableSchemaName) {
        this.newTableSchemaName = newTableSchemaName;
    }
    @DatabaseChangeProperty(description = "Name of lookup table to create", exampleValue = "state")
    public String getNewTableName() {
        return newTableName;
    }
    public void setNewTableName(String newTableName) {
        this.newTableName = newTableName;
    }
    @DatabaseChangeProperty(description = "Name of the column in the new table to create", exampleValue = "abbreviation")
    public String getNewColumnName() {
        return newColumnName;
    }
    public void setNewColumnName(String newColumnName) {
        this.newColumnName = newColumnName;
    }
    @DatabaseChangeProperty(description = "Data type of the new table column", exampleValue = "char(2)")
    public String getNewColumnDataType() {
        return newColumnDataType;
    }
    public void setNewColumnDataType(String newColumnDataType) {
        this.newColumnDataType = newColumnDataType;
    }
    @DatabaseChangeProperty(description = "Name of the foreign-key constraint to create between the existing table and the lookup table", exampleValue = "fk_address_state")
    public String getConstraintName() {
        return constraintName;
    }
    /**
     * Returns the configured FK constraint name, or the generated default
     * FK_<existingTable>_<newTable> (upper-cased) when none was set.
     */
    public String getFinalConstraintName() {
        if (constraintName == null) {
            return ("FK_" + getExistingTableName() + "_" + getNewTableName()).toUpperCase();
        } else {
            return constraintName;
        }
    }
    public void setConstraintName(String constraintName) {
        this.constraintName = constraintName;
    }
    /**
     * Rollback: drop the foreign key first, then the lookup table itself.
     * NOTE(review): catalog names are not propagated to the inverse changes —
     * confirm whether catalog-qualified rollback is required here.
     */
    @Override
    protected Change[] createInverses() {
        DropForeignKeyConstraintChange dropFK = new DropForeignKeyConstraintChange();
        dropFK.setBaseTableSchemaName(getExistingTableSchemaName());
        dropFK.setBaseTableName(getExistingTableName());
        dropFK.setConstraintName(getFinalConstraintName());
        DropTableChange dropTable = new DropTableChange();
        dropTable.setSchemaName(getNewTableSchemaName());
        dropTable.setTableName(getNewTableName());
        return new Change[]{
                dropFK,
                dropTable,
        };
    }
    /**
     * Builds the statement sequence: (1) create and fill the lookup table with
     * the distinct non-null values of the existing column, using DB-specific
     * raw SQL (CTAS, SELECT INTO, or CREATE+INSERT); (2) make the new column
     * NOT NULL (except Oracle); (3) add a primary key; (4) add the foreign key
     * from the existing column to the lookup table. DB2 requires a REORG after
     * each constraint change, hence the two ReorganizeTableStatement inserts.
     */
    @Override
    public SqlStatement[] generateStatements(Database database) {
        List<SqlStatement> statements = new ArrayList<SqlStatement>();
        String newTableCatalogName = getNewTableCatalogName();
        String newTableSchemaName = getNewTableSchemaName();
        String existingTableCatalogName = getExistingTableCatalogName();
        String existingTableSchemaName = getExistingTableSchemaName();
        // Default: CREATE TABLE ... AS SELECT DISTINCT (works on most DBs).
        SqlStatement[] createTablesSQL = {new RawSqlStatement("CREATE TABLE " + database.escapeTableName(newTableCatalogName, newTableSchemaName, getNewTableName()) + " AS SELECT DISTINCT " + database.escapeObjectName(getExistingColumnName(), Column.class) + " AS " + database.escapeObjectName(getNewColumnName(), Column.class) + " FROM " + database.escapeTableName(existingTableCatalogName, existingTableSchemaName, getExistingTableName()) + " WHERE " + database.escapeObjectName(getExistingColumnName(), Column.class) + " IS NOT NULL")};
        if (database instanceof MSSQLDatabase) {
            // MSSQL uses SELECT ... INTO instead of CTAS.
            createTablesSQL = new SqlStatement[]{new RawSqlStatement("SELECT DISTINCT " + database.escapeObjectName(getExistingColumnName(), Column.class) + " AS " + database.escapeObjectName(getNewColumnName(), Column.class) + " INTO " + database.escapeTableName(newTableCatalogName, newTableSchemaName, getNewTableName()) + " FROM " + database.escapeTableName(existingTableCatalogName, existingTableSchemaName, getExistingTableName()) + " WHERE " + database.escapeObjectName(getExistingColumnName(), Column.class) + " IS NOT NULL"),};
        } else if (database instanceof SybaseASADatabase) {
            // Sybase ASA also uses SELECT ... INTO.
            createTablesSQL = new SqlStatement[]{new RawSqlStatement("SELECT DISTINCT " + database.escapeObjectName(getExistingColumnName(), Column.class) + " AS " + database.escapeObjectName(getNewColumnName(), Column.class) + " INTO " + database.escapeTableName(newTableCatalogName, newTableSchemaName, getNewTableName()) + " FROM " + database.escapeTableName(existingTableCatalogName, existingTableSchemaName, getExistingTableName()) + " WHERE " + database.escapeObjectName(getExistingColumnName(), Column.class) + " IS NOT NULL"),};
        } else if (database instanceof DB2Database) {
            // DB2 CTAS cannot copy data directly: create WITH NO DATA, then INSERT.
            createTablesSQL = new SqlStatement[]{
                    new RawSqlStatement("CREATE TABLE " + database.escapeTableName(newTableCatalogName, newTableSchemaName, getNewTableName()) + " AS (SELECT " + database.escapeObjectName(getExistingColumnName(), Column.class) + " AS " + database.escapeObjectName(getNewColumnName(), Column.class) + " FROM " + database.escapeTableName(existingTableCatalogName, existingTableSchemaName, getExistingTableName()) + ") WITH NO DATA"),
                    new RawSqlStatement("INSERT INTO " + database.escapeTableName(newTableCatalogName, newTableSchemaName, getNewTableName()) + " SELECT DISTINCT " + database.escapeObjectName(getExistingColumnName(), Column.class) + " FROM " + database.escapeTableName(existingTableCatalogName, existingTableSchemaName, getExistingTableName()) + " WHERE " + database.escapeObjectName(getExistingColumnName(), Column.class) + " IS NOT NULL"),
            };
        } else if (database instanceof InformixDatabase) {
            // Informix: explicit CREATE TABLE with the declared column type, then INSERT.
            createTablesSQL = new SqlStatement[] {
                    new RawSqlStatement("CREATE TABLE " + database.escapeTableName(newTableCatalogName, newTableSchemaName, getNewTableName()) + " ( " + database.escapeObjectName(getNewColumnName(), Column.class) + " " + getNewColumnDataType() + " )"),
                    new RawSqlStatement("INSERT INTO " + database.escapeTableName(newTableCatalogName, newTableSchemaName, getNewTableName()) + " ( " + database.escapeObjectName(getNewColumnName(), Column.class) + " ) SELECT DISTINCT " + database.escapeObjectName(getExistingColumnName(), Column.class) + " FROM " + database.escapeTableName(existingTableCatalogName, existingTableSchemaName, getExistingTableName()) + " WHERE " + database.escapeObjectName(getExistingColumnName(), Column.class) + " IS NOT NULL"),
            };
        }
        statements.addAll(Arrays.asList(createTablesSQL));
        if (!(database instanceof OracleDatabase)) {
            // Oracle is skipped here; all other DBs get an explicit NOT NULL.
            AddNotNullConstraintChange addNotNullChange = new AddNotNullConstraintChange();
            addNotNullChange.setSchemaName(newTableSchemaName);
            addNotNullChange.setTableName(getNewTableName());
            addNotNullChange.setColumnName(getNewColumnName());
            addNotNullChange.setColumnDataType(getNewColumnDataType());
            statements.addAll(Arrays.asList(addNotNullChange.generateStatements(database)));
        }
        if (database instanceof DB2Database) {
            statements.add(new ReorganizeTableStatement(newTableCatalogName, newTableSchemaName, getNewTableName()));
        }
        AddPrimaryKeyChange addPKChange = new AddPrimaryKeyChange();
        addPKChange.setSchemaName(newTableSchemaName);
        addPKChange.setTableName(getNewTableName());
        addPKChange.setColumnNames(getNewColumnName());
        statements.addAll(Arrays.asList(addPKChange.generateStatements(database)));
        if (database instanceof DB2Database) {
            statements.add(new ReorganizeTableStatement(newTableCatalogName,newTableSchemaName, getNewTableName()));
        }
        AddForeignKeyConstraintChange addFKChange = new AddForeignKeyConstraintChange();
        addFKChange.setBaseTableSchemaName(existingTableSchemaName);
        addFKChange.setBaseTableName(getExistingTableName());
        addFKChange.setBaseColumnNames(getExistingColumnName());
        addFKChange.setReferencedTableSchemaName(newTableSchemaName);
        addFKChange.setReferencedTableName(getNewTableName());
        addFKChange.setReferencedColumnNames(getNewColumnName());
        addFKChange.setConstraintName(getFinalConstraintName());
        statements.addAll(Arrays.asList(addFKChange.generateStatements(database)));
        return statements.toArray(new SqlStatement[statements.size()]);
    }
    @Override
    public String getConfirmationMessage() {
        return "Lookup table added for "+getExistingTableName()+"."+getExistingColumnName();
    }
    @Override
    public String getSerializedObjectNamespace() {
        return STANDARD_CHANGELOG_NAMESPACE;
    }
}
| |
package org.mtransit.android.ui.view;
import android.content.Context;
import android.graphics.Typeface;
import android.text.TextUtils;
import android.view.View;
import android.view.ViewStub;
import android.widget.ImageView;
import android.widget.TextView;
import androidx.annotation.LayoutRes;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.core.util.Pair;
import androidx.navigation.NavController;
import androidx.navigation.Navigation;
import androidx.navigation.fragment.FragmentNavigator;
import androidx.viewbinding.ViewBinding;
import org.mtransit.android.R;
import org.mtransit.android.commons.MTLog;
import org.mtransit.android.commons.data.AppStatus;
import org.mtransit.android.commons.data.AvailabilityPercent;
import org.mtransit.android.commons.data.POI;
import org.mtransit.android.commons.data.POIStatus;
import org.mtransit.android.commons.data.Route;
import org.mtransit.android.commons.data.RouteTripStop;
import org.mtransit.android.commons.data.ServiceUpdate;
import org.mtransit.android.data.DataSourceType;
import org.mtransit.android.data.IAgencyUIProperties;
import org.mtransit.android.data.JPaths;
import org.mtransit.android.data.Module;
import org.mtransit.android.data.POIManager;
import org.mtransit.android.data.UISchedule;
import org.mtransit.android.databinding.LayoutPoiBasicBinding;
import org.mtransit.android.databinding.LayoutPoiBasicWithAvailabilityPercentBinding;
import org.mtransit.android.databinding.LayoutPoiModuleBinding;
import org.mtransit.android.databinding.LayoutPoiModuleWithAppStatusBinding;
import org.mtransit.android.databinding.LayoutPoiRtsBinding;
import org.mtransit.android.databinding.LayoutPoiRtsWithScheduleBinding;
import org.mtransit.android.dev.DemoModeManager;
import org.mtransit.android.ui.MainActivity;
import org.mtransit.android.ui.rts.route.RTSRouteFragment;
import org.mtransit.android.ui.view.common.MTTransitions;
import org.mtransit.commons.FeatureFlags;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
@SuppressWarnings({"WeakerAccess", "unused", "DuplicateBranchesInSwitch"})
public class POIViewController implements MTLog.Loggable {
private static final String LOG_TAG = POIViewController.class.getSimpleName();
// Tag used by MTLog for all logging from this class.
@NonNull
@Override
public String getLogTag() {
    return LOG_TAG;
}
/**
 * Sets the proper layout resource on the stub, inflates it, and returns the
 * matching view binding for the given POI type / status type pair.
 */
@NonNull
public static ViewBinding getLayoutViewBinding(int poiType, int poiStatusType, @NonNull ViewStub viewStub) {
    viewStub.setLayoutResource(getLayoutResId(poiType, poiStatusType));
    if (poiType == POI.ITEM_VIEW_TYPE_TEXT_MESSAGE) {
        return LayoutPoiBasicBinding.bind(viewStub.inflate());
    } else if (poiType == POI.ITEM_VIEW_TYPE_MODULE) {
        return getModuleLayoutViewBinding(poiStatusType, viewStub);
    } else if (poiType == POI.ITEM_VIEW_TYPE_ROUTE_TRIP_STOP) {
        return getRTSLayoutViewBinding(poiStatusType, viewStub);
    } else if (poiType == POI.ITEM_VIEW_TYPE_BASIC_POI) {
        return getBasicPOILayoutViewBinding(poiStatusType, viewStub);
    }
    // Unknown POI type: warn and fall back to the basic binding.
    MTLog.w(LOG_TAG, "getLayoutViewBinding() > Unknown view type '%s' for status %s!", poiType, poiStatusType);
    return getBasicPOILayoutViewBinding(poiStatusType, viewStub);
}
// Convenience overload: resolves the layout from the POI's type and status.
@LayoutRes
public static int getLayoutResId(@NonNull POIManager poim) {
    return getLayoutResId(poim.poi.getType(), poim.getStatusType());
}
/** Maps a POI type (and its status type) to the row layout resource to inflate. */
@LayoutRes
public static int getLayoutResId(int poiType, int poiStatusType) {
    if (poiType == POI.ITEM_VIEW_TYPE_TEXT_MESSAGE) {
        return R.layout.layout_poi_basic;
    } else if (poiType == POI.ITEM_VIEW_TYPE_MODULE) {
        return getModuleLayout(poiStatusType);
    } else if (poiType == POI.ITEM_VIEW_TYPE_ROUTE_TRIP_STOP) {
        return getRTSLayout(poiStatusType);
    } else if (poiType == POI.ITEM_VIEW_TYPE_BASIC_POI) {
        return getBasicPOILayout(poiStatusType);
    }
    // Unknown POI type: warn and fall back to the basic layout.
    MTLog.w(LOG_TAG, "getLayoutResId() > Unknown view type '%s' for status %s!", poiType, poiStatusType);
    return getBasicPOILayout(poiStatusType);
}
/** RTS row layout: plain, or the variant with an embedded schedule status. */
private static int getRTSLayout(int status) {
    if (status == POI.ITEM_STATUS_TYPE_SCHEDULE) {
        return R.layout.layout_poi_rts_with_schedule;
    }
    if (status != POI.ITEM_STATUS_TYPE_NONE) {
        MTLog.w(LOG_TAG, "Unexpected status '%s' (rts view w/o status)!", status);
    }
    return R.layout.layout_poi_rts;
}
/** Inflates the stub and binds the RTS layout matching the status type. */
@NonNull
private static ViewBinding getRTSLayoutViewBinding(int status, @NonNull ViewStub viewStub) {
    if (status == POI.ITEM_STATUS_TYPE_SCHEDULE) {
        return LayoutPoiRtsWithScheduleBinding.bind(viewStub.inflate());
    }
    if (status != POI.ITEM_STATUS_TYPE_NONE) {
        MTLog.w(LOG_TAG, "Unexpected status '%s' (rts view w/o status)!", status);
    }
    return LayoutPoiRtsBinding.bind(viewStub.inflate());
}
/** Basic POI row layout: plain, or the variant with an availability-percent status. */
@LayoutRes
private static int getBasicPOILayout(int status) {
    if (status == POI.ITEM_STATUS_TYPE_AVAILABILITY_PERCENT) {
        return R.layout.layout_poi_basic_with_availability_percent;
    }
    if (status != POI.ITEM_STATUS_TYPE_NONE) {
        MTLog.w(LOG_TAG, "Unexpected status '%s' (basic view w/o status)!", status);
    }
    return R.layout.layout_poi_basic;
}
/** Inflates the stub and binds the basic POI layout matching the status type. */
@NonNull
private static ViewBinding getBasicPOILayoutViewBinding(int status, @NonNull ViewStub viewStub) {
    if (status == POI.ITEM_STATUS_TYPE_AVAILABILITY_PERCENT) {
        return LayoutPoiBasicWithAvailabilityPercentBinding.bind(viewStub.inflate());
    }
    if (status != POI.ITEM_STATUS_TYPE_NONE) {
        MTLog.w(LOG_TAG, "Unexpected status '%s' (basic view w/o status)!", status);
    }
    return LayoutPoiBasicBinding.bind(viewStub.inflate());
}
/** Module row layout: plain, or the variant with an app-status section. */
private static int getModuleLayout(int status) {
    if (status == POI.ITEM_STATUS_TYPE_APP) {
        return R.layout.layout_poi_module_with_app_status;
    }
    if (status != POI.ITEM_STATUS_TYPE_NONE) {
        MTLog.w(LOG_TAG, "Unexpected status '%s' (module view w/o status)!", status);
    }
    return R.layout.layout_poi_module;
}
/** Inflates the stub and binds the module layout matching the status type. */
@NonNull
private static ViewBinding getModuleLayoutViewBinding(int status, @NonNull ViewStub viewStub) {
    if (status == POI.ITEM_STATUS_TYPE_APP) {
        return LayoutPoiModuleWithAppStatusBinding.bind(viewStub.inflate());
    }
    if (status != POI.ITEM_STATUS_TYPE_NONE) {
        MTLog.w(LOG_TAG, "Unexpected status '%s' (module view w/o status)!", status);
    }
    return LayoutPoiModuleBinding.bind(viewStub.inflate());
}
// Convenience overload: delegates to the POI-based initializer.
private static void initViewHolder(@NonNull POIManager poim, @NonNull View view) {
    initViewHolder(poim.poi, view);
}
// Convenience overload: delegates using the POI's type and status type.
private static void initViewHolder(@NonNull POI poi, @NonNull View view) {
    initViewHolder(poi.getType(), poi.getStatusType(), view);
}
/**
 * Builds the view holder matching the POI type, wires its common, status and
 * service-update sub-holders, and caches it in the view's tag.
 */
private static void initViewHolder(int poiType, int poiStatusType, @NonNull View view) {
    final CommonViewHolder holder;
    if (poiType == POI.ITEM_VIEW_TYPE_TEXT_MESSAGE) {
        holder = initTextMessageViewHolder(view);
    } else if (poiType == POI.ITEM_VIEW_TYPE_MODULE) {
        holder = initModuleViewHolder(view);
    } else if (poiType == POI.ITEM_VIEW_TYPE_ROUTE_TRIP_STOP) {
        holder = initRTSViewHolder(view);
    } else if (poiType == POI.ITEM_VIEW_TYPE_BASIC_POI) {
        holder = initBasicViewHolder(view);
    } else {
        // Unknown POI type: warn and fall back to the basic holder.
        MTLog.w(LOG_TAG, "initViewHolder() > Unknown view type for poi type %s!", poiType);
        holder = initBasicViewHolder(view);
    }
    initCommonViewHolder(holder, view);
    holder.statusViewHolder = initPOIStatusViewHolder(poiStatusType, view);
    holder.serviceUpdateViewHolder = initServiceUpdateViewHolder(view);
    // Cache the holder on the view so later updates skip findViewById().
    view.setTag(holder);
}
// Builds the holder for the service-update warning icon.
private static ServiceUpdateViewHolder initServiceUpdateViewHolder(@NonNull View view) {
    ServiceUpdateViewHolder holder = new ServiceUpdateViewHolder();
    holder.warningImg = view.findViewById(R.id.service_update_warning);
    return holder;
}
// Builds the holder for a module row; the extra image shows the module type.
private static CommonViewHolder initModuleViewHolder(@NonNull View view) {
    ModuleViewHolder holder = new ModuleViewHolder();
    holder.moduleExtraTypeImg = view.findViewById(R.id.extra);
    return holder;
}
// Text-message rows carry no extra views; only the common holder fields apply.
private static CommonViewHolder initTextMessageViewHolder(@NonNull View view) {
    return new TextMessageViewHolder();
}
// Basic POI rows carry no extra views; only the common holder fields apply.
private static CommonViewHolder initBasicViewHolder(@NonNull View view) {
    return new BasicPOIViewHolder();
}
// Builds the holder for a route/trip/stop row including its route extras.
private static CommonViewHolder initRTSViewHolder(@NonNull View view) {
    RouteTripStopViewHolder holder = new RouteTripStopViewHolder();
    initRTSExtra(view, holder);
    return holder;
}
// Caches the route/trip sub-views of an RTS row on the holder.
private static void initRTSExtra(@NonNull View view, @NonNull RouteTripStopViewHolder holder) {
    holder.rtsExtraV = view.findViewById(R.id.extra);
    holder.routeFL = view.findViewById(R.id.route);
    holder.routeShortNameTv = view.findViewById(R.id.route_short_name);
    holder.routeTypeImg = view.findViewById(R.id.route_type_img);
    holder.tripHeadingTv = view.findViewById(R.id.trip_heading);
    holder.tripHeadingBg = view.findViewById(R.id.trip_heading_bg);
}
/**
 * Builds the status sub-holder for the given status type, or returns null when
 * the status type has no dedicated views (NONE or unknown).
 */
@Nullable
private static CommonStatusViewHolder initPOIStatusViewHolder(int status, @NonNull View view) {
    final CommonStatusViewHolder statusViewHolder;
    if (status == POI.ITEM_STATUS_TYPE_NONE) {
        statusViewHolder = null;
    } else if (status == POI.ITEM_STATUS_TYPE_AVAILABILITY_PERCENT) {
        statusViewHolder = initAvailabilityPercentViewHolder(view);
    } else if (status == POI.ITEM_STATUS_TYPE_SCHEDULE) {
        statusViewHolder = initScheduleViewHolder(view);
    } else if (status == POI.ITEM_STATUS_TYPE_APP) {
        statusViewHolder = initAppStatusViewHolder(view);
    } else {
        MTLog.w(LOG_TAG, "Unexpected status '%s' (no view holder)!", status);
        statusViewHolder = null;
    }
    if (statusViewHolder != null) {
        initCommonStatusViewHolderHolder(statusViewHolder, view);
    }
    return statusViewHolder;
}
// Builds the holder for the two-line schedule status display.
private static CommonStatusViewHolder initScheduleViewHolder(@NonNull View view) {
    ScheduleStatusViewHolder scheduleStatusViewHolder = new ScheduleStatusViewHolder();
    scheduleStatusViewHolder.dataNextLine1Tv = view.findViewById(R.id.data_next_line_1);
    scheduleStatusViewHolder.dataNextLine2Tv = view.findViewById(R.id.data_next_line_2);
    return scheduleStatusViewHolder;
}
// Builds the holder for the app-status text display.
@NonNull
private static CommonStatusViewHolder initAppStatusViewHolder(@NonNull View view) {
    AppStatusViewHolder appStatusViewHolder = new AppStatusViewHolder();
    appStatusViewHolder.textTv = view.findViewById(R.id.textTv);
    return appStatusViewHolder;
}
// Builds the holder for the availability-percent display (text + pie chart).
@NonNull
private static CommonStatusViewHolder initAvailabilityPercentViewHolder(@NonNull View view) {
    AvailabilityPercentStatusViewHolder availabilityPercentStatusViewHolder = new AvailabilityPercentStatusViewHolder();
    availabilityPercentStatusViewHolder.textTv = view.findViewById(R.id.textTv);
    availabilityPercentStatusViewHolder.piePercentV = view.findViewById(R.id.pie);
    return availabilityPercentStatusViewHolder;
}
// Wires the view shared by every status holder (the status container).
private static void initCommonStatusViewHolderHolder(@NonNull CommonStatusViewHolder holder, @NonNull View view) {
    holder.statusV = view.findViewById(R.id.status);
}
// Wires the views shared by all POI rows (name, favorite, distance, compass).
private static void initCommonViewHolder(@NonNull CommonViewHolder holder, @NonNull View view) {
    holder.uuid = null; // set later when the holder is bound to a POI
    holder.view = view;
    holder.nameTv = view.findViewById(R.id.name);
    holder.favImg = view.findViewById(R.id.fav);
    holder.distanceTv = view.findViewById(R.id.distance);
    holder.compassV = view.findViewById(R.id.compass);
}
/**
 * Updates a POI row from a {@link POI} alone (no POIManager status data).
 * Lazily initializes and caches the view holder in the view's tag.
 */
public static void updatePOIView(@Nullable View view, @Nullable POI poi, @NonNull POIDataProvider dataProvider) {
    if (view == null || poi == null) {
        MTLog.d(LOG_TAG, "updateView() > SKIP (no view or poi)");
        return;
    }
    // First bind (or tag overwritten): build and cache the view holder.
    if (view.getTag() == null || !(view.getTag() instanceof CommonViewHolder)) {
        final int poiType = poi.getType();
        final int poiStatusType = poi.getStatusType();
        initViewHolder(poiType, poiStatusType, view);
    }
    CommonViewHolder holder = (CommonViewHolder) view.getTag();
    updatePOICommonView(holder, poi, dataProvider);
    updateExtra(view.getContext(), holder, poi, dataProvider);
    if (holder.statusViewHolder != null && !dataProvider.isShowingStatus()) {
        holder.statusViewHolder.statusV.setVisibility(View.INVISIBLE);
        // NOTE(review): this early return skips the service-update visibility
        // handling below, unlike updateView(POIManager); confirm the asymmetry
        // is intentional.
        return;
    }
    if (holder.serviceUpdateViewHolder != null && !dataProvider.isShowingServiceUpdates()) {
        holder.serviceUpdateViewHolder.warningImg.setVisibility(View.GONE);
    }
}
/**
 * Binds the fields shared by all POI rows: name (marquee), favorite flag,
 * compass coordinates, and bold highlighting of the closest POI.
 */
private static void updatePOICommonView(@NonNull CommonViewHolder holder, @NonNull POI poi, @NonNull POIDataProvider dataProvider) {
    //noinspection ConstantConditions // poi always non-null?
    if (poi == null) {
        MTLog.d(LOG_TAG, "updateCommonView() > SKIP (no poi)");
        return;
    }
    holder.nameTv.setText(poi.getLabel());
    final DemoModeManager demoModeManager = dataProvider.providesDemoModeManager();
    holder.nameTv.setSingleLine(true); // marquee forever
    holder.nameTv.setSelected(!demoModeManager.getEnabled()); // marquee forever
    final boolean showFavorite = dataProvider.isShowingFavorite() && dataProvider.isFavorite(poi.getUUID());
    holder.favImg.setVisibility(showFavorite ? View.VISIBLE : View.GONE);
    final boolean closest = dataProvider.isClosestPOI(poi.getUUID());
    holder.compassV.setLatLng(poi.getLat(), poi.getLng());
    // Closest POI is emphasized in bold; everything else uses the default face.
    final Typeface typeface = closest ? Typeface.DEFAULT_BOLD : Typeface.DEFAULT;
    holder.nameTv.setTypeface(typeface);
    holder.distanceTv.setTypeface(typeface);
}
/**
 * Updates a POI row from a {@link POIManager}: common fields, type-specific
 * extras, status, and service-update warning. Lazily builds the view holder.
 */
public static void updateView(@Nullable View view, @Nullable POIManager poim, @NonNull POIDataProvider dataProvider) {
    if (view == null || poim == null) {
        MTLog.d(LOG_TAG, "updateView() > SKIP (no view or poi)");
        return;
    }
    // First bind (or tag overwritten): build and cache the view holder.
    if (view.getTag() == null || !(view.getTag() instanceof CommonViewHolder)) {
        final int poiType = poim.poi.getType();
        final int poiStatusType = poim.poi.getStatusType();
        initViewHolder(poiType, poiStatusType, view);
    }
    CommonViewHolder holder = (CommonViewHolder) view.getTag();
    updateCommonView(holder, poim, dataProvider);
    updateExtra(view.getContext(), holder, poim, dataProvider);
    updatePOIStatus(view.getContext(), holder.statusViewHolder, poim, dataProvider);
    updatePOIServiceUpdate(view.getContext(), holder.serviceUpdateViewHolder, poim, dataProvider);
}
/**
 * Applies the POI color to the type-specific "extra" view (RTS color bar or
 * module type image background). No-op for types without an extra view, when
 * extras are hidden, or when no color is supplied.
 */
public static void updatePOIColorView(@Nullable View view,
        int poiType,
        int poiStatusType,
        @Nullable Integer poiColor,
        @NonNull final POIDataProvider dataProvider) {
    if (view == null) {
        MTLog.d(LOG_TAG, "updatePOIColorView() > SKIP (no view)");
        return;
    }
    // First bind (or tag overwritten): build and cache the view holder.
    if (view.getTag() == null || !(view.getTag() instanceof CommonViewHolder)) {
        initViewHolder(poiType, poiStatusType, view);
    }
    switch (poiType) {
    case POI.ITEM_VIEW_TYPE_ROUTE_TRIP_STOP:
        if (dataProvider.isShowingExtra() && poiColor != null) {
            RouteTripStopViewHolder holder = (RouteTripStopViewHolder) view.getTag();
            holder.rtsExtraV.setBackgroundColor(poiColor);
        }
        break;
    case POI.ITEM_VIEW_TYPE_MODULE:
        if (dataProvider.isShowingExtra() && poiColor != null) {
            ModuleViewHolder holder = (ModuleViewHolder) view.getTag();
            holder.moduleExtraTypeImg.setBackgroundColor(poiColor);
        }
        break;
    case POI.ITEM_VIEW_TYPE_TEXT_MESSAGE:
        break; // no extra view to color
    case POI.ITEM_VIEW_TYPE_BASIC_POI:
        break; // no extra view to color
    default:
        MTLog.w(LOG_TAG, "updatePOIColorView() > Unknown view type for poi type %s!", poiType);
    }
}
/**
 * Dispatches the type-specific "extra" section update for a {@link POIManager}
 * (route header for stops, type icon for modules, nothing for text/basic POIs).
 */
private static void updateExtra(@NonNull Context context,
                                @NonNull CommonViewHolder holder,
                                @NonNull POIManager poim,
                                @NonNull POIDataProvider dataProvider) {
    final int type = poim.poi.getType();
    if (type == POI.ITEM_VIEW_TYPE_ROUTE_TRIP_STOP) {
        updateRTSExtra(context, poim, (RouteTripStopViewHolder) holder, dataProvider);
    } else if (type == POI.ITEM_VIEW_TYPE_MODULE) {
        updateModuleExtra(poim, (ModuleViewHolder) holder, dataProvider);
    } else if (type != POI.ITEM_VIEW_TYPE_TEXT_MESSAGE
            && type != POI.ITEM_VIEW_TYPE_BASIC_POI) { // no extra for these 2 types
        MTLog.w(LOG_TAG, "updateView() > Unknown view type for poi type %s!", type);
    }
}
/**
 * Binds the module "extra" view: agency color as background + target module type icon.
 * Hides the image when the POI is not a {@link Module}.
 */
private static void updateModuleExtra(@NonNull POIManager poim,
                                      @NonNull ModuleViewHolder holder,
                                      @NonNull POIDataProvider dataProvider) {
    if (!(poim.poi instanceof Module)) {
        holder.moduleExtraTypeImg.setVisibility(View.GONE);
        return;
    }
    final Module module = (Module) poim.poi;
    holder.moduleExtraTypeImg.setBackgroundColor(poim.getColor(dataProvider.providesDataSourcesRepository()));
    final DataSourceType targetType = DataSourceType.parseId(module.getTargetTypeId());
    // 0 clears the image when the target type ID is unknown
    holder.moduleExtraTypeImg.setImageResource(targetType == null ? 0 : targetType.getIconResId());
    holder.moduleExtraTypeImg.setVisibility(View.VISIBLE);
}
/**
 * Dispatches the type-specific "extra" section update for a bare {@link POI}
 * (no {@link POIManager} available, so no dynamic color binding).
 */
private static void updateExtra(@NonNull Context context,
                                @NonNull CommonViewHolder holder,
                                @NonNull POI poi,
                                @NonNull POIDataProvider dataProvider) {
    final int type = poi.getType();
    if (type == POI.ITEM_VIEW_TYPE_ROUTE_TRIP_STOP) {
        updateRTSExtra(context, poi, (RouteTripStopViewHolder) holder, dataProvider);
    } else if (type == POI.ITEM_VIEW_TYPE_MODULE) {
        updateModuleExtra(poi, (ModuleViewHolder) holder, dataProvider);
    } else if (type != POI.ITEM_VIEW_TYPE_TEXT_MESSAGE
            && type != POI.ITEM_VIEW_TYPE_BASIC_POI) { // no extra for these 2 types
        MTLog.w(LOG_TAG, "updateView() > Unknown view type for poi type %s!", type);
    }
}
/**
 * Binds the route/trip/stop "extra" section from a bare {@link POI}: route short name
 * (or the agency logo when the route has no short name), trip heading, and the
 * click-through to the route screen.
 * <p>
 * FIX: removed the unused locals {@code authority} and {@code stopId} (never read).
 */
private static void updateRTSExtra(@NonNull Context context,
                                   @NonNull POI poi,
                                   @NonNull RouteTripStopViewHolder holder,
                                   @NonNull final POIDataProvider dataProvider) {
    if (poi instanceof RouteTripStop) {
        RouteTripStop rts = (RouteTripStop) poi;
        // NOTE(review): hiding when isShowingExtra() is TRUE and route is null reads oddly
        // (one might expect !isShowingExtra() || route == null) -- TODO confirm intent
        //noinspection ConstantConditions // route is always non-null?
        if (dataProvider.isShowingExtra() && rts.getRoute() == null) {
            if (holder.rtsExtraV != null) {
                holder.rtsExtraV.setVisibility(View.GONE);
            }
            if (holder.routeFL != null) {
                holder.routeFL.setVisibility(View.GONE);
            }
            if (holder.tripHeadingBg != null) {
                holder.tripHeadingBg.setVisibility(View.GONE);
            }
        } else {
            final Route route = rts.getRoute();
            if (TextUtils.isEmpty(route.getShortName())) {
                // no route short name: show the agency logo instead (cached via the view tag)
                holder.routeShortNameTv.setVisibility(View.INVISIBLE);
                if (holder.routeTypeImg.hasPaths() && poi.getAuthority().equals(holder.routeTypeImg.getTag())) {
                    holder.routeTypeImg.setVisibility(View.VISIBLE);
                } else {
                    final IAgencyUIProperties agency = dataProvider.providesDataSourcesRepository().getAgency(poi.getAuthority());
                    JPaths rtsRouteLogo = agency == null ? null : agency.getLogo();
                    if (rtsRouteLogo != null) {
                        holder.routeTypeImg.setJSON(rtsRouteLogo);
                        holder.routeTypeImg.setTag(poi.getAuthority());
                        holder.routeTypeImg.setVisibility(View.VISIBLE);
                    } else {
                        holder.routeTypeImg.setVisibility(View.GONE);
                    }
                }
            } else {
                holder.routeTypeImg.setVisibility(View.GONE);
                holder.routeShortNameTv.setText(route.getShortName());
                holder.routeShortNameTv.setVisibility(View.VISIBLE);
            }
            holder.routeFL.setVisibility(View.VISIBLE);
            holder.rtsExtraV.setVisibility(View.VISIBLE);
            //noinspection ConstantConditions // trip always non-null?
            if (rts.getTrip() == null) {
                holder.tripHeadingBg.setVisibility(View.GONE);
            } else {
                holder.tripHeadingTv.setText(rts.getTrip().getUIHeading(context, true));
                final DemoModeManager demoModeManager = dataProvider.providesDemoModeManager();
                holder.tripHeadingTv.setSingleLine(true); // marquee forever
                holder.tripHeadingTv.setSelected(!demoModeManager.getEnabled()); // marquee forever
                holder.tripHeadingBg.setVisibility(View.VISIBLE);
            }
            holder.rtsExtraV.setOnClickListener(view -> {
                MTTransitions.setTransitionName(view, "r_" + rts.getAuthority() + "_" + rts.getRoute().getId());
                if (FeatureFlags.F_NAVIGATION) {
                    final NavController navController = Navigation.findNavController(view);
                    FragmentNavigator.Extras extras = null;
                    if (FeatureFlags.F_TRANSITION) {
                        extras = new FragmentNavigator.Extras.Builder()
                                .addSharedElement(view, view.getTransitionName())
                                .build();
                    }
                    navController.navigate(
                            R.id.nav_to_rts_route_screen,
                            RTSRouteFragment.newInstanceArgs(rts),
                            null,
                            extras
                    );
                } else {
                    final MainActivity mainActivity = (MainActivity) dataProvider.getActivity();
                    if (mainActivity == null) {
                        return;
                    }
                    mainActivity.addFragmentToStack(
                            RTSRouteFragment.newInstance(rts),
                            view
                    );
                }
            });
        }
    }
}
/**
 * Binds the module "extra" type icon from a bare {@link POI}.
 * Unlike the {@link POIManager} variant, no background color is applied here;
 * {@code dataProvider} is kept for signature parity with the other variant.
 */
private static void updateModuleExtra(@NonNull POI poi,
                                      @NonNull ModuleViewHolder holder,
                                      @NonNull POIDataProvider dataProvider) {
    if (!(poi instanceof Module)) {
        holder.moduleExtraTypeImg.setVisibility(View.GONE);
        return;
    }
    final DataSourceType targetType = DataSourceType.parseId(((Module) poi).getTargetTypeId());
    // 0 clears the image when the target type ID is unknown
    holder.moduleExtraTypeImg.setImageResource(targetType == null ? 0 : targetType.getIconResId());
    holder.moduleExtraTypeImg.setVisibility(View.VISIBLE);
}
/**
 * Binds the route/trip/stop "extra" section from a {@link POIManager}: route short name
 * (or agency logo), trip heading, agency color background and the click-through to the
 * route screen.
 * <p>
 * FIX: removed the unused locals {@code authority} and {@code stopId} (never read).
 */
private static void updateRTSExtra(@NonNull Context context,
                                   @NonNull POIManager poim,
                                   @NonNull RouteTripStopViewHolder holder,
                                   @NonNull final POIDataProvider dataProvider) {
    final POI poi = poim.poi;
    if (poi instanceof RouteTripStop) {
        RouteTripStop rts = (RouteTripStop) poi;
        // NOTE(review): hiding when isShowingExtra() is TRUE and route is null reads oddly
        // (one might expect !isShowingExtra() || route == null) -- TODO confirm intent
        //noinspection ConstantConditions // route is always non-null?
        if (dataProvider.isShowingExtra() && rts.getRoute() == null) {
            if (holder.rtsExtraV != null) {
                holder.rtsExtraV.setVisibility(View.GONE);
            }
            if (holder.routeFL != null) {
                holder.routeFL.setVisibility(View.GONE);
            }
            if (holder.tripHeadingBg != null) {
                holder.tripHeadingBg.setVisibility(View.GONE);
            }
        } else {
            final Route route = rts.getRoute();
            if (TextUtils.isEmpty(route.getShortName())) {
                // no route short name: show the agency logo instead (cached via the view tag)
                holder.routeShortNameTv.setVisibility(View.INVISIBLE);
                if (holder.routeTypeImg.hasPaths() && poi.getAuthority().equals(holder.routeTypeImg.getTag())) {
                    holder.routeTypeImg.setVisibility(View.VISIBLE);
                } else {
                    final IAgencyUIProperties agency = dataProvider.providesDataSourcesRepository().getAgency(poi.getAuthority());
                    JPaths rtsRouteLogo = agency == null ? null : agency.getLogo();
                    if (rtsRouteLogo != null) {
                        holder.routeTypeImg.setJSON(rtsRouteLogo);
                        holder.routeTypeImg.setTag(poi.getAuthority());
                        holder.routeTypeImg.setVisibility(View.VISIBLE);
                    } else {
                        holder.routeTypeImg.setVisibility(View.GONE);
                    }
                }
            } else {
                holder.routeTypeImg.setVisibility(View.GONE);
                holder.routeShortNameTv.setText(route.getShortName());
                holder.routeShortNameTv.setVisibility(View.VISIBLE);
            }
            holder.routeFL.setVisibility(View.VISIBLE);
            holder.rtsExtraV.setVisibility(View.VISIBLE);
            //noinspection ConstantConditions // trip always non-null?
            if (rts.getTrip() == null) {
                holder.tripHeadingBg.setVisibility(View.GONE);
            } else {
                holder.tripHeadingTv.setText(rts.getTrip().getUIHeading(context, true));
                final DemoModeManager demoModeManager = dataProvider.providesDemoModeManager();
                holder.tripHeadingTv.setSingleLine(true); // marquee forever
                holder.tripHeadingTv.setSelected(!demoModeManager.getEnabled()); // marquee forever
                holder.tripHeadingBg.setVisibility(View.VISIBLE);
            }
            holder.rtsExtraV.setBackgroundColor(poim.getColor(dataProvider.providesDataSourcesRepository()));
            holder.rtsExtraV.setOnClickListener(view -> {
                MTTransitions.setTransitionName(view, "r_" + rts.getAuthority() + "_" + rts.getRoute().getId());
                if (FeatureFlags.F_NAVIGATION) {
                    final NavController navController = Navigation.findNavController(view);
                    FragmentNavigator.Extras extras = null;
                    if (FeatureFlags.F_TRANSITION) {
                        extras = new FragmentNavigator.Extras.Builder()
                                .addSharedElement(view, view.getTransitionName())
                                .build();
                    }
                    navController.navigate(
                            R.id.nav_to_rts_route_screen,
                            RTSRouteFragment.newInstanceArgs(rts),
                            null,
                            extras
                    );
                } else {
                    final MainActivity mainActivity = (MainActivity) dataProvider.getActivity();
                    if (mainActivity == null) {
                        return;
                    }
                    mainActivity.addFragmentToStack(
                            RTSRouteFragment.newInstance(rts),
                            view
                    );
                }
            });
        }
    }
}
/**
 * Updates the status section of an already-bound POI item view from a pre-fetched
 * {@link POIStatus}. Skips silently when the view was never bound (no holder tag).
 */
public static void updatePOIStatus(@Nullable View view, @NonNull POIStatus status, @NonNull POIDataProvider dataProvider) {
    final Object tag = view == null ? null : view.getTag();
    if (!(tag instanceof CommonViewHolder)) { // also true when view/tag is null
        MTLog.d(LOG_TAG, "updatePOIStatus() > SKIP (no view or view holder)");
        return;
    }
    updatePOIStatus(view.getContext(), ((CommonViewHolder) tag).statusViewHolder, status, dataProvider);
}
/**
 * Dispatches a pre-fetched {@link POIStatus} to the matching status renderer,
 * hiding the status view when statuses are disabled or the type is NONE/unknown.
 */
private static void updatePOIStatus(@NonNull Context context,
                                    @Nullable CommonStatusViewHolder statusViewHolder,
                                    @NonNull POIStatus status,
                                    @NonNull POIDataProvider dataProvider) {
    if (statusViewHolder == null) {
        MTLog.d(LOG_TAG, "updatePOIStatus() > SKIP (no view holder)");
        return;
    }
    if (!dataProvider.isShowingStatus()) {
        statusViewHolder.statusV.setVisibility(View.INVISIBLE);
        return;
    }
    final int statusType = status.getType();
    if (statusType == POI.ITEM_STATUS_TYPE_AVAILABILITY_PERCENT) {
        updateAvailabilityPercent(context, statusViewHolder, status);
    } else if (statusType == POI.ITEM_STATUS_TYPE_SCHEDULE) {
        updateRTSSchedule(context, statusViewHolder, status, dataProvider);
    } else if (statusType == POI.ITEM_STATUS_TYPE_APP) {
        updateAppStatus(context, statusViewHolder, status);
    } else if (statusType == POI.ITEM_STATUS_TYPE_NONE) {
        statusViewHolder.statusV.setVisibility(View.INVISIBLE);
    } else {
        MTLog.w(LOG_TAG, "Unexpected status type '%s'!", statusType);
        statusViewHolder.statusV.setVisibility(View.INVISIBLE);
    }
}
/**
 * Updates the status section of a POI item view from a {@link POIManager},
 * creating the view holder first when the view was never bound.
 */
public static void updatePOIStatus(@Nullable View view, @NonNull POIManager poim, @NonNull POIDataProvider dataProvider) {
    if (view == null) {
        MTLog.d(LOG_TAG, "updatePOIStatus() > SKIP (no view)");
        return;
    }
    if (!(view.getTag() instanceof CommonViewHolder)) { // null tag also fails instanceof
        initViewHolder(poim, view);
    }
    updatePOIStatus(view.getContext(), ((CommonViewHolder) view.getTag()).statusViewHolder, poim, dataProvider);
}
/**
 * Dispatches a {@link POIManager} to the matching status renderer based on the POI's
 * declared status type, hiding the status view when statuses are disabled or unknown.
 */
private static void updatePOIStatus(@NonNull Context context,
                                    @Nullable CommonStatusViewHolder statusViewHolder,
                                    @NonNull POIManager poim,
                                    @NonNull POIDataProvider dataProvider) {
    if (statusViewHolder == null) {
        MTLog.d(LOG_TAG, "updatePOIStatus() > SKIP (no view holder)");
        return;
    }
    if (!dataProvider.isShowingStatus()) {
        statusViewHolder.statusV.setVisibility(View.INVISIBLE);
        return;
    }
    final int statusType = poim.poi.getStatusType();
    if (statusType == POI.ITEM_STATUS_TYPE_AVAILABILITY_PERCENT) {
        updateAvailabilityPercent(context, statusViewHolder, poim, dataProvider);
    } else if (statusType == POI.ITEM_STATUS_TYPE_SCHEDULE) {
        updateRTSSchedule(context, statusViewHolder, poim, dataProvider);
    } else if (statusType == POI.ITEM_STATUS_TYPE_APP) {
        updateAppStatus(context, statusViewHolder, poim, dataProvider);
    } else if (statusType == POI.ITEM_STATUS_TYPE_NONE) {
        statusViewHolder.statusV.setVisibility(View.INVISIBLE);
    } else {
        MTLog.w(LOG_TAG, "Unexpected status type '%s'!", statusType);
        statusViewHolder.statusV.setVisibility(View.INVISIBLE);
    }
}
/**
 * Fetches the app status for this POI (registering the provider as loader listener)
 * and renders it; hides the status view when statuses are off or the holder type mismatches.
 */
private static void updateAppStatus(@NonNull Context context,
                                    @NonNull CommonStatusViewHolder statusViewHolder,
                                    @NonNull POIManager poim,
                                    @NonNull POIDataProvider dataProvider) {
    if (!dataProvider.isShowingStatus() || !(statusViewHolder instanceof AppStatusViewHolder)) {
        statusViewHolder.statusV.setVisibility(View.INVISIBLE);
        return;
    }
    poim.setStatusLoaderListener(dataProvider);
    updateAppStatus(context, statusViewHolder, poim.getStatus(context, dataProvider.providesStatusLoader()));
}
/**
 * Renders an {@link AppStatus} message into the holder; hides the status view when
 * the status is absent or of another type.
 */
private static void updateAppStatus(@NonNull Context context, @NonNull CommonStatusViewHolder statusViewHolder, @Nullable POIStatus status) {
    // cast first, like callers guarantee: this holder is always an AppStatusViewHolder here
    final AppStatusViewHolder appStatusViewHolder = (AppStatusViewHolder) statusViewHolder;
    if (!(status instanceof AppStatus)) {
        statusViewHolder.statusV.setVisibility(View.INVISIBLE);
        return;
    }
    appStatusViewHolder.textTv.setText(((AppStatus) status).getStatusMsg(context), TextView.BufferType.SPANNABLE);
    appStatusViewHolder.textTv.setVisibility(View.VISIBLE);
    statusViewHolder.statusV.setVisibility(View.VISIBLE);
}
/**
 * Fetches the schedule status for this POI (registering the provider as loader listener)
 * and renders it; hides the status view when statuses are off or the holder type mismatches.
 */
private static void updateRTSSchedule(@NonNull Context context,
                                      @NonNull CommonStatusViewHolder statusViewHolder,
                                      @NonNull POIManager poim,
                                      @NonNull POIDataProvider dataProvider) {
    if (!dataProvider.isShowingStatus() || !(statusViewHolder instanceof ScheduleStatusViewHolder)) {
        statusViewHolder.statusV.setVisibility(View.INVISIBLE);
        return;
    }
    poim.setStatusLoaderListener(dataProvider);
    updateRTSSchedule(context, statusViewHolder, poim.getStatus(context, dataProvider.providesStatusLoader()), dataProvider);
}
/**
 * Renders the next departures (up to 2 lines) from a {@link UISchedule} status.
 * Line 2 is hidden when empty; the whole status view is hidden when line 1 is empty.
 */
private static void updateRTSSchedule(@NonNull Context context,
                                      @NonNull CommonStatusViewHolder statusViewHolder,
                                      @Nullable POIStatus status,
                                      @NonNull POIDataProvider dataProvider) {
    CharSequence line1 = null;
    CharSequence line2 = null;
    if (status instanceof UISchedule) {
        final ArrayList<Pair<CharSequence, CharSequence>> lines = ((UISchedule) status).getStatus(
                context,
                dataProvider.getNowToTheMinute(),
                TimeUnit.MINUTES.toMillis(30L), // look-ahead window -- TODO confirm semantics
                null,
                10,
                null);
        if (lines != null && !lines.isEmpty()) {
            line1 = lines.get(0).first;
            line2 = lines.get(0).second;
        }
    }
    final ScheduleStatusViewHolder scheduleStatusViewHolder = (ScheduleStatusViewHolder) statusViewHolder;
    scheduleStatusViewHolder.dataNextLine1Tv.setText(line1, TextView.BufferType.SPANNABLE);
    scheduleStatusViewHolder.dataNextLine2Tv.setText(line2, TextView.BufferType.SPANNABLE);
    scheduleStatusViewHolder.dataNextLine2Tv.setVisibility(line2 != null && line2.length() > 0 ? View.VISIBLE : View.GONE);
    statusViewHolder.statusV.setVisibility(line1 != null && line1.length() > 0 ? View.VISIBLE : View.INVISIBLE);
}
/**
 * Fetches the availability percent status for this POI (registering the provider as
 * loader listener) and renders it; hides the status view when statuses are off or the
 * holder type mismatches.
 */
private static void updateAvailabilityPercent(@NonNull Context context,
                                              @NonNull CommonStatusViewHolder statusViewHolder,
                                              @NonNull POIManager poim,
                                              @NonNull POIDataProvider dataProvider) {
    if (!dataProvider.isShowingStatus() || !(statusViewHolder instanceof AvailabilityPercentStatusViewHolder)) {
        statusViewHolder.statusV.setVisibility(View.INVISIBLE);
        return;
    }
    poim.setStatusLoaderListener(dataProvider);
    updateAvailabilityPercent(context, statusViewHolder, poim.getStatus(context, dataProvider.providesStatusLoader()));
}
/**
 * Renders an {@link AvailabilityPercent} status (e.g. bike/dock counts) into the holder:
 * a text message when the status is not OK or when showing the lower value,
 * otherwise a pie chart with 3 slices; hides the whole status view for other status types.
 */
private static void updateAvailabilityPercent(@NonNull Context context,
        @NonNull CommonStatusViewHolder statusViewHolder,
        @Nullable POIStatus status) {
    // NOTE(review): unchecked cast happens before the status instanceof check; callers
    // presumably only pass AvailabilityPercentStatusViewHolder here -- TODO confirm
    AvailabilityPercentStatusViewHolder availabilityPercentStatusViewHolder = (AvailabilityPercentStatusViewHolder) statusViewHolder;
    if (status instanceof AvailabilityPercent) {
        AvailabilityPercent availabilityPercent = (AvailabilityPercent) status;
        if (!availabilityPercent.isStatusOK()) {
            // error/degraded status: text message instead of pie chart
            availabilityPercentStatusViewHolder.piePercentV.setVisibility(View.GONE);
            availabilityPercentStatusViewHolder.textTv.setText(availabilityPercent.getStatusMsg(context), TextView.BufferType.SPANNABLE);
            availabilityPercentStatusViewHolder.textTv.setVisibility(View.VISIBLE);
        } else if (availabilityPercent.isShowingLowerValue()) {
            // low-value mode: show the lower value as text instead of the pie chart
            availabilityPercentStatusViewHolder.piePercentV.setVisibility(View.GONE);
            availabilityPercentStatusViewHolder.textTv.setText(availabilityPercent.getLowerValueText(context), TextView.BufferType.SPANNABLE);
            availabilityPercentStatusViewHolder.textTv.setVisibility(View.VISIBLE);
        } else {
            // nominal: pie chart with 3 slices (value1 default sub-value, value1 sub-value 1, value2)
            availabilityPercentStatusViewHolder.textTv.setVisibility(View.GONE);
            // colors are set before the values; order matches the pieces list below
            availabilityPercentStatusViewHolder.piePercentV.setPiecesColors( //
                    Arrays.asList(
                            new Pair<>(
                                    availabilityPercent.getValue1SubValueDefaultColor(), //
                                    availabilityPercent.getValue1SubValueDefaultColorBg()), //
                            new Pair<>(
                                    availabilityPercent.getValue1SubValue1Color(), //
                                    availabilityPercent.getValue1SubValue1ColorBg()), //
                            new Pair<>(
                                    availabilityPercent.getValue2Color(), //
                                    availabilityPercent.getValue2ColorBg()) //
                    )
            );
            availabilityPercentStatusViewHolder.piePercentV.setPieces(
                    Arrays.asList(
                            availabilityPercent.getValue1SubValueDefault(),
                            availabilityPercent.getValue1SubValue1(),
                            availabilityPercent.getValue2()
                    )
            );
            availabilityPercentStatusViewHolder.piePercentV.setVisibility(View.VISIBLE);
        }
        statusViewHolder.statusV.setVisibility(View.VISIBLE);
    } else {
        statusViewHolder.statusV.setVisibility(View.INVISIBLE);
    }
}
/**
 * Updates the service update warning icon of an already-bound POI item view from
 * a pre-fetched list of service updates.
 */
public static void updateServiceUpdatesView(@Nullable View view,
                                            @Nullable List<ServiceUpdate> serviceUpdates,
                                            @NonNull POIDataProvider dataProvider) {
    final Object tag = view == null ? null : view.getTag();
    if (!(tag instanceof CommonViewHolder)) { // also true when view/tag is null
        MTLog.d(LOG_TAG, "updateServiceUpdatesView() > SKIP (no view or view holder)");
        return;
    }
    updateServiceUpdateViewHolder(((CommonViewHolder) tag).serviceUpdateViewHolder,
            ServiceUpdate.isSeverityWarning(serviceUpdates), dataProvider);
}
/**
 * Updates the service update warning for this POI item view, creating the view holder
 * first when the view was never bound.
 * <p>
 * FIX: the SKIP log message was missing its closing parenthesis ("(no view").
 */
public static void updatePOIServiceUpdate(@Nullable View view,
                                          @NonNull POIManager poim,
                                          @NonNull POIDataProvider dataProvider) {
    if (view == null) {
        MTLog.d(LOG_TAG, "updatePOIServiceUpdate() > SKIP (no view)");
        return;
    }
    if (view.getTag() == null || !(view.getTag() instanceof CommonViewHolder)) {
        final int poiType = poim.poi.getType();
        final int poiStatusType = poim.poi.getStatusType();
        initViewHolder(poiType, poiStatusType, view);
    }
    CommonViewHolder holder = (CommonViewHolder) view.getTag();
    updatePOIServiceUpdate(view.getContext(), holder.serviceUpdateViewHolder, poim, dataProvider);
}
/**
 * Fetches the service update warning state for this POI (registering the provider as
 * loader listener) and binds it; hides the warning icon when service updates are disabled.
 */
private static void updatePOIServiceUpdate(@NonNull Context context,
                                           @Nullable ServiceUpdateViewHolder serviceUpdateViewHolder,
                                           @NonNull POIManager poim,
                                           @NonNull POIDataProvider dataProvider) {
    if (serviceUpdateViewHolder == null) {
        return; // this layout has no service update section
    }
    if (!dataProvider.isShowingServiceUpdates()) {
        serviceUpdateViewHolder.warningImg.setVisibility(View.GONE);
        return;
    }
    poim.setServiceUpdateLoaderListener(dataProvider);
    updateServiceUpdateViewHolder(serviceUpdateViewHolder, poim.isServiceUpdateWarning(context, dataProvider.providesServiceUpdateLoader()), dataProvider);
}
/**
 * Shows the warning icon only when service updates are enabled AND the warning flag
 * is known to be TRUE; hides it otherwise (including when the flag is still unknown/null).
 */
private static void updateServiceUpdateViewHolder(@NonNull ServiceUpdateViewHolder serviceUpdateViewHolder,
                                                  @Nullable Boolean isServiceUpdateWarning,
                                                  @NonNull POIDataProvider dataProvider) {
    final ImageView warningImg = serviceUpdateViewHolder.warningImg;
    if (warningImg == null) {
        return;
    }
    final boolean showWarning = dataProvider.isShowingServiceUpdates()
            && Boolean.TRUE.equals(isServiceUpdateWarning); // null-safe
    warningImg.setVisibility(showWarning ? View.VISIBLE : View.GONE);
}
/**
 * Updates distance text + compass arrow from pre-computed values, creating the view
 * holder first when the view was never bound.
 */
public static void updatePOIDistanceAndCompass(@Nullable View view,
                                               int poiType,
                                               int poiStatusType,
                                               @Nullable Float distance,
                                               @Nullable CharSequence distanceString,
                                               @NonNull POIDataProvider dataProvider) {
    if (view == null) {
        MTLog.d(LOG_TAG, "updatePOIDistanceAndCompass() > skip (no view)");
        return;
    }
    if (!(view.getTag() instanceof CommonViewHolder)) { // null tag also fails instanceof
        initViewHolder(poiType, poiStatusType, view);
    }
    updatePOIDistanceAndCompass((CommonViewHolder) view.getTag(), distance, distanceString, dataProvider);
}
/**
 * Binds distance text + compass arrow from pre-computed values.
 * The compass is only visible while the distance text is visible, and only points
 * when location, compass heading and declination are all available and the location
 * accuracy is no larger than the distance.
 *
 * @param holder         item view holder (skipped when null)
 * @param distance       distance to the POI (same unit as Location accuracy, i.e. meters -- TODO confirm)
 * @param distanceString formatted distance label; null hides distance and compass
 * @param dataProvider   provides location, compass heading and declination
 */
private static void updatePOIDistanceAndCompass(@Nullable CommonViewHolder holder,
        @Nullable Float distance,
        @Nullable CharSequence distanceString,
        @NonNull POIDataProvider dataProvider) {
    if (holder == null) {
        MTLog.d(LOG_TAG, "updatePOIDistanceAndCompass() > skip (no view holder)");
        return;
    }
    if (distanceString != null) {
        // only touch the TextView when the text actually changed (presumably to avoid needless relayout)
        if (!distanceString.equals(holder.distanceTv.getText())) {
            holder.distanceTv.setText(distanceString);
        }
        holder.distanceTv.setVisibility(View.VISIBLE);
    } else {
        holder.distanceTv.setVisibility(View.GONE);
        holder.distanceTv.setText(null);
    }
    if (holder.distanceTv.getVisibility() == View.VISIBLE) {
        // point the compass only with full sensor data AND accuracy better than the distance
        if (dataProvider.getLocation() != null
        && dataProvider.getLastCompassInDegree() != null
        && dataProvider.getLocationDeclination() != null
        && distance != null
        && dataProvider.getLocation().getAccuracy() <= distance) {
            holder.compassV.generateAndSetHeadingN(
            dataProvider.getLocation(), dataProvider.getLastCompassInDegree(), dataProvider.getLocationDeclination());
        } else {
            holder.compassV.resetHeading();
        }
        holder.compassV.setVisibility(View.VISIBLE);
    } else {
        holder.compassV.resetHeading();
        holder.compassV.setVisibility(View.GONE);
    }
}
/**
 * Updates distance text + compass arrow of an already-bound POI item view from a
 * {@link POIManager}.
 * <p>
 * FIX: the view tag was cast to {@link CommonViewHolder} without a type check, unlike
 * every sibling overload; a null or foreign tag could throw ClassCastException.
 * Now guarded with instanceof (same skip behavior as before for a missing holder).
 */
public static void updatePOIDistanceAndCompass(@Nullable View view, @NonNull POIManager poim, @NonNull POIDataProvider dataProvider) {
    if (view == null) {
        MTLog.d(LOG_TAG, "updatePOIDistanceAndCompass() > skip (no view)");
        return;
    }
    final Object tag = view.getTag();
    if (!(tag instanceof CommonViewHolder)) {
        MTLog.d(LOG_TAG, "updatePOIDistanceAndCompass() > skip (no view holder)");
        return;
    }
    updatePOIDistanceAndCompass((CommonViewHolder) tag, poim, dataProvider);
}
/**
 * Binds distance text + compass arrow from a {@link POIManager}: sets the compass
 * target to the POI's coordinates, then mirrors the pre-computed-values overload
 * using the manager's distance string and distance.
 */
private static void updatePOIDistanceAndCompass(@Nullable CommonViewHolder holder, @Nullable POIManager poim, @NonNull POIDataProvider dataProvider) {
    //noinspection ConstantConditions // poi always non-null?
    final POI poi = poim == null ? null : poim.poi;
    if (poi == null || holder == null) {
        MTLog.d(LOG_TAG, "updatePOIDistanceAndCompass() > skip (no poi or view holder)");
        return;
    }
    // compass target is always refreshed, even when the arrow ends up hidden below
    holder.compassV.setLatLng(poi.getLat(), poi.getLng());
    if (poim.getDistanceString() != null) {
        // only touch the TextView when the text actually changed (presumably to avoid needless relayout)
        if (!poim.getDistanceString().equals(holder.distanceTv.getText())) {
            holder.distanceTv.setText(poim.getDistanceString());
        }
        holder.distanceTv.setVisibility(View.VISIBLE);
    } else {
        holder.distanceTv.setVisibility(View.GONE);
        holder.distanceTv.setText(null);
    }
    if (holder.distanceTv.getVisibility() == View.VISIBLE) {
        // point the compass only with full sensor data AND accuracy better than the distance
        if (dataProvider.getLocation() != null
        && dataProvider.getLastCompassInDegree() != null
        && dataProvider.getLocationDeclination() != null
        && dataProvider.getLocation().getAccuracy() <= poim.getDistance()) {
            holder.compassV.generateAndSetHeadingN(
            dataProvider.getLocation(), dataProvider.getLastCompassInDegree(), dataProvider.getLocationDeclination());
        } else {
            holder.compassV.resetHeading();
        }
        holder.compassV.setVisibility(View.VISIBLE);
    } else {
        holder.compassV.resetHeading();
        holder.compassV.setVisibility(View.GONE);
    }
}
/**
 * Binds the fields common to every POI layout: name (with marquee), shared-element
 * transition name, distance + compass, favorite indicator, and bold emphasis when this
 * POI is the closest one.
 * <p>
 * FIX (idiom): replaced the synthetic {@code int index} + switch over {0,-1} with a
 * direct boolean branch — exact same behavior, less indirection.
 */
private static void updateCommonView(@NonNull CommonViewHolder holder, @NonNull POIManager poim, @NonNull POIDataProvider dataProvider) {
    //noinspection ConstantConditions // poi always non-null?
    if (poim.poi == null) {
        MTLog.d(LOG_TAG, "updateCommonView() > SKIP (no poi)");
        return;
    }
    final POI poi = poim.poi;
    holder.uuid = poi.getUUID();
    MTTransitions.setTransitionName(holder.view, "poi_" + poi.getUUID());
    holder.nameTv.setText(poi.getLabel());
    final DemoModeManager demoModeManager = dataProvider.providesDemoModeManager();
    holder.nameTv.setSingleLine(true); // marquee forever
    holder.nameTv.setSelected(!demoModeManager.getEnabled()); // marquee forever
    updatePOIDistanceAndCompass(holder, poim, dataProvider);
    if (dataProvider.isShowingFavorite() && dataProvider.isFavorite(poi.getUUID())) {
        holder.favImg.setVisibility(View.VISIBLE);
    } else {
        holder.favImg.setVisibility(View.GONE);
    }
    // bold the closest POI, regular weight otherwise
    final Typeface typeface = dataProvider.isClosestPOI(poi.getUUID()) ? Typeface.DEFAULT_BOLD : Typeface.DEFAULT;
    holder.nameTv.setTypeface(typeface);
    holder.distanceTv.setTypeface(typeface);
}
/** Base view holder shared by every POI item layout; stored as the item view's tag. */
private static class CommonViewHolder {
    String uuid; // UUID of the POI currently bound to this view
    View view; // root item view (used for shared-element transition name)
    TextView nameTv; // POI label
    TextView distanceTv; // formatted distance label
    ImageView favImg; // favorite indicator
    MTCompassView compassV; // compass arrow pointing towards the POI
    CommonStatusViewHolder statusViewHolder; // status sub-holder (schedule / app / availability), may be null
    ServiceUpdateViewHolder serviceUpdateViewHolder; // service update warning sub-holder, may be null
}
/** View holder for text message items (no extra fields beyond the common ones). */
private static class TextMessageViewHolder extends CommonViewHolder {
}
/** View holder for module (agency app) items. */
private static class ModuleViewHolder extends CommonViewHolder {
    ImageView moduleExtraTypeImg; // target module type icon; background tinted with the agency color
}
/** View holder for route/trip/stop items. */
private static class RouteTripStopViewHolder extends CommonViewHolder {
    TextView routeShortNameTv; // route short name (hidden when the agency logo is shown instead)
    View routeFL; // route frame container
    View rtsExtraV; // colored extra section; clickable (opens the route screen)
    MTJPathsView routeTypeImg; // agency logo (JPaths), tagged with the authority for reuse
    TextView tripHeadingTv; // trip heading text (marquee)
    View tripHeadingBg; // trip heading background container
}
/** View holder for basic POI items (no extra fields beyond the common ones). */
private static class BasicPOIViewHolder extends CommonViewHolder {
}
/** Sub-holder for the service update section. */
private static class ServiceUpdateViewHolder {
    ImageView warningImg; // warning icon, shown when a severity-warning service update exists
}
/** Base sub-holder for the status section of a POI item view. */
private static class CommonStatusViewHolder {
    View statusV; // status container; toggled VISIBLE/INVISIBLE by the update methods
}
/** Status sub-holder for availability percent statuses (text fallback or pie chart). */
private static class AvailabilityPercentStatusViewHolder extends CommonStatusViewHolder {
    TextView textTv; // textual status (not-OK message or lower value)
    MTPieChartPercentView piePercentV; // 3-slice pie chart of availability values
}
/** Status sub-holder for app statuses (single text message). */
private static class AppStatusViewHolder extends CommonStatusViewHolder {
    TextView textTv; // app status message
}
/** Status sub-holder for schedule statuses (up to 2 lines of next departures). */
private static class ScheduleStatusViewHolder extends CommonStatusViewHolder {
    TextView dataNextLine1Tv; // first schedule line (hides the whole status when empty)
    TextView dataNextLine2Tv; // second schedule line (GONE when empty)
}
}
| |
package com.linkedin.databus2.client.util;
/*
*
* Copyright 2013 LinkedIn Corp. All rights reserved
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import com.linkedin.databus.client.DatabusClientDSCUpdater;
import com.linkedin.databus.client.pub.DatabusClientGroupMember;
import com.linkedin.databus.client.pub.DatabusClientNode;
import com.linkedin.databus.core.Checkpoint;
import com.linkedin.databus.core.DbusClientMode;
import com.linkedin.databus.core.util.InvalidConfigException;
/**
 * Command-line tool to inspect and update the shared ZooKeeper state of a Databus client
 * cluster group: list members and leader, read/write shared checkpoint SCNs, and
 * read/write the DSC timestamp. Exits with status 1 on any failure.
 */
public class DbusClientClusterUtil {
    /**
     * @param args
     * DbusClientClusterUtil -s <serverList> -n <namespace> -g <group> -d <dir> members
     * leader
     * keys
     * readSCN <key>
     * writeSCN <key> SCN [OFFSET]
     * remove <key>
     * readLastTs
     * writeLastTs TIMESTAMP
     */
    public static void main(String[] args) {
        try
        {
            GnuParser cmdLineParser = new GnuParser();
            Options options = new Options();
            // NOTE(review): the "n" help text is missing its closing ']' -- runtime string,
            // left untouched here
            options.addOption("n",true,"Zookeeper namespace [/DatabusClient")
            .addOption("g",true,"Groupname [default-group-name] ")
            .addOption("d",true,"Shared directory name [shareddata] ")
            .addOption("s",true,"Zookeeper server list [localhost:2181] ")
            .addOption("h",false,"help");
            // non-strict parsing: unrecognized tokens are kept as plain arguments
            CommandLine cmdLineArgs = cmdLineParser.parse(options, args,false);
            if (cmdLineArgs.hasOption('h')) {
                usage();
                System.exit(0);
            }
            // apply defaults for any missing/empty option
            String namespace = cmdLineArgs.getOptionValue('n');
            if (namespace==null || namespace.isEmpty())
            {
                namespace = "/DatabusClient";
            }
            String groupname = cmdLineArgs.getOptionValue('g');
            if (groupname==null || groupname.isEmpty())
            {
                groupname = "default-group-name";
            }
            String sharedDir = cmdLineArgs.getOptionValue('d');
            if (sharedDir==null || sharedDir.isEmpty())
            {
                sharedDir = "shareddata";
            }
            String serverList = cmdLineArgs.getOptionValue('s');
            if (serverList==null || serverList.isEmpty())
            {
                serverList = "localhost:2181";
            }
            // first positional arg = function name, next two = optional function arguments
            String[] fns = cmdLineArgs.getArgs();
            if (fns.length < 1)
            {
                usage();
                System.exit(1);
            }
            String function = fns[0];
            String arg1 = (fns.length > 1) ? fns[1] : null;
            String arg2 = (fns.length > 2) ? fns[2] : null;
            try
            {
                // connect to the cluster and join the group without taking leadership
                String memberName = "cmd-line-tool";
                DatabusClientNode clusterNode = new DatabusClientNode (new DatabusClientNode.StaticConfig(true,serverList,
                2000,
                5000,
                namespace,
                groupname,
                memberName,false,sharedDir));
                DatabusClientGroupMember member = clusterNode.getMember(namespace,groupname,memberName);
                if (member == null || !member.joinWithoutLeadershipDuties())
                {
                    System.err.println("Initialization failed for: " + member);
                    System.exit(1);
                }
                if (function.equals("members")) {
                    // list all group members
                    List<String> mlist = member.getMembers();
                    for (String m: mlist) {
                        System.out.println(m);
                    }
                } else if (function.equals("leader")) {
                    String leader = member.getLeader();
                    System.out.println(leader);
                } else if (function.equals("keys")) {
                    // list keys present in the shared directory
                    List<String> keyList = member.getSharedKeys();
                    if (keyList != null) {
                        for (String m: keyList) {
                            System.out.println(m);
                        }
                    }
                } else if (function.equals("readSCN")) {
                    // read checkpoint SCN(s): one key when given, otherwise all shared keys
                    List<String> keyList ;
                    if (arg1 == null)
                    {
                        keyList = member.getSharedKeys();
                    } else {
                        keyList = new ArrayList<String>();
                        keyList.add(arg1);
                    }
                    if (keyList != null)
                    {
                        for (String k: keyList)
                        {
                            // the DSC timestamp key holds a Long, not a Checkpoint: skip it
                            if (!k.equals(DatabusClientDSCUpdater.DSCKEY))
                            {
                                Checkpoint cp = (Checkpoint) member.readSharedData(k);
                                if (cp != null)
                                {
                                    System.out.println(k + " " + cp.getWindowScn() + " "+cp.getWindowOffset());
                                }
                                else
                                {
                                    System.err.println(k + " null null");
                                }
                            }
                        }
                    }
                } else if (function.equals("writeSCN")) {
                    if (arg1 != null && arg2 != null)
                    {
                        // build an online-consumption checkpoint at the given SCN
                        // NOTE(review): Long.parseLong/Integer.parseInt may throw an uncaught
                        // NumberFormatException on bad numeric input -- TODO confirm acceptable
                        Checkpoint cp = new Checkpoint();
                        cp.setConsumptionMode(DbusClientMode.ONLINE_CONSUMPTION);
                        cp.setWindowScn(Long.parseLong(arg2));
                        if (fns.length > 3)
                        {
                            cp.setWindowOffset(Integer.parseInt(fns[3]));
                        } else {
                            cp.setWindowOffset(-1); // no offset provided
                        }
                        if (member.writeSharedData(arg1, cp))
                        {
                            System.out.println(arg1 + " " + cp.getWindowScn() + " " + cp.getWindowOffset());
                        }
                        else
                        {
                            System.err.println("Write failed! " + member + " couldn't write key=" + arg1);
                            System.exit(1);
                        }
                    }
                    else
                    {
                        usage();
                        System.exit(1);
                    }
                } else if (function.equals("readLastTs")) {
                    // DSC timestamp is stored as a Long under the dedicated DSC key
                    Long timeInMs = (Long) member.readSharedData(DatabusClientDSCUpdater.DSCKEY);
                    if (timeInMs != null)
                    {
                        System.out.println(DatabusClientDSCUpdater.DSCKEY + " " + timeInMs.longValue());
                    }
                    else
                    {
                        System.err.println(DatabusClientDSCUpdater.DSCKEY + " null");
                    }
                } else if (function.equals("writeLastTs")) {
                    if (arg1 != null)
                    {
                        Long ts = Long.parseLong(arg1);
                        if (member.writeSharedData(DatabusClientDSCUpdater.DSCKEY, ts))
                        {
                            System.out.println(DatabusClientDSCUpdater.DSCKEY + " " + ts);
                        }
                        else
                        {
                            System.err.println("Write failed! " + member + " couldn't write key=" + DatabusClientDSCUpdater.DSCKEY);
                            System.exit(1);
                        }
                    }
                    else
                    {
                        usage();
                        System.exit(1);
                    }
                } else if (function.equals("remove")) {
                    // NOTE(review): usage() claims "all keys if none specified", but a missing
                    // key passes null through to removeSharedData -- TODO confirm intended
                    if (!member.removeSharedData(arg1))
                    {
                        System.err.println("Remove failed! " + arg1);
                        System.exit(1);
                    }
                } else if (function.equals("create")) {
                    // create the ZooKeeper paths if they don't exist yet
                    if (!member.createPaths())
                    {
                        System.err.println("Create path failed!" );
                        System.exit(1);
                    }
                } else {
                    usage();
                    System.exit(1);
                }
            }
            catch (InvalidConfigException e) {
                e.printStackTrace();
                usage();
                System.exit(1);
            }
        }
        catch (ParseException e)
        {
            usage();
            System.exit(1);
        }
    }

    /** Prints command-line usage to stderr. */
    public static void usage()
    {
        System.err.println (" [ -n <namespace>] [-g <groupname>] [-d <shareddir>] [-s <cluster-server-list>] FUNCTION-NAME");
        System.err.println(" FUNCTION-NAME one of: ");
        System.err.println(" members : lists members belonging to the group <groupname> ");
        System.err.println(" leader: lists the leader of the group <groupname> ");
        System.err.println(" keys: lists the keys written to shared directory <shareddir> ");
        System.err.println(" readSCN [<key>] : reads the SCN written to shared directory <shareddir> ");
        System.err.println(" writeSCN <key> <SCN> [offset] : writes the SCN written to shared directory <shareddir> ");
        System.err.println(" remove <[key]> : removes key ; all keys if none specified in <shareddir>");
        System.err.println(" readLastTs : reads the last TS");
        System.err.println(" create : create the path if none exists ");
        System.err.println(" writeLastTs <TIMESTAMP> : writes latest TS");
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter.threads;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Set;
import org.apache.jmeter.assertions.Assertion;
import org.apache.jmeter.config.ConfigTestElement;
import org.apache.jmeter.control.Controller;
import org.apache.jmeter.control.TransactionController;
import org.apache.jmeter.control.TransactionSampler;
import org.apache.jmeter.engine.event.LoopIterationListener;
import org.apache.jmeter.engine.util.ConfigMergabilityIndicator;
import org.apache.jmeter.engine.util.NoConfigMerge;
import org.apache.jmeter.processor.PostProcessor;
import org.apache.jmeter.processor.PreProcessor;
import org.apache.jmeter.samplers.SampleListener;
import org.apache.jmeter.samplers.Sampler;
import org.apache.jmeter.testbeans.TestBeanHelper;
import org.apache.jmeter.testelement.TestElement;
import org.apache.jmeter.timers.Timer;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.collections.HashTree;
import org.apache.jorphan.collections.HashTreeTraverser;
import org.apache.jorphan.logging.LoggingManager;
import org.apache.log.Logger;
/**
* HashTreeTraverser implementation that traverses the Test Tree to build:
* <ul>
* <li>A map with key Sampler and as value the associated SamplePackage</li>
* <li>A map with key TransactionController and as value the associated SamplePackage</li>
* </ul>
*/
public class TestCompiler implements HashTreeTraverser {
    private static final Logger LOG = LoggingManager.getLoggerForClass();
    /**
     * Set this property {@value} to true to revert to using a shared static set.
     */
    private static final String USE_STATIC_SET = "TestCompiler.useStaticSet";
    /**
     * The default value - {@value} - assumed for {@link #USE_STATIC_SET}.
     */
    private static final boolean USE_STATIC_SET_DEFAULT = false;
    // Resolved once at class-load time from the JMeter property; selects between
    // the shared static PAIRING set and the per-parent TestCompilerHelper path below.
    public static final boolean IS_USE_STATIC_SET = JMeterUtils.getPropDefault(USE_STATIC_SET, USE_STATIC_SET_DEFAULT);
    /**
     * This set keeps track of which ObjectPairs have been seen.
     * It seems to be used to prevent adding a child to a parent if the child has already been added.
     * If the ObjectPair (child, parent) is present, then the child has been added.
     * Otherwise, the child is added to the parent and the pair is added to the Set.
     */
    private static final Set<ObjectPair> PAIRING = new HashSet<ObjectPair>();
    // Path of TestElements from the tree root down to the node currently being visited.
    private final LinkedList<TestElement> stack = new LinkedList<TestElement>();
    // Sampler -> the SamplePackage compiled for it during traversal.
    private final Map<Sampler, SamplePackage> samplerConfigMap = new HashMap<Sampler, SamplePackage>();
    // TransactionController -> the SamplePackage compiled for it during traversal.
    private final Map<TransactionController, SamplePackage> transactionControllerConfigMap =
    new HashMap<TransactionController, SamplePackage>();
    // The full test tree being traversed; used to list the elements at each stack level.
    private final HashTree testTree;
    public TestCompiler(HashTree testTree) {
        this.testTree = testTree;
    }
    /**
     * Clears the pairing Set Called by StandardJmeterEngine at the start of a
     * test run.
     */
    public static void initialize() {
        // synch is probably not needed as only called before run starts
        synchronized (PAIRING) {
            PAIRING.clear();
        }
    }
    /**
     * Configures sampler from SamplePackage extracted from Test plan and returns it
     * @param sampler {@link Sampler}
     * @return {@link SamplePackage}
     */
    public SamplePackage configureSampler(Sampler sampler) {
        // NOTE(review): pack is null (NPE below) if the sampler was never seen during
        // traversal — presumably callers only pass samplers from this tree; TODO confirm.
        SamplePackage pack = samplerConfigMap.get(sampler);
        pack.setSampler(sampler);
        configureWithConfigElements(sampler, pack.getConfigs());
        return pack;
    }
    /**
     * Configures Transaction Sampler from SamplePackage extracted from Test plan and returns it
     * @param transactionSampler {@link TransactionSampler}
     * @return {@link SamplePackage}
     */
    public SamplePackage configureTransactionSampler(TransactionSampler transactionSampler) {
        TransactionController controller = transactionSampler.getTransactionController();
        SamplePackage pack = transactionControllerConfigMap.get(controller);
        pack.setSampler(transactionSampler);
        return pack;
    }
    /**
     * Reset pack to its initial state
     * @param pack the {@link SamplePackage} whose running version is recovered
     */
    public void done(SamplePackage pack) {
        pack.recoverRunningVersion();
    }
    /** {@inheritDoc} */
    @Override
    public void addNode(Object node, HashTree subTree) {
        // Descend one level: the node becomes the deepest element on the path.
        stack.addLast((TestElement) node);
    }
    /** {@inheritDoc} */
    @Override
    public void subtractNode() {
        LOG.debug("Subtracting node, stack size = " + stack.size());
        // The element being left is the deepest one on the stack.
        TestElement child = stack.getLast();
        trackIterationListeners(stack);
        // Compile the package for samplers / transaction controllers on the way out,
        // while the full ancestor path is still on the stack.
        if (child instanceof Sampler) {
            saveSamplerConfigs((Sampler) child);
        }
        else if(child instanceof TransactionController) {
            saveTransactionControllerConfigs((TransactionController) child);
        }
        stack.removeLast();
        if (stack.size() > 0) {
            TestElement parent = stack.getLast();
            boolean duplicate = false;
            // Bug 53750: this condition used to be in ObjectPair#addTestElements()
            if (parent instanceof Controller && (child instanceof Sampler || child instanceof Controller)) {
                if (!IS_USE_STATIC_SET && parent instanceof TestCompilerHelper) {
                    // Preferred path: the parent itself tracks which children it has seen.
                    TestCompilerHelper te = (TestCompilerHelper) parent;
                    duplicate = !te.addTestElementOnce(child);
                } else { // this is only possible for 3rd party controllers by default
                    ObjectPair pair = new ObjectPair(child, parent);
                    synchronized (PAIRING) {// Called from multiple threads
                        if (!PAIRING.contains(pair)) {
                            parent.addTestElement(child);
                            PAIRING.add(pair);
                        } else {
                            duplicate = true;
                        }
                    }
                }
            }
            if (duplicate) {
                LOG.warn("Unexpected duplicate for " + parent.getClass().getName() + " and " + child.getClass().getName());
            }
        }
    }
    /**
     * If the deepest element on the stack is a {@link LoopIterationListener},
     * prepares it and registers it on the nearest enclosing {@link Controller}
     * found by walking the stack upwards (skipping the element itself).
     */
    @SuppressWarnings("deprecation") // TestBeanHelper.prepare() is OK
    private void trackIterationListeners(LinkedList<TestElement> p_stack) {
        TestElement child = p_stack.getLast();
        if (child instanceof LoopIterationListener) {
            // Iterate from the end of the stack (deepest) towards the root.
            ListIterator<TestElement> iter = p_stack.listIterator(p_stack.size());
            while (iter.hasPrevious()) {
                TestElement item = iter.previous();
                if (item == child) {
                    continue;
                }
                if (item instanceof Controller) {
                    TestBeanHelper.prepare(child);
                    ((Controller) item).addIterationListener((LoopIterationListener) child);
                    break;
                }
            }
        }
    }
    /** {@inheritDoc} */
    @Override
    public void processPath() {
    }
    /**
     * Collects, for the given sampler, every config element, listener, timer,
     * assertion and pre/post-processor found along the current stack path, and
     * stores the resulting {@link SamplePackage} in {@link #samplerConfigMap}.
     */
    private void saveSamplerConfigs(Sampler sam) {
        List<ConfigTestElement> configs = new LinkedList<ConfigTestElement>();
        List<Controller> controllers = new LinkedList<Controller>();
        List<SampleListener> listeners = new LinkedList<SampleListener>();
        List<Timer> timers = new LinkedList<Timer>();
        List<Assertion> assertions = new LinkedList<Assertion>();
        LinkedList<PostProcessor> posts = new LinkedList<PostProcessor>();
        LinkedList<PreProcessor> pres = new LinkedList<PreProcessor>();
        // Walk from the deepest stack level (the sampler itself) up to the root.
        for (int i = stack.size(); i > 0; i--) {
            addDirectParentControllers(controllers, stack.get(i - 1));
            List<PreProcessor> tempPre = new LinkedList<PreProcessor> ();
            List<PostProcessor> tempPost = new LinkedList<PostProcessor>();
            for (Object item : testTree.list(stack.subList(0, i))) {
                if ((item instanceof ConfigTestElement)) {
                    configs.add((ConfigTestElement) item);
                }
                if (item instanceof SampleListener) {
                    listeners.add((SampleListener) item);
                }
                if (item instanceof Timer) {
                    timers.add((Timer) item);
                }
                if (item instanceof Assertion) {
                    assertions.add((Assertion) item);
                }
                if (item instanceof PostProcessor) {
                    tempPost.add((PostProcessor) item);
                }
                if (item instanceof PreProcessor) {
                    tempPre.add((PreProcessor) item);
                }
            }
            // Prepending each level's processors yields outermost-level-first ordering
            // in the final lists, since the loop runs deepest-level-first.
            pres.addAll(0, tempPre);
            posts.addAll(0, tempPost);
        }
        SamplePackage pack = new SamplePackage(configs, listeners, timers, assertions,
        posts, pres, controllers);
        pack.setSampler(sam);
        pack.setRunningVersion(true);
        samplerConfigMap.put(sam, pack);
    }
    /**
     * Collects, for the given transaction controller, the listeners and
     * assertions found along the current stack path (other categories stay
     * empty) and stores the resulting {@link SamplePackage} in
     * {@link #transactionControllerConfigMap}.
     */
    private void saveTransactionControllerConfigs(TransactionController tc) {
        List<ConfigTestElement> configs = new LinkedList<ConfigTestElement>();
        List<Controller> controllers = new LinkedList<Controller>();
        List<SampleListener> listeners = new LinkedList<SampleListener>();
        List<Timer> timers = new LinkedList<Timer>();
        List<Assertion> assertions = new LinkedList<Assertion>();
        LinkedList<PostProcessor> posts = new LinkedList<PostProcessor>();
        LinkedList<PreProcessor> pres = new LinkedList<PreProcessor>();
        for (int i = stack.size(); i > 0; i--) {
            addDirectParentControllers(controllers, stack.get(i - 1));
            for (Object item : testTree.list(stack.subList(0, i))) {
                if (item instanceof SampleListener) {
                    listeners.add((SampleListener) item);
                }
                if (item instanceof Assertion) {
                    assertions.add((Assertion) item);
                }
            }
        }
        SamplePackage pack = new SamplePackage(configs, listeners, timers, assertions,
        posts, pres, controllers);
        pack.setSampler(new TransactionSampler(tc, tc.getName()));
        pack.setRunningVersion(true);
        transactionControllerConfigMap.put(tc, pack);
    }
    /**
     * Adds the given stack entry to the controller list when it is a
     * {@link Controller}; otherwise does nothing.
     *
     * @param controllers     accumulator receiving the sampler's ancestor controllers
     * @param maybeController stack entry that may or may not be a Controller
     */
    private void addDirectParentControllers(List<Controller> controllers, TestElement maybeController) {
        if (maybeController instanceof Controller) {
            LOG.debug("adding controller: " + maybeController + " to sampler config");
            controllers.add((Controller) maybeController);
        }
    }
    /**
     * Identity-based (child, parent) pair used as a key in {@link #PAIRING}.
     * NOTE(review): equals() compares by reference identity while hashCode()
     * delegates to the elements' own hashCode(); if a TestElement's hashCode is
     * value-based and mutates while the pair is in the set, lookups could miss —
     * TODO confirm TestElement hashCode stability during traversal.
     */
    private static class ObjectPair
    {
        private final TestElement child;
        private final TestElement parent;
        public ObjectPair(TestElement child, TestElement parent) {
            this.child = child;
            this.parent = parent;
        }
        /** {@inheritDoc} */
        @Override
        public int hashCode() {
            return child.hashCode() + parent.hashCode();
        }
        /** {@inheritDoc} */
        @Override
        public boolean equals(Object o) {
            if (o instanceof ObjectPair) {
                return child == ((ObjectPair) o).child && parent == ((ObjectPair) o).parent;
            }
            return false;
        }
    }
    /**
     * Clears the sampler's existing children, then re-adds each mergeable config
     * element. Configs marked {@link NoConfigMerge} are never merged; when the
     * sampler is a {@link ConfigMergabilityIndicator} it decides per config.
     */
    private void configureWithConfigElements(Sampler sam, List<ConfigTestElement> configs) {
        sam.clearTestElementChildren();
        for (ConfigTestElement config : configs) {
            if (!(config instanceof NoConfigMerge))
            {
                if(sam instanceof ConfigMergabilityIndicator) {
                    if(((ConfigMergabilityIndicator)sam).applies(config)) {
                        sam.addTestElement(config);
                    }
                } else {
                    // Backward compatibility
                    sam.addTestElement(config);
                }
            }
        }
    }
}
| |
/*
* Kubernetes
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: v1.18.2
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package it.reply.orchestrator.dto.kubernetes.fluxcd;
import java.util.Objects;
import com.google.gson.annotations.SerializedName;
import io.swagger.annotations.ApiModelProperty;
/**
* V1HelmReleaseSpecChart
*/
@javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen", date = "2020-10-16T10:28:05.581Z[Etc/UTC]")
public class V1HelmReleaseSpecChart {
    // NOTE: generated by OpenAPI Generator (see file header) — regenerate rather
    // than hand-edit when the upstream CRD schema changes.
    // Each field below is paired with the Gson serialized-name constant that maps
    // it to its JSON property.
    public static final String SERIALIZED_NAME_CHART_PULL_SECRET = "chartPullSecret";
    @SerializedName(SERIALIZED_NAME_CHART_PULL_SECRET)
    private V1HelmReleaseSpecChartChartPullSecret chartPullSecret;
    public static final String SERIALIZED_NAME_GIT = "git";
    @SerializedName(SERIALIZED_NAME_GIT)
    private String git;
    public static final String SERIALIZED_NAME_NAME = "name";
    @SerializedName(SERIALIZED_NAME_NAME)
    private String name;
    public static final String SERIALIZED_NAME_PATH = "path";
    @SerializedName(SERIALIZED_NAME_PATH)
    private String path;
    public static final String SERIALIZED_NAME_REF = "ref";
    @SerializedName(SERIALIZED_NAME_REF)
    private String ref;
    public static final String SERIALIZED_NAME_REPOSITORY = "repository";
    @SerializedName(SERIALIZED_NAME_REPOSITORY)
    private String repository;
    public static final String SERIALIZED_NAME_SECRET_REF = "secretRef";
    @SerializedName(SERIALIZED_NAME_SECRET_REF)
    private V1HelmReleaseSpecChartSecretRef secretRef;
    public static final String SERIALIZED_NAME_SKIP_DEP_UPDATE = "skipDepUpdate";
    @SerializedName(SERIALIZED_NAME_SKIP_DEP_UPDATE)
    private Boolean skipDepUpdate;
    public static final String SERIALIZED_NAME_VERSION = "version";
    @SerializedName(SERIALIZED_NAME_VERSION)
    private String version;
    // Fluent setter: assigns the field and returns this for builder-style chaining.
    public V1HelmReleaseSpecChart chartPullSecret(V1HelmReleaseSpecChartChartPullSecret chartPullSecret) {
        this.chartPullSecret = chartPullSecret;
        return this;
    }
    /**
     * Get chartPullSecret
     * @return chartPullSecret
     **/
    @javax.annotation.Nullable
    @ApiModelProperty(value = "")
    public V1HelmReleaseSpecChartChartPullSecret getChartPullSecret() {
        return chartPullSecret;
    }
    public void setChartPullSecret(V1HelmReleaseSpecChartChartPullSecret chartPullSecret) {
        this.chartPullSecret = chartPullSecret;
    }
    public V1HelmReleaseSpecChart git(String git) {
        this.git = git;
        return this;
    }
    /**
     * Git URL is the URL of the Git repository, e.g. `git@github.com:org/repo`, `http://github.com/org/repo`, or `ssh://git@example.com:2222/org/repo.git`.
     * @return git
     **/
    @javax.annotation.Nullable
    @ApiModelProperty(value = "Git URL is the URL of the Git repository, e.g. `git@github.com:org/repo`, `http://github.com/org/repo`, or `ssh://git@example.com:2222/org/repo.git`.")
    public String getGit() {
        return git;
    }
    public void setGit(String git) {
        this.git = git;
    }
    public V1HelmReleaseSpecChart name(String name) {
        this.name = name;
        return this;
    }
    /**
     * Name is the name of the Helm chart _without_ an alias, e.g. redis (for `helm upgrade [flags] stable/redis`).
     * @return name
     **/
    @javax.annotation.Nullable
    @ApiModelProperty(value = "Name is the name of the Helm chart _without_ an alias, e.g. redis (for `helm upgrade [flags] stable/redis`).")
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public V1HelmReleaseSpecChart path(String path) {
        this.path = path;
        return this;
    }
    /**
     * Path is the path to the chart relative to the repository root.
     * @return path
     **/
    @javax.annotation.Nullable
    @ApiModelProperty(value = "Path is the path to the chart relative to the repository root.")
    public String getPath() {
        return path;
    }
    public void setPath(String path) {
        this.path = path;
    }
    public V1HelmReleaseSpecChart ref(String ref) {
        this.ref = ref;
        return this;
    }
    /**
     * Ref is the Git branch (or other reference) to use. Defaults to 'master', or the configured default Git ref.
     * @return ref
     **/
    @javax.annotation.Nullable
    @ApiModelProperty(value = "Ref is the Git branch (or other reference) to use. Defaults to 'master', or the configured default Git ref.")
    public String getRef() {
        return ref;
    }
    public void setRef(String ref) {
        this.ref = ref;
    }
    public V1HelmReleaseSpecChart repository(String repository) {
        this.repository = repository;
        return this;
    }
    /**
     * RepoURL is the URL of the Helm repository, e.g. `https://kubernetes-charts.storage.googleapis.com` or `https://charts.example.com`.
     * @return repository
     **/
    @javax.annotation.Nullable
    @ApiModelProperty(value = "RepoURL is the URL of the Helm repository, e.g. `https://kubernetes-charts.storage.googleapis.com` or `https://charts.example.com`.")
    public String getRepository() {
        return repository;
    }
    public void setRepository(String repository) {
        this.repository = repository;
    }
    public V1HelmReleaseSpecChart secretRef(V1HelmReleaseSpecChartSecretRef secretRef) {
        this.secretRef = secretRef;
        return this;
    }
    /**
     * Get secretRef
     * @return secretRef
     **/
    @javax.annotation.Nullable
    @ApiModelProperty(value = "")
    public V1HelmReleaseSpecChartSecretRef getSecretRef() {
        return secretRef;
    }
    public void setSecretRef(V1HelmReleaseSpecChartSecretRef secretRef) {
        this.secretRef = secretRef;
    }
    public V1HelmReleaseSpecChart skipDepUpdate(Boolean skipDepUpdate) {
        this.skipDepUpdate = skipDepUpdate;
        return this;
    }
    /**
     * SkipDepUpdate will tell the operator to skip running 'helm dep update' before installing or upgrading the chart, the chart dependencies _must_ be present for this to succeed.
     * @return skipDepUpdate
     **/
    @javax.annotation.Nullable
    @ApiModelProperty(value = "SkipDepUpdate will tell the operator to skip running 'helm dep update' before installing or upgrading the chart, the chart dependencies _must_ be present for this to succeed.")
    public Boolean getSkipDepUpdate() {
        return skipDepUpdate;
    }
    public void setSkipDepUpdate(Boolean skipDepUpdate) {
        this.skipDepUpdate = skipDepUpdate;
    }
    public V1HelmReleaseSpecChart version(String version) {
        this.version = version;
        return this;
    }
    /**
     * Version is the targeted Helm chart version, e.g. 7.0.1.
     * @return version
     **/
    @javax.annotation.Nullable
    @ApiModelProperty(value = "Version is the targeted Helm chart version, e.g. 7.0.1.")
    public String getVersion() {
        return version;
    }
    public void setVersion(String version) {
        this.version = version;
    }
    // Value equality over all nine properties (null-safe via Objects.equals).
    @Override
    public boolean equals(java.lang.Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        V1HelmReleaseSpecChart v1HelmReleaseSpecChart = (V1HelmReleaseSpecChart) o;
        return Objects.equals(this.chartPullSecret, v1HelmReleaseSpecChart.chartPullSecret) &&
        Objects.equals(this.git, v1HelmReleaseSpecChart.git) &&
        Objects.equals(this.name, v1HelmReleaseSpecChart.name) &&
        Objects.equals(this.path, v1HelmReleaseSpecChart.path) &&
        Objects.equals(this.ref, v1HelmReleaseSpecChart.ref) &&
        Objects.equals(this.repository, v1HelmReleaseSpecChart.repository) &&
        Objects.equals(this.secretRef, v1HelmReleaseSpecChart.secretRef) &&
        Objects.equals(this.skipDepUpdate, v1HelmReleaseSpecChart.skipDepUpdate) &&
        Objects.equals(this.version, v1HelmReleaseSpecChart.version);
    }
    // Consistent with equals(): hashes the same nine properties.
    @Override
    public int hashCode() {
        return Objects.hash(chartPullSecret, git, name, path, ref, repository, secretRef, skipDepUpdate, version);
    }
    // Debug representation; nested objects are indented via toIndentedString.
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("class V1HelmReleaseSpecChart {\n");
        sb.append("    chartPullSecret: ").append(toIndentedString(chartPullSecret)).append("\n");
        sb.append("    git: ").append(toIndentedString(git)).append("\n");
        sb.append("    name: ").append(toIndentedString(name)).append("\n");
        sb.append("    path: ").append(toIndentedString(path)).append("\n");
        sb.append("    ref: ").append(toIndentedString(ref)).append("\n");
        sb.append("    repository: ").append(toIndentedString(repository)).append("\n");
        sb.append("    skipDepUpdate: ").append(toIndentedString(skipDepUpdate)).append("\n");
        sb.append("    secretRef: ").append(toIndentedString(secretRef)).append("\n");
        sb.append("    version: ").append(toIndentedString(version)).append("\n");
        sb.append("}");
        return sb.toString();
    }
    /**
     * Convert the given object to string with each line indented by 4 spaces
     * (except the first line).
     */
    private String toIndentedString(java.lang.Object o) {
        if (o == null) {
            return "null";
        }
        return o.toString().replace("\n", "\n    ");
    }
}
| |
/*
* Copyright (C) 2010 JFrog Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jfrog.hudson.ivy;
import com.google.common.collect.Lists;
import hudson.Extension;
import hudson.FilePath;
import hudson.Launcher;
import hudson.ivy.AntIvyBuildWrapper;
import hudson.model.*;
import hudson.remoting.Which;
import hudson.tasks.BuildWrapperDescriptor;
import hudson.util.FormValidation;
import hudson.util.XStream2;
import jenkins.model.Jenkins;
import net.sf.json.JSONObject;
import org.apache.commons.lang.StringUtils;
import org.jfrog.build.extractor.listener.ArtifactoryBuildListener;
import org.jfrog.hudson.*;
import org.jfrog.hudson.action.ActionableHelper;
import org.jfrog.hudson.util.*;
import org.jfrog.hudson.util.publisher.PublisherContext;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.bind.JavaScriptMethod;
import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
* @author Tomer Cohen
*/
public class ArtifactoryIvyConfigurator extends AntIvyBuildWrapper implements DeployerOverrider,
BuildInfoAwareConfigurator {
private final Credentials overridingDeployerCredentials;
private final IncludesExcludes artifactDeploymentPatterns;
private final boolean discardBuildArtifacts;
private final String matrixParams;
private final boolean filterExcludedArtifactsFromBuild;
private ServerDetails details;
private boolean deployArtifacts;
private boolean deployBuildInfo;
private boolean includeEnvVars;
private IncludesExcludes envVarsPatterns;
private boolean runChecks;
private String violationRecipients;
private boolean includePublishArtifacts;
private String scopes;
private boolean licenseAutoDiscovery;
private boolean disableLicenseAutoDiscovery;
private boolean discardOldBuilds;
private boolean notM2Compatible;
private String ivyPattern;
private String aggregationBuildStatus;
private String artifactPattern;
private boolean enableIssueTrackerIntegration;
private boolean aggregateBuildIssues;
private boolean blackDuckRunChecks;
private String blackDuckAppName;
private String blackDuckAppVersion;
private String blackDuckReportRecipients; //csv
private String blackDuckScopes; //csv
private boolean blackDuckIncludePublishedArtifacts;
private boolean autoCreateMissingComponentRequests;
private boolean autoDiscardStaleComponentRequests;
/**
* @deprecated: Use org.jfrog.hudson.DeployerOverrider#getOverridingDeployerCredentials()
*/
@Deprecated
private transient String username;
/**
* @deprecated: Use org.jfrog.hudson.DeployerOverrider#getOverridingDeployerCredentials()
*/
@Deprecated
private transient String password;
    /**
     * Databound constructor invoked by Jenkins when the job configuration form
     * is submitted; each parameter corresponds to a form field.
     * Most parameters are stored verbatim; note the two derived fields:
     * {@code artifactPattern} is stored with surrounding quotes stripped, and
     * {@code licenseAutoDiscovery} is the inverse of the submitted
     * {@code disableLicenseAutoDiscovery} checkbox.
     */
    @DataBoundConstructor
    public ArtifactoryIvyConfigurator(ServerDetails details, Credentials overridingDeployerCredentials,
            boolean deployArtifacts, IncludesExcludes artifactDeploymentPatterns, boolean deployBuildInfo,
            boolean includeEnvVars, IncludesExcludes envVarsPatterns,
            boolean runChecks, String violationRecipients, boolean includePublishArtifacts,
            String scopes, boolean disableLicenseAutoDiscovery, boolean notM2Compatible, String ivyPattern,
            String artifactPattern, boolean discardOldBuilds, boolean discardBuildArtifacts, String matrixParams,
            boolean enableIssueTrackerIntegration, boolean aggregateBuildIssues, String aggregationBuildStatus,
            boolean blackDuckRunChecks, String blackDuckAppName, String blackDuckAppVersion,
            String blackDuckReportRecipients, String blackDuckScopes, boolean blackDuckIncludePublishedArtifacts,
            boolean autoCreateMissingComponentRequests, boolean autoDiscardStaleComponentRequests,
            boolean filterExcludedArtifactsFromBuild) {
        this.details = details;
        this.overridingDeployerCredentials = overridingDeployerCredentials;
        this.deployArtifacts = deployArtifacts;
        this.artifactDeploymentPatterns = artifactDeploymentPatterns;
        this.deployBuildInfo = deployBuildInfo;
        this.includeEnvVars = includeEnvVars;
        this.envVarsPatterns = envVarsPatterns;
        this.runChecks = runChecks;
        this.violationRecipients = violationRecipients;
        this.includePublishArtifacts = includePublishArtifacts;
        this.scopes = scopes;
        this.disableLicenseAutoDiscovery = disableLicenseAutoDiscovery;
        this.notM2Compatible = notM2Compatible;
        this.ivyPattern = ivyPattern;
        this.aggregationBuildStatus = aggregationBuildStatus;
        this.filterExcludedArtifactsFromBuild = filterExcludedArtifactsFromBuild;
        this.artifactPattern = clearApostrophes(artifactPattern); // stored without surrounding quotes
        this.discardOldBuilds = discardOldBuilds;
        this.discardBuildArtifacts = discardBuildArtifacts;
        this.matrixParams = matrixParams;
        this.licenseAutoDiscovery = !disableLicenseAutoDiscovery; // derived: inverse of the "disable" checkbox
        this.enableIssueTrackerIntegration = enableIssueTrackerIntegration;
        this.aggregateBuildIssues = aggregateBuildIssues;
        this.blackDuckRunChecks = blackDuckRunChecks;
        this.blackDuckAppName = blackDuckAppName;
        this.blackDuckAppVersion = blackDuckAppVersion;
        this.blackDuckReportRecipients = blackDuckReportRecipients;
        this.blackDuckScopes = blackDuckScopes;
        this.blackDuckIncludePublishedArtifacts = blackDuckIncludePublishedArtifacts;
        this.autoCreateMissingComponentRequests = autoCreateMissingComponentRequests;
        this.autoDiscardStaleComponentRequests = autoDiscardStaleComponentRequests;
    }
/**
* Clears the extra apostrophes from the start and the end of the string
*/
private String clearApostrophes(String artifactPattern) {
return StringUtils.removeEnd(StringUtils.removeStart(artifactPattern, "\""), "\"");
}
    // ---- Simple accessors for the persisted job-configuration fields. ----
    public ServerDetails getDetails() {
        return details;
    }
    public String getMatrixParams() {
        return matrixParams;
    }
    // DeployerOverrider contract: credentials are overridden iff they were supplied.
    public boolean isOverridingDefaultDeployer() {
        return (getOverridingDeployerCredentials() != null);
    }
    public Credentials getOverridingDeployerCredentials() {
        return overridingDeployerCredentials;
    }
    public boolean isNotM2Compatible() {
        return notM2Compatible;
    }
    public void setNotM2Compatible(boolean notM2Compatible) {
        this.notM2Compatible = notM2Compatible;
    }
    public boolean isDiscardBuildArtifacts() {
        return discardBuildArtifacts;
    }
    public boolean isDiscardOldBuilds() {
        return discardOldBuilds;
    }
    public void setDiscardOldBuilds(boolean discardOldBuilds) {
        this.discardOldBuilds = discardOldBuilds;
    }
    // Re-strips quotes on read as well as on write — presumably to cover instances
    // deserialized from older configs where the field was stored quoted; TODO confirm.
    public String getArtifactPattern() {
        return clearApostrophes(artifactPattern);
    }
    public void setArtifactPattern(String artifactPattern) {
        this.artifactPattern = clearApostrophes(artifactPattern);
    }
    public String getIvyPattern() {
        return ivyPattern;
    }
    public void setIvyPattern(String ivyPattern) {
        this.ivyPattern = ivyPattern;
    }
    // Convenience inverse of the persisted notM2Compatible flag.
    public boolean isM2Compatible() {
        return !notM2Compatible;
    }
    public boolean isIncludePublishArtifacts() {
        return includePublishArtifacts;
    }
    public void setIncludePublishArtifacts(boolean includePublishArtifacts) {
        this.includePublishArtifacts = includePublishArtifacts;
    }
    public boolean isRunChecks() {
        return runChecks;
    }
    public void setRunChecks(boolean runChecks) {
        this.runChecks = runChecks;
    }
    public boolean isDisableLicenseAutoDiscovery() {
        return disableLicenseAutoDiscovery;
    }
    public String getScopes() {
        return scopes;
    }
    public boolean isLicenseAutoDiscovery() {
        return licenseAutoDiscovery;
    }
    public void setLicenseAutoDiscovery(boolean licenseAutoDiscovery) {
        this.licenseAutoDiscovery = licenseAutoDiscovery;
    }
    public boolean isDeployArtifacts() {
        return deployArtifacts;
    }
    public IncludesExcludes getArtifactDeploymentPatterns() {
        return artifactDeploymentPatterns;
    }
    public boolean isDeployBuildInfo() {
        return deployBuildInfo;
    }
    public boolean isIncludeEnvVars() {
        return includeEnvVars;
    }
    public IncludesExcludes getEnvVarsPatterns() {
        return envVarsPatterns;
    }
    // The three getters below are null-safe: details may be absent on a
    // freshly-created or partially-configured job.
    public String getArtifactoryName() {
        return details != null ? details.artifactoryName : null;
    }
    public String getRepositoryKey() {
        return details != null ? details.getDeployReleaseRepository().getRepoKey() : null;
    }
    public String getArtifactoryUrl() {
        return details != null ? details.getArtifactoryUrl() : null;
    }
    public String getViolationRecipients() {
        return violationRecipients;
    }
    public void setViolationRecipients(String violationRecipients) {
        this.violationRecipients = violationRecipients;
    }
    public boolean isEnableIssueTrackerIntegration() {
        return enableIssueTrackerIntegration;
    }
    public void setEnableIssueTrackerIntegration(boolean enableIssueTrackerIntegration) {
        this.enableIssueTrackerIntegration = enableIssueTrackerIntegration;
    }
    public boolean isAggregateBuildIssues() {
        return aggregateBuildIssues;
    }
    public void setAggregateBuildIssues(boolean aggregateBuildIssues) {
        this.aggregateBuildIssues = aggregateBuildIssues;
    }
    public String getAggregationBuildStatus() {
        return aggregationBuildStatus;
    }
    public void setAggregationBuildStatus(String aggregationBuildStatus) {
        this.aggregationBuildStatus = aggregationBuildStatus;
    }
    public boolean isBlackDuckRunChecks() {
        return blackDuckRunChecks;
    }
    public void setBlackDuckRunChecks(boolean blackDuckRunChecks) {
        this.blackDuckRunChecks = blackDuckRunChecks;
    }
    public String getBlackDuckAppName() {
        return blackDuckAppName;
    }
    public void setBlackDuckAppName(String blackDuckAppName) {
        this.blackDuckAppName = blackDuckAppName;
    }
    public String getBlackDuckAppVersion() {
        return blackDuckAppVersion;
    }
    public void setBlackDuckAppVersion(String blackDuckAppVersion) {
        this.blackDuckAppVersion = blackDuckAppVersion;
    }
    public String getBlackDuckReportRecipients() {
        return blackDuckReportRecipients;
    }
    public String getBlackDuckScopes() {
        return blackDuckScopes;
    }
    public boolean isBlackDuckIncludePublishedArtifacts() {
        return blackDuckIncludePublishedArtifacts;
    }
    public boolean isAutoCreateMissingComponentRequests() {
        return autoCreateMissingComponentRequests;
    }
    public boolean isAutoDiscardStaleComponentRequests() {
        return autoDiscardStaleComponentRequests;
    }
    public boolean isFilterExcludedArtifactsFromBuild() {
        return filterExcludedArtifactsFromBuild;
    }
    /**
     * Contributes the Artifactory link action(s) for this job's configured
     * server URL to the project page.
     */
    @Override
    public Collection<? extends Action> getProjectActions(AbstractProject project) {
        return ActionableHelper.getArtifactoryProjectAction(details.getArtifactoryUrl(), project);
    }
    /**
     * Build-wrapper hook: stages the extractor dependency jar, snapshots the
     * whole job configuration into an immutable {@link PublisherContext}, and
     * returns an Ant/Ivy environment that (a) exports the context as build
     * environment variables and (b) appends {@code -lib}/{@code -listener}
     * arguments so Ant loads the ArtifactoryBuildListener.
     */
    @Override
    public Environment setUp(final AbstractBuild build, Launcher launcher, final BuildListener listener)
            throws IOException, InterruptedException {
        // Locate the jar containing the extractor listener and copy it where the build node can load it.
        File localDependencyFile = Which.jarFile(ArtifactoryBuildListener.class);
        final FilePath actualDependencyDir =
                PluginDependencyHelper.getActualDependencyDirectory(build, localDependencyFile);
        final PublisherContext context = new PublisherContext.Builder().artifactoryServer(getArtifactoryServer())
                .serverDetails(getDetails()).deployerOverrider(ArtifactoryIvyConfigurator.this).runChecks(isRunChecks())
                .includePublishArtifacts(isIncludePublishArtifacts()).violationRecipients(getViolationRecipients())
                .scopes(getScopes()).licenseAutoDiscovery(licenseAutoDiscovery).discardOldBuilds(isDiscardOldBuilds())
                .deployArtifacts(isDeployArtifacts()).includesExcludes(getArtifactDeploymentPatterns())
                .skipBuildInfoDeploy(!isDeployBuildInfo())
                .includeEnvVars(isIncludeEnvVars()).envVarsPatterns(getEnvVarsPatterns())
                .discardBuildArtifacts(isDiscardBuildArtifacts()).matrixParams(getMatrixParams())
                .artifactsPattern(getArtifactPattern()).ivyPattern(getIvyPattern()).maven2Compatible(isM2Compatible())
                .enableIssueTrackerIntegration(isEnableIssueTrackerIntegration())
                .aggregateBuildIssues(isAggregateBuildIssues()).aggregationBuildStatus(getAggregationBuildStatus())
                .integrateBlackDuck(isBlackDuckRunChecks(), getBlackDuckAppName(), getBlackDuckAppVersion(),
                        getBlackDuckReportRecipients(), getBlackDuckScopes(), isBlackDuckIncludePublishedArtifacts(),
                        isAutoCreateMissingComponentRequests(), isAutoDiscardStaleComponentRequests())
                .filterExcludedArtifactsFromBuild(isFilterExcludedArtifactsFromBuild())
                .build();
        // NOTE(review): sets an optimistic SUCCESS result before the build runs —
        // presumably so later steps can only downgrade it; TODO confirm intent.
        build.setResult(Result.SUCCESS);
        return new AntIvyBuilderEnvironment() {
            @Override
            public void buildEnvVars(Map<String, String> env) {
                try {
                    // Export the publisher context to the build's environment for the extractor.
                    ExtractorUtils.addBuilderInfoArguments(env, build, listener, context, null);
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
            }
            @Override
            public String getAdditionalArgs() {
                StringBuilder targets = new StringBuilder();
                String actualDependencyDirPath = actualDependencyDir.getRemote();
                // Normalize to forward slashes and quote, so Windows paths with spaces survive the Ant command line.
                actualDependencyDirPath = actualDependencyDirPath.replace('\\', '/');
                actualDependencyDirPath = "\"" + actualDependencyDirPath + "\"";
                targets.append("-lib ").append(actualDependencyDirPath).append(" ");
                targets.append("-listener ").append("org.jfrog.build.extractor.listener.ArtifactoryBuildListener")
                        .append(" ");
                return targets.toString();
            }
        };
    }
    // Resolves the configured server name against the globally registered servers.
    public ArtifactoryServer getArtifactoryServer() {
        return RepositoriesUtils.getArtifactoryServer(getArtifactoryName(), getDescriptor().getArtifactoryServers());
    }
    // Repositories shown in the job form, with the currently selected key applied.
    public List<Repository> getReleaseRepositoryList() {
        return RepositoriesUtils.collectRepositories(getDescriptor().releaseRepositoryList, details.getDeployReleaseRepository().getKeyFromSelect());
    }
    /** Narrows the descriptor type for convenience. */
    @Override
    public DescriptorImpl getDescriptor() {
        return (DescriptorImpl) super.getDescriptor();
    }
@Extension(optional = true)
public static class DescriptorImpl extends BuildWrapperDescriptor {
private List<Repository> releaseRepositoryList;
        public DescriptorImpl() {
            super(ArtifactoryIvyConfigurator.class);
            // Restore previously persisted global configuration, if any.
            load();
        }
        /**
         * Restricts this wrapper to Ivy projects. Compared by class name rather
         * than instanceof — presumably to avoid a hard classloading dependency
         * on the Ivy plugin; TODO confirm.
         */
        @Override
        public boolean isApplicable(AbstractProject<?, ?> item) {
            return "hudson.ivy.IvyModuleSet".equals(item.getClass().getName());
        }
/**
* This method triggered from the client side by Ajax call.
* The Element that trig is the "Refresh Repositories" button.
*
* @param url the artifactory url
* @param credentialsUsername override credentials user name
* @param credentialsPassword override credentials password
* @param overridingDeployerCredentials user choose to override credentials
* @return {@link org.jfrog.hudson.util.RefreshServerResponse} object that represents the response of the repositories
*/
@JavaScriptMethod
public RefreshServerResponse refreshFromArtifactory(String url, String credentialsUsername, String credentialsPassword, boolean overridingDeployerCredentials) {
RefreshServerResponse response = new RefreshServerResponse();
ArtifactoryServer artifactoryServer = RepositoriesUtils.getArtifactoryServer(url, RepositoriesUtils.getArtifactoryServers());
try {
List<String> releaseRepositoryKeysFirst = RepositoriesUtils.getLocalRepositories(url, credentialsUsername, credentialsPassword,
overridingDeployerCredentials, artifactoryServer);
Collections.sort(releaseRepositoryKeysFirst);
releaseRepositoryList = RepositoriesUtils.createRepositoriesList(releaseRepositoryKeysFirst);
response.setRepositories(releaseRepositoryList);
response.setSuccess(true);
return response;
} catch (Exception e) {
e.printStackTrace();
response.setResponseMessage(e.getMessage());
response.setSuccess(false);
}
return response;
}
@Override
public String getDisplayName() {
return "Publish to Artifactory";
}
@Override
public String getHelpFile() {
return "/plugin/artifactory/ivy/help-publish.html";
}
@Override
public boolean configure(StaplerRequest req, JSONObject json) throws FormException {
req.bindParameters(this, "ivy");
save();
return true;
}
public FormValidation doCheckArtifactoryName(@QueryParameter String value) {
return FormValidations.validateInternetAddress(value);
}
public FormValidation doCheckViolationRecipients(@QueryParameter String value) {
return FormValidations.validateEmails(value);
}
/**
* Returns the list of {@link org.jfrog.hudson.ArtifactoryServer} configured.
*
* @return can be empty but never null.
*/
public List<ArtifactoryServer> getArtifactoryServers() {
return RepositoriesUtils.getArtifactoryServers();
}
public boolean isJiraPluginEnabled() {
return (Jenkins.getInstance().getPlugin("jira") != null);
}
}
/**
 * Convert any remaining local credential variables to a credentials object
 */
public static final class ConverterImpl extends OverridingDeployerCredentialsConverter {
    public ConverterImpl(XStream2 xstream) {
        // All migration logic lives in the shared converter base class.
        super(xstream);
    }
}
}
| |
/*
* Copyright (c) 2018, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.application.authentication.framework.handler.request.impl;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.client.utils.URIBuilder;
import org.wso2.carbon.CarbonException;
import org.wso2.carbon.core.util.AnonymousSessionUtil;
import org.wso2.carbon.identity.application.authentication.framework.config.ConfigurationFacade;
import org.wso2.carbon.identity.application.authentication.framework.config.model.StepConfig;
import org.wso2.carbon.identity.application.authentication.framework.context.AuthenticationContext;
import org.wso2.carbon.identity.application.authentication.framework.exception.FrameworkException;
import org.wso2.carbon.identity.application.authentication.framework.exception.PostAuthenticationFailedException;
import org.wso2.carbon.identity.application.authentication.framework.exception.UserIdNotFoundException;
import org.wso2.carbon.identity.application.authentication.framework.handler.request.AbstractPostAuthnHandler;
import org.wso2.carbon.identity.application.authentication.framework.handler.request.PostAuthnHandlerFlowStatus;
import org.wso2.carbon.identity.application.authentication.framework.internal.FrameworkServiceComponent;
import org.wso2.carbon.identity.application.authentication.framework.internal.FrameworkServiceDataHolder;
import org.wso2.carbon.identity.application.authentication.framework.model.AuthenticatedUser;
import org.wso2.carbon.identity.application.authentication.framework.util.FrameworkConstants;
import org.wso2.carbon.identity.application.authentication.framework.util.FrameworkUtils;
import org.wso2.carbon.identity.application.common.model.ClaimMapping;
import org.wso2.carbon.identity.claim.metadata.mgt.ClaimMetadataManagementService;
import org.wso2.carbon.identity.claim.metadata.mgt.exception.ClaimMetadataException;
import org.wso2.carbon.identity.claim.metadata.mgt.model.LocalClaim;
import org.wso2.carbon.identity.user.profile.mgt.association.federation.FederatedAssociationManager;
import org.wso2.carbon.identity.user.profile.mgt.association.federation.exception.FederatedAssociationManagerException;
import org.wso2.carbon.user.api.Claim;
import org.wso2.carbon.user.api.ClaimManager;
import org.wso2.carbon.user.core.UserCoreConstants;
import org.wso2.carbon.user.core.UserRealm;
import org.wso2.carbon.user.core.UserStoreClientException;
import org.wso2.carbon.user.core.UserStoreException;
import org.wso2.carbon.user.core.common.AbstractUserStoreManager;
import org.wso2.carbon.user.core.constants.UserCoreErrorConstants.ErrorMessages;
import org.wso2.carbon.user.core.util.UserCoreUtil;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.StringJoiner;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import static org.wso2.carbon.identity.application.authentication.framework.handler.request.PostAuthnHandlerFlowStatus.UNSUCCESS_COMPLETED;
import static org.wso2.carbon.identity.application.authentication.framework.util.FrameworkConstants.ERROR_CODE_INVALID_ATTRIBUTE_UPDATE;
import static org.wso2.carbon.identity.application.authentication.framework.util.FrameworkConstants.POST_AUTHENTICATION_REDIRECTION_TRIGGERED;
import static org.wso2.carbon.identity.application.authentication.framework.util.FrameworkConstants.POST_AUTH_MISSING_CLAIMS_ERROR;
import static org.wso2.carbon.identity.application.authentication.framework.util.FrameworkConstants.POST_AUTH_MISSING_CLAIMS_ERROR_CODE;
import static org.wso2.carbon.identity.claim.metadata.mgt.util.ClaimConstants.DISPLAY_NAME_PROPERTY;
/**
 * Post authentication handler for missing claims.
 * <p>
 * If the service provider marks claims as mandatory and the authenticated user's
 * attributes do not contain them, this handler redirects the user to the
 * authentication endpoint's "request claims" page. On the return trip it validates
 * the submitted values, persists them for local (or federated-associated) users,
 * and attaches them to the authenticated user in the context.
 */
public class PostAuthnMissingClaimHandler extends AbstractPostAuthnHandler {

    private static final Log log = LogFactory.getLog(PostAuthnMissingClaimHandler.class);

    // Singleton instance; volatile so the double-checked locking below is safe.
    private static volatile PostAuthnMissingClaimHandler instance;

    /**
     * Returns the singleton handler instance, lazily created with double-checked locking.
     */
    public static PostAuthnMissingClaimHandler getInstance() {
        if (instance == null) {
            synchronized (PostAuthnMissingClaimHandler.class) {
                if (instance == null) {
                    instance = new PostAuthnMissingClaimHandler();
                }
            }
        }
        return instance;
    }

    @Override
    public int getPriority() {
        return 100;
    }

    @Override
    public String getName() {
        return "MissingClaimPostAuthnHandler";
    }

    /**
     * Entry point. On the first pass (no redirection yet) checks for missing mandatory
     * claims and redirects the user out if any; on the second pass processes the claim
     * values posted back from the claims page.
     */
    @Override
    public PostAuthnHandlerFlowStatus handle(HttpServletRequest request, HttpServletResponse response,
                                             AuthenticationContext context) throws PostAuthenticationFailedException {

        if (log.isDebugEnabled()) {
            log.debug("Post authentication handling for missing claims started");
        }

        if (getAuthenticatedUser(context) == null) {
            if (log.isDebugEnabled()) {
                log.debug("No authenticated user found. Hence returning without handling mandatory claims");
            }
            return UNSUCCESS_COMPLETED;
        }

        boolean postAuthRequestTriggered = isPostAuthRequestTriggered(context);

        if (!postAuthRequestTriggered) {
            PostAuthnHandlerFlowStatus flowStatus = handlePostAuthenticationForMissingClaimsRequest(request, response,
                    context);
            return flowStatus;
        } else {
            try {
                handlePostAuthenticationForMissingClaimsResponse(request, response, context);
            } catch (PostAuthenticationFailedException e) {
                // A recoverable claim-update failure was recorded in the context; re-run the
                // request flow so the user is sent back to the claims page with the error shown.
                if (context.getProperty(POST_AUTH_MISSING_CLAIMS_ERROR) != null) {
                    PostAuthnHandlerFlowStatus flowStatus =
                            handlePostAuthenticationForMissingClaimsRequest(request, response, context);
                    return flowStatus;
                }
                throw e;
            }
            if (log.isDebugEnabled()) {
                log.debug("Successfully returning from missing claim handler");
            }
            return PostAuthnHandlerFlowStatus.SUCCESS_COMPLETED;
        }
    }

    /**
     * Checks whether this handler has already redirected the user out to collect claims.
     */
    protected boolean isPostAuthRequestTriggered(AuthenticationContext context) {
        Object object = context.getProperty(POST_AUTHENTICATION_REDIRECTION_TRIGGERED);
        boolean postAuthRequestTriggered = false;
        // instanceof evaluates to false for null, so no separate null check is needed.
        if (object instanceof Boolean) {
            postAuthRequestTriggered = (boolean) object;
        }
        return postAuthRequestTriggered;
    }

    /**
     * To get display names of missing mandatory claims from SP side.
     *
     * @param missingClaimMap Mandatory claim's URIs.
     * @param localClaims All claims.
     * @return set of display names of missing claims.
     */
    private String getMissingClaimsDisplayNames(Map<String, String> missingClaimMap, List<LocalClaim> localClaims) {

        StringJoiner displayNameMappingString = new StringJoiner(",");
        for (Map.Entry<String, String> entry : missingClaimMap.entrySet()) {
            for (LocalClaim localClaim : localClaims) {
                if (entry.getValue().equalsIgnoreCase(localClaim.getClaimURI())) {
                    // Encode each mapping as "<spClaimURI>|<displayName>", comma separated.
                    displayNameMappingString.
                            add(entry.getKey() + "|" + localClaim.getClaimProperties().get(DISPLAY_NAME_PROPERTY));
                    break;
                }
            }
        }
        return displayNameMappingString.toString();
    }

    /**
     * Checks for mandatory claims that are still missing and, if any, redirects the user
     * to the authentication endpoint's "request claims" page.
     *
     * @return {@code INCOMPLETE} when a redirect was issued, {@code SUCCESS_COMPLETED}
     *         when no mandatory claims are missing.
     * @throws PostAuthenticationFailedException on read-only missing claims or any
     *                                           failure while building/sending the redirect.
     */
    protected PostAuthnHandlerFlowStatus handlePostAuthenticationForMissingClaimsRequest(HttpServletRequest request,
                                                                                         HttpServletResponse response,
                                                                                         AuthenticationContext context)
            throws PostAuthenticationFailedException {

        String[] missingClaims = FrameworkUtils.getMissingClaims(context);

        if (StringUtils.isNotBlank(missingClaims[0])) {
            if (log.isDebugEnabled()) {
                log.debug("Mandatory claims missing for the application : " + missingClaims[0]);
            }
            try {
                // If there are read only claims marked as mandatory and they are missing, we cannot proceed further.
                // We have to end the flow and show an error message to user.
                ClaimManager claimManager = getUserRealm(context.getTenantDomain()).getClaimManager();
                Map<String, String> missingClaimMap = FrameworkUtils.getMissingClaimsMap(context);

                for (Map.Entry<String, String> missingClaim : missingClaimMap.entrySet()) {
                    Claim claimObj = claimManager.getClaim(missingClaim.getValue());
                    if (claimObj != null && claimObj.isReadOnly()) {
                        throw new PostAuthenticationFailedException("One or more read-only claim is missing in the " +
                                "requested claim set. Please contact your administrator for more information about " +
                                "this issue.", "One or more read-only claim is missing in the requested claim set");
                    }
                }

                List<LocalClaim> localClaims =
                        getClaimMetadataManagementService().getLocalClaims(context.getTenantDomain());
                String displayNames = getMissingClaimsDisplayNames(missingClaimMap, localClaims);

                // Build the redirect carrying the missing claim URIs, their display names,
                // the session identifier and the service provider name.
                URIBuilder uriBuilder = new URIBuilder(ConfigurationFacade.getInstance()
                        .getAuthenticationEndpointMissingClaimsURL());
                uriBuilder.addParameter(FrameworkConstants.MISSING_CLAIMS,
                        missingClaims[0]);
                uriBuilder.addParameter(FrameworkConstants.DISPLAY_NAMES,
                        displayNames);
                uriBuilder.addParameter(FrameworkConstants.SESSION_DATA_KEY,
                        context.getContextIdentifier());
                uriBuilder.addParameter(FrameworkConstants.REQUEST_PARAM_SP,
                        context.getSequenceConfig().getApplicationConfig().getApplicationName());

                // Surface any error recorded by a previous (failed) claim-update attempt,
                // then clear it so it is shown only once.
                if (context.getProperty(POST_AUTH_MISSING_CLAIMS_ERROR) != null) {
                    uriBuilder.addParameter("errorMessage",
                            context.getProperty(POST_AUTH_MISSING_CLAIMS_ERROR).toString());
                    context.removeProperty(POST_AUTH_MISSING_CLAIMS_ERROR);
                }
                if (context.getProperty(POST_AUTH_MISSING_CLAIMS_ERROR_CODE) != null) {
                    uriBuilder.addParameter("errorCode",
                            context.getProperty(POST_AUTH_MISSING_CLAIMS_ERROR_CODE).toString());
                    context.removeProperty(POST_AUTH_MISSING_CLAIMS_ERROR_CODE);
                }

                response.sendRedirect(uriBuilder.build().toString());
                // Mark the context so the next pass through handle() processes the response.
                context.setProperty(POST_AUTHENTICATION_REDIRECTION_TRIGGERED, true);
                if (log.isDebugEnabled()) {
                    log.debug("Redirecting to outside to pick mandatory claims");
                }
            } catch (IOException e) {
                throw new PostAuthenticationFailedException("Error while handling missing mandatory claims", "Error " +
                        "while redirecting to request claims page", e);
            } catch (URISyntaxException e) {
                throw new PostAuthenticationFailedException("Error while handling missing mandatory claims",
                        "Error while building redirect URI", e);
            } catch (org.wso2.carbon.user.api.UserStoreException e) {
                throw new PostAuthenticationFailedException("Error while handling missing mandatory claims",
                        "Error while retrieving claim from claim URI.", e);
            } catch (ClaimMetadataException e) {
                throw new PostAuthenticationFailedException("Error while handling missing mandatory claims",
                        "Error while retrieving claim metadata.", e);
            }
            return PostAuthnHandlerFlowStatus.INCOMPLETE;
        } else {
            return PostAuthnHandlerFlowStatus.SUCCESS_COMPLETED;
        }
    }

    /**
     * Processes the claim values posted back from the claims page: validates that every
     * mandatory claim was supplied, persists the values when a local (or associated)
     * user exists, and attaches them to the authenticated user in the context.
     *
     * @throws PostAuthenticationFailedException when mandatory values are absent or
     *                                           persisting the claims fails.
     */
    protected void handlePostAuthenticationForMissingClaimsResponse(HttpServletRequest request, HttpServletResponse
            response, AuthenticationContext context) throws PostAuthenticationFailedException {

        if (log.isDebugEnabled()) {
            log.debug("Starting to process the response with missing claims");
        }

        Map<String, String> claims = new HashMap<>();
        Map<String, String> claimsForContext = new HashMap<>();
        Map<String, String[]> requestParams = request.getParameterMap();
        boolean persistClaims = false;

        AuthenticatedUser user = context.getSequenceConfig().getAuthenticatedUser();

        // Build the reverse (local carbon claim -> SP claim) mapping when the SP defines one.
        Map<String, String> carbonToSPClaimMapping = new HashMap<>();
        Object spToCarbonClaimMappingObject = context.getProperty(FrameworkConstants.SP_TO_CARBON_CLAIM_MAPPING);

        if (spToCarbonClaimMappingObject instanceof Map) {
            Map<String, String> spToCarbonClaimMapping = (Map<String, String>) spToCarbonClaimMappingObject;
            for (Map.Entry<String, String> entry : spToCarbonClaimMapping.entrySet()) {
                carbonToSPClaimMapping.put(entry.getValue(), entry.getKey());
            }
        }

        boolean doMandatoryClaimsExist = false;
        for (Map.Entry<String, String[]> entry : requestParams.entrySet()) {
            if (entry.getKey().startsWith(FrameworkConstants.RequestParams.MANDOTARY_CLAIM_PREFIX)) {
                doMandatoryClaimsExist = true;
                break;
            }
        }

        if (!doMandatoryClaimsExist) {
            // Check whether mandatory claims exist in the request. If not throw error.
            throw new PostAuthenticationFailedException("Mandatory missing claims are not found", "Mandatory missing " +
                    "claims are not found in the request for the session with context identifier: " +
                    context.getContextIdentifier());
        }

        List<String> missingClaims = new ArrayList<>();

        for (Map.Entry<String, String[]> entry : requestParams.entrySet()) {
            if (entry.getKey().startsWith(FrameworkConstants.RequestParams.MANDOTARY_CLAIM_PREFIX)) {

                String localClaimURI
                        = entry.getKey().substring(FrameworkConstants.RequestParams.MANDOTARY_CLAIM_PREFIX.length());
                if (StringUtils.isBlank(entry.getValue()[0])) {
                    missingClaims.add(localClaimURI);
                    continue;
                }
                claims.put(localClaimURI, entry.getValue()[0]);

                // Keep the context copy keyed by the SP claim URI when a mapping exists,
                // otherwise by the local claim URI.
                if (spToCarbonClaimMappingObject != null) {
                    String spClaimURI = carbonToSPClaimMapping.get(localClaimURI);
                    claimsForContext.put(spClaimURI, entry.getValue()[0]);
                } else {
                    claimsForContext.put(localClaimURI, entry.getValue()[0]);
                }
            }
        }

        if (CollectionUtils.isNotEmpty(missingClaims)) {
            String missingClaimURIs = StringUtils.join(missingClaims, ",");
            if (log.isDebugEnabled()) {
                log.debug("Claim values for the mandatory claims: " + missingClaimURIs + " are empty");
            }
            throw new PostAuthenticationFailedException("Mandatory claim is not found", "Claim " +
                    "values for the claim URIs: " + missingClaimURIs + " are empty");
        }

        Map<ClaimMapping, String> authenticatedUserAttributes = FrameworkUtils.buildClaimMappings(claimsForContext);
        authenticatedUserAttributes.putAll(user.getUserAttributes());

        // Locate the subject-attribute step to decide whether the claims can be persisted
        // (local user, or federated user with a local association).
        for (Map.Entry<Integer, StepConfig> entry : context.getSequenceConfig().getStepMap().entrySet()) {
            StepConfig stepConfig = entry.getValue();
            if (stepConfig.isSubjectAttributeStep()) {

                if (stepConfig.getAuthenticatedUser() != null) {
                    user = stepConfig.getAuthenticatedUser();
                }

                if (!user.isFederatedUser()) {
                    persistClaims = true;
                } else {
                    String associatedID;
                    String subject = user.getAuthenticatedSubjectIdentifier();
                    try {
                        FederatedAssociationManager federatedAssociationManager = FrameworkUtils
                                .getFederatedAssociationManager();
                        associatedID = federatedAssociationManager.getUserForFederatedAssociation(context
                                .getTenantDomain(), stepConfig.getAuthenticatedIdP(), subject);
                        if (StringUtils.isNotBlank(associatedID)) {
                            // Switch to the associated local user so the claims are persisted
                            // against the local account.
                            String fullQualifiedAssociatedUserId = FrameworkUtils.prependUserStoreDomainToName(
                                    associatedID + UserCoreConstants.TENANT_DOMAIN_COMBINER
                                            + context.getTenantDomain());
                            UserCoreUtil.setDomainInThreadLocal(UserCoreUtil.extractDomainFromName(associatedID));
                            user = AuthenticatedUser.createLocalAuthenticatedUserFromSubjectIdentifier(
                                    fullQualifiedAssociatedUserId);
                            persistClaims = true;
                        }
                    } catch (FederatedAssociationManagerException | FrameworkException e) {
                        throw new PostAuthenticationFailedException("Error while handling missing mandatory claims",
                                "Error while getting association for " + subject, e);
                    }
                }
                break;
            }
        }

        if (persistClaims) {
            if (log.isDebugEnabled()) {
                log.debug("Local user mapping found. Claims will be persisted");
            }

            try {
                // Translate SP claim URIs back to local claim URIs before persisting.
                Map<String, String> claimMapping = context.getSequenceConfig().getApplicationConfig()
                        .getClaimMappings();
                Map<String, String> localIdpClaims = new HashMap<>();
                for (Map.Entry<String, String> entry : claims.entrySet()) {
                    String localClaim = claimMapping.get(entry.getKey());
                    localIdpClaims.put(localClaim, entry.getValue());
                }

                if (log.isDebugEnabled()) {
                    log.debug("Updating user profile of user : " + user.getLoggableUserId());
                }

                UserRealm realm = getUserRealm(user.getTenantDomain());
                AbstractUserStoreManager userStoreManager = (AbstractUserStoreManager) realm.getUserStoreManager();
                userStoreManager.setUserClaimValuesWithID(user.getUserId(), localIdpClaims, null);
            } catch (UserStoreException e) {
                if (e instanceof UserStoreClientException) {
                    // Record the error so handle() can send the user back to the claims page.
                    context.setProperty(POST_AUTH_MISSING_CLAIMS_ERROR, e.getMessage());
                    if (StringUtils.isNotBlank(e.getErrorCode())) {
                        context.setProperty(POST_AUTH_MISSING_CLAIMS_ERROR_CODE, e.getErrorCode());
                    }
                    /*
                    When the attribute update is disabled for JIT provisioned users, the mandatory claim update
                    request will be identified through the error code and handled it.
                    */
                    if (ERROR_CODE_INVALID_ATTRIBUTE_UPDATE.equals(e.getErrorCode())) {
                        context.getSequenceConfig().getAuthenticatedUser().
                                setUserAttributes(authenticatedUserAttributes);
                        return;
                    }
                }
                // Read-only user store: cannot persist, but the attributes are still
                // attached to the session user.
                if (ErrorMessages.ERROR_CODE_READONLY_USER_STORE.getCode().
                        equals(e.getErrorCode())) {
                    context.getSequenceConfig().getAuthenticatedUser().
                            setUserAttributes(authenticatedUserAttributes);
                    return;
                }
                throw new PostAuthenticationFailedException(
                        e.getMessage(), "Error while updating claims for local user. Could not update profile", e);
            } catch (UserIdNotFoundException e) {
                throw new PostAuthenticationFailedException(
                        "User id not found",
                        "User id not found for local user. Could not update profile", e);
            }
        }
        context.getSequenceConfig().getAuthenticatedUser().setUserAttributes(authenticatedUserAttributes);
    }

    /**
     * Resolves the {@link UserRealm} for the given tenant domain.
     *
     * @throws PostAuthenticationFailedException when the realm cannot be retrieved.
     */
    protected UserRealm getUserRealm(String tenantDomain) throws PostAuthenticationFailedException {
        UserRealm realm;
        try {
            realm = AnonymousSessionUtil.getRealmByTenantDomain(
                    FrameworkServiceComponent.getRegistryService(),
                    FrameworkServiceComponent.getRealmService(), tenantDomain);
        } catch (CarbonException e) {
            throw new PostAuthenticationFailedException("Error while handling missing mandatory claims",
                    "Error occurred while retrieving the Realm for " + tenantDomain + " to handle local claims", e);
        }
        return realm;
    }

    /** Returns the authenticated user recorded in the sequence config, or {@code null}. */
    protected AuthenticatedUser getAuthenticatedUser(AuthenticationContext authenticationContext) {
        AuthenticatedUser user = authenticationContext.getSequenceConfig().getAuthenticatedUser();
        return user;
    }

    private ClaimMetadataManagementService getClaimMetadataManagementService() {
        return FrameworkServiceDataHolder.getInstance().getClaimMetadataManagementService();
    }
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.pivotal.gemfirexd.dbsync;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.Callable;
import com.gemstone.gemfire.cache.asyncqueue.AsyncEventQueue;
import com.pivotal.gemfirexd.TestUtil;
import com.pivotal.gemfirexd.callbacks.Event;
import com.pivotal.gemfirexd.dbsync.DBSynchronizerTestBase.TestNewGatewayEventListenerNotify;
import com.pivotal.gemfirexd.internal.engine.Misc;
import com.pivotal.gemfirexd.internal.engine.ddl.wan.GfxdGatewayEventListener;
import com.pivotal.gemfirexd.internal.engine.jdbc.GemFireXDRuntimeException;
import com.pivotal.gemfirexd.internal.engine.store.GemFireStore;
import dunit.SerializableCallable;
import dunit.SerializableRunnable;
import com.pivotal.gemfirexd.callbacks.AsyncEventListener;
import com.pivotal.gemfirexd.internal.engine.GfxdConstants;
import com.pivotal.gemfirexd.internal.shared.common.sanity.SanityManager;
import org.junit.Assert;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
public class SerialAsyncEventListenerDUnit extends DBSynchronizerTestBase {
/**
 * Creates the DUnit test with the given test-method name, as required by the
 * JUnit-3-style DUnit framework.
 */
public SerialAsyncEventListenerDUnit(String name) {
    super(name);
}
/**
 * AsyncEventListener that aggregates raw-sensor inserts into the LOAD_AVERAGES
 * table through a loopback JDBC connection. Connections and prepared statements
 * are cached per thread because queue dispatch may use multiple threads.
 */
public static class AggregationListener implements AsyncEventListener {

    private static final Logger LOG = LoggerFactory.getLogger(
        AggregationListener.class.getName());

    private static final String DRIVER = "com.pivotal.gemfirexd.jdbc.ClientDriver";
    private static final String CONN_URL = "jdbc:gemfirexd:";
    private static final String SELECT_SQL = "select * from load_averages where weekday=? and time_slice=? and plug_id=?";
    private static final String UPDATE_SQL = "update load_averages set total_load=?, event_count=? where weekday=? and time_slice=? and plug_id=?";

    // Column of the incoming event whose value is accumulated; configured via init().
    private String valueColumn;

    //load driver
    static {
        try {
            Class.forName(DRIVER).newInstance();
        } catch (ClassNotFoundException cnfe) {
            throw new RuntimeException("Unable to load the JDBC driver", cnfe);
        } catch (InstantiationException ie) {
            throw new RuntimeException("Unable to instantiate the JDBC driver", ie);
        } catch (IllegalAccessException iae) {
            throw new RuntimeException("Not allowed to access the JDBC driver", iae);
        }
    }

    // One loopback connection per dispatcher thread.
    private static ThreadLocal<Connection> dConn = new ThreadLocal<Connection>() {
        protected Connection initialValue() {
            return getConnection();
        }
    };

    private static Connection getConnection() {
        Connection conn;
        try {
            conn = DriverManager.getConnection(CONN_URL);
        } catch (SQLException e) {
            throw new IllegalStateException("Unable to create connection", e);
        }
        return conn;
    }

    // Per-thread prepared statements bound to the thread's connection.
    private static ThreadLocal<PreparedStatement> selectStmt = new ThreadLocal<PreparedStatement>() {
        protected PreparedStatement initialValue() {
            PreparedStatement stmt = null;
            try {
                stmt = dConn.get().prepareStatement(SELECT_SQL);
            } catch (SQLException se) {
                throw new IllegalStateException("Unable to retrieve statement ", se);
            }
            return stmt;
        }
    };

    private static ThreadLocal<PreparedStatement> updateStmt = new ThreadLocal<PreparedStatement>() {
        protected PreparedStatement initialValue() {
            PreparedStatement stmt = null;
            try {
                stmt = dConn.get().prepareStatement(UPDATE_SQL);
            } catch (SQLException se) {
                throw new IllegalStateException("Unable to retrieve statement ", se);
            }
            return stmt;
        }
    };

    /**
     * For each AFTER_INSERT event, looks up the matching LOAD_AVERAGES row and,
     * if present, adds the event's value column to total_load and bumps event_count.
     */
    @Override
    public boolean processEvents(List<Event> events) {
        for (Event e : events) {
            LOG.info("AggregateListener::Processing event" + e);
            if (e.getType() == Event.Type.AFTER_INSERT) {
                ResultSet eventRS = e.getNewRowsAsResultSet();
                try {
                    PreparedStatement s = selectStmt.get();
                    s.setInt(1, eventRS.getInt("weekday"));
                    s.setInt(2, eventRS.getInt("time_slice"));
                    s.setInt(3, eventRS.getInt("plug_id"));
                    ResultSet queryRS = s.executeQuery();
                    if (queryRS.next()) {
                        PreparedStatement update = updateStmt.get();
                        update.setFloat(1,
                            queryRS.getFloat("total_load") + eventRS.getFloat(valueColumn));
                        update.setInt(2, queryRS.getInt("event_count") + 1);
                        update.setInt(3, queryRS.getInt("weekday"));
                        update.setInt(4, queryRS.getInt("time_slice"));
                        update.setInt(5, queryRS.getInt("plug_id"));
                        update.executeUpdate();
                    }
                } catch (SQLException ex) {
                    ex.printStackTrace();
                }
            }
        }
        return true;
    }

    @Override
    public void close() {
        System.out.println("--->>> Closing connection from AEQ listener");
        try {
            // Close this thread's cached connection (closing the connection also closes
            // its prepared statements). The previous implementation called
            // getConnection().close(), which opened a brand-new connection and closed
            // that, leaking the thread-local one actually in use.
            Connection conn = dConn.get();
            if (conn != null && !conn.isClosed()) {
                conn.close();
            }
        } catch (SQLException ex) {
            // Best-effort cleanup during shutdown; nothing useful to do on failure.
            LOG.debug("Ignored failure while closing listener connection", ex);
        }
    }

    @Override
    public void init(String s) {
        // The init string names the event column to aggregate.
        valueColumn = s;
        // Enable lock tracing for the tables involved to aid debugging.
        SanityManager.TRACE_ON(GfxdConstants.TRACE_LOCK_PREFIX + "RAW_SENSOR");
        SanityManager.TRACE_ON(GfxdConstants.TRACE_LOCK_PREFIX + "APP.RAW_SENSOR");
        SanityManager.TRACE_ON(GfxdConstants.TRACE_LOCK_PREFIX + "LOAD_AVERAGES");
        SanityManager.TRACE_ON(GfxdConstants.TRACE_LOCK_PREFIX + "APP.LOAD_AVERAGES");
    }

    @Override
    public void start() {
        // No startup work required.
    }
}
/**
 * Regression test for bug 50091: after inserts have been drained through the
 * AggregationListener, dropping APP.LOAD_AVERAGES must actually remove the
 * table, verified through JDBC database metadata.
 */
public void testBug50091() throws Exception {
    startVMs(1, 2, -1, "SG1", null);
    //clientSQLExecute(1, "drop table if exists app.load_averages");
    //clientSQLExecute(1, "drop table if exists app.raw_sensor");
    // Source table that receives raw sensor rows; partitioned by house_id.
    clientSQLExecute(1, "create table app.raw_sensor" +
        "(id bigint, timestamp bigint, value float(23), " +
        "property smallint, plug_id integer, household_id integer, " +
        "house_id integer, weekday smallint, " +
        "time_slice smallint ) " +
        "partition by column (house_id) "
    );
    //clientSQLExecute(1, "drop index if exists app.raw_sensor_idx");
    clientSQLExecute(1, "create index app.raw_sensor_idx on app.raw_sensor (weekday, time_slice, plug_id)");
    // Aggregation target; colocated with raw_sensor so listener updates stay local.
    clientSQLExecute(1, "create table app.load_averages (house_id integer not null, " +
        "household_id integer, " +
        "plug_id integer not null, " +
        "weekday smallint not null, " +
        "time_slice smallint not null, " +
        "total_load float(23), " +
        "event_count integer) partition by column (house_id) colocate with (app.raw_sensor)");
    clientSQLExecute(1, "alter table app.load_averages " +
        "add constraint LOAD_AVERAGES_PK PRIMARY KEY (house_id, plug_id, weekday, time_slice)");
    //clientSQLExecute(1, "drop index if exists app.load_averages_idx");
    clientSQLExecute(1, "create index app.load_averages_idx on app.load_averages (weekday, time_slice, plug_id)");
    //clientSQLExecute(1, "drop asynceventlistener if exists AggListener");
    // Configure the AggregationListener as an async event listener on SG1.
    Runnable runnable = getExecutorForWBCLConfiguration(
        "SG1",
        "AggListener",
        "com.pivotal.gemfirexd.dbsync.SerialAsyncEventListenerDUnit$AggregationListener",
        "org.apache.derby.jdbc.EmbeddedDriver",
        "jdbc:derby:newDB;create=true,app,app", true, Integer.valueOf(1), null,
        Boolean.TRUE, null, null, null, 100000, null, false);
    runnable.run();
    clientSQLExecute(1, "alter table app.raw_sensor set asynceventlistener (AggListener)");
    clientSQLExecute(1, "call sys.start_async_event_listener('AggListener')");
    clientSQLExecute(1, "insert into app.raw_sensor values(1, null, 1.1, 1, 1, 1, 1, 1, 1)");
    clientSQLExecute(1, "insert into app.raw_sensor values(1, null, 1.1, 1, 1, 1, 1, 1, 1)");
    clientSQLExecute(1, "insert into app.raw_sensor values(1, null, 1.1, 1, 1, 1, 1, 1, 1)");
    // Drain the queue before dropping, so the drop races nothing.
    clientSQLExecute(1, "call SYS.WAIT_FOR_SENDER_QUEUE_FLUSH('AGGLISTENER', 'true', 0)");
    clientSQLExecute(1, "drop table if exists app.load_averages");
    // Verify via metadata that LOAD_AVERAGES is really gone.
    Connection connection = TestUtil.getConnection();
    ResultSet metadataRs = connection.getMetaData().getTables(null, "APP",
        "LOAD_AVERAGES", null);
    boolean foundTable = false;
    while (metadataRs.next()) {
        foundTable = metadataRs.getString(3).equalsIgnoreCase("LOAD_AVERAGES")
            && metadataRs.getString(2).equalsIgnoreCase("APP");
        assertFalse(foundTable);
    }
    metadataRs.close();
    connection.close();
}
/**
 * Verifies that an async event listener created on server group SG1 is attached
 * only to the SG1 servers (not the client or the SG2 servers), starts correctly,
 * and retains its full configuration on the members that host it.
 */
public void testAsyncEventListenerConfiguration() throws Exception {
    startClientVMs(1, 0, null);
    startServerVMs(2, -1, "SG1");
    startServerVMs(2, -1, "SG2");
    // Create the WBCL1 listener scoped to SG1.
    Runnable runnable = getExecutorForWBCLConfiguration(
        "SG1",
        "WBCL1",
        "com.pivotal.gemfirexd.dbsync.DBSynchronizerTestBase$TestNewGatewayEventListener",
        "org.apache.derby.jdbc.EmbeddedDriver",
        "jdbc:derby:newDB;create=true,app,app", true, Integer.valueOf(1), null,
        Boolean.TRUE, null, null, null, 100000,
        "org.apache.derby.jdbc.EmbeddedDriver,jdbc:derby:newDB;create=true",
        false);
    runnable.run();
    // Verify Listener not attached on client
    Runnable listenerNotAttached = DBSynchronizerTestBase
        .getExecutorToCheckListenerNotAttached("WBCL1");
    Runnable listenerAttached = DBSynchronizerTestBase
        .getExecutorToCheckListenerAttached("WBCL1");
    // Servers 3/4 are in SG2, so the listener must not be attached there either.
    clientExecute(1, listenerNotAttached);
    serverExecute(3, listenerNotAttached);
    serverExecute(4, listenerNotAttached);
    serverExecute(1, listenerAttached);
    serverExecute(2, listenerAttached);
    Runnable startWBCL = startAsyncEventListener("WBCL1");
    clientExecute(1, startWBCL);
    checkHubRunningAndIsPrimaryVerifier("WBCL1");
    // The SG1 members must report the exact configuration that was created.
    Runnable wbclConfigVerifier = getExecutorForWBCLConfigurationVerification(
        "WBCL1", Integer.valueOf(1), null, Boolean.TRUE, null, null, null,
        100000,
        "org.apache.derby.jdbc.EmbeddedDriver,jdbc:derby:newDB;create=true,app,app");
    serverExecute(1, wbclConfigVerifier);
    serverExecute(2, wbclConfigVerifier);
}
/**
 * Verifies that the CREATE ASYNCEVENTLISTENER DDL is replayed on servers that
 * join after the listener was created: late-joining SG2 servers must not attach
 * it, while the original SG1 servers keep running it with exactly one primary.
 */
public void testCreateAsyncEventListenerDDLReplay() throws Exception {
    startClientVMs(1, 0, null);
    startServerVMs(2, -1, "SG1");
    Runnable runnable = getExecutorForWBCLConfiguration(
        "SG1",
        "WBCL1",
        "com.pivotal.gemfirexd.dbsync.DBSynchronizerTestBase$TestNewGatewayEventListener",
        "org.apache.derby.jdbc.EmbeddedDriver", "jdbc:derby:newDB;create=true",
        true, Integer.valueOf(1), null, Boolean.TRUE, null, null, null, 100000,
        "org.apache.derby.jdbc.EmbeddedDriver,jdbc:derby:newDB;create=true",
        false);
    runnable.run();
    // Verify Listener not attached on client ( controller VM);
    Runnable listenerNotAttached = getExecutorToCheckListenerNotAttached("WBCL1");
    // NOTE(review): the return value below is discarded — presumably this was meant to be
    // assigned and executed on servers 1/2 as in testAsyncEventListenerConfiguration; confirm.
    getExecutorToCheckListenerAttached("WBCL1");
    clientExecute(1, listenerNotAttached);
    Runnable startWBCL = startAsyncEventListener("WBCL1");
    clientExecute(1, startWBCL);
    // Start the other 2 dunit VMs as Server which belong to different Server
    // Group SG2
    startServerVMs(2, -1, "SG2");
    Callable<?> hubRunningAndIsPrimaryVerifier = getExecutorToCheckForHubRunningAndIsPrimary(
        "WBCL1", true /* wbcl should be running */);
    boolean[] isHubRunningAndPrimary1 = (boolean[])serverExecute(1,
        hubRunningAndIsPrimaryVerifier);
    boolean[] isHubRunningAndPrimary2 = (boolean[])serverExecute(2,
        hubRunningAndIsPrimaryVerifier);
    // Both SG1 servers run the hub; exactly one of them is primary.
    assertTrue(isHubRunningAndPrimary2[0]);
    assertTrue(isHubRunningAndPrimary1[0]);
    assertTrue(isHubRunningAndPrimary1[1] || isHubRunningAndPrimary2[1]);
    assertFalse(isHubRunningAndPrimary1[1] && isHubRunningAndPrimary2[1]);
    Runnable wbclConfigVerifier = getExecutorForWBCLConfigurationVerification(
        "WBCL1", Integer.valueOf(1), null, Boolean.TRUE, null, null, null,
        100000,
        "org.apache.derby.jdbc.EmbeddedDriver,jdbc:derby:newDB;create=true");
    serverExecute(1, wbclConfigVerifier);
    serverExecute(2, wbclConfigVerifier);
    // Late-joining SG2 servers replayed the DDL but must not attach the listener.
    serverExecute(3, listenerNotAttached);
    serverExecute(4, listenerNotAttached);
}
/**
 * Verifies that STOP ASYNCEVENTLISTENER is propagated to all current members
 * and replayed on members that join afterwards, while servers outside the
 * listener's server group never attach it.
 */
public void testAsyncEventListenerStopPropagationAndReplay() throws Exception {
  startVMs(1, 2, -1, "SG1", null);
  Runnable configureWBCL = getExecutorForWBCLConfiguration(
      "SG1",
      "WBCL1",
      "com.pivotal.gemfirexd.dbsync.DBSynchronizerTestBase$TestNewGatewayEventListener",
      "org.apache.derby.jdbc.EmbeddedDriver",
      "jdbc:derby:newDB;create=true,app,app", true, Integer.valueOf(1), null,
      Boolean.TRUE, null, null, null, 100000, null, false);
  configureWBCL.run();
  // The listener must be attached on the SG1 servers only, never on the
  // controller (client) VM.
  Runnable checkNotAttached = getExecutorToCheckListenerNotAttached("WBCL1");
  Runnable checkAttached = getExecutorToCheckListenerAttached("WBCL1");
  clientExecute(1, checkNotAttached);
  serverExecute(1, checkAttached);
  serverExecute(2, checkAttached);
  clientExecute(1, startAsyncEventListener("WBCL1"));
  checkHubRunningAndIsPrimaryVerifier("WBCL1");
  // Stop the listener and confirm the hub reports stopped on both servers.
  clientExecute(1, stopAsyncEventListener("WBCL1"));
  Callable<?> checkStopped = getExecutorToCheckForHubRunningAndIsPrimary(
      "WBCL1", false);
  serverExecute(1, checkStopped);
  serverExecute(2, checkStopped);
  // Bring up one more server inside SG1 and one outside: the SG1 member must
  // replay the (stopped) listener configuration, the SG2 member must not
  // attach the listener at all.
  startServerVMs(1, -1, "SG1");
  startServerVMs(1, -1, "SG2");
  serverExecute(3, checkAttached);
  serverExecute(3, checkStopped);
  serverExecute(4, checkNotAttached);
}
/**
 * Verifies that DROP ASYNCEVENTLISTENER is propagated to all current members
 * and replayed on members that join afterwards.
 */
public void testAsyncEventListenerRemovePropagationAndReplay() throws Exception {
  startVMs(1, 2, -1, "SG1", null);
  Runnable configureWBCL = getExecutorForWBCLConfiguration(
      "SG1",
      "WBCL1",
      "com.pivotal.gemfirexd.dbsync.DBSynchronizerTestBase$TestNewGatewayEventListener",
      "org.apache.derby.jdbc.EmbeddedDriver", "jdbc:derby:newDB;create=true",
      true, Integer.valueOf(1), null, Boolean.TRUE, null, null, null, 100000,
      null, false);
  configureWBCL.run();
  // Listener is attached only on the SG1 servers, not on the controller VM.
  Runnable checkNotAttached = getExecutorToCheckListenerNotAttached("WBCL1");
  Runnable checkAttached = getExecutorToCheckListenerAttached("WBCL1");
  clientExecute(1, checkNotAttached);
  serverExecute(1, checkAttached);
  serverExecute(2, checkAttached);
  clientExecute(1, startAsyncEventListener("WBCL1"));
  checkHubRunningAndIsPrimaryVerifier("WBCL1");
  // Stop the listener on all members.
  clientExecute(1, stopAsyncEventListener("WBCL1"));
  Callable<?> checkStopped = getExecutorToCheckForHubRunningAndIsPrimary(
      "WBCL1", false);
  serverExecute(1, checkStopped);
  serverExecute(2, checkStopped);
  // Drop the listener and make sure it is gone from both servers.
  clientExecute(1, dropAsyncEventListener("WBCL1"));
  Callable<?> checkRemoved = getExecutorToCheckWBCLRemoved("WBCL1");
  serverExecute(1, checkRemoved);
  serverExecute(2, checkRemoved);
  // A late-joining SG1 member replays the DROP as well; the SG2 member never
  // had the listener in the first place.
  startServerVMs(1, -1, "SG1");
  startServerVMs(1, -1, "SG2");
  serverExecute(3, checkRemoved);
  serverExecute(4, checkNotAttached);
}
/**
 * Tests that insert/update events for a table are dispatched to the
 * configured async event listener with the expected row and primary-key
 * payloads. Name is underscore-prefixed so JUnit 3 does not run it
 * (presumably disabled — TODO confirm why).
 */
public void _testAsyncEventListenerDispatching() throws Exception {
  startVMs(1, 1, -1, "SG2", null);
  clientSQLExecute(
      1,
      "create table TESTTABLE (ID int not null , "
          + "DESCRIPTION varchar(1024) , ADDRESS varchar(1024)) AsyncEventListener (WBCL1)");
  // now lets first do a dummy insert to create bucket on server 1
  clientSQLExecute(1, "insert into testtable values(114,'desc114','add114')");
  startServerVMs(1, -1, "SG2");
  Runnable runnable = getExecutorForWBCLConfiguration(
      "SG2",
      "WBCL1",
      "com.pivotal.gemfirexd.dbsync.DBSynchronizerTestBase$TestNewGatewayEventListenerNotify",
      "org.apache.derby.jdbc.EmbeddedDriver", "jdbc:derby:newDB;create=true",
      true, Integer.valueOf(1), null, Boolean.FALSE, null, null, null,
      100000, null, false);
  runnable.run();
  // configure the listener on server 2 to collect exactly 2 events
  SerializableCallable sr = new SerializableCallable("Set Events Collector") {
    public Object call() {
      AsyncEventQueue asyncQueue = Misc.getGemFireCache().getAsyncEventQueue(
          "WBCL1");
      GfxdGatewayEventListener listener = (GfxdGatewayEventListener)asyncQueue
          .getAsyncEventListener();
      TestNewGatewayEventListenerNotify tgen = (TestNewGatewayEventListenerNotify)listener
          .getAsyncEventListenerForTest();
      Event[] events = new Event[2];
      tgen.setEventsExpected(events);
      return GemFireStore.getMyId().toString();
    }
  };
  // CLEANUP: the returned member id was previously stored in an unused
  // local; only the side effect of installing the collector is needed here.
  serverExecute(2, sr);
  Runnable startWBCL = startAsyncEventListener("WBCL1");
  clientExecute(1, startWBCL);
  // now lets first do an insert & then update
  clientSQLExecute(1, "insert into testtable values(1,'desc1','add1')");
  clientSQLExecute(1,
      "update TESTTABLE set description = 'modified' where ID =1");
  // validate data
  SerializableRunnable sc = new SerializableRunnable("validate callback data") {
    public void run() {
      try {
        AsyncEventQueue asyncQueue = Misc.getGemFireCache()
            .getAsyncEventQueue("WBCL1");
        GfxdGatewayEventListener listener = (GfxdGatewayEventListener)asyncQueue
            .getAsyncEventListener();
        TestNewGatewayEventListenerNotify tgen = (TestNewGatewayEventListenerNotify)listener
            .getAsyncEventListenerForTest();
        // NOTE(review): unbounded wait; relies on the dunit harness timeout
        while (tgen.getNumEventsProcessed() != 2) {
          Thread.sleep(1000);
        }
        Event createdEvent = tgen.getEvents()[0];
        Object pk = createdEvent.getPrimaryKey()[0];
        Event ev = tgen.getEvents()[1];
        assertNotNull(ev);
        List<Object> list = ev.getNewRow();
        // update event carries the full row width but only the modified
        // column populated (expected value first per JUnit convention)
        assertEquals(3, list.size());
        assertEquals("modified", list.get(1));
        assertNull(list.get(0));
        assertNull(list.get(2));
        assertEquals(pk, ev.getPrimaryKey()[0]);
      } catch (Exception e) {
        throw GemFireXDRuntimeException.newRuntimeException(null, e);
      }
    }
  };
  serverExecute(2, sc);
}
/**
 * Tests event dispatch when the listener is hosted on a server group (SG2)
 * that does not host the table's buckets, then verifies the SYSTABLES /
 * MEMBERS / ASYNCEVENTLISTENERS catalog entries. Name does not start with
 * "test" so JUnit 3 skips it — presumably disabled pending bug 51213
 * (TODO confirm).
 */
public void Bug51213testAsyncEventListenerOnNonBucketHostingNode() throws Exception {
startVMs(1, 1, -1, "SG1", null);
clientSQLExecute(
1,
"create table TESTTABLE (ID int not null , "
+ "DESCRIPTION varchar(1024) , ADDRESS varchar(1024)) AsyncEventListener (WBCL1)");
// dummy insert so a bucket gets created on server 1 before SG2 comes up
clientSQLExecute(1, "insert into testtable values(114,'desc114','add114')");
startServerVMs(1, -1, "SG2");
// listener configured on SG2, which hosts no buckets for TESTTABLE
Runnable runnable = getExecutorForWBCLConfiguration(
"SG2",
"WBCL1",
"com.pivotal.gemfirexd.dbsync.DBSynchronizerTestBase$TestNewGatewayEventListenerNotify",
"org.apache.derby.jdbc.EmbeddedDriver", "jdbc:derby:newDB;create=true",
true, Integer.valueOf(1), null, Boolean.FALSE, null, null, null,
100000, null, false);
runnable.run();
// configure the listener to collect events
SerializableCallable sr = new SerializableCallable("Set Events Collector") {
public Object call() {
AsyncEventQueue asyncQueue = Misc.getGemFireCache().getAsyncEventQueue(
"WBCL1");
GfxdGatewayEventListener listener = (GfxdGatewayEventListener)asyncQueue
.getAsyncEventListener();
TestNewGatewayEventListenerNotify tgen = (TestNewGatewayEventListenerNotify)listener
.getAsyncEventListenerForTest();
Event[] events = new Event[2];
tgen.setEventsExpected(events);
return GemFireStore.getMyId().toString();
}
};
// remember the id of the member hosting the listener for the catalog checks
final String listenerMember = (String)serverExecute(2, sr);
Runnable startWBCL = startAsyncEventListener("WBCL1");
clientExecute(1, startWBCL);
// now lets first do an insert & then update
clientSQLExecute(1, "insert into testtable values(1,'desc1','add1')");
clientSQLExecute(1,
"update TESTTABLE set description = 'modified' where id =1");
// validate data
SerializableRunnable sc = new SerializableRunnable("validate callback data") {
public void run() {
try {
AsyncEventQueue asyncQueue = Misc.getGemFireCache()
.getAsyncEventQueue("WBCL1");
GfxdGatewayEventListener listener = (GfxdGatewayEventListener)asyncQueue
.getAsyncEventListener();
TestNewGatewayEventListenerNotify tgen = (TestNewGatewayEventListenerNotify)listener
.getAsyncEventListenerForTest();
// NOTE(review): unbounded wait; relies on the dunit harness timeout
while (tgen.getNumEventsProcessed() != 2) {
Thread.sleep(1000);
}
Event createdEvent = tgen.getEvents()[0];
Object pk = createdEvent.getPrimaryKey()[0];
Event ev = tgen.getEvents()[1];
assertNotNull(ev);
List<Object> list = ev.getNewRow();
// update event: full row width, only the modified column populated
assertEquals(list.size(), 3);
assertEquals(list.get(1), "modified");
assertNull(list.get(0));
assertNull(list.get(2));
assertEquals(pk, ev.getPrimaryKey()[0]);
} catch (Exception e) {
throw GemFireXDRuntimeException.newRuntimeException(null, e);
}
}
};
serverExecute(2, sc);
// check SYSTABLES entries on the server
// (statement/result set are intentionally left to connection cleanup, as
// elsewhere in these tests)
Statement stmt = TestUtil.jdbcConn.createStatement();
ResultSet rs = stmt
.executeQuery("select t.*, m.ID DSID from SYS.SYSTABLES t, SYS.MEMBERS m "
+ "where t.tablename='TESTTABLE' and m.SERVERGROUPS='SG2'");
assertTrue(rs.next());
assertEquals("TESTTABLE", rs.getString("TABLENAME"));
assertEquals("PARTITION", rs.getString("DATAPOLICY"));
// check for partition resolver
assertEquals("PARTITION BY COLUMN ()", rs.getString("RESOLVER"));
assertTrue(rs.getBoolean("GATEWAYENABLED"));
assertEquals("WBCL1", rs.getString("ASYNCLISTENERS"));
// null check for other attributes
assertNull(rs.getString("EVICTIONATTRS"));
assertNull(rs.getString("DISKATTRS"));
assertNull(rs.getString("EXPIRATIONATTRS"));
assertNull(rs.getString("LOADER"));
assertNull(rs.getString("WRITER"));
assertNull(rs.getString("LISTENERS"));
assertEquals(listenerMember, rs.getObject("DSID"));
assertFalse(rs.next());
// same row via a join over ASYNCEVENTLISTENERS using groupsintersect
rs = stmt.executeQuery("select t.*, m.ID DSID from "
+ "SYS.SYSTABLES t, SYS.MEMBERS m, SYS.ASYNCEVENTLISTENERS a "
+ "where t.tablename='TESTTABLE' and groupsintersect("
+ "a.SERVER_GROUPS, m.SERVERGROUPS) and "
+ "groupsintersect(t.ASYNCLISTENERS, a.ID)");
assertTrue(rs.next());
assertEquals("TESTTABLE", rs.getString("TABLENAME"));
assertEquals("PARTITION", rs.getString("DATAPOLICY"));
// check for partition resolver
assertEquals("PARTITION BY COLUMN ()", rs.getString("RESOLVER"));
assertTrue(rs.getBoolean("GATEWAYENABLED"));
assertEquals("WBCL1", rs.getString("ASYNCLISTENERS"));
// null check for other attributes
assertNull(rs.getString("EVICTIONATTRS"));
assertNull(rs.getString("DISKATTRS"));
assertNull(rs.getString("EXPIRATIONATTRS"));
assertNull(rs.getString("LOADER"));
assertNull(rs.getString("WRITER"));
assertNull(rs.getString("LISTENERS"));
assertEquals(listenerMember, rs.getObject("DSID"));
assertFalse(rs.next());
// check for entry for async listeners on the client
rs = stmt
.executeQuery("select * from SYS.SYSTABLES where tablename='TESTTABLE'");
assertTrue(rs.next());
assertEquals("TESTTABLE", rs.getString("TABLENAME"));
assertEquals("PARTITION", rs.getString("DATAPOLICY"));
// check for partition resolver
assertEquals("PARTITION BY COLUMN ()", rs.getString("RESOLVER"));
assertTrue(rs.getBoolean("GATEWAYENABLED"));
assertEquals("WBCL1", rs.getString("ASYNCLISTENERS"));
// null check for other attributes
assertNull(rs.getString("EVICTIONATTRS"));
assertNull(rs.getString("DISKATTRS"));
assertNull(rs.getString("EXPIRATIONATTRS"));
assertNull(rs.getString("LOADER"));
assertNull(rs.getString("WRITER"));
assertNull(rs.getString("LISTENERS"));
assertFalse(rs.next());
}
/**
 * Tests event dispatch for a table WITHOUT a primary key: 10 inserts,
 * 10 updates and 5 deletes must surface as 25 listener events whose
 * (synthetic, Long-valued) primary keys stay consistent across the
 * insert/update/delete of the same row. Name does not start with "test"
 * so JUnit 3 skips it — presumably disabled pending bug 51213 (TODO confirm).
 */
public void Bug51213testAsyncEventListenerOnNonPkBasedTable() throws Exception {
startVMs(1, 2, -1, "SG1", null);
try {
// note: no primary key on ID
clientSQLExecute(
1,
"create table TESTTABLE (ID int , "
+ "DESCRIPTION varchar(1024) , ADDRESS varchar(1024)) AsyncEventListener (WBCL1)");
// now lets first do a dummy insert to create bucket on server 1
clientSQLExecute(1,
"insert into testtable values(114,'desc114','add114')");
startServerVMs(1, -1, "SG2");
// listener hosted on SG2 (server 3); no DB URL — events only collected
Runnable runnable = getExecutorForWBCLConfiguration(
"SG2",
"WBCL1",
"com.pivotal.gemfirexd.dbsync.DBSynchronizerTestBase$TestNewGatewayEventListenerNotify",
null, null, true, Integer.valueOf(1), null, Boolean.FALSE, null,
null, null, 100000, null, false);
runnable.run();
// configure the listener to collect events (10 inserts + 10 updates +
// 5 deletes = 25)
SerializableRunnable sr = new SerializableRunnable("Set Events Collector") {
public void run() {
AsyncEventQueue asyncQueue = Misc.getGemFireCache()
.getAsyncEventQueue("WBCL1");
GfxdGatewayEventListener listener = (GfxdGatewayEventListener)asyncQueue
.getAsyncEventListener();
TestNewGatewayEventListenerNotify tgen = (TestNewGatewayEventListenerNotify)listener
.getAsyncEventListenerForTest();
Event[] events = new Event[25];
tgen.setEventsExpected(events);
}
};
serverExecute(3, sr);
Runnable startWBCL = startAsyncEventListener("WBCL1");
clientExecute(1, startWBCL);
// now lets first do an insert & then update
for (int i = 1; i < 11; ++i) {
clientSQLExecute(1, "insert into testtable values(" + i + ",'desc" + i
+ "','add" + i + "')");
}
for (int i = 1; i < 11; ++i) {
clientSQLExecute(1, "update TESTTABLE set description = 'modified"
+ (i + 1) + "' where id = " + i);
}
for (int i = 1; i < 6; ++i) {
clientSQLExecute(1, "Delete from TESTTABLE where id = " + i);
}
// validate data
SerializableRunnable sc = new SerializableRunnable(
"validate callback data") {
public void run() {
try {
AsyncEventQueue asyncQueue = Misc.getGemFireCache()
.getAsyncEventQueue("WBCL1");
GfxdGatewayEventListener listener = (GfxdGatewayEventListener)asyncQueue
.getAsyncEventListener();
TestNewGatewayEventListenerNotify tgen = (TestNewGatewayEventListenerNotify)listener
.getAsyncEventListenerForTest();
// NOTE(review): unbounded wait; relies on the dunit harness timeout
while (tgen.getNumEventsProcessed() != 25) {
Thread.sleep(1000);
}
// events 0..9: inserts carrying the full new row; remember the
// generated (Long) primary keys
Long createdPKs[] = new Long[10];
for (int i = 0; i < 10; ++i) {
Event ev = tgen.getEvents()[i];
assertNotNull(ev);
List<Object> list = ev.getNewRow();
assertEquals(list.size(), 3);
assertEquals(list.get(0), new Integer(i + 1));
assertEquals(list.get(1), "desc" + (i + 1));
assertEquals(list.get(2), "add" + (i + 1));
createdPKs[i] = (Long)ev.getPrimaryKey()[0];
}
// events 10..19: updates with only DESCRIPTION populated and the
// same PK as the corresponding insert
for (int i = 10; i < 20; ++i) {
Event ev = tgen.getEvents()[i];
assertNotNull(ev);
List<Object> list = ev.getNewRow();
assertEquals(list.size(), 3);
assertEquals(list.get(1), "modified" + (i - 8));
assertNull(list.get(0));
assertNull(list.get(2));
assertEquals(ev.getPrimaryKey()[0], createdPKs[i - 10]);
}
// events 20..24: deletes carry no new row, only the PK
for (int i = 20; i < 25; ++i) {
Event ev = tgen.getEvents()[i];
assertNotNull(ev);
List<Object> list = ev.getNewRow();
assertNull(list);
assertEquals(ev.getPrimaryKey()[0], createdPKs[i - 20]);
}
} catch (Exception e) {
throw GemFireXDRuntimeException.newRuntimeException(null, e);
}
}
};
serverExecute(3, sc);
} finally {
clientSQLExecute(1, "drop table TESTTABLE");
}
}
/**
 * Tests event dispatch for a table with a COMPOSITE primary key (ID,
 * ADDRESS): the listener's events must carry the composite key as an
 * Object[] that matches across the insert/update/delete of the same row.
 * Name does not start with "test" so JUnit 3 skips it — presumably disabled
 * pending bug 51213 (TODO confirm).
 */
public void Bug51213testAsyncEventListenerOnMultiColumnPkBasedTable()
throws Exception {
startVMs(1, 2, -1, "SG1", null);
try {
clientSQLExecute(
1,
"create table TESTTABLE (ID int , DESCRIPTION varchar(1024) , ADDRESS varchar(1024),"
+ " constraint pk primary key (ID, ADDRESS)) AsyncEventListener (WBCL1)");
// now lets first do a dummy insert to create bucket on server 1
clientSQLExecute(1,
"insert into testtable values(114,'desc114','add114')");
startServerVMs(1, -1, "SG2");
// listener hosted on SG2 (server 3); no DB URL — events only collected
Runnable runnable = getExecutorForWBCLConfiguration(
"SG2",
"WBCL1",
"com.pivotal.gemfirexd.dbsync.DBSynchronizerTestBase$TestNewGatewayEventListenerNotify",
null, null, true, Integer.valueOf(1), null, Boolean.FALSE, null,
null, null, 100000, null, false);
runnable.run();
// configure the listener to collect events (10 inserts + 10 updates +
// 5 deletes = 25)
SerializableRunnable sr = new SerializableRunnable("Set Events Collector") {
public void run() {
AsyncEventQueue asyncQueue = Misc.getGemFireCache()
.getAsyncEventQueue("WBCL1");
GfxdGatewayEventListener listener = (GfxdGatewayEventListener)asyncQueue
.getAsyncEventListener();
TestNewGatewayEventListenerNotify tgen = (TestNewGatewayEventListenerNotify)listener
.getAsyncEventListenerForTest();
Event[] events = new Event[25];
tgen.setEventsExpected(events);
}
};
serverExecute(3, sr);
Runnable startWBCL = startAsyncEventListener("WBCL1");
clientExecute(1, startWBCL);
// now lets first do an insert & then update
for (int i = 1; i < 11; ++i) {
clientSQLExecute(1, "insert into testtable values(" + i + ",'desc" + i
+ "','add" + i + "')");
}
for (int i = 1; i < 11; ++i) {
clientSQLExecute(1, "update TESTTABLE set description = 'modified"
+ (i + 1) + "' where id = " + i);
}
for (int i = 1; i < 6; ++i) {
clientSQLExecute(1, "Delete from TESTTABLE where id = " + i);
}
// validate data
SerializableRunnable sc = new SerializableRunnable(
"validate callback data") {
public void run() {
try {
AsyncEventQueue asyncQueue = Misc.getGemFireCache()
.getAsyncEventQueue("WBCL1");
GfxdGatewayEventListener listener = (GfxdGatewayEventListener)asyncQueue
.getAsyncEventListener();
TestNewGatewayEventListenerNotify tgen = (TestNewGatewayEventListenerNotify)listener
.getAsyncEventListenerForTest();
// NOTE(review): unbounded wait; relies on the dunit harness timeout
while (tgen.getNumEventsProcessed() != 25) {
getLogWriter().info(
"Number of events processed = "
+ tgen.getNumEventsProcessed()
+ ". Total events expected = " + tgen.getEvents().length);
Thread.sleep(1000);
}
// events 0..9: inserts; remember the composite PK arrays
Object createdPKs[] = new Object[10];
for (int i = 0; i < 10; ++i) {
Event ev = tgen.getEvents()[i];
assertNotNull(ev);
List<Object> list = ev.getNewRow();
assertEquals(list.size(), 3);
assertEquals(list.get(0), new Integer(i + 1));
assertEquals(list.get(1), "desc" + (i + 1));
assertEquals(list.get(2), "add" + (i + 1));
createdPKs[i] = ev.getPrimaryKey();
}
// events 10..19: updates must carry the same composite PK as the
// matching insert
for (int i = 10; i < 20; ++i) {
Event ev = tgen.getEvents()[i];
assertNotNull(ev);
List<Object> list = ev.getNewRow();
assertEquals(list.size(), 3);
assertEquals(list.get(1), "modified" + (i - 8));
assertNull(list.get(0));
assertNull(list.get(2));
Object[] pkArrUpdate = ev.getPrimaryKey();
Object[] pkArrCreate = (Object[])createdPKs[i - 10];
assertTrue(Arrays.equals(pkArrCreate, pkArrUpdate));
}
// events 20..24: deletes carry no new row, only the composite PK
for (int i = 20; i < 25; ++i) {
Event ev = tgen.getEvents()[i];
assertNotNull(ev);
List<Object> list = ev.getNewRow();
assertNull(list);
Object[] pkArrDelete = ev.getPrimaryKey();
Object[] pkArrCreate = (Object[])createdPKs[i - 20];
assertTrue(Arrays.equals(pkArrCreate, pkArrDelete));
}
} catch (Exception e) {
throw GemFireXDRuntimeException.newRuntimeException(null, e);
}
}
};
serverExecute(3, sc);
} finally {
clientSQLExecute(1, "drop table TESTTABLE");
}
}
/** Skip-listener behaviour on a replicated table over an embedded connection. */
public void testSkipListenerBehaviourForAsyncEventListenerReplicate()
    throws Exception {
  skipListenerBehaviourForAsyncEventListener(true, false, -1);
}
/** Skip-listener behaviour on a partitioned table over an embedded connection. */
public void testSkipListenerBehaviourForAsyncEventListenerPR()
    throws Exception {
  skipListenerBehaviourForAsyncEventListener(false, false, -1);
}
/** Skip-listener behaviour on a replicated table over a network connection. */
public void testSkipListenerBehaviourForAsyncEventListenerReplicateUsingNetConnection()
    throws Exception {
  skipListenerBehaviourForAsyncEventListener(true, true, 2726);
}
/** Skip-listener behaviour on a partitioned table over a network connection. */
public void testSkipListenerBehaviourForAsyncEventListenerPRUsingNetConnection()
    throws Exception {
  skipListenerBehaviourForAsyncEventListener(false, true, 2727);
}
/**
 * Shared driver: verifies that DML executed on a connection opened with the
 * skip-listeners attribute produces NO callbacks on the configured async
 * event listener.
 *
 * @param useReplicate create the table as replicated instead of partitioned
 * @param useNetConnection execute the DML over a network (client/server)
 *        connection instead of an embedded one
 * @param port network server port; ignored when {@code useNetConnection} is
 *        {@code false}
 */
private void skipListenerBehaviourForAsyncEventListener(boolean useReplicate,
boolean useNetConnection, int port) throws Exception {
// create one accessor
// create two data store.
try {
startVMs(1, 2, -1, "SG1", null);
clientSQLExecute(
1,
"create table TESTTABLE (ID int not null primary key, "
+ "DESCRIPTION varchar(1024) , ADDRESS varchar(1024)) AsyncEventListener (WBCL1) "
+ (useReplicate ? " replicate " : ""));
// seed a few rows BEFORE the listener starts so updates/deletes below
// have something to touch
clientSQLExecute(1,
"insert into testtable values(114,'desc114','add114')");
clientSQLExecute(1,
"insert into testtable values(115,'desc115','add115')");
clientSQLExecute(1,
"insert into testtable values(116,'desc116','add116')");
if (useNetConnection) {
TestUtil.startNetServer(port, null);
}
Runnable runnable = getExecutorForWBCLConfiguration(
"SG1",
"WBCL1",
"com.pivotal.gemfirexd.dbsync.DBSynchronizerTestBase$TestNewGatewayEventListenerNotify",
null, null, true, Integer.valueOf(1), null, Boolean.FALSE, null,
null, null, 100000, null, false);
runnable.run();
// configure the listener to collect events; expecting ZERO events since
// all DML below uses a skip-listeners connection
SerializableRunnable sr = new SerializableRunnable("Set Events Collector") {
public void run() {
AsyncEventQueue asyncQueue = Misc.getGemFireCache()
.getAsyncEventQueue("WBCL1");
GfxdGatewayEventListener listener = (GfxdGatewayEventListener)asyncQueue
.getAsyncEventListener();
TestNewGatewayEventListenerNotify tgen = (TestNewGatewayEventListenerNotify)listener
.getAsyncEventListenerForTest();
Event[] events = new Event[0];
tgen.setEventsExpected(events);
}
};
serverExecute(1, sr);
serverExecute(2, sr);
Runnable startWBCL = startAsyncEventListener("WBCL1");
clientExecute(1, startWBCL);
// PK based inserts
Properties props = new Properties();
props.put(com.pivotal.gemfirexd.Attribute.SKIP_LISTENERS, "true");
Connection conn = useNetConnection ? TestUtil.getNetConnection(port,
null, props) : TestUtil.getConnection(props);
Statement stmt = conn.createStatement();
stmt.execute("insert into testtable values(1,'desc1','add1')");
stmt.execute("insert into testtable values(2,'desc2','add2')");
// PK based updates
stmt.execute("update TESTTABLE set description = 'modified' where id = 114");
// Bulk updates
stmt.execute("update TESTTABLE set description = 'modified' ");
// PK based delete
stmt.execute("delete from TESTTABLE where id = 114");
// Bulk delete
stmt.execute("delete from TESTTABLE ");
// give the queue a chance to (incorrectly) dispatch anything before
// asserting zero callbacks
Thread.sleep(3000);
// validate data
SerializableRunnable sc = new SerializableRunnable(
"validate callback data") {
public void run() {
try {
AsyncEventQueue asyncQueue = Misc.getGemFireCache()
.getAsyncEventQueue("WBCL1");
GfxdGatewayEventListener listener = (GfxdGatewayEventListener)asyncQueue
.getAsyncEventListener();
TestNewGatewayEventListenerNotify tgen = (TestNewGatewayEventListenerNotify)listener
.getAsyncEventListenerForTest();
assertFalse(tgen.exceptionOccured);
assertEquals(0, tgen.getNumCallbacks());
}
catch (Exception e) {
throw GemFireXDRuntimeException.newRuntimeException(null, e);
}
}
};
serverExecute(1, sc);
serverExecute(2, sc);
}
finally {
if (useNetConnection) {
TestUtil.stopNetServer();
}
}
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.ml.training;
import org.dmg.pmml.BayesInput;
import org.dmg.pmml.BayesInputs;
import org.dmg.pmml.BayesOutput;
import org.dmg.pmml.DataDictionary;
import org.dmg.pmml.DataField;
import org.dmg.pmml.DataType;
import org.dmg.pmml.FieldName;
import org.dmg.pmml.FieldUsageType;
import org.dmg.pmml.GaussianDistribution;
import org.dmg.pmml.MiningField;
import org.dmg.pmml.MiningFunctionType;
import org.dmg.pmml.MiningSchema;
import org.dmg.pmml.NaiveBayesModel;
import org.dmg.pmml.OpType;
import org.dmg.pmml.PMML;
import org.dmg.pmml.PairCounts;
import org.dmg.pmml.TargetValueCount;
import org.dmg.pmml.TargetValueCounts;
import org.dmg.pmml.TargetValueStat;
import org.dmg.pmml.TargetValueStats;
import org.dmg.pmml.Value;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats;
import org.jpmml.model.JAXBUtil;
import javax.xml.bind.JAXBException;
import javax.xml.transform.stream.StreamResult;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.Charset;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import static org.elasticsearch.search.aggregations.AggregationBuilders.extendedStats;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
/**
 * {@link ModelTrainer} that trains a naive bayes classification model from
 * terms / extended-stats aggregations and serializes it as PMML.
 */
public class NaiveBayesModelTrainer implements ModelTrainer {
@Override
public String modelType() {
    // Identifier under which this trainer is selected.
    return "naive_bayes";
}
@Override
public TrainingSession createTrainingSession(MappingMetaData mappingMetaData, List<ModelInputField> inputs, ModelTargetField target,
                                             Settings settings) {
    // Each session builds its own aggregation request from the mapping.
    TrainingSession session = new NaiveBayesTrainingSession(mappingMetaData, inputs, target, settings);
    return session;
}
/**
 * Training session that gathers naive bayes sufficient statistics with a
 * single terms aggregation over the class (target) field — one
 * sub-aggregation per input field — and turns the aggregation response into
 * a PMML-serialized {@link NaiveBayesModel}.
 */
private static class NaiveBayesTrainingSession implements TrainingSession {

    /** Per-class terms aggregation carrying one sub-aggregation per input field. */
    final TermsAggregationBuilder termsAggregationBuilder;

    private NaiveBayesTrainingSession(MappingMetaData mappingMetaData, List<ModelInputField> fields, ModelTargetField target,
                                      Settings settings) {
        // One bucket per class value; keep every bucket and order by term so
        // class positions are stable across shards/responses.
        TermsAggregationBuilder topLevelClassAgg = terms(target.getName());
        topLevelClassAgg.field(target.getName());
        topLevelClassAgg.size(Integer.MAX_VALUE);
        topLevelClassAgg.shardMinDocCount(1);
        topLevelClassAgg.minDocCount(1);
        topLevelClassAgg.order(Terms.Order.term(true));
        Map<String, Object> fieldMappings = getFieldMappings(mappingMetaData);
        for (ModelInputField field : fields) {
            String fieldType = getFieldType(fieldMappings, field.getName());
            if (fieldType == null) {
                throw new IllegalArgumentException("input field [" + field.getName() + "] not found");
            }
            if (fieldType.equals("text") || fieldType.equals("keyword")) {
                // categorical input: per-class term counts
                topLevelClassAgg.subAggregation(terms(field.getName()).field(field.getName())
                        .size(Integer.MAX_VALUE).shardMinDocCount(1).minDocCount(1)
                        .order(Terms.Order.term(true)));
            } else if (fieldType.equals("double") || fieldType.equals("float") || fieldType.equals("integer") ||
                    fieldType.equals("long")) {
                // numeric input: per-class mean/variance via extended stats
                topLevelClassAgg.subAggregation(extendedStats(field.getName()).field(field.getName()));
            } else {
                throw new UnsupportedOperationException("have not implemented naive bayes training for anything but " +
                        "number and string field yet");
            }
        }
        termsAggregationBuilder = topLevelClassAgg;
    }

    /**
     * Returns the "properties" section of the index mapping.
     * (Renamed from the typo'd {@code getFiledMappings}.)
     */
    @SuppressWarnings("unchecked")
    private Map<String, Object> getFieldMappings(MappingMetaData mappingMetaData) {
        try {
            return (Map<String, Object>) mappingMetaData.sourceAsMap().get("properties");
        } catch (IOException ex) {
            throw new IllegalStateException(ex);
        }
    }

    /**
     * Returns the mapping type of {@code field}, or {@code null} when the
     * field has no mapping entry.
     */
    @SuppressWarnings("unchecked")
    private String getFieldType(Map<String, Object> fieldMappings, String field) {
        Map<String, Object> attributes = (Map<String, Object>) fieldMappings.get(field);
        // BUGFIX: an unmapped field used to cause an NPE on the line below,
        // masking the caller's intended "input field [...] not found" error.
        if (attributes == null) {
            return null;
        }
        return (String) attributes.get("type");
    }

    @Override
    public AggregationBuilder trainingRequest() {
        return termsAggregationBuilder;
    }

    /**
     * Converts the aggregation response into a PMML document (as a String)
     * describing the trained naive bayes model.
     */
    @Override
    public String model(SearchResponse searchResponse) {
        NaiveBayesModel naiveBayesModel = new NaiveBayesModel();
        Aggregations aggs = searchResponse.getAggregations();
        Terms classAgg = (Terms) aggs.asList().get(0);
        // collect per-class document counts and labels
        int numClasses = classAgg.getBuckets().size();
        long[] classCounts = new long[numClasses];
        String[] classLabels = new String[numClasses];
        int classCounter = 0;
        for (Terms.Bucket bucket : classAgg.getBuckets()) {
            classCounts[classCounter] = bucket.getDocCount();
            classLabels[classCounter] = bucket.getKeyAsString();
            classCounter++;
        }
        if (classCounter < 2) {
            throw new RuntimeException("Need at least two classes for naive bayes!");
        }
        setTargetValueCounts(naiveBayesModel, classAgg, classCounts, classLabels);
        // field, value, class -> count
        TreeMap<String, TreeMap<String, TreeMap<String, Long>>> stringFieldValueCounts = new TreeMap<>();
        TreeMap<String, TreeSet<String>> allTermsPerField = new TreeMap<>();
        // field, class -> {mean, variance}
        TreeMap<String, TreeMap<String, Map<String, Double>>> numericFieldStats = new TreeMap<>();
        for (Terms.Bucket bucket : classAgg.getBuckets()) {
            String className = bucket.getKeyAsString();
            for (Aggregation aggregation : bucket.getAggregations()) {
                String fieldName = aggregation.getName();
                if (aggregation instanceof Terms) {
                    Terms termAgg = (Terms) aggregation;
                    // init the data structure if not present
                    if (stringFieldValueCounts.containsKey(fieldName) == false) {
                        stringFieldValueCounts.put(fieldName, new TreeMap<>());
                        allTermsPerField.put(fieldName, new TreeSet<>());
                    }
                    TreeMap<String, TreeMap<String, Long>> valueCounts = stringFieldValueCounts.get(fieldName);
                    for (Terms.Bucket termBucket : termAgg.getBuckets()) {
                        String value = termBucket.getKeyAsString();
                        if (valueCounts.containsKey(value) == false) {
                            valueCounts.put(value, new TreeMap<>());
                        }
                        TreeMap<String, Long> termCountsPerClass = valueCounts.get(value);
                        allTermsPerField.get(fieldName).add(termBucket.getKeyAsString());
                        termCountsPerClass.put(className, termBucket.getDocCount());
                    }
                } else if (aggregation instanceof ExtendedStats) {
                    ExtendedStats extendedStats = (ExtendedStats) aggregation;
                    if (numericFieldStats.containsKey(fieldName) == false) {
                        numericFieldStats.put(fieldName, new TreeMap<>());
                    }
                    Map<String, Double> stats = new HashMap<>();
                    stats.put("mean", extendedStats.getAvg());
                    stats.put("variance", extendedStats.getVariance());
                    numericFieldStats.get(fieldName).put(className, stats);
                } else {
                    throw new RuntimeException("unsupported agg " + aggregation.getClass().getName());
                }
            }
        }
        setBayesInputs(naiveBayesModel, stringFieldValueCounts, numericFieldStats, classLabels);
        naiveBayesModel.setFunctionName(MiningFunctionType.CLASSIFICATION);
        final PMML pmml = new PMML();
        setDataDictionary(pmml, allTermsPerField, numericFieldStats.keySet());
        setMiningFields(naiveBayesModel, allTermsPerField.keySet(), numericFieldStats.keySet(), classAgg.getName());
        naiveBayesModel.setThreshold(1.0 / searchResponse.getHits().totalHits());
        pmml.addModels(naiveBayesModel);
        // marshal under doPrivileged since JAXB may need reflective permissions
        final StreamResult streamResult = new StreamResult();
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        streamResult.setOutputStream(outputStream);
        AccessController.doPrivileged(new PrivilegedAction<Object>() {
            public Object run() {
                try {
                    JAXBUtil.marshal(pmml, streamResult);
                } catch (JAXBException e) {
                    throw new RuntimeException("No idea what went wrong here", e);
                }
                return null;
            }
        });
        return new String(outputStream.toByteArray(), Charset.defaultCharset());
    }
}
/**
 * Populates the model's {@link MiningSchema}: every categorical and numeric
 * input becomes an ACTIVE mining field, the class field a PREDICTED one.
 * Categorical fields are emitted before numeric ones, preserving the
 * original schema order.
 */
private static void setMiningFields(NaiveBayesModel naiveBayesModel, Set<String> categoricalFields, Set<String> numericFields,
                                    String classField) {
    MiningSchema miningSchema = new MiningSchema();
    // DEDUP: both input kinds previously repeated the same three-line body.
    for (String fieldName : categoricalFields) {
        miningSchema.addMiningFields(activeField(fieldName));
    }
    for (String fieldName : numericFields) {
        miningSchema.addMiningFields(activeField(fieldName));
    }
    // the class field is what the model predicts
    MiningField classMiningField = new MiningField();
    classMiningField.setName(new FieldName(classField));
    classMiningField.setUsageType(FieldUsageType.PREDICTED);
    miningSchema.addMiningFields(classMiningField);
    naiveBayesModel.setMiningSchema(miningSchema);
}

/** Builds a {@link MiningField} marked ACTIVE (a model input). */
private static MiningField activeField(String fieldName) {
    MiningField miningField = new MiningField();
    miningField.setName(new FieldName(fieldName));
    miningField.setUsageType(FieldUsageType.ACTIVE);
    return miningField;
}
/**
 * Builds the {@code BayesInputs} element of the PMML model: per-class value counts for every
 * categorical field, and per-class Gaussian statistics for every numeric field.
 *
 * @param naiveBayesModel model to attach the inputs to
 * @param stringFieldValueCounts field name -> field value -> class name -> observed count
 * @param numericFieldStats field name -> class name -> stats map holding "mean" and "variance"
 * @param classNames all class labels; classes unobserved for a value get an explicit count of 0
 */
private static void setBayesInputs(NaiveBayesModel naiveBayesModel,
        TreeMap<String, TreeMap<String, TreeMap<String, Long>>> stringFieldValueCounts,
        TreeMap<String, TreeMap<String, Map<String, Double>>> numericFieldStats, String[] classNames) {
    BayesInputs bayesInputs = new BayesInputs();
    // Categorical fields: one PairCounts per observed value, holding a count for every class.
    for (Map.Entry<String, TreeMap<String, TreeMap<String, Long>>> categoricalField : stringFieldValueCounts.entrySet()) {
        String fieldName = categoricalField.getKey();
        BayesInput bayesInput = new BayesInput();
        bayesInput.setFieldName(new FieldName(fieldName));
        for (Map.Entry<String, TreeMap<String, Long>> valueCounts : categoricalField.getValue().entrySet()) {
            String value = valueCounts.getKey();
            PairCounts pairCounts = new PairCounts();
            pairCounts.setValue(value);
            TargetValueCounts targetValueCounts = new TargetValueCounts();
            TreeMap<String, Long> classCounts = valueCounts.getValue();
            for (String className : classNames) {
                if (classCounts.containsKey(className)) {
                    targetValueCounts.addTargetValueCounts(new TargetValueCount().setValue(className).setCount(classCounts.get(className)));
                } else {
                    // emit an explicit zero so every class appears under every value
                    targetValueCounts.addTargetValueCounts(new TargetValueCount().setValue(className).setCount(0));
                }
            }
            pairCounts.setTargetValueCounts(targetValueCounts);
            bayesInput.addPairCounts(pairCounts);
        }
        bayesInputs.addBayesInputs(bayesInput);
    }
    // Numeric fields: one GaussianDistribution (mean/variance) per class.
    for (Map.Entry<String, TreeMap<String, Map<String, Double>>> continuousField : numericFieldStats.entrySet()) {
        String fieldName = continuousField.getKey();
        BayesInput bayesInput = new BayesInput();
        bayesInput.setFieldName(new FieldName(fieldName));
        TargetValueStats targetValueStats = new TargetValueStats();
        for (Map.Entry<String, Map<String, Double>> valueStats : continuousField.getValue().entrySet()) {
            String className = valueStats.getKey();
            GaussianDistribution gaussianDistribution = new GaussianDistribution();
            gaussianDistribution.setMean(valueStats.getValue().get("mean"));
            gaussianDistribution.setVariance(valueStats.getValue().get("variance"));
            TargetValueStat targetValueStat = new TargetValueStat();
            targetValueStat.setValue(className);
            targetValueStat.setContinuousDistribution(gaussianDistribution);
            targetValueStats.addTargetValueStats(targetValueStat);
        }
        bayesInput.setTargetValueStats(targetValueStats);
        bayesInputs.addBayesInputs(bayesInput);
    }
    naiveBayesModel.setBayesInputs(bayesInputs);
}
/**
 * Attaches a DataDictionary to the PMML document: a CATEGORICAL string field per entry in
 * {@code allTermsPerField} (listing every observed term as a valid value), followed by a
 * CONTINUOUS double field per name in {@code numericFieldsNames}.
 */
private static void setDataDictionary(PMML pmml, TreeMap<String, TreeSet<String>> allTermsPerField,
        Set<String> numericFieldsNames) {
    DataDictionary dictionary = new DataDictionary();
    for (Map.Entry<String, TreeSet<String>> termsByField : allTermsPerField.entrySet()) {
        DataField categorical = new DataField();
        categorical.setName(new FieldName(termsByField.getKey()));
        categorical.setOpType(OpType.CATEGORICAL);
        categorical.setDataType(DataType.STRING);
        for (String term : termsByField.getValue()) {
            categorical.addValues(new Value(term));
        }
        dictionary.addDataFields(categorical);
    }
    for (String numericName : numericFieldsNames) {
        DataField continuous = new DataField();
        continuous.setName(new FieldName(numericName));
        continuous.setOpType(OpType.CONTINUOUS);
        // TODO: handle ints etc.
        continuous.setDataType(DataType.DOUBLE);
        dictionary.addDataFields(continuous);
    }
    pmml.setDataDictionary(dictionary);
}
/**
 * Records the overall per-class counts as the model's BayesOutput: the output field is named
 * after the class aggregation, and each class label carries its total observation count.
 * Assumes {@code classCounts[i]} corresponds to {@code classLabels[i]}.
 */
private static void setTargetValueCounts(NaiveBayesModel naiveBayesModel, Terms classAgg, long[] classCounts, String[] classLabels) {
    TargetValueCounts perClassTotals = new TargetValueCounts();
    for (int index = 0; index < classLabels.length; index++) {
        perClassTotals.addTargetValueCounts(
                new TargetValueCount().setValue(classLabels[index]).setCount(classCounts[index]));
    }
    BayesOutput bayesOutput = new BayesOutput()
            .setFieldName(new FieldName(classAgg.getName()))
            .setTargetValueCounts(perClassTotals);
    naiveBayesModel.setBayesOutput(bayesOutput);
}
}
| |
/**
* ISC License terms (http://opensource.org/licenses/isc-license):
* <p>
* Copyright (c) 2015, Patrick Lehner <lehner (dot) patrick (at) gmx (dot) de>
* <p>
* Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby
* granted, provided that the above copyright notice and this permission notice appear in all copies.
* <p>
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
* INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN
* AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
* PERFORMANCE OF THIS SOFTWARE.
*/
package nevik;
import java.util.*;
import java.util.function.*;
import java.util.stream.Collector;
import java.util.stream.Collectors;
import java.util.stream.DoubleStream;
import java.util.stream.IntStream;
import java.util.stream.LongStream;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
/**
 * A decorator around a {@link java.util.stream.Stream} that adds convenience factories
 * ({@link #of(Object...)}, {@link #makeStream(Collection)}, {@link #repeat(Object, long)}, ...)
 * and terminal shortcuts ({@link #toList()}, {@link #toSet()}, {@link #asMapKeys(Function)}, ...),
 * and re-wraps every intermediate operation so the fluent chain stays fancy.
 * <p>
 * All {@link Stream} methods delegate to the wrapped stream, so the usual stream rules apply:
 * a stream is single-use, and lambdas passed to it should be non-interfering and stateless.
 *
 * @param <E> type of elements in this stream
 * @author Patrick Lehner
 * @since 2015-08-26
 */
public class FancyStream<E> implements Stream<E> {
    /** The stream all operations delegate to; never {@code null}. */
    protected final Stream<E> underlyingStream;

    /**
     * Make a new {@code FancyStream} containing all the given elements in the given order.
     *
     * @param elements elements to create the stream from
     * @param <E> type of elements in this stream
     * @return a new {@code FancyStream} from the given {@code elements}
     *
     * @see #makeStream(Object[])
     * @see java.util.stream.Stream#of(Object[])
     */
    @SafeVarargs
    public static <E> FancyStream<E> of(final E... elements) {
        return new FancyStream<>(Stream.of(elements));
    }

    /**
     * Make a new {@code FancyStream} containing all the given elements in the given order.
     * <p>
     * Alias of {@link #of(Object[])} for use with static imports (to avoid name clashes and
     * improve readability).
     *
     * @param elements elements to create the stream from
     * @param <E> type of elements in this stream
     * @return a new {@code FancyStream} from the given {@code elements}
     *
     * @see #of(Object[])
     */
    @SafeVarargs
    public static <E> FancyStream<E> makeStream(final E... elements) {
        return new FancyStream<>(Stream.of(elements));
    }

    /**
     * Make a new {@code FancyStream} from the given stream.
     * <p>
     * Convenience factory equivalent to the constructor {@link #FancyStream(java.util.stream.Stream)};
     * can be imported statically to make using code shorter and more readable.
     *
     * @param underlyingStream the stream to make fancy
     * @param <E> type of elements in the resulting fancy stream
     * @return a new {@code FancyStream} containing all the elements from the given stream
     *
     * @throws NullPointerException if {@code underlyingStream} is {@code null}
     * @see #FancyStream(java.util.stream.Stream)
     */
    public static <E> FancyStream<E> makeFancy(final Stream<E> underlyingStream) {
        return new FancyStream<>(underlyingStream);
    }

    /**
     * Make a new {@code FancyStream} containing all the elements from the given collection, in the
     * order returned by the collection's {@link java.util.Collection#stream() stream()} method.
     *
     * @param collection collection whose elements to stream
     * @param <E> type of elements in the resulting fancy stream
     * @return a new {@code FancyStream} containing all the elements from the given collection
     *
     * @throws NullPointerException if {@code collection} is {@code null}
     */
    public static <E> FancyStream<E> makeStream(final Collection<E> collection) {
        return makeFancy(collection.stream());
    }

    /**
     * Make a new (sequential) {@code FancyStream} containing all the elements from the given
     * {@link Iterable}, in the order returned by its {@link java.util.Spliterator}.
     *
     * @param iterable iterable whose elements to stream
     * @param <E> type of elements in the resulting fancy stream
     * @return a new {@code FancyStream} containing all the elements from the given iterable
     *
     * @throws NullPointerException if {@code iterable} is {@code null}
     * @see java.util.stream.StreamSupport#stream(java.util.Spliterator, boolean)
     */
    public static <E> FancyStream<E> makeStream(final Iterable<E> iterable) {
        return makeStream(iterable, /*parallel?*/false);
    }

    /**
     * Make a new {@code FancyStream} containing all the elements from the given {@link Iterable},
     * in the order returned by its {@link java.util.Spliterator}. The stream can be made parallel
     * by setting the corresponding argument to {@code true}.
     *
     * @param iterable iterable whose elements to stream
     * @param parallel whether to make the resulting stream parallel
     * @param <E> type of elements in the resulting fancy stream
     * @return a new {@code FancyStream} containing all the elements from the given iterable
     *
     * @throws NullPointerException if {@code iterable} is {@code null}
     * @see java.util.stream.StreamSupport#stream(java.util.Spliterator, boolean)
     * @see #makeStream(Iterable)
     */
    private static <E> FancyStream<E> makeStream(final Iterable<E> iterable, final boolean parallel) {
        return makeFancy(StreamSupport.stream(Objects.requireNonNull(iterable.spliterator()), parallel));
    }

    /**
     * Make a fancy stream containing the elements of {@code a}, followed by the elements of {@code b}.
     *
     * @param a first stream of elements
     * @param b second stream of elements
     * @param <E> type of elements in the resulting fancy stream
     * @return new fancy stream containing all elements from {@code a} and {@code b}
     *
     * @throws NullPointerException if {@code a} or {@code b} is {@code null}
     */
    public static <E> FancyStream<E> concat(final Stream<? extends E> a, final Stream<? extends E> b) {
        return makeFancy(Stream.concat(a, b));
    }

    /**
     * Make a new sequential unordered {@code FancyStream} that contains the given {@code element}
     * exactly {@code n} times.
     *
     * @param element the item to repeat; may be {@code null}
     * @param n the number of times to repeat the item
     * @param <E> the type of the element
     * @return a new sequential unordered {@code FancyStream} that repeats {@code element} exactly
     *         {@code n} times
     * @throws IllegalArgumentException if {@code n} is negative (from {@link Stream#limit(long)})
     */
    public static <E> FancyStream<E> repeat(final E element, final long n) {
        return makeFancy(Stream.generate(() -> element).limit(n));
    }

    /**
     * Make a new infinite sequential unordered {@code FancyStream} repeating the given {@code element}.
     *
     * @param element the item to repeat; may be {@code null}
     * @param <E> the type of the element
     * @return a new infinite sequential unordered {@code FancyStream} repeating {@code element}
     */
    public static <E> FancyStream<E> repeat(final E element) {
        return makeFancy(Stream.generate(() -> element));
    }

    /**
     * Make a new {@code FancyStream} from the given stream.
     *
     * @param underlyingStream the stream to make fancy
     * @throws NullPointerException if {@code underlyingStream} is {@code null}
     * @see #makeFancy(java.util.stream.Stream)
     */
    public FancyStream(final Stream<E> underlyingStream) {
        this.underlyingStream = Objects.requireNonNull(underlyingStream);
    }

    /**
     * Collect the elements of this stream into a {@link java.util.Set}.
     *
     * @return a set containing all elements of this stream
     * @see java.util.stream.Collectors#toSet()
     */
    public Set<E> toSet() {
        return collect(Collectors.toSet());
    }

    /**
     * Collect the elements of this stream into an unmodifiable {@link java.util.Set}.
     *
     * @return an unmodifiable set containing all elements of this stream
     * @see #toSet()
     */
    public Set<E> toUnmodifiableSet() {
        return Collections.unmodifiableSet(toSet());
    }

    /**
     * Collect the elements of this stream into a {@link java.util.List}.
     *
     * @return a list containing all elements of this stream
     * @see java.util.stream.Collectors#toList()
     */
    public List<E> toList() {
        return collect(Collectors.toList());
    }

    /**
     * Collect the elements of this stream into an unmodifiable {@link java.util.List}.
     *
     * @return an unmodifiable list containing all elements of this stream
     * @see #toList()
     */
    public List<E> toUnmodifiableList() {
        return Collections.unmodifiableList(toList());
    }

    /**
     * Collect the elements of this stream into a {@link java.util.Collection} created by the given
     * {@code collectionFactory}.
     *
     * @param collectionFactory a factory to create a new, empty {@code Collection} of the
     *            appropriate type
     * @param <C> the desired collection type
     * @return a collection containing all elements of this stream
     * @see java.util.stream.Collectors#toCollection(java.util.function.Supplier)
     */
    public <C extends Collection<E>> C toCollection(final Supplier<C> collectionFactory) {
        return collect(Collectors.toCollection(collectionFactory));
    }

    /**
     * Collect the elements of this stream into a map, using the stream's elements as the keys and
     * values provided by the given {@code valueMapper}.
     *
     * @param valueMapper a function to derive a value from a key
     * @param <V> the type for values in the resulting map
     * @return a map from all elements of this stream as keys to the values provided by the given
     *         {@code valueMapper}
     * @throws IllegalStateException if the stream contains duplicate elements (duplicate keys,
     *             per {@link Collectors#toMap(Function, Function)})
     */
    public <V> Map<E, V> asMapKeys(final Function<? super E, ? extends V> valueMapper) {
        return collect(Collectors.toMap(Function.identity(), valueMapper));
    }

    /**
     * Return a stream that contains all elements from this stream followed by all elements of the
     * given {@code otherStream}.
     *
     * @param otherStream another stream of elements to append to this one
     * @return the combined stream of the elements in this and the other stream
     * @throws NullPointerException if {@code otherStream} is {@code null}
     * @see #concat(java.util.stream.Stream, java.util.stream.Stream)
     */
    public FancyStream<E> append(final Stream<? extends E> otherStream) {
        return concat(this, otherStream);
    }

    /**
     * Returns a stream consisting of the elements of this stream that match the given predicate.
     * <p>
     * This is an intermediate operation.
     *
     * @param predicate a <i>non-interfering</i>, <i>stateless</i> predicate to apply to each
     *            element to determine if it should be included
     * @return the new stream
     * @see java.util.stream.Stream#filter(java.util.function.Predicate)
     */
    @Override
    public FancyStream<E> filter(final Predicate<? super E> predicate) {
        return makeFancy(underlyingStream.filter(predicate));
    }

    /**
     * Returns a stream consisting of the elements of this stream that <b>do not</b> match the
     * given predicate.
     * <p>
     * This is an intermediate operation.
     * <p>
     * (This method is like {@link #filter(java.util.function.Predicate)}, but uses the given
     * predicate to determine which elements to remove instead of which to keep.)
     *
     * @param predicate a <i>non-interfering</i>, <i>stateless</i> predicate to apply to each
     *            element to determine if it should <b>not</b> be included
     * @return the new stream
     * @throws NullPointerException if {@code predicate} is {@code null}
     * @see #filter(java.util.function.Predicate)
     * @see java.util.stream.Stream#filter(java.util.function.Predicate)
     */
    public FancyStream<E> filterInvert(final Predicate<? super E> predicate) {
        return filter(predicate.negate());
    }

    /**
     * Returns a fancy stream consisting of the results of applying the given function to the
     * elements of this stream.
     *
     * @param mapper a <i>non-interfering</i>, <i>stateless</i> function to apply to each element
     * @param <R> the element type of the new stream
     * @return the new stream
     * @see java.util.stream.Stream#map(java.util.function.Function)
     */
    @Override
    public <R> FancyStream<R> map(final Function<? super E, ? extends R> mapper) {
        return makeFancy(underlyingStream.map(mapper));
    }

    /**
     * Map each element with one of two functions, chosen per element by {@code splitPredicate}:
     * elements matching the predicate go through {@code matchingMapper}, all others through
     * {@code nonMatchingMapper}.
     *
     * @param splitPredicate decides which mapper handles each element
     * @param matchingMapper applied to elements for which the predicate is {@code true}
     * @param nonMatchingMapper applied to elements for which the predicate is {@code false}
     * @param <R> the element type of the new stream
     * @return the new stream
     * @throws NullPointerException if any argument is {@code null}
     */
    public <R> FancyStream<R> splitMap(final Predicate<? super E> splitPredicate, //
            final Function<? super E, ? extends R> matchingMapper, //
            final Function<? super E, ? extends R> nonMatchingMapper) {
        Objects.requireNonNull(splitPredicate);
        Objects.requireNonNull(matchingMapper);
        Objects.requireNonNull(nonMatchingMapper);
        return map(e -> splitPredicate.test(e) ? matchingMapper.apply(e) : nonMatchingMapper.apply(e));
    }

    // ------------------------------------------------------------------
    // Plain delegations to the underlying stream. Intermediate operations
    // returning Stream<E>/Stream<R> are re-wrapped so chains stay fancy;
    // primitive-stream and terminal operations are passed through as-is.
    // ------------------------------------------------------------------

    @Override
    public IntStream mapToInt(final ToIntFunction<? super E> mapper) {
        return underlyingStream.mapToInt(mapper);
    }

    @Override
    public LongStream mapToLong(final ToLongFunction<? super E> mapper) {
        return underlyingStream.mapToLong(mapper);
    }

    @Override
    public DoubleStream mapToDouble(final ToDoubleFunction<? super E> mapper) {
        return underlyingStream.mapToDouble(mapper);
    }

    @Override
    public <R> FancyStream<R> flatMap(final Function<? super E, ? extends Stream<? extends R>> mapper) {
        return makeFancy(underlyingStream.flatMap(mapper));
    }

    @Override
    public IntStream flatMapToInt(final Function<? super E, ? extends IntStream> mapper) {
        return underlyingStream.flatMapToInt(mapper);
    }

    @Override
    public LongStream flatMapToLong(final Function<? super E, ? extends LongStream> mapper) {
        return underlyingStream.flatMapToLong(mapper);
    }

    @Override
    public DoubleStream flatMapToDouble(final Function<? super E, ? extends DoubleStream> mapper) {
        return underlyingStream.flatMapToDouble(mapper);
    }

    @Override
    public FancyStream<E> distinct() {
        return makeFancy(underlyingStream.distinct());
    }

    @Override
    public FancyStream<E> sorted() {
        return makeFancy(underlyingStream.sorted());
    }

    @Override
    public FancyStream<E> sorted(final Comparator<? super E> comparator) {
        return makeFancy(underlyingStream.sorted(comparator));
    }

    @Override
    public FancyStream<E> peek(final Consumer<? super E> action) {
        return makeFancy(underlyingStream.peek(action));
    }

    @Override
    public FancyStream<E> limit(final long maxSize) {
        return makeFancy(underlyingStream.limit(maxSize));
    }

    @Override
    public FancyStream<E> skip(final long n) {
        return makeFancy(underlyingStream.skip(n));
    }

    @Override
    public void forEach(final Consumer<? super E> action) {
        underlyingStream.forEach(action);
    }

    @Override
    public void forEachOrdered(final Consumer<? super E> action) {
        underlyingStream.forEachOrdered(action);
    }

    @Override
    public Object[] toArray() {
        return underlyingStream.toArray();
    }

    @Override
    public <A> A[] toArray(final IntFunction<A[]> generator) {
        return underlyingStream.toArray(generator);
    }

    @Override
    public E reduce(final E identity, final BinaryOperator<E> accumulator) {
        return underlyingStream.reduce(identity, accumulator);
    }

    @Override
    public Optional<E> reduce(final BinaryOperator<E> accumulator) {
        return underlyingStream.reduce(accumulator);
    }

    @Override
    public <U> U reduce(final U identity, final BiFunction<U, ? super E, U> accumulator,
            final BinaryOperator<U> combiner) {
        return underlyingStream.reduce(identity, accumulator, combiner);
    }

    @Override
    public <R> R collect(final Supplier<R> supplier, final BiConsumer<R, ? super E> accumulator,
            final BiConsumer<R, R> combiner) {
        return underlyingStream.collect(supplier, accumulator, combiner);
    }

    @Override
    public <R, A> R collect(final Collector<? super E, A, R> collector) {
        return underlyingStream.collect(collector);
    }

    @Override
    public Optional<E> min(final Comparator<? super E> comparator) {
        return underlyingStream.min(comparator);
    }

    @Override
    public Optional<E> max(final Comparator<? super E> comparator) {
        return underlyingStream.max(comparator);
    }

    @Override
    public long count() {
        return underlyingStream.count();
    }

    @Override
    public boolean anyMatch(final Predicate<? super E> predicate) {
        return underlyingStream.anyMatch(predicate);
    }

    @Override
    public boolean allMatch(final Predicate<? super E> predicate) {
        return underlyingStream.allMatch(predicate);
    }

    @Override
    public boolean noneMatch(final Predicate<? super E> predicate) {
        return underlyingStream.noneMatch(predicate);
    }

    @Override
    public Optional<E> findFirst() {
        return underlyingStream.findFirst();
    }

    @Override
    public Optional<E> findAny() {
        return underlyingStream.findAny();
    }

    @Override
    public Iterator<E> iterator() {
        return underlyingStream.iterator();
    }

    @Override
    public Spliterator<E> spliterator() {
        return underlyingStream.spliterator();
    }

    @Override
    public boolean isParallel() {
        return underlyingStream.isParallel();
    }

    @Override
    public FancyStream<E> sequential() {
        return makeFancy(underlyingStream.sequential());
    }

    @Override
    public FancyStream<E> parallel() {
        return makeFancy(underlyingStream.parallel());
    }

    @Override
    public FancyStream<E> unordered() {
        return makeFancy(underlyingStream.unordered());
    }

    @Override
    public FancyStream<E> onClose(final Runnable closeHandler) {
        return makeFancy(underlyingStream.onClose(closeHandler));
    }

    @Override
    public void close() {
        underlyingStream.close();
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.planner.optimizations;
import com.facebook.presto.Session;
import com.facebook.presto.SystemSessionProperties;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.metadata.TableLayoutResult;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.Constraint;
import com.facebook.presto.spi.GroupingProperty;
import com.facebook.presto.spi.LocalProperty;
import com.facebook.presto.spi.SortingProperty;
import com.facebook.presto.spi.predicate.NullableValue;
import com.facebook.presto.spi.predicate.TupleDomain;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.sql.parser.SqlParser;
import com.facebook.presto.sql.planner.DomainTranslator;
import com.facebook.presto.sql.planner.ExpressionInterpreter;
import com.facebook.presto.sql.planner.LookupSymbolResolver;
import com.facebook.presto.sql.planner.Partitioning;
import com.facebook.presto.sql.planner.PartitioningScheme;
import com.facebook.presto.sql.planner.PlanNodeIdAllocator;
import com.facebook.presto.sql.planner.Symbol;
import com.facebook.presto.sql.planner.SymbolAllocator;
import com.facebook.presto.sql.planner.plan.AggregationNode;
import com.facebook.presto.sql.planner.plan.ApplyNode;
import com.facebook.presto.sql.planner.plan.Assignments;
import com.facebook.presto.sql.planner.plan.ChildReplacer;
import com.facebook.presto.sql.planner.plan.DistinctLimitNode;
import com.facebook.presto.sql.planner.plan.EnforceSingleRowNode;
import com.facebook.presto.sql.planner.plan.ExchangeNode;
import com.facebook.presto.sql.planner.plan.ExplainAnalyzeNode;
import com.facebook.presto.sql.planner.plan.FilterNode;
import com.facebook.presto.sql.planner.plan.GroupIdNode;
import com.facebook.presto.sql.planner.plan.IndexJoinNode;
import com.facebook.presto.sql.planner.plan.IndexSourceNode;
import com.facebook.presto.sql.planner.plan.JoinNode;
import com.facebook.presto.sql.planner.plan.LateralJoinNode;
import com.facebook.presto.sql.planner.plan.LimitNode;
import com.facebook.presto.sql.planner.plan.MarkDistinctNode;
import com.facebook.presto.sql.planner.plan.OutputNode;
import com.facebook.presto.sql.planner.plan.PlanNode;
import com.facebook.presto.sql.planner.plan.PlanVisitor;
import com.facebook.presto.sql.planner.plan.ProjectNode;
import com.facebook.presto.sql.planner.plan.RowNumberNode;
import com.facebook.presto.sql.planner.plan.SemiJoinNode;
import com.facebook.presto.sql.planner.plan.SortNode;
import com.facebook.presto.sql.planner.plan.TableFinishNode;
import com.facebook.presto.sql.planner.plan.TableScanNode;
import com.facebook.presto.sql.planner.plan.TableWriterNode;
import com.facebook.presto.sql.planner.plan.TopNNode;
import com.facebook.presto.sql.planner.plan.TopNRowNumberNode;
import com.facebook.presto.sql.planner.plan.UnionNode;
import com.facebook.presto.sql.planner.plan.UnnestNode;
import com.facebook.presto.sql.planner.plan.ValuesNode;
import com.facebook.presto.sql.planner.plan.WindowNode;
import com.facebook.presto.sql.tree.BooleanLiteral;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.NodeRef;
import com.facebook.presto.sql.tree.NullLiteral;
import com.facebook.presto.sql.tree.SymbolReference;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ComparisonChain;
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.SetMultimap;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import static com.facebook.presto.SystemSessionProperties.isColocatedJoinEnabled;
import static com.facebook.presto.SystemSessionProperties.isForceSingleNodeOutput;
import static com.facebook.presto.sql.ExpressionUtils.combineConjuncts;
import static com.facebook.presto.sql.ExpressionUtils.filterDeterministicConjuncts;
import static com.facebook.presto.sql.ExpressionUtils.filterNonDeterministicConjuncts;
import static com.facebook.presto.sql.analyzer.ExpressionAnalyzer.getExpressionTypes;
import static com.facebook.presto.sql.planner.FragmentTableScanCounter.countSources;
import static com.facebook.presto.sql.planner.FragmentTableScanCounter.hasMultipleSources;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.FIXED_ARBITRARY_DISTRIBUTION;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.FIXED_HASH_DISTRIBUTION;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.SCALED_WRITER_DISTRIBUTION;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.SINGLE_DISTRIBUTION;
import static com.facebook.presto.sql.planner.optimizations.ActualProperties.Global.partitionedOn;
import static com.facebook.presto.sql.planner.optimizations.ActualProperties.Global.singleStreamPartition;
import static com.facebook.presto.sql.planner.optimizations.LocalProperties.grouped;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.Scope.REMOTE;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.Type.GATHER;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.Type.REPARTITION;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.gatheringExchange;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.partitionedExchange;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.replicatedExchange;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Verify.verify;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static com.google.common.collect.Iterables.getOnlyElement;
import static java.lang.String.format;
import static java.util.Collections.emptyList;
import static java.util.stream.Collectors.toList;
public class AddExchanges
implements PlanOptimizer
{
private final SqlParser parser;
private final Metadata metadata;
/**
 * @param metadata used to resolve table layouts and function properties during rewriting
 * @param parser used for expression analysis while evaluating predicates
 */
public AddExchanges(Metadata metadata, SqlParser parser)
{
    this.metadata = metadata;
    this.parser = parser;
}
@Override
public PlanNode optimize(PlanNode plan, Session session, Map<Symbol, Type> types, SymbolAllocator symbolAllocator, PlanNodeIdAllocator idAllocator)
{
    // Walk the plan with the rewriter, starting with no distribution preference at the root,
    // and return the rewritten tree (the derived properties are only needed during the walk).
    Rewriter rewriter = new Rewriter(idAllocator, symbolAllocator, session);
    return plan.accept(rewriter, PreferredProperties.any()).getNode();
}
private class Rewriter
extends PlanVisitor<PlanWithProperties, PreferredProperties>
{
private final PlanNodeIdAllocator idAllocator;
private final SymbolAllocator symbolAllocator;
private final Map<Symbol, Type> types;
private final Session session;
private final boolean distributedIndexJoins;
private final boolean preferStreamingOperators;
private final boolean redistributeWrites;
private final boolean scaleWriters;
public Rewriter(PlanNodeIdAllocator idAllocator, SymbolAllocator symbolAllocator, Session session)
{
    this.idAllocator = idAllocator;
    this.symbolAllocator = symbolAllocator;
    // defensive snapshot of the current symbol-to-type mapping
    this.types = ImmutableMap.copyOf(symbolAllocator.getTypes());
    this.session = session;
    // cache session feature flags once instead of re-reading them for every plan node
    this.distributedIndexJoins = SystemSessionProperties.isDistributedIndexJoinEnabled(session);
    this.redistributeWrites = SystemSessionProperties.isRedistributeWrites(session);
    this.scaleWriters = SystemSessionProperties.isScaleWriters(session);
    this.preferStreamingOperators = SystemSessionProperties.preferStreamingOperators(session);
}
@Override
protected PlanWithProperties visitPlan(PlanNode node, PreferredProperties preferredProperties)
{
    // Default behavior: plan the child under the same preferences, then rebase this node on it.
    PlanWithProperties plannedChild = planChild(node, preferredProperties);
    return rebaseAndDeriveProperties(node, plannedChild);
}
@Override
public PlanWithProperties visitProject(ProjectNode node, PreferredProperties preferredProperties)
{
    // Parent preferences are expressed over this node's outputs; translate them to the child's
    // symbols by following identity assignments (non-identity outputs drop out of the preference).
    Map<Symbol, Symbol> identityAssignments = computeIdentityTranslations(node.getAssignments());
    PreferredProperties childPreferences = preferredProperties.translate(
            symbol -> Optional.ofNullable(identityAssignments.get(symbol)));
    return rebaseAndDeriveProperties(node, planChild(node, childPreferences));
}
@Override
public PlanWithProperties visitOutput(OutputNode node, PreferredProperties preferredProperties)
{
    PlanWithProperties child = planChild(node, PreferredProperties.undistributed());
    boolean needsGather = isForceSingleNodeOutput(session) && !child.getProperties().isSingleNode();
    if (needsGather) {
        // Funnel all partitions onto a single node before producing the final output.
        child = withDerivedProperties(
                gatheringExchange(idAllocator.getNextId(), REMOTE, child.getNode()),
                child.getProperties());
    }
    return rebaseAndDeriveProperties(node, child);
}
@Override
public PlanWithProperties visitEnforceSingleRow(EnforceSingleRowNode node, PreferredProperties preferredProperties)
{
    PlanWithProperties plannedChild = planChild(node, PreferredProperties.any());
    if (plannedChild.getProperties().isSingleNode()) {
        return rebaseAndDeriveProperties(node, plannedChild);
    }
    // The single-row enforcement must observe the entire input, so gather it onto one node.
    PlanWithProperties gathered = withDerivedProperties(
            gatheringExchange(idAllocator.getNextId(), REMOTE, plannedChild.getNode()),
            plannedChild.getProperties());
    return rebaseAndDeriveProperties(node, gathered);
}
@Override
public PlanWithProperties visitAggregation(AggregationNode node, PreferredProperties parentPreferredProperties)
{
    // The aggregation needs its input partitioned on the grouping keys so that all rows of a
    // group arrive on the same stream.
    Set<Symbol> partitioningRequirement = ImmutableSet.copyOf(node.getGroupingKeys());
    // Some aggregations must execute on a single node (per the function registry).
    boolean preferSingleNode = node.hasSingleNodeExecutionPreference(metadata.getFunctionRegistry());
    PreferredProperties preferredProperties = preferSingleNode ? PreferredProperties.undistributed() : PreferredProperties.any();
    if (!node.getGroupingKeys().isEmpty()) {
        // Grouped aggregation: prefer partitioning + local grouping on the keys, merged with
        // whatever the parent already wants.
        preferredProperties = PreferredProperties.partitionedWithLocal(partitioningRequirement, grouped(node.getGroupingKeys()))
                .mergeWithParent(parentPreferredProperties);
    }
    PlanWithProperties child = planChild(node, preferredProperties);
    if (child.getProperties().isSingleNode()) {
        // If already unpartitioned, just drop the single aggregation back on
        return rebaseAndDeriveProperties(node, child);
    }
    if (preferSingleNode) {
        // Gather everything to one node so the aggregation can run there.
        child = withDerivedProperties(
                gatheringExchange(idAllocator.getNextId(), REMOTE, child.getNode()),
                child.getProperties());
    }
    else if (!child.getProperties().isStreamPartitionedOn(partitioningRequirement) && !child.getProperties().isNodePartitionedOn(partitioningRequirement)) {
        // Not partitioned suitably on the keys: insert a remote repartitioning exchange.
        child = withDerivedProperties(
                partitionedExchange(idAllocator.getNextId(), REMOTE, child.getNode(), node.getGroupingKeys(), node.getHashSymbol()),
                child.getProperties());
    }
    return rebaseAndDeriveProperties(node, child);
}
@Override
public PlanWithProperties visitGroupId(GroupIdNode node, PreferredProperties preferredProperties)
{
    // Translate the parent's preferences through the group-id symbol mappings before planning
    // the child, then rebase this node on the planned child.
    return rebaseAndDeriveProperties(
            node,
            planChild(node, preferredProperties.translate(translateGroupIdSymbols(node))));
}
/**
 * Maps an output symbol of the GroupId node back to the corresponding input symbol: argument
 * symbols via the argument mappings, common grouping columns via the grouping-set mappings;
 * anything else (e.g. the group-id symbol itself) has no input equivalent.
 */
private Function<Symbol, Optional<Symbol>> translateGroupIdSymbols(GroupIdNode node)
{
    return symbol -> {
        Map<Symbol, Symbol> argumentMappings = node.getArgumentMappings();
        if (argumentMappings.containsKey(symbol)) {
            return Optional.of(argumentMappings.get(symbol));
        }
        return node.getCommonGroupingColumns().contains(symbol)
                ? Optional.of(node.getGroupingSetMappings().get(symbol))
                : Optional.empty();
    };
}
@Override
public PlanWithProperties visitMarkDistinct(MarkDistinctNode node, PreferredProperties preferredProperties)
{
    // Prefer the child partitioned and locally grouped on the distinct symbols so duplicate
    // detection for any given key happens within a single stream.
    PreferredProperties preferredChildProperties = PreferredProperties.partitionedWithLocal(ImmutableSet.copyOf(node.getDistinctSymbols()), grouped(node.getDistinctSymbols()))
            .mergeWithParent(preferredProperties);
    PlanWithProperties child = node.getSource().accept(this, preferredChildProperties);
    if (child.getProperties().isSingleNode() ||
            !child.getProperties().isStreamPartitionedOn(node.getDistinctSymbols())) {
        // NOTE(review): a single-node child is repartitioned here too — presumably to
        // parallelize the mark-distinct; confirm that is the intent.
        child = withDerivedProperties(
                partitionedExchange(
                        idAllocator.getNextId(),
                        REMOTE,
                        child.getNode(),
                        node.getDistinctSymbols(),
                        node.getHashSymbol()),
                child.getProperties());
    }
    return rebaseAndDeriveProperties(node, child);
}
@Override
public PlanWithProperties visitWindow(WindowNode node, PreferredProperties preferredProperties)
{
    // desired local properties: grouped on PARTITION BY, then sorted per ORDER BY
    List<LocalProperty<Symbol>> desiredProperties = new ArrayList<>();
    if (!node.getPartitionBy().isEmpty()) {
        desiredProperties.add(new GroupingProperty<>(node.getPartitionBy()));
    }
    node.getOrderingScheme().ifPresent(orderingScheme ->
            orderingScheme.getOrderBy().stream()
                    .map(symbol -> new SortingProperty<>(symbol, orderingScheme.getOrdering(symbol)))
                    .forEach(desiredProperties::add));
    PlanWithProperties child = planChild(
            node,
            PreferredProperties.partitionedWithLocal(ImmutableSet.copyOf(node.getPartitionBy()), desiredProperties)
                    .mergeWithParent(preferredProperties));
    if (!child.getProperties().isStreamPartitionedOn(node.getPartitionBy())) {
        if (node.getPartitionBy().isEmpty()) {
            // no PARTITION BY: the window function must see all rows on one node
            child = withDerivedProperties(
                    gatheringExchange(idAllocator.getNextId(), REMOTE, child.getNode()),
                    child.getProperties());
        }
        else {
            // repartition so each partition's rows land on the same node
            child = withDerivedProperties(
                    partitionedExchange(idAllocator.getNextId(), REMOTE, child.getNode(), node.getPartitionBy(), node.getHashSymbol()),
                    child.getProperties());
        }
    }
    return rebaseAndDeriveProperties(node, child);
}
@Override
public PlanWithProperties visitRowNumber(RowNumberNode node, PreferredProperties preferredProperties)
{
    if (node.getPartitionBy().isEmpty()) {
        // a global row_number needs all rows on a single node
        PlanWithProperties child = planChild(node, PreferredProperties.undistributed());
        if (!child.getProperties().isSingleNode()) {
            child = withDerivedProperties(
                    gatheringExchange(idAllocator.getNextId(), REMOTE, child.getNode()),
                    child.getProperties());
        }
        return rebaseAndDeriveProperties(node, child);
    }
    // with PARTITION BY, prefer a partitioning (and local grouping) on those columns
    PlanWithProperties child = planChild(
            node,
            PreferredProperties.partitionedWithLocal(ImmutableSet.copyOf(node.getPartitionBy()), grouped(node.getPartitionBy()))
                    .mergeWithParent(preferredProperties));
    // TODO: add config option/session property to force parallel plan if child is unpartitioned and window has a PARTITION BY clause
    if (!child.getProperties().isStreamPartitionedOn(node.getPartitionBy())) {
        child = withDerivedProperties(
                partitionedExchange(
                        idAllocator.getNextId(),
                        REMOTE,
                        child.getNode(),
                        node.getPartitionBy(),
                        node.getHashSymbol()),
                child.getProperties());
    }
    // TODO: streaming
    return rebaseAndDeriveProperties(node, child);
}
@Override
public PlanWithProperties visitTopNRowNumber(TopNRowNumberNode node, PreferredProperties preferredProperties)
{
    PreferredProperties preferredChildProperties;
    Function<PlanNode, PlanNode> addExchange;
    if (node.getPartitionBy().isEmpty()) {
        // global top-N row_number: gather after computing partial results
        preferredChildProperties = PreferredProperties.any();
        addExchange = partial -> gatheringExchange(idAllocator.getNextId(), REMOTE, partial);
    }
    else {
        // partitioned: prefer/force a partitioning on the PARTITION BY columns
        preferredChildProperties = PreferredProperties.partitionedWithLocal(ImmutableSet.copyOf(node.getPartitionBy()), grouped(node.getPartitionBy()))
                .mergeWithParent(preferredProperties);
        addExchange = partial -> partitionedExchange(idAllocator.getNextId(), REMOTE, partial, node.getPartitionBy(), node.getHashSymbol());
    }
    PlanWithProperties child = planChild(node, preferredChildProperties);
    if (!child.getProperties().isStreamPartitionedOn(node.getPartitionBy())) {
        // add exchange + push function to child
        // (the partial=true copy below trims each node's rows before they cross the exchange)
        child = withDerivedProperties(
                new TopNRowNumberNode(
                        idAllocator.getNextId(),
                        child.getNode(),
                        node.getSpecification(),
                        node.getRowNumberSymbol(),
                        node.getMaxRowCountPerPartition(),
                        true,
                        node.getHashSymbol()),
                child.getProperties());
        child = withDerivedProperties(addExchange.apply(child.getNode()), child.getProperties());
    }
    return rebaseAndDeriveProperties(node, child);
}
@Override
public PlanWithProperties visitTopN(TopNNode node, PreferredProperties preferredProperties)
{
    // PARTIAL TopN can run anywhere; SINGLE/FINAL must see all rows, so a
    // distributed child is gathered onto a single node first.
    PlanWithProperties child;
    switch (node.getStep()) {
        case PARTIAL:
            child = planChild(node, PreferredProperties.any());
            break;
        case SINGLE:
        case FINAL:
            child = planChild(node, PreferredProperties.undistributed());
            if (!child.getProperties().isSingleNode()) {
                child = withDerivedProperties(
                        gatheringExchange(idAllocator.getNextId(), REMOTE, child.getNode()),
                        child.getProperties());
            }
            break;
        default:
            throw new UnsupportedOperationException(format("Unsupported step for TopN [%s]", node.getStep()));
    }
    return rebaseAndDeriveProperties(node, child);
}
@Override
public PlanWithProperties visitSort(SortNode node, PreferredProperties preferredProperties)
{
    PlanWithProperties child = planChild(node, PreferredProperties.undistributed());
    if (!child.getProperties().isSingleNode()) {
        // sorting requires all rows on one node
        child = withDerivedProperties(
                gatheringExchange(idAllocator.getNextId(), REMOTE, child.getNode()),
                child.getProperties());
    }
    else {
        // current plan so far is single node, so local properties are effectively global properties
        // skip the SortNode if the local properties guarantee ordering on Sort keys
        // TODO: This should be extracted as a separate optimizer once the planner is able to reason about the ordering of each operator
        List<LocalProperty<Symbol>> desiredProperties = new ArrayList<>();
        for (Symbol symbol : node.getOrderingScheme().getOrderBy()) {
            desiredProperties.add(new SortingProperty<>(symbol, node.getOrderingScheme().getOrdering(symbol)));
        }
        // LocalProperties.match returns one Optional per desired property; a present
        // value marks an UNsatisfied requirement, so "none present" means the child
        // already delivers the required ordering and the sort can be elided
        if (LocalProperties.match(child.getProperties().getLocalProperties(), desiredProperties).stream()
                .noneMatch(Optional::isPresent)) {
            return child;
        }
    }
    return rebaseAndDeriveProperties(node, child);
}
@Override
public PlanWithProperties visitLimit(LimitNode node, PreferredProperties preferredProperties)
{
    PlanWithProperties child = planChild(node, PreferredProperties.any());
    if (child.getProperties().isSingleNode()) {
        return rebaseAndDeriveProperties(node, child);
    }
    // Push a partial limit below a gathering exchange so every node trims its
    // own output before rows are shipped to the final (single-node) limit.
    PlanWithProperties partialLimit = withDerivedProperties(
            new LimitNode(idAllocator.getNextId(), child.getNode(), node.getCount(), true),
            child.getProperties());
    PlanWithProperties gathered = withDerivedProperties(
            gatheringExchange(idAllocator.getNextId(), REMOTE, partialLimit.getNode()),
            partialLimit.getProperties());
    return rebaseAndDeriveProperties(node, gathered);
}
@Override
public PlanWithProperties visitDistinctLimit(DistinctLimitNode node, PreferredProperties preferredProperties)
{
    PlanWithProperties child = planChild(node, PreferredProperties.any());
    if (child.getProperties().isSingleNode()) {
        return rebaseAndDeriveProperties(node, child);
    }
    // Insert a partial DistinctLimit under a gathering exchange so each node
    // deduplicates and trims locally before sending rows to the final node.
    PlanNode partial = new DistinctLimitNode(idAllocator.getNextId(), child.getNode(), node.getLimit(), true, node.getDistinctSymbols(), node.getHashSymbol());
    child = withDerivedProperties(
            gatheringExchange(idAllocator.getNextId(), REMOTE, partial),
            child.getProperties());
    return rebaseAndDeriveProperties(node, child);
}
@Override
public PlanWithProperties visitFilter(FilterNode node, PreferredProperties preferredProperties)
{
    // A filter sitting directly on a table scan is planned through
    // planTableScan so its predicate can be pushed into the connector.
    PlanNode source = node.getSource();
    if (source instanceof TableScanNode) {
        return planTableScan((TableScanNode) source, node.getPredicate(), preferredProperties);
    }
    return rebaseAndDeriveProperties(node, planChild(node, preferredProperties));
}
@Override
public PlanWithProperties visitTableScan(TableScanNode node, PreferredProperties preferredProperties)
{
    // A bare scan is just a scan with a trivially-true predicate.
    Expression noFilter = BooleanLiteral.TRUE_LITERAL;
    return planTableScan(node, noFilter, preferredProperties);
}
@Override
public PlanWithProperties visitTableWriter(TableWriterNode node, PreferredProperties preferredProperties)
{
    PlanWithProperties source = node.getSource().accept(this, preferredProperties);
    Optional<PartitioningScheme> partitioningScheme = node.getPartitioningScheme();
    if (!partitioningScheme.isPresent()) {
        // no connector-required partitioning: optionally pick one from session settings
        if (scaleWriters) {
            // scale the number of writers with load
            partitioningScheme = Optional.of(new PartitioningScheme(Partitioning.create(SCALED_WRITER_DISTRIBUTION, ImmutableList.of()), source.getNode().getOutputSymbols()));
        }
        else if (redistributeWrites) {
            // spread rows across nodes round-robin style to avoid skewed writers
            partitioningScheme = Optional.of(new PartitioningScheme(Partitioning.create(FIXED_ARBITRARY_DISTRIBUTION, ImmutableList.of()), source.getNode().getOutputSymbols()));
        }
    }
    // add an exchange only if the source is not already partitioned as required
    if (partitioningScheme.isPresent() && !source.getProperties().isNodePartitionedOn(partitioningScheme.get().getPartitioning(), false)) {
        source = withDerivedProperties(
                partitionedExchange(
                        idAllocator.getNextId(),
                        REMOTE,
                        source.getNode(),
                        partitioningScheme.get()),
                source.getProperties());
    }
    return rebaseAndDeriveProperties(node, source);
}
/**
 * Plans a table scan with the given predicate pushed toward the connector:
 * the deterministic part of the predicate is decomposed into a TupleDomain,
 * layouts matching that domain are requested from the metadata, one candidate
 * plan is built per usable layout (re-applying whatever the layout could not
 * enforce as a FilterNode), and the preferred candidate is returned.
 */
private PlanWithProperties planTableScan(TableScanNode node, Expression predicate, PreferredProperties preferredProperties)
{
    // don't include non-deterministic predicates
    Expression deterministicPredicate = filterDeterministicConjuncts(predicate);
    DomainTranslator.ExtractionResult decomposedPredicate = DomainTranslator.fromPredicate(
            metadata,
            session,
            deterministicPredicate,
            types);
    // domain implied by the predicate, narrowed by the scan's existing constraint
    TupleDomain<ColumnHandle> newDomain = decomposedPredicate.getTupleDomain()
            .transform(node.getAssignments()::get)
            .intersect(node.getCurrentConstraint());
    // reverse mapping: column handle -> symbol (assignments must be a bijection)
    Map<ColumnHandle, Symbol> assignments = ImmutableBiMap.copyOf(node.getAssignments()).inverse();
    // Simplify the tuple domain to avoid creating an expression with too many nodes that's
    // expensive to evaluate in the call to shouldPrune below.
    Expression constraint = combineConjuncts(
            deterministicPredicate,
            DomainTranslator.toPredicate(newDomain.simplify().transform(assignments::get)));
    LayoutConstraintEvaluator evaluator = new LayoutConstraintEvaluator(
            session,
            symbolAllocator.getTypes(),
            node.getAssignments(),
            constraint);
    // Layouts will be returned in order of the connector's preference
    List<TableLayoutResult> layouts = metadata.getLayouts(
            session, node.getTable(),
            new Constraint<>(newDomain, evaluator::isCandidate),
            Optional.of(node.getOutputSymbols().stream()
                    .map(node.getAssignments()::get)
                    .collect(toImmutableSet())));
    if (layouts.isEmpty()) {
        // connector offered no layout for this constraint: result is empty
        return emptyRelation(node.getOutputSymbols());
    }
    // Filter out layouts that cannot supply all the required columns
    layouts = layouts.stream()
            .filter(layout -> layout.hasAllOutputs(node))
            .collect(toList());
    checkState(!layouts.isEmpty(), "No usable layouts for %s", node);
    if (layouts.stream().anyMatch(layout -> layout.getLayout().getPredicate().isNone())) {
        // a layout's predicate proved unsatisfiable: result is empty
        return emptyRelation(node.getOutputSymbols());
    }
    List<PlanWithProperties> possiblePlans = layouts.stream()
            .map(layout -> {
                TableScanNode tableScan = new TableScanNode(
                        node.getId(),
                        node.getTable(),
                        node.getOutputSymbols(),
                        node.getAssignments(),
                        Optional.of(layout.getLayout().getHandle()),
                        newDomain.intersect(layout.getLayout().getPredicate()),
                        Optional.ofNullable(node.getOriginalConstraint()).orElse(predicate));
                PlanWithProperties result = new PlanWithProperties(tableScan, deriveProperties(tableScan, ImmutableList.of()));
                // re-apply what the layout could not enforce, plus non-deterministic
                // conjuncts and whatever the domain translator could not decompose
                Expression resultingPredicate = combineConjuncts(
                        DomainTranslator.toPredicate(layout.getUnenforcedConstraint().transform(assignments::get)),
                        filterNonDeterministicConjuncts(predicate),
                        decomposedPredicate.getRemainingExpression());
                if (!BooleanLiteral.TRUE_LITERAL.equals(resultingPredicate)) {
                    return withDerivedProperties(
                            new FilterNode(idAllocator.getNextId(), result.getNode(), resultingPredicate),
                            deriveProperties(tableScan, ImmutableList.of()));
                }
                return result;
            })
            .collect(toList());
    return pickPlan(possiblePlans, preferredProperties);
}
// Builds an empty (zero-row) relation over the given output symbols;
// a ValuesNode with no rows is trivially a single stream.
private PlanWithProperties emptyRelation(List<Symbol> outputSymbols)
{
    ValuesNode empty = new ValuesNode(idAllocator.getNextId(), outputSymbols, ImmutableList.of());
    ActualProperties properties = ActualProperties.builder()
            .global(singleStreamPartition())
            .build();
    return new PlanWithProperties(empty, properties);
}
/**
 * possiblePlans should be provided in layout preference order.
 * Returns the first plan, unless streaming operators are preferred, in which
 * case plans are first stably re-ranked by how well their properties stream
 * relative to the preferred properties (ties keep layout preference order).
 */
private PlanWithProperties pickPlan(List<PlanWithProperties> possiblePlans, PreferredProperties preferredProperties)
{
    checkArgument(!possiblePlans.isEmpty());
    if (!preferStreamingOperators) {
        return possiblePlans.get(0);
    }
    List<PlanWithProperties> ranked = new ArrayList<>(possiblePlans);
    // List.sort is documented to be stable, so equally-ranked plans retain the connector's ordering
    ranked.sort(Comparator.comparing(PlanWithProperties::getProperties, streamingExecutionPreference(preferredProperties)));
    return ranked.get(0);
}
@Override
public PlanWithProperties visitValues(ValuesNode node, PreferredProperties preferredProperties)
{
    // Inline VALUES are produced as one stream; no exchange is ever needed.
    ActualProperties properties = ActualProperties.builder()
            .global(singleStreamPartition())
            .build();
    return new PlanWithProperties(node, properties);
}
@Override
public PlanWithProperties visitExplainAnalyze(ExplainAnalyzeNode node, PreferredProperties preferredProperties)
{
    PlanWithProperties child = planChild(node, PreferredProperties.any());
    PlanNode childNode = child.getNode();
    // if the child is already a gathering exchange, don't add another
    boolean alreadyGathered = (childNode instanceof ExchangeNode)
            && ((ExchangeNode) childNode).getType() == ExchangeNode.Type.GATHER;
    if (!alreadyGathered) {
        // Always add an exchange because ExplainAnalyze should be in its own stage
        child = withDerivedProperties(
                gatheringExchange(idAllocator.getNextId(), REMOTE, childNode),
                child.getProperties());
    }
    return rebaseAndDeriveProperties(node, child);
}
@Override
public PlanWithProperties visitTableFinish(TableFinishNode node, PreferredProperties preferredProperties)
{
    PlanWithProperties child = planChild(node, PreferredProperties.any());
    PlanNode childNode = child.getNode();
    // if the child is already a gathering exchange, don't add another
    if ((childNode instanceof ExchangeNode) && ((ExchangeNode) childNode).getType().equals(GATHER)) {
        return rebaseAndDeriveProperties(node, child);
    }
    // TableFinish runs on the coordinator; gather unless the child already does too
    boolean runsOnCoordinatorAlone = child.getProperties().isSingleNode() && child.getProperties().isCoordinatorOnly();
    if (!runsOnCoordinatorAlone) {
        child = withDerivedProperties(
                gatheringExchange(idAllocator.getNextId(), REMOTE, childNode),
                child.getProperties());
    }
    return rebaseAndDeriveProperties(node, child);
}
// Zips two equal-length lists into a multimap pairing keys.get(i) -> values.get(i).
private <T> SetMultimap<T, T> createMapping(List<T> keys, List<T> values)
{
    checkArgument(keys.size() == values.size(), "Inputs must have the same size");
    ImmutableSetMultimap.Builder<T, T> mapping = ImmutableSetMultimap.builder();
    int pairCount = keys.size();
    for (int index = 0; index < pairCount; index++) {
        mapping.put(keys.get(index), values.get(index));
    }
    return mapping.build();
}
// Builds a translator that maps an input to any one of its mapped outputs,
// or to empty when the input has no mapping at all.
private <T> Function<T, Optional<T>> createTranslator(SetMultimap<T, T> inputToOutput)
{
    return input -> {
        Set<T> candidates = inputToOutput.get(input);
        return candidates.stream().findAny();
    };
}
// Builds a translator that maps an input directly to one of its outputs;
// the caller must guarantee every translated input has at least one mapping.
private <T> Function<T, T> createDirectTranslator(SetMultimap<T, T> inputToOutput)
{
    return input -> {
        Set<T> candidates = inputToOutput.get(input);
        return candidates.iterator().next();
    };
}
@Override
public PlanWithProperties visitJoin(JoinNode node, PreferredProperties preferredProperties)
{
    List<Symbol> leftSymbols = node.getCriteria().stream()
            .map(JoinNode.EquiJoinClause::getLeft)
            .collect(toImmutableList());
    List<Symbol> rightSymbols = node.getCriteria().stream()
            .map(JoinNode.EquiJoinClause::getRight)
            .collect(toImmutableList());
    JoinNode.Type type = node.getType();
    PlanWithProperties left;
    PlanWithProperties right;
    // distribution type must have been decided by an earlier optimizer
    JoinNode.DistributionType distributionType = node.getDistributionType().orElseThrow(() -> new IllegalArgumentException("distributionType not yet set"));
    if (distributionType == JoinNode.DistributionType.PARTITIONED) {
        SetMultimap<Symbol, Symbol> rightToLeft = createMapping(rightSymbols, leftSymbols);
        SetMultimap<Symbol, Symbol> leftToRight = createMapping(leftSymbols, rightSymbols);
        // plan the left side first, asking it to be partitioned on the join keys
        left = node.getLeft().accept(this, PreferredProperties.partitioned(ImmutableSet.copyOf(leftSymbols)));
        if (left.getProperties().isNodePartitionedOn(leftSymbols) && !left.getProperties().isSingleNode()) {
            // left side is usefully partitioned: make the right side match it exactly
            Partitioning rightPartitioning = left.getProperties().translate(createTranslator(leftToRight)).getNodePartitioning().get();
            right = node.getRight().accept(this, PreferredProperties.partitioned(rightPartitioning));
            if (!right.getProperties().isNodePartitionedWith(left.getProperties(), rightToLeft::get)) {
                right = withDerivedProperties(
                        partitionedExchange(idAllocator.getNextId(), REMOTE, right.getNode(), new PartitioningScheme(rightPartitioning, right.getNode().getOutputSymbols())),
                        right.getProperties());
            }
        }
        else {
            // left side isn't partitioned on the keys: try driving from the right side instead
            right = node.getRight().accept(this, PreferredProperties.partitioned(ImmutableSet.copyOf(rightSymbols)));
            if (right.getProperties().isNodePartitionedOn(rightSymbols) && !right.getProperties().isSingleNode()) {
                // right side is usefully partitioned: repartition the left to match it
                Partitioning leftPartitioning = right.getProperties().translate(createTranslator(rightToLeft)).getNodePartitioning().get();
                left = withDerivedProperties(
                        partitionedExchange(idAllocator.getNextId(), REMOTE, left.getNode(), new PartitioningScheme(leftPartitioning, left.getNode().getOutputSymbols())),
                        left.getProperties());
            }
            else {
                // neither side helps: hash-repartition both sides on the join keys
                left = withDerivedProperties(
                        partitionedExchange(idAllocator.getNextId(), REMOTE, left.getNode(), leftSymbols, Optional.empty()),
                        left.getProperties());
                right = withDerivedProperties(
                        partitionedExchange(idAllocator.getNextId(), REMOTE, right.getNode(), rightSymbols, Optional.empty()),
                        right.getProperties());
            }
        }
        verify(left.getProperties().isNodePartitionedWith(right.getProperties(), leftToRight::get));
        // if colocated joins are disabled, force redistribute when using a custom partitioning
        if (!isColocatedJoinEnabled(session) && hasMultipleSources(left.getNode(), right.getNode())) {
            Partitioning rightPartitioning = left.getProperties().translate(createTranslator(leftToRight)).getNodePartitioning().get();
            right = withDerivedProperties(
                    partitionedExchange(idAllocator.getNextId(), REMOTE, right.getNode(), new PartitioningScheme(rightPartitioning, right.getNode().getOutputSymbols())),
                    right.getProperties());
        }
    }
    else {
        // Broadcast Join
        left = node.getLeft().accept(this, PreferredProperties.any());
        right = node.getRight().accept(this, PreferredProperties.any());
        if (left.getProperties().isSingleNode()) {
            // single-node probe: only gather the build side when it is distributed
            // (or colocation is disallowed across multiple sources)
            if (!right.getProperties().isSingleNode() ||
                    (!isColocatedJoinEnabled(session) && hasMultipleSources(left.getNode(), right.getNode()))) {
                right = withDerivedProperties(
                        gatheringExchange(idAllocator.getNextId(), REMOTE, right.getNode()),
                        right.getProperties());
            }
        }
        else {
            // distributed probe: replicate the build side to every node
            right = withDerivedProperties(
                    replicatedExchange(idAllocator.getNextId(), REMOTE, right.getNode()),
                    right.getProperties());
        }
    }
    JoinNode result = new JoinNode(node.getId(),
            type,
            left.getNode(),
            right.getNode(),
            node.getCriteria(),
            node.getOutputSymbols(),
            node.getFilter(),
            node.getLeftHashSymbol(),
            node.getRightHashSymbol(),
            node.getDistributionType());
    return new PlanWithProperties(result, deriveProperties(result, ImmutableList.of(left.getProperties(), right.getProperties())));
}
@Override
public PlanWithProperties visitUnnest(UnnestNode node, PreferredProperties preferredProperties)
{
    // Only replicate symbols survive unnest unchanged, so parent preferences
    // are translated to the child for those symbols alone.
    PreferredProperties childPreferred = preferredProperties.translate(
            symbol -> node.getReplicateSymbols().contains(symbol) ? Optional.of(symbol) : Optional.empty());
    PlanWithProperties child = planChild(node, childPreferred);
    return rebaseAndDeriveProperties(node, child);
}
@Override
public PlanWithProperties visitSemiJoin(SemiJoinNode node, PreferredProperties preferredProperties)
{
    PlanWithProperties source;
    PlanWithProperties filteringSource;
    // distribution type must have been decided by an earlier optimizer
    SemiJoinNode.DistributionType distributionType = node.getDistributionType().orElseThrow(() -> new IllegalArgumentException("distributionType not yet set"));
    if (distributionType == SemiJoinNode.DistributionType.PARTITIONED) {
        List<Symbol> sourceSymbols = ImmutableList.of(node.getSourceJoinSymbol());
        List<Symbol> filteringSourceSymbols = ImmutableList.of(node.getFilteringSourceJoinSymbol());
        SetMultimap<Symbol, Symbol> sourceToFiltering = createMapping(sourceSymbols, filteringSourceSymbols);
        SetMultimap<Symbol, Symbol> filteringToSource = createMapping(filteringSourceSymbols, sourceSymbols);
        // plan the probe side first, preferring a partitioning on the join symbol
        source = node.getSource().accept(this, PreferredProperties.partitioned(ImmutableSet.copyOf(sourceSymbols)));
        if (source.getProperties().isNodePartitionedOn(sourceSymbols) && !source.getProperties().isSingleNode()) {
            // probe side is usefully partitioned: make the filtering side match,
            // with nulls-and-any replicated so NULL semantics of IN/semi-join hold
            Partitioning filteringPartitioning = source.getProperties().translate(createTranslator(sourceToFiltering)).getNodePartitioning().get();
            filteringSource = node.getFilteringSource().accept(this, PreferredProperties.partitionedWithNullsAndAnyReplicated(filteringPartitioning));
            if (!source.getProperties().withReplicatedNulls(true).isNodePartitionedWith(filteringSource.getProperties(), sourceToFiltering::get)) {
                filteringSource = withDerivedProperties(
                        partitionedExchange(idAllocator.getNextId(), REMOTE, filteringSource.getNode(), new PartitioningScheme(
                                filteringPartitioning,
                                filteringSource.getNode().getOutputSymbols(),
                                Optional.empty(),
                                true,
                                Optional.empty())),
                        filteringSource.getProperties());
            }
        }
        else {
            // probe side isn't partitioned on the join symbol: try driving from the filtering side
            filteringSource = node.getFilteringSource().accept(this, PreferredProperties.partitionedWithNullsAndAnyReplicated(ImmutableSet.copyOf(filteringSourceSymbols)));
            if (filteringSource.getProperties().isNodePartitionedOn(filteringSourceSymbols, true) && !filteringSource.getProperties().isSingleNode()) {
                // filtering side is usefully partitioned: repartition the probe to match it
                Partitioning sourcePartitioning = filteringSource.getProperties().translate(createTranslator(filteringToSource)).getNodePartitioning().get();
                source = withDerivedProperties(
                        partitionedExchange(idAllocator.getNextId(), REMOTE, source.getNode(), new PartitioningScheme(sourcePartitioning, source.getNode().getOutputSymbols())),
                        source.getProperties());
            }
            else {
                // neither side helps: hash-repartition both (filtering side with nulls/any replicated)
                source = withDerivedProperties(
                        partitionedExchange(idAllocator.getNextId(), REMOTE, source.getNode(), sourceSymbols, Optional.empty()),
                        source.getProperties());
                filteringSource = withDerivedProperties(
                        partitionedExchange(idAllocator.getNextId(), REMOTE, filteringSource.getNode(), filteringSourceSymbols, Optional.empty(), true),
                        filteringSource.getProperties());
            }
        }
        verify(source.getProperties().withReplicatedNulls(true).isNodePartitionedWith(filteringSource.getProperties(), sourceToFiltering::get));
        // if colocated joins are disabled, force redistribute when using a custom partitioning
        if (!isColocatedJoinEnabled(session) && hasMultipleSources(source.getNode(), filteringSource.getNode())) {
            Partitioning filteringPartitioning = source.getProperties().translate(createTranslator(sourceToFiltering)).getNodePartitioning().get();
            filteringSource = withDerivedProperties(
                    partitionedExchange(idAllocator.getNextId(), REMOTE, filteringSource.getNode(), new PartitioningScheme(
                            filteringPartitioning,
                            filteringSource.getNode().getOutputSymbols(),
                            Optional.empty(),
                            true,
                            Optional.empty())),
                    filteringSource.getProperties());
        }
    }
    else {
        // replicated semi-join
        source = node.getSource().accept(this, PreferredProperties.any());
        // Delete operator works fine even if TableScans on the filtering (right) side is not co-located with itself. It only cares about the corresponding TableScan,
        // which is always on the source (left) side. Therefore, hash-partitioned semi-join is always allowed on the filtering side.
        filteringSource = node.getFilteringSource().accept(this, PreferredProperties.any());
        // make filtering source match requirements of source
        if (source.getProperties().isSingleNode()) {
            if (!filteringSource.getProperties().isSingleNode() ||
                    (!isColocatedJoinEnabled(session) && hasMultipleSources(source.getNode(), filteringSource.getNode()))) {
                filteringSource = withDerivedProperties(
                        gatheringExchange(idAllocator.getNextId(), REMOTE, filteringSource.getNode()),
                        filteringSource.getProperties());
            }
        }
        else {
            // distributed source: replicate the filtering side everywhere
            filteringSource = withDerivedProperties(
                    replicatedExchange(idAllocator.getNextId(), REMOTE, filteringSource.getNode()),
                    filteringSource.getProperties());
        }
    }
    return rebaseAndDeriveProperties(node, ImmutableList.of(source, filteringSource));
}
@Override
public PlanWithProperties visitIndexJoin(IndexJoinNode node, PreferredProperties preferredProperties)
{
    List<Symbol> joinColumns = node.getCriteria().stream()
            .map(IndexJoinNode.EquiJoinClause::getProbe)
            .collect(toImmutableList());
    // Only prefer grouping on join columns if no parent local property preferences
    List<LocalProperty<Symbol>> desiredLocalProperties = preferredProperties.getLocalProperties().isEmpty() ? grouped(joinColumns) : ImmutableList.of();
    PlanWithProperties probeSource = node.getProbeSource().accept(this, PreferredProperties.partitionedWithLocal(ImmutableSet.copyOf(joinColumns), desiredLocalProperties)
            .mergeWithParent(preferredProperties));
    ActualProperties probeProperties = probeSource.getProperties();
    PlanWithProperties indexSource = node.getIndexSource().accept(this, PreferredProperties.any());
    // TODO: allow repartitioning if unpartitioned to increase parallelism
    if (shouldRepartitionForIndexJoin(joinColumns, preferredProperties, probeProperties)) {
        probeSource = withDerivedProperties(
                partitionedExchange(idAllocator.getNextId(), REMOTE, probeSource.getNode(), joinColumns, node.getProbeHashSymbol()),
                probeProperties);
    }
    // TODO: if input is grouped, create streaming join
    // index side is really a nested-loops plan, so don't add exchanges
    PlanNode result = ChildReplacer.replaceChildren(node, ImmutableList.of(probeSource.getNode(), node.getIndexSource()));
    return new PlanWithProperties(result, deriveProperties(result, ImmutableList.of(probeSource.getProperties(), indexSource.getProperties())));
}
/**
 * Decides whether the probe side of a distributed index join should be
 * repartitioned on the join columns. Repartitioning is skipped when it is
 * disabled, pointless (single node), or would disrupt a streaming-preferring
 * parent's partitioning.
 */
private boolean shouldRepartitionForIndexJoin(List<Symbol> joinColumns, PreferredProperties parentPreferredProperties, ActualProperties probeProperties)
{
    // See if distributed index joins are enabled
    if (!distributedIndexJoins) {
        return false;
    }
    // No point in repartitioning if the plan is not distributed
    if (probeProperties.isSingleNode()) {
        return false;
    }
    Optional<PreferredProperties.PartitioningProperties> parentPartitioningPreferences = parentPreferredProperties.getGlobalProperties()
            .flatMap(PreferredProperties.Global::getPartitioningProperties);
    // Disable repartitioning if it would disrupt a parent's partitioning preference when streaming is enabled
    boolean parentAlreadyPartitionedOnChild = parentPartitioningPreferences
            .map(partitioning -> probeProperties.isStreamPartitionedOn(partitioning.getPartitioningColumns()))
            .orElse(false);
    if (preferStreamingOperators && parentAlreadyPartitionedOnChild) {
        return false;
    }
    // Otherwise, repartition if we need to align with the join columns
    if (!probeProperties.isStreamPartitionedOn(joinColumns)) {
        return true;
    }
    // If we are already partitioned on the join columns because the data has been forced effectively into one stream,
    // then we should repartition if that would make a difference (from the single stream state).
    return probeProperties.isEffectivelySingleStream() && probeProperties.isStreamRepartitionEffective(joinColumns);
}
@Override
public PlanWithProperties visitIndexSource(IndexSourceNode node, PreferredProperties preferredProperties)
{
    // An index source is always treated as a single stream; no exchanges added.
    ActualProperties properties = ActualProperties.builder()
            .global(singleStreamPartition())
            .build();
    return new PlanWithProperties(node, properties);
}
// Maps each union output symbol to the corresponding symbol of the given
// source branch; every output symbol has an entry, so the result is always present.
private Function<Symbol, Optional<Symbol>> outputToInputTranslator(UnionNode node, int sourceIndex)
{
    return output -> {
        Symbol input = node.getSymbolMapping().get(output).get(sourceIndex);
        return Optional.of(input);
    };
}
/**
 * Chooses the partitioning to use for a distributed union: the parent's
 * explicit request if any; otherwise the first child that naturally produces
 * a compatible node partitioning; otherwise an arbitrary hash partitioning
 * over the preferred columns.
 */
private Partitioning selectUnionPartitioning(UnionNode node, PreferredProperties preferredProperties, PreferredProperties.PartitioningProperties parentPreference)
{
    // Use the parent's requested partitioning if available
    if (parentPreference.getPartitioning().isPresent()) {
        return parentPreference.getPartitioning().get();
    }
    // Try planning the children to see if any of them naturally produce a partitioning (for now, just select the first)
    boolean nullsAndAnyReplicated = parentPreference.isNullsAndAnyReplicated();
    for (int sourceIndex = 0; sourceIndex < node.getSources().size(); sourceIndex++) {
        // translate the parent's preference into this child's symbol space
        PreferredProperties.PartitioningProperties childPartitioning = parentPreference.translate(outputToInputTranslator(node, sourceIndex)).get();
        PreferredProperties childPreferred = PreferredProperties.builder()
                .global(PreferredProperties.Global.distributed(childPartitioning.withNullsAndAnyReplicated(nullsAndAnyReplicated)))
                .build();
        PlanWithProperties child = node.getSources().get(sourceIndex).accept(this, childPreferred);
        if (child.getProperties().isNodePartitionedOn(childPartitioning.getPartitioningColumns(), nullsAndAnyReplicated)) {
            // translate the child's partitioning back into the union's output symbols
            Function<Symbol, Optional<Symbol>> childToParent = createTranslator(createMapping(node.sourceOutputLayout(sourceIndex), node.getOutputSymbols()));
            return child.getProperties().translate(childToParent).getNodePartitioning().get();
        }
    }
    // Otherwise, choose an arbitrary partitioning over the columns
    return Partitioning.create(FIXED_HASH_DISTRIBUTION, ImmutableList.copyOf(parentPreference.getPartitioningColumns()));
}
@Override
public PlanWithProperties visitUnion(UnionNode node, PreferredProperties parentPreference)
{
Optional<PreferredProperties.Global> parentGlobal = parentPreference.getGlobalProperties();
if (parentGlobal.isPresent() && parentGlobal.get().isDistributed() && parentGlobal.get().getPartitioningProperties().isPresent()) {
PreferredProperties.PartitioningProperties parentPartitioningPreference = parentGlobal.get().getPartitioningProperties().get();
boolean nullsAndAnyReplicated = parentPartitioningPreference.isNullsAndAnyReplicated();
Partitioning desiredParentPartitioning = selectUnionPartitioning(node, parentPreference, parentPartitioningPreference);
ImmutableList.Builder<PlanNode> partitionedSources = ImmutableList.builder();
ImmutableListMultimap.Builder<Symbol, Symbol> outputToSourcesMapping = ImmutableListMultimap.builder();
for (int sourceIndex = 0; sourceIndex < node.getSources().size(); sourceIndex++) {
Partitioning childPartitioning = desiredParentPartitioning.translate(createDirectTranslator(createMapping(node.getOutputSymbols(), node.sourceOutputLayout(sourceIndex))));
PreferredProperties childPreferred = PreferredProperties.builder()
.global(PreferredProperties.Global.distributed(PreferredProperties.PartitioningProperties.partitioned(childPartitioning)
.withNullsAndAnyReplicated(nullsAndAnyReplicated)))
.build();
PlanWithProperties source = node.getSources().get(sourceIndex).accept(this, childPreferred);
if (!source.getProperties().isNodePartitionedOn(childPartitioning, nullsAndAnyReplicated)) {
source = withDerivedProperties(
partitionedExchange(
idAllocator.getNextId(),
REMOTE,
source.getNode(),
new PartitioningScheme(
childPartitioning,
source.getNode().getOutputSymbols(),
Optional.empty(),
nullsAndAnyReplicated,
Optional.empty())),
source.getProperties());
}
partitionedSources.add(source.getNode());
for (int column = 0; column < node.getOutputSymbols().size(); column++) {
outputToSourcesMapping.put(node.getOutputSymbols().get(column), node.sourceOutputLayout(sourceIndex).get(column));
}
}
UnionNode newNode = new UnionNode(
node.getId(),
partitionedSources.build(),
outputToSourcesMapping.build(),
ImmutableList.copyOf(outputToSourcesMapping.build().keySet()));
return new PlanWithProperties(
newNode,
ActualProperties.builder()
.global(partitionedOn(desiredParentPartitioning, Optional.of(desiredParentPartitioning)))
.build()
.withReplicatedNulls(parentPartitioningPreference.isNullsAndAnyReplicated()));
}
// first, classify children into partitioned and unpartitioned
List<PlanNode> unpartitionedChildren = new ArrayList<>();
List<List<Symbol>> unpartitionedOutputLayouts = new ArrayList<>();
List<PlanNode> partitionedChildren = new ArrayList<>();
List<List<Symbol>> partitionedOutputLayouts = new ArrayList<>();
List<PlanWithProperties> plannedChildren = new ArrayList<>();
for (int i = 0; i < node.getSources().size(); i++) {
PlanWithProperties child = node.getSources().get(i).accept(this, PreferredProperties.any());
plannedChildren.add(child);
if (child.getProperties().isSingleNode()) {
unpartitionedChildren.add(child.getNode());
unpartitionedOutputLayouts.add(node.sourceOutputLayout(i));
}
else {
partitionedChildren.add(child.getNode());
// union may drop or duplicate symbols from the input so we must provide an exact mapping
partitionedOutputLayouts.add(node.sourceOutputLayout(i));
}
}
PlanNode result;
if (!partitionedChildren.isEmpty() && unpartitionedChildren.isEmpty()) {
// parent does not have preference or prefers some partitioning without any explicit partitioning - just use
// children partitioning and don't GATHER partitioned inputs
// TODO: add FIXED_ARBITRARY_DISTRIBUTION support on non empty unpartitionedChildren
if (!parentGlobal.isPresent() || parentGlobal.get().isDistributed()) {
return arbitraryDistributeUnion(node, plannedChildren, partitionedChildren, partitionedOutputLayouts);
}
// add a gathering exchange above partitioned inputs
result = new ExchangeNode(
idAllocator.getNextId(),
GATHER,
REMOTE,
new PartitioningScheme(Partitioning.create(SINGLE_DISTRIBUTION, ImmutableList.of()), node.getOutputSymbols()),
partitionedChildren,
partitionedOutputLayouts);
}
else if (!unpartitionedChildren.isEmpty()) {
if (!partitionedChildren.isEmpty()) {
// add a gathering exchange above partitioned inputs and fold it into the set of unpartitioned inputs
// NOTE: new symbols for ExchangeNode output are required in order to keep plan logically correct with new local union below
List<Symbol> exchangeOutputLayout = node.getOutputSymbols().stream()
.map(outputSymbol -> symbolAllocator.newSymbol(outputSymbol.getName(), types.get(outputSymbol)))
.collect(toImmutableList());
result = new ExchangeNode(
idAllocator.getNextId(),
GATHER,
REMOTE,
new PartitioningScheme(Partitioning.create(SINGLE_DISTRIBUTION, ImmutableList.of()), exchangeOutputLayout),
partitionedChildren,
partitionedOutputLayouts);
unpartitionedChildren.add(result);
unpartitionedOutputLayouts.add(result.getOutputSymbols());
}
ImmutableListMultimap.Builder<Symbol, Symbol> mappings = ImmutableListMultimap.builder();
for (int i = 0; i < node.getOutputSymbols().size(); i++) {
for (List<Symbol> outputLayout : unpartitionedOutputLayouts) {
mappings.put(node.getOutputSymbols().get(i), outputLayout.get(i));
}
}
// add local union for all unpartitioned inputs
result = new UnionNode(node.getId(), unpartitionedChildren, mappings.build(), ImmutableList.copyOf(mappings.build().keySet()));
}
else {
throw new IllegalStateException("both unpartitionedChildren partitionedChildren are empty");
}
return new PlanWithProperties(
result,
ActualProperties.builder()
.global(singleStreamPartition())
.build());
}
/**
 * Plans a UNION whose inputs are all distributed (no single-node children) without
 * forcing a GATHER: either keep the children as-is or repartition them arbitrarily.
 *
 * @param node the original union node
 * @param plannedChildren all planned children, in source order
 * @param partitionedChildren the subset of planned child nodes that are distributed
 * @param partitionedOutputLayouts exact per-child output symbol layouts (union may drop/duplicate symbols)
 * @return the planned union; properties are the default (unknown) ActualProperties
 */
private PlanWithProperties arbitraryDistributeUnion(
        UnionNode node,
        List<PlanWithProperties> plannedChildren,
        List<PlanNode> partitionedChildren,
        List<List<Symbol>> partitionedOutputLayouts)
{
    // TODO: can we insert LOCAL exchange for one child SOURCE distributed and another HASH distributed?
    if (countSources(partitionedChildren) == 0) {
        // No source distributed child, we can use insert LOCAL exchange
        // TODO: if all children have the same partitioning, pass this partitioning to the parent
        // instead of "arbitraryPartition".
        // Keep the planned children unchanged; only the union node is rebuilt around them.
        return new PlanWithProperties(node.replaceChildren(
                plannedChildren.stream()
                        .map(PlanWithProperties::getNode)
                        .collect(toList())));
    }
    else {
        // Presto currently can not execute stage that has multiple table scans, so in that case
        // we have to insert REMOTE exchange with FIXED_ARBITRARY_DISTRIBUTION instead of local exchange
        return new PlanWithProperties(
                new ExchangeNode(
                        idAllocator.getNextId(),
                        REPARTITION,
                        REMOTE,
                        new PartitioningScheme(Partitioning.create(FIXED_ARBITRARY_DISTRIBUTION, ImmutableList.of()), node.getOutputSymbols()),
                        partitionedChildren,
                        partitionedOutputLayouts));
    }
}
@Override
public PlanWithProperties visitApply(ApplyNode node, PreferredProperties preferredProperties)
{
    // Apply nodes must be rewritten away by earlier optimizers; seeing one here is a planner bug.
    String nodeClass = node.getClass().getName();
    throw new IllegalStateException("Unexpected node: " + nodeClass);
}
@Override
public PlanWithProperties visitLateralJoin(LateralJoinNode node, PreferredProperties preferredProperties)
{
    // Lateral joins must be decorrelated before this optimizer runs; seeing one here is a planner bug.
    String nodeClass = node.getClass().getName();
    throw new IllegalStateException("Unexpected node: " + nodeClass);
}
// Plans the sole child of a single-source node, propagating the parent's preference.
private PlanWithProperties planChild(PlanNode node, PreferredProperties preferredProperties)
{
    PlanNode onlyChild = getOnlyElement(node.getSources());
    return onlyChild.accept(this, preferredProperties);
}
// Re-parents the node over the planned child and derives output properties from the child's.
private PlanWithProperties rebaseAndDeriveProperties(PlanNode node, PlanWithProperties child)
{
    PlanNode rebased = ChildReplacer.replaceChildren(node, ImmutableList.of(child.getNode()));
    return withDerivedProperties(rebased, child.getProperties());
}
// Re-parents the node over all planned children and derives output properties from theirs.
private PlanWithProperties rebaseAndDeriveProperties(PlanNode node, List<PlanWithProperties> children)
{
    List<PlanNode> childNodes = children.stream()
            .map(PlanWithProperties::getNode)
            .collect(toList());
    List<ActualProperties> childProperties = children.stream()
            .map(PlanWithProperties::getProperties)
            .collect(toList());
    PlanNode rebased = node.replaceChildren(childNodes);
    return new PlanWithProperties(rebased, deriveProperties(rebased, childProperties));
}
// Pairs a node with the properties derived from the given input properties.
private PlanWithProperties withDerivedProperties(PlanNode node, ActualProperties inputProperties)
{
    ActualProperties derived = deriveProperties(node, inputProperties);
    return new PlanWithProperties(node, derived);
}
// Convenience overload: derive properties for a node with a single input.
private ActualProperties deriveProperties(PlanNode result, ActualProperties inputProperties)
{
    return deriveProperties(result, ImmutableList.of(inputProperties));
}
/**
 * Derives the output properties of {@code result} from its inputs' properties and
 * asserts the null-replication invariant.
 *
 * @param result the plan node whose output properties are being derived
 * @param inputProperties properties of the node's inputs, in source order
 * @return the derived output properties
 */
private ActualProperties deriveProperties(PlanNode result, List<ActualProperties> inputProperties)
{
    // TODO: move this logic to PlanSanityChecker once PropertyDerivations.deriveProperties fully supports local exchanges
    ActualProperties outputProperties = PropertyDerivations.deriveProperties(result, inputProperties, metadata, session, types, parser);
    // Invariant: if any input has replicated nulls, the output must too — unless the node
    // is a SemiJoinNode, the only node allowed to strip null replication.
    verify(result instanceof SemiJoinNode || inputProperties.stream().noneMatch(ActualProperties::isNullsAndAnyReplicated) || outputProperties.isNullsAndAnyReplicated(),
            "SemiJoinNode is the only node that can strip null replication");
    return outputProperties;
}
}
// Maps each output symbol to its input symbol for assignments that are plain
// symbol references (identity projections); non-trivial expressions are skipped.
private static Map<Symbol, Symbol> computeIdentityTranslations(Assignments assignments)
{
    Map<Symbol, Symbol> outputToInput = new HashMap<>();
    assignments.getMap().forEach((output, expression) -> {
        if (expression instanceof SymbolReference) {
            outputToInput.put(output, Symbol.from(expression));
        }
    });
    return outputToInput;
}
/**
 * Builds a comparator that orders candidate plans by how well they suit streaming
 * execution for the given preference: first whether they enable a local optimization,
 * then whether they meet the partitioning requirements, then by match quality.
 */
@VisibleForTesting
static Comparator<ActualProperties> streamingExecutionPreference(PreferredProperties preferred)
{
    // Calculating the matches can be a bit expensive, so cache the results between comparisons
    LoadingCache<List<LocalProperty<Symbol>>, List<Optional<LocalProperty<Symbol>>>> matchCache = CacheBuilder.newBuilder()
            .build(CacheLoader.from(actualProperties -> LocalProperties.match(actualProperties, preferred.getLocalProperties())));
    return (actual1, actual2) -> {
        List<Optional<LocalProperty<Symbol>>> matchLayout1 = matchCache.getUnchecked(actual1.getLocalProperties());
        List<Optional<LocalProperty<Symbol>>> matchLayout2 = matchCache.getUnchecked(actual2.getLocalProperties());
        // ComparisonChain short-circuits: each criterion only breaks ties of the previous one.
        return ComparisonChain.start()
                .compareTrueFirst(hasLocalOptimization(preferred.getLocalProperties(), matchLayout1), hasLocalOptimization(preferred.getLocalProperties(), matchLayout2))
                .compareTrueFirst(meetsPartitioningRequirements(preferred, actual1), meetsPartitioningRequirements(preferred, actual2))
                .compare(matchLayout1, matchLayout2, matchedLayoutPreference())
                .result();
    };
}
// True when the match altered the first desired local property in any way,
// i.e. at least one local optimization (e.g. partial sort elision) can be applied.
private static <T> boolean hasLocalOptimization(List<LocalProperty<T>> desiredLayout, List<Optional<LocalProperty<T>>> matchResult)
{
    checkArgument(desiredLayout.size() == matchResult.size());
    if (matchResult.isEmpty()) {
        return false;
    }
    Optional<LocalProperty<T>> unmodified = Optional.of(desiredLayout.get(0));
    return !unmodified.equals(matchResult.get(0));
}
// Checks whether the actual global properties satisfy the preferred partitioning:
// no preference -> always satisfied; single-node preference -> must be single node;
// distributed with no specific columns -> any distributed layout; otherwise the
// stream must be partitioned on the requested columns.
private static boolean meetsPartitioningRequirements(PreferredProperties preferred, ActualProperties actual)
{
    Optional<PreferredProperties.Global> globalPreference = preferred.getGlobalProperties();
    if (!globalPreference.isPresent()) {
        return true;
    }
    PreferredProperties.Global preferredGlobal = globalPreference.get();
    if (!preferredGlobal.isDistributed()) {
        return actual.isSingleNode();
    }
    if (!preferredGlobal.getPartitioningProperties().isPresent()) {
        return !actual.isSingleNode();
    }
    return actual.isStreamPartitionedOn(preferredGlobal.getPartitioningProperties().get().getPartitioningColumns());
}
// Prefer the match result that satisfied the most requirements
private static <T> Comparator<List<Optional<LocalProperty<T>>>> matchedLayoutPreference()
{
    return (matchLayout1, matchLayout2) -> {
        // Walk both match lists in lock-step; the first position where exactly one
        // side is present (or both are present with different widths) decides the order.
        Iterator<Optional<LocalProperty<T>>> match1Iterator = matchLayout1.iterator();
        Iterator<Optional<LocalProperty<T>>> match2Iterator = matchLayout2.iterator();
        while (match1Iterator.hasNext() && match2Iterator.hasNext()) {
            Optional<LocalProperty<T>> match1 = match1Iterator.next();
            Optional<LocalProperty<T>> match2 = match2Iterator.next();
            if (match1.isPresent() && match2.isPresent()) {
                // Both unsatisfied at this position: prefer the narrower residual
                // (fewer columns left unsatisfied). NOTE(review): a present entry here
                // appears to mean an UNsatisfied requirement — confirm against LocalProperties.match.
                return Integer.compare(match1.get().getColumns().size(), match2.get().getColumns().size());
            }
            else if (match1.isPresent()) {
                return 1;
            }
            else if (match2.isPresent()) {
                return -1;
            }
        }
        checkState(!match1Iterator.hasNext() && !match2Iterator.hasNext()); // Should be the same size
        return 0;
    };
}
/**
 * Immutable pair of a planned subtree and the physical properties its output
 * is known to have.
 */
@VisibleForTesting
static class PlanWithProperties
{
    private final PlanNode node;
    private final ActualProperties properties;

    /** Wraps a node with default (empty) properties. */
    public PlanWithProperties(PlanNode node)
    {
        this(node, ActualProperties.builder().build());
    }

    public PlanWithProperties(PlanNode node, ActualProperties properties)
    {
        this.node = node;
        this.properties = properties;
    }

    public PlanNode getNode()
    {
        return node;
    }

    public ActualProperties getProperties()
    {
        return properties;
    }
}
/**
 * Evaluates a filter predicate against concrete column-value bindings to decide
 * whether a candidate (e.g. a partition) can possibly satisfy the predicate.
 */
private class LayoutConstraintEvaluator
{
    // Symbol -> column mapping used to resolve symbol references against the bindings.
    private final Map<Symbol, ColumnHandle> assignments;
    // Pre-built optimizer for the predicate; per-call evaluation only supplies bindings.
    private final ExpressionInterpreter evaluator;

    public LayoutConstraintEvaluator(Session session, Map<Symbol, Type> types, Map<Symbol, ColumnHandle> assignments, Expression expression)
    {
        this.assignments = assignments;
        Map<NodeRef<Expression>, Type> expressionTypes = getExpressionTypes(session, metadata, parser, types, expression, emptyList());
        evaluator = ExpressionInterpreter.expressionOptimizer(expression, metadata, session, expressionTypes);
    }

    /**
     * @param bindings concrete values for (a subset of) the referenced columns
     * @return false when the predicate provably evaluates to FALSE or NULL under
     *         the bindings (candidate can be pruned); true otherwise
     */
    private boolean isCandidate(Map<ColumnHandle, NullableValue> bindings)
    {
        LookupSymbolResolver inputs = new LookupSymbolResolver(assignments, bindings);
        // If any conjuncts evaluate to FALSE or null, then the whole predicate will never be true and so the partition should be pruned
        Object optimized = evaluator.optimize(inputs);
        if (Boolean.FALSE.equals(optimized) || optimized == null || optimized instanceof NullLiteral) {
            return false;
        }
        return true;
    }
}
}
| |
/* ./src/main/java/bdglue2/common/PropertyManagement.java
*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package bdglue2.common;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.Enumeration;
import java.util.Properties;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Manage properties for the application.
*/
public class PropertyManagement {
    private static final Logger LOG =
        LoggerFactory.getLogger(PropertyManagement.class);

    // Lazily created singleton managed by getProperties(String, String).
    private static PropertyManagement myProperties = null;

    // Effective properties; unset keys fall back to 'defaults' (Properties chaining).
    private Properties properties;
    // Default properties loaded from the classpath resource.
    private Properties defaults;

    private PropertyManagement() {
        super();
    }

    /**
     * Load the properties from the properties file.
     *
     * @param defaultProperties name of the default properties file
     * @param externalProperties name of the external properties file
     */
    public PropertyManagement(String defaultProperties, String externalProperties) {
        super();
        loadProperties(defaultProperties, externalProperties);
    }

    /**
     * Set a property for this environment. If the property doesn't exist,
     * a new one is created.
     *
     * @param property the name of the property
     * @param value the value associated with this property
     */
    public void setProperty(String property, String value) {
        LOG.info("setProperty: {} : {} ", property, value);
        properties.setProperty(property, value);
    }

    /**
     * Get the value of the specified property.
     *
     * @param property the name of the property whose value is to be
     * retrieved.
     * @return the trimmed value of the requested property, or null if
     * the property is not set
     */
    public String getProperty(String property) {
        String rval = properties.getProperty(property);
        if (rval == null) {
            LOG.error("getProperty: Property not found: {}", property);
            // BUGFIX: previously fell through to rval.trim() and threw a
            // NullPointerException for every missing property.
            return null;
        }
        return rval.trim();
    }

    /**
     * Get the value of the specified property, setting a default value
     * if the property has not been initialized.
     *
     * @param property the name of the property
     * @param defaultValue the default value to set if it has not been initialized.
     * @return the value of the requested property.
     */
    public String getProperty(String property, String defaultValue) {
        String rval;
        rval = properties.getProperty(property);
        if (rval == null) {
            // BUGFIX: the format string was missing the second {} placeholder,
            // so the default value was never logged.
            LOG.info("getProperty: Property not set: {}. Using default: {}",
                     property, defaultValue);
            properties.setProperty(property, defaultValue);
            rval = defaultValue;
        }
        return rval.trim();
    }

    /**
     * Return the requested property as an integer, setting a default
     * value if it hasn't been initialized.
     *
     * @param property the name of the property
     * @param defaultValue the default value
     * @return the value of the property as an integer
     * @throws NumberFormatException if the value does not parse as an integer
     */
    public int asInt(String property, String defaultValue) {
        String value = getProperty(property, defaultValue);
        return Integer.parseInt(value.trim());
    }

    /**
     * Return the requested property as a boolean value, setting a default
     * value if it hasn't been initialized. Any value other than "true"
     * (case-insensitive) parses as false.
     *
     * @param property the name of the property
     * @param defaultValue the default value
     * @return the value of the property as a boolean value.
     */
    public boolean asBoolean(String property, String defaultValue) {
        String value = getProperty(property, defaultValue);
        return Boolean.parseBoolean(value.trim());
    }

    /**
     * Get a list of all of the property names, including those
     * inherited from the defaults.
     *
     * @return a Set of those names.
     */
    public Set<String> getKeyNames() {
        return properties.stringPropertyNames();
    }

    /**
     * Load the properties from their storage locations.
     *
     * @param defaultProperties name of the default properties resource
     * @param externalProperties name of the system property that has the
     * file name of the external properties file.
     */
    public void loadProperties(String defaultProperties,
                               String externalProperties) {
        defaults = new Properties();
        loadDefaultProperties(defaultProperties);
        // External properties overlay the defaults via Properties chaining.
        properties = new Properties(defaults);
        loadExternalProperties(externalProperties);
    }

    /**
     * Load the default properties for this project.
     *
     * @param property the name of the class resource containing the properties
     */
    private void loadDefaultProperties(String property) {
        // try-with-resources: the resource stream was previously never closed.
        try (InputStream in = this.getClass().getResourceAsStream(property)) {
            if (in == null) {
                // BUGFIX: Properties.load(null) would throw NullPointerException;
                // report the missing resource cleanly instead.
                LOG.error("loadDefaultProperties: resource not found {}", property);
                return;
            }
            LOG.info("loadDefaultProperties: properties resource found");
            defaults.load(in);
        } catch (IOException e) {
            LOG.error("loadDefaultProperties: failed to load resource {}",
                      property, e);
        }
    }

    /**
     * Load properties from an external properties file. It first
     * looks for a system property that tells us where to look,
     * and if not found looks for a file that has the name of
     * the system property.
     *
     * @param property the property that contains the file name.
     */
    private void loadExternalProperties(String property) {
        String externalFileName = System.getProperty(property);
        if (externalFileName == null) {
            // property not found, so see if we can just
            // find a file by the property name
            LOG.info("loadExternaProperties: " +
                     "System property {} not defined. " +
                     "Looking for default file.",
                     property);
            externalFileName = property;
        }
        // try-with-resources: the file stream was previously never closed.
        try (InputStream fin = new FileInputStream(new File(externalFileName))) {
            properties.load(fin);
            LOG.info("loadExternalProperties: {}", externalFileName);
        } catch (FileNotFoundException e) {
            LOG.warn("loadExternaProperties: file not found: {}",
                     externalFileName);
        } catch (IOException e) {
            LOG.error("loadExternalProperties: IO Exception", e);
        }
    }

    /**
     * Return a subset of the properties that begin with "prefix".
     *
     * @param prefix the prefix of the properties to return.
     * @param trimPrefix true: return the subset with "prefix" removed.
     * @return the subset of the properties.
     */
    public Properties getPropertySubset(String prefix, boolean trimPrefix) {
        Properties subset = new Properties();
        if (prefix.charAt(prefix.length() - 1) != '.') {
            // prefix does not end in a dot, so add one.
            prefix = prefix + '.';
        }
        // BUGFIX: previously read the static singleton (myProperties), which is
        // null when this instance was constructed directly; use this instance.
        for (String key : getKeyNames()) {
            if (!key.startsWith(prefix)) {
                continue;
            }
            if (trimPrefix) {
                // remove the prefix from the result and return that.
                subset.setProperty(key.substring(prefix.length()), getProperty(key));
            } else {
                // return the property as is.
                subset.setProperty(key, getProperty(key));
            }
        }
        return subset;
    }

    /**
     * Print the properties for this instance of PropertyManagement.
     */
    public void printProperties() {
        printProperties(properties);
    }

    /**
     * Print the properties found in the specified
     * instance of Properties in sorted order.
     *
     * @param props the properties to print
     */
    public void printProperties(Properties props) {
        SortedSet<String> keySet = new TreeSet<String>();
        keySet.addAll(props.stringPropertyNames());
        LOG.info("************************");
        LOG.info("*** Begin Properties ***");
        LOG.info("************************");
        for (String s : keySet) {
            LOG.info("*** {} = {}", s, props.getProperty(s));
        }
        LOG.info("*************************");
        LOG.info("*** End of Properties ***");
        LOG.info("*************************");
    }

    /**
     * Typically, we want a singleton in our application that
     * will hold all of our properties. This supports
     * that use case.
     *
     * @param defaultProperties the name of the default properties file
     * @param externalProperties the name of the external properties file
     * @return the singleton instance of PropertyManagement
     */
    // synchronized: lazy init was previously racy under concurrent first calls.
    public static synchronized PropertyManagement getProperties(String defaultProperties,
                                                                String externalProperties) {
        if (myProperties == null) {
            myProperties = new PropertyManagement(defaultProperties,
                                                  externalProperties);
        }
        return myProperties;
    }

    /**
     * Return the singleton instance. This method assumes that the
     * overloaded method of the same name has already been called
     * to construct the properties instance.
     *
     * @return the singleton properties instance, or null if not yet created
     */
    public static PropertyManagement getProperties() {
        return myProperties;
    }
}
| |
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.internal.logging.sink;
import net.jcip.annotations.ThreadSafe;
import org.gradle.api.logging.LogLevel;
import org.gradle.api.logging.StandardOutputListener;
import org.gradle.api.logging.configuration.ConsoleOutput;
import org.gradle.internal.Factory;
import org.gradle.internal.event.ListenerBroadcast;
import org.gradle.internal.logging.config.LoggingRouter;
import org.gradle.internal.logging.console.AnsiConsole;
import org.gradle.internal.logging.console.BuildLogLevelFilterRenderer;
import org.gradle.internal.logging.console.BuildStatusRenderer;
import org.gradle.internal.logging.console.ColorMap;
import org.gradle.internal.logging.console.Console;
import org.gradle.internal.logging.console.ConsoleLayoutCalculator;
import org.gradle.internal.logging.console.DefaultColorMap;
import org.gradle.internal.logging.console.DefaultWorkInProgressFormatter;
import org.gradle.internal.logging.console.StyledTextOutputBackedRenderer;
import org.gradle.internal.logging.console.ThrottlingOutputEventListener;
import org.gradle.internal.logging.console.UserInputConsoleRenderer;
import org.gradle.internal.logging.console.UserInputStandardOutputRenderer;
import org.gradle.internal.logging.console.WorkInProgressRenderer;
import org.gradle.internal.logging.events.EndOutputEvent;
import org.gradle.internal.logging.events.LogLevelChangeEvent;
import org.gradle.internal.logging.events.OutputEvent;
import org.gradle.internal.logging.events.OutputEventListener;
import org.gradle.internal.logging.events.ProgressCompleteEvent;
import org.gradle.internal.logging.events.ProgressEvent;
import org.gradle.internal.logging.events.ProgressStartEvent;
import org.gradle.internal.logging.format.PrettyPrefixedLogHeaderFormatter;
import org.gradle.internal.logging.text.StreamBackedStandardOutputListener;
import org.gradle.internal.logging.text.StreamingStyledTextOutput;
import org.gradle.internal.nativeintegration.console.ConsoleMetaData;
import org.gradle.internal.nativeintegration.console.FallbackConsoleMetaData;
import org.gradle.internal.time.Clock;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.util.concurrent.atomic.AtomicReference;
/**
* A {@link OutputEventListener} implementation which renders output events to various
* destinations. This implementation is thread-safe.
*/
@ThreadSafe
public class OutputEventRenderer implements OutputEventListener, LoggingRouter {
    // Guards mutation of listener/console wiring; the current log level is kept
    // in an AtomicReference so onOutput can read it without taking the lock.
    private final Object lock = new Object();
    private final AtomicReference<LogLevel> logLevel = new AtomicReference<LogLevel>(LogLevel.LIFECYCLE);
    private final Clock clock;
    // All destination chains (stdout, stderr, attached consoles, extra listeners).
    private final ListenerBroadcast<OutputEventListener> formatters = new ListenerBroadcast<OutputEventListener>(OutputEventListener.class);
    private final ListenerBroadcast<StandardOutputListener> stdoutListeners = new ListenerBroadcast<StandardOutputListener>(StandardOutputListener.class);
    private final ListenerBroadcast<StandardOutputListener> stderrListeners = new ListenerBroadcast<StandardOutputListener>(StandardOutputListener.class);
    private ColorMap colourMap;
    // Original System.out/err captured by attachSystemOutAndErr().
    private OutputStream originalStdOut;
    private OutputStream originalStdErr;
    // The listeners currently wrapping System.out/err, if any.
    private StreamBackedStandardOutputListener stdOutListener;
    private StreamBackedStandardOutputListener stdErrListener;
    // The console rendering chain currently attached, if any.
    private OutputEventListener console;

    public OutputEventRenderer(final Clock clock) {
        this.clock = clock;
        // Build the stdout/stderr rendering chains lazily (on first event), so
        // listeners registered after construction are picked up by getSource().
        OutputEventListener stdOutChain = new LazyListener(new Factory<OutputEventListener>() {
            @Override
            public OutputEventListener create() {
                return onNonError(new UserInputStandardOutputRenderer(new BuildLogLevelFilterRenderer(new ProgressLogEventGenerator(new StyledTextOutputBackedRenderer(new StreamingStyledTextOutput(stdoutListeners.getSource())), false)), clock));
            }
        });
        formatters.add(stdOutChain);
        OutputEventListener stdErrChain = new LazyListener(new Factory<OutputEventListener>() {
            @Override
            public OutputEventListener create() {
                return onError(new BuildLogLevelFilterRenderer(new ProgressLogEventGenerator(new StyledTextOutputBackedRenderer(new StreamingStyledTextOutput(stderrListeners.getSource())), false)));
            }
        });
        formatters.add(stdErrChain);
    }

    @Override
    public Snapshot snapshot() {
        synchronized (lock) {
            // Currently only snapshot the console output listener. Should snapshot all output listeners, and cleanup in restore()
            return new SnapshotImpl(logLevel.get(), console);
        }
    }

    @Override
    public void restore(Snapshot state) {
        synchronized (lock) {
            SnapshotImpl snapshot = (SnapshotImpl) state;
            if (snapshot.logLevel != logLevel.get()) {
                configure(snapshot.logLevel);
            }
            // TODO - also close console when it is replaced
            if (snapshot.console != console) {
                if (snapshot.console == null) {
                    // Detach the console added since the snapshot; EndOutputEvent
                    // lets the console chain finish/flush its output.
                    formatters.remove(console);
                    console.onOutput(new EndOutputEvent());
                    console = null;
                } else {
                    throw new UnsupportedOperationException("Cannot restore previous console. This is not implemented yet.");
                }
            }
        }
    }

    // Lazily creates the colour map; creation is guarded but the return read is not.
    public ColorMap getColourMap() {
        synchronized (lock) {
            if (colourMap == null) {
                colourMap = new DefaultColorMap();
            }
        }
        return colourMap;
    }

    public OutputStream getOriginalStdOut() {
        return originalStdOut;
    }

    public OutputStream getOriginalStdErr() {
        return originalStdErr;
    }

    public void attachProcessConsole(ConsoleOutput consoleOutput) {
        synchronized (lock) {
            ConsoleConfigureAction.execute(this, consoleOutput);
        }
    }

    @Override
    public void attachAnsiConsole(OutputStream outputStream) {
        attachAnsiConsole(outputStream, false);
    }

    protected void attachAnsiConsole(OutputStream outputStream, boolean verbose) {
        synchronized (lock) {
            // FallbackConsoleMetaData: no real terminal metadata is available for
            // an arbitrary OutputStream, so conservative defaults are used.
            ConsoleMetaData consoleMetaData = FallbackConsoleMetaData.INSTANCE;
            OutputStreamWriter writer = new OutputStreamWriter(outputStream);
            Console console = new AnsiConsole(writer, writer, getColourMap(), consoleMetaData, true);
            addConsole(console, true, true, consoleMetaData, verbose);
        }
    }

    // Routes rendered output to the current System.out/System.err.
    public void attachSystemOutAndErr() {
        addStandardOutputListener();
        addStandardErrorListener();
    }

    private void addStandardOutputListener() {
        synchronized (lock) {
            originalStdOut = System.out;
            // Replace any previously attached System.out listener.
            if (stdOutListener != null) {
                stdoutListeners.remove(stdOutListener);
            }
            stdOutListener = new StreamBackedStandardOutputListener((Appendable) System.out);
            addStandardOutputListener(stdOutListener);
        }
    }

    private void addStandardErrorListener() {
        synchronized (lock) {
            originalStdErr = System.err;
            // Replace any previously attached System.err listener.
            if (stdErrListener != null) {
                stderrListeners.remove(stdErrListener);
            }
            stdErrListener = new StreamBackedStandardOutputListener((Appendable) System.err);
            addStandardErrorListener(stdErrListener);
        }
    }

    private void removeStandardOutputListener() {
        synchronized (lock) {
            if (stdOutListener != null) {
                stdoutListeners.remove(stdOutListener);
                stdOutListener = null;
            }
        }
    }

    private void removeStandardErrorListener() {
        synchronized (lock) {
            if (stdErrListener != null) {
                stderrListeners.remove(stdErrListener);
                stdErrListener = null;
            }
        }
    }

    public void addOutputEventListener(OutputEventListener listener) {
        synchronized (lock) {
            formatters.add(listener);
        }
    }

    public void removeOutputEventListener(OutputEventListener listener) {
        synchronized (lock) {
            formatters.remove(listener);
        }
    }

    public OutputEventRenderer addConsole(Console console, boolean stdout, boolean stderr, ConsoleMetaData consoleMetaData) {
        return addConsole(console, stdout, stderr, consoleMetaData, false);
    }

    /**
     * Attaches a console rendering chain and detaches the plain stdout/stderr
     * listeners it replaces. stdout/stderr flags select which log streams the
     * console renders (both, non-error only, or error only).
     */
    public OutputEventRenderer addConsole(Console console, boolean stdout, boolean stderr, ConsoleMetaData consoleMetaData, boolean verbose) {
        final OutputEventListener consoleChain = new ThrottlingOutputEventListener(
            new UserInputConsoleRenderer(
                new BuildStatusRenderer(
                    new WorkInProgressRenderer(
                        new BuildLogLevelFilterRenderer(
                            new GroupingProgressLogEventGenerator(new StyledTextOutputBackedRenderer(console.getBuildOutputArea()), clock, new PrettyPrefixedLogHeaderFormatter(), verbose)),
                        console.getBuildProgressArea(), new DefaultWorkInProgressFormatter(consoleMetaData), new ConsoleLayoutCalculator(consoleMetaData)),
                    console.getStatusBar(), console, consoleMetaData, clock),
                console),
            clock);
        synchronized (lock) {
            if (stdout && stderr) {
                this.console = consoleChain;
                removeStandardOutputListener();
                removeStandardErrorListener();
            } else if (stdout) {
                this.console = onNonError(consoleChain);
                removeStandardOutputListener();
            } else {
                // NOTE(review): stdout==false && stderr==false also lands here and is
                // treated as stderr-only — presumably callers never pass false/false.
                this.console = onError(consoleChain);
                removeStandardErrorListener();
            }
            // Prime the new chain with the current log level before it receives events.
            consoleChain.onOutput(new LogLevelChangeEvent(logLevel.get()));
            formatters.add(this.console);
        }
        return this;
    }

    // Forwards only ERROR events; a null log level is forwarded to both streams.
    private OutputEventListener onError(final OutputEventListener listener) {
        return new OutputEventListener() {
            public void onOutput(OutputEvent event) {
                if (event.getLogLevel() == LogLevel.ERROR || event.getLogLevel() == null) {
                    listener.onOutput(event);
                }
            }
        };
    }

    // Forwards everything except ERROR events (a null log level passes the first test).
    private OutputEventListener onNonError(final OutputEventListener listener) {
        return new OutputEventListener() {
            public void onOutput(OutputEvent event) {
                if (event.getLogLevel() != LogLevel.ERROR || event.getLogLevel() == null) {
                    listener.onOutput(event);
                }
            }
        };
    }

    public void addStandardErrorListener(StandardOutputListener listener) {
        synchronized (lock) {
            stderrListeners.add(listener);
        }
    }

    public void addStandardOutputListener(StandardOutputListener listener) {
        synchronized (lock) {
            stdoutListeners.add(listener);
        }
    }

    public void addStandardOutputListener(OutputStream outputStream) {
        addStandardOutputListener(new StreamBackedStandardOutputListener(outputStream));
    }

    public void addStandardErrorListener(OutputStream outputStream) {
        addStandardErrorListener(new StreamBackedStandardOutputListener(outputStream));
    }

    public void removeStandardOutputListener(StandardOutputListener listener) {
        synchronized (lock) {
            stdoutListeners.remove(listener);
        }
    }

    public void removeStandardErrorListener(StandardOutputListener listener) {
        synchronized (lock) {
            stderrListeners.remove(listener);
        }
    }

    // Changes the log level by routing a LogLevelChangeEvent through onOutput,
    // so all registered chains observe the change.
    public void configure(LogLevel logLevel) {
        onOutput(new LogLevelChangeEvent(logLevel));
    }

    @Override
    public void onOutput(OutputEvent event) {
        // Drop events below the current level, except progress events which must
        // always flow so progress state stays consistent.
        if (event.getLogLevel() != null && event.getLogLevel().compareTo(logLevel.get()) < 0 && !isProgressEvent(event)) {
            return;
        }
        if (event instanceof LogLevelChangeEvent) {
            LogLevelChangeEvent changeEvent = (LogLevelChangeEvent) event;
            LogLevel newLogLevel = changeEvent.getNewLogLevel();
            // No-op change: don't re-broadcast to the chains.
            if (newLogLevel == this.logLevel.get()) {
                return;
            }
            this.logLevel.set(newLogLevel);
        }
        synchronized (lock) {
            formatters.getSource().onOutput(event);
        }
    }

    private boolean isProgressEvent(OutputEvent event) {
        return event instanceof ProgressStartEvent || event instanceof ProgressEvent || event instanceof ProgressCompleteEvent;
    }

    // Immutable capture of the renderer state saved by snapshot().
    private static class SnapshotImpl implements Snapshot {
        private final LogLevel logLevel;
        private final OutputEventListener console;

        SnapshotImpl(LogLevel logLevel, OutputEventListener console) {
            this.logLevel = logLevel;
            this.console = console;
        }
    }

    // Defers construction of a listener chain until the first event arrives;
    // the factory is released afterwards so it can be garbage collected.
    private static class LazyListener implements OutputEventListener {
        private Factory<OutputEventListener> factory;
        private OutputEventListener delegate;

        private LazyListener(Factory<OutputEventListener> factory) {
            this.factory = factory;
        }

        @Override
        public void onOutput(OutputEvent event) {
            if (delegate == null) {
                delegate = factory.create();
                factory = null;
            }
            delegate.onOutput(event);
        }
    }
}
| |
/*
* Copyright 2009, Strategic Gains, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.strategicgains.restexpress.pipeline;
import static com.strategicgains.restexpress.ContentType.TEXT_PLAIN;
import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.CONTENT_TYPE;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.jboss.netty.channel.ChannelHandler.Sharable;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ExceptionEvent;
import org.jboss.netty.channel.MessageEvent;
import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
import org.jboss.netty.handler.codec.http.HttpRequest;
import org.jboss.netty.handler.codec.http.HttpResponseStatus;
import com.strategicgains.restexpress.Request;
import com.strategicgains.restexpress.Response;
import com.strategicgains.restexpress.exception.BadRequestException;
import com.strategicgains.restexpress.exception.ExceptionMapping;
import com.strategicgains.restexpress.exception.ExceptionUtils;
import com.strategicgains.restexpress.exception.ServiceException;
import com.strategicgains.restexpress.response.DefaultHttpResponseWriter;
import com.strategicgains.restexpress.response.HttpResponseWriter;
import com.strategicgains.restexpress.response.ResponseProcessor;
import com.strategicgains.restexpress.response.ResponseProcessorResolver;
import com.strategicgains.restexpress.route.Action;
import com.strategicgains.restexpress.route.RouteResolver;
import com.strategicgains.restexpress.util.HttpSpecification;
import com.strategicgains.restexpress.util.StringUtils;
/**
 * The core Netty upstream handler for RestExpress. For each inbound HTTP
 * message it: notifies observers, resolves the route and requested response
 * format, runs preprocessors, invokes the controller action, runs
 * postprocessors, serializes the response and writes it to the channel.
 * Any Throwable raised along the way is mapped (via the configured
 * {@link ExceptionMapping}) to an appropriate HTTP status and serialized as
 * an error response; the 'finally' processors and completion notification
 * always run.
 *
 * @author toddf
 * @since Nov 13, 2009
 */
@Sharable
public class DefaultRequestHandler
extends SimpleChannelUpstreamHandler
{
    // SECTION: INSTANCE VARIABLES

    private RouteResolver routeResolver;
    private ResponseProcessorResolver responseProcessorResolver;
    private HttpResponseWriter responseWriter;
    private List<Preprocessor> preprocessors = new ArrayList<Preprocessor>();
    private List<Postprocessor> postprocessors = new ArrayList<Postprocessor>();
    private List<Postprocessor> finallyProcessors = new ArrayList<Postprocessor>();
    private ExceptionMapping exceptionMap = new ExceptionMapping();
    private List<MessageObserver> messageObservers = new ArrayList<MessageObserver>();

    // SECTION: CONSTRUCTORS

    /**
     * Creates a handler that writes responses with the default
     * {@link DefaultHttpResponseWriter}.
     *
     * @param routeResolver resolves inbound requests to route actions.
     * @param responseProcessorResolver resolves requested formats to response processors.
     */
    public DefaultRequestHandler(RouteResolver routeResolver, ResponseProcessorResolver responseProcessorResolver)
    {
        this(routeResolver, responseProcessorResolver, new DefaultHttpResponseWriter());
    }

    /**
     * Creates a handler with an explicit response writer.
     *
     * @param routeResolver resolves inbound requests to route actions.
     * @param responseProcessorResolver resolves requested formats to response processors.
     * @param responseWriter writes the completed response to the channel.
     */
    public DefaultRequestHandler(RouteResolver routeResolver, ResponseProcessorResolver responseProcessorResolver,
        HttpResponseWriter responseWriter)
    {
        super();
        this.routeResolver = routeResolver;
        this.responseProcessorResolver = responseProcessorResolver;
        setResponseWriter(responseWriter);
    }

    // SECTION: MUTATORS

    /**
     * Registers one or more message observers. Duplicate registrations
     * (by equals()) are silently ignored.
     *
     * @param observers the observers to add.
     */
    public void addMessageObserver(MessageObserver... observers)
    {
        for (MessageObserver observer : observers)
        {
            if (!messageObservers.contains(observer))
            {
                messageObservers.add(observer);
            }
        }
    }

    /**
     * Maps a Throwable type to the ServiceException type it should be
     * converted into when thrown during processing.
     *
     * @param from the exception type thrown by application code.
     * @param to the ServiceException type it maps to.
     * @return this handler, for chaining.
     */
    public <T extends Throwable, U extends ServiceException> DefaultRequestHandler mapException(Class<T> from, Class<U> to)
    {
        exceptionMap.map(from, to);
        return this;
    }

    /**
     * Replaces the entire exception mapping.
     *
     * @param map the new mapping.
     * @return this handler, for chaining.
     */
    public DefaultRequestHandler setExceptionMap(ExceptionMapping map)
    {
        this.exceptionMap = map;
        return this;
    }

    /**
     * @return the writer used to send responses to the channel.
     */
    public HttpResponseWriter getResponseWriter()
    {
        return this.responseWriter;
    }

    /**
     * Sets the writer used to send responses to the channel.
     *
     * @param writer the response writer.
     */
    public void setResponseWriter(HttpResponseWriter writer)
    {
        this.responseWriter = writer;
    }

    // SECTION: SIMPLE-CHANNEL-UPSTREAM-HANDLER

    /**
     * Drives the full request-processing pipeline for one inbound message.
     * Exceptions are converted into error responses via
     * {@link #handleRestExpressException(ChannelHandlerContext, Throwable)};
     * 'finally' processors and completion notification run regardless of
     * success or failure.
     */
    @Override
    public void messageReceived(ChannelHandlerContext ctx, MessageEvent event)
    throws Exception
    {
        MessageContext context = createInitialContext(ctx, event);

        try
        {
            notifyReceived(context);
            resolveRoute(context);
            boolean isResponseProcessorResolved = resolveResponseProcessor(context);
            invokePreprocessors(preprocessors, context.getRequest());
            Object result = context.getAction().invoke(context.getRequest(), context.getResponse());

            if (result != null)
            {
                context.getResponse().setBody(result);
            }

            invokePostprocessors(postprocessors, context.getRequest(), context.getResponse());

            // The format fell back to the default processor AND the route does not
            // support the requested format: reject with 400 after the action ran.
            if (!isResponseProcessorResolved && !context.supportsRequestedFormat())
            {
                throw new BadRequestException("Requested representation format not supported: "
                    + context.getRequest().getFormat()
                    + ". Supported formats: " + StringUtils.join(", ", getSupportedFormats(context)));
            }

            serializeResponse(context);
            enforceHttpSpecification(context);
            writeResponse(ctx, context);
            notifySuccess(context);
        }
        catch(Throwable t)
        {
            handleRestExpressException(ctx, t);
        }
        finally
        {
            invokeFinallyProcessors(finallyProcessors, context.getRequest(), context.getResponse());
            notifyComplete(context);
        }
    }

    /**
     * @param context the current message context.
     * @return the formats supported by the resolved route if it declares any,
     *         otherwise all formats the resolver supports.
     */
    private Collection<String> getSupportedFormats(MessageContext context)
    {
        Collection<String> routeFormats = context.getSupportedRouteFormats();

        if (routeFormats != null && !routeFormats.isEmpty())
        {
            return routeFormats;
        }

        return responseProcessorResolver.getSupportedFormats();
    }

    /**
     * Applies HTTP-specification constraints (e.g. no body on 204) to the
     * outgoing response.
     *
     * @param context the current message context.
     */
    private void enforceHttpSpecification(MessageContext context)
    {
        HttpSpecification.enforce(context.getResponse());
    }

    /**
     * Converts a Throwable raised by the pipeline into an error response:
     * maps it to a ServiceException if possible (using its HTTP status),
     * otherwise reports the root cause as a 500, then notifies observers and
     * writes the serialized error to the channel.
     *
     * @param ctx the Netty channel-handler context (carries the MessageContext attachment).
     * @param cause the exception raised during processing.
     */
    private void handleRestExpressException(ChannelHandlerContext ctx, Throwable cause)
    throws Exception
    {
        MessageContext context = (MessageContext) ctx.getAttachment();
        resolveResponseProcessor(context);
        // The exception may have occurred before format resolution; fall back to
        // parsing the format directly from the URL so the error is serialized
        // in the format the client asked for.
        resolveResponseProcessorViaUrlFormat(context);
        Throwable rootCause = mapServiceException(cause);

        if (rootCause != null) // was/is a ServiceException
        {
            context.setHttpStatus(((ServiceException) rootCause).getHttpStatus());

            if (ServiceException.class.isAssignableFrom(rootCause.getClass()))
            {
                ((ServiceException) rootCause).augmentResponse(context.getResponse());
            }
        }
        else
        {
            rootCause = ExceptionUtils.findRootCause(cause);
            context.setHttpStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
        }

        context.setException(rootCause);
        notifyException(context);
        serializeResponse(context);
        writeResponse(ctx, context);
    }

    /**
     * Last-resort handler for exceptions raised outside the normal pipeline;
     * notifies observers (best-effort) and always closes the channel.
     */
    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent event)
    throws Exception
    {
        try
        {
            MessageContext messageContext = (MessageContext) ctx.getAttachment();

            if (messageContext != null)
            {
                messageContext.setException(event.getCause());
                notifyException(messageContext);
            }
        }
        catch(Throwable t)
        {
            // Never let observer failures prevent the channel from closing.
            System.err.print("DefaultRequestHandler.exceptionCaught() threw an exception.");
            t.printStackTrace();
        }
        finally
        {
            event.getChannel().close();
        }
    }

    /**
     * Builds the request/response pair for this message and attaches the
     * resulting context to the channel-handler context so the exception path
     * can recover it.
     */
    private MessageContext createInitialContext(ChannelHandlerContext ctx, MessageEvent event)
    {
        Request request = createRequest((HttpRequest) event.getMessage(), ctx);
        Response response = createResponse();
        MessageContext context = new MessageContext(request, response);
        ctx.setAttachment(context);
        return context;
    }

    /**
     * Resolve the ResponseProcessor based on the requested format (or the default, if none supplied).
     *
     * @param context the message context.
     * @return true if the ResponseProcessor was resolved. False if the ResponseProcessor was
     * resolved to the 'default' because it was unresolvable.
     */
    private boolean resolveResponseProcessor(MessageContext context)
    {
        boolean isResolved = true;

        if (context.hasResponseProcessor()) return isResolved;

        ResponseProcessor rp = responseProcessorResolver.resolve(context.getRequestedFormat());

        if (rp == null)
        {
            rp = responseProcessorResolver.getDefault();
            isResolved = false;
        }

        context.setResponseProcessor(rp);
        return isResolved;
    }

    /**
     * Fallback format resolution used on the exception path: if the URL
     * itself carries a format suffix that differs from the one already
     * resolved, switch to its processor so the error is rendered in the
     * client's requested representation.
     *
     * @param context the message context.
     */
    private void resolveResponseProcessorViaUrlFormat(MessageContext context)
    {
        String urlFormat = parseRequestedFormatFromUrl(context.getRequest());

        if (urlFormat != null && !urlFormat.isEmpty() && !urlFormat.equalsIgnoreCase(context.getRequestedFormat()))
        {
            ResponseProcessor rp = responseProcessorResolver.resolve(urlFormat);

            if (rp != null)
            {
                context.setResponseProcessor(rp);
            }
        }
    }

    /**
     * Extracts the requested serialization format from the URL path, if any
     * (e.g. "/users/42.json" yields "json"). The query string is ignored.
     *
     * @param request the inbound request.
     * @return the format suffix, or null if the path contains no dot.
     */
    private String parseRequestedFormatFromUrl(Request request)
    {
        String uri = request.getUrl();
        int queryDelimiterIndex = uri.indexOf('?');
        String path = (queryDelimiterIndex > 0 ? uri.substring(0, queryDelimiterIndex) : uri);
        // Use the LAST dot so dots embedded earlier in the path
        // (e.g. "/users/1.2.json") don't corrupt the format ("2.json").
        int formatDelimiterIndex = path.lastIndexOf('.');
        return (formatDelimiterIndex > 0 ? path.substring(formatDelimiterIndex + 1) : null);
    }

    /**
     * Resolves the inbound request to a route action and records it on the
     * context. Resolution failures propagate as exceptions.
     *
     * @param context the message context.
     */
    private void resolveRoute(MessageContext context)
    {
        Action action = routeResolver.resolve(context.getRequest());
        context.setAction(action);
    }

    /**
     * Notifies all observers that a request was received.
     *
     * @param context the message context.
     */
    private void notifyReceived(MessageContext context)
    {
        for (MessageObserver observer : messageObservers)
        {
            observer.onReceived(context.getRequest(), context.getResponse());
        }
    }

    /**
     * Notifies all observers that processing completed (success or failure).
     *
     * @param context the message context.
     */
    private void notifyComplete(MessageContext context)
    {
        for (MessageObserver observer : messageObservers)
        {
            observer.onComplete(context.getRequest(), context.getResponse());
        }
    }

    // SECTION: UTILITY -- PRIVATE

    /**
     * Notifies all observers of the exception recorded on the context.
     *
     * @param context the message context (its exception must be set).
     */
    private void notifyException(MessageContext context)
    {
        Throwable exception = context.getException();

        for (MessageObserver observer : messageObservers)
        {
            observer.onException(exception, context.getRequest(), context.getResponse());
        }
    }

    /**
     * Notifies all observers that processing succeeded.
     *
     * @param context the message context.
     */
    private void notifySuccess(MessageContext context)
    {
        for (MessageObserver observer : messageObservers)
        {
            observer.onSuccess(context.getRequest(), context.getResponse());
        }
    }

    /**
     * Registers a preprocessor, ignoring duplicates (by equals()).
     *
     * @param handler the preprocessor to add.
     */
    public void addPreprocessor(Preprocessor handler)
    {
        if (!preprocessors.contains(handler))
        {
            preprocessors.add(handler);
        }
    }

    /**
     * Registers a postprocessor, ignoring duplicates (by equals()).
     *
     * @param handler the postprocessor to add.
     */
    public void addPostprocessor(Postprocessor handler)
    {
        if (!postprocessors.contains(handler))
        {
            postprocessors.add(handler);
        }
    }

    /**
     * Registers a 'finally' processor (runs even on failure), ignoring duplicates.
     *
     * @param handler the processor to add.
     */
    public void addFinallyProcessor(Postprocessor handler)
    {
        if (!finallyProcessors.contains(handler))
        {
            finallyProcessors.add(handler);
        }
    }

    /**
     * Runs all preprocessors, then rewinds the request body so the action
     * sees an unread buffer even if a preprocessor consumed it.
     */
    private void invokePreprocessors(List<Preprocessor> processors, Request request)
    {
        for (Preprocessor handler : processors)
        {
            handler.process(request);
        }

        request.getBody().resetReaderIndex();
    }

    /**
     * Runs all postprocessors in order. A failure aborts the chain and
     * propagates to the exception path.
     */
    private void invokePostprocessors(List<Postprocessor> processors, Request request, Response response)
    {
        for (Postprocessor handler : processors)
        {
            handler.process(request, response);
        }
    }

    /**
     * Runs all 'finally' processors, best-effort: each processor's failure is
     * logged to stderr and does not prevent the remaining processors from running.
     */
    private void invokeFinallyProcessors(List<Postprocessor> processors, Request request, Response response)
    {
        for (Postprocessor handler : processors)
        {
            try
            {
                handler.process(request, response);
            }
            catch(Throwable t)
            {
                t.printStackTrace(System.err);
            }
        }
    }

    /**
     * Uses the exceptionMap to map a Throwable to a ServiceException, if possible.
     *
     * @param cause
     * @return Either a ServiceException or the root cause of the exception.
     */
    private Throwable mapServiceException(Throwable cause)
    {
        if (ServiceException.isAssignableFrom(cause))
        {
            return cause;
        }

        return exceptionMap.getExceptionFor(cause);
    }

    /**
     * Builds the RestExpress Request wrapper for the raw Netty HttpRequest.
     *
     * @param request the raw Netty request.
     * @param context the channel-handler context (currently unused).
     * @return a new Request.
     */
    private Request createRequest(HttpRequest request, ChannelHandlerContext context)
    {
        return new Request(request, routeResolver);
    }

    /**
     * @return a new, empty Response.
     */
    private Response createResponse()
    {
        return new Response();
    }

    /**
     * Writes the response on the context to the channel via the configured writer.
     */
    private void writeResponse(ChannelHandlerContext ctx, MessageContext context)
    {
        getResponseWriter().write(ctx, context.getRequest(), context.getResponse());
    }

    /**
     * Serializes the response body (when serialization applies) and ensures a
     * Content-Type header is present when the status allows one, defaulting
     * to text/plain.
     */
    private void serializeResponse(MessageContext context)
    {
        Response response = context.getResponse();

        if (shouldSerialize(context))
        {
            response.serialize();
        }

        if (HttpSpecification.isContentTypeAllowed(response))
        {
            if (!response.hasHeader(CONTENT_TYPE))
            {
                String contentType = (context.getContentType() == null ? TEXT_PLAIN : context.getContentType());
                response.addHeader(CONTENT_TYPE, contentType);
            }
        }
    }

    /**
     * @return true when the response asked to be serialized and a resolver exists.
     */
    private boolean shouldSerialize(MessageContext context)
    {
        return (context.shouldSerializeResponse() && (responseProcessorResolver != null));
    }
}
| |
// ***************************************************************************
// * Copyright 2013 Joseph Molnar
// *
// * Licensed under the Apache License, Version 2.0 (the "License");
// * you may not use this file except in compliance with the License.
// * You may obtain a copy of the License at
// *
// * http://www.apache.org/licenses/LICENSE-2.0
// *
// * Unless required by applicable law or agreed to in writing, software
// * distributed under the License is distributed on an "AS IS" BASIS,
// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// * See the License for the specific language governing permissions and
// * limitations under the License.
// ***************************************************************************
package com.talvish.tales.services.http;
import java.util.Arrays;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashSet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
import com.google.common.base.Strings;
/**
 * Wraps an http servlet request so header values can be overridden via
 * request parameters. Overrides are honoured only when the
 * {@code AttributeConstants.ENABLE_HEADER_OVERRIDES} attribute is set on the
 * request, and are supplied as parameters named
 * {@code ParameterConstants.OVERRIDE_HEADER} + header-name.
 *
 * NOTE(review): this class previously carried a large block of commented-out,
 * Jetty-derived code intended to transparently decompress gzip'd request
 * bodies; it was dead code (and referenced APIs unavailable here), so it has
 * been removed. Only the header-override behavior is implemented.
 *
 * @author cschertz
 * @author jmolnar
 *
 */
public class HttpRequestWrapper extends HttpServletRequestWrapper {
	/**
	 * Construct a new wrapper
	 *
	 * @param request the request being wrapped.
	 */
	public HttpRequestWrapper(HttpServletRequest request) {
		super(request);
	}

	/**
	 * Shared lookup for the parameter-based override of a single-valued header.
	 *
	 * @param name the header name.
	 * @return the override value, or null when overrides are disabled, the
	 *         name is empty, or no override parameter was supplied.
	 */
	private String getHeaderOverride(String name) {
		// for speed reasons we don't care what the attribute's value is, just that it exists
		if( this.getAttribute( AttributeConstants.ENABLE_HEADER_OVERRIDES ) != null && !Strings.isNullOrEmpty( name ) ) {
			return this.getParameter( ParameterConstants.OVERRIDE_HEADER + name );
		}
		return null;
	}

	/**
	 * Returns the header value, allowing parameter based overrides.
	 */
	@Override
	public String getHeader(String name) {
		String headerValue = getHeaderOverride( name );
		if( headerValue == null ) {
			return super.getHeader(name);
		} else {
			return headerValue;
		}
	}

	/**
	 * Returns the header values, allowing parameter based overrides.
	 */
	@Override
	public Enumeration<String> getHeaders(String name) {
		String[] headerValues = null;
		// for speed reasons we don't care what the attribute's value is, just that it exists
		if( this.getAttribute( AttributeConstants.ENABLE_HEADER_OVERRIDES ) != null && !Strings.isNullOrEmpty( name ) ) {
			headerValues = this.getParameterValues( ParameterConstants.OVERRIDE_HEADER + name );
		}
		if( headerValues == null ) {
			return super.getHeaders( name );
		} else {
			return Collections.enumeration( Arrays.asList( headerValues ) );
		}
	}

	/**
	 * Returns the integer header value, allowing parameter based overrides.
	 * Mirrors the servlet contract: a non-numeric (overridden) value results
	 * in a NumberFormatException.
	 */
	@Override
	public int getIntHeader(String name) {
		String headerValue = getHeaderOverride( name );
		if( headerValue == null ) {
			return super.getIntHeader(name);
		} else {
			return Integer.parseInt( headerValue );
		}
	}

	/**
	 * Returns the date header value, allowing parameter based overrides.
	 */
	@Override
	public long getDateHeader(String name) {
		String headerValue = getHeaderOverride( name );
		if( headerValue == null ) {
			return super.getDateHeader(name);
		} else {
			// use Jetty's mechanism for parsing dates (which seems long and convoluted)
			return HttpDateParser.parseDate( headerValue );
		}
	}

	/**
	 * Returns the list of header names, including those
	 * added via parameter overrides.
	 */
	@Override
	public Enumeration<String> getHeaderNames() {
		// for speed reasons we don't care what the attribute's value is, just that it exists
		if( this.getAttribute( AttributeConstants.ENABLE_HEADER_OVERRIDES ) != null ) {
			HashSet<String> set = new HashSet<String>( );
			// first extract the existing headers
			Enumeration<String> actualHeaders = super.getHeaderNames();
			while( actualHeaders.hasMoreElements( ) ) {
				set.add( actualHeaders.nextElement( ) );
			}
			// now look at the overridden headers
			Enumeration<String> parameters = this.getParameterNames( );
			String parameter;
			while( parameters.hasMoreElements( ) ) {
				parameter = parameters.nextElement();
				if( parameter.startsWith( ParameterConstants.OVERRIDE_HEADER ) ) {
					parameter = parameter.substring( ParameterConstants.OVERRIDE_HEADER.length() );
					if( parameter.length( ) > 0 ) {
						set.add( parameter );
					}
				}
			}
			return Collections.enumeration( set );
		} else {
			return super.getHeaderNames( );
		}
	}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.glue.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
 * <p>
 * Specifies a grok classifier to update when passed to <code>UpdateClassifier</code>.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/UpdateGrokClassifierRequest" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateGrokClassifierRequest implements Serializable, Cloneable, StructuredPojo {

    // The name of the GrokClassifier.
    private String name;
    // Identifier of the data format the classifier matches (Twitter, JSON,
    // Omniture logs, Amazon CloudWatch Logs, and so on).
    private String classification;
    // The grok pattern used by this classifier.
    private String grokPattern;
    // Optional custom grok patterns used by this classifier.
    private String customPatterns;

    /**
     * Sets the name of the <code>GrokClassifier</code>.
     *
     * @param name the name of the <code>GrokClassifier</code>.
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * @return the name of the <code>GrokClassifier</code>.
     */
    public String getName() {
        return name;
    }

    /**
     * Fluent variant of {@link #setName(String)}.
     *
     * @param name the name of the <code>GrokClassifier</code>.
     * @return this object, so method calls can be chained together.
     */
    public UpdateGrokClassifierRequest withName(String name) {
        this.name = name;
        return this;
    }

    /**
     * Sets the identifier of the data format the classifier matches, such as
     * Twitter, JSON, Omniture logs, Amazon CloudWatch Logs, and so on.
     *
     * @param classification the data-format identifier.
     */
    public void setClassification(String classification) {
        this.classification = classification;
    }

    /**
     * @return the identifier of the data format the classifier matches.
     */
    public String getClassification() {
        return classification;
    }

    /**
     * Fluent variant of {@link #setClassification(String)}.
     *
     * @param classification the data-format identifier.
     * @return this object, so method calls can be chained together.
     */
    public UpdateGrokClassifierRequest withClassification(String classification) {
        this.classification = classification;
        return this;
    }

    /**
     * Sets the grok pattern used by this classifier.
     *
     * @param grokPattern the grok pattern.
     */
    public void setGrokPattern(String grokPattern) {
        this.grokPattern = grokPattern;
    }

    /**
     * @return the grok pattern used by this classifier.
     */
    public String getGrokPattern() {
        return grokPattern;
    }

    /**
     * Fluent variant of {@link #setGrokPattern(String)}.
     *
     * @param grokPattern the grok pattern.
     * @return this object, so method calls can be chained together.
     */
    public UpdateGrokClassifierRequest withGrokPattern(String grokPattern) {
        this.grokPattern = grokPattern;
        return this;
    }

    /**
     * Sets the optional custom grok patterns used by this classifier.
     *
     * @param customPatterns the custom grok patterns.
     */
    public void setCustomPatterns(String customPatterns) {
        this.customPatterns = customPatterns;
    }

    /**
     * @return the optional custom grok patterns used by this classifier.
     */
    public String getCustomPatterns() {
        return customPatterns;
    }

    /**
     * Fluent variant of {@link #setCustomPatterns(String)}.
     *
     * @param customPatterns the custom grok patterns.
     * @return this object, so method calls can be chained together.
     */
    public UpdateGrokClassifierRequest withCustomPatterns(String customPatterns) {
        this.customPatterns = customPatterns;
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getName() != null) {
            sb.append("Name: ").append(getName()).append(",");
        }
        if (getClassification() != null) {
            sb.append("Classification: ").append(getClassification()).append(",");
        }
        if (getGrokPattern() != null) {
            sb.append("GrokPattern: ").append(getGrokPattern()).append(",");
        }
        if (getCustomPatterns() != null) {
            sb.append("CustomPatterns: ").append(getCustomPatterns());
        }
        return sb.append("}").toString();
    }

    /** Null-safe equality check for a pair of field values. */
    private static boolean fieldEquals(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof UpdateGrokClassifierRequest)) {
            return false;
        }
        UpdateGrokClassifierRequest that = (UpdateGrokClassifierRequest) obj;
        return fieldEquals(getName(), that.getName())
                && fieldEquals(getClassification(), that.getClassification())
                && fieldEquals(getGrokPattern(), that.getGrokPattern())
                && fieldEquals(getCustomPatterns(), that.getCustomPatterns());
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation (starting at 1, null -> 0) as the
        // generated form, expressed as a loop over the four fields.
        int hashCode = 1;
        for (Object field : new Object[] { getName(), getClassification(), getGrokPattern(), getCustomPatterns() }) {
            hashCode = 31 * hashCode + (field == null ? 0 : field.hashCode());
        }
        return hashCode;
    }

    @Override
    public UpdateGrokClassifierRequest clone() {
        try {
            return (UpdateGrokClassifierRequest) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.glue.model.transform.UpdateGrokClassifierRequestMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import java.util.Iterator;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.util.GSet;
import org.apache.hadoop.hdfs.util.LightWeightGSet;
/**
* This class maintains the map from a block to its metadata.
* block's metadata currently includes INode it belongs to and
* the datanodes that store the block.
*/
class BlocksMap {
/**
* Internal class for block metadata.
*/
static class BlockInfo extends Block implements LightWeightGSet.LinkedElement {
private INodeFile inode;
/** For implementing {@link LightWeightGSet.LinkedElement} interface */
private LightWeightGSet.LinkedElement nextLinkedElement;
/**
* This array contains triplets of references.
* For each i-th data-node the block belongs to
* triplets[3*i] is the reference to the DatanodeDescriptor
* and triplets[3*i+1] and triplets[3*i+2] are references
* to the previous and the next blocks, respectively, in the
* list of blocks belonging to this data-node.
*/
private Object[] triplets;
/**
 * Creates metadata for the given block, sized for the expected replication.
 *
 * @param blk the block to copy identity from.
 * @param replication expected number of replicas; one (datanode, prev, next)
 *        triplet is allocated per replica.
 */
BlockInfo(Block blk, int replication) {
    super(blk);
    this.triplets = new Object[3 * replication];
    this.inode = null;
}
/** @return the file INode this block belongs to, or null if unassigned. */
INodeFile getINode() {
    return this.inode;
}
/** Associates this block with the given file INode (may be null to detach). */
public void setINode(INodeFile inode) {
    this.inode = inode;
}
/**
 * Returns the data-node stored in the index-th triplet, or null if that
 * slot is empty.
 */
DatanodeDescriptor getDatanode(int index) {
    assert this.triplets != null : "BlockInfo is not initialized";
    assert index >= 0 && index*3 < triplets.length : "Index is out of bound";
    final DatanodeDescriptor datanode = (DatanodeDescriptor) triplets[index*3];
    assert datanode == null
        || DatanodeDescriptor.class.getName().equals(datanode.getClass().getName())
        : "DatanodeDescriptor is expected at " + index*3;
    return datanode;
}
/**
 * Returns the previous block in the index-th data-node's block list, or
 * null if this block is the head of that list.
 */
BlockInfo getPrevious(int index) {
    assert this.triplets != null : "BlockInfo is not initialized";
    assert index >= 0 && index*3+1 < triplets.length : "Index is out of bound";
    BlockInfo info = (BlockInfo)triplets[index*3+1];
    // Fixed assert message: the slot inspected is index*3+1 (previous-link),
    // not index*3 as the original message claimed.
    assert info == null ||
           BlockInfo.class.getName().equals(info.getClass().getName()) :
          "BlockInfo is expected at " + (index*3+1);
    return info;
}
/**
 * Returns the next block in the index-th data-node's block list, or null
 * if this block is the tail of that list.
 */
BlockInfo getNext(int index) {
    assert this.triplets != null : "BlockInfo is not initialized";
    assert index >= 0 && index*3+2 < triplets.length : "Index is out of bound";
    BlockInfo info = (BlockInfo)triplets[index*3+2];
    // Fixed assert message: the slot inspected is index*3+2 (next-link),
    // not index*3 as the original message claimed.
    assert info == null ||
           BlockInfo.class.getName().equals(info.getClass().getName()) :
          "BlockInfo is expected at " + (index*3+2);
    return info;
}
void setDatanode(int index, DatanodeDescriptor node) {
assert this.triplets != null : "BlockInfo is not initialized";
assert index >= 0 && index*3 < triplets.length : "Index is out of bound";
triplets[index*3] = node;
}
void setPrevious(int index, BlockInfo to) {
assert this.triplets != null : "BlockInfo is not initialized";
assert index >= 0 && index*3+1 < triplets.length : "Index is out of bound";
triplets[index*3+1] = to;
}
void setNext(int index, BlockInfo to) {
assert this.triplets != null : "BlockInfo is not initialized";
assert index >= 0 && index*3+2 < triplets.length : "Index is out of bound";
triplets[index*3+2] = to;
}
private int getCapacity() {
assert this.triplets != null : "BlockInfo is not initialized";
assert triplets.length % 3 == 0 : "Malformed BlockInfo";
return triplets.length / 3;
}
/**
* Ensure that there is enough space to include num more triplets.
* * @return first free triplet index.
*/
private int ensureCapacity(int num) {
assert this.triplets != null : "BlockInfo is not initialized";
int last = numNodes();
if(triplets.length >= (last+num)*3)
return last;
/* Not enough space left. Create a new array. Should normally
* happen only when replication is manually increased by the user. */
Object[] old = triplets;
triplets = new Object[(last+num)*3];
for(int i=0; i < last*3; i++) {
triplets[i] = old[i];
}
return last;
}
/**
* Count the number of data-nodes the block belongs to.
*/
int numNodes() {
assert this.triplets != null : "BlockInfo is not initialized";
assert triplets.length % 3 == 0 : "Malformed BlockInfo";
for(int idx = getCapacity()-1; idx >= 0; idx--) {
if(getDatanode(idx) != null)
return idx+1;
}
return 0;
}
/**
* Add data-node this block belongs to.
*/
boolean addNode(DatanodeDescriptor node) {
if(findDatanode(node) >= 0) // the node is already there
return false;
// find the last null node
int lastNode = ensureCapacity(1);
setDatanode(lastNode, node);
setNext(lastNode, null);
setPrevious(lastNode, null);
return true;
}
/**
* Remove data-node from the block.
*/
boolean removeNode(DatanodeDescriptor node) {
int dnIndex = findDatanode(node);
if(dnIndex < 0) // the node is not found
return false;
assert getPrevious(dnIndex) == null && getNext(dnIndex) == null :
"Block is still in the list and must be removed first.";
// find the last not null node
int lastNode = numNodes()-1;
// replace current node triplet by the lastNode one
setDatanode(dnIndex, getDatanode(lastNode));
setNext(dnIndex, getNext(lastNode));
setPrevious(dnIndex, getPrevious(lastNode));
// set the last triplet to null
setDatanode(lastNode, null);
setNext(lastNode, null);
setPrevious(lastNode, null);
return true;
}
/**
* Find specified DatanodeDescriptor.
* @param dn
* @return index or -1 if not found.
*/
int findDatanode(DatanodeDescriptor dn) {
int len = getCapacity();
for(int idx = 0; idx < len; idx++) {
DatanodeDescriptor cur = getDatanode(idx);
if(cur == dn)
return idx;
if(cur == null)
break;
}
return -1;
}
/**
* Insert this block into the head of the list of blocks
* related to the specified DatanodeDescriptor.
* If the head is null then form a new list.
* @return current block as the new head of the list.
*/
BlockInfo listInsert(BlockInfo head, DatanodeDescriptor dn) {
int dnIndex = this.findDatanode(dn);
assert dnIndex >= 0 : "Data node is not found: current";
assert getPrevious(dnIndex) == null && getNext(dnIndex) == null :
"Block is already in the list and cannot be inserted.";
this.setPrevious(dnIndex, null);
this.setNext(dnIndex, head);
if(head != null)
head.setPrevious(head.findDatanode(dn), this);
return this;
}
/**
* Remove this block from the list of blocks
* related to the specified DatanodeDescriptor.
* If this block is the head of the list then return the next block as
* the new head.
* @return the new head of the list or null if the list becomes
* empy after deletion.
*/
BlockInfo listRemove(BlockInfo head, DatanodeDescriptor dn) {
if(head == null)
return null;
int dnIndex = this.findDatanode(dn);
if(dnIndex < 0) // this block is not on the data-node list
return head;
BlockInfo next = this.getNext(dnIndex);
BlockInfo prev = this.getPrevious(dnIndex);
this.setNext(dnIndex, null);
this.setPrevious(dnIndex, null);
if(prev != null)
prev.setNext(prev.findDatanode(dn), next);
if(next != null)
next.setPrevious(next.findDatanode(dn), prev);
if(this == head) // removing the head
head = next;
return head;
}
int listCount(DatanodeDescriptor dn) {
int count = 0;
for(BlockInfo cur = this; cur != null;
cur = cur.getNext(cur.findDatanode(dn)))
count++;
return count;
}
boolean listIsConsistent(DatanodeDescriptor dn) {
// going forward
int count = 0;
BlockInfo next, nextPrev;
BlockInfo cur = this;
while(cur != null) {
next = cur.getNext(cur.findDatanode(dn));
if(next != null) {
nextPrev = next.getPrevious(next.findDatanode(dn));
if(cur != nextPrev) {
System.out.println("Inconsistent list: cur->next->prev != cur");
return false;
}
}
cur = next;
count++;
}
return true;
}
@Override
public LightWeightGSet.LinkedElement getNext() {
return nextLinkedElement;
}
@Override
public void setNext(LightWeightGSet.LinkedElement next) {
this.nextLinkedElement = next;
}
}
private static class NodeIterator implements Iterator<DatanodeDescriptor> {
private BlockInfo blockInfo;
private int nextIdx = 0;
NodeIterator(BlockInfo blkInfo) {
this.blockInfo = blkInfo;
}
public boolean hasNext() {
return blockInfo != null && nextIdx < blockInfo.getCapacity()
&& blockInfo.getDatanode(nextIdx) != null;
}
public DatanodeDescriptor next() {
return blockInfo.getDatanode(nextIdx++);
}
public void remove() {
throw new UnsupportedOperationException("Sorry. can't remove.");
}
}
/** Constant {@link LightWeightGSet} capacity. */
private final int capacity;
private GSet<Block, BlockInfo> blocks;
BlocksMap(int initialCapacity, float loadFactor) {
this.capacity = computeCapacity();
this.blocks = new LightWeightGSet<Block, BlockInfo>(capacity);
}
/**
* Let t = 2% of max memory.
* Let e = round(log_2 t).
* Then, we choose capacity = 2^e/(size of reference),
* unless it is outside the close interval [1, 2^30].
*/
private static int computeCapacity() {
//VM detection
//See http://java.sun.com/docs/hotspot/HotSpotFAQ.html#64bit_detection
final String vmBit = System.getProperty("sun.arch.data.model");
//2% of max memory
final double twoPC = Runtime.getRuntime().maxMemory()/50.0;
//compute capacity
final int e1 = (int)(Math.log(twoPC)/Math.log(2.0) + 0.5);
final int e2 = e1 - ("32".equals(vmBit)? 2: 3);
final int exponent = e2 < 0? 0: e2 > 30? 30: e2;
final int c = 1 << exponent;
LightWeightGSet.LOG.info("VM type = " + vmBit + "-bit");
LightWeightGSet.LOG.info("2% max memory = " + twoPC/(1 << 20) + " MB");
LightWeightGSet.LOG.info("capacity = 2^" + exponent
+ " = " + c + " entries");
return c;
}
void close() {
blocks = null;
}
/**
* Add BlockInfo if mapping does not exist.
*/
private BlockInfo checkBlockInfo(Block b, int replication) {
BlockInfo info = blocks.get(b);
if (info == null) {
info = new BlockInfo(b, replication);
blocks.put(info);
}
return info;
}
INodeFile getINode(Block b) {
BlockInfo info = blocks.get(b);
return (info != null) ? info.inode : null;
}
/**
* Add block b belonging to the specified file inode to the map.
*/
BlockInfo addINode(Block b, INodeFile iNode) {
BlockInfo info = checkBlockInfo(b, iNode.getReplication());
info.inode = iNode;
return info;
}
/**
* Remove INode reference from block b.
* If it does not belong to any file and data-nodes,
* then remove the block from the block map.
*/
void removeINode(Block b) {
BlockInfo info = blocks.get(b);
if (info != null) {
info.inode = null;
if (info.getDatanode(0) == null) { // no datanodes left
blocks.remove(b); // remove block from the map
}
}
}
/**
* Remove the block from the block map;
* remove it from all data-node lists it belongs to;
* and remove all data-node locations associated with the block.
*/
void removeBlock(BlockInfo blockInfo) {
if (blockInfo == null)
return;
blockInfo.inode = null;
for(int idx = blockInfo.numNodes()-1; idx >= 0; idx--) {
DatanodeDescriptor dn = blockInfo.getDatanode(idx);
dn.removeBlock(blockInfo); // remove from the list and wipe the location
}
blocks.remove(blockInfo); // remove block from the map
}
/** Returns the block object it it exists in the map. */
BlockInfo getStoredBlock(Block b) {
return blocks.get(b);
}
/** Returned Iterator does not support. */
Iterator<DatanodeDescriptor> nodeIterator(Block b) {
return new NodeIterator(blocks.get(b));
}
/** counts number of containing nodes. Better than using iterator. */
int numNodes(Block b) {
BlockInfo info = blocks.get(b);
return info == null ? 0 : info.numNodes();
}
/** returns true if the node does not already exists and is added.
* false if the node already exists.*/
boolean addNode(Block b, DatanodeDescriptor node, int replication) {
// insert into the map if not there yet
BlockInfo info = checkBlockInfo(b, replication);
// add block to the data-node list and the node to the block info
return node.addBlock(info);
}
/**
* Remove data-node reference from the block.
* Remove the block from the block map
* only if it does not belong to any file and data-nodes.
*/
boolean removeNode(Block b, DatanodeDescriptor node) {
BlockInfo info = blocks.get(b);
if (info == null)
return false;
// remove block from the data-node list and the node from the block info
boolean removed = node.removeBlock(info);
if (info.getDatanode(0) == null // no datanodes left
&& info.inode == null) { // does not belong to a file
blocks.remove(b); // remove block from the map
}
return removed;
}
int size() {
return blocks.size();
}
Iterable<BlockInfo> getBlocks() {
return blocks;
}
/**
* Check if the block exists in map
*/
boolean contains(Block block) {
return blocks.contains(block);
}
/**
* Check if the replica at the given datanode exists in map
*/
boolean contains(Block block, DatanodeDescriptor datanode) {
BlockInfo info = blocks.get(block);
if (info == null)
return false;
if (-1 == info.findDatanode(datanode))
return false;
return true;
}
/** Get the capacity of the HashMap that stores blocks */
public int getCapacity() {
return capacity;
}
}
| |
/*
* This file is part of the Jikes RVM project (http://jikesrvm.org).
*
* This file is licensed to You under the Common Public License (CPL);
* You may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.opensource.org/licenses/cpl1.0.php
*
* See the COPYRIGHT.txt file distributed with this work for information
* regarding copyright ownership.
*/
package org.jikesrvm.compilers.opt.liveness;
import org.jikesrvm.compilers.opt.ir.BasicBlock;
import org.jikesrvm.compilers.opt.ir.Instruction;
import org.jikesrvm.compilers.opt.ir.Register;
import org.jikesrvm.compilers.opt.ir.operand.RegisterOperand;
import org.jikesrvm.compilers.opt.regalloc.LiveIntervalElement;
/**
 * This class contains useful methods for managing liveIntervals.
 * All methods are static; state lives in the LiveIntervalElement lists
 * hung off each BasicBlock.
 */
final class LiveInterval {
  /** When true, trace list manipulation to System.out. */
  private static final boolean DEBUG = false;

  /**
   * This method iterates over each element in the the passed live set.
   * For each element, it checks if an existing live interval node for
   * the basic block passed exists. If one does not exist, it creates
   * a node with the end instruction being "inst". If one already exists
   * no action is taken.
   *
   * @param set the set of registers, encoded as a LiveSet object
   * @param block the basic block
   * @param inst the instruction where the register's live range ends,
   * null represents the end of the basic block
   */
  public static void createEndLiveRange(LiveSet set, BasicBlock block, Instruction inst) {
    if (DEBUG) {
      if (inst == null) {
        System.out.println("The following are live on exit of block " + block.getNumber() + "\n" + set);
      } else {
        System.out.println("The following are live ending at inst\n " +
                           inst +
                           " for block " +
                           block.getNumber() +
                           "\n" +
                           set);
      }
    }
    // delegate to the per-register overload for each member of the set
    LiveSetEnumerator lsEnum = set.enumerator();
    while (lsEnum.hasMoreElements()) {
      RegisterOperand regOp = lsEnum.nextElement();
      createEndLiveRange(regOp.getRegister(), block, inst);
    }
  }

  /**
   * This method checks if an existing unresolved live interval node, i.e.,
   * one that has an end instruction, but no beginning instruction, is present
   * for the register and basic block passed. If one does not exist, it
   * creates a node with the end instruction being <code>inst</code>. If one
   * already exists no action is taken.
   *
   * @param reg The register
   * @param block The basic block
   * @param inst The end instruction to use, if we have to create a node.
   */
  public static void createEndLiveRange(Register reg, BasicBlock block, Instruction inst) {
    if (DEBUG) {
      System.out.println("Marking Register " +
                         reg +
                         "'s live range as ENDing at instruction\n " +
                         inst +
                         " in block #" +
                         block.getNumber());
      printLiveIntervalList(block);
    }
    if (!containsUnresolvedElement(block, reg)) {
      // begin is null: the start will be filled in later by setStartLiveRange
      LiveIntervalElement elem = new LiveIntervalElement(reg, null, inst);

      // add elem to the list for the basic block
      block.prependLiveIntervalElement(elem);
    }
  }

  /**
   * This method finds the LiveInterval node for the register and basic block
   * passed. It then sets the begin instruction to the instruction passed
   * and moves the node to the proper place on the list block list.
   * (The block list is sorted by "begin" instruction.)
   *
   * @param reg the register of interest
   * @param inst the "begin" instruction
   * @param block the basic block of interest
   */
  public static void setStartLiveRange(Register reg, Instruction inst, BasicBlock block) {
    if (DEBUG) {
      System.out.println("Marking Register " +
                         reg +
                         "'s live range as STARTing at instruction\n " +
                         inst +
                         " in block #" +
                         block.getNumber());
    }

    // linear search for the unresolved (begin == null) element for reg,
    // tracking the predecessor so it can be unlinked below
    LiveIntervalElement prev = null;
    LiveIntervalElement elem = block.getFirstLiveIntervalElement();
    while (elem != null) {
      if (elem.getRegister() == reg && elem.getBegin() == null) {
        break;
      }

      prev = elem;
      elem = elem.getNext();
    }

    if (elem != null) {
      elem.setBegin(inst);

      // we want the list sorted by "begin" instruction.  Since
      // we are *assuming* that we are called in a traversal that is
      // visiting instructions backwards, the instr passed will always
      // be the most recent.  Thus, we move "elem" to the front of the list.
      if (prev != null) {
        // remove elem from current position
        prev.setNext(elem.getNext());

        // add it to the beginning
        block.prependLiveIntervalElement(elem);
      }

      // if prev == null, the element is already first in the list!
    } else {
      // if we didn't find it, it means we have a def that is not later
      // used, i.e., a dead assignment.  This may exist because the
      // instruction has side effects such as a function call or a PEI
      // In this case, we create a LiveIntervalElement node with beginning
      // and ending instruction "inst"
      LiveIntervalElement newElem = new LiveIntervalElement(reg, inst, inst);
      block.prependLiveIntervalElement(newElem);
    }

    if (DEBUG) {
      System.out.println("after add");
      printLiveIntervalList(block);
    }
  }

  /**
   * This method finds any LiveInterval node that does not have a start
   * instruction (it is null) and moves this node to the front of the list.
   *
   * @param block the basic block of interest
   */
  public static void moveUpwardExposedRegsToFront(BasicBlock block) {
    LiveIntervalElement prev = block.getFirstLiveIntervalElement();
    if (prev == null) {
      return;
    }

    // The first element is already at the front, so move on to the next one
    LiveIntervalElement elem = prev.getNext();

    while (elem != null) {
      if (elem.getBegin() == null) {
        // remove elem from current position
        prev.setNext(elem.getNext());

        // add it to the beginning
        block.prependLiveIntervalElement(elem);

        // the next victim is the *new* one after prev
        elem = prev.getNext();
      } else {
        prev = elem;
        elem = elem.getNext();
      }
    }
  }

  /**
   * Check to see if an unresolved LiveIntervalElement node for the register
   * passed exists for the basic block passed.
   *
   * @param block the block
   * @param reg the register of interest
   * @return <code>true</code> if it does or <code>false</code>
   * if it does not
   */
  private static boolean containsUnresolvedElement(BasicBlock block, Register reg) {
    if (DEBUG) {
      System.out.println("containsUnresolvedElement called, block: " + block + " register: " + reg);
      printLiveIntervalList(block);
    }

    for (LiveIntervalElement elem = block.getFirstLiveIntervalElement(); elem != null; elem = elem.getNext()) {
      // "unresolved" means the end is known but begin has not been set yet
      if (elem.getRegister() == reg && elem.getBegin() == null) {
        return true;
      }
    }
    return false;
  }

  /**
   * Print the live intervals for a block.
   *
   * @param block the block
   */
  public static void printLiveIntervalList(BasicBlock block) {
    System.out.println("Live Interval List for " + block);
    for (LiveIntervalElement elem = block.getFirstLiveIntervalElement(); elem != null; elem = elem.getNext()) {
      System.out.println("  " + elem);
    }
  }
}
| |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package stallone.datasequence.io;
import static stallone.api.API.*;
import stallone.api.datasequence.IDataList;
import stallone.api.datasequence.IDataSequence;
import stallone.api.datasequence.DataSequence;
import stallone.api.datasequence.IDataReader;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Iterator;
import stallone.api.doubles.*;
import stallone.io.CachedAsciiFileReader;
import stallone.util.StringTools;
/**
 * Reads a data sequence (e.g. a trajectory) from a whitespace-separated
 * ASCII file, one frame per line. Optionally restricts parsing to a subset
 * of columns and extracts frame times from a dedicated time column.
 *
 * @author noe
 */
public class AsciiDataSequenceReader
        extends CachedAsciiFileReader
        implements IDataReader
{
    // number of columns detected while scanning (used when no selection is set)
    private int dimension = 0;
    // first line of the file that contains frame data (lines before are header)
    private int dataStartLine = 0;

    /**
     * The column which stores time information, by default this is column 1. If
     * timeColumn is set to -1, that means, there is no time information.
     */
    private int timeColumn = -1;

    /**
     * The columns, which contain data
     */
    private int[] selectedColumns = null;

    public AsciiDataSequenceReader(String filename)
            throws FileNotFoundException, IOException
    {
        super(filename);
    }

    /**
     * @param filename the file to read
     * @param _dataStartLine first line (0-based) containing frame data
     * @param _timeColumn column holding the frame time, or -1 for none
     * @param _selectedColumns the columns to read data from
     */
    public AsciiDataSequenceReader(String filename, int _dataStartLine, int _timeColumn, int[] _selectedColumns)
            throws FileNotFoundException, IOException
    {
        super(filename);
        this.dataStartLine = _dataStartLine;
        //this.dataEndLine = dataEndLine;
        /*
         * if ( dataEndLine <= dataStartLine ) { throw new
         * IllegalArgumentException("Invalid start position of trajectory."); }
         */
        this.timeColumn = _timeColumn;
        this.selectedColumns = _selectedColumns;
    }

    /**
     * Decides whether a text line is a valid data line: all (selected)
     * columns must parse as doubles. Also records the column count as
     * the dimension when no column selection is active.
     */
    @Override
    protected boolean scanLine(String textline, int currentLineNumber)
    {
        if (selectedColumns == null)
        {
            try
            {
                double[] dline = StringTools.toDoubleArray(textline);
                dimension = dline.length;
                return true;
            }
            catch(Exception e)
            {
                // not a parseable data line
                return false;
            }
        }
        else
        {
            String[] words = StringTools.split(textline);
            words = StringTools.subarray(words, selectedColumns);
            for (int i = 0; i < words.length; i++)
            {
                if (!StringTools.isDouble(words[i]))
                {
                    return false;
                }
            }
            return true;
        }
    }

    /**
     * Splits the data line for the given frame into whitespace-separated tokens.
     *
     * @param lineNumber the frame index (offset by dataStartLine into the file)
     * @return the tokens of that line
     * @throws IllegalArgumentException if lineNumber is out of range
     */
    private String[] readTokens(int lineNumber)
    {
        // NOTE(review): range check uses size() (total line count) but the
        // line actually read is dataStartLine + lineNumber — for files with
        // a header this may allow reads past the end; confirm getNumberOfLines
        // semantics before changing.
        if ((lineNumber >= 0) && (lineNumber < size()))
        {
            String currentLine = super.getLine(dataStartLine + lineNumber);
            String[] elements = CachedAsciiFileReader.whiteSpacePattern.split(currentLine.trim());
            return elements;
        }
        else
        {
            throw new IllegalArgumentException("Invalid line " + lineNumber + " requested.");
        } // end if-else
    }

    @Override
    public void setSource(String name)
    {
        super.setFilename(name);
    }

    /** @return the number of frames (lines) available. */
    @Override
    public int size()
    {
        return getNumberOfLines();
    }

    /** @return the number of data columns per frame. */
    @Override
    public int dimension()
    {
        if (selectedColumns != null)
            return selectedColumns.length;
        else
            return dimension;
    }

    /** @return approximate in-memory size in bytes (8 bytes per double). */
    @Override
    public long memorySize()
    {
        // use a long literal so the multiplication is done in 64-bit
        // arithmetic; 8 * size() * dimension() would overflow int for
        // large files before being widened to long
        return (8L * size() * dimension());
    }

    /**
     * Returns the time of the given frame: the value of the configured time
     * column, or the frame index itself if no time column is set.
     *
     * @param frameIndex the frame index
     * @return the frame time
     */
    @Override
    public double getTime(int frameIndex)
    {
        if (timeColumn == -1)
        {
            return frameIndex;
        }

        String[] entries = readTokens(frameIndex);
        double value = 0;
        try
        {
            value = Double.parseDouble(entries[timeColumn]);
        } catch (NumberFormatException nfe)
        {
            // best-effort: report and fall through with value == 0
            System.out.println("frameIndex : " + frameIndex);
            //System.out.println(dataStartLine + " " + dataEndLine);
        }
        return value;
    }

    @Override
    public void select(int[] selection)
    {
        this.selectedColumns = selection;
    }

    /** @return the selected columns, or 0..dimension-1 when none were set. */
    @Override
    public int[] getSelection()
    {
        if (this.selectedColumns == null)
            return intArrays.range(dimension());
        else
            return this.selectedColumns;
    }

    @Override
    public IDoubleArray get(int index)
    {
        return get(index, null);
    }

    @Override
    public IDoubleArray getView(int index)
    {
        // no caching here, so a "view" is just a freshly parsed frame
        return (get(index));
    }

    /**
     * Reads one frame into the given target array (allocating one when null).
     *
     * @param frameIndex the frame index
     * @param target array to fill, or null to allocate a new one
     * @return the filled array
     */
    public IDoubleArray get(int frameIndex, IDoubleArray target)
    {
        String[] entries = readTokens(frameIndex);

        if (target == null)
        {
            if (selectedColumns == null)
            {
                target = Doubles.create.array(entries.length);
            }
            else
            {
                target = Doubles.create.array(selectedColumns.length);
            }
        }

        try
        {
            if (selectedColumns == null)
            {
                for (int i = 0; i < entries.length; i++)
                {
                    double value = Double.parseDouble(entries[i]);
                    target.set(i, value);
                }
            }
            else
            {
                int n = selectedColumns.length;
                for (int i = 0; i < n; i++)
                {
                    double value = Double.parseDouble(entries[selectedColumns[i]]);
                    target.set(i, value);
                }
            }
        }
        catch (NumberFormatException nfe)
        {
            // best-effort: report and return whatever was parsed so far
            System.out.println("nfe frameIndex : " + frameIndex + "exe detail:" +nfe);
            //System.out.println(dataStartLine + " " + dataEndLine);
        }
        return target;
    }

    /** Loads the entire sequence into memory. */
    @Override
    public IDataSequence load()
    {
        IDataList res = DataSequence.create.list();
        for (Iterator<IDoubleArray> it = iterator(); it.hasNext();)
        {
            res.add(it.next());
        }
        return res;
    }

    @Override
    public Iterator<IDoubleArray> iterator()
    {
        return new DataReaderIterator(this);
    }

    @Override
    public Iterator<IDoubleArray[]> pairIterator(int spacing)
    {
        return new DataReaderPairIterator(this, spacing);
    }

    /** @return an Iterable over frame pairs (t, t+spacing). */
    @Override
    public Iterable<IDoubleArray[]> pairs(int spacing)
    {
        // small adapter so callers can use for-each over frame pairs
        class PairIterable implements Iterable<IDoubleArray[]>
        {
            private IDataReader seq;
            private int spacing = 1;

            public PairIterable(IDataReader _seq, int _spacing)
            {
                this.seq = _seq;
                this.spacing = _spacing;
            }

            @Override
            public Iterator<IDoubleArray[]> iterator()
            {
                return (new DataReaderPairIterator(seq, spacing));
            }
        }
        return new PairIterable(this,spacing);
    }
}
| |
package org.datacite.mds.domain;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Set;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.persistence.OrderBy;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.TypedQuery;
import javax.persistence.Version;
import javax.persistence.Lob;
import javax.validation.GroupSequence;
import javax.validation.constraints.Max;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.Predicate;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.datacite.mds.util.FilterPredicates;
import org.datacite.mds.util.Utils;
import org.datacite.mds.validation.constraints.Email;
import org.datacite.mds.validation.constraints.ListOfDomains;
import org.datacite.mds.validation.constraints.MatchPrefixes;
import org.datacite.mds.validation.constraints.MatchSymbolPrefix;
import org.datacite.mds.validation.constraints.Symbol;
import org.datacite.mds.validation.constraints.Unique;
import org.springframework.format.annotation.DateTimeFormat;
import org.springframework.format.annotation.DateTimeFormat.ISO;
import org.springframework.roo.addon.entity.RooEntity;
import org.springframework.roo.addon.javabean.RooJavaBean;
import org.springframework.roo.addon.tostring.RooToString;
import org.springframework.transaction.annotation.Transactional;
import javax.persistence.Table;
import javax.persistence.JoinTable;
import javax.persistence.JoinColumn;
// JPA/Spring-Roo entity for a DataCite datacentre. NOTE: getters/setters and
// the entityManager/entityManager() members referenced below are generated by
// Roo into separate ITD files (@RooJavaBean/@RooEntity); do not remove fields
// without checking the generated aspects.
@RooJavaBean
@RooToString(excludeFields = { "quotaExceeded" })
@RooEntity(finders = { "findDatacentresBySymbolEquals", "findDatacentresByNameLike" })
@MatchPrefixes(groups = Datacentre.SecondLevelConstraint.class)
@MatchSymbolPrefix(groups = Datacentre.SecondLevelConstraint.class)
@Unique(field = "symbol")
@Entity
@XmlRootElement
@GroupSequence({ Datacentre.class, Datacentre.SecondLevelConstraint.class })
@Table(name="datacentre")
public class Datacentre implements AllocatorOrDatacentre {

    private static Logger log4j = Logger.getLogger(Datacentre.class);

    // surrogate primary key
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Column(name = "id")
    private Long id;

    // optimistic-locking version counter
    @Version
    @Column(name = "version")
    private Integer version;

    @XmlTransient
    public Long getId() {
        return this.id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    @XmlTransient
    public Integer getVersion() {
        return this.version;
    }

    public void setVersion(Integer version) {
        this.version = version;
    }

    // unique login/identifier of the datacentre, e.g. "TIB.PANGAEA"
    @NotNull
    @Symbol(Symbol.Type.DATACENTRE)
    @Column(unique = true)
    private String symbol;

    private String password;

    @XmlTransient
    public String getPassword() {
        return this.password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    @NotNull
    @Size(min = 3, max = 255)
    private String name;

    @NotNull
    @Size(min = 2, max = 80)
    @Column(name = "contact_name")
    private String contactName;

    @NotNull
    @Email
    @Column(name = "contact_email")
    private String contactEmail;

    // -1 means unlimited quota (see isQuotaExceeded)
    @NotNull
    @Column(name = "doi_quota_allowed")
    private Integer doiQuotaAllowed = -1;

    @NotNull
    @Min(0L)
    @Max(999999999L)
    @Column(name = "doi_quota_used")
    private Integer doiQuotaUsed = 0;

    @Column(name = "is_active")
    private Boolean isActive = true;

    // Spring Security role granted to this account
    @Column(name = "role_name")
    private String roleName = "ROLE_DATACENTRE";

    @XmlTransient
    public String getRoleName() {
        return this.roleName;
    }

    public void setRoleName(String roleName) {
        this.roleName = roleName;
    }

    // comma-separated list of domains this datacentre may register URLs for
    @Size(min = 0, max = 255)
    @ListOfDomains
    private String domains;

    @Size(max = 4000)
    private String comments;

    // owning allocator (parent organisation)
    @NotNull
    @ManyToOne(targetEntity = Allocator.class)
    @JoinColumn(name="allocator")
    private Allocator allocator;

    @XmlTransient
    public Allocator getAllocator() {
        return this.allocator;
    }

    public void setAllocator(Allocator allocator) {
        this.allocator = allocator;
    }

    // DOI prefixes assigned to this datacentre
    @ManyToMany(cascade = CascadeType.ALL)
    @OrderBy("prefix")
    @JoinTable(
        name = "datacentre_prefixes",
        joinColumns = { @JoinColumn(name = "datacentre") },
        inverseJoinColumns = { @JoinColumn(name = "prefixes") }
    )
    private Set<org.datacite.mds.domain.Prefix> prefixes = new java.util.HashSet<org.datacite.mds.domain.Prefix>();

    // maintained by persist()/merge()
    @Temporal(TemporalType.TIMESTAMP)
    @DateTimeFormat(iso = ISO.DATE_TIME)
    private Date updated;

    @Temporal(TemporalType.TIMESTAMP)
    @DateTimeFormat(iso = ISO.DATE_TIME)
    private Date created;

    // comma-separated list of experiment flags; see getExperiments()
    private String experiments;

    public enum ForceRefresh { YES, NO };

    /**
     * Increase used quota counter for a datacentre.
     *
     * Implementation uses HQL update in order to maintain potential concurrent access (i.e. a datacentre using
     * concurrently many API clients. Using HQL update makes sure database row level lock will guarantee only one
     * client changes the value at the time.
     *
     * @param forceRefresh the consequence of using HQL update is lack of the value in the instance field.
     * Use ForceRefresh.YES to reread the value from database but be aware that refresh() rereads all fields, not
     * only doiQuotaUsed so if you have any other changes in the object persist them first.
     */
    @Transactional
    public void incQuotaUsed(ForceRefresh forceRefresh) {
        String qlString = "update Datacentre a set a.doiQuotaUsed = a.doiQuotaUsed + 1 where a.symbol = :symbol";
        entityManager.createQuery(qlString).setParameter("symbol", getSymbol()).executeUpdate();
        if (forceRefresh == ForceRefresh.YES)
            refresh();
    }

    /**
     * Check if quota exceeded.
     *
     * Implementation uses HQL select in order to maintain potential concurrent access (i.e. a datacentre using
     * concurrently many API clients.
     *
     * @return true if quota is exceeded
     */
    @Transactional
    public boolean isQuotaExceeded() {
        // negative doiQuotaAllowed means "unlimited"
        if (getDoiQuotaAllowed() < 0)
            return false;
        String qlString = "select doiQuotaAllowed - doiQuotaUsed from Datacentre o where id = :id";
        Integer diff = (Integer) entityManager().createQuery(qlString).setParameter("id", getId()).getSingleResult();
        return diff <= 0;
    }

    @SuppressWarnings("unchecked")
    public static List<Datacentre> findAllDatacentresByAllocator(Allocator allocator) {
        String qlString = "select o from Datacentre o where allocator = :allocator order by symbol";
        return entityManager().createQuery(qlString).setParameter("allocator", allocator).getResultList();
    }

    // paged variant of findAllDatacentresByAllocator
    @SuppressWarnings("unchecked")
    public static List<Datacentre> findDatacentreEntriesByAllocator(Allocator allocator, int firstResult, int maxResults) {
        String qlString = "select o from Datacentre o where allocator = :allocator order by symbol";
        return entityManager().createQuery(qlString).setParameter("allocator", allocator).setFirstResult(firstResult)
                .setMaxResults(maxResults).getResultList();
    }

    public static long countDatacentresByAllocator(Allocator allocator) {
        TypedQuery<Long> q = entityManager().createQuery("SELECT COUNT(*) FROM Datacentre WHERE allocator = :allocator", Long.class);
        q.setParameter("allocator", allocator);
        return q.getSingleResult();
    }

    // overrides the Roo-generated persist() to stamp created/updated
    @Transactional
    public void persist() {
        Date date = new Date();
        setCreated(date);
        setUpdated(date);
        if (this.entityManager == null)
            this.entityManager = entityManager();
        this.entityManager.persist(this);
    }

    // overrides the Roo-generated merge() to stamp updated
    @Transactional
    public Datacentre merge() {
        setUpdated(new Date());
        if (this.entityManager == null)
            this.entityManager = entityManager();
        Datacentre merged = this.entityManager.merge(this);
        this.entityManager.flush();
        return merged;
    }

    @Transactional
    public void refresh() {
        if (this.entityManager == null)
            this.entityManager = entityManager();
        this.entityManager.refresh(this);
    }

    /**
     * retrieve a datacentre by symbol
     * @param symbol of an datacentre
     * @return datacentre with the given symbol or null if no such datacentre exists
     */
    public static Datacentre findDatacentreBySymbol(String symbol) {
        if (symbol == null) {
            return null;
        }
        try {
            log4j.trace("search for '" + symbol + "'");
            Datacentre dc = findDatacentresBySymbolEquals(symbol).getSingleResult();
            log4j.trace("found '" + symbol + "'");
            return dc;
        } catch (Exception e) {
            // deliberately maps "not found" (and any lookup failure) to null
            log4j.trace("no datacentre found");
            return null;
        }
    }

    /**
     * calculate String to be used for magic auth key
     *
     * @return (unhashed) base part of the magic auth string
     */
    public String getBaseAuthString() {
        StringBuilder str = new StringBuilder();
        str.append(getId());
        str.append(getSymbol());
        str.append(StringUtils.defaultString(getPassword()));
        return str.toString();
    }

    // normalizes the CSV before storing (trimming, canonical separators)
    public void setDomains(String domains) {
        this.domains = Utils.normalizeCsvStandard(domains);
    }

    // in-memory filter over all datacentres; fine for the small table sizes here
    public static List<Datacentre> findDatacentresByPrefix (Prefix prefix) {
        List<Datacentre> list = findAllDatacentres();
        Predicate containsPrefix = FilterPredicates.getAllocatorOrDatacentreContainsPrefixPredicate(prefix);
        CollectionUtils.filter(list, containsPrefix);
        return list;
    }

    // experiments are stored as CSV in a single column
    public Collection<String> getExperiments() {
        return Utils.csvToList(this.experiments);
    }

    public void setExperiments(Collection<String> experiments) {
        this.experiments = Utils.collectionToCsv(experiments);
    }

    // NOTE(review): throws NullPointerException when contactEmail is null;
    // presumably callers validate before setting — confirm.
    public void setContactEmail(String contactEmail) {
        this.contactEmail = contactEmail.trim();
    }

    // collapses line breaks to spaces so names stay single-line
    public void setName(String name) {
        this.name = name.replaceAll("\r?\n", " ").trim();
    }

    // unused cache slot; getCountDatasets() computes the value directly
    private transient long countDatasets;

    // number of non-test datasets registered under this datacentre
    public long getCountDatasets() {
        return Dataset.countDatasetsByAllocatorOrDatacentre(this)
                - Dataset.countTestDatasetsByAllocatorOrDatacentre(this);
    }

    @Override
    public String toString() {
        return getSymbol() + " (id=" + getId() + ")";
    }

    // marker group for constraints validated after the default group
    public interface SecondLevelConstraint {};
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.