gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package dyvil.reflect;
import dyvil.annotation.internal.DyvilModifiers;
import dyvil.annotation.internal.NonNull;
import dyvil.annotation.internal.Nullable;
import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.List;
/**
 * Reflection helpers for locating {@link Field}s and for reading and writing their
 * values and modifier bits.
 * <p>
 * All value accessors call {@link Field#setAccessible(boolean)} first, so non-public
 * fields are handled as well. Modifier manipulation relies on the internal
 * {@code java.lang.reflect.Field#modifiers} field, which may be unavailable on newer
 * JVMs; in that case the modifier operations degrade to no-ops.
 *
 * @deprecated since v0.47.0
 */
@Deprecated
public class FieldReflection
{
	// Cached handle to java.lang.reflect.Field's private 'modifiers' field, or null
	// when it cannot be obtained (e.g. reflective access blocked on newer JDKs).
	private static final @Nullable Field modifiersField;

	static
	{
		Field modField;
		try
		{
			modField = Field.class.getDeclaredField("modifiers");
			// Makes the 'modifiers' field of the java.lang.reflect.Field class accessible
			modField.setAccessible(true);
		}
		catch (ReflectiveOperationException ignored)
		{
			// Not available on this JVM; setModifier/setAssignable become no-ops.
			modField = null;
		}
		modifiersField = modField;
	}

	/**
	 * Adds the modifiers {@code mod} to the given {@link Field} {@code field} if {@code flag} is true, and removes
	 * them otherwise. Does nothing when the internal 'modifiers' field is not accessible on this JVM.
	 *
	 * @param field
	 * 	the field
	 * @param mod
	 * 	the modifiers
	 * @param flag
	 * 	add or remove
	 *
	 * @deprecated since v0.47.0
	 */
	@Deprecated
	@DyvilModifiers(Modifiers.INFIX)
	public static void setModifier(@NonNull Field field, int mod, boolean flag)
	{
		if (modifiersField == null)
		{
			// modifiersField is @Nullable; without this guard we would NPE below.
			return;
		}
		try
		{
			field.setAccessible(true);
			int modifiers = field.getModifiers();
			if (flag)
			{
				modifiers |= mod;
			}
			else
			{
				modifiers &= ~mod;
			}
			modifiersField.setInt(field, modifiers);
		}
		catch (ReflectiveOperationException ex)
		{
			ex.printStackTrace();
		}
	}

	/**
	 * Clears the {@code final} modifier of the given field, making it assignable via reflection.
	 * Best-effort: failures (including an unavailable 'modifiers' field) are silently ignored.
	 *
	 * @param field
	 * 	the field to make assignable
	 *
	 * @deprecated since v0.47.0
	 */
	@Deprecated
	@DyvilModifiers(Modifiers.INFIX)
	public static void setAssignable(@NonNull Field field)
	{
		if (modifiersField == null)
		{
			// Cannot touch modifier bits on this JVM.
			return;
		}
		try
		{
			field.setAccessible(true);
			modifiersField.setInt(field, field.getModifiers() & ~Modifiers.FINAL);
		}
		catch (Exception ignored)
		{
		}
	}

	// Fields

	/**
	 * Collects the values of all static fields of {@code clazz} whose type is {@code fieldType}
	 * (or a subtype thereof, if {@code subtypes} is true).
	 *
	 * @deprecated since v0.47.0
	 */
	@Deprecated
	public static @NonNull <T> T[] getStaticObjects(@NonNull Class<?> clazz, @NonNull Class<T> fieldType, boolean subtypes)
	{
		return getObjects(clazz, null, fieldType, subtypes);
	}

	/**
	 * Collects the values of all declared fields of {@code clazz} on the given {@code instance}
	 * whose type is {@code fieldType} (or a subtype thereof, if {@code subtypes} is true).
	 * Fields that cannot be read (e.g. instance fields when {@code instance} is null) are skipped.
	 *
	 * @deprecated since v0.47.0
	 */
	@Deprecated
	@SuppressWarnings("unchecked")
	public static @NonNull <T> T[] getObjects(@NonNull Class<?> clazz, Object instance, @NonNull Class<T> fieldType,
		boolean subtypes)
	{
		List<T> list = new ArrayList<>();
		for (Field field : clazz.getDeclaredFields())
		{
			try
			{
				Class<?> c = field.getType();
				// Check the type first so we only read values of matching fields.
				if (c == fieldType || subtypes && fieldType.isAssignableFrom(c))
				{
					// Consistent with the other accessors in this class: also
					// collect the values of non-public fields.
					field.setAccessible(true);
					list.add((T) field.get(instance));
				}
			}
			catch (Exception ignored)
			{
				// Best-effort collection: unreadable fields are simply skipped.
			}
		}
		return list.toArray((T[]) Array.newInstance(fieldType, 0));
	}

	// Fields

	/**
	 * Returns the {@link Field} of the given {@link Class} {@code clazz} with the name {@code name}.
	 *
	 * @param clazz
	 * 	the clazz
	 * @param name
	 * 	the field name
	 *
	 * @return the field, or {@code null} if no declared field has that name
	 * @deprecated since v0.47.0
	 */
	@Deprecated
	public static Field getField(@NonNull Class<?> clazz, @NonNull String name)
	{
		for (Field field : clazz.getDeclaredFields())
		{
			if (name.equals(field.getName()))
			{
				return field;
			}
		}
		return null;
	}

	/**
	 * Returns the {@link Field} of the given {@link Class} {@code clazz} with a name contained in {@code fieldNames}.
	 * Names earlier in the array take precedence.
	 *
	 * @param clazz
	 * 	the clazz
	 * @param fieldNames
	 * 	the possible field names
	 *
	 * @return the field, or {@code null} if none of the names match
	 * @deprecated since v0.47.0
	 */
	@Deprecated
	public static Field getField(@NonNull Class<?> clazz, @NonNull String... fieldNames)
	{
		Field[] fields = clazz.getDeclaredFields();
		for (String fieldName : fieldNames)
		{
			for (Field field : fields)
			{
				if (fieldName.equals(field.getName()))
				{
					return field;
				}
			}
		}
		return null;
	}

	/**
	 * Returns the {@link Field} of the given {@link Class} {@code clazz} with the field ID {@code fieldID},
	 * i.e. the index into {@link Class#getDeclaredFields()}.
	 *
	 * @param clazz
	 * 	the clazz
	 * @param fieldID
	 * 	the field ID
	 *
	 * @return the field
	 * @deprecated since v0.47.0
	 */
	@Deprecated
	public static Field getField(@NonNull Class<?> clazz, int fieldID)
	{
		return clazz.getDeclaredFields()[fieldID];
	}

	// Field getters
	// Reference

	/**
	 * @deprecated since v0.47.0
	 */
	@Deprecated
	public static @Nullable <T, R> R getStaticValue(@NonNull Class<? super T> clazz, String... fieldNames)
	{
		return getValue(clazz, null, fieldNames);
	}

	/**
	 * @deprecated since v0.47.0
	 */
	@Deprecated
	@SuppressWarnings("unchecked")
	public static @Nullable <T, R> R getValue(@NonNull T instance, String... fieldNames)
	{
		return getValue((Class<T>) instance.getClass(), instance, fieldNames);
	}

	/**
	 * @deprecated since v0.47.0
	 */
	@Deprecated
	public static @Nullable <T, R> R getValue(@NonNull Class<? super T> clazz, T instance, String... fieldNames)
	{
		Field f = getField(clazz, fieldNames);
		return getValue(f, instance);
	}

	// Field ID

	/**
	 * @deprecated since v0.47.0
	 */
	@Deprecated
	public static @Nullable <T, R> R getStaticValue(@NonNull Class<? super T> clazz, int fieldID)
	{
		return getValue(clazz, null, fieldID);
	}

	/**
	 * @deprecated since v0.47.0
	 */
	@Deprecated
	@SuppressWarnings("unchecked")
	public static @Nullable <T, R> R getValue(@NonNull T instance, int fieldID)
	{
		return getValue((Class<? super T>) instance.getClass(), instance, fieldID);
	}

	/**
	 * @deprecated since v0.47.0
	 */
	@Deprecated
	public static @Nullable <T, R> R getValue(@NonNull Class<? super T> clazz, T instance, int fieldID)
	{
		Field f = getField(clazz, fieldID);
		return getValue(f, instance);
	}

	/**
	 * Directly gets the value of the given {@link Field} on the given {@link Object} {@code instance}.
	 *
	 * @param <R>
	 * 	the field type
	 * @param field
	 * 	the field to get
	 * @param instance
	 * 	the instance
	 *
	 * @return the value, or {@code null} if the field could not be read
	 * @deprecated since v0.47.0
	 */
	@Deprecated
	@SuppressWarnings("unchecked")
	public static <R> R getValue(@NonNull Field field, Object instance)
	{
		try
		{
			field.setAccessible(true);
			return (R) field.get(instance);
		}
		catch (Exception ex)
		{
			ex.printStackTrace();
			return null;
		}
	}

	// Field setters
	// Reference

	/**
	 * @deprecated since v0.47.0
	 */
	@Deprecated
	public static <T, V> void setStaticValue(@NonNull Class<? super T> clazz, V value, String... fieldNames)
	{
		setValue(clazz, null, value, fieldNames);
	}

	/**
	 * @deprecated since v0.47.0
	 */
	@Deprecated
	@SuppressWarnings("unchecked")
	public static <T, V> void setValue(@NonNull T instance, V value, String... fieldNames)
	{
		setValue((Class<? super T>) instance.getClass(), instance, value, fieldNames);
	}

	/**
	 * @deprecated since v0.47.0
	 */
	@Deprecated
	public static <T, V> void setValue(@NonNull Class<? super T> clazz, T instance, V value, String... fieldNames)
	{
		Field f = getField(clazz, fieldNames);
		setField(f, instance, value);
	}

	// Field ID

	/**
	 * @deprecated since v0.47.0
	 */
	@Deprecated
	public static <T, V> void setStaticValue(@NonNull Class<? super T> clazz, V value, int fieldID)
	{
		setValue(clazz, null, value, fieldID);
	}

	/**
	 * @deprecated since v0.47.0
	 */
	@Deprecated
	@SuppressWarnings("unchecked")
	public static <T, V> void setValue(@NonNull T instance, V value, int fieldID)
	{
		setValue((Class<? super T>) instance.getClass(), instance, value, fieldID);
	}

	/**
	 * @deprecated since v0.47.0
	 */
	@Deprecated
	public static <T, V> void setValue(@NonNull Class<? super T> clazz, T instance, V value, int fieldID)
	{
		Field f = getField(clazz, fieldID);
		setField(f, instance, value);
	}

	/**
	 * Directly sets the value of the given {@link Field} on the given {@link Object} {@code instance} to the given
	 * {@link Object} {@code value}. Failures are printed and otherwise ignored.
	 *
	 * @param <T>
	 * 	the receiver type
	 * @param <V>
	 * 	the field type
	 * @param field
	 * 	the field to set
	 * @param instance
	 * 	the instance
	 * @param value
	 * 	the new value
	 * @deprecated since v0.47.0
	 */
	@Deprecated
	public static <T, V> void setField(@NonNull Field field, T instance, V value)
	{
		try
		{
			field.setAccessible(true);
			field.set(instance, value);
		}
		catch (Exception ex)
		{
			ex.printStackTrace();
		}
	}
}
| |
/* JAI-Ext - OpenSource Java Advanced Image Extensions Library
* http://www.geo-solutions.it/
* Copyright 2014 GeoSolutions
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.geosolutions.jaiext.classifier;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import it.geosolutions.jaiext.piecewise.TransformationException;
import it.geosolutions.jaiext.range.RangeFactory;
import it.geosolutions.jaiext.testclasses.TestBase;
import it.geosolutions.jaiext.testclasses.TestData;
import it.geosolutions.rendered.viewer.RenderedImageBrowser;
import java.awt.Color;
import java.awt.Transparency;
import java.awt.color.ColorSpace;
import java.awt.image.BufferedImage;
import java.awt.image.ColorModel;
import java.awt.image.ComponentColorModel;
import java.awt.image.DataBuffer;
import java.awt.image.IndexColorModel;
import java.awt.image.RenderedImage;
import java.awt.image.WritableRaster;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import javax.media.jai.JAI;
import javax.media.jai.ParameterBlockJAI;
import javax.media.jai.RasterFactory;
import javax.media.jai.RenderedOp;
import javax.xml.crypto.dsig.TransformException;
import org.junit.Test;
/**
 * Test class for the RasterClassifier operation.
 *
 * @author Simone Giannecchini, GeoSolutions
 * @author Nicola Lagomarsini, GeoSolutions
 *
 * @source $URL$
 */
public class TestClassifier extends TestBase {

    /** Number of repetitions of each test body (handy when benchmarking interactively). */
    private static final int TEST_NUM = 1;

    /**
     * Test with a synthetic image with Double Sample Model.
     *
     * @throws IOException
     */
    @Test
    public void testSyntheticDouble() throws IOException {
        // This test uses a Double datatype raster with synthetic data and no NoData.
        final BufferedImage image = getSyntheticDoubleImage();
        for (int i = 0; i < TEST_NUM; i++) {
            // Build the categories
            final LinearColorMapElement c0 = LinearColorMapElement.create("c0", Color.BLACK,
                    RangeFactory.create(Double.NEGATIVE_INFINITY, false, 10, true), 0);
            final LinearColorMapElement c1 = LinearColorMapElement.create("c2", Color.blue,
                    RangeFactory.create(10.0, false, 100.0, true), 1);
            final LinearColorMapElement c3 = LinearColorMapElement.create("c3", Color.green,
                    RangeFactory.create(100.0, false, 300.0, true), 2);
            final LinearColorMapElement c4 = LinearColorMapElement.create("c4", new Color[] {
                    Color.green, Color.red }, RangeFactory.create(300.0, false, 400, true),
                    RangeFactory.create(3, 1000));
            final LinearColorMapElement c5 = LinearColorMapElement.create("c5", new Color[] {
                    Color.red, Color.white }, RangeFactory.create(400.0, false, 1000, true),
                    RangeFactory.create(1001, 2000));
            final LinearColorMapElement c6 = LinearColorMapElement.create("c6", Color.red, 1001.0,
                    2001);
            final LinearColorMapElement c7 = LinearColorMapElement.create("nodata", new Color(0, 0,
                    0, 0), RangeFactory.create(Double.NaN, Double.NaN), 2201);
            final LinearColorMap list = new LinearColorMap("", new LinearColorMapElement[] { c0,
                    c1, c3, c4, c5, c6 }, new LinearColorMapElement[] { c7 });
            // Operation creation
            final ParameterBlockJAI pbj = new ParameterBlockJAI(
                    RasterClassifierOpImage.OPERATION_NAME);
            pbj.addSource(image);
            pbj.setParameter("Domain1D", list);
            final RenderedOp finalimage = JAI.create(RasterClassifierOpImage.OPERATION_NAME, pbj);
            if (INTERACTIVE)
                RenderedImageBrowser.showChain(finalimage, false, false, null);
            else
                finalimage.getTiles();
            finalimage.dispose();
        }
    }

    /**
     * Synthetic Image with Double Sample Model.
     *
     * @return {@linkplain BufferedImage}
     */
    private BufferedImage getSyntheticDoubleImage() {
        final int width = 500;
        final int height = 500;
        // Create the raster
        final WritableRaster raster = RasterFactory.createBandedRaster(DataBuffer.TYPE_DOUBLE,
                width, height, 1, null);
        // Populate the raster with a simple diagonal gradient
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                raster.setSample(x, y, 0, (x + y));
            }
        }
        // Define the colormodel
        final ColorModel cm = new ComponentColorModel(ColorSpace.getInstance(ColorSpace.CS_GRAY),
                false, false, Transparency.OPAQUE, DataBuffer.TYPE_DOUBLE);
        // Create the image
        return new BufferedImage(cm, raster, false, null);
    }

    /**
     * Test with a synthetic image with Float Sample Model.
     *
     * @throws IOException
     */
    @Test
    public void testSyntheticFloat() throws IOException {
        // This test uses a Float datatype raster with synthetic data and no NoData.
        final BufferedImage image = getSyntheticFloatImage();
        for (int i = 0; i < TEST_NUM; i++) {
            // Build the categories
            final LinearColorMapElement c0 = LinearColorMapElement.create("c0", Color.BLACK,
                    RangeFactory.create(Double.NEGATIVE_INFINITY, false, 10, true), 0);
            final LinearColorMapElement c1 = LinearColorMapElement.create("c2", Color.blue,
                    RangeFactory.create(10.0f, false, 100.0f, true), 1);
            final LinearColorMapElement c3 = LinearColorMapElement.create("c3", Color.green,
                    RangeFactory.create(100.0f, false, 300.0f, true), 2);
            final LinearColorMapElement c4 = LinearColorMapElement.create("c4", new Color[] {
                    Color.green, Color.red }, RangeFactory.create(300.0f, false, 400.0f, true),
                    RangeFactory.create(3, 1000));
            final LinearColorMapElement c5 = LinearColorMapElement.create("c5", new Color[] {
                    Color.red, Color.white }, RangeFactory.create(400.0f, false, 1000.0f, true),
                    RangeFactory.create(1001, 2000));
            final LinearColorMapElement c6 = LinearColorMapElement.create("c6", Color.red, 1001.0f,
                    2001);
            final LinearColorMapElement c7 = LinearColorMapElement.create("nodata", new Color(0, 0,
                    0, 0), RangeFactory.create(Double.NaN, Double.NaN), 2201);
            final LinearColorMap list = new LinearColorMap("", new LinearColorMapElement[] { c0,
                    c1, c3, c4, c5, c6 }, new LinearColorMapElement[] { c7 });
            final ParameterBlockJAI pbj = new ParameterBlockJAI(
                    RasterClassifierOpImage.OPERATION_NAME);
            pbj.addSource(image);
            pbj.setParameter("Domain1D", list);
            final RenderedOp finalimage = JAI.create(RasterClassifierOpImage.OPERATION_NAME, pbj);
            if (INTERACTIVE)
                RenderedImageBrowser.showChain(finalimage, false, false, null);
            else
                finalimage.getTiles();
            finalimage.dispose();
        }
    }

    /**
     * Building a synthetic image upon a float Sample Model.
     *
     * @return {@linkplain BufferedImage}
     */
    private BufferedImage getSyntheticFloatImage() {
        final int width = 500;
        final int height = 500;
        // Define the Raster
        final WritableRaster raster = RasterFactory.createBandedRaster(DataBuffer.TYPE_FLOAT,
                width, height, 1, null);
        // Populate raster
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                raster.setSample(x, y, 0, (x + y));
            }
        }
        // Define the colormodel
        final ColorModel cm = new ComponentColorModel(ColorSpace.getInstance(ColorSpace.CS_GRAY),
                false, false, Transparency.OPAQUE, DataBuffer.TYPE_FLOAT);
        // Create the image
        return new BufferedImage(cm, raster, false, null);
    }

    /**
     * NoData only test-case.
     *
     * @throws IOException
     * @throws TransformationException
     */
    @Test
    public void testNoDataOnly() throws IOException, TransformationException {
        // We are covering here a case that can often be verified, i.e. the case
        // when only NoData values are known and thus explicitly mapped by the
        // user to a defined nodata DomainElement, but not the same for the others.
        // In such case we want CategoryLists to automatically map unknown data to
        // a Passthrough DomainElement, which identically maps raster data to
        // category data.
        for (int i = 0; i < TEST_NUM; i++) {
            final LinearColorMapElement n0 = LinearColorMapElement.create("nodata", new Color(0, 0,
                    0, 0), RangeFactory.create(Double.NaN, Double.NaN), 9999);
            final LinearColorMap list = new LinearColorMap("", new LinearColorMapElement[] { n0 });
            double testNum = Math.random();
            boolean exceptionThrown = false;
            try {
                // Only NoData was mapped, so a regular value must not be transformable.
                assertEquals(list.transform(testNum), testNum, 0.0);
            } catch (Exception e) {
                exceptionThrown = true;
            }
            assertTrue(exceptionThrown);
            assertEquals(list.transform(Double.NaN), 9999, 0.0);
        }
    }

    /**
     * Spearfish test-case.
     *
     * @throws IOException
     */
    @Test
    public void testSpearfish() throws IOException {
        // This test is quite standard since the NoData category specified
        // is for NoData values since the input file is a GRASS file
        // where the missing values are represented by 255. The only strange thing
        // that we try here is that we map two different classes to the same
        // color with the same index.
        final RenderedImage image = getSpearfishDemo();
        for (int i = 0; i < TEST_NUM; i++) {
            final LinearColorMapElement c0 = LinearColorMapElement.create("c0", Color.yellow,
                    RangeFactory.create(0, true, 11, true), 5);
            final LinearColorMapElement c1 = LinearColorMapElement.create("c2", Color.blue,
                    RangeFactory.create(11, false, 12, true), 1);
            final LinearColorMapElement c3 = LinearColorMapElement.create("c3", Color.green,
                    RangeFactory.create(12, false, 14, true), 7);
            final LinearColorMapElement c4 = LinearColorMapElement.create("c4", Color.blue,
                    RangeFactory.create(14, false, 16, true), 1);
            final LinearColorMapElement c5 = LinearColorMapElement.create("c4", Color.CYAN,
                    RangeFactory.create(16, false, 255, false), 11);
            final LinearColorMapElement c6 = LinearColorMapElement.create("nodata", new Color(0, 0,
                    0, 0), RangeFactory.create(255, 255), 0);
            final LinearColorMap list = new LinearColorMap("", new LinearColorMapElement[] { c0,
                    c1, c3, c4, c5 }, new LinearColorMapElement[] { c6 });
            final ParameterBlockJAI pbj = new ParameterBlockJAI(
                    RasterClassifierOpImage.OPERATION_NAME);
            pbj.addSource(image);
            pbj.setParameter("Domain1D", list);
            final RenderedOp finalimage = JAI.create(RasterClassifierOpImage.OPERATION_NAME, pbj);
            if (INTERACTIVE)
                RenderedImageBrowser.showChain(finalimage, false, false, null);
            else
                finalimage.getTiles();
            finalimage.dispose();
        }
    }

    /**
     * Building an image based on Spearfish data.
     *
     * @return {@linkplain BufferedImage}
     *
     * @throws IOException
     * @throws FileNotFoundException
     */
    private RenderedImage getSpearfishDemo() throws IOException, FileNotFoundException {
        File spearfish = TestData.file(this, "spearfish.png");
        RenderedOp image = JAI.create("ImageRead", spearfish);
        return image;
    }

    /**
     * SWAN test-case. We generate an image similar to the SWAN dataset.
     *
     * @throws IOException
     */
    @Test
    public void testSWAN() throws IOException {
        // This test is interesting since it can be used to simulate the
        // case where someone specifies a ColorMap that overlaps with the native
        // NoData value.
        final RenderedImage image = getSWANData();
        for (int i = 0; i < TEST_NUM; i++) {
            final LinearColorMapElement c0 = LinearColorMapElement.create("c0", Color.green,
                    RangeFactory.create(Double.NEGATIVE_INFINITY, 0.3), 51);
            final LinearColorMapElement c1 = LinearColorMapElement.create("c2", Color.yellow,
                    RangeFactory.create(0.3, false, 0.6, true), 1);
            final LinearColorMapElement c1b = LinearColorMapElement.create("c2", Color.BLACK,
                    RangeFactory.create(0.3, false, 0.6, true), 1);
            final LinearColorMapElement c1c = LinearColorMapElement.create("c2", Color.yellow,
                    RangeFactory.create(0.3, false, 0.6, true), 1);
            // equals must distinguish elements by color, and match on full equality
            assertFalse(c1.equals(c1b));
            assertTrue(c1.equals(c1c));
            final LinearColorMapElement c3 = LinearColorMapElement.create("c3", Color.red,
                    RangeFactory.create(0.60, false, 0.90, true), 2);
            final LinearColorMapElement c4 = LinearColorMapElement.create("c4", Color.BLUE,
                    RangeFactory.create(0.9, false, Double.POSITIVE_INFINITY, true), 3);
            final LinearColorMapElement nodata = LinearColorMapElement.create("nodata", new Color(
                    0, 0, 0, 0), RangeFactory.create(-9.0, -9.0), 4);
            final LinearColorMap list = new LinearColorMap("testSWAN", new LinearColorMapElement[] {
                    c0, c1, c3, c4 }, new LinearColorMapElement[] { nodata }, new Color(0, 0, 0));
            assertEquals(list.getSourceDimensions(), 1);
            assertEquals(list.getTargetDimensions(), 1);
            assertEquals(list.getName().toString(), "testSWAN");
            assertNotNull(c0.toString());
            final ParameterBlockJAI pbj = new ParameterBlockJAI(
                    RasterClassifierOpImage.OPERATION_NAME);
            pbj.addSource(image);
            pbj.setParameter("Domain1D", list);
            boolean exceptionThrown = false;
            try {
                // forcing a bad band selection ...
                pbj.setParameter("bandIndex", Integer.valueOf(2));
                final RenderedOp d = JAI.create(RasterClassifierOpImage.OPERATION_NAME, pbj);
                d.getTiles();
                // we should not be here!
            } catch (Exception e) {
                exceptionThrown = true;
                // ... ok, Exception wanted!
            }
            assertTrue(exceptionThrown);
            pbj.setParameter("bandIndex", Integer.valueOf(0));
            final RenderedOp finalimage = JAI.create(RasterClassifierOpImage.OPERATION_NAME, pbj);
            if (INTERACTIVE)
                RenderedImageBrowser.showChain(finalimage, false, false, null);
            else
                finalimage.getTiles();
            finalimage.dispose();
        }
    }

    /**
     * SWAN test-case. We generate an image similar to the SWAN dataset.
     *
     * @throws IOException
     */
    @Test
    public void testSWANwithGap() throws IOException {
        // This test is interesting since it can be used to simulate the
        // case where someone specifies a ColorMap that overlaps with the native
        // NoData value. For this SWAN data the NoData value is -9.0 and no
        // NaN which falls right into the first category.
        //
        // We overcome this problem by simply giving higher priority to the
        // NoData category over the other categories when doing a search for
        // the right category given a certain value. This forces us to
        // first evaluate the no data category and then evaluate a possible
        // provided overlapping value.
        //
        // This test is also interesting since we create a color map by
        // providing output indexes that are not ordered and also that are not
        // all contained in a closed natural interval. As you can notice by
        // inspecting the different classes below there is an index, 51, which
        // is way outside the range of the others.
        final RenderedImage image = getSWANData();
        for (int i = 0; i < TEST_NUM; i++) {
            final LinearColorMapElement c0 = LinearColorMapElement.create("c0", Color.green,
                    RangeFactory.create(Double.NEGATIVE_INFINITY, 0.3), 51);
            final LinearColorMapElement c1 = LinearColorMapElement.create("c2", Color.yellow,
                    RangeFactory.create(0.3, false, 0.6, true), 1);
            final LinearColorMapElement c3 = LinearColorMapElement.create("c3", Color.red,
                    RangeFactory.create(0.70, false, 0.90, true), 2);
            final LinearColorMapElement c4 = LinearColorMapElement.create("c4", Color.BLUE,
                    RangeFactory.create(0.9, false, Double.POSITIVE_INFINITY, true), 3);
            final LinearColorMapElement nodata = LinearColorMapElement.create("nodata", Color.red,
                    RangeFactory.create(-9.0, -9.0), 4);
            final LinearColorMap list = new LinearColorMap("testSWAN", new LinearColorMapElement[] {
                    c0, c1, c3, c4 }, new LinearColorMapElement[] { nodata }, new Color(0, 0, 0, 0));
            final ParameterBlockJAI pbj = new ParameterBlockJAI(
                    RasterClassifierOpImage.OPERATION_NAME);
            pbj.addSource(image);
            pbj.setParameter("Domain1D", list);
            boolean exceptionThrown = false;
            try {
                // forcing a bad band selection ...
                pbj.setParameter("bandIndex", Integer.valueOf(2));
                final RenderedOp d = JAI.create(RasterClassifierOpImage.OPERATION_NAME, pbj);
                d.getTiles();
                // we should not be here!
            } catch (Exception e) {
                exceptionThrown = true;
                // ... ok, Exception wanted!
            }
            assertTrue(exceptionThrown);
            pbj.setParameter("bandIndex", Integer.valueOf(0));
            final RenderedOp finalimage = JAI.create(RasterClassifierOpImage.OPERATION_NAME, pbj);
            final IndexColorModel icm = (IndexColorModel) finalimage.getColorModel();
            // both the nodata index (4) and the c3 index (2) must map to red
            assertEquals(icm.getRed(4), 255);
            assertEquals(icm.getRed(2), 255);
            if (INTERACTIVE)
                RenderedImageBrowser.showChain(finalimage, false, false, null);
            else
                finalimage.getTiles();
            finalimage.dispose();
        }
    }

    /**
     * Building an image simulating SWAN data.
     *
     * @return {@linkplain BufferedImage}
     */
    private RenderedImage getSWANData() {
        final int width = 500;
        final int height = 500;
        // Build the raster
        final WritableRaster raster = RasterFactory.createBandedRaster(DataBuffer.TYPE_DOUBLE,
                width, height, 1, null);
        // Populate the raster: the main diagonal carries the NoData value -9.0
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                if (x == y || x == -y) {
                    raster.setSample(x, y, 0, -9.0);
                } else {
                    raster.setSample(x, y, 0, Math.random() * 5 - 5);
                }
            }
        }
        // Define the colormodel
        final ColorModel cm = new ComponentColorModel(ColorSpace.getInstance(ColorSpace.CS_GRAY),
                false, false, Transparency.OPAQUE, DataBuffer.TYPE_DOUBLE);
        // Create the image
        return new BufferedImage(cm, raster, false, null);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor;
import org.apache.camel.AggregationStrategy;
import org.apache.camel.AsyncCallback;
import org.apache.camel.CamelContext;
import org.apache.camel.CamelContextAware;
import org.apache.camel.Exchange;
import org.apache.camel.Expression;
import org.apache.camel.impl.engine.DefaultClaimCheckRepository;
import org.apache.camel.spi.ClaimCheckRepository;
import org.apache.camel.spi.IdAware;
import org.apache.camel.support.AsyncProcessorSupport;
import org.apache.camel.support.ExchangeHelper;
import org.apache.camel.support.LanguageSupport;
import org.apache.camel.support.service.ServiceHelper;
import org.apache.camel.util.ObjectHelper;
/**
* ClaimCheck EIP implementation.
* <p/>
* The current Claim Check EIP implementation in Camel is only intended for temporary memory repository. Likewise
* the repository is not shared among {@link Exchange}s, but a private instance is created per {@link Exchange}.
* This guards against concurrent and thread-safe issues. For off-memory persistent storage of data, then use
* any of the many Camel components that support persistent storage, and do not use this Claim Check EIP implementation.
*/
public class ClaimCheckProcessor extends AsyncProcessorSupport implements IdAware, CamelContextAware {
private CamelContext camelContext;
private String id;
private String operation;
private AggregationStrategy aggregationStrategy;
private String key;
private Expression keyExpression;
private String filter;
@Override
public CamelContext getCamelContext() {
return camelContext;
}
@Override
public void setCamelContext(CamelContext camelContext) {
this.camelContext = camelContext;
}
@Override
public String getId() {
return id;
}
@Override
public void setId(String id) {
this.id = id;
}
public String getOperation() {
return operation;
}
public void setOperation(String operation) {
this.operation = operation;
}
public AggregationStrategy getAggregationStrategy() {
return aggregationStrategy;
}
public void setAggregationStrategy(AggregationStrategy aggregationStrategy) {
this.aggregationStrategy = aggregationStrategy;
}
public String getKey() {
return key;
}
public void setKey(String key) {
this.key = key;
}
public String getFilter() {
return filter;
}
public void setFilter(String filter) {
this.filter = filter;
}
@Override
public boolean process(Exchange exchange, AsyncCallback callback) {
// the repository is scoped per exchange
ClaimCheckRepository repo = exchange.getProperty(Exchange.CLAIM_CHECK_REPOSITORY, ClaimCheckRepository.class);
if (repo == null) {
repo = new DefaultClaimCheckRepository();
exchange.setProperty(Exchange.CLAIM_CHECK_REPOSITORY, repo);
}
try {
String claimKey = keyExpression.evaluate(exchange, String.class);
if ("Set".equals(operation)) {
// copy exchange, and do not share the unit of work
Exchange copy = ExchangeHelper.createCorrelatedCopy(exchange, false);
boolean addedNew = repo.add(claimKey, copy);
if (addedNew) {
log.debug("Add: {} -> {}", claimKey, copy);
} else {
log.debug("Override: {} -> {}", claimKey, copy);
}
} else if ("Get".equals(operation)) {
Exchange copy = repo.get(claimKey);
log.debug("Get: {} -> {}", claimKey, exchange);
if (copy != null) {
Exchange result = aggregationStrategy.aggregate(exchange, copy);
if (result != null) {
ExchangeHelper.copyResultsPreservePattern(exchange, result);
}
}
} else if ("GetAndRemove".equals(operation)) {
Exchange copy = repo.getAndRemove(claimKey);
log.debug("GetAndRemove: {} -> {}", claimKey, exchange);
if (copy != null) {
// prepare the exchanges for aggregation
ExchangeHelper.prepareAggregation(exchange, copy);
Exchange result = aggregationStrategy.aggregate(exchange, copy);
if (result != null) {
ExchangeHelper.copyResultsPreservePattern(exchange, result);
}
}
} else if ("Push".equals(operation)) {
// copy exchange, and do not share the unit of work
Exchange copy = ExchangeHelper.createCorrelatedCopy(exchange, false);
log.debug("Push: {} -> {}", claimKey, copy);
repo.push(copy);
} else if ("Pop".equals(operation)) {
Exchange copy = repo.pop();
log.debug("Pop: {} -> {}", claimKey, exchange);
if (copy != null) {
// prepare the exchanges for aggregation
ExchangeHelper.prepareAggregation(exchange, copy);
Exchange result = aggregationStrategy.aggregate(exchange, copy);
if (result != null) {
ExchangeHelper.copyResultsPreservePattern(exchange, result);
}
}
}
} catch (Throwable e) {
exchange.setException(e);
}
callback.done(true);
return true;
}
@Override
protected void doStart() throws Exception {
ObjectHelper.notNull(operation, "operation", this);
if (aggregationStrategy == null) {
aggregationStrategy = createAggregationStrategy();
}
if (aggregationStrategy instanceof CamelContextAware) {
((CamelContextAware) aggregationStrategy).setCamelContext(camelContext);
}
if (LanguageSupport.hasSimpleFunction(key)) {
keyExpression = camelContext.resolveLanguage("simple").createExpression(key);
} else {
keyExpression = camelContext.resolveLanguage("constant").createExpression(key);
}
ServiceHelper.startService(aggregationStrategy);
}
@Override
protected void doStop() throws Exception {
ServiceHelper.stopService(aggregationStrategy);
}
@Override
public String toString() {
return "ClaimCheck[" + operation + "]";
}
/**
 * Builds the default {@link AggregationStrategy} used when no custom strategy
 * has been configured; it honours the configured filter.
 */
protected AggregationStrategy createAggregationStrategy() {
    ClaimCheckAggregationStrategy strategy = new ClaimCheckAggregationStrategy();
    strategy.setFilter(filter);
    return strategy;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.datatorrent.stram.plan.logical;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Stack;
import java.util.TreeSet;
import java.util.concurrent.Callable;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import javax.validation.ValidationException;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.apache.hadoop.yarn.util.Clock;
import org.apache.hadoop.yarn.util.SystemClock;
import com.google.common.collect.Sets;
import com.datatorrent.api.Context;
import com.datatorrent.api.DAG;
import com.datatorrent.api.DefaultInputPort;
import com.datatorrent.api.DefaultOutputPort;
import com.datatorrent.api.Operator;
import com.datatorrent.common.util.BaseOperator;
import com.datatorrent.common.util.DefaultDelayOperator;
import com.datatorrent.stram.StramLocalCluster;
import com.datatorrent.stram.StreamingContainerManager;
import com.datatorrent.stram.StreamingContainerManager.UpdateCheckpointsContext;
import com.datatorrent.stram.api.Checkpoint;
import com.datatorrent.stram.engine.GenericTestOperator;
import com.datatorrent.stram.engine.TestGeneratorInputOperator;
import com.datatorrent.stram.plan.logical.LogicalPlan.OperatorMeta;
import com.datatorrent.stram.plan.physical.PTOperator;
import com.datatorrent.stram.plan.physical.PhysicalPlan;
import com.datatorrent.stram.support.StramTestSupport;
import com.datatorrent.stram.support.StramTestSupport.MemoryStorageAgent;
import com.datatorrent.stram.support.StramTestSupport.TestMeta;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
/**
* Unit tests for topologies with delay operator
*/
public class DelayOperatorTest
{
@Rule
public TestMeta testMeta = new TestMeta();
private static Lock sequential = new ReentrantLock();
@Before
public void setup()
{
  // Tests in this class mutate shared static state (e.g. FibonacciOperator.results,
  // the failureSimulated flags), so serialize test execution with a class-wide lock.
  sequential.lock();
}
@After
public void teardown()
{
  // Release the class-wide lock acquired in setup() so the next test may run.
  sequential.unlock();
}
@Test
public void testInvalidDelayDetection()
{
  // Case 1: the delay operator's output goes to D, which is not upstream of the
  // delay, so no iteration loop is closed and the delay must be reported invalid.
  LogicalPlan dag = new LogicalPlan();
  GenericTestOperator opB = dag.addOperator("B", GenericTestOperator.class);
  GenericTestOperator opC = dag.addOperator("C", GenericTestOperator.class);
  GenericTestOperator opD = dag.addOperator("D", GenericTestOperator.class);
  DefaultDelayOperator<Object> opDelay = dag.addOperator("opDelay", DefaultDelayOperator.class);
  dag.addStream("BtoC", opB.outport1, opC.inport1);
  dag.addStream("CtoD", opC.outport1, opD.inport1);
  dag.addStream("CtoDelay", opC.outport2, opDelay.input);
  dag.addStream("DelayToD", opDelay.output, opD.inport2);
  List<List<String>> invalidDelays = new ArrayList<>();
  dag.findInvalidDelays(dag.getMeta(opB), invalidDelays, new Stack<OperatorMeta>());
  assertEquals("operator invalid delay", 1, invalidDelays.size());
  try {
    dag.validate();
    fail("validation should fail");
  } catch (ValidationException e) {
    // expected
  }
  // Case 2: the delay does close a loop (Delay -> C), but it is configured with
  // APPLICATION_WINDOW_COUNT = 2, which validation must also reject.
  dag = new LogicalPlan();
  opB = dag.addOperator("B", GenericTestOperator.class);
  opC = dag.addOperator("C", GenericTestOperator.class);
  opD = dag.addOperator("D", GenericTestOperator.class);
  opDelay = dag.addOperator("opDelay", DefaultDelayOperator.class);
  dag.setAttribute(opDelay, Context.OperatorContext.APPLICATION_WINDOW_COUNT, 2);
  dag.addStream("BtoC", opB.outport1, opC.inport1);
  dag.addStream("CtoD", opC.outport1, opD.inport1);
  dag.addStream("CtoDelay", opC.outport2, opDelay.input);
  dag.addStream("DelayToC", opDelay.output, opC.inport2);
  invalidDelays = new ArrayList<>();
  dag.findInvalidDelays(dag.getMeta(opB), invalidDelays, new Stack<OperatorMeta>());
  assertEquals("operator invalid delay", 1, invalidDelays.size());
  try {
    dag.validate();
    fail("validation should fail");
  } catch (ValidationException e) {
    // expected
  }
  // Case 3: a delay loop whose streams are marked THREAD_LOCAL must fail validation.
  dag = new LogicalPlan();
  opB = dag.addOperator("B", GenericTestOperator.class);
  opC = dag.addOperator("C", GenericTestOperator.class);
  opD = dag.addOperator("D", GenericTestOperator.class);
  opDelay = dag.addOperator("opDelay", DefaultDelayOperator.class);
  dag.addStream("BtoC", opB.outport1, opC.inport1);
  dag.addStream("CtoD", opC.outport1, opD.inport1);
  dag.addStream("CtoDelay", opC.outport2, opDelay.input).setLocality(DAG.Locality.THREAD_LOCAL);
  dag.addStream("DelayToC", opDelay.output, opC.inport2).setLocality(DAG.Locality.THREAD_LOCAL);
  try {
    dag.validate();
    fail("validation should fail");
  } catch (ValidationException e) {
    // expected
  }
}
@Test
public void testValidDelay()
{
  // A delay whose output feeds an operator upstream of it (B) closes a proper
  // iteration loop, so validation must pass without throwing.
  LogicalPlan dag = new LogicalPlan();
  TestGeneratorInputOperator opA = dag.addOperator("A", TestGeneratorInputOperator.class);
  GenericTestOperator opB = dag.addOperator("B", GenericTestOperator.class);
  GenericTestOperator opC = dag.addOperator("C", GenericTestOperator.class);
  GenericTestOperator opD = dag.addOperator("D", GenericTestOperator.class);
  DefaultDelayOperator opDelay = dag.addOperator("opDelay", DefaultDelayOperator.class);
  dag.addStream("AtoB", opA.outport, opB.inport1);
  dag.addStream("BtoC", opB.outport1, opC.inport1);
  dag.addStream("CtoD", opC.outport1, opD.inport1);
  dag.addStream("CtoDelay", opC.outport2, opDelay.input);
  dag.addStream("DelayToB", opDelay.output, opB.inport2);
  dag.validate();
}
// The first 40 Fibonacci numbers; used as the expected output of the
// delay-loop Fibonacci tests below.
public static final Long[] FIBONACCI_NUMBERS = new Long[]{
    1L, 1L, 2L, 3L, 5L, 8L, 13L, 21L, 34L, 55L, 89L, 144L, 233L, 377L, 610L, 987L, 1597L, 2584L, 4181L, 6765L,
    10946L, 17711L, 28657L, 46368L, 75025L, 121393L, 196418L, 317811L, 514229L, 832040L, 1346269L, 2178309L,
    3524578L, 5702887L, 9227465L, 14930352L, 24157817L, 39088169L, 63245986L, 102334155L
};
/**
 * Emits one Fibonacci number per window; the previous window's output is fed
 * back into {@code input} through a delay operator to advance the sequence.
 */
public static class FibonacciOperator extends BaseOperator
{
  // Shared collector for emitted values; cleared by each test before use
  // (tests run sequentially, see the class-wide lock in setup()).
  public static List<Long> results = new ArrayList<>();
  public long currentNumber = 1;
  private transient long tempNum;
  // Input that merely connects an upstream source; tuples are ignored.
  public transient DefaultInputPort<Object> dummyInputPort = new DefaultInputPort<Object>()
  {
    @Override
    public void process(Object tuple)
    {
    }
  };
  // Receives the previous window's emitted value via the delay loop.
  public transient DefaultInputPort<Long> input = new DefaultInputPort<Long>()
  {
    @Override
    public void process(Long tuple)
    {
      tempNum = tuple;
    }
  };
  public transient DefaultOutputPort<Long> output = new DefaultOutputPort<>();
  @Override
  public void endWindow()
  {
    // Emit the current value, then advance using the value that looped back
    // from the previous window (0 until the first loop-back arrives).
    output.emit(currentNumber);
    results.add(currentNumber);
    currentNumber += tempNum;
    if (currentNumber <= 0) {
      // overflow: restart the sequence from the beginning
      currentNumber = 1;
    }
  }
}
/**
 * FibonacciOperator that throws exactly once after a configurable number of
 * windows (optionally only after a checkpoint has been committed) to simulate
 * an operator failure for recovery tests.
 */
public static class FailableFibonacciOperator extends FibonacciOperator implements Operator.CheckpointListener
{
  private boolean committed = false;
  private int simulateFailureWindows = 0;
  private boolean simulateFailureAfterCommit = false;
  private int windowCount = 0;
  // Static so the "already failed" flag survives operator redeploy after recovery.
  public static volatile boolean failureSimulated = false;
  @Override
  public void beginWindow(long windowId)
  {
    // Fail once: either after the configured number of windows, or — when
    // simulateFailureAfterCommit is set — only once a commit has been observed.
    if (simulateFailureWindows > 0 && ((simulateFailureAfterCommit && committed) || !simulateFailureAfterCommit) &&
        !failureSimulated) {
      if (windowCount++ == simulateFailureWindows) {
        failureSimulated = true;
        throw new RuntimeException("simulating failure");
      }
    }
  }
  @Override
  public void checkpointed(long windowId)
  {
  }
  @Override
  public void committed(long windowId)
  {
    committed = true;
  }
  // Configure how many windows to wait before failing, and whether to wait for a commit.
  public void setSimulateFailureWindows(int windows, boolean afterCommit)
  {
    this.simulateFailureAfterCommit = afterCommit;
    this.simulateFailureWindows = windows;
  }
}
/**
 * DefaultDelayOperator that throws exactly once after a configurable number of
 * windows (optionally only after a commit) to simulate a failure of the delay
 * operator itself. Mirrors {@link FailableFibonacciOperator}.
 */
public static class FailableDelayOperator extends DefaultDelayOperator implements Operator.CheckpointListener
{
  private boolean committed = false;
  private int simulateFailureWindows = 0;
  private boolean simulateFailureAfterCommit = false;
  private int windowCount = 0;
  // Static so the "already failed" flag survives operator redeploy after recovery.
  private static volatile boolean failureSimulated = false;
  @Override
  public void beginWindow(long windowId)
  {
    super.beginWindow(windowId);
    // Fail once: either after the configured number of windows, or — when
    // simulateFailureAfterCommit is set — only once a commit has been observed.
    if (simulateFailureWindows > 0 && ((simulateFailureAfterCommit && committed) || !simulateFailureAfterCommit) &&
        !failureSimulated) {
      if (windowCount++ == simulateFailureWindows) {
        failureSimulated = true;
        throw new RuntimeException("simulating failure");
      }
    }
  }
  @Override
  public void checkpointed(long windowId)
  {
  }
  @Override
  public void committed(long windowId)
  {
    committed = true;
  }
  // Configure how many windows to wait before failing, and whether to wait for a commit.
  public void setSimulateFailureWindows(int windows, boolean afterCommit)
  {
    this.simulateFailureAfterCommit = afterCommit;
    this.simulateFailureWindows = windows;
  }
}
@Test
public void testFibonacci() throws Exception
{
  // Wire FIB's output back into its input through a delay operator and verify
  // that the first 10 emitted values are the Fibonacci sequence.
  LogicalPlan dag = new LogicalPlan();
  TestGeneratorInputOperator dummyInput = dag.addOperator("DUMMY", TestGeneratorInputOperator.class);
  FibonacciOperator fib = dag.addOperator("FIB", FibonacciOperator.class);
  DefaultDelayOperator opDelay = dag.addOperator("opDelay", DefaultDelayOperator.class);
  dag.addStream("dummy_to_operator", dummyInput.outport, fib.dummyInputPort);
  dag.addStream("operator_to_delay", fib.output, opDelay.input);
  dag.addStream("delay_to_operator", opDelay.output, fib.input);
  FibonacciOperator.results.clear();
  final StramLocalCluster localCluster = new StramLocalCluster(dag);
  // Stop the local cluster once enough results have been collected.
  localCluster.setExitCondition(new Callable<Boolean>()
  {
    @Override
    public Boolean call() throws Exception
    {
      return FibonacciOperator.results.size() >= 10;
    }
  });
  localCluster.run(10000);
  Assert.assertArrayEquals(Arrays.copyOfRange(FIBONACCI_NUMBERS, 0, 10),
      FibonacciOperator.results.subList(0, 10).toArray());
}
@Test
public void testFibonacciRecovery1() throws Exception
{
  // Recovery scenario 1: the Fibonacci operator itself fails (after a commit)
  // and the platform must recover the delay loop and continue the sequence.
  LogicalPlan dag = StramTestSupport.createDAG(testMeta);
  TestGeneratorInputOperator dummyInput = dag.addOperator("DUMMY", TestGeneratorInputOperator.class);
  FailableFibonacciOperator fib = dag.addOperator("FIB", FailableFibonacciOperator.class);
  DefaultDelayOperator opDelay = dag.addOperator("opDelay", DefaultDelayOperator.class);
  fib.setSimulateFailureWindows(3, true);
  dag.addStream("dummy_to_operator", dummyInput.outport, fib.dummyInputPort);
  dag.addStream("operator_to_delay", fib.output, opDelay.input);
  dag.addStream("delay_to_operator", opDelay.output, fib.input);
  dag.getAttributes().put(LogicalPlan.CHECKPOINT_WINDOW_COUNT, 2);
  dag.getAttributes().put(LogicalPlan.STREAMING_WINDOW_SIZE_MILLIS, 300);
  FailableFibonacciOperator.results.clear();
  FailableFibonacciOperator.failureSimulated = false;
  final StramLocalCluster localCluster = new StramLocalCluster(dag);
  localCluster.setPerContainerBufferServer(true);
  localCluster.setExitCondition(new Callable<Boolean>()
  {
    @Override
    public Boolean call() throws Exception
    {
      return FailableFibonacciOperator.results.size() >= 30;
    }
  });
  localCluster.run(60000);
  Assert.assertTrue("failure should be invoked", FailableFibonacciOperator.failureSimulated);
  // Compare distinct sorted values via TreeSet — presumably because windows may
  // be replayed after recovery, producing duplicates in results (TODO confirm).
  Assert.assertArrayEquals(Arrays.copyOfRange(new TreeSet<>(Arrays.asList(FIBONACCI_NUMBERS)).toArray(), 0, 20),
      Arrays.copyOfRange(new TreeSet<>(FibonacciOperator.results).toArray(), 0, 20));
}
@Test
public void testFibonacciRecovery2() throws Exception
{
  // Recovery scenario 2: the DELAY operator fails (after a commit) and the
  // platform must recover the loop and continue the sequence.
  LogicalPlan dag = StramTestSupport.createDAG(testMeta);
  TestGeneratorInputOperator dummyInput = dag.addOperator("DUMMY", TestGeneratorInputOperator.class);
  FibonacciOperator fib = dag.addOperator("FIB", FibonacciOperator.class);
  FailableDelayOperator opDelay = dag.addOperator("opDelay", FailableDelayOperator.class);
  opDelay.setSimulateFailureWindows(5, true);
  dag.addStream("dummy_to_operator", dummyInput.outport, fib.dummyInputPort);
  dag.addStream("operator_to_delay", fib.output, opDelay.input);
  dag.addStream("delay_to_operator", opDelay.output, fib.input);
  dag.getAttributes().put(LogicalPlan.CHECKPOINT_WINDOW_COUNT, 2);
  dag.getAttributes().put(LogicalPlan.STREAMING_WINDOW_SIZE_MILLIS, 300);
  FibonacciOperator.results.clear();
  FailableDelayOperator.failureSimulated = false;
  final StramLocalCluster localCluster = new StramLocalCluster(dag);
  localCluster.setPerContainerBufferServer(true);
  localCluster.setExitCondition(new Callable<Boolean>()
  {
    @Override
    public Boolean call() throws Exception
    {
      return FibonacciOperator.results.size() >= 30;
    }
  });
  localCluster.run(60000);
  Assert.assertTrue("failure should be invoked", FailableDelayOperator.failureSimulated);
  // Compare distinct sorted values via TreeSet — presumably because windows may
  // be replayed after recovery, producing duplicates in results (TODO confirm).
  Assert.assertArrayEquals(Arrays.copyOfRange(new TreeSet<>(Arrays.asList(FIBONACCI_NUMBERS)).toArray(), 0, 20),
      Arrays.copyOfRange(new TreeSet<>(FibonacciOperator.results).toArray(), 0, 20));
}
@Test
public void testCheckpointUpdate()
{
  // Build a DAG with an iteration loop B -> C -> Delay -> B and verify that
  // StreamingContainerManager computes the recovery checkpoints correctly for
  // the strongly connected (looping) group and for operators outside it.
  LogicalPlan dag = StramTestSupport.createDAG(testMeta);
  TestGeneratorInputOperator opA = dag.addOperator("A", TestGeneratorInputOperator.class);
  GenericTestOperator opB = dag.addOperator("B", GenericTestOperator.class);
  GenericTestOperator opC = dag.addOperator("C", GenericTestOperator.class);
  GenericTestOperator opD = dag.addOperator("D", GenericTestOperator.class);
  DefaultDelayOperator<Object> opDelay = dag.addOperator("opDelay", new DefaultDelayOperator<>());
  dag.addStream("AtoB", opA.outport, opB.inport1);
  dag.addStream("BtoC", opB.outport1, opC.inport1);
  dag.addStream("CtoD", opC.outport1, opD.inport1);
  dag.addStream("CtoDelay", opC.outport2, opDelay.input);
  dag.addStream("DelayToB", opDelay.output, opB.inport2);
  dag.validate();
  dag.setAttribute(com.datatorrent.api.Context.OperatorContext.STORAGE_AGENT, new MemoryStorageAgent());
  StreamingContainerManager scm = new StreamingContainerManager(dag);
  PhysicalPlan plan = scm.getPhysicalPlan();
  // set all operators as active to enable recovery window id update
  for (PTOperator oper : plan.getAllOperators().values()) {
    oper.setState(PTOperator.State.ACTIVE);
  }
  Clock clock = new SystemClock();
  PTOperator opA1 = plan.getOperators(dag.getMeta(opA)).get(0);
  PTOperator opB1 = plan.getOperators(dag.getMeta(opB)).get(0);
  PTOperator opC1 = plan.getOperators(dag.getMeta(opC)).get(0);
  PTOperator opDelay1 = plan.getOperators(dag.getMeta(opDelay)).get(0);
  PTOperator opD1 = plan.getOperators(dag.getMeta(opD)).get(0);
  // Seed per-operator checkpoint lists; the loop members share cp3 as their
  // common (earliest usable) checkpoint.
  Checkpoint cp3 = new Checkpoint(3L, 0, 0);
  Checkpoint cp5 = new Checkpoint(5L, 0, 0);
  Checkpoint cp4 = new Checkpoint(4L, 0, 0);
  opB1.checkpoints.add(cp3);
  opC1.checkpoints.add(cp3);
  opC1.checkpoints.add(cp4);
  opDelay1.checkpoints.add(cp3);
  opDelay1.checkpoints.add(cp5);
  opD1.checkpoints.add(cp5);
  // construct grouping that would be supplied through LogicalPlan
  Set<OperatorMeta> stronglyConnected = Sets.newHashSet(dag.getMeta(opB), dag.getMeta(opC), dag.getMeta(opDelay));
  Map<OperatorMeta, Set<OperatorMeta>> groups = new HashMap<>();
  for (OperatorMeta om : stronglyConnected) {
    groups.put(om, stronglyConnected);
  }
  UpdateCheckpointsContext ctx = new UpdateCheckpointsContext(clock, false, groups);
  scm.updateRecoveryCheckpoints(opB1, ctx);
  Assert.assertEquals("checkpoint " + opA1, Checkpoint.INITIAL_CHECKPOINT, opA1.getRecoveryCheckpoint());
  // Fixed copy-paste bug: this assertion previously checked opC1's recovery
  // checkpoint (already asserted on the next line) and never verified opB1's.
  Assert.assertEquals("checkpoint " + opB1, cp3, opB1.getRecoveryCheckpoint());
  Assert.assertEquals("checkpoint " + opC1, cp3, opC1.getRecoveryCheckpoint());
  Assert.assertEquals("checkpoint " + opD1, cp5, opD1.getRecoveryCheckpoint());
}
@Test
public void testValidationWithMultipleStreamLoops()
{
  // Two parallel streams between Op1 and Op2 plus a delay loop back to Op1;
  // validation must accept this topology without throwing.
  LogicalPlan dag = StramTestSupport.createDAG(testMeta);
  TestGeneratorInputOperator source = dag.addOperator("A", TestGeneratorInputOperator.class);
  GenericTestOperator op1 = dag.addOperator("Op1", GenericTestOperator.class);
  GenericTestOperator op2 = dag.addOperator("Op2", GenericTestOperator.class);
  DefaultDelayOperator<Object> delay = dag.addOperator("Delay", DefaultDelayOperator.class);
  dag.addStream("Source", source.outport, op1.inport1);
  dag.addStream("Stream1", op1.outport1, op2.inport1);
  dag.addStream("Stream2", op1.outport2, op2.inport2);
  dag.addStream("Op to Delay", op2.outport1, delay.input);
  dag.addStream("Delay to Op", delay.output, op1.inport2);
  dag.validate();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.end2end.index;
import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver;
import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.end2end.BaseOwnClusterHBaseManagedTimeIT;
import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
import org.apache.phoenix.query.QueryConstants;
import org.apache.phoenix.query.QueryServices;
import org.apache.phoenix.schema.PIndexState;
import org.apache.phoenix.schema.PTableType;
import org.apache.phoenix.util.MetaDataUtil;
import org.apache.phoenix.util.PropertiesUtil;
import org.apache.phoenix.util.QueryUtil;
import org.apache.phoenix.util.ReadOnlyProps;
import org.apache.phoenix.util.SchemaUtil;
import org.apache.phoenix.util.StringUtil;
import org.apache.phoenix.util.TestUtil;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import com.google.common.collect.Maps;
/**
*
* Test for failure of region server to write to index table.
* For some reason dropping tables after running this test
* fails unless it runs its own mini cluster.
*
*/
@Category(NeedsOwnMiniClusterTest.class)
@RunWith(Parameterized.class)
public class MutableIndexFailureIT extends BaseOwnClusterHBaseManagedTimeIT {
public static volatile boolean FAIL_WRITE = false;
public static final String INDEX_NAME = "IDX";
private String tableName;
private String indexName;
private String fullTableName;
private String fullIndexName;
private final boolean transactional;
private final boolean localIndex;
private final String tableDDLOptions;
private final boolean isNamespaceMapped;
private String schema = "TEST";
/**
 * @param transactional     whether the data table is created with TRANSACTIONAL=true
 * @param localIndex        whether a LOCAL index is created instead of a global one
 * @param isNamespaceMapped whether HBase namespace mapping is enabled
 */
public MutableIndexFailureIT(boolean transactional, boolean localIndex, boolean isNamespaceMapped) {
    this.transactional = transactional;
    this.localIndex = localIndex;
    this.tableDDLOptions = transactional ? " TRANSACTIONAL=true " : "";
    // Encode the parameter combination into the table name so parameterized runs
    // operate on distinct tables.
    this.tableName = (localIndex ? "L_" : "") + TestUtil.DEFAULT_DATA_TABLE_NAME + (transactional ? "_TXN" : "")
            + (isNamespaceMapped ? "_NM" : "");
    this.indexName = INDEX_NAME;
    this.fullTableName = SchemaUtil.getTableName(schema, tableName);
    this.fullIndexName = SchemaUtil.getTableName(schema, indexName);
    this.isNamespaceMapped = isNamespaceMapped;
}
@BeforeClass
public static void doSetup() throws Exception {
    Map<String, String> serverProps = Maps.newHashMapWithExpectedSize(10);
    // Install the failing observer on every region so index writes can be
    // rejected on demand via the FAIL_WRITE flag.
    serverProps.put("hbase.coprocessor.region.classes", FailingRegionObserver.class.getName());
    serverProps.put(HConstants.HBASE_CLIENT_RETRIES_NUMBER, "2");
    serverProps.put(HConstants.HBASE_RPC_TIMEOUT_KEY, "10000");
    serverProps.put("hbase.client.pause", "5000");
    serverProps.put("data.tx.snapshot.dir", "/tmp");
    // Effectively disable the balancer so regions stay put for the test duration.
    serverProps.put("hbase.balancer.period", String.valueOf(Integer.MAX_VALUE));
    Map<String, String> clientProps = Collections.singletonMap(QueryServices.TRANSACTIONS_ENABLED, "true");
    NUM_SLAVES_BASE = 4;
    setUpTestDriver(new ReadOnlyProps(serverProps.entrySet().iterator()), new ReadOnlyProps(clientProps.entrySet().iterator()));
}
// All 8 combinations of { transactional, localIndex, isNamespaceMapped }.
@Parameters(name = "transactional = {0}, localIndex = {1}, isNamespaceMapped = {2}")
public static Collection<Boolean[]> data() {
    return Arrays.asList(new Boolean[][] { { false, false, true }, { false, false, false }, { false, true, true },
            { false, true, false }, { true, false, true }, { true, true, true }, { true, false, false },
            { true, true, false } });
}
@Test
public void testWriteFailureDisablesIndex() throws Exception {
    // Thin entry point; the scenario lives in the reusable helper below.
    helpTestWriteFailureDisablesIndex();
}
/**
 * Core scenario: create a mutable table with an index, force index writes to
 * fail via {@link FailingRegionObserver}, and verify that (a) non-transactional
 * tables get their index disabled while data writes succeed, (b) transactional
 * tables fail atomically and keep the index ACTIVE, and (c) once writes succeed
 * again the index returns to ACTIVE and serves correct data.
 */
public void helpTestWriteFailureDisablesIndex() throws Exception {
    Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
    props.put(QueryServices.IS_NAMESPACE_MAPPING_ENABLED, String.valueOf(isNamespaceMapped));
    try (Connection conn = driver.connect(url, props)) {
        String query;
        ResultSet rs;
        conn.setAutoCommit(false);
        if (isNamespaceMapped) {
            conn.createStatement().execute("CREATE SCHEMA IF NOT EXISTS " + schema);
        }
        conn.createStatement().execute("CREATE TABLE " + fullTableName
                + " (k VARCHAR NOT NULL PRIMARY KEY, v1 VARCHAR, v2 VARCHAR) " + tableDDLOptions);
        query = "SELECT * FROM " + fullTableName;
        rs = conn.createStatement().executeQuery(query);
        assertFalse(rs.next());
        // Index writes succeed while FAIL_WRITE is false.
        FAIL_WRITE = false;
        conn.createStatement().execute(
                "CREATE " + (localIndex ? "LOCAL " : "") + "INDEX " + indexName + " ON " + fullTableName + " (v1) INCLUDE (v2)");
        query = "SELECT * FROM " + fullIndexName;
        rs = conn.createStatement().executeQuery(query);
        assertFalse(rs.next());
        // Verify the metadata for index is correct.
        rs = conn.getMetaData().getTables(null, StringUtil.escapeLike(schema), indexName,
                new String[] { PTableType.INDEX.toString() });
        assertTrue(rs.next());
        assertEquals(indexName, rs.getString(3));
        assertEquals(PIndexState.ACTIVE.toString(), rs.getString("INDEX_STATE"));
        assertFalse(rs.next());
        // Seed three rows while the index is healthy.
        PreparedStatement stmt = conn.prepareStatement("UPSERT INTO " + fullTableName + " VALUES(?,?,?)");
        stmt.setString(1, "a");
        stmt.setString(2, "x");
        stmt.setString(3, "1");
        stmt.execute();
        stmt.setString(1, "b");
        stmt.setString(2, "y");
        stmt.setString(3, "2");
        stmt.execute();
        stmt.setString(1, "c");
        stmt.setString(2, "z");
        stmt.setString(3, "3");
        stmt.execute();
        conn.commit();
        // A full scan of the data table (index bypassed) must see all three rows.
        query = "SELECT /*+ NO_INDEX */ k,v1 FROM " + fullTableName;
        rs = conn.createStatement().executeQuery("EXPLAIN " + query);
        String expectedPlan = "CLIENT PARALLEL 1-WAY FULL SCAN OVER "
                + SchemaUtil.getPhysicalTableName(fullTableName.getBytes(), isNamespaceMapped);
        assertEquals(expectedPlan, QueryUtil.getExplainPlan(rs));
        rs = conn.createStatement().executeQuery(query);
        assertTrue(rs.next());
        assertEquals("a", rs.getString(1));
        assertEquals("x", rs.getString(2));
        assertTrue(rs.next());
        assertEquals("b", rs.getString(1));
        assertEquals("y", rs.getString(2));
        assertTrue(rs.next());
        assertEquals("c", rs.getString(1));
        assertEquals("z", rs.getString(2));
        assertFalse(rs.next());
        // From here on, every index write is rejected by the region observer.
        FAIL_WRITE = true;
        stmt = conn.prepareStatement("UPSERT INTO " + fullTableName + " VALUES(?,?,?)");
        // Insert new row
        stmt.setString(1, "d");
        stmt.setString(2, "d");
        stmt.setString(3, "4");
        stmt.execute();
        // Update existing row
        stmt.setString(1, "a");
        stmt.setString(2, "x2");
        stmt.setString(3, "2");
        stmt.execute();
        // Delete existing row
        stmt = conn.prepareStatement("DELETE FROM " + fullTableName + " WHERE k=?");
        stmt.setString(1, "b");
        stmt.execute();
        try {
            conn.commit();
            fail();
        } catch (Exception e) {
            // Expected: the commit fails because the index write is rejected.
            // (Previously two catch blocks swallowed the exception and printed
            // blank lines via argument-less System.out.println().)
        }
        // Verify the metadata for index is correct.
        rs = conn.getMetaData().getTables(null, StringUtil.escapeLike(schema), indexName,
                new String[] { PTableType.INDEX.toString() });
        assertTrue(rs.next());
        assertEquals(indexName, rs.getString(3));
        // the index is only disabled for non-txn tables upon index table write failure
        if (transactional) {
            assertEquals(PIndexState.ACTIVE.toString(), rs.getString("INDEX_STATE"));
        } else {
            String indexState = rs.getString("INDEX_STATE");
            assertTrue(PIndexState.DISABLE.toString().equals(indexState) || PIndexState.INACTIVE.toString().equals(indexState));
        }
        assertFalse(rs.next());
        // If the table is transactional the write to both the data and index table will fail
        // in an all or none manner. If the table is not transactional, then the data writes
        // would have succeeded while the index writes would have failed.
        if (!transactional) {
            // Verify UPSERT on data table still work after index is disabled
            stmt = conn.prepareStatement("UPSERT INTO " + fullTableName + " VALUES(?,?,?)");
            stmt.setString(1, "a3");
            stmt.setString(2, "x3");
            stmt.setString(3, "3");
            stmt.execute();
            conn.commit();
            // Verify previous writes succeeded to data table
            query = "SELECT /*+ NO_INDEX */ k,v1 FROM " + fullTableName;
            rs = conn.createStatement().executeQuery("EXPLAIN " + query);
            expectedPlan = "CLIENT PARALLEL 1-WAY FULL SCAN OVER "
                    + SchemaUtil.getPhysicalTableName(fullTableName.getBytes(), isNamespaceMapped);
            assertEquals(expectedPlan, QueryUtil.getExplainPlan(rs));
            rs = conn.createStatement().executeQuery(query);
            assertTrue(rs.next());
            assertEquals("a", rs.getString(1));
            assertEquals("x2", rs.getString(2));
            assertTrue(rs.next());
            assertEquals("a3", rs.getString(1));
            assertEquals("x3", rs.getString(2));
            assertTrue(rs.next());
            assertEquals("c", rs.getString(1));
            assertEquals("z", rs.getString(2));
            assertTrue(rs.next());
            assertEquals("d", rs.getString(1));
            assertEquals("d", rs.getString(2));
            assertFalse(rs.next());
        }
        // re-enable index table
        FAIL_WRITE = false;
        boolean isActive = false;
        if (!transactional) {
            // Poll until the background rebuild brings the index back to ACTIVE.
            int maxTries = 3, nTries = 0;
            do {
                Thread.sleep(15 * 1000); // sleep 15 secs
                rs = conn.getMetaData().getTables(null, StringUtil.escapeLike(schema), indexName,
                        new String[] { PTableType.INDEX.toString() });
                assertTrue(rs.next());
                if (PIndexState.ACTIVE.toString().equals(rs.getString("INDEX_STATE"))) {
                    isActive = true;
                    break;
                }
            } while (++nTries < maxTries);
            assertTrue(isActive);
        }
        // Verify UPSERT on data table still work after index table is recreated
        stmt = conn.prepareStatement("UPSERT INTO " + fullTableName + " VALUES(?,?,?)");
        stmt.setString(1, "a3");
        stmt.setString(2, "x4");
        stmt.setString(3, "4");
        stmt.execute();
        conn.commit();
        // verify index table has correct data
        query = "SELECT /*+ INDEX(" + indexName + ") */ k,v1 FROM " + fullTableName;
        rs = conn.createStatement().executeQuery("EXPLAIN " + query);
        expectedPlan = " OVER "
                + (localIndex
                        ? Bytes.toString(SchemaUtil
                                .getPhysicalTableName(fullTableName.getBytes(), isNamespaceMapped).getName())
                        : SchemaUtil.getPhysicalTableName(fullIndexName.getBytes(), isNamespaceMapped).getNameAsString());
        String explainPlan = QueryUtil.getExplainPlan(rs);
        assertTrue(explainPlan.contains(expectedPlan));
        rs = conn.createStatement().executeQuery(query);
        if (transactional) { // failed commit does not get retried
            assertTrue(rs.next());
            assertEquals("a", rs.getString(1));
            assertEquals("x", rs.getString(2));
            assertTrue(rs.next());
            assertEquals("a3", rs.getString(1));
            assertEquals("x4", rs.getString(2));
            assertTrue(rs.next());
            assertEquals("b", rs.getString(1));
            assertEquals("y", rs.getString(2));
            assertTrue(rs.next());
            assertEquals("c", rs.getString(1));
            assertEquals("z", rs.getString(2));
            assertFalse(rs.next());
        } else { // failed commit eventually succeeds
            assertTrue(rs.next());
            assertEquals("d", rs.getString(1));
            assertEquals("d", rs.getString(2));
            assertTrue(rs.next());
            assertEquals("a", rs.getString(1));
            assertEquals("x2", rs.getString(2));
            assertTrue(rs.next());
            assertEquals("a3", rs.getString(1));
            assertEquals("x4", rs.getString(2));
            assertTrue(rs.next());
            assertEquals("c", rs.getString(1));
            assertEquals("z", rs.getString(2));
            assertFalse(rs.next());
        }
    }
}
/**
 * Region observer that rejects batch mutations while {@link #FAIL_WRITE} is
 * set: any write to a region of the index table, or any write touching a local
 * index column family, throws {@link DoNotRetryIOException}.
 */
public static class FailingRegionObserver extends SimpleRegionObserver {
    @Override
    public void preBatchMutate(ObserverContext<RegionCoprocessorEnvironment> c, MiniBatchOperationInProgress<Mutation> miniBatchOp) throws HBaseIOException {
        if (c.getEnvironment().getRegionInfo().getTable().getNameAsString().contains(INDEX_NAME) && FAIL_WRITE) {
            throw new DoNotRetryIOException();
        }
        // NOTE(review): only the FIRST mutation of the mini-batch is inspected for
        // local index families — presumably sufficient for these tests; confirm if
        // batches can mix index and non-index families.
        Mutation operation = miniBatchOp.getOperation(0);
        Set<byte[]> keySet = operation.getFamilyMap().keySet();
        for(byte[] family: keySet) {
            if(Bytes.toString(family).startsWith(QueryConstants.LOCAL_INDEX_COLUMN_FAMILY_PREFIX) && FAIL_WRITE) {
                throw new DoNotRetryIOException();
            }
        }
    }
}
}
| |
/* Copyright (c) 2013 OpenPlans. All rights reserved.
* This code is licensed under the BSD New License, available at the root
* application directory.
*/
package org.geogit.api.plumbing.diff;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.util.List;
import java.util.Random;
import org.junit.Test;
import com.google.common.base.Optional;
import com.google.common.collect.Lists;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.io.WKTReader;
public class GeometryDiffTest {
/**
 * A modified MULTIPOLYGON diff must round-trip through its text form and,
 * when applied to the old geometry, reproduce the new geometry exactly.
 */
@Test
public void testModifiedMultiPolygon() throws Exception {
    // Removed dead code: a list of random coordinates was built here
    // (NUM_COORDS, Random, list) but never used, adding nondeterministic noise.
    Geometry oldGeom = new WKTReader()
            .read("MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)),((20 35, 45 10, 30 5, 10 30, 20 35),(30 20, 20 25, 20 15, 30 20)))");
    Geometry newGeom = new WKTReader()
            .read("MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)),((20 35, 45 20, 30 5, 10 10, 10 30, 20 35)))");
    LCSGeometryDiffImpl diff = new LCSGeometryDiffImpl(Optional.of(oldGeom),
            Optional.of(newGeom));
    // The diff must survive a text serialization round trip unchanged.
    LCSGeometryDiffImpl deserializedDiff = new LCSGeometryDiffImpl(diff.asText());
    assertEquals(diff, deserializedDiff);
    assertEquals("4 point(s) deleted, 1 new point(s) added, 1 point(s) moved", diff.toString());
    // Applying the diff to the old geometry must yield the new geometry.
    Optional<Geometry> resultingGeom = diff.applyOn(Optional.of(oldGeom));
    assertEquals(newGeom, resultingGeom.get());
}
@Test
public void testModifiedMultiLineString() throws Exception {
    // One line is modified, one extended, and a third added; the diff must
    // round-trip through text and re-apply losslessly.
    Geometry oldGeom = new WKTReader()
            .read("MULTILINESTRING ((40 40, 20 45, 45 30, 40 40),(20 35, 45 10, 30 5, 10 30, 20 35))");
    Geometry newGeom = new WKTReader()
            .read("MULTILINESTRING ((40 40, 20 35, 45 30, 40 40),(20 35, 45 20, 30 15, 10 10, 10 30, 20 35),(10 10, 20 20, 35 30))");
    LCSGeometryDiffImpl diff = new LCSGeometryDiffImpl(Optional.of(oldGeom),
            Optional.of(newGeom));
    LCSGeometryDiffImpl deserializedDiff = new LCSGeometryDiffImpl(diff.asText());
    assertEquals(diff, deserializedDiff);
    assertEquals("0 point(s) deleted, 4 new point(s) added, 3 point(s) moved", diff.toString());
    Optional<Geometry> resultingGeom = diff.applyOn(Optional.of(oldGeom));
    assertEquals(newGeom, resultingGeom.get());
}
@Test
public void testNoOldGeometry() throws Exception {
    // An absent old geometry means the diff is a pure addition of all points.
    Geometry newGeom = new WKTReader()
            .read("MULTILINESTRING ((40 40, 20 35, 45 30, 40 40),(20 35, 45 20, 30 15, 10 10, 10 30, 20 35),(10 10, 20 20, 35 30))");
    LCSGeometryDiffImpl diff = new LCSGeometryDiffImpl(Optional.fromNullable((Geometry) null),
            Optional.of(newGeom));
    LCSGeometryDiffImpl deserializedDiff = new LCSGeometryDiffImpl(diff.asText());
    assertEquals(diff, deserializedDiff);
    assertEquals("0 point(s) deleted, 13 new point(s) added, 0 point(s) moved", diff.toString());
}
@Test
public void testNoNewGeometry() throws Exception {
Geometry oldGeom = new WKTReader()
.read("MULTILINESTRING ((40 40, 20 45, 45 30, 40 40),(20 35, 45 10, 30 5, 10 30, 20 35))");
LCSGeometryDiffImpl diff = new LCSGeometryDiffImpl(Optional.of(oldGeom),
Optional.fromNullable((Geometry) null));
LCSGeometryDiffImpl deserializedDiff = new LCSGeometryDiffImpl(diff.asText());
assertEquals(diff, deserializedDiff);
assertEquals("9 point(s) deleted, 0 new point(s) added, 0 point(s) moved", diff.toString());
Optional<Geometry> resultingGeom = diff.applyOn(Optional.of(oldGeom));
assertFalse(resultingGeom.isPresent());
}
@Test
public void testDoubleReverseEquality() throws Exception {
Geometry oldGeom = new WKTReader()
.read("MULTILINESTRING ((40 40, 20 45, 45 30, 40 40),(20 35, 45 10, 30 5, 10 30, 20 35))");
Geometry newGeom = new WKTReader()
.read("MULTILINESTRING ((40 40, 20 35, 45 30, 40 40),(20 35, 45 20, 30 15, 10 10, 10 30, 20 35),(10 10, 20 20, 35 30))");
LCSGeometryDiffImpl diff = new LCSGeometryDiffImpl(Optional.of(oldGeom),
Optional.of(newGeom));
LCSGeometryDiffImpl diff2 = diff.reversed().reversed();
assertEquals(diff, diff2);
}
@Test
public void testCanApply() throws Exception {
Geometry oldGeom = new WKTReader()
.read("MULTILINESTRING ((40 40, 20 45, 45 30, 40 40),(20 35, 45 10, 30 5, 10 30, 20 35))");
Geometry newGeom = new WKTReader()
.read("MULTILINESTRING ((40 40, 20 35, 45 30, 40 40),(20 35, 45 20, 30 15, 10 10, 10 30, 20 35),(10 10, 20 20, 35 30))");
LCSGeometryDiffImpl diff = new LCSGeometryDiffImpl(Optional.of(oldGeom),
Optional.of(newGeom));
Geometry oldGeomModified = new WKTReader()
.read("MULTILINESTRING ((40 40, 20 45, 45 30, 40 41),(20 35, 45 10, 30 5, 10 30, 20 35))");
assertTrue(diff.canBeAppliedOn(Optional.of(oldGeomModified)));
Geometry oldGeomModified2 = new WKTReader().read("MULTILINESTRING ((40 40, 10 10))");
assertFalse(diff.canBeAppliedOn(Optional.of(oldGeomModified2)));
}
@Test
public void testConflict() throws Exception {
Geometry oldGeom = new WKTReader()
.read("MULTILINESTRING ((40 40, 20 45, 45 30, 40 40),(20 35, 45 10, 30 5, 10 30, 20 35))");
Geometry newGeom = new WKTReader()
.read("MULTILINESTRING ((40 40, 20 45),(20 35, 45 10, 20 35))");
GeometryAttributeDiff diff = new GeometryAttributeDiff(Optional.of(oldGeom),
Optional.of(newGeom));
Geometry newGeom2 = new WKTReader()
.read("MULTILINESTRING ((40 40, 20 45, 41 33, 25 25),(20 35, 45 10, 30 5, 10 30, 20 35))");
GeometryAttributeDiff diff2 = new GeometryAttributeDiff(Optional.of(oldGeom),
Optional.of(newGeom2));
assertTrue(diff.conflicts(diff2));
}
@Test
public void testConflictEditedSamePoint() throws Exception {
Geometry oldGeom = new WKTReader()
.read("MULTILINESTRING ((40 40, 20 45, 45 30, 40 40),(20 35, 45 10, 30 5, 10 30, 20 35))");
Geometry newGeom = new WKTReader()
.read("MULTILINESTRING ((40 40, 20 45, 48 32, 40 40),(20 35, 45 10, 30 5, 10 30, 20 35))");
GeometryAttributeDiff diff = new GeometryAttributeDiff(Optional.of(oldGeom),
Optional.of(newGeom));
Geometry newGeom2 = new WKTReader()
.read("MULTILINESTRING ((40 40, 20 45, 41 33, 40 40),(20 35, 45 10, 30 5, 10 30, 20 35))");
GeometryAttributeDiff diff2 = new GeometryAttributeDiff(Optional.of(oldGeom),
Optional.of(newGeom2));
assertTrue(diff.conflicts(diff2));
}
@Test
public void testNoConflict() throws Exception {
Geometry oldGeom = new WKTReader()
.read("MULTILINESTRING ((40 40, 20 45, 45 30, 40 40),(20 35, 45 10, 30 5, 10 30, 20 35))");
Geometry newGeom = new WKTReader()
.read("MULTILINESTRING ((40 40, 20 45, 45 35, 30 30),(20 35, 45 10, 30 5, 10 30, 20 35))");
GeometryAttributeDiff diff = new GeometryAttributeDiff(Optional.of(oldGeom),
Optional.of(newGeom));
Geometry newGeom2 = new WKTReader()
.read("MULTILINESTRING ((40 40, 20 45, 45 30, 40 40),(20 35, 45 10, 31 6, 10 30, 20 35))");
GeometryAttributeDiff diff2 = new GeometryAttributeDiff(Optional.of(oldGeom),
Optional.of(newGeom2));
assertFalse(diff.conflicts(diff2));
Optional<?> merged = diff2.applyOn(Optional.of(newGeom));
assertTrue(merged.isPresent());
Geometry mergedGeom = (Geometry) merged.get();
assertEquals(
"MULTILINESTRING ((40 40, 20 45, 45 35, 30 30), (20 35, 45 10, 31 6, 10 30, 20 35))",
mergedGeom.toText());
}
@Test
public void testNoConflictAddingPoints() throws Exception {
Geometry oldGeom = new WKTReader()
.read("MULTILINESTRING ((40 40, 20 45, 45 30, 40 40),(20 35, 45 10, 30 5, 10 30, 20 35))");
Geometry newGeom = new WKTReader()
.read("MULTILINESTRING ((40 40, 10 10, 20 45, 45 30, 30 30),(20 35, 45 10, 30 5, 10 30, 20 35))");
GeometryAttributeDiff diff = new GeometryAttributeDiff(Optional.of(oldGeom),
Optional.of(newGeom));
Geometry newGeom2 = new WKTReader()
.read("MULTILINESTRING ((40 40, 20 45, 45 30, 40 40),(20 35, 45 10, 31 6, 10 30, 20 35))");
GeometryAttributeDiff diff2 = new GeometryAttributeDiff(Optional.of(oldGeom),
Optional.of(newGeom2));
assertFalse(diff.conflicts(diff2));
Optional<?> merged = diff2.applyOn(Optional.of(newGeom));
assertTrue(merged.isPresent());
Geometry mergedGeom = (Geometry) merged.get();
assertEquals(
"MULTILINESTRING ((40 40, 10 10, 20 45, 45 30, 30 30), (20 35, 45 10, 31 6, 10 30, 20 35))",
mergedGeom.toText());
}
@Test
public void testNoConflictRemovingPoints() throws Exception {
Geometry oldGeom = new WKTReader()
.read("MULTILINESTRING ((40 40, 20 45, 45 30, 40 40),(20 35, 45 10, 30 5, 10 30, 20 35))");
Geometry newGeom = new WKTReader()
.read("MULTILINESTRING ((40 40, 45 30, 40 40),(20 35, 45 10, 30 5, 10 30, 20 35))");
GeometryAttributeDiff diff = new GeometryAttributeDiff(Optional.of(oldGeom),
Optional.of(newGeom));
Geometry newGeom2 = new WKTReader()
.read("MULTILINESTRING ((40 40, 20 45, 45 30, 40 40),(20 35, 45 10, 31 6, 10 30, 20 35))");
GeometryAttributeDiff diff2 = new GeometryAttributeDiff(Optional.of(oldGeom),
Optional.of(newGeom2));
assertFalse(diff.conflicts(diff2));
Optional<?> merged = diff2.applyOn(Optional.of(newGeom));
assertTrue(merged.isPresent());
Geometry mergedGeom = (Geometry) merged.get();
assertEquals("MULTILINESTRING ((40 40, 45 30, 40 40), (20 35, 45 10, 31 6, 10 30, 20 35))",
mergedGeom.toText());
}
@Test
public void testNoConflictIfSameDiff() throws Exception {
Geometry oldGeom = new WKTReader()
.read("MULTILINESTRING ((40 40, 20 45, 45 30, 40 40),(20 35, 45 10, 30 5, 10 30, 20 35))");
Geometry newGeom = new WKTReader()
.read("MULTILINESTRING ((40 40, 20 45, 45 35, 30 30),(20 35, 45 10, 30 5, 10 30, 20 35))");
GeometryAttributeDiff diff = new GeometryAttributeDiff(Optional.of(oldGeom),
Optional.of(newGeom));
GeometryAttributeDiff diff2 = new GeometryAttributeDiff(Optional.of(oldGeom),
Optional.of(newGeom));
assertFalse(diff.conflicts(diff2));
}
}
| |
/*
Copyright (c) 2014-2015 F-Secure
See LICENSE for details
*/
package cc.softwarefactory.lokki.android.activities;
import android.app.Activity;
import android.content.ComponentName;
import android.content.ServiceConnection;
import android.os.IBinder;
import android.support.v4.view.MenuItemCompat;
import android.support.v7.app.AlertDialog;
import android.content.ActivityNotFoundException;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Bundle;
import android.support.v4.app.FragmentManager;
import android.support.v4.content.LocalBroadcastManager;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.WindowManager;
import android.widget.CheckBox;
import android.widget.ImageView;
import android.widget.PopupMenu;
import android.support.v7.widget.SearchView;
import android.widget.Toast;
import com.androidquery.AQuery;
import org.json.JSONException;
import org.json.JSONObject;
import cc.softwarefactory.lokki.android.MainApplication;
import cc.softwarefactory.lokki.android.R;
import cc.softwarefactory.lokki.android.ResultListener;
import cc.softwarefactory.lokki.android.datasources.contacts.ContactDataSource;
import cc.softwarefactory.lokki.android.datasources.contacts.DefaultContactDataSource;
import cc.softwarefactory.lokki.android.fragments.AboutFragment;
import cc.softwarefactory.lokki.android.fragments.AddContactsFragment;
import cc.softwarefactory.lokki.android.fragments.ContactsFragment;
import cc.softwarefactory.lokki.android.fragments.MapViewFragment;
import cc.softwarefactory.lokki.android.fragments.NavigationDrawerFragment;
import cc.softwarefactory.lokki.android.fragments.PlacesFragment;
import cc.softwarefactory.lokki.android.fragments.PreferencesFragment;
import cc.softwarefactory.lokki.android.services.DataService;
import cc.softwarefactory.lokki.android.services.LocationService;
import cc.softwarefactory.lokki.android.utilities.AnalyticsUtils;
import cc.softwarefactory.lokki.android.utilities.PreferenceUtils;
import cc.softwarefactory.lokki.android.utilities.ServerApi;
import cc.softwarefactory.lokki.android.utilities.Utils;
import cc.softwarefactory.lokki.android.utilities.gcm.GcmHelper;
/**
 * Main entry point of the app. Hosts the navigation drawer and swaps the content
 * fragments (map, places, contacts, settings, about), manages sign-in/sign-out,
 * and owns the bindings to the background {@link LocationService} and
 * {@link DataService}.
 */
public class MainActivity extends AppCompatActivity implements NavigationDrawerFragment.NavigationDrawerCallbacks {

    private static final String TAG = "MainActivity";
    private static final int REQUEST_CODE_EMAIL = 1001;
    private static final int REQUEST_TERMS = 1002;
    public static final String TAG_MAP_FRAGMENT = "mapFragment";
    public static final String TAG_PLACES_FRAGMENT = "placesFragment";
    public static final String TAG_CONTACTS_FRAGMENT = "contactsFragment";
    public static final String TAG_ADD_CONTACTS_FRAGMENT = "addContactsFragment";
    public static final String TAG_PREFERENCES_FRAGMENT = "preferencesFragment";
    public static final String TAG_ABOUT_FRAGMENT = "aboutFragment";

    private NavigationDrawerFragment mNavigationDrawerFragment;
    private CharSequence mTitle;
    // Index of the currently shown screen; -10 is the special "add contacts" screen.
    private int selectedOption = 0;
    private ContactDataSource mContactDataSource;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        Log.d(TAG, "onCreate");
        mContactDataSource = new DefaultContactDataSource();
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mTitle = getTitle();
        // Create the navigation drawer
        mNavigationDrawerFragment = (NavigationDrawerFragment) getSupportFragmentManager().findFragmentById(R.id.navigation_drawer);
        mNavigationDrawerFragment.setUp(R.id.navigation_drawer, (DrawerLayout) findViewById(R.id.drawer_layout));
        // Set up the callback for the user menu button
        AQuery aq = new AQuery(findViewById(R.id.drawer_layout));
        aq.id(R.id.user_popout_menu_button).clicked(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Log.d(TAG, "Clicked user menu button");
                showUserPopupMenu(v);
            }
        });
        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar_layout);
        setSupportActionBar(toolbar);
        ActionBar actionBar = getSupportActionBar();
        if (actionBar != null) {
            actionBar.setTitle(mTitle);
            actionBar.setDisplayHomeAsUpEnabled(true);
        }
    }

    /**
     * Displays the popout user menu containing the Sign Out button
     * @param v The UI element that was clicked to show the menu
     */
    public void showUserPopupMenu(View v){
        PopupMenu menu = new PopupMenu(this, v);
        menu.setOnMenuItemClickListener(new PopupMenu.OnMenuItemClickListener(){
            @Override
            public boolean onMenuItemClick(MenuItem item){
                switch (item.getItemId()){
                    // User clicked the Sign Out option
                    case R.id.signout :
                        // Close the drawer so it isn't open when you log back in
                        mNavigationDrawerFragment.toggleDrawer();
                        // Sign the user out
                        logout();
                        return true;
                    default:
                        return false;
                }
            }
        });
        menu.inflate(R.menu.user_menu);
        menu.show();
    }

    @Override
    protected void onStart() {
        super.onStart();
        Log.d(TAG, "onStart");
        if (firstTimeLaunch()) {
            Log.i(TAG, "onStart - firstTimeLaunch, so showing terms.");
            startActivityForResult(new Intent(this, FirstTimeActivity.class), REQUEST_TERMS);
        } else {
            signUserIn();
        }
    }

    /**
     * Is this the first time the app has been launched?
     * @return true, if the app hasn't been launched before
     */
    private boolean firstTimeLaunch() {
        return !PreferenceUtils.getBoolean(this, PreferenceUtils.KEY_NOT_FIRST_TIME_LAUNCH);
    }

    /**
     * Is the user currently logged in?
     * NOTE: this doesn't guarantee that all user information has already been fetched from the server,
     * but it guarantees that the information can be safely fetched.
     * @return true, if the user has signed in
     */
    public boolean loggedIn() {
        String userAccount = PreferenceUtils.getString(this, PreferenceUtils.KEY_USER_ACCOUNT);
        String userId = PreferenceUtils.getString(this, PreferenceUtils.KEY_USER_ID);
        String authorizationToken = PreferenceUtils.getString(this, PreferenceUtils.KEY_AUTH_TOKEN);
        Log.i(TAG, "User email: " + userAccount);
        Log.i(TAG, "User id: " + userId);
        Log.i(TAG, "authorizationToken: " + authorizationToken);
        return !(userId.isEmpty() || userAccount.isEmpty() || authorizationToken.isEmpty());
    }

    @Override
    protected void onResume() {
        super.onResume();
        Log.d(TAG, "onResume");
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); // WAKE_LOCK
        if (!loggedIn()) {
            Log.i(TAG, "onResume - user NOT logged in, so avoiding launching services.");
            return;
        }
        Log.i(TAG, "onResume - user logged in, so launching services.");
        startServices();
        LocalBroadcastManager.getInstance(this).registerReceiver(exitMessageReceiver, new IntentFilter("EXIT"));
        LocalBroadcastManager.getInstance(this).registerReceiver(switchToMapReceiver, new IntentFilter("GO-TO-MAP"));
        Log.i(TAG, "onResume - check if dashboard is null");
        if (MainApplication.dashboard == null) {
            Log.w(TAG, "onResume - dashboard was null, get dashboard & contacts from server");
            ServerApi.getDashboard(getApplicationContext());
            ServerApi.getContacts(getApplicationContext());
        }
    }

    //-------------Location service interface-------------
    /**
     * Reference to currently bound location service instance
     */
    private LocationService mBoundLocationService;

    /**
     * Currently selected location update accuracy level
     * Will be sent to the service when setLocationServiceAccuracyLevel is called
     */
    private LocationService.LocationAccuracy currentAccuracy = LocationService.LocationAccuracy.BGINACCURATE;

    /**
     * Connection object in charge of fetching location service instances
     */
    private ServiceConnection mLocationServiceConnection = new ServiceConnection() {
        @Override
        public void onServiceConnected(ComponentName name, IBinder service) {
            mBoundLocationService = ((LocationService.LocationBinder)service).getService();
            //Set accuracy level as soon as we're connected
            setLocationServiceAccuracyLevel();
        }
        @Override
        public void onServiceDisconnected(ComponentName name) {
            mBoundLocationService = null;
        }
    };

    /**
     * Sends currently selected accuracy level (in currentAccuracy) to the location service if it's initialized.
     * Called automatically when the service is first initialized.
     */
    private void setLocationServiceAccuracyLevel(){
        if (mBoundLocationService == null){
            Log.i(TAG, "location service not yet bound, not changing accuracy");
            // BUG FIX: previously fell through and dereferenced the null service below,
            // crashing with a NullPointerException. Bail out until the service is bound;
            // onServiceConnected will call this method again.
            return;
        }
        mBoundLocationService.setLocationCheckAccuracy(currentAccuracy);
    }

    /**
     * Creates a connection to the location service.
     * Calls mLocationServiceConnection.onServiceConnected when done.
     */
    private void bindLocationService(){
        bindService(new Intent(this, LocationService.class), mLocationServiceConnection, Context.BIND_AUTO_CREATE);
    }

    /**
     * Removes connection to location service.
     * Calls mLocationServiceConnection.onServiceDisconnected when done.
     */
    private void unbindLocationService(){
        if (mBoundLocationService != null){
            unbindService(mLocationServiceConnection);
        }
    }

    /**
     * Sets an appropriate location update accuracy for background updates.
     * Call setLocationServiceAccuracyLevel() afterwards to send it to the service.
     */
    private void setBackgroundLocationAccuracy(){
        if (MainApplication.buzzPlaces.length() > 0){
            currentAccuracy = LocationService.LocationAccuracy.BGACCURATE;
        }
        else {
            currentAccuracy = LocationService.LocationAccuracy.BGINACCURATE;
        }
    }
    //-------------Location service interface ends-------------

    /**
     * Launches background services if they aren't already running
     */
    private void startServices() {
        //Start location service
        LocationService.start(this.getApplicationContext());
        //Set appropriate location update accuracy
        if (MainApplication.visible){
            currentAccuracy = LocationService.LocationAccuracy.ACCURATE;
        }
        else {
            setBackgroundLocationAccuracy();
        }
        //Create a connection to the location service if it doesn't already exist, else set new location check accuracy
        if (mBoundLocationService == null){
            bindLocationService();
        } else {
            setLocationServiceAccuracyLevel();
        }
        //Start data service
        DataService.start(this.getApplicationContext());
        //Request updates from server
        try {
            ServerApi.requestUpdates(this.getApplicationContext());
        } catch (JSONException e) {
            // Log through the app's normal channel instead of printStackTrace
            Log.e(TAG, "startServices - failed to request updates from server", e);
        }
    }

    @Override
    protected void onPause() {
        // Fixes buggy avatars after leaving the app from the "Map" screen
        MainApplication.avatarCache.evictAll();
        //LocationService.stop(this.getApplicationContext());
        //DataService.stop(this.getApplicationContext());
        LocalBroadcastManager.getInstance(this).unregisterReceiver(switchToMapReceiver);
        LocalBroadcastManager.getInstance(this).unregisterReceiver(exitMessageReceiver);
        super.onPause();
        //Set location update accuracy to low if the service has been initialized
        if (mBoundLocationService != null) {
            setBackgroundLocationAccuracy();
            setLocationServiceAccuracyLevel();
        }
    }

    @Override
    protected void onDestroy()
    {
        LocationService.stop(this.getApplicationContext());
        DataService.stop(this.getApplicationContext());
        //Remove connection to LocationService
        unbindLocationService();
        super.onDestroy();
    }

    /**
     * Ensures that the user is signed in by launching the SignUpActivity if they aren't
     */
    private void signUserIn() {
        if (!loggedIn()) {
            try {
                startActivityForResult(new Intent(this, SignUpActivity.class), REQUEST_CODE_EMAIL);
            } catch (ActivityNotFoundException e) {
                Toast.makeText(this, getString(R.string.general_error), Toast.LENGTH_LONG).show();
                Log.e(TAG, "Could not start SignUpActivity " + e);
                finish();
            }
        } else { // User already logged-in
            MainApplication.userAccount = PreferenceUtils.getString(this, PreferenceUtils.KEY_USER_ACCOUNT);
            GcmHelper.start(getApplicationContext()); // Register to GCM
        }
    }

    @Override
    public void onNavigationDrawerItemSelected(int position) {
        // Position of the logout button
        String[] menuOptions = getResources().getStringArray(R.array.nav_drawer_options);
        FragmentManager fragmentManager = getSupportFragmentManager();
        mTitle = menuOptions[position];
        selectedOption = position;
        ActionBar actionBar = getSupportActionBar();
        // set action bar title if it exists and the user isn't trying to log off
        if (actionBar != null) {
            actionBar.setTitle(mTitle);
        }
        switch (position) {
            case 0: // Map
                fragmentManager.beginTransaction().replace(R.id.container, new MapViewFragment(), TAG_MAP_FRAGMENT).commit();
                break;
            case 1: // Places
                fragmentManager.beginTransaction().replace(R.id.container, new PlacesFragment(), TAG_PLACES_FRAGMENT).commit();
                break;
            case 2: // Contacts
                fragmentManager.beginTransaction().replace(R.id.container, new ContactsFragment(), TAG_CONTACTS_FRAGMENT).commit();
                break;
            case 3: // Settings
                fragmentManager.beginTransaction().replace(R.id.container, new PreferencesFragment(), TAG_PREFERENCES_FRAGMENT).commit();
                break;
            case 4: // About
                fragmentManager.beginTransaction().replace(R.id.container, new AboutFragment(), TAG_ABOUT_FRAGMENT).commit();
                break;
            default:
                fragmentManager.beginTransaction().replace(R.id.container, new MapViewFragment(), TAG_MAP_FRAGMENT).commit();
                break;
        }
        supportInvalidateOptionsMenu();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Menu items are created per-screen in onPrepareOptionsMenu.
        return true;
    }

    @Override
    public boolean onPrepareOptionsMenu(Menu menu) {
        final Activity mainactivity = this;
        Log.d(TAG,"onPrepareOptionsMenu");
        menu.clear();
        if (mNavigationDrawerFragment != null && !mNavigationDrawerFragment.isDrawerOpen()) {
            if (selectedOption == 0) { // Map
                getMenuInflater().inflate(R.menu.map, menu);
                MenuItem menuItem = menu.findItem(R.id.action_visibility);
                if (menuItem != null) {
                    Log.d(TAG, "onPrepareOptionsMenu - Visible: " + MainApplication.visible);
                    if (MainApplication.visible) {
                        menuItem.setIcon(R.drawable.ic_visibility_white_48dp);
                    } else {
                        menuItem.setIcon(R.drawable.ic_visibility_off_white_48dp);
                    }
                }
                //Set up the search bar
                final SearchView searchView=(SearchView) MenuItemCompat.getActionView(menu.findItem(R.id.search));
                searchView.setQueryHint(getString(R.string.search_hint));
                searchView.setOnQueryTextListener(new SearchView.OnQueryTextListener(){
                    @Override
                    public boolean onQueryTextChange(String newText)
                    {
                        return true;
                    }
                    @Override
                    public boolean onQueryTextSubmit(String query)
                    {
                        //Removes focus from the search field in order to prevent multiple key events from
                        //launching this callback. See:
                        //http://stackoverflow.com/questions/17874951/searchview-onquerytextsubmit-runs-twice-while-i-pressed-once
                        searchView.clearFocus();
                        //Launch search activity
                        Intent intent= new Intent(mainactivity,SearchActivity.class);
                        Log.d(TAG,"Search Query submitted");
                        intent.putExtra(SearchActivity.QUERY_MESSAGE, query);
                        startActivity(intent);
                        return true;
                    }
                });
            } else if (selectedOption == 2) { // Contacts screen
                getMenuInflater().inflate(R.menu.contacts, menu);
            } else if (selectedOption == -10) { // Add contacts screen
                getMenuInflater().inflate(R.menu.add_contact, menu);
            }
        }
        return super.onPrepareOptionsMenu(menu);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        int id = item.getItemId();
        switch (id) {
            case R.id.add_contacts: // In Contacts (to add new ones)
                FragmentManager fragmentManager = getSupportFragmentManager();
                AddContactsFragment acf = new AddContactsFragment();
                acf.setContactUtils(mContactDataSource);
                fragmentManager.beginTransaction().replace(R.id.container, acf, TAG_ADD_CONTACTS_FRAGMENT).commit();
                selectedOption = -10;
                supportInvalidateOptionsMenu();
                break;
            case R.id.add_email: // In list of ALL contacts, when adding new ones.
                AnalyticsUtils.eventHit(getString(R.string.analytics_category_ux),
                        getString(R.string.analytics_action_click),
                        getString(R.string.analytics_label_add_email_button));
                AddContactsFragment.addContactFromEmail(this);
                break;
            case R.id.action_visibility:
                AnalyticsUtils.eventHit(getString(R.string.analytics_category_ux),
                        getString(R.string.analytics_action_click),
                        getString(R.string.analytics_label_visibility_toggle));
                toggleVisibility();
                break;
        }
        return super.onOptionsItemSelected(item);
    }

    /** Flips the user's map visibility, persists it and informs the user with a toast. */
    private void toggleVisibility() {
        Utils.setVisibility(!MainApplication.visible, MainActivity.this);
        PreferenceUtils.setBoolean(getApplicationContext(),PreferenceUtils.KEY_SETTING_VISIBILITY, MainApplication.visible);
        if (MainApplication.visible) {
            Toast.makeText(this, getString(R.string.you_are_visible), Toast.LENGTH_LONG).show();
        } else {
            Toast.makeText(this, getString(R.string.you_are_invisible), Toast.LENGTH_LONG).show();
        }
        supportInvalidateOptionsMenu();
    }

    @Override
    public boolean onKeyUp(int keycode, KeyEvent e) {
        switch (keycode) {
            case KeyEvent.KEYCODE_MENU:
                mNavigationDrawerFragment.toggleDrawer();
                return true;
            case KeyEvent.KEYCODE_BACK:
                if (selectedOption == 0) {
                    Log.i(TAG, "Exiting app because requested by user.");
                    finish();
                } else if (selectedOption == -10) { // -10 is the Add Contacts screen
                    mNavigationDrawerFragment.selectNavDrawerItem(3); // 3 is the Contacts screen
                    return true;
                } else {
                    mNavigationDrawerFragment.selectNavDrawerItem(1);
                    return true;
                }
        }
        return super.onKeyUp(keycode, e);
    }

    @Override
    protected void onActivityResult(final int requestCode, final int resultCode, final Intent data) {
        Log.d(TAG, "onActivityResult");
        if (requestCode == REQUEST_CODE_EMAIL) {
            if (resultCode == RESULT_OK) {
                Log.d(TAG, "Returned from sign up. Now we will show the map.");
                startServices();
                mNavigationDrawerFragment.setUserInfo();
                GcmHelper.start(getApplicationContext()); // Register to GCM
            } else {
                Log.w(TAG, "Returned from sign up. Exiting app on request.");
                finish();
            }
        } else if (requestCode == REQUEST_TERMS && resultCode == RESULT_OK) {
            Log.d(TAG, "Returned from terms. Now we will show sign up form.");
            // Terms shown and accepted.
        } else {
            // Unknown request or terms rejected: exit the app.
            Log.e(TAG, "Got - request Code: " + requestCode + ", result: " + resultCode);
            finish();
        }
    }

    public void showUserInMap(View view) { // Used in Contacts
        AnalyticsUtils.eventHit(getString(R.string.analytics_category_ux),
                getString(R.string.analytics_action_click),
                getString(R.string.analytics_label_avatar_show_user));
        if (view == null) {
            return;
        }
        ImageView image = (ImageView) view;
        String email = (String) image.getTag();
        showUserInMap(email);
    }

    private void showUserInMap(String email) { // Used in Contacts
        Log.d(TAG, "showUserInMap: " + email);
        MainApplication.emailBeingTracked = email;
        mNavigationDrawerFragment.selectNavDrawerItem(1); // Position 1 is the Map
    }

    public void toggleIDontWantToSee(View view) {
        AnalyticsUtils.eventHit(getString(R.string.analytics_category_ux),
                getString(R.string.analytics_action_click),
                getString(R.string.analytics_label_show_on_map_checkbox));
        if (view == null) {
            return;
        }
        CheckBox checkBox = (CheckBox) view;
        Boolean allow = checkBox.isChecked();
        String email = (String) checkBox.getTag();
        Log.d(TAG, "toggleIDontWantToSee: " + email + ", Checkbox is: " + allow);
        if (!allow) {
            try {
                MainApplication.iDontWantToSee.put(email, 1);
                Log.d(TAG, MainApplication.iDontWantToSee.toString());
                PreferenceUtils.setString(this, PreferenceUtils.KEY_I_DONT_WANT_TO_SEE, MainApplication.iDontWantToSee.toString());
                ServerApi.ignoreUsers(this, email);
            } catch (JSONException e) {
                // Log through the app's normal channel instead of printStackTrace
                Log.e(TAG, "toggleIDontWantToSee - failed to store ignored user", e);
            }
        } else if (MainApplication.iDontWantToSee.has(email)) {
            Log.d(TAG, "unignoring user");
            MainApplication.iDontWantToSee.remove(email);
            PreferenceUtils.setString(this, PreferenceUtils.KEY_I_DONT_WANT_TO_SEE, MainApplication.iDontWantToSee.toString());
            ServerApi.unignoreUser(this, email);
        }
    }

    public void toggleUserCanSeeMe(View view) { // Used in Contacts
        AnalyticsUtils.eventHit(getString(R.string.analytics_category_ux),
                getString(R.string.analytics_action_click),
                getString(R.string.analytics_label_can_see_me_checkbox));
        if (view != null) {
            CheckBox checkBox = (CheckBox) view;
            Boolean allow = checkBox.isChecked();
            String email = (String) checkBox.getTag();
            Log.d(TAG, "toggleUserCanSeeMe: " + email + ", Checkbox is: " + allow);
            if (!allow) {
                ServerApi.disallowUser(this, email);
            } else {
                ServerApi.allowPeople(this, email, new ResultListener(TAG, "allow user"));
            }
        }
    }

    /** Stops services and shows a blocking dialog asking the user to exit (e.g. auth revoked). */
    private BroadcastReceiver exitMessageReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            Log.d(TAG, "exitMessageReceiver onReceive");
            LocationService.stop(MainActivity.this.getApplicationContext());
            DataService.stop(MainActivity.this.getApplicationContext());
            AlertDialog.Builder alertDialog = new AlertDialog.Builder(MainActivity.this);
            alertDialog.setTitle(getString(R.string.app_name));
            String message = getString(R.string.security_sign_up, MainApplication.userAccount);
            alertDialog.setMessage(message)
                    .setPositiveButton("OK", new DialogInterface.OnClickListener() {
                        @Override
                        public void onClick(DialogInterface dialog, int which) {
                            dialog.dismiss();
                            finish();
                        }
                    })
                    .setCancelable(false);
            alertDialog.show();
        }
    };

    /** Switches the content back to the map screen when a "GO-TO-MAP" broadcast arrives. */
    private BroadcastReceiver switchToMapReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            FragmentManager fragmentManager = getSupportFragmentManager();
            fragmentManager.beginTransaction().replace(R.id.container, new MapViewFragment(), TAG_MAP_FRAGMENT).commit();
            mNavigationDrawerFragment.selectNavDrawerItem(1); // Index 1 because index 0 is the list view header...
        }
    };

    // For dependency injection
    public void setContactUtils(ContactDataSource contactDataSource) {
        this.mContactDataSource = contactDataSource;
    }

    /** Confirms with the user, then clears all stored credentials/state and restarts the activity. */
    public void logout(){
        final MainActivity main = this;
        new AlertDialog.Builder(main)
                .setIcon(R.drawable.ic_power_settings_new_black_48dp)
                .setMessage(R.string.confirm_logout)
                .setPositiveButton(R.string.yes, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which){
                        //Clear logged in status
                        PreferenceUtils.setString(main, PreferenceUtils.KEY_USER_ACCOUNT, null);
                        PreferenceUtils.setString(main, PreferenceUtils.KEY_USER_ID, null);
                        PreferenceUtils.setString(main, PreferenceUtils.KEY_AUTH_TOKEN, null);
                        PreferenceUtils.setString(main, PreferenceUtils.KEY_I_DONT_WANT_TO_SEE, null);
                        PreferenceUtils.setString(main, PreferenceUtils.KEY_CONTACTS, null);
                        PreferenceUtils.setString(main, PreferenceUtils.KEY_DASHBOARD, null);
                        PreferenceUtils.setString(main, PreferenceUtils.KEY_LOCAL_CONTACTS, null);
                        PreferenceUtils.setString(main, PreferenceUtils.KEY_PLACES, null);
                        MainApplication.userAccount = null;
                        MainApplication.dashboard = null;
                        MainApplication.contacts = null;
                        MainApplication.mapping = null;
                        MainApplication.places = null;
                        MainApplication.iDontWantToSee = new JSONObject();
                        MainApplication.firstTimeZoom = true;
                        //Restart main activity to clear state
                        main.recreate();
                    }
                })
                .setNegativeButton(R.string.no, null)
                .show();
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.execution.runners;
import com.intellij.execution.*;
import com.intellij.execution.configurations.RunProfile;
import com.intellij.execution.process.ProcessHandler;
import com.intellij.execution.process.ProcessNotCreatedException;
import com.intellij.execution.ui.RunContentDescriptor;
import com.intellij.ide.DataManager;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationGroup;
import com.intellij.notification.NotificationListener;
import com.intellij.notification.NotificationType;
import com.intellij.openapi.actionSystem.LangDataKeys;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.MessageType;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.wm.ToolWindowManager;
import com.intellij.ui.ColorUtil;
import com.intellij.ui.LayeredIcon;
import com.intellij.ui.content.Content;
import com.intellij.util.ui.GraphicsUtil;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.HyperlinkEvent;
import javax.swing.event.HyperlinkListener;
import java.awt.*;
import java.awt.geom.Ellipse2D;
public class ExecutionUtil {
private static final Logger LOG = Logger.getInstance("com.intellij.execution.runners.ExecutionUtil");
private static final NotificationGroup ourNotificationGroup = NotificationGroup.logOnlyGroup("Execution");
// Utility class: all members are static, so instantiation is forbidden.
private ExecutionUtil() {
}
/**
 * Reports a failed execution of the given run profile.
 * Delegates to {@code handleExecutionError(Project, String, String, ExecutionException)}
 * using the profile's name as the task name.
 *
 * @param project      the project the run belongs to
 * @param toolWindowId id of the tool window associated with the executor
 * @param runProfile   the run configuration whose execution failed
 * @param e            the failure to report
 */
public static void handleExecutionError(@NotNull Project project,
@NotNull String toolWindowId,
@NotNull RunProfile runProfile,
@NotNull ExecutionException e) {
handleExecutionError(project, toolWindowId, runProfile.getName(), e);
}
public static void handleExecutionError(@NotNull ExecutionEnvironment environment, @NotNull ExecutionException e) {
handleExecutionError(environment.getProject(), environment.getExecutor().getToolWindowId(), environment.getRunProfile().getName(), e);
}
public static void handleExecutionError(@NotNull final Project project,
@NotNull final String toolWindowId,
@NotNull String taskName,
@NotNull ExecutionException e) {
if (e instanceof RunCanceledByUserException) {
return;
}
LOG.debug(e);
String description = e.getMessage();
if (description == null) {
LOG.warn("Execution error without description", e);
description = "Unknown error";
}
HyperlinkListener listener = null;
if ((description.contains("87") || description.contains("111") || description.contains("206")) &&
e instanceof ProcessNotCreatedException &&
!PropertiesComponent.getInstance(project).isTrueValue("dynamic.classpath")) {
final String commandLineString = ((ProcessNotCreatedException)e).getCommandLine().getCommandLineString();
if (commandLineString.length() > 1024 * 32) {
description = "Command line is too long. In order to reduce its length classpath file can be used.<br>" +
"Would you like to enable classpath file mode for all run configurations of your project?<br>" +
"<a href=\"\">Enable</a>";
listener = new HyperlinkListener() {
@Override
public void hyperlinkUpdate(HyperlinkEvent event) {
PropertiesComponent.getInstance(project).setValue("dynamic.classpath", "true");
}
};
}
}
final String title = ExecutionBundle.message("error.running.configuration.message", taskName);
final String fullMessage = title + ":<br>" + description;
if (ApplicationManager.getApplication().isUnitTestMode()) {
LOG.error(fullMessage, e);
}
if (listener == null && e instanceof HyperlinkListener) {
listener = (HyperlinkListener)e;
}
final HyperlinkListener finalListener = listener;
final String finalDescription = description;
UIUtil.invokeLaterIfNeeded(() -> {
if (project.isDisposed()) {
return;
}
ToolWindowManager toolWindowManager = ToolWindowManager.getInstance(project);
if (toolWindowManager.canShowNotification(toolWindowId)) {
//noinspection SSBasedInspection
toolWindowManager.notifyByBalloon(toolWindowId, MessageType.ERROR, fullMessage, null, finalListener);
}
else {
Messages.showErrorDialog(project, UIUtil.toHtml(fullMessage), "");
}
NotificationListener notificationListener = finalListener == null ? null : new NotificationListener() {
@Override
public void hyperlinkUpdate(@NotNull Notification notification, @NotNull HyperlinkEvent event) {
finalListener.hyperlinkUpdate(event);
}
};
ourNotificationGroup.createNotification(title, finalDescription, NotificationType.ERROR, notificationListener).notify(project);
});
}
public static void restartIfActive(@NotNull RunContentDescriptor descriptor) {
ProcessHandler processHandler = descriptor.getProcessHandler();
if (processHandler != null
&& processHandler.isStartNotified()
&& !processHandler.isProcessTerminating()
&& !processHandler.isProcessTerminated()) {
restart(descriptor);
}
}
public static void restart(@NotNull RunContentDescriptor descriptor) {
restart(descriptor.getComponent());
}
public static void restart(@NotNull Content content) {
restart(content.getComponent());
}
private static void restart(@Nullable JComponent component) {
if (component != null) {
ExecutionEnvironment environment = LangDataKeys.EXECUTION_ENVIRONMENT.getData(DataManager.getInstance().getDataContext(component));
if (environment != null) {
restart(environment);
}
}
}
public static void restart(@NotNull ExecutionEnvironment environment) {
if (!ExecutorRegistry.getInstance().isStarting(environment)) {
ExecutionManager.getInstance(environment.getProject()).restartRunProfile(environment);
}
}
public static void runConfiguration(@NotNull RunnerAndConfigurationSettings configuration, @NotNull Executor executor) {
ExecutionEnvironmentBuilder builder = createEnvironment(executor, configuration);
if (builder != null) {
ExecutionManager.getInstance(configuration.getConfiguration().getProject()).restartRunProfile(builder
.activeTarget()
.build());
}
}
@Nullable
public static ExecutionEnvironmentBuilder createEnvironment(@NotNull Executor executor, @NotNull RunnerAndConfigurationSettings settings) {
try {
return ExecutionEnvironmentBuilder.create(executor, settings);
}
catch (ExecutionException e) {
handleExecutionError(settings.getConfiguration().getProject(), executor.getToolWindowId(), settings.getConfiguration().getName(), e);
return null;
}
}
public static Icon getLiveIndicator(@Nullable final Icon base) {
return new LayeredIcon(base, new Icon() {
@SuppressWarnings("UseJBColor")
@Override
public void paintIcon(Component c, Graphics g, int x, int y) {
int iSize = JBUI.scale(4);
Graphics2D g2d = (Graphics2D)g.create();
try {
GraphicsUtil.setupAAPainting(g2d);
g2d.setColor(Color.GREEN);
Ellipse2D.Double shape =
new Ellipse2D.Double(x + getIconWidth() - JBUI.scale(iSize), y + getIconHeight() - iSize, iSize, iSize);
g2d.fill(shape);
g2d.setColor(ColorUtil.withAlpha(Color.BLACK, .40));
g2d.draw(shape);
}
finally {
g2d.dispose();
}
}
@Override
public int getIconWidth() {
return base != null ? base.getIconWidth() : 13;
}
@Override
public int getIconHeight() {
return base != null ? base.getIconHeight() : 13;
}
});
}
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.app.catalog.resources;
import org.apache.airavata.common.exception.ApplicationSettingsException;
import org.apache.airavata.registry.core.app.catalog.model.*;
import org.apache.airavata.registry.core.app.catalog.util.AppCatalogJPAUtils;
import org.apache.airavata.registry.core.app.catalog.util.AppCatalogQueryGenerator;
import org.apache.airavata.registry.core.app.catalog.util.AppCatalogResourceType;
import org.apache.airavata.registry.cpi.AppCatalogException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * App-catalog resource representing a gateway's storage-resource preference
 * (login user, filesystem root and credential-store token for one storage
 * resource within one gateway profile). Persistence is via JPA against the
 * {@code StoragePreference} entity, keyed by (gatewayId, storageResourceId).
 */
public class StoragePreferenceResource extends AppCatAbstractResource {
    private final static Logger logger = LoggerFactory.getLogger(StoragePreferenceResource.class);
    private String gatewayId;
    private String storageResourceId;
    private String loginUserName;
    private String fsRootLocation;
    private String resourceCSToken;
    private GatewayProfileResource gatewayProfile;

    public String getLoginUserName() {
        return loginUserName;
    }

    public void setLoginUserName(String loginUserName) {
        this.loginUserName = loginUserName;
    }

    public String getGatewayId() {
        return gatewayId;
    }

    public void setGatewayId(String gatewayId) {
        this.gatewayId = gatewayId;
    }

    public GatewayProfileResource getGatewayProfile() {
        return gatewayProfile;
    }

    public void setGatewayProfile(GatewayProfileResource gatewayProfile) {
        this.gatewayProfile = gatewayProfile;
    }

    public String getResourceCSToken() {
        return resourceCSToken;
    }

    public void setResourceCSToken(String resourceCSToken) {
        this.resourceCSToken = resourceCSToken;
    }

    public String getFsRootLocation() {
        return fsRootLocation;
    }

    public void setFsRootLocation(String fsRootLocation) {
        this.fsRootLocation = fsRootLocation;
    }

    public String getStorageResourceId() {
        return storageResourceId;
    }

    public void setStorageResourceId(String storageResourceId) {
        this.storageResourceId = storageResourceId;
    }

    /**
     * Rolls back any still-active transaction and closes the entity manager.
     * Safe to call with {@code null} or an already-closed manager, so it can be
     * used unconditionally from {@code finally} blocks.
     */
    private static void closeEntityManager(EntityManager em) {
        if (em != null && em.isOpen()) {
            if (em.getTransaction().isActive()) {
                em.getTransaction().rollback();
            }
            em.close();
        }
    }

    /**
     * Validates and casts a composite identifier to a field-name/value map.
     *
     * @throws AppCatalogException if the identifier is not a {@link Map}
     */
    private static HashMap<String, String> toIdMap(Object identifier) throws AppCatalogException {
        if (identifier instanceof Map) {
            return (HashMap) identifier;
        }
        logger.error("Identifier should be a map with the field name and it's value");
        throw new AppCatalogException("Identifier should be a map with the field name and it's value");
    }

    /**
     * Deletes the preference matching the (gatewayId, storageId) pair in the
     * identifier map.
     */
    @Override
    public void remove(Object identifier) throws AppCatalogException {
        HashMap<String, String> ids = toIdMap(identifier);
        EntityManager em = null;
        try {
            em = AppCatalogJPAUtils.getEntityManager();
            em.getTransaction().begin();
            AppCatalogQueryGenerator generator = new AppCatalogQueryGenerator(STORAGE_PREFERENCE);
            generator.setParameter(StoragePreferenceConstants.STORAGE_ID, ids.get(StoragePreferenceConstants.STORAGE_ID));
            generator.setParameter(StoragePreferenceConstants.GATEWAY_ID, ids.get(StoragePreferenceConstants.GATEWAY_ID));
            Query q = generator.deleteQuery(em);
            q.executeUpdate();
            em.getTransaction().commit();
        } catch (ApplicationSettingsException e) {
            logger.error(e.getMessage(), e);
            throw new AppCatalogException(e);
        } finally {
            closeEntityManager(em);
        }
    }

    /**
     * Fetches the single preference matching the (gatewayId, storageId) pair in
     * the identifier map.
     */
    @Override
    public AppCatalogResource get(Object identifier) throws AppCatalogException {
        HashMap<String, String> ids = toIdMap(identifier);
        EntityManager em = null;
        try {
            em = AppCatalogJPAUtils.getEntityManager();
            em.getTransaction().begin();
            AppCatalogQueryGenerator generator = new AppCatalogQueryGenerator(STORAGE_PREFERENCE);
            generator.setParameter(StoragePreferenceConstants.GATEWAY_ID, ids.get(StoragePreferenceConstants.GATEWAY_ID));
            generator.setParameter(StoragePreferenceConstants.STORAGE_ID, ids.get(StoragePreferenceConstants.STORAGE_ID));
            Query q = generator.selectQuery(em);
            StoragePreference preference = (StoragePreference) q.getSingleResult();
            StoragePreferenceResource preferenceResource =
                    (StoragePreferenceResource) AppCatalogJPAUtils.getResource(AppCatalogResourceType.STORAGE_PREFERENCE, preference);
            em.getTransaction().commit();
            return preferenceResource;
        } catch (ApplicationSettingsException e) {
            logger.error(e.getMessage(), e);
            throw new AppCatalogException(e);
        } finally {
            closeEntityManager(em);
        }
    }

    /**
     * Fetches all preferences matching a single field. Only
     * {@code STORAGE_ID} and {@code GATEWAY_ID} are supported.
     *
     * @throws AppCatalogException wrapping an {@link IllegalArgumentException}
     *         for any other field name
     */
    @Override
    public List<AppCatalogResource> get(String fieldName, Object value) throws AppCatalogException {
        List<AppCatalogResource> preferenceResourceList = new ArrayList<AppCatalogResource>();
        EntityManager em = null;
        try {
            em = AppCatalogJPAUtils.getEntityManager();
            em.getTransaction().begin();
            AppCatalogQueryGenerator generator = new AppCatalogQueryGenerator(STORAGE_PREFERENCE);
            if (fieldName.equals(StoragePreferenceConstants.STORAGE_ID)
                    || fieldName.equals(StoragePreferenceConstants.GATEWAY_ID)) {
                generator.setParameter(fieldName, value);
                Query q = generator.selectQuery(em);
                List results = q.getResultList();
                for (Object result : results) {
                    StoragePreference preference = (StoragePreference) result;
                    // Skip rows that are not linked to a storage resource.
                    if (preference.getStorageResourceId() != null) {
                        StoragePreferenceResource preferenceResource = (StoragePreferenceResource) AppCatalogJPAUtils
                                .getResource(AppCatalogResourceType.STORAGE_PREFERENCE, preference);
                        preferenceResourceList.add(preferenceResource);
                    }
                }
                em.getTransaction().commit();
            } else {
                em.getTransaction().commit();
                logger.error("Unsupported field name for data storage preference Resource.", new IllegalArgumentException());
                // Caught below and re-thrown wrapped in AppCatalogException,
                // matching the original control flow.
                throw new IllegalArgumentException("Unsupported field name for data storage preference Resource.");
            }
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            throw new AppCatalogException(e);
        } finally {
            closeEntityManager(em);
        }
        return preferenceResourceList;
    }

    /** Not supported; always returns {@code null}. */
    @Override
    public List<AppCatalogResource> getAll() throws AppCatalogException {
        return null;
    }

    /** Not supported; always returns {@code null}. */
    @Override
    public List<String> getAllIds() throws AppCatalogException {
        return null;
    }

    /** Not supported for composite-key resources; always throws. */
    @Override
    public List<String> getIds(String fieldName, Object value) throws AppCatalogException {
        logger.error("Unsupported for objects with a composite identifier");
        throw new AppCatalogException("Unsupported for objects with a composite identifier");
    }

    /**
     * Inserts or updates the preference row identified by
     * (gatewayId, storageResourceId) using this resource's current field values.
     */
    @Override
    public void save() throws AppCatalogException {
        EntityManager em = null;
        try {
            // Probe for an existing row with a short-lived entity manager.
            em = AppCatalogJPAUtils.getEntityManager();
            StoragePreference existingPreference = em.find(StoragePreference.class, new StoragePreferencePK(gatewayId, storageResourceId));
            closeEntityManager(em);

            em = AppCatalogJPAUtils.getEntityManager();
            em.getTransaction().begin();
            GatewayProfile gatewayProf = em.find(GatewayProfile.class, gatewayId);
            StoragePreference preference = existingPreference != null ? existingPreference : new StoragePreference();
            preference.setStorageResourceId(storageResourceId);
            preference.setGatewayId(gatewayId);
            preference.setGatewayProfile(gatewayProf);
            preference.setLoginUserName(loginUserName);
            preference.setComputeResourceCSToken(resourceCSToken);
            preference.setFsRootLocation(fsRootLocation);
            if (existingPreference != null) {
                em.merge(preference);
            } else {
                em.persist(preference);
            }
            em.getTransaction().commit();
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            throw new AppCatalogException(e);
        } finally {
            closeEntityManager(em);
        }
    }

    /**
     * Returns whether a preference exists for the (gatewayId, storageId) pair
     * in the identifier map.
     */
    @Override
    public boolean isExists(Object identifier) throws AppCatalogException {
        HashMap<String, String> ids = toIdMap(identifier);
        EntityManager em = null;
        try {
            em = AppCatalogJPAUtils.getEntityManager();
            StoragePreference existingPreference = em.find(StoragePreference.class,
                    new StoragePreferencePK(ids.get(StoragePreferenceConstants.GATEWAY_ID),
                            ids.get(StoragePreferenceConstants.STORAGE_ID)));
            return existingPreference != null;
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            throw new AppCatalogException(e);
        } finally {
            closeEntityManager(em);
        }
    }
}
| |
/*
* Copyright (c) 2008-2015 Citrix Systems, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.citrix.netscaler.nitro.resource.config.vpn;
import com.citrix.netscaler.nitro.resource.base.*;
import com.citrix.netscaler.nitro.service.nitro_service;
import com.citrix.netscaler.nitro.service.options;
import com.citrix.netscaler.nitro.util.*;
import com.citrix.netscaler.nitro.exception.nitro_exception;
/**
 * Deserialization wrapper for the NITRO API response payload carrying
 * vpnglobal_auditnslogpolicy_binding resources. The field name must match the
 * JSON key emitted by the NetScaler API, hence the unconventional naming.
 */
class vpnglobal_auditnslogpolicy_binding_response extends base_response
{
	public vpnglobal_auditnslogpolicy_binding[] vpnglobal_auditnslogpolicy_binding;
}
/**
* Binding class showing the auditnslogpolicy that can be bound to vpnglobal.
*/
public class vpnglobal_auditnslogpolicy_binding extends base_resource
{
	private String policyname;
	private Long priority;
	private Boolean secondary;
	private Boolean groupextraction;
	private Long __count;

	/**
	* <pre>
	* The priority of the policy.
	* </pre>
	*/
	public void set_priority(long priority) throws Exception {
		// valueOf instead of the deprecated boxing constructor.
		this.priority = Long.valueOf(priority);
	}

	/**
	* <pre>
	* The priority of the policy.
	* </pre>
	*/
	public void set_priority(Long priority) throws Exception{
		this.priority = priority;
	}

	/**
	* <pre>
	* The priority of the policy.
	* </pre>
	*/
	public Long get_priority() throws Exception {
		return this.priority;
	}

	/**
	* <pre>
	* The name of the policy.
	* </pre>
	*/
	public void set_policyname(String policyname) throws Exception{
		this.policyname = policyname;
	}

	/**
	* <pre>
	* The name of the policy.
	* </pre>
	*/
	public String get_policyname() throws Exception {
		return this.policyname;
	}

	/**
	* <pre>
	* Bind the authentication policy as the secondary policy to use in a two-factor configuration. A user must then authenticate not only to a primary authentication server but also to a secondary authentication server. User groups are aggregated across both authentication servers. The user name must be exactly the same on both authentication servers, but the authentication servers can require different passwords.
	* </pre>
	*/
	public void set_secondary(boolean secondary) throws Exception {
		// valueOf instead of the deprecated boxing constructor.
		this.secondary = Boolean.valueOf(secondary);
	}

	/**
	* <pre>
	* Bind the authentication policy as the secondary policy to use in a two-factor configuration. A user must then authenticate not only to a primary authentication server but also to a secondary authentication server. User groups are aggregated across both authentication servers. The user name must be exactly the same on both authentication servers, but the authentication servers can require different passwords.
	* </pre>
	*/
	public void set_secondary(Boolean secondary) throws Exception{
		this.secondary = secondary;
	}

	/**
	* <pre>
	* Bind the authentication policy as the secondary policy to use in a two-factor configuration. A user must then authenticate not only to a primary authentication server but also to a secondary authentication server. User groups are aggregated across both authentication servers. The user name must be exactly the same on both authentication servers, but the authentication servers can require different passwords.
	* </pre>
	*/
	public Boolean get_secondary() throws Exception {
		return this.secondary;
	}

	/**
	* <pre>
	* Bind the Authentication policy to a tertiary chain which will be used only for group extraction. The user will not authenticate against this server, and this will only be called if primary and/or secondary authentication has succeeded.
	* </pre>
	*/
	public void set_groupextraction(boolean groupextraction) throws Exception {
		// valueOf instead of the deprecated boxing constructor.
		this.groupextraction = Boolean.valueOf(groupextraction);
	}

	/**
	* <pre>
	* Bind the Authentication policy to a tertiary chain which will be used only for group extraction. The user will not authenticate against this server, and this will only be called if primary and/or secondary authentication has succeeded.
	* </pre>
	*/
	public void set_groupextraction(Boolean groupextraction) throws Exception{
		this.groupextraction = groupextraction;
	}

	/**
	* <pre>
	* Bind the Authentication policy to a tertiary chain which will be used only for group extraction. The user will not authenticate against this server, and this will only be called if primary and/or secondary authentication has succeeded.
	* </pre>
	*/
	public Boolean get_groupextraction() throws Exception {
		return this.groupextraction;
	}

	/**
	* <pre>
	* converts nitro response into object and returns the object array in case of get request.
	* </pre>
	*/
	protected base_resource[] get_nitro_response(nitro_service service, String response) throws Exception{
		vpnglobal_auditnslogpolicy_binding_response result = (vpnglobal_auditnslogpolicy_binding_response) service.get_payload_formatter().string_to_resource(vpnglobal_auditnslogpolicy_binding_response.class, response);
		if(result.errorcode != 0) {
			// 444 means the session is no longer valid on the appliance.
			if (result.errorcode == 444) {
				service.clear_session();
			}
			if(result.severity != null)
			{
				// Non-ERROR severities (e.g. warnings) are not raised as exceptions.
				if (result.severity.equals("ERROR"))
					throw new nitro_exception(result.message,result.errorcode);
			}
			else
			{
				throw new nitro_exception(result.message,result.errorcode);
			}
		}
		return result.vpnglobal_auditnslogpolicy_binding;
	}

	/**
	* <pre>
	* Returns the value of object identifier argument
	* </pre>
	*/
	protected String get_object_name() {
		// Global bindings have no per-object identifier.
		return null;
	}

	/**
	* Use this API to add a vpnglobal_auditnslogpolicy_binding resource.
	*/
	public static base_response add(nitro_service client, vpnglobal_auditnslogpolicy_binding resource) throws Exception {
		vpnglobal_auditnslogpolicy_binding updateresource = new vpnglobal_auditnslogpolicy_binding();
		updateresource.policyname = resource.policyname;
		updateresource.priority = resource.priority;
		updateresource.secondary = resource.secondary;
		updateresource.groupextraction = resource.groupextraction;
		return updateresource.update_resource(client);
	}

	/**
	* Use this API to add vpnglobal_auditnslogpolicy_binding resources in bulk.
	*/
	public static base_responses add(nitro_service client, vpnglobal_auditnslogpolicy_binding resources[]) throws Exception {
		base_responses result = null;
		if (resources != null && resources.length > 0) {
			vpnglobal_auditnslogpolicy_binding updateresources[] = new vpnglobal_auditnslogpolicy_binding[resources.length];
			for (int i=0;i<resources.length;i++){
				updateresources[i] = new vpnglobal_auditnslogpolicy_binding();
				updateresources[i].policyname = resources[i].policyname;
				updateresources[i].priority = resources[i].priority;
				updateresources[i].secondary = resources[i].secondary;
				updateresources[i].groupextraction = resources[i].groupextraction;
			}
			result = update_bulk_request(client, updateresources);
		}
		return result;
	}

	/**
	* Use this API to delete a vpnglobal_auditnslogpolicy_binding resource.
	*/
	public static base_response delete(nitro_service client, vpnglobal_auditnslogpolicy_binding resource) throws Exception {
		vpnglobal_auditnslogpolicy_binding deleteresource = new vpnglobal_auditnslogpolicy_binding();
		deleteresource.policyname = resource.policyname;
		deleteresource.secondary = resource.secondary;
		deleteresource.groupextraction = resource.groupextraction;
		return deleteresource.delete_resource(client);
	}

	/**
	* Use this API to delete vpnglobal_auditnslogpolicy_binding resources in bulk.
	*/
	public static base_responses delete(nitro_service client, vpnglobal_auditnslogpolicy_binding resources[]) throws Exception {
		base_responses result = null;
		if (resources != null && resources.length > 0) {
			vpnglobal_auditnslogpolicy_binding deleteresources[] = new vpnglobal_auditnslogpolicy_binding[resources.length];
			for (int i=0;i<resources.length;i++){
				deleteresources[i] = new vpnglobal_auditnslogpolicy_binding();
				deleteresources[i].policyname = resources[i].policyname;
				deleteresources[i].secondary = resources[i].secondary;
				deleteresources[i].groupextraction = resources[i].groupextraction;
			}
			result = delete_bulk_request(client, deleteresources);
		}
		return result;
	}

	/**
	* Use this API to fetch a vpnglobal_auditnslogpolicy_binding resources.
	*/
	public static vpnglobal_auditnslogpolicy_binding[] get(nitro_service service) throws Exception{
		vpnglobal_auditnslogpolicy_binding obj = new vpnglobal_auditnslogpolicy_binding();
		vpnglobal_auditnslogpolicy_binding response[] = (vpnglobal_auditnslogpolicy_binding[]) obj.get_resources(service);
		return response;
	}

	/**
	* Use this API to fetch filtered set of vpnglobal_auditnslogpolicy_binding resources.
	* filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
	*/
	public static vpnglobal_auditnslogpolicy_binding[] get_filtered(nitro_service service, String filter) throws Exception{
		vpnglobal_auditnslogpolicy_binding obj = new vpnglobal_auditnslogpolicy_binding();
		options option = new options();
		option.set_filter(filter);
		vpnglobal_auditnslogpolicy_binding[] response = (vpnglobal_auditnslogpolicy_binding[]) obj.getfiltered(service, option);
		return response;
	}

	/**
	* Use this API to fetch filtered set of vpnglobal_auditnslogpolicy_binding resources.
	* set the filter parameter values in filtervalue object.
	*/
	public static vpnglobal_auditnslogpolicy_binding[] get_filtered(nitro_service service, filtervalue[] filter) throws Exception{
		vpnglobal_auditnslogpolicy_binding obj = new vpnglobal_auditnslogpolicy_binding();
		options option = new options();
		option.set_filter(filter);
		vpnglobal_auditnslogpolicy_binding[] response = (vpnglobal_auditnslogpolicy_binding[]) obj.getfiltered(service, option);
		return response;
	}

	/**
	* Use this API to count vpnglobal_auditnslogpolicy_binding resources configured on NetScaler.
	*/
	public static long count(nitro_service service) throws Exception{
		vpnglobal_auditnslogpolicy_binding obj = new vpnglobal_auditnslogpolicy_binding();
		options option = new options();
		option.set_count(true);
		vpnglobal_auditnslogpolicy_binding response[] = (vpnglobal_auditnslogpolicy_binding[]) obj.get_resources(service,option);
		// Guard the empty-array case to avoid an ArrayIndexOutOfBoundsException.
		if (response != null && response.length > 0) {
			return response[0].__count;
		}
		return 0;
	}

	/**
	* Use this API to count the filtered set of vpnglobal_auditnslogpolicy_binding resources.
	* filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
	*/
	public static long count_filtered(nitro_service service, String filter) throws Exception{
		vpnglobal_auditnslogpolicy_binding obj = new vpnglobal_auditnslogpolicy_binding();
		options option = new options();
		option.set_count(true);
		option.set_filter(filter);
		vpnglobal_auditnslogpolicy_binding[] response = (vpnglobal_auditnslogpolicy_binding[]) obj.getfiltered(service, option);
		// Guard the empty-array case to avoid an ArrayIndexOutOfBoundsException.
		if (response != null && response.length > 0) {
			return response[0].__count;
		}
		return 0;
	}

	/**
	* Use this API to count the filtered set of vpnglobal_auditnslogpolicy_binding resources.
	* set the filter parameter values in filtervalue object.
	*/
	public static long count_filtered(nitro_service service, filtervalue[] filter) throws Exception{
		vpnglobal_auditnslogpolicy_binding obj = new vpnglobal_auditnslogpolicy_binding();
		options option = new options();
		option.set_count(true);
		option.set_filter(filter);
		vpnglobal_auditnslogpolicy_binding[] response = (vpnglobal_auditnslogpolicy_binding[]) obj.getfiltered(service, option);
		// Guard the empty-array case to avoid an ArrayIndexOutOfBoundsException.
		if (response != null && response.length > 0) {
			return response[0].__count;
		}
		return 0;
	}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.config;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.ExecutionException;
import org.apache.cassandra.OrderedJUnit4ClassRunner;
import org.apache.cassandra.SchemaLoader;
import org.apache.cassandra.Util;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.filter.QueryFilter;
import org.apache.cassandra.db.filter.QueryPath;
import org.apache.cassandra.db.marshal.BytesType;
import org.apache.cassandra.db.marshal.UTF8Type;
import org.apache.cassandra.db.marshal.TimeUUIDType;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.io.sstable.Component;
import org.apache.cassandra.io.sstable.Descriptor;
import org.apache.cassandra.io.sstable.SSTableDeletingTask;
import org.apache.cassandra.locator.OldNetworkTopologyStrategy;
import org.apache.cassandra.locator.SimpleStrategy;
import org.apache.cassandra.service.MigrationManager;
import org.apache.cassandra.thrift.IndexType;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.junit.Test;
import org.junit.runner.RunWith;
@RunWith(OrderedJUnit4ClassRunner.class)
public class DefsTest extends SchemaLoader
{
@Test
public void ensureStaticCFMIdsAreLessThan1000()
{
assert CFMetaData.OldStatusCf.cfId.equals(CFMetaData.getId(Table.SYSTEM_KS, SystemTable.OLD_STATUS_CF));
assert CFMetaData.OldHintsCf.cfId.equals(CFMetaData.getId(Table.SYSTEM_KS, SystemTable.OLD_HINTS_CF));
}
@Test
public void testCFMetaDataApply() throws ConfigurationException
{
Map<ByteBuffer, ColumnDefinition> indexes = new HashMap<ByteBuffer, ColumnDefinition>();
for (int i = 0; i < 5; i++)
{
ByteBuffer name = ByteBuffer.wrap(new byte[] { (byte)i });
indexes.put(name, new ColumnDefinition(name, BytesType.instance, IndexType.KEYS, null, Integer.toString(i), null));
}
CFMetaData cfm = new CFMetaData("Keyspace1",
"TestApplyCFM_CF",
ColumnFamilyType.Standard,
BytesType.instance,
null);
cfm.comment("No comment")
.readRepairChance(0.5)
.replicateOnWrite(false)
.gcGraceSeconds(100000)
.defaultValidator(null)
.minCompactionThreshold(500)
.maxCompactionThreshold(500)
.columnMetadata(indexes);
// we'll be adding this one later. make sure it's not already there.
assert cfm.getColumn_metadata().get(ByteBuffer.wrap(new byte[] { 5 })) == null;
CFMetaData cfNew = cfm.clone();
// add one.
ColumnDefinition addIndexDef = new ColumnDefinition(ByteBuffer.wrap(new byte[] { 5 }),
BytesType.instance,
IndexType.KEYS,
null,
"5",
null);
cfNew.addColumnDefinition(addIndexDef);
// remove one.
ColumnDefinition removeIndexDef = new ColumnDefinition(ByteBuffer.wrap(new byte[] { 0 }),
BytesType.instance,
IndexType.KEYS,
null,
"0",
null);
assert cfNew.removeColumnDefinition(removeIndexDef);
cfm.apply(cfNew);
for (int i = 1; i < indexes.size(); i++)
assert cfm.getColumn_metadata().get(ByteBuffer.wrap(new byte[] { 1 })) != null;
assert cfm.getColumn_metadata().get(ByteBuffer.wrap(new byte[] { 0 })) == null;
assert cfm.getColumn_metadata().get(ByteBuffer.wrap(new byte[] { 5 })) != null;
}
@Test
public void testInvalidNames() throws IOException
{
String[] valid = {"1", "a", "_1", "b_", "__", "1_a"};
for (String s : valid)
assert CFMetaData.isNameValid(s);
String[] invalid = {"b@t", "dash-y", "", " ", "dot.s", ".hidden"};
for (String s : invalid)
assert !CFMetaData.isNameValid(s);
}
    /**
     * Placeholder for a dump/reload round-trip test of schema definitions.
     * The entire body is commented out, so this test currently verifies
     * nothing and always passes.
     */
    @Test
    public void saveAndRestore() throws IOException
    {
        /*
        // verify dump and reload.
        UUID first = UUIDGen.makeType1UUIDFromHost(FBUtilities.getBroadcastAddress());
        DefsTable.dumpToStorage(first);
        List<KSMetaData> defs = new ArrayList<KSMetaData>(DefsTable.loadFromStorage(first));
        assert defs.size() > 0;
        assert defs.size() == Schema.instance.getNonSystemTables().size();
        for (KSMetaData loaded : defs)
        {
            KSMetaData defined = Schema.instance.getTableDefinition(loaded.name);
            assert defined.equals(loaded) : String.format("%s != %s", loaded, defined);
        }
        */
    }
@Test
public void addNewCfToBogusTable() throws InterruptedException
{
CFMetaData newCf = addTestCF("MadeUpKeyspace", "NewCF", "new cf");
try
{
MigrationManager.announceNewColumnFamily(newCf);
throw new AssertionError("You shouldn't be able to do anything to a keyspace that doesn't exist.");
}
catch (ConfigurationException expected)
{
}
}
@Test
public void addNewCfWithNullComment() throws ConfigurationException, IOException, ExecutionException, InterruptedException
{
final String ks = "Keyspace1";
final String cf = "BrandNewCfWithNull";
KSMetaData original = Schema.instance.getTableDefinition(ks);
CFMetaData newCf = addTestCF(original.name, cf, null);
assert !Schema.instance.getTableDefinition(ks).cfMetaData().containsKey(newCf.cfName);
MigrationManager.announceNewColumnFamily(newCf);
assert Schema.instance.getTableDefinition(ks).cfMetaData().containsKey(newCf.cfName);
assert Schema.instance.getTableDefinition(ks).cfMetaData().get(newCf.cfName).equals(newCf);
}
@Test
public void addNewCF() throws ConfigurationException, IOException, ExecutionException, InterruptedException
{
final String ks = "Keyspace1";
final String cf = "BrandNewCf";
KSMetaData original = Schema.instance.getTableDefinition(ks);
CFMetaData newCf = addTestCF(original.name, cf, "A New Column Family");
assert !Schema.instance.getTableDefinition(ks).cfMetaData().containsKey(newCf.cfName);
MigrationManager.announceNewColumnFamily(newCf);
assert Schema.instance.getTableDefinition(ks).cfMetaData().containsKey(newCf.cfName);
assert Schema.instance.getTableDefinition(ks).cfMetaData().get(newCf.cfName).equals(newCf);
// now read and write to it.
DecoratedKey dk = Util.dk("key0");
RowMutation rm = new RowMutation(ks, dk.key);
rm.add(new QueryPath(cf, null, ByteBufferUtil.bytes("col0")), ByteBufferUtil.bytes("value0"), 1L);
rm.apply();
ColumnFamilyStore store = Table.open(ks).getColumnFamilyStore(cf);
assert store != null;
store.forceBlockingFlush();
ColumnFamily cfam = store.getColumnFamily(QueryFilter.getNamesFilter(dk, new QueryPath(cf), ByteBufferUtil.bytes("col0")));
assert cfam.getColumn(ByteBufferUtil.bytes("col0")) != null;
IColumn col = cfam.getColumn(ByteBufferUtil.bytes("col0"));
assert ByteBufferUtil.bytes("value0").equals(col.value());
}
    /**
     * Drops an existing column family and verifies that the definition is
     * removed from the schema, that subsequent writes to it fail, and that
     * its on-disk data files are gone (or carry a "Compacted" marker).
     */
    @Test
    public void dropCf() throws ConfigurationException, IOException, ExecutionException, InterruptedException
    {
        DecoratedKey dk = Util.dk("dropCf");
        // sanity
        final KSMetaData ks = Schema.instance.getTableDefinition("Keyspace1");
        assert ks != null;
        final CFMetaData cfm = ks.cfMetaData().get("Standard1");
        assert cfm != null;
        // write some data, force a flush, then verify that files exist on disk.
        RowMutation rm = new RowMutation(ks.name, dk.key);
        for (int i = 0; i < 100; i++)
            rm.add(new QueryPath(cfm.cfName, null, ByteBufferUtil.bytes(("col" + i))), ByteBufferUtil.bytes("anyvalue"), 1L);
        rm.apply();
        ColumnFamilyStore store = Table.open(cfm.ksName).getColumnFamilyStore(cfm.cfName);
        assert store != null;
        store.forceBlockingFlush();
        assert store.directories.sstableLister().list().size() > 0;
        // drop the CF; its definition must disappear from the schema.
        MigrationManager.announceColumnFamilyDrop(ks.name, cfm.cfName, null);
        assert !Schema.instance.getTableDefinition(ks.name).cfMetaData().containsKey(cfm.cfName);
        // any write should fail.
        rm = new RowMutation(ks.name, dk.key);
        boolean success = true;
        try
        {
            rm.add(new QueryPath("Standard1", null, ByteBufferUtil.bytes("col0")), ByteBufferUtil.bytes("value0"), 1L);
            rm.apply();
        }
        catch (Throwable th)
        {
            success = false;
        }
        assert !success : "This mutation should have failed since the CF no longer exists.";
        // verify that the files are gone. a Data.db file is acceptable only if a
        // matching "Compacted" marker file exists next to it.
        for (File file : store.directories.sstableLister().listFiles())
        {
            if (file.getPath().endsWith("Data.db") && !new File(file.getPath().replace("Data.db", "Compacted")).exists())
                throw new AssertionError("undeleted file " + file);
        }
    }
@Test
public void addNewKS() throws ConfigurationException, IOException, ExecutionException, InterruptedException
{
DecoratedKey dk = Util.dk("key0");
CFMetaData newCf = addTestCF("NewKeyspace1", "AddedStandard1", "A new cf for a new ks");
KSMetaData newKs = KSMetaData.testMetadata(newCf.ksName, SimpleStrategy.class, KSMetaData.optsWithRF(5), newCf);
MigrationManager.announceNewKeyspace(newKs);
assert Schema.instance.getTableDefinition(newCf.ksName) != null;
assert Schema.instance.getTableDefinition(newCf.ksName).equals(newKs);
// test reads and writes.
RowMutation rm = new RowMutation(newCf.ksName, dk.key);
rm.add(new QueryPath(newCf.cfName, null, ByteBufferUtil.bytes("col0")), ByteBufferUtil.bytes("value0"), 1L);
rm.apply();
ColumnFamilyStore store = Table.open(newCf.ksName).getColumnFamilyStore(newCf.cfName);
assert store != null;
store.forceBlockingFlush();
ColumnFamily cfam = store.getColumnFamily(QueryFilter.getNamesFilter(dk, new QueryPath(newCf.cfName), ByteBufferUtil.bytes("col0")));
assert cfam.getColumn(ByteBufferUtil.bytes("col0")) != null;
IColumn col = cfam.getColumn(ByteBufferUtil.bytes("col0"));
assert ByteBufferUtil.bytes("value0").equals(col.value());
}
    /**
     * Drops an entire keyspace and verifies that its definition is removed
     * from the schema, that writes to its former column families fail, and
     * that re-opening the keyspace throws.
     */
    @Test
    public void dropKS() throws ConfigurationException, IOException, ExecutionException, InterruptedException
    {
        DecoratedKey dk = Util.dk("dropKs");
        // sanity
        final KSMetaData ks = Schema.instance.getTableDefinition("Keyspace1");
        assert ks != null;
        final CFMetaData cfm = ks.cfMetaData().get("Standard2");
        assert cfm != null;
        // write some data, force a flush, then verify that files exist on disk.
        RowMutation rm = new RowMutation(ks.name, dk.key);
        for (int i = 0; i < 100; i++)
            rm.add(new QueryPath(cfm.cfName, null, ByteBufferUtil.bytes(("col" + i))), ByteBufferUtil.bytes("anyvalue"), 1L);
        rm.apply();
        ColumnFamilyStore store = Table.open(cfm.ksName).getColumnFamilyStore(cfm.cfName);
        assert store != null;
        store.forceBlockingFlush();
        assert store.directories.sstableLister().list().size() > 0;
        // drop the keyspace; its definition must disappear from the schema.
        MigrationManager.announceKeyspaceDrop(ks.name, null);
        assert Schema.instance.getTableDefinition(ks.name) == null;
        // write should fail.
        rm = new RowMutation(ks.name, dk.key);
        boolean success = true;
        try
        {
            rm.add(new QueryPath("Standard1", null, ByteBufferUtil.bytes("col0")), ByteBufferUtil.bytes("value0"), 1L);
            rm.apply();
        }
        catch (Throwable th)
        {
            success = false;
        }
        assert !success : "This mutation should have failed since the CF no longer exists.";
        // reads should fail too.
        boolean threw = false;
        try
        {
            Table.open(ks.name);
        }
        catch (Throwable th)
        {
            threw = true;
        }
        assert threw;
    }
@Test
public void dropKSUnflushed() throws ConfigurationException, IOException, ExecutionException, InterruptedException
{
DecoratedKey dk = Util.dk("dropKs");
// sanity
final KSMetaData ks = Schema.instance.getTableDefinition("Keyspace3");
assert ks != null;
final CFMetaData cfm = ks.cfMetaData().get("Standard1");
assert cfm != null;
// write some data
RowMutation rm = new RowMutation(ks.name, dk.key);
for (int i = 0; i < 100; i++)
rm.add(new QueryPath(cfm.cfName, null, ByteBufferUtil.bytes(("col" + i))), ByteBufferUtil.bytes("anyvalue"), 1L);
rm.apply();
MigrationManager.announceKeyspaceDrop(ks.name, null);
assert Schema.instance.getTableDefinition(ks.name) == null;
}
    /**
     * Creates a keyspace with no column families, then adds a column family to
     * it and round-trips a column through the new CF (write, flush, read back).
     */
    @Test
    public void createEmptyKsAddNewCf() throws ConfigurationException, IOException, ExecutionException, InterruptedException
    {
        assert Schema.instance.getTableDefinition("EmptyKeyspace") == null;
        KSMetaData newKs = KSMetaData.testMetadata("EmptyKeyspace", SimpleStrategy.class, KSMetaData.optsWithRF(5));
        MigrationManager.announceNewKeyspace(newKs);
        assert Schema.instance.getTableDefinition("EmptyKeyspace") != null;
        CFMetaData newCf = addTestCF("EmptyKeyspace", "AddedLater", "A new CF to add to an empty KS");
        // should not exist until apply
        assert !Schema.instance.getTableDefinition(newKs.name).cfMetaData().containsKey(newCf.cfName);
        // add the new CF to the empty keyspace
        MigrationManager.announceNewColumnFamily(newCf);
        assert Schema.instance.getTableDefinition(newKs.name).cfMetaData().containsKey(newCf.cfName);
        assert Schema.instance.getTableDefinition(newKs.name).cfMetaData().get(newCf.cfName).equals(newCf);
        // now read and write to it.
        DecoratedKey dk = Util.dk("key0");
        RowMutation rm = new RowMutation(newKs.name, dk.key);
        rm.add(new QueryPath(newCf.cfName, null, ByteBufferUtil.bytes("col0")), ByteBufferUtil.bytes("value0"), 1L);
        rm.apply();
        ColumnFamilyStore store = Table.open(newKs.name).getColumnFamilyStore(newCf.cfName);
        assert store != null;
        store.forceBlockingFlush();
        ColumnFamily cfam = store.getColumnFamily(QueryFilter.getNamesFilter(dk, new QueryPath(newCf.cfName), ByteBufferUtil.bytes("col0")));
        assert cfam.getColumn(ByteBufferUtil.bytes("col0")) != null;
        IColumn col = cfam.getColumn(ByteBufferUtil.bytes("col0"));
        assert ByteBufferUtil.bytes("value0").equals(col.value());
    }
    /**
     * Updates an existing keyspace definition: an update announced under a
     * mismatched keyspace name must be rejected, while a legal strategy-class
     * change must be visible in the schema afterwards.
     */
    @Test
    public void testUpdateKeyspace() throws ConfigurationException, IOException, ExecutionException, InterruptedException
    {
        // create a keyspace to serve as existing.
        CFMetaData cf = addTestCF("UpdatedKeyspace", "AddedStandard1", "A new cf for a new ks");
        KSMetaData oldKs = KSMetaData.testMetadata(cf.ksName, SimpleStrategy.class, KSMetaData.optsWithRF(5), cf);
        MigrationManager.announceNewKeyspace(oldKs);
        assert Schema.instance.getTableDefinition(cf.ksName) != null;
        assert Schema.instance.getTableDefinition(cf.ksName).equals(oldKs);
        // names should match.
        KSMetaData newBadKs2 = KSMetaData.testMetadata(cf.ksName + "trash", SimpleStrategy.class, KSMetaData.optsWithRF(4));
        try
        {
            MigrationManager.announceKeyspaceUpdate(newBadKs2, null);
            throw new AssertionError("Should not have been able to update a KS with an invalid KS name.");
        }
        catch (ConfigurationException ex)
        {
            // expected.
        }
        // legal update: change the replication strategy class and verify it took.
        KSMetaData newKs = KSMetaData.testMetadata(cf.ksName, OldNetworkTopologyStrategy.class, KSMetaData.optsWithRF(1));
        MigrationManager.announceKeyspaceUpdate(newKs, null);
        KSMetaData newFetchedKs = Schema.instance.getKSMetaData(newKs.name);
        assert newFetchedKs.strategyClass.equals(newKs.strategyClass);
        assert !newFetchedKs.strategyClass.equals(oldKs.strategyClass);
    }
@Test
public void testUpdateColumnFamilyNoIndexes() throws ConfigurationException, IOException, ExecutionException, InterruptedException
{
// create a keyspace with a cf to update.
CFMetaData cf = addTestCF("UpdatedCfKs", "Standard1added", "A new cf that will be updated");
KSMetaData ksm = KSMetaData.testMetadata(cf.ksName, SimpleStrategy.class, KSMetaData.optsWithRF(1), cf);
MigrationManager.announceNewKeyspace(ksm);
assert Schema.instance.getTableDefinition(cf.ksName) != null;
assert Schema.instance.getTableDefinition(cf.ksName).equals(ksm);
assert Schema.instance.getCFMetaData(cf.ksName, cf.cfName) != null;
// updating certain fields should fail.
CFMetaData newCfm = cf.clone();
newCfm.columnMetadata(new HashMap<ByteBuffer, ColumnDefinition>());
newCfm.defaultValidator(BytesType.instance);
newCfm.minCompactionThreshold(5);
newCfm.maxCompactionThreshold(31);
// test valid operations.
newCfm.comment("Modified comment");
MigrationManager.announceColumnFamilyUpdate(newCfm); // doesn't get set back here.
newCfm.readRepairChance(0.23);
MigrationManager.announceColumnFamilyUpdate(newCfm);
newCfm.gcGraceSeconds(12);
MigrationManager.announceColumnFamilyUpdate(newCfm);
newCfm.defaultValidator(UTF8Type.instance);
MigrationManager.announceColumnFamilyUpdate(newCfm);
newCfm.minCompactionThreshold(3);
MigrationManager.announceColumnFamilyUpdate(newCfm);
newCfm.maxCompactionThreshold(33);
MigrationManager.announceColumnFamilyUpdate(newCfm);
// can't test changing the reconciler because there is only one impl.
// check the cumulative affect.
assert Schema.instance.getCFMetaData(cf.ksName, cf.cfName).getComment().equals(newCfm.getComment());
assert Schema.instance.getCFMetaData(cf.ksName, cf.cfName).getReadRepairChance() == newCfm.getReadRepairChance();
assert Schema.instance.getCFMetaData(cf.ksName, cf.cfName).getGcGraceSeconds() == newCfm.getGcGraceSeconds();
assert Schema.instance.getCFMetaData(cf.ksName, cf.cfName).getDefaultValidator() == UTF8Type.instance;
// Change cfId
newCfm = new CFMetaData(cf.ksName, cf.cfName, cf.cfType, cf.comparator, cf.subcolumnComparator, UUID.randomUUID());
CFMetaData.copyOpts(newCfm, cf);
try
{
cf.apply(newCfm);
throw new AssertionError("Should have blown up when you used a different id.");
}
catch (ConfigurationException expected) {}
// Change cfName
newCfm = new CFMetaData(cf.ksName, cf.cfName + "_renamed", cf.cfType, cf.comparator, cf.subcolumnComparator);
CFMetaData.copyOpts(newCfm, cf);
try
{
cf.apply(newCfm);
throw new AssertionError("Should have blown up when you used a different name.");
}
catch (ConfigurationException expected) {}
// Change ksName
newCfm = new CFMetaData(cf.ksName + "_renamed", cf.cfName, cf.cfType, cf.comparator, cf.subcolumnComparator);
CFMetaData.copyOpts(newCfm, cf);
try
{
cf.apply(newCfm);
throw new AssertionError("Should have blown up when you used a different keyspace.");
}
catch (ConfigurationException expected) {}
// Change cf type
newCfm = new CFMetaData(cf.ksName, cf.cfName, ColumnFamilyType.Super, cf.comparator, cf.subcolumnComparator);
CFMetaData.copyOpts(newCfm, cf);
try
{
cf.apply(newCfm);
throw new AssertionError("Should have blwon up when you used a different cf type.");
}
catch (ConfigurationException expected) {}
// Change comparator
newCfm = new CFMetaData(cf.ksName, cf.cfName, cf.cfType, TimeUUIDType.instance, cf.subcolumnComparator);
CFMetaData.copyOpts(newCfm, cf);
try
{
cf.apply(newCfm);
throw new AssertionError("Should have blown up when you used a different comparator.");
}
catch (ConfigurationException expected) {}
}
    /**
     * Removes the index from an indexed column via a column-family update and
     * verifies that the index CF disappears and its sstable data file is
     * deleted from disk.
     */
    @Test
    public void testDropIndex() throws IOException, ExecutionException, InterruptedException, ConfigurationException
    {
        // persist keyspace definition in the system table
        Schema.instance.getKSMetaData("Keyspace6").toSchema(System.currentTimeMillis()).apply();
        // insert some data. save the sstable descriptor so we can make sure it's marked for delete after the drop
        RowMutation rm = new RowMutation("Keyspace6", ByteBufferUtil.bytes("k1"));
        rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("notbirthdate")), ByteBufferUtil.bytes(1L), 0);
        rm.add(new QueryPath("Indexed1", null, ByteBufferUtil.bytes("birthdate")), ByteBufferUtil.bytes(1L), 0);
        rm.apply();
        ColumnFamilyStore cfs = Table.open("Keyspace6").getColumnFamilyStore("Indexed1");
        cfs.forceBlockingFlush();
        ColumnFamilyStore indexedCfs = cfs.indexManager.getIndexForColumn(ByteBufferUtil.bytes("birthdate")).getIndexCfs();
        Descriptor desc = indexedCfs.getSSTables().iterator().next().descriptor;
        // drop the index by replacing the column definition with one that carries no IndexType
        CFMetaData meta = cfs.metadata.clone();
        ColumnDefinition cdOld = meta.getColumn_metadata().values().iterator().next();
        ColumnDefinition cdNew = new ColumnDefinition(cdOld.name, cdOld.getValidator(), null, null, null, null);
        meta.columnMetadata(Collections.singletonMap(cdOld.name, cdNew));
        MigrationManager.announceColumnFamilyUpdate(meta);
        // check: no index remains and the saved sstable is gone from disk.
        assert cfs.indexManager.getIndexes().isEmpty();
        SSTableDeletingTask.waitForDeletions();
        assert !new File(desc.filenameFor(Component.DATA)).exists();
    }
private CFMetaData addTestCF(String ks, String cf, String comment)
{
CFMetaData newCFMD = new CFMetaData(ks, cf, ColumnFamilyType.Standard, UTF8Type.instance, null);
newCFMD.comment(comment)
.readRepairChance(0.0);
return newCFMD;
}
}
| |
package DataBaseUtility;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
/**
 * Static JDBC helper for reading team/intervention/fuel data from a MySQL
 * database.
 *
 * <p>NOTE(review): all handles are static and shared, so this class is NOT
 * thread-safe — concurrent callers will clobber each other's statement and
 * result set. Methods that return a {@link ResultSet} leave the connection
 * open on purpose; the caller must invoke {@link #dbCloseConnection()} after
 * consuming the result.
 */
public class Utility {
    // Shared JDBC handles for the single open connection.
    private static Statement myStatement;
    private static ResultSet myResultSet;
    private static Connection myConnection;
    // ---- DB connection parameters ----------------------------------------
    private static String host = "";
    private static String db = "";
    private static String user = "";
    private static String pass = "";

    /** Sets the database host used by subsequent connections. */
    public static void setHost(String value){
        host = value;
    }

    /** Sets the database (schema) name used by subsequent connections. */
    public static void setDB(String value){
        db = value;
    }

    /** Sets the user name used by subsequent connections. */
    public static void setUser(String value){
        user = value;
    }

    /** Sets the password used by subsequent connections. */
    public static void setPass(String value){
        pass = value;
    }

    /**
     * Opens a connection to the MySQL DBMS on port 3306.
     *
     * @param host database host
     * @param db   database (schema) name
     * @param user user name
     * @param pass password
     */
    public static void dbOpenConnection(String host, String db, String user, String pass) {
        try {
            Class.forName("com.mysql.jdbc.Driver");
            // FIX: pass credentials as separate getConnection() arguments instead
            // of concatenating them into the URL, where special characters in
            // user/pass would break the connection string.
            String connectionUrl = "jdbc:mysql://" + host + ":3306/" + db;
            myConnection = DriverManager.getConnection(connectionUrl, user, pass);
        } catch (ClassNotFoundException | SQLException e) {
            e.printStackTrace();
            System.out.println(e.getMessage());
        }
    }

    /**
     * Closes the current result set, statement and connection.
     * FIX: null-safe — the original threw NullPointerException when called
     * before any query had been executed.
     */
    public static void dbCloseConnection() {
        try {
            if (myResultSet != null)
                myResultSet.close();
            if (myStatement != null)
                myStatement.close();
            if (myConnection != null)
                myConnection.close();
        } catch (SQLException e) {
            e.printStackTrace();
            System.out.println(e.getMessage());
        }
    }

    /**
     * Fetches the competences required to carry out a given intervention.
     *
     * @param id intervention id
     * @return ResultSet with idIntervento, idInterventoTipo, tipoCompetenza,
     *         idCompetenza; null on error. Caller must call dbCloseConnection().
     */
    public static ResultSet getCopetenzeIntervento(String id){
        dbOpenConnection(host, db, user, pass);
        try {
            // FIX: parameterized query — the id comes from the caller and must
            // not be concatenated into the SQL (SQL injection).
            PreparedStatement ps = myConnection.prepareStatement(
                    "SELECT intervento.idIntervento, " +
                    "intervento.idInterventoTipo, competenzetecniche.tipoCompetenza, competenzetecniche.idCompetenza " +
                    "FROM `capacitarichieste` JOIN intervento ON " +
                    "intervento.idInterventoTipo = capacitarichieste.idInterventoTipo " +
                    "JOIN competenzetecniche ON capacitarichieste.idCompetenza = competenzetecniche.idCompetenza " +
                    "WHERE intervento.idIntervento = ?");
            ps.setString(1, id);
            myStatement = ps; // keep for dbCloseConnection()
            return myResultSet = ps.executeQuery();
        } catch (SQLException e) {
            e.printStackTrace();
            System.out.println(e.getMessage());
            return myResultSet = null;
        }
    }

    /**
     * Extracts all team rows from the database.
     *
     * @return ResultSet of "SELECT * FROM squadra"; null on error.
     *         Caller must call dbCloseConnection().
     */
    public static ResultSet getSquadre() {
        dbOpenConnection(host, db, user, pass);
        try {
            myStatement = myConnection.createStatement();
            return myResultSet = myStatement
                    .executeQuery("SELECT * FROM squadra");
        } catch (SQLException e) {
            e.printStackTrace();
            System.out.println(e.getMessage());
            return myResultSet = null;
        }
    }

    /**
     * Sums the hourly cost of all operators belonging to a team.
     *
     * @param idSquadra team id
     * @return total hourly cost of the team, or -1 on error
     */
    public static float getCostoOrarioSquadra(String idSquadra){
        float costoOra = 0;
        dbOpenConnection(host, db, user, pass);
        try {
            // FIX: parameterized query instead of string concatenation.
            PreparedStatement ps = myConnection.prepareStatement(
                    "Select costoOra, idOperatore From risorsaoperativa where idSquadra = ?");
            ps.setString(1, idSquadra);
            myStatement = ps;
            myResultSet = ps.executeQuery();
            while (myResultSet.next()) {
                costoOra += myResultSet.getFloat("costoOra");
            }
            return costoOra;
        } catch (SQLException e) {
            e.printStackTrace();
            System.out.println(e.getMessage());
            return -1;
        } finally {
            dbCloseConnection();
        }
    }

    /**
     * Determines all competence ids available within a team.
     *
     * @param id team id
     * @return ResultSet with the distinct competence ids of the team; null on
     *         error. Caller must call dbCloseConnection().
     */
    public static ResultSet getCopetenzethatSquadra(String id){
        dbOpenConnection(host, db, user, pass);
        try {
            // FIX: parameterized query instead of string concatenation.
            PreparedStatement ps = myConnection.prepareStatement(
                    "SELECT DISTINCT capacitaoperative.IdCompetenza"
                    + " FROM squadra JOIN risorsaoperativa ON "
                    + "squadra.idSquadra = risorsaoperativa.idSquadra JOIN "
                    + "capacitaoperative ON risorsaoperativa.idOperatore = "
                    + "capacitaoperative.IdOperatore "
                    + "WHERE squadra.idSquadra = ?");
            ps.setString(1, id);
            myStatement = ps;
            return myResultSet = ps.executeQuery();
        } catch (SQLException e) {
            e.printStackTrace();
            System.out.println(e.getMessage());
            return myResultSet = null;
        }
    }

    /**
     * Extracts fuel type and consumption data for each team's vehicle.
     *
     * @return ResultSet with idSquadra, caratteristicheTecniche, alimentazione,
     *         consumo, costiProporzionali; null on error.
     *         Caller must call dbCloseConnection().
     */
    public static ResultSet initAutomezzoSquadre(){
        dbOpenConnection(host, db, user, pass);
        try {
            myStatement = myConnection.createStatement();
            return myResultSet = myStatement.executeQuery("" +
                    "SELECT squadra.idSquadra, risorsaautomezzo.caratteristicheTecniche, " +
                    "caratteristichetecniche.alimentazione, caratteristichetecniche.consumo, " +
                    "caratteristichetecniche.costiProporzionali " +
                    "from squadra join risorsaautomezzo on squadra.idAutomezzo = " +
                    "risorsaautomezzo.idAutomezzo join caratteristichetecniche on " +
                    "risorsaautomezzo.caratteristichetecniche = caratteristichetecniche.idCaratteristica;");
        } catch (SQLException e) {
            e.printStackTrace();
            System.out.println(e.getMessage());
            return myResultSet = null;
        }
    }

    /** Reads the petrol cost from the datiglobali table; -1 on error. */
    public static float getCostoBenzinaCarburanti(){
        return getCostoCarburante("costoBenzina");
    }

    /** Reads the diesel cost from the datiglobali table; -1 on error. */
    public static float getCostoDieselCarburanti(){
        return getCostoCarburante("costoDiesel");
    }

    /**
     * Shared implementation for the fuel-cost getters (the two originals were
     * copy-pasted duplicates differing only in the column name).
     *
     * @param column datiglobali column to read — an internal constant, never
     *               user input, so plain concatenation is safe here
     * @return the value, or -1 on error or if the table is empty
     */
    private static float getCostoCarburante(String column){
        dbOpenConnection(host, db, user, pass);
        float result = -1;
        try {
            myStatement = myConnection.createStatement();
            myResultSet = myStatement.executeQuery("Select " + column + " From datiglobali;");
            // FIX: guard next() — the original called getFloat() even when the
            // table was empty, which raised SQLException.
            if (myResultSet.next())
                result = myResultSet.getFloat(column);
        } catch (SQLException e) {
            e.printStackTrace();
            System.out.println(e.getMessage());
        } finally {
            dbCloseConnection();
        }
        return result;
    }

    /**
     * Builds a list of maps (column name -> value) with the distance/time data
     * from the distanzeimpianti table.
     *
     * @return one map per row; empty list on error
     */
    public static ArrayList<Map<String, String>> getMapSpostamenti(){
        ArrayList<Map<String, String>> result = new ArrayList<Map<String, String>>();
        dbOpenConnection(host, db, user, pass);
        try {
            myStatement = myConnection.createStatement();
            myResultSet = myStatement.executeQuery("Select * From distanzeimpianti; ");
            ResultSetMetaData myMeta = myResultSet.getMetaData();
            // GENERALIZED: copy every column of the row (the original
            // hard-coded columns 1..4 despite selecting *).
            int columns = myMeta.getColumnCount();
            while (myResultSet.next()) {
                HashMap<String, String> row = new HashMap<String, String>();
                for (int i = 1; i <= columns; i++)
                    row.put(myMeta.getColumnName(i), myResultSet.getString(i));
                result.add(row);
            }
        } catch (SQLException e) {
            e.printStackTrace();
            System.out.println(e.getMessage());
        } finally {
            dbCloseConnection();
        }
        return result;
    }

    /**
     * Extracts all intervention rows from the database.
     *
     * @return ResultSet of "SELECT * FROM intervento"; null on error.
     *         Caller must call dbCloseConnection().
     */
    public static ResultSet getInterventi() {
        dbOpenConnection(host, db, user, pass);
        try {
            myStatement = myConnection.createStatement();
            return myResultSet = myStatement
                    .executeQuery("SELECT * FROM intervento");
        } catch (SQLException e) {
            e.printStackTrace();
            System.out.println(e.getMessage());
            return myResultSet = null;
        }
    }

    /**
     * Extracts per-intervention details: the plant the intervention belongs to
     * and its duration.
     *
     * @return ResultSet with idIntervento, durata, idImpianto ordered by
     *         idIntervento; null on error. Caller must call dbCloseConnection().
     */
    public static ResultSet getInfoInterventi(){
        dbOpenConnection(host, db, user, pass);
        try {
            myStatement = myConnection.createStatement();
            return myResultSet = myStatement.executeQuery("SELECT intervento.idIntervento, " +
                    "intervento.durata, intervento.idImpianto " +
                    "FROM intervento join impianto on intervento.idImpianto = impianto.idImpianto " +
                    "order by idIntervento asc;");
        } catch (SQLException e) {
            e.printStackTrace();
            System.out.println(e.getMessage());
            return myResultSet = null;
        }
    }
}
| |
package io.codekvast.javaagent.scheduler;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import io.codekvast.javaagent.InvocationRegistry;
import io.codekvast.javaagent.config.AgentConfig;
import io.codekvast.javaagent.config.AgentConfigFactory;
import io.codekvast.javaagent.model.v2.GetConfigResponse2;
import io.codekvast.javaagent.publishing.CodeBasePublisher;
import io.codekvast.javaagent.publishing.CodeBasePublisherFactory;
import io.codekvast.javaagent.publishing.InvocationDataPublisher;
import io.codekvast.javaagent.publishing.InvocationDataPublisherFactory;
import io.codekvast.javaagent.publishing.impl.NoOpCodeBasePublisherImpl;
import io.codekvast.javaagent.publishing.impl.NoOpInvocationDataPublisherImpl;
import io.codekvast.junit5.extensions.CaptureSystemOutput;
import io.codekvast.junit5.extensions.CaptureSystemOutput.OutputCapture;
import java.io.IOException;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
/**
 * Unit tests for {@code Scheduler}. The system clock is mocked so the tests can
 * step "time" forward deterministically and assert exactly when config polls,
 * code-base checks and invocation-data publications fire.
 *
 * @author olle.hallin@crisp.se
 */
@CaptureSystemOutput
class SchedulerTest {
  // Fixed time origin; all test times are expressed as offsets from T1.
  private final long T1 = System.currentTimeMillis();
  // Server config response with every interval zeroed, i.e. each task is due
  // on every scheduler pass.
  private final GetConfigResponse2 configResponse =
      GetConfigResponse2.sample().toBuilder()
          .configPollIntervalSeconds(0)
          .configPollRetryIntervalSeconds(0)
          .codeBasePublisherCheckIntervalSeconds(0)
          .invocationDataPublisherIntervalSeconds(0)
          .build();
  @Mock private ConfigPoller configPollerMock;
  @Mock private CodeBasePublisherFactory codeBasePublisherFactoryMock;
  @Mock private InvocationDataPublisherFactory invocationDataPublisherFactoryMock;
  @Mock private SystemClock systemClockMock;
  private AgentConfig config =
      AgentConfigFactory.createSampleAgentConfig().toBuilder().appVersion("literal 1.17").build();
  // Real no-op publishers (not mocks) so sequence numbers and check counts can
  // be asserted directly.
  private CodeBasePublisher codeBasePublisher = new NoOpCodeBasePublisherImpl(config);
  private InvocationDataPublisher invocationDataPublisher =
      new NoOpInvocationDataPublisherImpl(config);
  private Scheduler scheduler;
  @BeforeEach
  public void setUp() {
    MockitoAnnotations.openMocks(this);
    scheduler =
        new Scheduler(
            config,
            configPollerMock,
            codeBasePublisherFactoryMock,
            invocationDataPublisherFactoryMock,
            systemClockMock);
    // The factories hand the scheduler our no-op publishers; the clock starts at T1.
    when(codeBasePublisherFactoryMock.create("no-op", config)).thenReturn(codeBasePublisher);
    when(invocationDataPublisherFactoryMock.create("no-op", config))
        .thenReturn(invocationDataPublisher);
    when(systemClockMock.currentTimeMillis()).thenReturn(T1);
    InvocationRegistry.initialize(config);
  }
  /** Moves the mocked clock to T1 + seconds and executes one scheduler pass. */
  private void setTimeToSecondsAndRunScheduler(double seconds) {
    when(systemClockMock.currentTimeMillis()).thenReturn(T1 + (long) (seconds * 1000.0));
    scheduler.run();
  }
  @Test
  void should_handle_shutdown_before_first_poll(OutputCapture outputCapture) {
    // Shutting down a scheduler that never ran must not poll or publish anything.
    scheduler.shutdown();
    verifyNoMoreInteractions(configPollerMock);
    assertThat(codeBasePublisher.getSequenceNumber(), is(0));
    assertThat(invocationDataPublisher.getSequenceNumber(), is(0));
    outputCapture.expect(containsString("Codekvast agent stopped in 0 ms"));
  }
  @Test
  void should_handle_shutdown_after_being_started() throws Exception {
    // given: zero intervals, so every run() polls and publishes
    when(configPollerMock.doPoll()).thenReturn(configResponse);
    // when
    scheduler.run();
    scheduler.run();
    scheduler.shutdown();
    // then
    verify(configPollerMock, times(2)).doPoll();
    verifyNoMoreInteractions(configPollerMock);
    assertThat(codeBasePublisher.getSequenceNumber(), is(1));
    assertThat(invocationDataPublisher.getSequenceNumber(), is(3));
  }
  @Test
  void should_schedule_correctly() throws Exception {
    // given: poll every 4 s, code base check every 6 s, invocation data every 10 s
    when(configPollerMock.doPoll())
        .thenReturn(
            configResponse.toBuilder()
                .configPollIntervalSeconds(4)
                .codeBasePublisherCheckIntervalSeconds(6)
                .invocationDataPublisherIntervalSeconds(10)
                .build());
    // timeline legend: P = config poll, C = code base check, I = invocation data
    // timeline
    // 012345678901234567890123456789012345678901234567890
    // P   P    P   P   P    P
    // C     C     C     C      C
    // I         I         I          I
    // 012345678901234567890123456789012345678901234567890
    // when: t = 0 s — everything fires on the first pass
    setTimeToSecondsAndRunScheduler(0);
    // then
    verify(configPollerMock, times(1)).doPoll();
    assertThat(codeBasePublisher.getCodeBaseCheckCount(), is(1));
    assertThat(invocationDataPublisher.getSequenceNumber(), is(1));
    // when: t = 1 s — nothing is due yet
    setTimeToSecondsAndRunScheduler(1);
    // then
    verify(configPollerMock, times(1)).doPoll();
    assertThat(codeBasePublisher.getCodeBaseCheckCount(), is(1));
    assertThat(invocationDataPublisher.getSequenceNumber(), is(1));
    // when: t = 3.5 s — still before the 4 s poll interval
    setTimeToSecondsAndRunScheduler(3.5);
    // then
    verify(configPollerMock, times(1)).doPoll();
    assertThat(codeBasePublisher.getCodeBaseCheckCount(), is(1));
    assertThat(invocationDataPublisher.getSequenceNumber(), is(1));
    // when: t = 4.5 s — second config poll fires
    setTimeToSecondsAndRunScheduler(4.5);
    // then
    verify(configPollerMock, times(2)).doPoll();
    assertThat(codeBasePublisher.getCodeBaseCheckCount(), is(1));
    assertThat(invocationDataPublisher.getSequenceNumber(), is(1));
    // when: same time again — no double-firing at the same instant
    setTimeToSecondsAndRunScheduler(4.5);
    // then
    verify(configPollerMock, times(2)).doPoll();
    assertThat(codeBasePublisher.getCodeBaseCheckCount(), is(1));
    assertThat(invocationDataPublisher.getSequenceNumber(), is(1));
    // when: t = 7 s — second code base check fires
    setTimeToSecondsAndRunScheduler(7);
    // then
    verify(configPollerMock, times(2)).doPoll();
    assertThat(codeBasePublisher.getCodeBaseCheckCount(), is(2));
    assertThat(invocationDataPublisher.getSequenceNumber(), is(1));
    // when: t = 9 s — third config poll fires
    setTimeToSecondsAndRunScheduler(9);
    // then
    verify(configPollerMock, times(3)).doPoll();
    assertThat(codeBasePublisher.getCodeBaseCheckCount(), is(2));
    assertThat(invocationDataPublisher.getSequenceNumber(), is(1));
    // when: t = 11 s — second invocation data publication fires
    setTimeToSecondsAndRunScheduler(11);
    // then
    verify(configPollerMock, times(3)).doPoll();
    assertThat(codeBasePublisher.getCodeBaseCheckCount(), is(2));
    assertThat(invocationDataPublisher.getSequenceNumber(), is(2));
    // when: t = 13 s — fourth poll and third code base check fire
    setTimeToSecondsAndRunScheduler(13);
    // then
    verify(configPollerMock, times(4)).doPoll();
    assertThat(codeBasePublisher.getCodeBaseCheckCount(), is(3));
    assertThat(invocationDataPublisher.getSequenceNumber(), is(2));
  }
  @Test
  void should_handle_initial_poll_exceptions() throws Exception {
    // A failing first poll must not propagate out of run().
    when(configPollerMock.doPoll()).thenThrow(new IOException("Mock: No contact with server"));
    scheduler.run();
  }
  @Test
  void should_retry_with_exponential_back_off() {
    // given a scheduler state with interval and retry-interval of 10
    Scheduler.SchedulerState state =
        new Scheduler.SchedulerState("poller", systemClockMock).initialize(10, 10);
    assertThat(state.getRetryIntervalFactor(), is(1));
    // the first five retries keep the interval factor at 1...
    state.scheduleRetry();
    assertThat(state.getRetryIntervalFactor(), is(1));
    state.scheduleRetry();
    assertThat(state.getRetryIntervalFactor(), is(1));
    state.scheduleRetry();
    assertThat(state.getRetryIntervalFactor(), is(1));
    state.scheduleRetry();
    assertThat(state.getRetryIntervalFactor(), is(1));
    state.scheduleRetry();
    assertThat(state.getRetryIntervalFactor(), is(1));
    // ...then the factor doubles on each retry...
    state.scheduleRetry();
    assertThat(state.getRetryIntervalFactor(), is(2));
    state.scheduleRetry();
    assertThat(state.getRetryIntervalFactor(), is(4));
    state.scheduleRetry();
    assertThat(state.getRetryIntervalFactor(), is(8));
    state.scheduleRetry();
    assertThat(state.getRetryIntervalFactor(), is(16));
    // ...and is capped at 16.
    state.scheduleRetry();
    assertThat(state.getRetryIntervalFactor(), is(16));
    // a successful (non-retry) scheduling resets the factor to 1.
    state.scheduleNext();
    assertThat(state.getRetryIntervalFactor(), is(1));
  }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.lakeformation.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/lakeformation-2017-03-31/SearchDatabasesByLFTags"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class SearchDatabasesByLFTagsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * <p>
     * A continuation token, present if the current list segment is not the last.
     * </p>
     */
    private String nextToken;

    /**
     * <p>
     * A list of databases that meet the LF-tag conditions.
     * </p>
     */
    private java.util.List<TaggedDatabase> databaseList;

    /**
     * Sets the continuation token returned by the service.
     *
     * @param nextToken
     *        A continuation token, present if the current list segment is not the last.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * Returns the continuation token, or null if the current list segment is the last.
     *
     * @return A continuation token, present if the current list segment is not the last.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Fluent variant of {@link #setNextToken(String)}.
     *
     * @param nextToken
     *        A continuation token, present if the current list segment is not the last.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SearchDatabasesByLFTagsResult withNextToken(String nextToken) {
        this.nextToken = nextToken;
        return this;
    }

    /**
     * Returns the list of databases that meet the LF-tag conditions.
     *
     * @return A list of databases that meet the LF-tag conditions.
     */
    public java.util.List<TaggedDatabase> getDatabaseList() {
        return databaseList;
    }

    /**
     * Stores a defensive copy of the given collection, or null to clear the list.
     *
     * @param databaseList
     *        A list of databases that meet the LF-tag conditions.
     */
    public void setDatabaseList(java.util.Collection<TaggedDatabase> databaseList) {
        this.databaseList = databaseList == null ? null : new java.util.ArrayList<TaggedDatabase>(databaseList);
    }

    /**
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setDatabaseList(java.util.Collection)} or {@link #withDatabaseList(java.util.Collection)} if you want to
     * override the existing values.
     * </p>
     *
     * @param databaseList
     *        A list of databases that meet the LF-tag conditions.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SearchDatabasesByLFTagsResult withDatabaseList(TaggedDatabase... databaseList) {
        if (this.databaseList == null) {
            this.databaseList = new java.util.ArrayList<TaggedDatabase>(databaseList.length);
        }
        java.util.Collections.addAll(this.databaseList, databaseList);
        return this;
    }

    /**
     * Replaces the database list with a copy of the given collection.
     *
     * @param databaseList
     *        A list of databases that meet the LF-tag conditions.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SearchDatabasesByLFTagsResult withDatabaseList(java.util.Collection<TaggedDatabase> databaseList) {
        setDatabaseList(databaseList);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("{");
        if (getNextToken() != null) {
            sb.append("NextToken: ").append(getNextToken()).append(",");
        }
        if (getDatabaseList() != null) {
            sb.append("DatabaseList: ").append(getDatabaseList());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, covering the original explicit null check
        if (!(obj instanceof SearchDatabasesByLFTagsResult)) {
            return false;
        }
        final SearchDatabasesByLFTagsResult other = (SearchDatabasesByLFTagsResult) obj;
        return java.util.Objects.equals(getNextToken(), other.getNextToken())
                && java.util.Objects.equals(getDatabaseList(), other.getDatabaseList());
    }

    @Override
    public int hashCode() {
        // Objects.hash computes the same 31-based accumulation as the hand-rolled
        // prime-multiply version it replaces.
        return java.util.Objects.hash(getNextToken(), getDatabaseList());
    }

    @Override
    public SearchDatabasesByLFTagsResult clone() {
        try {
            return (SearchDatabasesByLFTagsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

}
| |
package com.tinkerpop.blueprints.util.io.graphson;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.codehaus.jettison.json.JSONTokener;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.MappingJsonFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Element;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.blueprints.util.io.graphson.ElementPropertyConfig.ElementPropertiesRule;
/**
* Helps write individual graph elements to TinkerPop JSON format known as GraphSON.
*
* @author Stephen Mallette (http://stephen.genoprime.com)
*/
public class GraphSONUtility {
private static final JsonNodeFactory jsonNodeFactory = JsonNodeFactory.instance;
private static final JsonFactory jsonFactory = new MappingJsonFactory();
private static final String EMPTY_STRING = "";
private static final ObjectMapper mapper = new ObjectMapper();
private final GraphSONMode mode;
private final List<String> vertexPropertyKeys;
private final List<String> edgePropertyKeys;
private final ElementFactory factory;
private final boolean hasEmbeddedTypes;
private final ElementPropertiesRule vertexPropertiesRule;
private final ElementPropertiesRule edgePropertiesRule;
private final boolean normalized;
private final boolean includeReservedVertexId;
private final boolean includeReservedEdgeId;
private final boolean includeReservedVertexType;
private final boolean includeReservedEdgeType;
private final boolean includeReservedEdgeLabel;
private final boolean includeReservedEdgeOutV;
private final boolean includeReservedEdgeInV;
    /**
     * A GraphSONUtility that includes all properties of vertices and edges.
     *
     * @param mode    the GraphSON mode (NORMAL, COMPACT or EXTENDED) to read/write
     * @param factory the factory used to construct graph elements when parsing
     */
    public GraphSONUtility(final GraphSONMode mode, final ElementFactory factory) {
        this(mode, factory, ElementPropertyConfig.AllProperties);
    }
    /**
     * A GraphSONUtility that includes only the specified vertex and edge properties.
     *
     * @param mode               the GraphSON mode (NORMAL, COMPACT or EXTENDED) to read/write
     * @param factory            the factory used to construct graph elements when parsing
     * @param vertexPropertyKeys vertex property keys to include (null means no filtering)
     * @param edgePropertyKeys   edge property keys to include (null means no filtering)
     */
    public GraphSONUtility(final GraphSONMode mode, final ElementFactory factory,
                           final Set<String> vertexPropertyKeys, final Set<String> edgePropertyKeys) {
        this(mode, factory, ElementPropertyConfig.includeProperties(vertexPropertyKeys, edgePropertyKeys));
    }
public GraphSONUtility(final GraphSONMode mode, final ElementFactory factory,
final ElementPropertyConfig config) {
this.vertexPropertyKeys = config.getVertexPropertyKeys();
this.edgePropertyKeys = config.getEdgePropertyKeys();
this.vertexPropertiesRule = config.getVertexPropertiesRule();
this.edgePropertiesRule = config.getEdgePropertiesRule();
this.normalized = config.isNormalized();
this.mode = mode;
this.factory = factory;
this.hasEmbeddedTypes = mode == GraphSONMode.EXTENDED;
this.includeReservedVertexId = includeReservedKey(mode, GraphSONTokens._ID, vertexPropertyKeys, this.vertexPropertiesRule);
this.includeReservedEdgeId = includeReservedKey(mode, GraphSONTokens._ID, edgePropertyKeys, this.edgePropertiesRule);
this.includeReservedVertexType = includeReservedKey(mode, GraphSONTokens._TYPE, vertexPropertyKeys, this.vertexPropertiesRule);
this.includeReservedEdgeType = includeReservedKey(mode, GraphSONTokens._TYPE, edgePropertyKeys, this.edgePropertiesRule);
this.includeReservedEdgeLabel = includeReservedKey(mode, GraphSONTokens._LABEL, edgePropertyKeys, this.edgePropertiesRule);
this.includeReservedEdgeOutV = includeReservedKey(mode, GraphSONTokens._OUT_V, edgePropertyKeys, this.edgePropertiesRule);
this.includeReservedEdgeInV = includeReservedKey(mode, GraphSONTokens._IN_V, edgePropertyKeys, this.edgePropertiesRule);
}
/**
* Creates a vertex from GraphSON using settings supplied in the constructor.
*/
public Vertex vertexFromJson(final JSONObject json) throws IOException {
return this.vertexFromJson(json.toString());
}
/**
* Creates a vertex from GraphSON using settings supplied in the constructor.
*/
public Vertex vertexFromJson(final String json) throws IOException {
final JsonParser jp = jsonFactory.createParser(json);
final JsonNode node = jp.readValueAsTree();
return this.vertexFromJson(node);
}
/**
* Creates a vertex from GraphSON using settings supplied in the constructor.
*/
public Vertex vertexFromJson(final InputStream json) throws IOException {
final JsonParser jp = jsonFactory.createParser(json);
final JsonNode node = jp.readValueAsTree();
return this.vertexFromJson(node);
}
/**
* Creates a vertex from GraphSON using settings supplied in the constructor.
*/
public Vertex vertexFromJson(final JsonNode json) throws IOException {
final Map<String, Object> props = readProperties(json, true, this.hasEmbeddedTypes);
final Object vertexId = getTypedValueFromJsonNode(json.get(GraphSONTokens._ID));
final Vertex v = factory.createVertex(vertexId);
for (Map.Entry<String, Object> entry : props.entrySet()) {
//if (this.vertexPropertyKeys == null || vertexPropertyKeys.contains(entry.getKey())) {
if (includeKey(entry.getKey(), vertexPropertyKeys, this.vertexPropertiesRule)) {
v.setProperty(entry.getKey(), entry.getValue());
}
}
return v;
}
/**
* Creates an edge from GraphSON using settings supplied in the constructor.
*/
public Edge edgeFromJson(final JSONObject json, final Vertex out, final Vertex in) throws IOException {
return this.edgeFromJson(json.toString(), out, in);
}
/**
* Creates an edge from GraphSON using settings supplied in the constructor.
*/
public Edge edgeFromJson(final String json, final Vertex out, final Vertex in) throws IOException {
final JsonParser jp = jsonFactory.createParser(json);
final JsonNode node = jp.readValueAsTree();
return this.edgeFromJson(node, out, in);
}
/**
* Creates an edge from GraphSON using settings supplied in the constructor.
*/
public Edge edgeFromJson(final InputStream json, final Vertex out, final Vertex in) throws IOException {
final JsonParser jp = jsonFactory.createParser(json);
final JsonNode node = jp.readValueAsTree();
return this.edgeFromJson(node, out, in);
}
/**
* Creates an edge from GraphSON using settings supplied in the constructor.
*/
public Edge edgeFromJson(final JsonNode json, final Vertex out, final Vertex in) throws IOException {
final Map<String, Object> props = GraphSONUtility.readProperties(json, true, this.hasEmbeddedTypes);
final Object edgeId = getTypedValueFromJsonNode(json.get(GraphSONTokens._ID));
final JsonNode nodeLabel = json.get(GraphSONTokens._LABEL);
// assigned an empty string edge label in cases where one does not exist. this gets around the requirement
// that blueprints graphs have a non-null label while ensuring that GraphSON can stay flexible in parsing
// partial bits from the JSON. Not sure if there is any gotchas developing out of this.
final String label = nodeLabel == null ? EMPTY_STRING : nodeLabel.textValue();
final Edge e = factory.createEdge(edgeId, out, in, label);
for (Map.Entry<String, Object> entry : props.entrySet()) {
// if (this.edgePropertyKeys == null || this.edgePropertyKeys.contains(entry.getKey())) {
if (includeKey(entry.getKey(), edgePropertyKeys, this.edgePropertiesRule)) {
e.setProperty(entry.getKey(), entry.getValue());
}
}
return e;
}
/**
* Creates GraphSON for a single graph element.
*/
public JSONObject jsonFromElement(final Element element) throws JSONException {
final ObjectNode objectNode = this.objectNodeFromElement(element);
try {
return new JSONObject(new JSONTokener(mapper.writeValueAsString(objectNode)));
} catch (IOException ioe) {
// repackage this as a JSONException...seems sensible as the caller will only know about
// the jettison object not being created
throw new JSONException(ioe);
}
}
/**
* Creates GraphSON for a single graph element.
*/
public ObjectNode objectNodeFromElement(final Element element) {
final boolean isEdge = element instanceof Edge;
final boolean showTypes = mode == GraphSONMode.EXTENDED;
final List<String> propertyKeys = isEdge ? this.edgePropertyKeys : this.vertexPropertyKeys;
final ElementPropertiesRule elementPropertyConfig = isEdge ? this.edgePropertiesRule : this.vertexPropertiesRule;
final ObjectNode jsonElement = createJSONMap(createPropertyMap(element, propertyKeys, elementPropertyConfig, normalized), propertyKeys, showTypes);
if ((isEdge && this.includeReservedEdgeId) || (!isEdge && this.includeReservedVertexId)) {
putObject(jsonElement, GraphSONTokens._ID, element.getId());
}
// it's important to keep the order of these straight. check Edge first and then Vertex because there
// are graph implementations that have Edge extend from Vertex
if (element instanceof Edge) {
final Edge edge = (Edge) element;
if (this.includeReservedEdgeId) {
putObject(jsonElement, GraphSONTokens._ID, element.getId());
}
if (this.includeReservedEdgeType) {
jsonElement.put(GraphSONTokens._TYPE, GraphSONTokens.EDGE);
}
if (this.includeReservedEdgeOutV) {
putObject(jsonElement, GraphSONTokens._OUT_V, edge.getVertex(Direction.OUT).getId());
}
if (this.includeReservedEdgeInV) {
putObject(jsonElement, GraphSONTokens._IN_V, edge.getVertex(Direction.IN).getId());
}
if (this.includeReservedEdgeLabel) {
jsonElement.put(GraphSONTokens._LABEL, edge.getLabel());
}
} else if (element instanceof Vertex) {
if (this.includeReservedVertexId) {
putObject(jsonElement, GraphSONTokens._ID, element.getId());
}
if (this.includeReservedVertexType) {
jsonElement.put(GraphSONTokens._TYPE, GraphSONTokens.VERTEX);
}
}
return jsonElement;
}
/**
* Reads an individual Vertex from JSON. The vertex must match the accepted GraphSON format.
*
* @param json a single vertex in GraphSON format as Jettison JSONObject
* @param factory the factory responsible for constructing graph elements
* @param mode the mode of the GraphSON
* @param propertyKeys a list of keys to include on reading of element properties
*/
public static Vertex vertexFromJson(final JSONObject json, final ElementFactory factory, final GraphSONMode mode,
final Set<String> propertyKeys) throws IOException {
final GraphSONUtility graphson = new GraphSONUtility(mode, factory, propertyKeys, null);
return graphson.vertexFromJson(json);
}
/**
* Reads an individual Vertex from JSON. The vertex must match the accepted GraphSON format.
*
* @param json a single vertex in GraphSON format as a String.
* @param factory the factory responsible for constructing graph elements
* @param mode the mode of the GraphSON
* @param propertyKeys a list of keys to include on reading of element properties
*/
public static Vertex vertexFromJson(final String json, final ElementFactory factory, final GraphSONMode mode,
final Set<String> propertyKeys) throws IOException {
final GraphSONUtility graphson = new GraphSONUtility(mode, factory, propertyKeys, null);
return graphson.vertexFromJson(json);
}
/**
* Reads an individual Vertex from JSON. The vertex must match the accepted GraphSON format.
*
* @param json a single vertex in GraphSON format as an InputStream.
* @param factory the factory responsible for constructing graph elements
* @param mode the mode of the GraphSON
* @param propertyKeys a list of keys to include on reading of element properties
*/
public static Vertex vertexFromJson(final InputStream json, final ElementFactory factory, final GraphSONMode mode,
final Set<String> propertyKeys) throws IOException {
final GraphSONUtility graphson = new GraphSONUtility(mode, factory, propertyKeys, null);
return graphson.vertexFromJson(json);
}
/**
* Reads an individual Vertex from JSON. The vertex must match the accepted GraphSON format.
*
* @param json a single vertex in GraphSON format as Jackson JsonNode
* @param factory the factory responsible for constructing graph elements
* @param mode the mode of the GraphSON
* @param propertyKeys a list of keys to include on reading of element properties
*/
public static Vertex vertexFromJson(final JsonNode json, final ElementFactory factory, final GraphSONMode mode,
final Set<String> propertyKeys) throws IOException {
final GraphSONUtility graphson = new GraphSONUtility(mode, factory, propertyKeys, null);
return graphson.vertexFromJson(json);
}
/**
* Reads an individual Edge from JSON. The edge must match the accepted GraphSON format.
*
* @param json a single edge in GraphSON format as a Jettison JSONObject
* @param factory the factory responsible for constructing graph elements
* @param mode the mode of the GraphSON
* @param propertyKeys a list of keys to include when reading of element properties
*/
public static Edge edgeFromJson(final JSONObject json, final Vertex out, final Vertex in,
final ElementFactory factory, final GraphSONMode mode,
final Set<String> propertyKeys) throws IOException {
final GraphSONUtility graphson = new GraphSONUtility(mode, factory, null, propertyKeys);
return graphson.edgeFromJson(json, out, in);
}
/**
* Reads an individual Edge from JSON. The edge must match the accepted GraphSON format.
*
* @param json a single edge in GraphSON format as a String
* @param factory the factory responsible for constructing graph elements
* @param mode the mode of the GraphSON
* @param propertyKeys a list of keys to include when reading of element properties
*/
public static Edge edgeFromJson(final String json, final Vertex out, final Vertex in,
final ElementFactory factory, final GraphSONMode mode,
final Set<String> propertyKeys) throws IOException {
final GraphSONUtility graphson = new GraphSONUtility(mode, factory, null, propertyKeys);
return graphson.edgeFromJson(json, out, in);
}
/**
* Reads an individual Edge from JSON. The edge must match the accepted GraphSON format.
*
* @param json a single edge in GraphSON format as an InputStream
* @param factory the factory responsible for constructing graph elements
* @param mode the mode of the GraphSON
* @param propertyKeys a list of keys to include when reading of element properties
*/
public static Edge edgeFromJson(final InputStream json, final Vertex out, final Vertex in,
final ElementFactory factory, final GraphSONMode mode,
final Set<String> propertyKeys) throws IOException {
final GraphSONUtility graphson = new GraphSONUtility(mode, factory, null, propertyKeys);
return graphson.edgeFromJson(json, out, in);
}
/**
* Reads an individual Edge from JSON. The edge must match the accepted GraphSON format.
*
* @param json a single edge in GraphSON format as a Jackson JsonNode
* @param factory the factory responsible for constructing graph elements
* @param mode the mode of the GraphSON
* @param propertyKeys a list of keys to include when reading of element properties
*/
public static Edge edgeFromJson(final JsonNode json, final Vertex out, final Vertex in,
final ElementFactory factory, final GraphSONMode mode,
final Set<String> propertyKeys) throws IOException {
final GraphSONUtility graphson = new GraphSONUtility(mode, factory, null, propertyKeys);
return graphson.edgeFromJson(json, out, in);
}
/**
* Creates a Jettison JSONObject from a graph element.
*
* @param element the graph element to convert to JSON.
* @param propertyKeys The property keys at the root of the element to serialize. If null, then all keys are serialized.
* @param mode the type of GraphSON to be generated.
*/
public static JSONObject jsonFromElement(final Element element, final Set<String> propertyKeys,
final GraphSONMode mode) throws JSONException {
final GraphSONUtility graphson = element instanceof Edge ? new GraphSONUtility(mode, null, null, propertyKeys)
: new GraphSONUtility(mode, null, propertyKeys, null);
return graphson.jsonFromElement(element);
}
/**
* Creates a Jackson ObjectNode from a graph element.
*
* @param element the graph element to convert to JSON.
* @param propertyKeys The property keys at the root of the element to serialize. If null, then all keys are serialized.
* @param mode The type of GraphSON to generate.
*/
public static ObjectNode objectNodeFromElement(final Element element, final Set<String> propertyKeys, final GraphSONMode mode) {
final GraphSONUtility graphson = element instanceof Edge ? new GraphSONUtility(mode, null, null, propertyKeys)
: new GraphSONUtility(mode, null, propertyKeys, null);
return graphson.objectNodeFromElement(element);
}
private static ObjectNode objectNodeFromElement(final Element element, final List<String> propertyKeys, final GraphSONMode mode) {
final GraphSONUtility graphson = element instanceof Edge ? new GraphSONUtility(mode, null, null, new HashSet<String>(propertyKeys))
: new GraphSONUtility(mode, null, new HashSet<String>(propertyKeys), null);
return graphson.objectNodeFromElement(element);
}
static Map<String, Object> readProperties(final JsonNode node, final boolean ignoreReservedKeys, final boolean hasEmbeddedTypes) {
final Map<String, Object> map = new HashMap<String, Object>();
final Iterator<Map.Entry<String, JsonNode>> iterator = node.fields();
while (iterator.hasNext()) {
final Map.Entry<String, JsonNode> entry = iterator.next();
if (!ignoreReservedKeys || !isReservedKey(entry.getKey())) {
// it generally shouldn't be as such but graphson containing null values can't be shoved into
// element property keys or it will result in error
final Object o = readProperty(entry.getValue(), hasEmbeddedTypes);
if (o != null) {
map.put(entry.getKey(), o);
}
}
}
return map;
}
private static boolean includeReservedKey(final GraphSONMode mode, final String key,
final List<String> propertyKeys,
final ElementPropertiesRule rule) {
// the key is always included in modes other than compact. if it is compact, then validate that the
// key is in the property key list
return mode != GraphSONMode.COMPACT || includeKey(key, propertyKeys, rule);
}
private static boolean includeKey(final String key, final List<String> propertyKeys,
final ElementPropertiesRule rule) {
if (propertyKeys == null) {
// when null always include the key and shortcut this piece
return true;
}
// default the key situation. if it's included then it should be explicitly defined in the
// property keys list to be included or the reverse otherwise
boolean keySituation = rule == ElementPropertiesRule.INCLUDE;
switch (rule) {
case INCLUDE:
keySituation = propertyKeys.contains(key);
break;
case EXCLUDE:
keySituation = !propertyKeys.contains(key);
break;
}
return keySituation;
}
private static boolean isReservedKey(final String key) {
return key.equals(GraphSONTokens._ID) || key.equals(GraphSONTokens._TYPE) || key.equals(GraphSONTokens._LABEL)
|| key.equals(GraphSONTokens._OUT_V) || key.equals(GraphSONTokens._IN_V);
}
private static Object readProperty(final JsonNode node, final boolean hasEmbeddedTypes) {
final Object propertyValue;
if (hasEmbeddedTypes) {
if (node.get(GraphSONTokens.TYPE).textValue().equals(GraphSONTokens.TYPE_UNKNOWN)) {
propertyValue = null;
} else if (node.get(GraphSONTokens.TYPE).textValue().equals(GraphSONTokens.TYPE_BOOLEAN)) {
propertyValue = node.get(GraphSONTokens.VALUE).booleanValue();
} else if (node.get(GraphSONTokens.TYPE).textValue().equals(GraphSONTokens.TYPE_FLOAT)) {
propertyValue = Float.parseFloat(node.get(GraphSONTokens.VALUE).asText());
} else if (node.get(GraphSONTokens.TYPE).textValue().equals(GraphSONTokens.TYPE_BYTE)) {
propertyValue = Byte.parseByte(node.get(GraphSONTokens.VALUE).asText());
} else if (node.get(GraphSONTokens.TYPE).textValue().equals(GraphSONTokens.TYPE_SHORT)) {
propertyValue = Short.parseShort(node.get(GraphSONTokens.VALUE).asText());
} else if (node.get(GraphSONTokens.TYPE).textValue().equals(GraphSONTokens.TYPE_DOUBLE)) {
propertyValue = node.get(GraphSONTokens.VALUE).doubleValue();
} else if (node.get(GraphSONTokens.TYPE).textValue().equals(GraphSONTokens.TYPE_INTEGER)) {
propertyValue = node.get(GraphSONTokens.VALUE).intValue();
} else if (node.get(GraphSONTokens.TYPE).textValue().equals(GraphSONTokens.TYPE_LONG)) {
propertyValue = node.get(GraphSONTokens.VALUE).longValue();
} else if (node.get(GraphSONTokens.TYPE).textValue().equals(GraphSONTokens.TYPE_STRING)) {
propertyValue = node.get(GraphSONTokens.VALUE).textValue();
} else if (node.get(GraphSONTokens.TYPE).textValue().equals(GraphSONTokens.TYPE_LIST)) {
propertyValue = readProperties(node.get(GraphSONTokens.VALUE).elements(), hasEmbeddedTypes);
} else if (node.get(GraphSONTokens.TYPE).textValue().equals(GraphSONTokens.TYPE_MAP)) {
propertyValue = readProperties(node.get(GraphSONTokens.VALUE), false, hasEmbeddedTypes);
} else {
propertyValue = node.textValue();
}
} else {
if (node.isNull()) {
propertyValue = null;
} else if (node.isBoolean()) {
propertyValue = node.booleanValue();
} else if (node.isDouble()) {
propertyValue = node.doubleValue();
} else if (node.isInt()) {
propertyValue = node.intValue();
} else if (node.isLong()) {
propertyValue = node.longValue();
} else if (node.isTextual()) {
propertyValue = node.textValue();
} else if (node.isArray()) {
propertyValue = readProperties(node.elements(), hasEmbeddedTypes);
} else if (node.isObject()) {
propertyValue = readProperties(node, false, hasEmbeddedTypes);
} else {
propertyValue = node.textValue();
}
}
return propertyValue;
}
private static List readProperties(final Iterator<JsonNode> listOfNodes, final boolean hasEmbeddedTypes) {
final List array = new ArrayList();
while (listOfNodes.hasNext()) {
array.add(readProperty(listOfNodes.next(), hasEmbeddedTypes));
}
return array;
}
private static ArrayNode createJSONList(final List list, final List<String> propertyKeys, final boolean showTypes) {
final ArrayNode jsonList = jsonNodeFactory.arrayNode();
for (Object item : list) {
if (item instanceof Element) {
jsonList.add(objectNodeFromElement((Element) item, propertyKeys,
showTypes ? GraphSONMode.EXTENDED : GraphSONMode.NORMAL));
} else if (item instanceof List) {
jsonList.add(createJSONList((List) item, propertyKeys, showTypes));
} else if (item instanceof Map) {
jsonList.add(createJSONMap((Map) item, propertyKeys, showTypes));
} else if (item != null && item.getClass().isArray()) {
jsonList.add(createJSONList(convertArrayToList(item), propertyKeys, showTypes));
} else {
addObject(jsonList, item);
}
}
return jsonList;
}
private static ObjectNode createJSONMap(final Map map, final List<String> propertyKeys, final boolean showTypes) {
final ObjectNode jsonMap = jsonNodeFactory.objectNode();
for (Object key : map.keySet()) {
Object value = map.get(key);
if (value != null) {
if (value instanceof List) {
value = createJSONList((List) value, propertyKeys, showTypes);
} else if (value instanceof Iterable){
value = createJSONList(getList((Iterable) value), propertyKeys, showTypes);
} else if (value instanceof Iterator){
value = createJSONList(getList((Iterator) value), propertyKeys, showTypes);
} else if (value instanceof Map) {
value = createJSONMap((Map) value, propertyKeys, showTypes);
} else if (value instanceof Element) {
value = objectNodeFromElement((Element) value, propertyKeys,
showTypes ? GraphSONMode.EXTENDED : GraphSONMode.NORMAL);
} else if (value.getClass().isArray()) {
value = createJSONList(convertArrayToList(value), propertyKeys, showTypes);
}
}
putObject(jsonMap, key.toString(), getValue(value, showTypes));
}
return jsonMap;
}
private static List getList(final Iterable value) {
return getList(value.iterator());
}
private static List getList(final Iterator value) {
final List result = new ArrayList();
while (value.hasNext()) {
result.add(value.next());
}
return result;
}
    /**
     * Appends a value to a Jackson array node using the add() overload that matches
     * the value's exact runtime class; JSON nodes are appended as-is and any
     * unrecognized type falls back to toString().
     *
     * @param jsonList the array node to append to
     * @param value    the value to append (may be null)
     */
    private static void addObject(final ArrayNode jsonList, final Object value) {
        // getClass() comparisons are exact on purpose: they route each boxed type
        // to the matching ArrayNode.add overload
        if (value == null) {
            jsonList.add((JsonNode) null);
        } else if (value.getClass() == Boolean.class) {
            jsonList.add((Boolean) value);
        } else if (value.getClass() == Long.class) {
            jsonList.add((Long) value);
        } else if (value.getClass() == Integer.class) {
            jsonList.add((Integer) value);
        } else if (value.getClass() == Float.class) {
            jsonList.add((Float) value);
        } else if (value.getClass() == Double.class) {
            jsonList.add((Double) value);
        } else if (value.getClass() == Byte.class) {
            jsonList.add((Byte) value);
        } else if (value.getClass() == Short.class) {
            jsonList.add((Short) value);
        } else if (value.getClass() == String.class) {
            jsonList.add((String) value);
        } else if (value instanceof ObjectNode) {
            jsonList.add((ObjectNode) value);
        } else if (value instanceof ArrayNode) {
            jsonList.add((ArrayNode) value);
        } else {
            // unknown type: store its string form
            jsonList.add(value.toString());
        }
    }
    /**
     * Stores a value in a Jackson object node using the put() overload that matches
     * the value's exact runtime class; JSON nodes are stored as-is and any
     * unrecognized type falls back to toString().
     *
     * @param jsonMap the object node to write into
     * @param key     the field name
     * @param value   the value to store (may be null)
     */
    private static void putObject(final ObjectNode jsonMap, final String key, final Object value) {
        // getClass() comparisons are exact on purpose: they route each boxed type
        // to the matching ObjectNode.put overload
        if (value == null) {
            jsonMap.put(key, (JsonNode) null);
        } else if (value.getClass() == Boolean.class) {
            jsonMap.put(key, (Boolean) value);
        } else if (value.getClass() == Long.class) {
            jsonMap.put(key, (Long) value);
        } else if (value.getClass() == Integer.class) {
            jsonMap.put(key, (Integer) value);
        } else if (value.getClass() == Float.class) {
            jsonMap.put(key, (Float) value);
        } else if (value.getClass() == Double.class) {
            jsonMap.put(key, (Double) value);
        } else if (value.getClass() == Short.class) {
            jsonMap.put(key, (Short) value);
        } else if (value.getClass() == Byte.class) {
            jsonMap.put(key, (Byte) value);
        } else if (value.getClass() == String.class) {
            jsonMap.put(key, (String) value);
        } else if (value instanceof ObjectNode) {
            jsonMap.put(key, (ObjectNode) value);
        } else if (value instanceof ArrayNode) {
            jsonMap.put(key, (ArrayNode) value);
        } else {
            // unknown type: store its string form
            jsonMap.put(key, value.toString());
        }
    }
/**
 * Builds a map of {@code element}'s properties, filtered by {@code propertyKeys}
 * according to {@code rule}. When {@code normalized} is true the element's keys are
 * iterated in natural (sorted) order so output is deterministic.
 *
 * @param element      the graph element whose properties are copied
 * @param propertyKeys keys to include or exclude (per {@code rule}); null means "all"
 * @param rule         INCLUDE treats {@code propertyKeys} as a whitelist, otherwise a blacklist
 * @param normalized   sort the element's keys before copying
 * @return a new map of property name to value; null-valued properties are skipped
 */
private static Map createPropertyMap(final Element element, final List<String> propertyKeys,
                                     final ElementPropertiesRule rule, final boolean normalized) {
    // parameterized internally to avoid raw-type use; the raw return type is kept
    // so existing callers compile unchanged
    final Map<String, Object> map = new HashMap<String, Object>();
    final List<String> propertyKeyList = new ArrayList<String>(element.getPropertyKeys());
    if (normalized) {
        Collections.sort(propertyKeyList);
    }
    if (propertyKeys == null) {
        // no filter: copy every property
        for (String key : propertyKeyList) {
            copyPropertyIfPresent(element, key, map);
        }
    } else if (rule == ElementPropertiesRule.INCLUDE) {
        // whitelist: copy only the requested keys (iterated in the caller's order,
        // matching the original behavior even when normalized)
        for (String key : propertyKeys) {
            copyPropertyIfPresent(element, key, map);
        }
    } else {
        // blacklist: copy everything except the listed keys
        for (String key : propertyKeyList) {
            if (!propertyKeys.contains(key)) {
                copyPropertyIfPresent(element, key, map);
            }
        }
    }
    return map;
}

/** Copies {@code element}'s value for {@code key} into {@code map}, skipping nulls. */
private static void copyPropertyIfPresent(final Element element, final String key,
                                          final Map<String, Object> map) {
    final Object value = element.getProperty(key);
    if (value != null) {
        map.put(key, value);
    }
}
/**
 * Returns {@code value} unchanged when {@code includeType} is false. Otherwise wraps it
 * in an ObjectNode of the shape {"type": &lt;token&gt;, "value": &lt;payload&gt;}, where
 * lists are rebuilt element-by-element (recursively) and maps are copied entry-by-entry.
 *
 * @param value       the raw property value (may be a Jackson node for containers)
 * @param includeType whether to emit GraphSON type information alongside the value
 * @return the original value, or an ObjectNode carrying type + value
 */
private static Object getValue(Object value, final boolean includeType) {
Object returnValue = value;
// if the includeType is set to true then show the data types of the properties
if (includeType) {
// type will be one of: map, list, string, long, int, double, float.
// in the event of a complex object it will call a toString and store as a
// string
String type = determineType(value);
ObjectNode valueAndType = jsonNodeFactory.objectNode();
valueAndType.put(GraphSONTokens.TYPE, type);
if (type.equals(GraphSONTokens.TYPE_LIST)) {
// values of lists must be accumulated as ObjectNode objects under the value key.
// will return as a ArrayNode. called recursively to traverse the entire
// object graph of each item in the array.
ArrayNode list = (ArrayNode) value;
// there is a set of values that must be accumulated as an array under a key
ArrayNode valueArray = valueAndType.putArray(GraphSONTokens.VALUE);
for (int ix = 0; ix < list.size(); ix++) {
// the value of each item in the array is a node object from an ArrayNode...must
// get the value of it.
addObject(valueArray, getValue(getTypedValueFromJsonNode(list.get(ix)), includeType));
}
} else if (type.equals(GraphSONTokens.TYPE_MAP)) {
// maps are converted to a ObjectNode. called recursively to traverse
// the entire object graph within the map.
// NOTE(review): keyIterator is a raw Iterator over field names (Strings);
// entries are copied without re-wrapping, assuming nested ObjectNode content
// already carries type info - confirm against the serializer callers.
ObjectNode convertedMap = jsonNodeFactory.objectNode();
ObjectNode jsonObject = (ObjectNode) value;
Iterator keyIterator = jsonObject.fieldNames();
while (keyIterator.hasNext()) {
Object key = keyIterator.next();
// no need to getValue() here as this is already a ObjectNode and should have type info
convertedMap.put(key.toString(), jsonObject.get(key.toString()));
}
valueAndType.put(GraphSONTokens.VALUE, convertedMap);
} else {
// this must be a primitive value or a complex object. if a complex
// object it will be handled by a call to toString and stored as a
// string value
putObject(valueAndType, GraphSONTokens.VALUE, value);
}
// this goes back as a JSONObject with data type and value
returnValue = valueAndType;
}
return returnValue;
}
/**
 * Extracts the Java value backing {@code node}: booleans, numbers and text become their
 * wrapper types, while array and object nodes are returned as-is so the caller can
 * decompose them recursively. Returns null for a null or JSON-null node.
 */
static Object getTypedValueFromJsonNode(JsonNode node) {
    if (node == null || node.isNull()) {
        return null;
    }
    if (node.isBoolean()) {
        return node.booleanValue();
    }
    // isDouble must be probed before the broader isFloatingPointNumber check so
    // that double values are not narrowed to float
    if (node.isDouble()) {
        return node.doubleValue();
    }
    if (node.isFloatingPointNumber()) {
        return node.floatValue();
    }
    if (node.isInt()) {
        return node.intValue();
    }
    if (node.isLong()) {
        return node.longValue();
    }
    if (node.isTextual()) {
        return node.textValue();
    }
    if (node.isArray() || node.isObject()) {
        // containers are handed back untouched; callers reprocess their elements
        return node;
    }
    // anything else: fall back to its textual value
    return node.textValue();
}
/**
 * Copies the contents of any Java array (object or primitive) into a new mutable list,
 * boxing primitive elements via the reflective {@link Array} accessors.
 *
 * @param value a non-null object of an array type
 * @return a new list containing the (boxed) array elements in order
 */
static List convertArrayToList(final Object value) {
    final int length = Array.getLength(value);
    // presize to the known length to avoid intermediate growth; package-private
    // visibility for consistency with getTypedValueFromJsonNode
    final ArrayList<Object> list = new ArrayList<Object>(length);
    for (int i = 0; i < length; i++) {
        list.add(Array.get(value, i));
    }
    return list;
}
/**
 * Maps a property value to its GraphSON type token. Null maps to the dedicated unknown
 * token; any unrecognised complex object is typed as a string (it will be serialized
 * via toString).
 */
private static String determineType(final Object value) {
    if (value == null) {
        return GraphSONTokens.TYPE_UNKNOWN;
    }
    final Class<?> valueClass = value.getClass();
    if (valueClass == Double.class) {
        return GraphSONTokens.TYPE_DOUBLE;
    }
    if (valueClass == Float.class) {
        return GraphSONTokens.TYPE_FLOAT;
    }
    if (valueClass == Byte.class) {
        return GraphSONTokens.TYPE_BYTE;
    }
    if (valueClass == Short.class) {
        return GraphSONTokens.TYPE_SHORT;
    }
    if (valueClass == Integer.class) {
        return GraphSONTokens.TYPE_INTEGER;
    }
    if (valueClass == Long.class) {
        return GraphSONTokens.TYPE_LONG;
    }
    if (valueClass == Boolean.class) {
        return GraphSONTokens.TYPE_BOOLEAN;
    }
    if (value instanceof ArrayNode) {
        return GraphSONTokens.TYPE_LIST;
    }
    if (value instanceof ObjectNode) {
        return GraphSONTokens.TYPE_MAP;
    }
    return GraphSONTokens.TYPE_STRING;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.streams.threaded.tasks;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.lang.SerializationException;
import org.apache.streams.core.*;
import org.apache.streams.jackson.StreamsJacksonMapper;
import org.apache.streams.threaded.controller.ThreadingController;
import org.apache.streams.util.SerializationUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.Serializable;
import java.util.*;
import java.util.concurrent.atomic.AtomicLong;
/**
 * Base implementation shared by all threaded streams tasks (providers, processors and
 * persist writers). It owns the wiring to downstream tasks, guards the wrapped
 * {@link StreamsOperation}'s prepare/cleanup lifecycle so each runs at most once, and
 * tracks success/failure counts plus time spent per datum for status reporting.
 */
public abstract class BaseStreamsTask implements StreamsTask {

    private static final Logger LOGGER = LoggerFactory.getLogger(BaseStreamsTask.class);

    private final ThreadingController threadingController;
    private final StreamsOperation streamsOperation;
    private final String id;
    // human readable task category, derived once from the concrete subclass
    private final String type;

    protected final Map<String, Object> config;

    // ids registered via addOutputQueue(), resolved to tasks in initialize()
    private final Set<String> downStreamIds = new HashSet<String>();
    protected final Set<StreamsTask> downStreamTasks = new HashSet<StreamsTask>();

    protected final DatumStatusCounter statusCounter = new DatumStatusCounter();

    // datums currently in flight, and cumulative milliseconds spent on successful /
    // failed datums respectively
    private final AtomicLong workingCounter = new AtomicLong(0);
    private final AtomicLong timeSpentSuccess = new AtomicLong(0);
    private final AtomicLong timeSpentFailure = new AtomicLong(0);

    private boolean isPrepared = false;
    private boolean isCleanedUp = false;

    BaseStreamsTask(ThreadingController threadingController, String id, Map<String, Object> config, StreamsOperation streamsOperation) {
        this.threadingController = threadingController;
        this.id = id;
        this.config = config;
        this.streamsOperation = streamsOperation;
        if (this.getClass().equals(StreamsProviderTask.class)) {
            this.type = "provider";
        } else if (this.getClass().equals(StreamsProcessorTask.class)) {
            this.type = "processor";
        } else if (this.getClass().equals(StreamsPersistWriterTask.class)) {
            this.type = "writer";
        } else {
            this.type = "unknown";
        }
    }

    /**
     * Resolves the registered downstream ids against {@code ctx} and caches the tasks.
     * Unknown ids are logged and skipped instead of being added as null (a null entry
     * would previously have caused an NPE in {@link #sendToChildren(StreamsDatum)}).
     */
    public void initialize(final Map<String, StreamsTask> ctx) {
        for (String downStreamId : this.downStreamIds) {
            final StreamsTask task = ctx.get(downStreamId);
            if (task == null) {
                LOGGER.warn("No task found for downstream id [{}] on component [{}]", downStreamId, this.id);
            } else {
                this.downStreamTasks.add(task);
            }
        }
    }

    public Collection<StreamsTask> getChildren() {
        return this.downStreamTasks;
    }

    @Override
    public StatusCounts getCurrentStatus() {
        // prefer the operation's own counters when it can report them itself
        final DatumStatusCounter counter = this.streamsOperation instanceof DatumStatusCountable
                ? ((DatumStatusCountable) this.streamsOperation).getDatumStatusCounter()
                : this.statusCounter;
        return new StatusCounts(this.id, this.type, this.workingCounter.get(), counter.getSuccess(),
                counter.getFail(), this.timeSpentSuccess.get(), this.timeSpentFailure.get());
    }

    @Override
    public final String getId() {
        return this.id;
    }

    protected ThreadingController getThreadingController() {
        return this.threadingController;
    }

    public String getType() {
        return this.type;
    }

    @Override
    public void addOutputQueue(String id) {
        this.downStreamIds.add(id);
    }

    @Override
    public String toString() {
        return this.getClass().getName() + "[" + this.getId() + "]: " + this.getCurrentStatus().toString();
    }

    /**
     * Prepares the wrapped operation exactly once. Failures are logged (with the full
     * stack trace) but not rethrown; the task is still marked prepared afterwards so a
     * broken operation is not retried, matching the original semantics.
     */
    @Override
    public final void prepare(Object configuration) {
        try {
            if (!this.isPrepared) {
                this.streamsOperation.prepare(configuration);
            }
        } catch (Throwable e) {
            // pass the throwable itself so the stack trace is preserved in the logs
            LOGGER.warn("Problem preparing the component[{}]", this.getId(), e);
        }
        this.isPrepared = true;
    }

    /**
     * Cleans up the wrapped operation exactly once; mirror of {@link #prepare(Object)}.
     */
    @Override
    public final void cleanup() {
        try {
            if (!this.isCleanedUp) {
                this.streamsOperation.cleanUp();
            }
        } catch (Throwable e) {
            LOGGER.warn("Problem Cleaning Up Component[{}]", this.getId(), e);
        }
        this.isCleanedUp = true;
    }

    /**
     * Processes one datum and fans the results out to every downstream task.
     */
    @Override
    public final void process(StreamsDatum datum) {
        final Collection<StreamsDatum> myDatums = this.fetch(datum);
        if (myDatums != null) {
            for (StreamsDatum d : myDatums) {
                sendToChildren(d);
            }
        }
    }

    protected final void sendToChildren(final StreamsDatum datum) {
        for (StreamsTask t : this.downStreamTasks) {
            t.process(datum);
        }
    }

    /**
     * Runs {@link #processInternal(StreamsDatum)} on a defensive clone of {@code datum},
     * recording success/failure counts and time spent. Returns null on failure.
     */
    private Collection<StreamsDatum> fetch(StreamsDatum datum) {
        // currentTimeMillis() avoids allocating a Date just to read the clock
        final long startTime = System.currentTimeMillis();
        Collection<StreamsDatum> toReturn = null;
        this.workingCounter.incrementAndGet();
        try {
            toReturn = this.processInternal(cloneStreamsDatum(datum));
            this.statusCounter.incrementStatus(DatumStatus.SUCCESS);
            this.timeSpentSuccess.addAndGet(System.currentTimeMillis() - startTime);
        } catch (Throwable e) {
            LOGGER.warn("Datum failed", e);
            this.statusCounter.incrementStatus(DatumStatus.FAIL);
            this.timeSpentFailure.addAndGet(System.currentTimeMillis() - startTime);
        } finally {
            this.workingCounter.decrementAndGet();
        }
        return toReturn;
    }

    protected abstract Collection<StreamsDatum> processInternal(StreamsDatum datum);

    /**
     * In order for our data streams to be ported to other data flow frameworks (Storm,
     * Hadoop, Spark, etc.) we need to be able to enforce the serialization required by
     * each framework.
     * <p/>
     * The object must be either marked as Serializable OR be an instance of ObjectNode
     * in order to be cloned.
     *
     * @param datum The datum you wish to clone
     * @return A Streams datum
     * @throws SerializationException (runtime) if every cloning strategy fails
     */
    private StreamsDatum cloneStreamsDatum(StreamsDatum datum) throws SerializationException {
        // Jackson trees are not Serializable, so clone them with deepCopy()
        if (datum.getDocument() instanceof ObjectNode) {
            return copyMetaData(datum, new StreamsDatum(((ObjectNode) datum.getDocument()).deepCopy(),
                    datum.getTimestamp(), datum.getSequenceid()));
        }
        try {
            // Try to serialize the document using standard serialization methods
            return (StreamsDatum) org.apache.commons.lang.SerializationUtils.clone(datum);
        } catch (SerializationException ser) {
            try {
                // Use the brute force method for serialization: round-trip through JSON
                final String value = StreamsJacksonMapper.getInstance().writeValueAsString(datum.getDocument());
                final Object object = StreamsJacksonMapper.getInstance().readValue(value, datum.getDocument().getClass());
                return copyMetaData(datum, new StreamsDatum(object, datum.getId(), datum.getTimestamp(), datum.getSequenceid()));
            } catch (IOException e) {
                // IOException covers JsonMappingException, JsonParseException and
                // JsonProcessingException as well
                LOGGER.warn("Unable to clone datum: {} - {}", e.getMessage(), datum);
                // chain the cause instead of discarding it
                throw new SerializationException("Unable to clone datum", e);
            }
        }
    }

    /**
     * Copies metadata entries from one datum to another, deep-cloning values that are
     * Serializable and copying everything else by reference.
     */
    private StreamsDatum copyMetaData(StreamsDatum copyFrom, StreamsDatum copyTo) {
        final Map<String, Object> toMeta = copyTo.getMetadata();
        for (Map.Entry<String, Object> entry : copyFrom.getMetadata().entrySet()) {
            final Object value = entry.getValue();
            if (value instanceof Serializable) {
                toMeta.put(entry.getKey(), SerializationUtil.cloneBySerialization(value));
            } else { // hope for the best - should be serializable
                toMeta.put(entry.getKey(), value);
            }
        }
        return copyTo;
    }
}
| |
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.am.integration.tests.api.lifecycle;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.wso2.am.integration.test.utils.APIManagerIntegrationTestException;
import org.wso2.am.integration.test.utils.bean.APICreationRequestBean;
import org.wso2.am.integration.test.utils.bean.APILifeCycleState;
import org.wso2.am.integration.test.utils.bean.APILifeCycleStateRequest;
import org.wso2.am.integration.test.utils.clients.APIPublisherRestClient;
import org.wso2.am.integration.test.utils.clients.APIStoreRestClient;
import org.wso2.carbon.apimgt.api.model.APIIdentifier;
import org.wso2.carbon.automation.test.utils.http.client.HttpRequestUtil;
import org.wso2.carbon.automation.test.utils.http.client.HttpResponse;
import javax.xml.xpath.XPathExpressionException;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
/**
 * Publish an API, then copy it to create a new version and publish that version with
 * re-subscription required. Verify invocation of the new API version both before and
 * after re-subscribing.
 */
public class AccessibilityOfOldAPIAndCopyAPIWithReSubscriptionTestCase extends APIManagerLifecycleBaseTest {

    private static final String API_NAME = "CopyAPIWithReSubscriptionTest";
    private static final String API_CONTEXT = "CopyAPIWithReSubscription";
    private static final String API_TAGS = "testTag1, testTag2, testTag3";
    private static final String API_DESCRIPTION = "This is test API create by API manager integration test";
    private static final String API_END_POINT_METHOD = "/customers/123";
    private static final String API_RESPONSE_DATA = "<id>123</id><name>John</name></Customer>";
    private static final String API_VERSION_1_0_0 = "1.0.0";
    private static final String API_VERSION_2_0_0 = "2.0.0";
    private static final String APPLICATION_NAME = "AccessibilityOfOldAPIAndCopyAPIWithReSubscriptionTestCase";
    private static final String API_END_POINT_POSTFIX_URL = "jaxrs_basic/services/customers/customerservice/";

    private String apiEndPointUrl;
    private APIIdentifier apiIdentifierAPI1Version1;
    private APIIdentifier apiIdentifierAPI1Version2;
    private String providerName;
    private APICreationRequestBean apiCreationRequestBean;
    private Map<String, String> requestHeaders;
    private APIPublisherRestClient apiPublisherClientUser1;
    private APIStoreRestClient apiStoreClientUser1;

    /**
     * Logs in to the publisher and store as admin, prepares the API creation request
     * bean and creates the application used for the subscriptions under test.
     */
    @BeforeClass(alwaysRun = true)
    public void initialize() throws APIManagerIntegrationTestException, XPathExpressionException, MalformedURLException {
        super.init();
        apiEndPointUrl = gatewayUrls.getWebAppURLHttp() + API_END_POINT_POSTFIX_URL;
        providerName = publisherContext.getContextTenant().getContextUser().getUserName();
        apiCreationRequestBean = new APICreationRequestBean(API_NAME, API_CONTEXT, API_VERSION_1_0_0,
                providerName, new URL(apiEndPointUrl));
        apiCreationRequestBean.setTags(API_TAGS);
        apiCreationRequestBean.setDescription(API_DESCRIPTION);
        apiIdentifierAPI1Version1 = new APIIdentifier(providerName, API_NAME, API_VERSION_1_0_0);
        apiIdentifierAPI1Version2 = new APIIdentifier(providerName, API_NAME, API_VERSION_2_0_0);
        String publisherURLHttp = publisherUrls.getWebAppURLHttp();
        String storeURLHttp = storeUrls.getWebAppURLHttp();
        apiPublisherClientUser1 = new APIPublisherRestClient(publisherURLHttp);
        apiStoreClientUser1 = new APIStoreRestClient(storeURLHttp);
        //Login to API Publisher with admin
        apiPublisherClientUser1.login(
                publisherContext.getContextTenant().getContextUser().getUserName(),
                publisherContext.getContextTenant().getContextUser().getPassword());
        //Login to API Store with admin
        apiStoreClientUser1.login(
                storeContext.getContextTenant().getContextUser().getUserName(),
                storeContext.getContextTenant().getContextUser().getPassword());
        apiStoreClientUser1.addApplication(APPLICATION_NAME, "", "", "");
    }

    @Test(groups = {"wso2.am"}, description = "Test subscribe of old api version.")
    public void testSubscriptionOfOldAPI() throws APIManagerIntegrationTestException {
        //Create and publish API version 1.0.0
        createAndPublishAPI(apiIdentifierAPI1Version1, apiCreationRequestBean, apiPublisherClientUser1, false);
        // Subscribe old api version (1.0.0)
        HttpResponse oldVersionSubscribeResponse =
                subscribeToAPI(apiIdentifierAPI1Version1, APPLICATION_NAME, apiStoreClientUser1);
        assertEquals(oldVersionSubscribeResponse.getResponseCode(), HTTP_RESPONSE_CODE_OK,
                "Subscribe of old API version request not successful " +
                        getAPIIdentifierString(apiIdentifierAPI1Version1));
        assertEquals(getValueFromJSON(oldVersionSubscribeResponse, "error"), "false",
                "Error in subscribe of old API version" + getAPIIdentifierString(apiIdentifierAPI1Version1) +
                        "Response Data:" + oldVersionSubscribeResponse.getData());
    }

    @Test(groups = {"wso2.am"}, description = "Test publishing of copied API with re-subscription required",
            dependsOnMethods = "testSubscriptionOfOldAPI")
    public void testPublishCopiedAPIWithReSubscriptionRequired() throws APIManagerIntegrationTestException {
        // Copy API
        copyAPI(apiIdentifierAPI1Version1, API_VERSION_2_0_0, apiPublisherClientUser1);
        // Publish version 2.0.0 with re-subscription required: the second argument of
        // changeAPILifeCycleStatusToPublish requests re-subscription. (A previously
        // constructed APILifeCycleStateRequest was never sent anywhere - that dead
        // code has been removed.)
        HttpResponse publishAPIResponse =
                apiPublisherClientUser1.changeAPILifeCycleStatusToPublish(apiIdentifierAPI1Version2, true);
        assertEquals(publishAPIResponse.getResponseCode(), HTTP_RESPONSE_CODE_OK,
                "API publish Response code is invalid " + getAPIIdentifierString(apiIdentifierAPI1Version2));
        assertTrue(verifyAPIStatusChange(publishAPIResponse, APILifeCycleState.CREATED, APILifeCycleState.PUBLISHED),
                "API status Change is invalid in" + getAPIIdentifierString(apiIdentifierAPI1Version2) +
                        "Response Data:" + publishAPIResponse.getData());
    }

    @Test(groups = {"wso2.am"}, description = "Test invocation of old API version before the new version is subscribed.",
            dependsOnMethods = "testPublishCopiedAPIWithReSubscriptionRequired")
    public void testInvokeOldAPIBeforeSubscribeTheNewVersion() throws APIManagerIntegrationTestException, IOException {
        //get access token
        String accessToken = generateApplicationKeys(apiStoreClientUser1, APPLICATION_NAME).getAccessToken();
        // Create requestHeaders (reused by the later invocation tests)
        requestHeaders = new HashMap<String, String>();
        requestHeaders.put("accept", "text/xml");
        requestHeaders.put("Authorization", "Bearer " + accessToken);
        //Invoke old version
        HttpResponse oldVersionInvokeResponse =
                HttpRequestUtil.doGet(gatewayWebAppUrl + API_CONTEXT + "/" + API_VERSION_1_0_0 +
                        API_END_POINT_METHOD, requestHeaders);
        assertEquals(oldVersionInvokeResponse.getResponseCode(), HTTP_RESPONSE_CODE_OK,
                "Response code mismatched when invoke old api before subscribe the new version");
        assertTrue(oldVersionInvokeResponse.getData().contains(API_RESPONSE_DATA),
                "Response data mismatched when invoke old API version before subscribe the new version." +
                        " Response Data:" + oldVersionInvokeResponse.getData());
    }

    @Test(groups = {"wso2.am"}, description = "Test invocation of new API version before the new version is subscribed." +
            "This invocation should be failed", dependsOnMethods = "testInvokeOldAPIBeforeSubscribeTheNewVersion")
    public void testInvokeNewAPIBeforeSubscribeTheNewVersion() throws APIManagerIntegrationTestException, IOException {
        // Invoke the (not yet subscribed) new version - expected to be rejected
        HttpResponse newVersionInvokeResponse =
                HttpRequestUtil.doGet(gatewayWebAppUrl + API_CONTEXT + "/" + API_VERSION_2_0_0 +
                        API_END_POINT_METHOD, requestHeaders);
        assertEquals(newVersionInvokeResponse.getResponseCode(), HTTP_RESPONSE_CODE_UNAUTHORIZED,
                "Response code mismatched when invoke new api before subscribe the new version");
        assertTrue(newVersionInvokeResponse.getData().contains(UNCLASSIFIED_AUTHENTICATION_FAILURE),
                "Response data mismatched when invoke new API version before subscribe the new version." +
                        " Response Data:" + newVersionInvokeResponse.getData());
    }

    @Test(groups = {"wso2.am"}, description = "Test subscribe the new API Version",
            dependsOnMethods = "testInvokeNewAPIBeforeSubscribeTheNewVersion")
    public void testSubscribeTheNewVersion() throws APIManagerIntegrationTestException {
        //subscribe new version
        HttpResponse httpResponseSubscribeNewVersion =
                subscribeToAPI(apiIdentifierAPI1Version2, APPLICATION_NAME, apiStoreClientUser1);
        assertEquals(httpResponseSubscribeNewVersion.getResponseCode(), HTTP_RESPONSE_CODE_OK,
                "Subscribe of New API version when re-subscription required not successful. Invalid Response Code " +
                        getAPIIdentifierString(apiIdentifierAPI1Version2));
        assertEquals(getValueFromJSON(httpResponseSubscribeNewVersion, "error"), "false",
                "Error in subscribe of New API version when re-subscription required not successful" +
                        getAPIIdentifierString(apiIdentifierAPI1Version2) + "Response Data:" +
                        httpResponseSubscribeNewVersion.getData());
    }

    @Test(groups = {"wso2.am"}, description = "Test invocation of new API version after the new version is subscribed.",
            dependsOnMethods = "testSubscribeTheNewVersion")
    public void testInvokeNewAPIAfterSubscribeTheNewVersion() throws APIManagerIntegrationTestException, IOException {
        //Invoke new version after subscription
        HttpResponse newVersionInvokeResponse = HttpRequestUtil.doGet(gatewayWebAppUrl + API_CONTEXT +
                "/" + API_VERSION_2_0_0 + API_END_POINT_METHOD, requestHeaders);
        assertEquals(newVersionInvokeResponse.getResponseCode(), HTTP_RESPONSE_CODE_OK, "Response code mismatched when" +
                " invoke new api after subscribe the new version");
        assertTrue(newVersionInvokeResponse.getData().contains(API_RESPONSE_DATA), "Response data mismatched when invoke" +
                " new API version after subscribe the new version. Response Data:" + newVersionInvokeResponse.getData());
    }

    /** Removes the test application and both API versions created by this class. */
    @AfterClass(alwaysRun = true)
    public void cleanUpArtifacts() throws APIManagerIntegrationTestException {
        apiStoreClientUser1.removeApplication(APPLICATION_NAME);
        deleteAPI(apiIdentifierAPI1Version1, apiPublisherClientUser1);
        deleteAPI(apiIdentifierAPI1Version2, apiPublisherClientUser1);
    }
}
| |
package galaxyspace.core.prefab.entities;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.List;
import asmodeuscore.api.entity.ICustomEngine;
import asmodeuscore.core.astronomy.SpaceData.Engine_Type;
import asmodeuscore.core.handler.capabilities.ACStatsCapability;
import asmodeuscore.core.handler.capabilities.IStatsCapability;
import galaxyspace.GalaxySpace;
import galaxyspace.core.GSBlocks;
import galaxyspace.core.GSItems;
import galaxyspace.core.util.GSUtils;
import galaxyspace.systems.SolarSystem.planets.overworld.blocks.BlockAdvancedLandingPadFull;
import galaxyspace.systems.SolarSystem.planets.overworld.items.armor.ItemSpaceSuit;
import galaxyspace.systems.SolarSystem.planets.overworld.tile.TileEntityAdvLandingPad;
import io.netty.buffer.ByteBuf;
import micdoodle8.mods.galacticraft.api.prefab.entity.EntityAutoRocket;
import micdoodle8.mods.galacticraft.api.prefab.entity.EntityTieredRocket;
import micdoodle8.mods.galacticraft.api.tile.IFuelDock;
import micdoodle8.mods.galacticraft.api.vector.Vector3;
import micdoodle8.mods.galacticraft.api.world.IExitHeight;
import micdoodle8.mods.galacticraft.api.world.IGalacticraftWorldProvider;
import micdoodle8.mods.galacticraft.api.world.IOrbitDimension;
import micdoodle8.mods.galacticraft.core.Constants;
import micdoodle8.mods.galacticraft.core.GalacticraftCore;
import micdoodle8.mods.galacticraft.core.entities.player.GCPlayerStats;
import micdoodle8.mods.galacticraft.core.event.EventLandingPadRemoval;
import micdoodle8.mods.galacticraft.core.network.PacketSimple;
import micdoodle8.mods.galacticraft.core.network.PacketSimple.EnumSimplePacket;
import micdoodle8.mods.galacticraft.core.util.ConfigManagerCore;
import micdoodle8.mods.galacticraft.core.util.GCLog;
import micdoodle8.mods.galacticraft.core.util.PlayerUtil;
import micdoodle8.mods.galacticraft.core.util.WorldUtil;
import net.minecraft.block.Block;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.init.SoundEvents;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.EnumHand;
import net.minecraft.util.NonNullList;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.MathHelper;
import net.minecraft.util.math.RayTraceResult;
import net.minecraft.world.World;
import net.minecraft.world.WorldProvider;
import net.minecraft.world.WorldServer;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.fluids.FluidStack;
import net.minecraftforge.fml.common.FMLCommonHandler;
import net.minecraftforge.fml.relauncher.Side;
public class EntityTier6Rocket extends EntityTieredRocket implements ICustomEngine
{
// Engine powering this rocket; defaults to the basic fuel engine.
private Engine_Type engine_type = Engine_Type.FUEL_ENGINE;
// NOTE(review): name looks like a typo for "waitForMultiPlayer"; its usage is not
// visible in this chunk - confirm before renaming.
private boolean waitFormMultiPlayer;
// Reflective handle to Galacticraft-Planets' "launch controller works in all
// dimensions" config field; stays null when the Mars module is not installed.
static Field marsConfigAllDimsAllowed;
static {
// Resolve the optional Mars config class reflectively so this mod does not take a
// hard compile-time dependency on Galacticraft-Planets.
try
{
Class<?> marsConfig = Class.forName("micdoodle8.mods.galacticraft.planets.mars.ConfigManagerMars");
marsConfigAllDimsAllowed = marsConfig.getField("launchControllerAllDims");
} catch (Exception ignore) {}
}
/**
 * World-load / client-spawn constructor: sets only the entity's bounding-box size.
 */
public EntityTier6Rocket(World par1World)
{
super(par1World);
this.setSize(2.2F, 11F);
}
/**
 * Placement constructor with position and rocket type; allocates the inventory.
 * NOTE(review): unlike the no-position constructor this does not call setSize -
 * confirm the superclass handles sizing on this path.
 */
public EntityTier6Rocket(World par1World, double par2, double par4, double par6, EnumRocketType rocketType)
{
super(par1World, par2, par4, par6);
this.rocketType = rocketType;
this.stacks = NonNullList.withSize(this.getSizeInventory(), ItemStack.EMPTY);
}
/**
 * Placement constructor that additionally selects the engine type.
 */
public EntityTier6Rocket(World par1World, double par2, double par4, double par6, EnumRocketType rocketType, Engine_Type engine)
{
super(par1World, par2, par4, par6);
this.rocketType = rocketType;
this.stacks = NonNullList.withSize(this.getSizeInventory(), ItemStack.EMPTY);
this.engine_type = engine;
}
/**
 * Placement constructor that reuses an existing inventory (e.g. when rebuilding the
 * rocket from an item). The "reversed" flag is accepted but not used here.
 */
public EntityTier6Rocket(World par1World, double par2, double par4, double par6, boolean reversed, EnumRocketType rocketType, NonNullList<ItemStack> inv)
{
this(par1World, par2, par4, par6, rocketType);
this.stacks = inv;
}
/**
 * Vertical offset applied to this entity's position when rendered/mounted.
 */
@Override
public double getYOffset()
{
    // kept as a float literal so the widened double value matches the original exactly
    final float offset = 1.4F;
    return offset;
}
/**
 * Middle-click pick-block result: the tier-6 rocket item whose damage value encodes
 * this rocket's type.
 */
@Override
public ItemStack getPickedResult(RayTraceResult target)
{
return new ItemStack(GSItems.ROCKET_TIER_6, 1, this.rocketType.getIndex());
}
/**
 * No extra synced data parameters beyond the tiered-rocket defaults.
 */
@Override
protected void entityInit()
{
super.entityInit();
}
/**
 * Keeps a mounted passenger positioned on the rocket each tick, using the
 * per-passenger seat height from getMountedYOffset(Entity).
 */
@Override
public void updatePassenger(Entity passenger)
{
if (this.isPassenger(passenger))
{
passenger.setPosition(this.posX, this.posY + this.getMountedYOffset(passenger) + passenger.getYOffset(), this.posZ);
}
}
/**
 * Seat height for the given passenger: with two or more riders the pilot (first
 * passenger) sits high on the rocket and everyone else sits low; a lone rider uses
 * the default seat height.
 */
public double getMountedYOffset(Entity passenger)
{
    final List<Entity> riders = this.getPassengers();
    if (riders.size() <= 1)
    {
        return 2.75D;
    }
    return riders.get(0).equals(passenger) ? 5.0D : 1.75D;
}
/**
 * Delegates ignition (including the launch-cooldown check) to the tiered-rocket base
 * class; no tier-6-specific behavior is added.
 */
@Override
public void igniteCheckingCooldown()
{
super.igniteCheckingCooldown();
}
/**
 * Legacy single-seat mount height; superseded by the per-passenger
 * getMountedYOffset(Entity) overload, which returns this same value for a lone rider.
 */
@Override
@Deprecated
public double getMountedYOffset()
{
return 2.75D;
}
/**
 * Pivot offset used when rendering/tilting the rocket model.
 */
@Override
public float getRotateOffset()
{
return 1.25F;
}
/**
 * Vertical offset applied while the rocket sits on a landing pad; this rocket rests
 * directly on the pad surface.
 */
@Override
public double getOnPadYOffset()
{
return 0.0D;
}
/**
 * Server-side launch hook: clears each riding player's stored launchpad stack, records
 * takeoff coordinates (unless launching from an orbit dimension), consumes the advanced
 * landing pad beneath the rocket and credits 25 single pads back to the primary
 * passenger for use on landing.
 */
@Override
public void onLaunch()
{
    super.onLaunch();

    if (!this.world.isRemote)
    {
        GCPlayerStats stats = null;

        if (!this.getPassengers().isEmpty())
        {
            for (Entity player : this.getPassengers())
            {
                if (player instanceof EntityPlayerMP)
                {
                    stats = GCPlayerStats.get(player);
                    stats.setLaunchpadStack(null);

                    // Orbit dimensions keep the previously stored return coordinates.
                    if (!(this.world.provider instanceof IOrbitDimension))
                    {
                        stats.setCoordsTeleportedFromX(player.posX);
                        stats.setCoordsTeleportedFromZ(player.posZ);
                    }
                }
            }

            // The landing-pad refund goes to the primary (first) passenger.
            Entity playerMain = this.getPassengers().get(0);
            if (playerMain instanceof EntityPlayerMP)
                stats = GCPlayerStats.get(playerMain);
        }

        int amountRemoved = 0;

        // Scan the volume around the rocket for the full advanced landing pad block.
        PADSEARCH:
        for (int x = MathHelper.floor(this.posX) - 2; x <= MathHelper.floor(this.posX) + 2; x++)
        {
            for (int y = MathHelper.floor(this.posY) - 3; y <= MathHelper.floor(this.posY) + 1; y++)
            {
                for (int z = MathHelper.floor(this.posZ) - 2; z <= MathHelper.floor(this.posZ) + 2; z++)
                {
                    BlockPos pos = new BlockPos(x, y, z);
                    final Block block = this.world.getBlockState(pos).getBlock();

                    // instanceof already rejects null, so the former explicit
                    // "block != null &&" check was redundant
                    if (block instanceof BlockAdvancedLandingPadFull)
                    {
                        if (amountRemoved < 25)
                        {
                            // Let other mods veto the pad removal.
                            EventLandingPadRemoval event = new EventLandingPadRemoval(this.world, pos);
                            MinecraftForge.EVENT_BUS.post(event);

                            if (event.allow)
                            {
                                this.world.setBlockToAir(pos);
                                // The full pad represents 25 single pads; removing it once
                                // accounts for the whole refund.
                                amountRemoved = 25;
                            }
                            break PADSEARCH;
                        }
                    }
                }
            }
        }

        //Set the player's launchpad item for return on landing - or null if launchpads not removed
        if (stats != null && amountRemoved == 25)
        {
            stats.setLaunchpadStack(new ItemStack(GSBlocks.ADVANCED_LANDING_PAD_SINGLE, 25, 0));
        }

        this.playSound(SoundEvents.ENTITY_ITEM_PICKUP, 0.2F, ((this.rand.nextFloat() - this.rand.nextFloat()) * 0.7F + 1.0F) * 2.0F);
    }
}
@Override
public void onUpdate()
{
    //TODO: HARDCODE
    // Multiplayer remount handling: after a dimension change the rocket waits
    // for its passengers to re-sync. Everything is frozen in place, then after
    // 40 ticks the riders are force-remounted server-side and descent begins.
    if (this.getWaitForMultiPlayer())
    {
        if (!this.getPassengers().isEmpty())
        {
            Entity passenger = this.getPassengers().get(0);
            Entity passenger2 = null;
            if(this.getPassengers().size() > 1) {
                passenger2 = this.getPassengers().get(1);
            }
            if (this.ticks >= 40)
            {
                if (!this.world.isRemote)
                {
                    // Dismount then immediately re-mount (force=true) so the
                    // clients re-sync riding state after the teleport.
                    this.removePassengers();
                    passenger.startRiding(this, true);
                    if(passenger2 != null)
                        passenger2.startRiding(this, true);
                    GCLog.debug("Remounting player in rocket.");
                }
                this.setWaitForPlayer(false);
                // Start the landing descent.
                this.motionY = -0.5D;
            }
            else
            {
                // Hold the rocket and all riders perfectly still until the wait elapses.
                this.motionX = this.motionY = this.motionZ = 0.0D;
                passenger.motionX = passenger.motionY = passenger.motionZ = 0;
                if(passenger2 != null)
                    passenger2.motionX = passenger2.motionY = passenger2.motionZ = 0;
            }
        }
        else
        {
            this.motionX = this.motionY = this.motionZ = 0.0D;
        }
    }
    super.onUpdate();
    // Particle-throttle divisor: while the countdown is still high, particles
    // are emitted only with probability 1/i; near launch i collapses to 1.
    int i;
    if (this.timeUntilLaunch >= 100)
    {
        i = Math.abs(this.timeUntilLaunch / 100);
    }
    else
    {
        i = 1;
    }
    // Once launched, spawn flame particles every tick; while merely ignited,
    // spawn them randomly (1 in i) — unless disabled by config or out of fuel.
    if ((this.getLaunched() || this.launchPhase == EnumLaunchPhase.IGNITED.ordinal() && this.rand.nextInt(i) == 0) && !ConfigManagerCore.disableSpaceshipParticles && this.hasValidFuel())
    {
        if (this.world.isRemote)
        {
            this.spawnParticles(this.getLaunched());
        }
    }
    // Client-side idle bubbles for the fuel engine while still on the pad.
    if (FMLCommonHandler.instance().getEffectiveSide() == Side.CLIENT && this.hasValidFuel() && this.rand.nextInt(4) == 0 && !this.getLaunched() && this.getEngine() == Engine_Type.FUEL_ENGINE)
    {
        //GalacticraftCore.proxy.spawnParticle("whiteSmokeLargeLaunched", new Vector3(this.posX + 0.4 - this.rand.nextDouble(), this.posY + 10, this.posZ + 0.4 - this.rand.nextDouble()), new Vector3(0.5D, -1.5D, 0.5D), new Object[] { });
        GalaxySpace.proxy.spawnParticle("waterbubbles", new Vector3(this.posX + 0.4D + rand.nextDouble() , this.posY - 0.4D + rand.nextDouble(), this.posZ + rand.nextDouble()), new Vector3(0.05D + 0.06D, -0.5D, 0.0D - 0.03D), new Object [] { 20, 5, false, new Vector3(1.0F), 1.0D } );
        GalaxySpace.proxy.spawnParticle("waterbubbles", new Vector3(this.posX - 0.8D + rand.nextDouble() , this.posY - 0.4D + rand.nextDouble(), this.posZ + rand.nextDouble()), new Vector3(0.05D - 0.25D, -0.5D, 0.0D - 0.03D), new Object [] { 20, 5, false, new Vector3(1.0F), 1.0D } );
        GalaxySpace.proxy.spawnParticle("waterbubbles", new Vector3(this.posX - 0.2D + rand.nextDouble() , this.posY - 0.4D + rand.nextDouble(), this.posZ + 0.4D + rand.nextDouble()), new Vector3(0.0D, -0.5D, 0.1D + 0.06D), new Object [] { 20, 5, false, new Vector3(1.0F), 1.0D } );
        GalaxySpace.proxy.spawnParticle("waterbubbles", new Vector3(this.posX - 0.2D + rand.nextDouble() , this.posY - 0.4D + rand.nextDouble(), this.posZ - 0.4D + rand.nextDouble()), new Vector3(0.0D, -0.5D, -0.1D - 0.06D), new Object [] { 20, 5, false, new Vector3(1.0F), 1.0D } );
    }
    // Flight physics while launched and fueled.
    if (this.launchPhase >= EnumLaunchPhase.LAUNCHED.ordinal() && this.hasValidFuel())
    {
        if (this.launchPhase == EnumLaunchPhase.LAUNCHED.ordinal())
        {
            // Thrust ramps up with time since launch, capped; sublight engine
            // multiplies both the ramp rate and the cap by 5.
            double d = this.timeSinceLaunch / 150;
            int mod_engine = 1;
            if(this.engine_type == Engine_Type.SUBLIGHT_ENGINE) mod_engine = 5;
            if (this.world.provider instanceof IGalacticraftWorldProvider && ((IGalacticraftWorldProvider) this.world.provider).hasNoAtmosphere())
            {
                // No atmosphere: slightly faster ramp and a higher cap.
                d = Math.min(d * 1.2 * mod_engine, 2 * mod_engine);
            }
            else
            {
                d = Math.min(d * mod_engine, 1.4 * mod_engine);
            }
            if (d != 0.0)
            {
                // Vertical speed follows the pitch; pitch 180 = straight up.
                this.motionY = -d * 2.5D * mod_engine * Math.cos((this.rotationPitch - 180) / Constants.RADIANS_TO_DEGREES);
            }
        }
        else
        {
            // Landing phase: gentle constant downward acceleration.
            this.motionY -= 0.008D;
        }
        // Fuel drain rate scales with the dimension's fuel-usage multiplier.
        double multiplier = 1.0D;
        if (this.world.provider instanceof IGalacticraftWorldProvider)
        {
            multiplier = ((IGalacticraftWorldProvider) this.world.provider).getFuelUsageMultiplier();
            if (multiplier <= 0)
            {
                multiplier = 1;
            }
        }
        // NOTE(review): if multiplier > 2, floor(2 * (1 / multiplier)) is 0 and
        // the modulo would throw ArithmeticException — presumably the provider
        // never returns such a value; confirm against IGalacticraftWorldProvider.
        if (this.timeSinceLaunch % MathHelper.floor(2 * (1 / multiplier)) == 0)
        {
            this.removeFuel(4);
            if (!this.hasValidFuel())
            {
                this.stopRocketSound();
            }
        }
    }
    else if (!this.hasValidFuel() && this.getLaunched() && !this.world.isRemote)
    {
        // Out of fuel mid-flight: decelerate/fall following a slow sine curve.
        // NOTE(review): the guard divides by 10 but the applied term divides by
        // 20 — looks intentional as a half-strength pull, but worth confirming.
        if (Math.abs(Math.sin(this.timeSinceLaunch / 1000)) / 10 != 0.0)
        {
            this.motionY -= Math.abs(Math.sin(this.timeSinceLaunch / 1000)) / 20;
        }
    }
    //GalaxySpace.debug(this.getEngine().getName());
}
@Override
public void onTeleport(EntityPlayerMP player)
{
    // Persists the rocket's state (inventory, type, fuel) onto the player's
    // Galacticraft stats so it can be restored after the dimension change.
    EntityPlayerMP playerBase = PlayerUtil.getPlayerBaseServerFromPlayer(player, false);
    if (playerBase != null)
    {
        GCPlayerStats stats = GCPlayerStats.get(playerBase);
        // Hand over the rocket inventory, or an empty 2-slot list if none.
        if (this.stacks == null || this.stacks.isEmpty())
        {
            stats.setRocketStacks(NonNullList.withSize(2, ItemStack.EMPTY));
        }
        else
        {
            stats.setRocketStacks(this.stacks);
        }
        stats.setRocketType(this.rocketType.getIndex());
        stats.setFuelLevel(this.fuelTank.getFluidAmount());
        if(engine_type == Engine_Type.SUBLIGHT_ENGINE)
        {
            // Sublight engine: encode the engine flag into the rocket item's NBT
            // and place that item into the last rocket-stack slot.
            ItemStack withengine = new ItemStack(GSItems.ROCKET_TIER_6, 1, this.rocketType.getIndex());
            if(!withengine.hasTagCompound()) withengine.setTagCompound(new NBTTagCompound());
            withengine.getTagCompound().setBoolean(Engine_Type.SUBLIGHT_ENGINE.getName(), true);
            withengine.getTagCompound().setInteger(ItemSpaceSuit.mod_count, 0);
            // The original code looped over every slot only to act on the last
            // one; set it directly (guarded against an empty list).
            int lastSlot = stats.getRocketStacks().size() - 1;
            if (lastSlot >= 0)
            {
                stats.getRocketStacks().set(lastSlot, withengine);
            }
        }
        else
            stats.setRocketItem(GSItems.ROCKET_TIER_6);
    }
}
@Override
public void onReachAtmosphere()
{
    // Called when the rocket reaches the top of the atmosphere.
    // Two regimes: launch-controlled flight (destinationFrequency set by a
    // Launch Controller) which relocates the rocket to its landing target,
    // and regular player flight which opens the celestial selection GUI.
    //Launch controlled
    if (this.destinationFrequency != -1)
    {
        if (this.world.isRemote)
        {
            //stop the sounds on the client - but do not reset, the rocket may start again
            this.stopRocketSound();
            return;
        }
        this.setTarget(true, this.destinationFrequency);
        if (this.targetVec != null)
        {
            if (this.targetDimension != this.world.provider.getDimension())
            {
                // Cross-dimension controlled flight.
                WorldProvider targetDim = WorldUtil.getProviderForDimensionServer(this.targetDimension);
                if (targetDim != null && targetDim.world instanceof WorldServer)
                {
                    // Overworld is always a permitted destination.
                    boolean dimensionAllowed = this.targetDimension == ConfigManagerCore.idDimensionOverworld;
                    if (targetDim instanceof IGalacticraftWorldProvider)
                    {
                        // Galacticraft dimensions gate on rocket tier.
                        if (((IGalacticraftWorldProvider) targetDim).canSpaceshipTierPass(this.getRocketTier()))
                            dimensionAllowed = true;
                        else
                            dimensionAllowed = false;
                    }
                    else
                    //No rocket flight to non-Galacticraft dimensions other than the Overworld allowed unless config
                    if ((this.targetDimension > 1 || this.targetDimension < -1) && marsConfigAllDimsAllowed != null)
                    {
                        try {
                            // marsConfigAllDimsAllowed is a reflected static field;
                            // read defensively in case the mod layout changes.
                            if (marsConfigAllDimsAllowed.getBoolean(null))
                            {
                                dimensionAllowed = true;
                            }
                        } catch (Exception e) { e.printStackTrace(); }
                    }
                    if (dimensionAllowed)
                    {
                        if (!this.getPassengers().isEmpty())
                        {
                            // Manned: transfer each player (rocket travels with them).
                            for (Entity passenger : this.getPassengers())
                            {
                                if (passenger instanceof EntityPlayerMP)
                                {
                                    WorldUtil.transferEntityToDimension(passenger, this.targetDimension, (WorldServer) targetDim.world, false, this);
                                }
                            }
                        }
                        else
                        {
                            // Unmanned: transfer the rocket itself and restart it
                            // in landing mode 800 blocks above the target pad.
                            Entity e = WorldUtil.transferEntityToDimension(this, this.targetDimension, (WorldServer)targetDim.world, false, null);
                            if (e instanceof EntityAutoRocket)
                            {
                                e.setPosition(this.targetVec.getX() + 0.5F, this.targetVec.getY() + 800, this.targetVec.getZ() + 0.5f);
                                ((EntityAutoRocket)e).setLaunchPhase(EnumLaunchPhase.LANDING);
                                ((EntityAutoRocket)e).setWaitForPlayer(false);
                            }
                            else
                            {
                                GCLog.info("Error: failed to recreate the unmanned rocket in landing mode on target planet.");
                                e.setDead();
                                this.setDead();
                            }
                        }
                        return;
                    }
                }
                //No destination world found - in this situation continue into regular take-off (as if Not launch controlled)
            }
            else
            {
                //Same dimension controlled rocket flight
                this.setPosition(this.targetVec.getX() + 0.5F, this.targetVec.getY() + 800, this.targetVec.getZ() + 0.5F);
                //Stop any lateral motion, otherwise it will update to an incorrect x,z position first tick after spawning above target
                this.motionX = this.motionZ = 0.0D;
                //Small upward motion initially, to keep clear of own flame trail from launch
                this.motionY = 0.1D;
                for (Entity passenger : this.getPassengers())
                {
                    if (passenger instanceof EntityPlayerMP)
                    {
                        // Move the player with the rocket and pause until the
                        // client catches up (see the wait logic in onUpdate).
                        WorldUtil.forceMoveEntityToPos(passenger, (WorldServer) this.world, new Vector3(this.targetVec.getX() + 0.5F, this.targetVec.getY() + 800, this.targetVec.getZ() + 0.5F), false);
                        this.setWaitForPlayer(true);
                        GCLog.debug("Rocket repositioned, waiting for player");
                    }
                }
                this.setLaunchPhase(EnumLaunchPhase.LANDING);
                //Do not destroy the rocket, we still need it!
                return;
            }
        }
        else
        {
            //Launch controlled launch but no valid target frequency = rocket loss [INVESTIGATE]
            GCLog.info("Error: the launch controlled rocket failed to find a valid landing spot when it reached space.");
            // Drain all fuel and drop the rocket back below the exit height.
            this.fuelTank.drain(Integer.MAX_VALUE, true);
            this.posY = Math.max(255, (this.world.provider instanceof IExitHeight ? ((IExitHeight) this.world.provider).getYCoordinateToTeleport() : 1200) - 200);
            return;
        }
    }
    //TODO 2-mounts players.
    //Not launch controlled
    if (!this.world.isRemote)
    {
        for (Entity e : this.getPassengers())
        {
            if (e instanceof EntityPlayerMP)
            {
                EntityPlayerMP player = (EntityPlayerMP) e;
                this.onTeleport(player);
                GCPlayerStats stats = GCPlayerStats.get(player);
                // Flag a second rider so they are treated as a passenger (not
                // pilot) on the destination side.
                if(this.getPassengers().size() > 1) {
                    Entity secondPassenger = this.getPassengers().get(1);
                    if (secondPassenger instanceof EntityPlayerMP) {
                        IStatsCapability acstats = ACStatsCapability.get((EntityPlayerMP)secondPassenger);
                        acstats.setSecondPassenger(true);
                    }
                }
                WorldUtil.toCelestialSelection(player, stats, this.getRocketTier());
            }
        }
        //Destroy any rocket which reached the top of the atmosphere and is not controlled by a Launch Controller
        this.setDead();
    }
    //Client side, non-launch controlled, do nothing - no reason why it can't continue flying until the GUICelestialSelection activates
}
@Override
public boolean processInitialInteract(EntityPlayer player, EnumHand hand)
{
    // Only react to main-hand interaction, and never once lift-off has begun.
    if (hand != EnumHand.MAIN_HAND || this.launchPhase >= EnumLaunchPhase.LAUNCHED.ordinal())
    {
        return false;
    }
    boolean alreadyRiding = !this.getPassengers().isEmpty() && this.getPassengers().contains(player);
    if (alreadyRiding)
    {
        // Interacting while riding dismounts the player.
        if (!this.world.isRemote)
        {
            GalacticraftCore.packetPipeline.sendTo(new PacketSimple(EnumSimplePacket.C_RESET_THIRD_PERSON, this.world.provider.getDimension(), new Object[] { }), (EntityPlayerMP) player);
            GCPlayerStats playerStats = GCPlayerStats.get(player);
            playerStats.setChatCooldown(0);
            // Temporarily halve the entity height so the dismounting player is
            // not dropped from the top of the rocket.
            float savedHeight = this.height;
            this.height = savedHeight / 2.0F;
            this.removePassengers();
            this.height = savedHeight;
        }
        return true;
    }
    if (player instanceof EntityPlayerMP)
    {
        // Interacting from outside mounts the player and shows the controls.
        if (!this.world.isRemote)
        {
            GalacticraftCore.packetPipeline.sendTo(new PacketSimple(EnumSimplePacket.C_DISPLAY_ROCKET_CONTROLS, this.world.provider.getDimension(), new Object[] { }), (EntityPlayerMP) player);
            GCPlayerStats playerStats = GCPlayerStats.get(player);
            playerStats.setChatCooldown(0);
            player.startRiding(this);
        }
        return true;
    }
    return false;
}
@Override
public boolean getWaitForPlayer()
{
    // Pure pass-through; overridden only to keep the accessor visible next to
    // the overridden setter below.
    return super.getWaitForPlayer();
}
@Override
public void setWaitForPlayer(boolean waitForPlayer)
{
    super.setWaitForPlayer(waitForPlayer);
    // Mirror the flag locally so getWaitForMultiPlayer() (used by onUpdate's
    // remount/freeze logic) tracks the same state.
    // NOTE(review): the field name "waitFormMultiPlayer" (declared elsewhere
    // in this class) looks like a typo for "waitForMultiPlayer".
    this.waitFormMultiPlayer = waitForPlayer;
}
// Returns the local multi-player wait flag set via setWaitForPlayer(boolean).
public boolean getWaitForMultiPlayer()
{
    return this.waitFormMultiPlayer;
}
@Override
protected boolean canFitPassenger(Entity passenger)
{
    // Vanilla mounting only fills the first seat; extra riders elsewhere in
    // this class are force-mounted via startRiding(..., true), which bypasses
    // this check.
    return this.getPassengers().isEmpty();
}
// Spawns the engine flame/exhaust particles each tick (client side).
// Computes the exhaust origin from the rocket's yaw/pitch, then emits a
// central flame plus four offset flames whose velocities fan outward.
protected void spawnParticles(boolean launched)
{
    if (!this.isDead)
    {
        // Direction vector of the exhaust, 3.2 blocks behind the nose,
        // derived from yaw/pitch (pitch 180 = pointing straight up).
        double sinPitch = Math.sin(this.rotationPitch / Constants.RADIANS_TO_DEGREES_D);
        double x1 = 3.2 * Math.cos(this.rotationYaw / Constants.RADIANS_TO_DEGREES_D) * sinPitch;
        double z1 = 3.2 * Math.sin(this.rotationYaw / Constants.RADIANS_TO_DEGREES_D) * sinPitch;
        double y1 = 3.2 * Math.cos((this.rotationPitch - 180) / Constants.RADIANS_TO_DEGREES_D);
        if (this.launchPhase == EnumLaunchPhase.LANDING.ordinal() && this.targetVec != null)
        {
            // While landing, scale the exhaust with the remaining height above
            // the target pad (clamped to at least 180 blocks).
            double modifier = this.posY - this.targetVec.getY();
            modifier = Math.max(modifier, 180.0);
            x1 *= modifier / 200.0D;
            y1 *= Math.min(modifier / 200.0D, 2.5D);
            z1 *= modifier / 200.0D;
        }
        // Particle spawn point, interpolated and nudged above the engine bell.
        final double y2 = this.prevPosY + (this.posY - this.prevPosY) + y1 - 0.75 * this.motionY - 0.3 + 2.5D;
        final double x2 = this.posX + x1 + this.motionX;
        final double z2 = this.posZ + z1 + this.motionZ;
        Vector3 motionVec = new Vector3(x1 + this.motionX, y1 + this.motionY, z1 + this.motionZ);
        // Four perpendicular offsets rotated around the yaw axis so the side
        // flames spread away from the exhaust centreline.
        // NOTE(review): assumes Vector3.rotate returns a rotated copy about
        // the given axis vector — confirm against the Vector3 implementation.
        Vector3 d1 = new Vector3(y1 * 0.1D, -x1 * 0.1D, z1 * 0.1D).rotate(315 - this.rotationYaw, motionVec);
        Vector3 d2 = new Vector3(x1 * 0.1D, -z1 * 0.1D, y1 * 0.1D).rotate(315 - this.rotationYaw, motionVec);
        Vector3 d3 = new Vector3(-y1 * 0.1D, x1 * 0.1D, z1 * 0.1D).rotate(315 - this.rotationYaw, motionVec);
        Vector3 d4 = new Vector3(x1 * 0.1D, z1 * 0.1D, -y1 * 0.1D).rotate(315 - this.rotationYaw, motionVec);
        //Vector3 d5 = new Vector3(y1 * 0.1D, -x1 * 0.1D, z1 * 0.1D).rotate(270 - this.rotationYaw, motionVec);
        //Vector3 d6 = new Vector3(x1 * 0.1D, -z1 * 0.1D, y1 * 0.1D).rotate(270 - this.rotationYaw, motionVec);
        //Vector3 d7 = new Vector3(-y1 * 0.1D, x1 * 0.1D, z1 * 0.1D).rotate(270 - this.rotationYaw, motionVec);
        //Vector3 d8 = new Vector3(x1 * 0.1D, z1 * 0.1D, -y1 * 0.1D).rotate(270 - this.rotationYaw, motionVec);
        Vector3 mv1 = motionVec.clone().translate(d1);
        Vector3 mv2 = motionVec.clone().translate(d2);
        Vector3 mv3 = motionVec.clone().translate(d3);
        Vector3 mv4 = motionVec.clone().translate(d4);
        //T3 - Four flameballs which spread
        makeFlame(x2 + d1.x, y2 + d1.y, z2 + d1.z, mv1, this.getLaunched());
        makeFlame(x2 + d2.x, y2 + d2.y, z2 + d2.z, mv2, this.getLaunched());
        makeFlame(x2 + d3.x, y2 + d3.y, z2 + d3.z, mv3, this.getLaunched());
        makeFlame(x2 + d4.x, y2 + d4.y, z2 + d4.z, mv4, this.getLaunched());
        makeFlame(x2, y2, z2, new Vector3(x1, y1, z1), this.getLaunched());
    }
}
// Emits one cluster of flame particles at (x2, y2, z2) moving along motionVec.
// When launched: a 9-particle "launchFlameLaunched" ring whose look depends on
// the engine type (fuel = default orange, ion = cyan tint). When idle (not
// launched): a smaller every-other-tick "launchFlameIdle" ring, fuel engine only.
// The first rider (if a living entity) is passed so the particle renderer can
// avoid obscuring them.
private void makeFlame(double x2, double y2, double z2, Vector3 motionVec, boolean getLaunched)
{
    EntityLivingBase riddenByEntity = this.getPassengers().isEmpty() || !(this.getPassengers().get(0) instanceof EntityLivingBase) ? null : (EntityLivingBase) this.getPassengers().get(0);
    if (getLaunched)
    {
        if(this.engine_type == Engine_Type.FUEL_ENGINE) {
            // Four jittered corner flames, centre, and four axis-aligned flames.
            GalacticraftCore.proxy.spawnParticle("launchFlameLaunched", new Vector3(x2 + 0.4 - this.rand.nextDouble() / 10, y2, z2 + 0.4 - this.rand.nextDouble() / 10), motionVec, new Object[] { riddenByEntity });
            GalacticraftCore.proxy.spawnParticle("launchFlameLaunched", new Vector3(x2 - 0.4 + this.rand.nextDouble() / 10, y2, z2 + 0.4 - this.rand.nextDouble() / 10), motionVec, new Object[] { riddenByEntity });
            GalacticraftCore.proxy.spawnParticle("launchFlameLaunched", new Vector3(x2 - 0.4 + this.rand.nextDouble() / 10, y2, z2 - 0.4 + this.rand.nextDouble() / 10), motionVec, new Object[] { riddenByEntity });
            GalacticraftCore.proxy.spawnParticle("launchFlameLaunched", new Vector3(x2 + 0.4 - this.rand.nextDouble() / 10, y2, z2 - 0.4 + this.rand.nextDouble() / 10), motionVec, new Object[] { riddenByEntity });
            GalacticraftCore.proxy.spawnParticle("launchFlameLaunched", new Vector3(x2, y2, z2), motionVec, new Object[] { riddenByEntity });
            GalacticraftCore.proxy.spawnParticle("launchFlameLaunched", new Vector3(x2 + 0.4, y2, z2), motionVec, new Object[] { riddenByEntity });
            GalacticraftCore.proxy.spawnParticle("launchFlameLaunched", new Vector3(x2 - 0.4, y2, z2), motionVec, new Object[] { riddenByEntity });
            GalacticraftCore.proxy.spawnParticle("launchFlameLaunched", new Vector3(x2, y2, z2 + 0.4D), motionVec, new Object[] { riddenByEntity });
            GalacticraftCore.proxy.spawnParticle("launchFlameLaunched", new Vector3(x2, y2, z2 - 0.4D), motionVec, new Object[] { riddenByEntity });
        }
        if(this.engine_type == Engine_Type.ION_ENGINE) {
            // Same layout, tinted via the extra Vector3(0, 252, 251) RGB argument.
            // NOTE(review): the first two calls use near-identical jittered
            // positions, so the ion ring effectively spawns a doubled corner flame.
            GalaxySpace.proxy.spawnParticle("launchFlameLaunched",new Vector3(x2 + 0.4 - this.rand.nextDouble() / 10, y2, z2 + 0.4 - this.rand.nextDouble() / 10), motionVec, new Object [] { riddenByEntity, new Vector3(0.0F, 252.0F, 251.0F) } );
            GalaxySpace.proxy.spawnParticle("launchFlameLaunched", new Vector3(x2 + 0.4 - this.rand.nextDouble() / 10, y2, z2 + 0.4 - this.rand.nextDouble() / 10), motionVec, new Object[] { riddenByEntity, new Vector3(0.0F, 252.0F, 251.0F) });
            GalaxySpace.proxy.spawnParticle("launchFlameLaunched", new Vector3(x2 - 0.4 + this.rand.nextDouble() / 10, y2, z2 + 0.4 - this.rand.nextDouble() / 10), motionVec, new Object[] { riddenByEntity, new Vector3(0.0F, 252.0F, 251.0F) });
            GalaxySpace.proxy.spawnParticle("launchFlameLaunched", new Vector3(x2 - 0.4 + this.rand.nextDouble() / 10, y2, z2 - 0.4 + this.rand.nextDouble() / 10), motionVec, new Object[] { riddenByEntity, new Vector3(0.0F, 252.0F, 251.0F) });
            GalaxySpace.proxy.spawnParticle("launchFlameLaunched", new Vector3(x2 + 0.4 - this.rand.nextDouble() / 10, y2, z2 - 0.4 + this.rand.nextDouble() / 10), motionVec, new Object[] { riddenByEntity, new Vector3(0.0F, 252.0F, 251.0F) });
            GalaxySpace.proxy.spawnParticle("launchFlameLaunched", new Vector3(x2, y2, z2), motionVec, new Object[] { riddenByEntity, new Vector3(0.0F, 252.0F, 251.0F) });
            GalaxySpace.proxy.spawnParticle("launchFlameLaunched", new Vector3(x2 + 0.4, y2, z2), motionVec, new Object[] { riddenByEntity, new Vector3(0.0F, 252.0F, 251.0F) });
            GalaxySpace.proxy.spawnParticle("launchFlameLaunched", new Vector3(x2 - 0.4, y2, z2), motionVec, new Object[] { riddenByEntity, new Vector3(0.0F, 252.0F, 251.0F) });
            GalaxySpace.proxy.spawnParticle("launchFlameLaunched", new Vector3(x2, y2, z2 + 0.4D), motionVec, new Object[] { riddenByEntity, new Vector3(0.0F, 252.0F, 251.0F) });
            GalaxySpace.proxy.spawnParticle("launchFlameLaunched", new Vector3(x2, y2, z2 - 0.4D), motionVec, new Object[] { riddenByEntity, new Vector3(0.0F, 252.0F, 251.0F) });
        }
        return;
    }
    // Idle flames: only every other tick, raised 1.6 blocks.
    if (this.ticksExisted % 2 == 0) return;
    y2 += 1.6D;
    double x1 = motionVec.x;
    double y1 = motionVec.y;
    double z1 = motionVec.z;
    if(this.engine_type == Engine_Type.FUEL_ENGINE)
    {
        GalacticraftCore.proxy.spawnParticle("launchFlameIdle", new Vector3(x2 + 0.4 - this.rand.nextDouble() / 10, y2, z2 + 0.4 - this.rand.nextDouble() / 10), new Vector3(x1 + 0.1D + this.rand.nextDouble() / 10, y1 - 0.3D, z1 + 0.1D + this.rand.nextDouble() / 10), new Object[] { riddenByEntity });
        GalacticraftCore.proxy.spawnParticle("launchFlameIdle", new Vector3(x2 - 0.4 + this.rand.nextDouble() / 10, y2, z2 + 0.4 - this.rand.nextDouble() / 10), new Vector3(x1 - 0.1D - this.rand.nextDouble() / 10, y1 - 0.3D, z1 + 0.1D + this.rand.nextDouble() / 10), new Object[] { riddenByEntity });
        GalacticraftCore.proxy.spawnParticle("launchFlameIdle", new Vector3(x2 - 0.4 + this.rand.nextDouble() / 10, y2, z2 - 0.4 + this.rand.nextDouble() / 10), new Vector3(x1 - 0.1D - this.rand.nextDouble() / 10, y1 - 0.3D, z1 - 0.1D - this.rand.nextDouble() / 10), new Object[] { riddenByEntity });
        GalacticraftCore.proxy.spawnParticle("launchFlameIdle", new Vector3(x2 + 0.4 - this.rand.nextDouble() / 10, y2, z2 - 0.4 + this.rand.nextDouble() / 10), new Vector3(x1 + 0.1D + this.rand.nextDouble() / 10, y1 - 0.3D, z1 - 0.1D - this.rand.nextDouble() / 10), new Object[] { riddenByEntity });
        GalacticraftCore.proxy.spawnParticle("launchFlameIdle", new Vector3(x2 + 0.4, y2, z2), new Vector3(x1 + 0.3D, y1 - 0.3D, z1), new Object[] { riddenByEntity });
        GalacticraftCore.proxy.spawnParticle("launchFlameIdle", new Vector3(x2 - 0.4, y2, z2), new Vector3(x1 - 0.3D, y1 - 0.3D, z1), new Object[] { riddenByEntity });
        GalacticraftCore.proxy.spawnParticle("launchFlameIdle", new Vector3(x2, y2, z2 + 0.4D), new Vector3(x1, y1 - 0.3D, z1 + 0.3D), new Object[] { riddenByEntity });
        GalacticraftCore.proxy.spawnParticle("launchFlameIdle", new Vector3(x2, y2, z2 - 0.4D), new Vector3(x1, y1 - 0.3D, z1 - 0.3D), new Object[] { riddenByEntity });
    }
}
@Override
public boolean isUsableByPlayer(EntityPlayer par1EntityPlayer)
{
    // Usable while the rocket still exists and the player is within 8 blocks.
    if (this.isDead)
    {
        return false;
    }
    return par1EntityPlayer.getDistanceSq(this) <= 64.0D;
}
@Override
public void getNetworkedData(ArrayList<Object> list)
{
    // No tier-specific payload; the superclass serializes everything needed.
    super.getNetworkedData(list);
}
@Override
public void decodePacketdata(ByteBuf buffer)
{
    // Counterpart of getNetworkedData: nothing extra to read at this tier.
    super.decodePacketdata(buffer);
}
@Override
protected void writeEntityToNBT(NBTTagCompound nbt)
{
    super.writeEntityToNBT(nbt);
    // Persist the fitted engine and the multiplayer remount-wait flag.
    nbt.setInteger("engine_type", engine_type.getID());
    nbt.setBoolean("WaitingForMultiPlayer", this.getWaitForMultiPlayer());
}
@Override
protected void readEntityFromNBT(NBTTagCompound nbt)
{
    super.readEntityFromNBT(nbt);
    // Restore the engine type and re-arm the remount-wait state (the setter
    // also updates the superclass flag).
    this.engine_type = Engine_Type.byID(nbt.getInteger("engine_type"));
    this.setWaitForPlayer(nbt.getBoolean("WaitingForMultiPlayer"));
}
@Override
public int getRocketTier()
{
    // Tier 6 rocket; gates which dimensions it may travel to
    // (see canSpaceshipTierPass in onReachAtmosphere).
    return 6;
}
@Override
public int getFuelTankCapacity()
{
    // Fuel tank capacity for this tier.
    return 4500;
}
@Override
public int getPreLaunchWait()
{
    // Ticks between ignition and lift-off (400 ticks = 20 seconds).
    return 400;
}
@Override
public float getCameraZoom()
{
    // Third-person camera distance while riding this rocket.
    return 15.0F;
}
@Override
public boolean defaultThirdPerson()
{
    // Force third-person view when mounting, so the rocket is visible.
    return true;
}
@Override
public List<ItemStack> getItemsDropped(List<ItemStack> droppedItems)
{
    // Let the superclass contribute its drops first, then add the rocket item
    // itself with the remaining fuel amount recorded in its NBT.
    super.getItemsDropped(droppedItems);
    ItemStack rocketStack = new ItemStack(GSItems.ROCKET_TIER_6, 1, this.rocketType.getIndex());
    NBTTagCompound tag = new NBTTagCompound();
    tag.setInteger("RocketFuel", this.fuelTank.getFluidAmount());
    rocketStack.setTagCompound(tag);
    droppedItems.add(rocketStack);
    return droppedItems;
}
@Override
public float getRenderOffsetY()
{
    // Vertical render offset so the model sits correctly on the pad.
    return -0.2F;
}
@Override
public boolean isDockValid(IFuelDock dock)
{
    // This rocket can only dock on the advanced landing pad.
    return dock instanceof TileEntityAdvLandingPad;
}
@Override
public int addFuel(FluidStack liquid, boolean doFill)
{
    // Delegate filling to the GalaxySpace helper, which accepts GC-compatible
    // fuels; returns the amount accepted (simulated when doFill is false).
    return GSUtils.fillWithGCFuel(this.fuelTank, liquid, doFill, this);
}
/*
@Override
public FluidStack removeFuel(int amount)
{
return this.fuelTank.drain(amount * ConfigManagerCore.rocketFuelFactor, true);
}*/
/*
@Override
public Engine_Type getEngine() {
return Engine_Type.BLACKHOLE_ENGINE;
}*/
// Sets the engine type fitted to this rocket (affects thrust, particles,
// and the item produced on teleport/drop).
public void setEngine(Engine_Type type)
{
    this.engine_type = type;
}
@Override
public Engine_Type getEngine() {
    // Currently fitted engine type.
    return this.engine_type;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.cos;
import java.io.IOException;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.pdfbox.util.Charsets;
/**
* A PDF Name object.
*
* @author Ben Litchfield
*/
public final class COSName extends COSBase implements Comparable<COSName>
{
// using ConcurrentHashMap because this can be accessed by multiple threads
private static Map<String, COSName> nameMap = new ConcurrentHashMap<String, COSName>(8192);
// all common COSName values are stored in this HashMap
// they are already defined as static constants and don't need to be synchronized
private static Map<String, COSName> commonNameMap = new HashMap<String, COSName>();
//
// IMPORTANT: this list is *alphabetized* and does not need any JavaDoc
//
// A
public static final COSName A = new COSName("A");
public static final COSName AA = new COSName("AA");
public static final COSName ACRO_FORM = new COSName("AcroForm");
public static final COSName ACTUAL_TEXT = new COSName("ActualText");
public static final COSName ADBE_PKCS7_DETACHED = new COSName("adbe.pkcs7.detached");
public static final COSName ADBE_PKCS7_SHA1 = new COSName("adbe.pkcs7.sha1");
public static final COSName ADBE_X509_RSA_SHA1 = new COSName("adbe.x509.rsa_sha1");
public static final COSName ADOBE_PPKLITE = new COSName("Adobe.PPKLite");
public static final COSName AESV3 = new COSName("AESV3");
public static final COSName AFTER = new COSName("After");
public static final COSName AIS = new COSName("AIS");
public static final COSName ALT = new COSName("Alt");
public static final COSName ALPHA = new COSName("Alpha");
public static final COSName ALTERNATE = new COSName("Alternate");
public static final COSName ANNOT = new COSName("Annot");
public static final COSName ANNOTS = new COSName("Annots");
public static final COSName ANTI_ALIAS = new COSName("AntiAlias");
public static final COSName AP = new COSName("AP");
public static final COSName AP_REF = new COSName("APRef");
public static final COSName APP = new COSName("App");
public static final COSName ART_BOX = new COSName("ArtBox");
public static final COSName ARTIFACT = new COSName("Artifact");
public static final COSName AS = new COSName("AS");
public static final COSName ASCENT = new COSName("Ascent");
public static final COSName ASCII_HEX_DECODE = new COSName("ASCIIHexDecode");
public static final COSName ASCII_HEX_DECODE_ABBREVIATION = new COSName("AHx");
public static final COSName ASCII85_DECODE = new COSName("ASCII85Decode");
public static final COSName ASCII85_DECODE_ABBREVIATION = new COSName("A85");
public static final COSName ATTACHED = new COSName("Attached");
public static final COSName AUTHOR = new COSName("Author");
public static final COSName AVG_WIDTH = new COSName("AvgWidth");
// B
public static final COSName B = new COSName("B");
public static final COSName BACKGROUND = new COSName("Background");
public static final COSName BASE_ENCODING = new COSName("BaseEncoding");
public static final COSName BASE_FONT = new COSName("BaseFont");
public static final COSName BASE_STATE = new COSName("BaseState");
public static final COSName BBOX = new COSName("BBox");
public static final COSName BC = new COSName("BC");
public static final COSName BE = new COSName("BE");
public static final COSName BEFORE = new COSName("Before");
public static final COSName BG = new COSName("BG");
public static final COSName BITS_PER_COMPONENT = new COSName("BitsPerComponent");
public static final COSName BITS_PER_COORDINATE = new COSName("BitsPerCoordinate");
public static final COSName BITS_PER_FLAG = new COSName("BitsPerFlag");
public static final COSName BITS_PER_SAMPLE = new COSName("BitsPerSample");
public static final COSName BLACK_IS_1 = new COSName("BlackIs1");
public static final COSName BLACK_POINT = new COSName("BlackPoint");
public static final COSName BLEED_BOX = new COSName("BleedBox");
public static final COSName BM = new COSName("BM");
public static final COSName BOUNDS = new COSName("Bounds");
public static final COSName BPC = new COSName("BPC");
public static final COSName BS = new COSName("BS");
//** Acro form field type for button fields.
public static final COSName BTN = new COSName("Btn");
public static final COSName BYTERANGE = new COSName("ByteRange");
// C
public static final COSName C = new COSName("C");
public static final COSName C0 = new COSName("C0");
public static final COSName C1 = new COSName("C1");
public static final COSName CA = new COSName("CA");
public static final COSName CA_NS = new COSName("ca");
public static final COSName CALGRAY = new COSName("CalGray");
public static final COSName CALRGB = new COSName("CalRGB");
public static final COSName CAP = new COSName("Cap");
public static final COSName CAP_HEIGHT = new COSName("CapHeight");
public static final COSName CATALOG = new COSName("Catalog");
public static final COSName CCITTFAX_DECODE = new COSName("CCITTFaxDecode");
public static final COSName CCITTFAX_DECODE_ABBREVIATION = new COSName("CCF");
public static final COSName CENTER_WINDOW = new COSName("CenterWindow");
public static final COSName CF = new COSName("CF");
public static final COSName CFM = new COSName("CFM");
//** Acro form field type for choice fields.
public static final COSName CH = new COSName("Ch");
public static final COSName CHAR_PROCS = new COSName("CharProcs");
public static final COSName CHAR_SET = new COSName("CharSet");
public static final COSName CICI_SIGNIT = new COSName("CICI.SignIt");
public static final COSName CID_FONT_TYPE0 = new COSName("CIDFontType0");
public static final COSName CID_FONT_TYPE2 = new COSName("CIDFontType2");
public static final COSName CID_TO_GID_MAP = new COSName("CIDToGIDMap");
public static final COSName CID_SET = new COSName("CIDSet");
public static final COSName CIDSYSTEMINFO = new COSName("CIDSystemInfo");
public static final COSName CLR_F = new COSName("ClrF");
public static final COSName CLR_FF = new COSName("ClrFf");
public static final COSName CMAP = new COSName("CMap");
public static final COSName CMAPNAME = new COSName("CMapName");
public static final COSName CMYK = new COSName("CMYK");
public static final COSName CO = new COSName("CO");
public static final COSName COLOR_BURN = new COSName("ColorBurn");
public static final COSName COLOR_DODGE = new COSName("ColorDodge");
public static final COSName COLORANTS = new COSName("Colorants");
public static final COSName COLORS = new COSName("Colors");
public static final COSName COLORSPACE = new COSName("ColorSpace");
public static final COSName COLUMNS = new COSName("Columns");
public static final COSName COMPATIBLE = new COSName("Compatible");
public static final COSName COMPONENTS = new COSName("Components");
public static final COSName CONTACT_INFO = new COSName("ContactInfo");
public static final COSName CONTENTS = new COSName("Contents");
public static final COSName COORDS = new COSName("Coords");
public static final COSName COUNT = new COSName("Count");
public static final COSName CP = new COSName("CP");
public static final COSName CREATION_DATE = new COSName("CreationDate");
public static final COSName CREATOR = new COSName("Creator");
public static final COSName CROP_BOX = new COSName("CropBox");
public static final COSName CRYPT = new COSName("Crypt");
public static final COSName CS = new COSName("CS");
// D
public static final COSName D = new COSName("D");
public static final COSName DA = new COSName("DA");
public static final COSName DARKEN = new COSName("Darken");
public static final COSName DATE = new COSName("Date");
public static final COSName DCT_DECODE = new COSName("DCTDecode");
public static final COSName DCT_DECODE_ABBREVIATION = new COSName("DCT");
public static final COSName DECODE = new COSName("Decode");
public static final COSName DECODE_PARMS = new COSName("DecodeParms");
public static final COSName DEFAULT = new COSName("default");
public static final COSName DEFAULT_CMYK = new COSName("DefaultCMYK");
public static final COSName DEFAULT_GRAY = new COSName("DefaultGray");
public static final COSName DEFAULT_RGB = new COSName("DefaultRGB");
public static final COSName DESC = new COSName("Desc");
public static final COSName DESCENDANT_FONTS = new COSName("DescendantFonts");
public static final COSName DESCENT = new COSName("Descent");
public static final COSName DEST = new COSName("Dest");
public static final COSName DEST_OUTPUT_PROFILE = new COSName("DestOutputProfile");
public static final COSName DESTS = new COSName("Dests");
public static final COSName DEVICECMYK = new COSName("DeviceCMYK");
public static final COSName DEVICEGRAY = new COSName("DeviceGray");
public static final COSName DEVICEN = new COSName("DeviceN");
public static final COSName DEVICERGB = new COSName("DeviceRGB");
public static final COSName DI = new COSName("Di");
public static final COSName DIFFERENCE = new COSName("Difference");
public static final COSName DIFFERENCES = new COSName("Differences");
public static final COSName DIGEST_METHOD = new COSName("DigestMethod");
public static final COSName DIGEST_RIPEMD160 = new COSName("RIPEMD160");
public static final COSName DIGEST_SHA1 = new COSName("SHA1");
public static final COSName DIGEST_SHA256 = new COSName("SHA256");
public static final COSName DIGEST_SHA384 = new COSName("SHA384");
public static final COSName DIGEST_SHA512 = new COSName("SHA512");
public static final COSName DIRECTION = new COSName("Direction");
public static final COSName DISPLAY_DOC_TITLE = new COSName("DisplayDocTitle");
public static final COSName DL = new COSName("DL");
public static final COSName DM = new COSName("Dm");
public static final COSName DOC = new COSName("Doc");
public static final COSName DOC_CHECKSUM = new COSName("DocChecksum");
public static final COSName DOC_TIME_STAMP = new COSName("DocTimeStamp");
public static final COSName DOMAIN = new COSName("Domain");
public static final COSName DOS = new COSName("DOS");
public static final COSName DP = new COSName("DP");
public static final COSName DR = new COSName("DR");
public static final COSName DS = new COSName("DS");
public static final COSName DUPLEX = new COSName("Duplex");
public static final COSName DUR = new COSName("Dur");
public static final COSName DV = new COSName("DV");
public static final COSName DW = new COSName("DW");
public static final COSName DW2 = new COSName("DW2");
// E
public static final COSName E = new COSName("E");
public static final COSName EARLY_CHANGE = new COSName("EarlyChange");
public static final COSName EF = new COSName("EF");
public static final COSName EMBEDDED_FDFS = new COSName("EmbeddedFDFs");
public static final COSName EMBEDDED_FILES = new COSName("EmbeddedFiles");
public static final COSName EMPTY = new COSName("");
public static final COSName ENCODE = new COSName("Encode");
public static final COSName ENCODED_BYTE_ALIGN = new COSName("EncodedByteAlign");
public static final COSName ENCODING = new COSName("Encoding");
public static final COSName ENCODING_90MS_RKSJ_H = new COSName("90ms-RKSJ-H");
public static final COSName ENCODING_90MS_RKSJ_V = new COSName("90ms-RKSJ-V");
public static final COSName ENCODING_ETEN_B5_H = new COSName("ETen-B5-H");
public static final COSName ENCODING_ETEN_B5_V = new COSName("ETen-B5-V");
public static final COSName ENCRYPT = new COSName("Encrypt");
public static final COSName ENCRYPT_META_DATA = new COSName("EncryptMetadata");
public static final COSName END_OF_LINE = new COSName("EndOfLine");
public static final COSName ENTRUST_PPKEF = new COSName("Entrust.PPKEF");
public static final COSName EXCLUSION = new COSName("Exclusion");
public static final COSName EXT_G_STATE = new COSName("ExtGState");
public static final COSName EXTEND = new COSName("Extend");
public static final COSName EXTENDS = new COSName("Extends");
// F
public static final COSName F = new COSName("F");
public static final COSName F_DECODE_PARMS = new COSName("FDecodeParms");
public static final COSName F_FILTER = new COSName("FFilter");
public static final COSName FB = new COSName("FB");
public static final COSName FDF = new COSName("FDF");
public static final COSName FF = new COSName("Ff");
public static final COSName FIELDS = new COSName("Fields");
public static final COSName FILESPEC = new COSName("Filespec");
public static final COSName FILTER = new COSName("Filter");
public static final COSName FIRST = new COSName("First");
public static final COSName FIRST_CHAR = new COSName("FirstChar");
public static final COSName FIT_WINDOW = new COSName("FitWindow");
public static final COSName FL = new COSName("FL");
public static final COSName FLAGS = new COSName("Flags");
public static final COSName FLATE_DECODE = new COSName("FlateDecode");
public static final COSName FLATE_DECODE_ABBREVIATION = new COSName("Fl");
public static final COSName FONT = new COSName("Font");
public static final COSName FONT_BBOX = new COSName("FontBBox");
public static final COSName FONT_DESC = new COSName("FontDescriptor");
public static final COSName FONT_FAMILY = new COSName("FontFamily");
public static final COSName FONT_FILE = new COSName("FontFile");
public static final COSName FONT_FILE2 = new COSName("FontFile2");
public static final COSName FONT_FILE3 = new COSName("FontFile3");
public static final COSName FONT_MATRIX = new COSName("FontMatrix");
public static final COSName FONT_NAME = new COSName("FontName");
public static final COSName FONT_STRETCH = new COSName("FontStretch");
public static final COSName FONT_WEIGHT = new COSName("FontWeight");
public static final COSName FORM = new COSName("Form");
public static final COSName FORMTYPE = new COSName("FormType");
public static final COSName FRM = new COSName("FRM");
public static final COSName FT = new COSName("FT");
public static final COSName FUNCTION = new COSName("Function");
public static final COSName FUNCTION_TYPE = new COSName("FunctionType");
public static final COSName FUNCTIONS = new COSName("Functions");
// G
public static final COSName G = new COSName("G");
public static final COSName GAMMA = new COSName("Gamma");
public static final COSName GROUP = new COSName("Group");
public static final COSName GTS_PDFA1 = new COSName("GTS_PDFA1");
// H
public static final COSName H = new COSName("H");
public static final COSName HARD_LIGHT = new COSName("HardLight");
public static final COSName HEIGHT = new COSName("Height");
public static final COSName HIDE_MENUBAR = new COSName("HideMenubar");
public static final COSName HIDE_TOOLBAR = new COSName("HideToolbar");
public static final COSName HIDE_WINDOWUI = new COSName("HideWindowUI");
// I
public static final COSName I = new COSName("I");
public static final COSName IC = new COSName("IC");
public static final COSName ICCBASED = new COSName("ICCBased");
public static final COSName ID = new COSName("ID");
public static final COSName ID_TREE = new COSName("IDTree");
public static final COSName IDENTITY = new COSName("Identity");
public static final COSName IDENTITY_H = new COSName("Identity-H");
public static final COSName IF = new COSName("IF");
public static final COSName IM = new COSName("IM");
public static final COSName IMAGE = new COSName("Image");
public static final COSName IMAGE_MASK = new COSName("ImageMask");
public static final COSName INDEX = new COSName("Index");
public static final COSName INDEXED = new COSName("Indexed");
public static final COSName INFO = new COSName("Info");
public static final COSName INKLIST = new COSName("InkList");
public static final COSName INTERPOLATE = new COSName("Interpolate");
public static final COSName IT = new COSName("IT");
public static final COSName ITALIC_ANGLE = new COSName("ItalicAngle");
// J
public static final COSName JAVA_SCRIPT = new COSName("JavaScript");
public static final COSName JBIG2_DECODE = new COSName("JBIG2Decode");
public static final COSName JBIG2_GLOBALS = new COSName("JBIG2Globals");
public static final COSName JPX_DECODE = new COSName("JPXDecode");
public static final COSName JS = new COSName("JS");
// K
public static final COSName K = new COSName("K");
public static final COSName KEYWORDS = new COSName("Keywords");
public static final COSName KIDS = new COSName("Kids");
// L
public static final COSName L = new COSName("L");
public static final COSName LAB = new COSName("Lab");
public static final COSName LANG = new COSName("Lang");
public static final COSName LAST = new COSName("Last");
public static final COSName LAST_CHAR = new COSName("LastChar");
public static final COSName LAST_MODIFIED = new COSName("LastModified");
public static final COSName LC = new COSName("LC");
public static final COSName LE = new COSName("LE");
public static final COSName LEADING = new COSName("Leading");
public static final COSName LEGAL_ATTESTATION = new COSName("LegalAttestation");
public static final COSName LENGTH = new COSName("Length");
public static final COSName LENGTH1 = new COSName("Length1");
public static final COSName LENGTH2 = new COSName("Length2");
public static final COSName LIGHTEN = new COSName("Lighten");
public static final COSName LIMITS = new COSName("Limits");
public static final COSName LJ = new COSName("LJ");
public static final COSName LL = new COSName("LL");
public static final COSName LLE = new COSName("LLE");
public static final COSName LLO = new COSName("LLO");
public static final COSName LOCATION = new COSName("Location");
public static final COSName LUMINOSITY = new COSName("Luminosity");
public static final COSName LW = new COSName("LW");
public static final COSName LZW_DECODE = new COSName("LZWDecode");
public static final COSName LZW_DECODE_ABBREVIATION = new COSName("LZW");
// M
public static final COSName M = new COSName("M");
public static final COSName MAC = new COSName("Mac");
public static final COSName MAC_ROMAN_ENCODING = new COSName("MacRomanEncoding");
public static final COSName MARK_INFO = new COSName("MarkInfo");
public static final COSName MASK = new COSName("Mask");
public static final COSName MATRIX = new COSName("Matrix");
public static final COSName MAX_LEN = new COSName("MaxLen");
public static final COSName MAX_WIDTH = new COSName("MaxWidth");
public static final COSName MCID = new COSName("MCID");
public static final COSName MDP = new COSName("MDP");
public static final COSName MEDIA_BOX = new COSName("MediaBox");
public static final COSName METADATA = new COSName("Metadata");
public static final COSName MISSING_WIDTH = new COSName("MissingWidth");
public static final COSName MK = new COSName("MK");
public static final COSName ML = new COSName("ML");
public static final COSName MM_TYPE1 = new COSName("MMType1");
public static final COSName MOD_DATE = new COSName("ModDate");
public static final COSName MULTIPLY = new COSName("Multiply");
// N
public static final COSName N = new COSName("N");
public static final COSName NAME = new COSName("Name");
public static final COSName NAMES = new COSName("Names");
public static final COSName NEED_APPEARANCES = new COSName("NeedAppearances");
public static final COSName NEXT = new COSName("Next");
public static final COSName NM = new COSName("NM");
public static final COSName NON_EFONT_NO_WARN = new COSName("NonEFontNoWarn");
public static final COSName NON_FULL_SCREEN_PAGE_MODE = new COSName("NonFullScreenPageMode");
public static final COSName NONE = new COSName("None");
public static final COSName NORMAL = new COSName("Normal");
public static final COSName NUMS = new COSName("Nums");
// O
public static final COSName O = new COSName("O");
public static final COSName OBJ = new COSName("Obj");
public static final COSName OBJ_STM = new COSName("ObjStm");
public static final COSName OC = new COSName("OC");
public static final COSName OCG = new COSName("OCG");
public static final COSName OCGS = new COSName("OCGs");
public static final COSName OCPROPERTIES = new COSName("OCProperties");
public static final COSName OE = new COSName("OE");
public static final COSName OFF = new COSName("OFF");
public static final COSName ON = new COSName("ON");
public static final COSName OP = new COSName("OP");
public static final COSName OP_NS = new COSName("op");
public static final COSName OPEN_ACTION = new COSName("OpenAction");
public static final COSName OPEN_TYPE = new COSName("OpenType");
public static final COSName OPM = new COSName("OPM");
public static final COSName OPT = new COSName("Opt");
public static final COSName ORDER = new COSName("Order");
public static final COSName ORDERING = new COSName("Ordering");
public static final COSName OS = new COSName("OS");
public static final COSName OUTLINES = new COSName("Outlines");
public static final COSName OUTPUT_CONDITION = new COSName("OutputCondition");
public static final COSName OUTPUT_CONDITION_IDENTIFIER = new COSName(
"OutputConditionIdentifier");
public static final COSName OUTPUT_INTENT = new COSName("OutputIntent");
public static final COSName OUTPUT_INTENTS = new COSName("OutputIntents");
public static final COSName OVERLAY = new COSName("Overlay");
// P
public static final COSName P = new COSName("P");
public static final COSName PAGE = new COSName("Page");
public static final COSName PAGE_LABELS = new COSName("PageLabels");
public static final COSName PAGE_LAYOUT = new COSName("PageLayout");
public static final COSName PAGE_MODE = new COSName("PageMode");
public static final COSName PAGES = new COSName("Pages");
public static final COSName PAINT_TYPE = new COSName("PaintType");
public static final COSName PANOSE = new COSName("Panose");
public static final COSName PARAMS = new COSName("Params");
public static final COSName PARENT = new COSName("Parent");
public static final COSName PARENT_TREE = new COSName("ParentTree");
public static final COSName PARENT_TREE_NEXT_KEY = new COSName("ParentTreeNextKey");
public static final COSName PATTERN = new COSName("Pattern");
public static final COSName PATTERN_TYPE = new COSName("PatternType");
public static final COSName PDF_DOC_ENCODING = new COSName("PDFDocEncoding");
public static final COSName PERMS = new COSName("Perms");
public static final COSName PG = new COSName("Pg");
public static final COSName PRE_RELEASE = new COSName("PreRelease");
public static final COSName PREDICTOR = new COSName("Predictor");
public static final COSName PREV = new COSName("Prev");
public static final COSName PRINT_AREA = new COSName("PrintArea");
public static final COSName PRINT_CLIP = new COSName("PrintClip");
public static final COSName PRINT_SCALING = new COSName("PrintScaling");
public static final COSName PROC_SET = new COSName("ProcSet");
public static final COSName PROCESS = new COSName("Process");
public static final COSName PRODUCER = new COSName("Producer");
public static final COSName PROP_BUILD = new COSName("Prop_Build");
public static final COSName PROPERTIES = new COSName("Properties");
public static final COSName PS = new COSName("PS");
public static final COSName PUB_SEC = new COSName("PubSec");
// Q
public static final COSName Q = new COSName("Q");
public static final COSName QUADPOINTS = new COSName("QuadPoints");
// R
public static final COSName R = new COSName("R");
public static final COSName RANGE = new COSName("Range");
public static final COSName RC = new COSName("RC");
public static final COSName RD = new COSName("RD");
public static final COSName REASON = new COSName("Reason");
public static final COSName REASONS = new COSName("Reasons");
public static final COSName RECIPIENTS = new COSName("Recipients");
public static final COSName RECT = new COSName("Rect");
public static final COSName REGISTRY = new COSName("Registry");
public static final COSName REGISTRY_NAME = new COSName("RegistryName");
public static final COSName RENAME = new COSName("Rename");
public static final COSName RESOURCES = new COSName("Resources");
public static final COSName RGB = new COSName("RGB");
public static final COSName RI = new COSName("RI");
public static final COSName ROLE_MAP = new COSName("RoleMap");
public static final COSName ROOT = new COSName("Root");
public static final COSName ROTATE = new COSName("Rotate");
public static final COSName ROWS = new COSName("Rows");
public static final COSName RUN_LENGTH_DECODE = new COSName("RunLengthDecode");
public static final COSName RUN_LENGTH_DECODE_ABBREVIATION = new COSName("RL");
public static final COSName RV = new COSName("RV");
// S
public static final COSName S = new COSName("S");
public static final COSName SA = new COSName("SA");
public static final COSName SCREEN = new COSName("Screen");
public static final COSName SE = new COSName("SE");
public static final COSName SEPARATION = new COSName("Separation");
public static final COSName SET_F = new COSName("SetF");
public static final COSName SET_FF = new COSName("SetFf");
public static final COSName SHADING = new COSName("Shading");
public static final COSName SHADING_TYPE = new COSName("ShadingType");
public static final COSName SIG = new COSName("Sig");
public static final COSName SIG_FLAGS = new COSName("SigFlags");
public static final COSName SIZE = new COSName("Size");
public static final COSName SM = new COSName("SM");
public static final COSName SMASK = new COSName("SMask");
public static final COSName SOFT_LIGHT = new COSName("SoftLight");
public static final COSName SS = new COSName("SS");
public static final COSName ST = new COSName("St");
public static final COSName STANDARD_ENCODING = new COSName("StandardEncoding");
public static final COSName STATE = new COSName("State");
public static final COSName STATE_MODEL = new COSName("StateModel");
public static final COSName STATUS = new COSName("Status");
public static final COSName STD_CF = new COSName("StdCF");
public static final COSName STEM_H = new COSName("StemH");
public static final COSName STEM_V = new COSName("StemV");
public static final COSName STM_F = new COSName("StmF");
public static final COSName STR_F = new COSName("StrF");
public static final COSName STRUCT_PARENT = new COSName("StructParent");
public static final COSName STRUCT_PARENTS = new COSName("StructParents");
public static final COSName STRUCT_TREE_ROOT = new COSName("StructTreeRoot");
public static final COSName STYLE = new COSName("Style");
public static final COSName SUB_FILTER = new COSName("SubFilter");
public static final COSName SUBJ = new COSName("Subj");
public static final COSName SUBJECT = new COSName("Subject");
public static final COSName SUBTYPE = new COSName("Subtype");
public static final COSName SUPPLEMENT = new COSName("Supplement");
public static final COSName SV = new COSName("SV");
public static final COSName SW = new COSName("SW");
public static final COSName SY = new COSName("Sy");
// T
public static final COSName T = new COSName("T");
public static final COSName TARGET = new COSName("Target");
public static final COSName TEMPLATES = new COSName("Templates");
public static final COSName THREADS = new COSName("Threads");
public static final COSName TI = new COSName("TI");
public static final COSName TILING_TYPE = new COSName("TilingType");
public static final COSName TIME_STAMP = new COSName("TimeStamp");
public static final COSName TITLE = new COSName("Title");
public static final COSName TK = new COSName("TK");
public static final COSName TM = new COSName("TM");
public static final COSName TO_UNICODE = new COSName("ToUnicode");
public static final COSName TR = new COSName("TR");
public static final COSName TRAPPED = new COSName("Trapped");
public static final COSName TRANS = new COSName("Trans");
public static final COSName TRANSPARENCY = new COSName("Transparency");
public static final COSName TREF = new COSName("TRef");
public static final COSName TRIM_BOX = new COSName("TrimBox");
public static final COSName TRUE_TYPE = new COSName("TrueType");
public static final COSName TRUSTED_MODE = new COSName("TrustedMode");
public static final COSName TU = new COSName("TU");
/** Acro form field type for text field. */
public static final COSName TX = new COSName("Tx");
public static final COSName TYPE = new COSName("Type");
public static final COSName TYPE0 = new COSName("Type0");
public static final COSName TYPE1 = new COSName("Type1");
public static final COSName TYPE3 = new COSName("Type3");
// U
public static final COSName U = new COSName("U");
public static final COSName UE = new COSName("UE");
public static final COSName UF = new COSName("UF");
public static final COSName UNCHANGED = new COSName("Unchanged");
public static final COSName UNIX = new COSName("Unix");
public static final COSName URI = new COSName("URI");
public static final COSName URL = new COSName("URL");
// V
public static final COSName V = new COSName("V");
public static final COSName VERISIGN_PPKVS = new COSName("VeriSign.PPKVS");
public static final COSName VERSION = new COSName("Version");
public static final COSName VERTICES = new COSName("Vertices");
public static final COSName VERTICES_PER_ROW = new COSName("VerticesPerRow");
public static final COSName VIEW_AREA = new COSName("ViewArea");
public static final COSName VIEW_CLIP = new COSName("ViewClip");
public static final COSName VIEWER_PREFERENCES = new COSName("ViewerPreferences");
// W
public static final COSName W = new COSName("W");
public static final COSName W2 = new COSName("W2");
public static final COSName WHITE_POINT = new COSName("WhitePoint");
public static final COSName WIDTH = new COSName("Width");
public static final COSName WIDTHS = new COSName("Widths");
public static final COSName WIN_ANSI_ENCODING = new COSName("WinAnsiEncoding");
// X
public static final COSName XFA = new COSName("XFA");
public static final COSName X_STEP = new COSName("XStep");
public static final COSName XHEIGHT = new COSName("XHeight");
public static final COSName XOBJECT = new COSName("XObject");
public static final COSName XREF = new COSName("XRef");
public static final COSName XREF_STM = new COSName("XRefStm");
// Y
public static final COSName Y_STEP = new COSName("YStep");
public static final COSName YES = new COSName("Yes");
// fields
// The decoded name string; immutable for the lifetime of the instance.
private final String name;
// Cached hash of 'name', computed once in the constructor (COSName is used
// heavily as a dictionary key).
private final int hashCode;
// Length the name had in the source document, when it differs from
// name.length() (e.g. when the raw name contained #xx escapes); null when
// never explicitly recorded.
private Integer originalLength;
/**
 * Returns the length this name had in the original document, falling back to
 * the current string length when no explicit original length was recorded.
 */
public Integer getOriginalLength() {
    return originalLength != null ? originalLength : name.length();
}
/**
 * Looks up, or lazily creates and interns, the COSName for the given string.
 *
 * @param aName The name of the object.
 *
 * @return A COSName with the specified name, or null if aName is null.
 */
public static COSName getPDFName(String aName)
{
    if (aName == null)
    {
        return null;
    }
    // Fast path: one of the predefined, commonly used names.
    COSName cached = commonNameMap.get(aName);
    if (cached != null)
    {
        return cached;
    }
    // Document specific name; it may already have been interned.
    cached = nameMap.get(aName);
    if (cached != null)
    {
        return cached;
    }
    // The private constructor registers the new instance in the synchronized
    // document-specific map.
    return new COSName(aName, false);
}
/**
 * Looks up, or lazily creates and interns, the COSName for the given string,
 * also taking into account the length of the name in the source document.
 *
 * <p>NOTE(review): when the name is already interned (common or document
 * specific), the cached instance is returned and {@code originalLength} is
 * ignored — confirm this is the intended behavior for repeated names.
 *
 * @param aName The name of the object.
 * @param originalLength The length the name had in the source document.
 *
 * @return A COSName with the specified name, or null if aName is null.
 */
public static COSName getPDFName(String aName, int originalLength) {
    if (aName == null)
    {
        return null;
    }
    // Predefined names first, then previously seen document-specific names.
    COSName cached = commonNameMap.get(aName);
    if (cached == null)
    {
        cached = nameMap.get(aName);
    }
    if (cached != null)
    {
        return cached;
    }
    // The private constructor registers the new instance in the synchronized
    // document-specific map.
    return new COSName(aName, false, originalLength);
}
/**
 * Private constructor for a name that also records the length the name had in
 * the original document. Delegates registration to the two-argument
 * constructor before storing the original length.
 *
 * @param name The name of the COSName object.
 * @param staticValue Indicates if the COSName object is static.
 * @param originalLength The length the name had in the source document.
 */
private COSName(String name, boolean staticValue, int originalLength) {
    this(name, staticValue);
    this.originalLength = originalLength;
}
/**
 * Private constructor. This will limit the number of COSName objects that are created.
 *
 * @param aName The name of the COSName object.
 * @param staticValue Indicates if the COSName object is static so that it can be stored in the HashMap without
 * synchronizing.
 */
private COSName(String aName, boolean staticValue)
{
    name = aName;
    // Static (predefined) names go into the common map, which is populated
    // only during class initialization; all other names are interned in the
    // synchronized document-specific map.
    if (staticValue)
    {
        commonNameMap.put(aName, this);
    }
    else
    {
        nameMap.put(aName, this);
    }
    // Cache the hash code up front; names are used heavily as dictionary keys.
    hashCode = name.hashCode();
}
/**
 * Private constructor for the predefined (static) names declared above. This
 * will limit the number of COSName objects that are created.
 *
 * @param aName The name of the COSName object.
 */
private COSName(String aName)
{
    // All single-argument construction is for static constants.
    this(aName, true);
}
/**
 * Returns the string this COSName wraps.
 *
 * @return The name of the object.
 */
public String getName()
{
    return this.name;
}
/** Debug representation in the form {@code COSName{name}}. */
@Override
public String toString()
{
    return "COSName{" + getName() + "}";
}
/** Two COSNames are equal exactly when their underlying strings are equal. */
@Override
public boolean equals(Object object)
{
    if (!(object instanceof COSName))
    {
        return false;
    }
    return name.equals(((COSName) object).name);
}
/** Returns the hash of the name string, precomputed in the constructor. */
@Override
public int hashCode()
{
    return this.hashCode;
}
/** Orders names lexicographically by their underlying strings (consistent with equals). */
@Override
public int compareTo(COSName other)
{
    return this.name.compareTo(other.name);
}
/**
 * Returns true if the name is the empty string.
 *
 * @return true if the name is the empty string.
 */
public boolean isEmpty()
{
    return name.length() == 0;
}
/**
 * Visitor-pattern double dispatch: forwards this name to the given visitor.
 *
 * @param visitor The visitor to notify.
 * @return whatever the visitor's visitFromName implementation returns
 * @throws IOException If the visitor fails while processing this object.
 */
@Override
public Object accept(ICOSVisitor visitor) throws IOException
{
    return visitor.visitFromName(this);
}
/**
 * This will output this string as a PDF object: a leading '/' followed by the
 * name, with every character outside a conservative whitelist written as a
 * two-digit hex escape (e.g. {@code #20}).
 *
 * @param output The stream to write to.
 * @throws IOException If there is an error writing to the stream.
 */
public void writePDF(OutputStream output) throws IOException
{
    output.write('/');
    for (byte b : getName().getBytes(Charsets.US_ASCII))
    {
        // Unsigned byte value.
        int c = b & 0xff;
        // Deliberately more restrictive than the PDF spec, "Name Objects",
        // see PDFBOX-2073.
        boolean verbatim =
                c >= 'A' && c <= 'Z'
                || c >= 'a' && c <= 'z'
                || c >= '0' && c <= '9'
                || c == '+'
                || c == '-'
                || c == '_'
                || c == '@'
                || c == '*'
                || c == '$'
                || c == ';'
                || c == '.';
        if (verbatim)
        {
            output.write(c);
        }
        else
        {
            output.write('#');
            output.write(String.format("%02X", c).getBytes(Charsets.US_ASCII));
        }
    }
}
/**
 * Not usually needed except if resources need to be reclaimed in a long running process.
 */
public static synchronized void clearResources()
{
    // Drop all interned document-specific names. The predefined common names
    // are static constants and remain registered.
    nameMap.clear();
}
}
| |
/*
* Copyright 2016, Google Inc.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.opencensus.stats;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.fail;
import com.google.common.collect.Collections2;
import com.google.common.testing.EqualsTester;
import io.opencensus.common.Function;
import io.opencensus.internal.SimpleEventQueue;
import io.opencensus.testing.common.TestClock;
import io.opencensus.internal.VarInt;
import io.opencensus.stats.View.DistributionView;
import io.opencensus.stats.View.IntervalView;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests for {@link StatsContext}. */
@RunWith(JUnit4.class)
public class StatsContextTest {
  /** Allowed floating-point error when checking recorded aggregation means. */
  private static final double TOLERANCE = 1e-6;

  // Stats infrastructure under test; SimpleEventQueue makes recording synchronous.
  private final StatsManagerImplBase statsManager =
      new StatsManagerImplBase(new SimpleEventQueue(), TestClock.create());
  private final StatsContextFactory factory = statsManager.getStatsContextFactory();
  private final StatsContext defaultStatsContext = factory.getDefault();

  // On-the-wire constants of the tag serialization format.
  private static final int VERSION_ID = 0;
  private static final int VALUE_TYPE_STRING = 0;

  // Reusable tag key/value/tag fixtures.
  private static final TagKey K_EMPTY = TagKey.create("");
  private static final TagKey K1 = TagKey.create("k1");
  private static final TagKey K2 = TagKey.create("k2");
  private static final TagKey K3 = TagKey.create("k3");
  private static final TagKey K4 = TagKey.create("k4");
  private static final TagKey K10 = TagKey.create("k10");
  private static final TagValue V_EMPTY = TagValue.create("");
  private static final TagValue V1 = TagValue.create("v1");
  private static final TagValue V2 = TagValue.create("v2");
  private static final TagValue V3 = TagValue.create("v3");
  private static final TagValue V4 = TagValue.create("v4");
  private static final TagValue V10 = TagValue.create("v10");
  private static final TagValue V20 = TagValue.create("v20");
  private static final TagValue V30 = TagValue.create("v30");
  private static final TagValue V100 = TagValue.create("v100");
  private static final Tag T1 = Tag.create(K1, V1);
  private static final Tag T2 = Tag.create(K2, V2);
  private static final Tag T3 = Tag.create(K3, V3);
  private static final Tag T4 = Tag.create(K4, V4);

  /** with(...) overloads must be equivalent to the corresponding builder chains. */
  @Test
  public void testWith() {
    assertThat(defaultStatsContext.builder().set(K1, V1).build())
        .isEqualTo(defaultStatsContext.with(K1, V1));
    assertThat(defaultStatsContext.builder().set(K1, V1).set(K2, V2).build())
        .isEqualTo(defaultStatsContext.with(K1, V1, K2, V2));
    assertThat(defaultStatsContext.builder().set(K1, V1).set(K2, V2).set(K3, V3).build())
        .isEqualTo(defaultStatsContext.with(K1, V1, K2, V2, K3, V3));
  }

  /** Chained with(...) calls accumulate tags, and later values override earlier ones. */
  @Test
  public void testWithComposed() {
    StatsContext context1 = defaultStatsContext.with(K1, V1);
    assertThat(defaultStatsContext.builder().set(K1, V1).build()).isEqualTo(context1);
    StatsContext context2 = context1.with(K1, V10, K2, V2);
    assertThat(defaultStatsContext.with(K1, V10, K2, V2)).isEqualTo(context2);
    StatsContext context3 = context2.with(K1, V100, K2, V20, K3, V3);
    assertThat(defaultStatsContext.with(K1, V100, K2, V20, K3, V3)).isEqualTo(context3);
    StatsContext context4 = context3.with(K3, V30, K4, V4);
    assertThat(
            defaultStatsContext
                .builder()
                .set(K1, V100)
                .set(K2, V20)
                .set(K3, V30)
                .set(K4, V4)
                .build())
        .isEqualTo(context4);
  }

  // The main tests for stats recording are in StatsManagerImplTest.
  /** Smoke test: a recorded measurement shows up in the registered distribution view. */
  @Test
  public void testRecord() {
    statsManager.registerView(RpcViewConstants.RPC_CLIENT_ROUNDTRIP_LATENCY_VIEW);
    // Before recording, the distribution view must be empty.
    View beforeView = statsManager.getView(RpcViewConstants.RPC_CLIENT_ROUNDTRIP_LATENCY_VIEW);
    beforeView.match(
        new Function<DistributionView, Void>() {
          @Override
          public Void apply(DistributionView view) {
            assertThat(view.getDistributionAggregations()).isEmpty();
            return null;
          }
        },
        new Function<IntervalView, Void>() {
          @Override
          public Void apply(IntervalView view) {
            fail("Expected a DistributionView");
            return null;
          }
        });
    StatsContext context =
        defaultStatsContext.with(
            RpcMeasurementConstants.RPC_CLIENT_METHOD, TagValue.create("myMethod"));
    MeasurementMap measurements =
        MeasurementMap.of(RpcMeasurementConstants.RPC_CLIENT_ROUNDTRIP_LATENCY, 5.1);
    context.record(measurements);
    // After recording, exactly one aggregation carrying the method tag and value.
    View afterView = statsManager.getView(RpcViewConstants.RPC_CLIENT_ROUNDTRIP_LATENCY_VIEW);
    afterView.match(
        new Function<DistributionView, Void>() {
          @Override
          public Void apply(DistributionView view) {
            assertThat(view.getDistributionAggregations()).hasSize(1);
            DistributionAggregation agg = view.getDistributionAggregations().get(0);
            assertThat(agg.getTags())
                .containsExactly(
                    Tag.create(
                        RpcMeasurementConstants.RPC_CLIENT_METHOD, TagValue.create("myMethod")));
            assertThat(agg.getCount()).isEqualTo(1);
            assertThat(agg.getMean()).isWithin(TOLERANCE).of(5.1);
            return null;
          }
        },
        new Function<IntervalView, Void>() {
          @Override
          public Void apply(IntervalView view) {
            fail("Expected a DistributionView");
            return null;
          }
        });
  }

  @Test
  public void testSerializeDefault() throws Exception {
    testSerialize();
  }

  @Test
  public void testSerializeWithOneStringTag() throws Exception {
    testSerialize(T1);
  }

  @Test
  public void testSerializeWithMultiStringTags() throws Exception {
    testSerialize(T1, T2, T3, T4);
  }

  /** serialize() followed by deserialize() must reproduce the original context. */
  @Test
  public void testRoundtripSerialization() throws Exception {
    testRoundtripSerialization(defaultStatsContext.builder().build());
    testRoundtripSerialization(defaultStatsContext.with(K1, V1));
    testRoundtripSerialization(defaultStatsContext.with(K1, V1, K2, V2, K3, V3));
    testRoundtripSerialization(defaultStatsContext.with(K1, V_EMPTY));
    testRoundtripSerialization(defaultStatsContext.with(K_EMPTY, V1));
    testRoundtripSerialization(defaultStatsContext.with(K_EMPTY, V_EMPTY));
  }

  // Tests for Object overrides.

  /** Equality depends only on the tag set, not on insertion order. */
  @Test
  public void testEquals() {
    new EqualsTester()
        .addEqualityGroup(defaultStatsContext, defaultStatsContext)
        .addEqualityGroup(defaultStatsContext.with(K1, V1), defaultStatsContext.with(K1, V1))
        .addEqualityGroup(
            defaultStatsContext.with(K1, V1, K2, V2),
            defaultStatsContext.with(K1, V1, K2, V2),
            defaultStatsContext.with(K2, V2, K1, V1))
        .addEqualityGroup(defaultStatsContext.with(K10, V1))
        .addEqualityGroup(defaultStatsContext.with(K1, V10))
        .addEqualityGroup("foo")
        .testEquals();
  }

  /** toString() must be deterministic and distinguish differing tag sets. */
  @Test
  public void testToString() {
    assertThat(defaultStatsContext.with(K1, V1).toString())
        .isEqualTo(defaultStatsContext.with(K1, V1).toString());
    assertThat(defaultStatsContext.with(K10, V1).toString())
        .isNotEqualTo(defaultStatsContext.with(K1, V1).toString());
    assertThat(defaultStatsContext.with(K1, V10).toString())
        .isNotEqualTo(defaultStatsContext.with(K1, V1).toString());
  }

  /**
   * Serializes a context built from the given tags and checks the output against every
   * permutation of the expected encoding (tag order in the wire format is unspecified).
   */
  private void testSerialize(Tag... tags) throws IOException {
    StatsContext.Builder builder = defaultStatsContext.builder();
    for (Tag tag : tags) {
      builder.set(tag.getKey(), tag.getValue());
    }
    ByteArrayOutputStream actual = new ByteArrayOutputStream();
    builder.build().serialize(actual);
    Collection<List<Tag>> tagPermutation = Collections2.permutations(Arrays.asList(tags));
    Set<String> possibleOutputs = new HashSet<String>();
    for (List<Tag> list : tagPermutation) {
      ByteArrayOutputStream expected = new ByteArrayOutputStream();
      expected.write(VERSION_ID);
      for (Tag tag : list) {
        expected.write(VALUE_TYPE_STRING);
        encodeString(tag.getKey().asString(), expected);
        encodeString(tag.getValue().asString(), expected);
      }
      possibleOutputs.add(expected.toString());
    }
    assertThat(possibleOutputs).contains(actual.toString());
  }

  /** Asserts that serializing and then deserializing {@code expected} yields an equal context. */
  private void testRoundtripSerialization(StatsContext expected) throws Exception {
    ByteArrayOutputStream output = new ByteArrayOutputStream();
    expected.serialize(output);
    ByteArrayInputStream input = new ByteArrayInputStream(output.toByteArray());
    StatsContext actual = factory.deserialize(input);
    assertThat(actual).isEqualTo(expected);
  }

  /**
   * Writes a length-prefixed UTF-8 string, matching the production tag encoding.
   *
   * <p>NOTE(review): the prefix is {@code input.length()} (UTF-16 code units) while the payload is
   * UTF-8 bytes; these differ for non-ASCII input — confirm against the production serializer.
   * (Removed the redundant {@code final} modifier: private static methods cannot be overridden.)
   */
  private static void encodeString(String input, ByteArrayOutputStream byteArrayOutputStream)
      throws IOException {
    VarInt.putVarInt(input.length(), byteArrayOutputStream);
    byteArrayOutputStream.write(input.getBytes("UTF-8"));
  }
}
| |
package com.huawei.esdk.uc.professional.local.bean;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for LinkmanInfo complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="LinkmanInfo">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="contactUrl" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="contactId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="displayName" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="description" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="sex" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="occupation" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="corpName" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="deptName" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="pinyinName" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="foreignName" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="staffName" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="timeZone" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="timeZoneValue" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="contactDesc" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="assistant" type="{esdk_uc_professional_server}AssistantInfo" maxOccurs="unbounded" minOccurs="0"/>
* <element name="contactInfo" type="{esdk_uc_professional_server}ContactInfo"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "LinkmanInfo", propOrder = {
    "contactUrl",
    "contactId",
    "displayName",
    "description",
    "sex",
    "occupation",
    "corpName",
    "deptName",
    "pinyinName",
    "foreignName",
    "staffName",
    "timeZone",
    "timeZoneValue",
    "contactDesc",
    "assistant",
    "contactInfo"
})
public class LinkmanInfo {

    // String-valued elements default to "" so an unset property marshals as an
    // empty element instead of being null.
    protected String contactUrl = "";

    protected String contactId = "";

    @XmlElement(required = true)
    protected String displayName = "";

    protected String description = "";

    protected String sex = "";

    protected String occupation = "";

    protected String corpName = "";

    protected String deptName = "";

    protected String pinyinName = "";

    protected String foreignName = "";

    protected String staffName = "";

    protected String timeZone = "";

    protected String timeZoneValue = "";

    protected String contactDesc = "";

    // Lazily created by getAssistant(); may also be replaced wholesale via
    // setAssistant(List).
    protected List<AssistantInfo> assistant;

    @XmlElement(required = true)
    protected ContactInfo contactInfo;

    /** Returns the value of the contactUrl property; possible object is {@link String}. */
    public String getContactUrl() {
        return contactUrl;
    }

    /** Sets the value of the contactUrl property; allowed object is {@link String}. */
    public void setContactUrl(String value) {
        this.contactUrl = value;
    }

    /** Returns the value of the contactId property; possible object is {@link String}. */
    public String getContactId() {
        return contactId;
    }

    /** Sets the value of the contactId property; allowed object is {@link String}. */
    public void setContactId(String value) {
        this.contactId = value;
    }

    /** Returns the value of the displayName property; possible object is {@link String}. */
    public String getDisplayName() {
        return displayName;
    }

    /** Sets the value of the displayName property; allowed object is {@link String}. */
    public void setDisplayName(String value) {
        this.displayName = value;
    }

    /** Returns the value of the description property; possible object is {@link String}. */
    public String getDescription() {
        return description;
    }

    /** Sets the value of the description property; allowed object is {@link String}. */
    public void setDescription(String value) {
        this.description = value;
    }

    /** Returns the value of the sex property; possible object is {@link String}. */
    public String getSex() {
        return sex;
    }

    /** Sets the value of the sex property; allowed object is {@link String}. */
    public void setSex(String value) {
        this.sex = value;
    }

    /** Returns the value of the occupation property; possible object is {@link String}. */
    public String getOccupation() {
        return occupation;
    }

    /** Sets the value of the occupation property; allowed object is {@link String}. */
    public void setOccupation(String value) {
        this.occupation = value;
    }

    /** Returns the value of the corpName property; possible object is {@link String}. */
    public String getCorpName() {
        return corpName;
    }

    /** Sets the value of the corpName property; allowed object is {@link String}. */
    public void setCorpName(String value) {
        this.corpName = value;
    }

    /** Returns the value of the deptName property; possible object is {@link String}. */
    public String getDeptName() {
        return deptName;
    }

    /** Sets the value of the deptName property; allowed object is {@link String}. */
    public void setDeptName(String value) {
        this.deptName = value;
    }

    /** Returns the value of the pinyinName property; possible object is {@link String}. */
    public String getPinyinName() {
        return pinyinName;
    }

    /** Sets the value of the pinyinName property; allowed object is {@link String}. */
    public void setPinyinName(String value) {
        this.pinyinName = value;
    }

    /** Returns the value of the foreignName property; possible object is {@link String}. */
    public String getForeignName() {
        return foreignName;
    }

    /** Sets the value of the foreignName property; allowed object is {@link String}. */
    public void setForeignName(String value) {
        this.foreignName = value;
    }

    /** Returns the value of the staffName property; possible object is {@link String}. */
    public String getStaffName() {
        return staffName;
    }

    /** Sets the value of the staffName property; allowed object is {@link String}. */
    public void setStaffName(String value) {
        this.staffName = value;
    }

    /** Returns the value of the timeZone property; possible object is {@link String}. */
    public String getTimeZone() {
        return timeZone;
    }

    /** Sets the value of the timeZone property; allowed object is {@link String}. */
    public void setTimeZone(String value) {
        this.timeZone = value;
    }

    /** Returns the value of the timeZoneValue property; possible object is {@link String}. */
    public String getTimeZoneValue() {
        return timeZoneValue;
    }

    /** Sets the value of the timeZoneValue property; allowed object is {@link String}. */
    public void setTimeZoneValue(String value) {
        this.timeZoneValue = value;
    }

    /** Returns the value of the contactDesc property; possible object is {@link String}. */
    public String getContactDesc() {
        return contactDesc;
    }

    /** Sets the value of the contactDesc property; allowed object is {@link String}. */
    public void setContactDesc(String value) {
        this.contactDesc = value;
    }

    /**
     * Returns the live list of assistants, creating an empty list on first access.
     *
     * <p>This accessor returns a reference to the live list, not a snapshot: any
     * modification made to the returned list is reflected inside this object. For
     * example, to add a new item: {@code getAssistant().add(newItem);}
     *
     * <p>Unlike typical JAXB-generated classes, the whole list can also be replaced
     * via {@link #setAssistant(List)}.
     *
     * <p>Objects of the following type(s) are allowed in the list:
     * {@link AssistantInfo}
     */
    public List<AssistantInfo> getAssistant() {
        if (assistant == null) {
            assistant = new ArrayList<AssistantInfo>();
        }
        return this.assistant;
    }

    /** Returns the value of the contactInfo property; possible object is {@link ContactInfo}. */
    public ContactInfo getContactInfo() {
        return contactInfo;
    }

    /** Sets the value of the contactInfo property; allowed object is {@link ContactInfo}. */
    public void setContactInfo(ContactInfo value) {
        this.contactInfo = value;
    }

    /**
     * Replaces the entire assistant list. The given reference is stored directly
     * (no defensive copy), so later changes made through the caller's reference are
     * visible via {@link #getAssistant()}.
     */
    public void setAssistant(List<AssistantInfo> assistant) {
        this.assistant = assistant;
    }
}
| |
/*
* Copyright 2014 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.openrtb.util;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.openrtb.OpenRtb.BidRequest;
import com.google.openrtb.OpenRtb.BidRequest.Imp;
import com.google.openrtb.OpenRtb.BidRequest.Imp.Banner;
import com.google.openrtb.OpenRtb.BidRequest.Imp.Video;
import com.google.openrtb.OpenRtb.BidResponse;
import com.google.openrtb.OpenRtb.BidResponse.SeatBid;
import com.google.openrtb.OpenRtb.BidResponse.SeatBid.Bid;
import com.google.openrtb.OpenRtb.ContentCategory;
import org.junit.Test;
import java.util.Iterator;
import javax.annotation.Nullable;
/**
* Tests for {@link OpenRtbUtils}.
*/
public class OpenRtbUtilsTest {

  // Category names are accepted in both the enum spelling ('_') and the JSON
  // spelling ('-'); the JSON form always uses '-'.
  @Test
  public void testCatUtils() {
    assertEquals(ContentCategory.IAB10_1, OpenRtbUtils.categoryFromName("IAB10_1"));
    assertEquals(ContentCategory.IAB10_1, OpenRtbUtils.categoryFromName("IAB10-1"));
    assertEquals("IAB10-1", OpenRtbUtils.categoryToJsonName("IAB10-1"));
    assertEquals("IAB10-1", OpenRtbUtils.categoryToJsonName("IAB10_1"));
    assertEquals("IAB10-1", OpenRtbUtils.categoryToJsonName(ContentCategory.IAB10_1));
  }

  // impsWith() on an empty request, then on a request with one imp, filtered by
  // always-true, not-matching and matching predicates.
  @Test
  public void testRequest_imps() {
    BidRequest request = BidRequest.newBuilder().setId("1").build();
    assertTrue(Iterables.isEmpty(OpenRtbUtils.impsWith(
        request, Predicates.<Imp>alwaysTrue(), true, true)));
    request = request.toBuilder().addImp(Imp.newBuilder().setId("1")).build();
    assertEquals(1, Iterables.size(OpenRtbUtils.impsWith(
        request, Predicates.<Imp>alwaysTrue(), true, true)));
    assertTrue(Iterables.isEmpty(OpenRtbUtils.impsWith(request, new Predicate<Imp>() {
      @Override public boolean apply(Imp imp) {
        return "notfound".equals(imp.getId());
      }
    }, true, true)));
    assertEquals(1, Iterables.size(OpenRtbUtils.impsWith(request, new Predicate<Imp>() {
      @Override public boolean apply(Imp imp) {
        return "1".equals(imp.getId());
      }
    }, true, true)));
    assertNotNull(OpenRtbUtils.impWithId(request, "1"));
  }

  // Banner-only filtering (banner=true, video=false) over four banner imps,
  // exercising the filter-all / filter-none / filter-1 / filter-N code paths.
  @Test
  public void testRequest_banners() {
    BidRequest request = BidRequest.newBuilder()
        .setId("1")
        .addImp(Imp.newBuilder().setId("1").setBanner(Banner.newBuilder().setId("0")))
        .addImp(Imp.newBuilder().setId("2").setBanner(Banner.newBuilder().setId("0")))
        .addImp(Imp.newBuilder().setId("3").setBanner(Banner.newBuilder().setId("0")))
        .addImp(Imp.newBuilder().setId("4").setBanner(Banner.newBuilder().setId("0")))
        .build();
    assertEquals(4, Iterables.size(OpenRtbUtils.impsWith(
        request, Predicates.<Imp>alwaysTrue(), true, false)));
    assertTrue(Iterables.isEmpty(OpenRtbUtils.impsWith(
        request, Predicates.<Imp>alwaysFalse(), true, false)));
    // Filter-all case
    assertEquals(4, Iterables.size(OpenRtbUtils.impsWith(request, new Predicate<Imp>() {
      @Override public boolean apply(Imp imp) {
        return "0".equals(imp.getBanner().getId());
      }
    }, true, false)));
    // Filter-none case
    assertEquals(0, Iterables.size(OpenRtbUtils.impsWith(request, new Predicate<Imp>() {
      @Override public boolean apply(Imp imp) {
        return "1".equals(imp.getBanner().getId());
      }
    }, true, false)));
    // Filter-1 case
    assertEquals(1, Iterables.size(OpenRtbUtils.impsWith(request, new Predicate<Imp>() {
      @Override public boolean apply(Imp imp) {
        return "1".equals(imp.getId());
      }
    }, true, false)));
    // Filter-N case
    assertEquals(3, Iterables.size(OpenRtbUtils.impsWith(request, new Predicate<Imp>() {
      @Override public boolean apply(Imp imp) {
        return imp.getId().compareTo("1") > 0;
      }
    }, true, false)));
    // Lookup requires both the imp id and the banner id to match.
    assertNull(OpenRtbUtils.bannerImpWithId(request, "notfound", "2"));
    assertNull(OpenRtbUtils.bannerImpWithId(request, "1", "notfound"));
    assertNotNull(OpenRtbUtils.bannerImpWithId(request, "1", "0"));
  }

  // Video-only filtering (banner=false, video=true) over a single video imp.
  @Test
  public void testRequest_videos() {
    BidRequest request = BidRequest.newBuilder()
        .setId("1")
        .addImp(Imp.newBuilder().setId("1").setVideo(Video.newBuilder()))
        .build();
    assertEquals(1, Iterables.size(OpenRtbUtils.impsWith(
        request, Predicates.<Imp>alwaysTrue(), false, true)));
    assertTrue(Iterables.isEmpty(OpenRtbUtils.impsWith(
        request, Predicates.<Imp>alwaysFalse(), false, true)));
    assertEquals(1, Iterables.size(OpenRtbUtils.impsWith(request, new Predicate<Imp>() {
      @Override public boolean apply(Imp imp) {
        return imp.hasVideo();
      }
    }, false, true)));
  }

  // seatBid() lookup/creation semantics and bid enumeration across the anonymous
  // seat and a named seat "x". Note: state accumulates through the method, so the
  // assertion order matters.
  @Test
  public void testResponse_bids() {
    BidResponse.Builder response = BidResponse.newBuilder().setCur("USD");
    OpenRtbUtils.seatBid(response, "unused");
    OpenRtbUtils.seatBid(response); // no seat
    // Repeated lookups of the same seat must return the same builder instance.
    SeatBid.Builder seatbidAnon = OpenRtbUtils.seatBid(response);
    assertSame(seatbidAnon, OpenRtbUtils.seatBid(response));
    SeatBid.Builder seatbidX = OpenRtbUtils.seatBid(response, "x");
    assertSame(seatbidX, OpenRtbUtils.seatBid(response, "x"));
    assertNotSame(seatbidX, OpenRtbUtils.seatBid(response));
    assertTrue(Iterables.isEmpty(OpenRtbUtils.bids(response)));
    // Two bids in the anonymous seat...
    Bid bid1 = buildHtmlBid("1", 100).build();
    OpenRtbUtils.seatBid(response).addBid(bid1);
    Bid bid11 = buildHtmlBid("11", 100).build();
    OpenRtbUtils.seatBid(response).addBid(bid11);
    assertEquals(2, Iterables.size(OpenRtbUtils.bids(response)));
    assertTrue(Iterables.isEmpty(OpenRtbUtils.bids(response, "none")));
    assertEquals(2, Iterables.size(OpenRtbUtils.bids(response, null)));
    assertEquals(bid1, OpenRtbUtils.bidWithId(response, "1").build());
    // ...and three bids in seat "x".
    Bid bid2 = buildHtmlBid("2", 100).build();
    Bid bidUnused = buildHtmlBid("unused", 100).build();
    OpenRtbUtils.seatBid(response, "x").addBid(bidUnused);
    OpenRtbUtils.seatBid(response, "x").addBid(bid2);
    Bid bid22 = buildHtmlBid("22", 100).build();
    OpenRtbUtils.seatBid(response, "x").addBid(bid22);
    // bidWithId is scoped to the requested seat; null means the anonymous seat.
    assertEquals(bid2, OpenRtbUtils.bidWithId(response, "x", "2").build());
    assertNull(OpenRtbUtils.bidWithId(response, "x", "1"));
    assertNull(OpenRtbUtils.bidWithId(response, "none"));
    assertNull(OpenRtbUtils.bidWithId(response, "none", "1"));
    assertNotNull(OpenRtbUtils.bidWithId(response, null, "1"));
    assertTrue(Iterables.elementsEqual(
        ImmutableList.of(bid1, bid11, bidUnused, bid2, bid22),
        BuilderToBid.toBids(OpenRtbUtils.bids(response))));
    assertTrue(Iterables.elementsEqual(
        ImmutableList.of(bid1, bid11),
        BuilderToBid.toBids(OpenRtbUtils.bids(response, null))));
    assertTrue(Iterables.elementsEqual(
        ImmutableList.of(bidUnused, bid2, bid22),
        BuilderToBid.toBids(OpenRtbUtils.bids(response, "x"))));
    Predicate<Bid.Builder> filterGoodBids = new Predicate<Bid.Builder>(){
      @Override public boolean apply(Bid.Builder bid) {
        return !"unused".equals(bid.getId());
      }};
    assertEquals(4, Iterables.size(OpenRtbUtils.bidsWith(response, filterGoodBids)));
    assertTrue(Iterables.isEmpty(OpenRtbUtils.bidsWith(response, "none", filterGoodBids)));
    assertEquals(2, Iterables.size(OpenRtbUtils.bidsWith(response, "x", filterGoodBids)));
    assertEquals(2, Iterables.size(OpenRtbUtils.bidsWith(response, null, filterGoodBids)));
  }

  // The iterable returned by bids() must be read-only.
  @Test(expected = UnsupportedOperationException.class)
  public void testResponse_unsupportedRemove() {
    BidResponse.Builder response = BidResponse.newBuilder().addSeatbid(SeatBid.newBuilder()
        .addBid(buildHtmlBid("1", 100)));
    Iterator<Bid.Builder> bids = OpenRtbUtils.bids(response).iterator();
    bids.next();
    bids.remove();
  }

  // filterBids(): the boolean return indicates whether anything was removed.
  // State accumulates through the method, so the assertion order matters.
  @Test
  public void testResponse_filter() {
    BidResponse.Builder response = BidResponse.newBuilder()
        .addSeatbid(SeatBid.newBuilder()
            .addBid(buildHtmlBid("1", 100))
            .addBid(buildHtmlBid("2", 100))
            .addBid(buildHtmlBid("3", 200)))
        .addSeatbid(SeatBid.newBuilder().setSeat("unused"));
    OpenRtbUtils.filterBids(response, Predicates.<Bid.Builder>alwaysTrue());
    assertEquals(3, Iterables.size(OpenRtbUtils.bids(response)));
    assertTrue(OpenRtbUtils.filterBids(response, new Predicate<Bid.Builder>() {
      @Override public boolean apply(Bid.Builder bid) {
        return !"1".equals(bid.getId());
      }}));
    assertEquals(2, Iterables.size(OpenRtbUtils.bids(response)));
    OpenRtbUtils.seatBid(response, "x").addBid(buildHtmlBid("unused", 100));
    OpenRtbUtils.seatBid(response, "x").addBid(buildHtmlBid("4", 100));
    assertTrue(OpenRtbUtils.filterBids(response, "x", new Predicate<Bid.Builder>() {
      @Override public boolean apply(Bid.Builder bid) {
        return !"4".equals(bid.getId());
      }}));
    assertEquals(1, Iterables.size(OpenRtbUtils.bids(response, "x")));
    // Filtering a nonexistent seat removes nothing and returns false.
    assertFalse(OpenRtbUtils.filterBids(response, "none", Predicates.<Bid.Builder>alwaysFalse()));
    assertTrue(OpenRtbUtils.filterBids(response, null, Predicates.<Bid.Builder>alwaysFalse()));
    assertTrue(OpenRtbUtils.filterBids(response, "x", Predicates.<Bid.Builder>alwaysFalse()));
    assertTrue(Iterables.isEmpty(OpenRtbUtils.bids(response, "x")));
    assertFalse(OpenRtbUtils.filterBids(response, Predicates.<Bid.Builder>alwaysFalse()));
    assertTrue(Iterables.isEmpty(OpenRtbUtils.bids(response)));
  }

  // updateBids(): the boolean return indicates whether any bid was modified by
  // the updater function.
  @Test
  public void testResponse_updater() {
    BidResponse.Builder response = BidResponse.newBuilder().addSeatbid(SeatBid.newBuilder()
        .addBid(buildHtmlBid("1", 100))
        .addBid(buildHtmlBid("2", 200)));
    OpenRtbUtils.seatBid(response, "unused");
    // Doubles bids priced below 150 and reports whether it changed the bid.
    Function<Bid.Builder, Boolean> inflation = new Function<Bid.Builder, Boolean>() {
      @Override public Boolean apply(Bid.Builder bid) {
        if (bid.getPrice() < 150) {
          bid.setPrice(bid.getPrice() * 2);
          return true;
        } else {
          return false;
        }
      }};
    Function<Bid.Builder, Boolean> noUpdates = new Function<Bid.Builder, Boolean>() {
      @Override public Boolean apply(@Nullable Bid.Builder bid) {
        return false;
      }};
    assertTrue(OpenRtbUtils.updateBids(response, inflation));
    // After inflation every bid is >= 150, so inflation itself would now be a no-op.
    assertFalse(OpenRtbUtils.updateBids(response, noUpdates));
    assertFalse(OpenRtbUtils.updateBids(response, noUpdates));
    OpenRtbUtils.seatBid(response, "x").addBid(buildHtmlBid("1", 100));
    OpenRtbUtils.seatBid(response, "x").addBid(buildHtmlBid("2", 200));
    assertTrue(OpenRtbUtils.updateBids(response, "x", inflation));
    assertFalse(OpenRtbUtils.updateBids(response, "x", noUpdates));
    assertFalse(OpenRtbUtils.updateBids(response, "none", noUpdates));
    assertFalse(OpenRtbUtils.updateBids(response, null, noUpdates));
  }

  // Adapts Bid.Builder iterables to Bid iterables (via buildPartial) for
  // comparisons against expected Bid lists.
  static class BuilderToBid implements Function<Bid.Builder, Bid> {
    static final BuilderToBid INSTANCE = new BuilderToBid();
    @Override public Bid apply(Bid.Builder builder) {
      return builder.buildPartial();
    }
    static Iterable<Bid> toBids(Iterable<Bid.Builder> builders) {
      return Iterables.transform(builders, INSTANCE);
    }
  }

  // Minimal bid fixture: ad id and imp id are derived from the bid id.
  private static Bid.Builder buildHtmlBid(String id, long bidMicros) {
    return Bid.newBuilder()
        .setId(id)
        .setAdid("ad" + id)
        .setImpid("imp" + id)
        .setPrice(bidMicros);
  }
}
| |
/*
* Copyright 2015 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.bigquery;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.api.services.bigquery.model.QueryParameter;
import com.google.common.base.Function;
import com.google.common.base.MoreObjects;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import java.io.Serializable;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
* Google Cloud BigQuery Query Request. This class can be used to run a BigQuery SQL query and
* return results if the query completes within a specified timeout. The query results are saved to
* a temporary table that is deleted approximately 24 hours after the query is run. The query is run
* through a BigQuery Job whose identity can be accessed via {@link QueryResponse#getJobId()}. If
* the query does not complete within the provided {@link Builder#setMaxWaitTime(Long)}, the
* response returned by {@link BigQuery#query(QueryRequest)} will have
* {@link QueryResponse#jobCompleted()} set to {@code false} and {@link QueryResponse#getResult()}
* set to {@code null}. To obtain query results you can use
* {@link BigQuery#getQueryResults(JobId, BigQuery.QueryResultsOption...)} until
* {@link QueryResponse#jobCompleted()} returns {@code true}.
*
* <p>Example usage of a query request:
* <pre> {@code
* // Substitute "field", "table" and "dataset" with real field, table and dataset identifiers
* QueryRequest request = QueryRequest.newBuilder("SELECT field FROM table")
* .setDefaultDataset(DatasetId.of("dataset"))
* .setMaxWaitTime(60000L)
* .setPageSize(1000L)
* .build();
* QueryResponse response = bigquery.query(request);
* while (!response.jobCompleted()) {
* Thread.sleep(1000);
* response = bigquery.getQueryResults(response.getJobId());
* }
* List<BigQueryError> executionErrors = response.getExecutionErrors();
* // look for errors in executionErrors
* QueryResult result = response.getResult();
* Iterator<List<FieldValue>> rowIterator = result.iterateAll();
* while(rowIterator.hasNext()) {
* List<FieldValue> row = rowIterator.next();
* // do something with row
* }
* }</pre>
*
* @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/jobs/query">Query</a>
* @see <a href="https://cloud.google.com/bigquery/query-reference">Query Reference</a>
*/
public final class QueryRequest implements Serializable {
private static final long serialVersionUID = -8727328332415880852L;
private final String query;
private final ImmutableList<QueryParameterValue> positionalParameters;
private final ImmutableMap<String, QueryParameterValue> namedParameters;
private final Long pageSize;
private final DatasetId defaultDataset;
private final Long maxWaitTime;
private final Boolean dryRun;
private final Boolean useQueryCache;
private final Boolean useLegacySql;
public static final class Builder {
private String query;
private List<QueryParameterValue> positionalParameters = Lists.newArrayList();
private Map<String, QueryParameterValue> namedParameters = Maps.newHashMap();
private Long pageSize;
private DatasetId defaultDataset;
private Long maxWaitTime;
private Boolean dryRun;
private Boolean useQueryCache;
private Boolean useLegacySql;
private Builder() {}
/**
* Sets the BigQuery query to be executed.
*/
@Deprecated
public Builder query(String query) {
return setQuery(query);
}
/**
* Sets the BigQuery query to be executed.
*/
public Builder setQuery(String query) {
this.query = checkNotNull(query);
return this;
}
/**
* Adds a positional query parameter to the list of query parameters. See
* {@link #setPositionalParameters(Iterable)} for more details on the input requirements.
*
* <p>A positional parameter cannot be added after named parameters have been added.
*/
public Builder addPositionalParameter(QueryParameterValue value) {
checkNotNull(value);
if (!namedParameters.isEmpty()) {
throw new IllegalStateException(
"Positional parameters can't be combined with named parameters");
}
positionalParameters.add(value);
return this;
}
/**
* Sets the query parameters to a list of positional query parameters to use in the query.
*
* <p>The set of query parameters must either be all positional or all named parameters.
* Positional parameters are denoted in the query with a question mark (?).
*
* <p>Additionally, useLegacySql must be set to false; query parameters cannot be used with
* legacy SQL.
*
* <p>The values parameter can be set to null to clear out the positional
* parameters so that named parameters can be used instead.
*/
public Builder setPositionalParameters(Iterable<QueryParameterValue> values) {
if (values == null || Iterables.isEmpty(values)) {
positionalParameters = Lists.newArrayList();
} else {
if (!this.namedParameters.isEmpty()) {
throw new IllegalStateException(
"Positional parameters can't be combined with named parameters");
}
this.positionalParameters = Lists.newArrayList(values);
}
return this;
}
/**
* Adds a named query parameter to the set of query parameters. See
* {@link #setNamedParameters(Map)} for more details on the input requirements.
*
* <p>A named parameter cannot be added after positional parameters have been added.
*/
public Builder addNamedParameter(String name, QueryParameterValue value) {
checkNotNull(value);
if (!this.positionalParameters.isEmpty()) {
throw new IllegalStateException(
"Named parameters can't be combined with positional parameters");
}
namedParameters.put(name, value);
return this;
}
/**
* Sets the query parameters to a set of named query parameters to use in the query.
*
* <p>The set of query parameters must either be all positional or all named parameters. Named
* parameters are denoted using an @ prefix, e.g. @myParam for a parameter named "myParam".
*
* <p>Additionally, useLegacySql must be set to false; query parameters cannot be used with
* legacy SQL.
*
* <p>The values parameter can be set to null to clear out the named parameters so that
* positional parameters can be used instead.
*/
public Builder setNamedParameters(Map<String, QueryParameterValue> values) {
if (values == null || values.isEmpty()) {
namedParameters = Maps.newHashMap();
} else {
if (!this.positionalParameters.isEmpty()) {
throw new IllegalStateException(
"Named parameters can't be combined with positional parameters");
}
this.namedParameters = Maps.newHashMap(values);
}
return this;
}
/**
* Sets the maximum number of rows of data to return per page of results. Setting this flag to a
* small value such as 1000 and then paging through results might improve reliability when the
* query result set is large. In addition to this limit, responses are also limited to 10 MB.
* By default, there is no maximum row count, and only the byte limit applies.
*/
@Deprecated
public Builder pageSize(Long pageSize) {
return setPageSize(pageSize);
}
/**
* Sets the maximum number of rows of data to return per page of results. Setting this flag to a
* small value such as 1000 and then paging through results might improve reliability when the
* query result set is large. In addition to this limit, responses are also limited to 10 MB.
* By default, there is no maximum row count, and only the byte limit applies.
*/
public Builder setPageSize(Long pageSize) {
this.pageSize = pageSize;
return this;
}
/**
* Sets the default dataset to assume for any unqualified table names in the query.
*/
@Deprecated
public Builder defaultDataset(DatasetId defaultDataset) {
return setDefaultDataset(defaultDataset);
}
/**
* Sets the default dataset to assume for any unqualified table names in the query.
*/
public Builder setDefaultDataset(DatasetId defaultDataset) {
this.defaultDataset = defaultDataset;
return this;
}
/**
* Sets the default dataset to assume for any unqualified table names in the query.
*/
@Deprecated
public Builder defaultDataset(String defaultDataset) {
return setDefaultDataset(DatasetId.of(defaultDataset));
}
/**
* Sets the default dataset to assume for any unqualified table names in the query.
*/
public Builder setDefaultDataset(String defaultDataset) {
return setDefaultDataset(DatasetId.of(defaultDataset));
}
/**
* Sets how long to wait for the query to complete, in milliseconds, before the request times
* out and returns. Note that this is only a timeout for the request, not the query. If the
* query takes longer to run than the timeout value, the call returns without any results and
* with the {@link QueryResponse#jobCompleted()} set to {@code false}. If not set, a wait time
* of 10000 milliseconds (10 seconds) is used.
*/
@Deprecated
public Builder maxWaitTime(Long maxWaitTime) {
return setMaxWaitTime(maxWaitTime);
}
/**
* Sets how long to wait for the query to complete, in milliseconds, before the request times
* out and returns. Note that this is only a timeout for the request, not the query. If the
* query takes longer to run than the timeout value, the call returns without any results and
* with the {@link QueryResponse#jobCompleted()} set to {@code false}. If not set, a wait time
* of 10000 milliseconds (10 seconds) is used.
*/
public Builder setMaxWaitTime(Long maxWaitTime) {
this.maxWaitTime = maxWaitTime;
return this;
}
/**
* Sets whether the query has to be dry run or not. If set, the query is not executed. If the
* query is valid statistics are returned on how many bytes would be processed. If the query is
* invalid an error is returned. If not set the query is executed.
*/
@Deprecated
public Builder dryRun(Boolean dryRun) {
return setDryRun(dryRun);
}
/**
* Sets whether the query has to be dry run or not. If set, the query is not executed. If the
* query is valid statistics are returned on how many bytes would be processed. If the query is
* invalid an error is returned. If not set the query is executed.
*/
public Builder setDryRun(Boolean dryRun) {
this.dryRun = dryRun;
return this;
}
/**
* Sets whether to look for the result in the query cache. The query cache is a best-effort
* cache that will be flushed whenever tables in the query are modified. If not specified the
* query cache is used.
*
* @see <a href="https://cloud.google.com/bigquery/querying-data#querycaching">Query Caching</a>
*/
@Deprecated
public Builder useQueryCache(Boolean useQueryCache) {
return setUseQueryCache(useQueryCache);
}
/**
* Sets whether to look for the result in the query cache. The query cache is a best-effort
* cache that will be flushed whenever tables in the query are modified. If not specified the
* query cache is used.
*
* @see <a href="https://cloud.google.com/bigquery/querying-data#querycaching">Query Caching</a>
*/
public Builder setUseQueryCache(Boolean useQueryCache) {
this.useQueryCache = useQueryCache;
return this;
}
/**
* Sets whether to use BigQuery's legacy SQL dialect for this query. If set to {@code false},
* the query will use BigQuery's <a href="https://cloud.google.com/bigquery/sql-reference/">
* Standard SQL</a>. If not set, legacy SQL dialect is used. This property is experimental and
* might be subject to change.
*/
@Deprecated
public Builder useLegacySql(Boolean useLegacySql) {
return setUseLegacySql(useLegacySql);
}
/**
 * Sets whether to use BigQuery's legacy SQL dialect for this query. When {@code false}, the
 * query uses BigQuery's <a href="https://cloud.google.com/bigquery/sql-reference/">Standard
 * SQL</a>. When unset, legacy SQL is used. This property is experimental and may change.
 */
public Builder setUseLegacySql(Boolean useLegacySql) {
  this.useLegacySql = useLegacySql;
  return this;
}
/**
 * Creates a {@code QueryRequest} from the current state of this builder.
 */
public QueryRequest build() {
return new QueryRequest(this);
}
}
/**
 * Creates a {@code QueryRequest} from the given builder.
 *
 * <p>Positional and named parameters are mutually exclusive: at most one of the two collections
 * may be non-empty, otherwise an {@link IllegalArgumentException} is thrown. (The original two
 * symmetric if/checkArgument blocks each implied the other; they are collapsed into a single
 * precondition with an explanatory message.)
 */
private QueryRequest(Builder builder) {
  query = builder.query;
  checkNotNull(builder.positionalParameters);
  checkNotNull(builder.namedParameters);
  // A query may use positional parameters or named parameters, never both at once.
  checkArgument(
      builder.positionalParameters.isEmpty() || builder.namedParameters.isEmpty(),
      "Positional and named query parameters cannot be combined");
  positionalParameters = ImmutableList.copyOf(builder.positionalParameters);
  namedParameters = ImmutableMap.copyOf(builder.namedParameters);
  pageSize = builder.pageSize;
  defaultDataset = builder.defaultDataset;
  maxWaitTime = builder.maxWaitTime;
  dryRun = builder.dryRun;
  useQueryCache = builder.useQueryCache;
  useLegacySql = builder.useLegacySql;
}
/**
 * Returns the BigQuery SQL query to be executed.
 *
 * @deprecated use {@link #getQuery()} instead
 */
@Deprecated
public String query() {
return getQuery();
}
/**
 * Returns the BigQuery SQL query to be executed.
 */
public String getQuery() {
return query;
}
/**
 * Returns the positional query parameters to use for the query; empty when the query uses named
 * parameters (or none at all).
 */
public List<QueryParameterValue> getPositionalParameters() {
  return this.positionalParameters;
}
/**
 * Returns the named query parameters to use for the query; empty when the query uses positional
 * parameters (or none at all).
 */
public Map<String, QueryParameterValue> getNamedParameters() {
  return this.namedParameters;
}
/**
 * Returns the maximum number of rows of data to return per page of results.
 *
 * @deprecated use {@link #getPageSize()} instead
 */
@Deprecated
public Long pageSize() {
  return this.getPageSize();
}
/**
 * Returns the maximum number of rows of data to return per page of results.
 */
public Long getPageSize() {
  return this.pageSize;
}
/**
 * Returns the default dataset assumed for any unqualified table names in the query.
 *
 * @deprecated use {@link #getDefaultDataset()} instead
 */
@Deprecated
public DatasetId defaultDataset() {
  return this.getDefaultDataset();
}
/**
 * Returns the default dataset assumed for any unqualified table names in the query.
 */
public DatasetId getDefaultDataset() {
  return this.defaultDataset;
}
/**
 * Returns how long to wait for the query to complete, in milliseconds, before the request times
 * out and returns. This is a timeout for the request only, not the query: if the query takes
 * longer, the call returns with no results and {@link QueryResponse#jobCompleted()} set to
 * {@code false}; use {@link BigQuery#getQueryResults(JobId, BigQuery.QueryResultsOption...)} to
 * wait for completion. Defaults to 10000 milliseconds (10 seconds) when unset.
 *
 * @deprecated use {@link #getMaxWaitTime()} instead
 */
@Deprecated
public Long maxWaitTime() {
  return this.getMaxWaitTime();
}
/**
 * Returns how long to wait for the query to complete, in milliseconds, before the request times
 * out and returns. This is a timeout for the request only, not the query: if the query takes
 * longer, the call returns with no results and {@link QueryResponse#jobCompleted()} set to
 * {@code false}; use {@link BigQuery#getQueryResults(JobId, BigQuery.QueryResultsOption...)} to
 * wait for completion. Defaults to 10000 milliseconds (10 seconds) when unset.
 */
public Long getMaxWaitTime() {
  return this.maxWaitTime;
}
/**
 * Returns whether this query is a dry run. A dry-run query is never executed: if the query is
 * valid, statistics about how many bytes it would process are returned; if it is invalid, an
 * error is returned. When unset, the query runs normally.
 */
public Boolean dryRun() {
  return this.dryRun;
}
/**
 * Returns whether to look for the result in the query cache. The query cache is a best-effort
 * cache that is flushed whenever tables referenced by the query are modified. When unset, the
 * cache is used.
 *
 * @see <a href="https://cloud.google.com/bigquery/querying-data#querycaching">Query Caching</a>
 */
public Boolean useQueryCache() {
  return this.useQueryCache;
}
/**
 * Returns whether BigQuery's legacy SQL dialect is used for this query. When {@code false}, the
 * query uses BigQuery's <a href="https://cloud.google.com/bigquery/sql-reference/">Standard
 * SQL</a>. When unset, legacy SQL is used. This property is experimental and may change.
 */
public Boolean useLegacySql() {
  return this.useLegacySql;
}
/**
 * Returns a builder pre-populated with all values of this {@code QueryRequest}.
 */
public Builder toBuilder() {
  Builder builder = new Builder();
  builder.setQuery(query);
  builder.setPositionalParameters(positionalParameters);
  builder.setNamedParameters(namedParameters);
  builder.setPageSize(pageSize);
  builder.setDefaultDataset(defaultDataset);
  builder.setMaxWaitTime(maxWaitTime);
  builder.setDryRun(dryRun);
  builder.setUseQueryCache(useQueryCache);
  builder.setUseLegacySql(useLegacySql);
  return builder;
}
@Override
public String toString() {
  // Render every request field for debugging/log output.
  MoreObjects.ToStringHelper helper = MoreObjects.toStringHelper(this);
  helper.add("query", query);
  helper.add("positionalParameters", positionalParameters);
  helper.add("namedParameters", namedParameters);
  helper.add("pageSize", pageSize);
  helper.add("defaultDataset", defaultDataset);
  helper.add("maxWaitTime", maxWaitTime);
  helper.add("dryRun", dryRun);
  helper.add("useQueryCache", useQueryCache);
  helper.add("useLegacySql", useLegacySql);
  return helper.toString();
}
@Override
public int hashCode() {
// Hashes every request field. equals() compares the protobuf representations built from these
// same fields, so equal objects produce equal hashes.
return Objects.hash(
query,
positionalParameters,
namedParameters,
pageSize,
defaultDataset,
maxWaitTime,
dryRun,
useQueryCache,
useLegacySql);
}
@Override
public boolean equals(Object obj) {
  // Two requests are equal when their API (protobuf) representations are equal.
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof QueryRequest)) {
    return false;
  }
  return Objects.equals(toPb(), ((QueryRequest) obj).toPb());
}
/**
 * Returns a copy of this request whose default dataset (if any) is qualified with the given
 * project id. Requests without a default dataset are returned unchanged (as a copy).
 */
QueryRequest setProjectId(String projectId) {
  Builder builder = toBuilder();
  DatasetId dataset = getDefaultDataset();
  if (dataset != null) {
    builder.setDefaultDataset(dataset.setProjectId(projectId));
  }
  return builder.build();
}
// Converts this request to its BigQuery API model representation. Only fields that were
// explicitly set are serialized; unset fields keep the service-side defaults.
com.google.api.services.bigquery.model.QueryRequest toPb() {
com.google.api.services.bigquery.model.QueryRequest queryRequestPb =
new com.google.api.services.bigquery.model.QueryRequest().setQuery(query);
// Positional and named parameters are mutually exclusive (enforced in the constructor);
// whichever collection is non-empty is serialized.
if (!positionalParameters.isEmpty()) {
List<QueryParameter> queryParametersPb
= Lists.transform(positionalParameters, POSITIONAL_PARAMETER_TO_PB_FUNCTION);
queryRequestPb.setQueryParameters(queryParametersPb);
} else if (!namedParameters.isEmpty()) {
List<QueryParameter> queryParametersPb
= Lists.transform(namedParameters.entrySet().asList(), NAMED_PARAMETER_TO_PB_FUNCTION);
queryRequestPb.setQueryParameters(queryParametersPb);
}
if (pageSize != null) {
queryRequestPb.setMaxResults(pageSize);
}
if (defaultDataset != null) {
queryRequestPb.setDefaultDataset(defaultDataset.toPb());
}
if (maxWaitTime != null) {
queryRequestPb.setTimeoutMs(maxWaitTime);
}
if (dryRun != null) {
queryRequestPb.setDryRun(dryRun);
}
if (useQueryCache != null) {
queryRequestPb.setUseQueryCache(useQueryCache);
}
if (useLegacySql != null) {
queryRequestPb.setUseLegacySql(useLegacySql);
}
return queryRequestPb;
}
/**
 * Creates a builder for a {@code QueryRequest} given the BigQuery SQL query to be executed.
 *
 * @deprecated use {@link #newBuilder(String)} instead
 */
@Deprecated
public static Builder builder(String query) {
return newBuilder(query);
}
/**
 * Creates a builder for a {@code QueryRequest} given the BigQuery SQL query to be executed.
 */
public static Builder newBuilder(String query) {
  Builder builder = new Builder();
  return builder.setQuery(query);
}
/**
 * Creates a {@code QueryRequest} given the BigQuery SQL query to be executed.
 */
public static QueryRequest of(String query) {
  return newBuilder(query).build();
}
// Re-creates a QueryRequest from its BigQuery API model representation; only fields present in
// the protobuf are applied to the builder.
static QueryRequest fromPb(com.google.api.services.bigquery.model.QueryRequest queryRequestPb) {
Builder builder = newBuilder(queryRequestPb.getQuery());
if (queryRequestPb.getQueryParameters() != null && !queryRequestPb.getQueryParameters().isEmpty()) {
// The two parameter styles are distinguished by the presence of a name on the first
// parameter: positional parameters are unnamed, named parameters all carry a name.
if (queryRequestPb.getQueryParameters().get(0).getName() == null) {
builder.setPositionalParameters(
Lists.transform(queryRequestPb.getQueryParameters(), POSITIONAL_PARAMETER_FROM_PB_FUNCTION));
} else {
Map<String, QueryParameterValue> values = Maps.newHashMap();
for (QueryParameter queryParameterPb : queryRequestPb.getQueryParameters()) {
checkNotNull(queryParameterPb.getName());
QueryParameterValue value = QueryParameterValue.fromPb(
queryParameterPb.getParameterValue(), queryParameterPb.getParameterType());
values.put(queryParameterPb.getName(), value);
}
builder.setNamedParameters(values);
}
}
if (queryRequestPb.getMaxResults() != null) {
builder.setPageSize(queryRequestPb.getMaxResults());
}
if (queryRequestPb.getDefaultDataset() != null) {
builder.setDefaultDataset(DatasetId.fromPb(queryRequestPb.getDefaultDataset()));
}
if (queryRequestPb.getTimeoutMs() != null) {
builder.setMaxWaitTime(queryRequestPb.getTimeoutMs());
}
if (queryRequestPb.getDryRun() != null) {
builder.setDryRun(queryRequestPb.getDryRun());
}
if (queryRequestPb.getUseQueryCache() != null) {
builder.setUseQueryCache(queryRequestPb.getUseQueryCache());
}
if (queryRequestPb.getUseLegacySql() != null) {
builder.setUseLegacySql(queryRequestPb.getUseLegacySql());
}
return builder.build();
}
/** Converts one named parameter (map entry) to its API model representation. */
static QueryParameter namedParameterToPb(Map.Entry<String, QueryParameterValue> entry) {
  QueryParameterValue value = entry.getValue();
  QueryParameter parameterPb = new QueryParameter();
  parameterPb.setName(entry.getKey());
  parameterPb.setParameterValue(value.toValuePb());
  parameterPb.setParameterType(value.toTypePb());
  return parameterPb;
}
/** Converts one positional parameter value to its (unnamed) API model representation. */
static QueryParameter positionalParameterToPb(QueryParameterValue value) {
  QueryParameter parameterPb = new QueryParameter();
  parameterPb.setParameterValue(value.toValuePb());
  parameterPb.setParameterType(value.toTypePb());
  return parameterPb;
}
/** Converts one API model parameter back to a positional value; it must be unnamed. */
static QueryParameterValue positionalParameterFromPb(QueryParameter parameterPb) {
  // A name here would indicate a named parameter mixed into the positional list.
  checkArgument(parameterPb.getName() == null);
  return QueryParameterValue.fromPb(
      parameterPb.getParameterValue(), parameterPb.getParameterType());
}
// Adapter used by fromPb() to convert API model parameters into positional values.
static final Function<QueryParameter, QueryParameterValue>
POSITIONAL_PARAMETER_FROM_PB_FUNCTION =
new Function<QueryParameter, QueryParameterValue>() {
@Override
public QueryParameterValue apply(QueryParameter pb) {
return positionalParameterFromPb(pb);
}
};
// Adapter used by toPb() to convert positional values into API model parameters.
static final Function<QueryParameterValue, QueryParameter>
POSITIONAL_PARAMETER_TO_PB_FUNCTION =
new Function<QueryParameterValue, QueryParameter>() {
@Override
public QueryParameter apply(
QueryParameterValue value) {
return positionalParameterToPb(value);
}
};
// Adapter used by toPb() to convert named-parameter map entries into API model parameters.
static final Function<Map.Entry<String, QueryParameterValue>, QueryParameter>
NAMED_PARAMETER_TO_PB_FUNCTION =
new Function<Map.Entry<String, QueryParameterValue>, QueryParameter>() {
@Override
public QueryParameter apply(
Map.Entry<String, QueryParameterValue> value) {
return namedParameterToPb(value);
}
};
}
| |
/*
* Copyright (C) 2012-2015, Juan Manuel Barrios <http://juan.cl/>
* All rights reserved.
*
* This file is part of P-VCD. http://p-vcd.org/
* P-VCD is made available under the terms of the BSD 2-Clause License.
*/
package org.p_vcd.process;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.ProcessBuilder.Redirect;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.p_vcd.model.MyUtil;
import org.p_vcd.model.Parameters;
/**
 * Base class for tasks that run external command-line tools asynchronously, streaming their
 * output to a {@link StatusListener}.
 *
 * <p>Thread-safety: {@link #asyncRun} starts a worker thread; {@link #sendKillProcess},
 * {@link #waitProcess} and {@link #hasEnded} may be called from other threads, hence the
 * volatile fields below.
 */
public abstract class ProcessBase {
  // Receives output lines and progress updates; replaced by a no-op listener when null.
  private StatusListener status;
  // Worker thread running runProcess(). Volatile: written by asyncRun(), read by the kill
  // thread and by hasEnded()/waitProcess().
  private volatile Thread processingThread;
  // True while an external command is executing. Volatile: written by the worker thread,
  // polled by the kill thread.
  private volatile boolean processRunning;
  // The external command currently executing, if any. Volatile for the same reason.
  private volatile Process currentSystemProcess;

  /**
   * Runs {@link #runProcess} on a new thread. Success or failure is reported through
   * {@code status.callbackOnEnd}. A null {@code status} is replaced by a no-op listener so the
   * rest of the class never needs null checks.
   */
  public void asyncRun(StatusListener status) {
    if (status != null) {
      this.status = status;
    } else {
      // Null-object pattern: a listener that silently ignores every event.
      this.status = new StatusListener() {
        @Override
        public void appendOutputLine(String line) {
        }

        @Override
        public void setPctProgress(String stepName, double pct) {
        }

        @Override
        public void callbackOnEnd(ProcessBase process, boolean wasSuccessful) {
        }
      };
    }
    this.processingThread = new Thread() {
      @Override
      public void run() {
        try {
          ProcessBase.this.runProcess(ProcessBase.this.status);
          ProcessBase.this.status.callbackOnEnd(ProcessBase.this, true);
        } catch (Throwable tr) {
          // Report any failure through the listener so UIs can show it.
          tr.printStackTrace();
          ProcessBase.this.status.appendOutputLine("\n\n" + tr.toString());
          ProcessBase.this.status.setPctProgress("ERROR: " + tr.toString(), 1);
          ProcessBase.this.status.callbackOnEnd(ProcessBase.this, false);
        }
      }
    };
    this.processingThread.start();
  }

  /**
   * Requests termination from a background daemon thread: repeatedly destroys the external
   * command while it runs, then interrupts the worker thread until it dies.
   */
  public void sendKillProcess() {
    // BUGFIX: the original condition was 'processingThread != null || !isAlive()', which
    // returned immediately whenever the thread existed (so kill never did anything) and threw
    // an NPE when it did not. Nothing to kill when never started or already finished.
    if (this.processingThread == null || !this.processingThread.isAlive())
      return;
    Thread kthread = new Thread() {
      @Override
      public void run() {
        // Phase 1: destroy the external command until the worker reports it stopped.
        while (ProcessBase.this.processRunning) {
          System.out.println("\ntrying to kill process...\n");
          ProcessBase.this.status.appendOutputLine("\ntrying to kill process...\n");
          System.out.println(ProcessBase.this.currentSystemProcess.getClass());
          ProcessBase.this.currentSystemProcess.destroy();
          try {
            Thread.sleep(500);
          } catch (InterruptedException e) {
            e.printStackTrace();
          }
        }
        // Phase 2: interrupt the worker thread until it terminates.
        while (ProcessBase.this.processingThread.isAlive()) {
          System.out.println("\ninterrupting process...\n");
          ProcessBase.this.status.appendOutputLine("\ninterrupting process...\n");
          ProcessBase.this.processingThread.interrupt();
          try {
            Thread.sleep(500);
          } catch (InterruptedException e) {
            e.printStackTrace();
          }
        }
      }
    };
    kthread.setDaemon(true);
    kthread.start();
  }

  /** Blocks until the worker thread finishes; a no-op if the task was never started. */
  public void waitProcess() {
    try {
      if (this.processingThread != null) {
        this.processingThread.join();
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
  }

  /** Returns true when the task was started and its worker thread has terminated. */
  public synchronized boolean hasEnded() {
    // Guard against calls before asyncRun(): a task never started has not ended.
    return this.processingThread != null && !this.processingThread.isAlive();
  }

  /** Implemented by subclasses with the actual work; runs on the worker thread. */
  protected abstract void runProcess(StatusListener status) throws Exception;

  /** Runs the VLC executable in the user data directory. */
  protected void runVlc(ProcessArguments arguments) throws Exception {
    runCommandInternal(Parameters.get().getVlcExePath(), arguments, Parameters.get().getUserDataPath(), null, null);
  }

  /** Runs wget in the given directory, capturing stderr when a buffer is supplied. */
  protected void runWget(ProcessArguments arguments, File workingDir, StringBuffer sbSaveStderr) throws Exception {
    runCommandInternal(Parameters.get().getWgetExePath(), arguments, workingDir, null, sbSaveStderr);
  }

  /** Runs youtube-dl in the given directory. */
  protected void runYoutubeDl(ProcessArguments arguments, File workingDir) throws Exception {
    runCommandInternal(Parameters.get().getYoutubedlExePath(), arguments, workingDir, null, null);
  }

  protected void runPvcdDb(ProcessArguments arguments) throws Exception {
    runPvcd("pvcd_db", arguments);
  }

  protected void runPvcdSearch(ProcessArguments arguments) throws Exception {
    runPvcd("pvcd_search", arguments);
  }

  protected void runPvcdMerge(ProcessArguments arguments) throws Exception {
    runPvcd("pvcd_mergeLocalToGlobal", arguments);
  }

  protected void runPvcdDetect(ProcessArguments arguments) throws Exception {
    runPvcd("pvcd_detect", arguments);
  }

  /** Resolves a P-VCD binary (optionally multi-core) and runs it in the user data directory. */
  private void runPvcd(String command, ProcessArguments arguments) throws Exception {
    if (Parameters.get().getPvcdMaxCores() > 1)
      arguments.add("-num_cores", Parameters.get().getPvcdMaxCores());
    String binfile = command + Parameters.get().getSystemExeExtension();
    if (Parameters.get().getPvcdPath().length() > 0)
      binfile = new File(Parameters.get().getPvcdPath(), binfile).toString();
    runCommandInternal(binfile, arguments, Parameters.get().getUserDataPath(), null, null);
  }

  /**
   * Runs {@code command} with the given arguments, streaming stdout/stderr to the listener
   * (and optionally into the provided buffers). Throws when the exit code is non-zero.
   */
  private void runCommandInternal(String command, ProcessArguments commandArgs, File workingDir,
      StringBuffer sbSaveStdout, StringBuffer sbSaveStderr) throws Exception {
    commandArgs.insertFirst(command);
    FileUtils.forceMkdir(workingDir);
    // Log the full command line before launching.
    StringBuffer sbLog = new StringBuffer();
    sbLog.append("\n").append(MyUtil.getFormateDate());
    commandArgs.addToLog(sbLog);
    System.out.println(sbLog.toString());
    this.status.appendOutputLine(sbLog.toString());
    ProcessBuilder pb = new ProcessBuilder(commandArgs.getCommands());
    pb.directory(workingDir);
    pb.redirectInput(Redirect.INHERIT);
    pb.redirectOutput(Redirect.PIPE);
    pb.redirectError(Redirect.PIPE);
    long init = System.currentTimeMillis();
    this.currentSystemProcess = pb.start();
    // Drain stdout and stderr on dedicated threads to avoid pipe-buffer deadlocks.
    PrintThreadWithStatus thStdout = new PrintThreadWithStatus(this.currentSystemProcess.getInputStream(), command,
        this.status, sbSaveStdout);
    PrintThreadWithStatus thStderr = new PrintThreadWithStatus(this.currentSystemProcess.getErrorStream(), command,
        this.status, sbSaveStderr);
    // The child gets no stdin.
    this.currentSystemProcess.getOutputStream().close();
    thStdout.start();
    thStderr.start();
    int ret = -1;
    try {
      this.processRunning = true;
      ret = this.currentSystemProcess.waitFor();
    } catch (InterruptedException ex) {
      ex.printStackTrace();
    } finally {
      this.processRunning = false;
    }
    // Wait for both drain threads so all output is delivered before reporting completion.
    try {
      thStderr.join();
    } catch (InterruptedException ex) {
      ex.printStackTrace();
    }
    try {
      thStdout.join();
    } catch (InterruptedException ex) {
      ex.printStackTrace();
    }
    long milis = System.currentTimeMillis() - init;
    if (ret != 0) {
      throw new Exception("command error code=" + ret + " (" + milis + " ms)");
    }
    sbLog = new StringBuffer();
    sbLog.append(MyUtil.getFormateDate()).append("command ").append(command).append(" ok (").append(milis)
        .append(" ms)");
    System.out.println(sbLog.toString());
    this.status.appendOutputLine(sbLog.toString());
  }
}
/**
 * Thread that drains an {@link InputStream} line by line, echoing each line to stdout and the
 * {@link StatusListener}, and optionally accumulating it into a buffer.
 */
class PrintThreadWithStatus extends Thread {
  private final InputStream is;
  // Display prefix derived from the command name, e.g. "[pvcd_db] ".
  private final String commandName;
  private final StatusListener status;
  // Optional capture buffer; may be null when the caller does not need the output.
  private final StringBuffer sbOutput;

  public PrintThreadWithStatus(InputStream is, String commandName, StatusListener status, StringBuffer sbOutput) {
    this.is = is;
    // Strip a trailing ".exe"/".EXE" so the prefix reads nicely on Windows.
    String s = commandName;
    if (s.endsWith(".exe") || s.endsWith(".EXE"))
      s = s.substring(0, s.length() - 4);
    this.commandName = "[" + s + "] ";
    this.status = status;
    this.sbOutput = sbOutput;
  }

  @Override
  public void run() {
    BufferedReader br = null;
    try {
      br = new BufferedReader(new InputStreamReader(is, "UTF-8"));
      String line;
      while ((line = br.readLine()) != null) {
        System.out.println(commandName + line);
        this.status.appendOutputLine(line);
        if (this.sbOutput != null)
          this.sbOutput.append(line).append("\n");
      }
    } catch (Exception e) {
      e.printStackTrace();
    } finally {
      IOUtils.closeQuietly(br);
      IOUtils.closeQuietly(is);
    }
  }

  /** Returns the captured output, or an empty string when capture was not requested. */
  public String getSavedOutput() {
    // BUGFIX: the original dereferenced sbOutput unconditionally and NPE'd when the
    // constructor was given a null buffer.
    return (sbOutput == null) ? "" : sbOutput.toString();
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hive.hcatalog.hbase;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.ql.DriverFactory;
import org.apache.hadoop.hive.ql.IDriver;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hive.hcatalog.common.HCatUtil;
import org.apache.hive.hcatalog.mapreduce.HCatBaseTest;
import org.apache.pig.PigServer;
import org.apache.pig.data.DataType;
import org.apache.pig.data.Tuple;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
import org.junit.Test;
/**
 * End-to-end tests for reading/writing HBase-backed Hive tables through Pig's HCatLoader and
 * HCatStorer.
 *
 * <p>Fixes applied: {@code assertEquals} calls had their (expected, actual) arguments reversed
 * in two places, producing misleading failure messages; a loop comment claimed 2 rows where the
 * filter returns 4; redundant {@code .toString()} on String expressions removed.
 */
public class TestPigHBaseStorageHandler extends SkeletonHBaseTest {

  private static HiveConf hcatConf;
  private static IDriver driver;

  // Column family and qualifiers used by every generated test row.
  private static final byte[] FAMILY = Bytes.toBytes("testFamily");
  private static final byte[] QUALIFIER1 = Bytes.toBytes("testQualifier1");
  private static final byte[] QUALIFIER2 = Bytes.toBytes("testQualifier2");

  /** Builds the Hive/HBase configuration and the Hive driver; called by every test. */
  public void Initialize() throws Exception {
    hcatConf = new HiveConf(this.getClass());
    URI fsuri = getFileSystem().getUri();
    Path whPath = new Path(fsuri.getScheme(), fsuri.getAuthority(),
        getTestDir());
    hcatConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
    hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
    hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
    hcatConf.set(ConfVars.METASTOREWAREHOUSE.varname, whPath.toString());
    hcatConf
        .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
            "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
    // Propagate the mini-cluster's HBase settings into the Hive configuration.
    for (Map.Entry<String, String> el : getHbaseConf()) {
      if (el.getKey().startsWith("hbase.")) {
        hcatConf.set(el.getKey(), el.getValue());
      }
    }
    driver = DriverFactory.newDriver(hcatConf);
    SessionState.start(new CliSessionState(hcatConf));
  }

  /** Writes the generated test rows into the named HBase table. */
  private void populateHBaseTable(String tName, Connection connection) throws IOException {
    List<Put> myPuts = generatePuts(tName);
    Table table = null;
    try {
      table = connection.getTable(TableName.valueOf(tName));
      table.put(myPuts);
    } finally {
      if (table != null) {
        table.close();
      }
    }
  }

  /** Builds 10 rows keyed 1..10 with two string columns per row. */
  private List<Put> generatePuts(String tableName) throws IOException {
    List<Put> myPuts;
    myPuts = new ArrayList<Put>();
    for (int i = 1; i <= 10; i++) {
      Put put = new Put(Bytes.toBytes(i));
      put.addColumn(FAMILY, QUALIFIER1, 1, Bytes.toBytes("textA-" + i));
      put.addColumn(FAMILY, QUALIFIER2, 1, Bytes.toBytes("textB-" + i));
      myPuts.add(put);
    }
    return myPuts;
  }

  /** Writes 10 tab-separated rows (int, float, string) for Pig to load. */
  public static void createTestDataFile(String filename) throws IOException {
    FileWriter writer = null;
    int LOOP_SIZE = 10;
    float f = -100.1f;
    try {
      File file = new File(filename);
      file.deleteOnExit();
      writer = new FileWriter(file);
      for (int i = 1; i <= LOOP_SIZE; i++) {
        writer.write(i + "\t" + (f + i) + "\t" + "textB-" + i + "\n");
      }
    } finally {
      if (writer != null) {
        writer.close();
      }
    }
  }

  @Test
  public void testPigHBaseSchema() throws Exception {
    Initialize();
    String tableName = newTableName("MyTable");
    String databaseName = newTableName("MyDatabase");
    //Table name will be lower case unless specified by hbase.table.name property
    String hbaseTableName = "testTable";
    String db_dir = HCatUtil.makePathASafeFileName(getTestDir() + "/hbasedb");
    String dbQuery = "CREATE DATABASE IF NOT EXISTS " + databaseName + " LOCATION '"
        + db_dir + "'";
    String deleteQuery = "DROP TABLE " + databaseName + "." + tableName;
    String tableQuery = "CREATE TABLE " + databaseName + "." + tableName
        + "(key float, testqualifier1 string, testqualifier2 int) STORED BY " +
        "'org.apache.hadoop.hive.hbase.HBaseStorageHandler'"
        + " WITH SERDEPROPERTIES ('hbase.columns.mapping'=':key,testFamily:testQualifier1,testFamily:testQualifier2')"
        + " TBLPROPERTIES ('hbase.table.name'='" + hbaseTableName + "')";
    driver.run(deleteQuery);
    driver.run(dbQuery);
    driver.run(tableQuery);
    // Creating the Hive table must have created the backing HBase table.
    Connection connection = null;
    Admin hAdmin = null;
    boolean doesTableExist = false;
    try {
      connection = ConnectionFactory.createConnection(getHbaseConf());
      hAdmin = connection.getAdmin();
      doesTableExist = hAdmin.tableExists(TableName.valueOf(hbaseTableName));
    } finally {
      if (hAdmin != null) {
        hAdmin.close();
      }
      if (connection != null) {
        connection.close();
      }
    }
    assertTrue(doesTableExist);
    // The schema seen through HCatLoader must match the Hive table definition.
    PigServer server = HCatBaseTest.createPigServer(false, hcatConf.getAllProperties());
    server.registerQuery("A = load '" + databaseName + "." + tableName + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
    Schema dumpedASchema = server.dumpSchema("A");
    List<FieldSchema> fields = dumpedASchema.getFields();
    assertEquals(3, fields.size());
    assertEquals(DataType.FLOAT, fields.get(0).type);
    assertEquals("key", fields.get(0).alias.toLowerCase());
    assertEquals(DataType.CHARARRAY, fields.get(1).type);
    assertEquals("testQualifier1".toLowerCase(), fields.get(1).alias.toLowerCase());
    assertEquals(DataType.INTEGER, fields.get(2).type);
    assertEquals("testQualifier2".toLowerCase(), fields.get(2).alias.toLowerCase());
  }

  @Test
  public void testPigFilterProjection() throws Exception {
    Initialize();
    String tableName = newTableName("MyTable");
    String databaseName = newTableName("MyDatabase");
    //Table name will be lower case unless specified by hbase.table.name property
    String hbaseTableName = (databaseName + "." + tableName).toLowerCase();
    String db_dir = HCatUtil.makePathASafeFileName(getTestDir() + "/hbasedb");
    String dbQuery = "CREATE DATABASE IF NOT EXISTS " + databaseName + " LOCATION '"
        + db_dir + "'";
    String deleteQuery = "DROP TABLE " + databaseName + "." + tableName;
    String tableQuery = "CREATE TABLE " + databaseName + "." + tableName
        + "(key int, testqualifier1 string, testqualifier2 string) STORED BY " +
        "'org.apache.hadoop.hive.hbase.HBaseStorageHandler'" +
        " WITH SERDEPROPERTIES ('hbase.columns.mapping'=':key,testFamily:testQualifier1,testFamily:testQualifier2')" +
        " TBLPROPERTIES ('hbase.table.default.storage.type'='binary')";
    driver.run(deleteQuery);
    driver.run(dbQuery);
    driver.run(tableQuery);
    Connection connection = null;
    Admin hAdmin = null;
    Table table = null;
    ResultScanner scanner = null;
    boolean doesTableExist = false;
    try {
      connection = ConnectionFactory.createConnection(getHbaseConf());
      hAdmin = connection.getAdmin();
      doesTableExist = hAdmin.tableExists(TableName.valueOf(hbaseTableName));
      assertTrue(doesTableExist);
      populateHBaseTable(hbaseTableName, connection);
      table = connection.getTable(TableName.valueOf(hbaseTableName));
      Scan scan = new Scan();
      scan.addFamily(Bytes.toBytes("testFamily"));
      scanner = table.getScanner(scan);
    } finally {
      if (scanner != null) {
        scanner.close();
      }
      if (table != null) {
        table.close();
      }
      if (hAdmin != null) {
        hAdmin.close();
      }
      if (connection != null) {
        connection.close();
      }
    }
    int index = 1;
    PigServer server = HCatBaseTest.createPigServer(false, hcatConf.getAllProperties());
    server.registerQuery("A = load '" + databaseName + "." + tableName + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
    server.registerQuery("B = filter A by key < 5;");
    server.registerQuery("C = foreach B generate key,testqualifier2;");
    Iterator<Tuple> itr = server.openIterator("C");
    // Verify the filter returns the 4 rows with key < 5, each projected to 2 columns whose
    // contents match the generated data. (Original comment wrongly said 2 rows.)
    while (itr.hasNext()) {
      Tuple t = itr.next();
      assertTrue(t.size() == 2);
      assertTrue(t.get(0).getClass() == Integer.class);
      assertEquals(index, t.get(0));
      assertTrue(t.get(1).getClass() == String.class);
      assertEquals("textB-" + index, t.get(1));
      index++;
    }
    // BUGFIX: expected value goes first in assertEquals (was assertEquals(index-1, 4)).
    assertEquals(4, index - 1);
  }

  @Test
  public void testPigPopulation() throws Exception {
    Initialize();
    String tableName = newTableName("MyTable");
    String databaseName = newTableName("MyDatabase");
    //Table name will be lower case unless specified by hbase.table.name property
    String hbaseTableName = (databaseName + "." + tableName).toLowerCase();
    String db_dir = HCatUtil.makePathASafeFileName(getTestDir() + "/hbasedb");
    String POPTXT_FILE_NAME = db_dir + "testfile.txt";
    float f = -100.1f;
    String dbQuery = "CREATE DATABASE IF NOT EXISTS " + databaseName + " LOCATION '"
        + db_dir + "'";
    String deleteQuery = "DROP TABLE " + databaseName + "." + tableName;
    String tableQuery = "CREATE TABLE " + databaseName + "." + tableName
        + "(key int, testqualifier1 float, testqualifier2 string) STORED BY " +
        "'org.apache.hadoop.hive.hbase.HBaseStorageHandler'"
        + " WITH SERDEPROPERTIES ('hbase.columns.mapping'=':key,testFamily:testQualifier1,testFamily:testQualifier2')"
        + " TBLPROPERTIES ('hbase.table.default.storage.type'='binary')";
    String selectQuery = "SELECT * from " + databaseName.toLowerCase() + "." + tableName.toLowerCase();
    driver.run(deleteQuery);
    driver.run(dbQuery);
    driver.run(tableQuery);
    Connection connection = null;
    Admin hAdmin = null;
    Table table = null;
    ResultScanner scanner = null;
    boolean doesTableExist = false;
    try {
      connection = ConnectionFactory.createConnection(getHbaseConf());
      hAdmin = connection.getAdmin();
      doesTableExist = hAdmin.tableExists(TableName.valueOf(hbaseTableName));
      assertTrue(doesTableExist);
      createTestDataFile(POPTXT_FILE_NAME);
      // Store rows with 2 < key < 8 (i.e. 3..7) through HCatStorer, then read them back.
      PigServer server = HCatBaseTest.createPigServer(false, hcatConf.getAllProperties());
      server.registerQuery("A = load '" + POPTXT_FILE_NAME + "' using PigStorage() as (key:int, testqualifier1:float, testqualifier2:chararray);");
      server.registerQuery("B = filter A by (key > 2) AND (key < 8) ;");
      server.registerQuery("store B into '" + databaseName.toLowerCase() + "." + tableName.toLowerCase() + "' using org.apache.hive.hcatalog.pig.HCatStorer();");
      server.registerQuery("C = load '" + databaseName.toLowerCase() + "." + tableName.toLowerCase() + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
      // Schema should be same
      Schema dumpedBSchema = server.dumpSchema("C");
      List<FieldSchema> fields = dumpedBSchema.getFields();
      assertEquals(3, fields.size());
      assertEquals(DataType.INTEGER, fields.get(0).type);
      assertEquals("key", fields.get(0).alias.toLowerCase());
      assertEquals(DataType.FLOAT, fields.get(1).type);
      assertEquals("testQualifier1".toLowerCase(), fields.get(1).alias.toLowerCase());
      assertEquals(DataType.CHARARRAY, fields.get(2).type);
      assertEquals("testQualifier2".toLowerCase(), fields.get(2).alias.toLowerCase());
      //Query the hbase table and check the key is valid and only 5 are present
      table = connection.getTable(TableName.valueOf(hbaseTableName));
      Scan scan = new Scan();
      scan.addFamily(Bytes.toBytes("testFamily"));
      byte[] familyNameBytes = Bytes.toBytes("testFamily");
      scanner = table.getScanner(scan);
      int index = 3;
      int count = 0;
      for (Result result : scanner) {
        //key is correct
        assertEquals(index, Bytes.toInt(result.getRow()));
        //first column exists
        assertTrue(result.containsColumn(familyNameBytes, Bytes.toBytes("testQualifier1")));
        //value is correct
        assertEquals((index + f), Bytes.toFloat(result.getValue(familyNameBytes, Bytes.toBytes("testQualifier1"))), 0);
        //second column exists
        assertTrue(result.containsColumn(familyNameBytes, Bytes.toBytes("testQualifier2")));
        //value is correct
        assertEquals("textB-" + index, Bytes.toString(result.getValue(familyNameBytes, Bytes.toBytes("testQualifier2"))));
        index++;
        count++;
      }
      // BUGFIX: expected value goes first in assertEquals (was assertEquals(count, 5)).
      // 5 rows should be returned
      assertEquals(5, count);
    } finally {
      if (scanner != null) {
        scanner.close();
      }
      if (table != null) {
        table.close();
      }
      if (hAdmin != null) {
        hAdmin.close();
      }
      if (connection != null) {
        connection.close();
      }
    }
    //Check if hive returns results correctly
    driver.run(selectQuery);
    ArrayList<String> result = new ArrayList<String>();
    driver.getResults(result);
    //Query using the hive command line
    assertEquals(5, result.size());
    Iterator<String> itr = result.iterator();
    for (int i = 3; i <= 7; i++) {
      String[] tokens = itr.next().split("\\s+");
      assertEquals(i, Integer.parseInt(tokens[0]));
      assertEquals(i + f, Float.parseFloat(tokens[1]), 0);
      assertEquals("textB-" + i, tokens[2]);
    }
    //delete the table from the database
    driver.run(deleteQuery);
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.raptor.integration;
import com.facebook.presto.Session;
import com.facebook.presto.common.type.ArrayType;
import com.facebook.presto.testing.MaterializedResult;
import com.facebook.presto.testing.MaterializedRow;
import com.facebook.presto.testing.QueryRunner;
import com.facebook.presto.tests.AbstractTestIntegrationSmokeTest;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.SetMultimap;
import org.intellij.lang.annotations.Language;
import org.testng.annotations.Test;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
import java.util.StringJoiner;
import java.util.UUID;
import java.util.stream.IntStream;
import static com.facebook.airlift.testing.Assertions.assertGreaterThan;
import static com.facebook.airlift.testing.Assertions.assertGreaterThanOrEqual;
import static com.facebook.airlift.testing.Assertions.assertInstanceOf;
import static com.facebook.airlift.testing.Assertions.assertLessThan;
import static com.facebook.presto.SystemSessionProperties.COLOCATED_JOIN;
import static com.facebook.presto.common.type.BigintType.BIGINT;
import static com.facebook.presto.common.type.BooleanType.BOOLEAN;
import static com.facebook.presto.common.type.DateType.DATE;
import static com.facebook.presto.common.type.IntegerType.INTEGER;
import static com.facebook.presto.common.type.VarcharType.VARCHAR;
import static com.facebook.presto.raptor.RaptorColumnHandle.SHARD_UUID_COLUMN_TYPE;
import static com.facebook.presto.raptor.RaptorQueryRunner.createRaptorQueryRunner;
import static com.google.common.collect.ImmutableMap.toImmutableMap;
import static com.google.common.collect.Iterables.getOnlyElement;
import static java.lang.String.format;
import static java.util.Arrays.asList;
import static java.util.function.Function.identity;
import static java.util.stream.Collectors.toSet;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotEquals;
import static org.testng.Assert.assertNotNull;
public class TestRaptorIntegrationSmokeTest
extends AbstractTestIntegrationSmokeTest
{
    @Override
    protected QueryRunner createQueryRunner()
            throws Exception
    {
        // Builds the shared Raptor query runner for all tests in this class.
        // NOTE(review): the three boolean flags are positional arguments to
        // createRaptorQueryRunner and their meaning is not visible here --
        // confirm against RaptorQueryRunner before changing them.
        return createRaptorQueryRunner(
                ImmutableMap.of(),
                true,
                false,
                false,
                // Run the optimized ORC writer with validation enabled.
                ImmutableMap.of("storage.orc.optimized-writer-stage", "ENABLED_AND_VALIDATED"));
    }
    /**
     * Round-trips an array column: CTAS from an array literal, read it back
     * via cardinality(), then drop the table.
     */
    @Test
    public void testCreateArrayTable()
    {
        assertUpdate("CREATE TABLE array_test AS SELECT ARRAY [1, 2, 3] AS c", 1);
        assertQuery("SELECT cardinality(c) FROM array_test", "SELECT 3");
        assertUpdate("DROP TABLE array_test");
    }
    /**
     * Row types are not supported by Raptor: both CTAS and plain CREATE TABLE
     * with a row-typed column must fail with "Type not supported".
     */
    @Test
    public void testCreateTableUnsupportedType()
    {
        assertQueryFails("CREATE TABLE rowtype_test AS SELECT row(1) AS c", "Type not supported: row\\(integer\\)");
        assertQueryFails("CREATE TABLE rowtype_test(row_type_field row(s varchar))", "Type not supported: row\\(s varchar\\)");
    }
    /**
     * Round-trips a map column, including lookup of a key mapped to NULL.
     */
    @Test
    public void testMapTable()
    {
        assertUpdate("CREATE TABLE map_test AS SELECT MAP(ARRAY [1, 2, 3], ARRAY ['hi', 'bye', NULL]) AS c", 1);
        assertQuery("SELECT c[1] FROM map_test", "SELECT 'hi'");
        assertQuery("SELECT c[3] FROM map_test", "SELECT NULL");
        assertUpdate("DROP TABLE map_test");
    }
    /**
     * Creating a table whose name collides with an existing view must fail,
     * regardless of identifier case and for both CREATE TABLE and CTAS.
     */
    @Test
    public void testCreateTableViewAlreadyExists()
    {
        assertUpdate("CREATE VIEW view_already_exists AS SELECT 1 a");
        assertQueryFails("CREATE TABLE view_already_exists(a integer)", "View already exists: tpch.view_already_exists");
        assertQueryFails("CREATE TABLE View_Already_Exists(a integer)", "View already exists: tpch.view_already_exists");
        assertQueryFails("CREATE TABLE view_already_exists AS SELECT 1 a", "View already exists: tpch.view_already_exists");
        assertQueryFails("CREATE TABLE View_Already_Exists AS SELECT 1 a", "View already exists: tpch.view_already_exists");
        assertUpdate("DROP VIEW view_already_exists");
    }
    /**
     * Creating a view whose name collides with an existing table must fail,
     * regardless of identifier case and even with CREATE OR REPLACE.
     */
    @Test
    public void testCreateViewTableAlreadyExists()
    {
        assertUpdate("CREATE TABLE table_already_exists (id integer)");
        assertQueryFails("CREATE VIEW table_already_exists AS SELECT 1 a", "Table already exists: tpch.table_already_exists");
        assertQueryFails("CREATE VIEW Table_Already_Exists AS SELECT 1 a", "Table already exists: tpch.table_already_exists");
        assertQueryFails("CREATE OR REPLACE VIEW table_already_exists AS SELECT 1 a", "Table already exists: tpch.table_already_exists");
        assertQueryFails("CREATE OR REPLACE VIEW Table_Already_Exists AS SELECT 1 a", "Table already exists: tpch.table_already_exists");
        assertUpdate("DROP TABLE table_already_exists");
    }
    /**
     * Inserts values (and NULLs) into both a DECIMAL(5,2) and a DECIMAL(25,20)
     * column and reads them back unchanged.
     */
    @Test
    public void testInsertSelectDecimal()
    {
        assertUpdate("CREATE TABLE test_decimal(short_decimal DECIMAL(5,2), long_decimal DECIMAL(25,20))");
        assertUpdate("INSERT INTO test_decimal VALUES(DECIMAL '123.45', DECIMAL '12345.12345678901234567890')", "VALUES(1)");
        assertUpdate("INSERT INTO test_decimal VALUES(NULL, NULL)", "VALUES(1)");
        assertQuery("SELECT * FROM test_decimal", "VALUES (123.45, 12345.12345678901234567890), (NULL, NULL)");
        assertUpdate("DROP TABLE test_decimal");
    }
@Test
public void testShardUuidHiddenColumn()
{
assertUpdate("CREATE TABLE test_shard_uuid AS SELECT orderdate, orderkey FROM orders", "SELECT count(*) FROM orders");
MaterializedResult actualResults = computeActual("SELECT *, \"$shard_uuid\" FROM test_shard_uuid");
assertEquals(actualResults.getTypes(), ImmutableList.of(DATE, BIGINT, SHARD_UUID_COLUMN_TYPE));
UUID arbitraryUuid = null;
for (MaterializedRow row : actualResults.getMaterializedRows()) {
Object uuid = row.getField(2);
assertInstanceOf(uuid, String.class);
arbitraryUuid = UUID.fromString((String) uuid);
}
assertNotNull(arbitraryUuid);
actualResults = computeActual(format("SELECT * FROM test_shard_uuid where \"$shard_uuid\" = '%s'", arbitraryUuid));
assertNotEquals(actualResults.getMaterializedRows().size(), 0);
actualResults = computeActual("SELECT * FROM test_shard_uuid where \"$shard_uuid\" = 'foo'");
assertEquals(actualResults.getMaterializedRows().size(), 0);
}
@Test
public void testBucketNumberHiddenColumn()
{
assertUpdate("" +
"CREATE TABLE test_bucket_number " +
"WITH (bucket_count = 50, bucketed_on = ARRAY ['orderkey']) " +
"AS SELECT * FROM orders",
"SELECT count(*) FROM orders");
MaterializedResult actualResults = computeActual("SELECT DISTINCT \"$bucket_number\" FROM test_bucket_number");
assertEquals(actualResults.getTypes(), ImmutableList.of(INTEGER));
Set<Object> actual = actualResults.getMaterializedRows().stream()
.map(row -> row.getField(0))
.collect(toSet());
assertEquals(actual, IntStream.range(0, 50).boxed().collect(toSet()));
}
    /**
     * {@code $bucket_number} must not resolve on a table that is not bucketed;
     * the expected failure is declared on the annotation.
     */
    @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = ".*Column '\\$bucket_number' cannot be resolved")
    public void testNoBucketNumberHiddenColumn()
    {
        assertUpdate("CREATE TABLE test_no_bucket_number (test bigint)");
        computeActual("SELECT DISTINCT \"$bucket_number\" FROM test_no_bucket_number");
    }
@Test
public void testShardingByTemporalDateColumn()
{
// Make sure we have at least 2 different orderdate.
assertEquals(computeActual("SELECT count(DISTINCT orderdate) >= 2 FROM orders WHERE orderdate < date '1992-02-08'").getOnlyValue(), true);
assertUpdate("CREATE TABLE test_shard_temporal_date " +
"WITH (temporal_column = 'orderdate') AS " +
"SELECT orderdate, orderkey " +
"FROM orders " +
"WHERE orderdate < date '1992-02-08'",
"SELECT count(*) " +
"FROM orders " +
"WHERE orderdate < date '1992-02-08'");
MaterializedResult results = computeActual("SELECT orderdate, \"$shard_uuid\" FROM test_shard_temporal_date");
// Each shard will only contain data of one date.
SetMultimap<String, LocalDate> shardDateMap = HashMultimap.create();
for (MaterializedRow row : results.getMaterializedRows()) {
shardDateMap.put((String) row.getField(1), (LocalDate) row.getField(0));
}
for (Collection<LocalDate> dates : shardDateMap.asMap().values()) {
assertEquals(dates.size(), 1);
}
// Make sure we have all the rows
assertQuery("SELECT orderdate, orderkey FROM test_shard_temporal_date",
"SELECT orderdate, orderkey FROM orders WHERE orderdate < date '1992-02-08'");
}
@Test
public void testShardingByTemporalDateColumnBucketed()
{
String tableName = "test_shard_temporal_date_bucketed";
prepareTemporalShardedAndBucketedTable(tableName);
MaterializedResult results = computeActual("SELECT orderdate, \"$shard_uuid\" FROM " + tableName);
// Each shard will only contain data of one date.
SetMultimap<String, LocalDate> shardDateMap = HashMultimap.create();
for (MaterializedRow row : results.getMaterializedRows()) {
shardDateMap.put((String) row.getField(1), (LocalDate) row.getField(0));
}
for (Collection<LocalDate> dates : shardDateMap.asMap().values()) {
assertEquals(dates.size(), 1);
}
// Make sure we have all the rows
assertQuery("SELECT orderdate, orderkey FROM " + tableName,
"SELECT orderdate, orderkey FROM orders WHERE orderdate < date '1992-02-08'");
}
    /**
     * Runs a self-join on a bucketed table with the colocated-join session
     * property enabled, covering the normal case plus empty probe and empty
     * build sides, and checks results against plain (non-colocated) SQL.
     */
    @Test
    public void testColocatedJoin()
    {
        String tableName = "test_colocated_join";
        prepareTemporalShardedAndBucketedTable(tableName);

        Session colocated = Session.builder(getSession())
                .setSystemProperty(COLOCATED_JOIN, "true")
                .build();

        // both sides populated
        assertQuery(
                colocated,
                format("SELECT t1.orderkey " +
                                "FROM %s t1 JOIN %s t2 " +
                                "ON t1.orderkey = t2.orderkey",
                        tableName,
                        tableName),
                "SELECT t1.orderkey FROM " +
                        "(SELECT * FROM orders WHERE orderdate < date '1992-02-08') t1 " +
                        "JOIN " +
                        "(SELECT * FROM orders WHERE orderdate < date '1992-02-08') t2 " +
                        " ON t1.orderkey = t2.orderkey");
        // empty probe side
        assertQuery(
                colocated,
                format("SELECT t1.orderkey " +
                                "FROM " +
                                "(SELECT * FROM %s WHERE orderdate < date '1970-01-01') t1 " +
                                "JOIN %s t2 " +
                                " ON t1.orderkey = t2.orderkey",
                        tableName,
                        tableName),
                "SELECT t1.orderkey FROM " +
                        "(SELECT * FROM orders WHERE orderdate < date '1970-01-01') t1 " +
                        "JOIN " +
                        "(SELECT * FROM orders WHERE orderdate < date '1992-02-08') t2 " +
                        " ON t1.orderkey = t2.orderkey");
        // empty build side
        assertQuery(
                colocated,
                format("SELECT t1.orderkey " +
                                "FROM " +
                                "%s t1 JOIN " +
                                "(SELECT * FROM %s WHERE orderdate < date '1970-01-01') t2 " +
                                " ON t1.orderkey = t2.orderkey",
                        tableName,
                        tableName),
                "SELECT t1.orderkey FROM " +
                        "(SELECT * FROM orders WHERE orderdate < date '1992-02-08') t1 " +
                        "JOIN " +
                        "(SELECT * FROM orders WHERE orderdate < date '1970-01-01') t2 " +
                        " ON t1.orderkey = t2.orderkey");
    }
    /**
     * Creates {@code tableName} sharded on the temporal column 'orderdate' and
     * bucketed into 10 buckets on 'orderkey', populated with all orders before
     * 1992-02-08. Requires at least two distinct orderdate values in that range.
     *
     * @param tableName name of the table to create (must not already exist)
     */
    private void prepareTemporalShardedAndBucketedTable(String tableName)
    {
        // Make sure we have at least 2 different orderdate.
        assertEquals(computeActual("SELECT count(DISTINCT orderdate) >= 2 FROM orders WHERE orderdate < date '1992-02-08'").getOnlyValue(), true);
        assertUpdate(
                format("CREATE TABLE %s " +
                                "WITH (temporal_column = 'orderdate', bucket_count = 10, bucketed_on = ARRAY ['orderkey']) AS " +
                                "SELECT orderdate, orderkey " +
                                "FROM orders " +
                                "WHERE orderdate < date '1992-02-08'",
                        tableName),
                "SELECT count(*) " +
                        "FROM orders " +
                        "WHERE orderdate < date '1992-02-08'");
    }
@Test
public void testShardingByTemporalTimestampColumn()
{
assertUpdate("CREATE TABLE test_shard_temporal_timestamp(col1 BIGINT, col2 TIMESTAMP) WITH (temporal_column = 'col2')");
int rows = 20;
StringJoiner joiner = new StringJoiner(", ", "INSERT INTO test_shard_temporal_timestamp VALUES ", "");
for (int i = 0; i < rows; i++) {
joiner.add(format("(%s, TIMESTAMP '2016-08-08 01:00' + interval '%s' hour)", i, i * 4));
}
assertUpdate(joiner.toString(), format("VALUES(%s)", rows));
MaterializedResult results = computeActual("SELECT format_datetime(col2 AT TIME ZONE 'UTC', 'yyyyMMdd'), \"$shard_uuid\" FROM test_shard_temporal_timestamp");
assertEquals(results.getRowCount(), rows);
// Each shard will only contain data of one date.
SetMultimap<String, String> shardDateMap = HashMultimap.create();
for (MaterializedRow row : results.getMaterializedRows()) {
shardDateMap.put((String) row.getField(1), (String) row.getField(0));
}
for (Collection<String> dates : shardDateMap.asMap().values()) {
assertEquals(dates.size(), 1);
}
// Ensure one shard can contain different timestamps from the same day
assertLessThan(shardDateMap.size(), rows);
}
@Test
public void testShardingByTemporalTimestampColumnBucketed()
{
assertUpdate("" +
"CREATE TABLE test_shard_temporal_timestamp_bucketed(col1 BIGINT, col2 TIMESTAMP) " +
"WITH (temporal_column = 'col2', bucket_count = 3, bucketed_on = ARRAY ['col1'])");
int rows = 100;
StringJoiner joiner = new StringJoiner(", ", "INSERT INTO test_shard_temporal_timestamp_bucketed VALUES ", "");
for (int i = 0; i < rows; i++) {
joiner.add(format("(%s, TIMESTAMP '2016-08-08 01:00' + interval '%s' hour)", i, i));
}
assertUpdate(joiner.toString(), format("VALUES(%s)", rows));
MaterializedResult results = computeActual("" +
"SELECT format_datetime(col2 AT TIME ZONE 'UTC', 'yyyyMMdd'), \"$shard_uuid\" " +
"FROM test_shard_temporal_timestamp_bucketed");
assertEquals(results.getRowCount(), rows);
// Each shard will only contain data of one date.
SetMultimap<String, String> shardDateMap = HashMultimap.create();
for (MaterializedRow row : results.getMaterializedRows()) {
shardDateMap.put((String) row.getField(1), (String) row.getField(0));
}
for (Collection<String> dates : shardDateMap.asMap().values()) {
assertEquals(dates.size(), 1);
}
// Ensure one shard can contain different timestamps from the same day
assertLessThan(shardDateMap.size(), rows);
}
    /**
     * Table properties must be accepted regardless of property-name case;
     * success of the CREATE statements is the whole assertion.
     * NOTE(review): these tables are not dropped afterwards -- presumably the
     * runner's schema is disposable; confirm before tightening cleanup.
     */
    @Test
    public void testTableProperties()
    {
        computeActual("CREATE TABLE test_table_properties_1 (foo BIGINT, bar BIGINT, ds DATE) WITH (ordering=array['foo','bar'], temporal_column='ds')");
        computeActual("CREATE TABLE test_table_properties_2 (foo BIGINT, bar BIGINT, ds DATE) WITH (ORDERING=array['foo','bar'], TEMPORAL_COLUMN='ds')");
        computeActual("CREATE TABLE test_table_properties_3 (foo BIGINT, bar BIGINT, ds DATE) WITH (TABLE_SUPPORTS_DELTA_DELETE=false)");
        computeActual("CREATE TABLE test_table_properties_4 (foo BIGINT, bar BIGINT, ds DATE) WITH (table_supports_delta_delete=true)");
    }
    /**
     * The per-shard row counts in system.shards must sum to the actual row
     * counts of the orders and lineitem tables.
     */
    @Test
    public void testShardsSystemTable()
    {
        assertQuery("" +
                        "SELECT table_schema, table_name, sum(row_count)\n" +
                        "FROM system.shards\n" +
                        "WHERE table_schema = 'tpch'\n" +
                        "  AND table_name IN ('orders', 'lineitem')\n" +
                        "GROUP BY 1, 2",
                "" +
                        "SELECT 'tpch', 'orders', (SELECT count(*) FROM orders)\n" +
                        "UNION ALL\n" +
                        "SELECT 'tpch', 'lineitem', (SELECT count(*) FROM lineitem)");
    }
    /**
     * Checks system.shards metadata for temporally sharded tables: a DATE
     * temporal column populates min/max_date (and leaves min/max_timestamp
     * NULL), a TIMESTAMP temporal column does the reverse, and the min/max
     * columns are usable as predicates that account for all rows.
     */
    @Test
    public void testShardsSystemTableWithTemporalColumn()
    {
        // Make sure we have rows in the selected range
        assertEquals(computeActual("SELECT count(*) >= 1 FROM orders WHERE orderdate BETWEEN date '1992-01-01' AND date '1992-02-08'").getOnlyValue(), true);

        // Create a table that has DATE type temporal column
        assertUpdate("CREATE TABLE test_shards_system_table_date_temporal\n" +
                        "WITH (temporal_column = 'orderdate') AS\n" +
                        "SELECT orderdate, orderkey\n" +
                        "FROM orders\n" +
                        "WHERE orderdate BETWEEN date '1992-01-01' AND date '1992-02-08'",
                "SELECT count(*)\n" +
                        "FROM orders\n" +
                        "WHERE orderdate BETWEEN date '1992-01-01' AND date '1992-02-08'");

        // Create a table that has TIMESTAMP type temporal column
        assertUpdate("CREATE TABLE test_shards_system_table_timestamp_temporal\n" +
                        "WITH (temporal_column = 'ordertimestamp') AS\n" +
                        "SELECT CAST (orderdate AS TIMESTAMP) AS ordertimestamp, orderkey\n" +
                        "FROM test_shards_system_table_date_temporal",
                "SELECT count(*)\n" +
                        "FROM orders\n" +
                        "WHERE orderdate BETWEEN date '1992-01-01' AND date '1992-02-08'");

        // For table with DATE type temporal column, min/max_timestamp columns must be null while min/max_date columns must not be null
        assertEquals(computeActual("" +
                "SELECT count(*)\n" +
                "FROM system.shards\n" +
                "WHERE table_schema = 'tpch'\n" +
                "AND table_name = 'test_shards_system_table_date_temporal'\n" +
                "AND NOT \n" +
                "(min_timestamp IS NULL AND max_timestamp IS NULL\n" +
                "AND min_date IS NOT NULL AND max_date IS NOT NULL)").getOnlyValue(), 0L);

        // For table with TIMESTAMP type temporal column, min/max_date columns must be null while min/max_timestamp columns must not be null
        assertEquals(computeActual("" +
                "SELECT count(*)\n" +
                "FROM system.shards\n" +
                "WHERE table_schema = 'tpch'\n" +
                "AND table_name = 'test_shards_system_table_timestamp_temporal'\n" +
                "AND NOT\n" +
                "(min_date IS NULL AND max_date IS NULL\n" +
                "AND min_timestamp IS NOT NULL AND max_timestamp IS NOT NULL)").getOnlyValue(), 0L);

        // Test date predicates in table with DATE temporal column
        assertQuery("" +
                        "SELECT table_schema, table_name, sum(row_count)\n" +
                        "FROM system.shards \n" +
                        "WHERE table_schema = 'tpch'\n" +
                        "AND table_name = 'test_shards_system_table_date_temporal'\n" +
                        "AND min_date >= date '1992-01-01'\n" +
                        "AND max_date <= date '1992-02-08'\n" +
                        "GROUP BY 1, 2",
                "" +
                        "SELECT 'tpch', 'test_shards_system_table_date_temporal',\n" +
                        "(SELECT count(*) FROM orders WHERE orderdate BETWEEN date '1992-01-01' AND date '1992-02-08')");

        // Test timestamp predicates in table with TIMESTAMP temporal column
        assertQuery("" +
                        "SELECT table_schema, table_name, sum(row_count)\n" +
                        "FROM system.shards \n" +
                        "WHERE table_schema = 'tpch'\n" +
                        "AND table_name = 'test_shards_system_table_timestamp_temporal'\n" +
                        "AND min_timestamp >= timestamp '1992-01-01'\n" +
                        "AND max_timestamp <= timestamp '1992-02-08'\n" +
                        "GROUP BY 1, 2",
                "" +
                        "SELECT 'tpch', 'test_shards_system_table_timestamp_temporal',\n" +
                        "(SELECT count(*) FROM orders WHERE orderdate BETWEEN date '1992-01-01' AND date '1992-02-08')");
    }
    /**
     * Exercises the hidden {@code $column_ranges} system table: per-column
     * min/max values, a missing base table, column types without ranges,
     * an empty table, NULL handling across inserts, and behavior after DROP.
     */
    @Test
    public void testColumnRangesSystemTable()
    {
        assertQuery("SELECT orderkey_min, orderkey_max, custkey_min, custkey_max, orderdate_min, orderdate_max FROM \"orders$column_ranges\"",
                "SELECT min(orderkey), max(orderkey), min(custkey), max(custkey), min(orderdate), max(orderdate) FROM orders");

        assertQuery("SELECT orderkey_min, orderkey_max FROM \"orders$column_ranges\"",
                "SELECT min(orderkey), max(orderkey) FROM orders");

        // No such table test
        assertQueryFails("SELECT * FROM \"no_table$column_ranges\"", ".*raptor\\.tpch\\.no_table\\$column_ranges does not exist.*");

        // No range column for DOUBLE, INTEGER or VARCHAR
        assertQueryFails("SELECT totalprice_min FROM \"orders$column_ranges\"", ".*Column 'totalprice_min' cannot be resolved.*");
        assertQueryFails("SELECT shippriority_min FROM \"orders$column_ranges\"", ".*Column 'shippriority_min' cannot be resolved.*");
        assertQueryFails("SELECT orderstatus_min FROM \"orders$column_ranges\"", ".*Column 'orderstatus_min' cannot be resolved.*");
        assertQueryFails("SELECT orderpriority_min FROM \"orders$column_ranges\"", ".*Column 'orderpriority_min' cannot be resolved.*");
        assertQueryFails("SELECT clerk_min FROM \"orders$column_ranges\"", ".*Column 'clerk_min' cannot be resolved.*");
        assertQueryFails("SELECT comment_min FROM \"orders$column_ranges\"", ".*Column 'comment_min' cannot be resolved.*");

        // Empty table
        assertUpdate("CREATE TABLE column_ranges_test (a BIGINT, b BIGINT)");
        assertQuery("SELECT a_min, a_max, b_min, b_max FROM \"column_ranges_test$column_ranges\"", "SELECT NULL, NULL, NULL, NULL");

        // Table with NULL values
        assertUpdate("INSERT INTO column_ranges_test VALUES (1, NULL)", 1);
        assertQuery("SELECT a_min, a_max, b_min, b_max FROM \"column_ranges_test$column_ranges\"", "SELECT 1, 1, NULL, NULL");
        assertUpdate("INSERT INTO column_ranges_test VALUES (NULL, 99)", 1);
        assertQuery("SELECT a_min, a_max, b_min, b_max FROM \"column_ranges_test$column_ranges\"", "SELECT 1, 1, 99, 99");
        assertUpdate("INSERT INTO column_ranges_test VALUES (50, 50)", 1);
        assertQuery("SELECT a_min, a_max, b_min, b_max FROM \"column_ranges_test$column_ranges\"", "SELECT 1, 50, 50, 99");

        // Drop table
        assertUpdate("DROP TABLE column_ranges_test");
        assertQueryFails("SELECT a_min, a_max, b_min, b_max FROM \"column_ranges_test$column_ranges\"",
                ".*raptor\\.tpch\\.column_ranges_test\\$column_ranges does not exist.*");
    }
    /**
     * Full lifecycle of a 50-bucket table: initial CTAS, a second insert
     * (doubling shards but keeping 50 bucket numbers), a bucketed self-join,
     * a delete, and cleanup.
     */
    @Test
    public void testCreateBucketedTable()
    {
        assertUpdate("" +
                        "CREATE TABLE orders_bucketed " +
                        "WITH (bucket_count = 50, bucketed_on = ARRAY ['orderkey']) " +
                        "AS SELECT * FROM orders",
                "SELECT count(*) FROM orders");

        // Initial load: data intact, one shard per bucket.
        assertQuery("SELECT * FROM orders_bucketed", "SELECT * FROM orders");
        assertQuery("SELECT count(*) FROM orders_bucketed", "SELECT count(*) FROM orders");
        assertQuery("SELECT count(DISTINCT \"$shard_uuid\") FROM orders_bucketed", "SELECT 50");
        assertQuery("SELECT count(DISTINCT \"$bucket_number\") FROM orders_bucketed", "SELECT 50");

        // Second insert doubles shard count but the bucket numbers stay at 50.
        assertUpdate("INSERT INTO orders_bucketed SELECT * FROM orders", "SELECT count(*) FROM orders");

        assertQuery("SELECT * FROM orders_bucketed", "SELECT * FROM orders UNION ALL SELECT * FROM orders");
        assertQuery("SELECT count(*) FROM orders_bucketed", "SELECT count(*) * 2 FROM orders");
        assertQuery("SELECT count(DISTINCT \"$shard_uuid\") FROM orders_bucketed", "SELECT 50 * 2");
        assertQuery("SELECT count(DISTINCT \"$bucket_number\") FROM orders_bucketed", "SELECT 50");

        // Self-join: each duplicated orderkey matches twice on each side.
        assertQuery("SELECT count(*) FROM orders_bucketed a JOIN orders_bucketed b USING (orderkey)", "SELECT count(*) * 4 FROM orders");

        // Delete removes both copies of orderkey 37 without changing shard layout.
        assertUpdate("DELETE FROM orders_bucketed WHERE orderkey = 37", 2);
        assertQuery("SELECT count(*) FROM orders_bucketed", "SELECT (count(*) * 2) - 2 FROM orders");
        assertQuery("SELECT count(DISTINCT \"$shard_uuid\") FROM orders_bucketed", "SELECT 50 * 2");
        assertQuery("SELECT count(DISTINCT \"$bucket_number\") FROM orders_bucketed", "SELECT 50");

        assertUpdate("DROP TABLE orders_bucketed");
    }
    /**
     * CREATE TABLE ... LIKE ... INCLUDING PROPERTIES must copy the bucketing
     * properties: two inserts into the derived table yield 50 * 2 shards.
     */
    @Test
    public void testCreateBucketedTableLike()
    {
        assertUpdate("" +
                "CREATE TABLE orders_bucketed_original (" +
                "  orderkey bigint" +
                ", custkey bigint" +
                ") " +
                "WITH (bucket_count = 50, bucketed_on = ARRAY['orderkey'])");

        assertUpdate("" +
                "CREATE TABLE orders_bucketed_like (" +
                "  orderdate date" +
                ", LIKE orders_bucketed_original INCLUDING PROPERTIES" +
                ")");

        assertUpdate("INSERT INTO orders_bucketed_like SELECT orderdate, orderkey, custkey FROM orders", "SELECT count(*) FROM orders");
        assertUpdate("INSERT INTO orders_bucketed_like SELECT orderdate, orderkey, custkey FROM orders", "SELECT count(*) FROM orders");

        assertQuery("SELECT count(DISTINCT \"$shard_uuid\") FROM orders_bucketed_like", "SELECT 50 * 2");

        assertUpdate("DROP TABLE orders_bucketed_original");
        assertUpdate("DROP TABLE orders_bucketed_like");
    }
    /**
     * Bucketing on a mix of column types (bigint custkey, varchar clerk,
     * integer shippriority) must still produce exactly 50 buckets and shards.
     */
    @Test
    public void testBucketingMixedTypes()
    {
        assertUpdate("" +
                        "CREATE TABLE orders_bucketed_mixed " +
                        "WITH (bucket_count = 50, bucketed_on = ARRAY ['custkey', 'clerk', 'shippriority']) " +
                        "AS SELECT * FROM orders",
                "SELECT count(*) FROM orders");

        assertQuery("SELECT * FROM orders_bucketed_mixed", "SELECT * FROM orders");
        assertQuery("SELECT count(*) FROM orders_bucketed_mixed", "SELECT count(*) FROM orders");
        assertQuery("SELECT count(DISTINCT \"$shard_uuid\") FROM orders_bucketed_mixed", "SELECT 50");
        assertQuery("SELECT count(DISTINCT \"$bucket_number\") FROM orders_bucketed_mixed", "SELECT 50");
    }
@Test
public void testShowCreateTable()
{
String createTableSql = format("" +
"CREATE TABLE %s.%s.%s (\n" +
" \"c1\" bigint,\n" +
" \"c2\" double,\n" +
" \"c 3\" varchar,\n" +
" \"c'4\" array(bigint),\n" +
" \"c5\" map(bigint, varchar),\n" +
" \"c6\" bigint,\n" +
" \"c7\" timestamp\n" +
")\n" +
"WITH (\n" +
" bucket_count = 32,\n" +
" bucketed_on = ARRAY['c1','c6'],\n" +
" ordering = ARRAY['c6','c1'],\n" +
" table_supports_delta_delete = true,\n" +
" temporal_column = 'c7'\n" +
")",
getSession().getCatalog().get(), getSession().getSchema().get(), "test_show_create_table");
assertUpdate(createTableSql);
MaterializedResult actualResult = computeActual("SHOW CREATE TABLE test_show_create_table");
assertEquals(getOnlyElement(actualResult.getOnlyColumnAsSet()), createTableSql);
actualResult = computeActual("SHOW CREATE TABLE " + getSession().getSchema().get() + ".test_show_create_table");
assertEquals(getOnlyElement(actualResult.getOnlyColumnAsSet()), createTableSql);
actualResult = computeActual("SHOW CREATE TABLE " + getSession().getCatalog().get() + "." + getSession().getSchema().get() + ".test_show_create_table");
assertEquals(getOnlyElement(actualResult.getOnlyColumnAsSet()), createTableSql);
// With organization enabled
createTableSql = format("" +
"CREATE TABLE %s.%s.%s (\n" +
" \"c1\" bigint,\n" +
" \"c2\" double,\n" +
" \"c 3\" varchar,\n" +
" \"c'4\" array(bigint),\n" +
" \"c5\" map(bigint, varchar),\n" +
" \"c6\" bigint,\n" +
" \"c7\" timestamp\n" +
")\n" +
"WITH (\n" +
" bucket_count = 32,\n" +
" bucketed_on = ARRAY['c1','c6'],\n" +
" ordering = ARRAY['c6','c1'],\n" +
" organized = true,\n" +
" table_supports_delta_delete = true\n" +
")",
getSession().getCatalog().get(), getSession().getSchema().get(), "test_show_create_table_organized");
assertUpdate(createTableSql);
actualResult = computeActual("SHOW CREATE TABLE test_show_create_table_organized");
assertEquals(getOnlyElement(actualResult.getOnlyColumnAsSet()), createTableSql);
actualResult = computeActual("SHOW CREATE TABLE " + getSession().getSchema().get() + ".test_show_create_table_organized");
assertEquals(getOnlyElement(actualResult.getOnlyColumnAsSet()), createTableSql);
actualResult = computeActual("SHOW CREATE TABLE " + getSession().getCatalog().get() + "." + getSession().getSchema().get() + ".test_show_create_table_organized");
assertEquals(getOnlyElement(actualResult.getOnlyColumnAsSet()), createTableSql);
createTableSql = format("" +
"CREATE TABLE %s.%s.%s (\n" +
" \"c\"\"1\" bigint,\n" +
" \"c2\" double,\n" +
" \"c 3\" varchar,\n" +
" \"c'4\" array(bigint),\n" +
" \"c5\" map(bigint, varchar)\n" +
")",
getSession().getCatalog().get(), getSession().getSchema().get(), "test_show_create_table_default");
assertUpdate(createTableSql);
actualResult = computeActual("SHOW CREATE TABLE \"test_show_create_table_default\"");
assertEquals(getOnlyElement(actualResult.getOnlyColumnAsSet()), createTableSql);
}
    /**
     * Creates seven tables covering every combination of temporal column,
     * ordering, bucketing, distribution name, organization and delta delete,
     * then verifies the system.tables schema, per-table metadata rows, and
     * filtering by table_schema / table_name.
     */
    @Test
    public void testTablesSystemTable()
    {
        assertUpdate("" +
                "CREATE TABLE system_tables_test0 (c00 timestamp, c01 varchar, c02 double, c03 bigint, c04 bigint)");
        assertUpdate("" +
                "CREATE TABLE system_tables_test1 (c10 timestamp, c11 varchar, c12 double, c13 bigint, c14 bigint) " +
                "WITH (temporal_column = 'c10')");
        assertUpdate("" +
                "CREATE TABLE system_tables_test2 (c20 timestamp, c21 varchar, c22 double, c23 bigint, c24 bigint) " +
                "WITH (temporal_column = 'c20', ordering = ARRAY['c22', 'c21'])");
        assertUpdate("" +
                "CREATE TABLE system_tables_test3 (c30 timestamp, c31 varchar, c32 double, c33 bigint, c34 bigint) " +
                "WITH (temporal_column = 'c30', bucket_count = 40, bucketed_on = ARRAY ['c34', 'c33'])");
        assertUpdate("" +
                "CREATE TABLE system_tables_test4 (c40 timestamp, c41 varchar, c42 double, c43 bigint, c44 bigint) " +
                "WITH (temporal_column = 'c40', ordering = ARRAY['c41', 'c42'], distribution_name = 'test_distribution', bucket_count = 50, bucketed_on = ARRAY ['c43', 'c44'])");
        assertUpdate("" +
                "CREATE TABLE system_tables_test5 (c50 timestamp, c51 varchar, c52 double, c53 bigint, c54 bigint) " +
                "WITH (ordering = ARRAY['c51', 'c52'], distribution_name = 'test_distribution', bucket_count = 50, bucketed_on = ARRAY ['c53', 'c54'], organized = true)");
        assertUpdate("" +
                "CREATE TABLE system_tables_test6 (c60 timestamp, c61 varchar, c62 double, c63 bigint, c64 bigint) " +
                "WITH (ordering = ARRAY['c61', 'c62'], distribution_name = 'test_distribution', bucket_count = 50, bucketed_on = ARRAY ['c63', 'c64'], organized = true, table_supports_delta_delete = true)");

        // Column types of system.tables, in declaration order.
        MaterializedResult actualResults = computeActual("SELECT * FROM system.tables");
        assertEquals(
                actualResults.getTypes(),
                ImmutableList.builder()
                        .add(VARCHAR) // table_schema
                        .add(VARCHAR) // table_name
                        .add(VARCHAR) // temporal_column
                        .add(new ArrayType(VARCHAR)) // ordering_columns
                        .add(VARCHAR) // distribution_name
                        .add(BIGINT) // bucket_count
                        .add(new ArrayType(VARCHAR)) // bucket_columns
                        .add(BOOLEAN) // organized
                        .add(BOOLEAN) // table_supports_delta_delete
                        .build());

        // Index the metadata rows of the seven test tables by table name.
        Map<String, MaterializedRow> map = actualResults.getMaterializedRows().stream()
                .filter(row -> ((String) row.getField(1)).startsWith("system_tables_test"))
                .collect(toImmutableMap(row -> ((String) row.getField(1)), identity()));
        assertEquals(map.size(), 7);

        // Each table's metadata must reflect exactly the properties it was created with.
        assertEquals(
                map.get("system_tables_test0").getFields(),
                asList("tpch", "system_tables_test0", null, null, null, null, null, Boolean.FALSE, Boolean.FALSE));
        assertEquals(
                map.get("system_tables_test1").getFields(),
                asList("tpch", "system_tables_test1", "c10", null, null, null, null, Boolean.FALSE, Boolean.FALSE));
        assertEquals(
                map.get("system_tables_test2").getFields(),
                asList("tpch", "system_tables_test2", "c20", ImmutableList.of("c22", "c21"), null, null, null, Boolean.FALSE, Boolean.FALSE));
        assertEquals(
                map.get("system_tables_test3").getFields(),
                asList("tpch", "system_tables_test3", "c30", null, null, 40L, ImmutableList.of("c34", "c33"), Boolean.FALSE, Boolean.FALSE));
        assertEquals(
                map.get("system_tables_test4").getFields(),
                asList("tpch", "system_tables_test4", "c40", ImmutableList.of("c41", "c42"), "test_distribution", 50L, ImmutableList.of("c43", "c44"), Boolean.FALSE, Boolean.FALSE));
        assertEquals(
                map.get("system_tables_test5").getFields(),
                asList("tpch", "system_tables_test5", null, ImmutableList.of("c51", "c52"), "test_distribution", 50L, ImmutableList.of("c53", "c54"), Boolean.TRUE, Boolean.FALSE));
        assertEquals(
                map.get("system_tables_test6").getFields(),
                asList("tpch", "system_tables_test6", null, ImmutableList.of("c61", "c62"), "test_distribution", 50L, ImmutableList.of("c63", "c64"), Boolean.TRUE, Boolean.TRUE));

        // Filtering by table_schema and/or table_name must narrow the result.
        actualResults = computeActual("SELECT * FROM system.tables WHERE table_schema = 'tpch'");
        long actualRowCount = actualResults.getMaterializedRows().stream()
                .filter(row -> ((String) row.getField(1)).startsWith("system_tables_test"))
                .count();
        assertEquals(actualRowCount, 7);
        actualResults = computeActual("SELECT * FROM system.tables WHERE table_name = 'system_tables_test3'");
        assertEquals(actualResults.getMaterializedRows().size(), 1);
        actualResults = computeActual("SELECT * FROM system.tables WHERE table_schema = 'tpch' and table_name = 'system_tables_test3'");
        assertEquals(actualResults.getMaterializedRows().size(), 1);

        // Individual metadata columns may be projected in any order.
        actualResults = computeActual("" +
                "SELECT distribution_name, bucket_count, bucketing_columns, ordering_columns, temporal_column, organized " +
                "FROM system.tables " +
                "WHERE table_schema = 'tpch' and table_name = 'system_tables_test3'");
        assertEquals(actualResults.getTypes(), ImmutableList.of(VARCHAR, BIGINT, new ArrayType(VARCHAR), new ArrayType(VARCHAR), VARCHAR, BOOLEAN));
        assertEquals(actualResults.getMaterializedRows().size(), 1);

        assertUpdate("DROP TABLE system_tables_test0");
        assertUpdate("DROP TABLE system_tables_test1");
        assertUpdate("DROP TABLE system_tables_test2");
        assertUpdate("DROP TABLE system_tables_test3");
        assertUpdate("DROP TABLE system_tables_test4");
        assertUpdate("DROP TABLE system_tables_test5");
        assertUpdate("DROP TABLE system_tables_test6");

        assertEquals(computeActual("SELECT * FROM system.tables WHERE table_schema IN ('foo', 'bar')").getRowCount(), 0);
    }
    /**
     * Tracks system.table_stats through a table's lifecycle: global sanity
     * checks, then create / insert / delete / add-column on a scratch table,
     * asserting table_version, shard_count, row_count, timestamps and
     * uncompressed_size after each step.
     */
    @SuppressWarnings("OverlyStrongTypeCast")
    @Test
    public void testTableStatsSystemTable()
    {
        // basic sanity tests
        assertQuery("" +
                        "SELECT table_schema, table_name, sum(row_count)\n" +
                        "FROM system.table_stats\n" +
                        "WHERE table_schema = 'tpch'\n" +
                        "  AND table_name IN ('orders', 'lineitem')\n" +
                        "GROUP BY 1, 2",
                "" +
                        "SELECT 'tpch', 'orders', (SELECT count(*) FROM orders)\n" +
                        "UNION ALL\n" +
                        "SELECT 'tpch', 'lineitem', (SELECT count(*) FROM lineitem)");

        assertQuery("" +
                        "SELECT\n" +
                        "  bool_and(row_count >= shard_count)\n" +
                        ", bool_and(update_time >= create_time)\n" +
                        ", bool_and(table_version >= 1)\n" +
                        "FROM system.table_stats\n" +
                        "WHERE row_count > 0",
                "SELECT true, true, true");

        // create empty table
        assertUpdate("CREATE TABLE test_table_stats (x bigint)");

        @Language("SQL") String sql = "" +
                "SELECT create_time, update_time, table_version," +
                "  shard_count, row_count, uncompressed_size\n" +
                "FROM system.table_stats\n" +
                "WHERE table_schema = 'tpch'\n" +
                "  AND table_name = 'test_table_stats'";
        MaterializedRow row = getOnlyElement(computeActual(sql).getMaterializedRows());

        LocalDateTime createTime = (LocalDateTime) row.getField(0);
        LocalDateTime updateTime1 = (LocalDateTime) row.getField(1);
        assertEquals(createTime, updateTime1);

        assertEquals(row.getField(2), 1L);      // table_version
        assertEquals(row.getField(3), 0L);      // shard_count
        assertEquals(row.getField(4), 0L);      // row_count
        long size1 = (long) row.getField(5);    // uncompressed_size

        // insert
        assertUpdate("INSERT INTO test_table_stats VALUES (1), (2), (3), (4)", 4);
        row = getOnlyElement(computeActual(sql).getMaterializedRows());

        assertEquals(row.getField(0), createTime);
        LocalDateTime updateTime2 = (LocalDateTime) row.getField(1);
        assertLessThan(updateTime1, updateTime2);

        assertEquals(row.getField(2), 2L);                    // table_version
        assertGreaterThanOrEqual((Long) row.getField(3), 1L); // shard_count
        assertEquals(row.getField(4), 4L);                    // row_count
        long size2 = (long) row.getField(5);                  // uncompressed_size
        assertGreaterThan(size2, size1);

        // delete
        assertUpdate("DELETE FROM test_table_stats WHERE x IN (2, 4)", 2);
        row = getOnlyElement(computeActual(sql).getMaterializedRows());

        assertEquals(row.getField(0), createTime);
        LocalDateTime updateTime3 = (LocalDateTime) row.getField(1);
        assertLessThan(updateTime2, updateTime3);

        assertEquals(row.getField(2), 3L);                    // table_version
        assertGreaterThanOrEqual((Long) row.getField(3), 1L); // shard_count
        assertEquals(row.getField(4), 2L);                    // row_count
        long size3 = (long) row.getField(5);                  // uncompressed_size
        assertLessThan(size3, size2);

        // add column
        assertUpdate("ALTER TABLE test_table_stats ADD COLUMN y bigint");
        row = getOnlyElement(computeActual(sql).getMaterializedRows());

        assertEquals(row.getField(0), createTime);
        assertLessThan(updateTime3, (LocalDateTime) row.getField(1));

        assertEquals(row.getField(2), 4L);      // table_version
        assertEquals(row.getField(4), 2L);      // row_count
        assertEquals(row.getField(5), size3);   // uncompressed_size

        // cleanup
        assertUpdate("DROP TABLE test_table_stats");
    }
/**
 * Same lifecycle checks as {@link #testTableStatsSystemTable()} but for a table
 * created with {@code table_supports_delta_delete = true}: deletes are recorded
 * as delta files, so {@code delta_count} grows and (without compaction) the
 * uncompressed size grows on delete instead of shrinking.
 */
@SuppressWarnings("OverlyStrongTypeCast")
@Test
public void testTableStatsSystemTableWithDeltaDelete()
{
    // create empty table
    assertUpdate("CREATE TABLE test_table_stats_with_delta_delete (x bigint) WITH (table_supports_delta_delete = true)");
    @Language("SQL") String sql = "" +
            "SELECT create_time, update_time, table_version," +
            " shard_count, row_count, uncompressed_size, delta_count\n" +
            "FROM system.table_stats\n" +
            "WHERE table_schema = 'tpch'\n" +
            " AND table_name = 'test_table_stats_with_delta_delete'";
    MaterializedRow row = getOnlyElement(computeActual(sql).getMaterializedRows());
    // a freshly created table has identical create/update times and no data
    LocalDateTime createTime = (LocalDateTime) row.getField(0);
    LocalDateTime updateTime1 = (LocalDateTime) row.getField(1);
    assertEquals(createTime, updateTime1);
    assertEquals(row.getField(2), 1L); // table_version
    assertEquals(row.getField(3), 0L); // shard_count
    assertEquals(row.getField(4), 0L); // row_count
    long size1 = (long) row.getField(5); // uncompressed_size
    // insert
    assertUpdate("INSERT INTO test_table_stats_with_delta_delete VALUES (1), (2), (3), (4)", 4);
    row = getOnlyElement(computeActual(sql).getMaterializedRows());
    assertEquals(row.getField(0), createTime);
    LocalDateTime updateTime2 = (LocalDateTime) row.getField(1);
    assertLessThan(updateTime1, updateTime2);
    assertEquals(row.getField(2), 2L); // table_version
    assertGreaterThanOrEqual((Long) row.getField(3), 1L); // shard_count
    assertEquals(row.getField(4), 4L); // row_count
    assertGreaterThanOrEqual((Long) row.getField(6), 0L); // delta_count
    long size2 = (long) row.getField(5); // uncompressed_size
    assertGreaterThan(size2, size1);
    // delete: recorded as a delta, so delta_count becomes >= 1
    assertUpdate("DELETE FROM test_table_stats_with_delta_delete WHERE x IN (2, 4)", 2);
    row = getOnlyElement(computeActual(sql).getMaterializedRows());
    assertEquals(row.getField(0), createTime);
    LocalDateTime updateTime3 = (LocalDateTime) row.getField(1);
    assertLessThan(updateTime2, updateTime3);
    assertEquals(row.getField(2), 3L); // table_version
    assertGreaterThanOrEqual((Long) row.getField(3), 1L); // shard_count
    assertEquals(row.getField(4), 2L); // row_count
    assertGreaterThanOrEqual((Long) row.getField(6), 1L); // delta_count
    long size3 = (long) row.getField(5); // uncompressed_size
    // without compaction, the size will grow with delta delete
    assertGreaterThan(size3, size2);
    // add column: metadata-only change, data stats unchanged
    assertUpdate("ALTER TABLE test_table_stats_with_delta_delete ADD COLUMN y bigint");
    row = getOnlyElement(computeActual(sql).getMaterializedRows());
    assertEquals(row.getField(0), createTime);
    assertLessThan(updateTime3, (LocalDateTime) row.getField(1));
    assertEquals(row.getField(2), 4L); // table_version
    assertEquals(row.getField(4), 2L); // row_count
    assertEquals(row.getField(5), size3); // uncompressed_size
    // cleanup
    assertUpdate("DROP TABLE test_table_stats_with_delta_delete");
}
/**
 * Verifies ADD COLUMN / DROP COLUMN interplay with inserts and deletes: shards
 * written before a column existed (or containing a since-dropped column) must
 * remain readable and deletable, both for partial and full-shard deletes.
 */
@Test
public void testAlterTable()
{
    assertUpdate("CREATE TABLE test_alter_table (c1 bigint, c2 bigint)");
    assertUpdate("INSERT INTO test_alter_table VALUES (1, 1), (1, 2), (1, 3), (1, 4)", 4);
    assertUpdate("INSERT INTO test_alter_table VALUES (11, 1), (11, 2)", 2);
    assertUpdate("ALTER TABLE test_alter_table ADD COLUMN c3 bigint");
    // the newest column (largest column ID) cannot be dropped
    assertQueryFails("ALTER TABLE test_alter_table DROP COLUMN c3", "Cannot drop the column which has the largest column ID in the table");
    assertUpdate("INSERT INTO test_alter_table VALUES (2, 1, 1), (2, 2, 2), (2, 3, 3), (2, 4, 4)", 4);
    assertUpdate("INSERT INTO test_alter_table VALUES (22, 1, 1), (22, 2, 2), (22, 4, 4)", 3);
    // Do a partial delete on a shard that does not contain newly added column
    assertUpdate("DELETE FROM test_alter_table WHERE c1 = 1 and c2 = 1", 1);
    // Then drop a full shard that does not contain newly added column
    assertUpdate("DELETE FROM test_alter_table WHERE c1 = 11", 2);
    // Drop a column from middle of table
    assertUpdate("ALTER TABLE test_alter_table DROP COLUMN c2");
    assertUpdate("INSERT INTO test_alter_table VALUES (3, 1), (3, 2), (3, 3), (3, 4)", 4);
    // Do a partial delete on a shard that contains column already dropped
    assertUpdate("DELETE FROM test_alter_table WHERE c1 = 2 and c3 = 1", 1);
    // Then drop a full shard that contains column already dropped
    assertUpdate("DELETE FROM test_alter_table WHERE c1 = 22", 3);
    assertUpdate("DROP TABLE test_alter_table");
}
/** Adding a column of a type the connector does not support (row type) must fail cleanly. */
@Test
public void testAlterTableUnsupportedType()
{
    assertUpdate("CREATE TABLE test_alter_table_unsupported_type (c1 bigint, c2 bigint)");
    assertQueryFails("ALTER TABLE test_alter_table_unsupported_type ADD COLUMN c3 row(bigint)", "Type not supported: row\\(bigint\\)");
    assertUpdate("DROP TABLE test_alter_table_unsupported_type");
}
/**
 * Verifies DELETE across schema evolution: rows inserted before an ADD COLUMN
 * read the new column as NULL, and deletes still work on shards written before
 * a column was added or after one was dropped.
 */
@Test
public void testDelete()
{
    assertUpdate("CREATE TABLE test_delete_table (c1 bigint, c2 bigint)");
    assertUpdate("INSERT INTO test_delete_table VALUES (1, 1), (1, 2), (1, 3), (1, 4), (11, 1), (11, 2)", 6);
    assertUpdate("ALTER TABLE test_delete_table ADD COLUMN c3 bigint");
    assertUpdate("INSERT INTO test_delete_table VALUES (2, 1, 1), (2, 2, 2), (2, 3, 3), (2, 4, 4), (22, 1, 1), (22, 2, 2), (22, 4, 4)", 7);
    // delete rows from a shard written before c3 existed
    assertUpdate("DELETE FROM test_delete_table WHERE c1 = 1", 4);
    assertQuery("SELECT * FROM test_delete_table", "VALUES (11, 1, NULL), (11, 2, NULL), (2, 1, 1), (2, 2, 2), (2, 3, 3), (2, 4, 4), (22, 1, 1), (22, 2, 2), (22, 4, 4)");
    assertUpdate("ALTER TABLE test_delete_table DROP COLUMN c2");
    assertUpdate("INSERT INTO test_delete_table VALUES (3, 1), (3, 2), (3, 3), (3, 4)", 4);
    // delete rows from a shard that still physically contains the dropped c2
    assertUpdate("DELETE FROM test_delete_table WHERE c1 = 2", 4);
    assertQuery("SELECT * FROM test_delete_table", "VALUES (11, NULL), (11, NULL), (22, 1), (22, 2), (22, 4), (3, 1), (3, 2), (3, 3), (3, 4)");
    // delete with a predicate spanning shards of different layouts
    assertUpdate("DELETE FROM test_delete_table WHERE c1 % 11 = 0", 5);
    assertQuery("SELECT * FROM test_delete_table", "VALUES (3, 1), (3, 2), (3, 3), (3, 4)");
    assertUpdate("DROP TABLE test_delete_table");
}
/**
 * Same DELETE scenarios as {@link #testDelete()} but on a table with
 * {@code table_supports_delta_delete = true}; also checks that re-running a
 * delete whose rows are already gone reports zero rows affected.
 */
@Test
public void testDeltaDelete()
{
    assertUpdate("CREATE TABLE test_delta_delete_table (c1 bigint, c2 bigint) WITH (table_supports_delta_delete = true)");
    assertUpdate("INSERT INTO test_delta_delete_table VALUES (1, 1), (1, 2), (1, 3), (1, 4), (11, 1), (11, 2)", 6);
    assertUpdate("ALTER TABLE test_delta_delete_table ADD COLUMN c3 bigint");
    assertUpdate("INSERT INTO test_delta_delete_table VALUES (2, 1, 1), (2, 2, 2), (2, 3, 3), (2, 4, 4), (22, 1, 1), (22, 2, 2), (22, 4, 4)", 7);
    assertUpdate("DELETE FROM test_delta_delete_table WHERE c1 = 1", 4);
    assertQuery("SELECT * FROM test_delta_delete_table", "VALUES (11, 1, NULL), (11, 2, NULL), (2, 1, 1), (2, 2, 2), (2, 3, 3), (2, 4, 4), (22, 1, 1), (22, 2, 2), (22, 4, 4)");
    // deleting the same rows again is a no-op (already masked by the delta)
    assertUpdate("DELETE FROM test_delta_delete_table WHERE c1 = 1", 0);
    assertQuery("SELECT * FROM test_delta_delete_table", "VALUES (11, 1, NULL), (11, 2, NULL), (2, 1, 1), (2, 2, 2), (2, 3, 3), (2, 4, 4), (22, 1, 1), (22, 2, 2), (22, 4, 4)");
    assertUpdate("ALTER TABLE test_delta_delete_table DROP COLUMN c2");
    assertUpdate("INSERT INTO test_delta_delete_table VALUES (3, 1), (3, 2), (3, 3), (3, 4)", 4);
    assertUpdate("DELETE FROM test_delta_delete_table WHERE c1 = 2", 4);
    assertQuery("SELECT * FROM test_delta_delete_table", "VALUES (11, NULL), (11, NULL), (22, 1), (22, 2), (22, 4), (3, 1), (3, 2), (3, 3), (3, 4)");
    assertUpdate("DELETE FROM test_delta_delete_table WHERE c1 % 11 = 0", 5);
    assertQuery("SELECT * FROM test_delta_delete_table", "VALUES (3, 1), (3, 2), (3, 3), (3, 4)");
    assertUpdate("DROP TABLE test_delta_delete_table");
}
/** Smoke test: the trigger_bucket_balancer procedure can be invoked without error. */
@Test
public void testTriggerBucketBalancer()
{
    assertUpdate("CALL system.trigger_bucket_balancer()");
}
}
| |
package hu.akarnokd.math;
import java.util.*;
import ix.Ix;
import net.objecthunter.exp4j.ExpressionBuilder;
/**
 * Brute-force search for arithmetic expressions over the digits 1..9 (kept in
 * order, optionally without concatenating adjacent digits) that evaluate to
 * each integer in (0, 11112). Expressions are assembled from a prefix before
 * the digit '1' plus eight separator slots between consecutive digits, then
 * evaluated with exp4j.
 */
public class NumberFrom1to10NoConcat {

    /**
     * Checks that the parentheses in the partially built expression {@code s}
     * are usable: at no point may more ')' than '(' have appeared, and a pair
     * of parentheses around a single digit (e.g. "(5)") is rejected as
     * pointless. Side effect: any parentheses still open at the end are closed
     * by appending ')' characters to {@code s}.
     *
     * @param s the candidate expression; may be extended with trailing ')'
     * @return true if the parenthesis structure is acceptable
     */
    static boolean validParent(StringBuilder s) {
        int p = 0;
        for (int i = 0; i < s.length(); i++) {
            if (s.charAt(i) == '(') {
                p++;
            }
            if (s.charAt(i) == ')') {
                p--;
            }
            if (p < 0) {
                // closed a parenthesis that was never opened
                return false;
            }
        }
        // reject "(d)": parentheses around a lone digit add nothing
        for (int i = 0; i < s.length() - 2; i++) {
            if (s.charAt(i) == '('
                    && Character.isDigit(s.charAt(i + 1))
                    && s.charAt(i + 2) == ')') {
                return false;
            }
        }
        // balance the expression by closing everything still open
        while (p-- > 0) {
            s.append(')');
        }
        return true;
    }

    /** Returns base^exp as a long (no overflow check; exponents here are small). */
    static long ipow(int base, int exp) {
        long result = 1;
        for (int i = 0; i < exp; i++) {
            result *= base;
        }
        return result;
    }

    /** Odometer over a fixed number of positions, each with its own limit. */
    static final class MultiIndex {
        final int[] indices;
        final int[] limits;
        /** Number of next() calls so far; used for progress reporting. */
        long counter;

        MultiIndex(int numIndices) {
            this.indices = new int[numIndices];
            this.limits = new int[numIndices];
        }

        /** Advances to the next combination; returns false once all are exhausted. */
        boolean next() {
            counter++;
            for (int i = 0; i < indices.length; i++) {
                int a = indices[i] + 1;
                if (a == limits[i]) {
                    indices[i] = 0;
                    if (i == indices.length - 1) {
                        return false;
                    }
                } else {
                    indices[i] = a;
                    return true;
                }
            }
            return false;
        }
    }

    /** Progress is printed every this many visited combinations. */
    static final int report = 10_000_000;

    /**
     * Enumerates all candidate expressions and stores, for each integer result
     * in (0, 11112), the first expression found that produces it.
     *
     * @param map       result-to-expression map, filled in place
     * @param noDivide  if true, exclude every separator containing '/'
     * @param maxParen  maximum allowed parenthesis nesting depth
     * @param noConcat  if true, forbid empty separators (no digit concatenation)
     * @return always null (results are delivered through {@code map})
     */
    static String findExpression(Map<Integer, String> map, boolean noDivide, int maxParen, boolean noConcat) throws Exception {
        // possible prefixes before the digit '1'
        // (each entry listed once: duplicates would multiply the search space)
        List<String> first = new ArrayList<>(Arrays.asList(
                ""
                , "-", "(", "-(", "(-"
                , "((", "-((", "(((", "-((("
                , "((-", "(((-"
        ));
        // separator between digits 1 and 2
        List<String> second = new ArrayList<>(
                Arrays.asList(
                        ""
                        , "+", "-", "*"
                        , "+("
                        , "-("
                        , "*("
                        , "/"
                        , "^"
                        , "^("
                        , "+(("
                        , "-(("
                        , "*(("
                        , "/("
                        , "/(("
                        , "^(("
                ));
        // separators between digits 2-3 through 7-8
        List<String> between = new ArrayList<>(
                Arrays.asList(
                        ""
                        , "+", "-", "*"
                        ,")+", "+(", ")+("
                        ,")-", "-(", ")-("
                        ,")*", "*(", ")*("
                        , "/", "^"
                        ,")^", "^(", ")^("
                        , "))+", "+((", "))+(", ")+((", "))+(("
                        , "))-", "-((", "))-(", ")-((", "))-(("
                        , "))*", "*((", "))*(", ")*((", "))*(("
                        , ")/", "/(", ")/(",
                        "))/", "/((", "))/(", ")/((", "))/(("
                        , "))^", "^((", "))^(", ")^((", "))^(("
                ));
        // separator between digits 8 and 9 (no opening parentheses allowed here)
        List<String> beforeLast = new ArrayList<>(
                Arrays.asList(
                        ""
                        , "+", "-", "*"
                        ,")+"
                        ,")-"
                        ,")*"
                        ,"))+"
                        ,"))-"
                        ,"))*"
                        , "^"
                        ,")^"
                        ,"))^"
                        , "/"
                        , ")/"
                        , "))/"
                ));
        if (noDivide) {
            first.removeIf(v -> v.contains("/"));
            second.removeIf(v -> v.contains("/"));
            between.removeIf(v -> v.contains("/"));
            beforeLast.removeIf(v -> v.contains("/"));
        }
        // drop any separator that nests parentheses deeper than maxParen
        String maxParenStart = Ix.repeatValue("(").take(maxParen + 1).join("").first();
        String maxParenEnd = Ix.repeatValue(")").take(maxParen + 1).join("").first();
        first.removeIf(v -> v.contains(maxParenStart) || v.contains(maxParenEnd));
        second.removeIf(v -> v.contains(maxParenStart) || v.contains(maxParenEnd));
        between.removeIf(v -> v.contains(maxParenStart) || v.contains(maxParenEnd));
        beforeLast.removeIf(v -> v.contains(maxParenStart) || v.contains(maxParenEnd));
        if (noConcat) {
            // an empty separator would concatenate the two adjacent digits
            second.removeIf(String::isEmpty);
            between.removeIf(String::isEmpty);
            beforeLast.removeIf(String::isEmpty);
        }
        // slots: prefix, second (1-2), six "between" (2-3 .. 7-8), beforeLast (8-9)
        // => 9 odometer positions; the original code used 10 positions with an
        // unused slot, enumerating every candidate between.size() extra times
        long all = first.size() * ipow(between.size(), 6) * second.size() * beforeLast.size();
        System.out.printf("%,d%n", all);
        MultiIndex mi = new MultiIndex(9);
        mi.limits[0] = first.size();
        mi.limits[1] = second.size();
        for (int i = 2; i < 8; i++) {
            mi.limits[i] = between.size();
        }
        mi.limits[8] = beforeLast.size();
        StringBuilder b = new StringBuilder();
        long invalid = 0;
        long valid = 0;
        int[] indices = mi.indices;
        do {
            // assemble the candidate: prefix 1 sep 2 sep 3 ... sep 9
            b.setLength(0);
            b.append(first.get(indices[0]));
            b.append('1');
            b.append(second.get(indices[1]));
            b.append('2');
            for (int i = 2; i < 8; i++) {
                b.append(between.get(indices[i]));
                b.append((char)('0' + (i + 1)));
            }
            b.append(beforeLast.get(indices[8]));
            b.append('9');
            if (validParent(b)) {
                String expr = b.toString();
                double result;
                try {
                    result = new ExpressionBuilder(expr)
                            .build().evaluate();
                    valid++;
                } catch (ArithmeticException ignored) {
                    // e.g. division by zero - count it and move on
                    invalid++;
                    result = Double.NaN;
                } catch (Exception ex) {
                    // anything else means we generated a malformed expression
                    System.err.println(expr);
                    throw ex;
                }
                if (result > 0 && result < 11112 && result == Math.floor(result)) {
                    String str = map.get((int)result);
                    if (str == null/* || str.length() > expr.length()*/) {
                        map.put((int)result, expr);
                    }
                }
            } else {
                invalid++;
            }
            if (mi.counter % report == 0) {
                System.out.printf("Total: %,d (%.6f%%), Valid: %,d (%.6f%%), Invalid: %,d, Found: %,d%n",
                        mi.counter + 1,
                        ((mi.counter + 1) * 100d) / all,
                        valid, (valid * 100d / (mi.counter + 1)),
                        invalid,
                        map.size()
                );
            }
        } while (mi.next());
        return null;
    }

    /** Runs the search (no division, nesting depth 1, no concatenation) and prints missing numbers. */
    public static void main(String[] args) throws Exception {
        Map<Integer, String> map = new HashMap<>();
        findExpression(map, true, 1, true);
        System.out.print("Numbers found: " + map.size());
        for (int i = 1; i < 11112; i++) {
            if (!map.containsKey(i)) {
                System.out.printf(" %d%n", i);
            }
        }
    }
}
| |
package me.ci;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Font;
import java.awt.FontMetrics;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.awt.geom.AffineTransform;
import java.util.ArrayList;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.SwingUtilities;
import javax.swing.Timer;
import javax.swing.WindowConstants;
/**
 * Singleton "wheel of fortune" window for picking a movie. Movies are added to
 * the wheel, a click spins it with ease-out deceleration, and the slice under
 * the pointer when it stops is crossed off (its slice animates to zero size).
 * Swing components must be touched on the EDT; construction defers to
 * SwingUtilities.invokeLater accordingly.
 */
public class MovieWheel extends JFrame{

    /**
     * Adds a movie to the shared wheel window, creating the window on first use.
     * New movies are only accepted while no spin is running and nothing has been
     * crossed off yet; duplicate movies are ignored.
     */
    public static void addMovieToWheel(Movie m){
        if(MovieWheel.INSTANCE==null)new MovieWheel();
        if(MovieWheel.INSTANCE.crossedOffMovies==0
                &&MovieWheel.INSTANCE.wheelSpinStart==-1
                &&!MovieWheel.INSTANCE.movies.contains(m)){
            // add exactly once; the contains() guard above exists to keep entries
            // unique (the original code called addMovie(m) twice here by mistake)
            MovieWheel.INSTANCE.addMovie(m);
        }
        MovieWheel.INSTANCE.toFront();
    }

    /** Draws {@code s} rotated by {@code -angle} degrees around (centerX, centerY). */
    private static void drawAngledString(Graphics2D g, String s, double angle, int x, int y, float centerX, float centerY){
        AffineTransform trans = new AffineTransform();
        trans.translate(centerX, centerY);
        trans.rotate(Math.toRadians(-angle));
        trans.translate(-centerX, -centerY);
        g.setTransform(trans);
        g.setColor(Color.black);
        g.drawString(s, x, y);
        // restore the identity transform so later drawing is unaffected
        g.setTransform(new AffineTransform());
    }

    /** True if angle lies in [angleStart, angleEnd), allowing for 360-degree wrap-around. */
    private static boolean isBetween(double angle, double angleStart, double angleEnd){
        return (angle>=angleStart&&angle<angleEnd)
                ||(angle+360>=angleStart&&angle+360<angleEnd);
    }

    private static MovieWheel INSTANCE;
    private static final float WHEEL_SIZE_PERCENT = 0.42f;
    private static final double GOAL_WHEEL_SIZE_DISTRIBUTION = 100;
    private static final float WHEEL_SLICE_GROWTH_SPEED = 0.05f;
    private static final float WHEEL_SPIN_SPEED = 250;
    private static final int WHEEL_CENTER_SIZE = 24;
    private static final Font BIG_FONT = new Font("Tahoma", Font.BOLD, 20);

    // wheel state; slice sizes animate toward their goal values each timer tick
    private final ArrayList<Movie> movies = new ArrayList<>();
    private ColorHeightmap colorScheme = new ColorHeightmap(WheelColorScheme.RAINBOW.colors);
    private double[] wheelSizeValues = new double[0];
    private double[] goalWheelSizeValues = new double[0];
    private double[] currentColorPercents = new double[0];
    private double totalWheelSize = 0;
    private long wheelSpinStart = -1; // -1 while no spin is in progress
    private int wheelSpinTime;
    private double currentWheelAngle;
    private double wheelSpinSpeed;
    private String crossOffText;
    private int crossedOffMovies = 0;

    private MovieWheel(){
        MovieWheel.INSTANCE = this;
        // defer all Swing work to the event dispatch thread
        SwingUtilities.invokeLater(new Runnable(){
            public void run(){
                init();
                addComponents();
                setVisible(true);
            }
        });
    }

    /** Configures the frame, input listeners and the ~33fps animation timer. */
    private void init(){
        setTitle("Movie Wheel");
        setSize(640, 480);
        setLocationRelativeTo(null);
        setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
        addWindowListener(new WindowAdapter(){
            @Override
            public void windowClosed(WindowEvent e){
                // drop the singleton so the next addMovieToWheel() recreates the window
                MovieWheel.INSTANCE = null;
            }
        });
        addMouseListener(new MouseAdapter(){
            @Override
            public void mouseReleased(MouseEvent e){
                if(wheelSpinStart!=-1)return; // already spinning
                if(crossedOffMovies==movies.size()-1)return; // only one movie left
                crossOffText = null;
                wheelSpinStart = System.currentTimeMillis();
                wheelSpinTime = (int)(Math.random()*5000+5000); // 5-10 seconds
                wheelSpinSpeed = wheelSpinTime/1000f*MovieWheel.WHEEL_SPIN_SPEED;
            }
        });
        Timer timer = new Timer(30, new ActionListener(){
            public void actionPerformed(ActionEvent e){
                updateElements();
                repaint();
            }
        });
        timer.setRepeats(true);
        timer.start();
        setFont(new Font("Tahoma", Font.PLAIN, 13));
    }

    /** Current wheel angle in degrees, easing out quadratically while spinning. */
    private double getWheelSpinAngle(){
        if(wheelSpinStart==-1)return currentWheelAngle;
        long time = System.currentTimeMillis()-wheelSpinStart;
        if(time>wheelSpinTime)time = wheelSpinTime;
        double completionPercent = time/(double)wheelSpinTime;
        double inverseCompletionPercent = 1-completionPercent;
        // quadratic ease-out: fast at first, decelerating to a stop
        double angle = wheelSpinSpeed-inverseCompletionPercent*inverseCompletionPercent*wheelSpinSpeed;
        return (angle+currentWheelAngle)%360;
    }

    /** Timer tick: animate slice sizes and finish the spin once its time is up. */
    private void updateElements(){
        for(int i = 0; i<wheelSizeValues.length; i++)
            updateWheelSize(i);
        recalculateWheelTotalSize();
        if(wheelSpinStart!=-1
                &&System.currentTimeMillis()-wheelSpinStart>wheelSpinTime)finishWheelSpin();
    }

    /** Moves one slice's size and color fraction toward their goal values. */
    private void updateWheelSize(int index){
        // snap when close enough, otherwise ease toward the goal
        if(Math.abs(wheelSizeValues[index]-goalWheelSizeValues[index])<0.5)wheelSizeValues[index] = goalWheelSizeValues[index];
        else wheelSizeValues[index] = (goalWheelSizeValues[index]-wheelSizeValues[index])*MovieWheel.WHEEL_SLICE_GROWTH_SPEED+wheelSizeValues[index];
        currentColorPercents[index] = ((index+0.5)/movies.size()-currentColorPercents[index])*MovieWheel.WHEEL_SLICE_GROWTH_SPEED+currentColorPercents[index];
    }

    /** Locks in the final angle and crosses off the movie under the pointer. */
    private void finishWheelSpin(){
        currentWheelAngle = getWheelSpinAngle();
        wheelSpinStart = -1;
        crossedOffMovies++;
        removeSelectedMovie();
    }

    /** Finds the slice spanning angle 0 (the pointer) and animates it to zero size. */
    private void removeSelectedMovie(){
        double currentAngle = currentWheelAngle;
        double angleSize;
        for(int i = 0; i<movies.size(); i++){
            angleSize = wheelSizeValues[i]/totalWheelSize*360;
            if(MovieWheel.isBetween(0, currentAngle, currentAngle+angleSize)){
                goalWheelSizeValues[i] = 0;
                crossOffText = "Sorry, \""+movies.get(i).title+"\"";
                return;
            }
            currentAngle += angleSize;
            currentAngle %= 360;
        }
    }

    /** Recomputes the slice-size denominator (floored so an empty wheel never divides by 0). */
    private void recalculateWheelTotalSize(){
        double s = 0;
        for(int i = 0; i<wheelSizeValues.length; i++)
            s += wheelSizeValues[i];
        totalWheelSize = Math.max(s, MovieWheel.GOAL_WHEEL_SIZE_DISTRIBUTION);
    }

    /** Installs the custom panel that paints the wheel, pointer and cross-off banner. */
    private void addComponents(){
        setLayout(new BorderLayout());
        add(new JPanel(){
            @Override
            public void paintComponent(Graphics g1){
                super.paintComponent(g1); // let Swing prepare the surface first
                Graphics2D g = (Graphics2D)g1;
                g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
                int width = getWidth();
                int height = getHeight();
                g.setColor(Color.darkGray);
                g.fillRect(0, 0, width, height);
                // wheel radius scales with the smaller window dimension
                float wheelRadius;
                if(width<height)wheelRadius = width*MovieWheel.WHEEL_SIZE_PERCENT;
                else wheelRadius = height*MovieWheel.WHEEL_SIZE_PERCENT;
                double wheelAngle = getWheelSpinAngle();
                if(movies.size()>0){
                    g.setFont(MovieWheel.this.getFont());
                    FontMetrics fm = g.getFontMetrics();
                    double angleSize;
                    float centerX = width/2f;
                    float centerY = height/2f;
                    synchronized(movies){
                        for(int i = 0; i<movies.size(); i++){
                            angleSize = wheelSizeValues[i]/totalWheelSize*360;
                            g.setColor(getColor(i));
                            g.fillArc((int)(centerX-wheelRadius), (int)(centerY-wheelRadius), (int)(wheelRadius*2), (int)(wheelRadius*2), (int)wheelAngle, (int)Math.ceil(angleSize));
                            // skip the label for slices animating toward removal
                            if(goalWheelSizeValues[i]!=0)MovieWheel.drawAngledString(g, movies.get(i).title, angleSize/2+wheelAngle, (int)(centerX+wheelRadius-fm.stringWidth(movies.get(i).title)-5), (int)(centerY+fm.getAscent()/2f), centerX, centerY);
                            wheelAngle += angleSize;
                            wheelAngle %= 360;
                        }
                    }
                    // hub and the triangular pointer on the right edge
                    g.setColor(Color.black);
                    g.fillOval((int)(centerX-MovieWheel.WHEEL_CENTER_SIZE/2f), (int)(centerY-MovieWheel.WHEEL_CENTER_SIZE/2f), MovieWheel.WHEEL_CENTER_SIZE, MovieWheel.WHEEL_CENTER_SIZE);
                    g.drawLine((int)(centerX+wheelRadius-1), (int)centerY, (int)(centerX+wheelRadius+10), (int)(centerY+5));
                    g.drawLine((int)(centerX+wheelRadius-1), (int)centerY, (int)(centerX+wheelRadius+10), (int)(centerY-5));
                    g.drawLine((int)(centerX+wheelRadius+10), (int)(centerY+5), (int)(centerX+wheelRadius+10), (int)(centerY-5));
                    if(crossOffText!=null){
                        g.setColor(Color.green);
                        g.setFont(MovieWheel.BIG_FONT);
                        fm = g.getFontMetrics();
                        g.drawString(crossOffText, (width-fm.stringWidth(crossOffText))/2, height-5);
                    }
                }
                // note: do NOT dispose g here - Swing owns the Graphics passed
                // to paintComponent and may keep using it after this call
            }
        });
    }

    /** Color for slice {@code index} sampled from the configured color scheme. */
    private Color getColor(int index){
        int[] temp = new int[3];
        colorScheme.getColors(currentColorPercents[index], temp);
        return new Color(temp[0], temp[1], temp[2]);
    }

    /** Inserts a movie at a random position and re-seats the per-slice state arrays. */
    private void addMovie(Movie m){
        synchronized(movies){
            int index = (int)(Math.random()*movies.size());
            movies.add(index, m);
            // grow the size arrays, shifting existing entries past the insert point;
            // the new slice starts at size 0 and animates toward the goal size
            double[] newMovieSizes = new double[movies.size()];
            double[] newGoalWheelSizes = new double[movies.size()];
            int j = 0;
            for(int i = 0; i<movies.size(); i++){
                if(i==index){
                    newGoalWheelSizes[i] = MovieWheel.GOAL_WHEEL_SIZE_DISTRIBUTION;
                    continue;
                }
                newMovieSizes[i] = wheelSizeValues[j];
                newGoalWheelSizes[i] = goalWheelSizeValues[j];
                j++;
            }
            wheelSizeValues = newMovieSizes;
            goalWheelSizeValues = newGoalWheelSizes;
            double[] newColorPercents = new double[movies.size()];
            j = 0;
            for(int i = 0; i<movies.size(); i++){
                if(i==index){
                    newColorPercents[i] = (index+0.5)/movies.size();
                    continue;
                }
                newColorPercents[i] = currentColorPercents[j];
                j++;
            }
            currentColorPercents = newColorPercents;
        }
        repaint();
    }
}
| |
/*
* Copyright (C) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.inject.spi;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.inject.Asserts.assertContains;
import static com.google.inject.Asserts.getDeclaringSourcePart;
import static com.google.inject.Asserts.isIncludeStackTraceComplete;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.inject.AbstractModule;
import com.google.inject.Binding;
import com.google.inject.BindingAnnotation;
import com.google.inject.Inject;
import com.google.inject.Key;
import com.google.inject.MembersInjector;
import com.google.inject.Module;
import com.google.inject.PrivateBinder;
import com.google.inject.Provider;
import com.google.inject.Scope;
import com.google.inject.Scopes;
import com.google.inject.Singleton;
import com.google.inject.Stage;
import com.google.inject.TypeLiteral;
import com.google.inject.binder.AnnotatedBindingBuilder;
import com.google.inject.binder.AnnotatedConstantBindingBuilder;
import com.google.inject.binder.ConstantBindingBuilder;
import com.google.inject.binder.ScopedBindingBuilder;
import com.google.inject.matcher.Matcher;
import com.google.inject.matcher.Matchers;
import com.google.inject.name.Named;
import com.google.inject.name.Names;
import com.google.inject.util.Providers;
import java.lang.annotation.Annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import junit.framework.TestCase;
/** @author jessewilson@google.com (Jesse Wilson) */
public class ElementsTest extends TestCase {
// Binder fidelity tests
/** Binder fidelity: addError(String, Object...) is recorded as a formatted Message element. */
public void testAddMessageErrorCommand() {
  Module module =
      new AbstractModule() {
        @Override
        protected void configure() {
          addError("Message %s %d %s", "A", 5, "C");
        }
      };
  FailingElementVisitor messageChecker =
      new FailingElementVisitor() {
        @Override
        public Void visit(Message command) {
          assertEquals("Message A 5 C", command.getMessage());
          assertNull(command.getCause());
          assertContains(
              command.getSources().toString(),
              ElementsTest.class.getName(),
              getDeclaringSourcePart(ElementsTest.class));
          assertContains(command.getSource(), getDeclaringSourcePart(ElementsTest.class));
          return null;
        }
      };
  checkModule(module, messageChecker);
}
/** Binder fidelity: addError(Throwable) is recorded as a Message element wrapping the cause. */
public void testAddThrowableErrorCommand() {
  Module module =
      new AbstractModule() {
        @Override
        protected void configure() {
          addError(new Exception("A"));
        }
      };
  FailingElementVisitor throwableChecker =
      new FailingElementVisitor() {
        @Override
        public Void visit(Message command) {
          assertEquals("A", command.getCause().getMessage());
          assertEquals(command.getMessage(), "An exception was caught and reported. Message: A");
          assertContains(command.getSource(), getDeclaringSourcePart(ElementsTest.class));
          return null;
        }
      };
  checkModule(module, throwableChecker);
}
/**
 * A RuntimeException thrown inside an installed module's configure() is captured
 * as a Message element, and configuration of the outer module continues, so the
 * addError() issued after the failing install() is still recorded (as a second
 * Message element, in order).
 */
public void testErrorsAddedWhenExceptionsAreThrown() {
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          install(
              new AbstractModule() {
                @Override
                protected void configure() {
                  throw new RuntimeException(
                      "Throwing RuntimeException in AbstractModule.configure().");
                }
              });
          addError("Code after the exception still gets executed");
        }
      },
      // first element: the captured RuntimeException from the inner module
      new FailingElementVisitor() {
        @Override
        public Void visit(Message command) {
          assertEquals(
              "Throwing RuntimeException in AbstractModule.configure().",
              command.getCause().getMessage());
          return null;
        }
      },
      // second element: the explicit addError() issued after the failed install
      new FailingElementVisitor() {
        @Override
        public Void visit(Message command) {
          assertEquals("Code after the exception still gets executed", command.getMessage());
          return null;
        }
      });
}
/** Extracts the bound instance from an instance binding via the instance target visitor. */
private <T> T getInstance(Binding<T> binding) {
  BindingTargetVisitor<T, T> instanceVisitor = Elements.<T>getInstanceVisitor();
  return binding.acceptTargetVisitor(instanceVisitor);
}
/**
 * bindConstant() with an annotation type and with an annotation instance both
 * produce InstanceBindings keyed by String plus that annotation; the recorded
 * elements appear in binding order.
 */
public void testBindConstantAnnotations() {
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          bindConstant().annotatedWith(SampleAnnotation.class).to("A");
          bindConstant().annotatedWith(Names.named("Bee")).to("B");
        }
      },
      // annotation type -> Key.get(String.class, SampleAnnotation.class)
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertTrue(command instanceof InstanceBinding);
          assertEquals(Key.get(String.class, SampleAnnotation.class), command.getKey());
          assertEquals("A", getInstance(command));
          return null;
        }
      },
      // annotation instance -> Key.get(String.class, Names.named("Bee"))
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertTrue(command instanceof InstanceBinding);
          assertEquals(Key.get(String.class, Names.named("Bee")), command.getKey());
          assertEquals("B", getInstance(command));
          return null;
        }
      });
}
/**
 * bindConstant() supports every constant type (String, all boxed primitives,
 * Class and enum); each produces an InstanceBinding whose key uses the boxed
 * type. Visitors below check the recorded elements in binding order.
 */
public void testBindConstantTypes() {
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          bindConstant().annotatedWith(Names.named("String")).to("A");
          bindConstant().annotatedWith(Names.named("int")).to(2);
          bindConstant().annotatedWith(Names.named("long")).to(3L);
          bindConstant().annotatedWith(Names.named("boolean")).to(false);
          bindConstant().annotatedWith(Names.named("double")).to(5.0d);
          bindConstant().annotatedWith(Names.named("float")).to(6.0f);
          bindConstant().annotatedWith(Names.named("short")).to((short) 7);
          bindConstant().annotatedWith(Names.named("char")).to('h');
          bindConstant().annotatedWith(Names.named("byte")).to((byte) 8);
          bindConstant().annotatedWith(Names.named("Class")).to(Iterator.class);
          bindConstant().annotatedWith(Names.named("Enum")).to(CoinSide.TAILS);
        }
      },
      // String constant
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertTrue(command instanceof InstanceBinding);
          assertEquals(Key.get(String.class, Names.named("String")), command.getKey());
          assertEquals("A", getInstance(command));
          return null;
        }
      },
      // int constant is keyed by the boxed Integer type
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertTrue(command instanceof InstanceBinding);
          assertEquals(Key.get(Integer.class, Names.named("int")), command.getKey());
          assertEquals(2, getInstance(command));
          return null;
        }
      },
      // long constant
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertTrue(command instanceof InstanceBinding);
          assertEquals(Key.get(Long.class, Names.named("long")), command.getKey());
          assertEquals(3L, getInstance(command));
          return null;
        }
      },
      // boolean constant
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertTrue(command instanceof InstanceBinding);
          assertEquals(Key.get(Boolean.class, Names.named("boolean")), command.getKey());
          assertEquals(false, getInstance(command));
          return null;
        }
      },
      // double constant
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertTrue(command instanceof InstanceBinding);
          assertEquals(Key.get(Double.class, Names.named("double")), command.getKey());
          assertEquals(5.0d, getInstance(command));
          return null;
        }
      },
      // float constant
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertTrue(command instanceof InstanceBinding);
          assertEquals(Key.get(Float.class, Names.named("float")), command.getKey());
          assertEquals(6.0f, getInstance(command));
          return null;
        }
      },
      // short constant
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertTrue(command instanceof InstanceBinding);
          assertEquals(Key.get(Short.class, Names.named("short")), command.getKey());
          assertEquals((short) 7, getInstance(command));
          return null;
        }
      },
      // char constant
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertTrue(command instanceof InstanceBinding);
          assertEquals(Key.get(Character.class, Names.named("char")), command.getKey());
          assertEquals('h', getInstance(command));
          return null;
        }
      },
      // byte constant
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertTrue(command instanceof InstanceBinding);
          assertEquals(Key.get(Byte.class, Names.named("byte")), command.getKey());
          assertEquals((byte) 8, getInstance(command));
          return null;
        }
      },
      // Class constant
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertTrue(command instanceof InstanceBinding);
          assertEquals(Key.get(Class.class, Names.named("Class")), command.getKey());
          assertEquals(Iterator.class, getInstance(command));
          return null;
        }
      },
      // enum constant is keyed by its concrete enum type
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertTrue(command instanceof InstanceBinding);
          assertEquals(Key.get(CoinSide.class, Names.named("Enum")), command.getKey());
          assertEquals(CoinSide.TAILS, getInstance(command));
          return null;
        }
      });
}
/** Binder fidelity: bind() by Class, TypeLiteral and Key all record the same un-annotated key. */
public void testBindKeysNoAnnotations() {
  FailingElementVisitor keyChecker =
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertEquals(Key.get(String.class), command.getKey());
          return null;
        }
      };
  Module module =
      new AbstractModule() {
        @Override
        protected void configure() {
          bind(String.class).toInstance("A");
          bind(new TypeLiteral<String>() {}).toInstance("B");
          bind(Key.get(String.class)).toInstance("C");
        }
      };
  checkModule(module, keyChecker, keyChecker, keyChecker);
}
/** Binder fidelity: annotatedWith(Class) attaches the annotation type to the bound key. */
public void testBindKeysWithAnnotationType() {
  FailingElementVisitor annotationChecker =
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertEquals(Key.get(String.class, SampleAnnotation.class), command.getKey());
          return null;
        }
      };
  Module module =
      new AbstractModule() {
        @Override
        protected void configure() {
          bind(String.class).annotatedWith(SampleAnnotation.class).toInstance("A");
          bind(new TypeLiteral<String>() {})
              .annotatedWith(SampleAnnotation.class)
              .toInstance("B");
        }
      };
  checkModule(module, annotationChecker, annotationChecker);
}
/** annotatedWith(Annotation) folds the annotation instance into the recorded binding's key. */
public void testBindKeysWithAnnotationInstance() {
  FailingElementVisitor annotationChecker =
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertEquals(Key.get(String.class, Names.named("a")), command.getKey());
          return null;
        }
      };
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          bind(String.class).annotatedWith(Names.named("a")).toInstance("B");
          bind(new TypeLiteral<String>() {}).annotatedWith(Names.named("a")).toInstance("C");
        }
      },
      annotationChecker,
      annotationChecker);
}
/**
 * toProvider() supports provider instances (both Guice and javax.inject flavors), provider
 * classes, provider keys, and provider TypeLiterals; each form is reflected in the recorded
 * element as either a ProviderInstanceBinding or a ProviderKeyBinding.
 */
public void testBindToProvider() {
  final Provider<String> aProvider =
      new Provider<String>() {
        @Override
        public String get() {
          return "A";
        }
      };
  // javax provider with no injectable members — should NOT be wrapped with dependencies
  final javax.inject.Provider<Integer> intJavaxProvider =
      new javax.inject.Provider<Integer>() {
        @Override
        public Integer get() {
          return 42;
        }
      };
  // javax provider WITH an injected field — should be wrapped and expose that dependency
  final javax.inject.Provider<Double> doubleJavaxProvider =
      new javax.inject.Provider<Double>() {
        @javax.inject.Inject String string;

        @Override
        public Double get() {
          return 42.42;
        }
      };
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          bind(String.class).toProvider(aProvider);
          bind(Integer.class).toProvider(intJavaxProvider);
          bind(Double.class).toProvider(doubleJavaxProvider);
          bind(List.class).toProvider(ListProvider.class);
          bind(Collection.class).toProvider(Key.get(ListProvider.class));
          bind(Iterable.class).toProvider(new TypeLiteral<TProvider<List>>() {});
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertTrue(command instanceof ProviderInstanceBinding);
          assertEquals(Key.get(String.class), command.getKey());
          command.acceptTargetVisitor(
              new FailingTargetVisitor<T>() {
                @Override
                public Void visit(ProviderInstanceBinding<? extends T> binding) {
                  // a native Guice provider is stored unwrapped
                  assertSame(aProvider, binding.getUserSuppliedProvider());
                  assertSame(aProvider, binding.getProviderInstance());
                  return null;
                }
              });
          return null;
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertTrue(command instanceof ProviderInstanceBinding);
          assertEquals(Key.get(Integer.class), command.getKey());
          command.acceptTargetVisitor(
              new FailingTargetVisitor<T>() {
                @Override
                public Void visit(ProviderInstanceBinding<? extends T> binding) {
                  assertSame(intJavaxProvider, binding.getUserSuppliedProvider());
                  assertEquals(42, binding.getProviderInstance().get());
                  // we don't wrap this w/ dependencies if there were none.
                  assertFalse(binding.getProviderInstance() instanceof HasDependencies);
                  return null;
                }
              });
          return null;
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertTrue(command instanceof ProviderInstanceBinding);
          assertEquals(Key.get(Double.class), command.getKey());
          command.acceptTargetVisitor(
              new FailingTargetVisitor<T>() {
                @Override
                public Void visit(ProviderInstanceBinding<? extends T> binding) {
                  assertSame(doubleJavaxProvider, binding.getUserSuppliedProvider());
                  assertEquals(42.42, binding.getProviderInstance().get());
                  // we do wrap it with dependencies if there were some.
                  assertTrue(binding.getProviderInstance() instanceof HasDependencies);
                  Set<Dependency<?>> deps =
                      ((HasDependencies) binding.getProviderInstance()).getDependencies();
                  // the single dependency is the @Inject String field above
                  assertEquals(1, deps.size());
                  assertEquals(
                      String.class,
                      deps.iterator().next().getKey().getTypeLiteral().getRawType());
                  return null;
                }
              });
          return null;
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertTrue(command instanceof ProviderKeyBinding);
          assertEquals(Key.get(List.class), command.getKey());
          command.acceptTargetVisitor(
              new FailingTargetVisitor<T>() {
                @Override
                public Void visit(ProviderKeyBinding<? extends T> binding) {
                  assertEquals(Key.get(ListProvider.class), binding.getProviderKey());
                  return null;
                }
              });
          return null;
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertTrue(command instanceof ProviderKeyBinding);
          assertEquals(Key.get(Collection.class), command.getKey());
          command.acceptTargetVisitor(
              new FailingTargetVisitor<T>() {
                @Override
                public Void visit(ProviderKeyBinding<? extends T> binding) {
                  assertEquals(Key.get(ListProvider.class), binding.getProviderKey());
                  return null;
                }
              });
          return null;
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertTrue(command instanceof ProviderKeyBinding);
          assertEquals(Key.get(Iterable.class), command.getKey());
          command.acceptTargetVisitor(
              new FailingTargetVisitor<T>() {
                @Override
                public Void visit(ProviderKeyBinding<? extends T> binding) {
                  // the TypeLiteral form preserves the generic provider type
                  assertEquals(new Key<TProvider<List>>() {}, binding.getProviderKey());
                  return null;
                }
              });
          return null;
        }
      });
}
/**
 * to(Class), to(TypeLiteral), and to(Key) all record LinkedKeyBindings carrying the target as
 * a key (with generics and annotations preserved).
 */
public void testBindToLinkedBinding() {
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          bind(List.class).to(ArrayList.class);
          bind(Map.class).to(new TypeLiteral<HashMap<Integer, String>>() {});
          bind(Set.class).to(Key.get(TreeSet.class, SampleAnnotation.class));
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertTrue(command instanceof LinkedKeyBinding);
          assertEquals(Key.get(List.class), command.getKey());
          command.acceptTargetVisitor(
              new FailingTargetVisitor<T>() {
                @Override
                public Void visit(LinkedKeyBinding<? extends T> binding) {
                  assertEquals(Key.get(ArrayList.class), binding.getLinkedKey());
                  return null;
                }
              });
          return null;
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertTrue(command instanceof LinkedKeyBinding);
          assertEquals(Key.get(Map.class), command.getKey());
          command.acceptTargetVisitor(
              new FailingTargetVisitor<T>() {
                @Override
                public Void visit(LinkedKeyBinding<? extends T> binding) {
                  // generic target type survives the round trip
                  assertEquals(
                      Key.get(new TypeLiteral<HashMap<Integer, String>>() {}),
                      binding.getLinkedKey());
                  return null;
                }
              });
          return null;
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertTrue(command instanceof LinkedKeyBinding);
          assertEquals(Key.get(Set.class), command.getKey());
          command.acceptTargetVisitor(
              new FailingTargetVisitor<T>() {
                @Override
                public Void visit(LinkedKeyBinding<? extends T> binding) {
                  // annotated target key survives the round trip
                  assertEquals(
                      Key.get(TreeSet.class, SampleAnnotation.class), binding.getLinkedKey());
                  return null;
                }
              });
          return null;
        }
      });
}
/** toInstance() records an InstanceBinding holding the exact instance. */
public void testBindToInstance() {
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          bind(String.class).toInstance("A");
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertTrue(command instanceof InstanceBinding);
          assertEquals(Key.get(String.class), command.getKey());
          assertEquals("A", getInstance(command));
          return null;
        }
      });
}
/**
 * Each scoping form — none, in(Scope), in(annotation), asEagerSingleton — is dispatched to the
 * corresponding BindingScopingVisitor method.
 */
public void testBindInScopes() {
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          bind(String.class);
          bind(List.class).to(ArrayList.class).in(Scopes.SINGLETON);
          bind(Map.class).to(HashMap.class).in(Singleton.class);
          bind(Set.class).to(TreeSet.class).asEagerSingleton();
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertEquals(Key.get(String.class), command.getKey());
          // unscoped bind() must hit visitNoScoping (any other visit fails)
          command.acceptScopingVisitor(
              new FailingBindingScopingVisitor() {
                @Override
                public Void visitNoScoping() {
                  return null;
                }
              });
          return null;
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertEquals(Key.get(List.class), command.getKey());
          command.acceptScopingVisitor(
              new FailingBindingScopingVisitor() {
                @Override
                public Void visitScope(Scope scope) {
                  assertEquals(Scopes.SINGLETON, scope);
                  return null;
                }
              });
          return null;
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertEquals(Key.get(Map.class), command.getKey());
          command.acceptScopingVisitor(
              new FailingBindingScopingVisitor() {
                @Override
                public Void visitScopeAnnotation(Class<? extends Annotation> annotation) {
                  assertEquals(Singleton.class, annotation);
                  return null;
                }
              });
          return null;
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          assertEquals(Key.get(Set.class), command.getKey());
          command.acceptScopingVisitor(
              new FailingBindingScopingVisitor() {
                @Override
                public Void visitEagerSingleton() {
                  return null;
                }
              });
          return null;
        }
      });
}
/** Scoping an instance binding is rejected: the binding is recorded plus an error Message. */
public void testBindToInstanceInScope() {
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          AnnotatedBindingBuilder<String> b = bind(String.class);
          b.toInstance("A");
          b.in(Singleton.class); // illegal: instance bindings are implicitly eager singletons
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          return null;
        }
      },
      new FailingElementVisitor() {
        @Override
        public Void visit(Message command) {
          assertEquals(
              "Setting the scope is not permitted when binding to a single instance.",
              command.getMessage());
          assertNull(command.getCause());
          assertContains(command.getSource(), getDeclaringSourcePart(ElementsTest.class));
          return null;
        }
      });
}
/** An instance binding implicitly scopes as an eager singleton. */
public void testBindToInstanceScope() {
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          bind(String.class).toInstance("A");
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> binding) {
          assertEquals(Key.get(String.class), binding.getKey());
          binding.acceptScopingVisitor(
              new FailingBindingScopingVisitor() {
                @Override
                public Void visitEagerSingleton() {
                  return null;
                }
              });
          return null;
        }
      });
}
/*if[AOP]*/
/**
 * bindInterceptor() records an InterceptorBinding carrying the exact matchers and interceptor
 * instances that were registered, in order.
 *
 * <p>Renamed from {@code testBindIntercepor} (typo); JUnit discovers test methods by the
 * {@code test} prefix, so the rename is runner-compatible.
 */
public void testBindInterceptor() {
  final Matcher<Class> classMatcher = Matchers.subclassesOf(List.class);
  final Matcher<Object> methodMatcher = Matchers.any();
  final org.aopalliance.intercept.MethodInterceptor methodInterceptor =
      new org.aopalliance.intercept.MethodInterceptor() {
        @Override
        public Object invoke(org.aopalliance.intercept.MethodInvocation methodInvocation) {
          return null;
        }
      };
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          bindInterceptor(classMatcher, methodMatcher, methodInterceptor);
        }
      },
      new FailingElementVisitor() {
        @Override
        public Void visit(InterceptorBinding command) {
          // identity (not equality) is required for the user-supplied matchers
          assertSame(classMatcher, command.getClassMatcher());
          assertSame(methodMatcher, command.getMethodMatcher());
          assertEquals(Arrays.asList(methodInterceptor), command.getInterceptors());
          return null;
        }
      });
}
/*end[AOP]*/
/** bindScope() records a ScopeBinding with the exact annotation type and scope instance. */
public void testBindScope() {
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          bindScope(SampleAnnotation.class, Scopes.NO_SCOPE);
        }
      },
      new FailingElementVisitor() {
        @Override
        public Void visit(ScopeBinding command) {
          assertSame(SampleAnnotation.class, command.getAnnotationType());
          assertSame(Scopes.NO_SCOPE, command.getScope());
          return null;
        }
      });
}
/** bindListener() records a TypeListenerBinding with the exact matcher and listener. */
public void testBindListener() {
  final Matcher<Object> typeMatcher = Matchers.only(TypeLiteral.get(String.class));
  final TypeListener listener =
      new TypeListener() {
        @Override
        public <I> void hear(TypeLiteral<I> type, TypeEncounter<I> encounter) {
          // should never be invoked during element recording
          throw new UnsupportedOperationException();
        }
      };
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          bindListener(typeMatcher, listener);
        }
      },
      new FailingElementVisitor() {
        @Override
        public Void visit(TypeListenerBinding binding) {
          assertSame(typeMatcher, binding.getTypeMatcher());
          assertSame(listener, binding.getListener());
          return null;
        }
      });
}
/** convertToTypes() records a TypeConverterBinding with the converter and matcher. */
public void testConvertToTypes() {
  final TypeConverter typeConverter =
      new TypeConverter() {
        @Override
        public Object convert(String value, TypeLiteral<?> toType) {
          return value;
        }
      };
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          convertToTypes(Matchers.any(), typeConverter);
        }
      },
      new FailingElementVisitor() {
        @Override
        public Void visit(TypeConverterBinding command) {
          assertSame(typeConverter, command.getTypeConverter());
          // Matchers.any() is a singleton, so assertSame works across calls
          assertSame(Matchers.any(), command.getTypeMatcher());
          return null;
        }
      });
}
/**
 * getProvider() inside configure() records ProviderLookup elements; the returned providers
 * must refuse to be used before the Injector exists.
 */
public void testGetProvider() {
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          Provider<String> keyGetProvider =
              getProvider(Key.get(String.class, SampleAnnotation.class));
          try {
            keyGetProvider.get();
            fail("Expected IllegalStateException");
          } catch (IllegalStateException e) {
            assertEquals(
                "This Provider cannot be used until the Injector has been created.",
                e.getMessage());
          }
          Provider<String> typeGetProvider = getProvider(String.class);
          try {
            typeGetProvider.get();
            fail("Expected IllegalStateException");
          } catch (IllegalStateException e) {
            assertEquals(
                "This Provider cannot be used until the Injector has been created.",
                e.getMessage());
          }
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(ProviderLookup<T> command) {
          assertEquals(Key.get(String.class, SampleAnnotation.class), command.getKey());
          // no delegate until the lookup is initialized by an injector
          assertNull(command.getDelegate());
          return null;
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(ProviderLookup<T> command) {
          assertEquals(Key.get(String.class), command.getKey());
          assertNull(command.getDelegate());
          return null;
        }
      });
}
/**
 * Initializing a recorded ProviderLookup/MembersInjectorLookup delegate makes the provider and
 * members injector handed out during configure() functional.
 */
public void testElementInitialization() {
  final AtomicReference<Provider<String>> providerFromBinder =
      new AtomicReference<Provider<String>>();
  final AtomicReference<MembersInjector<String>> membersInjectorFromBinder =
      new AtomicReference<MembersInjector<String>>();
  // captures the argument of the most recent injectMembers() call
  final AtomicReference<String> lastInjected = new AtomicReference<String>();
  final MembersInjector<String> stringInjector =
      new MembersInjector<String>() {
        @Override
        public void injectMembers(String instance) {
          lastInjected.set(instance);
        }
      };
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          providerFromBinder.set(getProvider(String.class));
          membersInjectorFromBinder.set(getMembersInjector(String.class));
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(ProviderLookup<T> providerLookup) {
          @SuppressWarnings("unchecked") // we know that T is a String here
          ProviderLookup<String> stringLookup = (ProviderLookup<String>) providerLookup;
          stringLookup.initializeDelegate(Providers.of("out"));
          // once initialized, the binder-returned provider delegates through
          assertEquals("out", providerFromBinder.get().get());
          return null;
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(MembersInjectorLookup<T> lookup) {
          @SuppressWarnings("unchecked") // we know that T is a String here
          MembersInjectorLookup<String> stringLookup = (MembersInjectorLookup<String>) lookup;
          stringLookup.initializeDelegate(stringInjector);
          membersInjectorFromBinder.get().injectMembers("in");
          assertEquals("in", lastInjected.get());
          return null;
        }
      });
}
/**
 * getMembersInjector() inside configure() records MembersInjectorLookup elements; the returned
 * injectors must refuse to be used before the Injector exists.
 */
public void testGetMembersInjector() {
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          MembersInjector<A<String>> typeMembersInjector =
              getMembersInjector(new TypeLiteral<A<String>>() {});
          try {
            typeMembersInjector.injectMembers(new A<String>());
            fail("Expected IllegalStateException");
          } catch (IllegalStateException e) {
            assertEquals(
                "This MembersInjector cannot be used until the Injector has been created.",
                e.getMessage());
          }
          MembersInjector<String> classMembersInjector = getMembersInjector(String.class);
          try {
            classMembersInjector.injectMembers("hello");
            fail("Expected IllegalStateException");
          } catch (IllegalStateException e) {
            assertEquals(
                "This MembersInjector cannot be used until the Injector has been created.",
                e.getMessage());
          }
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(MembersInjectorLookup<T> command) {
          assertEquals(new TypeLiteral<A<String>>() {}, command.getType());
          // no delegate until the lookup is initialized by an injector
          assertNull(command.getDelegate());
          return null;
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(MembersInjectorLookup<T> command) {
          assertEquals(TypeLiteral.get(String.class), command.getType());
          assertNull(command.getDelegate());
          return null;
        }
      });
}
/** requestInjection() records one InjectionRequest per instance, in order. */
public void testRequestInjection() {
  final Object firstObject = new Object();
  final Object secondObject = new Object();
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          requestInjection(firstObject);
          requestInjection(secondObject);
        }
      },
      new FailingElementVisitor() {
        @Override
        public Void visit(InjectionRequest<?> command) {
          assertEquals(firstObject, command.getInstance());
          return null;
        }
      },
      new FailingElementVisitor() {
        @Override
        public Void visit(InjectionRequest<?> command) {
          assertEquals(secondObject, command.getInstance());
          return null;
        }
      });
}
/** requestStaticInjection() records a StaticInjectionRequest with the requested type. */
public void testRequestStaticInjection() {
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          requestStaticInjection(ArrayList.class);
        }
      },
      new FailingElementVisitor() {
        @Override
        public Void visit(StaticInjectionRequest command) {
          assertEquals(ArrayList.class, command.getType());
          return null;
        }
      });
}
/**
 * newPrivateBinder() records PrivateElements exposing the declared keys; withSource() applied
 * before/after newPrivateBinder() controls the outer vs. inner element sources.
 */
public void testNewPrivateBinder() {
  final Key<Collection> collection = Key.get(Collection.class, SampleAnnotation.class);
  final Key<ArrayList> arrayList = Key.get(ArrayList.class);
  final ImmutableSet<Key<?>> collections = ImmutableSet.<Key<?>>of(arrayList, collection);
  final Key<?> a = Key.get(String.class, Names.named("a"));
  final Key<?> b = Key.get(String.class, Names.named("b"));
  final ImmutableSet<Key<?>> ab = ImmutableSet.of(a, b);
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          PrivateBinder one = binder().newPrivateBinder();
          one.expose(ArrayList.class);
          one.expose(Collection.class).annotatedWith(SampleAnnotation.class);
          one.bind(List.class).to(ArrayList.class);
          // outer source "1 FooBar" applies to the PrivateElements element itself,
          // inner source "2 FooBar" applies to bindings made inside it
          PrivateBinder two =
              binder().withSource("1 FooBar").newPrivateBinder().withSource("2 FooBar");
          two.expose(String.class).annotatedWith(Names.named("a"));
          two.expose(b);
          two.bind(List.class).to(ArrayList.class);
        }
      },
      new FailingElementVisitor() {
        @Override
        public Void visit(PrivateElements one) {
          assertEquals(collections, one.getExposedKeys());
          checkElements(
              one.getElements(),
              new FailingElementVisitor() {
                @Override
                public <T> Void visit(Binding<T> binding) {
                  assertEquals(Key.get(List.class), binding.getKey());
                  return null;
                }
              });
          return null;
        }
      },
      // ExternalFailureVisitor: custom withSource() strings replace the test-file source
      new ExternalFailureVisitor() {
        @Override
        public Void visit(PrivateElements two) {
          assertEquals(ab, two.getExposedKeys());
          assertEquals("1 FooBar", two.getSource().toString());
          checkElements(
              two.getElements(),
              new ExternalFailureVisitor() {
                @Override
                public <T> Void visit(Binding<T> binding) {
                  assertEquals("2 FooBar", binding.getSource().toString());
                  assertEquals(Key.get(List.class), binding.getKey());
                  return null;
                }
              });
          return null;
        }
      });
}
/** Calling annotatedWith() twice on one binding builder records an error Message. */
public void testBindWithMultipleAnnotationsAddsError() {
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          AnnotatedBindingBuilder<String> abb = bind(String.class);
          abb.annotatedWith(SampleAnnotation.class);
          abb.annotatedWith(Names.named("A")); // second annotation is illegal
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          return null;
        }
      },
      new FailingElementVisitor() {
        @Override
        public Void visit(Message command) {
          assertEquals(
              "More than one annotation is specified for this binding.", command.getMessage());
          assertNull(command.getCause());
          assertContains(command.getSource(), getDeclaringSourcePart(ElementsTest.class));
          return null;
        }
      });
}
/** Calling toInstance() twice on one binding builder records an error Message. */
public void testBindWithMultipleTargetsAddsError() {
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          AnnotatedBindingBuilder<String> abb = bind(String.class);
          abb.toInstance("A");
          abb.toInstance("B"); // second target is illegal
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          return null;
        }
      },
      new FailingElementVisitor() {
        @Override
        public Void visit(Message command) {
          assertEquals("Implementation is set more than once.", command.getMessage());
          assertNull(command.getCause());
          assertContains(command.getSource(), getDeclaringSourcePart(ElementsTest.class));
          return null;
        }
      });
}
/** Setting the scope twice on one binding builder records an error Message. */
public void testBindWithMultipleScopesAddsError() {
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          ScopedBindingBuilder sbb = bind(List.class).to(ArrayList.class);
          sbb.in(Scopes.NO_SCOPE);
          sbb.asEagerSingleton(); // second scoping is illegal
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          return null;
        }
      },
      new FailingElementVisitor() {
        @Override
        public Void visit(Message command) {
          assertEquals("Scope is set more than once.", command.getMessage());
          assertNull(command.getCause());
          assertContains(command.getSource(), getDeclaringSourcePart(ElementsTest.class));
          return null;
        }
      });
}
/** Annotating a constant binding twice records an error Message. */
public void testBindConstantWithMultipleAnnotationsAddsError() {
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          AnnotatedConstantBindingBuilder cbb = bindConstant();
          cbb.annotatedWith(SampleAnnotation.class).to("A");
          cbb.annotatedWith(Names.named("A")); // second annotation is illegal
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          return null;
        }
      },
      new FailingElementVisitor() {
        @Override
        public Void visit(Message command) {
          assertEquals(
              "More than one annotation is specified for this binding.", command.getMessage());
          assertNull(command.getCause());
          assertContains(command.getSource(), getDeclaringSourcePart(ElementsTest.class));
          return null;
        }
      });
}
/** Assigning a constant binding's value twice records an error Message. */
public void testBindConstantWithMultipleTargetsAddsError() {
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          ConstantBindingBuilder cbb = bindConstant().annotatedWith(SampleAnnotation.class);
          cbb.to("A");
          cbb.to("B"); // second value is illegal
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> command) {
          return null;
        }
      },
      new FailingElementVisitor() {
        @Override
        public Void visit(Message message) {
          assertEquals("Constant value is set more than once.", message.getMessage());
          assertNull(message.getCause());
          assertContains(message.getSource(), getDeclaringSourcePart(ElementsTest.class));
          return null;
        }
      });
}
/**
 * toConstructor() records ConstructorBindings exposing the injection point, its dependencies,
 * the injectable members, and any scoping applied to the builder.
 */
public void testBindToConstructor() throws NoSuchMethodException, NoSuchFieldException {
  final Constructor<A> aConstructor = A.class.getDeclaredConstructor();
  final Constructor<B> bConstructor = B.class.getDeclaredConstructor(Object.class);
  final Field field = B.class.getDeclaredField("stage");
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          bind(A.class).toConstructor(aConstructor);
          // the TypeLiteral resolves B's type parameter T to Integer
          bind(B.class)
              .toConstructor(bConstructor, new TypeLiteral<B<Integer>>() {})
              .in(Singleton.class);
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> binding) {
          assertEquals(new Key<A>() {}, binding.getKey());
          return binding.acceptTargetVisitor(
              new FailingTargetVisitor<T>() {
                @Override
                public Void visit(ConstructorBinding<? extends T> constructorBinding) {
                  InjectionPoint injectionPoint = constructorBinding.getConstructor();
                  assertEquals(aConstructor, injectionPoint.getMember());
                  assertEquals(new TypeLiteral<A>() {}, injectionPoint.getDeclaringType());
                  return null;
                }
              });
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> binding) {
          assertEquals(new Key<B>() {}, binding.getKey());
          binding.acceptScopingVisitor(
              new FailingBindingScopingVisitor() {
                @Override
                public Void visitScopeAnnotation(Class<? extends Annotation> annotation) {
                  assertEquals(Singleton.class, annotation);
                  return null;
                }
              });
          binding.acceptTargetVisitor(
              new FailingTargetVisitor<T>() {
                @Override
                public Void visit(ConstructorBinding<? extends T> constructorBinding) {
                  assertEquals(bConstructor, constructorBinding.getConstructor().getMember());
                  // constructor parameter T resolved to Integer via the TypeLiteral
                  assertEquals(
                      Key.get(Integer.class),
                      getOnlyElement(constructorBinding.getConstructor().getDependencies())
                          .getKey());
                  // the injected "stage" field is the only injectable member
                  assertEquals(
                      field,
                      getOnlyElement(constructorBinding.getInjectableMembers()).getMember());
                  // two dependencies total: the constructor arg and the field
                  assertEquals(2, constructorBinding.getDependencies().size());
                  /*if[AOP]*/
                  assertEquals(ImmutableMap.of(), constructorBinding.getMethodInterceptors());
                  /*end[AOP]*/
                  return null;
                }
              });
          return null;
        }
      });
}
/**
 * toConstructor() on a type with duplicate binding annotations records an untargetted binding
 * plus one error Message per offending member (field and constructor parameter).
 */
public void testBindToMalformedConstructor() throws NoSuchMethodException, NoSuchFieldException {
  final Constructor<C> constructor = C.class.getDeclaredConstructor(Integer.class);
  checkModule(
      new AbstractModule() {
        @Override
        protected void configure() {
          bind(C.class).toConstructor(constructor);
        }
      },
      new FailingElementVisitor() {
        @Override
        public <T> Void visit(Binding<T> binding) {
          assertEquals(Key.get(C.class), binding.getKey());
          // the malformed target degrades to an untargetted binding
          assertTrue(binding instanceof UntargettedBinding);
          return null;
        }
      },
      // ExternalFailureVisitor: the error sources point at class C, not this test method
      new ExternalFailureVisitor() {
        @Override
        public Void visit(Message message) {
          assertContains(
              message.getMessage(),
              C.class.getName() + ".a has more than one annotation ",
              Named.class.getName(),
              SampleAnnotation.class.getName());
          return null;
        }
      },
      new ExternalFailureVisitor() {
        @Override
        public Void visit(Message message) {
          assertContains(
              message.getMessage(),
              C.class.getName() + ".<init>() has more than one annotation ",
              Named.class.getName(),
              SampleAnnotation.class.getName());
          return null;
        }
      });
}
// Business logic tests
/** A module instance is configured once even when passed or installed multiple times. */
public void testModulesAreInstalledAtMostOnce() {
  final AtomicInteger aConfigureCount = new AtomicInteger(0);
  final Module a =
      new AbstractModule() {
        @Override
        public void configure() {
          aConfigureCount.incrementAndGet();
        }
      };
  // duplicate direct arguments are deduplicated
  Elements.getElements(a, a);
  assertEquals(1, aConfigureCount.get());
  aConfigureCount.set(0);
  // duplicate install() calls are deduplicated too
  Module b =
      new AbstractModule() {
        @Override
        protected void configure() {
          install(a);
          install(a);
        }
      };
  Elements.getElements(b);
  assertEquals(1, aConfigureCount.get());
}
/**
 * Ensures the module performs the commands consistent with {@code visitors}: records the
 * module's elements and applies one visitor to each element, in order.
 */
protected void checkModule(Module module, ElementVisitor<?>... visitors) {
  List<Element> elements = Elements.getElements(module);
  // expected value first, per JUnit convention (was reversed, producing a
  // misleading "expected X but was Y" message on a count mismatch)
  assertEquals(visitors.length, elements.size());
  checkElements(elements, visitors);
}
/**
 * Applies {@code visitors[i]} to {@code elements.get(i)}, first validating each element's
 * source metadata (module class names, and stack traces when complete traces are enabled).
 */
protected void checkElements(List<Element> elements, ElementVisitor<?>... visitors) {
  for (int i = 0; i < visitors.length; i++) {
    ElementVisitor<?> visitor = visitors[i];
    Element element = elements.get(i);
    // Messages carry a plain source; all other elements expose an ElementSource.
    if (!(element instanceof Message)) {
      ElementSource source = (ElementSource) element.getSource();
      assertFalse(source.getModuleClassNames().isEmpty());
      if (isIncludeStackTraceComplete()) {
        assertTrue(source.getStackTrace().length > 0);
      } else {
        assertEquals(0, source.getStackTrace().length);
      }
    }
    // ExternalFailureVisitor marks elements whose source lies outside this test file.
    if (!(visitor instanceof ExternalFailureVisitor)) {
      assertContains(element.getSource().toString(), getDeclaringSourcePart(ElementsTest.class));
    }
    element.acceptVisitor(visitor);
  }
}
/** Provider bound by class and by key in the provider-binding test. */
private static class ListProvider implements Provider<List> {
  @Override
  public List get() {
    return new ArrayList();
  }
}
/** Generic provider bound via a TypeLiteral in the provider-binding test. */
private static class TProvider<T> implements Provider<T> {
  @Override
  public T get() {
    return null;
  }
}
/**
 * By extending this class rather than FailingElementVisitor directly, the source of the error
 * doesn't need to contain the string {@code ElementsTest.java}.
 */
abstract static class ExternalFailureVisitor extends FailingElementVisitor {}
/** Binding annotation used throughout these tests. */
@Retention(RUNTIME)
@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
@BindingAnnotation
public @interface SampleAnnotation {}
/** Enum used by the constant-binding tests. */
public enum CoinSide {
  HEADS,
  TAILS
}
/** Injectable type with an injected field and a default constructor. */
static class A<T> {
  @Inject Stage stage;
}
/** Injectable type whose only constructor takes the type parameter. */
static class B<T> {
  @Inject Stage stage;
  B(T t) {}
}
/**
 * Deliberately malformed type: both the field and the constructor parameter carry two binding
 * annotations, which is illegal.
 */
static class C {
  @Inject
  @Named("foo")
  @SampleAnnotation
  String a;
  C(@Named("bar") @SampleAnnotation Integer b) {}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.schema.parser.dialect;
import org.apache.ignite.schema.parser.*;
import java.sql.*;
import java.util.*;
import static java.sql.Types.*;
/**
* Oracle specific metadata dialect.
*/
public class OracleMetadataDialect extends DatabaseMetadataDialect {
/** SQL to get columns metadata. */
private static final String SQL_COLUMNS = "SELECT a.owner, a.table_name, a.column_name, a.nullable," +
" a.data_type, a.data_precision, a.data_scale " +
"FROM all_tab_columns a %s " +
" %s " +
" ORDER BY a.owner, a.table_name, a.column_id";
/** SQL to get list of PRIMARY KEYS columns. */
private static final String SQL_PRIMARY_KEYS = "SELECT b.column_name" +
" FROM all_constraints a" +
" INNER JOIN all_cons_columns b ON a.owner = b.owner AND a.constraint_name = b.constraint_name" +
" WHERE a.owner = ? and a.table_name = ? AND a.constraint_type = 'P'";
/** SQL to get indexes metadata. */
private static final String SQL_INDEXES = "SELECT i.index_name, u.column_expression, i.column_name, i.descend" +
" FROM all_ind_columns i" +
" LEFT JOIN user_ind_expressions u on u.index_name = i.index_name and i.table_name = u.table_name" +
" WHERE i.index_owner = ? and i.table_name = ?" +
" ORDER BY i.index_name, i.column_position";
/** Owner index. */
private static final int OWNER_IDX = 1;
/** Table name index. */
private static final int TBL_NAME_IDX = 2;
/** Column name index. */
private static final int COL_NAME_IDX = 3;
/** Nullable index. */
private static final int NULLABLE_IDX = 4;
/** Data type index. */
private static final int DATA_TYPE_IDX = 5;
/** Numeric precision index. */
private static final int DATA_PRECISION_IDX = 6;
/** Numeric scale index. */
private static final int DATA_SCALE_IDX = 7;
/** Index name index. */
private static final int IDX_NAME_IDX = 1;
/** Index name index. */
private static final int IDX_EXPR_IDX = 2;
/** Index column name index. */
private static final int IDX_COL_NAME_IDX = 3;
/** Index column sort order index. */
private static final int IDX_COL_DESCEND_IDX = 4;
/** {@inheritDoc} */
@Override public Set<String> systemSchemas() {
    // Oracle built-in accounts plus the sample schemas; these are never offered to the user.
    Set<String> sys = new HashSet<>();

    Collections.addAll(sys,
        "ANONYMOUS", "CTXSYS", "DBSNMP", "EXFSYS", "LBACSYS", "MDSYS", "MGMT_VIEW",
        "OLAPSYS", "OWBSYS", "ORDPLUGINS", "ORDSYS", "OUTLN", "SI_INFORMTN_SCHEMA", "SYS", "SYSMAN", "SYSTEM",
        "TSMSYS", "WK_TEST", "WKSYS", "WKPROXY", "WMSYS", "XDB",
        "APEX_040000", "APEX_PUBLIC_USER", "DIP", "FLOWS_30000", "FLOWS_FILES", "MDDATA", "ORACLE_OCM",
        "SPATIAL_CSW_ADMIN_USR", "SPATIAL_WFS_ADMIN_USR", "XS$NULL",
        "BI", "HR", "OE", "PM", "IX", "SH");

    return sys;
}
/** {@inheritDoc} */
@Override public List<String> schemas(Connection conn) throws SQLException {
    List<String> schemas = new ArrayList<>();

    Set<String> sysSchemas = systemSchemas();

    // Close the metadata result set when done (previously leaked, also on exception).
    try (ResultSet rs = conn.getMetaData().getSchemas()) {
        while (rs.next()) {
            String schema = rs.getString(1);

            // Skip Oracle system schemas and internal APEX ("FLOWS_*") schemas.
            if (!sysSchemas.contains(schema) && !schema.startsWith("FLOWS_"))
                schemas.add(schema);
        }
    }

    return schemas;
}
/**
 * Maps an Oracle column type (from {@code all_tab_columns}) to a {@code java.sql.Types} code.
 *
 * @param rs Result set with column type metadata from Oracle database.
 * @return JDBC type; {@link java.sql.Types#OTHER} when the Oracle type is unrecognized.
 * @throws SQLException If failed to decode type.
 */
private int decodeType(ResultSet rs) throws SQLException {
    String type = rs.getString(DATA_TYPE_IDX);

    // TIMESTAMP variants carry suffixes ("(6)", "WITH TIME ZONE", ...), so match by prefix.
    if (type.startsWith("TIMESTAMP"))
        return TIMESTAMP;
    else {
        switch (type) {
            case "CHAR":
            case "NCHAR":
                return CHAR;

            case "VARCHAR2":
            case "NVARCHAR2":
                return VARCHAR;

            case "LONG":
                return LONGVARCHAR;

            case "LONG RAW":
                return LONGVARBINARY;

            case "FLOAT":
                return FLOAT;

            case "NUMBER":
                // Oracle NUMBER has no direct JDBC analogue; choose by precision/scale.
                int precision = rs.getInt(DATA_PRECISION_IDX);
                int scale = rs.getInt(DATA_SCALE_IDX);

                if (scale > 0) {
                    // Fractional: small precision/scale fits FLOAT, larger fits DOUBLE.
                    // NOTE(review): scale == 4 with precision <= 19 matches neither branch
                    // and falls through to NUMERIC — confirm this boundary is intended.
                    if (scale < 4 && precision < 19)
                        return FLOAT;

                    if (scale > 4 || precision > 19)
                        return DOUBLE;

                    return NUMERIC;
                }
                else {
                    // Integral: pick the smallest integer type wide enough for the precision.
                    if (precision < 1)
                        return INTEGER;

                    if (precision < 2)
                        return BOOLEAN;

                    if (precision < 4)
                        return TINYINT;

                    if (precision < 6)
                        return SMALLINT;

                    if (precision < 11)
                        return INTEGER;

                    if (precision < 20)
                        return BIGINT;

                    return NUMERIC;
                }

            case "DATE":
                return DATE;

            case "BFILE":
            case "BLOB":
                return BLOB;

            case "CLOB":
            case "NCLOB":
            case "XMLTYPE":
                return CLOB;
        }
    }

    // Unrecognized Oracle type.
    return OTHER;
}
/**
 * Retrieve primary key columns.
 *
 * @param stmt Prepared SQL statement to execute.
 * @param owner DB owner.
 * @param tbl Table name.
 * @return Primary key columns.
 * @throws SQLException If failed to retrieve primary key columns.
 */
private Set<String> primaryKeys(PreparedStatement stmt, String owner, String tbl) throws SQLException {
    // Bind owner and table into the SQL_PRIMARY_KEYS query.
    stmt.setString(1, owner);
    stmt.setString(2, tbl);

    Set<String> cols = new HashSet<>();

    try (ResultSet rs = stmt.executeQuery()) {
        while (rs.next())
            cols.add(rs.getString(1));
    }

    return cols;
}
/**
* Retrieve index columns.
*
* @param stmt Prepared SQL statement to execute.
* @param owner DB owner.
* @param tbl Table name.
* @return Index columns.
* @throws SQLException If failed to retrieve indexes columns.
*/
private Map<String, Map<String, Boolean>> indexes(PreparedStatement stmt, String owner, String tbl)
throws SQLException {
Map<String, Map<String, Boolean>> idxs = new LinkedHashMap<>();
stmt.setString(1, owner);
stmt.setString(2, tbl);
try (ResultSet idxsRs = stmt.executeQuery()) {
while (idxsRs.next()) {
String idxName = idxsRs.getString(IDX_NAME_IDX);
Map<String, Boolean> idx = idxs.get(idxName);
if (idx == null) {
idx = new LinkedHashMap<>();
idxs.put(idxName, idx);
}
String expr = idxsRs.getString(IDX_EXPR_IDX);
String col = expr == null ? idxsRs.getString(IDX_COL_NAME_IDX) : expr.replaceAll("\"", "");
idx.put(col, "DESC".equals(idxsRs.getString(IDX_COL_DESCEND_IDX)));
}
}
return idxs;
}
/** {@inheritDoc} */
@Override public Collection<DbTable> tables(Connection conn, List<String> schemas, boolean tblsOnly)
throws SQLException {
Collection<DbTable> tbls = new ArrayList<>();
PreparedStatement pkStmt = conn.prepareStatement(SQL_PRIMARY_KEYS);
PreparedStatement idxStmt = conn.prepareStatement(SQL_INDEXES);
if (schemas.size() == 0)
schemas.add(null);
Set<String> sysSchemas = systemSchemas();
try (Statement colsStmt = conn.createStatement()) {
for (String schema: schemas) {
if (systemSchemas().contains(schema) || (schema != null && schema.startsWith("FLOWS_")))
continue;
Collection<DbColumn> cols = new ArrayList<>();
Set<String> pkCols = Collections.emptySet();
Map<String, Map<String, Boolean>> idxs = Collections.emptyMap();
String sql = String.format(SQL_COLUMNS,
tblsOnly ? "INNER JOIN all_tables b on a.table_name = b.table_name and a.owner = b.owner" : "",
schema != null ? String.format(" WHERE a.owner = '%s' ", schema) : "");
try (ResultSet colsRs = colsStmt.executeQuery(sql)) {
String prevSchema = "";
String prevTbl = "";
boolean first = true;
while (colsRs.next()) {
String owner = colsRs.getString(OWNER_IDX);
String tbl = colsRs.getString(TBL_NAME_IDX);
if (sysSchemas.contains(owner) || (schema != null && schema.startsWith("FLOWS_")))
continue;
boolean changed = !owner.equals(prevSchema) || !tbl.equals(prevTbl);
if (changed) {
if (first)
first = false;
else
tbls.add(table(prevSchema, prevTbl, cols, idxs));
prevSchema = owner;
prevTbl = tbl;
cols = new ArrayList<>();
pkCols = primaryKeys(pkStmt, owner, tbl);
idxs = indexes(idxStmt, owner, tbl);
}
String colName = colsRs.getString(COL_NAME_IDX);
cols.add(new DbColumn(colName, decodeType(colsRs), pkCols.contains(colName),
!"N".equals(colsRs.getString(NULLABLE_IDX))));
}
if (!cols.isEmpty())
tbls.add(table(prevSchema, prevTbl, cols, idxs));
}
}
}
return tbls;
}
}
| |
/*
* Copyright (c) 2014 by CDAC Chennai
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @Original File : GPSTracker
* @File LocationDataListener(Modified copy)
* @Created: 24.04.2014
* @author: Prasenjit
* Last Change: 09.09.2014 by Prasenjit
*/
package com.contextawareframework.backgroundservices;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.IBinder;
import android.provider.Settings;
import android.util.Log;
/**
 * This class is a listener class for the GPS sensor and can be used to register the GPS /
 * location listener. If GPS is enabled it will try to get the lat / long from GPS; otherwise it
 * will try to get the location details from the network provider (if the phone has network
 * coverage).
 * */
public class LocationDataListener extends CAFService{

    /** Calling activity context. */
    private final Context mContext;

    /** Flag for GPS provider status. */
    boolean isGPSEnabled = false;

    /** Flag for network provider status. */
    boolean isNetworkEnabled = false;

    /** Flag telling whether any location provider is available. */
    boolean canGetLocation = false;

    /** Use this string constant to debug this class. */
    private static final String TAG = "LocationDataListener";

    /** Last known location, if any. */
    private Location location;

    /** Last known latitude. */
    double latitude;

    /** Last known longitude. */
    double longitude;

    /** The minimum distance to change updates, in meters. */
    private static final long MIN_DISTANCE_CHANGE_FOR_UPDATES = 10; // 10 meters

    /** The minimum time between updates, in milliseconds. */
    private static final long MIN_TIME_BW_UPDATES = 1000 * 60 * 1; // 1 minute

    /** Location manager used to register / unregister listeners. */
    protected LocationManager locationManager;

    // NOTE(review): this field is never assigned or read — listeners are always passed in as
    // method parameters. Kept only for binary compatibility; consider removing.
    private LocationListener locationListener;

    /** Singleton instance. */
    private static LocationDataListener locationDataListenerInstance;

    /**
     * Private constructor (singleton pattern).
     *
     * @param context Calling Activity context
     */
    private LocationDataListener(Context context)
    {
        mContext = context;
    }

    /**
     * Returns the singleton instance of this class, creating it on first use.
     *
     * @param context Calling Activity context
     * @return LocationDataListener Class instance
     */
    public static synchronized LocationDataListener getInstance(Context context)
    {
        if (locationDataListenerInstance == null)
            locationDataListenerInstance = new LocationDataListener(context);

        return locationDataListenerInstance;
    }

    /**
     * Registers the given listener for location updates and caches the last known location.
     *
     * @param provider location provider name (currently unused by {@link #getLocation})
     * @param minTime minimum time between updates, in milliseconds
     * @param minDistance minimum distance between updates, in meters
     * @param locationListener listener to register
     */
    public void enableLocationListener(String provider, long minTime, float minDistance, LocationListener locationListener)
    {
        location = getLocation(provider, minTime, minDistance, locationListener);
    }

    /**
     * Unregisters the given listener from location updates.
     *
     * @param listenerFromActivity listener previously registered by the caller
     */
    public void disableLocationListener(LocationListener listenerFromActivity)
    {
        unregisterLocationListener(listenerFromActivity);
    }

    /**
     * @author Rekha N
     * Checks all available network providers and reports whether any of them is connected.
     *
     * @return {@code true} if at least one network is in the CONNECTED state
     **/
    public boolean isConnectingToInternet()
    {
        ConnectivityManager connectivity = (ConnectivityManager) mContext.getSystemService(Context.CONNECTIVITY_SERVICE);

        if (connectivity != null)
        {
            NetworkInfo[] info = connectivity.getAllNetworkInfo();

            if (info != null)
                for (int i = 0; i < info.length; i++)
                    if (info[i].getState() == NetworkInfo.State.CONNECTED)
                    {
                        return true;
                    }
        }

        return false;
    }

    /**
     * Registers the listener with the network provider (if enabled) and then the GPS provider
     * (if enabled and no network fix was found), returning the last known location.
     *
     * @param provider location provider name. NOTE(review): currently ignored — the method
     *        decides between GPS and network itself; confirm whether callers expect it honored.
     * @param minTime minimum time between updates, in milliseconds
     * @param minDistance minimum distance between updates, in meters
     * @param locationListener listener to register
     * @return last known location, or {@code null} if none is available
     */
    public Location getLocation(String provider, long minTime, float minDistance, LocationListener locationListener) {
        try {
            locationManager = (LocationManager) mContext.getSystemService(LOCATION_SERVICE);

            // getting GPS status
            isGPSEnabled = locationManager.isProviderEnabled(LocationManager.GPS_PROVIDER);

            // getting network status
            isNetworkEnabled = locationManager.isProviderEnabled(LocationManager.NETWORK_PROVIDER);

            if (!isGPSEnabled && !isNetworkEnabled) {
                // no network provider is enabled
                Log.d(TAG,"No service available to get the location. Network and GPS both are disable or not available");
            }
            else
            {
                this.canGetLocation = true;

                if (isNetworkEnabled) {
                    locationManager.requestLocationUpdates(LocationManager.NETWORK_PROVIDER,minTime, minDistance, locationListener); // Now this will be implemented in user level

                    Log.d("Network", "Network");

                    if (locationManager != null) {
                        location = locationManager.getLastKnownLocation(LocationManager.NETWORK_PROVIDER);

                        if (location != null) {
                            latitude = location.getLatitude();
                            longitude = location.getLongitude();
                        }
                    }
                }

                // if GPS Enabled get lat/long using GPS Services
                if (isGPSEnabled) {
                    if (location == null) {
                        // FIX: the original passed (10, 60000), which both swapped the
                        // minTime (ms) / minDistance (m) argument order and ignored the
                        // caller-supplied values; use the parameters, matching the network
                        // branch above.
                        locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, minTime, minDistance, locationListener);

                        Log.d("GPS Enabled", "GPS Enabled");

                        if (locationManager != null) {
                            location = locationManager
                                    .getLastKnownLocation(LocationManager.GPS_PROVIDER);

                            if (location != null) {
                                latitude = location.getLatitude();
                                longitude = location.getLongitude();
                            }
                        }
                    }
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }

        return location;
    }

    /**
     * Stop using Location listener
     * Calling this function will stop using GPS in your app
     * */
    public void unregisterLocationListener(LocationListener locationListener){
        if(locationManager != null){
            locationManager.removeUpdates(locationListener);
        }
    }

    /**
     * Function to get latitude
     * */
    public double getLatitude(){
        if(location != null){
            latitude = location.getLatitude();
        }

        // return latitude
        return latitude;
    }

    /**
     * Function to get longitude
     * */
    public double getLongitude(){
        if(location != null){
            longitude = location.getLongitude();
        }

        // return longitude
        return longitude;
    }

    /**
     * Function to check GPS/wifi enabled
     * @return boolean
     * */
    public boolean canGetLocation() {
        return this.canGetLocation;
    }

    /**
     * Function to show settings alert dialog
     * On pressing Settings button will launch Settings Options
     * */
    public void showSettingsAlert(){
        AlertDialog.Builder alertDialog = new AlertDialog.Builder(mContext);

        // Setting Dialog Title
        alertDialog.setTitle("GPS settings");

        // Setting Dialog Message
        alertDialog.setMessage("GPS is not enabled. Do you want to go to settings menu?");

        // On pressing Settings button
        alertDialog.setPositiveButton("Settings", new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog,int which) {
                Intent intent = new Intent(Settings.ACTION_LOCATION_SOURCE_SETTINGS);
                mContext.startActivity(intent);
            }
        });

        // on pressing cancel button
        alertDialog.setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int which) {
                dialog.cancel();
            }
        });

        // Showing Alert Message
        alertDialog.show();
    }

    @Override
    public IBinder onBind(Intent arg0) {
        return null;
    }
}
| |
/*
* Copyright (c) 2015-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
package com.facebook.drawee.drawable;
import android.graphics.Matrix;
import android.graphics.Rect;
/**
* Performs scale type calculations.
*/
public class ScalingUtils {

  /**
   * Options for scaling the child bounds to the parent bounds.
   * <p>
   * Similar to {@link android.widget.ImageView.ScaleType}, but ScaleType.MATRIX is not supported.
   * To use matrix scaling, use a {@link MatrixDrawable}. An additional scale type (FOCUS_CROP) is
   * provided.
   */
  public enum ScaleType {

    /**
     * Scales width and height independently, so that the child matches the parent exactly.
     * This may change the aspect ratio of the child.
     */
    FIT_XY,

    /**
     * Scales the child so that it fits entirely inside the parent. At least one dimension (width or
     * height) will fit exactly. Aspect ratio is preserved.
     * Child is aligned to the top-left corner of the parent.
     */
    FIT_START,

    /**
     * Scales the child so that it fits entirely inside the parent. At least one dimension (width or
     * height) will fit exactly. Aspect ratio is preserved.
     * Child is centered within the parent's bounds.
     */
    FIT_CENTER,

    /**
     * Scales the child so that it fits entirely inside the parent. At least one dimension (width or
     * height) will fit exactly. Aspect ratio is preserved.
     * Child is aligned to the bottom-right corner of the parent.
     */
    FIT_END,

    /**
     * Performs no scaling.
     * Child is centered within parent's bounds.
     */
    CENTER,

    /**
     * Scales the child so that it fits entirely inside the parent. Unlike FIT_CENTER, if the child
     * is smaller, no up-scaling will be performed. Aspect ratio is preserved.
     * Child is centered within parent's bounds.
     */
    CENTER_INSIDE,

    /**
     * Scales the child so that both dimensions will be greater than or equal to the corresponding
     * dimension of the parent. At least one dimension (width or height) will fit exactly.
     * Child is centered within parent's bounds.
     */
    CENTER_CROP,

    /**
     * Scales the child so that both dimensions will be greater than or equal to the corresponding
     * dimension of the parent. At least one dimension (width or height) will fit exactly.
     * The child's focus point will be centered within the parent's bounds as much as possible
     * without leaving empty space.
     * It is guaranteed that the focus point will be visible and centered as much as possible.
     * If the focus point is set to (0.5f, 0.5f), result will be equivalent to CENTER_CROP.
     */
    FOCUS_CROP;

    /**
     * Gets the scale type out of string.
     *
     * <p> Used by GenericDraweeView styleable in
     * android_res/com/facebook/custom/res/values/attrs.xml
     *
     * <p> Rewritten as a switch for readability; the original mixed qualified
     * ({@code ScaleType.FIT_XY}) and unqualified enum references inconsistently.
     *
     * @param value string value to parse
     * @return scale type if recognized, or {@code null} for "none"
     * @throws IllegalArgumentException if scale type is not recognized
     */
    public static ScaleType fromString(String value) {
      switch (value) {
        case "none":
          return null;
        case "fitXY":
          return FIT_XY;
        case "fitStart":
          return FIT_START;
        case "fitCenter":
          return FIT_CENTER;
        case "fitEnd":
          return FIT_END;
        case "center":
          return CENTER;
        case "centerInside":
          return CENTER_INSIDE;
        case "centerCrop":
          return CENTER_CROP;
        case "focusCrop":
          return FOCUS_CROP;
        default:
          throw new IllegalArgumentException(
              "Unrecognized scale type: " + value +
              "; use a value defined in the ScalingUtils.fromString method");
      }
    }
  }

  /**
   * Gets transformation based on the scale type.
   *
   * <p> Translations are rounded to whole pixels via {@code (int) (d + 0.5f)} so the child
   * lands on pixel boundaries.
   *
   * @param transform out matrix to store result
   * @param parentBounds parent bounds
   * @param childWidth child width
   * @param childHeight child height
   * @param focusX focus point x coordinate, relative [0...1] (FOCUS_CROP only)
   * @param focusY focus point y coordinate, relative [0...1] (FOCUS_CROP only)
   * @param scaleType scale type to be used
   * @return reference to the out matrix
   * @throws UnsupportedOperationException if the scale type is not recognized
   */
  public static Matrix getTransform(
      final Matrix transform,
      final Rect parentBounds,
      final int childWidth,
      final int childHeight,
      final float focusX,
      final float focusY,
      final ScaleType scaleType) {
    final int parentWidth = parentBounds.width();
    final int parentHeight = parentBounds.height();
    // Per-axis scale factors that would make the child exactly fill the parent.
    final float scaleX = (float) parentWidth / (float) childWidth;
    final float scaleY = (float) parentHeight / (float) childHeight;
    float scale = 1.0f;
    float dx = 0;
    float dy = 0;
    switch (scaleType) {
      case FIT_XY:
        // Stretch both axes independently; aspect ratio is not preserved.
        dx = parentBounds.left;
        dy = parentBounds.top;
        transform.setScale(scaleX, scaleY);
        transform.postTranslate((int) (dx + 0.5f), (int) (dy + 0.5f));
        break;
      case FIT_START:
        // Uniform scale so the child fits inside; align to the top-left corner.
        scale = Math.min(scaleX, scaleY);
        dx = parentBounds.left;
        dy = parentBounds.top;
        transform.setScale(scale, scale);
        transform.postTranslate((int) (dx + 0.5f), (int) (dy + 0.5f));
        break;
      case FIT_CENTER:
        // Uniform scale so the child fits inside; center the leftover space.
        scale = Math.min(scaleX, scaleY);
        dx = parentBounds.left + (parentWidth - childWidth * scale) * 0.5f;
        dy = parentBounds.top + (parentHeight - childHeight * scale) * 0.5f;
        transform.setScale(scale, scale);
        transform.postTranslate((int) (dx + 0.5f), (int) (dy + 0.5f));
        break;
      case FIT_END:
        // Uniform scale so the child fits inside; align to the bottom-right corner.
        scale = Math.min(scaleX, scaleY);
        dx = parentBounds.left + (parentWidth - childWidth * scale);
        dy = parentBounds.top + (parentHeight - childHeight * scale);
        transform.setScale(scale, scale);
        transform.postTranslate((int) (dx + 0.5f), (int) (dy + 0.5f));
        break;
      case CENTER:
        // No scaling; just center the child within the parent.
        dx = parentBounds.left + (parentWidth - childWidth) * 0.5f;
        dy = parentBounds.top + (parentHeight - childHeight) * 0.5f;
        transform.setTranslate((int) (dx + 0.5f), (int) (dy + 0.5f));
        break;
      case CENTER_INSIDE:
        // Like FIT_CENTER but never up-scale (scale capped at 1).
        scale = Math.min(Math.min(scaleX, scaleY), 1.0f);
        dx = parentBounds.left + (parentWidth - childWidth * scale) * 0.5f;
        dy = parentBounds.top + (parentHeight - childHeight * scale) * 0.5f;
        transform.setScale(scale, scale);
        transform.postTranslate((int) (dx + 0.5f), (int) (dy + 0.5f));
        break;
      case CENTER_CROP:
        // Uniform scale so the child covers the parent; center the overflow axis.
        if (scaleY > scaleX) {
          scale = scaleY;
          dx = parentBounds.left + (parentWidth - childWidth * scale) * 0.5f;
          dy = parentBounds.top;
        } else {
          scale = scaleX;
          dx = parentBounds.left;
          dy = parentBounds.top + (parentHeight - childHeight * scale) * 0.5f;
        }
        transform.setScale(scale, scale);
        transform.postTranslate((int) (dx + 0.5f), (int) (dy + 0.5f));
        break;
      case FOCUS_CROP:
        // Like CENTER_CROP, but on the overflow axis try to center the focus point,
        // clamped so no empty space is left at either edge.
        if (scaleY > scaleX) {
          scale = scaleY;
          dx = parentWidth * 0.5f - childWidth * scale * focusX;
          dx = parentBounds.left + Math.max(Math.min(dx, 0), parentWidth - childWidth * scale);
          dy = parentBounds.top;
        } else {
          scale = scaleX;
          dx = parentBounds.left;
          dy = parentHeight * 0.5f - childHeight * scale * focusY;
          dy = parentBounds.top + Math.max(Math.min(dy, 0), parentHeight - childHeight * scale);
        }
        transform.setScale(scale, scale);
        transform.postTranslate((int) (dx + 0.5f), (int) (dy + 0.5f));
        break;
      default:
        throw new UnsupportedOperationException("Unsupported scale type: " + scaleType);
    }
    return transform;
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.ipc;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOutboundHandlerAdapter;
import io.netty.channel.ChannelPromise;
import io.netty.channel.epoll.EpollEventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellScannable;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.Waiter;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto;
import org.apache.hadoop.hbase.protobuf.RequestConverter;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.util.StringUtils;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import com.google.protobuf.ByteString;
import com.google.protobuf.Descriptors.MethodDescriptor;
import com.google.protobuf.Message;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcChannel;
@RunWith(Parameterized.class)
@Category({ SmallTests.class })
/**
 * Tests for the netty-based {@code AsyncRpcClient}, parameterized over native (epoll) vs NIO
 * transport and global vs per-client event loop group.
 */
public class TestAsyncIPC extends AbstractTestIPC {
  private static final Log LOG = LogFactory.getLog(TestAsyncIPC.class);
  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  /**
   * All four combinations of {useNativeTransport, useGlobalEventLoopGroup}.
   *
   * @return parameter tuples for the parameterized runner
   */
  @Parameters
  public static Collection<Object[]> parameters() {
    List<Object[]> paramList = new ArrayList<Object[]>();
    paramList.add(new Object[] { false, false });
    paramList.add(new Object[] { false, true });
    paramList.add(new Object[] { true, false });
    paramList.add(new Object[] { true, true });
    return paramList;
  }
  // Whether to use the native (epoll) transport instead of NIO.
  private final boolean useNativeTransport;
  // Whether all clients share one global event loop group.
  private final boolean useGlobalEventLoopGroup;
  public TestAsyncIPC(boolean useNativeTransport, boolean useGlobalEventLoopGroup) {
    this.useNativeTransport = useNativeTransport;
    this.useGlobalEventLoopGroup = useGlobalEventLoopGroup;
  }
  /**
   * Applies the current parameter combination to {@code conf}. If a global event loop group of
   * the wrong transport type is still alive from a previous parameter run, it is shut down so
   * the next client recreates it with the requested transport.
   */
  private void setConf(Configuration conf) {
    conf.setBoolean(AsyncRpcClient.USE_NATIVE_TRANSPORT, useNativeTransport);
    conf.setBoolean(AsyncRpcClient.USE_GLOBAL_EVENT_LOOP_GROUP, useGlobalEventLoopGroup);
    if (useGlobalEventLoopGroup && AsyncRpcClient.GLOBAL_EVENT_LOOP_GROUP != null) {
      // Shut the group down when its transport does not match the requested one.
      if (useNativeTransport
          && !(AsyncRpcClient.GLOBAL_EVENT_LOOP_GROUP.getFirst() instanceof EpollEventLoopGroup)
          || (!useNativeTransport
          && !(AsyncRpcClient.GLOBAL_EVENT_LOOP_GROUP.getFirst() instanceof NioEventLoopGroup))) {
        AsyncRpcClient.GLOBAL_EVENT_LOOP_GROUP.getFirst().shutdownGracefully();
        AsyncRpcClient.GLOBAL_EVENT_LOOP_GROUP = null;
      }
    }
  }
  /** Creates a client whose codec is disabled (returns null), for no-codec test paths. */
  @Override
  protected AsyncRpcClient createRpcClientNoCodec(Configuration conf) {
    setConf(conf);
    return new AsyncRpcClient(conf, HConstants.CLUSTER_ID_DEFAULT, null) {
      @Override
      Codec getCodec() {
        return null;
      }
    };
  }
  /** Creates a plain client configured for the current parameter combination. */
  @Override
  protected AsyncRpcClient createRpcClient(Configuration conf) {
    setConf(conf);
    return new AsyncRpcClient(conf, HConstants.CLUSTER_ID_DEFAULT, null);
  }
  /**
   * Creates a client whose channel pipeline fails every outbound write with a
   * RuntimeException, simulating a fault during connection setup.
   */
  @Override
  protected AsyncRpcClient createRpcClientRTEDuringConnectionSetup(Configuration conf) {
    setConf(conf);
    return new AsyncRpcClient(conf, HConstants.CLUSTER_ID_DEFAULT, null,
        new ChannelInitializer<SocketChannel>() {
          @Override
          protected void initChannel(SocketChannel ch) throws Exception {
            ch.pipeline().addFirst(new ChannelOutboundHandlerAdapter() {
              @Override
              public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise)
                  throws Exception {
                // Fail every write to inject a connection-setup fault.
                promise.setFailure(new RuntimeException("Injected fault"));
              }
            });
          }
        });
  }
  /** Verifies that an async echo call completes against a live test RPC server. */
  @Test
  public void testAsyncConnectionSetup() throws Exception {
    TestRpcServer rpcServer = new TestRpcServer();
    AsyncRpcClient client = createRpcClient(CONF);
    try {
      rpcServer.start();
      InetSocketAddress address = rpcServer.getListenerAddress();
      MethodDescriptor md = SERVICE.getDescriptorForType().findMethodByName("echo");
      EchoRequestProto param = EchoRequestProto.newBuilder().setMessage("hello").build();
      RpcChannel channel =
          client.createRpcChannel(ServerName.valueOf(address.getHostName(), address.getPort(),
            System.currentTimeMillis()), User.getCurrent(), 0);
      final AtomicBoolean done = new AtomicBoolean(false);
      // The callback flips 'done' when the echo response arrives.
      channel.callMethod(md, new PayloadCarryingRpcController(), param, md.getOutputType()
          .toProto(), new RpcCallback<Message>() {
        @Override
        public void run(Message parameter) {
          done.set(true);
        }
      });
      // Wait (up to 1s) for the async response.
      TEST_UTIL.waitFor(1000, new Waiter.Predicate<Exception>() {
        @Override
        public boolean evaluate() throws Exception {
          return done.get();
        }
      });
    } finally {
      client.close();
      rpcServer.stop();
    }
  }
  /**
   * Verifies that the injected connection-setup fault surfaces through the controller's
   * failure callback rather than through a successful response.
   */
  @Test
  public void testRTEDuringAsyncConnectionSetup() throws Exception {
    TestRpcServer rpcServer = new TestRpcServer();
    AsyncRpcClient client = createRpcClientRTEDuringConnectionSetup(CONF);
    try {
      rpcServer.start();
      InetSocketAddress address = rpcServer.getListenerAddress();
      MethodDescriptor md = SERVICE.getDescriptorForType().findMethodByName("echo");
      EchoRequestProto param = EchoRequestProto.newBuilder().setMessage("hello").build();
      RpcChannel channel =
          client.createRpcChannel(ServerName.valueOf(address.getHostName(), address.getPort(),
            System.currentTimeMillis()), User.getCurrent(), 0);
      final AtomicBoolean done = new AtomicBoolean(false);
      PayloadCarryingRpcController controller = new PayloadCarryingRpcController();
      // Expected path: the injected fault reaches the failure callback.
      controller.notifyOnFail(new RpcCallback<IOException>() {
        @Override
        public void run(IOException e) {
          done.set(true);
          LOG.info("Caught expected exception: " + e.toString());
          assertTrue(StringUtils.stringifyException(e).contains("Injected fault"));
        }
      });
      // Unexpected path: a successful response means the fault was not injected.
      channel.callMethod(md, controller, param, md.getOutputType().toProto(),
        new RpcCallback<Message>() {
          @Override
          public void run(Message parameter) {
            done.set(true);
            fail("Expected an exception to have been thrown!");
          }
        });
      TEST_UTIL.waitFor(1000, new Waiter.Predicate<Exception>() {
        @Override
        public boolean evaluate() throws Exception {
          return done.get();
        }
      });
    } finally {
      client.close();
      rpcServer.stop();
    }
  }
  /**
   * Standalone micro-benchmark: echoes a multi-cell mutation CYCLES times against a local test
   * RPC server and logs the elapsed time. Not run as part of the unit test suite.
   */
  public static void main(String[] args) throws IOException, SecurityException,
      NoSuchMethodException, InterruptedException {
    if (args.length != 2) {
      System.out.println("Usage: TestAsyncIPC <CYCLES> <CELLS_PER_CYCLE>");
      return;
    }
    // ((Log4JLogger)HBaseServer.LOG).getLogger().setLevel(Level.INFO);
    // ((Log4JLogger)HBaseClient.LOG).getLogger().setLevel(Level.INFO);
    int cycles = Integer.parseInt(args[0]);
    int cellcount = Integer.parseInt(args[1]);
    Configuration conf = HBaseConfiguration.create();
    TestRpcServer rpcServer = new TestRpcServer();
    MethodDescriptor md = SERVICE.getDescriptorForType().findMethodByName("echo");
    EchoRequestProto param = EchoRequestProto.newBuilder().setMessage("hello").build();
    AsyncRpcClient client = new AsyncRpcClient(conf, HConstants.CLUSTER_ID_DEFAULT, null);
    // Build one row mutation carrying 'cellcount' copies of the big test cell.
    KeyValue kv = BIG_CELL;
    Put p = new Put(CellUtil.cloneRow(kv));
    for (int i = 0; i < cellcount; i++) {
      p.add(kv);
    }
    RowMutations rm = new RowMutations(CellUtil.cloneRow(kv));
    rm.add(p);
    try {
      rpcServer.start();
      InetSocketAddress address = rpcServer.getListenerAddress();
      long startTime = System.currentTimeMillis();
      User user = User.getCurrent();
      for (int i = 0; i < cycles; i++) {
        List<CellScannable> cells = new ArrayList<CellScannable>();
        // Message param = RequestConverter.buildMultiRequest(HConstants.EMPTY_BYTE_ARRAY, rm);
        ClientProtos.RegionAction.Builder builder =
            RequestConverter.buildNoDataRegionAction(HConstants.EMPTY_BYTE_ARRAY, rm, cells,
              RegionAction.newBuilder(), ClientProtos.Action.newBuilder(),
              MutationProto.newBuilder());
        builder.setRegion(RegionSpecifier
            .newBuilder()
            .setType(RegionSpecifierType.REGION_NAME)
            .setValue(
              ByteString.copyFrom(HRegionInfo.FIRST_META_REGIONINFO.getEncodedNameAsBytes())));
        if (i % 100000 == 0) {
          LOG.info("" + i);
          // Uncomment this for a thread dump every so often.
          // ReflectionUtils.printThreadInfo(new PrintWriter(System.out),
          // "Thread dump " + Thread.currentThread().getName());
        }
        // The controller ferries the cells out-of-band alongside the protobuf request.
        PayloadCarryingRpcController pcrc =
            new PayloadCarryingRpcController(CellUtil.createCellScanner(cells));
        // Pair<Message, CellScanner> response =
        client.call(pcrc, md, builder.build(), param, user, address);
        /*
         * int count = 0; while (p.getSecond().advance()) { count++; } assertEquals(cells.size(),
         * count);
         */
      }
      LOG.info("Cycled " + cycles + " time(s) with " + cellcount + " cell(s) in "
          + (System.currentTimeMillis() - startTime) + "ms");
    } finally {
      client.close();
      rpcServer.stop();
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.kstream.internals;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Grouped;
import org.apache.kafka.streams.kstream.KGroupedTable;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.KeyValueMapper;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.kstream.Named;
import org.apache.kafka.streams.kstream.Predicate;
import org.apache.kafka.streams.kstream.Produced;
import org.apache.kafka.streams.kstream.Suppressed;
import org.apache.kafka.streams.kstream.ValueJoiner;
import org.apache.kafka.streams.kstream.ValueMapper;
import org.apache.kafka.streams.kstream.ValueMapperWithKey;
import org.apache.kafka.streams.kstream.ValueTransformerWithKeySupplier;
import org.apache.kafka.streams.kstream.internals.foreignkeyjoin.CombinedKey;
import org.apache.kafka.streams.kstream.internals.foreignkeyjoin.CombinedKeySchema;
import org.apache.kafka.streams.kstream.internals.foreignkeyjoin.ForeignJoinSubscriptionProcessorSupplier;
import org.apache.kafka.streams.kstream.internals.foreignkeyjoin.ForeignJoinSubscriptionSendProcessorSupplier;
import org.apache.kafka.streams.kstream.internals.foreignkeyjoin.SubscriptionJoinForeignProcessorSupplier;
import org.apache.kafka.streams.kstream.internals.foreignkeyjoin.SubscriptionResolverJoinProcessorSupplier;
import org.apache.kafka.streams.kstream.internals.foreignkeyjoin.SubscriptionResponseWrapper;
import org.apache.kafka.streams.kstream.internals.foreignkeyjoin.SubscriptionResponseWrapperSerde;
import org.apache.kafka.streams.kstream.internals.foreignkeyjoin.SubscriptionStoreReceiveProcessorSupplier;
import org.apache.kafka.streams.kstream.internals.foreignkeyjoin.SubscriptionWrapper;
import org.apache.kafka.streams.kstream.internals.foreignkeyjoin.SubscriptionWrapperSerde;
import org.apache.kafka.streams.kstream.internals.graph.KTableKTableJoinNode;
import org.apache.kafka.streams.kstream.internals.graph.ProcessorGraphNode;
import org.apache.kafka.streams.kstream.internals.graph.ProcessorParameters;
import org.apache.kafka.streams.kstream.internals.graph.StatefulProcessorNode;
import org.apache.kafka.streams.kstream.internals.graph.StreamSinkNode;
import org.apache.kafka.streams.kstream.internals.graph.StreamSourceNode;
import org.apache.kafka.streams.kstream.internals.graph.GraphNode;
import org.apache.kafka.streams.kstream.internals.graph.TableProcessorNode;
import org.apache.kafka.streams.kstream.internals.suppress.FinalResultsSuppressionBuilder;
import org.apache.kafka.streams.kstream.internals.suppress.KTableSuppressProcessorSupplier;
import org.apache.kafka.streams.kstream.internals.suppress.NamedSuppressed;
import org.apache.kafka.streams.kstream.internals.suppress.SuppressedInternal;
import org.apache.kafka.streams.processor.api.ProcessorSupplier;
import org.apache.kafka.streams.processor.internals.InternalTopicProperties;
import org.apache.kafka.streams.processor.internals.StaticTopicNameExtractor;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.StoreBuilder;
import org.apache.kafka.streams.state.Stores;
import org.apache.kafka.streams.state.TimestampedKeyValueStore;
import org.apache.kafka.streams.state.ValueAndTimestamp;
import org.apache.kafka.streams.state.internals.InMemoryTimeOrderedKeyValueBuffer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Duration;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Function;
import java.util.function.Supplier;
import static org.apache.kafka.streams.kstream.internals.graph.GraphGraceSearchUtil.findAndVerifyWindowGrace;
/**
* The implementation class of {@link KTable}.
*
* @param <K> the key type
* @param <S> the source's (parent's) value type
* @param <V> the value type
*/
public class KTableImpl<K, S, V> extends AbstractStream<K, V> implements KTable<K, V> {
    private static final Logger LOG = LoggerFactory.getLogger(KTableImpl.class);

    // Prefixes used when auto-generating processor and state-store names for each DSL operator.
    static final String SOURCE_NAME = "KTABLE-SOURCE-";
    static final String STATE_STORE_NAME = "STATE-STORE-";
    private static final String FILTER_NAME = "KTABLE-FILTER-";
    private static final String JOINTHIS_NAME = "KTABLE-JOINTHIS-";
    private static final String JOINOTHER_NAME = "KTABLE-JOINOTHER-";
    private static final String MAPVALUES_NAME = "KTABLE-MAPVALUES-";
    private static final String MERGE_NAME = "KTABLE-MERGE-";
    private static final String SELECT_NAME = "KTABLE-SELECT-";
    private static final String SUPPRESS_NAME = "KTABLE-SUPPRESS-";
    private static final String TOSTREAM_NAME = "KTABLE-TOSTREAM-";
    private static final String TRANSFORMVALUES_NAME = "KTABLE-TRANSFORMVALUES-";

    // Name prefixes for the nodes/stores/topics of the foreign-key join sub-topology.
    private static final String FK_JOIN = "KTABLE-FK-JOIN-";
    private static final String FK_JOIN_STATE_STORE_NAME = FK_JOIN + "SUBSCRIPTION-STATE-STORE-";
    private static final String SUBSCRIPTION_REGISTRATION = FK_JOIN + "SUBSCRIPTION-REGISTRATION-";
    private static final String SUBSCRIPTION_RESPONSE = FK_JOIN + "SUBSCRIPTION-RESPONSE-";
    private static final String SUBSCRIPTION_PROCESSOR = FK_JOIN + "SUBSCRIPTION-PROCESSOR-";
    private static final String SUBSCRIPTION_RESPONSE_RESOLVER_PROCESSOR = FK_JOIN + "SUBSCRIPTION-RESPONSE-RESOLVER-PROCESSOR-";
    private static final String FK_JOIN_OUTPUT_NAME = FK_JOIN + "OUTPUT-";
    private static final String TOPIC_SUFFIX = "-topic";
    private static final String SINK_NAME = "KTABLE-SINK-";

    // Temporarily setting the processorSupplier to type Object so that we can transition from the
    // old ProcessorSupplier to the new api.ProcessorSupplier. This works because all accesses to
    // this field are guarded by typechecks anyway.
    private final Object processorSupplier;

    // Name of the queryable state store backing this table, or null if not materialized with a queryable store.
    private final String queryableStoreName;

    // Whether this table forwards both old and new values downstream (required e.g. for aggregations and outer joins).
    private boolean sendOldValues = false;
    /**
     * Creates a {@code KTableImpl} whose processor uses the old (deprecated)
     * {@code org.apache.kafka.streams.processor.ProcessorSupplier} API.
     *
     * @param name                   processor node name of this table
     * @param keySerde               key serde, or {@code null} to fall back to config defaults
     * @param valueSerde             value serde, or {@code null} to fall back to config defaults
     * @param subTopologySourceNodes names of the source nodes of this table's sub-topology
     * @param queryableStoreName     queryable store name, or {@code null} if not materialized
     * @param processorSupplier      old-PAPI supplier for this table's processor
     * @param graphNode              graph node representing this table in the topology DAG
     * @param builder                the internal builder accumulating the topology
     */
    @SuppressWarnings("deprecation") // Old PAPI compatibility.
    public KTableImpl(final String name,
                      final Serde<K> keySerde,
                      final Serde<V> valueSerde,
                      final Set<String> subTopologySourceNodes,
                      final String queryableStoreName,
                      final org.apache.kafka.streams.processor.ProcessorSupplier<?, ?> processorSupplier,
                      final GraphNode graphNode,
                      final InternalStreamsBuilder builder) {
        super(name, keySerde, valueSerde, subTopologySourceNodes, graphNode, builder);
        this.processorSupplier = processorSupplier;
        this.queryableStoreName = queryableStoreName;
    }
    /**
     * Creates a {@code KTableImpl} whose processor uses the new
     * {@code org.apache.kafka.streams.processor.api.ProcessorSupplier} API.
     * See the old-PAPI constructor above for parameter semantics.
     */
    public KTableImpl(final String name,
                      final Serde<K> keySerde,
                      final Serde<V> valueSerde,
                      final Set<String> subTopologySourceNodes,
                      final String queryableStoreName,
                      final org.apache.kafka.streams.processor.api.ProcessorSupplier<?, ?, ?, ?> newProcessorSupplier,
                      final GraphNode graphNode,
                      final InternalStreamsBuilder builder) {
        super(name, keySerde, valueSerde, subTopologySourceNodes, graphNode, builder);
        this.processorSupplier = newProcessorSupplier;
        this.queryableStoreName = queryableStoreName;
    }
@Override
public String queryableStoreName() {
return queryableStoreName;
}
    /**
     * Shared implementation behind all {@code filter} / {@code filterNot} overloads:
     * wires a {@link KTableFilter} processor node into the topology graph and returns the
     * resulting table.
     *
     * @param predicate            the filter condition applied to each record
     * @param named                user-supplied name; an auto-generated one is used when empty
     * @param materializedInternal materialization spec, or {@code null} if the result is not materialized
     * @param filterNot            if {@code true}, keep records for which the predicate evaluates to false
     */
    private KTable<K, V> doFilter(final Predicate<? super K, ? super V> predicate,
                                  final Named named,
                                  final MaterializedInternal<K, V, KeyValueStore<Bytes, byte[]>> materializedInternal,
                                  final boolean filterNot) {
        final Serde<K> keySerde;
        final Serde<V> valueSerde;
        final String queryableStoreName;
        final StoreBuilder<TimestampedKeyValueStore<K, V>> storeBuilder;

        if (materializedInternal != null) {
            // we actually do not need to generate store names at all since if it is not specified, we will not
            // materialize the store; but we still need to burn one index BEFORE generating the processor to keep compatibility.
            if (materializedInternal.storeName() == null) {
                builder.newStoreName(FILTER_NAME);
            }
            // we can inherit parent key and value serde if user do not provide specific overrides, more specifically:
            // we preserve the key following the order of 1) materialized, 2) parent
            keySerde = materializedInternal.keySerde() != null ? materializedInternal.keySerde() : this.keySerde;
            // we preserve the value following the order of 1) materialized, 2) parent
            valueSerde = materializedInternal.valueSerde() != null ? materializedInternal.valueSerde() : this.valueSerde;
            queryableStoreName = materializedInternal.queryableStoreName();
            // only materialize if materialized is specified and it has queryable name
            storeBuilder = queryableStoreName != null ? (new TimestampedKeyValueStoreMaterializer<>(materializedInternal)).materialize() : null;
        } else {
            keySerde = this.keySerde;
            valueSerde = this.valueSerde;
            queryableStoreName = null;
            storeBuilder = null;
        }
        final String name = new NamedInternal(named).orElseGenerateWithPrefix(builder, FILTER_NAME);
        final KTableNewProcessorSupplier<K, V, K, V> processorSupplier =
            new KTableFilter<>(this, predicate, filterNot, queryableStoreName);
        // the cast is a known lie (V vs Change<V>); see unsafeCastProcessorParametersToCompletelyDifferentType
        final ProcessorParameters<K, V, ?, ?> processorParameters = unsafeCastProcessorParametersToCompletelyDifferentType(
            new ProcessorParameters<>(processorSupplier, name)
        );
        final GraphNode tableNode = new TableProcessorNode<>(
            name,
            processorParameters,
            storeBuilder
        );
        builder.addGraphNode(this.graphNode, tableNode);
        return new KTableImpl<K, V, V>(
            name,
            keySerde,
            valueSerde,
            subTopologySourceNodes,
            queryableStoreName,
            processorSupplier,
            tableNode,
            builder);
    }
@Override
public KTable<K, V> filter(final Predicate<? super K, ? super V> predicate) {
Objects.requireNonNull(predicate, "predicate can't be null");
return doFilter(predicate, NamedInternal.empty(), null, false);
}
@Override
public KTable<K, V> filter(final Predicate<? super K, ? super V> predicate, final Named named) {
Objects.requireNonNull(predicate, "predicate can't be null");
return doFilter(predicate, named, null, false);
}
@Override
public KTable<K, V> filter(final Predicate<? super K, ? super V> predicate,
final Named named,
final Materialized<K, V, KeyValueStore<Bytes, byte[]>> materialized) {
Objects.requireNonNull(predicate, "predicate can't be null");
Objects.requireNonNull(materialized, "materialized can't be null");
final MaterializedInternal<K, V, KeyValueStore<Bytes, byte[]>> materializedInternal = new MaterializedInternal<>(materialized);
return doFilter(predicate, named, materializedInternal, false);
}
@Override
public KTable<K, V> filter(final Predicate<? super K, ? super V> predicate,
final Materialized<K, V, KeyValueStore<Bytes, byte[]>> materialized) {
return filter(predicate, NamedInternal.empty(), materialized);
}
@Override
public KTable<K, V> filterNot(final Predicate<? super K, ? super V> predicate) {
Objects.requireNonNull(predicate, "predicate can't be null");
return doFilter(predicate, NamedInternal.empty(), null, true);
}
@Override
public KTable<K, V> filterNot(final Predicate<? super K, ? super V> predicate,
final Named named) {
Objects.requireNonNull(predicate, "predicate can't be null");
return doFilter(predicate, named, null, true);
}
@Override
public KTable<K, V> filterNot(final Predicate<? super K, ? super V> predicate,
final Materialized<K, V, KeyValueStore<Bytes, byte[]>> materialized) {
return filterNot(predicate, NamedInternal.empty(), materialized);
}
@Override
public KTable<K, V> filterNot(final Predicate<? super K, ? super V> predicate,
final Named named,
final Materialized<K, V, KeyValueStore<Bytes, byte[]>> materialized) {
Objects.requireNonNull(predicate, "predicate can't be null");
Objects.requireNonNull(materialized, "materialized can't be null");
final MaterializedInternal<K, V, KeyValueStore<Bytes, byte[]>> materializedInternal = new MaterializedInternal<>(materialized);
final NamedInternal renamed = new NamedInternal(named);
return doFilter(predicate, renamed, materializedInternal, true);
}
    /**
     * Shared implementation behind all {@code mapValues} overloads: wires a
     * {@link KTableMapValues} processor node into the topology graph.
     *
     * @param mapper               the value mapper (already key-aware; plain mappers are wrapped via {@code withKey})
     * @param named                user-supplied name; an auto-generated one is used when empty
     * @param materializedInternal materialization spec, or {@code null} if the result is not materialized
     */
    private <VR> KTable<K, VR> doMapValues(final ValueMapperWithKey<? super K, ? super V, ? extends VR> mapper,
                                           final Named named,
                                           final MaterializedInternal<K, VR, KeyValueStore<Bytes, byte[]>> materializedInternal) {
        final Serde<K> keySerde;
        final Serde<VR> valueSerde;
        final String queryableStoreName;
        final StoreBuilder<TimestampedKeyValueStore<K, VR>> storeBuilder;

        if (materializedInternal != null) {
            // we actually do not need to generate store names at all since if it is not specified, we will not
            // materialize the store; but we still need to burn one index BEFORE generating the processor to keep compatibility.
            if (materializedInternal.storeName() == null) {
                builder.newStoreName(MAPVALUES_NAME);
            }
            keySerde = materializedInternal.keySerde() != null ? materializedInternal.keySerde() : this.keySerde;
            valueSerde = materializedInternal.valueSerde();
            queryableStoreName = materializedInternal.queryableStoreName();
            // only materialize if materialized is specified and it has queryable name
            storeBuilder = queryableStoreName != null ? (new TimestampedKeyValueStoreMaterializer<>(materializedInternal)).materialize() : null;
        } else {
            keySerde = this.keySerde;
            valueSerde = null;
            queryableStoreName = null;
            storeBuilder = null;
        }
        final String name = new NamedInternal(named).orElseGenerateWithPrefix(builder, MAPVALUES_NAME);
        final KTableNewProcessorSupplier<K, V, K, VR> processorSupplier = new KTableMapValues<>(this, mapper, queryableStoreName);
        // leaving in calls to ITB until building topology with graph
        final ProcessorParameters<K, VR, ?, ?> processorParameters = unsafeCastProcessorParametersToCompletelyDifferentType(
            new ProcessorParameters<>(processorSupplier, name)
        );
        final GraphNode tableNode = new TableProcessorNode<>(
            name,
            processorParameters,
            storeBuilder
        );
        builder.addGraphNode(this.graphNode, tableNode);
        // don't inherit parent value serde, since this operation may change the value type, more specifically:
        // we preserve the key following the order of 1) materialized, 2) parent, 3) null
        // we preserve the value following the order of 1) materialized, 2) null
        return new KTableImpl<>(
            name,
            keySerde,
            valueSerde,
            subTopologySourceNodes,
            queryableStoreName,
            processorSupplier,
            tableNode,
            builder
        );
    }
@Override
public <VR> KTable<K, VR> mapValues(final ValueMapper<? super V, ? extends VR> mapper) {
Objects.requireNonNull(mapper, "mapper can't be null");
return doMapValues(withKey(mapper), NamedInternal.empty(), null);
}
@Override
public <VR> KTable<K, VR> mapValues(final ValueMapper<? super V, ? extends VR> mapper,
final Named named) {
Objects.requireNonNull(mapper, "mapper can't be null");
return doMapValues(withKey(mapper), named, null);
}
@Override
public <VR> KTable<K, VR> mapValues(final ValueMapperWithKey<? super K, ? super V, ? extends VR> mapper) {
Objects.requireNonNull(mapper, "mapper can't be null");
return doMapValues(mapper, NamedInternal.empty(), null);
}
@Override
public <VR> KTable<K, VR> mapValues(final ValueMapperWithKey<? super K, ? super V, ? extends VR> mapper,
final Named named) {
Objects.requireNonNull(mapper, "mapper can't be null");
return doMapValues(mapper, named, null);
}
@Override
public <VR> KTable<K, VR> mapValues(final ValueMapper<? super V, ? extends VR> mapper,
final Materialized<K, VR, KeyValueStore<Bytes, byte[]>> materialized) {
return mapValues(mapper, NamedInternal.empty(), materialized);
}
@Override
public <VR> KTable<K, VR> mapValues(final ValueMapper<? super V, ? extends VR> mapper,
final Named named,
final Materialized<K, VR, KeyValueStore<Bytes, byte[]>> materialized) {
Objects.requireNonNull(mapper, "mapper can't be null");
Objects.requireNonNull(materialized, "materialized can't be null");
final MaterializedInternal<K, VR, KeyValueStore<Bytes, byte[]>> materializedInternal = new MaterializedInternal<>(materialized);
return doMapValues(withKey(mapper), named, materializedInternal);
}
@Override
public <VR> KTable<K, VR> mapValues(final ValueMapperWithKey<? super K, ? super V, ? extends VR> mapper,
final Materialized<K, VR, KeyValueStore<Bytes, byte[]>> materialized) {
return mapValues(mapper, NamedInternal.empty(), materialized);
}
@Override
public <VR> KTable<K, VR> mapValues(final ValueMapperWithKey<? super K, ? super V, ? extends VR> mapper,
final Named named,
final Materialized<K, VR, KeyValueStore<Bytes, byte[]>> materialized) {
Objects.requireNonNull(mapper, "mapper can't be null");
Objects.requireNonNull(materialized, "materialized can't be null");
final MaterializedInternal<K, VR, KeyValueStore<Bytes, byte[]>> materializedInternal = new MaterializedInternal<>(materialized);
return doMapValues(mapper, named, materializedInternal);
}
@Override
public <VR> KTable<K, VR> transformValues(final ValueTransformerWithKeySupplier<? super K, ? super V, ? extends VR> transformerSupplier,
final String... stateStoreNames) {
return doTransformValues(transformerSupplier, null, NamedInternal.empty(), stateStoreNames);
}
@Override
public <VR> KTable<K, VR> transformValues(final ValueTransformerWithKeySupplier<? super K, ? super V, ? extends VR> transformerSupplier,
final Named named,
final String... stateStoreNames) {
Objects.requireNonNull(named, "processorName can't be null");
return doTransformValues(transformerSupplier, null, new NamedInternal(named), stateStoreNames);
}
@Override
public <VR> KTable<K, VR> transformValues(final ValueTransformerWithKeySupplier<? super K, ? super V, ? extends VR> transformerSupplier,
final Materialized<K, VR, KeyValueStore<Bytes, byte[]>> materialized,
final String... stateStoreNames) {
return transformValues(transformerSupplier, materialized, NamedInternal.empty(), stateStoreNames);
}
@Override
public <VR> KTable<K, VR> transformValues(final ValueTransformerWithKeySupplier<? super K, ? super V, ? extends VR> transformerSupplier,
final Materialized<K, VR, KeyValueStore<Bytes, byte[]>> materialized,
final Named named,
final String... stateStoreNames) {
Objects.requireNonNull(materialized, "materialized can't be null");
Objects.requireNonNull(named, "named can't be null");
final MaterializedInternal<K, VR, KeyValueStore<Bytes, byte[]>> materializedInternal = new MaterializedInternal<>(materialized);
return doTransformValues(transformerSupplier, materializedInternal, new NamedInternal(named), stateStoreNames);
}
    /**
     * Shared implementation behind all {@code transformValues} overloads: wires a
     * {@link KTableTransformValues} processor node (with any required state-store connections)
     * into the topology graph.
     *
     * @param transformerSupplier  supplies the stateful transformer applied per record
     * @param materializedInternal materialization spec, or {@code null} if the result is not materialized
     * @param namedInternal        name wrapper; an auto-generated name is used when empty
     * @param stateStoreNames      names of already-registered stores the transformer may access
     */
    private <VR> KTable<K, VR> doTransformValues(final ValueTransformerWithKeySupplier<? super K, ? super V, ? extends VR> transformerSupplier,
                                                 final MaterializedInternal<K, VR, KeyValueStore<Bytes, byte[]>> materializedInternal,
                                                 final NamedInternal namedInternal,
                                                 final String... stateStoreNames) {
        Objects.requireNonNull(stateStoreNames, "stateStoreNames");
        final Serde<K> keySerde;
        final Serde<VR> valueSerde;
        final String queryableStoreName;
        final StoreBuilder<TimestampedKeyValueStore<K, VR>> storeBuilder;

        if (materializedInternal != null) {
            // don't inherit parent value serde, since this operation may change the value type, more specifically:
            // we preserve the key following the order of 1) materialized, 2) parent
            keySerde = materializedInternal.keySerde() != null ? materializedInternal.keySerde() : this.keySerde;
            // we preserve the value following the order of 1) materialized, 2) null
            valueSerde = materializedInternal.valueSerde();
            queryableStoreName = materializedInternal.queryableStoreName();
            // only materialize if materialized is specified and it has queryable name
            storeBuilder = queryableStoreName != null ? (new TimestampedKeyValueStoreMaterializer<>(materializedInternal)).materialize() : null;
        } else {
            keySerde = this.keySerde;
            valueSerde = null;
            queryableStoreName = null;
            storeBuilder = null;
        }
        final String name = namedInternal.orElseGenerateWithPrefix(builder, TRANSFORMVALUES_NAME);
        final KTableProcessorSupplier<K, V, VR> processorSupplier = new KTableTransformValues<>(
            this,
            transformerSupplier,
            queryableStoreName);
        // the cast is a known lie (VR vs Change<V>); see unsafeCastProcessorParametersToCompletelyDifferentType
        final ProcessorParameters<K, VR, ?, ?> processorParameters = unsafeCastProcessorParametersToCompletelyDifferentType(
            new ProcessorParameters<>(processorSupplier, name)
        );
        final GraphNode tableNode = new TableProcessorNode<>(
            name,
            processorParameters,
            storeBuilder,
            stateStoreNames
        );
        builder.addGraphNode(this.graphNode, tableNode);
        return new KTableImpl<>(
            name,
            keySerde,
            valueSerde,
            subTopologySourceNodes,
            queryableStoreName,
            processorSupplier,
            tableNode,
            builder);
    }
@Override
public KStream<K, V> toStream() {
return toStream(NamedInternal.empty());
}
@Override
public KStream<K, V> toStream(final Named named) {
Objects.requireNonNull(named, "named can't be null");
final String name = new NamedInternal(named).orElseGenerateWithPrefix(builder, TOSTREAM_NAME);
final KStreamMapValues<K, Change<V>, V> kStreamMapValues = new KStreamMapValues<>((key, change) -> change.newValue);
final ProcessorParameters<K, V, ?, ?> processorParameters = unsafeCastProcessorParametersToCompletelyDifferentType(
new ProcessorParameters<>(kStreamMapValues, name)
);
final ProcessorGraphNode<K, V> toStreamNode = new ProcessorGraphNode<>(
name,
processorParameters
);
builder.addGraphNode(this.graphNode, toStreamNode);
// we can inherit parent key and value serde
return new KStreamImpl<>(name, keySerde, valueSerde, subTopologySourceNodes, false, toStreamNode, builder);
}
@Override
public <K1> KStream<K1, V> toStream(final KeyValueMapper<? super K, ? super V, ? extends K1> mapper) {
return toStream().selectKey(mapper);
}
@Override
public <K1> KStream<K1, V> toStream(final KeyValueMapper<? super K, ? super V, ? extends K1> mapper,
final Named named) {
return toStream(named).selectKey(mapper);
}
    /**
     * Suppresses intermediate updates of this table according to the given {@link Suppressed}
     * config, buffering records in an in-memory time-ordered store.
     *
     * @param suppressed the suppression configuration; must be a {@link NamedSuppressed}
     *                   (i.e. one of the built-in implementations)
     * @throws IllegalArgumentException if a custom {@code Suppressed} subclass is passed
     */
    @Override
    public KTable<K, V> suppress(final Suppressed<? super K> suppressed) {
        final String name;
        if (suppressed instanceof NamedSuppressed) {
            final String givenName = ((NamedSuppressed<?>) suppressed).name();
            name = givenName != null ? givenName : builder.newProcessorName(SUPPRESS_NAME);
        } else {
            throw new IllegalArgumentException("Custom subclasses of Suppressed are not supported.");
        }
        final SuppressedInternal<K> suppressedInternal = buildSuppress(suppressed, name);
        // derive the buffer store name from the user-supplied name when present
        final String storeName =
            suppressedInternal.name() != null ? suppressedInternal.name() + "-store" : builder.newStoreName(SUPPRESS_NAME);
        final ProcessorSupplier<K, Change<V>, K, Change<V>> suppressionSupplier = new KTableSuppressProcessorSupplier<>(
            suppressedInternal,
            storeName,
            this
        );
        // honor the buffer's changelog-logging configuration
        final StoreBuilder<InMemoryTimeOrderedKeyValueBuffer<K, V>> storeBuilder;
        if (suppressedInternal.bufferConfig().isLoggingEnabled()) {
            final Map<String, String> topicConfig = suppressedInternal.bufferConfig().getLogConfig();
            storeBuilder = new InMemoryTimeOrderedKeyValueBuffer.Builder<>(
                storeName,
                keySerde,
                valueSerde)
                .withLoggingEnabled(topicConfig);
        } else {
            storeBuilder = new InMemoryTimeOrderedKeyValueBuffer.Builder<>(
                storeName,
                keySerde,
                valueSerde)
                .withLoggingDisabled();
        }
        final ProcessorGraphNode<K, Change<V>> node = new StatefulProcessorNode<>(
            name,
            new ProcessorParameters<>(suppressionSupplier, name),
            storeBuilder
        );
        builder.addGraphNode(graphNode, node);
        return new KTableImpl<K, S, V>(
            name,
            keySerde,
            valueSerde,
            Collections.singleton(this.name),
            null,
            suppressionSupplier,
            node,
            builder
        );
    }
    /**
     * Normalizes a user-facing {@link Suppressed} config into a {@link SuppressedInternal}.
     * For "final results" suppression, the suppress duration is taken from the upstream
     * window's grace period.
     *
     * @throws IllegalArgumentException if a custom {@code Suppressed} subclass is passed
     */
    @SuppressWarnings("unchecked")
    private SuppressedInternal<K> buildSuppress(final Suppressed<? super K> suppress, final String name) {
        if (suppress instanceof FinalResultsSuppressionBuilder) {
            final long grace = findAndVerifyWindowGrace(graphNode);
            LOG.info("Using grace period of [{}] as the suppress duration for node [{}].",
                Duration.ofMillis(grace), name);
            final FinalResultsSuppressionBuilder<?> builder = (FinalResultsSuppressionBuilder<?>) suppress;
            final SuppressedInternal<?> finalResultsSuppression =
                builder.buildFinalResultsSuppression(Duration.ofMillis(grace));
            return (SuppressedInternal<K>) finalResultsSuppression;
        } else if (suppress instanceof SuppressedInternal) {
            return (SuppressedInternal<K>) suppress;
        } else {
            throw new IllegalArgumentException("Custom subclasses of Suppressed are not allowed.");
        }
    }
@Override
public <V1, R> KTable<K, R> join(final KTable<K, V1> other,
final ValueJoiner<? super V, ? super V1, ? extends R> joiner) {
return doJoin(other, joiner, NamedInternal.empty(), null, false, false);
}
@Override
public <V1, R> KTable<K, R> join(final KTable<K, V1> other,
final ValueJoiner<? super V, ? super V1, ? extends R> joiner,
final Named named) {
return doJoin(other, joiner, named, null, false, false);
}
@Override
public <VO, VR> KTable<K, VR> join(final KTable<K, VO> other,
final ValueJoiner<? super V, ? super VO, ? extends VR> joiner,
final Materialized<K, VR, KeyValueStore<Bytes, byte[]>> materialized) {
return join(other, joiner, NamedInternal.empty(), materialized);
}
@Override
public <VO, VR> KTable<K, VR> join(final KTable<K, VO> other,
final ValueJoiner<? super V, ? super VO, ? extends VR> joiner,
final Named named,
final Materialized<K, VR, KeyValueStore<Bytes, byte[]>> materialized) {
Objects.requireNonNull(materialized, "materialized can't be null");
final MaterializedInternal<K, VR, KeyValueStore<Bytes, byte[]>> materializedInternal =
new MaterializedInternal<>(materialized, builder, MERGE_NAME);
return doJoin(other, joiner, named, materializedInternal, false, false);
}
@Override
public <V1, R> KTable<K, R> outerJoin(final KTable<K, V1> other,
final ValueJoiner<? super V, ? super V1, ? extends R> joiner) {
return outerJoin(other, joiner, NamedInternal.empty());
}
@Override
public <V1, R> KTable<K, R> outerJoin(final KTable<K, V1> other,
final ValueJoiner<? super V, ? super V1, ? extends R> joiner,
final Named named) {
return doJoin(other, joiner, named, null, true, true);
}
@Override
public <VO, VR> KTable<K, VR> outerJoin(final KTable<K, VO> other,
final ValueJoiner<? super V, ? super VO, ? extends VR> joiner,
final Materialized<K, VR, KeyValueStore<Bytes, byte[]>> materialized) {
return outerJoin(other, joiner, NamedInternal.empty(), materialized);
}
@Override
public <VO, VR> KTable<K, VR> outerJoin(final KTable<K, VO> other,
final ValueJoiner<? super V, ? super VO, ? extends VR> joiner,
final Named named,
final Materialized<K, VR, KeyValueStore<Bytes, byte[]>> materialized) {
Objects.requireNonNull(materialized, "materialized can't be null");
final MaterializedInternal<K, VR, KeyValueStore<Bytes, byte[]>> materializedInternal =
new MaterializedInternal<>(materialized, builder, MERGE_NAME);
return doJoin(other, joiner, named, materializedInternal, true, true);
}
@Override
public <V1, R> KTable<K, R> leftJoin(final KTable<K, V1> other,
final ValueJoiner<? super V, ? super V1, ? extends R> joiner) {
return leftJoin(other, joiner, NamedInternal.empty());
}
@Override
public <V1, R> KTable<K, R> leftJoin(final KTable<K, V1> other,
final ValueJoiner<? super V, ? super V1, ? extends R> joiner,
final Named named) {
return doJoin(other, joiner, named, null, true, false);
}
@Override
public <VO, VR> KTable<K, VR> leftJoin(final KTable<K, VO> other,
final ValueJoiner<? super V, ? super VO, ? extends VR> joiner,
final Materialized<K, VR, KeyValueStore<Bytes, byte[]>> materialized) {
return leftJoin(other, joiner, NamedInternal.empty(), materialized);
}
@Override
public <VO, VR> KTable<K, VR> leftJoin(final KTable<K, VO> other,
final ValueJoiner<? super V, ? super VO, ? extends VR> joiner,
final Named named,
final Materialized<K, VR, KeyValueStore<Bytes, byte[]>> materialized) {
Objects.requireNonNull(materialized, "materialized can't be null");
final MaterializedInternal<K, VR, KeyValueStore<Bytes, byte[]>> materializedInternal =
new MaterializedInternal<>(materialized, builder, MERGE_NAME);
return doJoin(other, joiner, named, materializedInternal, true, false);
}
    /**
     * Shared implementation behind all primary-key table-table joins (inner/left/outer):
     * builds the two per-side join processors plus a merge node and wires them into the
     * topology graph.
     *
     * @param other                the table to join with (must be co-partitioned with this one)
     * @param joiner               combines this table's value with the other table's value
     * @param joinName             user-supplied name; an auto-generated one is used when empty
     * @param materializedInternal materialization spec, or {@code null} if the result is not materialized
     * @param leftOuter            whether this (left) side produces output for missing right-side rows
     * @param rightOuter           whether the other (right) side produces output for missing left-side rows
     */
    @SuppressWarnings("unchecked")
    private <VO, VR> KTable<K, VR> doJoin(final KTable<K, VO> other,
                                          final ValueJoiner<? super V, ? super VO, ? extends VR> joiner,
                                          final Named joinName,
                                          final MaterializedInternal<K, VR, KeyValueStore<Bytes, byte[]>> materializedInternal,
                                          final boolean leftOuter,
                                          final boolean rightOuter) {
        Objects.requireNonNull(other, "other can't be null");
        Objects.requireNonNull(joiner, "joiner can't be null");
        Objects.requireNonNull(joinName, "joinName can't be null");
        final NamedInternal renamed = new NamedInternal(joinName);
        final String joinMergeName = renamed.orElseGenerateWithPrefix(builder, MERGE_NAME);
        final Set<String> allSourceNodes = ensureCopartitionWith(Collections.singleton((AbstractStream<K, VO>) other));
        // outer sides must forward old values so retractions propagate correctly
        if (leftOuter) {
            enableSendingOldValues(true);
        }
        if (rightOuter) {
            ((KTableImpl<?, ?, ?>) other).enableSendingOldValues(true);
        }
        // pick the per-side join processors according to the join type
        final KTableKTableAbstractJoin<K, VR, V, VO> joinThis;
        final KTableKTableAbstractJoin<K, VR, VO, V> joinOther;
        if (!leftOuter) { // inner
            joinThis = new KTableKTableInnerJoin<>(this, (KTableImpl<K, ?, VO>) other, joiner);
            joinOther = new KTableKTableInnerJoin<>((KTableImpl<K, ?, VO>) other, this, reverseJoiner(joiner));
        } else if (!rightOuter) { // left
            joinThis = new KTableKTableLeftJoin<>(this, (KTableImpl<K, ?, VO>) other, joiner);
            joinOther = new KTableKTableRightJoin<>((KTableImpl<K, ?, VO>) other, this, reverseJoiner(joiner));
        } else { // outer
            joinThis = new KTableKTableOuterJoin<>(this, (KTableImpl<K, ?, VO>) other, joiner);
            joinOther = new KTableKTableOuterJoin<>((KTableImpl<K, ?, VO>) other, this, reverseJoiner(joiner));
        }
        final String joinThisName = renamed.suffixWithOrElseGet("-join-this", builder, JOINTHIS_NAME);
        final String joinOtherName = renamed.suffixWithOrElseGet("-join-other", builder, JOINOTHER_NAME);
        final ProcessorParameters<K, Change<V>, ?, ?> joinThisProcessorParameters = new ProcessorParameters<>(joinThis, joinThisName);
        final ProcessorParameters<K, Change<VO>, ?, ?> joinOtherProcessorParameters = new ProcessorParameters<>(joinOther, joinOtherName);
        final Serde<K> keySerde;
        final Serde<VR> valueSerde;
        final String queryableStoreName;
        final StoreBuilder<TimestampedKeyValueStore<K, VR>> storeBuilder;
        if (materializedInternal != null) {
            if (materializedInternal.keySerde() == null) {
                materializedInternal.withKeySerde(this.keySerde);
            }
            keySerde = materializedInternal.keySerde();
            valueSerde = materializedInternal.valueSerde();
            queryableStoreName = materializedInternal.storeName();
            storeBuilder = new TimestampedKeyValueStoreMaterializer<>(materializedInternal).materialize();
        } else {
            keySerde = this.keySerde;
            valueSerde = null;
            queryableStoreName = null;
            storeBuilder = null;
        }
        final KTableKTableJoinNode<K, V, VO, VR> kTableKTableJoinNode =
            KTableKTableJoinNode.<K, V, VO, VR>kTableKTableJoinNodeBuilder()
                .withNodeName(joinMergeName)
                .withJoinThisProcessorParameters(joinThisProcessorParameters)
                .withJoinOtherProcessorParameters(joinOtherProcessorParameters)
                .withThisJoinSideNodeName(name)
                .withOtherJoinSideNodeName(((KTableImpl<?, ?, ?>) other).name)
                .withJoinThisStoreNames(valueGetterSupplier().storeNames())
                .withJoinOtherStoreNames(((KTableImpl<?, ?, ?>) other).valueGetterSupplier().storeNames())
                .withKeySerde(keySerde)
                .withValueSerde(valueSerde)
                .withQueryableStoreName(queryableStoreName)
                .withStoreBuilder(storeBuilder)
                .build();
        builder.addGraphNode(this.graphNode, kTableKTableJoinNode);
        // we can inherit parent key serde if user do not provide specific overrides
        return new KTableImpl<K, Change<VR>, VR>(
            kTableKTableJoinNode.nodeName(),
            kTableKTableJoinNode.keySerde(),
            kTableKTableJoinNode.valueSerde(),
            allSourceNodes,
            kTableKTableJoinNode.queryableStoreName(),
            kTableKTableJoinNode.joinMerger(),
            kTableKTableJoinNode,
            builder
        );
    }
@Override
public <K1, V1> KGroupedTable<K1, V1> groupBy(final KeyValueMapper<? super K, ? super V, KeyValue<K1, V1>> selector) {
return groupBy(selector, Grouped.with(null, null));
}
    /**
     * Re-keys this table via {@code selector} in preparation for an aggregation,
     * wiring a repartition-map node into the topology graph.
     *
     * @param selector maps each record to the new (key, value) pair to group by
     * @param grouped  serde/name configuration for the grouping
     */
    @Override
    public <K1, V1> KGroupedTable<K1, V1> groupBy(final KeyValueMapper<? super K, ? super V, KeyValue<K1, V1>> selector,
                                                 final Grouped<K1, V1> grouped) {
        Objects.requireNonNull(selector, "selector can't be null");
        Objects.requireNonNull(grouped, "grouped can't be null");
        final GroupedInternal<K1, V1> groupedInternal = new GroupedInternal<>(grouped);
        final String selectName = new NamedInternal(groupedInternal.name()).orElseGenerateWithPrefix(builder, SELECT_NAME);
        final KTableProcessorSupplier<K, V, KeyValue<K1, V1>> selectSupplier = new KTableRepartitionMap<>(this, selector);
        final ProcessorParameters<K, Change<V>, ?, ?> processorParameters = new ProcessorParameters<>(selectSupplier, selectName);
        // select the aggregate key and values (old and new), it would require parent to send old values
        final ProcessorGraphNode<K, Change<V>> groupByMapNode = new ProcessorGraphNode<>(selectName, processorParameters);
        builder.addGraphNode(this.graphNode, groupByMapNode);
        this.enableSendingOldValues(true);
        return new KGroupedTableImpl<>(
            builder,
            selectName,
            subTopologySourceNodes,
            groupedInternal,
            groupByMapNode
        );
    }
    /**
     * Returns a supplier of value getters for reading this table's current state, dispatching
     * on the runtime type of the transitional {@code Object}-typed {@link #processorSupplier}.
     * Requesting a getter on a source table forces its materialization.
     */
    @SuppressWarnings("unchecked")
    public KTableValueGetterSupplier<K, V> valueGetterSupplier() {
        if (processorSupplier instanceof KTableSource) {
            final KTableSource<K, V> source = (KTableSource<K, V>) processorSupplier;
            // whenever a source ktable is required for getter, it should be materialized
            source.materialize();
            return new KTableSourceValueGetterSupplier<>(source.queryableName());
        } else if (processorSupplier instanceof KStreamAggProcessorSupplier) {
            return ((KStreamAggProcessorSupplier<?, K, S, V>) processorSupplier).view();
        } else if (processorSupplier instanceof KTableNewProcessorSupplier) {
            return ((KTableNewProcessorSupplier<?, ?, K, V>) processorSupplier).view();
        } else {
            return ((KTableProcessorSupplier<K, S, V>) processorSupplier).view();
        }
    }
    /**
     * Requests that this table forward both old and new values downstream (needed e.g. by
     * aggregations and outer joins), dispatching on the runtime type of
     * {@link #processorSupplier}.
     *
     * @param forceMaterialization whether an un-materialized source table may be materialized
     *                             to satisfy the request
     * @return {@code true} if old values will be sent; {@code false} if the request could not
     *         be honored (e.g. un-materialized source and {@code forceMaterialization == false})
     */
    @SuppressWarnings("unchecked")
    public boolean enableSendingOldValues(final boolean forceMaterialization) {
        if (!sendOldValues) {
            if (processorSupplier instanceof KTableSource) {
                final KTableSource<K, ?> source = (KTableSource<K, V>) processorSupplier;
                if (!forceMaterialization && !source.materialized()) {
                    // a source can only send old values when it is materialized
                    return false;
                }
                source.enableSendingOldValues();
            } else if (processorSupplier instanceof KStreamAggProcessorSupplier) {
                ((KStreamAggProcessorSupplier<?, K, S, V>) processorSupplier).enableSendingOldValues();
            } else if (processorSupplier instanceof KTableNewProcessorSupplier) {
                final KTableNewProcessorSupplier<?, ?, ?, ?> tableProcessorSupplier =
                    (KTableNewProcessorSupplier<?, ?, ?, ?>) processorSupplier;
                if (!tableProcessorSupplier.enableSendingOldValues(forceMaterialization)) {
                    return false;
                }
            } else {
                final KTableProcessorSupplier<K, S, V> tableProcessorSupplier = (KTableProcessorSupplier<K, S, V>) processorSupplier;
                if (!tableProcessorSupplier.enableSendingOldValues(forceMaterialization)) {
                    return false;
                }
            }
            sendOldValues = true;
        }
        return true;
    }
boolean sendingOldValueEnabled() {
return sendOldValues;
}
    /**
     * We conflate V with Change&lt;V&gt; in many places. This will get fixed in the implementation of KIP-478.
     * For now, I'm just explicitly lying about the parameterized type.
     * <p>
     * The input parameters actually carry {@code Change<V>} values at runtime; callers relabel
     * them as {@code VR} purely to satisfy the compiler at the graph-node boundary.
     */
    @SuppressWarnings("unchecked")
    private <VR> ProcessorParameters<K, VR, ?, ?> unsafeCastProcessorParametersToCompletelyDifferentType(final ProcessorParameters<K, Change<V>, ?, ?> kObjectProcessorParameters) {
        return (ProcessorParameters<K, VR, ?, ?>) kObjectProcessorParameters;
    }
@Override
public <VR, KO, VO> KTable<K, VR> join(final KTable<KO, VO> other,
final Function<V, KO> foreignKeyExtractor,
final ValueJoiner<V, VO, VR> joiner) {
return doJoinOnForeignKey(
other,
foreignKeyExtractor,
joiner,
NamedInternal.empty(),
Materialized.with(null, null),
false
);
}
@Override
public <VR, KO, VO> KTable<K, VR> join(final KTable<KO, VO> other,
final Function<V, KO> foreignKeyExtractor,
final ValueJoiner<V, VO, VR> joiner,
final Named named) {
return doJoinOnForeignKey(
other,
foreignKeyExtractor,
joiner,
named,
Materialized.with(null, null),
false
);
}
/**
 * Inner foreign-key join materialized into the given store, with generated
 * node names. Delegates to {@link #doJoinOnForeignKey} with
 * {@code leftJoin = false}.
 */
@Override
public <VR, KO, VO> KTable<K, VR> join(final KTable<KO, VO> other,
                                       final Function<V, KO> foreignKeyExtractor,
                                       final ValueJoiner<V, VO, VR> joiner,
                                       final Materialized<K, VR, KeyValueStore<Bytes, byte[]>> materialized) {
    return doJoinOnForeignKey(
        other,
        foreignKeyExtractor,
        joiner,
        NamedInternal.empty(),
        materialized,
        false
    );
}
/**
 * Inner foreign-key join with a caller-supplied {@link Named} and result
 * materialization. Delegates to {@link #doJoinOnForeignKey} with
 * {@code leftJoin = false}.
 */
@Override
public <VR, KO, VO> KTable<K, VR> join(final KTable<KO, VO> other,
                                       final Function<V, KO> foreignKeyExtractor,
                                       final ValueJoiner<V, VO, VR> joiner,
                                       final Named named,
                                       final Materialized<K, VR, KeyValueStore<Bytes, byte[]>> materialized) {
    return doJoinOnForeignKey(
        other,
        foreignKeyExtractor,
        joiner,
        named,
        materialized,
        false
    );
}
/**
 * Left foreign-key join with generated node names and no caller-supplied
 * result materialization. Delegates to {@link #doJoinOnForeignKey} with
 * {@code leftJoin = true}.
 */
@Override
public <VR, KO, VO> KTable<K, VR> leftJoin(final KTable<KO, VO> other,
                                           final Function<V, KO> foreignKeyExtractor,
                                           final ValueJoiner<V, VO, VR> joiner) {
    final Materialized<K, VR, KeyValueStore<Bytes, byte[]>> unmaterialized = Materialized.with(null, null);
    return doJoinOnForeignKey(other, foreignKeyExtractor, joiner, NamedInternal.empty(), unmaterialized, true);
}
/**
 * Left foreign-key join with a caller-supplied {@link Named} and no result
 * materialization. Delegates to {@link #doJoinOnForeignKey} with
 * {@code leftJoin = true}.
 */
@Override
public <VR, KO, VO> KTable<K, VR> leftJoin(final KTable<KO, VO> other,
                                           final Function<V, KO> foreignKeyExtractor,
                                           final ValueJoiner<V, VO, VR> joiner,
                                           final Named named) {
    final Materialized<K, VR, KeyValueStore<Bytes, byte[]>> unmaterialized = Materialized.with(null, null);
    return doJoinOnForeignKey(other, foreignKeyExtractor, joiner, named, unmaterialized, true);
}
/**
 * Left foreign-key join with a caller-supplied {@link Named} and result
 * materialization. Delegates to {@link #doJoinOnForeignKey} with
 * {@code leftJoin = true}.
 */
@Override
public <VR, KO, VO> KTable<K, VR> leftJoin(final KTable<KO, VO> other,
                                           final Function<V, KO> foreignKeyExtractor,
                                           final ValueJoiner<V, VO, VR> joiner,
                                           final Named named,
                                           final Materialized<K, VR, KeyValueStore<Bytes, byte[]>> materialized) {
    return doJoinOnForeignKey(
        other,
        foreignKeyExtractor,
        joiner,
        named,
        materialized,
        true
    );
}
/**
 * Left foreign-key join materialized into the given store, with generated
 * node names. Delegates to {@link #doJoinOnForeignKey} with
 * {@code leftJoin = true}.
 */
@Override
public <VR, KO, VO> KTable<K, VR> leftJoin(final KTable<KO, VO> other,
                                           final Function<V, KO> foreignKeyExtractor,
                                           final ValueJoiner<V, VO, VR> joiner,
                                           final Materialized<K, VR, KeyValueStore<Bytes, byte[]>> materialized) {
    return doJoinOnForeignKey(
        other,
        foreignKeyExtractor,
        joiner,
        NamedInternal.empty(),
        materialized,
        true
    );
}
/**
 * Shared implementation behind all foreign-key {@code join}/{@code leftJoin}
 * overloads (see KIP-213). Builds the full join topology:
 * <ol>
 *   <li>a "subscription" repartition topic carrying this (primary) table's
 *       extracted foreign keys to the foreign table's partitions;</li>
 *   <li>stateful processors on the foreign side that store subscriptions and
 *       react to changes of either input;</li>
 *   <li>a "subscription-response" repartition topic routing join candidates
 *       back to the primary table's partitions;</li>
 *   <li>a resolver that re-checks the primary value (via a hash comparison)
 *       and applies {@code joiner}, materializing the result.</li>
 * </ol>
 *
 * @param foreignKeyTable     the right-hand (foreign) table; must be a {@code KTableImpl}
 * @param foreignKeyExtractor maps this table's value to the foreign key
 * @param joiner              combines left and right values into the result
 * @param joinName            base name for generated nodes/topics
 * @param materialized        result store spec; key/value serdes may be null
 * @param leftJoin            true to emit results for unmatched left records
 * @return the joined, materialized result table
 * @throws NullPointerException if any argument is null
 */
@SuppressWarnings("unchecked")
private <VR, KO, VO> KTable<K, VR> doJoinOnForeignKey(final KTable<KO, VO> foreignKeyTable,
                                                      final Function<V, KO> foreignKeyExtractor,
                                                      final ValueJoiner<V, VO, VR> joiner,
                                                      final Named joinName,
                                                      final Materialized<K, VR, KeyValueStore<Bytes, byte[]>> materialized,
                                                      final boolean leftJoin) {
    Objects.requireNonNull(foreignKeyTable, "foreignKeyTable can't be null");
    Objects.requireNonNull(foreignKeyExtractor, "foreignKeyExtractor can't be null");
    Objects.requireNonNull(joiner, "joiner can't be null");
    Objects.requireNonNull(joinName, "joinName can't be null");
    Objects.requireNonNull(materialized, "materialized can't be null");
    //Old values are a useful optimization. The old values from the foreignKeyTable table are compared to the new values,
    //such that identical values do not cause a prefixScan. PrefixScan and propagation can be expensive and should
    //not be done needlessly.
    ((KTableImpl<?, ?, ?>) foreignKeyTable).enableSendingOldValues(true);
    //Old values must be sent such that the ForeignJoinSubscriptionSendProcessorSupplier can propagate deletions to the correct node.
    //This occurs whenever the extracted foreignKey changes values.
    enableSendingOldValues(true);
    final NamedInternal renamed = new NamedInternal(joinName);
    // Repartition topic carrying subscription requests from the primary side to the foreign side.
    final String subscriptionTopicName = renamed.suffixWithOrElseGet(
        "-subscription-registration",
        builder,
        SUBSCRIPTION_REGISTRATION
    ) + TOPIC_SUFFIX;
    // the decoration can't be performed until we have the configuration available when the app runs,
    // so we pass Suppliers into the components, which they can call at run time
    final Supplier<String> subscriptionPrimaryKeySerdePseudoTopic =
        () -> internalTopologyBuilder().decoratePseudoTopic(subscriptionTopicName + "-pk");
    final Supplier<String> subscriptionForeignKeySerdePseudoTopic =
        () -> internalTopologyBuilder().decoratePseudoTopic(subscriptionTopicName + "-fk");
    final Supplier<String> valueHashSerdePseudoTopic =
        () -> internalTopologyBuilder().decoratePseudoTopic(subscriptionTopicName + "-vh");
    builder.internalTopologyBuilder.addInternalTopic(subscriptionTopicName, InternalTopicProperties.empty());
    final Serde<KO> foreignKeySerde = ((KTableImpl<KO, VO, ?>) foreignKeyTable).keySerde;
    final Serde<SubscriptionWrapper<K>> subscriptionWrapperSerde = new SubscriptionWrapperSerde<>(subscriptionPrimaryKeySerdePseudoTopic, keySerde);
    final SubscriptionResponseWrapperSerde<VO> responseWrapperSerde =
        new SubscriptionResponseWrapperSerde<>(((KTableImpl<KO, VO, VO>) foreignKeyTable).valueSerde);
    // Composite (foreignKey, primaryKey) key layout used by the subscription store and prefix scans.
    final CombinedKeySchema<KO, K> combinedKeySchema = new CombinedKeySchema<>(
        subscriptionForeignKeySerdePseudoTopic,
        foreignKeySerde,
        subscriptionPrimaryKeySerdePseudoTopic,
        keySerde
    );
    // Primary side: extract the foreign key from each change and emit a subscription request.
    final ProcessorGraphNode<K, Change<V>> subscriptionNode = new ProcessorGraphNode<>(
        new ProcessorParameters<>(
            new ForeignJoinSubscriptionSendProcessorSupplier<>(
                foreignKeyExtractor,
                subscriptionForeignKeySerdePseudoTopic,
                valueHashSerdePseudoTopic,
                foreignKeySerde,
                valueSerde == null ? null : valueSerde.serializer(),
                leftJoin
            ),
            renamed.suffixWithOrElseGet("-subscription-registration-processor", builder, SUBSCRIPTION_REGISTRATION)
        )
    );
    builder.addGraphNode(graphNode, subscriptionNode);
    // Sink/source pair implementing the primary->foreign repartition (keyed by foreign key).
    final StreamSinkNode<KO, SubscriptionWrapper<K>> subscriptionSink = new StreamSinkNode<>(
        renamed.suffixWithOrElseGet("-subscription-registration-sink", builder, SINK_NAME),
        new StaticTopicNameExtractor<>(subscriptionTopicName),
        new ProducedInternal<>(Produced.with(foreignKeySerde, subscriptionWrapperSerde))
    );
    builder.addGraphNode(subscriptionNode, subscriptionSink);
    final StreamSourceNode<KO, SubscriptionWrapper<K>> subscriptionSource = new StreamSourceNode<>(
        renamed.suffixWithOrElseGet("-subscription-registration-source", builder, SOURCE_NAME),
        Collections.singleton(subscriptionTopicName),
        new ConsumedInternal<>(Consumed.with(foreignKeySerde, subscriptionWrapperSerde))
    );
    builder.addGraphNode(subscriptionSink, subscriptionSource);
    // The subscription source is the source node on the *receiving* end *after* the repartition.
    // This topic needs to be copartitioned with the Foreign Key table.
    final Set<String> copartitionedRepartitionSources =
        new HashSet<>(((KTableImpl<?, ?, ?>) foreignKeyTable).subTopologySourceNodes);
    copartitionedRepartitionSources.add(subscriptionSource.nodeName());
    builder.internalTopologyBuilder.copartitionSources(copartitionedRepartitionSources);
    // Persistent store of active subscriptions, keyed by the combined (fk, pk) key.
    final StoreBuilder<TimestampedKeyValueStore<Bytes, SubscriptionWrapper<K>>> subscriptionStore =
        Stores.timestampedKeyValueStoreBuilder(
            Stores.persistentTimestampedKeyValueStore(
                renamed.suffixWithOrElseGet("-subscription-store", builder, FK_JOIN_STATE_STORE_NAME)
            ),
            new Serdes.BytesSerde(),
            subscriptionWrapperSerde
        );
    builder.addStateStore(subscriptionStore);
    // Foreign side: record incoming subscriptions into the subscription store.
    final StatefulProcessorNode<KO, SubscriptionWrapper<K>> subscriptionReceiveNode =
        new StatefulProcessorNode<>(
            new ProcessorParameters<>(
                new SubscriptionStoreReceiveProcessorSupplier<>(subscriptionStore, combinedKeySchema),
                renamed.suffixWithOrElseGet("-subscription-receive", builder, SUBSCRIPTION_PROCESSOR)
            ),
            Collections.singleton(subscriptionStore),
            Collections.emptySet()
        );
    builder.addGraphNode(subscriptionSource, subscriptionReceiveNode);
    // React to subscription changes by looking up the current foreign value.
    final StatefulProcessorNode<CombinedKey<KO, K>, Change<ValueAndTimestamp<SubscriptionWrapper<K>>>> subscriptionJoinForeignNode =
        new StatefulProcessorNode<>(
            new ProcessorParameters<>(
                new SubscriptionJoinForeignProcessorSupplier<>(
                    ((KTableImpl<KO, VO, VO>) foreignKeyTable).valueGetterSupplier()
                ),
                renamed.suffixWithOrElseGet("-subscription-join-foreign", builder, SUBSCRIPTION_PROCESSOR)
            ),
            Collections.emptySet(),
            Collections.singleton(((KTableImpl<KO, VO, VO>) foreignKeyTable).valueGetterSupplier())
        );
    builder.addGraphNode(subscriptionReceiveNode, subscriptionJoinForeignNode);
    // React to foreign-table changes by prefix-scanning the subscription store for affected primaries.
    final StatefulProcessorNode<KO, Change<Object>> foreignJoinSubscriptionNode = new StatefulProcessorNode<>(
        new ProcessorParameters<>(
            new ForeignJoinSubscriptionProcessorSupplier<>(subscriptionStore, combinedKeySchema),
            renamed.suffixWithOrElseGet("-foreign-join-subscription", builder, SUBSCRIPTION_PROCESSOR)
        ),
        Collections.singleton(subscriptionStore),
        Collections.emptySet()
    );
    builder.addGraphNode(((KTableImpl<KO, VO, ?>) foreignKeyTable).graphNode, foreignJoinSubscriptionNode);
    // Repartition topic routing join responses back to the primary table's partitions.
    final String finalRepartitionTopicName = renamed.suffixWithOrElseGet("-subscription-response", builder, SUBSCRIPTION_RESPONSE) + TOPIC_SUFFIX;
    builder.internalTopologyBuilder.addInternalTopic(finalRepartitionTopicName, InternalTopicProperties.empty());
    // Both foreign-side reaction paths feed the same response sink.
    final StreamSinkNode<K, SubscriptionResponseWrapper<VO>> foreignResponseSink =
        new StreamSinkNode<>(
            renamed.suffixWithOrElseGet("-subscription-response-sink", builder, SINK_NAME),
            new StaticTopicNameExtractor<>(finalRepartitionTopicName),
            new ProducedInternal<>(Produced.with(keySerde, responseWrapperSerde))
        );
    builder.addGraphNode(subscriptionJoinForeignNode, foreignResponseSink);
    builder.addGraphNode(foreignJoinSubscriptionNode, foreignResponseSink);
    final StreamSourceNode<K, SubscriptionResponseWrapper<VO>> foreignResponseSource = new StreamSourceNode<>(
        renamed.suffixWithOrElseGet("-subscription-response-source", builder, SOURCE_NAME),
        Collections.singleton(finalRepartitionTopicName),
        new ConsumedInternal<>(Consumed.with(keySerde, responseWrapperSerde))
    );
    builder.addGraphNode(foreignResponseSink, foreignResponseSource);
    // the response topic has to be copartitioned with the left (primary) side of the join
    final Set<String> resultSourceNodes = new HashSet<>(this.subTopologySourceNodes);
    resultSourceNodes.add(foreignResponseSource.nodeName());
    builder.internalTopologyBuilder.copartitionSources(resultSourceNodes);
    // Resolver: discard stale responses (via value-hash check against the current
    // primary value) and apply the user's joiner.
    final KTableValueGetterSupplier<K, V> primaryKeyValueGetter = valueGetterSupplier();
    final SubscriptionResolverJoinProcessorSupplier<K, V, VO, VR> resolverProcessorSupplier = new SubscriptionResolverJoinProcessorSupplier<>(
        primaryKeyValueGetter,
        valueSerde == null ? null : valueSerde.serializer(),
        valueHashSerdePseudoTopic,
        joiner,
        leftJoin
    );
    final StatefulProcessorNode<K, SubscriptionResponseWrapper<VO>> resolverNode = new StatefulProcessorNode<>(
        new ProcessorParameters<>(
            resolverProcessorSupplier,
            renamed.suffixWithOrElseGet("-subscription-response-resolver", builder, SUBSCRIPTION_RESPONSE_RESOLVER_PROCESSOR)
        ),
        Collections.emptySet(),
        Collections.singleton(primaryKeyValueGetter)
    );
    builder.addGraphNode(foreignResponseSource, resolverNode);
    final String resultProcessorName = renamed.suffixWithOrElseGet("-result", builder, FK_JOIN_OUTPUT_NAME);
    final MaterializedInternal<K, VR, KeyValueStore<Bytes, byte[]>> materializedInternal =
        new MaterializedInternal<>(
            materialized,
            builder,
            FK_JOIN_OUTPUT_NAME
        );
    // If we have a key serde, it's still valid, but we don't know the value serde, since it's the result
    // of the joiner (VR).
    if (materializedInternal.keySerde() == null) {
        materializedInternal.withKeySerde(keySerde);
    }
    final KTableSource<K, VR> resultProcessorSupplier = new KTableSource<>(
        materializedInternal.storeName(),
        materializedInternal.queryableStoreName()
    );
    final StoreBuilder<TimestampedKeyValueStore<K, VR>> resultStore =
        new TimestampedKeyValueStoreMaterializer<>(materializedInternal).materialize();
    final TableProcessorNode<K, VR> resultNode = new TableProcessorNode<>(
        resultProcessorName,
        new ProcessorParameters<>(
            resultProcessorSupplier,
            resultProcessorName
        ),
        resultStore
    );
    builder.addGraphNode(resolverNode, resultNode);
    return new KTableImpl<K, V, VR>(
        resultProcessorName,
        keySerde,
        materializedInternal.valueSerde(),
        resultSourceNodes,
        materializedInternal.storeName(),
        resultProcessorSupplier,
        resultNode,
        builder
    );
}
}
| |
package sk.henrichg.phoneprofilesplus;
import android.annotation.SuppressLint;
import android.content.Context;
import android.net.ConnectivityManager;
import android.net.wifi.WifiConfiguration;
import android.net.wifi.WifiManager;
import android.os.Build;
import android.os.Handler;
import android.os.ResultReceiver;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
/**
 * Controls the Wi-Fi access point (hotspot/tethering) through hidden Android
 * APIs reached via reflection. None of these entry points are public SDK, so
 * every operation is best-effort and version-dependent:
 * - pre-O (SDK < 26): hidden WifiManager#setWifiApEnabled / #getWifiApConfiguration
 * - O+ (SDK >= 26):   hidden ConnectivityManager#startTethering / #stopTethering
 * - "*30" variants:   ConnectivityManager$OnStartTetheringCallback-based path.
 * The constructor throws NoSuchMethodException when the required hidden methods
 * are absent; callers use this as a capability probe (see canExploitWifiAP).
 */
final class WifiApManager {
    //private static final int WIFI_AP_STATE_FAILED = 4;
    private final WifiManager mWifiManager;
    //private final String TAG = "Wifi Access Manager";
    // Hidden WifiManager#setWifiApEnabled(WifiConfiguration, boolean); resolved pre-O only.
    private Method wifiControlMethod = null;
    // Hidden WifiManager#getWifiApConfiguration(); resolved pre-O only.
    private Method wifiApConfigurationMethod = null;
    //private Method wifiApState;
    // Hidden WifiManager#isWifiApEnabled(); resolved on every version.
    private Method wifiApEnabled = null;
    // Only initialized on O+ (SDK 26+), where tethering goes through ConnectivityManager.
    private ConnectivityManager mConnectivityManager;
    private String packageName;

    @SuppressLint("PrivateApi")
    WifiApManager(Context context) throws SecurityException, NoSuchMethodException {
        mWifiManager = (WifiManager) context.getApplicationContext().getSystemService(Context.WIFI_SERVICE);
        if (mWifiManager != null)
            wifiApEnabled = mWifiManager.getClass().getDeclaredMethod("isWifiApEnabled");
        /*if (PPApplication.logEnabled()) {
            PPApplication.logE("$$$ WifiAP", "WifiApManager.WifiApManager-mWifiManager=" + mWifiManager);
            PPApplication.logE("$$$ WifiAP", "WifiApManager.WifiApManager-wifiApEnabled=" + wifiApEnabled);
        }*/
        if (Build.VERSION.SDK_INT >= 26) {
            mConnectivityManager = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
            packageName = PPApplication.PACKAGE_NAME;
        }
        else {
            // Pre-O: resolve the hidden WifiManager methods; NoSuchMethodException
            // propagates to the caller as "this device can't be exploited".
            if (mWifiManager != null) {
                //noinspection deprecation
                wifiControlMethod = mWifiManager.getClass().getMethod("setWifiApEnabled", WifiConfiguration.class, boolean.class);
                wifiApConfigurationMethod = mWifiManager.getClass().getMethod("getWifiApConfiguration"/*,null*/);
                //wifiApState = mWifiManager.getClass().getMethod("getWifiApState");
            }
            /*if (PPApplication.logEnabled()) {
                PPApplication.logE("$$$ WifiAP", "WifiApManager.WifiApManager-wifiControlMethod=" + wifiControlMethod);
                PPApplication.logE("$$$ WifiAP", "WifiApManager.WifiApManager-wifiApConfigurationMethod=" + wifiApConfigurationMethod);
            }*/
        }
    }

    /**
     * Pre-O path: enables/disables the AP via the hidden setWifiApEnabled.
     * When enabling (unless doNotChangeWifi), Wi-Fi client mode is switched
     * off first, since AP and client mode were mutually exclusive.
     * Exceptions are recorded, not rethrown (best-effort).
     */
    @SuppressWarnings("deprecation")
    private void setWifiApState(WifiConfiguration config, boolean enabled, boolean doNotChangeWifi) {
        try {
            /*if (PPApplication.logEnabled()) {
                PPApplication.logE("$$$ WifiAP", "WifiApManager.setWifiApState-config=" + config);
                PPApplication.logE("$$$ WifiAP", "WifiApManager.setWifiApState-enabled=" + enabled);
                PPApplication.logE("$$$ WifiAP", "WifiApManager.setWifiApState-mWifiManager=" + mWifiManager);
                PPApplication.logE("$$$ WifiAP", "WifiApManager.setWifiApState-wifiControlMethod=" + wifiControlMethod);
            }*/
            if (enabled) {
                if (!doNotChangeWifi) {
                    if (mWifiManager != null) {
                        int wifiState = mWifiManager.getWifiState();
                        boolean isWifiEnabled = ((wifiState == WifiManager.WIFI_STATE_ENABLED) || (wifiState == WifiManager.WIFI_STATE_ENABLING));
                        if (isWifiEnabled) {
                            //PPApplication.logE("#### setWifiEnabled", "from WifAPManager.setWifiApState");
                            //if (Build.VERSION.SDK_INT >= 29)
                            //    CmdWifi.setWifi(false);
                            //else
                            //    PPApplication.logE("[WIFI_ENABLED] WifiApManager.setWifiApState", "false");
                            //noinspection deprecation
                            mWifiManager.setWifiEnabled(false);
                        }
                    }
                }
            }
            wifiControlMethod.setAccessible(true);
            wifiControlMethod.invoke(mWifiManager, config, enabled);
        } catch (Exception e) {
            //Log.e(TAG, "", e);
            //PPApplication.logE("WifiApManager.setWifiApState", Log.getStackTraceString(e));
            PPApplication.recordException(e);
        }
    }

    // Convenience overload: reuses the device's current AP configuration.
    @SuppressWarnings("deprecation")
    void setWifiApState(boolean enabled, boolean doNotChangeWifi) {
        WifiConfiguration wifiConfiguration = getWifiApConfiguration();
        /*return*/ setWifiApState(wifiConfiguration, enabled, doNotChangeWifi);
    }

    // not working in Android 8+ :-/
    // https://stackoverflow.com/questions/46392277/changing-android-hotspot-settings
    // Returns the current AP configuration via the hidden getter, or null on failure.
    @SuppressWarnings("deprecation")
    private WifiConfiguration getWifiApConfiguration()
    {
        try{
            wifiApConfigurationMethod.setAccessible(true);
            return (WifiConfiguration)wifiApConfigurationMethod.invoke(mWifiManager/*, null*/);
        }
        catch (Exception e)
        {
            //PPApplication.logE("WifiApManager.getWifiApConfiguration", Log.getStackTraceString(e));
            PPApplication.recordException(e);
            return null;
        }
    }

    /*
    public int getWifiApState() {
        try {
            wifiApState.setAccessible(true);
            return (Integer)wifiApState.invoke(mWifiManager);
        } catch (Exception e) {
            Log.e(TAG, "", e);
            return WIFI_AP_STATE_FAILED;
        }
    }
    */

    // True when the AP is currently enabled (hidden isWifiApEnabled); false on any failure.
    boolean isWifiAPEnabled() {
        try {
            wifiApEnabled.setAccessible(true);
            // PPApplication.logE("WifiApManager.isWifiAPEnabled", "enabled="+wifiApEnabled.invoke(mWifiManager));
            //noinspection ConstantConditions
            return (Boolean) wifiApEnabled.invoke(mWifiManager);
        } catch (Exception e) {
            //Log.e("$$$ WifiAP", "WifiApManager.isWifiAPEnabled-exception="+e);
            PPApplication.recordException(e);
            return false;
        }
    }

    // Static convenience: constructs a manager and queries AP state; false when
    // the hidden method is unavailable on this device.
    static boolean isWifiAPEnabled(Context context) {
        try {
            WifiApManager wifiApManager = new WifiApManager(context);
            /*
            int wifiApState = wifiApManager.getWifiApState();
            // 11 => AP OFF
            // 13 => AP ON
            canScan = wifiApState == 11;*/
            // PPApplication.logE("WifiApManager.isWifiAPEnabled", "enabled="+wifiApManager.isWifiAPEnabled());
            return wifiApManager.isWifiAPEnabled();
        } catch (NoSuchMethodException e) {
            return false;
        }
    }

    // Android 11 (API 30) variant.
    // NOTE(review): calls WifiManager#isWifiApEnabled() directly, which is not in
    // the public SDK — presumably compiled against an SDK/stub exposing it; confirm.
    static boolean isWifiAPEnabledA30(Context context) {
        // PPApplication.logE("CmdWifiAP.isEnabled", "xxx");
        try {
            //boolean enabled;
            /*IWifiManager adapter = IWifiManager.Stub.asInterface(ServiceManager.getService("wifi")); // service list | grep IWifiManager
            //PPApplication.logE("CmdWifiAP.isEnabled", "adapter="+adapter);
            enabled = adapter.getWifiApEnabledState() == WifiManager.WIFI_AP_STATE_ENABLED;
            //PPApplication.logE("CmdWifiAP.isEnabled", "enabled="+enabled);
            return enabled;*/
            WifiManager wifiManager = (WifiManager) context.getApplicationContext().getSystemService(Context.WIFI_SERVICE);
            // PPApplication.logE("CmdWifiAP.isEnabled", "enabled="+wifiManager.isWifiApEnabled());
            return wifiManager.isWifiApEnabled();
        } catch (Throwable e) {
            //Log.e("CmdWifiAP.isEnabled", Log.getStackTraceString(e));
            PPApplication.recordException(e);
            //PPApplication.logE("CmdWifiAP.isEnabled", Log.getStackTraceString(e));
            return false;
        }
    }

    // Capability probe: true when the hidden methods needed by this class resolve.
    static boolean canExploitWifiAP(Context context) {
        try {
            /*WifiApManager wifiApManager = */new WifiApManager(context);
            return true;
        } catch (NoSuchMethodException e) {
            return false;
        }
    }

    /**
     * O+ path: starts tethering by invoking the hidden
     * IConnectivityManager#startTethering on ConnectivityManager's internal
     * "mService" binder. Optionally disables Wi-Fi client mode first.
     * Failures are recorded, not rethrown.
     */
    void startTethering(boolean doNotChangeWifi) {
        //PPApplication.logE("WifiApManager.startTethering", "mWifiManager="+mWifiManager);
        if (!doNotChangeWifi) {
            if (mWifiManager != null) {
                int wifiState = mWifiManager.getWifiState();
                boolean isWifiEnabled = ((wifiState == WifiManager.WIFI_STATE_ENABLED) || (wifiState == WifiManager.WIFI_STATE_ENABLING));
                //PPApplication.logE("WifiApManager.startTethering", "isWifiEnabled="+isWifiEnabled);
                if (isWifiEnabled) {
                    //PPApplication.logE("#### setWifiEnabled", "from WifiAPManager.startTethering");
                    //if (Build.VERSION.SDK_INT >= 29)
                    //    CmdWifi.setWifi(false);
                    //else
                    //    PPApplication.logE("[WIFI_ENABLED] WifiApManager.startTethering", "false");
                    //noinspection deprecation
                    mWifiManager.setWifiEnabled(false);
                }
            }
        }
        //PPApplication.logE("WifiApManager.startTethering", "mConnectivityManager="+mConnectivityManager);
        if (mConnectivityManager != null) {
            try {
                //noinspection JavaReflectionMemberAccess
                @SuppressLint("DiscouragedPrivateApi")
                Field internalConnectivityManagerField = ConnectivityManager.class.getDeclaredField("mService");
                internalConnectivityManagerField.setAccessible(true);
                callStartTethering(internalConnectivityManagerField.get(mConnectivityManager));
            } catch (Exception e) {
                //Log.e("WifiApManager.startTethering", Log.getStackTraceString(e));
                PPApplication.recordException(e);
                //PPApplication.logE("WifiApManager.startTethering", Log.getStackTraceString(e));
            }
        }
    }

    // O+ path: stops tethering via the hidden ConnectivityManager#stopTethering(int);
    // 0 is the tethering type argument (TETHERING_WIFI — TODO confirm constant value).
    void stopTethering() {
        //PPApplication.logE("WifiApManager.stopTethering", "mConnectivityManager="+mConnectivityManager);
        if (mConnectivityManager != null) {
            try {
                Method stopTetheringMethod = ConnectivityManager.class.getDeclaredMethod("stopTethering", int.class);
                stopTetheringMethod.invoke(mConnectivityManager, 0);
            } catch (Exception e) {
                //Log.e("WifiApManager.stopTethering", Log.getStackTraceString(e));
                PPApplication.recordException(e);
                //PPApplication.logE("WifiApManager.stopTethering", Log.getStackTraceString(e));
            }
        }
    }

    // Invokes the hidden IConnectivityManager#startTethering, trying the older
    // 3-arg signature first and falling back to the newer 4-arg one (with callingPkg).
    @SuppressWarnings({"unchecked", "JavaReflectionMemberAccess"})
    private void callStartTethering(Object internalConnectivityManager) throws ReflectiveOperationException {
        //PPApplication.logE("WifiApManager.callStartTethering", "START");
        @SuppressWarnings("rawtypes")
        Class internalConnectivityManagerClass = Class.forName("android.net.IConnectivityManager");
        ResultReceiver dummyResultReceiver = new ResultReceiver(null);
        try {
            Method startTetheringMethod = internalConnectivityManagerClass.getDeclaredMethod("startTethering",
                    int.class,
                    ResultReceiver.class,
                    boolean.class);
            //PPApplication.logE("WifiApManager.callStartTethering", "startTetheringMethod.1="+startTetheringMethod);
            startTetheringMethod.invoke(internalConnectivityManager,
                    0,
                    dummyResultReceiver,
                    false);
        } catch (NoSuchMethodException e) {
            //PPApplication.logE("WifiApManager.callStartTethering", Log.getStackTraceString(e));
            // Newer devices have "callingPkg" String argument at the end of this method.
            @SuppressLint("SoonBlockedPrivateApi")
            Method startTetheringMethod = internalConnectivityManagerClass.getDeclaredMethod("startTethering",
                    int.class,
                    ResultReceiver.class,
                    boolean.class,
                    String.class);
            //PPApplication.logE("WifiApManager.callStartTethering", "startTetheringMethod.2="+startTetheringMethod);
            startTetheringMethod.invoke(internalConnectivityManager,
                    0,
                    dummyResultReceiver,
                    false,
                    packageName);
        }
        //PPApplication.logE("WifiApManager.callStartTethering", "END");
    }

    // Capability probe for the O+ reflection path: verifies every hidden
    // field/method used by startTethering()/stopTethering() resolves.
    @SuppressWarnings({"unchecked", "JavaReflectionMemberAccess", "DiscouragedPrivateApi", "SoonBlockedPrivateApi"})
    static boolean canExploitWifiTethering(Context context) {
        try {
            if (canExploitWifiAP(context)) {
                ConnectivityManager.class.getDeclaredField("mService");
                //noinspection rawtypes
                Class internalConnectivityManagerClass = Class.forName("android.net.IConnectivityManager");
                try {
                    internalConnectivityManagerClass.getDeclaredMethod("startTethering",
                            int.class,
                            ResultReceiver.class,
                            boolean.class);
                } catch (NoSuchMethodException e) {
                    internalConnectivityManagerClass.getDeclaredMethod("startTethering",
                            int.class,
                            ResultReceiver.class,
                            boolean.class,
                            String.class);
                }
                ConnectivityManager.class.getDeclaredMethod("stopTethering", int.class);
                return true;
            } else
                return false;
        } catch (Exception e) {
            return false;
        }
    }

    // Thanks to the author of the MacroDroid application.
    // It is used as source of this implementation.
    static class MyOnStartTetheringCallback extends MyOnStartTetheringCallbackAbstract {
        MyOnStartTetheringCallback() {
        }
    }

    // Capability probe for the OnStartTetheringCallback-based ("30") path:
    // checks the hidden isWifiApEnabled call, callback class generation, and
    // the start/stopTethering method signatures all resolve.
    @SuppressWarnings("RedundantArrayCreation")
    @SuppressLint("PrivateApi")
    static boolean canExploitWifiTethering30(Context context) {
        try {
            WifiManager wifiManager = (WifiManager) context.getApplicationContext().getSystemService(Context.WIFI_SERVICE);
            wifiManager.isWifiApEnabled();
        } catch (Throwable e) {
            return false;
        }
        MyOnStartTetheringCallback callback = new MyOnStartTetheringCallback();
        Object myOnStartTetheringCallbackAbstractObj;
        Class<?> myOnStartTetheringCallbackAbstractObjCls;// = null;
        try {
            myOnStartTetheringCallbackAbstractObj =
                    new WifiTetheringCallbackMaker(context, callback)
                            .getTtetheringCallback().getDeclaredConstructor(new Class[]{Integer.TYPE}).newInstance(new Object[]{0});
        } catch (Exception e) {
            myOnStartTetheringCallbackAbstractObj = null;
        }
        if (myOnStartTetheringCallbackAbstractObj == null)
            return false;
        ConnectivityManager connectivityManager = context.getApplicationContext().getSystemService(ConnectivityManager.class);
        try {
            myOnStartTetheringCallbackAbstractObjCls = Class.forName("android.net.ConnectivityManager$OnStartTetheringCallback");
        } catch (Exception e2) {
            return false;
        }
        try {
            Method declaredMethod = connectivityManager.getClass().getDeclaredMethod("startTethering",
                    new Class[]{Integer.TYPE, Boolean.TYPE, myOnStartTetheringCallbackAbstractObjCls, Handler.class});
            //noinspection ConstantConditions
            if (declaredMethod == null) {
                return false;
            }
        } catch (Exception e) {
            return false;
        }
        try {
            Method declaredMethod = connectivityManager.getClass().getDeclaredMethod("stopTethering", new Class[]{Integer.TYPE});
            //noinspection ConstantConditions
            if (declaredMethod == null) {
                return false;
            }
        } catch (Exception e) {
            return false;
        }
        return true;
    }

    // Thanks to the author of the MacroDroid application.
    // It is used as source of this implementation.
    // "30" path entry point: optionally disables Wi-Fi client mode, then starts
    // tethering via the callback-based hidden API.
    static void startTethering30(Context context, boolean doNotChangeWifi) {
        if (!doNotChangeWifi) {
            WifiManager wifiManager = (WifiManager) context.getApplicationContext().getSystemService(Context.WIFI_SERVICE);
            if (wifiManager != null) {
                int wifiState = wifiManager.getWifiState();
                boolean isWifiEnabled = ((wifiState == WifiManager.WIFI_STATE_ENABLED) || (wifiState == WifiManager.WIFI_STATE_ENABLING));
                //PPApplication.logE("WifiApManager.startTethering", "isWifiEnabled="+isWifiEnabled);
                if (isWifiEnabled) {
                    //PPApplication.logE("#### setWifiEnabled", "from WifiAPManager.startTethering");
                    //if (Build.VERSION.SDK_INT >= 29)
                    //    CmdWifi.setWifi(false);
                    //else
                    //    PPApplication.logE("[WIFI_ENABLED] WifiApManager.startTethering", "false");
                    //noinspection deprecation
                    wifiManager.setWifiEnabled(false);
                }
            }
        }
        MyOnStartTetheringCallback callback = new MyOnStartTetheringCallback();
        _startTethering30(context, callback, new Handler());
    }

    // Thanks to the author of the MacroDroid application.
    // It is used as source of this implementation.
    // Builds a runtime subclass of the hidden OnStartTetheringCallback (via
    // WifiTetheringCallbackMaker) and invokes the hidden
    // ConnectivityManager#startTethering(int, boolean, callback, Handler).
    @SuppressWarnings("RedundantArrayCreation")
    @SuppressLint("PrivateApi")
    static private void _startTethering30(Context context,
                                          MyOnStartTetheringCallbackAbstract myOnStartTetheringCallbackAbstract,
                                          Handler handler) {
        Object myOnStartTetheringCallbackAbstractObj;
        Class<?> myOnStartTetheringCallbackAbstractObjCls;// = null;
        try {
            myOnStartTetheringCallbackAbstractObj =
                    new WifiTetheringCallbackMaker(context, myOnStartTetheringCallbackAbstract)
                            .getTtetheringCallback().getDeclaredConstructor(new Class[]{Integer.TYPE}).newInstance(new Object[]{0});
        } catch (Exception e) {
            //Log.e("WifiApManager._startTethering30 (1)", Log.getStackTraceString(e));
            myOnStartTetheringCallbackAbstractObj = null;
        }
        //if (myOnStartTetheringCallbackAbstractObj != null) {
        ConnectivityManager connectivityManager = context.getApplicationContext().getSystemService(ConnectivityManager.class);
        try {
            myOnStartTetheringCallbackAbstractObjCls = Class.forName("android.net.ConnectivityManager$OnStartTetheringCallback");
        } catch (Exception e2) {
            //Log.e("WifiApManager._startTethering30 (2)", Log.getStackTraceString(e2));
            PPApplication.recordException(e2);
            return;
        }
        try {
            Method declaredMethod = connectivityManager.getClass().getDeclaredMethod("startTethering",
                    new Class[]{Integer.TYPE, Boolean.TYPE, myOnStartTetheringCallbackAbstractObjCls, Handler.class});
            //noinspection ConstantConditions
            if (declaredMethod == null) {
                //Log.e("WifiApManager._startTethering30", "startTetheringMethod is null");
                return;
            }
            declaredMethod.invoke(connectivityManager, new Object[]{0, Boolean.FALSE, myOnStartTetheringCallbackAbstractObj, handler});
        } catch (Exception e) {
            //Log.e("WifiApManager._startTethering30 (3)", Log.getStackTraceString(e));
            PPApplication.recordException(e);
        }
        //}
        //else
        //    Log.e("WifiApManager._startTethering30", "myOnStartTetheringCallbackAbstractObj is null");
    }

    // Thanks to the author of the MacroDroid application.
    // It is used as source of this implementation.
    // "30" path: stops tethering via the hidden ConnectivityManager#stopTethering(int).
    @SuppressWarnings("RedundantArrayCreation")
    static void stopTethering30(Context context) {
        ConnectivityManager connectivityManager = context.getApplicationContext().getSystemService(ConnectivityManager.class);
        try {
            Method declaredMethod = connectivityManager.getClass().getDeclaredMethod("stopTethering", new Class[]{Integer.TYPE});
            //noinspection ConstantConditions
            if (declaredMethod == null) {
                // Log.e("WifiApManager.stopTethering30", "stopTetheringMethod is null");
                return;
            }
            declaredMethod.invoke(connectivityManager, new Object[]{0});
        } catch (Exception e) {
            PPApplication.recordException(e);
        }
    }
}
| |
/**
*/
package CIM15.IEC61970.OperationalLimits;
import CIM15.Element;
import CIM15.IEC61970.Core.CorePackage;
import CIM15.IEC61970.Core.Terminal;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
/**
* <!-- begin-user-doc -->
* A representation of the model object '<em><b>Branch Group Terminal</b></em>'.
* <!-- end-user-doc -->
*
* <p>
* The following features are supported:
* <ul>
* <li>{@link CIM15.IEC61970.OperationalLimits.BranchGroupTerminal#getBranchGroup <em>Branch Group</em>}</li>
* <li>{@link CIM15.IEC61970.OperationalLimits.BranchGroupTerminal#getTerminal <em>Terminal</em>}</li>
* <li>{@link CIM15.IEC61970.OperationalLimits.BranchGroupTerminal#isPositiveFlowIn <em>Positive Flow In</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class BranchGroupTerminal extends Element {
/**
 * The cached value of the '{@link #getBranchGroup() <em>Branch Group</em>}' reference.
 * May hold an unresolved proxy; {@link #getBranchGroup()} resolves it lazily.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getBranchGroup()
 * @generated
 * @ordered
 */
protected BranchGroup branchGroup;
/**
 * The cached value of the '{@link #getTerminal() <em>Terminal</em>}' reference.
 * May hold an unresolved proxy; {@link #getTerminal()} resolves it lazily.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getTerminal()
 * @generated
 * @ordered
 */
protected Terminal terminal;
/**
 * The default value of the '{@link #isPositiveFlowIn() <em>Positive Flow In</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #isPositiveFlowIn()
 * @generated
 * @ordered
 */
protected static final boolean POSITIVE_FLOW_IN_EDEFAULT = false;
/**
 * The cached value of the '{@link #isPositiveFlowIn() <em>Positive Flow In</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #isPositiveFlowIn()
 * @generated
 * @ordered
 */
protected boolean positiveFlowIn = POSITIVE_FLOW_IN_EDEFAULT;
/**
 * This is true if the Positive Flow In attribute has been set.
 * Distinguishes an explicitly set default from an unset attribute (EMF "eIsSet" semantics).
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 * @ordered
 */
protected boolean positiveFlowInESet;
/**
 * Default constructor; instances are normally created via the package factory.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected BranchGroupTerminal() {
    super();
}
/**
 * Returns the EMF metaclass ({@link EClass}) describing this model object.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
protected EClass eStaticClass() {
    return OperationalLimitsPackage.Literals.BRANCH_GROUP_TERMINAL;
}
/**
 * Returns the value of the '<em><b>Branch Group</b></em>' reference,
 * resolving a proxy in place if necessary.
 * It is bidirectional and its opposite is '{@link CIM15.IEC61970.OperationalLimits.BranchGroup#getBranchGroupTerminal <em>Branch Group Terminal</em>}'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the value of the '<em>Branch Group</em>' reference.
 * @see #setBranchGroup(BranchGroup)
 * @see CIM15.IEC61970.OperationalLimits.BranchGroup#getBranchGroupTerminal
 * @generated
 */
public BranchGroup getBranchGroup() {
    if (branchGroup != null && branchGroup.eIsProxy()) {
        InternalEObject oldBranchGroup = (InternalEObject)branchGroup;
        branchGroup = (BranchGroup)eResolveProxy(oldBranchGroup);
        if (branchGroup != oldBranchGroup) {
            // NOTE(review): proxy resolved to a different object; notification
            // dispatch appears stripped from this generated variant — confirm intended.
        }
    }
    return branchGroup;
}
/**
 * Returns the cached '<em>Branch Group</em>' reference without resolving proxies.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public BranchGroup basicGetBranchGroup() {
    return branchGroup;
}
/**
 * Caches the new '<em>Branch Group</em>' reference without touching the
 * inverse side; notifications are not produced in this generated variant.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public NotificationChain basicSetBranchGroup(BranchGroup newBranchGroup, NotificationChain msgs) {
    BranchGroup oldBranchGroup = branchGroup;
    branchGroup = newBranchGroup;
    return msgs;
}
/**
 * Sets the value of the '{@link CIM15.IEC61970.OperationalLimits.BranchGroupTerminal#getBranchGroup <em>Branch Group</em>}' reference,
 * maintaining the bidirectional opposite: the old branch group's inverse is
 * removed and the new one's is added before the local cache is updated.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @param value the new value of the '<em>Branch Group</em>' reference.
 * @see #getBranchGroup()
 * @generated
 */
public void setBranchGroup(BranchGroup newBranchGroup) {
    if (newBranchGroup != branchGroup) {
        NotificationChain msgs = null;
        if (branchGroup != null)
            msgs = ((InternalEObject)branchGroup).eInverseRemove(this, OperationalLimitsPackage.BRANCH_GROUP__BRANCH_GROUP_TERMINAL, BranchGroup.class, msgs);
        if (newBranchGroup != null)
            msgs = ((InternalEObject)newBranchGroup).eInverseAdd(this, OperationalLimitsPackage.BRANCH_GROUP__BRANCH_GROUP_TERMINAL, BranchGroup.class, msgs);
        msgs = basicSetBranchGroup(newBranchGroup, msgs);
        // Dispatch any notifications accumulated by the inverse handshake.
        if (msgs != null) msgs.dispatch();
    }
}
/**
* Returns the value of the '<em><b>Terminal</b></em>' reference.
* It is bidirectional and its opposite is '{@link CIM15.IEC61970.Core.Terminal#getBranchGroupTerminal <em>Branch Group Terminal</em>}'.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Terminal</em>' reference isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Terminal</em>' reference.
* @see #setTerminal(Terminal)
* @see CIM15.IEC61970.Core.Terminal#getBranchGroupTerminal
* @generated
*/
public Terminal getTerminal() {
if (terminal != null && terminal.eIsProxy()) {
InternalEObject oldTerminal = (InternalEObject)terminal;
terminal = (Terminal)eResolveProxy(oldTerminal);
if (terminal != oldTerminal) {
}
}
return terminal;
}
	/**
	 * Returns the '<em>Terminal</em>' reference without resolving a proxy.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public Terminal basicGetTerminal() {
		return terminal;
	}
	/**
	 * Assigns the '<em>Terminal</em>' reference directly, without any inverse
	 * handshaking; used by the {@code eInverseAdd}/{@code eInverseRemove}
	 * machinery. The previous value is captured but no notification is produced
	 * here, so {@code msgs} is returned unchanged.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetTerminal(Terminal newTerminal, NotificationChain msgs) {
		Terminal oldTerminal = terminal;
		terminal = newTerminal;
		return msgs;
	}
	/**
	 * Sets the value of the '{@link CIM15.IEC61970.OperationalLimits.BranchGroupTerminal#getTerminal <em>Terminal</em>}' reference.
	 * Maintains the bidirectional opposite: the old terminal (if any) is
	 * detached, the new one is attached, and any accumulated notifications are
	 * dispatched at the end.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @param newTerminal the new value of the '<em>Terminal</em>' reference.
	 * @see #getTerminal()
	 * @generated
	 */
	public void setTerminal(Terminal newTerminal) {
		if (newTerminal != terminal) {
			NotificationChain msgs = null;
			// Detach this object from the previous terminal's inverse reference.
			if (terminal != null)
				msgs = ((InternalEObject)terminal).eInverseRemove(this, CorePackage.TERMINAL__BRANCH_GROUP_TERMINAL, Terminal.class, msgs);
			// Attach this object to the new terminal's inverse reference.
			if (newTerminal != null)
				msgs = ((InternalEObject)newTerminal).eInverseAdd(this, CorePackage.TERMINAL__BRANCH_GROUP_TERMINAL, Terminal.class, msgs);
			msgs = basicSetTerminal(newTerminal, msgs);
			if (msgs != null) msgs.dispatch();
		}
	}
	/**
	 * Returns the value of the '<em><b>Positive Flow In</b></em>' attribute.
	 * Note: the "unsettable" state is tracked separately via
	 * {@link #isSetPositiveFlowIn()}; this getter just returns the stored value.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @return the value of the '<em>Positive Flow In</em>' attribute.
	 * @see #isSetPositiveFlowIn()
	 * @see #unsetPositiveFlowIn()
	 * @see #setPositiveFlowIn(boolean)
	 * @generated
	 */
	public boolean isPositiveFlowIn() {
		return positiveFlowIn;
	}
	/**
	 * Sets the value of the '{@link CIM15.IEC61970.OperationalLimits.BranchGroupTerminal#isPositiveFlowIn <em>Positive Flow In</em>}' attribute
	 * and marks the attribute as explicitly set.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @param newPositiveFlowIn the new value of the '<em>Positive Flow In</em>' attribute.
	 * @see #isSetPositiveFlowIn()
	 * @see #unsetPositiveFlowIn()
	 * @see #isPositiveFlowIn()
	 * @generated
	 */
	public void setPositiveFlowIn(boolean newPositiveFlowIn) {
		positiveFlowIn = newPositiveFlowIn;
		positiveFlowInESet = true;
	}
	/**
	 * Unsets the value of the '{@link CIM15.IEC61970.OperationalLimits.BranchGroupTerminal#isPositiveFlowIn <em>Positive Flow In</em>}' attribute:
	 * restores the default value and clears the explicitly-set flag.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #isSetPositiveFlowIn()
	 * @see #isPositiveFlowIn()
	 * @see #setPositiveFlowIn(boolean)
	 * @generated
	 */
	public void unsetPositiveFlowIn() {
		positiveFlowIn = POSITIVE_FLOW_IN_EDEFAULT;
		positiveFlowInESet = false;
	}
	/**
	 * Returns whether the value of the '{@link CIM15.IEC61970.OperationalLimits.BranchGroupTerminal#isPositiveFlowIn <em>Positive Flow In</em>}' attribute is set,
	 * i.e. whether {@link #setPositiveFlowIn(boolean)} has been called since the
	 * last {@link #unsetPositiveFlowIn()}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @return whether the value of the '<em>Positive Flow In</em>' attribute is set.
	 * @see #unsetPositiveFlowIn()
	 * @see #isPositiveFlowIn()
	 * @see #setPositiveFlowIn(boolean)
	 * @generated
	 */
	public boolean isSetPositiveFlowIn() {
		return positiveFlowInESet;
	}
	/**
	 * Hooks up the inverse side of the bidirectional references when another
	 * object adds this one: first detaches any existing opposite, then stores
	 * the new one via the corresponding basicSet method.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case OperationalLimitsPackage.BRANCH_GROUP_TERMINAL__BRANCH_GROUP:
				if (branchGroup != null)
					msgs = ((InternalEObject)branchGroup).eInverseRemove(this, OperationalLimitsPackage.BRANCH_GROUP__BRANCH_GROUP_TERMINAL, BranchGroup.class, msgs);
				return basicSetBranchGroup((BranchGroup)otherEnd, msgs);
			case OperationalLimitsPackage.BRANCH_GROUP_TERMINAL__TERMINAL:
				if (terminal != null)
					msgs = ((InternalEObject)terminal).eInverseRemove(this, CorePackage.TERMINAL__BRANCH_GROUP_TERMINAL, Terminal.class, msgs);
				return basicSetTerminal((Terminal)otherEnd, msgs);
		}
		return super.eInverseAdd(otherEnd, featureID, msgs);
	}
	/**
	 * Clears the local side of a bidirectional reference when the opposite end
	 * removes this object.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case OperationalLimitsPackage.BRANCH_GROUP_TERMINAL__BRANCH_GROUP:
				return basicSetBranchGroup(null, msgs);
			case OperationalLimitsPackage.BRANCH_GROUP_TERMINAL__TERMINAL:
				return basicSetTerminal(null, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}
	/**
	 * Reflective feature accessor: returns the value of the feature identified
	 * by {@code featureID}, optionally resolving proxies for references.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case OperationalLimitsPackage.BRANCH_GROUP_TERMINAL__BRANCH_GROUP:
				if (resolve) return getBranchGroup();
				return basicGetBranchGroup();
			case OperationalLimitsPackage.BRANCH_GROUP_TERMINAL__TERMINAL:
				if (resolve) return getTerminal();
				return basicGetTerminal();
			case OperationalLimitsPackage.BRANCH_GROUP_TERMINAL__POSITIVE_FLOW_IN:
				return isPositiveFlowIn();
		}
		return super.eGet(featureID, resolve, coreType);
	}
	/**
	 * Reflective feature mutator: sets the feature identified by
	 * {@code featureID} to {@code newValue}, delegating to the typed setters.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case OperationalLimitsPackage.BRANCH_GROUP_TERMINAL__BRANCH_GROUP:
				setBranchGroup((BranchGroup)newValue);
				return;
			case OperationalLimitsPackage.BRANCH_GROUP_TERMINAL__TERMINAL:
				setTerminal((Terminal)newValue);
				return;
			case OperationalLimitsPackage.BRANCH_GROUP_TERMINAL__POSITIVE_FLOW_IN:
				setPositiveFlowIn((Boolean)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}
	/**
	 * Reflective feature un-setter: restores the feature identified by
	 * {@code featureID} to its default/unset state.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case OperationalLimitsPackage.BRANCH_GROUP_TERMINAL__BRANCH_GROUP:
				setBranchGroup((BranchGroup)null);
				return;
			case OperationalLimitsPackage.BRANCH_GROUP_TERMINAL__TERMINAL:
				setTerminal((Terminal)null);
				return;
			case OperationalLimitsPackage.BRANCH_GROUP_TERMINAL__POSITIVE_FLOW_IN:
				unsetPositiveFlowIn();
				return;
		}
		super.eUnset(featureID);
	}
	/**
	 * Reflective "is set" query: references count as set when non-null, the
	 * unsettable boolean attribute when its ESet flag is raised.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case OperationalLimitsPackage.BRANCH_GROUP_TERMINAL__BRANCH_GROUP:
				return branchGroup != null;
			case OperationalLimitsPackage.BRANCH_GROUP_TERMINAL__TERMINAL:
				return terminal != null;
			case OperationalLimitsPackage.BRANCH_GROUP_TERMINAL__POSITIVE_FLOW_IN:
				return isSetPositiveFlowIn();
		}
		return super.eIsSet(featureID);
	}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public String toString() {
if (eIsProxy()) return super.toString();
StringBuffer result = new StringBuffer(super.toString());
result.append(" (positiveFlowIn: ");
if (positiveFlowInESet) result.append(positiveFlowIn); else result.append("<unset>");
result.append(')');
return result.toString();
}
} // BranchGroupTerminal
| |
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// http://code.google.com/p/protobuf/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.github.os72.protobuf241;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
/**
* Reads and decodes protocol message fields.
*
* This class contains two kinds of methods: methods that read specific
* protocol message constructs and field types (e.g. {@link #readTag()} and
* {@link #readInt32()}) and methods that read low-level values (e.g.
* {@link #readRawVarint32()} and {@link #readRawBytes}). If you are reading
* encoded protocol messages, you should use the former methods, but if you are
* reading some other format of your own design, use the latter.
*
* @author kenton@google.com Kenton Varda
*/
public final class CodedInputStream {
  /**
   * Create a new CodedInputStream wrapping the given InputStream.
   */
  public static CodedInputStream newInstance(final InputStream input) {
    return new CodedInputStream(input);
  }
  /**
   * Create a new CodedInputStream wrapping the given byte array.
   */
  public static CodedInputStream newInstance(final byte[] buf) {
    return newInstance(buf, 0, buf.length);
  }
  /**
   * Create a new CodedInputStream wrapping the given byte array slice.
   */
  public static CodedInputStream newInstance(final byte[] buf, final int off,
                                             final int len) {
    CodedInputStream result = new CodedInputStream(buf, off, len);
    try {
      // Some uses of CodedInputStream can be more efficient if they know
      // exactly how many bytes are available.  By pushing the end point of the
      // buffer as a limit, we allow them to get this information via
      // getBytesUntilLimit().  Pushing a limit that we know is at the end of
      // the stream can never hurt, since we can never read past that point
      // anyway.
      result.pushLimit(len);
    } catch (InvalidProtocolBufferException ex) {
      // The only reason pushLimit() might throw an exception here is if len
      // is negative. Normally pushLimit()'s parameter comes directly off the
      // wire, so it's important to catch exceptions in case of corrupt or
      // malicious data. However, in this case, we expect that len is not a
      // user-supplied value, so we can assume that it being negative indicates
      // a programming error. Therefore, throwing an unchecked exception is
      // appropriate.
      throw new IllegalArgumentException(ex);
    }
    return result;
  }
// -----------------------------------------------------------------
  /**
   * Attempt to read a field tag, returning zero if we have reached EOF.
   * Protocol message parsers use this to read tags, since a protocol message
   * may legally end wherever a tag occurs, and zero is not a valid tag number.
   * The returned tag is also remembered for {@link #checkLastTagWas(int)}.
   */
  public int readTag() throws IOException {
    if (isAtEnd()) {
      lastTag = 0;
      return 0;
    }
    lastTag = readRawVarint32();
    if (WireFormat.getTagFieldNumber(lastTag) == 0) {
      // If we actually read zero (or any tag number corresponding to field
      // number zero), that's not a valid tag.
      throw InvalidProtocolBufferException.invalidTag();
    }
    return lastTag;
  }
  /**
   * Verifies that the last call to readTag() returned the given tag value.
   * This is used to verify that a nested group ended with the correct
   * end tag.
   *
   * @throws InvalidProtocolBufferException {@code value} does not match the
   *                                        last tag.
   */
  public void checkLastTagWas(final int value)
      throws InvalidProtocolBufferException {
    if (lastTag != value) {
      throw InvalidProtocolBufferException.invalidEndTag();
    }
  }
  /**
   * Reads and discards a single field, given its tag value. Dispatches on the
   * wire type encoded in the tag to know how many bytes to consume.
   *
   * @return {@code false} if the tag is an endgroup tag, in which case
   *         nothing is skipped. Otherwise, returns {@code true}.
   */
  public boolean skipField(final int tag) throws IOException {
    switch (WireFormat.getTagWireType(tag)) {
      case WireFormat.WIRETYPE_VARINT:
        readInt32();
        return true;
      case WireFormat.WIRETYPE_FIXED64:
        readRawLittleEndian64();
        return true;
      case WireFormat.WIRETYPE_LENGTH_DELIMITED:
        skipRawBytes(readRawVarint32());
        return true;
      case WireFormat.WIRETYPE_START_GROUP:
        // Recursively skip the nested group, then require the matching
        // end-group tag for the same field number.
        skipMessage();
        checkLastTagWas(
          WireFormat.makeTag(WireFormat.getTagFieldNumber(tag),
                             WireFormat.WIRETYPE_END_GROUP));
        return true;
      case WireFormat.WIRETYPE_END_GROUP:
        return false;
      case WireFormat.WIRETYPE_FIXED32:
        readRawLittleEndian32();
        return true;
      default:
        throw InvalidProtocolBufferException.invalidWireType();
    }
  }
/**
* Reads and discards an entire message. This will read either until EOF
* or until an endgroup tag, whichever comes first.
*/
public void skipMessage() throws IOException {
while (true) {
final int tag = readTag();
if (tag == 0 || !skipField(tag)) {
return;
}
}
}
// -----------------------------------------------------------------
  /** Read a {@code double} field value from the stream (8-byte little-endian bits). */
  public double readDouble() throws IOException {
    return Double.longBitsToDouble(readRawLittleEndian64());
  }
  /** Read a {@code float} field value from the stream (4-byte little-endian bits). */
  public float readFloat() throws IOException {
    return Float.intBitsToFloat(readRawLittleEndian32());
  }
  /** Read a {@code uint64} field value from the stream (plain varint). */
  public long readUInt64() throws IOException {
    return readRawVarint64();
  }
  /** Read an {@code int64} field value from the stream (plain varint). */
  public long readInt64() throws IOException {
    return readRawVarint64();
  }
  /** Read an {@code int32} field value from the stream (plain varint). */
  public int readInt32() throws IOException {
    return readRawVarint32();
  }
  /** Read a {@code fixed64} field value from the stream (8-byte little-endian). */
  public long readFixed64() throws IOException {
    return readRawLittleEndian64();
  }
  /** Read a {@code fixed32} field value from the stream (4-byte little-endian). */
  public int readFixed32() throws IOException {
    return readRawLittleEndian32();
  }
  /** Read a {@code bool} field value from the stream: any non-zero varint is true. */
  public boolean readBool() throws IOException {
    return readRawVarint32() != 0;
  }
  /**
   * Read a {@code string} field value from the stream: a varint length
   * followed by that many bytes of UTF-8 data.
   */
  public String readString() throws IOException {
    final int size = readRawVarint32();
    if (size <= (bufferSize - bufferPos) && size > 0) {
      // Fast path:  We already have the bytes in a contiguous buffer, so
      //   just copy directly from it.
      final String result = new String(buffer, bufferPos, size, "UTF-8");
      bufferPos += size;
      return result;
    } else {
      // Slow path:  Build a byte array first then copy it.
      // (readRawBytes also handles size == 0 and rejects negative sizes.)
      return new String(readRawBytes(size), "UTF-8");
    }
  }
  /**
   * Read a {@code group} field value from the stream by delegating to the
   * builder's merge logic; tracks recursion depth to guard against
   * maliciously deeply nested input, and verifies the matching end-group tag.
   */
  public void readGroup(final int fieldNumber,
                        final MessageLite.Builder builder,
                        final ExtensionRegistryLite extensionRegistry)
      throws IOException {
    if (recursionDepth >= recursionLimit) {
      throw InvalidProtocolBufferException.recursionLimitExceeded();
    }
    ++recursionDepth;
    builder.mergeFrom(this, extensionRegistry);
    checkLastTagWas(
      WireFormat.makeTag(fieldNumber, WireFormat.WIRETYPE_END_GROUP));
    --recursionDepth;
  }
  /**
   * Reads a {@code group} field value from the stream and merges it into the
   * given {@link UnknownFieldSet}.
   *
   * @deprecated UnknownFieldSet.Builder now implements MessageLite.Builder, so
   *             you can just call {@link #readGroup}.
   */
  @Deprecated
  public void readUnknownGroup(final int fieldNumber,
                               final MessageLite.Builder builder)
      throws IOException {
    // We know that UnknownFieldSet will ignore any ExtensionRegistry so it
    // is safe to pass null here.  (We can't call
    // ExtensionRegistry.getEmptyRegistry() because that would make this
    // class depend on ExtensionRegistry, which is not part of the lite
    // library.)
    readGroup(fieldNumber, builder, null);
  }
  /**
   * Read an embedded message field value from the stream: a varint length
   * followed by the message bytes. The length is enforced by pushing it as a
   * limit for the duration of the merge, and recursion depth is checked to
   * guard against deeply nested input.
   */
  public void readMessage(final MessageLite.Builder builder,
                          final ExtensionRegistryLite extensionRegistry)
      throws IOException {
    final int length = readRawVarint32();
    if (recursionDepth >= recursionLimit) {
      throw InvalidProtocolBufferException.recursionLimitExceeded();
    }
    final int oldLimit = pushLimit(length);
    ++recursionDepth;
    builder.mergeFrom(this, extensionRegistry);
    // An embedded message ends at its limit, so the "last tag" must be 0.
    checkLastTagWas(0);
    --recursionDepth;
    popLimit(oldLimit);
  }
  /**
   * Read a {@code bytes} field value from the stream: a varint length
   * followed by that many raw bytes, returned as a {@link ByteString}.
   */
  public ByteString readBytes() throws IOException {
    final int size = readRawVarint32();
    if (size == 0) {
      return ByteString.EMPTY;
    } else if (size <= (bufferSize - bufferPos) && size > 0) {
      // Fast path:  We already have the bytes in a contiguous buffer, so
      //   just copy directly from it.
      final ByteString result = ByteString.copyFrom(buffer, bufferPos, size);
      bufferPos += size;
      return result;
    } else {
      // Slow path:  Build a byte array first then copy it.
      // (readRawBytes rejects negative sizes.)
      return ByteString.copyFrom(readRawBytes(size));
    }
  }
  /** Read a {@code uint32} field value from the stream (plain varint). */
  public int readUInt32() throws IOException {
    return readRawVarint32();
  }
  /**
   * Read an enum field value from the stream.  Caller is responsible
   * for converting the numeric value to an actual enum.
   */
  public int readEnum() throws IOException {
    return readRawVarint32();
  }
  /** Read an {@code sfixed32} field value from the stream (4-byte little-endian). */
  public int readSFixed32() throws IOException {
    return readRawLittleEndian32();
  }
  /** Read an {@code sfixed64} field value from the stream (8-byte little-endian). */
  public long readSFixed64() throws IOException {
    return readRawLittleEndian64();
  }
  /** Read an {@code sint32} field value from the stream (ZigZag-decoded varint). */
  public int readSInt32() throws IOException {
    return decodeZigZag32(readRawVarint32());
  }
  /** Read an {@code sint64} field value from the stream (ZigZag-decoded varint). */
  public long readSInt64() throws IOException {
    return decodeZigZag64(readRawVarint64());
  }
// =================================================================
  /**
   * Read a raw Varint from the stream.  If larger than 32 bits, discard the
   * upper bits.  The nested structure hand-unrolls the first five bytes for
   * speed; a varint may occupy up to ten bytes on the wire.
   */
  public int readRawVarint32() throws IOException {
    byte tmp = readRawByte();
    if (tmp >= 0) {
      // Single-byte varint (high bit clear).
      return tmp;
    }
    int result = tmp & 0x7f;
    if ((tmp = readRawByte()) >= 0) {
      result |= tmp << 7;
    } else {
      result |= (tmp & 0x7f) << 7;
      if ((tmp = readRawByte()) >= 0) {
        result |= tmp << 14;
      } else {
        result |= (tmp & 0x7f) << 14;
        if ((tmp = readRawByte()) >= 0) {
          result |= tmp << 21;
        } else {
          result |= (tmp & 0x7f) << 21;
          result |= (tmp = readRawByte()) << 28;
          if (tmp < 0) {
            // Discard upper 32 bits: consume up to five more continuation
            // bytes; the first byte with the high bit clear ends the varint.
            for (int i = 0; i < 5; i++) {
              if (readRawByte() >= 0) {
                return result;
              }
            }
            throw InvalidProtocolBufferException.malformedVarint();
          }
        }
      }
    }
    return result;
  }
  /**
   * Reads a varint from the input one byte at a time, so that it does not
   * read any bytes after the end of the varint.  If you simply wrapped the
   * stream in a CodedInputStream and used {@link #readRawVarint32(InputStream)}
   * then you would probably end up reading past the end of the varint since
   * CodedInputStream buffers its input.
   */
  static int readRawVarint32(final InputStream input) throws IOException {
    final int firstByte = input.read();
    if (firstByte == -1) {
      throw InvalidProtocolBufferException.truncatedMessage();
    }
    return readRawVarint32(firstByte, input);
  }
  /**
   * Like {@link #readRawVarint32(InputStream)}, but expects that the caller
   * has already read one byte.  This allows the caller to determine if EOF
   * has been reached before attempting to read.
   */
  public static int readRawVarint32(
      final int firstByte, final InputStream input) throws IOException {
    if ((firstByte & 0x80) == 0) {
      return firstByte;
    }
    int result = firstByte & 0x7f;
    int offset = 7;
    for (; offset < 32; offset += 7) {
      final int b = input.read();
      if (b == -1) {
        throw InvalidProtocolBufferException.truncatedMessage();
      }
      result |= (b & 0x7f) << offset;
      if ((b & 0x80) == 0) {
        return result;
      }
    }
    // Keep reading up to 64 bits: the bytes beyond bit 31 are consumed but
    // their payload is discarded, matching readRawVarint32()'s truncation.
    for (; offset < 64; offset += 7) {
      final int b = input.read();
      if (b == -1) {
        throw InvalidProtocolBufferException.truncatedMessage();
      }
      if ((b & 0x80) == 0) {
        return result;
      }
    }
    throw InvalidProtocolBufferException.malformedVarint();
  }
/** Read a raw Varint from the stream. */
public long readRawVarint64() throws IOException {
int shift = 0;
long result = 0;
while (shift < 64) {
final byte b = readRawByte();
result |= (long)(b & 0x7F) << shift;
if ((b & 0x80) == 0) {
return result;
}
shift += 7;
}
throw InvalidProtocolBufferException.malformedVarint();
}
/** Read a 32-bit little-endian integer from the stream. */
public int readRawLittleEndian32() throws IOException {
final byte b1 = readRawByte();
final byte b2 = readRawByte();
final byte b3 = readRawByte();
final byte b4 = readRawByte();
return (((int)b1 & 0xff) ) |
(((int)b2 & 0xff) << 8) |
(((int)b3 & 0xff) << 16) |
(((int)b4 & 0xff) << 24);
}
/** Read a 64-bit little-endian integer from the stream. */
public long readRawLittleEndian64() throws IOException {
final byte b1 = readRawByte();
final byte b2 = readRawByte();
final byte b3 = readRawByte();
final byte b4 = readRawByte();
final byte b5 = readRawByte();
final byte b6 = readRawByte();
final byte b7 = readRawByte();
final byte b8 = readRawByte();
return (((long)b1 & 0xff) ) |
(((long)b2 & 0xff) << 8) |
(((long)b3 & 0xff) << 16) |
(((long)b4 & 0xff) << 24) |
(((long)b5 & 0xff) << 32) |
(((long)b6 & 0xff) << 40) |
(((long)b7 & 0xff) << 48) |
(((long)b8 & 0xff) << 56);
}
/**
* Decode a ZigZag-encoded 32-bit value. ZigZag encodes signed integers
* into values that can be efficiently encoded with varint. (Otherwise,
* negative values must be sign-extended to 64 bits to be varint encoded,
* thus always taking 10 bytes on the wire.)
*
* @param n An unsigned 32-bit integer, stored in a signed int because
* Java has no explicit unsigned support.
* @return A signed 32-bit integer.
*/
public static int decodeZigZag32(final int n) {
return (n >>> 1) ^ -(n & 1);
}
  /**
   * Decode a ZigZag-encoded 64-bit value.  ZigZag encodes signed integers
   * into values that can be efficiently encoded with varint.  (Otherwise,
   * negative values must be sign-extended to 64 bits to be varint encoded,
   * thus always taking 10 bytes on the wire.)
   *
   * @param n An unsigned 64-bit integer, stored in a signed long because
   *          Java has no explicit unsigned support.
   * @return A signed 64-bit integer.
   */
  public static long decodeZigZag64(final long n) {
    return (n >>> 1) ^ -(n & 1);
  }
// -----------------------------------------------------------------
  // Internal buffer; for the byte-array constructor this aliases the caller's
  // array, for the stream constructor it is a private read-ahead buffer.
  private final byte[] buffer;
  // Number of valid bytes in {@code buffer} (clipped by the current limit).
  private int bufferSize;
  // Bytes hidden past the current limit; re-added when the limit is popped.
  private int bufferSizeAfterLimit;
  // Read cursor within {@code buffer}.
  private int bufferPos;
  // Underlying stream, or null when reading directly from a byte array.
  private final InputStream input;
  // Last tag returned by readTag(); checked by checkLastTagWas().
  private int lastTag;
  /**
   * The total number of bytes read before the current buffer.  The total
   * bytes read up to the current position can be computed as
   * {@code totalBytesRetired + bufferPos}.  This value may be negative if
   * reading started in the middle of the current buffer (e.g. if the
   * constructor that takes a byte array and an offset was used).
   */
  private int totalBytesRetired;
  /** The absolute position of the end of the current message. */
  private int currentLimit = Integer.MAX_VALUE;
  /** See setRecursionLimit() */
  private int recursionDepth;
  private int recursionLimit = DEFAULT_RECURSION_LIMIT;
  /** See setSizeLimit() */
  private int sizeLimit = DEFAULT_SIZE_LIMIT;
  private static final int DEFAULT_RECURSION_LIMIT = 64;
  private static final int DEFAULT_SIZE_LIMIT = 64 << 20;  // 64MB
  private static final int BUFFER_SIZE = 4096;
  // Byte-array-backed constructor: reads directly from the caller's array.
  // totalBytesRetired starts at -off so getTotalBytesRead() counts from the
  // slice start rather than the array start.
  private CodedInputStream(final byte[] buffer, final int off, final int len) {
    this.buffer = buffer;
    bufferSize = off + len;
    bufferPos = off;
    totalBytesRetired = -off;
    input = null;
  }
  // Stream-backed constructor: starts with an empty read-ahead buffer that
  // refillBuffer() fills on demand.
  private CodedInputStream(final InputStream input) {
    buffer = new byte[BUFFER_SIZE];
    bufferSize = 0;
    bufferPos = 0;
    totalBytesRetired = 0;
    this.input = input;
  }
/**
* Set the maximum message recursion depth. In order to prevent malicious
* messages from causing stack overflows, {@code CodedInputStream} limits
* how deeply messages may be nested. The default limit is 64.
*
* @return the old limit.
*/
public int setRecursionLimit(final int limit) {
if (limit < 0) {
throw new IllegalArgumentException(
"Recursion limit cannot be negative: " + limit);
}
final int oldLimit = recursionLimit;
recursionLimit = limit;
return oldLimit;
}
/**
* Set the maximum message size. In order to prevent malicious
* messages from exhausting memory or causing integer overflows,
* {@code CodedInputStream} limits how large a message may be.
* The default limit is 64MB. You should set this limit as small
* as you can without harming your app's functionality. Note that
* size limits only apply when reading from an {@code InputStream}, not
* when constructed around a raw byte array (nor with
* {@link ByteString#newCodedInput}).
* <p>
* If you want to read several messages from a single CodedInputStream, you
* could call {@link #resetSizeCounter()} after each one to avoid hitting the
* size limit.
*
* @return the old limit.
*/
public int setSizeLimit(final int limit) {
if (limit < 0) {
throw new IllegalArgumentException(
"Size limit cannot be negative: " + limit);
}
final int oldLimit = sizeLimit;
sizeLimit = limit;
return oldLimit;
}
  /**
   * Resets the current size counter to zero (see {@link #setSizeLimit(int)}).
   */
  public void resetSizeCounter() {
    totalBytesRetired = -bufferPos;
  }
  /**
   * Sets {@code currentLimit} to (current position) + {@code byteLimit}.  This
   * is called when descending into a length-delimited embedded message.
   *
   * <p>Note that {@code pushLimit()} does NOT affect how many bytes the
   * {@code CodedInputStream} reads from an underlying {@code InputStream} when
   * refreshing its buffer.  If you need to prevent reading past a certain
   * point in the underlying {@code InputStream} (e.g. because you expect it to
   * contain more data after the end of the message which you need to handle
   * differently) then you must place a wrapper around your {@code InputStream}
   * which limits the amount of data that can be read from it.
   *
   * @return the old limit.
   */
  public int pushLimit(int byteLimit) throws InvalidProtocolBufferException {
    if (byteLimit < 0) {
      throw InvalidProtocolBufferException.negativeSize();
    }
    // Convert the relative byte count into an absolute stream position.
    byteLimit += totalBytesRetired + bufferPos;
    final int oldLimit = currentLimit;
    if (byteLimit > oldLimit) {
      // A nested limit may never extend past its enclosing limit.
      throw InvalidProtocolBufferException.truncatedMessage();
    }
    currentLimit = byteLimit;
    recomputeBufferSizeAfterLimit();
    return oldLimit;
  }
  // Clips bufferSize so reads cannot pass currentLimit; any bytes already in
  // the buffer beyond the limit are parked in bufferSizeAfterLimit and
  // restored when the limit changes.
  private void recomputeBufferSizeAfterLimit() {
    bufferSize += bufferSizeAfterLimit;
    final int bufferEnd = totalBytesRetired + bufferSize;
    if (bufferEnd > currentLimit) {
      // Limit is in current buffer.
      bufferSizeAfterLimit = bufferEnd - currentLimit;
      bufferSize -= bufferSizeAfterLimit;
    } else {
      bufferSizeAfterLimit = 0;
    }
  }
  /**
   * Discards the current limit, returning to the previous limit.
   *
   * @param oldLimit The old limit, as returned by {@code pushLimit}.
   */
  public void popLimit(final int oldLimit) {
    currentLimit = oldLimit;
    recomputeBufferSizeAfterLimit();
  }
  /**
   * Returns the number of bytes to be read before the current limit.
   * If no limit is set, returns -1.
   */
  public int getBytesUntilLimit() {
    if (currentLimit == Integer.MAX_VALUE) {
      // Integer.MAX_VALUE is the sentinel for "no limit".
      return -1;
    }
    final int currentAbsolutePosition = totalBytesRetired + bufferPos;
    return currentLimit - currentAbsolutePosition;
  }
  /**
   * Returns true if the stream has reached the end of the input.  This is the
   * case if either the end of the underlying input source has been reached or
   * if the stream has reached a limit created using {@link #pushLimit(int)}.
   */
  public boolean isAtEnd() throws IOException {
    // Only attempt a (non-mandatory) refill when the buffer is exhausted.
    return bufferPos == bufferSize && !refillBuffer(false);
  }
  /**
   * The total bytes read up to the current position.  Calling
   * {@link #resetSizeCounter()} resets this value to zero.
   */
  public int getTotalBytesRead() {
    return totalBytesRetired + bufferPos;
  }
  /**
   * Called when {@code this.buffer} is empty to read more bytes from the
   * input.  If {@code mustSucceed} is true, refillBuffer() guarantees that
   * either there will be at least one byte in the buffer when it returns
   * or it will throw an exception.  If {@code mustSucceed} is false,
   * refillBuffer() returns false if no more bytes were available.
   */
  private boolean refillBuffer(final boolean mustSucceed) throws IOException {
    if (bufferPos < bufferSize) {
      throw new IllegalStateException(
        "refillBuffer() called when buffer wasn't empty.");
    }
    if (totalBytesRetired + bufferSize == currentLimit) {
      // Oops, we hit a limit.
      if (mustSucceed) {
        throw InvalidProtocolBufferException.truncatedMessage();
      } else {
        return false;
      }
    }
    totalBytesRetired += bufferSize;
    bufferPos = 0;
    // A null input (byte-array-backed stream) behaves like EOF here.
    bufferSize = (input == null) ? -1 : input.read(buffer);
    if (bufferSize == 0 || bufferSize < -1) {
      throw new IllegalStateException(
          "InputStream#read(byte[]) returned invalid result: " + bufferSize +
          "\nThe InputStream implementation is buggy.");
    }
    if (bufferSize == -1) {
      bufferSize = 0;
      if (mustSucceed) {
        throw InvalidProtocolBufferException.truncatedMessage();
      } else {
        return false;
      }
    } else {
      recomputeBufferSizeAfterLimit();
      // Enforce the overall message size limit (stream-backed reads only).
      final int totalBytesRead =
        totalBytesRetired + bufferSize + bufferSizeAfterLimit;
      if (totalBytesRead > sizeLimit || totalBytesRead < 0) {
        throw InvalidProtocolBufferException.sizeLimitExceeded();
      }
      return true;
    }
  }
  /**
   * Read one byte from the input, refilling the buffer first if needed.
   *
   * @throws InvalidProtocolBufferException The end of the stream or the current
   *                                        limit was reached.
   */
  public byte readRawByte() throws IOException {
    if (bufferPos == bufferSize) {
      // mustSucceed=true: refillBuffer throws rather than returning empty.
      refillBuffer(true);
    }
    return buffer[bufferPos++];
  }
  /**
   * Read a fixed size of bytes from the input.
   *
   * <p>Requests that already fit in the internal buffer are copied out directly. Requests smaller
   * than {@code BUFFER_SIZE} are allocated up front and filled via repeated refills. Anything
   * larger is read in bounded chunks first, so a maliciously large length prefix cannot force a
   * huge allocation before the bytes actually exist on the stream.
   *
   * @param size the exact number of bytes to read; must be non-negative
   * @return a newly allocated array containing exactly {@code size} bytes
   * @throws InvalidProtocolBufferException The end of the stream or the current
   * limit was reached.
   */
  public byte[] readRawBytes(final int size) throws IOException {
    if (size < 0) {
      throw InvalidProtocolBufferException.negativeSize();
    }
    // The request would cross the currently pushed limit: consume the remaining
    // allowed bytes (keeping limit bookkeeping consistent), then fail.
    if (totalBytesRetired + bufferPos + size > currentLimit) {
      // Read to the end of the stream anyway.
      skipRawBytes(currentLimit - totalBytesRetired - bufferPos);
      // Then fail.
      throw InvalidProtocolBufferException.truncatedMessage();
    }
    if (size <= bufferSize - bufferPos) {
      // We have all the bytes we need already.
      final byte[] bytes = new byte[size];
      System.arraycopy(buffer, bufferPos, bytes, 0, size);
      bufferPos += size;
      return bytes;
    } else if (size < BUFFER_SIZE) {
      // Reading more bytes than are in the buffer, but not an excessive number
      // of bytes. We can safely allocate the resulting array ahead of time.
      // First copy what we have.
      final byte[] bytes = new byte[size];
      int pos = bufferSize - bufferPos;
      System.arraycopy(buffer, bufferPos, bytes, 0, pos);
      bufferPos = bufferSize;
      // We want to use refillBuffer() and then copy from the buffer into our
      // byte array rather than reading directly into our byte array because
      // the input may be unbuffered.
      refillBuffer(true);
      // Drain full buffers while more than one refill's worth remains.
      while (size - pos > bufferSize) {
        System.arraycopy(buffer, 0, bytes, pos, bufferSize);
        pos += bufferSize;
        bufferPos = bufferSize;
        refillBuffer(true);
      }
      // Copy the final partial buffer.
      System.arraycopy(buffer, 0, bytes, pos, size - pos);
      bufferPos = size - pos;
      return bytes;
    } else {
      // The size is very large. For security reasons, we can't allocate the
      // entire byte array yet. The size comes directly from the input, so a
      // maliciously-crafted message could provide a bogus very large size in
      // order to trick the app into allocating a lot of memory. We avoid this
      // by allocating and reading only a small chunk at a time, so that the
      // malicious message must actually *be* extremely large to cause
      // problems. Meanwhile, we limit the allowed size of a message elsewhere.
      // Remember the buffer markers since we'll have to copy the bytes out of
      // it later.
      final int originalBufferPos = bufferPos;
      final int originalBufferSize = bufferSize;
      // Mark the current buffer consumed.
      totalBytesRetired += bufferSize;
      bufferPos = 0;
      bufferSize = 0;
      // Read all the rest of the bytes we need.
      int sizeLeft = size - (originalBufferSize - originalBufferPos);
      final List<byte[]> chunks = new ArrayList<byte[]>();
      while (sizeLeft > 0) {
        final byte[] chunk = new byte[Math.min(sizeLeft, BUFFER_SIZE)];
        int pos = 0;
        while (pos < chunk.length) {
          // Bypass the internal buffer and read straight from the stream.
          final int n = (input == null) ? -1 :
              input.read(chunk, pos, chunk.length - pos);
          if (n == -1) {
            throw InvalidProtocolBufferException.truncatedMessage();
          }
          totalBytesRetired += n;
          pos += n;
        }
        sizeLeft -= chunk.length;
        chunks.add(chunk);
      }
      // OK, got everything. Now concatenate it all into one buffer.
      final byte[] bytes = new byte[size];
      // Start by copying the leftover bytes from this.buffer.
      int pos = originalBufferSize - originalBufferPos;
      System.arraycopy(buffer, originalBufferPos, bytes, 0, pos);
      // And now all the chunks.
      for (final byte[] chunk : chunks) {
        System.arraycopy(chunk, 0, bytes, pos, chunk.length);
        pos += chunk.length;
      }
      // Done.
      return bytes;
    }
  }
  /**
   * Reads and discards {@code size} bytes.
   *
   * <p>NOTE(review): this always pulls the skipped bytes through {@code refillBuffer()} rather
   * than calling {@code InputStream.skip()}; per the comment below this keeps the limit
   * bookkeeping authoritative — confirm before optimizing.
   *
   * @param size the number of bytes to discard; must be non-negative
   * @throws InvalidProtocolBufferException The end of the stream or the current
   * limit was reached.
   */
  public void skipRawBytes(final int size) throws IOException {
    if (size < 0) {
      throw InvalidProtocolBufferException.negativeSize();
    }
    // Skipping past the current pushed limit: consume what is allowed, then fail.
    if (totalBytesRetired + bufferPos + size > currentLimit) {
      // Read to the end of the stream anyway.
      skipRawBytes(currentLimit - totalBytesRetired - bufferPos);
      // Then fail.
      throw InvalidProtocolBufferException.truncatedMessage();
    }
    if (size <= bufferSize - bufferPos) {
      // We have all the bytes we need already.
      bufferPos += size;
    } else {
      // Skipping more bytes than are in the buffer. First skip what we have.
      int pos = bufferSize - bufferPos;
      bufferPos = bufferSize;
      // Keep refilling the buffer until we get to the point we wanted to skip
      // to. This has the side effect of ensuring the limits are updated
      // correctly.
      refillBuffer(true);
      while (size - pos > bufferSize) {
        pos += bufferSize;
        bufferPos = bufferSize;
        refillBuffer(true);
      }
      bufferPos = size - pos;
    }
  }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.connectwisdom.model;
import java.io.Serializable;
import java.util.Objects;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * Request object for the <code>UpdateContent</code> operation, which updates a piece of content in an Amazon Connect
 * Wisdom knowledge base.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/wisdom-2020-10-19/UpdateContent" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateContentRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The identifier of the content. Can be either the ID or the ARN. URLs cannot contain the ARN. */
    private String contentId;

    /** The identifier of the knowledge base. Can be either the ID or the ARN. */
    private String knowledgeBaseId;

    /**
     * A key/value map to store attributes without affecting tagging or recommendations. For example, when
     * synchronizing data between an external system and Wisdom, you can store an external version identifier as
     * metadata to utilize for determining drift.
     */
    private java.util.Map<String, String> metadata;

    /**
     * The URI for the article. If the knowledge base has a templateUri, setting this argument overrides it for this
     * piece of content. To remove an existing <code>overrideLinkOutUri</code>, exclude this argument and set
     * <code>removeOverrideLinkOutUri</code> to true.
     */
    private String overrideLinkOutUri;

    /** Whether to unset the existing <code>overrideLinkOutUri</code> if it exists. */
    private Boolean removeOverrideLinkOutUri;

    /**
     * The <code>revisionId</code> of the content resource to update, taken from an earlier call to
     * <code>GetContent</code>, <code>GetContentSummary</code>, <code>SearchContent</code>, or
     * <code>ListContents</code>. If included, this argument acts as an optimistic lock to ensure content was not
     * modified since it was last read. If it has been modified, this API throws a
     * <code>PreconditionFailedException</code>.
     */
    private String revisionId;

    /** The title of the content. */
    private String title;

    /**
     * A pointer to the uploaded asset. This value is returned by <a
     * href="https://docs.aws.amazon.com/wisdom/latest/APIReference/API_StartContentUpload.html">StartContentUpload</a>.
     */
    private String uploadId;

    /**
     * Sets the identifier of the content. Can be either the ID or the ARN. URLs cannot contain the ARN.
     *
     * @param contentId
     *        The identifier of the content. Can be either the ID or the ARN. URLs cannot contain the ARN.
     */
    public void setContentId(String contentId) {
        this.contentId = contentId;
    }

    /**
     * Returns the identifier of the content. Can be either the ID or the ARN.
     *
     * @return The identifier of the content. Can be either the ID or the ARN. URLs cannot contain the ARN.
     */
    public String getContentId() {
        return this.contentId;
    }

    /**
     * Fluent variant of {@link #setContentId(String)}.
     *
     * @param contentId
     *        The identifier of the content. Can be either the ID or the ARN. URLs cannot contain the ARN.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateContentRequest withContentId(String contentId) {
        setContentId(contentId);
        return this;
    }

    /**
     * Sets the identifier of the knowledge base. Can be either the ID or the ARN.
     *
     * @param knowledgeBaseId
     *        The identifier of the knowledge base. Can be either the ID or the ARN.
     */
    public void setKnowledgeBaseId(String knowledgeBaseId) {
        this.knowledgeBaseId = knowledgeBaseId;
    }

    /**
     * Returns the identifier of the knowledge base. Can be either the ID or the ARN.
     *
     * @return The identifier of the knowledge base. Can be either the ID or the ARN.
     */
    public String getKnowledgeBaseId() {
        return this.knowledgeBaseId;
    }

    /**
     * Fluent variant of {@link #setKnowledgeBaseId(String)}.
     *
     * @param knowledgeBaseId
     *        The identifier of the knowledge base. Can be either the ID or the ARN.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateContentRequest withKnowledgeBaseId(String knowledgeBaseId) {
        setKnowledgeBaseId(knowledgeBaseId);
        return this;
    }

    /**
     * Returns the key/value map of metadata attributes, or {@code null} if none have been set.
     *
     * @return A key/value map to store attributes without affecting tagging or recommendations. For example, when
     *         synchronizing data between an external system and Wisdom, you can store an external version identifier
     *         as metadata to utilize for determining drift.
     */
    public java.util.Map<String, String> getMetadata() {
        return metadata;
    }

    /**
     * Sets the key/value map of metadata attributes.
     *
     * @param metadata
     *        A key/value map to store attributes without affecting tagging or recommendations. For example, when
     *        synchronizing data between an external system and Wisdom, you can store an external version identifier
     *        as metadata to utilize for determining drift.
     */
    public void setMetadata(java.util.Map<String, String> metadata) {
        this.metadata = metadata;
    }

    /**
     * Fluent variant of {@link #setMetadata(java.util.Map)}.
     *
     * @param metadata
     *        A key/value map to store attributes without affecting tagging or recommendations.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateContentRequest withMetadata(java.util.Map<String, String> metadata) {
        setMetadata(metadata);
        return this;
    }

    /**
     * Add a single Metadata entry, creating the map on first use.
     *
     * @param key
     *        The metadata key; must not already be present.
     * @param value
     *        The metadata value.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @throws IllegalArgumentException
     *         if {@code key} has already been added.
     * @see UpdateContentRequest#withMetadata
     */
    public UpdateContentRequest addMetadataEntry(String key, String value) {
        if (null == this.metadata) {
            this.metadata = new java.util.HashMap<String, String>();
        }
        if (this.metadata.containsKey(key)) {
            // Fail fast on duplicates rather than silently overwriting an earlier value.
            throw new IllegalArgumentException("Duplicated keys (" + key + ") are provided.");
        }
        this.metadata.put(key, value);
        return this;
    }

    /**
     * Removes all the entries added into Metadata.
     *
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateContentRequest clearMetadataEntries() {
        this.metadata = null;
        return this;
    }

    /**
     * Sets the URI for the article. If the knowledge base has a templateUri, setting this argument overrides it for
     * this piece of content. To remove an existing <code>overrideLinkOutUri</code>, exclude this argument and set
     * <code>removeOverrideLinkOutUri</code> to true.
     *
     * @param overrideLinkOutUri
     *        The URI for the article.
     */
    public void setOverrideLinkOutUri(String overrideLinkOutUri) {
        this.overrideLinkOutUri = overrideLinkOutUri;
    }

    /**
     * Returns the URI for the article, if one overrides the knowledge base's templateUri.
     *
     * @return The URI for the article.
     */
    public String getOverrideLinkOutUri() {
        return this.overrideLinkOutUri;
    }

    /**
     * Fluent variant of {@link #setOverrideLinkOutUri(String)}.
     *
     * @param overrideLinkOutUri
     *        The URI for the article.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateContentRequest withOverrideLinkOutUri(String overrideLinkOutUri) {
        setOverrideLinkOutUri(overrideLinkOutUri);
        return this;
    }

    /**
     * Sets whether to unset the existing <code>overrideLinkOutUri</code> if it exists.
     *
     * @param removeOverrideLinkOutUri
     *        Unset the existing <code>overrideLinkOutUri</code> if it exists.
     */
    public void setRemoveOverrideLinkOutUri(Boolean removeOverrideLinkOutUri) {
        this.removeOverrideLinkOutUri = removeOverrideLinkOutUri;
    }

    /**
     * Returns whether to unset the existing <code>overrideLinkOutUri</code> if it exists.
     *
     * @return Unset the existing <code>overrideLinkOutUri</code> if it exists.
     */
    public Boolean getRemoveOverrideLinkOutUri() {
        return this.removeOverrideLinkOutUri;
    }

    /**
     * Fluent variant of {@link #setRemoveOverrideLinkOutUri(Boolean)}.
     *
     * @param removeOverrideLinkOutUri
     *        Unset the existing <code>overrideLinkOutUri</code> if it exists.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateContentRequest withRemoveOverrideLinkOutUri(Boolean removeOverrideLinkOutUri) {
        setRemoveOverrideLinkOutUri(removeOverrideLinkOutUri);
        return this;
    }

    /**
     * Boolean-style accessor equivalent to {@link #getRemoveOverrideLinkOutUri()}.
     *
     * @return Unset the existing <code>overrideLinkOutUri</code> if it exists.
     */
    public Boolean isRemoveOverrideLinkOutUri() {
        return this.removeOverrideLinkOutUri;
    }

    /**
     * Sets the <code>revisionId</code> of the content resource to update, taken from an earlier call to
     * <code>GetContent</code>, <code>GetContentSummary</code>, <code>SearchContent</code>, or
     * <code>ListContents</code>. If included, this argument acts as an optimistic lock to ensure content was not
     * modified since it was last read. If it has been modified, this API throws a
     * <code>PreconditionFailedException</code>.
     *
     * @param revisionId
     *        The <code>revisionId</code> of the content resource to update.
     */
    public void setRevisionId(String revisionId) {
        this.revisionId = revisionId;
    }

    /**
     * Returns the <code>revisionId</code> of the content resource to update.
     *
     * @return The <code>revisionId</code> of the content resource to update.
     */
    public String getRevisionId() {
        return this.revisionId;
    }

    /**
     * Fluent variant of {@link #setRevisionId(String)}.
     *
     * @param revisionId
     *        The <code>revisionId</code> of the content resource to update.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateContentRequest withRevisionId(String revisionId) {
        setRevisionId(revisionId);
        return this;
    }

    /**
     * Sets the title of the content.
     *
     * @param title
     *        The title of the content.
     */
    public void setTitle(String title) {
        this.title = title;
    }

    /**
     * Returns the title of the content.
     *
     * @return The title of the content.
     */
    public String getTitle() {
        return this.title;
    }

    /**
     * Fluent variant of {@link #setTitle(String)}.
     *
     * @param title
     *        The title of the content.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateContentRequest withTitle(String title) {
        setTitle(title);
        return this;
    }

    /**
     * Sets the pointer to the uploaded asset. This value is returned by <a
     * href="https://docs.aws.amazon.com/wisdom/latest/APIReference/API_StartContentUpload.html">StartContentUpload</a>.
     *
     * @param uploadId
     *        A pointer to the uploaded asset.
     */
    public void setUploadId(String uploadId) {
        this.uploadId = uploadId;
    }

    /**
     * Returns the pointer to the uploaded asset.
     *
     * @return A pointer to the uploaded asset.
     */
    public String getUploadId() {
        return this.uploadId;
    }

    /**
     * Fluent variant of {@link #setUploadId(String)}.
     *
     * @param uploadId
     *        A pointer to the uploaded asset.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateContentRequest withUploadId(String uploadId) {
        setUploadId(uploadId);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getContentId() != null)
            sb.append("ContentId: ").append(getContentId()).append(",");
        if (getKnowledgeBaseId() != null)
            sb.append("KnowledgeBaseId: ").append(getKnowledgeBaseId()).append(",");
        if (getMetadata() != null)
            sb.append("Metadata: ").append(getMetadata()).append(",");
        if (getOverrideLinkOutUri() != null)
            sb.append("OverrideLinkOutUri: ").append(getOverrideLinkOutUri()).append(",");
        if (getRemoveOverrideLinkOutUri() != null)
            sb.append("RemoveOverrideLinkOutUri: ").append(getRemoveOverrideLinkOutUri()).append(",");
        if (getRevisionId() != null)
            sb.append("RevisionId: ").append(getRevisionId()).append(",");
        if (getTitle() != null)
            sb.append("Title: ").append(getTitle()).append(",");
        if (getUploadId() != null)
            sb.append("UploadId: ").append(getUploadId());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof UpdateContentRequest)) {
            return false;
        }
        UpdateContentRequest other = (UpdateContentRequest) obj;
        // Objects.equals handles the null/null and null/non-null cases the original
        // XOR-plus-equals chains expressed by hand.
        return Objects.equals(other.getContentId(), this.getContentId())
                && Objects.equals(other.getKnowledgeBaseId(), this.getKnowledgeBaseId())
                && Objects.equals(other.getMetadata(), this.getMetadata())
                && Objects.equals(other.getOverrideLinkOutUri(), this.getOverrideLinkOutUri())
                && Objects.equals(other.getRemoveOverrideLinkOutUri(), this.getRemoveOverrideLinkOutUri())
                && Objects.equals(other.getRevisionId(), this.getRevisionId())
                && Objects.equals(other.getTitle(), this.getTitle())
                && Objects.equals(other.getUploadId(), this.getUploadId());
    }

    @Override
    public int hashCode() {
        // Objects.hash computes the same 31-based chain (seeded at 1, null -> 0) as the
        // previous hand-rolled implementation, so hash values are unchanged.
        return Objects.hash(getContentId(), getKnowledgeBaseId(), getMetadata(), getOverrideLinkOutUri(),
                getRemoveOverrideLinkOutUri(), getRevisionId(), getTitle(), getUploadId());
    }

    @Override
    public UpdateContentRequest clone() {
        return (UpdateContentRequest) super.clone();
    }

}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.rules.ActionGraphBuilder;
import com.facebook.buck.core.rules.BuildRule;
import com.facebook.buck.core.rules.SourcePathRuleFinder;
import com.facebook.buck.core.rules.impl.SymlinkTree;
import com.facebook.buck.core.sourcepath.SourcePath;
import com.facebook.buck.core.util.graph.AbstractBreadthFirstTraversal;
import com.facebook.buck.cxx.toolchain.CxxPlatform;
import com.facebook.buck.cxx.toolchain.HeaderMode;
import com.facebook.buck.cxx.toolchain.HeaderSymlinkTree;
import com.facebook.buck.cxx.toolchain.HeaderSymlinkTreeWithModuleMap;
import com.facebook.buck.cxx.toolchain.HeaderVisibility;
import com.facebook.buck.cxx.toolchain.Preprocessor;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.rules.args.StringArg;
import com.facebook.buck.rules.coercer.FrameworkPath;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multimaps;
import java.nio.file.Path;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.function.Predicate;
/** Static helpers for wiring C/C++ preprocessor inputs and header symlink trees. */
public class CxxPreprocessables {

  // Utility class; never instantiated.
  private CxxPreprocessables() {}

  /** How a header search root is passed to the preprocessor. */
  public enum IncludeType {

    /** Headers should be included with `-I`. */
    LOCAL {
      @Override
      public Iterable<String> includeArgs(Preprocessor pp, Iterable<String> includeRoots) {
        return pp.localIncludeArgs(includeRoots);
      }
    },

    /** Headers should be included with `-isystem`. */
    SYSTEM {
      @Override
      public Iterable<String> includeArgs(Preprocessor pp, Iterable<String> includeRoots) {
        return pp.systemIncludeArgs(includeRoots);
      }
    },

    /** Headers are not added by buck. */
    RAW {
      @Override
      public Iterable<String> includeArgs(Preprocessor pp, Iterable<String> includeRoots) {
        return ImmutableList.of();
      }
    },
    ;

    public abstract Iterable<String> includeArgs(Preprocessor pp, Iterable<String> includeRoots);
  }

  /**
   * Resolve the map of name to {@link SourcePath} to a map of full header name to {@link
   * SourcePath}, by prepending the base path specified by the build target to each name.
   */
  public static ImmutableMap<Path, SourcePath> resolveHeaderMap(
      Path basePath, ImmutableMap<String, SourcePath> headers) {
    ImmutableMap.Builder<Path, SourcePath> resolved = ImmutableMap.builder();
    headers.forEach((name, sourcePath) -> resolved.put(basePath.resolve(name), sourcePath));
    return resolved.build();
  }

  /**
   * Find and return the {@link CxxPreprocessorInput} objects from {@link CxxPreprocessorDep} found
   * while traversing the dependencies starting from the {@link BuildRule} objects given.
   *
   * @param traverse predicate deciding whether to keep descending through a rule that is not
   *     itself a {@link CxxPreprocessorDep}
   */
  public static Collection<CxxPreprocessorInput> getTransitiveCxxPreprocessorInput(
      CxxPlatform cxxPlatform,
      ActionGraphBuilder graphBuilder,
      Iterable<? extends BuildRule> inputs,
      Predicate<Object> traverse) {
    // Order doesn't matter for correctness (headers shouldn't conflict), but the result
    // must be deterministic, so collect into an insertion-ordered map keyed by target.
    Map<BuildTarget, CxxPreprocessorInput> inputsByTarget = new LinkedHashMap<>();
    new AbstractBreadthFirstTraversal<BuildRule>(inputs) {
      @Override
      public Iterable<BuildRule> visit(BuildRule rule) {
        if (!(rule instanceof CxxPreprocessorDep)) {
          // Not preprocessable itself; descend only if the caller's predicate allows it.
          return traverse.test(rule) ? rule.getBuildDeps() : ImmutableSet.of();
        }
        // A preprocessable dep already knows its transitive inputs; take them and stop here.
        inputsByTarget.putAll(
            ((CxxPreprocessorDep) rule)
                .getTransitiveCxxPreprocessorInput(cxxPlatform, graphBuilder));
        return ImmutableSet.of();
      }
    }.start();
    return inputsByTarget.values();
  }

  /** Overload that never traverses through rules that aren't preprocessor deps. */
  public static Collection<CxxPreprocessorInput> getTransitiveCxxPreprocessorInput(
      CxxPlatform cxxPlatform,
      ActionGraphBuilder graphBuilder,
      Iterable<? extends BuildRule> inputs) {
    return getTransitiveCxxPreprocessorInput(cxxPlatform, graphBuilder, inputs, ignored -> false);
  }

  /**
   * Build the {@link HeaderSymlinkTree} rule using the original build params from a target node. In
   * particular, make sure to drop all dependencies from the original build rule params, as these
   * are modeled via {@link CxxPreprocessAndCompile}.
   */
  public static HeaderSymlinkTree createHeaderSymlinkTreeBuildRule(
      BuildTarget target,
      ProjectFilesystem filesystem,
      SourcePathRuleFinder ruleFinder,
      Path root,
      ImmutableMap<Path, SourcePath> links,
      HeaderMode headerMode) {
    switch (headerMode) {
      case SYMLINK_TREE_WITH_HEADER_MAP:
        return HeaderSymlinkTreeWithHeaderMap.create(target, filesystem, root, links, ruleFinder);
      case SYMLINK_TREE_WITH_MODULEMAP:
        return HeaderSymlinkTreeWithModuleMap.create(target, filesystem, root, links, ruleFinder);
      case HEADER_MAP_ONLY:
        return new DirectHeaderMap(target, filesystem, root, links, ruleFinder);
      default:
        // Fall back to a plain symlink tree for any unrecognized mode.
      case SYMLINK_TREE_ONLY:
        return new HeaderSymlinkTree(target, filesystem, root, links, ruleFinder);
    }
  }

  /**
   * Adds the header {@link SymlinkTree} for the given rule to the {@link CxxPreprocessorInput}
   * builder.
   *
   * @return the same builder, for chaining
   */
  public static CxxPreprocessorInput.Builder addHeaderSymlinkTree(
      CxxPreprocessorInput.Builder builder,
      BuildTarget target,
      ActionGraphBuilder graphBuilder,
      CxxPlatform platform,
      HeaderVisibility headerVisibility,
      IncludeType includeType) {
    BuildTarget treeTarget =
        target.withAppendedFlavors(
            platform.getFlavor(),
            CxxDescriptionEnhancer.getHeaderSymlinkTreeFlavor(headerVisibility));
    BuildRule treeRule = graphBuilder.requireRule(treeTarget);
    Preconditions.checkState(
        treeRule instanceof HeaderSymlinkTree,
        "Attempt to add %s of type %s and class %s to %s",
        treeRule.getFullyQualifiedName(),
        treeRule.getType(),
        treeRule.getClass().getName(),
        target);
    builder.addIncludes(CxxSymlinkTreeHeaders.from((HeaderSymlinkTree) treeRule, includeType));
    return builder;
  }

  /** Builds a {@link CxxPreprocessorInput} for a rule. */
  public static CxxPreprocessorInput getCxxPreprocessorInput(
      BuildTarget buildTarget,
      ActionGraphBuilder graphBuilder,
      boolean hasHeaderSymlinkTree,
      CxxPlatform platform,
      HeaderVisibility headerVisibility,
      IncludeType includeType,
      Multimap<CxxSource.Type, String> exportedPreprocessorFlags,
      Iterable<FrameworkPath> frameworks) {
    CxxPreprocessorInput.Builder builder = CxxPreprocessorInput.builder();
    if (hasHeaderSymlinkTree) {
      addHeaderSymlinkTree(
          builder, buildTarget, graphBuilder, platform, headerVisibility, includeType);
    }
    builder.putAllPreprocessorFlags(
        ImmutableListMultimap.copyOf(
            Multimaps.transformValues(exportedPreprocessorFlags, StringArg::of)));
    builder.addAllFrameworks(frameworks);
    return builder.build();
  }
}
| |
// Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skyframe.actiongraph.v2;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.actions.ActionAnalysisMetadata;
import com.google.devtools.build.lib.actions.ActionExecutionMetadata;
import com.google.devtools.build.lib.actions.ActionKeyContext;
import com.google.devtools.build.lib.actions.ActionOwner;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.CommandAction;
import com.google.devtools.build.lib.actions.CommandLineExpansionException;
import com.google.devtools.build.lib.analysis.AnalysisProtosV2;
import com.google.devtools.build.lib.analysis.AspectValue;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.actions.ParameterFileWriteAction;
import com.google.devtools.build.lib.analysis.actions.SpawnAction;
import com.google.devtools.build.lib.analysis.configuredtargets.RuleConfiguredTarget;
import com.google.devtools.build.lib.buildeventstream.BuildEvent;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.packages.AspectDescriptor;
import com.google.devtools.build.lib.query2.aquery.AqueryActionFilter;
import com.google.devtools.build.lib.query2.aquery.AqueryUtils;
import com.google.devtools.build.lib.skyframe.ConfiguredTargetValue;
import com.google.devtools.build.lib.util.Pair;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Encapsulates necessary functionality to dump the current skyframe state of the action graph to
* proto format.
*/
public class ActionGraphDump {
  // Context used to compute stable action keys for ActionExecutionMetadata instances.
  private final ActionKeyContext actionKeyContext = new ActionKeyContext();
  // Labels to include in the dump; the single element "..." means "all targets"
  // (see includeInActionGraph()).
  private final Set<String> actionGraphTargets;
  // De-duplicating caches: each assigns an id to a repeated entity (rule class string,
  // artifact, configuration, nested set, aspect descriptor, target) and streams the
  // corresponding proto to the output handler the first time the entity is seen.
  private final KnownRuleClassStrings knownRuleClassStrings;
  private final KnownArtifacts knownArtifacts;
  private final KnownConfigurations knownConfigurations;
  private final KnownNestedSets knownNestedSets;
  private final KnownAspectDescriptors knownAspectDescriptors;
  private final KnownTargets knownTargets;
  // Filters applied to every action before it is dumped.
  private final AqueryActionFilter actionFilters;
  private final boolean includeActionCmdLine;
  private final boolean includeArtifacts;
  private final boolean includeParamFiles;
  // Sink receiving the generated protos.
  private final AqueryOutputHandler aqueryOutputHandler;
  // Lazily initialized map from a param file's exec path to its argument lines; only
  // populated when includeParamFiles is set. Access via getParamFileNameToContentMap().
  private Map<String, Iterable<String>> paramFileNameToContentMap;

  /** Creates a dump that covers all targets (the "..." wildcard). */
  public ActionGraphDump(
      boolean includeActionCmdLine,
      boolean includeArtifacts,
      AqueryActionFilter actionFilters,
      boolean includeParamFiles,
      AqueryOutputHandler aqueryOutputHandler) {
    this(
        /* actionGraphTargets= */ ImmutableList.of("..."),
        includeActionCmdLine,
        includeArtifacts,
        actionFilters,
        includeParamFiles,
        aqueryOutputHandler);
  }

  /**
   * Creates a dump restricted to the given target labels.
   *
   * @param actionGraphTargets labels of the targets whose actions should be dumped; a single
   *     "..." element selects all targets
   * @param includeActionCmdLine whether to include command-line arguments of CommandActions
   * @param includeArtifacts whether to include input/output artifact ids
   * @param actionFilters aquery filters applied to each action
   * @param includeParamFiles whether to include the content of param files
   * @param aqueryOutputHandler sink for the generated protos
   */
  public ActionGraphDump(
      List<String> actionGraphTargets,
      boolean includeActionCmdLine,
      boolean includeArtifacts,
      AqueryActionFilter actionFilters,
      boolean includeParamFiles,
      AqueryOutputHandler aqueryOutputHandler) {
    this.actionGraphTargets = ImmutableSet.copyOf(actionGraphTargets);
    this.includeActionCmdLine = includeActionCmdLine;
    this.includeArtifacts = includeArtifacts;
    this.actionFilters = actionFilters;
    this.includeParamFiles = includeParamFiles;
    this.aqueryOutputHandler = aqueryOutputHandler;
    knownRuleClassStrings = new KnownRuleClassStrings(aqueryOutputHandler);
    knownArtifacts = new KnownArtifacts(aqueryOutputHandler);
    knownConfigurations = new KnownConfigurations(aqueryOutputHandler);
    knownNestedSets = new KnownNestedSets(aqueryOutputHandler, knownArtifacts);
    knownAspectDescriptors = new KnownAspectDescriptors(aqueryOutputHandler);
    knownTargets = new KnownTargets(aqueryOutputHandler, knownRuleClassStrings);
  }

  public ActionKeyContext getActionKeyContext() {
    return actionKeyContext;
  }

  /** Returns true if the given label should be part of the dump. */
  private boolean includeInActionGraph(String labelString) {
    // The single element "..." is the wildcard that matches everything.
    if (actionGraphTargets.size() == 1
        && Iterables.getOnlyElement(actionGraphTargets).equals("...")) {
      return true;
    }
    return actionGraphTargets.contains(labelString);
  }

  /**
   * Streams a single action of the given configured target to the output handler, applying the
   * configured aquery filters and include flags.
   *
   * @throws CommandLineExpansionException if expanding the action's command line fails
   * @throws IOException if writing to the output handler fails
   */
  private void dumpSingleAction(ConfiguredTarget configuredTarget, ActionAnalysisMetadata action)
      throws CommandLineExpansionException, IOException {
    // Store the content of param files.
    // NOTE: done before the filter check, so a param file written by a filtered-out action is
    // still available when a later action references it as an input.
    if (includeParamFiles && (action instanceof ParameterFileWriteAction)) {
      ParameterFileWriteAction parameterFileWriteAction = (ParameterFileWriteAction) action;
      Iterable<String> fileContent = parameterFileWriteAction.getArguments();
      String paramFileExecPath = action.getPrimaryOutput().getExecPathString();
      getParamFileNameToContentMap().put(paramFileExecPath, fileContent);
    }
    if (!AqueryUtils.matchesAqueryFilters(action, actionFilters)) {
      return;
    }
    // Dereference any aliases that might be present.
    configuredTarget = configuredTarget.getActual();
    Preconditions.checkState(configuredTarget instanceof RuleConfiguredTarget);
    // (label, rule class) pair identifying the owning target.
    Pair<String, String> targetIdentifier =
        new Pair<>(
            configuredTarget.getLabel().toString(),
            ((RuleConfiguredTarget) configuredTarget).getRuleClassString());
    AnalysisProtosV2.Action.Builder actionBuilder =
        AnalysisProtosV2.Action.newBuilder()
            .setMnemonic(action.getMnemonic())
            .setTargetId(knownTargets.dataToIdAndStreamOutputProto(targetIdentifier));
    if (action instanceof ActionExecutionMetadata) {
      ActionExecutionMetadata actionExecutionMetadata = (ActionExecutionMetadata) action;
      actionBuilder
          .setActionKey(actionExecutionMetadata.getKey(getActionKeyContext()))
          .setDiscoversInputs(actionExecutionMetadata.discoversInputs());
    }
    // store environment
    if (action instanceof SpawnAction) {
      SpawnAction spawnAction = (SpawnAction) action;
      // TODO(twerth): This handles the fixed environment. We probably want to output the inherited
      // environment as well.
      Map<String, String> fixedEnvironment = spawnAction.getEnvironment().getFixedEnv().toMap();
      for (Map.Entry<String, String> environmentVariable : fixedEnvironment.entrySet()) {
        actionBuilder.addEnvironmentVariables(
            AnalysisProtosV2.KeyValuePair.newBuilder()
                .setKey(environmentVariable.getKey())
                .setValue(environmentVariable.getValue())
                .build());
      }
    }
    if (includeActionCmdLine && action instanceof CommandAction) {
      CommandAction commandAction = (CommandAction) action;
      actionBuilder.addAllArguments(commandAction.getArguments());
    }
    // Include the content of param files in output.
    if (includeParamFiles) {
      // Assumption: if an Action takes a params file as an input, it will be used
      // to provide params to the command.
      for (Artifact input : action.getInputs().toList()) {
        String inputFileExecPath = input.getExecPathString();
        if (getParamFileNameToContentMap().containsKey(inputFileExecPath)) {
          AnalysisProtosV2.ParamFile paramFile =
              AnalysisProtosV2.ParamFile.newBuilder()
                  .setExecPath(inputFileExecPath)
                  .addAllArguments(getParamFileNameToContentMap().get(inputFileExecPath))
                  .build();
          actionBuilder.addParamFiles(paramFile);
        }
      }
    }
    Map<String, String> executionInfo = action.getExecutionInfo();
    if (executionInfo != null) {
      for (Map.Entry<String, String> info : executionInfo.entrySet()) {
        actionBuilder.addExecutionInfo(
            AnalysisProtosV2.KeyValuePair.newBuilder()
                .setKey(info.getKey())
                .setValue(info.getValue()));
      }
    }
    ActionOwner actionOwner = action.getOwner();
    if (actionOwner != null) {
      BuildEvent event = actionOwner.getConfiguration();
      actionBuilder.setConfigurationId(knownConfigurations.dataToIdAndStreamOutputProto(event));
      // Store aspects.
      // Iterate through the aspect path and dump the aspect descriptors.
      // In the case of aspect-on-aspect, AspectDescriptors are listed in topological order
      // of the configured target graph.
      // e.g. [A, B] would imply that aspect A is applied on top of aspect B.
      for (AspectDescriptor aspectDescriptor : actionOwner.getAspectDescriptors().reverse()) {
        actionBuilder.addAspectDescriptorIds(
            knownAspectDescriptors.dataToIdAndStreamOutputProto(aspectDescriptor));
      }
    }
    if (includeArtifacts) {
      // Store inputs
      NestedSet<Artifact> inputs = action.getInputs();
      if (!inputs.isEmpty()) {
        actionBuilder.addInputDepSetIds(knownNestedSets.dataToIdAndStreamOutputProto(inputs));
      }
      // store outputs
      for (Artifact artifact : action.getOutputs()) {
        actionBuilder.addOutputIds(knownArtifacts.dataToIdAndStreamOutputProto(artifact));
      }
      actionBuilder.setPrimaryOutputId(
          knownArtifacts.dataToIdAndStreamOutputProto(action.getPrimaryOutput()));
    }
    aqueryOutputHandler.outputAction(actionBuilder.build());
  }

  /** Dumps all actions produced by the given aspect, if its target is part of the dump. */
  public void dumpAspect(AspectValue aspectValue, ConfiguredTargetValue configuredTargetValue)
      throws CommandLineExpansionException, IOException {
    ConfiguredTarget configuredTarget = configuredTargetValue.getConfiguredTarget();
    if (!includeInActionGraph(configuredTarget.getLabel().toString())) {
      return;
    }
    for (ActionAnalysisMetadata action : aspectValue.getActions()) {
      dumpSingleAction(configuredTarget, action);
    }
  }

  /** Dumps all actions of the given configured target, if it is part of the dump. */
  public void dumpConfiguredTarget(ConfiguredTargetValue configuredTargetValue)
      throws CommandLineExpansionException, IOException {
    ConfiguredTarget configuredTarget = configuredTargetValue.getConfiguredTarget();
    if (!includeInActionGraph(configuredTarget.getLabel().toString())) {
      return;
    }
    for (ActionAnalysisMetadata action : configuredTargetValue.getActions()) {
      dumpSingleAction(configuredTarget, action);
    }
  }

  /** Lazy initialization of paramFileNameToContentMap. */
  private Map<String, Iterable<String>> getParamFileNameToContentMap() {
    if (paramFileNameToContentMap == null) {
      paramFileNameToContentMap = new HashMap<>();
    }
    return paramFileNameToContentMap;
  }
}
| |
/*
* Copyright 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.integtests.fixtures.executer;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import org.fusesource.jansi.AnsiOutputStream;
import org.gradle.api.Action;
import org.gradle.api.UncheckedIOException;
import org.gradle.internal.Pair;
import javax.annotation.Nullable;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
public class LogContent {
    // Matches the "HH:mm:ss.SSS [LEVEL] [category] " prefix of debug-level log lines.
    private final static Pattern DEBUG_PREFIX = Pattern.compile("\\d{2}:\\d{2}:\\d{2}\\.\\d{3} \\[\\w+] \\[.+?] ");
    private final static String PROGRESS_BAR_PATTERN = "<[-=(\u001b\\[\\d+[a-zA-Z;])]*> \\d+% (INITIALIZ|CONFIGUR|EXECUT|WAIT)ING( \\[((\\d+h )? \\d+m )?\\d+s\\])?";
    private final static String WORK_IN_PROGRESS_PATTERN = "\u001b\\[\\d+[a-zA-Z]> (IDLE|[:a-z][\\w\\s\\d:>/\\\\\\.]+)\u001b\\[\\d*[a-zA-Z]";
    private final static String DOWN_MOVEMENT_WITH_NEW_LINE_PATTERN = "\u001b\\[\\d+B\\n";
    // Union of the console "work in progress" rendering patterns above.
    private final static Pattern WORK_IN_PROGRESS_AREA_PATTERN = Pattern.compile(PROGRESS_BAR_PATTERN + "|" + WORK_IN_PROGRESS_PATTERN + "|" + DOWN_MOVEMENT_WITH_NEW_LINE_PATTERN);
    // Matches the multi-line JDK "illegal reflective access" warning block.
    private final static Pattern JAVA_ILLEGAL_ACCESS_WARNING_PATTERN = Pattern.compile("(?ms)WARNING: An illegal reflective access operation has occurred$.+?"
        + "^WARNING: All illegal access operations will be denied in a future release\r?\n");
    // Content lines, without line separators.
    private final ImmutableList<String> lines;
    // When true, removeDebugPrefix() can short-circuit.
    private final boolean definitelyNoDebugPrefix;
    // The content this instance was derived from; equal to `this` for un-transformed content.
    private final LogContent rawContent;

    /**
     * @param rawContent the original content this was derived from, or null if this IS the
     *     original (in which case rawContent becomes a self-reference)
     */
    private LogContent(ImmutableList<String> lines, boolean definitelyNoDebugPrefix, LogContent rawContent) {
        this.lines = lines;
        this.rawContent = rawContent == null ? this : rawContent;
        this.definitelyNoDebugPrefix = definitelyNoDebugPrefix || lines.isEmpty();
    }

    /**
     * Creates a new instance, from raw characters.
     */
    public static LogContent of(String chars) {
        // The "raw" view keeps the work-in-progress area stripped but retains the JDK
        // illegal-access warnings; the primary view strips both.
        String stripped = stripWorkInProgressArea(chars);
        LogContent raw = new LogContent(toLines(stripped), false, null);
        return new LogContent(toLines(stripJavaIllegalAccessWarnings(stripped)), false, raw);
    }

    // Splits raw characters into lines, handling both \n and \r\n separators.
    private static ImmutableList<String> toLines(String chars) {
        List<String> lines = new ArrayList<String>();
        int pos = 0;
        while (pos < chars.length()) {
            int next = chars.indexOf('\n', pos);
            if (next < 0) {
                // Last line has no trailing separator.
                lines.add(chars.substring(pos));
                pos = chars.length();
                continue;
            }
            if (next > pos && chars.charAt(next - 1) == '\r') {
                // Windows line ending: drop the \r as well.
                lines.add(chars.substring(pos, next - 1));
                pos = next + 1;
            } else {
                lines.add(chars.substring(pos, next));
                pos = next + 1;
            }
            if (pos == chars.length()) {
                // trailing EOL
                lines.add("");
            }
        }
        return ImmutableList.copyOf(lines);
    }

    /**
     * Creates a new instance from a sequence of lines (without the line separators).
     */
    public static LogContent of(List<String> lines) {
        return new LogContent(ImmutableList.copyOf(lines), false, null);
    }

    public static LogContent empty() {
        return new LogContent(ImmutableList.<String>of(), true, null);
    }

    /**
     * Returns the original content that this content was built from, after transforms such as {@link #removeDebugPrefix()} or {@link #splitOnFirstMatchingLine(Pattern)}.
     */
    public LogContent getRawContent() {
        return rawContent;
    }

    /**
     * Does not return the text of this content.
     */
    @Override
    public String toString() {
        // Intentionally not the text
        return lines.toString();
    }

    /**
     * Returns this content formatted using a new line char to separate lines.
     */
    public String withNormalizedEol() {
        if (lines.isEmpty()) {
            return "";
        }
        return Joiner.on('\n').join(lines);
    }

    /**
     * Returns this content separated into lines. The line does not include the line separator.
     */
    public ImmutableList<String> getLines() {
        return lines;
    }

    // Returns the [startLine, endLine) sub-range of this content. Only supported on raw
    // (un-transformed) content, where line indices match the raw lines one-to-one.
    private LogContent lines(int startLine, int endLine) {
        if (rawContent != this) {
            throw new UnsupportedOperationException("not implemented");
        }
        return new LogContent(lines.subList(startLine, endLine), definitelyNoDebugPrefix, null);
    }

    /**
     * Visits each line in this content. The line does not include the line separator.
     */
    public void eachLine(Action<? super String> action) {
        for (String line : lines) {
            action.execute(line);
        }
    }

    /**
     * Locates the log content starting with the first line that matches the given pattern, or null if no such line.
     *
     * @return a pair containing (content-before-matching-line, content-from-matching-line)
     */
    public @Nullable
    Pair<LogContent, LogContent> splitOnFirstMatchingLine(Pattern pattern) {
        for (int i = 0; i < lines.size(); i++) {
            String line = lines.get(i);
            if (pattern.matcher(line).matches()) {
                // NOTE(review): assumes this content and rawContent have the same line count so
                // index i is valid in both — holds for removeDebugPrefix() but not necessarily
                // for every transform; confirm before adding new transforms.
                LogContent before = new LogContent(lines.subList(0, i), definitelyNoDebugPrefix, rawContent.lines(0, i));
                LogContent after = new LogContent(lines.subList(i, lines.size()), definitelyNoDebugPrefix, rawContent.lines(i, lines.size()));
                return Pair.of(before, after);
            }
        }
        return null;
    }

    /**
     * Returns the number of lines that match the given pattern.
     */
    public int countMatches(Pattern pattern) {
        int count = 0;
        for (String line : lines) {
            if (pattern.matcher(line).matches()) {
                count++;
            }
        }
        return count;
    }

    /**
     * Drops the first n lines.
     */
    public LogContent drop(int i) {
        return new LogContent(lines.subList(i, lines.size()), definitelyNoDebugPrefix, rawContent.lines(i, lines.size()));
    }

    /**
     * Returns a copy of this log content with the debug prefix removed.
     */
    public LogContent removeDebugPrefix() {
        if (definitelyNoDebugPrefix) {
            return this;
        }
        List<String> result = new ArrayList<String>(lines.size());
        for (String line : lines) {
            java.util.regex.Matcher matcher = DEBUG_PREFIX.matcher(line);
            if (matcher.lookingAt()) {
                // Keep only the part after the timestamp/level/category prefix.
                result.add(line.substring(matcher.end()));
            } else {
                result.add(line);
            }
        }
        return new LogContent(ImmutableList.copyOf(result), true, rawContent);
    }

    /**
     * Returns a copy of this log content with ANSI control characters removed.
     */
    public LogContent removeAnsiChars() {
        if (lines.isEmpty()) {
            return this;
        }
        try {
            // AnsiOutputStream filters out ANSI escape sequences as the text passes through.
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            Writer writer = new OutputStreamWriter(new AnsiOutputStream(baos));
            for (int i = 0; i < lines.size(); i++) {
                if (i > 0) {
                    writer.write("\n");
                }
                writer.write(lines.get(i));
            }
            writer.flush();
            return new LogContent(toLines(baos.toString()), definitelyNoDebugPrefix, rawContent);
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }

    /**
     * Remove all empty lines.
     */
    public LogContent removeEmptyLines() {
        List<String> nonEmptyLines = new ArrayList<String>();
        for (String line : lines) {
            if (!line.isEmpty()) {
                nonEmptyLines.add(line);
            }
        }
        return new LogContent(ImmutableList.copyOf(nonEmptyLines), definitelyNoDebugPrefix, rawContent);
    }

    // Removes console work-in-progress rendering (progress bars, cursor movement) from raw output.
    public static String stripWorkInProgressArea(String output) {
        String result = output;
        // Handle scroll sequences of up to 10 lines before applying the general pattern.
        for (int i = 1; i <= 10; ++i) {
            result = result.replaceAll(workInProgressAreaScrollingPattern(i), "");
        }
        return WORK_IN_PROGRESS_AREA_PATTERN.matcher(result).replaceAll("");
    }

    // Removes the JDK "illegal reflective access" warning block, if present.
    public static String stripJavaIllegalAccessWarnings(String result) {
        return JAVA_ILLEGAL_ACCESS_WARNING_PATTERN.matcher(result).replaceAll("");
    }

    // Pattern for a work-in-progress area that scrolled `scroll` lines (erase-line + cursor-up).
    private static String workInProgressAreaScrollingPattern(int scroll) {
        return "(\u001b\\[0K\\n){" + scroll + "}\u001b\\[" + scroll + "A";
    }
}
| |
// Copyright (c) 2003-present, Jodd Team (http://jodd.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package jodd.mail;
import javax.mail.Flags;
import javax.mail.Folder;
import javax.mail.Message;
import javax.mail.MessagingException;
import javax.mail.Session;
import javax.mail.Store;
/**
* Encapsulates email receiving session. Prepares and receives message(s).
* Some methods do not work on POP3 servers.
*/
public class ReceiveMailSession {
	protected static final String DEFAULT_FOLDER = "INBOX";
	// JavaMail session and message store backing this receive session.
	protected final Session session;
	protected final Store store;
	static {
		JoddMail.mailSystem.defineJavaMailSystemProperties();
	}
	/**
	 * Creates new mail session.
	 */
	public ReceiveMailSession(Session session, Store store) {
		this.session = session;
		this.store = store;
	}
	// Currently opened folder; null until useFolder()/useDefaultFolder() is called.
	protected Folder folder;
	/**
	 * Opens session.
	 *
	 * @throws MailException if connecting to the store fails
	 */
	public void open() {
		try {
			store.connect();
		} catch (MessagingException msex) {
			throw new MailException("Open session error", msex);
		}
	}
	// ---------------------------------------------------------------- folders
	/**
	 * Returns list of all folders. You can use these names in
	 * {@link #useFolder(String)} method.
	 */
	public String[] getAllFolders() {
		Folder[] folders;
		try {
			// "*" lists all folders recursively under the default folder.
			folders = store.getDefaultFolder().list( "*" );
		} catch (MessagingException msex) {
			throw new MailException("Failed to connect to folder", msex);
		}
		String[] folderNames = new String[folders.length];
		for (int i = 0; i < folders.length; i++) {
			Folder folder = folders[i];
			folderNames[i] = folder.getFullName();
		}
		return folderNames;
	}
	/**
	 * Opens new folder and closes previously opened folder.
	 * Tries READ_WRITE access first and falls back to READ_ONLY.
	 *
	 * @throws MailException if the folder cannot be resolved or opened
	 */
	public void useFolder(String folderName) {
		closeFolderIfOpened();
		try {
			folder = store.getFolder(folderName);
		} catch (MessagingException msex) {
			throw new MailException("Failed to connect to folder: " + folderName, msex);
		}
		try {
			folder.open(Folder.READ_WRITE);
		} catch (MessagingException ignore) {
			// Fall back to read-only access (e.g. when the server denies write access).
			try {
				folder.open(Folder.READ_ONLY);
			} catch (MessagingException msex) {
				throw new MailException("Failed to open folder: " + folderName, msex);
			}
		}
	}
	/**
	 * Opens default folder: INBOX.
	 */
	public void useDefaultFolder() {
		closeFolderIfOpened();
		useFolder(DEFAULT_FOLDER);
	}
	// ---------------------------------------------------------------- message count
	/**
	 * Returns number of messages.
	 */
	public int getMessageCount() {
		if (folder == null) {
			useDefaultFolder();
		}
		try {
			return folder.getMessageCount();
		} catch (MessagingException mex) {
			throw new MailException(mex);
		}
	}
	/**
	 * Returns the number of new messages.
	 */
	public int getNewMessageCount() {
		if (folder == null) {
			useDefaultFolder();
		}
		try {
			return folder.getNewMessageCount();
		} catch (MessagingException mex) {
			throw new MailException(mex);
		}
	}
	/**
	 * Returns the number of unread messages.
	 */
	public int getUnreadMessageCount() {
		if (folder == null) {
			useDefaultFolder();
		}
		try {
			return folder.getUnreadMessageCount();
		} catch (MessagingException mex) {
			throw new MailException(mex);
		}
	}
	/**
	 * Returns the number of deleted messages.
	 */
	public int getDeletedMessageCount() {
		if (folder == null) {
			useDefaultFolder();
		}
		try {
			return folder.getDeletedMessageCount();
		} catch (MessagingException mex) {
			throw new MailException(mex);
		}
	}
	// ---------------------------------------------------------------- receive emails
	/**
	 * Receives all emails. Messages are not modified. However, some servers
	 * may set the SEEN flag anyway, so we force messages to remain
	 * unseen.
	 */
	public ReceivedEmail[] receiveEmail() {
		return receive(null, null);
	}
	/**
	 * Receives all emails that matches given {@link EmailFilter filter}.
	 * Messages are not modified. However, some servers may set the SEEN flag anyway,
	 * so we force messages to remain unseen.
	 */
	public ReceivedEmail[] receiveEmail(EmailFilter emailFilter) {
		return receive(emailFilter, null);
	}
	/**
	 * Receives all emails and mark all messages as 'seen' (ie 'read').
	 */
	public ReceivedEmail[] receiveEmailAndMarkSeen() {
		return receiveEmailAndMarkSeen(null);
	}
	/**
	 * Receives all emails that matches given {@link EmailFilter filter}
	 * and mark them as 'seen' (ie 'read').
	 */
	public ReceivedEmail[] receiveEmailAndMarkSeen(EmailFilter emailFilter) {
		Flags flags = new Flags();
		flags.add(Flags.Flag.SEEN);
		return receive(emailFilter, flags);
	}
	/**
	 * Receives all emails and mark all messages as 'seen' and 'deleted'.
	 */
	public ReceivedEmail[] receiveEmailAndDelete() {
		return receiveEmailAndDelete(null);
	}
	/**
	 * Receives all emails that matches given {@link EmailFilter filter} and
	 * mark all messages as 'seen' and 'deleted'.
	 */
	public ReceivedEmail[] receiveEmailAndDelete(EmailFilter emailFilter) {
		Flags flags = new Flags();
		flags.add(Flags.Flag.SEEN);
		flags.add(Flags.Flag.DELETED);
		return receive(emailFilter, flags);
	}
	/**
	 * Receives all emails that matches given {@link EmailFilter filter}
	 * and set given flags. Both filter and flags to set are optional.
	 * If flags to set is not provided, it forces 'seen' flag to be unset.
	 * Returns {@code null} when no message matches.
	 */
	public ReceivedEmail[] receive(EmailFilter filter, Flags flagsToSet) {
		if (folder == null) {
			useDefaultFolder();
		}
		Message[] messages;
		// todo add FetchProfile option for just headers
		try {
			if (filter == null) {
				messages = folder.getMessages();
			} else {
				messages = folder.search(filter.getSearchTerm());
			}
			if (messages.length == 0) {
				// NOTE(review): returns null (not an empty array) when nothing matches;
				// existing callers may depend on this.
				return null;
			}
			// process messages
			ReceivedEmail[] emails = new ReceivedEmail[messages.length];
			for (int i = 0; i < messages.length; i++) {
				Message msg = messages[i];
				if (flagsToSet != null) {
					msg.setFlags(flagsToSet, true);
				}
				emails[i] = new ReceivedEmail(msg);
				// Without explicit flags, keep the message unseen even if the server
				// set SEEN as a side effect of fetching it.
				if (flagsToSet == null && !emails[i].isSeen()) {
					msg.setFlag(Flags.Flag.SEEN, false);
				}
			}
			return emails;
		} catch (MessagingException msex) {
			throw new MailException("Failed to fetch messages", msex);
		}
	}
	// ---------------------------------------------------------------- close
	/**
	 * Closes folder if opened and expunge deleted messages.
	 */
	protected void closeFolderIfOpened() {
		if (folder != null) {
			try {
				// close(true) expunges messages flagged DELETED.
				folder.close(true);
			} catch (MessagingException ignore) {
			}
		}
	}
	/**
	 * Closes session.
	 *
	 * @throws MailException if closing the store fails
	 */
	public void close() {
		closeFolderIfOpened();
		try {
			store.close();
		} catch (MessagingException mex) {
			throw new MailException(mex);
		}
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal;
import java.io.Serializable;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteException;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.compute.ComputeJob;
import org.apache.ignite.compute.ComputeJobAdapter;
import org.apache.ignite.compute.ComputeJobAfterSend;
import org.apache.ignite.compute.ComputeJobBeforeFailover;
import org.apache.ignite.compute.ComputeJobResult;
import org.apache.ignite.compute.ComputeJobResultPolicy;
import org.apache.ignite.compute.ComputeTask;
import org.apache.ignite.compute.ComputeTaskContinuousMapper;
import org.apache.ignite.compute.ComputeUserUndeclaredException;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.util.typedef.X;
import org.apache.ignite.resources.TaskContinuousMapperResource;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.apache.ignite.testframework.junits.common.GridCommonTest;
/**
* Test for various job callback annotations.
*/
@GridCommonTest(group = "Kernal Self")
public class GridContinuousJobAnnotationSelfTest extends GridCommonAbstractTest {
    /** Set to trigger one job failure so that failover (and the before-failover callback) occurs. */
    private static final AtomicBoolean fail = new AtomicBoolean();

    /** Number of times the {@code @ComputeJobAfterSend} callback was invoked. */
    private static final AtomicInteger afterSendCnt = new AtomicInteger();

    /** Number of times the {@code @ComputeJobBeforeFailover} callback was invoked. */
    private static final AtomicInteger beforeFailoverCnt = new AtomicInteger();

    /** First unexpected error observed inside a job, if any. */
    private static final AtomicReference<Exception> err = new AtomicReference<>();

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration c = super.getConfiguration(igniteInstanceName);

        // Keep local jobs unmarshalled so the same job instance receives the callbacks.
        c.setMarshalLocalJobs(false);

        return c;
    }

    /**
     * @throws Exception If test failed.
     */
    public void testJobAnnotation() throws Exception {
        testContinuousJobAnnotation(TestJob.class);
    }

    /**
     * Verifies that callback annotations declared on a superclass are honored for a subclass job.
     *
     * @throws Exception If test failed.
     */
    public void testJobChildAnnotation() throws Exception {
        testContinuousJobAnnotation(TestJobChild.class);
    }

    /**
     * Runs the task on a two-node grid and checks the callback invocation counts:
     * the job is sent twice (initial send + failover resend) and fails over once.
     *
     * @param jobCls Job class.
     * @throws Exception If test failed.
     */
    public void testContinuousJobAnnotation(Class<?> jobCls) throws Exception {
        try {
            Ignite ignite = startGrid(0);
            startGrid(1);

            fail.set(true);

            ignite.compute().execute(TestTask.class, jobCls);

            Exception e = err.get();

            if (e != null)
                throw e;
        }
        finally {
            stopGrid(0);
            stopGrid(1);
        }

        assertEquals(2, afterSendCnt.getAndSet(0));
        assertEquals(1, beforeFailoverCnt.getAndSet(0));
    }

    /** Task that maps a single job of the class passed as the task argument. */
    @SuppressWarnings({"PublicInnerClass", "unused"})
    public static class TestTask implements ComputeTask<Object, Object> {
        /** */
        @TaskContinuousMapperResource
        private ComputeTaskContinuousMapper mapper;

        /** {@inheritDoc} */
        @Override public Map<? extends ComputeJob, ClusterNode> map(List<ClusterNode> subgrid, Object arg) {
            try {
                mapper.send(((Class<ComputeJob>)arg).newInstance());
            }
            catch (Exception e) {
                // Fixed typo in the message ("instantination" -> "instantiation").
                throw new IgniteException("Job instantiation failed.", e);
            }

            return null;
        }

        /** {@inheritDoc} */
        @Override public ComputeJobResultPolicy result(ComputeJobResult res, List<ComputeJobResult> received)
            throws IgniteException {
            if (res.getException() != null) {
                if (res.getException() instanceof ComputeUserUndeclaredException)
                    throw new IgniteException("Job threw unexpected exception.", res.getException());

                // Expected test exception: fail the job over to the other node.
                return ComputeJobResultPolicy.FAILOVER;
            }

            return ComputeJobResultPolicy.WAIT;
        }

        /** {@inheritDoc} */
        @Override public Object reduce(List<ComputeJobResult> results) throws IgniteException {
            assert results.size() == 1 : "Unexpected result count: " + results.size();

            return null;
        }
    }

    /**
     * Job that records callback invocations. The {@code flag} field tracks the expected callback
     * ordering: it must be {@code true} (set by constructor or before-failover) when the job runs.
     */
    private static class TestJob extends ComputeJobAdapter {
        /** Expected to be true on execute; flipped by the callbacks below. */
        private boolean flag = true;

        /** */
        TestJob() {
            X.println("Constructing TestJob [this=" + this + ", identity=" + System.identityHashCode(this) + "]");
        }

        /** Invoked after the job is sent; clears the flag. */
        @ComputeJobAfterSend
        private void afterSend() {
            X.println("AfterSend start TestJob [this=" + this + ", identity=" + System.identityHashCode(this) +
                ", flag=" + flag + "]");

            afterSendCnt.incrementAndGet();

            flag = false;

            X.println("AfterSend end TestJob [this=" + this + ", identity=" + System.identityHashCode(this) +
                ", flag=" + flag + "]");
        }

        /** Invoked before the job fails over; restores the flag. */
        @ComputeJobBeforeFailover
        private void beforeFailover() {
            X.println("BeforeFailover start TestJob [this=" + this + ", identity=" + System.identityHashCode(this) +
                ", flag=" + flag + "]");

            beforeFailoverCnt.incrementAndGet();

            flag = true;

            X.println("BeforeFailover end TestJob [this=" + this + ", identity=" + System.identityHashCode(this) +
                ", flag=" + flag + "]");
        }

        /** {@inheritDoc} */
        @Override public Serializable execute() throws IgniteException {
            X.println("Execute TestJob [this=" + this + ", identity=" + System.identityHashCode(this) +
                ", flag=" + flag + "]");

            if (!flag) {
                String msg = "Flag is false on execute [this=" + this + ", identity=" + System.identityHashCode(this) +
                    ", flag=" + flag + "]";

                X.println(msg);

                err.compareAndSet(null, new Exception(msg));
            }

            // Fail exactly once to force a failover to the second node.
            if (fail.get()) {
                fail.set(false);

                throw new IgniteException("Expected test exception.");
            }

            return null;
        }
    }

    /**
     *
     */
    private static class TestJobChild extends TestJob {
        /**
         * Required for reflective creation.
         */
        TestJobChild() {
            // No-op.
        }
    }
}
| |
package com.netflix.simianarmy.aws.janitor.crawler.edda;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.netflix.simianarmy.Resource;
import com.netflix.simianarmy.aws.AWSResource;
import com.netflix.simianarmy.aws.AWSResourceType;
import com.netflix.simianarmy.client.edda.EddaClient;
import com.netflix.simianarmy.janitor.JanitorCrawler;
import com.netflix.simianarmy.janitor.JanitorMonkey;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.Validate;
import org.codehaus.jackson.JsonNode;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Collections;
import java.util.Date;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* The crawler to crawl AWS EBS volumes for Janitor monkey using Edda.
*/
public class EddaEBSVolumeJanitorCrawler implements JanitorCrawler {
/** The Constant LOGGER. */
private static final Logger LOGGER = LoggerFactory.getLogger(EddaEBSVolumeJanitorCrawler.class);
private static final DateTimeFormatter TIME_FORMATTER = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.S'Z'");
private static final int BATCH_SIZE = 50;
// The value below specifies how many days we want to look back in Edda to find the owner of old instances.
// If Edda keeps a large amount of history data, a query without a reasonable date range may fail.
private static final int LOOKBACK_DAYS = 90;
/**
* The field name for purpose.
*/
public static final String PURPOSE = "purpose";
/**
* The field name for deleteOnTermination.
*/
public static final String DELETE_ON_TERMINATION = "deleteOnTermination";
/**
* The field name for detach time.
*/
public static final String DETACH_TIME = "detachTime";
private final EddaClient eddaClient;
private final List<String> regions = Lists.newArrayList();
private final Map<String, String> instanceToOwner = Maps.newHashMap();
/**
* The constructor.
* @param eddaClient
* the Edda client
* @param regions
* the regions the crawler will crawl resources for
*/
public EddaEBSVolumeJanitorCrawler(EddaClient eddaClient, String... regions) {
Validate.notNull(eddaClient);
this.eddaClient = eddaClient;
Validate.notNull(regions);
for (String region : regions) {
this.regions.add(region);
updateInstanceToOwner(region);
}
LOGGER.info(String.format("Found owner for %d instances in %s", instanceToOwner.size(), this.regions));
}
private void updateInstanceToOwner(String region) {
LOGGER.info(String.format("Getting owners for all instances in region %s", region));
long startTime = DateTime.now().minusDays(LOOKBACK_DAYS).getMillis();
String url = String.format("%s/view/instances;_since=%d;state.name=running;tags.key=owner;"
+ "_expand:(instanceId,tags:(key,value))",
eddaClient.getBaseUrl(region), startTime);
JsonNode jsonNode = null;
try {
jsonNode = eddaClient.getJsonNodeFromUrl(url);
} catch (Exception e) {
LOGGER.error(String.format(
"Failed to get Jason node from edda for instance owners in region %s.", region), e);
}
if (jsonNode == null || !jsonNode.isArray()) {
throw new RuntimeException(String.format("Failed to get valid document from %s, got: %s", url, jsonNode));
}
for (Iterator<JsonNode> it = jsonNode.getElements(); it.hasNext();) {
JsonNode elem = it.next();
String instanceId = elem.get("instanceId").getTextValue();
JsonNode tags = elem.get("tags");
if (tags == null || !tags.isArray() || tags.size() == 0) {
continue;
}
for (Iterator<JsonNode> tagsIt = tags.getElements(); tagsIt.hasNext();) {
JsonNode tag = tagsIt.next();
String tagKey = tag.get("key").getTextValue();
if ("owner".equals(tagKey)) {
instanceToOwner.put(instanceId, tag.get("value").getTextValue());
break;
}
}
}
}
@Override
public EnumSet<?> resourceTypes() {
return EnumSet.of(AWSResourceType.EBS_VOLUME);
}
@Override
public List<Resource> resources(Enum resourceType) {
if ("EBS_VOLUME".equals(resourceType.name())) {
return getVolumeResources();
}
return Collections.emptyList();
}
@Override
public List<Resource> resources(String... resourceIds) {
return getVolumeResources(resourceIds);
}
@Override
public String getOwnerEmailForResource(Resource resource) {
return null;
}
private List<Resource> getVolumeResources(String... volumeIds) {
List<Resource> resources = Lists.newArrayList();
for (String region : regions) {
resources.addAll(getUnattachedVolumeResourcesInRegion(region, volumeIds));
addLastAttachmentInfo(resources);
}
return resources;
}
/**
* Gets all volumes that are not attached to any instance. Janitor Monkey only considers unattached volumes
* as cleanup candidates, so there is no need to get volumes that are in-use.
* @param region
* @return
*/
private List<Resource> getUnattachedVolumeResourcesInRegion(String region, String... volumeIds) {
String url = eddaClient.getBaseUrl(region) + "/aws/volumes;";
if (volumeIds != null && volumeIds.length != 0) {
url += StringUtils.join(volumeIds, ',');
LOGGER.info(String.format("Getting volumes in region %s for %d ids", region, volumeIds.length));
} else {
LOGGER.info(String.format("Getting all unattached volumes in region %s", region));
}
url += ";state=available;_expand:(volumeId,createTime,size,state,tags)";
JsonNode jsonNode = null;
try {
jsonNode = eddaClient.getJsonNodeFromUrl(url);
} catch (Exception e) {
LOGGER.error(String.format(
"Failed to get Jason node from edda for unattached volumes in region %s.", region), e);
}
if (jsonNode == null || !jsonNode.isArray()) {
throw new RuntimeException(String.format("Failed to get valid document from %s, got: %s", url, jsonNode));
}
List<Resource> resources = Lists.newArrayList();
for (Iterator<JsonNode> it = jsonNode.getElements(); it.hasNext();) {
resources.add(parseJsonElementToVolumeResource(region, it.next()));
}
return resources;
}
private Resource parseJsonElementToVolumeResource(String region, JsonNode jsonNode) {
Validate.notNull(jsonNode);
long createTime = jsonNode.get("createTime").asLong();
Resource resource = new AWSResource().withId(jsonNode.get("volumeId").getTextValue()).withRegion(region)
.withResourceType(AWSResourceType.EBS_VOLUME)
.withLaunchTime(new Date(createTime));
JsonNode tags = jsonNode.get("tags");
StringBuilder description = new StringBuilder();
JsonNode size = jsonNode.get("size");
description.append(String.format("size=%s", size == null ? "unknown" : size.getIntValue()));
if (tags == null || !tags.isArray() || tags.size() == 0) {
LOGGER.debug(String.format("No tags is found for %s", resource.getId()));
} else {
for (Iterator<JsonNode> it = tags.getElements(); it.hasNext();) {
JsonNode tag = it.next();
String key = tag.get("key").getTextValue();
String value = tag.get("value").getTextValue();
description.append(String.format("; %s=%s", key, value));
resource.setTag(key, value);
if (key.equals(PURPOSE)) {
resource.setAdditionalField(PURPOSE, value);
}
}
resource.setDescription(description.toString());
}
((AWSResource) resource).setAWSResourceState(jsonNode.get("state").getTextValue());
return resource;
}
/**
* Adds information of last attachment to the resources. To be compatible with the AWS implementation of
* the same crawler, add the information to the JANITOR_META tag. It always uses the latest information
* to update the tag in this resource (not writing back to AWS) no matter if the tag exists.
* @param resources the volume resources
*/
private void addLastAttachmentInfo(List<Resource> resources) {
Validate.notNull(resources);
LOGGER.info(String.format("Updating the latest attachment info for %d resources", resources.size()));
Map<String, List<Resource>> regionToResources = Maps.newHashMap();
for (Resource resource : resources) {
List<Resource> regionalList = regionToResources.get(resource.getRegion());
if (regionalList == null) {
regionalList = Lists.newArrayList();
regionToResources.put(resource.getRegion(), regionalList);
}
regionalList.add(resource);
}
for (Map.Entry<String, List<Resource>> entry : regionToResources.entrySet()) {
LOGGER.info(String.format("Updating the latest attachment info for %d resources in region %s",
resources.size(), entry.getKey()));
for (List<Resource> batch : Lists.partition(entry.getValue(), BATCH_SIZE)) {
LOGGER.info(String.format("Processing batch of size %d", batch.size()));
String batchUrl = getBatchUrl(entry.getKey(), batch);
JsonNode batchResult = null;
try {
batchResult = eddaClient.getJsonNodeFromUrl(batchUrl);
} catch (IOException e) {
LOGGER.error("Failed to get response for the batch.", e);
}
Map<String, Resource> idToResource = Maps.newHashMap();
for (Resource resource : batch) {
idToResource.put(resource.getId(), resource);
}
if (batchResult == null || !batchResult.isArray()) {
throw new RuntimeException(String.format("Failed to get valid document from %s, got: %s",
batchUrl, batchResult));
}
Set<String> processedIds = Sets.newHashSet();
for (Iterator<JsonNode> it = batchResult.getElements(); it.hasNext();) {
JsonNode elem = it.next();
JsonNode data = elem.get("data");
String volumeId = data.get("volumeId").getTextValue();
Resource resource = idToResource.get(volumeId);
JsonNode attachments = data.get("attachments");
Validate.isTrue(attachments.isArray() && attachments.size() > 0);
JsonNode attachment = attachments.get(0);
JsonNode ltime = elem.get("ltime");
if (ltime == null || ltime.isNull()) {
continue;
}
DateTime detachTime = new DateTime(ltime.asLong());
processedIds.add(volumeId);
setAttachmentInfo(volumeId, attachment, detachTime, resource);
}
for (Map.Entry<String, Resource> volumeEntry : idToResource.entrySet()) {
String id = volumeEntry.getKey();
if (!processedIds.contains(id)) {
Resource resource = volumeEntry.getValue();
LOGGER.info(String.format("Volume %s never was attached, use createTime %s as the detachTime",
id, resource.getLaunchTime()));
setAttachmentInfo(id, null, new DateTime(resource.getLaunchTime().getTime()), resource);
}
}
}
}
}
private void setAttachmentInfo(String volumeId, JsonNode attachment, DateTime detachTime, Resource resource) {
String instanceId = null;
if (attachment != null) {
boolean deleteOnTermination = attachment.get(DELETE_ON_TERMINATION).getBooleanValue();
if (deleteOnTermination) {
LOGGER.info(String.format(
"Volume %s had set the deleteOnTermination flag as true", volumeId));
}
resource.setAdditionalField(DELETE_ON_TERMINATION, String.valueOf(deleteOnTermination));
instanceId = attachment.get("instanceId").getTextValue();
}
// The subclass can customize the way to get the owner for a volume
String owner = getOwnerEmailForResource(resource);
if (owner == null && instanceId != null) {
owner = instanceToOwner.get(instanceId);
}
resource.setOwnerEmail(owner);
String metaTag = makeMetaTag(instanceId, owner, detachTime);
LOGGER.info(String.format("Setting Janitor Metatag as %s for volume %s", metaTag, volumeId));
resource.setTag(JanitorMonkey.JANITOR_META_TAG, metaTag);
LOGGER.info(String.format("The last detach time of volume %s is %s", volumeId, detachTime));
resource.setAdditionalField(DETACH_TIME, String.valueOf(detachTime.getMillis()));
}
private String makeMetaTag(String instance, String owner, DateTime lastDetachTime) {
StringBuilder meta = new StringBuilder();
meta.append(String.format("%s=%s;",
JanitorMonkey.INSTANCE_TAG_KEY, instance == null ? "" : instance));
meta.append(String.format("%s=%s;", JanitorMonkey.OWNER_TAG_KEY, owner == null ? "" : owner));
meta.append(String.format("%s=%s", JanitorMonkey.DETACH_TIME_TAG_KEY,
lastDetachTime == null ? "" : AWSResource.DATE_FORMATTER.print(lastDetachTime)));
return meta.toString();
}
private String getBatchUrl(String region, List<Resource> batch) {
StringBuilder batchUrl = new StringBuilder(eddaClient.getBaseUrl(region) + "/aws/volumes/");
boolean isFirst = true;
for (Resource resource : batch) {
if (!isFirst) {
batchUrl.append(',');
} else {
isFirst = false;
}
batchUrl.append(resource.getId());
}
batchUrl.append(";data.state=in-use;_since=0;_expand;_meta:"
+ "(ltime,data:(volumeId,attachments:(deleteOnTermination,instanceId)))");
return batchUrl.toString();
}
}
| |
/**
* Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.financial.security;
import com.opengamma.core.security.Security;
import com.opengamma.financial.security.option.BondFutureOptionSecurity;
import com.opengamma.financial.security.option.CommodityFutureOptionSecurity;
import com.opengamma.financial.security.option.EquityIndexFutureOptionSecurity;
import com.opengamma.financial.security.option.EquityIndexOptionSecurity;
import com.opengamma.financial.security.option.EquityOptionSecurity;
import com.opengamma.financial.security.option.IRFutureOptionSecurity;
import com.opengamma.financial.security.option.OptionType;
import com.opengamma.util.time.Expiry;
/**
 * Utility class containing a number of visitors that get fields typical of option securities.
 *
 * <p>All visitors are stateless, so single shared instances are exposed through the
 * {@code get*Visitor()} accessors.
 */
public class OptionSecurityVisitors {

  /** Strike visitor. Stateless; held in a {@code final} field so the shared instance cannot be replaced. */
  private static final FinancialSecurityVisitorAdapter<Double> s_strikeVisitor = new StrikeVisitor();
  /** Expiry visitor. */
  private static final FinancialSecurityVisitorAdapter<Expiry> s_expiryVisitor = new ExpiryVisitor();
  /** Exchange visitor. */
  private static final FinancialSecurityVisitorAdapter<String> s_exchangeVisitor = new ExchangeVisitor();
  /** Option type visitor. */
  private static final FinancialSecurityVisitorAdapter<OptionType> s_optionTypeVisitor = new OptionTypeVisitor();

  /**
   * Gets a visitor that provides the strike of options.
   * @return Instance of {@link FinancialSecurityVisitorAdapter} that provides the strike of option securities
   */
  public static FinancialSecurityVisitorAdapter<Double> getStrikeVisitor() {
    return s_strikeVisitor;
  }

  /**
   * Gets the strike of a security, if applicable.
   * @param security The security
   * @return The strike
   * @throws UnsupportedOperationException if the security is null or is not one of the types handled.
   */
  public static Double getStrike(final Security security) {
    if (security instanceof FinancialSecurity) {
      return ((FinancialSecurity) security).accept(s_strikeVisitor);
    }
    throw new UnsupportedOperationException("Cannot get strike for security " + security);
  }

  /**
   * Gets a visitor that provides the expiry of an option.
   * @return Instance of {@link FinancialSecurityVisitorAdapter} that provides the expiry of option securities
   */
  public static FinancialSecurityVisitorAdapter<Expiry> getExpiryVisitor() {
    return s_expiryVisitor;
  }

  /**
   * Gets the expiry of a security, if applicable.
   * @param security The security
   * @return The expiry
   * @throws UnsupportedOperationException if the security is null or is not one of the types handled.
   */
  public static Expiry getExpiry(final Security security) {
    if (security instanceof FinancialSecurity) {
      return ((FinancialSecurity) security).accept(s_expiryVisitor);
    }
    throw new UnsupportedOperationException("Cannot get expiry for security " + security);
  }

  /**
   * Gets a visitor that provides the exchange code for an option.
   * @return Instance of {@link FinancialSecurityVisitorAdapter} that provides exchange code of option securities
   */
  public static FinancialSecurityVisitorAdapter<String> getExchangeVisitor() {
    return s_exchangeVisitor;
  }

  /**
   * Gets the exchange of a security, if applicable. If both settlement and trading exchanges are available,
   * returns the settlement exchange.
   * @param security The security
   * @return The exchange code
   * @throws UnsupportedOperationException if the security is null or is not one of the types handled.
   */
  public static String getExchange(final Security security) {
    if (security instanceof FinancialSecurity) {
      return ((FinancialSecurity) security).accept(s_exchangeVisitor);
    }
    throw new UnsupportedOperationException("Cannot get exchange for security " + security);
  }

  /**
   * Gets a visitor that provides the option type.
   * @return Instance of FinancialSecurityVisitorAdapter that provides {@link OptionType} of option securities
   */
  public static FinancialSecurityVisitorAdapter<OptionType> getOptionTypeVisitor() {
    return s_optionTypeVisitor;
  }

  /**
   * Gets the option type of a security, if applicable.
   * @param security The security
   * @return The option type
   * @throws UnsupportedOperationException if the security is null or is not one of the types handled.
   */
  public static OptionType getOptionType(final Security security) {
    if (security instanceof FinancialSecurity) {
      return ((FinancialSecurity) security).accept(s_optionTypeVisitor);
    }
    throw new UnsupportedOperationException("Cannot get option type for security " + security);
  }

  /**
   * Gets the strike for a security.
   */
  public static class StrikeVisitor extends FinancialSecurityVisitorAdapter<Double> {

    @Override
    public Double visitEquityIndexOptionSecurity(final EquityIndexOptionSecurity security) {
      return Double.valueOf(security.getStrike());
    }

    @Override
    public Double visitEquityOptionSecurity(final EquityOptionSecurity security) {
      return Double.valueOf(security.getStrike());
    }

    @Override
    public Double visitEquityIndexFutureOptionSecurity(final EquityIndexFutureOptionSecurity security) {
      return Double.valueOf(security.getStrike());
    }

    @Override
    public Double visitBondFutureOptionSecurity(final BondFutureOptionSecurity security) {
      return Double.valueOf(security.getStrike());
    }

    @Override
    public Double visitCommodityFutureOptionSecurity(final CommodityFutureOptionSecurity security) {
      return Double.valueOf(security.getStrike());
    }

    @Override
    public Double visitIRFutureOptionSecurity(final IRFutureOptionSecurity security) {
      return Double.valueOf(security.getStrike());
    }
  }

  /**
   * Get the expiry for a security.
   */
  public static class ExpiryVisitor extends FinancialSecurityVisitorAdapter<Expiry> {

    @Override
    public Expiry visitEquityIndexOptionSecurity(final EquityIndexOptionSecurity security) {
      return security.getExpiry();
    }

    @Override
    public Expiry visitEquityOptionSecurity(final EquityOptionSecurity security) {
      return security.getExpiry();
    }

    @Override
    public Expiry visitEquityIndexFutureOptionSecurity(final EquityIndexFutureOptionSecurity security) {
      return security.getExpiry();
    }

    @Override
    public Expiry visitBondFutureOptionSecurity(final BondFutureOptionSecurity security) {
      return security.getExpiry();
    }

    @Override
    public Expiry visitCommodityFutureOptionSecurity(final CommodityFutureOptionSecurity security) {
      return security.getExpiry();
    }

    @Override
    public Expiry visitIRFutureOptionSecurity(final IRFutureOptionSecurity security) {
      return security.getExpiry();
    }
  }

  /**
   * Get Exchange for security. <p>
   * Note: This defaults to Settlement Exchange when both Settlement and Trading Exchanges are available for the SecurityType.
   */
  public static class ExchangeVisitor extends FinancialSecurityVisitorAdapter<String> {

    @Override
    public String visitEquityIndexOptionSecurity(final EquityIndexOptionSecurity security) {
      return security.getExchange();
    }

    @Override
    public String visitEquityOptionSecurity(final EquityOptionSecurity security) {
      return security.getExchange();
    }

    @Override
    public String visitEquityIndexFutureOptionSecurity(final EquityIndexFutureOptionSecurity security) {
      return security.getExchange();
    }

    @Override
    public String visitBondFutureOptionSecurity(final BondFutureOptionSecurity security) {
      // Bond future options expose both exchanges; the settlement exchange is preferred.
      return security.getSettlementExchange();
    }

    @Override
    public String visitCommodityFutureOptionSecurity(final CommodityFutureOptionSecurity security) {
      // Commodity future options expose both exchanges; the settlement exchange is preferred.
      return security.getSettlementExchange();
    }

    @Override
    public String visitIRFutureOptionSecurity(final IRFutureOptionSecurity security) {
      return security.getExchange();
    }
  }

  /**
   * Get {@link OptionType}, CALL or PUT, for security.
   */
  public static class OptionTypeVisitor extends FinancialSecurityVisitorAdapter<OptionType> {

    @Override
    public OptionType visitEquityIndexOptionSecurity(final EquityIndexOptionSecurity security) {
      return security.getOptionType();
    }

    @Override
    public OptionType visitEquityOptionSecurity(final EquityOptionSecurity security) {
      return security.getOptionType();
    }

    @Override
    public OptionType visitEquityIndexFutureOptionSecurity(final EquityIndexFutureOptionSecurity security) {
      return security.getOptionType();
    }

    @Override
    public OptionType visitBondFutureOptionSecurity(final BondFutureOptionSecurity security) {
      return security.getOptionType();
    }

    @Override
    public OptionType visitCommodityFutureOptionSecurity(final CommodityFutureOptionSecurity security) {
      return security.getOptionType();
    }

    @Override
    public OptionType visitIRFutureOptionSecurity(final IRFutureOptionSecurity security) {
      return security.getOptionType();
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.reef.examples.group.bgd;
import org.apache.reef.examples.group.bgd.operatornames.*;
import org.apache.reef.examples.group.bgd.parameters.*;
import org.apache.reef.examples.group.bgd.utils.StepSizes;
import org.apache.reef.examples.group.utils.math.DenseVector;
import org.apache.reef.examples.group.utils.math.Vector;
import org.apache.reef.examples.group.utils.timer.Timer;
import org.apache.reef.exception.evaluator.NetworkException;
import org.apache.reef.io.Tuple;
import org.apache.reef.io.network.group.api.operators.Broadcast;
import org.apache.reef.io.network.group.api.operators.Reduce;
import org.apache.reef.io.network.group.api.GroupChanges;
import org.apache.reef.io.network.group.api.task.CommunicationGroupClient;
import org.apache.reef.io.network.group.api.task.GroupCommClient;
import org.apache.reef.io.network.util.Pair;
import org.apache.reef.io.serialization.Codec;
import org.apache.reef.io.serialization.SerializableCodec;
import org.apache.reef.tang.annotations.Parameter;
import org.apache.reef.task.Task;
import javax.inject.Inject;
import java.util.ArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * Master task for BGD example.
 *
 * <p>Drives iterative batch gradient descent over a group of slave tasks: each iteration
 * broadcasts the model (or the last step size), reduces the slaves' loss/gradient
 * contributions, runs a distributed line search for the step size, and updates the model
 * until {@link #converged(int, double)} reports convergence.
 */
public class MasterTask implements Task {

  public static final String TASK_ID = "MasterTask";

  private static final Logger LOG = Logger.getLogger(MasterTask.class.getName());

  // Group-communication operators used to coordinate with the slave tasks.
  private final CommunicationGroupClient communicationGroupClient;
  private final Broadcast.Sender<ControlMessages> controlMessageBroadcaster;
  private final Broadcast.Sender<Vector> modelBroadcaster;
  private final Reduce.Receiver<Pair<Pair<Double, Integer>, Vector>> lossAndGradientReducer;
  private final Broadcast.Sender<Pair<Vector, Vector>> modelAndDescentDirectionBroadcaster;
  // NOTE(review): "Driection" is a typo in this field name; left as-is in this doc-only pass.
  private final Broadcast.Sender<Vector> descentDriectionBroadcaster;
  private final Reduce.Receiver<Pair<Vector, Integer>> lineSearchEvaluationsReducer;
  private final Broadcast.Sender<Double> minEtaBroadcaster;
  // When true, topology changes are tolerated and the iteration continues without resending the model.
  private final boolean ignoreAndContinue;
  // Candidate step sizes for the line search.
  private final StepSizes ts;
  // L2 regularization coefficient.
  private final double lambda;
  private final int maxIters;
  // Objective-function value recorded per iteration; returned (encoded) from call().
  private final ArrayList<Double> losses = new ArrayList<>();
  private final Codec<ArrayList<Double>> lossCodec = new SerializableCodec<>();
  private final Vector model;
  // Whether the full model must be rebroadcast (true after a topology change or at start).
  private boolean sendModel = true;
  // Step size chosen by the most recent line search.
  private double minEta = 0;

  /**
   * Constructs the master task and looks up all broadcast/reduce operators from the
   * communication group.
   *
   * @param groupCommClient client providing access to the communication group
   * @param dimensions dimensionality of the model vector
   * @param lambda L2 regularization coefficient
   * @param maxIters maximum number of iterations
   * @param rampup whether to continue past topology changes without resending the model
   * @param ts the candidate step sizes for the line search
   */
  @Inject
  public MasterTask(
      final GroupCommClient groupCommClient,
      @Parameter(ModelDimensions.class) final int dimensions,
      @Parameter(Lambda.class) final double lambda,
      @Parameter(Iterations.class) final int maxIters,
      @Parameter(EnableRampup.class) final boolean rampup,
      final StepSizes ts) {

    this.lambda = lambda;
    this.maxIters = maxIters;
    this.ts = ts;
    this.ignoreAndContinue = rampup;
    this.model = new DenseVector(dimensions);
    this.communicationGroupClient = groupCommClient.getCommunicationGroup(AllCommunicationGroup.class);
    this.controlMessageBroadcaster = communicationGroupClient.getBroadcastSender(ControlMessageBroadcaster.class);
    this.modelBroadcaster = communicationGroupClient.getBroadcastSender(ModelBroadcaster.class);
    this.lossAndGradientReducer = communicationGroupClient.getReduceReceiver(LossAndGradientReducer.class);
    this.modelAndDescentDirectionBroadcaster =
        communicationGroupClient.getBroadcastSender(ModelAndDescentDirectionBroadcaster.class);
    this.descentDriectionBroadcaster = communicationGroupClient.getBroadcastSender(DescentDirectionBroadcaster.class);
    this.lineSearchEvaluationsReducer = communicationGroupClient.getReduceReceiver(LineSearchEvaluationsReducer.class);
    this.minEtaBroadcaster = communicationGroupClient.getBroadcastSender(MinEtaBroadcaster.class);
  }

  /**
   * Runs the gradient-descent loop until convergence, tells the slaves to stop, and
   * returns the per-iteration losses encoded with {@link SerializableCodec}.
   *
   * @param memento ignored
   * @return the encoded list of per-iteration losses
   */
  @Override
  public byte[] call(final byte[] memento) throws Exception {
    double gradientNorm = Double.MAX_VALUE;
    for (int iteration = 1; !converged(iteration, gradientNorm); ++iteration) {
      try (final Timer t = new Timer("Current Iteration(" + iteration + ")")) {
        final Pair<Double, Vector> lossAndGradient = computeLossAndGradient();
        losses.add(lossAndGradient.getFirst());
        final Vector descentDirection = getDescentDirection(lossAndGradient.getSecond());

        updateModel(descentDirection);

        // Convergence is checked on the norm of the (scaled) descent direction.
        gradientNorm = descentDirection.norm2();
      }
    }
    LOG.log(Level.INFO, "OUT: Stop");
    controlMessageBroadcaster.send(ControlMessages.Stop);
    for (final Double loss : losses) {
      LOG.log(Level.INFO, "OUT: LOSS = {0}", loss);
    }
    return lossCodec.encode(losses);
  }

  /**
   * Runs the distributed line search along the descent direction, then takes a step of
   * the resulting size {@code minEta} in that direction.
   *
   * @param descentDirection the direction to step along
   */
  private void updateModel(final Vector descentDirection) throws NetworkException, InterruptedException {
    try (final Timer t = new Timer("GetDescentDirection + FindMinEta + UpdateModel")) {
      final Vector lineSearchEvals = lineSearch(descentDirection);
      minEta = findMinEta(model, descentDirection, lineSearchEvals);
      model.multAdd(minEta, descentDirection);
    }

    LOG.log(Level.INFO, "OUT: New Model = {0}", model);
  }

  /**
   * Broadcasts the descent direction (plus the model if it must be resent) and reduces
   * the per-step-size loss evaluations from the slaves. Repeats while all slaves are dead
   * or (unless rampup is enabled) while a topology change requires resending the model.
   *
   * @param descentDirection the direction being searched
   * @return the averaged line-search loss evaluations, one per candidate step size
   */
  private Vector lineSearch(final Vector descentDirection) throws NetworkException, InterruptedException {
    Vector lineSearchResults = null;
    boolean allDead = false;
    do {
      try (final Timer t = new Timer("LineSearch - Broadcast("
          + (sendModel ? "ModelAndDescentDirection" : "DescentDirection") + ") + Reduce(LossEvalsInLineSearch)")) {
        if (sendModel) {
          LOG.log(Level.INFO, "OUT: DoLineSearchWithModel");
          controlMessageBroadcaster.send(ControlMessages.DoLineSearchWithModel);
          modelAndDescentDirectionBroadcaster.send(new Pair<>(model, descentDirection));
        } else {
          LOG.log(Level.INFO, "OUT: DoLineSearch");
          controlMessageBroadcaster.send(ControlMessages.DoLineSearch);
          descentDriectionBroadcaster.send(descentDirection);
        }
        final Pair<Vector, Integer> lineSearchEvals = lineSearchEvaluationsReducer.reduce();
        if (lineSearchEvals != null) {
          final int numExamples = lineSearchEvals.getSecond();
          lineSearchResults = lineSearchEvals.getFirst();
          // Average the summed losses over the number of examples seen by the slaves.
          lineSearchResults.scale(1.0 / numExamples);
          LOG.log(Level.INFO, "OUT: #Examples: {0}", numExamples);
          LOG.log(Level.INFO, "OUT: LineSearchEvals: {0}", lineSearchResults);
          allDead = false;
        } else {
          // A null reduce result means no slave contributed; retry the whole round.
          allDead = true;
        }
      }

      sendModel = chkAndUpdate();
    } while (allDead || !ignoreAndContinue && sendModel);

    return lineSearchResults;
  }

  /**
   * Broadcasts the model (or the last step size) and reduces the slaves' loss and
   * gradient contributions, producing the regularized objective value and the averaged
   * gradient. Retries on the same conditions as {@link #lineSearch(Vector)}.
   *
   * @return pair of (objective function value, averaged gradient)
   */
  private Pair<Double, Vector> computeLossAndGradient() throws NetworkException, InterruptedException {
    Pair<Double, Vector> returnValue = null;
    boolean allDead = false;
    do {
      try (final Timer t = new Timer("Broadcast(" + (sendModel ? "Model" : "MinEta") + ") + Reduce(LossAndGradient)")) {
        if (sendModel) {
          LOG.log(Level.INFO, "OUT: ComputeGradientWithModel");
          controlMessageBroadcaster.send(ControlMessages.ComputeGradientWithModel);
          modelBroadcaster.send(model);
        } else {
          // Slaves already hold the model and direction; they only need the chosen step size.
          LOG.log(Level.INFO, "OUT: ComputeGradientWithMinEta");
          controlMessageBroadcaster.send(ControlMessages.ComputeGradientWithMinEta);
          minEtaBroadcaster.send(minEta);
        }
        final Pair<Pair<Double, Integer>, Vector> lossAndGradient = lossAndGradientReducer.reduce();

        if (lossAndGradient != null) {
          final int numExamples = lossAndGradient.getFirst().getSecond();
          LOG.log(Level.INFO, "OUT: #Examples: {0}", numExamples);
          final double lossPerExample = lossAndGradient.getFirst().getFirst() / numExamples;
          LOG.log(Level.INFO, "OUT: Loss: {0}", lossPerExample);
          // Objective = L2 penalty + average loss.
          final double objFunc = (lambda / 2) * model.norm2Sqr() + lossPerExample;
          LOG.log(Level.INFO, "OUT: Objective Func Value: {0}", objFunc);
          final Vector gradient = lossAndGradient.getSecond();
          gradient.scale(1.0 / numExamples);
          LOG.log(Level.INFO, "OUT: Gradient: {0}", gradient);
          returnValue = new Pair<>(objFunc, gradient);
          allDead = false;
        } else {
          // A null reduce result means no slave contributed; retry the whole round.
          allDead = true;
        }
      }

      sendModel = chkAndUpdate();
    } while (allDead || !ignoreAndContinue && sendModel);

    return returnValue;
  }

  /**
   * Checks the communication group for topology changes and triggers a topology update
   * when changes exist.
   *
   * @return true if the topology changed (so the model must be rebroadcast), false otherwise
   */
  private boolean chkAndUpdate() {
    long t1 = System.currentTimeMillis();
    final GroupChanges changes = communicationGroupClient.getTopologyChanges();
    long t2 = System.currentTimeMillis();
    LOG.log(Level.INFO, "OUT: Time to get TopologyChanges = " + (t2 - t1) / 1000.0 + " sec");
    if (changes.exist()) {
      LOG.log(Level.INFO, "OUT: There exist topology changes. Asking to update Topology");
      t1 = System.currentTimeMillis();
      communicationGroupClient.updateTopology();
      t2 = System.currentTimeMillis();
      LOG.log(Level.INFO, "OUT: Time to get TopologyChanges = " + (t2 - t1) / 1000.0 + " sec");
      return true;
    } else {
      LOG.log(Level.INFO, "OUT: No changes in topology exist. So not updating topology");
      return false;
    }
  }

  /**
   * Convergence test: stop after {@code maxIters} iterations or when the gradient norm
   * drops to 1e-3 or below.
   *
   * @param iters the current iteration count
   * @param gradNorm the current gradient norm
   * @return true if the descent should stop
   */
  private boolean converged(final int iters, final double gradNorm) {
    return iters >= maxIters || Math.abs(gradNorm) <= 1e-3;
  }

  /**
   * Adds the L2 regularization term to each line-search evaluation and returns the step
   * size with the smallest regularized loss.
   *
   * @param theModel the current model
   * @param descentDir the descent direction
   * @param lineSearchEvals the averaged losses, one per candidate step size (mutated in place)
   * @return the step size minimizing the regularized loss
   */
  private double findMinEta(final Vector theModel, final Vector descentDir, final Vector lineSearchEvals) {
    final double wNormSqr = theModel.norm2Sqr();
    final double dNormSqr = descentDir.norm2Sqr();
    final double wDotd = theModel.dot(descentDir);
    final double[] t = ts.getT();
    int i = 0;
    for (final double eta : t) {
      // ||w + eta*d||^2 expanded, so the regularizer can be added without forming the new model.
      final double modelNormSqr = wNormSqr + (eta * eta) * dNormSqr + 2 * eta * wDotd;
      final double loss = lineSearchEvals.get(i) + ((lambda / 2) * modelNormSqr);
      lineSearchEvals.set(i, loss);
      ++i;
    }
    LOG.log(Level.INFO, "OUT: Regularized LineSearchEvals: {0}", lineSearchEvals);
    final Tuple<Integer, Double> minTup = lineSearchEvals.min();
    LOG.log(Level.INFO, "OUT: MinTup: {0}", minTup);
    final double minT = t[minTup.getKey()];
    LOG.log(Level.INFO, "OUT: MinT: {0}", minT);
    return minT;
  }

  /**
   * Computes the (negated, regularized) gradient as the descent direction. The input
   * gradient vector is mutated and returned.
   *
   * @param gradient the averaged gradient (mutated in place)
   * @return the descent direction
   */
  private Vector getDescentDirection(final Vector gradient) {
    gradient.multAdd(lambda, model);
    gradient.scale(-1);
    LOG.log(Level.INFO, "OUT: DescentDirection: {0}", gradient);
    return gradient;
  }
}
| |
package com.parse.starter;
import android.bluetooth.BluetoothAdapter;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.preference.PreferenceManager;
import android.support.design.widget.NavigationView;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.MenuItem;
import android.view.View;
import android.widget.TextView;
import android.widget.Toast;
import com.parse.FindCallback;
import com.parse.ParseException;
import com.parse.ParseObject;
import com.parse.ParseQuery;
import com.parse.ParseRelation;
import com.parse.ParseUser;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
public class Main2Activity extends AppCompatActivity implements HomeFragment.OnHomeFragmentInteractionListener, GroupFragment.OnGroupFragmentInteractionListener, MyGroupFragment.OnMyGroupFragmentListener {
private static final String MEMBER_KEY = "members";
private static final String GROUP_KEY = "Group";
private static final int REQUEST_ENABLE_BT = 1;
private static final String ITEM_SELECTED_KEY = "Menu";
private DrawerLayout mDrawer;
private NavigationView nvDrawer;
private Toolbar toolbar;
private TextView usernameText;
private TextView emailText;
private BluetoothAdapter bluetoothAdapter;
private int itemIdSelected;
private Timer timer;
private SharedPreferences pref;
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main2);
    pref = PreferenceManager.getDefaultSharedPreferences(Main2Activity.this);
    //TODO: does this work? (original comment: "funziona?")
    // Users already flagged as group members start sharing their position.
    if(ParseUser.getCurrentUser().getBoolean(GROUP_KEY)) {
        startService(new Intent(Main2Activity.this, FindMyPosition.class));
    }
    // Poll the backend every 30 seconds to detect a pending group
    // invitation (see hasGroup()).
    TimerTask timerTask = new TimerTask() {
        @Override
        public void run() {
            hasGroup();
        }
    };
    timer = new Timer();
    timer.schedule(timerTask, 0, 30000);
    // Set a Toolbar to replace the ActionBar.
    toolbar = (Toolbar) findViewById(R.id.toolbar);
    setSupportActionBar(toolbar);
    // NOTE(review): getSupportActionBar() may return null if the theme
    // supplies its own action bar — confirm the theme before relying on this.
    getSupportActionBar().setDisplayHomeAsUpEnabled(true);
    getSupportActionBar().setHomeAsUpIndicator(R.drawable.ic_menu);
    getSupportActionBar().setHomeButtonEnabled(true);
    // Find our drawer views.
    mDrawer = (DrawerLayout) findViewById(R.id.drawer_layout);
    nvDrawer = (NavigationView) findViewById(R.id.nvView);
    // Route drawer menu taps to selectDrawerItem().
    setupDrawerContent(nvDrawer);
    // Populate the drawer header with the logged-in user's name and email.
    View header = nvDrawer.getHeaderView(0);
    usernameText = (TextView) header.findViewById(R.id.username_text);
    usernameText.setText(ParseUser.getCurrentUser().getUsername());
    emailText = (TextView) header.findViewById(R.id.email_text);
    emailText.setText(ParseUser.getCurrentUser().getEmail());
    // Show the home fragment initially.
    Fragment fragment = HomeFragment.newInstance();
    FragmentManager fragmentManager = getSupportFragmentManager();
    fragmentManager.beginTransaction().replace(R.id.flContent, fragment).commit();
    itemIdSelected = R.id.nav_home;
    if (getPackageManager().hasSystemFeature(PackageManager.FEATURE_BLUETOOTH_LE)) {
        bluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
        // Ensures Bluetooth is available on the device and it is enabled. If not,
        // displays a dialog requesting user permission to enable Bluetooth.
        if (bluetoothAdapter == null || !bluetoothAdapter.isEnabled()) {
            Intent enableBtIntent = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
            startActivityForResult(enableBtIntent, REQUEST_ENABLE_BT);
        }
    }
}
@Override
protected void onSaveInstanceState(Bundle outState) {
    super.onSaveInstanceState(outState);
    // Persist the selected drawer item so onResume() can restore it.
    SharedPreferences.Editor editor = pref.edit();
    editor.putInt(ITEM_SELECTED_KEY, itemIdSelected);
    editor.apply();
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    // The action bar home/up affordance opens the navigation drawer.
    if (item.getItemId() == android.R.id.home) {
        mDrawer.openDrawer(GravityCompat.START);
        return true;
    }
    return super.onOptionsItemSelected(item);
}
@Override
protected void onResume() {
    super.onResume();
    // onPause() cancels and nulls the polling timer, and a cancelled Timer
    // cannot schedule new tasks — so recreate it here; otherwise the
    // hasGroup() poll permanently stops after the first pause/resume cycle.
    if (timer == null) {
        timer = new Timer();
        timer.schedule(new TimerTask() {
            @Override
            public void run() {
                hasGroup();
            }
        }, 0, 30000);
    }
    // Restore the drawer selection persisted in onSaveInstanceState().
    int p = pref.getInt(ITEM_SELECTED_KEY, R.id.nav_home);
    MenuItem i = nvDrawer.getMenu().findItem(p);
    // Guard: findItem() returns null for an unknown id (e.g. after a menu
    // change between app versions); the original would NPE in selectDrawerItem.
    if (i != null) {
        selectDrawerItem(i);
    }
}
@Override
protected void onPause() {
    super.onPause();
    // Guard: timer is nulled here and (in the original flow) only created in
    // onCreate(), so a second pause after a resume would NPE on cancel().
    if (timer != null) {
        timer.cancel();
        timer = null;
    }
}
// Wires every drawer menu tap through selectDrawerItem().
private void setupDrawerContent(NavigationView navigationView) {
    NavigationView.OnNavigationItemSelectedListener listener =
            new NavigationView.OnNavigationItemSelectedListener() {
                @Override
                public boolean onNavigationItemSelected(MenuItem menuItem) {
                    selectDrawerItem(menuItem);
                    return true;
                }
            };
    navigationView.setNavigationItemSelectedListener(listener);
}
// Handles a drawer menu selection: shows the matching fragment (or logs out),
// then updates the checked state, title and selected-item bookkeeping.
public void selectDrawerItem(MenuItem menuItem) {
    // Map the tapped entry to the fragment class to display.
    Class<? extends Fragment> fragmentClass;
    switch (menuItem.getItemId()) {
        case R.id.nav_home:
            fragmentClass = HomeFragment.class;
            break;
        case R.id.nav_lift:
            fragmentClass = LiftFragment.class;
            break;
        case R.id.nav_chalet:
            fragmentClass = ChaletFragment.class;
            break;
        case R.id.nav_group:
            // Members see their group; others see the group-creation view.
            // NOTE(review): "group" is presumably the same key as GROUP_KEY
            // used in onCreate() — confirm and use the constant if so.
            if (ParseUser.getCurrentUser().getBoolean("group"))
                fragmentClass = MyGroupFragment.class;
            else
                fragmentClass = GroupFragment.class;
            break;
        case R.id.nav_logout:
            //TODO: show a loading indicator (also for log in and sign up).
            ParseUser.logOut();
            Intent intent = new Intent(Main2Activity.this, DispatchActivity.class);
            intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TASK | Intent.FLAG_ACTIVITY_NEW_TASK);
            startActivity(intent);
            return;
        default:
            fragmentClass = HomeFragment.class;
    }
    Fragment fragment;
    try {
        fragment = fragmentClass.newInstance();
    } catch (Exception e) {
        // Instantiation failed: keep the current fragment instead of passing
        // null to replace() below (the original would crash on that).
        e.printStackTrace();
        return;
    }
    // Insert the fragment by replacing any existing fragment.
    FragmentManager fragmentManager = getSupportFragmentManager();
    fragmentManager.beginTransaction().replace(R.id.flContent, fragment).commit();
    // Highlight the selected item, update the title, and close the drawer.
    menuItem.setChecked(true);
    setTitle(menuItem.getTitle());
    mDrawer.closeDrawers();
    itemIdSelected = menuItem.getItemId();
}
// Polled every 30s by the timer started in onCreate(): while the user is not
// yet flagged as a group member, look for a pending invitation (a group whose
// member relation already lists this user) and ask to accept or refuse it.
private void hasGroup() {
    // Only relevant while the user is not yet in a group.
    if (!(ParseUser.getCurrentUser().getBoolean(UserKey.GROUP_KEY))) {
        // Query groups that already list the current user as a member.
        ParseQuery<ParseObject> query = ParseQuery.getQuery(GROUP_KEY);
        query.whereEqualTo(MEMBER_KEY, ParseUser.getCurrentUser());
        query.findInBackground(new FindCallback<ParseObject>() {
            @Override
            public void done(final List<ParseObject> objects, ParseException e) {
                if (e == null) {
                    if (objects.size() > 0) {
                        // The user is in a group but he's not accepted yet:
                        // show an accept/refuse dialog (text is in Italian).
                        new AlertDialog.Builder(Main2Activity.this)
                                .setTitle("Avviso")
                                .setMessage("Sei invitato ad entrare nel gruppo " + objects.get(0).getString("Name") + ". Accetti l'invito?")
                                .setPositiveButton(R.string.accept_group, new DialogInterface.OnClickListener() {
                                    public void onClick(DialogInterface dialog, int which) { // Accept.
                                        // Mark the user as a group member.
                                        ParseUser.getCurrentUser().put(UserKey.GROUP_KEY, true);
                                        ParseUser.getCurrentUser().saveInBackground();
                                        // Start the location service.
                                        startService(new Intent(Main2Activity.this, FindMyPosition.class));
                                        // Refresh the group view if it is currently shown.
                                        if (itemIdSelected == R.id.nav_group)
                                            selectDrawerItem(nvDrawer.getMenu().findItem(itemIdSelected));
                                    }
                                })
                                .setNegativeButton(R.string.refuse_group, new DialogInterface.OnClickListener() {
                                    public void onClick(DialogInterface dialog, int which) { //Refuse.
                                        // Remove the user from the group's member relation.
                                        ParseRelation r = objects.get(0).getRelation("members");
                                        r.remove(ParseUser.getCurrentUser());
                                        objects.get(0).saveInBackground();
                                    }
                                })
                                .setIcon(android.R.drawable.ic_dialog_alert)
                                .show();
                    }
                } else {
                    // Query failed: surface the Parse error to the user.
                    Toast.makeText(Main2Activity.this, e.getMessage(), Toast.LENGTH_SHORT).show();
                }
            }
        });
    }
}
// OnHomeFragmentInteractionListener callback: called when the user taps the
// checkbox controlling the Bluetooth scan service.
@Override
public void onBluetoothButtonClick(boolean enable) {
    Intent serviceIntent = new Intent(Main2Activity.this, FindBluetoothService.class);
    if (enable) {
        startService(serviceIntent);
    } else {
        stopService(serviceIntent);
    }
}
// OnGroupFragmentInteractionListener callback: called when the user creates a
// group. Shows the group view and starts sharing the user's position.
@Override
public void onCreateGroupButtonClick() {
    getSupportFragmentManager()
            .beginTransaction()
            .replace(R.id.flContent, MyGroupFragment.newInstance())
            .commit();
    // Start the location service.
    startService(new Intent(Main2Activity.this, FindMyPosition.class));
}
// OnMyGroupFragmentListener callback: called when the user leaves a group.
// Shows the group-creation view and stops sharing the user's position.
@Override
public void onExitGroupButtonClick() {
    getSupportFragmentManager()
            .beginTransaction()
            .replace(R.id.flContent, GroupFragment.newInstance())
            .commit();
    // Stop the location service.
    stopService(new Intent(Main2Activity.this, FindMyPosition.class));
}
@Override
public void onBackPressed() {
    // Send the app to the device Home screen instead of navigating back to
    // the DispatchActivity.
    Intent home = new Intent(Intent.ACTION_MAIN)
            .addCategory(Intent.CATEGORY_HOME)
            .setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
    startActivity(home);
}
}
| |
package org.hisp.dhis.reporting.exp;
/*
* Copyright (c) 2004-2015, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.io.IOException;
import java.io.OutputStream;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.zip.GZIPOutputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.struts2.ServletActionContext;
import org.hisp.dhis.importexport.synchronous.ExportPivotViewService;
import org.hisp.dhis.importexport.synchronous.ExportPivotViewService.RequestType;
import org.hisp.dhis.period.PeriodType;
import org.hisp.dhis.system.util.StreamUtils;
import org.hisp.dhis.user.CurrentUserService;
import org.hisp.dhis.util.ContextUtils;
import com.opensymphony.xwork2.Action;
/**
* @author Bob Jolliffe
*
 * This action is called to export a csv formatted selection of
 * aggregated indicator or data values from datamart. It requires four
 * parameters:
 * startDate and endDate - 8 character string representations of a date,
 * e.g. 20100624; dataSourceRoot - id of the root organization unit;
 * dataSourceLevel - level number to fetch aggregated values for.
*/
public class ExportDataMartAction
    implements Action
{
    // TODO: experiment with different sizes for this to stop data dribbling out
    private static final int GZIPBUFFER = 8192;

    // Dummy row count to keep the legacy mydatamart client happy; counting the
    // real result set proved too expensive on large datamarts.
    private static final int DUMMYCOUNT = 100000;

    private static final Log log = LogFactory.getLog( ExportDataMartAction.class );

    // Pattern for startDate/endDate request parameters. SimpleDateFormat is
    // not thread-safe, so a fresh instance is created per request in execute()
    // instead of sharing one static formatter across concurrent invocations.
    private static final String DATE_PATTERN = "yyyyMMdd";

    private static final String NO_STARTDATE = "The request is missing a startDate parameter";
    private static final String NO_ENDDATE = "The request is missing an endDate parameter";
    private static final String BAD_STARTDATE = "The request has a bad startDate parameter. Required format is YYYYMMDD";
    private static final String BAD_ENDDATE = "The request has a bad endDate parameter. Required format is YYYYMMDD";
    private static final String NO_ROOT = "The request is missing a non-zero dataSourceRoot parameter";
    private static final String NO_LEVEL = "The request is missing a non-zero dataSourceLevel parameter";
    private static final String CLIENT_ERROR = "client-error";

    private static final int HTTP_ERROR = 400;

    // -------------------------------------------------------------------------
    // Dependencies
    // -------------------------------------------------------------------------

    private CurrentUserService currentUserService;

    public void setCurrentUserService( CurrentUserService currentUserService )
    {
        this.currentUserService = currentUserService;
    }

    private ExportPivotViewService exportPivotViewService;

    public void setExportPivotViewService( ExportPivotViewService exportPivotViewService )
    {
        this.exportPivotViewService = exportPivotViewService;
    }

    // -------------------------------------------------------------------------
    // Input
    // -------------------------------------------------------------------------

    private String startDate; // inclusive start of the export range, yyyyMMdd

    public void setStartDate( String startDate )
    {
        this.startDate = startDate;
    }

    private String endDate; // inclusive end of the export range, yyyyMMdd

    public void setEndDate( String endDate )
    {
        this.endDate = endDate;
    }

    private int dataSourceLevel; // org unit level to aggregate at; 0 = missing

    public void setDataSourceLevel( int dataSourceLevel )
    {
        this.dataSourceLevel = dataSourceLevel;
    }

    private int dataSourceRoot; // id of the root organisation unit; 0 = missing

    public void setDataSourceRoot( int dataSourceRoot )
    {
        this.dataSourceRoot = dataSourceRoot;
    }

    private RequestType requestType; // indicator vs. data value export

    public void setRequestType( RequestType requestType )
    {
        this.requestType = requestType;
    }

    private String periodType; // name of the period type to export, e.g. "Monthly"

    public void setPeriodType( String periodType )
    {
        this.periodType = periodType;
    }

    // -------------------------------------------------------------------------
    // Action implementation
    // -------------------------------------------------------------------------

    /**
     * Validates the request parameters and streams the requested datamart
     * selection to the response as a gzipped CSV file. On a bad request,
     * sends HTTP 400 with a descriptive message and returns
     * {@code "client-error"}.
     */
    @Override
    public String execute()
        throws IOException
    {
        HttpServletRequest request = ServletActionContext.getRequest();

        log.info( "DataMart export request from " + currentUserService.getCurrentUsername() + " @ "
            + request.getRemoteAddr() );

        HttpServletResponse response = ServletActionContext.getResponse();

        // ---------------------------------------------------------------------
        // Check all parameters (the last failing check wins the error message)
        // ---------------------------------------------------------------------

        String paramError = null;

        if ( startDate == null )
        {
            paramError = NO_STARTDATE;
        }

        if ( endDate == null )
        {
            paramError = NO_ENDDATE;
        }

        if ( dataSourceRoot == 0 )
        {
            paramError = NO_ROOT;
        }

        if ( dataSourceLevel == 0 )
        {
            paramError = NO_LEVEL;
        }

        Date start = null;
        Date end = null;

        if ( paramError == null )
        {
            // Per-request formatter: SimpleDateFormat is not thread-safe and
            // this action may run concurrently. Non-lenient parsing rejects
            // nonsense dates such as 20100199 instead of rolling them over.
            DateFormat dateFormat = new SimpleDateFormat( DATE_PATTERN );
            dateFormat.setLenient( false );

            try
            {
                start = dateFormat.parse( startDate );
            }
            catch ( java.text.ParseException ex )
            {
                paramError = BAD_STARTDATE;
            }

            if ( paramError == null )
            {
                try
                {
                    end = dateFormat.parse( endDate );
                }
                catch ( java.text.ParseException ex )
                {
                    paramError = BAD_ENDDATE;
                }
            }
        }

        if ( paramError != null )
        {
            response.sendError( HTTP_ERROR, paramError );
            log.info( paramError );
            return CLIENT_ERROR;
        }

        // Timestamped filename, e.g. "DATAVALUE_2015_06_24_1200_00.csv.gz"
        SimpleDateFormat format = new SimpleDateFormat( "_yyyy_MM_dd_HHmm_ss" );
        String filename = requestType + format.format( Calendar.getInstance().getTime() ) + ".csv.gz";

        PeriodType pType = PeriodType.getPeriodTypeByName( periodType );

        // prepare to write output
        OutputStream out = null;

        // Counting the result set is too expensive on large datamarts, so a
        // dummy value is reported instead.
        int count = DUMMYCOUNT;

        ContextUtils.configureResponse( response, ContextUtils.CONTENT_TYPE_GZIP, true, filename, true );

        // write number of rows to custom header
        response.addHeader( "X-Number-Of-Rows", String.valueOf( count ) );

        try
        {
            out = new GZIPOutputStream( response.getOutputStream(), GZIPBUFFER );
            exportPivotViewService.execute( out, requestType, pType, start, end, dataSourceLevel, dataSourceRoot );
        }
        finally
        {
            StreamUtils.closeOutputStream( out );
        }

        return SUCCESS;
    }
}
| |
/**
* JBoss, Home of Professional Open Source
* Copyright 2011, Red Hat, Inc. and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.arquillian.ajocado.framework;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import javax.imageio.ImageIO;
import org.apache.commons.codec.binary.Base64;
import org.jboss.arquillian.ajocado.configuration.XPathLibrary;
import org.jboss.arquillian.ajocado.cookie.Cookie;
import org.jboss.arquillian.ajocado.cookie.CookieCreateOptions;
import org.jboss.arquillian.ajocado.cookie.CookieDeleteOptions;
import org.jboss.arquillian.ajocado.cookie.CookieOptions;
import org.jboss.arquillian.ajocado.dom.Attribute;
import org.jboss.arquillian.ajocado.dom.Event;
import org.jboss.arquillian.ajocado.framework.GrapheneConfiguration.TimeoutType;
import org.jboss.arquillian.ajocado.framework.internal.UnsupportedTypedSelenium;
import org.jboss.arquillian.ajocado.geometry.Dimension;
import org.jboss.arquillian.ajocado.geometry.Offset;
import org.jboss.arquillian.ajocado.geometry.Point;
import org.jboss.arquillian.ajocado.javascript.JavaScript;
import org.jboss.arquillian.ajocado.javascript.KeyCode;
import org.jboss.arquillian.ajocado.locator.IdLocator;
import org.jboss.arquillian.ajocado.locator.attribute.AttributeLocator;
import org.jboss.arquillian.ajocado.locator.element.ElementLocationStrategy;
import org.jboss.arquillian.ajocado.locator.element.ElementLocator;
import org.jboss.arquillian.ajocado.locator.element.IterableLocator;
import org.jboss.arquillian.ajocado.locator.frame.FrameLocator;
import org.jboss.arquillian.ajocado.locator.option.OptionLocator;
import org.jboss.arquillian.ajocado.locator.window.WindowLocator;
import org.jboss.arquillian.ajocado.locator.window.WindowNameLocator;
import org.jboss.arquillian.ajocado.log.LogLevel;
import org.jboss.arquillian.ajocado.network.NetworkTraffic;
import org.jboss.arquillian.ajocado.network.NetworkTrafficType;
import org.jboss.arquillian.ajocado.request.RequestHeader;
import org.jboss.arquillian.ajocado.utils.array.ArrayTransform;
import com.thoughtworks.selenium.Selenium;
/**
* Wrapper implementation for Selenium object's API to make it type-safe.
*
* @author <a href="mailto:lfryc@redhat.com">Lukas Fryc</a>
* @version $Revision$
*/
public class TypedSeleniumImpl implements TypedSelenium, UnsupportedTypedSelenium {
Selenium selenium;
GrapheneConfiguration configuration = GrapheneConfigurationContext.getProxy();
// Converts String[] results returned by Selenium (e.g. selected indexes)
// into Integer[] via Integer.valueOf on each element.
private ArrayTransform<String, Integer> transformArrayOfStringToInteger = new ArrayTransform<String, Integer>(Integer.class) {
    @Override
    public Integer transformation(String source) {
        return Integer.valueOf(source);
    }
};
@Override
public void addLocationStrategy(ElementLocationStrategy locationStrategy, JavaScript strategyDefinition) {
selenium.addLocationStrategy(locationStrategy.getStrategyName(), strategyDefinition.toString());
}
@Override
public void addScript(JavaScript javaScript) {
selenium.addScript(javaScript.getAsString(), javaScript.getIdentification());
}
@Override
public void addSelection(ElementLocator<?> elementLocator, OptionLocator<?> optionLocator) {
selenium.addSelection(elementLocator.inSeleniumRepresentation(), optionLocator.inSeleniumRepresentation());
}
@Override
public void allowNativeXpath(boolean allow) {
selenium.allowNativeXpath(String.valueOf(allow));
}
@Override
public void altKeyDown() {
selenium.altKeyDown();
}
@Override
public void altKeyUp() {
selenium.altKeyUp();
}
@Override
public void answerOnNextPrompt(String answer) {
selenium.answerOnNextPrompt(answer);
}
@Override
public IdLocator assignId(ElementLocator<?> elementLocator, String identifier) {
selenium.assignId(elementLocator.inSeleniumRepresentation(), identifier);
return new IdLocator(identifier);
}
@Override
public void attachFile(ElementLocator<?> fieldLocator, File fileLocator) {
throw new UnsupportedOperationException();
}
@Override
public void attachFile(ElementLocator<?> fieldLocator, URL fileLocator) {
throw new UnsupportedOperationException();
}
@Override
public void captureEntirePageScreenshot(File filename) {
throw new UnsupportedOperationException();
}
@Override
public BufferedImage captureEntirePageScreenshot() {
return decodeBase64Screenshot(selenium.captureEntirePageScreenshotToString(""));
}
@Override
public NetworkTraffic captureNetworkTraffic(NetworkTrafficType type) {
String traffic = selenium.captureNetworkTraffic(type.getType());
return new NetworkTraffic(type, traffic);
}
@Override
public void captureScreenshot(File filename) {
throw new UnsupportedOperationException();
}
@Override
public BufferedImage captureScreenshot() {
return decodeBase64Screenshot(selenium.captureScreenshotToString());
}
@Override
public void check(ElementLocator<?> elementLocator) {
selenium.check(elementLocator.inSeleniumRepresentation());
}
@Override
public void chooseCancelOnNextConfirmation() {
selenium.chooseCancelOnNextConfirmation();
}
@Override
public void chooseOkOnNextConfirmation() {
selenium.chooseOkOnNextConfirmation();
}
@Override
public void click(ElementLocator<?> elementLocator) {
selenium.click(elementLocator.inSeleniumRepresentation());
}
@Override
public void clickAt(ElementLocator<?> elementLocator, Point point) {
selenium.clickAt(elementLocator.inSeleniumRepresentation(), point.inSeleniumRepresentation());
}
@Override
public void close() {
selenium.close();
}
@Override
public boolean containsScript(JavaScript javaScript) {
    // A script counts as present when an element carrying the script's
    // identification id exists in the current document.
    String probe = "document.getElementById('" + javaScript.getIdentification() + "') ? true : false";
    return Boolean.valueOf(selenium.getEval(probe));
}
@Override
public void contextMenu(ElementLocator<?> elementLocator) {
selenium.contextMenu(elementLocator.inSeleniumRepresentation());
}
@Override
public void contextMenuAt(ElementLocator<?> elementLocator, Point point) {
selenium.contextMenuAt(elementLocator.inSeleniumRepresentation(), point.inSeleniumRepresentation());
}
@Override
public void controlKeyDown() {
selenium.controlKeyDown();
}
@Override
public void controlKeyUp() {
selenium.controlKeyUp();
}
@Override
public void deleteAllVisibleCookies() {
selenium.deleteAllVisibleCookies();
}
@Override
public void deselectPopUp() {
selenium.deselectPopUp();
}
@Override
public void doubleClick(ElementLocator<?> elementLocator) {
selenium.doubleClick(elementLocator.inSeleniumRepresentation());
}
@Override
public void doubleClickAt(ElementLocator<?> elementLocator, Point point) {
selenium.doubleClickAt(elementLocator.inSeleniumRepresentation(), point.inSeleniumRepresentation());
}
@Override
public void dragAndDrop(ElementLocator<?> elementLocator, Offset offset) {
selenium.dragAndDrop(elementLocator.inSeleniumRepresentation(), offset.inSeleniumRepresentation());
}
@Override
public void dragAndDropToObject(ElementLocator<?> elementLocatorOfObjectToBeDragged,
    ElementLocator<?> elementLocatorOfDragDestinationObject) {
    // Selenium's signature is (locatorOfObjectToBeDragged, locatorOfDragDestinationObject);
    // the original passed the two locators swapped, dragging the destination
    // onto the source.
    selenium.dragAndDropToObject(elementLocatorOfObjectToBeDragged.inSeleniumRepresentation(),
        elementLocatorOfDragDestinationObject.inSeleniumRepresentation());
}
@Override
public void fireEvent(ElementLocator<?> elementLocator, Event event) {
selenium.fireEvent(elementLocator.inSeleniumRepresentation(), event.getEventName());
}
@Override
public void focus(ElementLocator<?> elementLocator) {
selenium.focus(elementLocator.inSeleniumRepresentation());
}
@Override
public String getAlert() {
return selenium.getAlert();
}
@Override
public List<ElementLocator<?>> getAllButtons() {
throw new UnsupportedOperationException();
}
@Override
public List<ElementLocator<?>> getAllFields() {
throw new UnsupportedOperationException();
}
@Override
public List<ElementLocator<?>> getAllLinks() {
throw new UnsupportedOperationException();
}
@Override
public List<WindowNameLocator> getAllWindowIds() {
throw new UnsupportedOperationException();
}
@Override
public List<String> getAllWindowNames() {
throw new UnsupportedOperationException();
}
@Override
public List<String> getAllWindowTitles() {
throw new UnsupportedOperationException();
}
@Override
public String getAttribute(AttributeLocator<?> attributeLocator) {
return selenium.getAttribute(attributeLocator.inSeleniumRepresentation());
}
@Override
public String getAttribute(ElementLocator<?> elementLocator, Attribute attribute) {
return getAttribute(elementLocator.getAttribute(attribute));
}
@Override
public List<String> getAttributeFromAllWindows(Attribute attribute) {
throw new UnsupportedOperationException();
}
@Override
public String getBodyText() {
return selenium.getBodyText();
}
@Override
public String getConfirmation() {
return selenium.getConfirmation();
}
@Override
public int getCount(IterableLocator<?> locator) {
    // Selenium can only count matches via getXpathCount, so any non-XPath
    // location strategy is rejected up front.
    if (locator.getLocationStrategy() != ElementLocationStrategy.XPATH) {
        throw new UnsupportedOperationException("Only XPath locators are supported for counting");
    }
    return selenium.getXpathCount(locator.getRawLocator()).intValue();
}
@Override
public int getCursorPosition(ElementLocator<?> elementLocator) {
return selenium.getCursorPosition(elementLocator.inSeleniumRepresentation()).intValue();
}
@Override
public Dimension getElementDimension(ElementLocator<?> elementLocator) {
return new Dimension(getElementWidth(elementLocator), getElementHeight(elementLocator));
}
@Override
public int getElementHeight(ElementLocator<?> elementLocator) {
return selenium.getElementHeight(elementLocator.inSeleniumRepresentation()).intValue();
}
@Override
public int getElementIndex(ElementLocator<?> elementLocator) {
return selenium.getElementIndex(elementLocator.inSeleniumRepresentation()).intValue();
}
@Override
public Point getElementPosition(ElementLocator<?> elementLocator) {
return new Point(getElementPositionLeft(elementLocator), getElementPositionTop(elementLocator));
}
@Override
public int getElementPositionLeft(ElementLocator<?> elementLocator) {
return selenium.getElementPositionLeft(elementLocator.inSeleniumRepresentation()).intValue();
}
@Override
public int getElementPositionTop(ElementLocator<?> elementLocator) {
return selenium.getElementPositionTop(elementLocator.inSeleniumRepresentation()).intValue();
}
@Override
public int getElementWidth(ElementLocator<?> elementLocator) {
return selenium.getElementWidth(elementLocator.inSeleniumRepresentation()).intValue();
}
@Override
public String getEval(JavaScript script) {
return selenium.getEval(script.toString());
}
@Override
public JavaScript getExpression(JavaScript expression) {
throw new UnsupportedOperationException("not implemented yet");
}
@Override
public String getHtmlSource() {
return selenium.getHtmlSource();
}
@Override
public URL getLocation() {
    // The browser reports its location as a raw string; a malformed URL is
    // unexpected at runtime, so it is rethrown unchecked.
    String location = selenium.getLocation();
    try {
        return new URL(location);
    } catch (MalformedURLException e) {
        throw new RuntimeException(e);
    }
}
@Override
public int getMouseSpeed() {
return selenium.getMouseSpeed().intValue();
}
@Override
public String getPrompt() {
return selenium.getPrompt();
}
@Override
public List<String> getSelectOptions(ElementLocator<?> selectLocator) {
return Arrays.asList(selenium.getSelectOptions(selectLocator.inSeleniumRepresentation()));
}
@Override
public String getSelectedId(ElementLocator<?> selectLocator) {
return selenium.getSelectedId(selectLocator.inSeleniumRepresentation());
}
@Override
public List<String> getSelectedIds(ElementLocator<?> selectLocator) {
return Arrays.asList(selenium.getSelectedIds(selectLocator.inSeleniumRepresentation()));
}
@Override
public int getSelectedIndex(ElementLocator<?> selectLocator) {
return Integer.valueOf(selenium.getSelectedIndex(selectLocator.inSeleniumRepresentation()));
}
@Override
public List<Integer> getSelectedIndexes(ElementLocator<?> selectLocator) {
return Arrays.asList(transformArrayOfStringToInteger.transform(selenium.getSelectedIndexes(selectLocator
.inSeleniumRepresentation())));
}
@Override
public String getSelectedLabel(ElementLocator<?> selectLocator) {
return selenium.getSelectedLabel(selectLocator.inSeleniumRepresentation());
}
@Override
public List<String> getSelectedLabels(ElementLocator<?> selectLocator) {
return Arrays.asList(selenium.getSelectedLabels(selectLocator.inSeleniumRepresentation()));
}
@Override
public String getSelectedValue(ElementLocator<?> selectLocator) {
    // The original nested two getSelectedValue calls, feeding the first
    // result back in as a locator — a copy-paste bug. One call is correct,
    // matching the sibling getSelectedId/getSelectedLabel wrappers.
    return selenium.getSelectedValue(selectLocator.inSeleniumRepresentation());
}
@Override
public List<String> getSelectedValues(ElementLocator<?> selectLocator) {
return Arrays.asList(selenium.getSelectedValues(selectLocator.inSeleniumRepresentation()));
}
@Override
public long getSpeed() {
return Long.valueOf(selenium.getSpeed());
}
@Override
public String getText(ElementLocator<?> elementLocator) {
return selenium.getText(elementLocator.inSeleniumRepresentation());
}
@Override
public String getTitle() {
return selenium.getTitle();
}
@Override
public String getValue(ElementLocator<?> elementLocator) {
return selenium.getValue(elementLocator.inSeleniumRepresentation());
}
@Override
public boolean getWhetherThisFrameMatchFrameExpression(String currentFrameString, String target) {
return selenium.getWhetherThisFrameMatchFrameExpression(currentFrameString, target);
}
@Override
public boolean getWhetherThisWindowMatchWindowExpression(String currentWindowString, String target) {
return selenium.getWhetherThisWindowMatchWindowExpression(currentWindowString, target);
}
@Override
public void goBack() {
selenium.goBack();
}
@Override
public void highlight(ElementLocator<?> elementLocator) {
selenium.highlight(elementLocator.inSeleniumRepresentation());
}
@Override
public void ignoreAttributesWithoutValue(boolean ignore) {
selenium.ignoreAttributesWithoutValue(String.valueOf(ignore));
}
@Override
public boolean isAlertPresent() {
return Boolean.valueOf(selenium.isAlertPresent());
}
@Override
public boolean isChecked(ElementLocator<?> elementLocator) {
return Boolean.valueOf(selenium.isChecked(elementLocator.inSeleniumRepresentation()));
}
@Override
public boolean isConfirmationPresent() {
return Boolean.valueOf(selenium.isConfirmationPresent());
}
@Override
public boolean isEditable(ElementLocator<?> elementLocator) {
return Boolean.valueOf(selenium.isEditable(elementLocator.inSeleniumRepresentation()));
}
@Override
public boolean isElementPresent(ElementLocator<?> elementLocator) {
return selenium.isElementPresent(elementLocator.inSeleniumRepresentation());
}
@Override
public boolean isOrdered(ElementLocator<?> elementLocator1, ElementLocator<?> elementLocator2) {
return selenium.isOrdered(elementLocator1.inSeleniumRepresentation(), elementLocator2.inSeleniumRepresentation());
}
@Override
public boolean isPromptPresent() {
return selenium.isPromptPresent();
}
@Override
public boolean isSomethingSelected(ElementLocator<?> selectLocator) {
return selenium.isSomethingSelected(selectLocator.inSeleniumRepresentation());
}
@Override
public boolean isTextPresent(String text) {
return selenium.isTextPresent(text);
}
@Override
public boolean isVisible(ElementLocator<?> elementLocator) {
return selenium.isVisible(elementLocator.inSeleniumRepresentation());
}
@Override
public void keyDown(ElementLocator<?> elementLocator, char character) {
    // Was selenium.keyPress(...) — a copy-paste slip; keyDown must emit a
    // key-down event only, not a full key press.
    selenium.keyDown(elementLocator.inSeleniumRepresentation(), String.valueOf(character));
}
@Override
public void keyDown(ElementLocator<?> elementLocator, KeyCode keyCode) {
    // Was selenium.keyPress(...) — a copy-paste slip; keyDown must emit a
    // key-down event only, not a full key press.
    selenium.keyDown(elementLocator.inSeleniumRepresentation(), keyCode.inSeleniumRepresentation());
}
@Override
public void keyDownNative(int keycode) {
selenium.keyDownNative(keyEventToNativeCode(keycode));
}
public void keyPress(ElementLocator<?> elementLocator, char character) {
selenium.keyPress(elementLocator.inSeleniumRepresentation(), String.valueOf(character));
}
public void keyPress(ElementLocator<?> elementLocator, KeyCode keyCode) {
selenium.keyPress(elementLocator.inSeleniumRepresentation(), keyCode.inSeleniumRepresentation());
}
public void keyPressNative(int keycode) {
selenium.keyPressNative(keyEventToNativeCode(keycode));
}
public void keyUp(ElementLocator<?> elementLocator, char character) {
    // Was selenium.keyPress(...) — a copy-paste slip; keyUp must emit a
    // key-up event only, not a full key press.
    selenium.keyUp(elementLocator.inSeleniumRepresentation(), String.valueOf(character));
}
public void keyUp(ElementLocator<?> elementLocator, KeyCode keyCode) {
    // Was selenium.keyPress(...) — a copy-paste slip; keyUp must emit a
    // key-up event only, not a full key press.
    selenium.keyUp(elementLocator.inSeleniumRepresentation(), keyCode.inSeleniumRepresentation());
}
public void keyUpNative(int keycode) {
selenium.keyUpNative(keyEventToNativeCode(keycode));
}
public void logToBrowser(String textToLog) {
selenium.setContext(textToLog);
}
public void metaKeyDown() {
selenium.metaKeyDown();
}
public void metaKeyUp() {
selenium.metaKeyUp();
}
// --- Mouse event simulation ---------------------------------------------
// Each method delegates 1:1 to the corresponding Selenium RC call,
// translating locator/point objects into their Selenium string forms.
public void mouseDown(ElementLocator<?> elementLocator) {
selenium.mouseDown(elementLocator.inSeleniumRepresentation());
}
// NOTE(review): Selenium RC "...At" commands take coordinates relative to the
// located element — confirm Point.inSeleniumRepresentation() follows that convention.
public void mouseDownAt(ElementLocator<?> elementLocator, Point point) {
selenium.mouseDownAt(elementLocator.inSeleniumRepresentation(), point.inSeleniumRepresentation());
}
// Right-button press on the located element.
public void mouseDownRight(ElementLocator<?> elementLocator) {
selenium.mouseDownRight(elementLocator.inSeleniumRepresentation());
}
public void mouseDownRightAt(ElementLocator<?> elementLocator, Point point) {
selenium.mouseDownRightAt(elementLocator.inSeleniumRepresentation(), point.inSeleniumRepresentation());
}
public void mouseMove(ElementLocator<?> elementLocator) {
selenium.mouseMove(elementLocator.inSeleniumRepresentation());
}
public void mouseMoveAt(ElementLocator<?> elementLocator, Point point) {
selenium.mouseMoveAt(elementLocator.inSeleniumRepresentation(), point.inSeleniumRepresentation());
}
public void mouseOut(ElementLocator<?> elementLocator) {
selenium.mouseOut(elementLocator.inSeleniumRepresentation());
}
public void mouseOver(ElementLocator<?> elementLocator) {
selenium.mouseOver(elementLocator.inSeleniumRepresentation());
}
public void mouseUp(ElementLocator<?> elementLocator) {
selenium.mouseUp(elementLocator.inSeleniumRepresentation());
}
public void mouseUpAt(ElementLocator<?> elementLocator, Point point) {
selenium.mouseUpAt(elementLocator.inSeleniumRepresentation(), point.inSeleniumRepresentation());
}
public void mouseUpRight(ElementLocator<?> elementLocator) {
selenium.mouseUpRight(elementLocator.inSeleniumRepresentation());
}
public void mouseUpRightAt(ElementLocator<?> elementLocator, Point point) {
selenium.mouseUpRightAt(elementLocator.inSeleniumRepresentation(), point.inSeleniumRepresentation());
}
// Opens the given URL in the browser under test.
public void open(URL url) {
selenium.open(url.toString());
}
// Opens the URL in a new, named popup window.
public void openWindow(URL url, WindowNameLocator windowName) {
selenium.openWindow(url.toString(), windowName.getRawLocator());
}
// Reloads the current page.
public void refresh() {
selenium.refresh();
}
// Deselects all options of a multi-select element.
public void removeAllSelections(ElementLocator<?> elementLocator) {
selenium.removeAllSelections(elementLocator.inSeleniumRepresentation());
}
// Removes a previously-added user script, identified by the script's id.
public void removeScript(JavaScript javaScript) {
selenium.removeScript(javaScript.getIdentification());
}
// Deselects a single option of a multi-select element.
public void removeSelection(ElementLocator<?> elementLocator, OptionLocator<?> optionLocator) {
selenium.removeSelection(elementLocator.inSeleniumRepresentation(), optionLocator.inSeleniumRepresentation());
}
// Returns the log output captured by the Selenium server since the last call.
public String retrieveLastRemoteControlLogs() {
return selenium.retrieveLastRemoteControlLogs();
}
// Executes the given JavaScript in the context of the current page.
public void runScript(JavaScript script) {
selenium.runScript(script.getAsString());
}
// Selects an option of a select element.
public void select(ElementLocator<?> selectLocator, OptionLocator<?> optionLocator) {
selenium.select(selectLocator.inSeleniumRepresentation(), optionLocator.inSeleniumRepresentation());
}
// Directs subsequent commands at the given frame.
public void selectFrame(FrameLocator<?> frameLocator) {
selenium.selectFrame(frameLocator.inSeleniumRepresentation());
}
// Directs subsequent commands at a popup window (raw locator form required by RC).
public void selectPopUp(WindowLocator<?> windowLocator) {
selenium.selectPopUp(windowLocator.getRawLocator());
}
// Directs subsequent commands at the given window.
public void selectWindow(WindowLocator<?> windowLocator) {
selenium.selectWindow(windowLocator.inSeleniumRepresentation());
}
// Sets the verbosity threshold of the browser-side log.
public void setBrowserLogLevel(LogLevel logLevel) {
selenium.setBrowserLogLevel(logLevel.getLogLevelName());
}
}
public void setCursorPosition(ElementLocator<?> elementLocator, int position) {
selenium.setCursorPosition(elementLocator.inSeleniumRepresentation(), String.valueOf(position));
}
public void setExtensionJs(JavaScript extensionJs) {
throw new UnsupportedOperationException("not implemented yet");
}
public void setMouseSpeed(int pixels) {
selenium.setMouseSpeed(String.valueOf(pixels));
}
public void setSpeed(long speedInMilis) {
selenium.setSpeed(String.valueOf(speedInMilis));
}
public void setTimeout(long timeoutInMilis) {
selenium.setTimeout(String.valueOf(timeoutInMilis));
}
public void shiftKeyDown() {
selenium.shiftKeyDown();
}
public void shiftKeyUp() {
selenium.shiftKeyUp();
}
public void shutDownSeleniumServer() {
selenium.shutDownSeleniumServer();
}
public void start() {
selenium.start();
}
public void stop() {
selenium.stop();
}
public void submit(ElementLocator<?> formLocator) {
selenium.submit(formLocator.inSeleniumRepresentation());
}
public void type(ElementLocator<?> elementLocator, String value) {
selenium.type(elementLocator.inSeleniumRepresentation(), value);
}
/**
 * Simulates keystroke-by-keystroke typing of {@code value} into the located
 * element, firing keyDown/keyUp/keyPress events for each character.
 *
 * <p>Fix: this method previously delegated to {@code selenium.type(..)},
 * which sets the field value directly without generating key events, making
 * it indistinguishable from {@link #type(ElementLocator, String)}. It now
 * delegates to {@code selenium.typeKeys(..)} as its name promises.
 */
public void typeKeys(ElementLocator<?> elementLocator, String value) {
selenium.typeKeys(elementLocator.inSeleniumRepresentation(), value);
}
// Unchecks the located checkbox/radio button.
public void uncheck(ElementLocator<?> elementLocator) {
selenium.uncheck(elementLocator.inSeleniumRepresentation());
}
// Chooses which XPath engine the browser-side runner uses to evaluate locators.
public void useXpathLibrary(XPathLibrary xPathLibrary) {
selenium.useXpathLibrary(xPathLibrary.inSeleniumRepresentation());
}
// Polls the given JS expression until it evaluates to true, using the configured default timeout.
public void waitForCondition(JavaScript script) {
String timeout = String.valueOf(configuration.getTimeout(TimeoutType.DEFAULT));
selenium.waitForCondition(script.getAsString(), timeout);
}
// Polls the given JS expression until it evaluates to true or the explicit timeout (ms) expires.
public void waitForCondition(JavaScript script, long timeout) {
selenium.waitForCondition(script.getAsString(), String.valueOf(timeout));
}
// Waits for the frame at the given URL to finish loading, using the configured default timeout.
public void waitForFrameToLoad(URL frameURL) {
String timeout = String.valueOf(configuration.getTimeout(TimeoutType.DEFAULT));
selenium.waitForFrameToLoad(frameURL.toString(), timeout);
}
public void waitForFrameToLoad(URL frameURL, long timeout) {
selenium.waitForFrameToLoad(frameURL.toString(), String.valueOf(timeout));
}
// Waits for a page-load event, using the configured default timeout.
public void waitForPageToLoad() {
String timeout = String.valueOf(configuration.getTimeout(TimeoutType.DEFAULT));
selenium.waitForPageToLoad(timeout);
}
public void waitForPageToLoad(long timeout) {
selenium.waitForPageToLoad(String.valueOf(timeout));
}
// Waits for the named popup window to appear and finish loading.
public void waitForPopUp(WindowNameLocator windowNameLocator, long timeoutInMilis) {
selenium.waitForPopUp(windowNameLocator.getRawLocator(), Long.toString(timeoutInMilis));
}
// Gives focus to the currently selected window.
public void windowFocus() {
selenium.windowFocus();
}
// Resizes the currently selected window to fill the screen.
public void windowMaximize() {
selenium.windowMaximize();
}
/**
 * Decodes a Base64-encoded screenshot (as returned by the Selenium RC
 * capture commands) into a {@link BufferedImage}.
 *
 * @param screenshotInBase64 Base64 text of the PNG screenshot
 * @return the decoded image, never {@code null}
 * @throws RuntimeException if reading the image data fails with an I/O error
 * @throws IllegalStateException if the bytes do not contain a decodable image
 */
private BufferedImage decodeBase64Screenshot(String screenshotInBase64) {
byte[] screenshotPng = Base64.decodeBase64(screenshotInBase64);
ByteArrayInputStream inputStream = new ByteArrayInputStream(screenshotPng);
BufferedImage result;
try {
result = ImageIO.read(inputStream);
} catch (IOException e) {
throw new RuntimeException("Unable to read screenshot image data", e);
}
// ImageIO.read returns null (rather than throwing) when no registered
// ImageReader can decode the stream; fail fast instead of leaking a null
// that would cause an NPE far from the actual cause.
if (result == null) {
throw new IllegalStateException("Screenshot data is not a decodable image");
}
return result;
}
// Not supported: Selenium RC has no command that enumerates all cookies with attributes.
@Override
public Set<Cookie> getAllCookies() {
throw new UnsupportedOperationException("not implemented yet");
}
// Builds a Cookie object from the name and the value reported by the browser.
@Override
public Cookie getCookieByName(String cookieName) {
String value = selenium.getCookieByName(cookieName);
return Cookie.createCookie(cookieName, value);
}
@Override
public boolean isCookiePresent(String cookieName) {
return selenium.isCookiePresent(cookieName);
}
// Creates the cookie with freshly-constructed default options and returns them
// so the caller can inspect what was used.
@Override
public CookieCreateOptions createCookie(Cookie cookie) {
CookieCreateOptions options = CookieOptions.forCreation();
this.createCookie(cookie, options);
return options;
}
@Override
public void createCookie(Cookie cookie, CookieCreateOptions options) {
selenium.createCookie(cookie.inSeleniumRepresentation(), options.inSeleniumRepresentation());
}
@Override
public void deleteCookie(String cookieName, CookieDeleteOptions options) {
selenium.deleteCookie(cookieName, options.inSeleniumRepresentation());
}
// Adds a header to every subsequent HTTP request routed through the RC proxy.
@Override
public void addCustomRequestHeader(RequestHeader header) {
selenium.addCustomRequestHeader(header.getName(), header.getValue());
}
/**
 * Converts a {@code java.awt.event.KeyEvent} key code to the decimal string
 * form expected by Selenium RC's native-key commands.
 */
private static String keyEventToNativeCode(int event) {
return String.valueOf(event);
}
}
| |
/**
* Copyright (c) 2015-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
package com.facebook.react.views.image;
import javax.annotation.Nullable;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapShader;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.Shader;
import android.graphics.drawable.Animatable;
import android.graphics.drawable.Drawable;
import com.facebook.common.util.UriUtil;
import com.facebook.csslayout.YogaConstants;
import com.facebook.drawee.controller.AbstractDraweeControllerBuilder;
import com.facebook.drawee.controller.BaseControllerListener;
import com.facebook.drawee.controller.ControllerListener;
import com.facebook.drawee.controller.ForwardingControllerListener;
import com.facebook.drawee.drawable.AutoRotateDrawable;
import com.facebook.drawee.drawable.ScalingUtils;
import com.facebook.drawee.generic.GenericDraweeHierarchy;
import com.facebook.drawee.generic.GenericDraweeHierarchyBuilder;
import com.facebook.drawee.generic.RoundingParams;
import com.facebook.drawee.view.GenericDraweeView;
import com.facebook.imagepipeline.common.ResizeOptions;
import com.facebook.imagepipeline.image.ImageInfo;
import com.facebook.imagepipeline.request.BasePostprocessor;
import com.facebook.imagepipeline.request.ImageRequest;
import com.facebook.imagepipeline.request.ImageRequestBuilder;
import com.facebook.imagepipeline.request.Postprocessor;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.bridge.ReadableArray;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.uimanager.FloatUtil;
import com.facebook.react.uimanager.PixelUtil;
import com.facebook.react.uimanager.UIManagerModule;
import com.facebook.react.uimanager.events.EventDispatcher;
import com.facebook.react.views.imagehelper.ImageSource;
import com.facebook.react.views.imagehelper.MultiSourceHelper;
import com.facebook.react.views.imagehelper.ResourceDrawableIdHelper;
import com.facebook.react.views.imagehelper.MultiSourceHelper.MultiSourceResult;
/**
 * Wrapper class around Fresco's GenericDraweeView, enabling persisting props across multiple view
 * update and consistent processing of both static and network images.
 *
 * <p>Prop setters only record state and set {@code mIsDirty}; {@link #maybeUpdateView()} pushes
 * the accumulated state into Fresco's hierarchy and controller in a single pass.
 */
public class ReactImageView extends GenericDraweeView {
public static final int REMOTE_IMAGE_FADE_DURATION_MS = 300;
// Scratch buffer for the four per-corner radii, in order TL, TR, BR, BL.
// NOTE(review): static mutable scratch shared by every instance — safe only if
// all users run on one thread; the postprocessor may run off the UI thread, confirm.
private static float[] sComputedCornerRadii = new float[4];
/*
 * Implementation note re rounded corners:
 *
 * Fresco's built-in rounded corners only work for 'cover' resize mode -
 * this is a limitation in Android itself. Fresco has a workaround for this, but
 * it requires knowing the background color.
 *
 * So for the other modes, we use a postprocessor.
 * Because the postprocessor uses a modified bitmap, that would just get cropped in
 * 'cover' mode, so we fall back to Fresco's normal implementation.
 */
private static final Matrix sMatrix = new Matrix();
private static final Matrix sInverse = new Matrix();
private ImageResizeMethod mResizeMethod = ImageResizeMethod.AUTO;
// Rounds the bitmap's corners for scale types that Fresco's native rounding can't handle.
private class RoundedCornerPostprocessor extends BasePostprocessor {
// Maps the four view-space corner radii into the source bitmap's coordinate
// space by inverting the scale-type transform. Output is the 8-element
// (x radius, y radius per corner) array format required by Path.addRoundRect.
void getRadii(Bitmap source, float[] computedCornerRadii, float[] mappedRadii) {
ScalingUtils.getTransform(
sMatrix,
new Rect(0, 0, source.getWidth(), source.getHeight()),
source.getWidth(),
source.getHeight(),
0.0f,
0.0f,
mScaleType);
sMatrix.invert(sInverse);
mappedRadii[0] = sInverse.mapRadius(computedCornerRadii[0]);
mappedRadii[1] = mappedRadii[0];
mappedRadii[2] = sInverse.mapRadius(computedCornerRadii[1]);
mappedRadii[3] = mappedRadii[2];
mappedRadii[4] = sInverse.mapRadius(computedCornerRadii[2]);
mappedRadii[5] = mappedRadii[4];
mappedRadii[6] = sInverse.mapRadius(computedCornerRadii[3]);
mappedRadii[7] = mappedRadii[6];
}
@Override
public void process(Bitmap output, Bitmap source) {
cornerRadii(sComputedCornerRadii);
output.setHasAlpha(true);
// No rounding requested for any corner: plain copy via the default implementation.
if (FloatUtil.floatsEqual(sComputedCornerRadii[0], 0f) &&
FloatUtil.floatsEqual(sComputedCornerRadii[1], 0f) &&
FloatUtil.floatsEqual(sComputedCornerRadii[2], 0f) &&
FloatUtil.floatsEqual(sComputedCornerRadii[3], 0f)) {
super.process(output, source);
return;
}
// Paint the source through a rounded-rect path so the corners stay transparent.
Paint paint = new Paint();
paint.setAntiAlias(true);
paint.setShader(new BitmapShader(source, Shader.TileMode.CLAMP, Shader.TileMode.CLAMP));
Canvas canvas = new Canvas(output);
float[] radii = new float[8];
getRadii(source, sComputedCornerRadii, radii);
Path pathForBorderRadius = new Path();
pathForBorderRadius.addRoundRect(
new RectF(0, 0, source.getWidth(), source.getHeight()),
radii,
Path.Direction.CW);
canvas.drawPath(pathForBorderRadius, paint);
}
}
// Candidate image sources from JS; the best fit is chosen in setSourceImage().
private final List<ImageSource> mSources;
private @Nullable ImageSource mImageSource;
private @Nullable ImageSource mCachedImageSource;
private @Nullable Drawable mLoadingImageDrawable;
private int mBorderColor;
private int mOverlayColor;
private float mBorderWidth;
private float mBorderRadius = YogaConstants.UNDEFINED;
private @Nullable float[] mBorderCornerRadii;
private ScalingUtils.ScaleType mScaleType;
// True when a prop changed since the last controller rebuild.
private boolean mIsDirty;
private final AbstractDraweeControllerBuilder mDraweeControllerBuilder;
private final RoundedCornerPostprocessor mRoundedCornerPostprocessor;
private @Nullable ControllerListener mControllerListener;
private @Nullable ControllerListener mControllerForTesting;
private final @Nullable Object mCallerContext;
// -1 means "use the default fade" (0 for local resources, 300 ms for remote).
private int mFadeDurationMs = -1;
private boolean mProgressiveRenderingEnabled;
// We can't specify rounding in XML, so have to do so here
private static GenericDraweeHierarchy buildHierarchy(Context context) {
return new GenericDraweeHierarchyBuilder(context.getResources())
.setRoundingParams(RoundingParams.fromCornersRadius(0))
.build();
}
public ReactImageView(
Context context,
AbstractDraweeControllerBuilder draweeControllerBuilder,
@Nullable Object callerContext) {
super(context, buildHierarchy(context));
mScaleType = ImageResizeMode.defaultValue();
mDraweeControllerBuilder = draweeControllerBuilder;
mRoundedCornerPostprocessor = new RoundedCornerPostprocessor();
mCallerContext = callerContext;
mSources = new LinkedList<>();
}
// Installs (or removes) a controller listener that forwards Fresco load
// progress to JS as ImageLoadEvents.
public void setShouldNotifyLoadEvents(boolean shouldNotify) {
if (!shouldNotify) {
mControllerListener = null;
} else {
final EventDispatcher mEventDispatcher = ((ReactContext) getContext()).
getNativeModule(UIManagerModule.class).getEventDispatcher();
mControllerListener = new BaseControllerListener<ImageInfo>() {
@Override
public void onSubmit(String id, Object callerContext) {
mEventDispatcher.dispatchEvent(
new ImageLoadEvent(getId(), ImageLoadEvent.ON_LOAD_START));
}
@Override
public void onFinalImageSet(
String id,
@Nullable final ImageInfo imageInfo,
@Nullable Animatable animatable) {
if (imageInfo != null) {
// NOTE(review): assumes mImageSource is still non-null when the final
// image lands; confirm it cannot be reset before this callback fires.
mEventDispatcher.dispatchEvent(
new ImageLoadEvent(getId(), ImageLoadEvent.ON_LOAD,
mImageSource.getSource(), imageInfo.getWidth(), imageInfo.getHeight()));
mEventDispatcher.dispatchEvent(
new ImageLoadEvent(getId(), ImageLoadEvent.ON_LOAD_END));
}
}
@Override
public void onFailure(String id, Throwable throwable) {
mEventDispatcher.dispatchEvent(
new ImageLoadEvent(getId(), ImageLoadEvent.ON_ERROR));
mEventDispatcher.dispatchEvent(
new ImageLoadEvent(getId(), ImageLoadEvent.ON_LOAD_END));
}
};
}
mIsDirty = true;
}
public void setBorderColor(int borderColor) {
mBorderColor = borderColor;
mIsDirty = true;
}
public void setOverlayColor(int overlayColor) {
mOverlayColor = overlayColor;
mIsDirty = true;
}
// Border width arrives in DIPs and is stored in device pixels.
public void setBorderWidth(float borderWidth) {
mBorderWidth = PixelUtil.toPixelFromDIP(borderWidth);
mIsDirty = true;
}
// Uniform radius for all corners; per-corner overrides take precedence (see cornerRadii).
public void setBorderRadius(float borderRadius) {
if (!FloatUtil.floatsEqual(mBorderRadius, borderRadius)) {
mBorderRadius = borderRadius;
mIsDirty = true;
}
}
// Per-corner radius override; position indexes the corner (0..3), lazily allocated.
public void setBorderRadius(float borderRadius, int position) {
if (mBorderCornerRadii == null) {
mBorderCornerRadii = new float[4];
Arrays.fill(mBorderCornerRadii, YogaConstants.UNDEFINED);
}
if (!FloatUtil.floatsEqual(mBorderCornerRadii[position], borderRadius)) {
mBorderCornerRadii[position] = borderRadius;
mIsDirty = true;
}
}
public void setScaleType(ScalingUtils.ScaleType scaleType) {
mScaleType = scaleType;
mIsDirty = true;
}
public void setResizeMethod(ImageResizeMethod resizeMethod) {
mResizeMethod = resizeMethod;
mIsDirty = true;
}
// Replaces the candidate source list from the JS "source" prop; width/height
// per entry are only needed to choose between multiple candidates.
public void setSource(@Nullable ReadableArray sources) {
mSources.clear();
if (sources != null && sources.size() != 0) {
// Optimize for the case where we have just one uri, case in which we don't need the sizes
if (sources.size() == 1) {
mSources.add(new ImageSource(getContext(), sources.getMap(0).getString("uri")));
} else {
for (int idx = 0; idx < sources.size(); idx++) {
ReadableMap source = sources.getMap(idx);
mSources.add(new ImageSource(
getContext(),
source.getString("uri"),
source.getDouble("width"),
source.getDouble("height")));
}
}
}
mIsDirty = true;
}
// Sets the placeholder shown while loading; wrapped in an AutoRotateDrawable
// (1s period) so it spins like an activity indicator.
public void setLoadingIndicatorSource(@Nullable String name) {
Drawable drawable = ResourceDrawableIdHelper.getInstance().getResourceDrawable(getContext(), name);
mLoadingImageDrawable =
drawable != null ? (Drawable) new AutoRotateDrawable(drawable, 1000) : null;
mIsDirty = true;
}
public void setProgressiveRenderingEnabled(boolean enabled) {
mProgressiveRenderingEnabled = enabled;
// no worth marking as dirty if it already rendered..
}
public void setFadeDuration(int durationMs) {
mFadeDurationMs = durationMs;
// no worth marking as dirty if it already rendered..
}
// Resolves the effective radius for each corner: per-corner override if set,
// otherwise the uniform border radius, otherwise 0.
private void cornerRadii(float[] computedCorners) {
float defaultBorderRadius = !YogaConstants.isUndefined(mBorderRadius) ? mBorderRadius : 0;
computedCorners[0] = mBorderCornerRadii != null && !YogaConstants.isUndefined(mBorderCornerRadii[0]) ? mBorderCornerRadii[0] : defaultBorderRadius;
computedCorners[1] = mBorderCornerRadii != null && !YogaConstants.isUndefined(mBorderCornerRadii[1]) ? mBorderCornerRadii[1] : defaultBorderRadius;
computedCorners[2] = mBorderCornerRadii != null && !YogaConstants.isUndefined(mBorderCornerRadii[2]) ? mBorderCornerRadii[2] : defaultBorderRadius;
computedCorners[3] = mBorderCornerRadii != null && !YogaConstants.isUndefined(mBorderCornerRadii[3]) ? mBorderCornerRadii[3] : defaultBorderRadius;
}
// Rebuilds the Fresco hierarchy and controller from accumulated props.
// No-op unless dirty; may bail out early to wait for a layout pass when
// the view size is needed (multi-source selection or resize).
public void maybeUpdateView() {
if (!mIsDirty) {
return;
}
if (hasMultipleSources() && (getWidth() <= 0 || getHeight() <= 0)) {
// If we need to choose from multiple uris but the size is not yet set, wait for layout pass
return;
}
setSourceImage();
if (mImageSource == null) {
return;
}
boolean doResize = shouldResize(mImageSource);
if (doResize && (getWidth() <= 0 || getHeight() <= 0)) {
// If need a resize and the size is not yet set, wait until the layout pass provides one
return;
}
GenericDraweeHierarchy hierarchy = getHierarchy();
hierarchy.setActualImageScaleType(mScaleType);
if (mLoadingImageDrawable != null) {
hierarchy.setPlaceholderImage(mLoadingImageDrawable, ScalingUtils.ScaleType.CENTER);
}
// For scale types other than CENTER_CROP/FOCUS_CROP, corner rounding is done
// by the postprocessor instead of Fresco's RoundingParams (see note above).
boolean usePostprocessorScaling =
mScaleType != ScalingUtils.ScaleType.CENTER_CROP &&
mScaleType != ScalingUtils.ScaleType.FOCUS_CROP;
RoundingParams roundingParams = hierarchy.getRoundingParams();
if (usePostprocessorScaling) {
roundingParams.setCornersRadius(0);
} else {
cornerRadii(sComputedCornerRadii);
roundingParams.setCornersRadii(sComputedCornerRadii[0], sComputedCornerRadii[1], sComputedCornerRadii[2], sComputedCornerRadii[3]);
}
roundingParams.setBorder(mBorderColor, mBorderWidth);
if (mOverlayColor != Color.TRANSPARENT) {
roundingParams.setOverlayColor(mOverlayColor);
} else {
// make sure the default rounding method is used.
roundingParams.setRoundingMethod(RoundingParams.RoundingMethod.BITMAP_ONLY);
}
hierarchy.setRoundingParams(roundingParams);
hierarchy.setFadeDuration(
mFadeDurationMs >= 0
? mFadeDurationMs
: mImageSource.isResource() ? 0 : REMOTE_IMAGE_FADE_DURATION_MS);
Postprocessor postprocessor = usePostprocessorScaling ? mRoundedCornerPostprocessor : null;
ResizeOptions resizeOptions = doResize ? new ResizeOptions(getWidth(), getHeight()) : null;
ImageRequest imageRequest = ImageRequestBuilder.newBuilderWithSource(mImageSource.getUri())
.setPostprocessor(postprocessor)
.setResizeOptions(resizeOptions)
.setAutoRotateEnabled(true)
.setProgressiveRenderingEnabled(mProgressiveRenderingEnabled)
.build();
// This builder is reused
mDraweeControllerBuilder.reset();
mDraweeControllerBuilder
.setAutoPlayAnimations(true)
.setCallerContext(mCallerContext)
.setOldController(getController())
.setImageRequest(imageRequest);
if (mCachedImageSource != null) {
// Use the already-cached variant as the low-res preview while the best one loads.
ImageRequest cachedImageRequest =
ImageRequestBuilder.newBuilderWithSource(mCachedImageSource.getUri())
.setPostprocessor(postprocessor)
.setResizeOptions(resizeOptions)
.setAutoRotateEnabled(true)
.setProgressiveRenderingEnabled(mProgressiveRenderingEnabled)
.build();
mDraweeControllerBuilder.setLowResImageRequest(cachedImageRequest);
}
if (mControllerListener != null && mControllerForTesting != null) {
ForwardingControllerListener combinedListener = new ForwardingControllerListener();
combinedListener.addListener(mControllerListener);
combinedListener.addListener(mControllerForTesting);
mDraweeControllerBuilder.setControllerListener(combinedListener);
} else if (mControllerForTesting != null) {
mDraweeControllerBuilder.setControllerListener(mControllerForTesting);
} else if (mControllerListener != null) {
mDraweeControllerBuilder.setControllerListener(mControllerListener);
}
setController(mDraweeControllerBuilder.build());
mIsDirty = false;
}
// VisibleForTesting
public void setControllerListener(ControllerListener controllerListener) {
mControllerForTesting = controllerListener;
mIsDirty = true;
maybeUpdateView();
}
@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
super.onSizeChanged(w, h, oldw, oldh);
if (w > 0 && h > 0) {
// A size is now available, so a deferred multi-source selection can proceed.
mIsDirty = mIsDirty || hasMultipleSources();
maybeUpdateView();
}
}
/**
 * ReactImageViews only render a single image.
 */
@Override
public boolean hasOverlappingRendering() {
return false;
}
private boolean hasMultipleSources() {
return mSources.size() > 1;
}
// Chooses mImageSource (and possibly mCachedImageSource) from the candidates.
private void setSourceImage() {
mImageSource = null;
if (mSources.isEmpty()) {
return;
}
if (hasMultipleSources()) {
MultiSourceResult multiSource =
MultiSourceHelper.getBestSourceForSize(getWidth(), getHeight(), mSources);
mImageSource = multiSource.getBestResult();
mCachedImageSource = multiSource.getBestResultInCache();
return;
}
// NOTE(review): mCachedImageSource is not cleared on this path, so a stale
// low-res request from an earlier multi-source set could linger — confirm.
mImageSource = mSources.get(0);
}
private boolean shouldResize(ImageSource imageSource) {
// Resizing is inferior to scaling. See http://frescolib.org/docs/resizing-rotating.html#_
// We resize here only for images likely to be from the device's camera, where the app developer
// has no control over the original size
if (mResizeMethod == ImageResizeMethod.AUTO) {
return
UriUtil.isLocalContentUri(imageSource.getUri()) ||
UriUtil.isLocalFileUri(imageSource.getUri());
} else if (mResizeMethod == ImageResizeMethod.RESIZE) {
return true;
} else {
return false;
}
}
}
| |
package prefuse.action.layout;
import java.awt.geom.Rectangle2D;
import java.util.Iterator;
import prefuse.Constants;
import prefuse.data.Table;
import prefuse.data.Tuple;
import prefuse.data.expression.Predicate;
import prefuse.data.query.NumberRangeModel;
import prefuse.data.query.ObjectRangeModel;
import prefuse.data.tuple.TupleSet;
import prefuse.util.DataLib;
import prefuse.util.MathLib;
import prefuse.util.ui.ValuedRangeModel;
import prefuse.visual.VisualItem;
/**
 * Layout Action that assigns positions along a single dimension (either x or
 * y) according to a specified data field. By default, the range of values
 * along the axis is automatically determined by the minimum and maximum
 * values of the data field. The range bounds can be manually set using the
 * {@link #setRangeModel(ValuedRangeModel)} method. Also, the set of items
 * processed by this layout can be filtered by providing a filtering
 * predicate ({@link #setFilter(Predicate)}).
 *
 * @author <a href="http://jheer.org">jeffrey heer</a>
 */
public class AxisLayout extends Layout {
// data field whose values drive the item positions
private String m_field;
private int m_scale = Constants.LINEAR_SCALE;
private int m_axis = Constants.X_AXIS;
private int m_type = Constants.UNKNOWN;
// true if a range model was explicitly supplied via setRangeModel();
// if false, the model is (re)derived from the data on each run
private boolean m_modelSet = false;
private ValuedRangeModel m_model = null;
private Predicate m_filter = null;
// screen coordinate range
private double m_min;
private double m_range;
// value range / distribution
private double[] m_dist = new double[2];
/**
 * Create a new AxisLayout. Defaults to using the x-axis.
 * @param group the data group to layout
 * @param field the data field upon which to base the layout
 */
public AxisLayout(String group, String field) {
super(group);
m_field = field;
}
/**
 * Create a new AxisLayout.
 * @param group the data group to layout
 * @param field the data field upon which to base the layout
 * @param axis the axis type, either {@link prefuse.Constants#X_AXIS}
 * or {@link prefuse.Constants#Y_AXIS}.
 */
public AxisLayout(String group, String field, int axis) {
this(group, field);
setAxis(axis);
}
/**
 * Create a new AxisLayout.
 * @param group the data group to layout
 * @param field the data field upon which to base the layout
 * @param axis the axis type, either {@link prefuse.Constants#X_AXIS}
 * or {@link prefuse.Constants#Y_AXIS}.
 * @param filter an optional predicate filter for limiting which items
 * to layout.
 */
public AxisLayout(String group, String field, int axis, Predicate filter) {
this(group, field, axis);
setFilter(filter);
}
// ------------------------------------------------------------------------
/**
 * Set the data field used by this axis layout action. The values of the
 * data field will determine the position of items along the axis. Note
 * that this method does not affect the other parameters of this action. In
 * particular, clients that have provided a custom range model for
 * setting the axis range may need to appropriately update the model
 * setting for use with the new data field setting.
 * @param field the name of the data field that determines the layout
 */
public void setDataField(String field) {
m_field = field;
// drop a derived model so it gets rebuilt for the new field;
// an explicitly-set model is kept (client's responsibility to update)
if ( !m_modelSet )
m_model = null;
}
/**
 * Get the data field used by this axis layout action. The values of the
 * data field determine the position of items along the axis.
 * @return the name of the data field that determines the layout
 */
public String getDataField() {
return m_field;
}
/**
 * Set the range model determining the span of the axis. This model controls
 * the minimum and maximum values of the layout, as provided by the
 * {@link prefuse.util.ui.ValuedRangeModel#getLowValue()} and
 * {@link prefuse.util.ui.ValuedRangeModel#getHighValue()} methods.
 * @param model the range model for the axis.
 */
public void setRangeModel(ValuedRangeModel model) {
m_model = model;
m_modelSet = (model != null);
}
/**
 * Get the range model determining the span of the axis. This model controls
 * the minimum and maximum values of the layout, as provided by the
 * {@link prefuse.util.ui.ValuedRangeModel#getLowValue()} and
 * {@link prefuse.util.ui.ValuedRangeModel#getHighValue()} methods.
 * @return the range model for the axis.
 */
public ValuedRangeModel getRangeModel() {
return m_model;
}
/**
 * Set a predicate filter to limit which items are considered for layout.
 * Only items for which the predicate returns a true value are included
 * in the layout computation.
 * @param filter the predicate filter to use. If null, no filtering
 * will be performed.
 */
public void setFilter(Predicate filter) {
m_filter = filter;
}
/**
 * Get the predicate filter to limit which items are considered for layout.
 * Only items for which the predicate returns a true value are included
 * in the layout computation.
 * @return the predicate filter used by this layout. If null, no filtering
 * is performed.
 */
public Predicate getFilter() {
return m_filter;
}
// ------------------------------------------------------------------------
/**
 * Returns the scale type used for the axis. This setting only applies
 * for numerical data types (i.e., when axis values are from a
 * <code>NumberValuedRange</code>).
 * @return the scale type. One of
 * {@link prefuse.Constants#LINEAR_SCALE},
 * {@link prefuse.Constants#SQRT_SCALE}, or
 * {@link Constants#LOG_SCALE}.
 */
public int getScale() {
return m_scale;
}
/**
 * Sets the scale type used for the axis. This setting only applies
 * for numerical data types (i.e., when axis values are from a
 * <code>NumberValuedRange</code>).
 * @param scale the scale type. One of
 * {@link prefuse.Constants#LINEAR_SCALE},
 * {@link prefuse.Constants#SQRT_SCALE}, or
 * {@link Constants#LOG_SCALE}.
 * @throws IllegalArgumentException if the scale value is unrecognized
 */
public void setScale(int scale) {
if ( scale < 0 || scale >= Constants.SCALE_COUNT )
throw new IllegalArgumentException(
"Unrecognized scale value: "+scale);
m_scale = scale;
}
/**
 * Return the axis type of this layout, either
 * {@link prefuse.Constants#X_AXIS} or {@link prefuse.Constants#Y_AXIS}.
 * @return the axis type of this layout.
 */
public int getAxis() {
return m_axis;
}
/**
 * Set the axis type of this layout.
 * @param axis the axis type to use for this layout, either
 * {@link prefuse.Constants#X_AXIS} or {@link prefuse.Constants#Y_AXIS}.
 * @throws IllegalArgumentException if the axis value is unrecognized
 */
public void setAxis(int axis) {
if ( axis < 0 || axis >= Constants.AXIS_COUNT )
throw new IllegalArgumentException(
"Unrecognized axis value: "+axis);
m_axis = axis;
}
/**
 * Return the data type used by this layout. This value is one of
 * {@link prefuse.Constants#NOMINAL}, {@link prefuse.Constants#ORDINAL},
 * {@link prefuse.Constants#NUMERICAL}, or
 * {@link prefuse.Constants#UNKNOWN}.
 * @return the data type used by this layout
 */
public int getDataType() {
return m_type;
}
/**
 * Set the data type used by this layout.
 * @param type the data type used by this layout, one of
 * {@link prefuse.Constants#NOMINAL}, {@link prefuse.Constants#ORDINAL},
 * {@link prefuse.Constants#NUMERICAL}, or
 * {@link prefuse.Constants#UNKNOWN}.
 * @throws IllegalArgumentException if the data type value is unrecognized
 */
public void setDataType(int type) {
if ( type < 0 || type >= Constants.DATATYPE_COUNT )
throw new IllegalArgumentException(
"Unrecognized data type value: "+type);
m_type = type;
}
// ------------------------------------------------------------------------
/**
 * @see prefuse.action.Action#run(double)
 */
public void run(double frac) {
TupleSet ts = m_vis.getGroup(m_group);
setMinMax();
// numerical fields get interpolated positions; everything else is ordinal
switch ( getDataType(ts) ) {
case Constants.NUMERICAL:
numericalLayout(ts);
break;
default:
ordinalLayout(ts);
}
}
/**
 * Retrieve the data type. If no type was set explicitly, it is inferred:
 * NUMERICAL when every tuple can yield a double for the field, ORDINAL
 * otherwise.
 */
protected int getDataType(TupleSet ts) {
if ( m_type == Constants.UNKNOWN ) {
boolean numbers = true;
if ( ts instanceof Table ) {
numbers = ((Table)ts).canGetDouble(m_field);
} else {
for ( Iterator it = ts.tuples(); it.hasNext(); ) {
if ( !((Tuple)it.next()).canGetDouble(m_field) ) {
numbers = false;
break;
}
}
}
if ( numbers ) {
return Constants.NUMERICAL;
} else {
return Constants.ORDINAL;
}
} else {
return m_type;
}
}
/**
 * Set the minimum and maximum pixel values.
 */
private void setMinMax() {
Rectangle2D b = getLayoutBounds();
if ( m_axis == Constants.X_AXIS ) {
m_min = b.getMinX();
m_range = b.getMaxX() - m_min;
} else {
// y-axis: anchor at the bottom edge with a negative range, so larger
// data values map to smaller (i.e. higher on screen) y coordinates
m_min = b.getMaxY();
m_range = b.getMinY() - m_min;
}
}
/**
 * Set the layout position of an item.
 * @param frac fraction of the axis span (0..1) at which to place the item
 */
protected void set(VisualItem item, double frac) {
double xOrY = m_min + frac*m_range;
if ( m_axis == Constants.X_AXIS ) {
setX(item, null, xOrY);
} else {
setY(item, null, xOrY);
}
}
/**
 * Compute a quantitative axis layout.
 */
protected void numericalLayout(TupleSet ts) {
if ( !m_modelSet ) {
// derive the value range from the data, updating the shared model in place
m_dist[0] = DataLib.min(ts, m_field).getDouble(m_field);
m_dist[1] = DataLib.max(ts, m_field).getDouble(m_field);
double lo = m_dist[0], hi = m_dist[1];
if ( m_model == null ) {
m_model = new NumberRangeModel(lo, hi, lo, hi);
} else {
((NumberRangeModel)m_model).setValueRange(lo, hi, lo, hi);
}
} else {
// an explicitly-set model must be number-valued on this code path
m_dist[0] = ((Number)m_model.getLowValue()).doubleValue();
m_dist[1] = ((Number)m_model.getHighValue()).doubleValue();
}
Iterator iter = m_vis.items(m_group, m_filter);
while ( iter.hasNext() ) {
VisualItem item = (VisualItem)iter.next();
double v = item.getDouble(m_field);
// interpolate under the configured scale (linear/sqrt/log)
double f = MathLib.interp(m_scale, v, m_dist);
set(item, f);
}
}
/**
 * Compute an ordinal axis layout.
 */
protected void ordinalLayout(TupleSet ts) {
if ( !m_modelSet) {
// derive the ordered set of distinct values from the data
Object[] array = DataLib.ordinalArray(ts, m_field);
if ( m_model == null ) {
m_model = new ObjectRangeModel(array);
} else {
((ObjectRangeModel)m_model).setValueRange(array);
}
}
ObjectRangeModel model = (ObjectRangeModel)m_model;
int start = model.getValue();
int end = start + model.getExtent();
// NOTE(review): if the model covers a single value, end == start and total
// is 0, making order/total NaN (0.0/0.0) — confirm callers never hit this.
double total = (double)(end-start);
Iterator iter = m_vis.items(m_group, m_filter);
while ( iter.hasNext() ) {
VisualItem item = (VisualItem)iter.next();
// position by the value's index within the visible ordinal window
int order = model.getIndex(item.get(m_field)) - start;
set(item, order/total);
}
}
} // end of class AxisLayout
| |
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.exec;
import static com.google.common.truth.Truth.assertThat;
import static com.google.devtools.build.lib.actions.FilesetManifest.RelativeSymlinkBehavior.ERROR;
import static com.google.devtools.build.lib.actions.FilesetManifest.RelativeSymlinkBehavior.IGNORE;
import static com.google.devtools.build.lib.actions.FilesetManifest.RelativeSymlinkBehavior.RESOLVE;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.fail;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.devtools.build.lib.actions.ActionInput;
import com.google.devtools.build.lib.actions.ActionInputHelper;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.Artifact.ArtifactExpander;
import com.google.devtools.build.lib.actions.Artifact.SpecialArtifact;
import com.google.devtools.build.lib.actions.Artifact.SpecialArtifactType;
import com.google.devtools.build.lib.actions.Artifact.TreeFileArtifact;
import com.google.devtools.build.lib.actions.ArtifactRoot;
import com.google.devtools.build.lib.actions.ArtifactRoot.RootType;
import com.google.devtools.build.lib.actions.EmptyRunfilesSupplier;
import com.google.devtools.build.lib.actions.FileArtifactValue;
import com.google.devtools.build.lib.actions.FilesetManifest;
import com.google.devtools.build.lib.actions.FilesetOutputSymlink;
import com.google.devtools.build.lib.actions.ForbiddenActionInputException;
import com.google.devtools.build.lib.actions.RunfilesSupplier;
import com.google.devtools.build.lib.actions.Spawn;
import com.google.devtools.build.lib.actions.cache.VirtualActionInput;
import com.google.devtools.build.lib.actions.util.ActionsTestUtil;
import com.google.devtools.build.lib.analysis.Runfiles;
import com.google.devtools.build.lib.analysis.util.AnalysisTestUtil;
import com.google.devtools.build.lib.exec.util.FakeActionInputFileCache;
import com.google.devtools.build.lib.exec.util.SpawnBuilder;
import com.google.devtools.build.lib.vfs.DigestHashFunction;
import com.google.devtools.build.lib.vfs.FileSystem;
import com.google.devtools.build.lib.vfs.FileSystemUtils;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.Root;
import com.google.devtools.build.lib.vfs.inmemoryfs.InMemoryFileSystem;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests for {@link SpawnInputExpander}. */
@RunWith(JUnit4.class)
public class SpawnInputExpanderTest {
  // Arbitrary digest bytes used when registering fake file metadata.
  private static final byte[] FAKE_DIGEST = new byte[] {1, 2, 3, 4};
  // Expander that fails the test if any expansion is ever attempted.
  private static final ArtifactExpander NO_ARTIFACT_EXPANDER =
      (a, b) -> fail("expected no interactions");
  private final FileSystem fs = new InMemoryFileSystem(DigestHashFunction.SHA256);
  private final Path execRoot = fs.getPath("/root");
  private final ArtifactRoot rootDir = ArtifactRoot.asDerivedRoot(execRoot, RootType.Output, "out");
  // Both fields below are reassigned by individual tests as needed.
  private SpawnInputExpander expander = new SpawnInputExpander(execRoot, /*strict=*/ true);
  private Map<PathFragment, ActionInput> inputMappings = new HashMap<>();
  @Test
  public void testEmptyRunfiles() throws Exception {
    RunfilesSupplier supplier = EmptyRunfilesSupplier.INSTANCE;
    FakeActionInputFileCache mockCache = new FakeActionInputFileCache();
    expander.addRunfilesToInputs(
        inputMappings, supplier, mockCache, NO_ARTIFACT_EXPANDER, PathFragment.EMPTY_FRAGMENT);
    assertThat(inputMappings).isEmpty();
  }
  @Test
  public void testRunfilesSingleFile() throws Exception {
    Artifact artifact =
        ActionsTestUtil.createArtifact(
            ArtifactRoot.asSourceRoot(Root.fromPath(fs.getPath("/root"))),
            fs.getPath("/root/dir/file"));
    Runfiles runfiles = new Runfiles.Builder("workspace").addArtifact(artifact).build();
    RunfilesSupplier supplier =
        AnalysisTestUtil.createRunfilesSupplier(PathFragment.create("runfiles"), runfiles);
    FakeActionInputFileCache mockCache = new FakeActionInputFileCache();
    mockCache.put(
        artifact,
        FileArtifactValue.createForNormalFile(
            FAKE_DIGEST, /*proxy=*/ null, 0L, /*isShareable=*/ true));
    expander.addRunfilesToInputs(
        inputMappings, supplier, mockCache, NO_ARTIFACT_EXPANDER, PathFragment.EMPTY_FRAGMENT);
    assertThat(inputMappings).hasSize(1);
    assertThat(inputMappings)
        .containsEntry(PathFragment.create("runfiles/workspace/dir/file"), artifact);
  }
  @Test
  public void testRunfilesWithFileset() throws Exception {
    Artifact artifact = createFilesetArtifact("foo/biz/fs_out");
    Runfiles runfiles = new Runfiles.Builder("workspace").addArtifact(artifact).build();
    RunfilesSupplier supplier =
        AnalysisTestUtil.createRunfilesSupplier(PathFragment.create("runfiles"), runfiles);
    FakeActionInputFileCache mockCache = new FakeActionInputFileCache();
    mockCache.put(
        artifact,
        FileArtifactValue.createForNormalFile(
            FAKE_DIGEST, /*proxy=*/ null, 0L, /*isShareable=*/ true));
    // Only fileset expansion is expected here; tree expansion must not happen.
    ArtifactExpander filesetExpander =
        new ArtifactExpander() {
          @Override
          public void expand(Artifact artifact, Collection<? super Artifact> output) {
            throw new IllegalStateException("Unexpected tree expansion");
          }
          @Override
          public ImmutableList<FilesetOutputSymlink> getFileset(Artifact artifact) {
            return ImmutableList.of(
                FilesetOutputSymlink.createForTesting(
                    PathFragment.create("zizz"),
                    PathFragment.create("/foo/fake_exec/xyz/zizz"),
                    PathFragment.create("/foo/fake_exec/")));
          }
        };
    expander.addRunfilesToInputs(
        inputMappings, supplier, mockCache, filesetExpander, PathFragment.EMPTY_FRAGMENT);
    assertThat(inputMappings).hasSize(1);
    assertThat(inputMappings)
        .containsEntry(
            PathFragment.create("runfiles/workspace/foo/biz/fs_out/zizz"),
            ActionInputHelper.fromPath("/root/xyz/zizz"));
  }
  @Test
  public void testRunfilesDirectoryStrict() {
    Artifact artifact =
        ActionsTestUtil.createArtifact(
            ArtifactRoot.asSourceRoot(Root.fromPath(fs.getPath("/root"))),
            fs.getPath("/root/dir/file"));
    Runfiles runfiles = new Runfiles.Builder("workspace").addArtifact(artifact).build();
    RunfilesSupplier supplier =
        AnalysisTestUtil.createRunfilesSupplier(PathFragment.create("runfiles"), runfiles);
    FakeActionInputFileCache mockCache = new FakeActionInputFileCache();
    mockCache.put(artifact, FileArtifactValue.createForDirectoryWithMtime(-1));
    ForbiddenActionInputException expected =
        assertThrows(
            ForbiddenActionInputException.class,
            () ->
                expander.addRunfilesToInputs(
                    inputMappings,
                    supplier,
                    mockCache,
                    NO_ARTIFACT_EXPANDER,
                    PathFragment.EMPTY_FRAGMENT));
    assertThat(expected).hasMessageThat().isEqualTo("Not a file: dir/file");
  }
  @Test
  public void testRunfilesDirectoryNonStrict() throws Exception {
    Artifact artifact =
        ActionsTestUtil.createArtifact(
            ArtifactRoot.asSourceRoot(Root.fromPath(fs.getPath("/root"))),
            fs.getPath("/root/dir/file"));
    Runfiles runfiles = new Runfiles.Builder("workspace").addArtifact(artifact).build();
    RunfilesSupplier supplier =
        AnalysisTestUtil.createRunfilesSupplier(PathFragment.create("runfiles"), runfiles);
    FakeActionInputFileCache mockCache = new FakeActionInputFileCache();
    mockCache.put(artifact, FileArtifactValue.createForDirectoryWithMtime(-1));
    // With strict=false the directory entry is mapped instead of rejected.
    expander = new SpawnInputExpander(execRoot, /*strict=*/ false);
    expander.addRunfilesToInputs(
        inputMappings, supplier, mockCache, NO_ARTIFACT_EXPANDER, PathFragment.EMPTY_FRAGMENT);
    assertThat(inputMappings).hasSize(1);
    assertThat(inputMappings)
        .containsEntry(PathFragment.create("runfiles/workspace/dir/file"), artifact);
  }
  @Test
  public void testRunfilesTwoFiles() throws Exception {
    Artifact artifact1 =
        ActionsTestUtil.createArtifact(
            ArtifactRoot.asSourceRoot(Root.fromPath(fs.getPath("/root"))),
            fs.getPath("/root/dir/file"));
    Artifact artifact2 =
        ActionsTestUtil.createArtifact(
            ArtifactRoot.asSourceRoot(Root.fromPath(fs.getPath("/root"))),
            fs.getPath("/root/dir/baz"));
    Runfiles runfiles =
        new Runfiles.Builder("workspace").addArtifact(artifact1).addArtifact(artifact2).build();
    RunfilesSupplier supplier =
        AnalysisTestUtil.createRunfilesSupplier(PathFragment.create("runfiles"), runfiles);
    FakeActionInputFileCache mockCache = new FakeActionInputFileCache();
    mockCache.put(
        artifact1,
        FileArtifactValue.createForNormalFile(
            FAKE_DIGEST, /*proxy=*/ null, 1L, /*isShareable=*/ true));
    mockCache.put(
        artifact2,
        FileArtifactValue.createForNormalFile(
            FAKE_DIGEST, /*proxy=*/ null, 12L, /*isShareable=*/ true));
    expander.addRunfilesToInputs(
        inputMappings, supplier, mockCache, NO_ARTIFACT_EXPANDER, PathFragment.EMPTY_FRAGMENT);
    assertThat(inputMappings).hasSize(2);
    assertThat(inputMappings)
        .containsEntry(PathFragment.create("runfiles/workspace/dir/file"), artifact1);
    assertThat(inputMappings)
        .containsEntry(PathFragment.create("runfiles/workspace/dir/baz"), artifact2);
  }
  @Test
  public void testRunfilesSymlink() throws Exception {
    Artifact artifact =
        ActionsTestUtil.createArtifact(
            ArtifactRoot.asSourceRoot(Root.fromPath(fs.getPath("/root"))),
            fs.getPath("/root/dir/file"));
    Runfiles runfiles =
        new Runfiles.Builder("workspace")
            .addSymlink(PathFragment.create("symlink"), artifact)
            .build();
    RunfilesSupplier supplier =
        AnalysisTestUtil.createRunfilesSupplier(PathFragment.create("runfiles"), runfiles);
    FakeActionInputFileCache mockCache = new FakeActionInputFileCache();
    mockCache.put(
        artifact,
        FileArtifactValue.createForNormalFile(
            FAKE_DIGEST, /*proxy=*/ null, 1L, /*isShareable=*/ true));
    expander.addRunfilesToInputs(
        inputMappings, supplier, mockCache, NO_ARTIFACT_EXPANDER, PathFragment.EMPTY_FRAGMENT);
    assertThat(inputMappings).hasSize(1);
    assertThat(inputMappings)
        .containsEntry(PathFragment.create("runfiles/workspace/symlink"), artifact);
  }
  @Test
  public void testRunfilesRootSymlink() throws Exception {
    Artifact artifact =
        ActionsTestUtil.createArtifact(
            ArtifactRoot.asSourceRoot(Root.fromPath(fs.getPath("/root"))),
            fs.getPath("/root/dir/file"));
    Runfiles runfiles =
        new Runfiles.Builder("workspace")
            .addRootSymlink(PathFragment.create("symlink"), artifact)
            .build();
    RunfilesSupplier supplier =
        AnalysisTestUtil.createRunfilesSupplier(PathFragment.create("runfiles"), runfiles);
    FakeActionInputFileCache mockCache = new FakeActionInputFileCache();
    mockCache.put(
        artifact,
        FileArtifactValue.createForNormalFile(
            FAKE_DIGEST, /*proxy=*/ null, 1L, /*isShareable=*/ true));
    expander.addRunfilesToInputs(
        inputMappings, supplier, mockCache, NO_ARTIFACT_EXPANDER, PathFragment.EMPTY_FRAGMENT);
    assertThat(inputMappings).hasSize(2);
    assertThat(inputMappings).containsEntry(PathFragment.create("runfiles/symlink"), artifact);
    // If there's no other entry, Runfiles adds an empty file in the workspace to make sure the
    // directory gets created.
    assertThat(inputMappings)
        .containsEntry(
            PathFragment.create("runfiles/workspace/.runfile"), VirtualActionInput.EMPTY_MARKER);
  }
  @Test
  public void testRunfilesWithTreeArtifacts() throws Exception {
    SpecialArtifact treeArtifact = createTreeArtifact("treeArtifact");
    assertThat(treeArtifact.isTreeArtifact()).isTrue();
    TreeFileArtifact file1 = TreeFileArtifact.createTreeOutput(treeArtifact, "file1");
    TreeFileArtifact file2 = TreeFileArtifact.createTreeOutput(treeArtifact, "file2");
    FileSystemUtils.writeContentAsLatin1(file1.getPath(), "foo");
    FileSystemUtils.writeContentAsLatin1(file2.getPath(), "bar");
    Runfiles runfiles = new Runfiles.Builder("workspace").addArtifact(treeArtifact).build();
    // Expands only the tree artifact under test into its two children.
    ArtifactExpander artifactExpander =
        (Artifact artifact, Collection<? super Artifact> output) -> {
          if (artifact.equals(treeArtifact)) {
            output.addAll(Arrays.asList(file1, file2));
          }
        };
    RunfilesSupplier supplier =
        AnalysisTestUtil.createRunfilesSupplier(PathFragment.create("runfiles"), runfiles);
    FakeActionInputFileCache fakeCache = new FakeActionInputFileCache();
    fakeCache.put(file1, FileArtifactValue.createForTesting(file1));
    fakeCache.put(file2, FileArtifactValue.createForTesting(file2));
    expander.addRunfilesToInputs(
        inputMappings, supplier, fakeCache, artifactExpander, PathFragment.EMPTY_FRAGMENT);
    assertThat(inputMappings).hasSize(2);
    assertThat(inputMappings)
        .containsEntry(PathFragment.create("runfiles/workspace/treeArtifact/file1"), file1);
    assertThat(inputMappings)
        .containsEntry(PathFragment.create("runfiles/workspace/treeArtifact/file2"), file2);
  }
  @Test
  public void testRunfilesWithTreeArtifactsInSymlinks() throws Exception {
    SpecialArtifact treeArtifact = createTreeArtifact("treeArtifact");
    assertThat(treeArtifact.isTreeArtifact()).isTrue();
    TreeFileArtifact file1 = TreeFileArtifact.createTreeOutput(treeArtifact, "file1");
    TreeFileArtifact file2 = TreeFileArtifact.createTreeOutput(treeArtifact, "file2");
    FileSystemUtils.writeContentAsLatin1(file1.getPath(), "foo");
    FileSystemUtils.writeContentAsLatin1(file2.getPath(), "bar");
    Runfiles runfiles =
        new Runfiles.Builder("workspace")
            .addSymlink(PathFragment.create("symlink"), treeArtifact)
            .build();
    ArtifactExpander artifactExpander =
        (Artifact artifact, Collection<? super Artifact> output) -> {
          if (artifact.equals(treeArtifact)) {
            output.addAll(Arrays.asList(file1, file2));
          }
        };
    RunfilesSupplier supplier =
        AnalysisTestUtil.createRunfilesSupplier(PathFragment.create("runfiles"), runfiles);
    FakeActionInputFileCache fakeCache = new FakeActionInputFileCache();
    fakeCache.put(file1, FileArtifactValue.createForTesting(file1));
    fakeCache.put(file2, FileArtifactValue.createForTesting(file2));
    expander.addRunfilesToInputs(
        inputMappings, supplier, fakeCache, artifactExpander, PathFragment.EMPTY_FRAGMENT);
    assertThat(inputMappings).hasSize(2);
    assertThat(inputMappings)
        .containsEntry(PathFragment.create("runfiles/workspace/symlink/file1"), file1);
    assertThat(inputMappings)
        .containsEntry(PathFragment.create("runfiles/workspace/symlink/file2"), file2);
  }
  @Test
  public void testTreeArtifactsInInputs() throws Exception {
    SpecialArtifact treeArtifact = createTreeArtifact("treeArtifact");
    assertThat(treeArtifact.isTreeArtifact()).isTrue();
    TreeFileArtifact file1 = TreeFileArtifact.createTreeOutput(treeArtifact, "file1");
    TreeFileArtifact file2 = TreeFileArtifact.createTreeOutput(treeArtifact, "file2");
    FileSystemUtils.writeContentAsLatin1(file1.getPath(), "foo");
    FileSystemUtils.writeContentAsLatin1(file2.getPath(), "bar");
    ArtifactExpander artifactExpander =
        (Artifact artifact, Collection<? super Artifact> output) -> {
          if (artifact.equals(treeArtifact)) {
            output.addAll(Arrays.asList(file1, file2));
          }
        };
    FakeActionInputFileCache fakeCache = new FakeActionInputFileCache();
    fakeCache.put(file1, FileArtifactValue.createForTesting(file1));
    fakeCache.put(file2, FileArtifactValue.createForTesting(file2));
    Spawn spawn = new SpawnBuilder("/bin/echo", "Hello World").withInput(treeArtifact).build();
    inputMappings =
        expander.getInputMapping(spawn, artifactExpander, PathFragment.EMPTY_FRAGMENT, fakeCache);
    assertThat(inputMappings).hasSize(2);
    assertThat(inputMappings).containsEntry(PathFragment.create("out/treeArtifact/file1"), file1);
    assertThat(inputMappings).containsEntry(PathFragment.create("out/treeArtifact/file2"), file2);
  }
  /** Creates a TREE special artifact with its generating action key set. */
  private SpecialArtifact createTreeArtifact(String relPath) throws IOException {
    SpecialArtifact treeArtifact = createSpecialArtifact(relPath, SpecialArtifactType.TREE);
    treeArtifact.setGeneratingActionKey(ActionsTestUtil.NULL_ACTION_LOOKUP_DATA);
    return treeArtifact;
  }
  /** Creates a FILESET special artifact under the output root. */
  private SpecialArtifact createFilesetArtifact(String relPath) throws IOException {
    return createSpecialArtifact(relPath, SpecialArtifactType.FILESET);
  }
  /** Creates a special artifact of the given type at out/{relPath}, creating its directory. */
  private SpecialArtifact createSpecialArtifact(String relPath, SpecialArtifactType type)
      throws IOException {
    String outputSegment = "out";
    Path outputDir = execRoot.getRelative(outputSegment);
    Path outputPath = outputDir.getRelative(relPath);
    outputPath.createDirectoryAndParents();
    ArtifactRoot derivedRoot = ArtifactRoot.asDerivedRoot(execRoot, RootType.Output, outputSegment);
    return new SpecialArtifact(
        derivedRoot,
        derivedRoot.getExecPath().getRelative(derivedRoot.getRoot().relativize(outputPath)),
        ActionsTestUtil.NULL_ARTIFACT_OWNER,
        type);
  }
  @Test
  public void testEmptyManifest() throws Exception {
    Map<Artifact, ImmutableList<FilesetOutputSymlink>> filesetMappings =
        ImmutableMap.of(createFileset("out"), ImmutableList.of());
    expander.addFilesetManifests(filesetMappings, inputMappings, PathFragment.EMPTY_FRAGMENT);
    assertThat(inputMappings).isEmpty();
  }
  @Test
  public void testManifestWithSingleFile() throws Exception {
    Map<Artifact, ImmutableList<FilesetOutputSymlink>> filesetMappings =
        ImmutableMap.of(
            createFileset("out"), ImmutableList.of(filesetSymlink("foo/bar", "/dir/file")));
    expander.addFilesetManifests(filesetMappings, inputMappings, PathFragment.EMPTY_FRAGMENT);
    assertThat(inputMappings)
        .containsExactly(
            PathFragment.create("out/foo/bar"), ActionInputHelper.fromPath("/dir/file"));
  }
  @Test
  public void testManifestWithTwoFiles() throws Exception {
    Map<Artifact, ImmutableList<FilesetOutputSymlink>> filesetMappings =
        ImmutableMap.of(
            createFileset("out"),
            ImmutableList.of(
                filesetSymlink("foo/bar", "/dir/file"), filesetSymlink("foo/baz", "/dir/file")));
    expander.addFilesetManifests(filesetMappings, inputMappings, PathFragment.EMPTY_FRAGMENT);
    assertThat(inputMappings)
        .containsExactly(
            PathFragment.create("out/foo/bar"), ActionInputHelper.fromPath("/dir/file"),
            PathFragment.create("out/foo/baz"), ActionInputHelper.fromPath("/dir/file"));
  }
  @Test
  public void testManifestWithDirectory() throws Exception {
    Map<Artifact, ImmutableList<FilesetOutputSymlink>> filesetMappings =
        ImmutableMap.of(createFileset("out"), ImmutableList.of(filesetSymlink("foo/bar", "/some")));
    expander.addFilesetManifests(filesetMappings, inputMappings, PathFragment.EMPTY_FRAGMENT);
    assertThat(inputMappings)
        .containsExactly(PathFragment.create("out/foo/bar"), ActionInputHelper.fromPath("/some"));
  }
  /** Builds a fileset symlink entry rooted at /root for testing. */
  private static FilesetOutputSymlink filesetSymlink(String from, String to) {
    return FilesetOutputSymlink.createForTesting(
        PathFragment.create(from), PathFragment.create(to), PathFragment.create("/root"));
  }
  // Manifest with one relative ("foo") and one absolute ("/root/bar") symlink target.
  private ImmutableMap<Artifact, ImmutableList<FilesetOutputSymlink>> simpleFilesetManifest() {
    return ImmutableMap.of(
        createFileset("out"),
        ImmutableList.of(
            filesetSymlink("workspace/bar", "foo"), filesetSymlink("workspace/foo", "/root/bar")));
  }
  /** Creates a FILESET special artifact at the given exec path under the derived root. */
  private SpecialArtifact createFileset(String execPath) {
    return new SpecialArtifact(
        rootDir,
        PathFragment.create(execPath),
        ActionsTestUtil.NULL_ARTIFACT_OWNER,
        SpecialArtifactType.FILESET);
  }
  @Test
  public void testManifestWithErrorOnRelativeSymlink() {
    expander = new SpawnInputExpander(execRoot, /*strict=*/ true, ERROR);
    FilesetManifest.ForbiddenRelativeSymlinkException e =
        assertThrows(
            FilesetManifest.ForbiddenRelativeSymlinkException.class,
            () ->
                expander.addFilesetManifests(
                    simpleFilesetManifest(), inputMappings, PathFragment.EMPTY_FRAGMENT));
    assertThat(e).hasMessageThat().contains("Fileset symlink foo is not absolute");
  }
  @Test
  public void testManifestWithIgnoredRelativeSymlink() throws Exception {
    expander = new SpawnInputExpander(execRoot, /*strict=*/ true, IGNORE);
    expander.addFilesetManifests(
        simpleFilesetManifest(), inputMappings, PathFragment.EMPTY_FRAGMENT);
    // Only the absolute symlink survives; the relative one is dropped.
    assertThat(inputMappings)
        .containsExactly(
            PathFragment.create("out/workspace/foo"), ActionInputHelper.fromPath("/root/bar"));
  }
  @Test
  public void testManifestWithResolvedRelativeSymlink() throws Exception {
    expander = new SpawnInputExpander(execRoot, /*strict=*/ true, RESOLVE);
    expander.addFilesetManifests(
        simpleFilesetManifest(), inputMappings, PathFragment.EMPTY_FRAGMENT);
    // The relative symlink "foo" resolves to its sibling's absolute target.
    assertThat(inputMappings)
        .containsExactly(
            PathFragment.create("out/workspace/bar"), ActionInputHelper.fromPath("/root/bar"),
            PathFragment.create("out/workspace/foo"), ActionInputHelper.fromPath("/root/bar"));
  }
}
| |
/*
* Copyright 2014 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.javascript.jscomp.CompilerOptions.LanguageMode;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Test case for {@link Es6RenameVariablesInParamLists}. */
@RunWith(JUnit4.class)
public final class Es6RenameVariablesInParamListsTest extends CompilerTestCase {
  @Override
  @Before
  public void setUp() throws Exception {
    super.setUp();
    // Inputs use ES2015 features (default parameters, let, arrow functions).
    setAcceptedLanguage(LanguageMode.ECMASCRIPT_2015);
    enableRunTypeCheckAfterProcessing();
  }
  @Override
  protected CompilerOptions getOptions() {
    CompilerOptions options = super.getOptions();
    // Output targets ES3.
    options.setLanguageOut(LanguageMode.ECMASCRIPT3);
    return options;
  }
  @Override
  protected CompilerPass getProcessor(Compiler compiler) {
    return new Es6RenameVariablesInParamLists(compiler);
  }
  // Body-level declarations that shadow names referenced from default-value
  // expressions are renamed with a $N suffix; unrelated shadows are untouched.
  @Test
  public void testRenameVar() {
    test("var x = 5; function f(y=x) { var x; }",
         "var x = 5; function f(y=x) { var x$0; }");
    test(
        lines(
            "function x() {}",
            "function f(y=(function y() { return x(); }())) {",
            "  var x; y++;",
            "}"),
        lines(
            "function x() {}",
            "function f(y=(function y() { return x(); }())) {",
            "  var x$0; y++;",
            "}"));
    test(
        lines(
            "function x() {}",
            "function f(y=(function y() { return x(); }())) {",
            "  var x;",
            "  { let x; x++; }",
            "  x++;",
            "}"),
        lines(
            "function x() {}",
            "function f(y=(function y() { return x(); }())) {",
            "  var x$0;",
            "  { let x; x++; }",
            "  x$0++;",
            "}"));
    test(
        lines(
            "function x() {}",
            "function f(y=(function y() { return x(); }())) {",
            "  var x; { x++ };",
            "}"),
        lines(
            "function x() {}",
            "function f(y=(function y() { return x(); }())) {",
            "  var x$0; { x$0++ };",
            "}"));
    test(
        lines(
            "function f(a = x, b = y) {",
            "  var y, x;",
            "  return function() { var x = () => y };",
            "}"),
        lines(
            "function f(a = x, b = y) {",
            "  var y$0, x$1;",
            "  return function() { var x = () => y$0 };",
            "}"));
    test(
        lines(
            "var x = 4;", "function f(a=x) { let x = 5; { let x = 99; } return a + x; }"),
        lines(
            "var x = 4;", "function f(a=x) { let x$0 = 5; { let x = 99; } return a + x$0; }"));
  }
  // Function declarations that shadow default-referenced names are renamed too.
  @Test
  public void testRenameFunction() {
    test(
        lines(
            "function x() {}", "function f(y=x()) {", "  x();", "  function x() {}", "}"),
        lines(
            "function x() {}", "function f(y=x()) {", "  x$0();", "  function x$0() {}", "}"));
  }
  @Test
  public void testGlobalDeclaration() {
    test(
        lines(
            "function x() {}",
            "function f(y=(function y() { w = 5; return w; }())) {",
            "  let x = w;",
            "  var w = 3;",
            "  return w;",
            "}"),
        lines(
            "function x() {}",
            "function f(y=(function y() { w = 5; return w; }())) {",
            "  let x = w$0;",
            "  var w$0 = 3;",
            "  return w$0;",
            "}"));
    // No renaming when the function body never declares the name.
    testSame(
        lines(
            "function x() {}",
            "function f(y=(function () { w = 5; return w; }())) {",
            "  w;",
            "  return w;",
            "}"));
    test(
        lines(
            "function x() {}",
            "function f(y=(function () { w = 5; return w; }())) {",
            "  w;",
            "  var w = 3;",
            "  return w;",
            "}"),
        lines(
            "function x() {}",
            "function f(y=(function () { w = 5; return w; }())) {",
            "  w$0;",
            "  var w$0 = 3;",
            "  return w$0;",
            "}"));
    test(
        lines(
            "function x() {}",
            "function f(y=(function () { w = 5; return w; }())) {",
            "  w;",
            "  let w = 3;",
            "  return w;",
            "}"),
        lines(
            "function x() {}",
            "function f(y=(function () { w = 5; return w; }())) {",
            "  w$0;",
            "  let w$0 = 3;",
            "  return w$0;",
            "}"));
  }
  // Each shadowing declaration gets its own distinct $N suffix.
  @Test
  public void testMultipleDefaultParams() {
    test(
        lines(
            "function x() {}",
            "var y = 1;",
            "function f(z=x, w=y) {",
            "  let x = y;",
            "  var y = 3;",
            "  return w;",
            "}"),
        lines(
            "function x() {}",
            "var y = 1;",
            "function f(z=x, w=y) {",
            "  let x$0 = y$1;",
            "  var y$1 = 3;",
            "  return w;",
            "}"));
    test(
        lines(
            "function x() {}",
            "var y = 1;",
            "function f(z=x, w=y) {",
            "  var x;",
            "  { let y; y++; }",
            "  { var y; y++; }",
            "  x++;",
            "}"),
        lines(
            "function x() {}",
            "var y = 1;",
            "function f(z=x, w=y) {",
            "  var x$0;",
            "  { let y; y++; }",
            "  { var y$1; y$1++; }",
            "  x$0++;",
            "}"));
  }
  @Test
  public void testArrow() {
    testSame("var x = true; var f = (a=x) => x;");
    test("var x = true; var f = (a=x) => { var x = false; return a; }",
         "var x = true; var f = (a=x) => { var x$0 = false; return a; }");
  }
}
| |
/*
* Copyright (c) 2008-2018 Haulmont.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package spec.cuba.web.screens.inspection;
import com.haulmont.bali.events.Subscription;
import com.haulmont.chile.core.datatypes.Datatype;
import com.haulmont.cuba.gui.components.Component;
import com.haulmont.cuba.gui.components.Frame;
import com.haulmont.cuba.gui.components.SizeUnit;
import com.haulmont.cuba.gui.components.TextField;
import com.haulmont.cuba.gui.components.data.ValueSource;
import com.haulmont.cuba.gui.icons.Icons;
import javax.annotation.Nullable;
import java.util.Collection;
import java.util.Collections;
import java.util.function.Consumer;
import java.util.function.Function;
/**
 * No-op {@link TextField} stub used by screen-inspection tests.
 * Every method returns a neutral default; the only state it records is the
 * last listener passed to {@link #addTextChangeListener(Consumer)}, which is
 * exposed for verification via {@link #getListener()}.
 */
public class TestTextField implements TextField<String> {
    // Last registered text-change listener, captured for later inspection.
    private Consumer<TextChangeEvent> listener;
    @Override
    public String getRawValue() {
        return null;
    }
    @Override
    public void commit() {
    }
    @Override
    public void discard() {
    }
    @Override
    public boolean isBuffered() {
        return false;
    }
    @Override
    public void setBuffered(boolean buffered) {
    }
    @Override
    public boolean isModified() {
        return false;
    }
    @Override
    public String getId() {
        return null;
    }
    @Override
    public void setId(String id) {
    }
    @Override
    public Component getParent() {
        return null;
    }
    @Override
    public void setParent(Component parent) {
    }
    @Override
    public boolean isEnabled() {
        return false;
    }
    @Override
    public void setEnabled(boolean enabled) {
    }
    @Override
    public boolean isResponsive() {
        return false;
    }
    @Override
    public void setResponsive(boolean responsive) {
    }
    @Override
    public boolean isVisible() {
        return false;
    }
    @Override
    public void setVisible(boolean visible) {
    }
    @Override
    public boolean isVisibleRecursive() {
        return false;
    }
    @Override
    public boolean isEnabledRecursive() {
        return false;
    }
    @Override
    public float getHeight() {
        return 0;
    }
    @Override
    public SizeUnit getHeightSizeUnit() {
        return null;
    }
    @Override
    public void setHeight(String height) {
    }
    @Override
    public float getWidth() {
        return 0;
    }
    @Override
    public SizeUnit getWidthSizeUnit() {
        return null;
    }
    @Override
    public void setWidth(String width) {
    }
    @Override
    public Alignment getAlignment() {
        return null;
    }
    @Override
    public void setAlignment(Alignment alignment) {
    }
    @Override
    public String getStyleName() {
        return null;
    }
    @Override
    public void setStyleName(String styleName) {
    }
    @Override
    public void addStyleName(String styleName) {
    }
    @Override
    public void removeStyleName(String styleName) {
    }
    @Override
    public <X> X unwrap(Class<X> internalComponentClass) {
        return null;
    }
    @Nullable
    @Override
    public <X> X unwrapOrNull(Class<X> internalComponentClass) {
        return null;
    }
    @Override
    public <X> void withUnwrapped(Class<X> internalComponentClass, Consumer<X> action) {
    }
    @Override
    public <X> X unwrapComposition(Class<X> internalCompositionClass) {
        return null;
    }
    @Nullable
    @Override
    public <X> X unwrapCompositionOrNull(Class<X> internalCompositionClass) {
        return null;
    }
    @Override
    public <X> void withUnwrappedComposition(Class<X> internalCompositionClass, Consumer<X> action) {
    }
    @Override
    public String getDescription() {
        return null;
    }
    @Override
    public void setDescription(String description) {
    }
    @Override
    public boolean isDescriptionAsHtml() {
        return false;
    }
    @Override
    public void setDescriptionAsHtml(boolean descriptionAsHtml) {
    }
    @Override
    public String getCaption() {
        return null;
    }
    @Override
    public void setCaption(String caption) {
    }
    @Override
    public boolean isCaptionAsHtml() {
        return false;
    }
    @Override
    public void setCaptionAsHtml(boolean captionAsHtml) {
    }
    @Override
    public void focus() {
    }
    @Override
    public int getTabIndex() {
        return 0;
    }
    @Override
    public void setTabIndex(int tabIndex) {
    }
    @Override
    public boolean isRequired() {
        return false;
    }
    @Override
    public void setRequired(boolean required) {
    }
    @Override
    public String getRequiredMessage() {
        return null;
    }
    @Override
    public void setRequiredMessage(String msg) {
    }
    @Override
    public void addValidator(Consumer validator) {
    }
    @Override
    public void removeValidator(Consumer validator) {
    }
    @Override
    public Collection getValidators() {
        return Collections.emptyList();
    }
    @Override
    public Frame getFrame() {
        return null;
    }
    @Override
    public void setFrame(Frame frame) {
    }
    @Override
    public boolean isEditable() {
        return false;
    }
    @Override
    public void setEditable(boolean editable) {
    }
    @Override
    public String getIcon() {
        return null;
    }
    @Override
    public void setIcon(String icon) {
    }
    @Override
    public void setIconFromSet(Icons.Icon icon) {
    }
    @Override
    public String getContextHelpText() {
        return null;
    }
    @Override
    public void setContextHelpText(String contextHelpText) {
    }
    @Override
    public boolean isContextHelpTextHtmlEnabled() {
        return false;
    }
    @Override
    public void setContextHelpTextHtmlEnabled(boolean enabled) {
    }
    @Override
    public Consumer<ContextHelpIconClickEvent> getContextHelpIconClickHandler() {
        return null;
    }
    @Override
    public void setContextHelpIconClickHandler(Consumer<ContextHelpIconClickEvent> handler) {
    }
    @Override
    public void setDatatype(Datatype<String> datatype) {
    }
    @Override
    public Datatype<String> getDatatype() {
        return null;
    }
    @Override
    public Function<String, String> getFormatter() {
        return null;
    }
    @Override
    public void setFormatter(Function<? super String, String> formatter) {
    }
    @Override
    public String getInputPrompt() {
        return null;
    }
    @Override
    public void setInputPrompt(String inputPrompt) {
    }
    @Override
    public String getValue() {
        return null;
    }
    @Override
    public void setValue(String value) {
    }
    @Override
    public Subscription addValueChangeListener(Consumer<ValueChangeEvent<String>> listener) {
        return null;
    }
    @Override
    public void removeValueChangeListener(Consumer<ValueChangeEvent<String>> listener) {
    }
    @Override
    public boolean isTrimming() {
        return false;
    }
    @Override
    public void setTrimming(boolean trimming) {
    }
    @Override
    public int getMaxLength() {
        return 0;
    }
    @Override
    public void setMaxLength(int maxLength) {
    }
    @Override
    public void setCursorPosition(int position) {
    }
    @Override
    public CaseConversion getCaseConversion() {
        return null;
    }
    @Override
    public void setCaseConversion(CaseConversion caseConversion) {
    }
    @Override
    public void selectAll() {
    }
    @Override
    public void setSelectionRange(int pos, int length) {
    }
    // Records the listener and returns a no-op subscription.
    @Override
    public Subscription addTextChangeListener(Consumer<TextChangeEvent> listener) {
        this.listener = listener;
        return () -> {};
    }
    @Override
    public void removeTextChangeListener(Consumer<TextChangeEvent> listener) {
    }
    @Override
    public int getTextChangeTimeout() {
        return 0;
    }
    @Override
    public void setTextChangeTimeout(int timeout) {
    }
    @Override
    public TextChangeEventMode getTextChangeEventMode() {
        return null;
    }
    @Override
    public void setTextChangeEventMode(TextChangeEventMode mode) {
    }
    @Override
    public boolean isValid() {
        return false;
    }
    @Override
    public void validate() {
    }
    @Override
    public void setValueSource(ValueSource<String> valueSource) {
    }
    @Override
    public ValueSource<String> getValueSource() {
        return null;
    }
    /** Returns the last listener registered via addTextChangeListener, or null. */
    public Consumer<TextChangeEvent> getListener() {
        return listener;
    }
    @Override
    public Subscription addEnterPressListener(Consumer<EnterPressEvent> listener) {
        return null;
    }
    @Override
    public void removeEnterPressListener(Consumer<EnterPressEvent> listener) {
    }
    @Override
    public void setHtmlName(String htmlName) {
    }
    @Override
    public String getHtmlName() {
        return null;
    }
    @Override
    public void setConversionErrorMessage(String conversionErrorMessage) {
    }
    @Override
    public String getConversionErrorMessage() {
        return null;
    }
    @Override
    public boolean isHtmlSanitizerEnabled() {
        return false;
    }
    @Override
    public void setHtmlSanitizerEnabled(boolean htmlSanitizerEnabled) {
    }
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.network.v2018_12_01.implementation;
import com.microsoft.azure.arm.resources.models.implementation.GroupableResourceCoreImpl;
import com.microsoft.azure.management.network.v2018_12_01.ExpressRouteCircuit;
import rx.Observable;
import com.microsoft.azure.management.network.v2018_12_01.ExpressRouteCircuitSku;
import com.microsoft.azure.management.network.v2018_12_01.ServiceProviderProvisioningState;
import java.util.List;
import com.microsoft.azure.management.network.v2018_12_01.ExpressRouteCircuitServiceProviderProperties;
import com.microsoft.azure.SubResource;
import java.util.ArrayList;
import com.microsoft.azure.management.network.v2018_12_01.ExpressRouteCircuitAuthorization;
import com.microsoft.azure.management.network.v2018_12_01.ExpressRouteCircuitPeering;
/**
 * Fluent-model wrapper around {@link ExpressRouteCircuitInner}.
 *
 * <p>Read accessors delegate straight to the inner model; {@code withXxx}
 * mutators write through to the inner model and return {@code this} so calls
 * chain during definition/update flows. Create and update both map to the
 * service's createOrUpdate operation.
 */
class ExpressRouteCircuitImpl extends GroupableResourceCoreImpl<ExpressRouteCircuit, ExpressRouteCircuitInner, ExpressRouteCircuitImpl, NetworkManager> implements ExpressRouteCircuit, ExpressRouteCircuit.Definition, ExpressRouteCircuit.Update {

    ExpressRouteCircuitImpl(String name, ExpressRouteCircuitInner inner, NetworkManager manager) {
        super(name, inner, manager);
    }

    @Override
    public Observable<ExpressRouteCircuit> createResourceAsync() {
        // Create is a PUT-style createOrUpdate against the circuits client.
        final ExpressRouteCircuitsInner circuitsClient = manager().inner().expressRouteCircuits();
        return circuitsClient.createOrUpdateAsync(resourceGroupName(), name(), inner())
                .map(innerToFluentMap(this));
    }

    @Override
    public Observable<ExpressRouteCircuit> updateResourceAsync() {
        // Update reuses the same createOrUpdate call as create.
        final ExpressRouteCircuitsInner circuitsClient = manager().inner().expressRouteCircuits();
        return circuitsClient.createOrUpdateAsync(resourceGroupName(), name(), inner())
                .map(innerToFluentMap(this));
    }

    @Override
    protected Observable<ExpressRouteCircuitInner> getInnerAsync() {
        final ExpressRouteCircuitsInner circuitsClient = manager().inner().expressRouteCircuits();
        return circuitsClient.getByResourceGroupAsync(resourceGroupName(), name());
    }

    @Override
    public boolean isInCreateMode() {
        // A resource that was never created has no ARM id yet.
        return inner().id() == null;
    }

    @Override
    public Boolean allowClassicOperations() {
        return inner().allowClassicOperations();
    }

    @Override
    public Boolean allowGlobalReach() {
        return inner().allowGlobalReach();
    }

    @Override
    public List<ExpressRouteCircuitAuthorization> authorizations() {
        // Wrap each inner authorization in its fluent implementation type.
        final List<ExpressRouteCircuitAuthorization> result = new ArrayList<>();
        if (inner().authorizations() != null) {
            for (ExpressRouteCircuitAuthorizationInner authorization : inner().authorizations()) {
                result.add(new ExpressRouteCircuitAuthorizationImpl(authorization, manager()));
            }
        }
        return result;
    }

    @Override
    public Double bandwidthInGbps() {
        return inner().bandwidthInGbps();
    }

    @Override
    public String circuitProvisioningState() {
        return inner().circuitProvisioningState();
    }

    @Override
    public String etag() {
        return inner().etag();
    }

    @Override
    public SubResource expressRoutePort() {
        return inner().expressRoutePort();
    }

    @Override
    public String gatewayManagerEtag() {
        return inner().gatewayManagerEtag();
    }

    @Override
    public Boolean globalReachEnabled() {
        return inner().globalReachEnabled();
    }

    @Override
    public List<ExpressRouteCircuitPeering> peerings() {
        // Wrap each inner peering in its fluent implementation type.
        final List<ExpressRouteCircuitPeering> result = new ArrayList<>();
        if (inner().peerings() != null) {
            for (ExpressRouteCircuitPeeringInner peering : inner().peerings()) {
                result.add(new ExpressRouteCircuitPeeringImpl(peering, manager()));
            }
        }
        return result;
    }

    @Override
    public String provisioningState() {
        return inner().provisioningState();
    }

    @Override
    public String serviceKey() {
        return inner().serviceKey();
    }

    @Override
    public String serviceProviderNotes() {
        return inner().serviceProviderNotes();
    }

    @Override
    public ExpressRouteCircuitServiceProviderProperties serviceProviderProperties() {
        return inner().serviceProviderProperties();
    }

    @Override
    public ServiceProviderProvisioningState serviceProviderProvisioningState() {
        return inner().serviceProviderProvisioningState();
    }

    @Override
    public ExpressRouteCircuitSku sku() {
        return inner().sku();
    }

    @Override
    public Integer stag() {
        return inner().stag();
    }

    @Override
    public ExpressRouteCircuitImpl withAllowClassicOperations(Boolean allowClassicOperations) {
        inner().withAllowClassicOperations(allowClassicOperations);
        return this;
    }

    @Override
    public ExpressRouteCircuitImpl withAllowGlobalReach(Boolean allowGlobalReach) {
        inner().withAllowGlobalReach(allowGlobalReach);
        return this;
    }

    @Override
    public ExpressRouteCircuitImpl withAuthorizations(List<ExpressRouteCircuitAuthorizationInner> authorizations) {
        inner().withAuthorizations(authorizations);
        return this;
    }

    @Override
    public ExpressRouteCircuitImpl withBandwidthInGbps(Double bandwidthInGbps) {
        inner().withBandwidthInGbps(bandwidthInGbps);
        return this;
    }

    @Override
    public ExpressRouteCircuitImpl withCircuitProvisioningState(String circuitProvisioningState) {
        inner().withCircuitProvisioningState(circuitProvisioningState);
        return this;
    }

    @Override
    public ExpressRouteCircuitImpl withExpressRoutePort(SubResource expressRoutePort) {
        inner().withExpressRoutePort(expressRoutePort);
        return this;
    }

    @Override
    public ExpressRouteCircuitImpl withGatewayManagerEtag(String gatewayManagerEtag) {
        inner().withGatewayManagerEtag(gatewayManagerEtag);
        return this;
    }

    @Override
    public ExpressRouteCircuitImpl withGlobalReachEnabled(Boolean globalReachEnabled) {
        inner().withGlobalReachEnabled(globalReachEnabled);
        return this;
    }

    @Override
    public ExpressRouteCircuitImpl withPeerings(List<ExpressRouteCircuitPeeringInner> peerings) {
        inner().withPeerings(peerings);
        return this;
    }

    @Override
    public ExpressRouteCircuitImpl withProvisioningState(String provisioningState) {
        inner().withProvisioningState(provisioningState);
        return this;
    }

    @Override
    public ExpressRouteCircuitImpl withServiceKey(String serviceKey) {
        inner().withServiceKey(serviceKey);
        return this;
    }

    @Override
    public ExpressRouteCircuitImpl withServiceProviderNotes(String serviceProviderNotes) {
        inner().withServiceProviderNotes(serviceProviderNotes);
        return this;
    }

    @Override
    public ExpressRouteCircuitImpl withServiceProviderProperties(ExpressRouteCircuitServiceProviderProperties serviceProviderProperties) {
        inner().withServiceProviderProperties(serviceProviderProperties);
        return this;
    }

    @Override
    public ExpressRouteCircuitImpl withServiceProviderProvisioningState(ServiceProviderProvisioningState serviceProviderProvisioningState) {
        inner().withServiceProviderProvisioningState(serviceProviderProvisioningState);
        return this;
    }

    @Override
    public ExpressRouteCircuitImpl withSku(ExpressRouteCircuitSku sku) {
        inner().withSku(sku);
        return this;
    }
}
| |
/*
* Copyright 2015 Persinity Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.persinity.common.db;
import static com.persinity.common.StringUtils.format;
import static com.persinity.common.collection.CollectionUtils.implode;
import static com.persinity.common.invariant.Invariant.notEmpty;
import java.sql.SQLException;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import com.google.common.base.Function;
import com.persinity.common.collection.CollectionUtils;
import com.persinity.common.db.metainfo.And;
import com.persinity.common.db.metainfo.Col;
import com.persinity.common.db.metainfo.Params;
import com.persinity.common.db.metainfo.constraint.Constraint;
/**
* @author dyordanov
*/
/**
 * {@link SqlStrategy} that renders SQL statement text in the Oracle dialect.
 *
 * <p>All statement builders are pure string formatting over the supplied
 * table/column metadata; no JDBC calls are made here. Precondition checks
 * use {@code assert} for internal builders and {@code notEmpty} for the
 * privilege/DDL helpers, matching the original contract.
 *
 * @author dyordanov
 */
public class OracleSqlStrategy implements SqlStrategy {

    public OracleSqlStrategy() {
        trimmer = new Trimmer();
    }

    @Override
    public String createTable(final String tableName, final Set<Col> cols, final String pkColName) {
        assert tableName != null && !tableName.isEmpty();
        assert cols != null && !cols.isEmpty();
        final StringBuilder ddl = new StringBuilder();
        ddl.append("CREATE TABLE ").append(tableName).append(" (");
        // Comma-separate the column definitions; the growing-separator trick
        // replaces a first-iteration flag.
        String separator = "";
        for (final Col col : cols) {
            ddl.append(separator);
            separator = ", ";
            ddl.append("\n\t\t\t").append(col.getName()).append(' ').append(col.getType());
            if (pkColName.equals(col.getName())) {
                // Inline primary-key constraint named after the table.
                ddl.append(" CONSTRAINT pk_").append(tableName).append(" PRIMARY KEY");
            }
        }
        return ddl.append("\n\t\t)").toString();
    }

    @Override
    public String disableConstraint(final Constraint cons) {
        assert cons != null;
        return format(CONSTRAINT_DISABLE_TEMPLATE, cons.getTable(), cons.getName());
    }

    @Override
    public String dropTable(final String tableName) {
        assert tableName != null && !tableName.isEmpty();
        return format("DROP TABLE {}", tableName);
    }

    @Override
    public String dropTrigger(final String triggerName) {
        assert triggerName != null && !triggerName.isEmpty();
        return format("DROP TRIGGER {}", triggerName);
    }

    @Override
    public String enableConstraint(final Constraint cons) {
        assert cons != null;
        return format(CONSTRAINT_ENABLE_TEMPLATE, cons.getTable(), cons.getName());
    }

    @Override
    public int getMaxNameLength() {
        // Conservative cap for generated identifiers; presumably leaves
        // headroom under Oracle's classic 30-byte identifier limit — confirm.
        return 25;
    }

    @Override
    public String insertStatement(final String tableName, final List<Col> cols) {
        assert tableName != null && !tableName.trim().isEmpty();
        assert cols != null && !cols.isEmpty();
        // One positional bind parameter per column, count enforced by Params.
        return format("INSERT INTO {} ({}) VALUES ({})", tableName, SqlUtil.buildColClause(cols),
                new Params(cols.size(), Params.ParameterCount.EXACT));
    }

    @Override
    public String selectAllStatement(final String tableName) {
        assert tableName != null && !tableName.trim().isEmpty();
        return format("SELECT * FROM {}", tableName);
    }

    @Override
    public String selectStatement(final String tableName, final List<Col> cols) {
        assert tableName != null && !tableName.trim().isEmpty();
        assert cols != null && !cols.isEmpty();
        final String projection = implode(cols, ",", new Function<Col, String>() {
            @Override
            public String apply(final Col c) {
                return c.getName();
            }
        });
        return format("SELECT {} FROM {}", projection, tableName);
    }

    @Override
    public String deleteStatement(final String tableName, final List<Col> ids) {
        assert tableName != null && !tableName.trim().isEmpty();
        assert ids != null && !ids.isEmpty();
        // Identifying columns become AND-ed "col = ?" predicates.
        return format("DELETE FROM {} WHERE {}", tableName, new And(SqlUtil.toEqualParams(ids)));
    }

    @Override
    public String deleteAllStatement(final String tableName) {
        assert tableName != null && !tableName.trim().isEmpty();
        return format("DELETE FROM {}", tableName);
    }

    @Override
    public String grantPrivs(final Collection<String> privs, final String onObject, final String toUser) {
        notEmpty(privs);
        notEmpty(onObject);
        notEmpty(toUser);
        return format(GRANT_PRIVS_TEMPLATE, CollectionUtils.implode(privs, ", "), onObject, toUser);
    }

    @Override
    public String tableConstraintsInfo() {
        return TAB_CONSTRAINTS_QRY;
    }

    @Override
    public String tableForPkInfo() {
        return TABNAME_FOR_PKNAME_QRY;
    }

    @Override
    public String dropPackage(final String packageName) {
        notEmpty(packageName);
        return format("DROP PACKAGE {}", packageName);
    }

    @Override
    public String updateStatement(final String tableName, final List<Col> cols, final List<Col> ids) {
        assert tableName != null && !tableName.trim().isEmpty();
        assert cols != null && !cols.isEmpty();
        assert ids != null && !ids.isEmpty();
        final String assignments = implode(cols, ", ", new Function<Col, String>() {
            @Override
            public String apply(final Col c) {
                return format("{} = ?", c.getName());
            }
        });
        return format("UPDATE {} SET {} WHERE {}", tableName, assignments,
                new And(SqlUtil.toEqualParams(ids)));
    }

    @Override
    public String createIndex(final String tableName, final String colName) {
        notEmpty(tableName);
        notEmpty(colName);
        // Trim the generated name so it fits the identifier length cap.
        final String indexName = trimmer.trim(format("index_{}_{}", tableName, colName), getMaxNameLength());
        return format("CREATE INDEX {} ON {} ({})", indexName, tableName, colName);
    }

    @Override
    public String count(final String colName) {
        return format("COUNT({})", colName);
    }

    @Override
    public String distinct(final String string) {
        return format("DISTINCT {}", string);
    }

    @Override
    public String max(final String colName) {
        // NVL maps the NULL produced by an empty result set to 0.
        return format("NVL(MAX({}), 0)", colName);
    }

    @Override
    public String min(final String colName) {
        return format("NVL(MIN({}), 0)", colName);
    }

    @Override
    public String mod(final String colName, final String divisor) {
        return format("MOD({}, {})", colName, divisor);
    }

    @Override
    public String hash(final List<Col> cols) {
        assert cols != null && !cols.isEmpty();
        // Concatenate NVL-protected columns and feed the result to ORA_HASH.
        final String concatenated = implode(cols, "||", new Function<Col, String>() {
            @Override
            public String apply(final Col c) {
                return format("NVL({}, 0)", c.getName());
            }
        });
        return format("ORA_HASH(''||{})", concatenated);
    }

    @Override
    public boolean isIntegrityConstraintViolation(final Throwable cause) {
        // SQLSTATE class 23 = integrity constraint violation.
        return extractSqlState(cause).startsWith("23");
    }

    @Override
    public boolean isAccessRuleViolation(final Throwable cause) {
        // SQLSTATE class 42 = syntax error or access rule violation.
        return extractSqlState(cause).startsWith("42");
    }

    /**
     * Returns the SQLSTATE carried by {@code cause}, or the empty string when
     * the throwable is not a {@link SQLException} or reports no state.
     */
    private String extractSqlState(final Throwable cause) {
        if (!(cause instanceof SQLException)) {
            return "";
        }
        final String sqlState = ((SQLException) cause).getSQLState();
        return sqlState != null ? sqlState : "";
    }

    private static final String GRANT_PRIVS_TEMPLATE = "GRANT {} ON {} TO {}";
    private static final String CONSTRAINT_ENABLE_TEMPLATE = "ALTER TABLE {} ENABLE CONSTRAINT {}";
    private static final String CONSTRAINT_DISABLE_TEMPLATE = "ALTER TABLE {} DISABLE CONSTRAINT {}";
    private static final String TAB_CONSTRAINTS_QRY =
            "SELECT c.constraint_name AS " + SqlStrategy.COL_CONSTRAINT_NAME + ", cc.table_name AS "
                    + SqlStrategy.COL_TABLE_NAME + ", cc.column_name AS " + SqlStrategy.COL_COLUMN_NAME
                    + ", c.r_constraint_name AS " + SqlStrategy.COL_REF_CONSTRAINT_NAME + " FROM user_constraints c "
                    + "INNER JOIN user_cons_columns cc "
                    + "ON (c.constraint_name = cc.constraint_name AND c.table_name = cc.table_name)"
                    + " WHERE c.table_name = ? AND c.constraint_type = ? " + "ORDER BY cc.position";
    private static final String TABNAME_FOR_PKNAME_QRY = "SELECT table_name AS " + SqlStrategy.COL_TABLE_NAME +
            " FROM user_constraints WHERE constraint_name = ?";
    private final Trimmer trimmer;
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.standard;
import static org.apache.nifi.processor.FlowFileFilter.FlowFileFilterResult.ACCEPT_AND_CONTINUE;
import static org.apache.nifi.processor.FlowFileFilter.FlowFileFilterResult.ACCEPT_AND_TERMINATE;
import static org.apache.nifi.processor.FlowFileFilter.FlowFileFilterResult.REJECT_AND_CONTINUE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Time;
import java.sql.Timestamp;
import java.sql.Types;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.time.LocalTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import javax.xml.bind.DatatypeConverter;
import org.apache.commons.lang3.RandomUtils;
import org.apache.nifi.controller.AbstractControllerService;
import org.apache.nifi.dbcp.DBCPService;
import org.apache.nifi.processor.FlowFileFilter;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.pattern.RollbackOnFailure;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.mockito.Mockito;
public class TestPutSQL {
private static final String createPersons = "CREATE TABLE PERSONS (id integer primary key, name varchar(100), code integer)";
private static final String createPersonsAutoId = "CREATE TABLE PERSONS_AI (id INTEGER NOT NULL GENERATED ALWAYS AS IDENTITY (START WITH 1), name VARCHAR(100), code INTEGER check(code <= 100))";
@ClassRule
public static TemporaryFolder folder = new TemporaryFolder();
/**
* Setting up connection pooling is an expensive operation,
* so we do it only once and reuse the MockDBCPService in each test.
*/
static protected DBCPService service;
@BeforeClass
public static void setupClass() throws ProcessException, SQLException {
// Route Derby's noisy error log away from the console.
System.setProperty("derby.stream.error.file", "target/derby.log");
final File tempDir = folder.getRoot();
final File dbDir = new File(tempDir, "db");
service = new MockDBCPService(dbDir.getAbsolutePath());
// Create the two shared tables once; individual tests recreate them
// whenever they need a clean slate.
try (final Connection conn = service.getConnection()) {
try (final Statement stmt = conn.createStatement()) {
stmt.executeUpdate(createPersons);
stmt.executeUpdate(createPersonsAutoId);
}
}
}
// Happy path: raw SQL in the FlowFile content (no sql.args.* attributes)
// is executed directly — first an INSERT, then an UPDATE — each verified by
// querying PERSONS through the shared connection pool.
@Test
public void testDirectStatements() throws InitializationException, ProcessException, SQLException, IOException {
final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
runner.addControllerService("dbcp", service);
runner.enableControllerService(service);
runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");
// recreateTable is presumably a helper defined later in this class that
// drops and recreates PERSONS so the test starts empty — confirm.
recreateTable("PERSONS", createPersons);
runner.enqueue("INSERT INTO PERSONS (ID, NAME, CODE) VALUES (1, 'Mark', 84)".getBytes());
runner.run();
runner.assertAllFlowFilesTransferred(PutSQL.REL_SUCCESS, 1);
try (final Connection conn = service.getConnection()) {
try (final Statement stmt = conn.createStatement()) {
final ResultSet rs = stmt.executeQuery("SELECT * FROM PERSONS");
assertTrue(rs.next());
assertEquals(1, rs.getInt(1));
assertEquals("Mark", rs.getString(2));
assertEquals(84, rs.getInt(3));
assertFalse(rs.next());
}
}
runner.enqueue("UPDATE PERSONS SET NAME='George' WHERE ID=1".getBytes());
runner.run();
try (final Connection conn = service.getConnection()) {
try (final Statement stmt = conn.createStatement()) {
final ResultSet rs = stmt.executeQuery("SELECT * FROM PERSONS");
assertTrue(rs.next());
assertEquals(1, rs.getInt(1));
assertEquals("George", rs.getString(2));
assertEquals(84, rs.getInt(3));
assertFalse(rs.next());
}
}
}
// Verifies that with OBTAIN_GENERATED_KEYS enabled the processor writes the
// database-generated identity value into the sql.generated.key attribute
// of the FlowFile routed to success.
@Test
public void testInsertWithGeneratedKeys() throws InitializationException, ProcessException, SQLException, IOException {
final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
runner.addControllerService("dbcp", service);
runner.enableControllerService(service);
runner.setProperty(PutSQL.OBTAIN_GENERATED_KEYS, "true");
runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");
// Fresh table so the identity column restarts at 1.
recreateTable("PERSONS_AI",createPersonsAutoId);
runner.enqueue("INSERT INTO PERSONS_AI (NAME, CODE) VALUES ('Mark', 84)".getBytes());
runner.run();
runner.assertAllFlowFilesTransferred(PutSQL.REL_SUCCESS, 1);
final MockFlowFile mff = runner.getFlowFilesForRelationship(PutSQL.REL_SUCCESS).get(0);
mff.assertAttributeEquals("sql.generated.key", "1");
try (final Connection conn = service.getConnection()) {
try (final Statement stmt = conn.createStatement()) {
final ResultSet rs = stmt.executeQuery("SELECT * FROM PERSONS_AI");
assertTrue(rs.next());
assertEquals(1, rs.getInt(1));
assertEquals("Mark", rs.getString(2));
assertEquals(84, rs.getInt(3));
assertFalse(rs.next());
}
}
}
// With transaction support (the default) the whole batch shares one
// transaction, so a single malformed statement routes all four FlowFiles
// to the failure relationship.
@Test
public void testFailInMiddleWithBadStatementAndSupportTransaction() throws InitializationException, ProcessException, SQLException, IOException {
final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
testFailInMiddleWithBadStatement(runner);
runner.run();
runner.assertTransferCount(PutSQL.REL_FAILURE, 4);
runner.assertTransferCount(PutSQL.REL_SUCCESS, 0);
}
// Without transaction support each FlowFile is handled independently:
// only the malformed statement fails, the other three succeed.
@Test
public void testFailInMiddleWithBadStatementAndNotSupportTransaction() throws InitializationException, ProcessException, SQLException, IOException {
final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
runner.setProperty(PutSQL.SUPPORT_TRANSACTIONS, "false");
testFailInMiddleWithBadStatement(runner);
runner.run();
runner.assertTransferCount(PutSQL.REL_FAILURE, 1);
runner.assertTransferCount(PutSQL.REL_SUCCESS, 3);
}
// Shared setup: queues three valid INSERTs with one syntactically broken
// statement in second position.
private void testFailInMiddleWithBadStatement(final TestRunner runner) throws InitializationException {
runner.addControllerService("dbcp", service);
runner.enableControllerService(service);
runner.setProperty(PutSQL.OBTAIN_GENERATED_KEYS, "false");
runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");
runner.enqueue("INSERT INTO PERSONS_AI (NAME, CODE) VALUES ('Mark', 84)".getBytes());
runner.enqueue("INSERT INTO PERSONS_AI".getBytes()); // intentionally wrong syntax
runner.enqueue("INSERT INTO PERSONS_AI (NAME, CODE) VALUES ('Tom', 3)".getBytes());
runner.enqueue("INSERT INTO PERSONS_AI (NAME, CODE) VALUES ('Harry', 44)".getBytes());
}
// With ROLLBACK_ON_FAILURE the processor must throw instead of routing:
// the TestRunner surfaces that as an AssertionError wrapping a
// ProcessException, and both relationships stay empty.
@Test
public void testFailInMiddleWithBadStatementRollbackOnFailure() throws InitializationException, ProcessException, SQLException, IOException {
final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
runner.addControllerService("dbcp", service);
runner.enableControllerService(service);
runner.setProperty(PutSQL.OBTAIN_GENERATED_KEYS, "false");
runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");
runner.setProperty(RollbackOnFailure.ROLLBACK_ON_FAILURE, "true");
runner.enqueue("INSERT INTO PERSONS_AI (NAME, CODE) VALUES ('Mark', 84)".getBytes());
runner.enqueue("INSERT INTO PERSONS_AI".getBytes()); // intentionally wrong syntax
runner.enqueue("INSERT INTO PERSONS_AI (NAME, CODE) VALUES ('Tom', 3)".getBytes());
runner.enqueue("INSERT INTO PERSONS_AI (NAME, CODE) VALUES ('Harry', 44)".getBytes());
try {
runner.run();
fail("ProcessException should be thrown");
} catch (AssertionError e) {
assertTrue(e.getCause() instanceof ProcessException);
runner.assertTransferCount(PutSQL.REL_FAILURE, 0);
runner.assertTransferCount(PutSQL.REL_SUCCESS, 0);
}
}
// Bad parameter TYPE (VARCHAR "hello" bound where the statement inserts
// into the integer CODE column): without transactions only the bad
// FlowFile fails; the remaining three commit individually.
@Test
public void testFailInMiddleWithBadParameterTypeAndNotSupportTransaction() throws InitializationException, ProcessException, SQLException, IOException {
final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
runner.setProperty(PutSQL.SUPPORT_TRANSACTIONS, "false");
testFailInMiddleWithBadParameterType(runner);
runner.run();
runner.assertTransferCount(PutSQL.REL_FAILURE, 1);
runner.assertTransferCount(PutSQL.REL_SUCCESS, 3);
}
// Same input, but with the shared transaction all four go to failure.
@Test
public void testFailInMiddleWithBadParameterTypeAndSupportTransaction() throws InitializationException, ProcessException, SQLException, IOException {
final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
testFailInMiddleWithBadParameterType(runner);
runner.run();
runner.assertTransferCount(PutSQL.REL_FAILURE, 4);
runner.assertTransferCount(PutSQL.REL_SUCCESS, 0);
}
// Shared setup: three good INTEGER bindings around one VARCHAR binding
// whose value cannot be used for the integer column.
private void testFailInMiddleWithBadParameterType(final TestRunner runner) throws InitializationException, ProcessException, SQLException, IOException {
runner.addControllerService("dbcp", service);
runner.enableControllerService(service);
runner.setProperty(PutSQL.OBTAIN_GENERATED_KEYS, "false");
runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");
final Map<String, String> goodAttributes = new HashMap<>();
goodAttributes.put("sql.args.1.type", String.valueOf(Types.INTEGER));
goodAttributes.put("sql.args.1.value", "84");
final Map<String, String> badAttributes = new HashMap<>();
badAttributes.put("sql.args.1.type", String.valueOf(Types.VARCHAR));
badAttributes.put("sql.args.1.value", "hello");
final byte[] data = "INSERT INTO PERSONS_AI (NAME, CODE) VALUES ('Mark', ?)".getBytes();
runner.enqueue(data, goodAttributes);
runner.enqueue(data, badAttributes);
runner.enqueue(data, goodAttributes);
runner.enqueue(data, goodAttributes);
}
// Bad parameter type with ROLLBACK_ON_FAILURE: the processor must throw
// (AssertionError wrapping ProcessException) and route nothing.
@Test
public void testFailInMiddleWithBadParameterTypeRollbackOnFailure() throws InitializationException, ProcessException, SQLException, IOException {
final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
runner.addControllerService("dbcp", service);
runner.enableControllerService(service);
runner.setProperty(PutSQL.OBTAIN_GENERATED_KEYS, "false");
runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");
runner.setProperty(RollbackOnFailure.ROLLBACK_ON_FAILURE, "true");
final Map<String, String> goodAttributes = new HashMap<>();
goodAttributes.put("sql.args.1.type", String.valueOf(Types.INTEGER));
goodAttributes.put("sql.args.1.value", "84");
final Map<String, String> badAttributes = new HashMap<>();
badAttributes.put("sql.args.1.type", String.valueOf(Types.VARCHAR));
badAttributes.put("sql.args.1.value", "hello");
final byte[] data = "INSERT INTO PERSONS_AI (NAME, CODE) VALUES ('Mark', ?)".getBytes();
runner.enqueue(data, goodAttributes);
runner.enqueue(data, badAttributes);
runner.enqueue(data, goodAttributes);
runner.enqueue(data, goodAttributes);
try {
runner.run();
fail("ProcessException should be thrown");
} catch (AssertionError e) {
assertTrue(e.getCause() instanceof ProcessException);
runner.assertTransferCount(PutSQL.REL_FAILURE, 0);
runner.assertTransferCount(PutSQL.REL_SUCCESS, 0);
}
}
// Bad parameter VALUE (9999 violates the "code <= 100" CHECK constraint on
// PERSONS_AI): with the shared transaction the whole batch is retried —
// all four FlowFiles go to retry and the table stays empty.
@Test
public void testFailInMiddleWithBadParameterValueAndSupportTransaction() throws InitializationException, ProcessException, SQLException, IOException {
final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
testFailInMiddleWithBadParameterValue(runner);
runner.run();
runner.assertTransferCount(PutSQL.REL_SUCCESS, 0);
runner.assertTransferCount(PutSQL.REL_FAILURE, 0);
runner.assertTransferCount(PutSQL.REL_RETRY, 4);
try (final Connection conn = service.getConnection()) {
try (final Statement stmt = conn.createStatement()) {
final ResultSet rs = stmt.executeQuery("SELECT * FROM PERSONS_AI");
assertFalse(rs.next());
}
}
}
// Without transactions, FlowFiles before the bad one commit (1 success),
// the bad one fails, and the rest after it are retried; exactly one row
// must have been written.
@Test
public void testFailInMiddleWithBadParameterValueAndNotSupportTransaction() throws InitializationException, ProcessException, SQLException, IOException {
final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
runner.setProperty(PutSQL.SUPPORT_TRANSACTIONS, "false");
testFailInMiddleWithBadParameterValue(runner);
runner.run();
runner.assertTransferCount(PutSQL.REL_SUCCESS, 1);
runner.assertTransferCount(PutSQL.REL_FAILURE, 1);
runner.assertTransferCount(PutSQL.REL_RETRY, 2);
try (final Connection conn = service.getConnection()) {
try (final Statement stmt = conn.createStatement()) {
final ResultSet rs = stmt.executeQuery("SELECT * FROM PERSONS_AI");
assertTrue(rs.next());
assertEquals(1, rs.getInt(1));
assertEquals("Mark", rs.getString(2));
assertEquals(84, rs.getInt(3));
assertFalse(rs.next());
}
}
}
// Shared setup: fresh PERSONS_AI, three good bindings (84) around one
// binding (9999) that violates the CHECK constraint at execution time.
private void testFailInMiddleWithBadParameterValue(final TestRunner runner) throws InitializationException, ProcessException, SQLException, IOException {
runner.addControllerService("dbcp", service);
runner.enableControllerService(service);
runner.setProperty(PutSQL.OBTAIN_GENERATED_KEYS, "false");
runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");
recreateTable("PERSONS_AI",createPersonsAutoId);
final Map<String, String> goodAttributes = new HashMap<>();
goodAttributes.put("sql.args.1.type", String.valueOf(Types.INTEGER));
goodAttributes.put("sql.args.1.value", "84");
final Map<String, String> badAttributes = new HashMap<>();
badAttributes.put("sql.args.1.type", String.valueOf(Types.INTEGER));
badAttributes.put("sql.args.1.value", "9999");
final byte[] data = "INSERT INTO PERSONS_AI (NAME, CODE) VALUES ('Mark', ?)".getBytes();
runner.enqueue(data, goodAttributes);
runner.enqueue(data, badAttributes);
runner.enqueue(data, goodAttributes);
runner.enqueue(data, goodAttributes);
}
// Bad parameter value (violates the CHECK constraint) combined with
// ROLLBACK_ON_FAILURE: the processor must throw, route nothing, and leave
// the freshly recreated table empty.
@Test
public void testFailInMiddleWithBadParameterValueRollbackOnFailure() throws InitializationException, ProcessException, SQLException, IOException {
final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
runner.addControllerService("dbcp", service);
runner.enableControllerService(service);
runner.setProperty(PutSQL.OBTAIN_GENERATED_KEYS, "false");
runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");
runner.setProperty(RollbackOnFailure.ROLLBACK_ON_FAILURE, "true");
recreateTable("PERSONS_AI",createPersonsAutoId);
final Map<String, String> goodAttributes = new HashMap<>();
goodAttributes.put("sql.args.1.type", String.valueOf(Types.INTEGER));
goodAttributes.put("sql.args.1.value", "84");
final Map<String, String> badAttributes = new HashMap<>();
badAttributes.put("sql.args.1.type", String.valueOf(Types.INTEGER));
badAttributes.put("sql.args.1.value", "9999");
final byte[] data = "INSERT INTO PERSONS_AI (NAME, CODE) VALUES ('Mark', ?)".getBytes();
runner.enqueue(data, goodAttributes);
runner.enqueue(data, badAttributes);
runner.enqueue(data, goodAttributes);
runner.enqueue(data, goodAttributes);
try {
runner.run();
fail("ProcessException should be thrown");
} catch (AssertionError e) {
assertTrue(e.getCause() instanceof ProcessException);
runner.assertTransferCount(PutSQL.REL_FAILURE, 0);
runner.assertTransferCount(PutSQL.REL_SUCCESS, 0);
}
try (final Connection conn = service.getConnection()) {
try (final Statement stmt = conn.createStatement()) {
final ResultSet rs = stmt.executeQuery("SELECT * FROM PERSONS_AI");
assertFalse(rs.next());
}
}
}
// JDBC type codes can be negative: "-5" is java.sql.Types.BIGINT, matching
// the BIGINT code column of PERSONS2. Verifies the processor accepts a
// negative sql.args.N.type and binds the value correctly.
@Test
public void testUsingSqlDataTypesWithNegativeValues() throws InitializationException, ProcessException, SQLException, IOException {
final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
try (final Connection conn = service.getConnection()) {
try (final Statement stmt = conn.createStatement()) {
stmt.executeUpdate("CREATE TABLE PERSONS2 (id integer primary key, name varchar(100), code bigint)");
}
}
runner.addControllerService("dbcp", service);
runner.enableControllerService(service);
runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");
final Map<String, String> attributes = new HashMap<>();
attributes.put("sql.args.1.type", "-5");
attributes.put("sql.args.1.value", "84");
runner.enqueue("INSERT INTO PERSONS2 (ID, NAME, CODE) VALUES (1, 'Mark', ?)".getBytes(), attributes);
runner.run();
runner.assertAllFlowFilesTransferred(PutSQL.REL_SUCCESS, 1);
try (final Connection conn = service.getConnection()) {
try (final Statement stmt = conn.createStatement()) {
final ResultSet rs = stmt.executeQuery("SELECT * FROM PERSONS2");
assertTrue(rs.next());
assertEquals(1, rs.getInt(1));
assertEquals("Mark", rs.getString(2));
assertEquals(84, rs.getInt(3));
assertFalse(rs.next());
}
}
}
// Not specifying a format for the date fields here to continue to test backwards compatibility
@Test
public void testUsingTimestampValuesEpochAndString() throws InitializationException, ProcessException, SQLException, IOException, ParseException {
    final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
    try (final Connection conn = service.getConnection();
         final Statement stmt = conn.createStatement()) {
        stmt.executeUpdate("CREATE TABLE TIMESTAMPTEST1 (id integer primary key, ts1 timestamp, ts2 timestamp)");
    }
    runner.addControllerService("dbcp", service);
    runner.enableControllerService(service);
    runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");

    // First timestamp is supplied as an epoch-millis long, the second as a plain
    // timestamp string — both without a sql.args.N.format attribute.
    final String arg2TS = "2001-01-01 00:01:01.001";
    final String arg3TS = "2002-02-02 12:02:02.002"; // renamed from 'art3TS' (typo)
    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
    java.util.Date parsedDate = dateFormat.parse(arg2TS);

    final Map<String, String> attributes = new HashMap<>();
    attributes.put("sql.args.1.type", String.valueOf(Types.TIMESTAMP));
    attributes.put("sql.args.1.value", Long.toString(parsedDate.getTime()));
    attributes.put("sql.args.2.type", String.valueOf(Types.TIMESTAMP));
    attributes.put("sql.args.2.value", arg3TS);

    runner.enqueue("INSERT INTO TIMESTAMPTEST1 (ID, ts1, ts2) VALUES (1, ?, ?)".getBytes(), attributes);
    runner.run();
    runner.assertAllFlowFilesTransferred(PutSQL.REL_SUCCESS, 1);

    // Both columns should round-trip back to their original string representations.
    try (final Connection conn = service.getConnection();
         final Statement stmt = conn.createStatement();
         final ResultSet rs = stmt.executeQuery("SELECT * FROM TIMESTAMPTEST1")) {
        assertTrue(rs.next());
        assertEquals(1, rs.getInt(1));
        assertEquals(arg2TS, rs.getString(2));
        assertEquals(arg3TS, rs.getString(3));
        assertFalse(rs.next());
    }
}
// Verifies timestamp arguments supplied together with an explicit sql.args.N.format
// attribute: a named java.time formatter constant ("ISO_LOCAL_DATE_TIME") and a custom
// pattern carrying nanosecond precision.
@Test
public void testUsingTimestampValuesWithFormatAttribute() throws InitializationException, ProcessException, SQLException, IOException, ParseException {
final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
try (final Connection conn = service.getConnection()) {
try (final Statement stmt = conn.createStatement()) {
stmt.executeUpdate("CREATE TABLE TIMESTAMPTEST2 (id integer primary key, ts1 timestamp, ts2 timestamp)");
}
}
runner.addControllerService("dbcp", service);
runner.enableControllerService(service);
runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");
final String dateStr1 = "2002-02-02T12:02:02";
final String dateStrTimestamp1 = "2002-02-02 12:02:02";
final long dateInt1 = Timestamp.valueOf(dateStrTimestamp1).getTime();
final String dateStr2 = "2002-02-02T12:02:02.123456789";
final String dateStrTimestamp2 = "2002-02-02 12:02:02.123456789";
final long dateInt2 = Timestamp.valueOf(dateStrTimestamp2).getTime();
// Expected sub-second component, asserted separately via Timestamp.getNanos().
final long nanoInt2 = 123456789L;
final Map<String, String> attributes = new HashMap<>();
attributes.put("sql.args.1.type", String.valueOf(Types.TIMESTAMP));
attributes.put("sql.args.1.value", dateStr1);
attributes.put("sql.args.1.format", "ISO_LOCAL_DATE_TIME");
attributes.put("sql.args.2.type", String.valueOf(Types.TIMESTAMP));
attributes.put("sql.args.2.value", dateStr2);
attributes.put("sql.args.2.format", "yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS");
runner.enqueue("INSERT INTO TIMESTAMPTEST2 (ID, ts1, ts2) VALUES (1, ?, ?)".getBytes(), attributes);
runner.run();
runner.assertAllFlowFilesTransferred(PutSQL.REL_SUCCESS, 1);
// Compare millisecond values for both columns and nanoseconds for the second.
try (final Connection conn = service.getConnection()) {
try (final Statement stmt = conn.createStatement()) {
final ResultSet rs = stmt.executeQuery("SELECT * FROM TIMESTAMPTEST2");
assertTrue(rs.next());
assertEquals(1, rs.getInt(1));
assertEquals(dateInt1, rs.getTimestamp(2).getTime());
assertEquals(dateInt2, rs.getTimestamp(3).getTime());
assertEquals(nanoInt2, rs.getTimestamp(3).getNanos());
assertFalse(rs.next());
}
}
}
// Exercises TIME and DATE arguments supplied three ways: with named ISO format
// attributes, as raw epoch-millis longs with no format, and with custom patterns.
// All three rows must decode to the same local time/date values.
@Test
public void testUsingDateTimeValuesWithFormatAttribute() throws InitializationException, ProcessException, SQLException, IOException, ParseException {
final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
try (final Connection conn = service.getConnection()) {
try (final Statement stmt = conn.createStatement()) {
stmt.executeUpdate("CREATE TABLE TIMESTAMPTEST3 (id integer primary key, ts1 TIME, ts2 DATE)");
}
}
runner.addControllerService("dbcp", service);
runner.enableControllerService(service);
runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");
final String dateStr = "2002-03-04";
final String timeStr = "02:03:04";
final String timeFormatString = "HH:mm:ss";
final String dateFormatString ="yyyy-MM-dd";
// Expected values computed via java.time in the system default zone, matching
// how PutSQL converts the strings (see comment below about Derby and timezones).
final DateTimeFormatter timeFormatter= DateTimeFormatter.ISO_LOCAL_TIME;
LocalTime parsedTime = LocalTime.parse(timeStr, timeFormatter);
Time expectedTime = Time.valueOf(parsedTime);
final DateTimeFormatter dateFormatter = DateTimeFormatter.ISO_LOCAL_DATE;
LocalDate parsedDate = LocalDate.parse(dateStr, dateFormatter);
Date expectedDate = new Date(Date.from(parsedDate.atStartOfDay().atZone(ZoneId.systemDefault()).toInstant()).getTime());
final long expectedTimeInLong = expectedTime.getTime();
final long expectedDateInLong = expectedDate.getTime();
// test with ISO LOCAL format attribute
Map<String, String> attributes = new HashMap<>();
attributes.put("sql.args.1.type", String.valueOf(Types.TIME));
attributes.put("sql.args.1.value", timeStr);
attributes.put("sql.args.1.format", "ISO_LOCAL_TIME");
attributes.put("sql.args.2.type", String.valueOf(Types.DATE));
attributes.put("sql.args.2.value", dateStr);
attributes.put("sql.args.2.format", "ISO_LOCAL_DATE");
runner.enqueue("INSERT INTO TIMESTAMPTEST3 (ID, ts1, ts2) VALUES (1, ?, ?)".getBytes(), attributes);
// Since Derby database which is used for unit test does not have timezone in DATE and TIME type,
// and PutSQL converts date string into long representation using local timezone,
// we need to use local timezone.
SimpleDateFormat timeFormat = new SimpleDateFormat(timeFormatString);
java.util.Date parsedLocalTime = timeFormat.parse(timeStr);
SimpleDateFormat dateFormat = new SimpleDateFormat(dateFormatString);
java.util.Date parsedLocalDate = dateFormat.parse(dateStr);
// test Long pattern without format attribute
attributes = new HashMap<>();
attributes.put("sql.args.1.type", String.valueOf(Types.TIME));
attributes.put("sql.args.1.value", Long.toString(parsedLocalTime.getTime()));
attributes.put("sql.args.2.type", String.valueOf(Types.DATE));
attributes.put("sql.args.2.value", Long.toString(parsedLocalDate.getTime()));
runner.enqueue("INSERT INTO TIMESTAMPTEST3 (ID, ts1, ts2) VALUES (2, ?, ?)".getBytes(), attributes);
// test with format attribute
attributes = new HashMap<>();
attributes.put("sql.args.1.type", String.valueOf(Types.TIME));
attributes.put("sql.args.1.value", "020304000");
attributes.put("sql.args.1.format", "HHmmssSSS");
attributes.put("sql.args.2.type", String.valueOf(Types.DATE));
attributes.put("sql.args.2.value", "20020304");
attributes.put("sql.args.2.format", "yyyyMMdd");
runner.enqueue("INSERT INTO TIMESTAMPTEST3 (ID, ts1, ts2) VALUES (3, ?, ?)".getBytes(), attributes);
runner.run();
runner.assertAllFlowFilesTransferred(PutSQL.REL_SUCCESS, 3);
// Rows are compared by their epoch-millis values; ORDER BY ID keeps the three
// enqueued flow files in a deterministic order.
try (final Connection conn = service.getConnection()) {
try (final Statement stmt = conn.createStatement()) {
final ResultSet rs = stmt.executeQuery("SELECT * FROM TIMESTAMPTEST3 ORDER BY ID");
assertTrue(rs.next());
assertEquals(1, rs.getInt(1));
assertEquals(expectedTimeInLong, rs.getTime(2).getTime());
assertEquals(expectedDateInLong, rs.getDate(3).getTime());
assertTrue(rs.next());
assertEquals(2, rs.getInt(1));
assertEquals(parsedLocalTime.getTime(), rs.getTime(2).getTime());
assertEquals(parsedLocalDate.getTime(), rs.getDate(3).getTime());
assertTrue(rs.next());
assertEquals(3, rs.getInt(1));
assertEquals(expectedTimeInLong, rs.getTime(2).getTime());
assertEquals(expectedDateInLong, rs.getDate(3).getTime());
assertFalse(rs.next());
}
}
}
@Test
public void testBitType() throws SQLException, InitializationException {
    final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
    try (final Connection conn = service.getConnection()) {
        try (final Statement stmt = conn.createStatement()) {
            stmt.executeUpdate("CREATE TABLE BITTESTS (id integer primary key, bt1 BOOLEAN)");
        }
    }
    runner.addControllerService("dbcp", service);
    runner.enableControllerService(service);
    runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");

    final byte[] insertStatement = "INSERT INTO BITTESTS (ID, bt1) VALUES (?, ?)".getBytes();

    // Raw BIT argument strings paired with the boolean value each should produce.
    final String[] rawValues = {"1", "0", "-5", "t", "f", "T", "true", "false"};
    final boolean[] expectedValues = {true, false, false, true, false, true, true, false};

    // Enqueue one flow file per raw value; IDs are 1-based to match the assertions below.
    for (int i = 0; i < rawValues.length; i++) {
        final Map<String, String> attributes = new HashMap<>();
        attributes.put("sql.args.1.type", String.valueOf(Types.INTEGER));
        attributes.put("sql.args.1.value", String.valueOf(i + 1));
        attributes.put("sql.args.2.type", String.valueOf(Types.BIT));
        attributes.put("sql.args.2.value", rawValues[i]);
        runner.enqueue(insertStatement, attributes);
    }

    runner.run();
    runner.assertAllFlowFilesTransferred(PutSQL.REL_SUCCESS, 8);

    // Verify each inserted row decoded its BIT argument to the expected boolean.
    try (final Connection conn = service.getConnection()) {
        try (final Statement stmt = conn.createStatement()) {
            final ResultSet rs = stmt.executeQuery("SELECT * FROM BITTESTS");
            for (int i = 0; i < expectedValues.length; i++) {
                assertTrue(rs.next());
                assertEquals(i + 1, rs.getInt(1));
                if (expectedValues[i]) {
                    assertTrue(rs.getBoolean(2));
                } else {
                    assertFalse(rs.getBoolean(2));
                }
            }
            assertFalse(rs.next());
        }
    }
}
@Test
public void testUsingTimeValuesEpochAndString() throws InitializationException, ProcessException, SQLException, IOException, ParseException {
    final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
    try (final Connection conn = service.getConnection();
         final Statement stmt = conn.createStatement()) {
        stmt.executeUpdate("CREATE TABLE TIMETESTS (id integer primary key, ts1 time, ts2 time)");
    }
    runner.addControllerService("dbcp", service);
    runner.enableControllerService(service);
    runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");

    // First TIME argument is an epoch-millis long (no format attribute); the second is
    // a time string accompanied by an explicit format attribute.
    final String arg2TS = "00:01:02";
    final String arg3TS = "02:03:04"; // renamed from 'art3TS' (typo)
    final String timeFormatString = "HH:mm:ss";
    SimpleDateFormat dateFormat = new SimpleDateFormat(timeFormatString);
    java.util.Date parsedDate = dateFormat.parse(arg2TS);

    final Map<String, String> attributes = new HashMap<>();
    attributes.put("sql.args.1.type", String.valueOf(Types.TIME));
    attributes.put("sql.args.1.value", Long.toString(parsedDate.getTime()));
    attributes.put("sql.args.2.type", String.valueOf(Types.TIME));
    attributes.put("sql.args.2.value", arg3TS);
    attributes.put("sql.args.2.format", timeFormatString);

    runner.enqueue("INSERT INTO TIMETESTS (ID, ts1, ts2) VALUES (1, ?, ?)".getBytes(), attributes);
    runner.run();
    runner.assertAllFlowFilesTransferred(PutSQL.REL_SUCCESS, 1);

    // Both columns must round-trip back to the original time strings.
    try (final Connection conn = service.getConnection();
         final Statement stmt = conn.createStatement();
         final ResultSet rs = stmt.executeQuery("SELECT * FROM TIMETESTS")) {
        assertTrue(rs.next());
        assertEquals(1, rs.getInt(1));
        assertEquals(arg2TS, dateFormat.format(rs.getTime(2)));
        assertEquals(arg3TS, rs.getString(3));
        assertFalse(rs.next());
    }
}
@Test
public void testUsingDateValuesEpochAndString() throws InitializationException, ProcessException, SQLException, IOException, ParseException {
    final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
    try (final Connection conn = service.getConnection();
         final Statement stmt = conn.createStatement()) {
        stmt.executeUpdate("CREATE TABLE DATETESTS (id integer primary key, ts1 date, ts2 date)");
    }
    runner.addControllerService("dbcp", service);
    runner.enableControllerService(service);
    runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");

    // First DATE argument is an epoch-millis long; the second is a plain date string
    // with no format attribute (backwards-compatible default parsing).
    final String arg2TS = "2001-01-01";
    final String arg3TS = "2002-02-02"; // renamed from 'art3TS' (typo)
    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
    java.util.Date parsedDate = dateFormat.parse(arg2TS);

    final Map<String, String> attributes = new HashMap<>();
    attributes.put("sql.args.1.type", String.valueOf(Types.DATE));
    attributes.put("sql.args.1.value", Long.toString(parsedDate.getTime()));
    attributes.put("sql.args.2.type", String.valueOf(Types.DATE));
    attributes.put("sql.args.2.value", arg3TS);

    runner.enqueue("INSERT INTO DATETESTS (ID, ts1, ts2) VALUES (1, ?, ?)".getBytes(), attributes);
    runner.run();
    runner.assertAllFlowFilesTransferred(PutSQL.REL_SUCCESS, 1);

    // Both columns must round-trip back to the original date strings.
    try (final Connection conn = service.getConnection();
         final Statement stmt = conn.createStatement();
         final ResultSet rs = stmt.executeQuery("SELECT * FROM DATETESTS")) {
        assertTrue(rs.next());
        assertEquals(1, rs.getInt(1));
        assertEquals(arg2TS, rs.getString(2));
        assertEquals(arg3TS, rs.getString(3));
        assertFalse(rs.next());
    }
}
@Test
public void testBinaryColumnTypes() throws InitializationException, ProcessException, SQLException, IOException, ParseException {
    final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
    try (final Connection conn = service.getConnection();
         final Statement stmt = conn.createStatement()) {
        stmt.executeUpdate("CREATE TABLE BINARYTESTS (id integer primary key, bn1 CHAR(8) FOR BIT DATA, bn2 VARCHAR(100) FOR BIT DATA, " +
                "bn3 LONG VARCHAR FOR BIT DATA)");
    }
    runner.addControllerService("dbcp", service);
    runner.enableControllerService(service);
    runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");

    final byte[] insertStatement = "INSERT INTO BINARYTESTS (ID, bn1, bn2, bn3) VALUES (?, ?, ?, ?)".getBytes();

    // Locals renamed from 'art3*'/'art4*' to 'arg3*'/'arg4*' (typo fix).
    final String arg2BIN = fixedSizeByteArrayAsASCIIString(8);
    final String arg3VARBIN = fixedSizeByteArrayAsASCIIString(50);
    final String arg4LongBin = fixedSizeByteArrayAsASCIIString(32700); // max size supported by Derby

    // ASCII (default) binary format — no sql.args.N.format attribute.
    Map<String, String> attributes = new HashMap<>();
    attributes.put("sql.args.1.type", String.valueOf(Types.INTEGER));
    attributes.put("sql.args.1.value", "1");
    attributes.put("sql.args.2.type", String.valueOf(Types.BINARY));
    attributes.put("sql.args.2.value", arg2BIN);
    attributes.put("sql.args.3.type", String.valueOf(Types.VARBINARY));
    attributes.put("sql.args.3.value", arg3VARBIN);
    attributes.put("sql.args.4.type", String.valueOf(Types.LONGVARBINARY));
    attributes.put("sql.args.4.value", arg4LongBin);
    runner.enqueue(insertStatement, attributes);

    // ASCII with explicitly specified format.
    attributes = new HashMap<>();
    attributes.put("sql.args.1.type", String.valueOf(Types.INTEGER));
    attributes.put("sql.args.1.value", "2");
    attributes.put("sql.args.2.type", String.valueOf(Types.BINARY));
    attributes.put("sql.args.2.value", arg2BIN);
    attributes.put("sql.args.2.format", "ascii");
    attributes.put("sql.args.3.type", String.valueOf(Types.VARBINARY));
    attributes.put("sql.args.3.value", arg3VARBIN);
    attributes.put("sql.args.3.format", "ascii");
    attributes.put("sql.args.4.type", String.valueOf(Types.LONGVARBINARY));
    attributes.put("sql.args.4.value", arg4LongBin);
    attributes.put("sql.args.4.format", "ascii");
    runner.enqueue(insertStatement, attributes);

    // Hex-encoded binary values.
    final String arg2HexBIN = fixedSizeByteArrayAsHexString(8);
    final String arg3HexVARBIN = fixedSizeByteArrayAsHexString(50);
    final String arg4HexLongBin = fixedSizeByteArrayAsHexString(32700);
    attributes = new HashMap<>();
    attributes.put("sql.args.1.type", String.valueOf(Types.INTEGER));
    attributes.put("sql.args.1.value", "3");
    attributes.put("sql.args.2.type", String.valueOf(Types.BINARY));
    attributes.put("sql.args.2.value", arg2HexBIN);
    attributes.put("sql.args.2.format", "hex");
    attributes.put("sql.args.3.type", String.valueOf(Types.VARBINARY));
    attributes.put("sql.args.3.value", arg3HexVARBIN);
    attributes.put("sql.args.3.format", "hex");
    attributes.put("sql.args.4.type", String.valueOf(Types.LONGVARBINARY));
    attributes.put("sql.args.4.value", arg4HexLongBin);
    attributes.put("sql.args.4.format", "hex");
    runner.enqueue(insertStatement, attributes);

    // Base64-encoded binary values.
    final String arg2Base64BIN = fixedSizeByteArrayAsBase64String(8);
    final String arg3Base64VARBIN = fixedSizeByteArrayAsBase64String(50);
    final String arg4Base64LongBin = fixedSizeByteArrayAsBase64String(32700);
    attributes = new HashMap<>();
    attributes.put("sql.args.1.type", String.valueOf(Types.INTEGER));
    attributes.put("sql.args.1.value", "4");
    attributes.put("sql.args.2.type", String.valueOf(Types.BINARY));
    attributes.put("sql.args.2.value", arg2Base64BIN);
    attributes.put("sql.args.2.format", "base64");
    attributes.put("sql.args.3.type", String.valueOf(Types.VARBINARY));
    attributes.put("sql.args.3.value", arg3Base64VARBIN);
    attributes.put("sql.args.3.format", "base64");
    attributes.put("sql.args.4.type", String.valueOf(Types.LONGVARBINARY));
    attributes.put("sql.args.4.value", arg4Base64LongBin);
    attributes.put("sql.args.4.format", "base64");
    runner.enqueue(insertStatement, attributes);

    runner.run();
    runner.assertAllFlowFilesTransferred(PutSQL.REL_SUCCESS, 4);

    // Each batch must decode to the same raw bytes that were encoded above.
    try (final Connection conn = service.getConnection();
         final Statement stmt = conn.createStatement();
         final ResultSet rs = stmt.executeQuery("SELECT * FROM BINARYTESTS")) {
        // First batch (ASCII, default format)
        assertTrue(rs.next());
        assertEquals(1, rs.getInt(1));
        assertTrue(Arrays.equals(arg2BIN.getBytes("ASCII"), rs.getBytes(2)));
        assertTrue(Arrays.equals(arg3VARBIN.getBytes("ASCII"), rs.getBytes(3)));
        assertTrue(Arrays.equals(arg4LongBin.getBytes("ASCII"), rs.getBytes(4)));
        // Second batch (ASCII, explicit format)
        assertTrue(rs.next());
        assertEquals(2, rs.getInt(1));
        assertTrue(Arrays.equals(arg2BIN.getBytes("ASCII"), rs.getBytes(2)));
        assertTrue(Arrays.equals(arg3VARBIN.getBytes("ASCII"), rs.getBytes(3)));
        assertTrue(Arrays.equals(arg4LongBin.getBytes("ASCII"), rs.getBytes(4)));
        // Third batch (Hex)
        assertTrue(rs.next());
        assertEquals(3, rs.getInt(1));
        assertTrue(Arrays.equals(DatatypeConverter.parseHexBinary(arg2HexBIN), rs.getBytes(2)));
        assertTrue(Arrays.equals(DatatypeConverter.parseHexBinary(arg3HexVARBIN), rs.getBytes(3)));
        assertTrue(Arrays.equals(DatatypeConverter.parseHexBinary(arg4HexLongBin), rs.getBytes(4)));
        // Fourth batch (Base64)
        assertTrue(rs.next());
        assertEquals(4, rs.getInt(1));
        assertTrue(Arrays.equals(DatatypeConverter.parseBase64Binary(arg2Base64BIN), rs.getBytes(2)));
        assertTrue(Arrays.equals(DatatypeConverter.parseBase64Binary(arg3Base64VARBIN), rs.getBytes(3)));
        assertTrue(Arrays.equals(DatatypeConverter.parseBase64Binary(arg4Base64LongBin), rs.getBytes(4)));
        assertFalse(rs.next());
    }
}
@Test
public void testStatementsWithPreparedParameters() throws InitializationException, ProcessException, SQLException, IOException {
    final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
    runner.addControllerService("dbcp", service);
    runner.enableControllerService(service);
    runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");
    recreateTable("PERSONS", createPersons);

    // Insert a single row using positional prepared-statement parameters.
    final Map<String, String> attrs = new HashMap<>();
    attrs.put("sql.args.1.type", String.valueOf(Types.INTEGER));
    attrs.put("sql.args.1.value", "1");
    attrs.put("sql.args.2.type", String.valueOf(Types.VARCHAR));
    attrs.put("sql.args.2.value", "Mark");
    attrs.put("sql.args.3.type", String.valueOf(Types.INTEGER));
    attrs.put("sql.args.3.value", "84");
    runner.enqueue("INSERT INTO PERSONS (ID, NAME, CODE) VALUES (?, ?, ?)".getBytes(), attrs);
    runner.run();
    runner.assertAllFlowFilesTransferred(PutSQL.REL_SUCCESS, 1);

    // The row must be present with exactly the bound values.
    try (final Connection conn = service.getConnection();
         final Statement stmt = conn.createStatement()) {
        final ResultSet rs = stmt.executeQuery("SELECT * FROM PERSONS");
        assertTrue(rs.next());
        assertEquals(1, rs.getInt(1));
        assertEquals("Mark", rs.getString(2));
        assertEquals(84, rs.getInt(3));
        assertFalse(rs.next());
    }

    // Now update the same row, again via prepared parameters.
    runner.clearTransferState();
    attrs.clear();
    attrs.put("sql.args.1.type", String.valueOf(Types.VARCHAR));
    attrs.put("sql.args.1.value", "George");
    attrs.put("sql.args.2.type", String.valueOf(Types.INTEGER));
    attrs.put("sql.args.2.value", "1");
    runner.enqueue("UPDATE PERSONS SET NAME=? WHERE ID=?".getBytes(), attrs);
    runner.run();
    runner.assertAllFlowFilesTransferred(PutSQL.REL_SUCCESS, 1);

    // Only the NAME column should have changed.
    try (final Connection conn = service.getConnection();
         final Statement stmt = conn.createStatement()) {
        final ResultSet rs = stmt.executeQuery("SELECT * FROM PERSONS");
        assertTrue(rs.next());
        assertEquals(1, rs.getInt(1));
        assertEquals("George", rs.getString(2));
        assertEquals(84, rs.getInt(3));
        assertFalse(rs.next());
    }
}
@Test
public void testMultipleStatementsWithinFlowFile() throws InitializationException, ProcessException, SQLException, IOException {
    final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
    runner.addControllerService("dbcp", service);
    runner.enableControllerService(service);
    runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");
    recreateTable("PERSONS", createPersons);

    // Two semicolon-separated statements in a single flow file.
    final String sql = "INSERT INTO PERSONS (ID, NAME, CODE) VALUES (?, ?, ?); " +
            "UPDATE PERSONS SET NAME='George' WHERE ID=?; ";

    final Map<String, String> attrs = new HashMap<>();
    attrs.put("sql.args.1.type", String.valueOf(Types.INTEGER));
    attrs.put("sql.args.1.value", "1");
    attrs.put("sql.args.2.type", String.valueOf(Types.VARCHAR));
    attrs.put("sql.args.2.value", "Mark");
    attrs.put("sql.args.3.type", String.valueOf(Types.INTEGER));
    attrs.put("sql.args.3.value", "84");
    attrs.put("sql.args.4.type", String.valueOf(Types.INTEGER));
    attrs.put("sql.args.4.value", "1");
    runner.enqueue(sql.getBytes(), attrs);
    runner.run();

    // Multiple semicolon-separated statements are not supported, so the flow file
    // is routed to failure and nothing is written to the table.
    runner.assertAllFlowFilesTransferred(PutSQL.REL_FAILURE, 1);
    try (final Connection conn = service.getConnection();
         final Statement stmt = conn.createStatement()) {
        final ResultSet rs = stmt.executeQuery("SELECT * FROM PERSONS");
        assertFalse(rs.next());
    }
}
// Same scenario as testMultipleStatementsWithinFlowFile, but with Rollback On Failure
// enabled: instead of routing the flow file to failure, the session is rolled back and
// a ProcessException is raised (surfacing from the test framework as an AssertionError).
@Test
public void testMultipleStatementsWithinFlowFileRollbackOnFailure() throws InitializationException, ProcessException, SQLException, IOException {
final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
runner.addControllerService("dbcp", service);
runner.enableControllerService(service);
runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");
runner.setProperty(RollbackOnFailure.ROLLBACK_ON_FAILURE, "true");
recreateTable("PERSONS", createPersons);
// Two semicolon-separated statements in one flow file — unsupported by PutSQL.
final String sql = "INSERT INTO PERSONS (ID, NAME, CODE) VALUES (?, ?, ?); " +
"UPDATE PERSONS SET NAME='George' WHERE ID=?; ";
final Map<String, String> attributes = new HashMap<>();
attributes.put("sql.args.1.type", String.valueOf(Types.INTEGER));
attributes.put("sql.args.1.value", "1");
attributes.put("sql.args.2.type", String.valueOf(Types.VARCHAR));
attributes.put("sql.args.2.value", "Mark");
attributes.put("sql.args.3.type", String.valueOf(Types.INTEGER));
attributes.put("sql.args.3.value", "84");
attributes.put("sql.args.4.type", String.valueOf(Types.INTEGER));
attributes.put("sql.args.4.value", "1");
runner.enqueue(sql.getBytes(), attributes);
try {
runner.run();
fail("ProcessException should be thrown");
} catch (AssertionError e) {
// The mock framework wraps the processor's ProcessException in an AssertionError.
assertTrue(e.getCause() instanceof ProcessException);
}
// The transaction was rolled back, so the table must remain empty.
try (final Connection conn = service.getConnection()) {
try (final Statement stmt = conn.createStatement()) {
final ResultSet rs = stmt.executeQuery("SELECT * FROM PERSONS");
assertFalse(rs.next());
}
}
}
@Test
public void testWithNullParameter() throws InitializationException, ProcessException, SQLException, IOException {
    final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
    runner.addControllerService("dbcp", service);
    runner.enableControllerService(service);
    runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");

    // The third argument declares a type but deliberately omits sql.args.3.value,
    // so a NULL is bound for the CODE column.
    final Map<String, String> attrs = new HashMap<>();
    attrs.put("sql.args.1.type", String.valueOf(Types.INTEGER));
    attrs.put("sql.args.1.value", "1");
    attrs.put("sql.args.2.type", String.valueOf(Types.VARCHAR));
    attrs.put("sql.args.2.value", "Mark");
    attrs.put("sql.args.3.type", String.valueOf(Types.INTEGER));
    runner.enqueue("INSERT INTO PERSONS (ID, NAME, CODE) VALUES (?, ?, ?)".getBytes(), attrs);
    runner.run();
    runner.assertAllFlowFilesTransferred(PutSQL.REL_SUCCESS, 1);

    // getInt on a SQL NULL returns 0 per the JDBC contract.
    try (final Connection conn = service.getConnection();
         final Statement stmt = conn.createStatement()) {
        final ResultSet rs = stmt.executeQuery("SELECT * FROM PERSONS");
        assertTrue(rs.next());
        assertEquals(1, rs.getInt(1));
        assertEquals("Mark", rs.getString(2));
        assertEquals(0, rs.getInt(3));
        assertFalse(rs.next());
    }
}
@Test
public void testInvalidStatement() throws InitializationException, ProcessException, SQLException, IOException {
    final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
    runner.addControllerService("dbcp", service);
    runner.enableControllerService(service);
    runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");
    recreateTable("PERSONS", createPersons);

    // Semicolon-separated statements, the second of which is also malformed
    // (references a nonexistent table).
    final String sql = "INSERT INTO PERSONS (ID, NAME, CODE) VALUES (?, ?, ?); " +
            "UPDATE SOME_RANDOM_TABLE NAME='George' WHERE ID=?; ";

    final Map<String, String> attrs = new HashMap<>();
    attrs.put("sql.args.1.type", String.valueOf(Types.INTEGER));
    attrs.put("sql.args.1.value", "1");
    attrs.put("sql.args.2.type", String.valueOf(Types.VARCHAR));
    attrs.put("sql.args.2.value", "Mark");
    attrs.put("sql.args.3.type", String.valueOf(Types.INTEGER));
    attrs.put("sql.args.3.value", "84");
    attrs.put("sql.args.4.type", String.valueOf(Types.INTEGER));
    attrs.put("sql.args.4.value", "1");
    runner.enqueue(sql.getBytes(), attrs);
    runner.run();

    // The invalid flow file is routed to failure; nothing is committed.
    runner.assertAllFlowFilesTransferred(PutSQL.REL_FAILURE, 1);
    try (final Connection conn = service.getConnection();
         final Statement stmt = conn.createStatement()) {
        final ResultSet rs = stmt.executeQuery("SELECT * FROM PERSONS");
        assertFalse(rs.next());
    }
}
// Same invalid-statement scenario as testInvalidStatement, but with Rollback On Failure
// enabled: the processor raises a ProcessException (wrapped by the test framework in an
// AssertionError) instead of routing the flow file to failure.
@Test
public void testInvalidStatementRollbackOnFailure() throws InitializationException, ProcessException, SQLException, IOException {
final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
runner.addControllerService("dbcp", service);
runner.enableControllerService(service);
runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");
runner.setProperty(RollbackOnFailure.ROLLBACK_ON_FAILURE, "true");
recreateTable("PERSONS", createPersons);
// Semicolon-separated statements; the second references a nonexistent table.
final String sql = "INSERT INTO PERSONS (ID, NAME, CODE) VALUES (?, ?, ?); " +
"UPDATE SOME_RANDOM_TABLE NAME='George' WHERE ID=?; ";
final Map<String, String> attributes = new HashMap<>();
attributes.put("sql.args.1.type", String.valueOf(Types.INTEGER));
attributes.put("sql.args.1.value", "1");
attributes.put("sql.args.2.type", String.valueOf(Types.VARCHAR));
attributes.put("sql.args.2.value", "Mark");
attributes.put("sql.args.3.type", String.valueOf(Types.INTEGER));
attributes.put("sql.args.3.value", "84");
attributes.put("sql.args.4.type", String.valueOf(Types.INTEGER));
attributes.put("sql.args.4.value", "1");
runner.enqueue(sql.getBytes(), attributes);
try {
runner.run();
fail("ProcessException should be thrown");
} catch (AssertionError e) {
// The mock framework wraps the processor's ProcessException in an AssertionError.
assertTrue(e.getCause() instanceof ProcessException);
}
// The transaction was rolled back, so the table must remain empty.
try (final Connection conn = service.getConnection()) {
try (final Statement stmt = conn.createStatement()) {
final ResultSet rs = stmt.executeQuery("SELECT * FROM PERSONS");
assertFalse(rs.next());
}
}
}
@Test
public void testRetryableFailure() throws InitializationException, ProcessException, SQLException, IOException {
    final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
    // Use a connection pool that throws SQLExceptions; renamed from 'service' so the
    // local no longer shadows the class-level 'service' field.
    final DBCPService exceptionService = new SQLExceptionService(null);
    runner.addControllerService("dbcp", exceptionService);
    runner.enableControllerService(exceptionService);
    runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");

    final String sql = "INSERT INTO PERSONS (ID, NAME, CODE) VALUES (?, ?, ?); " +
            "UPDATE PERSONS SET NAME='George' WHERE ID=?; ";
    final Map<String, String> attributes = new HashMap<>();
    attributes.put("sql.args.1.type", String.valueOf(Types.INTEGER));
    attributes.put("sql.args.1.value", "1");
    attributes.put("sql.args.2.type", String.valueOf(Types.VARCHAR));
    attributes.put("sql.args.2.value", "Mark");
    attributes.put("sql.args.3.type", String.valueOf(Types.INTEGER));
    attributes.put("sql.args.3.value", "84");
    attributes.put("sql.args.4.type", String.valueOf(Types.INTEGER));
    attributes.put("sql.args.4.value", "1");
    runner.enqueue(sql.getBytes(), attributes);
    runner.run();

    // The SQLExceptionService makes the operation fail in a retryable way, so the
    // flow file must be routed to retry rather than failure. (Previous comment
    // incorrectly blamed the semicolon.)
    runner.assertAllFlowFilesTransferred(PutSQL.REL_RETRY, 1);
}
@Test
public void testRetryableFailureRollbackOnFailure() throws InitializationException, ProcessException, SQLException, IOException {
    final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
    // Use a connection pool that throws SQLExceptions; renamed from 'service' so the
    // local no longer shadows the class-level 'service' field.
    final DBCPService exceptionService = new SQLExceptionService(null);
    runner.addControllerService("dbcp", exceptionService);
    runner.enableControllerService(exceptionService);
    runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");
    runner.setProperty(RollbackOnFailure.ROLLBACK_ON_FAILURE, "true");

    final String sql = "INSERT INTO PERSONS (ID, NAME, CODE) VALUES (?, ?, ?); " +
            "UPDATE PERSONS SET NAME='George' WHERE ID=?; ";
    final Map<String, String> attributes = new HashMap<>();
    attributes.put("sql.args.1.type", String.valueOf(Types.INTEGER));
    attributes.put("sql.args.1.value", "1");
    attributes.put("sql.args.2.type", String.valueOf(Types.VARCHAR));
    attributes.put("sql.args.2.value", "Mark");
    attributes.put("sql.args.3.type", String.valueOf(Types.INTEGER));
    attributes.put("sql.args.3.value", "84");
    attributes.put("sql.args.4.type", String.valueOf(Types.INTEGER));
    attributes.put("sql.args.4.value", "1");
    runner.enqueue(sql.getBytes(), attributes);

    // With Rollback On Failure enabled, even a retryable failure raises a
    // ProcessException (wrapped in an AssertionError by the mock framework).
    try {
        runner.run();
        fail("ProcessException should be thrown");
    } catch (AssertionError e) {
        assertTrue(e.getCause() instanceof ProcessException);
        // Should not be routed to retry.
        runner.assertAllFlowFilesTransferred(PutSQL.REL_RETRY, 0);
    }
}
@Test
public void testMultipleFlowFilesSuccessfulInTransaction() throws InitializationException, ProcessException, SQLException, IOException {
// Verifies fragmented-transaction support: FlowFiles sharing a
// fragment.identifier are held until all fragment.count pieces arrive,
// then committed together in a single database transaction.
final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
runner.addControllerService("dbcp", service);
runner.enableControllerService(service);
runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");
runner.setProperty(PutSQL.BATCH_SIZE, "1");
recreateTable("PERSONS", createPersons);
final Map<String, String> attributes = new HashMap<>();
attributes.put("sql.args.1.type", String.valueOf(Types.INTEGER));
attributes.put("sql.args.1.value", "1");
attributes.put("sql.args.2.type", String.valueOf(Types.VARCHAR));
attributes.put("sql.args.2.value", "Mark");
attributes.put("sql.args.3.type", String.valueOf(Types.INTEGER));
attributes.put("sql.args.3.value", "84");
attributes.put("fragment.identifier", "1");
attributes.put("fragment.count", "2");
attributes.put("fragment.index", "0");
runner.enqueue("INSERT INTO PERSONS (ID, NAME, CODE) VALUES (?, ?, ?)".getBytes(), attributes);
runner.run();
// No FlowFiles should be transferred because there were not enough flowfiles with the same fragment identifier
runner.assertAllFlowFilesTransferred(PutSQL.REL_SUCCESS, 0);
// Enqueue the second (final) fragment; the full transaction can now execute.
attributes.clear();
attributes.put("fragment.identifier", "1");
attributes.put("fragment.count", "2");
attributes.put("fragment.index", "1");
runner.clearTransferState();
runner.enqueue("UPDATE PERSONS SET NAME='Leonard' WHERE ID=1".getBytes(), attributes);
runner.run();
// Both FlowFiles with fragment identifier 1 should be successful
runner.assertTransferCount(PutSQL.REL_SUCCESS, 2);
runner.assertTransferCount(PutSQL.REL_FAILURE, 0);
runner.assertTransferCount(PutSQL.REL_RETRY, 0);
for (final MockFlowFile mff : runner.getFlowFilesForRelationship(PutSQL.REL_SUCCESS)) {
mff.assertAttributeEquals("fragment.identifier", "1");
}
// Both statements committed: the INSERT followed by the UPDATE to 'Leonard'.
try (final Connection conn = service.getConnection()) {
try (final Statement stmt = conn.createStatement()) {
final ResultSet rs = stmt.executeQuery("SELECT * FROM PERSONS");
assertTrue(rs.next());
assertEquals(1, rs.getInt(1));
assertEquals("Leonard", rs.getString(2));
assertEquals(84, rs.getInt(3));
assertFalse(rs.next());
}
}
}
@Test
public void testMultipleFlowFilesSuccessfulInTransactionRollBackOnFailure() throws InitializationException, ProcessException, SQLException, IOException {
// With Rollback On Failure enabled, an incomplete fragmented transaction is
// not an error: the fragment is simply deferred until its peers arrive.
final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
runner.addControllerService("dbcp", service);
runner.enableControllerService(service);
runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");
runner.setProperty(PutSQL.BATCH_SIZE, "1");
runner.setProperty(RollbackOnFailure.ROLLBACK_ON_FAILURE, "true");
recreateTable("PERSONS", createPersons);
final Map<String, String> attributes = new HashMap<>();
attributes.put("sql.args.1.type", String.valueOf(Types.INTEGER));
attributes.put("sql.args.1.value", "1");
attributes.put("sql.args.2.type", String.valueOf(Types.VARCHAR));
attributes.put("sql.args.2.value", "Mark");
attributes.put("sql.args.3.type", String.valueOf(Types.INTEGER));
attributes.put("sql.args.3.value", "84");
attributes.put("fragment.identifier", "1");
attributes.put("fragment.count", "2");
attributes.put("fragment.index", "0");
runner.enqueue("INSERT INTO PERSONS (ID, NAME, CODE) VALUES (?, ?, ?)".getBytes(), attributes);
// ProcessException should not be thrown in this case, because the input FlowFiles are simply deferred.
runner.run();
// No FlowFiles should be transferred because there were not enough flowfiles with the same fragment identifier
runner.assertAllFlowFilesTransferred(PutSQL.REL_SUCCESS, 0);
}
@Test
public void testTransactionTimeout() throws InitializationException, ProcessException, SQLException, IOException {
// A fragment that has waited in the queue longer than the configured
// Transaction Timeout must give up waiting for its peers.
final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
runner.addControllerService("dbcp", service);
runner.enableControllerService(service);
runner.setProperty(PutSQL.TRANSACTION_TIMEOUT, "5 secs");
runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");
final Map<String, String> attributes = new HashMap<>();
attributes.put("fragment.identifier", "1");
attributes.put("fragment.count", "2");
attributes.put("fragment.index", "0");
// Mock a FlowFile that appears to have been queued 10 seconds ago,
// i.e. well past the 5-second transaction timeout.
final MockFlowFile mff = new MockFlowFile(0L) {
@Override
public Long getLastQueueDate() {
return System.currentTimeMillis() - 10000L; // return 10 seconds ago
}
@Override
public Map<String, String> getAttributes() {
return attributes;
}
@Override
public String getAttribute(final String attrName) {
return attributes.get(attrName);
}
};
runner.enqueue(mff);
runner.run();
// The fragment exceeded the transaction timeout before the rest of its
// transaction arrived, so it is routed to failure.
runner.assertAllFlowFilesTransferred(PutSQL.REL_FAILURE, 1);
}
@Test
public void testTransactionTimeoutRollbackOnFailure() throws InitializationException, ProcessException, SQLException, IOException {
// Same timed-out-fragment scenario as testTransactionTimeout, but with
// Rollback On Failure enabled: instead of routing to failure, the processor
// must roll back and throw a ProcessException.
final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
runner.addControllerService("dbcp", service);
runner.enableControllerService(service);
runner.setProperty(PutSQL.TRANSACTION_TIMEOUT, "5 secs");
runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");
runner.setProperty(RollbackOnFailure.ROLLBACK_ON_FAILURE, "true");
final Map<String, String> attributes = new HashMap<>();
attributes.put("fragment.identifier", "1");
attributes.put("fragment.count", "2");
attributes.put("fragment.index", "0");
// Mock a FlowFile queued 10 seconds ago, past the 5-second timeout.
final MockFlowFile mff = new MockFlowFile(0L) {
@Override
public Long getLastQueueDate() {
return System.currentTimeMillis() - 10000L; // return 10 seconds ago
}
@Override
public Map<String, String> getAttributes() {
return attributes;
}
@Override
public String getAttribute(final String attrName) {
return attributes.get(attrName);
}
};
runner.enqueue(mff);
try {
runner.run();
fail("ProcessException should be thrown");
} catch (AssertionError e) {
assertTrue(e.getCause() instanceof ProcessException);
}
// Nothing should have been routed to failure; the session was rolled back.
runner.assertAllFlowFilesTransferred(PutSQL.REL_FAILURE, 0);
}
@Test
public void testNullFragmentCountRollbackOnFailure() throws InitializationException, ProcessException, SQLException, IOException {
// Two fragments share an identifier but the second is missing the
// fragment.count attribute; with Rollback On Failure enabled this must
// surface as a ProcessException rather than routing to failure.
final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
runner.addControllerService("dbcp", service);
runner.enableControllerService(service);
runner.setProperty(PutSQL.TRANSACTION_TIMEOUT, "5 secs");
runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");
runner.setProperty(RollbackOnFailure.ROLLBACK_ON_FAILURE, "true");
final Map<String, String> attribute1 = new HashMap<>();
attribute1.put("fragment.identifier", "1");
attribute1.put("fragment.count", "2");
attribute1.put("fragment.index", "0");
final Map<String, String> attribute2 = new HashMap<>();
attribute2.put("fragment.identifier", "1");
// fragment.count deliberately omitted (i.e. null) on the second fragment.
attribute2.put("fragment.index", "1");
runner.enqueue(new byte[]{}, attribute1);
runner.enqueue(new byte[]{}, attribute2);
try {
runner.run();
fail("ProcessException should be thrown");
} catch (AssertionError e) {
assertTrue(e.getCause() instanceof ProcessException);
}
// Session rolled back: nothing routed to failure.
runner.assertAllFlowFilesTransferred(PutSQL.REL_FAILURE, 0);
}
@Test
public void testStatementsFromProperty() throws InitializationException, ProcessException, SQLException, IOException {
    // Verifies that the SQL Statement property takes precedence over the
    // FlowFile content (which is deliberately non-SQL text here), and that
    // Expression Language in the property is evaluated against each
    // FlowFile's attributes.
    final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
    runner.addControllerService("dbcp", service);
    runner.enableControllerService(service);
    runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");
    runner.setProperty(PutSQL.SQL_STATEMENT, "INSERT INTO PERSONS (ID, NAME, CODE) VALUES (${row.id}, 'Mark', 84)");
    recreateTable("PERSONS", createPersons);

    // Plain HashMap instead of double-brace initialization: the anonymous
    // subclass idiom creates a needless class per call site and captures a
    // reference to the enclosing test instance.
    final Map<String, String> insertAttributes = new HashMap<>();
    insertAttributes.put("row.id", "1");
    runner.enqueue("This statement should be ignored".getBytes(), insertAttributes);
    runner.run();
    runner.assertAllFlowFilesTransferred(PutSQL.REL_SUCCESS, 1);

    // The INSERT from the property was executed with ${row.id} = 1.
    try (final Connection conn = service.getConnection()) {
        try (final Statement stmt = conn.createStatement()) {
            final ResultSet rs = stmt.executeQuery("SELECT * FROM PERSONS");
            assertTrue(rs.next());
            assertEquals(1, rs.getInt(1));
            assertEquals("Mark", rs.getString(2));
            assertEquals(84, rs.getInt(3));
            assertFalse(rs.next());
        }
    }

    runner.setProperty(PutSQL.SQL_STATEMENT, "UPDATE PERSONS SET NAME='George' WHERE ID=${row.id}");
    final Map<String, String> updateAttributes = new HashMap<>();
    updateAttributes.put("row.id", "1");
    runner.enqueue("This statement should be ignored".getBytes(), updateAttributes);
    runner.run();

    // The UPDATE from the property renamed the row.
    try (final Connection conn = service.getConnection()) {
        try (final Statement stmt = conn.createStatement()) {
            final ResultSet rs = stmt.executeQuery("SELECT * FROM PERSONS");
            assertTrue(rs.next());
            assertEquals(1, rs.getInt(1));
            assertEquals("George", rs.getString(2));
            assertEquals(84, rs.getInt(3));
            assertFalse(rs.next());
        }
    }
}
/**
 * Builds the attribute map that marks a FlowFile as one piece of a
 * fragmented transaction.
 *
 * @param id    fragment.identifier shared by all pieces of the transaction
 * @param count total number of fragments in the transaction
 * @param index zero-based position of this fragment
 * @return a mutable map containing the three fragment attributes
 */
private Map<String, String> createFragmentedTransactionAttributes(String id, int count, int index) {
    final Map<String, String> fragmentAttrs = new HashMap<>();
    fragmentAttrs.put("fragment.identifier", id);
    fragmentAttrs.put("fragment.count", Integer.toString(count));
    fragmentAttrs.put("fragment.index", Integer.toString(index));
    return fragmentAttrs;
}
@Test
public void testTransactionalFlowFileFilter() {
// Exercises PutSQL.TransactionalFlowFileFilter directly: when the first
// polled FlowFile carries fragment attributes, the filter selects only the
// FlowFiles of that fragmented transaction; otherwise it delegates to the
// wrapped (non-transactional) filter, if any.
final MockFlowFile ff0 = new MockFlowFile(0);
final MockFlowFile ff1 = new MockFlowFile(1);
final MockFlowFile ff2 = new MockFlowFile(2);
final MockFlowFile ff3 = new MockFlowFile(3);
final MockFlowFile ff4 = new MockFlowFile(4);
// ff0, ff2, ff4 form a 3-fragment transaction "tx-1"; ff1 and ff3 are
// ordinary FlowFiles tagged for the service-level filter below.
ff0.putAttributes(createFragmentedTransactionAttributes("tx-1", 3, 0));
ff1.putAttributes(Collections.singletonMap("accept", "false"));
ff2.putAttributes(createFragmentedTransactionAttributes("tx-1", 3, 1));
ff3.putAttributes(Collections.singletonMap("accept", "true"));
ff4.putAttributes(createFragmentedTransactionAttributes("tx-1", 3, 2));
// TEST 1: Fragmented TX with null service filter
// Even if the controller service does not have a filtering rule, the tx filter should work.
FlowFileFilter txFilter = new PutSQL.TransactionalFlowFileFilter(null);
// Should perform a fragmented tx.
assertEquals(ACCEPT_AND_CONTINUE, txFilter.filter(ff0));
assertEquals(REJECT_AND_CONTINUE, txFilter.filter(ff1));
assertEquals(ACCEPT_AND_CONTINUE, txFilter.filter(ff2));
assertEquals(REJECT_AND_CONTINUE, txFilter.filter(ff3));
// Terminates once all 3 fragments of tx-1 have been accepted.
assertEquals(ACCEPT_AND_TERMINATE, txFilter.filter(ff4));
// TEST 2: Non-Fragmented TX with null service filter
txFilter = new PutSQL.TransactionalFlowFileFilter(null);
// Should perform a non-fragmented tx (first FlowFile ff1 has no fragment attributes).
assertEquals(ACCEPT_AND_CONTINUE, txFilter.filter(ff1));
assertEquals(REJECT_AND_CONTINUE, txFilter.filter(ff0));
assertEquals(REJECT_AND_CONTINUE, txFilter.filter(ff2));
assertEquals(ACCEPT_AND_CONTINUE, txFilter.filter(ff3));
assertEquals(REJECT_AND_CONTINUE, txFilter.filter(ff4));
// Service-level filter accepting only FlowFiles whose "accept" attribute is "true".
final FlowFileFilter nonTxFilter = flowFile -> "true".equals(flowFile.getAttribute("accept"))
? ACCEPT_AND_CONTINUE
: REJECT_AND_CONTINUE;
// TEST 3: Fragmented TX with a service filter
txFilter = new PutSQL.TransactionalFlowFileFilter(nonTxFilter);
// Should perform a fragmented tx. The nonTxFilter doesn't affect in this case.
assertEquals(ACCEPT_AND_CONTINUE, txFilter.filter(ff0));
assertEquals(REJECT_AND_CONTINUE, txFilter.filter(ff1));
assertEquals(ACCEPT_AND_CONTINUE, txFilter.filter(ff2));
assertEquals(REJECT_AND_CONTINUE, txFilter.filter(ff3));
assertEquals(ACCEPT_AND_TERMINATE, txFilter.filter(ff4));
// TEST 4: Non-Fragmented TX with a service filter
txFilter = new PutSQL.TransactionalFlowFileFilter(nonTxFilter);
// Should perform a non-fragmented tx and use the nonTxFilter.
assertEquals(REJECT_AND_CONTINUE, txFilter.filter(ff1));
assertEquals(REJECT_AND_CONTINUE, txFilter.filter(ff0));
assertEquals(REJECT_AND_CONTINUE, txFilter.filter(ff2));
assertEquals(ACCEPT_AND_CONTINUE, txFilter.filter(ff3));
assertEquals(REJECT_AND_CONTINUE, txFilter.filter(ff4));
}
/**
* Simple implementation only for testing purposes
*/
// Test-only DBCPService backed by an embedded Apache Derby database at a
// caller-supplied location; the database is created on first connection.
private static class MockDBCPService extends AbstractControllerService implements DBCPService {
// File-system location of the embedded Derby database.
private final String dbLocation;
public MockDBCPService(final String dbLocation) {
this.dbLocation = dbLocation;
}
@Override
public String getIdentifier() {
return "dbcp";
}
// Opens a new connection to the embedded database, creating it if needed.
// Any failure (driver load or connect) is wrapped in a ProcessException.
@Override
public Connection getConnection() throws ProcessException {
try {
Class.forName("org.apache.derby.jdbc.EmbeddedDriver");
final Connection conn = DriverManager.getConnection("jdbc:derby:" + dbLocation + ";create=true");
return conn;
} catch (final Exception e) {
e.printStackTrace();
throw new ProcessException("getConnection failed: " + e);
}
}
}
/**
* Simple implementation only for testing purposes
*/
// Test-only DBCPService that starts failing after a configurable number of
// successful connections. With the default allowedBeforeFailure of 0, every
// getConnection() returns a mocked Connection whose prepareStatement() throws
// SQLException — used to simulate retryable database failures.
private static class SQLExceptionService extends AbstractControllerService implements DBCPService {
// Delegate used while connections are still "allowed"; may be null when the
// service is expected to fail from the very first connection.
private final DBCPService service;
// Number of connections to hand out from the delegate before failing.
private int allowedBeforeFailure = 0;
// Count of getConnection() calls so far (pre-incremented on each call).
private int successful = 0;
public SQLExceptionService(final DBCPService service) {
this.service = service;
}
@Override
public String getIdentifier() {
return "dbcp";
}
@Override
public Connection getConnection() throws ProcessException {
try {
if (++successful > allowedBeforeFailure) {
// Past the allowance: return a mock whose prepareStatement() throws.
final Connection conn = Mockito.mock(Connection.class);
Mockito.when(conn.prepareStatement(Mockito.any(String.class))).thenThrow(new SQLException("Unit Test Generated SQLException"));
return conn;
} else {
return service.getConnection();
}
} catch (final Exception e) {
e.printStackTrace();
throw new ProcessException("getConnection failed: " + e);
}
}
}
// Drops and recreates a table so each test starts from a known-empty state.
// Uses the shared `service` field and the caller-supplied DDL (e.g. the
// `createPersons` constant) — both declared elsewhere in this test class.
private void recreateTable(String tableName, String createSQL) throws ProcessException, SQLException {
try (final Connection conn = service.getConnection()) {
try (final Statement stmt = conn.createStatement()) {
stmt.executeUpdate("drop table " + tableName);
stmt.executeUpdate(createSQL);
}
}
}
/**
 * Returns a string of {@code length} characters built from random bytes,
 * with each byte cast directly to a char.
 *
 * Improvements over the original: the unsynchronized StringBuilder replaces
 * StringBuffer (no concurrent access here), the pointless ByteBuffer wrapper
 * is dropped, and the builder is presized to avoid reallocation.
 *
 * NOTE: the (char) cast sign-extends negative bytes to chars >= 0xFF80 —
 * identical to the original behavior, deliberately preserved.
 */
private String fixedSizeByteArrayAsASCIIString(int length) {
    final byte[] randomBytes = RandomUtils.nextBytes(length);
    final StringBuilder sb = new StringBuilder(randomBytes.length);
    for (final byte b : randomBytes) {
        sb.append((char) b);
    }
    return sb.toString();
}
/**
 * Returns {@code length} random bytes rendered as an upper-case hexadecimal
 * string (two characters per byte).
 */
private String fixedSizeByteArrayAsHexString(int length) {
    final byte[] randomBytes = RandomUtils.nextBytes(length);
    return DatatypeConverter.printHexBinary(randomBytes);
}
/**
 * Returns {@code length} random bytes encoded as a Base64 string.
 */
private String fixedSizeByteArrayAsBase64String(int length) {
    final byte[] randomBytes = RandomUtils.nextBytes(length);
    return DatatypeConverter.printBase64Binary(randomBytes);
}
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection.miscGenerics;
import com.intellij.codeInsight.daemon.impl.analysis.JavaGenericsUtil;
import com.intellij.codeInspection.InspectionsBundle;
import com.intellij.openapi.util.NullableLazyValue;
import com.intellij.psi.*;
import com.intellij.psi.impl.source.resolve.graphInference.PsiPolyExpressionUtil;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.*;
import com.intellij.util.ArrayUtil;
import com.intellij.util.ObjectUtils;
import com.siyeh.ig.callMatcher.CallMatcher;
import com.siyeh.ig.psiutils.ExpressionUtils;
import com.siyeh.ig.psiutils.TypeUtils;
import org.jetbrains.annotations.Contract;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.List;
public class SuspiciousMethodCallUtil
{
// List.of/Set.of are unnecessary here as they don't accept nulls
private static final CallMatcher.Simple SINGLETON_COLLECTION =
CallMatcher.staticCall(CommonClassNames.JAVA_UTIL_COLLECTIONS, "singletonList", "singleton").parameterCount(1);
private static void setupPatternMethods(PsiManager manager,
GlobalSearchScope searchScope,
List<? super PatternMethod> patternMethods)
{
final JavaPsiFacade javaPsiFacade = JavaPsiFacade.getInstance(manager.getProject());
final PsiClass collectionClass = javaPsiFacade.findClass(CommonClassNames.JAVA_UTIL_COLLECTION, searchScope);
PsiClassType object = PsiType.getJavaLangObject(manager, searchScope);
PsiType[] javaLangObject = {object};
PsiType[] twoObjects = {
object,
object
};
MethodSignature removeSignature =
MethodSignatureUtil.createMethodSignature("remove", javaLangObject, PsiTypeParameter.EMPTY_ARRAY, PsiSubstitutor.EMPTY);
if(collectionClass != null)
{
PsiMethod remove = MethodSignatureUtil.findMethodBySignature(collectionClass, removeSignature, false);
addMethod(remove, 0, patternMethods, 0);
addSingleParameterMethod(patternMethods, collectionClass, "contains", object);
if(PsiUtil.isLanguageLevel5OrHigher(collectionClass))
{
PsiClassType wildcardCollection = javaPsiFacade.getElementFactory().createType(collectionClass, PsiWildcardType.createUnbounded(manager));
addSingleParameterMethod(patternMethods, collectionClass, "removeAll", wildcardCollection);
addSingleParameterMethod(patternMethods, collectionClass, "retainAll", wildcardCollection);
}
}
final PsiClass listClass = javaPsiFacade.findClass(CommonClassNames.JAVA_UTIL_LIST, searchScope);
if(listClass != null)
{
addSingleParameterMethod(patternMethods, listClass, "indexOf", object);
addSingleParameterMethod(patternMethods, listClass, "lastIndexOf", object);
}
final PsiClass mapClass = javaPsiFacade.findClass(CommonClassNames.JAVA_UTIL_MAP, searchScope);
if(mapClass != null)
{
PsiMethod remove = MethodSignatureUtil.findMethodBySignature(mapClass, removeSignature, false);
addMethod(remove, 0, patternMethods, 0);
addSingleParameterMethod(patternMethods, mapClass, "get", object);
PsiTypeParameter[] typeParameters = mapClass.getTypeParameters();
if(typeParameters.length > 0)
{
MethodSignature getOrDefaultSignature = MethodSignatureUtil.createMethodSignature("getOrDefault",
new PsiType[]{
object,
PsiSubstitutor.EMPTY.substitute(typeParameters[1])
}, PsiTypeParameter.EMPTY_ARRAY, PsiSubstitutor.EMPTY);
PsiMethod getOrDefault = MethodSignatureUtil.findMethodBySignature(mapClass, getOrDefaultSignature, false);
addMethod(getOrDefault, 0, patternMethods, 0);
}
MethodSignature removeWithDefaultSignature = MethodSignatureUtil.createMethodSignature("remove",
twoObjects,
PsiTypeParameter.EMPTY_ARRAY, PsiSubstitutor.EMPTY);
PsiMethod removeWithDefault = MethodSignatureUtil.findMethodBySignature(mapClass, removeWithDefaultSignature, false);
addMethod(removeWithDefault, 0, patternMethods, 0);
addMethod(removeWithDefault, 1, patternMethods, 1);
addSingleParameterMethod(patternMethods, mapClass, "containsKey", object);
MethodSignature containsValueSignature = MethodSignatureUtil.createMethodSignature("containsValue", javaLangObject, PsiTypeParameter.EMPTY_ARRAY, PsiSubstitutor.EMPTY);
PsiMethod containsValue = MethodSignatureUtil.findMethodBySignature(mapClass, containsValueSignature, false);
addMethod(containsValue, 1, patternMethods, 0);
}
final PsiClass concurrentMapClass = javaPsiFacade.findClass(CommonClassNames.JAVA_UTIL_CONCURRENT_HASH_MAP, searchScope);
if(concurrentMapClass != null)
{
MethodSignature containsSignature = MethodSignatureUtil.createMethodSignature("contains", javaLangObject, PsiTypeParameter.EMPTY_ARRAY, PsiSubstitutor.EMPTY);
PsiMethod contains = MethodSignatureUtil.findMethodBySignature(concurrentMapClass, containsSignature, false);
addMethod(contains, 1, patternMethods, 0);
}
PsiClass guavaTable = javaPsiFacade.findClass("com.google.common.collect.Table", searchScope);
if(guavaTable != null)
{
MethodSignature getSignature =
MethodSignatureUtil.createMethodSignature("get", twoObjects, PsiTypeParameter.EMPTY_ARRAY, PsiSubstitutor.EMPTY);
PsiMethod get = MethodSignatureUtil.findMethodBySignature(guavaTable, getSignature, false);
addMethod(get, 0, patternMethods, 0);
addMethod(get, 1, patternMethods, 1);
MethodSignature containsSignature =
MethodSignatureUtil.createMethodSignature("contains", twoObjects, PsiTypeParameter.EMPTY_ARRAY, PsiSubstitutor.EMPTY);
PsiMethod contains = MethodSignatureUtil.findMethodBySignature(guavaTable, containsSignature, false);
addMethod(contains, 0, patternMethods, 0);
addMethod(contains, 1, patternMethods, 1);
MethodSignature containsRowSignature =
MethodSignatureUtil.createMethodSignature("containsRow", javaLangObject, PsiTypeParameter.EMPTY_ARRAY, PsiSubstitutor.EMPTY);
PsiMethod containsRow = MethodSignatureUtil.findMethodBySignature(guavaTable, containsRowSignature, false);
addMethod(containsRow, 0, patternMethods, 0);
MethodSignature containsColumnSignature =
MethodSignatureUtil.createMethodSignature("containsColumn", javaLangObject, PsiTypeParameter.EMPTY_ARRAY, PsiSubstitutor.EMPTY);
PsiMethod containsColumn = MethodSignatureUtil.findMethodBySignature(guavaTable, containsColumnSignature, false);
addMethod(containsColumn, 1, patternMethods, 0);
MethodSignature containsValueSignature =
MethodSignatureUtil.createMethodSignature("containsValue", javaLangObject, PsiTypeParameter.EMPTY_ARRAY, PsiSubstitutor.EMPTY);
PsiMethod containsValue = MethodSignatureUtil.findMethodBySignature(guavaTable, containsValueSignature, false);
addMethod(containsValue, 2, patternMethods, 0);
MethodSignature removeByRowAndColumnSignature =
MethodSignatureUtil.createMethodSignature("remove", twoObjects, PsiTypeParameter.EMPTY_ARRAY, PsiSubstitutor.EMPTY);
PsiMethod removeByRowAndColumn = MethodSignatureUtil.findMethodBySignature(guavaTable, removeByRowAndColumnSignature, false);
addMethod(removeByRowAndColumn, 0, patternMethods, 0);
addMethod(removeByRowAndColumn, 1, patternMethods, 1);
}
PsiClass guavaMultimap = javaPsiFacade.findClass("com.google.common.collect.Multimap", searchScope);
if(guavaMultimap != null)
{
MethodSignature containsKeySignature =
MethodSignatureUtil.createMethodSignature("containsKey", javaLangObject, PsiTypeParameter.EMPTY_ARRAY, PsiSubstitutor.EMPTY);
PsiMethod containsKey = MethodSignatureUtil.findMethodBySignature(guavaMultimap, containsKeySignature, false);
addMethod(containsKey, 0, patternMethods, 0);
MethodSignature containsValueSignature =
MethodSignatureUtil.createMethodSignature("containsValue", javaLangObject, PsiTypeParameter.EMPTY_ARRAY, PsiSubstitutor.EMPTY);
PsiMethod containsValue = MethodSignatureUtil.findMethodBySignature(guavaMultimap, containsValueSignature, false);
addMethod(containsValue, 1, patternMethods, 0);
MethodSignature containsEntrySignature =
MethodSignatureUtil.createMethodSignature("containsEntry", twoObjects, PsiTypeParameter.EMPTY_ARRAY, PsiSubstitutor.EMPTY);
PsiMethod containsEntry = MethodSignatureUtil.findMethodBySignature(guavaMultimap, containsEntrySignature, false);
addMethod(containsEntry, 0, patternMethods, 0);
addMethod(containsEntry, 1, patternMethods, 1);
MethodSignature removeByKeyAndValueSignature =
MethodSignatureUtil.createMethodSignature("remove", twoObjects, PsiTypeParameter.EMPTY_ARRAY, PsiSubstitutor.EMPTY);
PsiMethod removeByKeyAndValue = MethodSignatureUtil.findMethodBySignature(guavaMultimap, removeByKeyAndValueSignature, false);
addMethod(removeByKeyAndValue, 0, patternMethods, 0);
addMethod(removeByKeyAndValue, 1, patternMethods, 1);
MethodSignature removeAllSignature =
MethodSignatureUtil.createMethodSignature("removeAll", javaLangObject, PsiTypeParameter.EMPTY_ARRAY, PsiSubstitutor.EMPTY);
PsiMethod removeAll = MethodSignatureUtil.findMethodBySignature(guavaMultimap, removeAllSignature, false);
addMethod(removeAll, 0, patternMethods, 0);
}
PsiClass guavaMultiset = javaPsiFacade.findClass("com.google.common.collect.Multiset", searchScope);
if(guavaMultiset != null)
{
MethodSignature countSignature =
MethodSignatureUtil.createMethodSignature("count", javaLangObject, PsiTypeParameter.EMPTY_ARRAY, PsiSubstitutor.EMPTY);
PsiMethod count = MethodSignatureUtil.findMethodBySignature(guavaMultiset, countSignature, false);
addMethod(count, 0, patternMethods, 0);
}
PsiClass guavaCache = javaPsiFacade.findClass("com.google.common.cache.Cache", searchScope);
if(guavaCache != null)
{
MethodSignature getIfPresentSignature =
MethodSignatureUtil.createMethodSignature("getIfPresent", javaLangObject, PsiTypeParameter.EMPTY_ARRAY, PsiSubstitutor.EMPTY);
PsiMethod getIfPresent = MethodSignatureUtil.findMethodBySignature(guavaCache, getIfPresentSignature, false);
addMethod(getIfPresent, 0, patternMethods, 0);
MethodSignature invalidateSignature =
MethodSignatureUtil.createMethodSignature("invalidate", javaLangObject, PsiTypeParameter.EMPTY_ARRAY, PsiSubstitutor.EMPTY);
PsiMethod invalidate = MethodSignatureUtil.findMethodBySignature(guavaCache, invalidateSignature, false);
addMethod(invalidate, 0, patternMethods, 0);
}
}
@Contract(value = "null -> false", pure = true)
static boolean isCollectionAcceptingMethod(@Nullable String name)
{
return "removeAll".equals(name) || "retainAll".equals(name) || "containsAll".equals(name);
}
private static void addSingleParameterMethod(List<? super PatternMethod> patternMethods,
PsiClass methodClass, String methodName, PsiClassType parameterType)
{
MethodSignature signature = MethodSignatureUtil
.createMethodSignature(methodName, new PsiType[]{parameterType}, PsiTypeParameter.EMPTY_ARRAY, PsiSubstitutor.EMPTY);
PsiMethod method = MethodSignatureUtil.findMethodBySignature(methodClass, signature, false);
addMethod(method, 0, patternMethods, 0);
}
private static void addMethod(final PsiMethod patternMethod,
int typeParamIndex,
List<? super PatternMethod> patternMethods,
int argIdx)
{
if(patternMethod != null)
{
patternMethods.add(new PatternMethod(patternMethod, typeParamIndex, argIdx));
}
}
private static boolean isInheritorOrSelf(PsiMethod inheritorCandidate, PsiMethod base)
{
PsiClass aClass = inheritorCandidate.getContainingClass();
PsiClass bClass = base.getContainingClass();
if(aClass == null || bClass == null)
{
return false;
}
PsiSubstitutor substitutor = TypeConversionUtil.getClassSubstitutor(bClass, aClass, PsiSubstitutor.EMPTY);
return substitutor != null &&
MethodSignatureUtil.findMethodBySignature(bClass, inheritorCandidate.getSignature(substitutor), false) == base;
}
@Nullable
public static String getSuspiciousMethodCallMessage(@Nonnull PsiMethodCallExpression methodCall,
PsiExpression arg,
PsiType argType,
boolean reportConvertibleMethodCalls,
@Nonnull List<PatternMethod> patternMethods,
int idx)
{
final PsiReferenceExpression methodExpression = methodCall.getMethodExpression();
if(arg instanceof PsiConditionalExpression &&
argType != null &&
argType.equalsToText(CommonClassNames.JAVA_LANG_OBJECT) &&
PsiPolyExpressionUtil.isPolyExpression(arg))
{
return null;
}
return getSuspiciousMethodCallMessage(methodExpression, argType, reportConvertibleMethodCalls, patternMethods, idx);
}
@Nullable
static String getSuspiciousMethodCallMessage(PsiReferenceExpression methodExpression,
PsiType argType,
boolean reportConvertibleMethodCalls,
@Nonnull List<PatternMethod> patternMethods,
int argIdx)
{
final PsiExpression qualifier = methodExpression.getQualifierExpression();
if(qualifier == null || qualifier instanceof PsiThisExpression || qualifier instanceof PsiSuperExpression)
{
return null;
}
if(argType instanceof PsiPrimitiveType)
{
argType = ((PsiPrimitiveType) argType).getBoxedType(methodExpression);
}
if(argType == null)
{
return null;
}
final JavaResolveResult resolveResult = methodExpression.advancedResolve(false);
PsiElement element = resolveResult.getElement();
if(!(element instanceof PsiMethod))
{
return null;
}
PsiMethod calleeMethod = (PsiMethod) element;
NullableLazyValue<PsiMethod> lazyContextMethod = NullableLazyValue.createValue(() -> PsiTreeUtil.getParentOfType(methodExpression, PsiMethod.class));
//noinspection SynchronizationOnLocalVariableOrMethodParameter
synchronized(patternMethods)
{
if(patternMethods.isEmpty())
{
setupPatternMethods(methodExpression.getManager(), methodExpression.getResolveScope(), patternMethods);
}
}
for(PatternMethod patternMethod : patternMethods)
{
PsiMethod method = patternMethod.patternMethod;
if(!method.getName().equals(methodExpression.getReferenceName()))
{
continue;
}
if(patternMethod.argIdx != argIdx)
{
continue;
}
//we are in collections method implementation
PsiMethod contextMethod = lazyContextMethod.getValue();
if(contextMethod != null && isInheritorOrSelf(contextMethod, method))
{
return null;
}
final PsiClass calleeClass = calleeMethod.getContainingClass();
PsiSubstitutor substitutor = resolveResult.getSubstitutor();
final PsiClass patternClass = method.getContainingClass();
assert patternClass != null;
assert calleeClass != null;
substitutor = TypeConversionUtil.getClassSubstitutor(patternClass, calleeClass, substitutor);
if(substitutor == null)
{
continue;
}
if(!method.getSignature(substitutor).equals(calleeMethod.getSignature(resolveResult.getSubstitutor())))
{
continue;
}
PsiTypeParameter[] typeParameters = patternClass.getTypeParameters();
if(typeParameters.length <= patternMethod.typeParameterIdx)
{
return null;
}
final PsiTypeParameter typeParameter = typeParameters[patternMethod.typeParameterIdx];
PsiType typeParamMapping = substitutor.substitute(typeParameter);
if(typeParamMapping == null)
{
return null;
}
PsiParameter[] parameters = method.getParameterList().getParameters();
if(parameters.length == 1 && ("removeAll".equals(method.getName()) || "retainAll".equals(method.getName())))
{
PsiType paramType = parameters[0].getType();
if(InheritanceUtil.isInheritor(paramType, CommonClassNames.JAVA_UTIL_COLLECTION))
{
PsiType qualifierType = qualifier.getType();
if(qualifierType != null)
{
final PsiType itemType = JavaGenericsUtil.getCollectionItemType(argType, calleeMethod.getResolveScope());
final PsiType qualifierItemType = JavaGenericsUtil.getCollectionItemType(qualifierType, calleeMethod.getResolveScope());
if(qualifierItemType != null && itemType != null && !qualifierItemType.isAssignableFrom(itemType))
{
if(TypeUtils.isJavaLangObject(itemType) && hasNullCollectionArg(methodExpression))
{
// removeAll(Collections.singleton(null)) is a valid way to remove all nulls from collection
return null;
}
if(qualifierItemType.isConvertibleFrom(itemType) && !reportConvertibleMethodCalls)
{
return null;
}
return InspectionsBundle.message("inspection.suspicious.collections.method.calls.problem.descriptor",
PsiFormatUtil.formatType(qualifierType, 0, PsiSubstitutor.EMPTY),
PsiFormatUtil.formatType(itemType, 0, PsiSubstitutor.EMPTY),
"objects");
}
}
return null;
}
}
String message = null;
if(typeParamMapping instanceof PsiCapturedWildcardType)
{
typeParamMapping = ((PsiCapturedWildcardType) typeParamMapping).getWildcard();
}
if(!typeParamMapping.isAssignableFrom(argType))
{
if(typeParamMapping.isConvertibleFrom(argType))
{
if(reportConvertibleMethodCalls)
{
message = InspectionsBundle.message("inspection.suspicious.collections.method.calls.problem.descriptor1",
PsiFormatUtil.formatMethod(calleeMethod, substitutor,
PsiFormatUtilBase.SHOW_NAME |
PsiFormatUtilBase.SHOW_CONTAINING_CLASS,
PsiFormatUtilBase.SHOW_TYPE));
}
}
else
{
PsiType qualifierType = qualifier.getType();
if(qualifierType != null)
{
message = InspectionsBundle.message("inspection.suspicious.collections.method.calls.problem.descriptor",
PsiFormatUtil.formatType(qualifierType, 0, PsiSubstitutor.EMPTY),
PsiFormatUtil.formatType(argType, 0, PsiSubstitutor.EMPTY),
getPreciseObjectTitle(patternClass, patternMethod.typeParameterIdx));
}
}
}
return message;
}
return null;
}
private static String getPreciseObjectTitle(PsiClass patternClass, int index)
{
if(InheritanceUtil.isInheritor(patternClass, CommonClassNames.JAVA_UTIL_MAP))
{
return index == 0 ? "keys" : "values";
}
return "objects";
}
private static boolean hasNullCollectionArg(PsiReferenceExpression methodExpression)
{
PsiMethodCallExpression call = ObjectUtils.tryCast(methodExpression.getParent(), PsiMethodCallExpression.class);
if(call != null)
{
PsiExpression arg =
ExpressionUtils.resolveExpression(ArrayUtil.getFirstElement(call.getArgumentList().getExpressions()));
PsiMethodCallExpression argCall =
ObjectUtils.tryCast(PsiUtil.skipParenthesizedExprDown(arg), PsiMethodCallExpression.class);
return SINGLETON_COLLECTION.test(argCall) && ExpressionUtils.isNullLiteral(argCall.getArgumentList().getExpressions()[0]);
}
return false;
}
	/**
	 * Value holder describing one collection method to check: the method itself,
	 * which class type parameter the argument must conform to, and which call
	 * argument is inspected.
	 */
	static class PatternMethod
	{
		// the collection/map method (e.g. remove, containsKey) this entry matches
		PsiMethod patternMethod;
		// index of the pattern class's type parameter to check against
		// (for Map: 0 = key type, 1 = value type — see getPreciseObjectTitle)
		int typeParameterIdx;
		// position of the inspected argument in the call's argument list
		int argIdx;

		PatternMethod(PsiMethod patternMethod, int typeParameterIdx, int argIdx)
		{
			this.patternMethod = patternMethod;
			this.typeParameterIdx = typeParameterIdx;
			this.argIdx = argIdx;
		}
	}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.impl;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import org.apache.camel.CamelContext;
import org.apache.camel.CamelContextAware;
import org.apache.camel.Exchange;
import org.apache.camel.MessageHistory;
import org.apache.camel.NamedNode;
import org.apache.camel.Processor;
import org.apache.camel.model.ProcessorDefinition;
import org.apache.camel.spi.Breakpoint;
import org.apache.camel.spi.CamelEvent;
import org.apache.camel.spi.CamelEvent.ExchangeCompletedEvent;
import org.apache.camel.spi.CamelEvent.ExchangeCreatedEvent;
import org.apache.camel.spi.CamelEvent.ExchangeEvent;
import org.apache.camel.spi.Condition;
import org.apache.camel.spi.Debugger;
import org.apache.camel.spi.EventNotifier;
import org.apache.camel.support.EventNotifierSupport;
import org.apache.camel.support.ServiceHelper;
import org.apache.camel.support.ServiceSupport;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The default implementation of the {@link Debugger}.
*/
public class DefaultDebugger extends ServiceSupport implements Debugger, CamelContextAware {

    private final EventNotifier debugEventNotifier = new DebugEventNotifier();
    private final List<BreakpointConditions> breakpoints = new CopyOnWriteArrayList<>();
    private final int maxConcurrentSingleSteps = 1;
    // Exchanges may be processed concurrently on different threads (beforeProcess/afterProcess/onEvent
    // and the event notifier all mutate this map), so it must be a concurrent map; a plain HashMap
    // can be corrupted by racing put/remove calls.
    private final Map<String, Breakpoint> singleSteps = new ConcurrentHashMap<>(maxConcurrentSingleSteps);
    private CamelContext camelContext;

    /**
     * Holder class for breakpoint and the associated conditions
     */
    private static final class BreakpointConditions {
        private final Breakpoint breakpoint;
        private final List<Condition> conditions;

        private BreakpointConditions(Breakpoint breakpoint) {
            this(breakpoint, new ArrayList<Condition>());
        }

        private BreakpointConditions(Breakpoint breakpoint, List<Condition> conditions) {
            this.breakpoint = breakpoint;
            this.conditions = conditions;
        }

        public Breakpoint getBreakpoint() {
            return breakpoint;
        }

        public List<Condition> getConditions() {
            return conditions;
        }
    }

    public DefaultDebugger() {
    }

    public DefaultDebugger(CamelContext camelContext) {
        this.camelContext = camelContext;
    }

    @Override
    public CamelContext getCamelContext() {
        return camelContext;
    }

    @Override
    public void setCamelContext(CamelContext camelContext) {
        this.camelContext = camelContext;
    }

    /**
     * Adds an unconditional breakpoint.
     */
    public void addBreakpoint(Breakpoint breakpoint) {
        breakpoints.add(new BreakpointConditions(breakpoint));
    }

    @Override
    public void addBreakpoint(Breakpoint breakpoint, Condition... conditions) {
        breakpoints.add(new BreakpointConditions(breakpoint, Arrays.asList(conditions)));
    }

    @Override
    public void addSingleStepBreakpoint(final Breakpoint breakpoint) {
        addSingleStepBreakpoint(breakpoint, new Condition[]{});
    }

    @Override
    public void addSingleStepBreakpoint(final Breakpoint breakpoint, Condition... conditions) {
        // wrap the breakpoint into single step breakpoint so we can automatic enable/disable the single step mode
        Breakpoint singlestep = new Breakpoint() {
            @Override
            public State getState() {
                return breakpoint.getState();
            }

            @Override
            public void suspend() {
                breakpoint.suspend();
            }

            @Override
            public void activate() {
                breakpoint.activate();
            }

            @Override
            public void beforeProcess(Exchange exchange, Processor processor, NamedNode definition) {
                breakpoint.beforeProcess(exchange, processor, definition);
            }

            @Override
            public void afterProcess(Exchange exchange, Processor processor, NamedNode definition, long timeTaken) {
                breakpoint.afterProcess(exchange, processor, definition, timeTaken);
            }

            @Override
            public void onEvent(Exchange exchange, ExchangeEvent event, NamedNode definition) {
                // enter single step mode when the exchange is created, leave it when the exchange completes
                if (event instanceof ExchangeCreatedEvent) {
                    exchange.getContext().getDebugger().startSingleStepExchange(exchange.getExchangeId(), this);
                } else if (event instanceof ExchangeCompletedEvent) {
                    exchange.getContext().getDebugger().stopSingleStepExchange(exchange.getExchangeId());
                }
                breakpoint.onEvent(exchange, event, definition);
            }

            @Override
            public String toString() {
                return breakpoint.toString();
            }
        };

        addBreakpoint(singlestep, conditions);
    }

    @Override
    public void removeBreakpoint(Breakpoint breakpoint) {
        // iterating a CopyOnWriteArrayList works on a snapshot, so removing while iterating is safe
        for (BreakpointConditions condition : breakpoints) {
            if (condition.getBreakpoint().equals(breakpoint)) {
                breakpoints.remove(condition);
            }
        }
    }

    @Override
    public void suspendAllBreakpoints() {
        for (BreakpointConditions breakpoint : breakpoints) {
            breakpoint.getBreakpoint().suspend();
        }
    }

    @Override
    public void activateAllBreakpoints() {
        for (BreakpointConditions breakpoint : breakpoints) {
            breakpoint.getBreakpoint().activate();
        }
    }

    @Override
    public List<Breakpoint> getBreakpoints() {
        // return an immutable snapshot so callers cannot mutate our internal state
        List<Breakpoint> answer = new ArrayList<>(breakpoints.size());
        for (BreakpointConditions e : breakpoints) {
            answer.add(e.getBreakpoint());
        }
        return Collections.unmodifiableList(answer);
    }

    @Override
    public boolean startSingleStepExchange(String exchangeId, Breakpoint breakpoint) {
        // can we accept single stepping the given exchange?
        if (singleSteps.size() >= maxConcurrentSingleSteps) {
            return false;
        }

        singleSteps.put(exchangeId, breakpoint);
        return true;
    }

    @Override
    public void stopSingleStepExchange(String exchangeId) {
        singleSteps.remove(exchangeId);
    }

    @Override
    public boolean beforeProcess(Exchange exchange, Processor processor, NamedNode definition) {
        // is the exchange in single step mode?
        Breakpoint singleStep = singleSteps.get(exchange.getExchangeId());
        if (singleStep != null) {
            onBeforeProcess(exchange, processor, definition, singleStep);
            return true;
        }

        // does any of the breakpoints apply?
        boolean match = false;
        for (BreakpointConditions breakpoint : breakpoints) {
            // breakpoint must be active
            if (Breakpoint.State.Active.equals(breakpoint.getBreakpoint().getState())) {
                if (matchConditions(exchange, processor, definition, breakpoint)) {
                    match = true;
                    onBeforeProcess(exchange, processor, definition, breakpoint.getBreakpoint());
                }
            }
        }

        return match;
    }

    @Override
    public boolean afterProcess(Exchange exchange, Processor processor, NamedNode definition, long timeTaken) {
        // is the exchange in single step mode?
        Breakpoint singleStep = singleSteps.get(exchange.getExchangeId());
        if (singleStep != null) {
            onAfterProcess(exchange, processor, definition, timeTaken, singleStep);
            return true;
        }

        // does any of the breakpoints apply?
        boolean match = false;
        for (BreakpointConditions breakpoint : breakpoints) {
            // breakpoint must be active
            if (Breakpoint.State.Active.equals(breakpoint.getBreakpoint().getState())) {
                if (matchConditions(exchange, processor, definition, breakpoint)) {
                    match = true;
                    onAfterProcess(exchange, processor, definition, timeTaken, breakpoint.getBreakpoint());
                }
            }
        }

        return match;
    }

    @Override
    public boolean onEvent(Exchange exchange, ExchangeEvent event) {
        // is the exchange in single step mode?
        Breakpoint singleStep = singleSteps.get(exchange.getExchangeId());
        if (singleStep != null) {
            onEvent(exchange, event, singleStep);
            return true;
        }

        // does any of the breakpoints apply?
        boolean match = false;
        for (BreakpointConditions breakpoint : breakpoints) {
            // breakpoint must be active
            if (Breakpoint.State.Active.equals(breakpoint.getBreakpoint().getState())) {
                if (matchConditions(exchange, event, breakpoint)) {
                    match = true;
                    onEvent(exchange, event, breakpoint.getBreakpoint());
                }
            }
        }

        return match;
    }

    protected void onBeforeProcess(Exchange exchange, Processor processor, NamedNode definition, Breakpoint breakpoint) {
        try {
            breakpoint.beforeProcess(exchange, processor, definition);
        } catch (Throwable e) {
            // a faulty breakpoint callback must never break the exchange being debugged
            log.warn("Exception occurred in breakpoint: {}. This exception will be ignored.", breakpoint, e);
        }
    }

    protected void onAfterProcess(Exchange exchange, Processor processor, NamedNode definition, long timeTaken, Breakpoint breakpoint) {
        try {
            breakpoint.afterProcess(exchange, processor, definition, timeTaken);
        } catch (Throwable e) {
            // a faulty breakpoint callback must never break the exchange being debugged
            log.warn("Exception occurred in breakpoint: {}. This exception will be ignored.", breakpoint, e);
        }
    }

    @SuppressWarnings("unchecked")
    protected void onEvent(Exchange exchange, ExchangeEvent event, Breakpoint breakpoint) {
        ProcessorDefinition<?> definition = null;

        // try to get the last known definition
        LinkedList<MessageHistory> list = exchange.getProperty(Exchange.MESSAGE_HISTORY, LinkedList.class);
        if (list != null && !list.isEmpty()) {
            NamedNode node = list.getLast().getNode();
            if (node instanceof ProcessorDefinition) {
                definition = (ProcessorDefinition<?>) node;
            }
        }

        try {
            breakpoint.onEvent(exchange, event, definition);
        } catch (Throwable e) {
            // a faulty breakpoint callback must never break the exchange being debugged
            log.warn("Exception occurred in breakpoint: {}. This exception will be ignored.", breakpoint, e);
        }
    }

    // all conditions must match for the breakpoint to fire (logical AND)
    private boolean matchConditions(Exchange exchange, Processor processor, NamedNode definition, BreakpointConditions breakpoint) {
        for (Condition condition : breakpoint.getConditions()) {
            if (!condition.matchProcess(exchange, processor, definition)) {
                return false;
            }
        }
        return true;
    }

    // all conditions must match for the breakpoint to fire (logical AND)
    private boolean matchConditions(Exchange exchange, ExchangeEvent event, BreakpointConditions breakpoint) {
        for (Condition condition : breakpoint.getConditions()) {
            if (!condition.matchEvent(exchange, event)) {
                return false;
            }
        }
        return true;
    }

    @Override
    public void start() throws Exception {
        super.start();
    }

    @Override
    protected void doStart() throws Exception {
        ObjectHelper.notNull(camelContext, "CamelContext", this);

        // register our event notifier
        ServiceHelper.startService(debugEventNotifier);
        camelContext.getManagementStrategy().addEventNotifier(debugEventNotifier);
    }

    @Override
    protected void doStop() throws Exception {
        breakpoints.clear();
        singleSteps.clear();
        ServiceHelper.stopService(debugEventNotifier);
    }

    @Override
    public String toString() {
        return "DefaultDebugger";
    }

    private final class DebugEventNotifier extends EventNotifierSupport {

        private DebugEventNotifier() {
            setIgnoreCamelContextEvents(true);
            setIgnoreServiceEvents(true);
        }

        @Override
        public void notify(CamelEvent event) throws Exception {
            // isEnabled guarantees we only receive ExchangeEvent instances here
            ExchangeEvent aee = (ExchangeEvent) event;
            Exchange exchange = aee.getExchange();
            onEvent(exchange, aee);

            if (event instanceof ExchangeCompletedEvent) {
                // fail safe to ensure we remove single steps when the Exchange is complete
                singleSteps.remove(exchange.getExchangeId());
            }
        }

        @Override
        public boolean isEnabled(CamelEvent event) {
            return event instanceof ExchangeEvent;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.protocol.amqp.converter.message;
import static org.apache.activemq.artemis.api.core.FilterConstants.NATIVE_MESSAGE_ID;
import static org.apache.activemq.artemis.api.core.Message.HDR_SCHEDULED_DELIVERY_TIME;
import static org.apache.activemq.artemis.protocol.amqp.converter.message.AMQPMessageSupport.AMQP_DATA;
import static org.apache.activemq.artemis.protocol.amqp.converter.message.AMQPMessageSupport.AMQP_NULL;
import static org.apache.activemq.artemis.protocol.amqp.converter.message.AMQPMessageSupport.AMQP_SEQUENCE;
import static org.apache.activemq.artemis.protocol.amqp.converter.message.AMQPMessageSupport.AMQP_UNKNOWN;
import static org.apache.activemq.artemis.protocol.amqp.converter.message.AMQPMessageSupport.AMQP_VALUE_BINARY;
import static org.apache.activemq.artemis.protocol.amqp.converter.message.AMQPMessageSupport.AMQP_VALUE_LIST;
import static org.apache.activemq.artemis.protocol.amqp.converter.message.AMQPMessageSupport.AMQP_VALUE_STRING;
import static org.apache.activemq.artemis.protocol.amqp.converter.message.AMQPMessageSupport.EMPTY_BINARY;
import static org.apache.activemq.artemis.protocol.amqp.converter.message.AMQPMessageSupport.JMS_AMQP_CONTENT_ENCODING;
import static org.apache.activemq.artemis.protocol.amqp.converter.message.AMQPMessageSupport.JMS_AMQP_CONTENT_TYPE;
import static org.apache.activemq.artemis.protocol.amqp.converter.message.AMQPMessageSupport.JMS_AMQP_DELIVERY_ANNOTATION_PREFIX;
import static org.apache.activemq.artemis.protocol.amqp.converter.message.AMQPMessageSupport.JMS_AMQP_FIRST_ACQUIRER;
import static org.apache.activemq.artemis.protocol.amqp.converter.message.AMQPMessageSupport.JMS_AMQP_FOOTER_PREFIX;
import static org.apache.activemq.artemis.protocol.amqp.converter.message.AMQPMessageSupport.JMS_AMQP_HEADER;
import static org.apache.activemq.artemis.protocol.amqp.converter.message.AMQPMessageSupport.JMS_AMQP_MESSAGE_ANNOTATION_PREFIX;
import static org.apache.activemq.artemis.protocol.amqp.converter.message.AMQPMessageSupport.JMS_AMQP_MESSAGE_FORMAT;
import static org.apache.activemq.artemis.protocol.amqp.converter.message.AMQPMessageSupport.JMS_AMQP_NATIVE;
import static org.apache.activemq.artemis.protocol.amqp.converter.message.AMQPMessageSupport.JMS_AMQP_ORIGINAL_ENCODING;
import static org.apache.activemq.artemis.protocol.amqp.converter.message.AMQPMessageSupport.JMS_AMQP_PREFIX;
import static org.apache.activemq.artemis.protocol.amqp.converter.message.AMQPMessageSupport.JMS_AMQP_PROPERTIES;
import static org.apache.activemq.artemis.protocol.amqp.converter.message.AMQPMessageSupport.JMS_AMQP_REPLYTO_GROUP_ID;
import static org.apache.activemq.artemis.protocol.amqp.converter.message.AMQPMessageSupport.SERIALIZED_JAVA_OBJECT_CONTENT_TYPE;
import static org.apache.activemq.artemis.protocol.amqp.converter.message.AMQPMessageSupport.toAddress;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import javax.jms.Destination;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageEOFException;
import javax.jms.Queue;
import javax.jms.TemporaryQueue;
import javax.jms.TemporaryTopic;
import javax.jms.TextMessage;
import javax.jms.Topic;
import org.apache.activemq.artemis.core.message.impl.MessageInternal;
import org.apache.activemq.artemis.protocol.amqp.converter.jms.ServerJMSBytesMessage;
import org.apache.activemq.artemis.protocol.amqp.converter.jms.ServerJMSMapMessage;
import org.apache.activemq.artemis.protocol.amqp.converter.jms.ServerJMSMessage;
import org.apache.activemq.artemis.protocol.amqp.converter.jms.ServerJMSObjectMessage;
import org.apache.activemq.artemis.protocol.amqp.converter.jms.ServerJMSStreamMessage;
import org.apache.activemq.artemis.protocol.amqp.converter.jms.ServerJMSTextMessage;
import org.apache.activemq.artemis.protocol.amqp.exceptions.ActiveMQAMQPIllegalStateException;
import org.apache.activemq.artemis.reader.MessageUtil;
import org.apache.activemq.artemis.utils.IDGenerator;
import org.apache.qpid.proton.amqp.Binary;
import org.apache.qpid.proton.amqp.Symbol;
import org.apache.qpid.proton.amqp.UnsignedByte;
import org.apache.qpid.proton.amqp.UnsignedInteger;
import org.apache.qpid.proton.amqp.messaging.AmqpSequence;
import org.apache.qpid.proton.amqp.messaging.AmqpValue;
import org.apache.qpid.proton.amqp.messaging.ApplicationProperties;
import org.apache.qpid.proton.amqp.messaging.Data;
import org.apache.qpid.proton.amqp.messaging.DeliveryAnnotations;
import org.apache.qpid.proton.amqp.messaging.Footer;
import org.apache.qpid.proton.amqp.messaging.Header;
import org.apache.qpid.proton.amqp.messaging.MessageAnnotations;
import org.apache.qpid.proton.amqp.messaging.Properties;
import org.apache.qpid.proton.amqp.messaging.Section;
import org.apache.qpid.proton.codec.AMQPDefinedTypes;
import org.apache.qpid.proton.codec.DecoderImpl;
import org.apache.qpid.proton.codec.EncoderImpl;
import org.apache.qpid.proton.codec.WritableBuffer;
import org.jboss.logging.Logger;
/**
 * Transforms a server-side JMS message facade into its AMQP wire encoding,
 * mapping JMS headers and properties onto the corresponding AMQP sections
 * (Header, DeliveryAnnotations, MessageAnnotations, Properties,
 * ApplicationProperties, body, Footer).
 */
public class JMSMappingOutboundTransformer extends OutboundTransformer {

   private static final Logger logger = Logger.getLogger(JMSMappingOutboundTransformer.class);

   public static final Symbol JMS_DEST_TYPE_MSG_ANNOTATION = Symbol.valueOf("x-opt-jms-dest");
   public static final Symbol JMS_REPLY_TO_TYPE_MSG_ANNOTATION = Symbol.valueOf("x-opt-jms-reply-to");

   public static final byte QUEUE_TYPE = 0x00;
   public static final byte TOPIC_TYPE = 0x01;
   public static final byte TEMP_QUEUE_TYPE = 0x02;
   public static final byte TEMP_TOPIC_TYPE = 0x03;

   // For now Proton requires that we create a decoder to create an encoder
   private static class EncoderDecoderPair {
      DecoderImpl decoder = new DecoderImpl();
      EncoderImpl encoder = new EncoderImpl(decoder);

      {
         AMQPDefinedTypes.registerAllTypes(decoder, encoder);
      }
   }

   // Proton codec instances are stateful, so keep one encoder/decoder pair per thread
   private static final ThreadLocal<EncoderDecoderPair> tlsCodec = new ThreadLocal<EncoderDecoderPair>() {
      @Override
      protected EncoderDecoderPair initialValue() {
         return new EncoderDecoderPair();
      }
   };

   public JMSMappingOutboundTransformer(IDGenerator idGenerator) {
      super(idGenerator);
   }

   /**
    * Encodes the given message into the supplied buffer as an AMQP message.
    *
    * @param message the JMS facade over the core message to convert; may be null
    * @param buffer the target buffer that receives the AMQP encoding
    * @return the AMQP message format code for the encoded message (0 unless the
    *         original inbound message carried a different format)
    * @throws JMSException if reading headers or properties from the message fails
    * @throws UnsupportedEncodingException if text content cannot be encoded
    */
   @Override
   public long transform(ServerJMSMessage message, WritableBuffer buffer) throws JMSException, UnsupportedEncodingException {
      if (message == null) {
         return 0;
      }

      long messageFormat = 0;
      Header header = null;
      Properties properties = null;
      Map<Symbol, Object> daMap = null;
      Map<Symbol, Object> maMap = null;
      Map<String, Object> apMap = null;
      Map<Object, Object> footerMap = null;

      Section body = convertBody(message);

      // JMS headers map onto the AMQP Header / Properties sections; the sections
      // are created lazily so absent values produce no section at all
      if (message.getInnerMessage().isDurable()) {
         if (header == null) {
            header = new Header();
         }
         header.setDurable(true);
      }
      byte priority = (byte) message.getJMSPriority();
      if (priority != Message.DEFAULT_PRIORITY) {
         if (header == null) {
            header = new Header();
         }
         header.setPriority(UnsignedByte.valueOf(priority));
      }
      String type = message.getJMSType();
      if (type != null) {
         if (properties == null) {
            properties = new Properties();
         }
         properties.setSubject(type);
      }
      String messageId = message.getJMSMessageID();
      if (messageId != null) {
         if (properties == null) {
            properties = new Properties();
         }
         try {
            properties.setMessageId(AMQPMessageIdHelper.INSTANCE.toIdObject(messageId));
         } catch (ActiveMQAMQPIllegalStateException e) {
            // could not be mapped to an AMQP id type, fall back to the raw string
            properties.setMessageId(messageId);
         }
      }
      Destination destination = message.getJMSDestination();
      if (destination != null) {
         if (properties == null) {
            properties = new Properties();
         }
         properties.setTo(toAddress(destination));
         if (maMap == null) {
            maMap = new HashMap<>();
         }
         maMap.put(JMS_DEST_TYPE_MSG_ANNOTATION, destinationType(destination));
      }
      Destination replyTo = message.getJMSReplyTo();
      if (replyTo != null) {
         if (properties == null) {
            properties = new Properties();
         }
         properties.setReplyTo(toAddress(replyTo));
         if (maMap == null) {
            maMap = new HashMap<>();
         }
         maMap.put(JMS_REPLY_TO_TYPE_MSG_ANNOTATION, destinationType(replyTo));
      }
      String correlationId = message.getJMSCorrelationID();
      if (correlationId != null) {
         if (properties == null) {
            properties = new Properties();
         }
         try {
            properties.setCorrelationId(AMQPMessageIdHelper.INSTANCE.toIdObject(correlationId));
         } catch (ActiveMQAMQPIllegalStateException e) {
            // could not be mapped to an AMQP id type, fall back to the raw string
            properties.setCorrelationId(correlationId);
         }
      }
      long expiration = message.getJMSExpiration();
      if (expiration != 0) {
         long ttl = expiration - System.currentTimeMillis();
         if (ttl < 0) {
            // already expired; use the smallest positive ttl rather than a negative one
            ttl = 1;
         }

         if (header == null) {
            header = new Header();
         }
         header.setTtl(new UnsignedInteger((int) ttl));

         if (properties == null) {
            properties = new Properties();
         }
         properties.setAbsoluteExpiryTime(new Date(expiration));
      }
      long timeStamp = message.getJMSTimestamp();
      if (timeStamp != 0) {
         if (properties == null) {
            properties = new Properties();
         }
         properties.setCreationTime(new Date(timeStamp));
      }

      // JMSX* and JMS_AMQP_* properties are translated to their AMQP equivalents;
      // anything not claimed by a branch below falls through to application properties
      final Set<String> keySet = MessageUtil.getPropertyNames(message.getInnerMessage());
      for (String key : keySet) {
         if (key.startsWith("JMSX")) {
            if (key.equals("JMSXDeliveryCount")) {
               // The AMQP delivery-count field only includes prior failed delivery attempts,
               // whereas JMSXDeliveryCount includes the first/current delivery attempt.
               int amqpDeliveryCount = message.getDeliveryCount() - 1;
               if (amqpDeliveryCount > 0) {
                  if (header == null) {
                     header = new Header();
                  }
                  header.setDeliveryCount(new UnsignedInteger(amqpDeliveryCount));
               }
               continue;
            } else if (key.equals("JMSXUserID")) {
               String value = message.getStringProperty(key);
               if (properties == null) {
                  properties = new Properties();
               }
               properties.setUserId(new Binary(value.getBytes(StandardCharsets.UTF_8)));
               continue;
            } else if (key.equals("JMSXGroupID")) {
               String value = message.getStringProperty(key);
               if (properties == null) {
                  properties = new Properties();
               }
               properties.setGroupId(value);
               continue;
            } else if (key.equals("JMSXGroupSeq")) {
               UnsignedInteger value = new UnsignedInteger(message.getIntProperty(key));
               if (properties == null) {
                  properties = new Properties();
               }
               properties.setGroupSequence(value);
               continue;
            }
         } else if (key.startsWith(JMS_AMQP_PREFIX)) {
            // AMQP Message Information stored from a conversion to the Core Message
            if (key.equals(JMS_AMQP_MESSAGE_FORMAT)) {
               messageFormat = message.getLongProperty(JMS_AMQP_MESSAGE_FORMAT);
               continue;
            } else if (key.equals(JMS_AMQP_NATIVE)) {
               // skip..internal use only
               continue;
            } else if (key.equals(JMS_AMQP_ORIGINAL_ENCODING)) {
               // skip..internal use only
               continue;
            } else if (key.equals(JMS_AMQP_FIRST_ACQUIRER)) {
               if (header == null) {
                  header = new Header();
               }
               header.setFirstAcquirer(message.getBooleanProperty(key));
               continue;
            } else if (key.equals(JMS_AMQP_HEADER)) {
               if (header == null) {
                  header = new Header();
               }
               continue;
            } else if (key.startsWith(JMS_AMQP_PROPERTIES)) {
               if (properties == null) {
                  properties = new Properties();
               }
               continue;
            } else if (key.startsWith(JMS_AMQP_DELIVERY_ANNOTATION_PREFIX)) {
               if (daMap == null) {
                  daMap = new HashMap<>();
               }
               String name = key.substring(JMS_AMQP_DELIVERY_ANNOTATION_PREFIX.length());
               daMap.put(Symbol.valueOf(name), message.getObjectProperty(key));
               continue;
            } else if (key.startsWith(JMS_AMQP_MESSAGE_ANNOTATION_PREFIX)) {
               if (maMap == null) {
                  maMap = new HashMap<>();
               }
               String name = key.substring(JMS_AMQP_MESSAGE_ANNOTATION_PREFIX.length());
               maMap.put(Symbol.valueOf(name), message.getObjectProperty(key));
               continue;
            } else if (key.equals(JMS_AMQP_CONTENT_TYPE)) {
               if (properties == null) {
                  properties = new Properties();
               }
               properties.setContentType(Symbol.getSymbol(message.getStringProperty(key)));
               continue;
            } else if (key.equals(JMS_AMQP_CONTENT_ENCODING)) {
               if (properties == null) {
                  properties = new Properties();
               }
               properties.setContentEncoding(Symbol.getSymbol(message.getStringProperty(key)));
               continue;
            } else if (key.equals(JMS_AMQP_REPLYTO_GROUP_ID)) {
               if (properties == null) {
                  properties = new Properties();
               }
               properties.setReplyToGroupId(message.getStringProperty(key));
               continue;
            } else if (key.startsWith(JMS_AMQP_FOOTER_PREFIX)) {
               if (footerMap == null) {
                  footerMap = new HashMap<>();
               }
               String name = key.substring(JMS_AMQP_FOOTER_PREFIX.length());
               footerMap.put(name, message.getObjectProperty(key));
               continue;
            }
         } else if (key.equals("_AMQ_GROUP_ID")) {
            String value = message.getStringProperty(key);
            if (properties == null) {
               properties = new Properties();
            }
            properties.setGroupId(value);
            continue;
         } else if (key.equals(NATIVE_MESSAGE_ID)) {
            // skip..internal use only
            continue;
         } else if (key.endsWith(HDR_SCHEDULED_DELIVERY_TIME.toString())) {
            // skip..remove annotation from previous inbound transformation
            continue;
         } else if (key.equals(AMQPMessageTypes.AMQP_TYPE_KEY)) {
            // skip..internal use only - TODO - Remove this deprecated value in future release.
            continue;
         }

         // everything else becomes an AMQP application property
         if (apMap == null) {
            apMap = new HashMap<>();
         }

         Object objectProperty = message.getObjectProperty(key);
         if (objectProperty instanceof byte[]) {
            objectProperty = new Binary((byte[]) objectProperty);
         }

         apMap.put(key, objectProperty);
      }

      // write the sections in the order mandated by the AMQP message format
      EncoderImpl encoder = tlsCodec.get().encoder;
      encoder.setByteBuffer(buffer);

      if (header != null) {
         encoder.writeObject(header);
      }
      if (daMap != null) {
         encoder.writeObject(new DeliveryAnnotations(daMap));
      }
      if (maMap != null) {
         encoder.writeObject(new MessageAnnotations(maMap));
      }
      if (properties != null) {
         encoder.writeObject(properties);
      }
      if (apMap != null) {
         encoder.writeObject(new ApplicationProperties(apMap));
      }
      if (body != null) {
         encoder.writeObject(body);
      }
      if (footerMap != null) {
         encoder.writeObject(new Footer(footerMap));
      }

      return messageFormat;
   }

   /**
    * Builds the AMQP body section for the message, honoring the original AMQP
    * encoding (if the message was previously converted from AMQP) so a
    * round-tripped message keeps its original body representation.
    */
   private Section convertBody(ServerJMSMessage message) throws JMSException {
      Section body = null;
      short originalEncoding = AMQP_UNKNOWN;

      try {
         originalEncoding = message.getShortProperty(JMS_AMQP_ORIGINAL_ENCODING);
      } catch (Exception ex) {
         // Ignore and stick with UNKNOWN
      }

      if (message instanceof ServerJMSBytesMessage) {
         Binary payload = getBinaryFromMessageBody((ServerJMSBytesMessage) message);

         if (payload == null) {
            payload = EMPTY_BINARY;
         }

         switch (originalEncoding) {
            case AMQP_NULL:
               break;
            case AMQP_VALUE_BINARY:
               body = new AmqpValue(payload);
               break;
            case AMQP_DATA:
            case AMQP_UNKNOWN:
            default:
               body = new Data(payload);
               break;
         }
      } else if (message instanceof ServerJMSTextMessage) {
         switch (originalEncoding) {
            case AMQP_NULL:
               break;
            case AMQP_DATA:
               body = new Data(getBinaryFromMessageBody((ServerJMSTextMessage) message));
               break;
            case AMQP_VALUE_STRING:
            case AMQP_UNKNOWN:
            default:
               body = new AmqpValue(((TextMessage) message).getText());
               break;
         }
      } else if (message instanceof ServerJMSMapMessage) {
         body = new AmqpValue(getMapFromMessageBody((ServerJMSMapMessage) message));
      } else if (message instanceof ServerJMSStreamMessage) {
         ArrayList<Object> list = new ArrayList<>();
         final ServerJMSStreamMessage m = (ServerJMSStreamMessage) message;
         try {
            // drain the stream message; readObject throws MessageEOFException at the end
            while (true) {
               list.add(m.readObject());
            }
         } catch (MessageEOFException e) {
            // expected: signals the end of the stream body
         }

         // Deprecated encoding markers - TODO - Remove on future release
         if (originalEncoding == AMQP_UNKNOWN) {
            String amqpType = message.getStringProperty(AMQPMessageTypes.AMQP_TYPE_KEY);
            if (amqpType != null) {
               if (amqpType.equals(AMQPMessageTypes.AMQP_LIST)) {
                  originalEncoding = AMQP_VALUE_LIST;
               } else {
                  originalEncoding = AMQP_SEQUENCE;
               }
            }
         }

         switch (originalEncoding) {
            case AMQP_SEQUENCE:
               body = new AmqpSequence(list);
               break;
            case AMQP_VALUE_LIST:
            case AMQP_UNKNOWN:
            default:
               body = new AmqpValue(list);
               break;
         }
      } else if (message instanceof ServerJMSObjectMessage) {
         Binary payload = getBinaryFromMessageBody((ServerJMSObjectMessage) message);

         if (payload == null) {
            payload = EMPTY_BINARY;
         }

         switch (originalEncoding) {
            case AMQP_VALUE_BINARY:
               body = new AmqpValue(payload);
               break;
            case AMQP_DATA:
            case AMQP_UNKNOWN:
            default:
               body = new Data(payload);
               break;
         }

         // For a non-AMQP message we tag the outbound content type as containing
         // a serialized Java object so that an AMQP client has a hint as to what
         // we are sending it.
         if (!message.propertyExists(JMS_AMQP_CONTENT_TYPE)) {
            message.setStringProperty(JMS_AMQP_CONTENT_TYPE, SERIALIZED_JAVA_OBJECT_CONTENT_TYPE);
         }
      } else if (message instanceof ServerJMSMessage) {
         // If this is not an AMQP message that was converted then the original encoding
         // will be unknown so we check for special cases of messages with special data
         // encoded into the server message body.
         if (originalEncoding == AMQP_UNKNOWN) {
            MessageInternal internalMessage = message.getInnerMessage();
            int readerIndex = internalMessage.getBodyBuffer().readerIndex();
            try {
               Object s = internalMessage.getBodyBuffer().readNullableSimpleString();
               if (s != null) {
                  body = new AmqpValue(s.toString());
               }
            } catch (Throwable ignored) {
               // use the two-arg overload (message + throwable); the three-arg debug
               // variant treats the first argument as the caller's FQCN, not the message
               logger.debug("Exception ignored during conversion, should be ok!", ignored);
            } finally {
               internalMessage.getBodyBuffer().readerIndex(readerIndex);
            }
         }
      }

      return body;
   }

   // Extracts the raw bytes of a BytesMessage body as an AMQP Binary.
   private Binary getBinaryFromMessageBody(ServerJMSBytesMessage message) throws JMSException {
      byte[] data = new byte[(int) message.getBodyLength()];
      message.readBytes(data);
      message.reset(); // Need to reset after readBytes so future readBytes calls start from the beginning
      return new Binary(data);
   }

   // Encodes a TextMessage body as UTF-8 bytes; returns null when the body is null.
   private Binary getBinaryFromMessageBody(ServerJMSTextMessage message) throws JMSException {
      Binary result = null;
      String text = message.getText();
      if (text != null) {
         result = new Binary(text.getBytes(StandardCharsets.UTF_8));
      }

      return result;
   }

   // Reads the serialized-object payload (length-prefixed) out of an ObjectMessage body.
   private Binary getBinaryFromMessageBody(ServerJMSObjectMessage message) throws JMSException {
      message.getInnerMessage().getBodyBuffer().resetReaderIndex();
      int size = message.getInnerMessage().getBodyBuffer().readInt();
      byte[] bytes = new byte[size];
      message.getInnerMessage().getBodyBuffer().readBytes(bytes);

      return new Binary(bytes);
   }

   // Copies a MapMessage body into an insertion-ordered map, wrapping byte[] values as Binary.
   private Map<String, Object> getMapFromMessageBody(ServerJMSMapMessage message) throws JMSException {
      final Map<String, Object> map = new LinkedHashMap<>();

      @SuppressWarnings("unchecked")
      final Enumeration<String> names = message.getMapNames();
      while (names.hasMoreElements()) {
         String key = names.nextElement();
         Object value = message.getObject(key);
         if (value instanceof byte[]) {
            value = new Binary((byte[]) value);
         }
         map.put(key, value);
      }

      return map;
   }

   /**
    * Maps a JMS destination onto the byte code used in the x-opt-jms-dest /
    * x-opt-jms-reply-to message annotations.
    *
    * @throws IllegalArgumentException if the destination is neither a Queue nor a Topic
    */
   private static byte destinationType(Destination destination) {
      if (destination instanceof Queue) {
         if (destination instanceof TemporaryQueue) {
            return TEMP_QUEUE_TYPE;
         } else {
            return QUEUE_TYPE;
         }
      } else if (destination instanceof Topic) {
         if (destination instanceof TemporaryTopic) {
            return TEMP_TOPIC_TYPE;
         } else {
            return TOPIC_TYPE;
         }
      }

      throw new IllegalArgumentException("Unknown Destination Type passed to JMS Transformer.");
   }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.geo;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Arrays;
import static org.hamcrest.Matchers.*;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
/**
 * Mapping and parsing tests for the {@code geo_point} field type: lat/lon
 * sub-fields, geohash generation, coordinate normalization and validation,
 * the various accepted input forms (object, string, array, geohash), stored
 * values, dynamic templates, and mapping merges.
 */
public class GeoPointFieldMapperTests extends ElasticsearchSingleNodeTest {

    /** With lat_lon enabled, .lat/.lon sub-fields are indexed but not stored; no geohash sub-field. */
    @Test
    public void testLatLonValues() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);

        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .startObject("point").field("lat", 1.2).field("lon", 1.3).endObject()
                .endObject()
                .bytes());

        assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
        assertThat(doc.rootDoc().getField("point.lat").fieldType().stored(), is(false));
        assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
        assertThat(doc.rootDoc().getField("point.lon").fieldType().stored(), is(false));
        assertThat(doc.rootDoc().getField("point.geohash"), nullValue());
        assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
    }

    /** Enabling geohash adds a .geohash sub-field derived from the lat/lon pair. */
    @Test
    public void testLatLonValuesWithGeohash() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true).endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);

        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .startObject("point").field("lat", 1.2).field("lon", 1.3).endObject()
                .endObject()
                .bytes());

        assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
        assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
        assertThat(doc.rootDoc().get("point.geohash"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
    }

    /** A "lat,lon" string value also produces the geohash sub-field. */
    @Test
    public void testLatLonInOneValueWithGeohash() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true).endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);

        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("point", "1.2,1.3")
                .endObject()
                .bytes());

        assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
        assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
        assertThat(doc.rootDoc().get("point.geohash"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
    }

    /** A geohash string input is decoded to lat/lon and re-indexed as a geohash. */
    @Test
    public void testGeoHashIndexValue() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true).endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);

        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("point", GeoHashUtils.encode(1.2, 1.3))
                .endObject()
                .bytes());

        assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
        assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
        assertThat(doc.rootDoc().get("point.geohash"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
    }

    /** A geohash input is accepted even when the geohash sub-field is not enabled. */
    @Test
    public void testGeoHashValue() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);

        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("point", GeoHashUtils.encode(1.2, 1.3))
                .endObject()
                .bytes());

        assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
        assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
        assertThat(doc.rootDoc().get("point"), notNullValue());
    }

    /** By default out-of-range coordinates are normalized back into valid ranges. */
    @Test
    public void testNormalizeLatLonValuesDefault() throws Exception {
        // default to normalize
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("point").field("type", "geo_point").endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);

        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .startObject("point").field("lat", 91).field("lon", 181).endObject()
                .endObject()
                .bytes());
        assertThat(doc.rootDoc().get("point"), equalTo("89.0,1.0"));

        doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .startObject("point").field("lat", -91).field("lon", -181).endObject()
                .endObject()
                .bytes());
        assertThat(doc.rootDoc().get("point"), equalTo("-89.0,-1.0"));

        doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .startObject("point").field("lat", 181).field("lon", 361).endObject()
                .endObject()
                .bytes());
        assertThat(doc.rootDoc().get("point"), equalTo("-1.0,-179.0"));
    }

    /** With validate=true and normalize=false, out-of-range coordinates are rejected. */
    @Test
    public void testValidateLatLonValues() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("normalize", false).field("validate", true).endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);

        // A boundary-valid point parses without error.
        defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .startObject("point").field("lat", 90).field("lon", 1.3).endObject()
                .endObject()
                .bytes());

        try {
            defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                    .startObject()
                    .startObject("point").field("lat", -91).field("lon", 1.3).endObject()
                    .endObject()
                    .bytes());
            fail();
        } catch (MapperParsingException e) {
            // expected: latitude below -90
        }

        try {
            defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                    .startObject()
                    .startObject("point").field("lat", 91).field("lon", 1.3).endObject()
                    .endObject()
                    .bytes());
            fail();
        } catch (MapperParsingException e) {
            // expected: latitude above 90
        }

        try {
            defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                    .startObject()
                    .startObject("point").field("lat", 1.2).field("lon", -181).endObject()
                    .endObject()
                    .bytes());
            fail();
        } catch (MapperParsingException e) {
            // expected: longitude below -180
        }

        try {
            defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                    .startObject()
                    .startObject("point").field("lat", 1.2).field("lon", 181).endObject()
                    .endObject()
                    .bytes());
            fail();
        } catch (MapperParsingException e) {
            // expected: longitude above 180
        }
    }

    /** With validate=false, the same out-of-range coordinates all parse without error. */
    @Test
    public void testNoValidateLatLonValues() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("normalize", false).field("validate", false).endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);

        defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .startObject("point").field("lat", 90).field("lon", 1.3).endObject()
                .endObject()
                .bytes());

        defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .startObject("point").field("lat", -91).field("lon", 1.3).endObject()
                .endObject()
                .bytes());

        defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .startObject("point").field("lat", 91).field("lon", 1.3).endObject()
                .endObject()
                .bytes());

        defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .startObject("point").field("lat", 1.2).field("lon", -181).endObject()
                .endObject()
                .bytes());

        defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .startObject("point").field("lat", 1.2).field("lon", 181).endObject()
                .endObject()
                .bytes());
    }

    /** With store=yes the numeric lat/lon values are retrievable from the stored fields. */
    @Test
    public void testLatLonValuesStored() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", "yes").endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);

        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .startObject("point").field("lat", 1.2).field("lon", 1.3).endObject()
                .endObject()
                .bytes());

        assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
        assertThat(doc.rootDoc().getField("point.lat").numericValue().doubleValue(), equalTo(1.2));
        assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
        assertThat(doc.rootDoc().getField("point.lon").numericValue().doubleValue(), equalTo(1.3));
        assertThat(doc.rootDoc().getField("point.geohash"), nullValue());
        assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
    }

    /** An array of lat/lon objects produces one field instance per point, in order. */
    @Test
    public void testArrayLatLonValues() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", "yes").endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);

        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .startArray("point")
                .startObject().field("lat", 1.2).field("lon", 1.3).endObject()
                .startObject().field("lat", 1.4).field("lon", 1.5).endObject()
                .endArray()
                .endObject()
                .bytes());

        assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(2));
        assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2));
        assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2));
        assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3));
        assertThat(doc.rootDoc().getFields("point")[0].stringValue(), equalTo("1.2,1.3"));
        assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4));
        assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5));
        assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5"));
    }

    /** A single "lat,lon" string is parsed into lat/lon sub-fields. */
    @Test
    public void testLatLonInOneValue() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);

        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("point", "1.2,1.3")
                .endObject()
                .bytes());

        assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
        assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
        assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
    }

    /** A "lat,lon" string with store=yes yields stored numeric lat/lon values. */
    @Test
    public void testLatLonInOneValueStored() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", "yes").endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);

        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("point", "1.2,1.3")
                .endObject()
                .bytes());

        assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
        assertThat(doc.rootDoc().getField("point.lat").numericValue().doubleValue(), equalTo(1.2));
        assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
        assertThat(doc.rootDoc().getField("point.lon").numericValue().doubleValue(), equalTo(1.3));
        assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
    }

    /** An array of "lat,lon" strings produces one field instance per point. */
    @Test
    public void testLatLonInOneValueArray() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", "yes").endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);

        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .startArray("point")
                .value("1.2,1.3")
                .value("1.4,1.5")
                .endArray()
                .endObject()
                .bytes());

        assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(2));
        assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2));
        assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2));
        assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3));
        assertThat(doc.rootDoc().getFields("point")[0].stringValue(), equalTo("1.2,1.3"));
        assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4));
        assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5));
        assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5"));
    }

    /** GeoJSON-style [lon, lat] arrays are accepted: note the reversed coordinate order. */
    @Test
    public void testLonLatArray() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);

        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .startArray("point").value(1.3).value(1.2).endArray()
                .endObject()
                .bytes());

        assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
        assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
        assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
    }

    /** A geo_point mapped via a dynamic template handles [lon, lat] arrays the same way. */
    @Test
    public void testLonLatArrayDynamic() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startArray("dynamic_templates").startObject()
                .startObject("point").field("match", "point*").startObject("mapping").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
                .endObject().endArray()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);

        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .startArray("point").value(1.3).value(1.2).endArray()
                .endObject()
                .bytes());

        assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
        assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
        assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
    }

    /** [lon, lat] array input with store=yes yields stored numeric lat/lon values. */
    @Test
    public void testLonLatArrayStored() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", "yes").endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);

        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .startArray("point").value(1.3).value(1.2).endArray()
                .endObject()
                .bytes());

        assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
        assertThat(doc.rootDoc().getField("point.lat").numericValue().doubleValue(), equalTo(1.2));
        assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
        assertThat(doc.rootDoc().getField("point.lon").numericValue().doubleValue(), equalTo(1.3));
        assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
    }

    /** An array of [lon, lat] arrays produces one field instance per point, in order. */
    @Test
    public void testLonLatArrayArrayStored() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", "yes").endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);

        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .startArray("point")
                .startArray().value(1.3).value(1.2).endArray()
                .startArray().value(1.5).value(1.4).endArray()
                .endArray()
                .endObject()
                .bytes());

        assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(2));
        assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2));
        assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2));
        assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3));
        assertThat(doc.rootDoc().getFields("point")[0].stringValue(), equalTo("1.2,1.3"));
        assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4));
        assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5));
        assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5"));
    }

    /** Merging a mapping that flips "validate" conflicts; re-merging a compatible one succeeds. */
    @Test
    public void testGeoPointMapperMerge() throws Exception {
        String stage1Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
                .field("validate", true).endObject().endObject()
                .endObject().endObject().string();
        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();

        DocumentMapper stage1 = parser.parse(stage1Mapping);
        String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
                .field("validate", false).endObject().endObject()
                .endObject().endObject().string();
        DocumentMapper stage2 = parser.parse(stage2Mapping);

        MergeResult mergeResult = stage1.merge(stage2.mapping(), false, false);
        assertThat(mergeResult.hasConflicts(), equalTo(true));
        assertThat(mergeResult.buildConflicts().length, equalTo(2));
        // todo better way of checking conflict?
        assertThat("mapper [point] has different validate_lat", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()))));

        // correct mapping and ensure no failures
        stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
                .field("validate", true).field("normalize", true).endObject().endObject()
                .endObject().endObject().string();
        stage2 = parser.parse(stage2Mapping);
        mergeResult = stage1.merge(stage2.mapping(), false, false);
        assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
    }
}
| |
/*
* The baseCode project
*
* Copyright (c) 2006 University of British Columbia
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package ubic.basecode.io;
import static org.junit.Assert.assertEquals;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.zip.ZipInputStream;
import org.apache.commons.lang3.ArrayUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import cern.colt.list.DoubleArrayList;
/**
*
* @author pavlidis
*/
/**
 * Round-trip and fixed-fixture tests for {@link ByteArrayConverter}: primitive
 * arrays (boolean, char, double, int, long), object arrays, strings, matrices,
 * and tab-delimited rendering. {@code setUp} also loads a large real-world
 * double matrix from a zipped fixture for the conversion-speed tests.
 *
 * @author pavlidis
 */
public class TestByteArrayConverter {

    private int a = 424542;
    private int b = 25425;
    private int c = 24524523;

    private char u = 'k';
    private char v = 'i';
    private char w = 'r';

    private double x = 424542.345;
    private double y = 25425.5652;
    private double z = 24524523.254;

    private ByteArrayConverter bac;

    // Pre-computed big-endian byte images the converter is expected to produce.
    private byte[] boolbytes = new byte[] { 1, 0, 1, 1, 0, 1 };
    private byte[] expectedBfC = new byte[] { 0, 107, 0, 105, 0, 114 };
    private byte[] expectedBfD = new byte[] { 65, 25, -23, 121, 97, 71, -82, 20, 64, -40, -44, 100, 44, 60, -98, -19,
            65, 119, 99, 110, -76, 16, 98, 78 };
    private byte[] expectedBfI = new byte[] { 0, 6, 122, 94, 0, 0, 99, 81, 1, 118 };
    private byte[] expectedLong = new byte[] { 0, 0, 0, 0, 0, 0, 0, 100, 0, 0, 0, 0, 0, 7, 118, -114, 0, 0, 0, 0, 2,
            -69, 75, 37, -1, -1, -1, -1, -1, -3, 47, -61, 0, 0, 0, 0, 0, 0, 3, -75, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 101, -1, -1, -1, -1, -1, -1, -1, 7 };

    // Long tab-delimited string of doubles, populated from the zip fixture in setUp.
    private String longDoubleString = "";

    private StringConverter sc;

    private boolean[] testbools = new boolean[] { true, false, true, true, false, true };
    private char[] testC = new char[] { u, v, w };
    private double[] testD = new double[] { x, y, z };
    private Double[] testDO = new Double[] { x, y, z };
    private int[] testI = new int[] { a, b, c };
    private DoubleArrayList tesDAL = new DoubleArrayList( testD );
    private int[] testInts = new int[] { 100, 489102, 45828901, -184381, 949, 0, 0, 1893, -249 };
    private long[] testlong = new long[] { 100L, 489102L, 45828901L, -184381L, 949L, 0L, 0L, 1893L, -249L };
    private String[] testStrings = new String[] { "foo", "bar", "profiglio", "schwartz", "000", "0", "" };

    private double[] wholeBunchOfDoubles;

    /*
     * @see TestCase#setUp()
     */
    @Before
    public void setUp() throws Exception {
        bac = new ByteArrayConverter();
        sc = new StringConverter();

        InputStream in = TestByteArrayConverter.class.getResourceAsStream( "/data/melanoma_and_sarcomaMAS5.zip" );
        assert in != null;
        // try-with-resources guarantees the streams are closed even if reading fails part-way.
        try ( ZipInputStream is = new ZipInputStream( in );
                BufferedReader br = new BufferedReader( new InputStreamReader( is ) ) ) {
            is.getNextEntry();
            StringBuilder buf = new StringBuilder();
            String line;
            br.readLine(); // ditch the first row.
            int k = 0;
            while ( ( line = br.readLine() ) != null ) {
                buf.append( line.split( "\t", 2 )[1] + "\t" ); // so we get a very long delimited string, albeit with a
                // trailing tab.
                k++;
                if ( k > 100 ) break; // cap the fixture size so tests stay fast.
            }
            longDoubleString = buf.toString();
            wholeBunchOfDoubles = sc.stringToDoubles( longDoubleString );
        }
    }

    /*
     * @see TestCase#tearDown()
     */
    @After
    public void tearDown() throws Exception {
        longDoubleString = null;
        wholeBunchOfDoubles = null;
        bac = null;
        sc = null;
    }

    /** boolean[] -> byte[] matches the expected 1/0 encoding. */
    @Test
    public void testBooleansToByteArray() {
        byte[] actual = bac.booleanArrayToBytes( testbools );
        for ( int i = 0; i < boolbytes.length; i++ ) {
            assertEquals( boolbytes[i], actual[i] );
        }
    }

    /** long[] -> byte[] matches the expected big-endian encoding. (Name is historical.) */
    @Test
    public void testBooleansToLongArray() {
        byte[] actual = bac.longArrayToBytes( testlong );
        for ( int i = 0; i < expectedLong.length; i++ ) {
            assertEquals( expectedLong[i], actual[i] );
        }
    }

    /** byte[] -> boolean[] inverts the boolean encoding. */
    @Test
    public void testByteArrayToBooleans() {
        boolean[] actual = bac.byteArrayToBooleans( boolbytes );
        for ( int i = 0; i < testbools.length; i++ ) {
            assertEquals( testbools[i], actual[i] );
        }
    }

    // test blob -> double[]
    /** Smoke/speed test: large byte[] -> double[] conversion completes. */
    @Test
    public void testByteArrayToDoubleConversionSpeed() {
        byte[] lottaBytes = bac.doubleArrayToBytes( wholeBunchOfDoubles );
        bac.byteArrayToDoubles( lottaBytes );
    }

    /** double[][] survives a round trip through bytes given the column count. */
    @Test
    public void testByteArrayToDoubleMatrix() {
        double[][] testm = new double[][] { { 1, 2 }, { 3, 4 }, { 5, 6 } };
        double[][] actualReturn = bac.byteArrayToDoubleMatrix( bac.doubleMatrixToBytes( testm ), 2 );
        for ( int i = 0; i < testm.length; i++ ) {
            for ( int j = 0; j < testm[i].length; j++ ) {
                assertEquals( testm[i][j], actualReturn[i][j], 0.001 );
            }
        }
    }

    /** double[] survives an exact round trip through bytes. */
    @Test
    public void testByteArrayToDoubles() {
        double[] actualReturn = bac.byteArrayToDoubles( bac.doubleArrayToBytes( testD ) );
        double[] expectedValue = testD;
        for ( int i = 0; i < actualReturn.length; i++ ) {
            assertEquals( "return value", expectedValue[i], actualReturn[i], 0 );
        }
    }

    /** byte[] -> long[] inverts the fixed long encoding. */
    @Test
    public void testByteArrayToLongs() {
        long[] actual = bac.byteArrayToLongs( expectedLong );
        for ( int i = 0; i < testlong.length; i++ ) {
            assertEquals( testlong[i], actual[i] );
        }
    }

    /** Boolean bytes render as a tab-delimited true/false string. */
    @Test
    public void testByteArrayToTabbedString() {
        String bools = bac.byteArrayToTabbedString( boolbytes, Boolean.class );
        assertEquals( "true\tfalse\ttrue\ttrue\tfalse\ttrue", bools );
    }

    // test double[] -> blob.
    /** Smoke/speed test: large double[] -> byte[] conversion completes. */
    @Test
    public void testDoubleArrayToByteArrayConversionSpeed() {
        bac.doubleArrayToBytes( wholeBunchOfDoubles );
    }

    /** double[], Double[] and DoubleArrayList all encode to the same expected bytes. */
    @Test
    public void testDoubleArrayToBytes() {
        byte[] actualReturn = bac.doubleArrayToBytes( testD );
        byte[] expectedValue = expectedBfD;
        for ( int i = 0; i < expectedValue.length; i++ ) {
            assertEquals( "return value", expectedValue[i], actualReturn[i] );
        }

        actualReturn = bac.doubleArrayToBytes( testDO );
        for ( int i = 0; i < expectedValue.length; i++ ) {
            assertEquals( "return value", expectedValue[i], actualReturn[i] );
        }

        actualReturn = bac.doubleArrayToBytes( tesDAL );
        for ( int i = 0; i < expectedValue.length; i++ ) {
            assertEquals( "return value", expectedValue[i], actualReturn[i] );
        }
    }

    // test double[] -> delimited string.
    /** Smoke/speed test: large double[] -> delimited string conversion completes. */
    @Test
    public void testDoubleArrayToDelimitedStringConversionSpeed() {
        sc.doubleArrayToString( wholeBunchOfDoubles );
    }

    /** int[] survives a round trip through bytes. */
    @Test
    public void testIntsToBytes() {
        int[] actualReturn = bac.byteArrayToInts( bac.intArrayToBytes( testInts ) );
        for ( int i = 0; i < testInts.length; i++ ) {
            assertEquals( testInts[i], actualReturn[i] );
        }
    }

    /** toBytes on boxed object arrays matches the primitive-array encodings. */
    @Test
    public void testObjectToBytes() {
        checkBytes( boolbytes, bac.toBytes( ArrayUtils.toObject( testbools ) ) );
        checkBytes( expectedBfD, bac.toBytes( ArrayUtils.toObject( testD ) ) );
        checkBytes( expectedBfC, bac.toBytes( ArrayUtils.toObject( testC ) ) );
        checkBytes( expectedBfI, bac.toBytes( ArrayUtils.toObject( testI ) ) );
    }

    /** String[] (including empty strings) survives a round trip through bytes. */
    @Test
    public void testStringToBytes() {
        String[] actualReturn = bac.byteArrayToStrings( bac.stringArrayToBytes( testStrings ) );
        for ( int i = 0; i < testStrings.length; i++ ) {
            assertEquals( testStrings[i], actualReturn[i] );
        }
    }

    // test string -> double[]
    /** Smoke/speed test: long delimited string -> double[] conversion completes. */
    @Test
    public void testStringToDoubleArrayConversionSpeed() {
        sc.stringToDoubles( longDoubleString );
    }

    /** Asserts element-wise equality over the length of the expected array. */
    private void checkBytes( byte[] expected, byte[] actual ) {
        for ( int i = 0; i < expected.length; i++ ) {
            assertEquals( expected[i], actual[i] );
        }
    }
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.java;
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.MutableActionGraph.ActionConflictException;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.FileProvider;
import com.google.devtools.build.lib.analysis.OutputGroupInfo;
import com.google.devtools.build.lib.analysis.RuleConfiguredTargetBuilder;
import com.google.devtools.build.lib.analysis.RuleConfiguredTargetFactory;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.Runfiles;
import com.google.devtools.build.lib.analysis.RunfilesProvider;
import com.google.devtools.build.lib.analysis.TransitiveInfoCollection;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.rules.cpp.LibraryToLink;
import java.util.LinkedHashSet;
import java.util.Set;
/** An implementation for the "java_import" rule. */
public class JavaImport implements RuleConfiguredTargetFactory {
private final JavaSemantics semantics;
  /**
   * Creates the rule implementation with the given language-variant semantics.
   *
   * @param semantics hooks that customize checking and provider construction for this Java flavor
   */
  protected JavaImport(JavaSemantics semantics) {
    this.semantics = semantics;
  }
  /**
   * Builds the configured target for a {@code java_import} rule: collects the precompiled jars
   * from the "jars" attribute, optionally reduces them to interface jars via ijar, and packages
   * compilation args, runfiles, source jars and proguard specs into the standard Java providers.
   *
   * <p>Returns {@code null} when attribute validation reported errors on the rule context.
   */
  @Override
  public ConfiguredTarget create(RuleContext ruleContext)
      throws InterruptedException, RuleErrorException, ActionConflictException {
    JavaCommon.checkRuleLoadedThroughMacro(ruleContext);
    ImmutableList<Artifact> srcJars = ImmutableList.of();
    ImmutableList<Artifact> jars = collectJars(ruleContext);
    Artifact srcJar = ruleContext.getPrerequisiteArtifact("srcjar");
    if (ruleContext.hasErrors()) {
      // collectJars() or the srcjar lookup reported attribute errors; abort analysis.
      return null;
    }
    // Both "deps" and "exports" are passed to JavaCommon as compile-time and runtime deps.
    ImmutableList<TransitiveInfoCollection> targets =
        ImmutableList.<TransitiveInfoCollection>builder()
            .addAll(ruleContext.getPrerequisites("deps"))
            .addAll(ruleContext.getPrerequisites("exports"))
            .build();
    final JavaCommon common =
        new JavaCommon(
            ruleContext,
            semantics,
            /* sources= */ ImmutableList.<Artifact>of(),
            targets,
            targets,
            targets);
    semantics.checkRule(ruleContext, common);
    // No need for javac options - no compilation happening here.
    ImmutableBiMap.Builder<Artifact, Artifact> compilationToRuntimeJarMapBuilder =
        ImmutableBiMap.builder();
    ImmutableList<Artifact> interfaceJars =
        processWithIjarIfNeeded(jars, ruleContext, compilationToRuntimeJarMapBuilder);
    JavaCompilationArtifacts javaArtifacts = collectJavaArtifacts(jars, interfaceJars);
    common.setJavaCompilationArtifacts(javaArtifacts);
    NestedSet<LibraryToLink> transitiveJavaNativeLibraries =
        common.collectTransitiveJavaNativeLibraries();
    boolean neverLink = JavaCommon.isNeverLink(ruleContext);
    JavaCompilationArgsProvider javaCompilationArgs =
        common.collectJavaCompilationArgs(neverLink, false);
    NestedSet<Artifact> transitiveJavaSourceJars =
        collectTransitiveJavaSourceJars(ruleContext, srcJar);
    if (srcJar != null) {
      srcJars = ImmutableList.of(srcJar);
    }
    // The "neverlink" attribute is transitive, so if it is enabled, we don't add any
    // runfiles from this target or its dependencies.
    Runfiles runfiles =
        neverLink
            ? Runfiles.EMPTY
            : new Runfiles.Builder(
                    ruleContext.getWorkspaceName(),
                    ruleContext.getConfiguration().legacyExternalRunfiles())
                // add the jars to the runfiles
                .addArtifacts(javaArtifacts.getRuntimeJars())
                .addTargets(targets, RunfilesProvider.DEFAULT_RUNFILES)
                .addRunfiles(ruleContext, RunfilesProvider.DEFAULT_RUNFILES)
                .addTargets(targets, JavaRunfilesProvider.TO_RUNFILES)
                .add(ruleContext, JavaRunfilesProvider.TO_RUNFILES)
                .build();
    RuleConfiguredTargetBuilder ruleBuilder = new RuleConfiguredTargetBuilder(ruleContext);
    NestedSetBuilder<Artifact> filesBuilder = NestedSetBuilder.stableOrder();
    filesBuilder.addAll(jars);
    ImmutableBiMap<Artifact, Artifact> compilationToRuntimeJarMap =
        compilationToRuntimeJarMapBuilder.build();
    semantics.addProviders(ruleContext, common, /* gensrcJar= */ null, ruleBuilder);
    NestedSet<Artifact> filesToBuild = filesBuilder.build();
    JavaSourceInfoProvider javaSourceInfoProvider =
        new JavaSourceInfoProvider.Builder()
            .setJarFiles(jars)
            .setSourceJarsForJarFiles(srcJars)
            .build();
    JavaRuleOutputJarsProvider.Builder ruleOutputJarsProviderBuilder =
        JavaRuleOutputJarsProvider.builder();
    for (Artifact jar : jars) {
      // Map each runtime jar back to its (possibly identical) compile-time interface jar.
      ruleOutputJarsProviderBuilder.addOutputJar(
          jar, compilationToRuntimeJarMap.inverse().get(jar), null /* manifestProto */, srcJars);
    }
    NestedSet<Artifact> proguardSpecs = new ProguardLibrary(ruleContext).collectProguardSpecs();
    JavaRuleOutputJarsProvider ruleOutputJarsProvider = ruleOutputJarsProviderBuilder.build();
    JavaSourceJarsProvider sourceJarsProvider =
        JavaSourceJarsProvider.create(transitiveJavaSourceJars, srcJars);
    JavaCompilationArgsProvider compilationArgsProvider = javaCompilationArgs;
    JavaInfo.Builder javaInfoBuilder = JavaInfo.Builder.create();
    common.addTransitiveInfoProviders(ruleBuilder, javaInfoBuilder, filesToBuild, null);
    JavaInfo javaInfo =
        javaInfoBuilder
            .addProvider(JavaCompilationArgsProvider.class, compilationArgsProvider)
            .addProvider(JavaRuleOutputJarsProvider.class, ruleOutputJarsProvider)
            .addProvider(JavaSourceJarsProvider.class, sourceJarsProvider)
            .addProvider(JavaSourceInfoProvider.class, javaSourceInfoProvider)
            .maybeTransitiveOnlyRuntimeJarsToJavaInfo(common.getDependencies(), true)
            .setRuntimeJars(javaArtifacts.getRuntimeJars())
            .setJavaConstraints(JavaCommon.getConstraints(ruleContext))
            .setNeverlink(neverLink)
            .build();
    return ruleBuilder
        .setFilesToBuild(filesToBuild)
        .addStarlarkTransitiveInfo(
            JavaStarlarkApiProvider.NAME, JavaStarlarkApiProvider.fromRuleContext())
        .addNativeDeclaredProvider(javaInfo)
        .add(RunfilesProvider.class, RunfilesProvider.simple(runfiles))
        .addNativeDeclaredProvider(new JavaNativeLibraryInfo(transitiveJavaNativeLibraries))
        .addNativeDeclaredProvider(new ProguardSpecProvider(proguardSpecs))
        .addOutputGroup(JavaSemantics.SOURCE_JARS_OUTPUT_GROUP, transitiveJavaSourceJars)
        .addOutputGroup(OutputGroupInfo.HIDDEN_TOP_LEVEL, proguardSpecs)
        .build();
  }
private NestedSet<Artifact> collectTransitiveJavaSourceJars(
RuleContext ruleContext, Artifact srcJar) {
NestedSetBuilder<Artifact> transitiveJavaSourceJarBuilder = NestedSetBuilder.stableOrder();
if (srcJar != null) {
transitiveJavaSourceJarBuilder.add(srcJar);
}
for (JavaSourceJarsProvider other :
JavaInfo.getProvidersFromListOfTargets(
JavaSourceJarsProvider.class, ruleContext.getPrerequisites("exports"))) {
transitiveJavaSourceJarBuilder.addTransitive(other.getTransitiveSourceJars());
}
return transitiveJavaSourceJarBuilder.build();
}
private JavaCompilationArtifacts collectJavaArtifacts(
ImmutableList<Artifact> jars, ImmutableList<Artifact> interfaceJars) {
return new JavaCompilationArtifacts.Builder()
.addRuntimeJars(jars)
.addFullCompileTimeJars(jars)
// interfaceJars Artifacts have proper owner labels
.addInterfaceJars(interfaceJars)
.build();
}
private ImmutableList<Artifact> collectJars(RuleContext ruleContext) {
Set<Artifact> jars = new LinkedHashSet<>();
for (TransitiveInfoCollection info : ruleContext.getPrerequisites("jars")) {
if (JavaInfo.getProvider(JavaCompilationArgsProvider.class, info) != null) {
ruleContext.attributeError("jars", "should not refer to Java rules");
}
for (Artifact jar : info.getProvider(FileProvider.class).getFilesToBuild().toList()) {
if (!JavaSemantics.JAR.matches(jar.getFilename())) {
ruleContext.attributeError("jars", jar.getFilename() + " is not a .jar file");
} else {
if (!jars.add(jar)) {
ruleContext.attributeError("jars", jar.getFilename() + " is a duplicate");
}
}
}
}
return ImmutableList.copyOf(jars);
}
private ImmutableList<Artifact> processWithIjarIfNeeded(
ImmutableList<Artifact> jars,
RuleContext ruleContext,
ImmutableMap.Builder<Artifact, Artifact> compilationToRuntimeJarMap) {
ImmutableList.Builder<Artifact> interfaceJarsBuilder = ImmutableList.builder();
boolean useIjar = ruleContext.getFragment(JavaConfiguration.class).getUseIjars();
for (Artifact jar : jars) {
Artifact interfaceJar =
useIjar
? JavaCompilationHelper.createIjarAction(
ruleContext,
JavaToolchainProvider.from(ruleContext),
jar,
ruleContext.getLabel(),
/* injectingRuleKind */ null,
true)
: jar;
interfaceJarsBuilder.add(interfaceJar);
compilationToRuntimeJarMap.put(interfaceJar, jar);
}
return interfaceJarsBuilder.build();
}
}
| |
/*
Copyright 1995-2015 Esri
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
For additional information, contact:
Environmental Systems Research Institute, Inc.
Attn: Contracts Dept
380 New York Street
Redlands, California, USA 92373
email: contracts@esri.com
*/
package com.esri.core.geometry;
import com.esri.core.geometry.VertexDescription.Semantics;
import java.io.Serializable;
/**
* A straight line between a pair of points.
*
*/
public final class Line extends Segment implements Serializable {
private static final long serialVersionUID = 2L;// TODO:remove as we use
// writeReplace and
// GeometrySerializer
	/** Line segments always report geometry type Line. */
	@Override
	public Geometry.Type getType() {
		return Type.Line;
	}
	/** Returns the Euclidean length of the segment in the XY plane. */
	@Override
	public double calculateLength2D() {
		double dx = m_xStart - m_xEnd;
		double dy = m_yStart - m_yEnd;
		return Math.sqrt(dx * dx + dy * dy);
	}
	// A segment is degenerate when its endpoints are within the tolerance of each other.
	@Override
	boolean isDegenerate(double tolerance) {
		double dx = m_xStart - m_xEnd;
		double dy = m_yStart - m_yEnd;
		return Math.sqrt(dx * dx + dy * dy) <= tolerance;
	}
	/**
	 * Indicates if the line segment is a curve.
	 */
	@Override
	public boolean isCurve() {
		return false;
	}
	// The tangent of a straight line is the constant vector End - Start,
	// independent of the parameter t.
	@Override
	Point2D _getTangent(double t) {
		Point2D pt = new Point2D();
		pt.sub(getEndXY(), getStartXY());
		return pt;
	}
	@Override
	boolean _isDegenerate(double tolerance) {
		return calculateLength2D() <= tolerance;
	}
	// HEADER DEF
	// Cpp
	/**
	 * Creates a line segment.
	 */
	public Line() {
		m_description = VertexDescriptionDesignerImpl.getDefaultDescriptor2D();
	}
	// Creates a line segment with the given vertex description (attribute set).
	Line(VertexDescription vd) {
		m_description = vd;
	}
	// Creates an XY line segment from start point (x1, y1) to end point (x2, y2).
	Line(double x1, double y1, double x2, double y2) {
		m_description = VertexDescriptionDesignerImpl.getDefaultDescriptor2D();
		setStartXY(x1, y1);
		setEndXY(x2, y2);
	}
@Override
public void queryEnvelope(Envelope env) {
env.setEmpty();
env.assignVertexDescription(m_description);
Envelope2D env2D = new Envelope2D();
queryEnvelope2D(env2D);
env.setEnvelope2D(env2D);
for (int i = 1, n = m_description.getAttributeCount(); i < n; i++) {
int semantics = m_description.getSemantics(i);
for (int iord = 0, nord = VertexDescription
.getComponentCount(semantics); i < nord; i++) {
Envelope1D interval = queryInterval(semantics, iord);
env.setInterval(semantics, iord, interval);
}
}
}
	/** Writes the axis-aligned XY bounding box of the segment into {@code env}. */
	@Override
	public void queryEnvelope2D(Envelope2D env) {
		env.setCoords(m_xStart, m_yStart, m_xEnd, m_yEnd);
		// normalize() reorders coordinates so that xmin <= xmax and ymin <= ymax.
		env.normalize();
	}
	// Writes the 3D bounds (XY plus the Z attribute of both endpoints) into env.
	@Override
	void queryEnvelope3D(Envelope3D env) {
		env.setEmpty();
		env.merge(m_xStart, m_yStart, _getAttributeAsDbl(0, Semantics.Z, 0));
		env.merge(m_xEnd, m_yEnd, _getAttributeAsDbl(1, Semantics.Z, 0));
	}
	/** Transforms both endpoints of the segment in place with the given 2D transformation. */
	@Override
	public void applyTransformation(Transformation2D transform) {
		_touch();
		Point2D pt = new Point2D();
		pt.x = m_xStart;
		pt.y = m_yStart;
		transform.transform(pt, pt);
		m_xStart = pt.x;
		m_yStart = pt.y;
		pt.x = m_xEnd;
		pt.y = m_yEnd;
		transform.transform(pt, pt);
		m_xEnd = pt.x;
		m_yEnd = pt.y;
	}
	// Transforms both endpoints (including their Z attribute) in place with the
	// given 3D transformation.
	@Override
	void applyTransformation(Transformation3D transform) {
		_touch();
		Point3D pt = new Point3D();
		pt.x = m_xStart;
		pt.y = m_yStart;
		pt.z = _getAttributeAsDbl(0, Semantics.Z, 0);
		pt = transform.transform(pt);
		m_xStart = pt.x;
		m_yStart = pt.y;
		_setAttribute(0, Semantics.Z, 0, pt.z);
		pt.x = m_xEnd;
		pt.y = m_yEnd;
		pt.z = _getAttributeAsDbl(1, Semantics.Z, 0);
		pt = transform.transform(pt);
		m_xEnd = pt.x;
		m_yEnd = pt.y;
		_setAttribute(1, Semantics.Z, 0, pt.z);
	}
	/** Creates a new empty Line sharing this segment's vertex description. */
	@Override
	public Geometry createInstance() {
		return new Line(m_description);
	}
	// Signed trapezoid-area contribution of this segment, measured relative to the
	// origin (xorg, yorg).
	@Override
	double _calculateArea2DHelper(double xorg, double yorg) {
		return ((m_xEnd - xorg) - (m_xStart - xorg))
				* ((m_yEnd - yorg) + (m_yStart - yorg)) * 0.5;
	}
	// Converts the parametric coordinate t (0..1) to a distance along the segment.
	@Override
	double tToLength(double t) {
		return t * calculateLength2D();
	}
	// Converts a distance along the segment to the parametric coordinate t (0..1).
	@Override
	double lengthToT(double len) {
		return len / calculateLength2D();
	}
	// Interpolated x at parameter t.
	double getCoordX_(double t) {
		// Must match query_coord_2D and vice verse
		// Also match get_attribute_as_dbl
		return MathUtils.lerp(m_xStart, m_xEnd, t);
	}
	// Interpolated y at parameter t.
	double getCoordY_(double t) {
		// Must match query_coord_2D and vice verse
		// Also match get_attribute_as_dbl
		return MathUtils.lerp(m_yStart, m_yEnd, t);
	}
	// Evaluates the point on the segment at parameter t into pt.
	@Override
	void getCoord2D(double t, Point2D pt) {
		// We want:
		// 1. When t == 0, get exactly Start
		// 2. When t == 1, get exactly End
		// 3. When m_x_end == m_x_start, we want m_x_start exactly
		// 4. When m_y_end == m_y_start, we want m_y_start exactly
		MathUtils.lerp(m_xStart, m_yStart, m_xEnd, m_yEnd, t, pt);
	}
	/** Returns the sub-segment between parameters t1 and t2 as a new Segment. */
	@Override
	Segment cut(double t1, double t2) {
		SegmentBuffer segmentBuffer = new SegmentBuffer();
		cut(t1, t2, segmentBuffer);
		return segmentBuffer.get();
	}
	// Writes the sub-segment between parameters t1 and t2 into subSegmentBuffer,
	// interpolating every vertex attribute for the new endpoints.
	@Override
	void cut(double t1, double t2, SegmentBuffer subSegmentBuffer) {
		if (subSegmentBuffer == null)
			throw new IllegalArgumentException();
		subSegmentBuffer.createLine();// Make sure buffer contains Line class.
		Segment subSegment = subSegmentBuffer.get();
		subSegment.assignVertexDescription(m_description);
		Point2D point = new Point2D();
		getCoord2D(t1, point);
		subSegment.setStartXY(point.x, point.y);
		getCoord2D(t2, point);
		subSegment.setEndXY(point.x, point.y);
		// Interpolate all non-positional attributes (attribute 0 is POSITION).
		for (int iattr = 1, nattr = m_description.getAttributeCount(); iattr < nattr; iattr++) {
			int semantics = m_description._getSemanticsImpl(iattr);
			int ncomps = VertexDescription.getComponentCount(semantics);
			for (int ordinate = 0; ordinate < ncomps; ordinate++) {
				double value1 = getAttributeAsDbl(t1, semantics, ordinate);
				subSegment.setStartAttribute(semantics, ordinate, value1);
				double value2 = getAttributeAsDbl(t2, semantics, ordinate);
				subSegment.setEndAttribute(semantics, ordinate, value2);
			}
		}
	}
	/**
	 * Returns the value of the given attribute component at parameter t, interpolated
	 * according to the attribute's interpolation mode: NONE picks the nearer endpoint,
	 * LINEAR interpolates between the endpoints, ANGULAR is not implemented.
	 */
	@Override
	public double getAttributeAsDbl(double t, int semantics, int ordinate) {
		if (semantics == VertexDescription.Semantics.POSITION)
			return ordinate == 0 ? getCoord2D(t).x : getCoord2D(t).y;
		int interpolation = VertexDescription.getInterpolation(semantics);
		switch (interpolation) {
		case VertexDescription.Interpolation.NONE:
			if (t < 0.5)
				return getStartAttributeAsDbl(semantics, ordinate);
			else
				return getEndAttributeAsDbl(semantics, ordinate);
		case VertexDescription.Interpolation.LINEAR: {
			double s = getStartAttributeAsDbl(semantics, ordinate);
			double e = getEndAttributeAsDbl(semantics, ordinate);
			return MathUtils.lerp(s, e, t);
		}
		case VertexDescription.Interpolation.ANGULAR: {
			throw new GeometryException("not implemented");
		}
		}
		throw GeometryException.GeometryInternalError();
	}
	/**
	 * Returns the parameter t of the point on this segment closest to inputPt. When
	 * bExtrapolate is false the result is clamped to [0, 1]. A degenerate
	 * (zero-length) segment returns 0.5.
	 */
	@Override
	double getClosestCoordinate(Point2D inputPt, boolean bExtrapolate) {
		double vx = m_xEnd - m_xStart;
		double vy = m_yEnd - m_yStart;
		double v2 = vx * vx + vy * vy;
		if (v2 == 0)
			return 0.5;
		double rx = inputPt.x - m_xStart;
		double ry = inputPt.y - m_yStart;
		// Scalar projection of (inputPt - start) onto the segment direction.
		double t = (rx * vx + ry * vy) / v2;
		if (!bExtrapolate) {
			if (t < 0.0)
				t = 0.0;
			else if (t > 1.0)
				t = 1.0;
		}
		return t;
	}
	/**
	 * Intersects the segment with the horizontal (b_axis_x true) or vertical line at the
	 * given ordinate. Returns 1 when a single crossing is found (its ordinate and
	 * parameter are written to the optional output arrays), 0 when there is none, and -1
	 * when the segment is parallel to and coincident with the axis line.
	 */
	@Override
	public int intersectionWithAxis2D(boolean b_axis_x, double ordinate,
			double[] result_ordinates, double[] parameters) {
		if (b_axis_x) {
			double a = (m_yEnd - m_yStart);
			if (a == 0)
				// Horizontal segment: collinear with the axis line (-1) or disjoint (0).
				return (ordinate == m_yEnd) ? -1 : 0;
			double t = (ordinate - m_yStart) / a;
			if (t < 0.0 || t > 1.0)
				return 0;
			if (result_ordinates != null)
				(result_ordinates)[0] = getCoordX_(t);
			if (parameters != null)
				(parameters)[0] = t;
			return 1;
		} else {
			double a = (m_xEnd - m_xStart);
			if (a == 0)
				// Vertical segment: collinear with the axis line (-1) or disjoint (0).
				return (ordinate == m_xEnd) ? -1 : 0;
			double t = (ordinate - m_xStart) / a;
			if (t < 0.0 || t > 1.0)
				return 0;
			if (result_ordinates != null)
				(result_ordinates)[0] = getCoordY_(t);
			if (parameters != null)
				(parameters)[0] = t;
			return 1;
		}
	}
	// line segment can have 0 or 1 intersection interval with clipEnv2D.
	// The function return 0 or 2 segParams (e.g. 0.0, 0.4; or 0.1, 0.9; or 0.6,
	// 1.0; or 0.0, 1.0)
	// segParams will be sorted in ascending order; the order of the
	// envelopeDistances will correspond (i.e. the envelopeDistances may not be
	// in ascending order);
	// an envelopeDistance can be -1.0 if the corresponding endpoint is properly
	// inside clipEnv2D.
	int intersectionWithEnvelope2D(Envelope2D clipEnv2D,
			boolean includeEnvBoundary, double[] segParams,
			double[] envelopeDistances) {
		Point2D p1 = getStartXY();
		Point2D p2 = getEndXY();
		// NOTE(review): includeEnvBoundary is currently unused (the original
		// "includeEnvBoundary xxx ???" marker suggests this was never finished).
		int modified = clipEnv2D.clipLine(p1, p2, 0, segParams,
				envelopeDistances);
		return modified != 0 ? 2 : 0;
	}
	// For this (y-monotonic) segment, returns the x-coordinate where it crosses the
	// horizontal line at the given y. A horizontal segment at that y returns
	// x_parallel; the caller must ensure y is within the segment's y-range.
	@Override
	double intersectionOfYMonotonicWithAxisX(double y, double x_parallel) {
		double a = (m_yEnd - m_yStart);
		if (a == 0)
			return (y == m_yEnd) ? x_parallel : NumberUtils.NaN();
		double t = (y - m_yStart) / a;
		assert (t >= 0 && t <= 1.0);
		// double t_1 = 1.0 - t;
		// assert(t + t_1 == 1.0);
		double resx = getCoordX_(t);
		if (t == 1.0)
			// Guarantee the exact end x at t == 1 despite lerp roundoff.
			resx = m_xEnd;
		assert ((resx >= m_xStart && resx <= m_xEnd) || (resx <= m_xStart && resx >= m_xEnd));
		return resx;
	}
	// Must use the same method (_intersection) that the intersection routine uses, so
	// that the point-vs-segment predicate and the intersection computation agree.
	// _intersection returns NaN for "no intersection", and NaN >= 0 is false.
	@Override
	boolean _isIntersectingPoint(Point2D pt, double tolerance,
			boolean bExcludeExactEndpoints) {
		return _intersection(pt, tolerance, bExcludeExactEndpoints) >= 0;
	}
	/**
	 * Returns True if point and the segment intersect (not disjoint) for the
	 * given tolerance.
	 */
	@Override
	boolean isIntersecting(Point2D pt, double tolerance) {
		return _isIntersectingPoint(pt, tolerance, false);
	}
	// Orients the segment bottom-up: if the end point is below the start point (or level
	// with it and to its left), swaps the endpoints together with their non-positional
	// attribute values.
	void orientBottomUp_() {
		if (m_yEnd < m_yStart || (m_yEnd == m_yStart && m_xEnd < m_xStart)) {
			double x = m_xStart;
			m_xStart = m_xEnd;
			m_xEnd = x;
			double y = m_yStart;
			m_yStart = m_yEnd;
			m_yEnd = y;
			// Swap the attribute blocks of the two vertices; n is the number of
			// attribute components per vertex, excluding x and y.
			for (int i = 0, n = m_description.getTotalComponentCount() - 2; i < n; i++) {
				double a = m_attributes[i];
				m_attributes[i] = m_attributes[i + n];
				m_attributes[i + n] = a;
			}
		}
	}
	// return -1 for the left side from the infinite line passing through this
	// Line, 1 for the right side of the line, 0 if on the line (in the bounds
	// of the roundoff error)
	int _side(Point2D pt) {
		return _side(pt.x, pt.y);
	}
	// return -1 for the left side from the infinite line passing through this
	// Line, 1 for the right side of the line, 0 if on the line (in the bounds
	// of the roundoff error)
	int _side(double ptX, double ptY) {
		Point2D v1 = new Point2D(ptX, ptY);
		v1.sub(getStartXY());
		Point2D v2 = new Point2D();
		v2.sub(getEndXY(), getStartXY());
		// The sign of the cross product decides the side; crossError bounds the
		// floating-point roundoff so near-collinear points report 0.
		double cross = v2.crossProduct(v1);
		double crossError = 4 * NumberUtils.doubleEps()
				* (Math.abs(v2.x * v1.y) + Math.abs(v2.y * v1.x));
		return cross > crossError ? -1 : cross < -crossError ? 1 : 0;
	}
	/**
	 * Returns the parameter t in [0, 1] of the point on this segment that touches pt
	 * (within the tolerance and the estimated roundoff error), or NaN when pt does not
	 * touch the segment. When bExcludeExactEndPoints is true, a bitwise-exact match
	 * with an endpoint yields NaN instead of 0 or 1.
	 */
	double _intersection(Point2D pt, double tolerance,
			boolean bExcludeExactEndPoints) {
		Point2D v = new Point2D();
		Point2D start = new Point2D();
		// Test start point distance to pt.
		start.setCoords(m_xStart, m_yStart);
		v.sub(pt, start);
		double vlength = v.length();
		double vLengthError = vlength * 3 * NumberUtils.doubleEps();
		if (vlength <= Math.max(tolerance, vLengthError)) {
			assert (vlength != 0 || pt.isEqual(start));// probably never asserts
			if (bExcludeExactEndPoints && vlength == 0)
				return NumberUtils.TheNaN;
			else
				return 0;
		}
		Point2D end2D = getEndXY();
		// Test end point distance to pt.
		v.sub(pt, end2D);
		vlength = v.length();
		vLengthError = vlength * 3 * NumberUtils.doubleEps();
		if (vlength <= Math.max(tolerance, vLengthError)) {
			assert (vlength != 0 || pt.isEqual(end2D));// probably never asserts
			if (bExcludeExactEndPoints && vlength == 0)
				return NumberUtils.TheNaN;
			else
				return 1.0;
		}
		// Find a distance from the line to pt.
		v.setCoords(m_xEnd - m_xStart, m_yEnd - m_yStart);
		double len = v.length();
		if (len > 0) {
			double invertedLength = 1.0 / len;
			v.scale(invertedLength);
			Point2D relativePoint = new Point2D();
			relativePoint.sub(pt, start);
			// Scalar projection of pt onto the unit segment direction.
			double projection = relativePoint.dotProduct(v);
			double projectionError = 8 * relativePoint._dotProductAbs(v)
					* NumberUtils.doubleEps();// See Error Estimation Rules In Borg.docx
			v.leftPerpendicular();// get left normal to v
			// Signed distance from pt to the infinite line through the segment.
			double distance = relativePoint.dotProduct(v);
			double distanceError = 8 * relativePoint._dotProductAbs(v)
					* NumberUtils.doubleEps();// See Error Estimation Rules In Borg.docx
			double perror = Math.max(tolerance, projectionError);
			if (projection < -perror || projection > len + perror)
				return NumberUtils.TheNaN;
			double merror = Math.max(tolerance, distanceError);
			if (Math.abs(distance) <= merror) {
				double t = projection * invertedLength;
				t = NumberUtils.snap(t, 0.0, 1.0);
				Point2D ptOnLine = new Point2D();
				getCoord2D(t, ptOnLine);
				if (Point2D.distance(ptOnLine, pt) <= tolerance) {
					if (t < 0.5) {
						// The projected point is close to the start point;
						// need to return 0 in that case.
						if (Point2D.distance(ptOnLine, start) <= tolerance)
							return 0;
					} else if (Point2D.distance(ptOnLine, end2D) <= tolerance)
						// The projected point is close to the end point;
						// need to return 1.0 in that case.
						return 1.0;
					return t;
				}
			}
		}
		return NumberUtils.TheNaN;
	}
	/** Object equality; defers to {@code _equalsImpl} after null and exact-class checks. */
	@Override
	public boolean equals(Object other) {
		if (other == null)
			return false;
		if (other == this)
			return true;
		if (other.getClass() != getClass())
			return false;
		return _equalsImpl((Segment)other);
	}
	// Typed overload; instanceof is false for null, so null is handled too.
	boolean equals(Line other) {
		if (other == this)
			return true;
		if (!(other instanceof Line))
			return false;
		return _equalsImpl((Segment) other);
	}
	// Helper for _projectionIntersect. v is the vector in the direction of this
	// line == end - start; orgX/orgY is this line's start or end vertex, selected by
	// bStart. Returns false only when both of other's endpoints project strictly
	// beyond that vertex along v (within the estimated roundoff error).
	boolean _projectionIntersectHelper(Line other, Point2D v, boolean bStart) {
		double orgX = bStart ? m_xStart : m_xEnd;
		double orgY = bStart ? m_yStart : m_yEnd;
		Point2D m = new Point2D();
		m.x = other.getEndX() - orgX;
		m.y = other.getEndY() - orgY;
		double dot = v.dotProduct(m);
		double dotError = 3 * NumberUtils.doubleEps() * v._dotProductAbs(m);
		if (dot > dotError) {
			// other's end projects beyond the origin vertex; check other's start too.
			m.x = other.getStartX() - orgX;
			m.y = other.getStartY() - orgY;
			double dot2 = v.dotProduct(m);
			double dotError2 = 3 * NumberUtils.doubleEps()
					* v._dotProductAbs(m);
			return dot2 <= dotError2;
		}
		return true;
	}
	// This function returns true, if the "other"'s projection on "this" overlaps
	// this segment's extent.
	boolean _projectionIntersect(Line other) {
		Point2D v = new Point2D();
		v.x = m_xEnd - m_xStart;
		v.y = m_yEnd - m_yStart;
		if (!_projectionIntersectHelper(other, v, false))
			return false; // Both other.Start and other.End projections on
							// "this" lie to the right of the this.End
		v.negate();
		if (!_projectionIntersectHelper(other, v, true))
			return false; // Both other.Start and other.End projections on
							// "this" lie to the left of the this.End
		return true;
	}
	// Tests if two lines intersect using projection of one line to another.
	static boolean _isIntersectingHelper(Line line1, Line line2) {
		int s11 = line1._side(line2.m_xStart, line2.m_yStart);
		int s12 = line1._side(line2.m_xEnd, line2.m_yEnd);
		if (s11 < 0 && s12 < 0 || s11 > 0 && s12 > 0)
			return false;// no intersection. The line2 lies to one side of an
							// infinite line passing through line1
		int s21 = line2._side(line1.m_xStart, line1.m_yStart);
		int s22 = line2._side(line1.m_xEnd, line1.m_yEnd);
		if (s21 < 0 && s22 < 0 || s21 > 0 && s22 > 0)
			return false;// no intersection.The line1 lies to one side of an
							// infinite line passing through line2
		// Project onto the longer of the two lines.
		double len1 = line1.calculateLength2D();
		double len2 = line2.calculateLength2D();
		if (len1 > len2) {
			return line1._projectionIntersect(line2);
		} else {
			return line2._projectionIntersect(line1);
		}
	}
	/**
	 * Computes the parameters of the intersection point of two lines by solving the
	 * 2x2 linear system, with a roundoff-error estimate guarding each rejection test.
	 * Returns (t0, t1) — the parameters on line1 and line2 respectively — or
	 * (NaN, NaN) when the lines are parallel, the intersection lies outside either
	 * segment, or the recomputed points stay further apart than the tolerance.
	 */
	static Point2D _intersectHelper1(Line line1, Line line2, double tolerance) {
		Point2D result = new Point2D(NumberUtils.NaN(), NumberUtils.NaN());
		// Direction vectors of the two lines.
		double k1x = line1.m_xEnd - line1.m_xStart;
		double k1y = line1.m_yEnd - line1.m_yStart;
		double k2x = line2.m_xEnd - line2.m_xStart;
		double k2y = line2.m_yEnd - line2.m_yStart;
		double det = k2x * k1y - k1x * k2y;
		if (det == 0)
			return result;
		// estimate roundoff error for det:
		double errdet = 4 * NumberUtils.doubleEps()
				* (Math.abs(k2x * k1y) + Math.abs(k1x * k2y));
		double bx = line2.m_xStart - line1.m_xStart;
		double by = line2.m_yStart - line1.m_yStart;
		double a0 = (k2x * by - bx * k2y);
		double a0error = 4 * NumberUtils.doubleEps()
				* (Math.abs(k2x * by) + Math.abs(bx * k2y));
		double t0 = a0 / det;
		double absdet = Math.abs(det);
		double t0error = (a0error * absdet + errdet * Math.abs(a0))
				/ (det * det) + NumberUtils.doubleEps() * Math.abs(t0);
		if (t0 < -t0error || t0 > 1.0 + t0error)
			return result;
		double a1 = (k1x * by - bx * k1y);
		double a1error = 4 * NumberUtils.doubleEps()
				* (Math.abs(k1x * by) + Math.abs(bx * k1y));
		double t1 = a1 / det;
		double t1error = (a1error * absdet + errdet * Math.abs(a1))
				/ (det * det) + NumberUtils.doubleEps() * Math.abs(t1);
		if (t1 < -t1error || t1 > 1.0 + t1error)
			return result;
		// Snap near-boundary parameters to the exact segment endpoints.
		double t0r = NumberUtils.snap(t0, 0.0, 1.0);
		double t1r = NumberUtils.snap(t1, 0.0, 1.0);
		Point2D pt0 = line1.getCoord2D(t0r);
		Point2D pt1 = line2.getCoord2D(t1r);
		Point2D pt = new Point2D();
		pt.sub(pt0, pt1);
		if (pt.length() > tolerance) {
			// Roundoff errors cause imprecise result. Try recalculate.
			// 1. Use averaged point and recalculate the t values
			pt.add(pt0, pt1);
			pt.scale(0.5);
			t0r = line1.getClosestCoordinate(pt, false);
			t1r = line2.getClosestCoordinate(pt, false);
			Point2D pt01 = line1.getCoord2D(t0r);
			Point2D pt11 = line2.getCoord2D(t1r);
			pt01.sub(pt11);
			if (pt01.length() > tolerance) {
				// Seems to be no intersection here actually. Return NaNs
				return result;
			}
		}
		result.setCoords(t0r, t1r);
		return result;
	}
static int _isIntersectingLineLine(Line line1, Line line2,
double tolerance, boolean bExcludeExactEndpoints) {
// _ASSERT(line1 != line2);
// Check for the endpoints.
// The bExcludeExactEndpoints is True, means we care only about overlaps
// and real intersections, but do not care if the endpoints are exactly
// equal.
// bExcludeExactEndpoints is used in Cracking check test, because during
// cracking test all points are either coincident or further than the
// tolerance.
int counter = 0;
if (line1.m_xStart == line2.m_xStart
&& line1.m_yStart == line2.m_yStart
|| line1.m_xStart == line2.m_xEnd
&& line1.m_yStart == line2.m_yEnd) {
counter++;
if (!bExcludeExactEndpoints)
return 1;
}
if (line1.m_xEnd == line2.m_xStart && line1.m_yEnd == line2.m_yStart
|| line1.m_xEnd == line2.m_xEnd && line1.m_yEnd == line2.m_yEnd) {
counter++;
if (counter == 2)
return 2; // counter == 2 means both endpoints coincide (Lines
// overlap).
if (!bExcludeExactEndpoints)
return 1;
}
if (line2._isIntersectingPoint(line1.getStartXY(), tolerance, true))
return 1;// return true;
if (line2._isIntersectingPoint(line1.getEndXY(), tolerance, true))
return 1;// return true;
if (line1._isIntersectingPoint(line2.getStartXY(), tolerance, true))
return 1;// return true;
if (line1._isIntersectingPoint(line2.getEndXY(), tolerance, true))
return 1;// return true;
if (bExcludeExactEndpoints && (counter != 0))
return 0;// return false;
return _isIntersectingHelper(line1, line2) == false ? 0 : 1;
}
	/**
	 * Collects intersections where endpoints of line1 and line2 coincide exactly
	 * (bitwise equality, no tolerance). Writes up to two parameter pairs and points
	 * into the optional output arrays and returns the count found (0, 1 or 2; 2 means
	 * the segments overlap endpoint-to-endpoint).
	 */
	int _intersectLineLineExact(Line line1, Line line2,
			Point2D[] intersectionPoints, double[] param1, double[] param2) {
		int counter = 0;
		// Case: line1.start == line2.start.
		if (line1.m_xStart == line2.m_xStart
				&& line1.m_yStart == line2.m_yStart) {
			if (param1 != null)// if (param1)
				param1[counter] = 0.0;
			if (param2 != null)// if (param2)
				param2[counter] = 0.0;
			if (intersectionPoints != null)// if (intersectionPoints)
				intersectionPoints[counter] = Point2D.construct(line1.m_xStart,
						line1.m_yStart);
			counter++;
		}
		// Case: line1.start == line2.end.
		if (line1.m_xStart == line2.m_xEnd && line1.m_yStart == line2.m_yEnd) {
			if (param1 != null)// if (param1)
				param1[counter] = 0.0;
			if (param2 != null)// if (param2)
				param2[counter] = 1.0;
			if (intersectionPoints != null)// if (intersectionPoints)
				intersectionPoints[counter] = Point2D.construct(line1.m_xStart,
						line1.m_yStart);
			counter++;
		}
		// Case: line1.end == line2.start.
		if (line1.m_xEnd == line2.m_xStart && line1.m_yEnd == line2.m_yStart) {
			if (counter == 2) {// both segments a degenerate
				if (param1 != null)// if (param1)
				{
					param1[0] = 0.0;
					param1[1] = 1.0;
				}
				if (param2 != null)// if (param2)
				{
					// NOTE(review): only param2[0] is written here while param1 gets
					// both entries — looks asymmetric; confirm against the C++ source.
					param2[0] = 1.0;
				}
				if (intersectionPoints != null)// if (intersectionPoints)
				{
					intersectionPoints[0] = Point2D.construct(line1.m_xEnd,
							line1.m_yEnd);
					intersectionPoints[1] = Point2D.construct(line1.m_xEnd,
							line1.m_yEnd);
				}
				return counter;
			}
			if (param1 != null)// if (param1)
				param1[counter] = 1.0;
			if (param2 != null)// if (param2)
				param2[counter] = 0.0;
			if (intersectionPoints != null)// if (intersectionPoints)
				intersectionPoints[counter] = Point2D.construct(line1.m_xEnd,
						line1.m_yEnd);
			counter++;
		}
		// Case: line1.end == line2.end.
		if (line1.m_xEnd == line2.m_xEnd && line1.m_yEnd == line2.m_yEnd) {
			if (counter == 2) {// both segments are degenerate
				if (param1 != null)// if (param1)
				{
					param1[0] = 0.0;
					param1[1] = 1.0;
				}
				if (param2 != null)// if (param2)
				{
					// NOTE(review): see the note in the previous degenerate branch —
					// param2[1] is left unwritten here as well.
					param2[0] = 1.0;
				}
				if (intersectionPoints != null)// if (intersectionPoints)
				{
					intersectionPoints[0] = Point2D.construct(line1.m_xEnd,
							line1.m_yEnd);
					intersectionPoints[1] = Point2D.construct(line1.m_xEnd,
							line1.m_yEnd);
				}
				return counter;
			}
			if (param1 != null)// if (param1)
				param1[counter] = 1.0;
			if (param2 != null)// if (param2)
				param2[counter] = 1.0;
			if (intersectionPoints != null)// if (intersectionPoints)
				intersectionPoints[counter] = Point2D.construct(line1.m_xEnd,
						line1.m_yEnd);
			counter++;
		}
		return counter;
	}
static int _intersectLineLine(Line line1, Line line2,
Point2D[] intersectionPoints, double[] param1, double[] param2,
double tolerance) {
// _ASSERT(!param1 && !param2 || param1);
int counter = 0;
// Test the end points for exact coincidence.
double t11 = line1._intersection(line2.getStartXY(), tolerance, false);
double t12 = line1._intersection(line2.getEndXY(), tolerance, false);
double t21 = line2._intersection(line1.getStartXY(), tolerance, false);
double t22 = line2._intersection(line1.getEndXY(), tolerance, false);
if (!NumberUtils.isNaN(t11)) {
if (param1 != null)// if (param1)
param1[counter] = t11;
if (param2 != null)// if (param2)
param2[counter] = 0;
if (intersectionPoints != null)// if (intersectionPoints)
intersectionPoints[counter] = Point2D.construct(line2.m_xStart,
line2.m_yStart);
counter++;
}
if (!NumberUtils.isNaN(t12)) {
if (param1 != null)// if (param1)
param1[counter] = t12;
if (param2 != null)// if (param2)
param2[counter] = 1.0;
if (intersectionPoints != null)// if (intersectionPoints)
intersectionPoints[counter] = Point2D.construct(line2.m_xEnd,
line2.m_yEnd);
counter++;
}
if (counter != 2 && !NumberUtils.isNaN(t21)) {
if (!(t11 == 0 && t21 == 0) && !(t12 == 0 && t21 == 1.0))// the "if"
// makes
// sure
// this
// has
// not
// been
// already
// calculated
{
if (param1 != null)// if (param1)
param1[counter] = 0;
if (param2 != null)// if (param2)
param2[counter] = t21;
if (intersectionPoints != null)// if (intersectionPoints)
intersectionPoints[counter] = Point2D.construct(
line1.m_xStart, line1.m_yStart);
counter++;
}
}
if (counter != 2 && !NumberUtils.isNaN(t22)) {
if (!(t11 == 1.0 && t22 == 0) && !(t12 == 1.0 && t22 == 1.0))// the
// "if"
// makes
// sure
// this
// has
// not
// been
// already
// calculated
{
if (param1 != null)// if (param1)
param1[counter] = 1.0;
if (param2 != null)// if (param2)
param2[counter] = t22;
if (intersectionPoints != null)// if (intersectionPoints)
intersectionPoints[counter] = Point2D.construct(
line2.m_xEnd, line2.m_yEnd);
counter++;
}
}
if (counter > 0) {
if (counter == 2 && param1 != null && param1[0] > param1[1]) {// make
// sure
// the
// intersection
// events
// are
// sorted
// along
// the
// line1
// can't
// swap
// doulbes
// in
// java
// NumberUtils::Swap(param1[0],
// param1[1]);
double zeroParam1 = param1[0];
param1[0] = param1[1];
param1[1] = zeroParam1;
if (param2 != null)// if (param2)
{
double zeroParam2 = param2[0];
param2[0] = param2[1];
param2[1] = zeroParam2;// NumberUtils::Swap(ARRAYELEMENT(param2,
// 0), ARRAYELEMENT(param2, 1));
}
if (intersectionPoints != null)// if (intersectionPoints)
{
Point2D tmp = new Point2D(intersectionPoints[0].x,
intersectionPoints[0].y);
intersectionPoints[0] = intersectionPoints[1];
intersectionPoints[1] = tmp;
}
}
return counter;
}
Point2D params = _intersectHelper1(line1, line2, tolerance);
if (NumberUtils.isNaN(params.x))
return 0;
if (intersectionPoints != null)// if (intersectionPoints)
{
intersectionPoints[0] = line1.getCoord2D(params.x);
}
if (param1 != null)// if (param1)
{
param1[0] = params.x;
}
if (param2 != null)// if (param2)
{
param2[0] = params.y;
}
return 1;
}
@Override
public void replaceNaNs(int semantics, double value) {
addAttribute(semantics);
if (isEmpty())
return;
int ncomps = VertexDescription.getComponentCount(semantics);
for (int i = 0; i < ncomps; i++) {
double v = _getAttributeAsDbl(0, semantics, i);
if (Double.isNaN(v))
_setAttribute(0, semantics, 0, value);
v = _getAttributeAsDbl(1, semantics, i);
if (Double.isNaN(v))
_setAttribute(1, semantics, 0, value);
}
}
	// Returns 0 — no monotonic parts are produced for a straight segment
	// (a line is already y-monotonic, so no splitting appears to be needed).
	@Override
	int getYMonotonicParts(SegmentBuffer[] monotonicSegments) {
		return 0;
	}
	// Intentionally empty: a Line carries no state beyond what the Segment base
	// class copies.
	@Override
	void _copyToImpl(Segment dst) {
		// TODO Auto-generated method stub
	}
/**
* The output of this method can be only used for debugging. It is subject to change without notice.
*/
@Override
public String toString() {
String s = "Line: [" + m_xStart + ", " + m_yStart + ", " + m_xEnd + ", " + m_yEnd +"]";
return s;
}
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.cassandra.cache;
import java.nio.ByteBuffer;
import java.util.concurrent.atomic.AtomicInteger;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Throwables;
import com.google.common.collect.Iterables;
import com.google.common.util.concurrent.MoreExecutors;
import com.github.benmanes.caffeine.cache.*;
import org.apache.cassandra.concurrent.ImmediateExecutor;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.io.sstable.CorruptSSTableException;
import org.apache.cassandra.io.util.*;
import org.apache.cassandra.metrics.ChunkCacheMetrics;
import org.apache.cassandra.utils.memory.BufferPool;
import org.apache.cassandra.utils.memory.BufferPools;
public class ChunkCache
implements CacheLoader<ChunkCache.Key, ChunkCache.Buffer>, RemovalListener<ChunkCache.Key, ChunkCache.Buffer>, CacheSize
{
    // Space reserved for the buffer pool itself, subtracted from the configured file cache size.
    public static final int RESERVED_POOL_SPACE_IN_MB = 32;
    // Total cache capacity in bytes: file cache size (MB) minus the reserved pool space.
    public static final long cacheSize = 1024L * 1024L * Math.max(0, DatabaseDescriptor.getFileCacheSizeInMB() - RESERVED_POOL_SPACE_IN_MB);
    // Mirrors the file-cache round-up configuration setting.
    public static final boolean roundUp = DatabaseDescriptor.getFileCacheRoundUp();
    private static boolean enabled = DatabaseDescriptor.getFileCacheEnabled() && cacheSize > 0;
    // Singleton; null when the chunk cache is disabled or sized to zero by configuration.
    public static final ChunkCache instance = enabled ? new ChunkCache(BufferPools.forChunkCache()) : null;
    private final BufferPool bufferPool;
    private final LoadingCache<Key, Buffer> cache;
    public final ChunkCacheMetrics metrics;
static class Key
{
final ChunkReader file;
final String path;
final long position;
public Key(ChunkReader file, long position)
{
super();
this.file = file;
this.position = position;
this.path = file.channel().filePath();
}
public int hashCode()
{
final int prime = 31;
int result = 1;
result = prime * result + path.hashCode();
result = prime * result + file.getClass().hashCode();
result = prime * result + Long.hashCode(position);
return result;
}
public boolean equals(Object obj)
{
if (this == obj)
return true;
if (obj == null)
return false;
Key other = (Key) obj;
return (position == other.position)
&& file.getClass() == other.file.getClass()
&& path.equals(other.path);
}
}
class Buffer implements Rebufferer.BufferHolder
{
private final ByteBuffer buffer;
private final long offset;
private final AtomicInteger references;
public Buffer(ByteBuffer buffer, long offset)
{
this.buffer = buffer;
this.offset = offset;
references = new AtomicInteger(1); // start referenced.
}
Buffer reference()
{
int refCount;
do
{
refCount = references.get();
if (refCount == 0)
// Buffer was released before we managed to reference it.
return null;
} while (!references.compareAndSet(refCount, refCount + 1));
return this;
}
@Override
public ByteBuffer buffer()
{
assert references.get() > 0;
return buffer.duplicate();
}
@Override
public long offset()
{
return offset;
}
@Override
public void release()
{
if (references.decrementAndGet() == 0)
bufferPool.put(buffer);
}
}
private ChunkCache(BufferPool pool)
{
bufferPool = pool;
metrics = new ChunkCacheMetrics(this);
cache = Caffeine.newBuilder()
.maximumWeight(cacheSize)
.executor(ImmediateExecutor.INSTANCE)
.weigher((key, buffer) -> ((Buffer) buffer).buffer.capacity())
.removalListener(this)
.recordStats(() -> metrics)
.build(this);
}
@Override
public Buffer load(Key key)
{
ByteBuffer buffer = bufferPool.get(key.file.chunkSize(), key.file.preferredBufferType());
assert buffer != null;
key.file.readChunk(key.position, buffer);
return new Buffer(buffer, key.position);
}
@Override
public void onRemoval(Key key, Buffer buffer, RemovalCause cause)
{
buffer.release();
}
public void close()
{
cache.invalidateAll();
}
private RebuffererFactory wrap(ChunkReader file)
{
return new CachingRebufferer(file);
}
public static RebuffererFactory maybeWrap(ChunkReader file)
{
if (!enabled)
return file;
return instance.wrap(file);
}
public void invalidatePosition(FileHandle dfile, long position)
{
if (!(dfile.rebuffererFactory() instanceof CachingRebufferer))
return;
((CachingRebufferer) dfile.rebuffererFactory()).invalidate(position);
}
public void invalidateFile(String fileName)
{
cache.invalidateAll(Iterables.filter(cache.asMap().keySet(), x -> x.path.equals(fileName)));
}
@VisibleForTesting
public void enable(boolean enabled)
{
ChunkCache.enabled = enabled;
cache.invalidateAll();
metrics.reset();
}
// TODO: Invalidate caches for obsoleted/MOVED_START tables?
/**
* Rebufferer providing cached chunks where data is obtained from the specified ChunkReader.
* Thread-safe. One instance per SegmentedFile, created by ChunkCache.maybeWrap if the cache is enabled.
*/
class CachingRebufferer implements Rebufferer, RebuffererFactory
{
private final ChunkReader source;
final long alignmentMask;
public CachingRebufferer(ChunkReader file)
{
source = file;
int chunkSize = file.chunkSize();
assert Integer.bitCount(chunkSize) == 1 : String.format("%d must be a power of two", chunkSize);
alignmentMask = -chunkSize;
}
@Override
public Buffer rebuffer(long position)
{
try
{
long pageAlignedPos = position & alignmentMask;
Buffer buf;
do
buf = cache.get(new Key(source, pageAlignedPos)).reference();
while (buf == null);
return buf;
}
catch (Throwable t)
{
Throwables.propagateIfInstanceOf(t.getCause(), CorruptSSTableException.class);
throw Throwables.propagate(t);
}
}
public void invalidate(long position)
{
long pageAlignedPos = position & alignmentMask;
cache.invalidate(new Key(source, pageAlignedPos));
}
@Override
public Rebufferer instantiateRebufferer()
{
return this;
}
@Override
public void close()
{
source.close();
}
@Override
public void closeReader()
{
// Instance is shared among readers. Nothing to release.
}
@Override
public ChannelProxy channel()
{
return source.channel();
}
@Override
public long fileLength()
{
return source.fileLength();
}
@Override
public double getCrcCheckChance()
{
return source.getCrcCheckChance();
}
@Override
public String toString()
{
return "CachingRebufferer:" + source;
}
}
@Override
public long capacity()
{
return cacheSize;
}
@Override
public void setCapacity(long capacity)
{
throw new UnsupportedOperationException("Chunk cache size cannot be changed.");
}
@Override
public int size()
{
return cache.asMap().size();
}
@Override
public long weightedSize()
{
return cache.policy().eviction()
.map(policy -> policy.weightedSize().orElseGet(cache::estimatedSize))
.orElseGet(cache::estimatedSize);
}
}
| |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2021 DBeaver Corp and others
* Copyright (C) 2011-2012 Eugene Fradkin (eugene.fradkin@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ui.editors.data.preferences;
import org.eclipse.jface.dialogs.IDialogConstants;
import org.eclipse.osgi.util.NLS;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.*;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.ModelPreferences;
import org.jkiss.dbeaver.model.DBPDataSourceContainer;
import org.jkiss.dbeaver.model.data.DBDDataFormatter;
import org.jkiss.dbeaver.model.data.DBDDataFormatterProfile;
import org.jkiss.dbeaver.model.preferences.DBPPreferenceStore;
import org.jkiss.dbeaver.model.preferences.DBPPropertyDescriptor;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.registry.formatter.DataFormatterDescriptor;
import org.jkiss.dbeaver.registry.formatter.DataFormatterRegistry;
import org.jkiss.dbeaver.runtime.DBWorkbench;
import org.jkiss.dbeaver.runtime.properties.PropertySourceCustom;
import org.jkiss.dbeaver.ui.ShellUtils;
import org.jkiss.dbeaver.ui.UIUtils;
import org.jkiss.dbeaver.ui.controls.LocaleSelectorControl;
import org.jkiss.dbeaver.ui.controls.resultset.internal.ResultSetMessages;
import org.jkiss.dbeaver.ui.dialogs.EnterNameDialog;
import org.jkiss.dbeaver.ui.preferences.PreferenceStoreDelegate;
import org.jkiss.dbeaver.ui.preferences.TargetPrefPage;
import org.jkiss.dbeaver.ui.properties.PropertyTreeViewer;
import org.jkiss.dbeaver.utils.HelpUtils;
import org.jkiss.utils.CommonUtils;
import java.util.List;
import java.util.*;
/**
 * PrefPageDataFormat
 *
 * Preference page for configuring data formatters (date/time, numbers, etc.)
 * either globally, per custom profile, or per data source. Edits are staged in
 * {@link #profileProperties} and only written to the profile on save.
 */
public class PrefPageDataFormat extends TargetPrefPage
{
    private static final Log log = Log.getLog(PrefPageDataFormat.class);

    public static final String PAGE_ID = "org.jkiss.dbeaver.preferences.main.dataformat"; //$NON-NLS-1$

    // Wiki topic name used to build the external help link under the sample field
    final private String HELP_DATA_FORMAT_LINK = "Managing-Data-Formats";

    // Profile currently being edited (global, custom, or data-source specific)
    private DBDDataFormatterProfile formatterProfile;

    private Font boldFont;
    private Combo typeCombo;
    private PropertyTreeViewer propertiesControl;
    private Text sampleText;

    private List<DataFormatterDescriptor> formatterDescriptors;
    private LocaleSelectorControl localeSelector;

    private String profileName;
    private Locale profileLocale;
    // formatter id -> staged (possibly edited) property values for the current profile
    private Map<String, Map<String, Object>> profileProperties = new HashMap<>();
    // Only created on the global page (null on data-source specific pages)
    private Combo profilesCombo;
    private PropertySourceCustom propertySource;
    private Button datetimeNativeFormatCheck;
    private Button numericNativeFormatCheck;
    private Button numericScientificFormatCheck;

    public PrefPageDataFormat()
    {
        super();
        setPreferenceStore(new PreferenceStoreDelegate(DBWorkbench.getPlatform().getPreferenceStore()));
    }

    /**
     * Returns true when the data source overrides any of the format settings
     * handled by this page, or uses its own formatter profile.
     */
    @Override
    protected boolean hasDataSourceSpecificOptions(DBPDataSourceContainer dataSourceDescriptor)
    {
        DBPPreferenceStore store = dataSourceDescriptor.getPreferenceStore();
        return
            store.contains(ModelPreferences.RESULT_NATIVE_DATETIME_FORMAT) ||
            store.contains(ModelPreferences.RESULT_NATIVE_NUMERIC_FORMAT) ||
            store.contains(ModelPreferences.RESULT_SCIENTIFIC_NUMERIC_FORMAT) ||
            dataSourceDescriptor.getDataFormatterProfile().isOverridesParent();
    }

    @Override
    protected boolean supportsDataSourceSpecificOptions()
    {
        return true;
    }

    /**
     * Adds the profile selector row — only on the global preference page;
     * data-source pages edit the data source's own profile directly.
     */
    @Override
    protected void createPreferenceHeader(Composite composite)
    {
        if (!isDataSourcePreferencePage()) {
            Composite profileGroup = UIUtils.createComposite(composite, 3);
            profileGroup.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
            UIUtils.createControlLabel(profileGroup, ResultSetMessages.pref_page_data_format_label_profile);
            profilesCombo = new Combo(profileGroup, SWT.DROP_DOWN | SWT.READ_ONLY);
            profilesCombo.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
            profilesCombo.addSelectionListener(new SelectionAdapter() {
                @Override
                public void widgetSelected(SelectionEvent e)
                {
                    changeProfile();
                }
            });
            UIUtils.createDialogButton(
                profileGroup,
                ResultSetMessages.pref_page_data_format_button_manage_profiles, new SelectionAdapter() {
                    @Override
                    public void widgetSelected(SelectionEvent e)
                    {
                        manageProfiles();
                    }
                });
        }
    }

    /**
     * Builds the page body: locale selector, native-format checkboxes, and the
     * formatter type/properties/sample group.
     */
    @Override
    protected Control createPreferenceContent(Composite parent)
    {
        boldFont = UIUtils.makeBoldFont(parent.getFont());
        Composite composite = UIUtils.createComposite(parent, 2);

        // Locale
        {
            localeSelector = new LocaleSelectorControl(composite, null);
            localeSelector.addListener(SWT.Selection, event -> {
                if (event.data instanceof Locale) {
                    onLocaleChange((Locale) event.data);
                }
            });
        }

        // Settings
        {
            Group settingsGroup = new Group(composite, SWT.NONE);
            settingsGroup.setText(ResultSetMessages.pref_page_data_format_group_settings);
            settingsGroup.setLayout(new GridLayout(2, false));
            settingsGroup.setLayoutData(new GridData(GridData.VERTICAL_ALIGN_BEGINNING | GridData.FILL_HORIZONTAL));
            datetimeNativeFormatCheck = UIUtils.createCheckbox(settingsGroup, ResultSetMessages.pref_page_data_format_datetime_use_native_formatting, ResultSetMessages.pref_page_data_format_datetime_use_native_formatting_tip, false, 2);
            numericNativeFormatCheck = UIUtils.createCheckbox(settingsGroup, ResultSetMessages.pref_page_data_format_numeric_use_native_formatting, ResultSetMessages.pref_page_data_format_numeric_use_native_formatting_tip, false, 2);
            numericScientificFormatCheck = UIUtils.createCheckbox(settingsGroup, ResultSetMessages.pref_page_data_format_numeric_use_scientific_notation, ResultSetMessages.pref_page_data_format_numeric_use_scientific_notation_tip, false, 2);
            // Scientific notation only applies when native numeric formatting is on
            numericNativeFormatCheck.addSelectionListener(new SelectionAdapter() {
                @Override
                public void widgetSelected(SelectionEvent e) {
                    numericScientificFormatCheck.setEnabled(numericNativeFormatCheck.getSelection());
                }
            });
        }

        // formats
        {
            Group formatGroup = new Group(composite, SWT.NONE);
            formatGroup.setText(ResultSetMessages.pref_page_data_format_group_format);
            formatGroup.setLayout(new GridLayout(2, false));
            GridData gd = new GridData(GridData.VERTICAL_ALIGN_BEGINNING | GridData.FILL_HORIZONTAL);
            gd.horizontalSpan = 2;
            formatGroup.setLayoutData(gd);

            UIUtils.createControlLabel(formatGroup, ResultSetMessages.pref_page_data_format_label_type);
            typeCombo = new Combo(formatGroup, SWT.DROP_DOWN | SWT.READ_ONLY);
            typeCombo.addSelectionListener(new SelectionAdapter() {
                @Override
                public void widgetSelected(SelectionEvent e)
                {
                    reloadFormatter();
                }
            });

            Label propsLabel = UIUtils.createControlLabel(formatGroup, ResultSetMessages.pref_page_data_format_label_settingt);
            propsLabel.setLayoutData(new GridData(GridData.VERTICAL_ALIGN_BEGINNING));
            propertiesControl = new PropertyTreeViewer(formatGroup, SWT.BORDER);

            UIUtils.createControlLabel(formatGroup, ResultSetMessages.pref_page_data_format_label_sample);
            sampleText = new Text(formatGroup, SWT.BORDER | SWT.READ_ONLY);
            sampleText.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));

            // External help link to the data-formats wiki page
            Link urlHelpLabel = UIUtils.createLink(formatGroup, "<a href=\"" + HelpUtils.getHelpExternalReference(HELP_DATA_FORMAT_LINK) + "\">"
                + ResultSetMessages.pref_page_data_format_link_patterns + "</a>", new SelectionAdapter() {
                @Override
                public void widgetSelected(SelectionEvent e) {
                    ShellUtils.launchProgram(HelpUtils.getHelpExternalReference(HELP_DATA_FORMAT_LINK));
                }
            });
            urlHelpLabel.setLayoutData(new GridData(GridData.FILL, GridData.VERTICAL_ALIGN_BEGINNING, false, false, 2, 1));
        }

        return composite;
    }

    // Opens the profile management dialog and refreshes the combo afterwards
    private void manageProfiles()
    {
        DataFormatProfilesEditDialog dialog = new DataFormatProfilesEditDialog(getShell());
        dialog.open();
        refreshProfileList();
    }

    /**
     * Default profile: the data source's own profile on a data-source page,
     * otherwise the registry-wide global profile.
     */
    private DBDDataFormatterProfile getDefaultProfile()
    {
        if (isDataSourcePreferencePage()) {
            return getDataSourceContainer().getDataFormatterProfile();
        } else {
            return DataFormatterRegistry.getInstance().getGlobalProfile();
        }
    }

    // Switches the edited profile to the one selected in profilesCombo
    // (index 0 is the default/global profile)
    private void changeProfile()
    {
        int selectionIndex = profilesCombo.getSelectionIndex();
        if (selectionIndex < 0) {
            return;
        }
        DBDDataFormatterProfile newProfile;
        if (selectionIndex == 0) {
            newProfile = getDefaultProfile();
        } else {
            String newProfileName = profilesCombo.getItem(selectionIndex);
            newProfile = DataFormatterRegistry.getInstance().getCustomProfile(newProfileName);
        }
        if (newProfile != formatterProfile) {
            setCurrentProfile(newProfile);
        }
    }

    /**
     * Makes {@code profile} the edited profile: loads its locale and staged
     * per-formatter properties, repopulates the type combo, and reloads the
     * formatter view. No-op when the profile is already current.
     */
    private void setCurrentProfile(DBDDataFormatterProfile profile)
    {
        if (formatterProfile == profile) {
            return;
        }
        formatterProfile = profile;
        formatterDescriptors = new ArrayList<>(DataFormatterRegistry.getInstance().getDataFormatters());

        profileName = formatterProfile.getProfileName();
        profileLocale = formatterProfile.getLocale();
        profileProperties.clear();
        DBPPreferenceStore store = getTargetPreferenceStore();
        for (DataFormatterDescriptor dfd : formatterDescriptors) {
            Map<String, Object> formatterProps = formatterProfile.getFormatterProperties(store, dfd.getId());
            if (formatterProps != null) {
                profileProperties.put(dfd.getId(), formatterProps);
            }
        }

        try {
            // Set locale
            localeSelector.setLocale(profileLocale);
            // Load types
            typeCombo.removeAll();
            for (DataFormatterDescriptor formatter : formatterDescriptors) {
                typeCombo.add(formatter.getName());
            }
            if (typeCombo.getItemCount() > 0) {
                typeCombo.select(0);
            }
            reloadFormatter();
        } catch (Exception e) {
            log.warn(e);
        }
    }

    /**
     * Repopulates the profiles combo (global page only), keeping the previous
     * selection if it still exists, then applies the selected profile.
     */
    private void refreshProfileList()
    {
        if (isDataSourcePreferencePage()) {
            return;
        }
        int selectionIndex = profilesCombo.getSelectionIndex();
        String oldProfile = null;
        if (selectionIndex > 0) {
            oldProfile = profilesCombo.getItem(selectionIndex);
        }
        profilesCombo.removeAll();
        // Model-independent combo
        profilesCombo.add("<" + DataFormatterRegistry.getInstance().getGlobalProfile().getProfileName() + ">"); //$NON-NLS-1$ //$NON-NLS-2$
        for (DBDDataFormatterProfile profile : DataFormatterRegistry.getInstance().getCustomProfiles()) {
            profilesCombo.add(profile.getProfileName());
        }
        if (oldProfile != null) {
            profilesCombo.setText(oldProfile);
        }
        if (profilesCombo.getSelectionIndex() < 0) {
            profilesCombo.select(0);
        }
        profilesCombo.setEnabled(profilesCombo.getItemCount() >= 2);

        changeProfile();
    }

    // Descriptor for the formatter currently selected in typeCombo, or null
    private DataFormatterDescriptor getCurrentFormatter()
    {
        int selectionIndex = typeCombo.getSelectionIndex();
        if (selectionIndex < 0) {
            return null;
        }
        return formatterDescriptors.get(selectionIndex);
    }

    /**
     * Rebuilds the property tree for the selected formatter (staged values on
     * top of locale-specific defaults) and refreshes the sample preview.
     */
    private void reloadFormatter()
    {
        DataFormatterDescriptor formatterDescriptor = getCurrentFormatter();
        if (formatterDescriptor == null) {
            return;
        }

        Map<String,Object> formatterProps = profileProperties.get(formatterDescriptor.getId());
        Map<String, Object> defaultProps = formatterDescriptor.getSample().getDefaultProperties(localeSelector.getSelectedLocale());
        propertySource = new VerifyingPropertySourceCustom(
            formatterDescriptor.getProperties(),
            formatterProps);
        propertySource.setDefaultValues(defaultProps);
        propertiesControl.loadProperties(propertySource);
        reloadSample();
    }

    /**
     * Renders the sample value with a fresh formatter instance configured from
     * defaults overlaid with staged properties. Throws so callers can decide
     * how to report formatter configuration errors.
     */
    private void reloadSampleThrowable() throws Exception {
        DataFormatterDescriptor formatterDescriptor = getCurrentFormatter();
        if (formatterDescriptor == null) {
            return;
        }
        DBDDataFormatter formatter = formatterDescriptor.createFormatter();

        Map<String, Object> defProps = formatterDescriptor.getSample().getDefaultProperties(profileLocale);
        Map<String, Object> props = profileProperties.get(formatterDescriptor.getId());
        Map<String, Object> formatterProps = new HashMap<>();
        if (defProps != null && !defProps.isEmpty()) {
            formatterProps.putAll(defProps);
        }
        if (props != null && !props.isEmpty()) {
            formatterProps.putAll(props);
        }
        formatter.init(null, profileLocale, formatterProps);

        String sampleValue = formatter.formatValue(formatterDescriptor.getSample().getSampleValue());
        sampleText.setText(CommonUtils.notEmpty(sampleValue));
    }

    // Same as reloadSampleThrowable() but reports errors to the user instead of throwing
    private void reloadSample() {
        try {
            reloadSampleThrowable();
        } catch (Exception e) {
            DBWorkbench.getPlatformUI().showError("Data formats", "Can't apply formatter values to the sample", e);
            log.warn("Can't render sample value", e); //$NON-NLS-1$
        }
    }

    /**
     * Stages the current property-tree values for the selected formatter and
     * re-renders the sample; throws on invalid values so the caller can roll back.
     */
    private void saveFormatterProperties() throws Exception {
        DataFormatterDescriptor formatterDescriptor = getCurrentFormatter();
        if (formatterDescriptor == null) {
            return;
        }
        Map<String, Object> props = propertySource.getPropertyValues();
        profileProperties.put(formatterDescriptor.getId(), props);
        reloadSampleThrowable();
    }

    // Re-derives formatter defaults and the sample when the user picks a new locale
    private void onLocaleChange(Locale locale)
    {
        if (!locale.equals(profileLocale)) {
            profileLocale = locale;
            DataFormatterDescriptor formatter = getCurrentFormatter();
            if (formatter != null) {
                propertySource.setDefaultValues(formatter.getSample().getDefaultProperties(locale));
                propertiesControl.refresh();
            }
            reloadSample();
        }
    }

    @Override
    protected void performDefaults() {
        DBPPreferenceStore targetPreferenceStore = getTargetPreferenceStore();
        clearPreferences(targetPreferenceStore);
        // Force setCurrentProfile() to reload even if the same profile object is returned
        formatterProfile = null;
        loadPreferences(targetPreferenceStore);
        reloadSample();
        super.performDefaults();
    }

    @Override
    protected void loadPreferences(DBPPreferenceStore store)
    {
        refreshProfileList();
        setCurrentProfile(getDefaultProfile());
        datetimeNativeFormatCheck.setSelection(store.getBoolean(ModelPreferences.RESULT_NATIVE_DATETIME_FORMAT));
        numericNativeFormatCheck.setSelection(store.getBoolean(ModelPreferences.RESULT_NATIVE_NUMERIC_FORMAT));
        numericScientificFormatCheck.setSelection(store.getBoolean(ModelPreferences.RESULT_SCIENTIFIC_NUMERIC_FORMAT));
        numericScientificFormatCheck.setEnabled(numericNativeFormatCheck.getSelection());
    }

    /**
     * Persists staged name/locale/per-formatter properties into the profile and
     * writes the native-format flags into the preference store.
     */
    @Override
    protected void savePreferences(DBPPreferenceStore store) {
        propertiesControl.saveEditorValues();
        try {
            formatterProfile.setProfileName(profileName);
            formatterProfile.setLocale(profileLocale);
            for (String typeId : profileProperties.keySet()) {
                formatterProfile.setFormatterProperties(store, typeId, profileProperties.get(typeId));
            }
            formatterProfile.saveProfile(store);

            store.setValue(ModelPreferences.RESULT_NATIVE_DATETIME_FORMAT, datetimeNativeFormatCheck.getSelection());
            store.setValue(ModelPreferences.RESULT_NATIVE_NUMERIC_FORMAT, numericNativeFormatCheck.getSelection());
            store.setValue(ModelPreferences.RESULT_SCIENTIFIC_NUMERIC_FORMAT, numericScientificFormatCheck.getSelection());
        } catch (Exception e) {
            log.warn(e);
        }
    }

    @Override
    protected void clearPreferences(DBPPreferenceStore store)
    {
        if (formatterProfile != null) {
            formatterProfile.reset(store);
        }
        store.setToDefault(ModelPreferences.RESULT_NATIVE_DATETIME_FORMAT);
        store.setToDefault(ModelPreferences.RESULT_NATIVE_NUMERIC_FORMAT);
        store.setToDefault(ModelPreferences.RESULT_SCIENTIFIC_NUMERIC_FORMAT);
    }

    @Override
    protected String getPropertyPageID()
    {
        return PAGE_ID;
    }

    /**
     * Allows callers opening this page to pre-select a profile.
     * NOTE(review): profilesCombo is only created on the global page; this
     * appears to assume it is never invoked with profile data on a
     * data-source page — confirm with callers.
     */
    @Override
    public void applyData(Object data)
    {
        super.applyData(data);
        if (data instanceof DBDDataFormatterProfile) {
            UIUtils.setComboSelection(profilesCombo, ((DBDDataFormatterProfile)data).getProfileName());
            changeProfile();
        }
    }

    @Override
    public void dispose()
    {
        boldFont.dispose();
        super.dispose();
    }

    /**
     * DataFormatProfilesEditDialog
     *
     * Simple list dialog for creating and deleting custom formatter profiles.
     */
    public static class DataFormatProfilesEditDialog extends org.eclipse.jface.dialogs.Dialog {
        private static final int NEW_ID = IDialogConstants.CLIENT_ID + 1;
        private static final int DELETE_ID = IDialogConstants.CLIENT_ID + 2;
        private org.eclipse.swt.widgets.List profileList;

        DataFormatProfilesEditDialog(Shell parentShell)
        {
            super(parentShell);
        }

        @Override
        protected boolean isResizable()
        {
            return true;
        }

        @Override
        protected Control createDialogArea(Composite parent)
        {
            getShell().setText(ResultSetMessages.dialog_data_format_profiles_title);

            Composite group = new Composite(parent, SWT.NONE);
            group.setLayout(new GridLayout(1, false));
            group.setLayoutData(new GridData(GridData.FILL_BOTH));

            profileList = new org.eclipse.swt.widgets.List(group, SWT.SINGLE | SWT.BORDER);
            GridData gd = new GridData(GridData.FILL_BOTH);
            gd.widthHint = 300;
            gd.heightHint = 200;
            profileList.setLayoutData(gd);

            // Delete is only enabled while a profile is selected
            profileList.addSelectionListener(new SelectionAdapter() {
                @Override
                public void widgetSelected(SelectionEvent e)
                {
                    getButton(DELETE_ID).setEnabled(profileList.getSelectionIndex() >= 0);
                }
            });

            loadProfiles();
            return parent;
        }

        @Override
        protected void createButtonsForButtonBar(Composite parent)
        {
            createButton(parent, NEW_ID, ResultSetMessages.dialog_data_format_profiles_button_new_profile, false);
            createButton(parent, DELETE_ID, ResultSetMessages.dialog_data_format_profiles_button_delete_profile, false);
            createButton(parent, IDialogConstants.OK_ID, IDialogConstants.CLOSE_LABEL, true);
            getButton(DELETE_ID).setEnabled(false);
        }

        @Override
        protected void buttonPressed(int buttonId)
        {
            DataFormatterRegistry registry = DataFormatterRegistry.getInstance();
            if (buttonId == NEW_ID) {
                // Create a new named profile; reject duplicates
                String profileName = EnterNameDialog.chooseName(getShell(), ResultSetMessages.dialog_data_format_profiles_dialog_name_chooser_title);
                if (CommonUtils.isEmpty(profileName)) {
                    return;
                }
                if (registry.getCustomProfile(profileName) != null) {
                    UIUtils.showMessageBox(
                            getShell(),
                            ResultSetMessages.dialog_data_format_profiles_error_title,
                            NLS.bind(ResultSetMessages.dialog_data_format_profiles_error_message, profileName), SWT.ICON_ERROR);
                } else {
                    registry.createCustomProfile(profileName);
                    loadProfiles();
                }
            } else if (buttonId == DELETE_ID) {
                // Delete the selected profile after confirmation
                int selectionIndex = profileList.getSelectionIndex();
                if (selectionIndex >= 0) {
                    DBDDataFormatterProfile profile = registry.getCustomProfile(profileList.getItem(selectionIndex));
                    if (profile != null) {
                        if (UIUtils.confirmAction(
                                getShell(),
                                ResultSetMessages.dialog_data_format_profiles_confirm_delete_title,
                                ResultSetMessages.dialog_data_format_profiles_confirm_delete_message)) {
                            registry.deleteCustomProfile(profile);
                            loadProfiles();
                        }
                    }
                }
            } else {
                super.buttonPressed(buttonId);
            }
        }

        // Repopulates the profile list from the registry and disables Delete
        private void loadProfiles()
        {
            profileList.removeAll();
            List<DBDDataFormatterProfile> profiles = DataFormatterRegistry.getInstance().getCustomProfiles();
            for (DBDDataFormatterProfile profile : profiles) {
                profileList.add(profile.getProfileName());
            }
            Button deleteButton = getButton(DELETE_ID);
            if (deleteButton != null) {
                deleteButton.setEnabled(false);
            }
        }
    }

    /**
     * Attempts to apply formatter setting and rollbacks it to the previous value if error occurs
     */
    private class VerifyingPropertySourceCustom extends PropertySourceCustom {
        public VerifyingPropertySourceCustom(DBPPropertyDescriptor[] properties, Map<String, ?> values) {
            super(properties, values);
        }

        @Override
        public void setPropertyValue(@Nullable DBRProgressMonitor monitor, String id, Object value) {
            final Object previousValue = getPropertyValue(monitor, id);
            super.setPropertyValue(monitor, id, value);
            try {
                saveFormatterProperties();
            } catch (Exception e) {
                // Roll back the bad value and surface the failure to the editor
                super.setPropertyValue(monitor, id, previousValue);
                throw new IllegalArgumentException(e);
            }
        }

        @Override
        public void resetPropertyValueToDefault(String id) {
            super.resetPropertyValueToDefault(id);
            try {
                saveFormatterProperties();
            } catch (Exception e) {
                throw new IllegalArgumentException(e);
            }
        }
    }
}
| |
/*
* Copyright (C) 2011 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.util.concurrent;
import static com.google.common.collect.Iterables.concat;
import com.google.common.base.Functions;
import com.google.common.base.Supplier;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Ordering;
import com.google.common.collect.Sets;
import com.google.common.testing.GcFinalization;
import com.google.common.testing.NullPointerTester;
import java.lang.ref.WeakReference;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Semaphore;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import junit.framework.TestCase;
/**
* Tests for Striped.
*
* @author Dimitris Andreou
*/
public class StripedTest extends TestCase {
private static List<Striped<?>> strongImplementations() {
return ImmutableList.of(
Striped.readWriteLock(100),
Striped.readWriteLock(256),
Striped.lock(100),
Striped.lock(256),
Striped.semaphore(100, 1),
Striped.semaphore(256, 1));
}
private static final Supplier<ReadWriteLock> READ_WRITE_LOCK_SUPPLIER =
new Supplier<ReadWriteLock>() {
@Override
public ReadWriteLock get() {
return new ReentrantReadWriteLock();
}
};
private static final Supplier<Lock> LOCK_SUPPLER =
new Supplier<Lock>() {
@Override
public Lock get() {
return new ReentrantLock();
}
};
private static final Supplier<Semaphore> SEMAPHORE_SUPPLER =
new Supplier<Semaphore>() {
@Override
public Semaphore get() {
return new Semaphore(1, false);
}
};
private static List<Striped<?>> weakImplementations() {
return ImmutableList.<Striped<?>>builder()
.add(new Striped.SmallLazyStriped<ReadWriteLock>(50, READ_WRITE_LOCK_SUPPLIER))
.add(new Striped.SmallLazyStriped<ReadWriteLock>(64, READ_WRITE_LOCK_SUPPLIER))
.add(new Striped.LargeLazyStriped<ReadWriteLock>(50, READ_WRITE_LOCK_SUPPLIER))
.add(new Striped.LargeLazyStriped<ReadWriteLock>(64, READ_WRITE_LOCK_SUPPLIER))
.add(new Striped.SmallLazyStriped<Lock>(50, LOCK_SUPPLER))
.add(new Striped.SmallLazyStriped<Lock>(64, LOCK_SUPPLER))
.add(new Striped.LargeLazyStriped<Lock>(50, LOCK_SUPPLER))
.add(new Striped.LargeLazyStriped<Lock>(64, LOCK_SUPPLER))
.add(new Striped.SmallLazyStriped<Semaphore>(50, SEMAPHORE_SUPPLER))
.add(new Striped.SmallLazyStriped<Semaphore>(64, SEMAPHORE_SUPPLER))
.add(new Striped.LargeLazyStriped<Semaphore>(50, SEMAPHORE_SUPPLER))
.add(new Striped.LargeLazyStriped<Semaphore>(64, SEMAPHORE_SUPPLER))
.build();
}
private static Iterable<Striped<?>> allImplementations() {
return concat(strongImplementations(), weakImplementations());
}
public void testNull() throws Exception {
for (Striped<?> striped : allImplementations()) {
new NullPointerTester().testAllPublicInstanceMethods(striped);
}
}
public void testSizes() {
// not bothering testing all variations, since we know they share implementations
assertTrue(Striped.lock(100).size() >= 100);
assertTrue(Striped.lock(256).size() == 256);
assertTrue(Striped.lazyWeakLock(100).size() >= 100);
assertTrue(Striped.lazyWeakLock(256).size() == 256);
}
public void testWeakImplementations() {
for (Striped<?> striped : weakImplementations()) {
WeakReference<Object> weakRef = new WeakReference<>(striped.get(new Object()));
GcFinalization.awaitClear(weakRef);
}
}
public void testWeakReadWrite() {
Striped<ReadWriteLock> striped = Striped.lazyWeakReadWriteLock(1000);
Object key = new Object();
Lock readLock = striped.get(key).readLock();
WeakReference<Object> garbage = new WeakReference<>(new Object());
GcFinalization.awaitClear(garbage);
Lock writeLock = striped.get(key).writeLock();
readLock.lock();
assertFalse(writeLock.tryLock());
readLock.unlock();
}
public void testStrongImplementations() {
for (Striped<?> striped : strongImplementations()) {
WeakReference<Object> weakRef = new WeakReference<>(striped.get(new Object()));
WeakReference<Object> garbage = new WeakReference<>(new Object());
GcFinalization.awaitClear(garbage);
assertNotNull(weakRef.get());
}
}
public void testMaximalWeakStripedLock() {
Striped<Lock> stripedLock = Striped.lazyWeakLock(Integer.MAX_VALUE);
for (int i = 0; i < 10000; i++) {
stripedLock.get(new Object()).lock();
// nothing special (e.g. an exception) happens
}
}
public void testBulkGetReturnsSorted() {
for (Striped<?> striped : allImplementations()) {
Map<Object, Integer> indexByLock = Maps.newHashMap();
for (int i = 0; i < striped.size(); i++) {
indexByLock.put(striped.getAt(i), i);
}
// ensure that bulkGet returns locks in monotonically increasing order
for (int objectsNum = 1; objectsNum <= striped.size() * 2; objectsNum++) {
Set<Object> objects = Sets.newHashSetWithExpectedSize(objectsNum);
for (int i = 0; i < objectsNum; i++) {
objects.add(new Object());
}
Iterable<?> locks = striped.bulkGet(objects);
assertTrue(Ordering.natural().onResultOf(Functions.forMap(indexByLock)).isOrdered(locks));
// check idempotency
Iterable<?> locks2 = striped.bulkGet(objects);
assertEquals(Lists.newArrayList(locks), Lists.newArrayList(locks2));
}
}
}
/** Checks idempotency, and that we observe the promised number of stripes. */
public void testBasicInvariants() {
for (Striped<?> striped : allImplementations()) {
assertBasicInvariants(striped);
}
}
private static void assertBasicInvariants(Striped<?> striped) {
Set<Object> observed = Sets.newIdentityHashSet(); // for the sake of weakly referenced locks.
// this gets the stripes with #getAt(index)
for (int i = 0; i < striped.size(); i++) {
Object object = striped.getAt(i);
assertNotNull(object);
assertSame(object, striped.getAt(i)); // idempotent
observed.add(object);
}
assertTrue("All stripes observed", observed.size() == striped.size());
// this uses #get(key), makes sure an already observed stripe is returned
for (int i = 0; i < striped.size() * 100; i++) {
assertTrue(observed.contains(striped.get(new Object())));
}
try {
striped.getAt(-1);
fail();
} catch (RuntimeException expected) {
}
try {
striped.getAt(striped.size());
fail();
} catch (RuntimeException expected) {
}
}
public void testMaxSize() {
for (Striped<?> striped :
ImmutableList.of(
Striped.lazyWeakLock(Integer.MAX_VALUE),
Striped.lazyWeakSemaphore(Integer.MAX_VALUE, Integer.MAX_VALUE),
Striped.lazyWeakReadWriteLock(Integer.MAX_VALUE))) {
for (int i = 0; i < 3; i++) {
// doesn't throw exception
Object unused = striped.getAt(Integer.MAX_VALUE - i);
}
}
}
}
| |
/*******************************************************************************
* Copyright 2015 EMBL - European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*******************************************************************************/
package org.mousephenotype.cda.solr.service.dto;
import org.mousephenotype.cda.enumerations.ObservationType;
import java.util.ArrayList;
import java.util.List;
/**
* @since 2015/07/28
* @author tudose
*
*/
/**
 * DTO describing an IMPReSS parameter as indexed in Solr.
 *
 * <p>FIX: the {@code parameterStableKey} field was previously declared in the middle of the
 * class, between its own accessors; it has been moved up with the other fields. No behavior
 * was changed.
 */
public class ParameterDTO extends ImpressBaseDTO {

    // Flags describing how the parameter is collected and processed.
    boolean increment;
    boolean metadata;
    boolean options;
    boolean derived;
    boolean required;
    boolean media;
    boolean annotate;

    private String dataType;
    private String parameterType;

    // Mouse anatomy (MA) and embryo anatomy (EMAP) ontology annotations.
    private String maId;
    private String maName;
    private String emapId;
    private String emapName;

    // Chart axis units.
    private String unitY;
    private String unitX;

    // MP ontology term ids linked to abnormal / increased / decreased outcomes.
    List<String> abnormalMpId;
    List<String> increasedMpId;
    List<String> decreasedMpId;

    ObservationType observationType;

    // Procedures this parameter belongs to. Lazily created by the add* helpers below.
    List<String> procedureStableIds;
    List<String> procedureNames;

    List<String> categories = new ArrayList<>();
    List<String> mpIds = new ArrayList<>();

    private Integer parameterStableKey;

    public Integer getParameterStableKey() {
        return parameterStableKey;
    }

    public void setParameterStableKey(Integer parameterStableKey) {
        this.parameterStableKey = parameterStableKey;
    }

    public String getUnitY() {
        return unitY;
    }

    public void setUnitY(String unitY) {
        this.unitY = unitY;
    }

    public String getUnitX() {
        return unitX;
    }

    public void setUnitX(String unitX) {
        this.unitX = unitX;
    }

    public String getEmapId() {
        return emapId;
    }

    public void setEmapId(String emapId) {
        this.emapId = emapId;
    }

    public String getEmapName() {
        return emapName;
    }

    public void setEmapName(String emapName) {
        this.emapName = emapName;
    }

    public List<String> getProcedureNames() {
        return procedureNames;
    }

    public void setProcedureNames(List<String> procedureNames) {
        this.procedureNames = procedureNames;
    }

    /** Appends a procedure name, lazily creating the list on first use. */
    public void addProcedureNames(String procedureName) {
        if (this.procedureNames == null) {
            this.procedureNames = new ArrayList<>();
        }
        this.procedureNames.add(procedureName);
    }

    public boolean isAnnotate() {
        return annotate;
    }

    public void setAnnotate(boolean annotate) {
        this.annotate = annotate;
    }

    public boolean isIncrement() {
        return increment;
    }

    public void setIncrement(boolean increment) {
        this.increment = increment;
    }

    public boolean isMetadata() {
        return metadata;
    }

    public void setMetadata(boolean metadata) {
        this.metadata = metadata;
    }

    public boolean isOptions() {
        return options;
    }

    public void setOptions(boolean options) {
        this.options = options;
    }

    public boolean isDerived() {
        return derived;
    }

    public void setDerived(boolean derived) {
        this.derived = derived;
    }

    public boolean isRequired() {
        return required;
    }

    public void setRequired(boolean required) {
        this.required = required;
    }

    public boolean isMedia() {
        return media;
    }

    public void setMedia(boolean media) {
        this.media = media;
    }

    public String getDataType() {
        return dataType;
    }

    public void setDataType(String dataType) {
        this.dataType = dataType;
    }

    public String getParameterType() {
        return parameterType;
    }

    public void setParameterType(String parameterType) {
        this.parameterType = parameterType;
    }

    public String getMaId() {
        return maId;
    }

    public void setMaId(String maId) {
        this.maId = maId;
    }

    public String getMaName() {
        return maName;
    }

    public void setMaName(String maName) {
        this.maName = maName;
    }

    public ObservationType getObservationType() {
        return observationType;
    }

    public void setObservationType(ObservationType observationType) {
        this.observationType = observationType;
    }

    public List<String> getProcedureStableIds() {
        return procedureStableIds;
    }

    public void setProcedureStableIds(List<String> procedureStableIds) {
        this.procedureStableIds = procedureStableIds;
    }

    /** Appends a procedure stable id, lazily creating the list on first use. */
    public void addProcedureStableId(String procedure) {
        if (this.procedureStableIds == null) {
            this.procedureStableIds = new ArrayList<>();
        }
        this.procedureStableIds.add(procedure);
    }

    public List<String> getCategories() {
        return categories;
    }

    public void setCategories(List<String> categories) {
        this.categories = categories;
    }

    // Guard kept even though the field is initialized inline: setCategories(null) is possible.
    public void addCategories(String category) {
        if (this.categories == null) {
            this.categories = new ArrayList<>();
        }
        this.categories.add(category);
    }

    public List<String> getMpIds() {
        return mpIds;
    }

    public void setMpIds(List<String> mpIds) {
        this.mpIds = mpIds;
    }

    // Guard kept even though the field is initialized inline: setMpIds(null) is possible.
    public void addMpIds(String mpId) {
        if (this.mpIds == null) {
            this.mpIds = new ArrayList<>();
        }
        this.mpIds.add(mpId);
    }

    public List<String> getAbnormalMpId() {
        return abnormalMpId;
    }

    public void setAbnormalMpId(List<String> abnormalMpId) {
        this.abnormalMpId = abnormalMpId;
    }

    /** Appends an abnormal MP term id, lazily creating the list on first use. */
    public void addAbnormalMpId(String mpId) {
        if (this.abnormalMpId == null) {
            this.abnormalMpId = new ArrayList<>();
        }
        this.abnormalMpId.add(mpId);
    }

    public List<String> getIncreasedMpId() {
        return increasedMpId;
    }

    public void setIncreasedMpId(List<String> increasedMpId) {
        this.increasedMpId = increasedMpId;
    }

    /** Appends an increased MP term id, lazily creating the list on first use. */
    public void addIncreasedMpId(String mpId) {
        if (this.increasedMpId == null) {
            this.increasedMpId = new ArrayList<>();
        }
        this.increasedMpId.add(mpId);
    }

    public List<String> getDecreasedMpId() {
        return decreasedMpId;
    }

    public void setDecreasedMpId(List<String> decreasedMpId) {
        this.decreasedMpId = decreasedMpId;
    }

    /** Appends a decreased MP term id, lazily creating the list on first use. */
    public void addDecreasedMpId(String mpId) {
        if (this.decreasedMpId == null) {
            this.decreasedMpId = new ArrayList<>();
        }
        this.decreasedMpId.add(mpId);
    }

    @Override
    public String toString() {
        return "ParameterDTO{" +
                "increment=" + increment +
                ", metadata=" + metadata +
                ", options=" + options +
                ", derived=" + derived +
                ", required=" + required +
                ", media=" + media +
                ", annotate=" + annotate +
                ", dataType='" + dataType + '\'' +
                ", parameterType='" + parameterType + '\'' +
                ", maId='" + maId + '\'' +
                ", maName='" + maName + '\'' +
                ", unitY='" + unitY + '\'' +
                ", unitX='" + unitX + '\'' +
                ", abnormalMpId=" + abnormalMpId +
                ", increasedMpId=" + increasedMpId +
                ", decreasedMpId=" + decreasedMpId +
                ", observationType=" + observationType +
                ", procedureStableIds=" + procedureStableIds +
                ", procedureNames=" + procedureNames +
                ", categories=" + categories +
                ", mpIds=" + mpIds +
                ", emapId='" + emapId + '\'' +
                ", emapName='" + emapName + '\'' +
                '}';
    }

    /**
     * NOTE(review): despite its name, this delegates to the inherited {@code getStableKey()}
     * of the base DTO — confirm it really represents the procedure (not parameter) stable key.
     */
    public Long getProcedureStableKey() {
        return super.getStableKey();
    }
}
| |
package popularioty.commons.services.searchengine.factory;
import java.io.IOException;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.index.query.AndFilterBuilder;
import org.elasticsearch.index.query.FilterBuilders;
import org.elasticsearch.node.Node;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import popularioty.commons.exception.PopulariotyException;
import popularioty.commons.exception.PopulariotyException.Level;
import popularioty.commons.services.searchengine.elasticsearch.ElasticSearchAdapter;
import popularioty.commons.services.searchengine.queries.Query;
import popularioty.commons.services.searchengine.queries.QueryResponse;
import popularioty.commons.services.searchengine.queries.QueryResponse.QueryResponseType;
import popularioty.commons.services.searchengine.queries.QueryType;
import popularioty.commons.services.storageengine.factory.StorageProvider;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
 * Although implementing the locking mechanism for each class extending this SearchNode is not the most elegant, it was preferred
 * because it preserves encapsulation.
 * @author dp
 *
 */
public class ElasticSearchNode extends ElasticSearchAdapter implements SearchProvider {
private static Logger LOG = LoggerFactory.getLogger(ElasticSearchNode.class);
private String prop_transport_host;
private Node node;
private Client client;
private boolean closed = false;
/**
* Gets all the documents from a given list of Ids
* @param res QueryResponse
* @param index index to be used to retrieve the data
* @return List of Maps containing all the documents
* @throws PopulariotyException
*/
private QueryResponse retrieveFullData(QueryResponse res,String index) throws PopulariotyException
{
if(store != null && res.getQueryResponsetype().equals(QueryResponseType.LIST_OF_STRINGS))
{
List<Map<String,Object>> docs = new LinkedList<Map<String,Object>>();
docs = store.getData(res.getListofStringsResult(), index, false);
res.setQueryResponsetype(QueryResponseType.LIST_OF_MAPS);
res.setListofMapsResult(docs);
}
return res;
}
private List<String> parseHosts(String hosts) throws PopulariotyException
{
ObjectMapper mapper = new ObjectMapper();
JsonNode data;
try {
data = mapper.readTree(hosts);
List <String>hostsList = mapper.convertValue(data, List.class);
return hostsList;
} catch (JsonProcessingException e) {
throw new PopulariotyException("Configuration error. Contact the Administrator",null,LOG,"JsonProcessing (Jackson) Exception while parsing array (JSON) from properties while building CouchBaseStorage in popularioty-commons"+e.getMessage() ,Level.ERROR,500);
} catch (IOException e) {
throw new PopulariotyException("Configuration error. Contact the Administrator",null,LOG,"IO Exception while parsing array (JSON) from properties while building CouchBaseStorage in popularioty-commons"+e.getMessage() ,Level.ERROR,500);
}
}
/**
* Receives a string contining a JSON array of urls of host:port pointing to ES nodes in the cluster under the property client.transport.host
* Also the StorageProvider used to ge the data by ID must be provided with the key storage.provider.object
*/
@Override
public void init(Map<String, Object> configuration) throws Exception {
if(configuration.containsKey("storage.provider.object"))
store = (StorageProvider) configuration.get("storage.provider.object");
//{
this.prop_transport_host= (String) configuration.get("client.transport.host");
//}//readProperties(properties);
List<String> hosts = parseHosts(prop_transport_host);
if(configuration.containsKey("client.transport.clustername"))
{
Settings settings = ImmutableSettings.settingsBuilder()
.put("cluster.name", configuration.get("client.transport.clustername")).build();
client = new TransportClient(settings);
}
else{
client = new TransportClient();
}
for(String host: hosts)
{
String arr[] = host.split(":");
client = ((TransportClient) client).addTransportAddress(new InetSocketTransportAddress(arr[0], (arr.length>1?Integer.parseInt(arr[1]):9300)));
}
LOG.info("Initializing the elasticsearch node connection...");
closed = false;
}
@Override
public void close(Map<String, Object> configuration) throws Exception {
if(node != null && !closed)
{
node.close();
node = null;
client = null;
closed = true;
LOG.info("Closing the elasticsearch node connection!");
}
LOG.info("Attempting to close elasticsearch node, but it was already closed...");
}
public Map<String,Long> getCountOfDocumentsByTerm(Map<String,String> mustMatchCriteria, String term)
{
FilterAggregationBuilder built = AggregationBuilders
.filter("agg");
for(String key: mustMatchCriteria.keySet())
{
built.filter(FilterBuilders.termFilter(key ,mustMatchCriteria.get(key) ));
}
built.subAggregation(AggregationBuilders.terms("agg_term").field(term));
SearchResponse sr = client.prepareSearch().addAggregation(built).execute().actionGet();
Filter agg = sr.getAggregations().get("agg");
Terms term_r = agg.getAggregations().get("agg_term");
Map<String,Long> termsresult = new HashMap<String, Long>();
for (Terms.Bucket entry: term_r.getBuckets()) {
termsresult.put(entry.getKey(), entry.getDocCount());
}
return termsresult;
}
public QueryResponse execute(Query query, String index) throws PopulariotyException
{
QueryResponse res = null;
try{
if(query.getType().equals(QueryType.SELECT)|| query.getType().equals(QueryType.SELECT_ID))
{
res = super.executeSelect(query, index,true);
if(store != null && query.getType().equals(QueryType.SELECT) && res.getQueryResponsetype().equals(QueryResponseType.LIST_OF_STRINGS))
{
res = retrieveFullData(res, index);
}
}
else if(query.getType().equals(QueryType.AGGREGATIONS))
res = executeAggregation(query, null);
else if (query.getType().equals(QueryType.FUZZY_TEXT_SEARCH))
res = retrieveFullData(executeFuzzyTextSearch(query, index,true),index);
}catch(SearchPhaseExecutionException se)
{
if(se.phaseName().equals("query"))
throw new PopulariotyException("Search error",null,LOG,"Unable to execute query in ElasticSearch. SearchPhaseExecutionException with phasename == query. Index is empty?",Level.DEBUG,500);
}
return res;
}
// Additional methods specific for elasticsearch node
//TODO needs to be removed after the refactoring has been done!
public Node getNode() {
return node;
}
public void setNode(Node node) {
this.node = node;
}
public Client getClient() {
return client;
}
public void setClient(Client client) {
this.client = client;
}
public static Map<String, Object> addId(SearchHit hit, String idField) {
Map<String, Object> tmp;
tmp = hit.getSource();
tmp.put(idField, hit.getId());
return tmp;
}
}
| |
/*
* The MIT License
*
* Copyright (c) 2020, CloudBees, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package jenkins.tasks.filters.impl;
import edu.umd.cs.findbugs.annotations.NonNull;
import hudson.EnvVars;
import hudson.Extension;
import hudson.model.Job;
import hudson.model.Run;
import hudson.util.FormValidation;
import jenkins.tasks.filters.EnvVarsFilterRuleContext;
import jenkins.tasks.filters.EnvVarsFilterLocalRule;
import jenkins.tasks.filters.EnvVarsFilterLocalRuleDescriptor;
import jenkins.tasks.filters.EnvVarsFilterableBuilder;
import org.apache.commons.lang.StringUtils;
import org.jenkinsci.Symbol;
import org.jvnet.localizer.Localizable;
import org.kohsuke.accmod.Restricted;
import org.kohsuke.accmod.restrictions.NoExternalUse;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.DataBoundSetter;
import org.kohsuke.stapler.QueryParameter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
/**
* Local rule that removes all the non-retained variables for that step.
*
* @since 2.246
*/
@Restricted(NoExternalUse.class)
public class RetainVariablesLocalRule implements EnvVarsFilterLocalRule {

    /**
     * The variables considered to be 'characteristic' for the purposes of this rule.
     *
     * @see Job#getCharacteristicEnvVars()
     * @see Run#getCharacteristicEnvVars()
     */
    // TODO Make the 'HUDSON_COOKIE' variable less special so we can remove it.
    // TODO consider just querying the build, if any, for its characteristic env vars
    private static final List<String> CHARACTERISTIC_ENV_VARS = Arrays.asList("jenkins_server_cookie", "hudson_server_cookie", "job_name", "job_base_name", "build_number", "build_id", "build_tag");

    /**
     * List of lowercase names of variable that will be retained from removal.
     * NOTE: despite the wording above, this is stored as a single whitespace-separated
     * String and normalized via {@link #convertStringToList(String)}.
     */
    private String variables = "";

    // Characteristic env vars (see CHARACTERISTIC_ENV_VARS) are kept unless explicitly disabled.
    private boolean retainCharacteristicEnvVars = true;

    // How variables that also exist in the controller's own process environment are treated.
    private ProcessVariablesHandling processVariablesHandling = ProcessVariablesHandling.RESET;

    @DataBoundConstructor
    public RetainVariablesLocalRule() {
    }

    // Accepts a whitespace-separated list of variable names; matching is case-insensitive.
    @DataBoundSetter
    public void setVariables(@NonNull String variables) {
        this.variables = variables;
    }

    // Splits the whitespace-separated configuration string into sorted, lowercase names,
    // skipping blank fragments.
    private static List<String> convertStringToList(@NonNull String variablesCommaSeparated) {
        String[] variablesArray = variablesCommaSeparated.split("\\s+");
        List<String> variables = new ArrayList<>();
        for (String nameFragment : variablesArray) {
            if (StringUtils.isNotBlank(nameFragment)) {
                variables.add(nameFragment.toLowerCase(Locale.ENGLISH));
            }
        }
        Collections.sort(variables); // TODO do we really want to sort this?
        return variables;
    }

    // for jelly view
    @Restricted(NoExternalUse.class)
    public @NonNull String getVariables() {
        return variables;
    }

    @DataBoundSetter
    public void setRetainCharacteristicEnvVars(boolean retainCharacteristicEnvVars) {
        this.retainCharacteristicEnvVars = retainCharacteristicEnvVars;
    }

    /**
     * Whether to retain characteristic environment variables.
     * @return true if and only if to retain characteristic environment variables.
     *
     * @see Job#getCharacteristicEnvVars()
     * @see Run#getCharacteristicEnvVars()
     */
    public boolean isRetainCharacteristicEnvVars() {
        return retainCharacteristicEnvVars;
    }

    // Combined lowercase list of user-configured and (optionally) characteristic variables to keep.
    private List<String> variablesToRetain() {
        List<String> vars = new ArrayList<>(convertStringToList(this.variables));
        if (isRetainCharacteristicEnvVars()) {
            vars.addAll(CHARACTERISTIC_ENV_VARS);
        }
        return vars;
    }

    /**
     * Removes (or resets) every variable that is not in the retain list. Variables absent from
     * the controller's own process environment are removed outright; variables that also exist
     * there are either reset to the system value or removed, depending on
     * {@link #getProcessVariablesHandling()}. Removals and resets are reported to the task log.
     */
    @Override
    public void filter(@NonNull EnvVars envVars, @NonNull EnvVarsFilterRuleContext context) {
        Map<String, String> systemEnvVars = EnvVars.masterEnvVars;
        final List<String> variablesRemoved = new ArrayList<>();
        final List<String> variablesReset = new ArrayList<>();
        final List<String> variables = variablesToRetain();
        // Iterator-based loop so entries can be removed while iterating.
        for (Iterator<Map.Entry<String, String>> iterator = envVars.entrySet().iterator(); iterator.hasNext(); ) {
            Map.Entry<String, String> entry = iterator.next();
            String variableName = entry.getKey();
            String variableValue = entry.getValue();
            if (!variables.contains(variableName.toLowerCase(Locale.ENGLISH))) {
                // systemEnvVars's keys are case insensitive
                String systemValue = systemEnvVars.get(variableName);
                if (systemValue == null) {
                    // Not part of the process environment: simply drop it.
                    variablesRemoved.add(variableName);
                    iterator.remove();
                } else {
                    switch (processVariablesHandling) {
                        case RESET:
                            // Only record it here; the actual reset happens after the iteration.
                            if (!systemValue.equals(variableValue)) {
                                variablesReset.add(variableName);
                            }
                            break;
                        case REMOVE:
                            variablesRemoved.add(variableName);
                            iterator.remove();
                            break;
                    }
                }
            }
        }
        if (!variablesRemoved.isEmpty()) {
            context.getTaskListener().getLogger().println(Messages.RetainVariablesLocalRule_RemovalMessage(getDescriptor().getDisplayName(), StringUtils.join(variablesRemoved.toArray(), ", ")));
        }
        if (!variablesReset.isEmpty()) {
            // reset the variables using the initial value from System
            variablesReset.forEach(variableName -> envVars.put(variableName, systemEnvVars.get(variableName)));
            context.getTaskListener().getLogger().println(Messages.RetainVariablesLocalRule_ResetMessage(getDescriptor().getDisplayName(), StringUtils.join(variablesReset.toArray(), ", ")));
        }
    }

    public ProcessVariablesHandling getProcessVariablesHandling() {
        return processVariablesHandling;
    }

    @DataBoundSetter
    public void setProcessVariablesHandling(ProcessVariablesHandling processVariablesHandling) {
        this.processVariablesHandling = processVariablesHandling;
    }

    // the ordinal is used to sort the rules in term of execution, the higher value first
    // and take care of the fact that local rules are always applied before global ones
    @Extension(ordinal = 1000)
    @Symbol("retainOnlyVariables")
    public static final class DescriptorImpl extends EnvVarsFilterLocalRuleDescriptor {
        public DescriptorImpl() {
            load();
        }

        // Form validation: warn when characteristic env vars would be filtered out.
        @Restricted(NoExternalUse.class)
        public FormValidation doCheckRetainCharacteristicEnvVars(@QueryParameter boolean value) {
            if (!value) {
                return FormValidation.warning(Messages.RetainVariablesLocalRule_CharacteristicEnvVarsFormValidationWarning());
            }
            return FormValidation.ok(Messages.RetainVariablesLocalRule_CharacteristicEnvVarsFormValidationOK());
        }

        @Override
        public @NonNull String getDisplayName() {
            return Messages.RetainVariablesLocalRule_DisplayName();
        }

        // This rule can be applied to any filterable builder.
        @Override
        public boolean isApplicable(@NonNull Class<? extends EnvVarsFilterableBuilder> builderClass) {
            return true;
        }
    }

    /** Strategies for variables that also exist in the controller's process environment. */
    public enum ProcessVariablesHandling {
        RESET(Messages._RetainVariablesLocalRule_RESET_DisplayName()),
        REMOVE(Messages._RetainVariablesLocalRule_REMOVE_DisplayName());

        private final Localizable localizable;

        ProcessVariablesHandling(Localizable localizable) {
            this.localizable = localizable;
        }

        public String getDisplayName() {
            return localizable.toString();
        }
    }
}
| |
/*
* Copyright (c) 2016 Gridtec. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package at.gridtec.lambda4j.function.bi.to;
import at.gridtec.lambda4j.Lambda;
import at.gridtec.lambda4j.consumer.ThrowableLongConsumer;
import at.gridtec.lambda4j.consumer.bi.ThrowableBiConsumer;
import at.gridtec.lambda4j.core.exception.ThrownByFunctionalInterfaceException;
import at.gridtec.lambda4j.core.util.ThrowableUtils;
import at.gridtec.lambda4j.function.ThrowableFunction;
import at.gridtec.lambda4j.function.ThrowableLongFunction;
import at.gridtec.lambda4j.function.bi.ThrowableBiFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableLongToByteFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableLongToCharFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableLongToDoubleFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableLongToFloatFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableLongToIntFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableLongToShortFunction;
import at.gridtec.lambda4j.function.to.ThrowableToLongFunction;
import at.gridtec.lambda4j.operator.unary.ThrowableLongUnaryOperator;
import at.gridtec.lambda4j.predicate.ThrowableLongPredicate;
import at.gridtec.lambda4j.predicate.bi.ThrowableBiPredicate;
import org.apache.commons.lang3.tuple.Pair;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
import java.util.function.ToLongBiFunction;
/**
* Represents an operation that accepts two input arguments and produces a
* {@code long}-valued result which is able to throw any {@link Throwable}.
* This is a primitive specialization of {@link ThrowableBiFunction}.
* <p>
* This is a {@link FunctionalInterface} whose functional method is {@link #applyAsLongThrows(Object, Object)}.
*
* @param <T> The type of the first argument to the function
* @param <U> The type of the second argument to the function
* @param <X> The type of the throwable to be thrown by this function
* @apiNote This is a throwable JDK lambda.
* @see ThrowableBiFunction
*/
@SuppressWarnings("unused")
@FunctionalInterface
public interface ThrowableToLongBiFunction<T, U, X extends Throwable> extends Lambda, ToLongBiFunction<T, U> {
/**
 * Constructs a {@link ThrowableToLongBiFunction} from a lambda expression or a method reference. The given
 * expression is returned on an as-is basis; this method merely acts as a type hint so the compiler binds the
 * lambda or method reference to the desired functional interface type.
 *
 * @param <T> The type of the first argument to the function
 * @param <U> The type of the second argument to the function
 * @param <X> The type of the throwable to be thrown by this function
 * @param expression A lambda expression or (typically) a method reference, e.g. {@code this::method}
 * @return A {@code ThrowableToLongBiFunction} from given lambda expression or method reference.
 * @implNote A {@code null} argument is allowed; in that case {@code null} is returned.
 * @see <a href="https://docs.oracle.com/javase/tutorial/java/javaOO/lambdaexpressions.html#syntax">Lambda
 * Expression</a>
 * @see <a href="https://docs.oracle.com/javase/tutorial/java/javaOO/methodreferences.html">Method Reference</a>
 */
static <T, U, X extends Throwable> ThrowableToLongBiFunction<T, U, X> of(
        @Nullable final ThrowableToLongBiFunction<T, U, X> expression) {
    return expression;
}
/**
 * Invokes the given {@link ThrowableToLongBiFunction} with the supplied arguments and returns its result.
 *
 * @param <T> The type of the first argument to the function
 * @param <U> The type of the second argument to the function
 * @param <X> The type of the throwable to be thrown by this function
 * @param function The function to be called
 * @param t The first argument to the function
 * @param u The second argument to the function
 * @return The result from the given {@code ThrowableToLongBiFunction}.
 * @throws NullPointerException If given function is {@code null}
 * @throws X Any throwable from this functions action
 */
static <T, U, X extends Throwable> long call(
        @Nonnull final ThrowableToLongBiFunction<? super T, ? super U, ? extends X> function, T t, U u) throws X {
    return Objects.requireNonNull(function).applyAsLongThrows(t, u);
}
/**
 * Creates a {@link ThrowableToLongBiFunction} that applies the given {@link ThrowableToLongFunction} to its
 * {@code first} parameter and ignores the second.
 *
 * @param <T> The type of the first argument to the function
 * @param <U> The type of the second argument to the function
 * @param <X> The type of the throwable to be thrown by this function
 * @param function The function which accepts the {@code first} parameter of this one
 * @return A {@code ThrowableToLongBiFunction} delegating its {@code first} parameter to the given function.
 * @throws NullPointerException If given argument is {@code null}
 */
@Nonnull
static <T, U, X extends Throwable> ThrowableToLongBiFunction<T, U, X> onlyFirst(
        @Nonnull final ThrowableToLongFunction<? super T, ? extends X> function) {
    Objects.requireNonNull(function);
    return (first, ignored) -> function.applyAsLongThrows(first);
}
/**
 * Creates a {@link ThrowableToLongBiFunction} that applies the given {@link ThrowableToLongFunction} to its
 * {@code second} parameter and ignores the first.
 *
 * @param <T> The type of the first argument to the function
 * @param <U> The type of the second argument to the function
 * @param <X> The type of the throwable to be thrown by this function
 * @param function The function which accepts the {@code second} parameter of this one
 * @return A {@code ThrowableToLongBiFunction} delegating its {@code second} parameter to the given function.
 * @throws NullPointerException If given argument is {@code null}
 */
@Nonnull
static <T, U, X extends Throwable> ThrowableToLongBiFunction<T, U, X> onlySecond(
        @Nonnull final ThrowableToLongFunction<? super U, ? extends X> function) {
    Objects.requireNonNull(function);
    return (ignored, second) -> function.applyAsLongThrows(second);
}
/**
 * Creates a {@link ThrowableToLongBiFunction} that ignores its arguments and always returns the given value.
 *
 * @param <T> The type of the first argument to the function
 * @param <U> The type of the second argument to the function
 * @param <X> The type of the throwable to be thrown by this function
 * @param ret The return value for the constant
 * @return A {@code ThrowableToLongBiFunction} which always returns a given value.
 */
@Nonnull
static <T, U, X extends Throwable> ThrowableToLongBiFunction<T, U, X> constant(long ret) {
    return (first, second) -> ret;
}
/**
 * Applies this function to the given arguments. This is the primary abstract method of this
 * functional interface; all other application methods delegate to it.
 *
 * @param t The first argument to the function
 * @param u The second argument to the function
 * @return The return value from the function, which is its result.
 * @throws X Any throwable from this functions action
 */
long applyAsLongThrows(T t, U u) throws X;
/**
 * Applies this function to the given arguments.
 *
 * @param t The first argument to the function
 * @param u The second argument to the function
 * @return The return value from the function, which is its result.
 * @apiNote This method mainly exists to use this {@link ThrowableToLongBiFunction} in JRE specific methods only
 * accepting {@link ToLongBiFunction}. If this function should be applied, then the {@link
 * #applyAsLongThrows(Object, Object)} method should be used.
 * @apiNote Overrides the {@link ToLongBiFunction#applyAsLong(Object, Object)} method by using a redefinition as
 * default method. This implementation calls the {@link #applyAsLongThrows(Object, Object)} method of this function
 * and catches the eventually thrown {@link Throwable} from it. If it is of type {@link RuntimeException} or {@link
 * Error} it is rethrown as is. Other {@code Throwable} types are wrapped in a {@link
 * ThrownByFunctionalInterfaceException}.
 */
@Override
default long applyAsLong(T t, U u) {
    // Delegating to nest() provides the throwable handling described above. The previously
    // commented-out inline implementation (flagged by its own TODO) was dead code and has
    // been removed.
    return nest().applyAsLong(t, u);
}
/**
 * Applies this function to the two components of the given tuple.
 *
 * @param tuple The tuple to be applied to the function
 * @return The return value from the function, which is its result.
 * @throws NullPointerException If given argument is {@code null}
 * @throws X Any throwable from this functions action
 * @see org.apache.commons.lang3.tuple.Pair
 */
default long applyAsLongThrows(@Nonnull Pair<T, U> tuple) throws X {
    Objects.requireNonNull(tuple);
    T first = tuple.getLeft();
    U second = tuple.getRight();
    return applyAsLongThrows(first, second);
}
/**
 * Partially applies this function by fixing its first argument, producing a
 * {@link ThrowableToLongFunction} over the remaining argument.
 *
 * @param t The first argument to this function used to partially apply this function
 * @return A {@code ThrowableToLongFunction} representing this function with its first argument fixed.
 */
@Nonnull
default ThrowableToLongFunction<U, X> papplyAsLongThrows(T t) {
    return u -> applyAsLongThrows(t, u);
}
/**
 * Returns the number of arguments for this function.
 *
 * @return The number of arguments for this function.
 * @implSpec The default implementation always returns {@code 2}.
 */
@Nonnegative
default int arity() {
    // A bi-function always takes exactly two arguments.
    return 2;
}
/**
 * Returns a composed {@link ThrowableToLongBiFunction} that first applies the {@code before} functions to its
 * input, and then applies this function to their results.
 *
 * @param <A> The type of the argument to the first given function, and of composed function
 * @param <B> The type of the argument to the second given function, and of composed function
 * @param before1 The first function to apply before this function is applied
 * @param before2 The second function to apply before this function is applied
 * @return A composed {@code ThrowableToLongBiFunction} that pre-processes both inputs before applying this
 * function.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to handle every type.
 */
@Nonnull
default <A, B> ThrowableToLongBiFunction<A, B, X> compose(
        @Nonnull final ThrowableFunction<? super A, ? extends T, ? extends X> before1,
        @Nonnull final ThrowableFunction<? super B, ? extends U, ? extends X> before2) {
    Objects.requireNonNull(before1);
    Objects.requireNonNull(before2);
    return (a, b) -> {
        // Evaluate in the same order as the original: before1, then before2.
        T first = before1.applyThrows(a);
        U second = before2.applyThrows(b);
        return applyAsLongThrows(first, second);
    };
}
/**
* Returns a composed {@link ThrowableBiFunction} that first applies this function to its input, and then applies
* the {@code after} function to the result.
*
* @param <S> The type of return value from the {@code after} function, and of the composed function
* @param after The function to apply after this function is applied
* @return A composed {@code ThrowableBiFunction} that first applies this function to its input, and then applies
* the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is able to return every type.
*/
@Nonnull
default <S> ThrowableBiFunction<T, U, S, X> andThen(
@Nonnull final ThrowableLongFunction<? extends S, ? extends X> after) {
Objects.requireNonNull(after);
return (t, u) -> after.applyThrows(applyAsLongThrows(t, u));
}
/**
* Returns a composed {@link ThrowableBiPredicate} that first applies this function to its input, and then applies
* the {@code after} predicate to the result. This method is just convenience, to provide the ability to transform
* this primitive function to an operation returning {@code boolean}.
*
* @param after The predicate to apply after this function is applied
* @return A composed {@code ThrowableBiPredicate} that first applies this function to its input, and then applies
* the {@code after} predicate to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to return primitive values. In this case this is {@code
* boolean}.
*/
@Nonnull
default ThrowableBiPredicate<T, U, X> andThenToBoolean(@Nonnull final ThrowableLongPredicate<? extends X> after) {
Objects.requireNonNull(after);
return (t, u) -> after.testThrows(applyAsLongThrows(t, u));
}
/**
* Returns a composed {@link ThrowableToByteBiFunction} that first applies this function to its input, and then
* applies the {@code after} function to the result. This method is just convenience, to provide the ability to
* transform this primitive function to an operation returning {@code byte}.
*
* @param after The function to apply after this function is applied
* @return A composed {@code ThrowableToByteBiFunction} that first applies this function to its input, and then
* applies the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to return primitive values. In this case this is {@code
* byte}.
*/
@Nonnull
default ThrowableToByteBiFunction<T, U, X> andThenToByte(
@Nonnull final ThrowableLongToByteFunction<? extends X> after) {
Objects.requireNonNull(after);
return (t, u) -> after.applyAsByteThrows(applyAsLongThrows(t, u));
}
/**
* Returns a composed {@link ThrowableToCharBiFunction} that first applies this function to its input, and then
* applies the {@code after} function to the result. This method is just convenience, to provide the ability to
* transform this primitive function to an operation returning {@code char}.
*
* @param after The function to apply after this function is applied
* @return A composed {@code ThrowableToCharBiFunction} that first applies this function to its input, and then
* applies the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to return primitive values. In this case this is {@code
* char}.
*/
@Nonnull
default ThrowableToCharBiFunction<T, U, X> andThenToChar(
@Nonnull final ThrowableLongToCharFunction<? extends X> after) {
Objects.requireNonNull(after);
return (t, u) -> after.applyAsCharThrows(applyAsLongThrows(t, u));
}
/**
* Returns a composed {@link ThrowableToDoubleBiFunction} that first applies this function to its input, and then
* applies the {@code after} function to the result. This method is just convenience, to provide the ability to
* transform this primitive function to an operation returning {@code double}.
*
* @param after The function to apply after this function is applied
* @return A composed {@code ThrowableToDoubleBiFunction} that first applies this function to its input, and then
* applies the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to return primitive values. In this case this is {@code
* double}.
*/
@Nonnull
default ThrowableToDoubleBiFunction<T, U, X> andThenToDouble(
@Nonnull final ThrowableLongToDoubleFunction<? extends X> after) {
Objects.requireNonNull(after);
return (t, u) -> after.applyAsDoubleThrows(applyAsLongThrows(t, u));
}
/**
* Returns a composed {@link ThrowableToFloatBiFunction} that first applies this function to its input, and then
* applies the {@code after} function to the result. This method is just convenience, to provide the ability to
* transform this primitive function to an operation returning {@code float}.
*
* @param after The function to apply after this function is applied
* @return A composed {@code ThrowableToFloatBiFunction} that first applies this function to its input, and then
* applies the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to return primitive values. In this case this is {@code
* float}.
*/
@Nonnull
default ThrowableToFloatBiFunction<T, U, X> andThenToFloat(
@Nonnull final ThrowableLongToFloatFunction<? extends X> after) {
Objects.requireNonNull(after);
return (t, u) -> after.applyAsFloatThrows(applyAsLongThrows(t, u));
}
/**
* Returns a composed {@link ThrowableToIntBiFunction} that first applies this function to its input, and then
* applies the {@code after} function to the result. This method is just convenience, to provide the ability to
* transform this primitive function to an operation returning {@code int}.
*
* @param after The function to apply after this function is applied
* @return A composed {@code ThrowableToIntBiFunction} that first applies this function to its input, and then
* applies the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to return primitive values. In this case this is {@code
* int}.
*/
@Nonnull
default ThrowableToIntBiFunction<T, U, X> andThenToInt(
@Nonnull final ThrowableLongToIntFunction<? extends X> after) {
Objects.requireNonNull(after);
return (t, u) -> after.applyAsIntThrows(applyAsLongThrows(t, u));
}
/**
* Returns a composed {@link ThrowableToLongBiFunction} that first applies this function to its input, and then
* applies the {@code after} operator to the result. This method is just convenience, to provide the ability to
* transform this primitive function to an operation returning {@code long}.
*
* @param after The operator to apply after this function is applied
* @return A composed {@code ThrowableToLongBiFunction} that first applies this function to its input, and then
* applies the {@code after} operator to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to return primitive values. In this case this is {@code
* long}.
*/
@Nonnull
default ThrowableToLongBiFunction<T, U, X> andThenToLong(
@Nonnull final ThrowableLongUnaryOperator<? extends X> after) {
Objects.requireNonNull(after);
return (t, u) -> after.applyAsLongThrows(applyAsLongThrows(t, u));
}
/**
* Returns a composed {@link ThrowableToShortBiFunction} that first applies this function to its input, and then
* applies the {@code after} function to the result. This method is just convenience, to provide the ability to
* transform this primitive function to an operation returning {@code short}.
*
* @param after The function to apply after this function is applied
* @return A composed {@code ThrowableToShortBiFunction} that first applies this function to its input, and then
* applies the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to return primitive values. In this case this is {@code
* short}.
*/
@Nonnull
default ThrowableToShortBiFunction<T, U, X> andThenToShort(
@Nonnull final ThrowableLongToShortFunction<? extends X> after) {
Objects.requireNonNull(after);
return (t, u) -> after.applyAsShortThrows(applyAsLongThrows(t, u));
}
/**
* Returns a composed {@link ThrowableBiConsumer} that fist applies this function to its input, and then consumes
* the result using the given {@link ThrowableLongConsumer}.
*
* @param consumer The operation which consumes the result from this operation
* @return A composed {@code ThrowableBiConsumer} that first applies this function to its input, and then consumes
* the result using the given {@code ThrowableLongConsumer}.
* @throws NullPointerException If given argument is {@code null}
*/
@Nonnull
default ThrowableBiConsumer<T, U, X> consume(@Nonnull final ThrowableLongConsumer<? extends X> consumer) {
Objects.requireNonNull(consumer);
return (t, u) -> consumer.acceptThrows(applyAsLongThrows(t, u));
}
/**
* Returns a tupled version of this function.
*
* @return A tupled version of this function.
*/
@Nonnull
default ThrowableToLongFunction<Pair<T, U>, X> tupled() {
return this::applyAsLongThrows;
}
/**
* Returns a reversed version of this function. This may be useful in recursive context.
*
* @return A reversed version of this function.
*/
@Nonnull
default ThrowableToLongBiFunction<U, T, X> reversed() {
return (u, t) -> applyAsLongThrows(t, u);
}
/**
* Returns a memoized (caching) version of this {@link ThrowableToLongBiFunction}. Whenever it is called, the
* mapping between the input parameters and the return value is preserved in a cache, making subsequent calls
* returning the memoized value instead of computing the return value again.
* <p>
* Unless the function and therefore the used cache will be garbage-collected, it will keep all memoized values
* forever.
*
* @return A memoized (caching) version of this {@code ThrowableToLongBiFunction}.
* @implSpec This implementation does not allow the input parameters or return value to be {@code null} for the
* resulting memoized function, as the cache used internally does not permit {@code null} keys or values.
* @implNote The returned memoized function can be safely used concurrently from multiple threads which makes it
* thread-safe.
*/
@Nonnull
default ThrowableToLongBiFunction<T, U, X> memoized() {
if (isMemoized()) {
return this;
} else {
final Map<Pair<T, U>, Long> cache = new ConcurrentHashMap<>();
final Object lock = new Object();
return (ThrowableToLongBiFunction<T, U, X> & Memoized) (t, u) -> {
final long returnValue;
synchronized (lock) {
returnValue = cache.computeIfAbsent(Pair.of(t, u), ThrowableFunction.of(
key -> applyAsLongThrows(key.getLeft(), key.getRight())));
}
return returnValue;
};
}
}
/**
* Returns a composed {@link ThrowableBiFunction} which represents this {@link ThrowableToLongBiFunction}. Thereby
* the primitive input argument for this function is autoboxed. This method provides the possibility to use this
* {@code ThrowableToLongBiFunction} with methods provided by the {@code JDK}.
*
* @return A composed {@code ThrowableBiFunction} which represents this {@code ThrowableToLongBiFunction}.
*/
@Nonnull
default ThrowableBiFunction<T, U, Long, X> boxed() {
return this::applyAsLongThrows;
}
/**
* Returns a composed {@link ToLongBiFunction2} that applies this function to its input and nests the thrown {@link
* Throwable} from it. The {@code Throwable} is nested (wrapped) in a {@link ThrownByFunctionalInterfaceException},
* which is constructed from the thrown {@code Throwable}s message and the thrown {@code Throwable} itself.
*
* @return A composed {@link ToLongBiFunction2} that applies this function to its input and nests the thrown {@code
* Throwable} from it.
* @implNote If thrown {@code Throwable} is of type {@link Error} it is thrown as-is and thus not nested.
* @see #nest(Function)
* @see ThrownByFunctionalInterfaceException
*/
@Nonnull
default ToLongBiFunction2<T, U> nest() {
return nest(throwable -> new ThrownByFunctionalInterfaceException(throwable.getMessage(), throwable));
}
/**
* Returns a composed {@link ToLongBiFunction2} that applies this function to its input and nests the thrown {@link
* Throwable} from it using {@code mapper} operation. Thereby {@code mapper} may modify the thrown {@code
* Throwable}, regarding its implementation, and returns it nested (wrapped) in a {@link RuntimeException}.
*
* @param mapper The operation to map the thrown {@code Throwable} to {@code RuntimeException}
* @return A composed {@link ToLongBiFunction2} that applies this function to its input and nests the thrown {@code
* Throwable} from it using {@code mapper} operation.
* @throws NullPointerException If given argument is {@code null}
* @implNote If thrown {@code Throwable} is of type {@link Error} it is thrown as-is and thus not nested.
* @see #nest()
*/
@Nonnull
default ToLongBiFunction2<T, U> nest(
@Nonnull final Function<? super Throwable, ? extends RuntimeException> mapper) {
return recover(throwable -> {
throw mapper.apply(throwable);
});
}
/**
* Returns a composed {@link ToLongBiFunction2} that first applies this function to its input, and then applies the
* {@code recover} operation if a {@link Throwable} is thrown from this one. The {@code recover} operation is
* represented by a curried operation which is called with throwable information and same arguments of this
* function.
*
* @param recover The operation to apply if this function throws a {@code Throwable}
* @return A composed {@link ToLongBiFunction2} that first applies this function to its input, and then applies the
* {@code recover} operation if a {@code Throwable} is thrown from this one.
* @throws NullPointerException If given argument or the returned enclosing function is {@code null}
* @implSpec The implementation checks that the returned enclosing function from {@code recover} operation is not
* {@code null}. If it is, then a {@link NullPointerException} with appropriate message is thrown.
* @implNote If thrown {@code Throwable} is of type {@link Error}, it is thrown as-is and thus not passed to {@code
* recover} operation.
*/
@Nonnull
default ToLongBiFunction2<T, U> recover(
@Nonnull final Function<? super Throwable, ? extends ToLongBiFunction<? super T, ? super U>> recover) {
Objects.requireNonNull(recover);
return (t, u) -> {
try {
return this.applyAsLongThrows(t, u);
} catch (Error e) {
throw e;
} catch (Throwable throwable) {
final ToLongBiFunction<? super T, ? super U> function = recover.apply(throwable);
Objects.requireNonNull(function, () -> "recover returned null for " + throwable.getClass() + ": "
+ throwable.getMessage());
return function.applyAsLong(t, u);
}
};
}
/**
* Returns a composed {@link ToLongBiFunction2} that applies this function to its input and sneakily throws the
* thrown {@link Throwable} from it, if it is not of type {@link RuntimeException} or {@link Error}. This means that
* each throwable thrown from the returned composed function behaves exactly the same as an <em>unchecked</em>
* throwable does. As a result, there is no need to handle the throwable of this function in the returned composed
* function by either wrapping it in an <em>unchecked</em> throwable or to declare it in the {@code throws} clause,
* as it would be done in a non sneaky throwing function.
* <p>
* What sneaky throwing simply does, is to fake out the compiler and thus it bypasses the principle of
* <em>checked</em> throwables. On the JVM (class file) level, all throwables, checked or not, can be thrown
* regardless of the {@code throws} clause of methods, which is why this works at all.
* <p>
* However, when using this method to get a sneaky throwing function variant of this throwable function, the
* following advantages, disadvantages and limitations will apply:
* <p>
* If the calling-code is to handle the sneakily thrown throwable, it is required to add it to the {@code throws}
* clause of the method that applies the returned composed function. The compiler will not force the declaration in
* the {@code throws} clause anymore.
* <p>
* If the calling-code already handles the sneakily thrown throwable, the compiler requires it to be added to the
* {@code throws} clause of the method that applies the returned composed function. If not added, the compiler will
* error that the caught throwable is never thrown in the corresponding {@code try} block.
* <p>
* If the returned composed function is directly surrounded by a {@code try}-{@code catch} block to catch the
* sneakily thrown throwable from it, the compiler will error that the caught throwable is never thrown in the
* corresponding {@code try} block.
* <p>
* In any case, if the throwable is not added to the to the {@code throws} clause of the method that applies the
* returned composed function, the calling-code won't be able to catch the throwable by name. It will bubble and
* probably be caught in some {@code catch} statement, catching a base type such as {@code try { ... }
* catch(RuntimeException e) { ... }} or {@code try { ... } catch(Exception e) { ... }}, but perhaps this is
* intended.
* <p>
* When the called code never throws the specific throwable that it declares, it should obviously be omitted. For
* example: {@code new String(byteArr, "UTF-8") throws UnsupportedEncodingException}, but {@code UTF-8} is
* guaranteed by the Java specification to be always present. Here, the {@code throws} declaration is a nuisance and
* any solution to silence it with minimal boilerplate is welcome. The throwable should therefore be omitted in the
* {@code throws} clause of the method that applies the returned composed function.
* <p>
* With all that mentioned, the following example will demonstrate this methods correct use:
* <pre>{@code
* // when called with illegal value ClassNotFoundException is thrown
* public Class<?> sneakyThrowingFunctionalInterface(final String className) throws ClassNotFoundException {
* return ThrowableFunction.of(Class::forName) // create the correct throwable functional interface
* .sneakyThrow() // create a non-throwable variant which is able to sneaky throw (this method)
* .apply(className); // apply non-throwable variant -> may sneaky throw a throwable
* }
*
* // call the the method which surround the sneaky throwing functional interface
* public void callingMethod() {
* try {
* final Class<?> clazz = sneakyThrowingFunctionalInterface("some illegal class name");
* // ... do something with clazz ...
* } catch(ClassNotFoundException e) {
* // ... do something with e ...
* }
* }
* }</pre>
* In conclusion, this somewhat contentious ability should be used carefully, of course, with the advantages,
* disadvantages and limitations described above kept in mind.
*
* @return A composed {@link ToLongBiFunction2} that applies this function to its input and sneakily throws the
* thrown {@link Throwable} from it, unless it is of type {@link RuntimeException} or {@link Error}.
* @implNote If thrown {@link Throwable} is of type {@link RuntimeException} or {@link Error}, it is thrown as-is
* and thus not sneakily thrown.
*/
@Nonnull
default ToLongBiFunction2<T, U> sneakyThrow() {
return (t, u) -> {
try {
return this.applyAsLongThrows(t, u);
} catch (RuntimeException | Error e) {
throw e;
} catch (Throwable throwable) {
throw ThrowableUtils.sneakyThrow(throwable);
}
};
}
}
| |
/*******************************************************************************
* Copyright 2017 Capital One Services, LLC and Bitwise, Inc.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package hydrograph.ui.propertywindow.widgets.customwidgets.joinproperty;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.eclipse.jface.dialogs.Dialog;
import org.eclipse.jface.dialogs.IDialogConstants;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.ScrolledComposite;
import org.eclipse.swt.events.MouseAdapter;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Combo;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
import hydrograph.ui.common.util.Constants;
import hydrograph.ui.common.util.CustomColorRegistry;
import hydrograph.ui.common.util.ImagePathConstant;
import hydrograph.ui.datastructure.property.FilterProperties;
import hydrograph.ui.datastructure.property.JoinConfigProperty;
import hydrograph.ui.graph.model.Component;
import hydrograph.ui.propertywindow.propertydialog.PropertyDialogButtonBar;
import hydrograph.ui.propertywindow.widgets.dialogs.FieldDialog;
import hydrograph.ui.propertywindow.widgets.gridwidgets.basic.ELTSWTWidgets;
import hydrograph.ui.propertywindow.widgets.utility.SchemaSyncUtility;
public class ELTJoinConfigGrid extends Dialog {
    /** Number of input ports on the Join component; drives the number of grid rows. */
    private int inputPortValue;
    /** Allowed "Record Required" combo values; the selected index is stored on the property. */
    private List<String> ITEMS = Arrays.asList(Constants.TRUE, Constants.FALSE);
    /** Working copy of the configuration; committed to {@link #joinConfigPropertyList} only on OK. */
    private List<JoinConfigProperty> tempraryConfigPropertyList;
    /** Caller-owned configuration list, replaced in {@link #okPressed()}. */
    private List<JoinConfigProperty> joinConfigPropertyList;
    private PropertyDialogButtonBar propertyDialogButtonBar;
    private ELTSWTWidgets eltswtWidgets = new ELTSWTWidgets();
    private Label editLableAsButton;
    // NOTE(review): assigned via setPropagatedFieldProperty() but never read here — confirm before removing.
    private Map<String, List<String>> propagatedFiledNames;
    private Component component;
    /** Propagated source fields per input port, used to pre-populate the join-key field dialog. */
    private List<List<FilterProperties>> sourceFieldList;

    /**
     * Create the dialog.
     *
     * @param parentShell parent shell
     * @param propertyDialogButtonBar button bar whose Apply button gets enabled on edits
     * @param configProperty caller-owned join configuration; only mutated when OK is pressed
     * @param component the Join component whose input-port count property sizes the grid
     */
    public ELTJoinConfigGrid(Shell parentShell, PropertyDialogButtonBar propertyDialogButtonBar,
            List<JoinConfigProperty> configProperty, Component component) {
        super(parentShell);
        this.joinConfigPropertyList = configProperty;
        this.propertyDialogButtonBar = propertyDialogButtonBar;
        copyAll(configProperty);
        this.component = component;
    }

    /**
     * Deep-copies the given configuration into the temporary working list so that
     * Cancel leaves the caller's configuration untouched.
     */
    private List<JoinConfigProperty> copyAll(List<JoinConfigProperty> configProperty) {
        tempraryConfigPropertyList = new ArrayList<>();
        for (JoinConfigProperty joinConfigProperty : configProperty) {
            tempraryConfigPropertyList.add(new JoinConfigProperty(joinConfigProperty.getPortIndex(),
                    joinConfigProperty.getJoinKey(), joinConfigProperty.getRecordRequired()));
        }
        return tempraryConfigPropertyList;
    }

    /**
     * Create contents of the dialog: one row per input port showing the port index, a read-only
     * join-key text (edited through the field-selection dialog) and a "Record Required" combo.
     *
     * @param parent parent composite
     */
    @Override
    protected Control createDialogArea(Composite parent) {
        Composite container = (Composite) super.createDialogArea(parent);
        container.getShell().setText("Join Configuration");
        container.setLayout(new GridLayout(1, false));
        Composite composite_2 = new Composite(container, SWT.NONE);
        GridData gd_composite_2 = new GridData(SWT.LEFT, SWT.CENTER, false, false, 1, 1);
        gd_composite_2.heightHint = 16;
        gd_composite_2.widthHint = 400;
        composite_2.setLayoutData(gd_composite_2);
        Composite composite = new Composite(container, SWT.BORDER);
        GridData gd_composite = new GridData(SWT.LEFT, SWT.CENTER, false, false, 1, 1);
        gd_composite.heightHint = 212;
        gd_composite.widthHint = 546;
        composite.setLayoutData(gd_composite);
        ScrolledComposite scrolledComposite = new ScrolledComposite(composite, SWT.BORDER | SWT.H_SCROLL | SWT.V_SCROLL);
        scrolledComposite.setBounds(0, 0, 546, 212);
        scrolledComposite.setExpandHorizontal(true);
        scrolledComposite.setExpandVertical(true);
        Composite composite_1 = new Composite(scrolledComposite, SWT.NONE);
        // Header row.
        eltswtWidgets.textBoxWidget(composite_1, SWT.BORDER, new int[] { 0, 2, 142, 23 }, "PortIndex", false);
        eltswtWidgets.textBoxWidget(composite_1, SWT.BORDER, new int[] { 144, 2, 190, 23 }, "Join Key(s)", false);
        eltswtWidgets.textBoxWidget(composite_1, SWT.BORDER, new int[] { 337, 2, 205, 23 }, "Record Required", false);
        scrolledComposite.setContent(composite_1);
        String count = (String) component.getProperties().get(Constants.INPUT_PORT_COUNT_PROPERTY);
        inputPortValue = Integer.parseInt(count);
        if (tempraryConfigPropertyList != null && tempraryConfigPropertyList.isEmpty()) {
            for (int i = 0; i < inputPortValue; i++) {
                tempraryConfigPropertyList.add(new JoinConfigProperty());
            }
        }
        // Top up with empty rows when the port count grew since the configuration was saved.
        // (Fixed off-by-one: the old 'i <= inputPortValue' bound added one row too many.)
        if (inputPortValue > tempraryConfigPropertyList.size()) {
            for (int i = tempraryConfigPropertyList.size(); i < inputPortValue; i++) {
                tempraryConfigPropertyList.add(new JoinConfigProperty());
            }
        }
        for (int i = 0, j = 0; i < inputPortValue; i++, j++) {
            final JoinConfigProperty joinConfigProperty = tempraryConfigPropertyList.get(i);
            Text portIndex = eltswtWidgets.textBoxWidget(composite_1, SWT.BORDER, new int[] { 0, 28 + j, 142, 23 },
                    "in" + i, false);
            joinConfigProperty.setPortIndex("in" + i);
            final Text keyText = eltswtWidgets.textBoxWidget(composite_1, SWT.BORDER | SWT.READ_ONLY, new int[] { 144,
                    28 + j, 170, 23 }, "", false);
            keyText.setBackground(CustomColorRegistry.INSTANCE.getColorFromRegistry(255, 255, 255));
            // toArray(new String[0]) instead of '(String[]) ITEMS.toArray()': List.toArray()
            // returns Object[], and that cast throws ClassCastException on Java 9+ (JDK-6260652).
            Combo joinTypeCombo = eltswtWidgets.comboWidget(composite_1, SWT.BORDER,
                    new int[] { 337, 28 + j, 205, 23 }, ITEMS.toArray(new String[0]), 0);
            joinTypeCombo.addSelectionListener(new SelectionAdapter() {
                @Override
                public void widgetSelected(SelectionEvent e) {
                    String comboText = ((Combo) e.widget).getText();
                    joinConfigProperty.setRecordRequired(ITEMS.indexOf(comboText));
                    propertyDialogButtonBar.enableApplyButton(true);
                }
            });
            j = j + 26; // each row advances 27px in total (26 here + 1 from the loop increment)
            if (tempraryConfigPropertyList != null && !tempraryConfigPropertyList.isEmpty()) {
                populate(i, portIndex, keyText, joinTypeCombo);
            }
            editLableAsButton = new Label(composite_1, SWT.None);
            editLableAsButton.setBounds(317, 5 + j, 20, 20);
            editLableAsButton.setImage(ImagePathConstant.EDIT_BUTTON.getImageFromRegistry());
            editLableAsButton.addMouseListener(new MouseAdapter() {
                @Override
                public void mouseUp(MouseEvent e) {
                    keyText.setText(launchDialogToSelectFields(keyText.getText(), joinConfigProperty.getPortIndex()));
                    keyText.setToolTipText(keyText.getText());
                    joinConfigProperty.setJoinKey(keyText.getText());
                }
            });
            keyText.setToolTipText(keyText.getText());
        }
        scrolledComposite.setMinSize(composite_1.computeSize(SWT.DEFAULT, SWT.DEFAULT));
        return container;
    }

    /** Fills one grid row from the working copy at index {@code i}. */
    public void populate(int i, Text portIndex, Text keyText, Combo joinTypeCombo) {
        portIndex.setText(tempraryConfigPropertyList.get(i).getPortIndex());
        keyText.setText(tempraryConfigPropertyList.get(i).getJoinKey());
        joinTypeCombo.select(tempraryConfigPropertyList.get(i).getRecordRequired());
    }

    /**
     * Create contents of the button bar.
     *
     * @param parent parent composite
     */
    @Override
    protected void createButtonsForButtonBar(Composite parent) {
        createButton(parent, IDialogConstants.OK_ID, IDialogConstants.OK_LABEL, true);
        createButton(parent, IDialogConstants.CANCEL_ID, IDialogConstants.CANCEL_LABEL, false);
    }

    /** Return the initial size of the dialog. */
    @Override
    protected Point getInitialSize() {
        return new Point(565, 320);
    }

    /**
     * Opens the field-selection dialog pre-populated with the fields propagated to the given port.
     *
     * @param availableValues comma-separated join keys currently selected
     * @param socketId port id of the form "inN"
     * @return the selected join keys as a comma-separated string
     */
    private String launchDialogToSelectFields(String availableValues, String socketId) {
        String teminalNumber = socketId.substring("in".length()); // to get a port number removing 'in' from socketId
        FieldDialog fieldDialog = new FieldDialog(new Shell(), propertyDialogButtonBar);
        fieldDialog.setPropertyFromCommaSepratedString(availableValues);
        if (!sourceFieldList.isEmpty() && Integer.parseInt(teminalNumber) < sourceFieldList.size()) {
            fieldDialog.setSourceFieldsFromPropagatedSchema(SchemaSyncUtility.INSTANCE
                    .converterFilterPropertyListToStringList(sourceFieldList.get(Integer.parseInt(teminalNumber))));
        }
        fieldDialog.setComponentName(Constants.JOIN_KEYS_WINDOW_TITLE);
        fieldDialog.open();
        return fieldDialog.getResultAsCommaSeprated();
    }

    public void setPropagatedFieldProperty(Map<String, List<String>> propagatedFiledNames) {
        this.propagatedFiledNames = propagatedFiledNames;
    }

    public void setSourceFieldList(List<List<FilterProperties>> sourceFieldList) {
        this.sourceFieldList = sourceFieldList;
    }

    /** Commits the first {@code inputPortValue} working rows back into the caller's list. */
    @Override
    protected void okPressed() {
        joinConfigPropertyList.clear();
        joinConfigPropertyList.addAll(tempraryConfigPropertyList.subList(0, inputPortValue));
        super.okPressed();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.security;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.StringTokenizer;
import java.util.TreeSet;
import javax.security.auth.login.LoginException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableUtils;
/** An implementation of UserGroupInformation in the Unix system */
public class UnixUserGroupInformation extends UserGroupInformation {
public static final String DEFAULT_USERNAME = "DrWho";
public static final String DEFAULT_GROUP = "Tardis";
final static public String UGI_PROPERTY_NAME = "hadoop.job.ugi";
final static private HashMap<String, UnixUserGroupInformation> user2UGIMap =
new HashMap<String, UnixUserGroupInformation>();
/** Create an immutable {@link UnixUserGroupInformation} object. */
public static UnixUserGroupInformation createImmutable(String[] ugi) {
return new UnixUserGroupInformation(ugi) {
public void readFields(DataInput in) throws IOException {
throw new UnsupportedOperationException();
}
};
}
  // The user's login name.
  private String userName;
  // The user's group names; the first entry is the default group.
  private String[] groupNames;
  /** Default constructor
   */
  public UnixUserGroupInformation() {
  }
  /** Constructor with parameters user name and its group names.
   * The first entry in the groups list is the default group.
   *
   * @param userName a user's name
   * @param groupNames groups list, first of which is the default group
   * @throws IllegalArgumentException if any argument is null or empty,
   *            or any group name is null or empty
   */
  public UnixUserGroupInformation(String userName, String[] groupNames) {
    setUserGroupNames(userName, groupNames);
  }
/** Constructor with parameter user/group names
*
* @param ugi an array containing user/group names, the first
* element of which is the user name, the second of
* which is the default group name.
* @exception IllegalArgumentException if the array size is less than 2
* or any element is null.
*/
public UnixUserGroupInformation(String[] ugi) {
if (ugi==null || ugi.length < 2) {
throw new IllegalArgumentException( "Parameter does contain at least "+
"one user name and one group name");
}
String[] groupNames = new String[ugi.length-1];
System.arraycopy(ugi, 1, groupNames, 0, groupNames.length);
setUserGroupNames(ugi[0], groupNames);
}
/* Set this object's user name and group names
*
* @param userName a user's name
* @param groupNames groups list, the first of which is the default group
* @exception IllegalArgumentException if any argument is null
*/
private void setUserGroupNames(String userName, String[] groupNames) {
if (userName==null || userName.length()==0 ||
groupNames== null || groupNames.length==0) {
throw new IllegalArgumentException(
"Parameters should not be null or an empty string/array");
}
for (int i=0; i<groupNames.length; i++) {
if(groupNames[i] == null || groupNames[i].length() == 0) {
throw new IllegalArgumentException("A null group name at index " + i);
}
}
this.userName = userName;
this.groupNames = groupNames;
}
/** Return an array of group names
*/
public String[] getGroupNames() {
return groupNames;
}
/** Return the user's name
*/
public String getUserName() {
return userName;
}
/* The following two methods implements Writable interface */
final private static String UGI_TECHNOLOGY = "STRING_UGI";
/** Deserialize this object
* First check if this is a UGI in the string format.
* If no, throw an IOException; otherwise
* set this object's fields by reading them from the given data input
*
* @param in input stream
* @exception IOException is thrown if encounter any error when reading
*/
public void readFields(DataInput in) throws IOException {
// read UGI type first
String ugiType = Text.readString(in);
if (!UGI_TECHNOLOGY.equals(ugiType)) {
throw new IOException("Expect UGI prefix: " + UGI_TECHNOLOGY +
", but receive a prefix: " + ugiType);
}
// read this object
userName = Text.readString(in);
int numOfGroups = WritableUtils.readVInt(in);
groupNames = new String[numOfGroups];
for (int i = 0; i < numOfGroups; i++) {
groupNames[i] = Text.readString(in);
}
}
/** Serialize this object
* First write a string marking that this is a UGI in the string format,
* then write this object's serialized form to the given data output
*
* @param out output stream
* @exception IOException if encounter any error during writing
*/
public void write(DataOutput out) throws IOException {
// write a prefix indicating the type of UGI being written
Text.writeString(out, UGI_TECHNOLOGY);
// write this object
Text.writeString(out, userName);
WritableUtils.writeVInt(out, groupNames.length);
for (String groupName : groupNames) {
Text.writeString(out, groupName);
}
}
/* The following two methods deal with transferring UGI through conf.
* In this pass of implementation we store UGI as a string in conf.
* Later we may change it to be a more general approach that stores
* it as a byte array */
/** Store the given <code>ugi</code> as a comma separated string in
* <code>conf</code> as a property <code>attr</code>
*
* The String starts with the user name followed by the default group names,
* and other group names.
*
* @param conf configuration
* @param attr property name
* @param ugi a UnixUserGroupInformation
*/
public static void saveToConf(Configuration conf, String attr,
UnixUserGroupInformation ugi ) {
conf.set(attr, ugi.toString());
}
/** Read a UGI from the given <code>conf</code>
*
* The object is expected to store with the property name <code>attr</code>
* as a comma separated string that starts
* with the user name followed by group names.
* If the property name is not defined, return null.
* It's assumed that there is only one UGI per user. If this user already
* has a UGI in the ugi map, return the ugi in the map.
* Otherwise, construct a UGI from the configuration, store it in the
* ugi map and return it.
*
* @param conf configuration
* @param attr property name
* @return a UnixUGI
* @throws LoginException if the stored string is ill-formatted.
*/
public static UnixUserGroupInformation readFromConf(
Configuration conf, String attr) throws LoginException {
String[] ugi = conf.getStrings(attr);
if(ugi == null) {
return null;
}
UnixUserGroupInformation currentUGI = null;
if (ugi.length>0 ){
currentUGI = user2UGIMap.get(ugi[0]);
}
if (currentUGI == null) {
try {
currentUGI = new UnixUserGroupInformation(ugi);
user2UGIMap.put(currentUGI.getUserName(), currentUGI);
} catch (IllegalArgumentException e) {
throw new LoginException("Login failed: "+e.getMessage());
}
}
return currentUGI;
}
/**
* Get current user's name and the names of all its groups from Unix.
* It's assumed that there is only one UGI per user. If this user already
* has a UGI in the ugi map, return the ugi in the map.
* Otherwise get the current user's information from Unix, store it
* in the map, and return it.
*
* If the current user's UNIX username or groups are configured in such a way
* to throw an Exception, for example if the user uses LDAP, then this method
* will use a the {@link #DEFAULT_USERNAME} and {@link #DEFAULT_GROUP}
* constants.
*/
public static UnixUserGroupInformation login() throws LoginException {
try {
String userName;
// if an exception occurs, then uses the
// default user
try {
userName = getUnixUserName();
} catch (Exception e) {
userName = DEFAULT_USERNAME;
}
// check if this user already has a UGI object in the ugi map
UnixUserGroupInformation ugi = user2UGIMap.get(userName);
if (ugi != null) {
return ugi;
}
/* get groups list from UNIX.
* It's assumed that the first group is the default group.
*/
String[] groupNames;
// if an exception occurs, then uses the
// default group
try {
groupNames = getUnixGroups();
} catch (Exception e) {
groupNames = new String[1];
groupNames[0] = DEFAULT_GROUP;
}
// construct a Unix UGI
ugi = new UnixUserGroupInformation(userName, groupNames);
user2UGIMap.put(ugi.getUserName(), ugi);
return ugi;
} catch (Exception e) {
throw new LoginException("Login failed: "+e.getMessage());
}
}
/** Equivalent to login(conf, false). */
public static UnixUserGroupInformation login(Configuration conf)
throws LoginException {
return login(conf, false);
}
/** Get a user's name & its group names from the given configuration;
* If it is not defined in the configuration, get the current user's
* information from Unix.
* If the user has a UGI in the ugi map, return the one in
* the UGI map.
*
* @param conf either a job configuration or client's configuration
* @param save saving it to conf?
* @return UnixUserGroupInformation a user/group information
* @exception LoginException if not able to get the user/group information
*/
public static UnixUserGroupInformation login(Configuration conf, boolean save
) throws LoginException {
UnixUserGroupInformation ugi = readFromConf(conf, UGI_PROPERTY_NAME);
if (ugi == null) {
ugi = login();
LOG.debug("Unix Login: " + ugi);
if (save) {
saveToConf(conf, UGI_PROPERTY_NAME, ugi);
}
}
return ugi;
}
/* Return a string representation of a string array.
* Two strings are separated by a blank.
*/
private static String toString(String[] strArray) {
if (strArray==null || strArray.length==0) {
return "";
}
StringBuilder buf = new StringBuilder(strArray[0]);
for (int i=1; i<strArray.length; i++) {
buf.append(' ');
buf.append(strArray[i]);
}
return buf.toString();
}
/** Get current user's name from Unix by running the command whoami.
*
* @return current user's name
* @throws IOException if encounter any error while running the command
*/
static String getUnixUserName() throws IOException {
String[] result = executeShellCommand(
new String[]{Shell.USER_NAME_COMMAND});
if (result.length!=1) {
throw new IOException("Expect one token as the result of " +
Shell.USER_NAME_COMMAND + ": " + toString(result));
}
return result[0];
}
/** Get the current user's group list from Unix by running the command groups
*
* @return the groups list that the current user belongs to
* @throws IOException if encounter any error when running the command
*/
private static String[] getUnixGroups() throws IOException {
return executeShellCommand(Shell.getGROUPS_COMMAND());
}
/* Execute a command and return the result as an array of Strings */
private static String[] executeShellCommand(String[] command)
throws IOException {
String groups = Shell.execCommand(command);
StringTokenizer tokenizer = new StringTokenizer(groups);
int numOfTokens = tokenizer.countTokens();
String[] tokens = new String[numOfTokens];
for (int i=0; tokenizer.hasMoreTokens(); i++) {
tokens[i] = tokenizer.nextToken();
}
return tokens;
}
/** Decide if two UGIs are the same
*
* @param other other object
* @return true if they are the same; false otherwise.
*/
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (!(other instanceof UnixUserGroupInformation)) {
return false;
}
UnixUserGroupInformation otherUGI = (UnixUserGroupInformation)other;
// check userName
if (userName == null) {
if (otherUGI.getUserName() != null) {
return false;
}
} else {
if (!userName.equals(otherUGI.getUserName())) {
return false;
}
}
// checkGroupNames
if (groupNames == otherUGI.groupNames) {
return true;
}
if (groupNames.length != otherUGI.groupNames.length) {
return false;
}
// check default group name
if (groupNames.length>0 && !groupNames[0].equals(otherUGI.groupNames[0])) {
return false;
}
// check all group names, ignoring the order
return new TreeSet<String>(Arrays.asList(groupNames)).equals(
new TreeSet<String>(Arrays.asList(otherUGI.groupNames)));
}
/** Returns a hash code for this UGI.
* The hash code for a UGI is the hash code of its user name string.
*
* @return a hash code value for this UGI.
*/
public int hashCode() {
return getUserName().hashCode();
}
/** Convert this object to a string
*
* @return a comma separated string containing the user name and group names
*/
public String toString() {
StringBuilder buf = new StringBuilder();
buf.append(userName);
for (String groupName : groupNames) {
buf.append(',');
buf.append(groupName);
}
return buf.toString();
}
public String getName() {
return toString();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.partitioned.rebalance;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.junit.Before;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.internal.cache.PartitionedRegion;
import org.apache.geode.internal.cache.control.PartitionRebalanceDetailsImpl;
import org.apache.geode.internal.cache.control.ResourceManagerStats;
import org.apache.geode.internal.cache.partitioned.rebalance.BucketOperator.Completion;
public class BucketOperatorWrapperTest {

  private ResourceManagerStats stats;
  private PartitionedRegion leaderRegion;
  private PartitionedRegion colocatedRegion;
  private Set<PartitionRebalanceDetailsImpl> rebalanceDetails;
  private BucketOperatorWrapper wrapper;
  private BucketOperatorImpl delegate;

  // per-region byte counts handed to every bucket operation
  private Map<String, Long> colocatedRegionBytes;
  private int bucketId = 1;
  private InternalDistributedMember sourceMember, targetMember;

  private static final String PR_LEADER_REGION_NAME = "leadregion1";
  private static final String PR_COLOCATED_REGION_NAME = "coloregion1";

  @Before
  public void setUp() throws UnknownHostException {
    colocatedRegionBytes = new HashMap<String, Long>();
    colocatedRegionBytes.put(PR_LEADER_REGION_NAME, 100L);
    colocatedRegionBytes.put(PR_COLOCATED_REGION_NAME, 50L);

    sourceMember = new InternalDistributedMember(InetAddress.getByName("127.0.0.1"), 1);
    targetMember = new InternalDistributedMember(InetAddress.getByName("127.0.0.2"), 1);

    stats = mock(ResourceManagerStats.class);
    doNothing().when(stats).startBucketCreate(anyInt());
    doNothing().when(stats).endBucketCreate(anyInt(), anyBoolean(), anyLong(), anyLong());

    leaderRegion = mock(PartitionedRegion.class);
    doReturn(PR_LEADER_REGION_NAME).when(leaderRegion).getFullPath();
    colocatedRegion = mock(PartitionedRegion.class);
    doReturn(PR_COLOCATED_REGION_NAME).when(colocatedRegion).getFullPath();

    rebalanceDetails = new HashSet<PartitionRebalanceDetailsImpl>();
    // spied so interaction counts (incCreates/incTransfers/...) can be verified
    PartitionRebalanceDetailsImpl details = spy(new PartitionRebalanceDetailsImpl(leaderRegion));
    rebalanceDetails.add(details);

    delegate = mock(BucketOperatorImpl.class);
    wrapper = new BucketOperatorWrapper(delegate, rebalanceDetails, stats, leaderRegion);
  }

  /**
   * Returns an {@link Answer} that fires the {@link Completion} callback
   * which {@code BucketOperatorWrapper} passes as the 4th argument of
   * {@code createRedundantBucket}. Replaces four duplicated anonymous
   * Answer classes in the tests below.
   *
   * @param success true to invoke {@code onSuccess}, false for {@code onFailure}
   */
  private Answer<Object> completeWith(final boolean success) {
    return new Answer<Object>() {
      @Override
      public Object answer(InvocationOnMock invocation) {
        // index 3 is the Completion sent to BucketOperatorImpl.createRedundantBucket
        Completion completion = (Completion) invocation.getArguments()[3];
        if (success) {
          completion.onSuccess();
        } else {
          completion.onFailure();
        }
        return null;
      }
    };
  }

  @Test
  public void bucketWrapperShouldDelegateCreateBucketToEnclosedOperator() {
    Completion completionSentToWrapper = mock(Completion.class);
    doNothing().when(delegate).createRedundantBucket(targetMember, bucketId, colocatedRegionBytes,
        completionSentToWrapper);

    wrapper.createRedundantBucket(targetMember, bucketId, colocatedRegionBytes,
        completionSentToWrapper);

    verify(delegate, times(1)).createRedundantBucket(eq(targetMember), eq(bucketId),
        eq(colocatedRegionBytes), any(Completion.class));
  }

  @Test
  public void bucketWrapperShouldRecordNumberOfBucketsCreatedIfCreateBucketSucceeds() {
    doAnswer(completeWith(true)).when(delegate).createRedundantBucket(eq(targetMember),
        eq(bucketId), eq(colocatedRegionBytes), any(Completion.class));

    Completion completionSentToWrapper = mock(Completion.class);
    wrapper.createRedundantBucket(targetMember, bucketId, colocatedRegionBytes,
        completionSentToWrapper);

    // verify create buckets is recorded
    for (PartitionRebalanceDetailsImpl details : rebalanceDetails) {
      if (details.getRegionPath().equalsIgnoreCase(PR_LEADER_REGION_NAME))
        verify(details, times(1)).incCreates(eq(colocatedRegionBytes.get(PR_LEADER_REGION_NAME)),
            anyLong());
      else if (details.getRegionPath().equals(PR_COLOCATED_REGION_NAME))
        verify(details, times(1)).incTransfers(colocatedRegionBytes.get(PR_COLOCATED_REGION_NAME),
            0); // elapsed is recorded only if its leader
    }
  }

  @Test
  public void bucketWrapperShouldNotRecordNumberOfBucketsCreatedIfCreateBucketFails() {
    doAnswer(completeWith(false)).when(delegate).createRedundantBucket(eq(targetMember),
        eq(bucketId), eq(colocatedRegionBytes), any(Completion.class));

    Completion completionSentToWrapper = mock(Completion.class);
    wrapper.createRedundantBucket(targetMember, bucketId, colocatedRegionBytes,
        completionSentToWrapper);

    // verify create buckets is not recorded
    for (PartitionRebalanceDetailsImpl details : rebalanceDetails) {
      verify(details, times(0)).incTransfers(anyLong(), anyLong());
    }
  }

  @Test
  public void bucketWrapperShouldInvokeOnFailureWhenCreateBucketFails() {
    doAnswer(completeWith(false)).when(delegate).createRedundantBucket(eq(targetMember),
        eq(bucketId), eq(colocatedRegionBytes), any(Completion.class));

    Completion completionSentToWrapper = mock(Completion.class);
    wrapper.createRedundantBucket(targetMember, bucketId, colocatedRegionBytes,
        completionSentToWrapper);

    // verify onFailure is invoked
    verify(completionSentToWrapper, times(1)).onFailure();
  }

  @Test
  public void bucketWrapperShouldInvokeOnSuccessWhenCreateBucketSucceeds() {
    doAnswer(completeWith(true)).when(delegate).createRedundantBucket(eq(targetMember),
        eq(bucketId), eq(colocatedRegionBytes), any(Completion.class));

    Completion completionSentToWrapper = mock(Completion.class);
    wrapper.createRedundantBucket(targetMember, bucketId, colocatedRegionBytes,
        completionSentToWrapper);

    verify(completionSentToWrapper, times(1)).onSuccess();
  }

  @Test
  public void bucketWrapperShouldDelegateMoveBucketToEnclosedOperator() {
    doReturn(true).when(delegate).moveBucket(sourceMember, targetMember, bucketId,
        colocatedRegionBytes);

    wrapper.moveBucket(sourceMember, targetMember, bucketId, colocatedRegionBytes);

    // verify the delegate is invoked
    verify(delegate, times(1)).moveBucket(sourceMember, targetMember, bucketId,
        colocatedRegionBytes);

    // verify we recorded necessary stats
    verify(stats, times(1)).startBucketTransfer(anyInt());
    verify(stats, times(1)).endBucketTransfer(anyInt(), anyBoolean(), anyLong(), anyLong());
  }

  @Test
  public void bucketWrapperShouldRecordBytesTransferredPerRegionAfterMoveBucketIsSuccessful() {
    doReturn(true).when(delegate).moveBucket(sourceMember, targetMember, bucketId,
        colocatedRegionBytes);

    wrapper.moveBucket(sourceMember, targetMember, bucketId, colocatedRegionBytes);

    // verify the details is updated with bytes transfered
    for (PartitionRebalanceDetailsImpl details : rebalanceDetails) {
      if (details.getRegionPath().equalsIgnoreCase(PR_LEADER_REGION_NAME))
        verify(details, times(1)).incTransfers(eq(colocatedRegionBytes.get(PR_LEADER_REGION_NAME)),
            anyLong());
      else if (details.getRegionPath().equals(PR_COLOCATED_REGION_NAME))
        verify(details, times(1)).incTransfers(colocatedRegionBytes.get(PR_COLOCATED_REGION_NAME),
            0); // elapsed is recorded only if its leader
    }

    // verify we recorded necessary stats
    verify(stats, times(1)).startBucketTransfer(anyInt());
    verify(stats, times(1)).endBucketTransfer(anyInt(), anyBoolean(), anyLong(), anyLong());
  }

  @Test
  public void bucketWrapperShouldDoNotRecordBytesTransferedIfMoveBucketFails() {
    doReturn(false).when(delegate).moveBucket(sourceMember, targetMember, bucketId,
        colocatedRegionBytes);

    wrapper.moveBucket(sourceMember, targetMember, bucketId, colocatedRegionBytes);

    // verify the details is not updated with bytes transfered
    for (PartitionRebalanceDetailsImpl details : rebalanceDetails) {
      verify(details, times(0)).incTransfers(anyLong(), anyLong());
    }

    // verify we recorded necessary stats
    verify(stats, times(1)).startBucketTransfer(anyInt());
    verify(stats, times(1)).endBucketTransfer(anyInt(), anyBoolean(), anyLong(), anyLong());
  }

  @Test
  public void bucketWrapperShouldDelegateRemoveBucketToEnclosedOperator() {
    wrapper.removeBucket(targetMember, bucketId, colocatedRegionBytes);

    // verify the delegate is invoked
    verify(delegate, times(1)).removeBucket(targetMember, bucketId, colocatedRegionBytes);

    // verify we recorded necessary stats
    verify(stats, times(1)).startBucketRemove(anyInt());
    verify(stats, times(1)).endBucketRemove(anyInt(), anyBoolean(), anyLong(), anyLong());
  }

  @Test
  public void bucketWrapperShouldRecordBucketRemovesPerRegionAfterRemoveBucketIsSuccessful() {
    doReturn(true).when(delegate).removeBucket(targetMember, bucketId, colocatedRegionBytes);

    wrapper.removeBucket(targetMember, bucketId, colocatedRegionBytes);

    // verify the details is updated with bytes removed
    for (PartitionRebalanceDetailsImpl details : rebalanceDetails) {
      if (details.getRegionPath().equalsIgnoreCase(PR_LEADER_REGION_NAME))
        verify(details, times(1)).incRemoves((eq(colocatedRegionBytes.get(PR_LEADER_REGION_NAME))),
            anyLong());
      else if (details.getRegionPath().equals(PR_COLOCATED_REGION_NAME))
        // elapsed is recorded only if its leader
        verify(details, times(1)).incRemoves(colocatedRegionBytes.get(PR_COLOCATED_REGION_NAME), 0);
    }

    // verify we recorded necessary stats
    verify(stats, times(1)).startBucketRemove(anyInt());
    verify(stats, times(1)).endBucketRemove(anyInt(), anyBoolean(), anyLong(), anyLong());
  }

  @Test
  public void bucketWrapperShouldDoNotRecordBucketRemovesIfMoveBucketFails() {
    doReturn(false).when(delegate).removeBucket(targetMember, bucketId, colocatedRegionBytes);

    wrapper.removeBucket(targetMember, bucketId, colocatedRegionBytes);

    // verify the details is not updated with bytes transfered
    for (PartitionRebalanceDetailsImpl details : rebalanceDetails) {
      verify(details, times(0)).incTransfers(anyLong(), anyLong());
    }

    // verify we recorded necessary stats
    verify(stats, times(1)).startBucketRemove(anyInt());
    verify(stats, times(1)).endBucketRemove(anyInt(), anyBoolean(), anyLong(), anyLong());
  }

  @Test
  public void bucketWrapperShouldDelegateMovePrimaryToEnclosedOperator() {
    wrapper.movePrimary(sourceMember, targetMember, bucketId);

    // verify the delegate is invoked
    verify(delegate, times(1)).movePrimary(sourceMember, targetMember, bucketId);

    // verify we recorded necessary stats
    verify(stats, times(1)).startPrimaryTransfer(anyInt());
    verify(stats, times(1)).endPrimaryTransfer(anyInt(), anyBoolean(), anyLong());
  }

  @Test
  public void bucketWrapperShouldRecordPrimaryTransfersPerRegionAfterMovePrimaryIsSuccessful() {
    doReturn(true).when(delegate).movePrimary(sourceMember, targetMember, bucketId);

    wrapper.movePrimary(sourceMember, targetMember, bucketId);

    // verify the details is updated with the primary transfer
    for (PartitionRebalanceDetailsImpl details : rebalanceDetails) {
      if (details.getRegionPath().equalsIgnoreCase(PR_LEADER_REGION_NAME))
        verify(details, times(1)).incPrimaryTransfers(anyLong());
      else if (details.getRegionPath().equals(PR_COLOCATED_REGION_NAME))
        verify(details, times(1)).incPrimaryTransfers(0); // elapsed is recorded only if its leader
    }

    // verify we recorded necessary stats
    verify(stats, times(1)).startPrimaryTransfer(anyInt());
    verify(stats, times(1)).endPrimaryTransfer(anyInt(), anyBoolean(), anyLong());
  }

  @Test
  public void bucketWrapperShouldNotRecordPrimaryTransfersPerRegionAfterMovePrimaryFails() {
    doReturn(false).when(delegate).movePrimary(sourceMember, targetMember, bucketId);

    wrapper.movePrimary(sourceMember, targetMember, bucketId);

    // verify the details is not updated
    for (PartitionRebalanceDetailsImpl details : rebalanceDetails) {
      verify(details, times(0)).incTransfers(anyLong(), anyLong());
    }

    // verify we recorded necessary stats
    verify(stats, times(1)).startPrimaryTransfer(anyInt());
    verify(stats, times(1)).endPrimaryTransfer(anyInt(), anyBoolean(), anyLong());
  }
}
| |
import processing.core.*;
import java.io.PrintWriter;
import ddf.minim.*;
@SuppressWarnings("serial")
public class StayingAlive extends PApplet {
static public void main(String[] args) {
PApplet.main("StayingAlive");
}
Minim minim;
AudioPlayer sou;
PFont font;
PrintWriter output;
int speed = 20;
int mode = 0;
int innerMode = 2;
int speeded = 2;
int speededS = 20;
int score = 50;
sprite mysprite;
//cloud mycloud;
tree mytree;
flowers myflowers;
bird mybird;
bird mybird1;
bird2 mybird2;
bird2 mybird21;
bird3 mybird3;
bird3 mybird32;
bird3 mybird33;
bird3 mybird34;
sprite2 mysprite2;
car mycar;
car2 mycar2;
car3 mycar3;
public void setup() {
// Load a soundfile from the /data folder of the sketch and play it back
minim = new Minim(this);
sou = minim.loadFile("sample.mp3");
sou.loop();
//loading font
font = loadFont("snap40.vlw");
textFont(font);
output = createWriter("score.txt");
size(650, 300);
//fullScreen();
orientation(LANDSCAPE);
background();
smooth();
float groundlevel = height-(height/7);
float stageZone = (height*.75f);
mysprite= new sprite(this, (width/10f), groundlevel, width/15);
//mycloud = new cloud(0, (width/12.5), 0.1);
mytree = new tree(this, 0);
myflowers = new flowers(this, 0, stageZone);
mybird = new bird(this, 0, (height/5), (width/130), (width/20));
mybird1 = new bird(this, (width/2), (height/15), (width/160), (width/20));
mybird2 = new bird2(this, (width/2), (height/7.5f), (width/160), (width/20), 100);
mybird21 = new bird2(this, 0, (height/15), (width/130), (width/20), 200);
//float _xcent, float _ycent, float _speed, float _wbird
mybird3 = new bird3(this, random(0, width), 0, (width/190), (width/20), 200);
mybird32 = new bird3(this, random(0, width), 0, (width/150), (width/20), 200);
mybird33 = new bird3(this, random(0, width), 0, (width/170), (width/20), 200);
mybird34 = new bird3(this, random(0, width), 0, (width/210), (width/20), 200);
mysprite2= new sprite2(this, (width/2f), height/1.2f, (width/22.5f));
mycar = new car(this, width/1.2f, (height/3), (width/6));//red
mycar3 = new car3(this, width/2, (height/5), (width/8), 100);//yellow
mycar2 = new car2(this, width/5, (height/10), (width/10), (200));//green
}
public void draw() {
switch(mode){
case 0: {
background();
homepage();
}
break;
case 1: {
background();
mytree.display();
myflowers.display();
mysprite.display();
mybird.display();
mybird.move();
mybird1.display();
mybird1.move();
mybird2.display();
mybird2.move();
mybird21.display();
mybird21.move();
mybird3.display();
mybird3.move();
fill(247, 115, 232);
text("Lives:" +score/10, width-150, 20);
if(mysprite.startPosX > width){
String lines[] = loadStrings("Win.txt");
for (int i = 0; i < lines.length; i++) {
fill(250, 168, 3);
text(lines[i], (width/4), (height/3), (width/2), (height/2)); // Text wraps within text box
}
score += 1;
noLoop();
}
if(dist( mysprite.startPosX, mysprite.startPosY, mybird3.xcent, mybird3.ycent) < (mysprite.wHead + mybird3.wbird/2) ){
lose();
}
if(score < 1){
String lines[] = loadStrings("Lose.txt");
for (int i = 0; i < lines.length; i++) {
fill(250, 168, 3);
text(lines[i], (width/4), (height/3), (width/2), (height/2)); // Text wraps within text box
}
lose();
noLoop();
}
}
break;
case 2: {
background();
mytree.display();
myflowers.display();
mysprite.display();
mybird.display();
mybird.move();
mybird1.display();
mybird1.move();
mybird2.display();
mybird2.move();
mybird21.display();
mybird21.move();
mybird3.display();
mybird3.move();
mybird32.display();
mybird32.move();
fill(247, 115, 232);
text("Lives:" +score/10, width-150, 20);
if(mysprite.startPosX > width){
String lines[] = loadStrings("Win.txt");
for (int i = 0; i < lines.length; i++) {
fill(250, 168, 3);
text(lines[i], (width/4), (height/3), (width/2), (height/2)); // Text wraps within text box
}
score += 1;
noLoop();
}
if(dist( mysprite.startPosX, mysprite.startPosY, mybird3.xcent, mybird3.ycent) < mysprite.wHead + mybird3.wbird/2 ){
lose();
}
if(dist( mysprite.startPosX, mysprite.startPosY, mybird32.xcent, mybird32.ycent) < mysprite.wHead + mybird32.wbird/2 ){
lose();
}
if(score < 1){
String lines[] = loadStrings("Lose.txt");
for (int i = 0; i < lines.length; i++) {
fill(250, 168, 3);
text(lines[i], (width/4), (height/3), (width/2), (height/2)); // Text wraps within text box
}
lose();
noLoop();
}
}
break;
case 3: {
background();
mytree.display();
myflowers.display();
mysprite.display();
mybird.display();
mybird.move();
mybird1.display();
mybird1.move();
mybird2.display();
mybird2.move();
mybird21.display();
mybird21.move();
mybird3.display();
mybird3.move();
mybird32.display();
mybird32.move();
mybird33.display();
mybird33.move();
fill(247, 115, 232);
text("Lives:" +score/10, width-150, 20);
if(mysprite.startPosX > width){
String lines[] = loadStrings("Win.txt");
for (int i = 0; i < lines.length; i++) {
fill(250, 168, 3);
text(lines[i], (width/4), (height/3), (width/2), (height/2)); // Text wraps within text box
}
score += 1;
noLoop();
}
if(dist( mysprite.startPosX, mysprite.startPosY, mybird3.xcent, mybird3.ycent) < mysprite.wHead + mybird3.wbird/2 ){
lose();
}
if(dist( mysprite.startPosX, mysprite.startPosY, mybird32.xcent, mybird32.ycent) < mysprite.wHead + mybird32.wbird/2 ){
lose();
}
if(dist( mysprite.startPosX, mysprite.startPosY, mybird33.xcent, mybird33.ycent) < mysprite.wHead + mybird33.wbird/2 ){
lose();
}
if(score < 1){
String lines[] = loadStrings("Lose.txt");
for (int i = 0; i < lines.length; i++) {
fill(250, 168, 3);
text(lines[i], (width/4), (height/3), (width/2), (height/2)); // Text wraps within text box
}
lose();
noLoop();
}
}
break;
case 4:
{
background2();
mysprite2.display();
mycar.display();
mycar.move();
fill(255);
text("Lives:" +score/10, width-150, 20);
if(mysprite2.startPosY < 0){
String lines[] = loadStrings("Win.txt");
for (int i = 0; i < lines.length; i++) {
fill(250, 168, 3);
text(lines[i], (width/4), (height/3), (width/2), (height/2)); // Text wraps within text box
}
score +=1;
noLoop();
}
if(dist( mysprite2.startPosX, mysprite2.startPosY, mycar.xPos, mycar.yPos) < mysprite2.wHead + mycar.cWidth/2 ){
lose();
}
if(score < 1){
String lines[] = loadStrings("Lose.txt");
for (int i = 0; i < lines.length; i++) {
fill(250, 168, 3);
text(lines[i], (width/4), (height/3), (width/2), (height/2)); // Text wraps within text box
}
lose();
noLoop();
}
}
break;
case 5:
{
background2();
mysprite2.display();
mycar.display();
mycar.move();
mycar2.display();
mycar2.move();
fill(255);
text("Lives:" +score/10, width-150, 20);
if(mysprite2.startPosY < 0){
String lines[] = loadStrings("Win.txt");
for (int i = 0; i < lines.length; i++) {
fill(250, 168, 3);
text(lines[i], (width/4), (height/3), (width/2), (height/2)); // Text wraps within text box
}
score += 1;
noLoop();
}
if(dist( mysprite2.startPosX, mysprite2.startPosY, mycar.xPos, mycar.yPos) < mysprite2.wHead + mycar.cWidth/2 ){
lose();
}
if(dist( mysprite2.startPosX, mysprite2.startPosY, mycar2.xPos, mycar2.yPos) < mysprite2.wHead + mycar2.cWidth/2 ){
lose();
}
if(score < 1){
String lines[] = loadStrings("Lose.txt");
for (int i = 0; i < lines.length; i++) {
fill(250, 168, 3);
text(lines[i], (width/4), (height/3), (width/2), (height/2)); // Text wraps within text box
}
lose();
noLoop();
}
}
break;
case 6:
{
background2();
mysprite2.display();
mycar.display();
mycar.move();
mycar2.display();
mycar2.move();
mycar3.display();
mycar3.move();
fill(255);
text("Lives:" +score/10, width-150, 20);
if(mysprite2.startPosY < 0){
String lines[] = loadStrings("Win.txt");
for (int i = 0; i < lines.length; i++) {
fill(250, 168, 3);
text(lines[i], (width/4), (height/3), (width/2), (height/2)); // Text wraps within text box
}
score += 1;
noLoop();
}
if(dist( mysprite2.startPosX, mysprite2.startPosY, mycar.xPos, mycar.yPos) < mysprite2.wHead + mycar.cWidth/2 ){
lose();
}
if(dist( mysprite2.startPosX, mysprite2.startPosY, mycar2.xPos, mycar2.yPos) < mysprite2.wHead + mycar2.cWidth/2 ){
lose();
}
if(dist( mysprite2.startPosX, mysprite2.startPosY, mycar3.xPos, mycar3.yPos) < mysprite2.wHead + mycar3.cWidth/2 ){
lose();
}
if(score < 1){
String lines[] = loadStrings("Lose.txt");
for (int i = 0; i < lines.length; i++) {
fill(250, 168, 3);
text(lines[i], (width/4), (height/3), (width/2), (height/2)); // Text wraps within text box
}
lose();
noLoop();
}
}
break;
default:
{
background();
}
break;
}//end switch
}
public void background() {
    // Night-sky gradient: each horizontal scan line's blue channel equals
    // its own y coordinate, so the screen fades from black to blue.
    final int red = 0;
    final int green = 0;
    for (int y = 0; y < height; y++) {
        stroke(red, green, y);
        line(0, y, width, y);
        noStroke();
    }
    // Green rectangle across the bottom quarter of the screen is the stage.
    float stageTop = height * .75f;
    fill(78, 113, 66);
    rect(0, stageTop, width, (float) (height * .25));
}
public void background2() {
    // Road-like backdrop: black screen with a column of white bars that get
    // wider further down the screen.
    background(0);
    rectMode(CENTER);
    fill(255);
    noStroke();
    rectMode(CENTER); // kept from the original (redundant, but harmless)
    for (float y = 0; y < height; y += 40) {
        // Bar width grows with y, giving a simple perspective effect.
        rect(width / 2, 1 + y, (width / 10) + y, 15);
    }
}
public void homepage() {
    // Show the welcome text, one stored line at a time, wrapped in a text box.
    String[] welcome = loadStrings("Welcome.txt");
    for (String line : welcome) {
        fill(250, 168, 3);
        text(line, (width / 6), (height / 10), (width / 1.4f), (height / 1.2f)); // Text wraps within text box
    }
}
public void lose() {
    // The player was hit: drop a life, refresh the HUD and log the score.
    println("noooo");
    score -= 1;
    println(score);
    String hud = "Lives:" + score;
    text(hud, width - 150, 20);
    output.println("Score:" + (score / 10));
}
public void win() {
    // The player scored: add a life, refresh the HUD and log the score.
    println("yessssss");
    score += 1;
    println(score);
    String hud = "Lives:" + score;
    text(hud, width - 150, 20);
    output.println("Score:" + (score / 10));
}
public void keyPressed() { // Called once for every key press.
    loop(); // Re-enable draw() in case a win/lose screen called noLoop().
    if (key == 's') {
        // Save-and-quit: persist the score file before leaving.
        output.flush(); // Writes the remaining data to the file
        output.close(); // Finishes the file
        exit();         // Stops the program
    }
    if (key == CODED) {
        // Arrow keys steer the two player sprites.
        switch (keyCode) {
            case LEFT:
                mysprite.startPosX -= speed;
                break;
            case RIGHT:
                mysprite.startPosX += speed;
                break;
        }
        switch (keyCode) {
            case UP:
                mysprite2.startPosY -= speededS;
                break;
            case DOWN:
                mysprite2.startPosY += speededS;
                break;
        }
    }
    // Digit keys select the game mode...
    if (key >= '0' && key <= '6') {
        mode = key - '0';
    }
    // ...and reset the relevant sprite for that mode.
    switch (key) {
        case '0':
            mysprite.startPosX = 0;
            score = 50; // Fresh game: restore the starting score.
            break;
        case '1':
        case '2':
        case '3':
            mysprite.startPosX = 0;
            break;
        case '4':
        case '5':
        case '6':
            mysprite2.startPosY = height;
            break;
    }
} // End of keyPressed()
}
| |
/*
*
* * Copyright 2005-2015 Red Hat, Inc.
* * Red Hat licenses this file to you under the Apache License, version
* * 2.0 (the "License"); you may not use this file except in compliance
* * with the License. You may obtain a copy of the License at
* * http://www.apache.org/licenses/LICENSE-2.0
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* * implied. See the License for the specific language governing
* * permissions and limitations under the License.
*
*/
package io.fabric8.mq.multiplexer;
import io.fabric8.mq.AsyncExecutors;
import io.fabric8.mq.model.DestinationStatisticsMBean;
import io.fabric8.mq.model.InboundConnection;
import io.fabric8.mq.model.Model;
import io.fabric8.mq.util.LRUCache;
import io.fabric8.mq.util.TransportConnectionStateRegister;
import org.apache.activemq.advisory.AdvisorySupport;
import org.apache.activemq.command.*;
import org.apache.activemq.state.CommandVisitor;
import org.apache.activemq.state.ConsumerState;
import org.apache.activemq.state.ProducerState;
import org.apache.activemq.transport.DefaultTransportListener;
import org.apache.activemq.transport.Transport;
import org.apache.activemq.transport.TransportListener;
import org.apache.activemq.transport.TransportSupport;
import org.apache.activemq.util.ServiceStopper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
/**
 * Inbound half of one multiplexed client connection.
 * <p>
 * Each client transport attached to the {@link Multiplexer} is wrapped in a
 * {@code MultiplexerInput}. Commands arriving from the client are visited
 * (this class implements {@link CommandVisitor}) and rewritten so that
 * session, producer, consumer and local transaction ids are remapped onto ids
 * owned by the single shared multiplexer connection before being forwarded to
 * the broker. Dispatched messages travelling the other way are mapped back to
 * the client's original consumer ids in {@link #oneway(MessageDispatch)}.
 * <p>
 * Id maps are {@link ConcurrentHashMap}s because commands and dispatches may
 * arrive on different threads; the non-concurrent {@code transactionIdMap} is
 * guarded by synchronizing on itself.
 */
public class MultiplexerInput extends TransportSupport implements CommandVisitor, InboundConnection {
    private static final Logger LOG = LoggerFactory.getLogger(MultiplexerInput.class);
    private final Multiplexer multiplexer;
    private final Model model;
    private final String protocol;
    private final String name;
    private final AsyncExecutors asyncExecutors;
    private final TransportConnectionStateRegister multiplexerConnectionStateRegister;
    private final ConnectionId multiplexerConnectionId;
    private final SessionId multiplexerSessionId;
    private final Transport input;
    // client-side id -> multiplexer-side id
    private final Map<SessionId, SessionId> sessionIdMap = new ConcurrentHashMap<>();
    private final Map<ProducerId, ProducerId> producerIdMap = new ConcurrentHashMap<>();
    // Consumer ids are kept in both directions: original -> multiplexer for
    // outgoing commands, multiplexer -> original for incoming dispatches.
    private final Map<ConsumerId, ConsumerId> originalConsumerIdKeyMap = new ConcurrentHashMap<>();
    private final Map<ConsumerId, ConsumerId> multiplexerConsumerIdKeyMap = new ConcurrentHashMap<>();
    // Bounded so abandoned local transactions cannot grow without limit;
    // guarded by synchronized (transactionIdMap).
    private final Map<TransactionId, TransactionId> transactionIdMap = new LRUCache<>(10000);
    private final DestinationRegister destinationRegister;
    private ConnectionInfo connectionInfo;
    private final AtomicLong inboundMessageCount = new AtomicLong();
    private final AtomicLong outboundMessageCount = new AtomicLong();

    MultiplexerInput(Multiplexer multiplexer, String name, String protocol, AsyncExecutors asyncExecutors, TransportConnectionStateRegister transportConnectionStateRegister, Transport input) {
        this.multiplexer = multiplexer;
        this.name = name;
        this.protocol = protocol;
        this.model = multiplexer.getModel();
        this.asyncExecutors = asyncExecutors;
        this.multiplexerConnectionStateRegister = transportConnectionStateRegister;
        this.input = input;
        this.multiplexerConnectionId = multiplexer.getMultiplexerConnectionInfo().getConnectionId();
        this.multiplexerSessionId = multiplexer.getMultiplexerSessionInfo().getSessionId();
        this.destinationRegister = new DestinationRegister(model, this);
    }

    /** @return the client-facing transport this input reads from. */
    public Transport getInput() {
        return input;
    }

    /** @return the id of the shared connection owned by the multiplexer. */
    public ConnectionId getMultiplexerConnectionId() {
        return multiplexerConnectionId;
    }

    /** Installs the listener on both this support class and the underlying transport. */
    @Override
    public void setTransportListener(TransportListener commandListener) {
        super.setTransportListener(commandListener);
        input.setTransportListener(commandListener);
    }

    public Multiplexer getMultiplexer() {
        return multiplexer;
    }

    /**
     * Tears down this input: removes all mapped sessions, detaches from the
     * multiplexer, stops the underlying transport and clears every id map.
     */
    @Override
    protected void doStop(ServiceStopper serviceStopper) throws Exception {
        // Clear down our ConnectionState. Iterating a ConcurrentHashMap view
        // while removeSession() mutates the map is safe (weakly consistent).
        for (SessionId sessionId : sessionIdMap.values()) {
            removeSession(sessionId);
        }
        multiplexer.removeInput(this);
        if (serviceStopper != null) {
            serviceStopper.stop(input);
        } else {
            input.stop();
        }
        originalConsumerIdKeyMap.clear();
        multiplexerConsumerIdKeyMap.clear();
        sessionIdMap.clear();
        producerIdMap.clear();
        transactionIdMap.clear();
        model.remove(this);
    }

    /**
     * Starts listening on the client transport; every received command is
     * routed through {@link #processCommand(Object)}.
     */
    @Override
    protected void doStart() throws Exception {
        setTransportListener(new DefaultTransportListener() {
            @Override
            public void onCommand(Object command) {
                if (command.getClass() == ShutdownInfo.class) {
                    try {
                        stop();
                    } catch (Exception e) {
                        LOG.debug("Caught exception stopping", e);
                    }
                }
                try {
                    processCommand(command);
                } catch (Throwable error) {
                    onFailure(error);
                }
            }

            @Override
            public void onException(IOException error) {
                onFailure(error);
            }
        });
        this.input.start();
        model.add(this);
    }

    /**
     * Dispatches a command through the visitor methods below. A non-null
     * Response means we answered locally rather than via the broker, so it is
     * correlated and sent straight back to the client.
     */
    protected void processCommand(Object o) {
        try {
            Command command = (Command) o;
            Response response = command.visit(this);
            if (response != null) {
                // we are processing this locally - not via the Broker
                response.setCorrelationId(command.getCommandId());
                oneway(response);
            }
        } catch (Throwable e) {
            onFailure(e);
        }
    }

    @Override
    public void oneway(Object o) throws IOException {
        input.oneway(o);
    }

    /**
     * Sends a broker dispatch back to the client, translating the multiplexer
     * consumer id back to the client's original id and updating statistics.
     */
    public void oneway(MessageDispatch messageDispatch) throws IOException {
        ConsumerId consumerId = messageDispatch.getConsumerId();
        ConsumerId originalConsumerId = getOriginalConsumerId(consumerId);
        messageDispatch.setConsumerId(originalConsumerId);
        input.oneway(messageDispatch);
        destinationRegister.addMessageOutbound(messageDispatch.getDestination());
        outboundMessageCount.incrementAndGet();
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public String getRemoteAddress() {
        return input.getRemoteAddress();
    }

    @Override
    public int getReceiveCounter() {
        return input.getReceiveCounter();
    }

    /** Stops this input on any transport error, unless already stopping. */
    public void onFailure(Throwable e) {
        if (!isStopping()) {
            LOG.debug("Transport error: {}", e.getMessage(), e);
            try {
                stop();
            } catch (Exception ignore) {
                // best-effort shutdown; the transport is already broken
            }
        }
    }

    /**
     * Handles the client's ConnectionInfo. The connection is NOT forwarded to
     * the broker (the shared multiplexer connection already exists); we only
     * remember it and acknowledge locally.
     */
    @Override
    public Response processAddConnection(ConnectionInfo connectionInfo) throws Exception {
        this.connectionInfo = connectionInfo;
        ConnectionInfo copy = connectionInfo.copy();
        copy.setConnectionId(getMultiplexerConnectionId());
        /*
        multiplexer.sendOutAll(this, copy);
        multiplexerConnectionStateRegister.registerConnectionState(copy.getConnectionId(), copy);
        */
        Response response = new Response();
        response.setCorrelationId(connectionInfo.getCommandId());
        return response;
    }

    /** Maps the client's session onto a fresh multiplexer session and forwards it. */
    @Override
    public Response processAddSession(SessionInfo sessionInfo) throws Exception {
        SessionInfo copy = new SessionInfo();
        sessionInfo.copy(copy);
        SessionId originalSessionId = sessionInfo.getSessionId();
        SessionId sessionId = new SessionId(getMultiplexerConnectionId(), multiplexer.getNextSessionId());
        sessionIdMap.put(originalSessionId, sessionId);
        copy.setSessionId(sessionId);
        multiplexer.sendOutAll(this, copy);
        multiplexerConnectionStateRegister.addSession(copy);
        return null;
    }

    /** Maps the client's producer (creating a session mapping if needed) and forwards it. */
    @Override
    public Response processAddProducer(ProducerInfo producerInfo) throws Exception {
        ProducerInfo copy = producerInfo.copy();
        SessionId originalSessionId = producerInfo.getProducerId().getParentId();
        SessionId newSessionId = sessionIdMap.get(originalSessionId);
        if (newSessionId == null) {
            // Producer arrived before (or without) an explicit AddSession.
            newSessionId = new SessionId(getMultiplexerConnectionId(), multiplexer.getNextSessionId());
            sessionIdMap.put(originalSessionId, newSessionId);
        }
        ProducerId producerId = new ProducerId(newSessionId, multiplexer.getNextProducerId());
        copy.setProducerId(producerId);
        producerIdMap.put(producerInfo.getProducerId(), producerId);
        multiplexer.sendOutAll(this, copy);
        multiplexerConnectionStateRegister.addProducer(copy);
        destinationRegister.registerProducer(producerInfo.getDestination());
        return null;
    }

    /** Maps the client's consumer onto a multiplexer consumer id and forwards it. */
    @Override
    public Response processAddConsumer(ConsumerInfo consumerInfo) throws Exception {
        ConsumerId originalConsumerId = consumerInfo.getConsumerId();
        ConsumerInfo copy = new ConsumerInfo();
        consumerInfo.copy(copy);
        SessionId originalSessionId = consumerInfo.getConsumerId().getParentId();
        SessionId newSessionId = sessionIdMap.get(originalSessionId);
        if (newSessionId == null) {
            // Connection Advisory Consumer sets session id to -1
            if (originalSessionId.getValue() == -1) {
                newSessionId = multiplexerSessionId;
            } else {
                newSessionId = new SessionId(getMultiplexerConnectionId(), multiplexer.getNextSessionId());
            }
            sessionIdMap.put(originalSessionId, newSessionId);
        }
        ConsumerId multiplexConsumerId = new ConsumerId(newSessionId, multiplexer.getNextConsumerId());
        copy.setConsumerId(multiplexConsumerId);
        storeConsumerId(originalConsumerId, multiplexConsumerId);
        multiplexer.registerConsumer(multiplexConsumerId, this);
        multiplexer.sendOutAll(this, copy);
        multiplexerConnectionStateRegister.addConsumer(copy);
        if (!AdvisorySupport.isAdvisoryTopic(consumerInfo.getDestination())) {
            destinationRegister.registerConsumer(consumerInfo.getDestination());
        }
        return null;
    }

    /**
     * The connection is shutting down - and will expect a response.
     * We don't forward these - as the remote Broker will get confused -
     * so we send back a response ourselves and stop asynchronously.
     */
    @Override
    public Response processRemoveConnection(ConnectionId connectionId, long l) throws Exception {
        if (connectionInfo != null) {
            RemoveInfo removeCommand = connectionInfo.createRemoveCommand();
            removeCommand.setResponseRequired(false);
            removeCommand.setLastDeliveredSequenceId(l);
            /*
            multiplexerConnectionStateRegister.unregisterConnectionState(getMultiplexerConnectionId());
            if (!multiplexer.isStopping() && !multiplexer.isStopped()) {
                multiplexer.sendOutAll(this, removeCommand);
            }
            */
        }
        // clear down our ConnectionState
        for (SessionId sessionId : sessionIdMap.values()) {
            if (multiplexerConnectionStateRegister.removeSession(sessionId) != null) {
                removeSession(sessionId);
            }
        }
        asyncExecutors.execute(new Runnable() {
            public void run() {
                try {
                    // Give the response a moment to reach the client before
                    // tearing the transport down.
                    Thread.sleep(1000);
                    stop();
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt(); // preserve interrupt status
                    LOG.warn("Caught an error trying to stop", e);
                } catch (Throwable e) {
                    LOG.warn("Caught an error trying to stop", e);
                }
            }
        });
        return new Response();
    }

    /** Unmaps the client's session and forwards the removal to the broker. */
    @Override
    public Response processRemoveSession(SessionId sessionId, long l) throws Exception {
        SessionId multiplexerSessionId = sessionIdMap.remove(sessionId);
        if (multiplexerSessionId != null) {
            RemoveInfo removeInfo = new RemoveInfo(multiplexerSessionId);
            removeInfo.setLastDeliveredSequenceId(l);
            multiplexerConnectionStateRegister.removeSession(multiplexerSessionId);
            multiplexer.sendOutAll(this, removeInfo);
        }
        return null;
    }

    /** Unmaps the client's producer and forwards the removal to the broker. */
    @Override
    public Response processRemoveProducer(ProducerId producerId) throws Exception {
        ProducerId originalProducerId = producerIdMap.remove(producerId);
        if (originalProducerId != null) {
            RemoveInfo removeInfo = new RemoveInfo(originalProducerId);
            ProducerState state = multiplexerConnectionStateRegister.removeProducer(originalProducerId);
            multiplexer.sendOutAll(this, removeInfo);
            if (state != null && state.getInfo() != null) {
                destinationRegister.unregisterProducer(state.getInfo().getDestination());
            }
        }
        return null;
    }

    /** Unmaps the client's consumer and forwards the removal to the broker. */
    @Override
    public Response processRemoveConsumer(ConsumerId consumerId, long l) throws Exception {
        ConsumerId multiplexerConsumerId = removeByOriginal(consumerId);
        if (multiplexerConsumerId != null) {
            // Only unregister once we know the mapping existed (the original
            // code unregistered before the null check).
            multiplexer.unregisterConsumer(multiplexerConsumerId);
            RemoveInfo removeInfo = new RemoveInfo(multiplexerConsumerId);
            removeInfo.setLastDeliveredSequenceId(l);
            ConsumerState state = multiplexerConnectionStateRegister.removeConsumer(multiplexerConsumerId);
            multiplexer.sendOutAll(this, removeInfo);
            if (state != null && state.getInfo() != null) {
                destinationRegister.unregisterConsumer(state.getInfo().getDestination());
            }
        }
        return null;
    }

    @Override
    public Response processAddDestination(DestinationInfo destinationInfo) throws Exception {
        DestinationInfo copy = destinationInfo.copy();
        copy.setConnectionId(getMultiplexerConnectionId());
        multiplexer.sendOut(this, destinationInfo.getDestination(), copy);
        return null;
    }

    @Override
    public Response processRemoveDestination(DestinationInfo destinationInfo) throws Exception {
        DestinationInfo copy = destinationInfo.copy();
        copy.setConnectionId(getMultiplexerConnectionId());
        multiplexer.sendOut(this, destinationInfo.getDestination(), copy);
        return null;
    }

    @Override
    public Response processRemoveSubscription(RemoveSubscriptionInfo removeSubscriptionInfo) throws Exception {
        removeSubscriptionInfo.setConnectionId(getMultiplexerConnectionId());
        multiplexer.sendOutAll(this, removeSubscriptionInfo);
        return null;
    }

    /**
     * Forwards a client message to the broker under the mapped producer and
     * transaction ids; drops (and logs) messages from unknown producers.
     */
    @Override
    public Response processMessage(Message message) throws Exception {
        ProducerId originalProducerId = message.getProducerId();
        ProducerId newProducerId = producerIdMap.get(originalProducerId);
        if (newProducerId != null) {
            ActiveMQDestination destination = message.getDestination();
            Message copy = message.copy();
            copy.setProducerId(newProducerId);
            copy.setTransactionId(getMultiplexTransactionId(message.getOriginalTransactionId()));
            multiplexer.sendOut(this, destination, copy);
            destinationRegister.addMessageInbound(destination);
            inboundMessageCount.incrementAndGet();
        } else {
            LOG.error("Cannot find producerId for {}", originalProducerId);
        }
        return null;
    }

    /** Forwards an acknowledgement under the mapped consumer/transaction ids. */
    @Override
    public Response processMessageAck(MessageAck messageAck) throws Exception {
        MessageAck copy = new MessageAck();
        messageAck.copy(copy);
        copy.setMessageCount(messageAck.getMessageCount());
        copy.setTransactionId(getMultiplexTransactionId(messageAck.getTransactionId()));
        ConsumerId consumerId = messageAck.getConsumerId();
        ConsumerId multiplexerConsumerId = getMultiplexConsumerId(consumerId);
        if (multiplexerConsumerId != null) {
            copy.setConsumerId(multiplexerConsumerId);
            multiplexer.sendOut(this, copy.getDestination(), copy);
        }
        return null;
    }

    @Override
    public Response processMessagePull(MessagePull messagePull) throws Exception {
        ConsumerId consumerId = messagePull.getConsumerId();
        ConsumerId multiplexerConsumerId = getMultiplexConsumerId(consumerId);
        if (multiplexerConsumerId != null) {
            messagePull.setConsumerId(multiplexerConsumerId);
            multiplexer.sendOut(this, messagePull.getDestination(), messagePull);
        }
        return null;
    }

    @Override
    public Response processBeginTransaction(TransactionInfo transactionInfo) throws Exception {
        transactionInfo.setTransactionId(getMultiplexTransactionId(transactionInfo.getTransactionId()));
        multiplexer.sendOutAll(this, transactionInfo);
        return null;
    }

    @Override
    public Response processPrepareTransaction(TransactionInfo transactionInfo) throws Exception {
        transactionInfo.setTransactionId(getMultiplexTransactionId(transactionInfo.getTransactionId()));
        multiplexer.sendOutAll(this, transactionInfo);
        return null;
    }

    @Override
    public Response processCommitTransactionOnePhase(TransactionInfo transactionInfo) throws Exception {
        transactionInfo.setTransactionId(getMultiplexTransactionId(transactionInfo.getTransactionId()));
        multiplexer.sendOutAll(this, transactionInfo);
        return null;
    }

    @Override
    public Response processCommitTransactionTwoPhase(TransactionInfo transactionInfo) throws Exception {
        transactionInfo.setTransactionId(getMultiplexTransactionId(transactionInfo.getTransactionId()));
        multiplexer.sendOutAll(this, transactionInfo);
        return null;
    }

    @Override
    public Response processRollbackTransaction(TransactionInfo transactionInfo) throws Exception {
        transactionInfo.setTransactionId(getMultiplexTransactionId(transactionInfo.getTransactionId()));
        multiplexer.sendOutAll(this, transactionInfo);
        return null;
    }

    @Override
    public Response processWireFormat(WireFormatInfo wireFormatInfo) throws Exception {
        return null; // handled by the transport layer
    }

    @Override
    public Response processKeepAlive(KeepAliveInfo keepAliveInfo) throws Exception {
        return null; // nothing to forward
    }

    /** Client shutdown: stop this input asynchronously, off the command thread. */
    @Override
    public Response processShutdown(ShutdownInfo shutdownInfo) throws Exception {
        multiplexer.doAsyncProcess(new Runnable() {
            @Override
            public void run() {
                try {
                    stop();
                } catch (Exception e) {
                    LOG.debug("Caught exception stopping", e);
                }
            }
        });
        return null;
    }

    @Override
    public Response processFlush(FlushCommand flushCommand) throws Exception {
        multiplexer.sendOutAll(this, flushCommand);
        return null;
    }

    @Override
    public Response processBrokerInfo(BrokerInfo brokerInfo) throws Exception {
        multiplexer.sendOutAll(this, brokerInfo);
        return null;
    }

    @Override
    public Response processRecoverTransactions(TransactionInfo transactionInfo) throws Exception {
        transactionInfo.setTransactionId(getMultiplexTransactionId(transactionInfo.getTransactionId()));
        multiplexer.sendOutAll(this, transactionInfo);
        return null;
    }

    @Override
    public Response processForgetTransaction(TransactionInfo transactionInfo) throws Exception {
        // Forget/end also drop the id mapping - the transaction is finished.
        transactionInfo.setTransactionId(getAndForgetMultiplexTransactionId(transactionInfo.getTransactionId()));
        multiplexer.sendOutAll(this, transactionInfo);
        return null;
    }

    @Override
    public Response processEndTransaction(TransactionInfo transactionInfo) throws Exception {
        transactionInfo.setTransactionId(getAndForgetMultiplexTransactionId(transactionInfo.getTransactionId()));
        multiplexer.sendOutAll(this, transactionInfo);
        return null;
    }

    @Override
    public Response processMessageDispatchNotification(MessageDispatchNotification messageDispatchNotification) throws Exception {
        multiplexer.sendOutAll(this, messageDispatchNotification);
        return null;
    }

    @Override
    public Response processProducerAck(ProducerAck producerAck) throws Exception {
        multiplexer.sendOutAll(this, producerAck);
        return null;
    }

    @Override
    public Response processMessageDispatch(MessageDispatch messageDispatch) throws Exception {
        multiplexer.sendOutAll(this, messageDispatch);
        return null;
    }

    @Override
    public Response processControlCommand(ControlCommand controlCommand) throws Exception {
        return null; // not forwarded
    }

    @Override
    public Response processConnectionError(ConnectionError connectionError) throws Exception {
        multiplexer.sendOutAll(this, connectionError);
        return null;
    }

    @Override
    public Response processConnectionControl(ConnectionControl connectionControl) throws Exception {
        multiplexer.sendOutAll(this, connectionControl);
        return null;
    }

    @Override
    public Response processConsumerControl(ConsumerControl consumerControl) throws Exception {
        multiplexer.sendOutAll(this, consumerControl);
        return null;
    }

    /**
     * Maps a client's local transaction id to one owned by the multiplexer
     * connection, creating the mapping on first use. XA (non-local) ids pass
     * through unchanged.
     */
    private TransactionId getMultiplexTransactionId(TransactionId originalId) {
        TransactionId result = originalId;
        if (originalId != null && originalId.isLocalTransaction()) {
            synchronized (transactionIdMap) {
                result = transactionIdMap.get(originalId);
                if (result == null) {
                    long multiplexerTransactionId = multiplexer.getNextTransactionId();
                    result = new LocalTransactionId(getMultiplexerConnectionId(), multiplexerTransactionId);
                    transactionIdMap.put(originalId, result);
                }
            }
        }
        return result;
    }

    /** As {@link #getMultiplexTransactionId} but removes the mapping (transaction finished). */
    private TransactionId getAndForgetMultiplexTransactionId(TransactionId originalId) {
        TransactionId result = originalId;
        if (originalId != null && originalId.isLocalTransaction()) {
            synchronized (transactionIdMap) {
                TransactionId value = transactionIdMap.remove(originalId);
                if (value != null) {
                    result = value;
                }
            }
        }
        return result;
    }

    /** Records a consumer id pair in both lookup directions. */
    private void storeConsumerId(ConsumerId original, ConsumerId multiplexer) {
        originalConsumerIdKeyMap.put(original, multiplexer);
        multiplexerConsumerIdKeyMap.put(multiplexer, original);
    }

    /**
     * Removes the mapping keyed by the client's original consumer id.
     * NOTE(review): only the reverse entry is removed here; the forward entry
     * survives until the maps are cleared - preserved from the original code.
     */
    private ConsumerId removeByOriginal(ConsumerId original) {
        ConsumerId multiplexerId = originalConsumerIdKeyMap.get(original);
        if (multiplexerId != null) {
            multiplexerConsumerIdKeyMap.remove(multiplexerId);
        }
        return multiplexerId;
    }

    /** Removes the mapping keyed by the multiplexer-side consumer id (both directions). */
    private void removeByMultiplexerId(ConsumerId multiplexerId) {
        ConsumerId originalId = multiplexerConsumerIdKeyMap.remove(multiplexerId);
        if (originalId != null) {
            originalConsumerIdKeyMap.remove(originalId);
        }
    }

    private ConsumerId getMultiplexConsumerId(ConsumerId original) {
        return originalConsumerIdKeyMap.get(original);
    }

    private ConsumerId getOriginalConsumerId(ConsumerId multiplex) {
        return multiplexerConsumerIdKeyMap.get(multiplex);
    }

    /**
     * Removes a multiplexer-side session and everything that hangs off it:
     * its consumers, its producers, and finally the session itself (unless it
     * is the shared multiplexer session, which must survive).
     */
    private void removeSession(SessionId multiplexerDefinedSessionId) {
        // ConcurrentHashMap views tolerate removal during iteration.
        for (ConsumerId multiplexerConsumerId : multiplexerConsumerIdKeyMap.keySet()) {
            if (multiplexerConsumerId.getParentId().equals(multiplexerDefinedSessionId)) {
                removeConsumer(multiplexerConsumerId);
            }
        }
        for (Map.Entry<ProducerId, ProducerId> entry : producerIdMap.entrySet()) {
            if (entry.getValue().getParentId().equals(multiplexerDefinedSessionId)) {
                removeProducer(entry.getValue());
                producerIdMap.remove(entry.getKey());
            }
        }
        if (!multiplexerDefinedSessionId.equals(multiplexerSessionId)) {
            multiplexerConnectionStateRegister.removeSession(multiplexerDefinedSessionId);
            for (Map.Entry<SessionId, SessionId> entry : sessionIdMap.entrySet()) {
                if (entry.getValue().equals(multiplexerDefinedSessionId)) {
                    sessionIdMap.remove(entry.getKey());
                    break;
                }
            }
            RemoveInfo removeInfo = new RemoveInfo(multiplexerDefinedSessionId);
            removeInfo.setLastDeliveredSequenceId(0);
            multiplexer.sendOutAll(this, removeInfo);
        }
    }

    /** Unregisters a consumer and tells the broker to remove it. */
    private void removeConsumer(ConsumerId multiplexerConsumerId) {
        if (multiplexerConsumerId != null) {
            removeByMultiplexerId(multiplexerConsumerId);
            if (multiplexerConnectionStateRegister.removeConsumer(multiplexerConsumerId) != null) {
                multiplexer.unregisterConsumer(multiplexerConsumerId);
                RemoveInfo removeInfo = new RemoveInfo(multiplexerConsumerId);
                removeInfo.setLastDeliveredSequenceId(0);
                multiplexer.sendOutAll(this, removeInfo);
            }
        }
    }

    /** Unregisters a producer and tells the broker to remove it. */
    private void removeProducer(ProducerId multiplexerProducerId) {
        if (multiplexerProducerId != null) {
            multiplexerConnectionStateRegister.removeProducer(multiplexerProducerId);
            RemoveInfo removeInfo = new RemoveInfo(multiplexerProducerId);
            multiplexer.sendOutAll(this, removeInfo);
        }
    }

    @Override
    public long getOutboundMessageCount() {
        return outboundMessageCount.get();
    }

    @Override
    public long getInboundMessageCount() {
        return inboundMessageCount.get();
    }

    @Override
    public String getUrl() {
        return input.getRemoteAddress();
    }

    @Override
    public String getProtocol() {
        return protocol;
    }

    @Override
    public List<DestinationStatisticsMBean> getDestinations() {
        return destinationRegister.getDestinations();
    }
}
| |
/*
* Copyright 2009 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.base.Preconditions;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import com.google.javascript.jscomp.NameReferenceGraph.Name;
import com.google.javascript.jscomp.NameReferenceGraph.Reference;
import com.google.javascript.jscomp.NodeTraversal.ScopedCallback;
import com.google.javascript.jscomp.graph.GraphNode;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import com.google.javascript.rhino.jstype.FunctionType;
import com.google.javascript.rhino.jstype.JSType;
import com.google.javascript.rhino.jstype.JSTypeNative;
import com.google.javascript.rhino.jstype.ObjectType;
import java.util.ArrayList;
import java.util.Collection;
import javax.annotation.Nullable;
/**
* Constructs a name reference graph.
*
* @see NameReferenceGraph
*
*/
class NameReferenceGraphConstruction implements CompilerPass {
private final AbstractCompiler compiler;
private final NameReferenceGraph graph;
// Maps "foo" -> (curFuncName, unknownObject.foo) if we have no idea what
// the unknown object is. After we finish one pass, we must go through all
// the nodes that might have a name foo and connect that to the curFuncName.
// The accuracy of the analysis will depend heavily on eliminating the need
// to resort to this map.
private final Multimap<String, NameUse> unknownNameUse =
HashMultimap.create();
// Should we continue even if we found a type checker bug.
private static final boolean CONSERVATIVE = false;
// The symbol for the current function so we can quickly create a reference
// edge when we see a call: Example when this symbol is foo() and we see
// bar(), we connect foo -> bar.
private final ArrayList<Name> currentFunctionStack = new ArrayList<>();
NameReferenceGraphConstruction(AbstractCompiler compiler) {
// Creates an empty name-reference graph bound to this compiler instance;
// the graph is populated when process() runs.
this.compiler = compiler;
this.graph = new NameReferenceGraph(compiler);
}
/** @return the graph assembled by {@code process}. */
NameReferenceGraph getNameReferenceGraph() {
  return graph;
}
@Override
public void process(Node externs, Node root) {
  // Share one memoized scope creator so both traversals see identical scopes.
  ScopeCreator scopes = new MemoizedScopeCreator(new TypedScopeCreator(compiler));
  NodeTraversal overExterns = new NodeTraversal(compiler,
      new Traversal(true), scopes);
  NodeTraversal overCode = new NodeTraversal(compiler,
      new Traversal(false), scopes);
  Scope top = compiler.getTopScope();
  if (top == null) {
    overExterns.traverse(externs);
    overCode.traverse(root);
  } else {
    // Reuse the scope built by an earlier type-checking pass when available.
    overExterns.traverseWithScope(externs, top);
    overCode.traverseWithScope(root, top);
  }
  // Attach property uses we could not attribute to a concrete receiver.
  connectUnknowns();
}
private class Traversal implements ScopedCallback {
final boolean isExtern;
private Traversal(boolean isExtern) {
// Seed the containing-function stack with the synthetic main symbol so
// top-level code is attributed to it.
this.isExtern = isExtern;
pushContainingFunction(graph.main);
}
@Override
public void enterScope(NodeTraversal t) {
// Entering a new scope: work out which named function we are now inside
// so that later references can be attributed to it.
Node root = t.getScopeRoot();
Node parent = root.getParent();
// When we are not in a {{GLOBAL MAIN}}, we need to determine what the
// current function is.
if (!t.inGlobalScope()) {
// TODO(user): A global function foo() is treated as the same
// function as an inner function named foo(). We should use some clever
// naming scheme to avoid this loss of precision.
String name = NodeUtil.getName(root);
if (name == null) {
// When the name is null, we have a function that is presumably not
// reference-able again and should not be modeled in the name graph.
// A common example would be (function() { ... })();
pushContainingFunction(graph.unknown);
return;
}
// If we've done type analysis, then we should be able to get the
// correct JSFunctionType for the containing function. If not,
// we're probably going to get an unknown type here.
JSType type = getType(root);
if (parent.isAssign() &&
NodeUtil.isPrototypeProperty(parent.getFirstChild())) {
// e.g. Foo.prototype.bar = function() {...}; -- a prototype property.
pushContainingFunction(
recordPrototypePropDefinition(parent.getFirstChild(), type, parent));
} else {
// Plain function declaration or static name assignment.
pushContainingFunction(
recordStaticNameDefinition(
name, type, root, root.getLastChild()));
}
}
}
@Override
public void exitScope(NodeTraversal t) {
  // Leaving a function scope: discard its symbol from the stack. The global
  // scope never pushed anything here, so there is nothing to pop for it.
  if (t.inGlobalScope()) {
    return;
  }
  popContainingFunction();
}
@Override
public boolean shouldTraverse(NodeTraversal t, Node n, Node parent) {
// Visit every node unconditionally; filtering happens in visit().
return true;
}
@SuppressWarnings("fallthrough")
@Override
public void visit(NodeTraversal t, Node n, Node parent) {
// Classifies each interesting node: name/property uses become graph edges,
// assignments and calls may mark names as aliased or exported.
switch (n.getType()) {
// NAME falls through to GETPROP: both are treated as name references.
case NAME:
case GETPROP:
if (parent.isGetProp()) {
// We will resolve this when we visit parent later in the traversal.
return;
} else if (parent.isFunction()) {
// Function declarations have been taken care of in enterScope();
return;
} else if (parent.isAssign()) {
// Handled below.
return;
}
if (isLocalNameReference(t, n)) {
// Ignore all local variable references unless it creates a closure.
return;
}
// Record the use against the most specific category we can prove.
if (isPrototypeNameReference(n)) {
recordPrototypePropUse(n);
} else if (isStaticNameReference(n, t.getScope())) {
recordStaticNameUse(n);
} else {
recordUnknownUse(n);
}
break;
case ASSIGN:
Node lhs = n.getFirstChild();
Node rhs = n.getLastChild();
if (rhs.isFunction()) {
// These are recorded when entering the scope.
return;
}
if (lhs.isName() ||
lhs.isGetProp() ||
rhs.isGetProp()) {
if (NodeUtil.isPrototypeProperty(lhs)) {
// A prototype property assigned a non-function value; it may be
// read through an alias, so mark it aliased.
Name name = recordPrototypePropDefinition(
lhs, getType(rhs), n);
name.setAliased(true);
}
}
maybeAliasNamesOnAssign(lhs, rhs);
break;
case VAR:
// var foo = bar;
Node varName = n.getFirstChild();
Node assignedValue = varName.getFirstChild();
if (assignedValue == null) {
// Declaration without initializer: nothing can be aliased.
return;
}
maybeAliasNamesOnAssign(varName, assignedValue);
break;
case CALL:
Node param = n.getFirstChild();
// We need to alias every name that is passed as a parameter because
// they have different names inside the function's scope.
while ((param = param.getNext()) != null) {
if (param.isName() || param.isGetProp()) {
safeAlias(param);
}
}
maybeRecordExport(n);
break;
}
}
private boolean containsName(Node n) {
  // Any NAME, GETELEM or GETPROP anywhere under n means the subtree
  // mentions a name that could be aliased.
  if (NodeUtil.containsType(n, Token.NAME)) {
    return true;
  }
  if (NodeUtil.containsType(n, Token.GETELEM)) {
    return true;
  }
  return NodeUtil.containsType(n, Token.GETPROP);
}
/**
 * Given a node, this aliases all the names in the node that need aliasing.
 * This is safer than just calling getQualifiedName() because it can return
 * null in several situations.
 * @param n node to alias
 */
private void safeAlias(Node n) {
if (n.isName() || n.isGetProp()) {
String name = n.getQualifiedName();
// getQualifiedName can return null in cases like bar[0].baz
if (name != null) {
defineAndAlias(name);
return;
}
}
// Fall through: no full qualified name was available for n.
if (n.isGetProp()) {
// var foo = bar[0].baz; -- alias just the final property name.
defineAndAlias(n.getLastChild().getString());
} else if (n.isAssign()) {
// In case of nested assignment, we only consider the name of the
// immediate neighbor.
safeAlias(n.getFirstChild());
} else if (n.hasChildren()) {
// Recurse into every child looking for names to alias.
Node cur = n.getFirstChild();
do {
safeAlias(cur);
} while ((cur = cur.getNext()) != null);
} else {
// No name to alias
}
}
private void maybeAliasNamesOnAssign(Node lhs, Node rhs) {
  // Aliasing only matters when one name is copied into another; function
  // literals and new-expressions define values rather than alias them.
  boolean lhsIsNameLike = lhs.isName() || lhs.isGetProp();
  if (lhsIsNameLike && containsName(rhs) && !rhs.isFunction() && !rhs.isNew()) {
    safeAlias(lhs);
    safeAlias(rhs);
  }
}
private void defineAndAlias(String name) {
// Ensure the name exists in the graph, then flag it as aliased so later
// optimization passes treat it conservatively.
graph.defineNameIfNotExists(name, isExtern).setAliased(true);
}
/**
 * Inspects a CALL node: remembers functions that escape through
 * {@code .call}/{@code .apply}, and marks the target of
 * {@code goog.exportSymbol("name", obj)} as exported.
 */
private void maybeRecordExport(Node call) {
  Preconditions.checkArgument(call.isCall());
  Node callee = call.getFirstChild();
  if (!callee.isGetProp()) {
    return;
  }
  String calleeQName = callee.getQualifiedName();
  if (calleeQName == null) {
    return;
  }
  // Keep track of calls to "call" and "apply" because they mess up the name
  // graph.
  if (calleeQName.endsWith(".call") || calleeQName.endsWith(".apply")) {
    graph.defineNameIfNotExists(callee.getFirstChild().getQualifiedName(),
        isExtern).markExposedToCallOrApply();
  }
  if (!"goog.exportSymbol".equals(calleeQName)) {
    return;
  }
  // goog.exportSymbol(<string literal>, <qualified name>) with exactly two args.
  Node exportedName = callee.getNext();
  if (!exportedName.isString()) {
    return;
  }
  Node exportedValue = exportedName.getNext();
  String valueQName = exportedValue.getQualifiedName();
  if (valueQName == null || exportedValue.getNext() != null) {
    return;
  }
  graph.defineNameIfNotExists(valueQName, false).markExported();
}
/**
 * @return true if n MUST be a local name reference.
 */
private boolean isLocalNameReference(NodeTraversal t, Node n) {
  // TODO(user): What happens if it is a reference to an outer local
  // variable (closures)?
  if (n.isName()) {
    Var v = t.getScope().getVar(n.getString());
    return v != null && v.isLocal();
  }
  return false;
}
/**
 * @return true if n MUST be a static name reference.
 */
private boolean isStaticNameReference(Node n, Scope scope) {
  Preconditions.checkArgument(n.isName() || n.isGetProp());
  // A simple NAME is always a static reference.
  if (n.isName()) {
    return true;
  }
  String qName = n.getQualifiedName();
  if (qName == null) {
    return false;
  }
  // TODO(user): This does not always work due to type system bugs.
  return scope.isDeclared(qName, true);
}
/**
 * @return true if n MUST be a prototype name reference.
 */
private boolean isPrototypeNameReference(Node n) {
  if (!n.isGetProp()) {
    return false;
  }
  JSType receiverType = getType(n.getFirstChild());
  // Unknown or union receivers cannot be pinned to a single prototype.
  if (receiverType.isUnknownType() || receiverType.isUnionType()) {
    return false;
  }
  if (receiverType.isInstanceType()) {
    return true;
  }
  // Primitives (string, number, ...) autobox to an instance type.
  return receiverType.autoboxesTo() != null;
}
/**
 * Records the definition of a static (non-prototype) name in the graph.
 *
 * @param name qualified name being defined
 * @param type the type of the defined value
 * @param n the defining node (ASSIGN or FUNCTION)
 * @param rValue the right-hand side of the definition
 * @return the graph Name for the definition
 */
private Name recordStaticNameDefinition(String name, JSType type,
    Node n, Node rValue) {
  if (getNamedContainingFunction() != graph.main) {
    // TODO(user): if A.B() defines A.C(), there is a dependence from
    // A.C() -> A.B(). However, this is not important in module code motion
    // and will be ignored (for now).
  }
  if (type.isConstructor()) {
    // Constructors get their prototype properties registered too.
    return recordClassConstructorOrInterface(
        name, type.toMaybeFunctionType(),
        n, rValue);
  } else {
    Name symbol = graph.defineNameIfNotExists(name, isExtern);
    symbol.setType(type);
    if (n.isAssign()) {
      symbol.addAssignmentDeclaration(n);
    } else {
      symbol.addFunctionDeclaration(n);
    }
    return symbol;
  }
}
/**
 * Records the definition of a prototype property (Foo.prototype.bar = ...)
 * in the graph, creating the owning class's node first if necessary.
 *
 * @param qName the GETPROP node naming the prototype property
 * @param type the type of the assigned value
 * @param assign The assignment node, null if it is just a "forward"
 *     declaration for recording the rValue's type.
 */
private Name recordPrototypePropDefinition(
    Node qName, JSType type, @Nullable Node assign) {
  JSType constructor = getType(NodeUtil.getPrototypeClassName(qName));
  FunctionType classType = null;
  String className = null;
  if (constructor != null && constructor.isConstructor()) {
    // Case where the class has been properly declared with @constructor
    classType = constructor.toMaybeFunctionType();
    className = classType.getReferenceName();
  } else {
    // We'll guess it is a constructor even if it didn't have @constructor
    classType = compiler.getTypeIRegistry()
        .getNativeFunctionType(JSTypeNative.U2U_CONSTRUCTOR_TYPE);
    className = NodeUtil.getPrototypeClassName(qName).getQualifiedName();
  }
  // In case we haven't seen the function yet.
  recordClassConstructorOrInterface(
      className, classType, null, null);
  String qNameStr = className + ".prototype." +
      NodeUtil.getPrototypePropertyName(qName);
  Name prototypeProp = graph.defineNameIfNotExists(qNameStr, isExtern);
  Preconditions.checkNotNull(prototypeProp,
      "%s should be in the name graph as a node.", qNameStr);
  if (assign != null) {
    prototypeProp.addAssignmentDeclaration(assign);
  }
  prototypeProp.setType(type);
  return prototypeProp;
}
/**
 * Records a use of a static name, connecting the containing function to the
 * used name in the graph.
 *
 * @return the recorded Reference, or null for uses inside externs
 */
private Reference recordStaticNameUse(Node n) {
  // References inside externs are declarations, not uses.
  if (isExtern) {
    return null;
  }
  Reference use = new Reference(n);
  Name target = graph.defineNameIfNotExists(n.getQualifiedName(), isExtern);
  target.setType(getType(n));
  graph.connect(getNamedContainingFunction(), use, target);
  return use;
}
/**
 * Records a use of a prototype property (e.g. {@code x.foo}), connecting the
 * reference to the matching property on the superclass chain and on all
 * subclasses, since any of those implementations may be the one invoked.
 */
private void recordPrototypePropUse(Node n) {
  Preconditions.checkArgument(n.isGetProp());
  Node instance = n.getFirstChild();
  JSType instanceType = getType(instance);
  // Primitive receivers behave like their boxed object type.
  JSType boxedType = instanceType.autoboxesTo();
  instanceType = boxedType != null ? boxedType : instanceType;
  // Retrieves the property.
  ObjectType objType = instanceType.toObjectType();
  Preconditions.checkState(objType != null);
  if (!isExtern) {
    // References in externs are not counted as uses.
    Reference ref = new Reference(n);
    FunctionType constructor = objType.getConstructor();
    if (constructor != null) {
      String propName = n.getLastChild().getString();
      // If this class does not own the property, the implementation lives
      // somewhere up the superclass chain.
      if (!constructor.getPrototype().hasOwnProperty(propName)) {
        recordSuperClassPrototypePropUse(constructor, propName, ref);
      }
      // TODO(user): TightenType can help a whole lot here.
      recordSubclassPrototypePropUse(constructor, propName, ref);
    } else {
      // No constructor: we cannot resolve the owner statically.
      recordUnknownUse(n);
    }
  }
}
/**
 * Walks up the superclass chain and connects the reference to the first
 * ancestor that owns an implementation of {@code prop}.
 */
private void recordSuperClassPrototypePropUse(
    FunctionType classType, String prop, Reference ref) {
  for (FunctionType ancestor = classType.getSuperClassConstructor();
      ancestor != null;
      ancestor = ancestor.getSuperClassConstructor()) {
    if (ancestor.getPrototype().hasOwnProperty(prop)) {
      graph.connect(getNamedContainingFunction(), ref,
          graph.defineNameIfNotExists(
              ancestor.getReferenceName() + ".prototype." + prop, false));
      return;
    }
  }
}
/**
 * Conservatively assumes that all subclass implementations of this property
 * might be called.
 */
private void recordSubclassPrototypePropUse(
    FunctionType classType, String prop, Reference ref) {
  // Connect to this class's own implementation, if it has one.
  if (classType.getPrototype().hasOwnProperty(prop)) {
    graph.connect(getNamedContainingFunction(), ref,
        graph.defineNameIfNotExists(
            classType.getReferenceName() + ".prototype." + prop, false));
  }
  // Recurse into every known subclass.
  if (classType.getSubTypes() != null) {
    for (FunctionType subclass : classType.getSubTypes()) {
      recordSubclassPrototypePropUse(subclass, prop, ref);
    }
  }
}
/**
 * Records a property use whose owner could not be resolved; it is matched
 * against all candidate names later (see connectUnknowns).
 */
private void recordUnknownUse(Node n) {
  if (isExtern) {
    // Don't count references in externs as uses.
    return;
  }
  Preconditions.checkArgument(n.isGetProp());
  unknownNameUse.put(n.getLastChild().getString(),
      new NameUse(getNamedContainingFunction(), new Reference(n)));
}
/**
 * Creates the name in the graph if it does not already exist. Also puts all
 * the properties and prototype properties of this name in the graph.
 *
 * @param name qualified name of the constructor or interface
 * @param type its function type (must be a constructor or interface)
 * @param n declaration node (ASSIGN or FUNCTION), may be null
 * @param rhs right-hand side providing the type, may be null
 */
private Name recordClassConstructorOrInterface(
    String name, FunctionType type, @Nullable Node n, @Nullable Node rhs) {
  Preconditions.checkArgument(type.isConstructor() || type.isInterface());
  Name symbol = graph.defineNameIfNotExists(name, isExtern);
  if (rhs != null) {
    // TODO(user): record the definition.
    symbol.setType(getType(rhs));
    // NOTE(review): assumes n is non-null whenever rhs is non-null —
    // confirm at call sites.
    if (n.isAssign()) {
      symbol.addAssignmentDeclaration(n);
    } else {
      symbol.addFunctionDeclaration(n);
    }
  }
  // Register every prototype property the type already declares.
  ObjectType prototype = type.getPrototype();
  for (String prop : prototype.getOwnPropertyNames()) {
    graph.defineNameIfNotExists(
        name + ".prototype." + prop, isExtern);
  }
  return symbol;
}
}
/**
 * Connects every recorded unknown property use to every graph name whose
 * property name matches it.
 */
private void connectUnknowns() {
  for (GraphNode<Name, Reference> graphNode : graph.getNodes()) {
    Name candidate = graphNode.getValue();
    String prop = candidate.getPropertyName();
    if (prop == null) {
      continue;
    }
    Collection<NameUse> matchingUses = unknownNameUse.get(prop);
    if (matchingUses == null) {
      continue;
    }
    for (NameUse use : matchingUses) {
      graph.connect(use.name, use.reference, candidate);
    }
  }
}
/**
 * A helper to retrieve the type of a node.
 *
 * @return the node's type with null/undefined stripped; UNKNOWN_TYPE when the
 *     node has no type and CONSERVATIVE is off
 */
private JSType getType(Node n) {
  JSType type = n.getJSType();
  if (type == null) {
    if (CONSERVATIVE) {
      // In conservative mode a missing type is a hard error.
      throw new RuntimeException("Type system failed us :(");
    } else {
      return compiler.getTypeIRegistry().getNativeType(JSTypeNative.UNKNOWN_TYPE);
    }
  }
  // Null-ability does not affect the name graph's result.
  return type.restrictByNotNullOrUndefined();
}
/**
 * Mark the provided node as the current function that we are analyzing,
 * and add it to the stack of scopes we are inside.
 *
 * @param functionNode node representing current function.
 */
private void pushContainingFunction(Name functionNode) {
  currentFunctionStack.add(functionNode);
}
/**
 * Remove the top item off the containing function stack, and restore the
 * previous containing scope to be the current containing function.
 */
private void popContainingFunction() {
  currentFunctionStack.remove(currentFunctionStack.size() - 1);
}
/**
 * Find the first containing function that's not a function expression
 * closure, scanning the scope stack from innermost to outermost.
 */
private Name getNamedContainingFunction() {
  Name containing = null;
  for (int i = currentFunctionStack.size() - 1; i >= 0; i--) {
    Name candidate = currentFunctionStack.get(i);
    if (candidate != graph.unknown) {
      containing = candidate;
      break;
    }
  }
  // There must always be at least one named enclosing function.
  Preconditions.checkNotNull(containing);
  return containing;
}
/** Pairs the function a use occurs in with the reference node itself. */
private static class NameUse {
  // Function containing the use.
  private final Name name;
  // The referencing node.
  private final Reference reference;
  private NameUse(Name name, Reference reference) {
    this.name = name;
    this.reference = reference;
  }
}
}
| |
package germ.gui.windows;
import germ.i18n.Messages;
import java.awt.Container;
import java.awt.GraphicsEnvironment;
import java.awt.GridBagConstraints;
import java.awt.Insets;
import java.awt.Point;
import javax.swing.JLabel;
import javax.swing.JTextField;
/**
 * Window that displays the current properties of a Stakeholder.
 * (Original comment: "Klasa predstavlja prozor za prikaz trenutnih svojstava
 * Stakeholder-a".)
 */
@SuppressWarnings("serial")
public class StakeholderProperties extends PropertyWindow {
	private JLabel lblSurname = new JLabel(Messages.getString("StakeholderProperties.0")); //$NON-NLS-1$
	private JLabel lblPosition = new JLabel(Messages.getString("StakeholderProperties.1")); //$NON-NLS-1$
	private JLabel lblCompany = new JLabel(Messages.getString("StakeholderProperties.2")); //$NON-NLS-1$
	private JLabel lblAdress = new JLabel(Messages.getString("StakeholderProperties.3")); //$NON-NLS-1$
	private JLabel lblTelephone = new JLabel(Messages.getString("StakeholderProperties.4")); //$NON-NLS-1$
	private JLabel lblMail = new JLabel(Messages.getString("StakeholderProperties.5")); //$NON-NLS-1$
	private JTextField tfSurname = new JTextField(20);
	private JTextField tfPosition = new JTextField(20);
	private JTextField tfCompany = new JTextField(20);
	private JTextField tfAdress = new JTextField(20);
	private JTextField tfTelephone = new JTextField(20);
	private JTextField tfMail = new JTextField(20);
	private JLabel lblCreationDate = new JLabel(Messages.getString("StakeholderProperties.6")); //$NON-NLS-1$
	private JLabel lblDateCreated = new JLabel(""); //$NON-NLS-1$
	private JLabel lblLastChangeDAte = new JLabel(Messages.getString("StakeholderProperties.8")); //$NON-NLS-1$
	private JLabel lblDateChanged = new JLabel(""); //$NON-NLS-1$

	public StakeholderProperties() {
		super();
		Point center = GraphicsEnvironment.getLocalGraphicsEnvironment().getCenterPoint();
		setTitle(Messages.getString("StakeholderProperties.10")); //$NON-NLS-1$
		setDefaultCloseOperation(HIDE_ON_CLOSE);
		Container container = getContentPane();

		// Label/text-field rows all share the same layout pattern; build their
		// constraints with helpers instead of repeating the GridBagConstraints
		// boilerplate for every row.
		JLabel[] labels = {lblSurname, lblPosition, lblCompany, lblAdress, lblTelephone, lblMail};
		JTextField[] fields = {tfSurname, tfPosition, tfCompany, tfAdress, tfTelephone, tfMail};
		int[] rows = {1, 5, 6, 7, 8, 9}; // grid rows used by the original layout (2-4 unused)
		for (int i = 0; i < labels.length; i++) {
			container.add(labels[i], labelConstraints(rows[i]));
			container.add(fields[i], fieldConstraints(rows[i]));
		}

		// Read-only creation/modification date rows.
		container.add(lblCreationDate, labelConstraints(10));
		container.add(lblDateCreated, dateConstraints(10));
		container.add(lblLastChangeDAte, labelConstraints(11));
		container.add(lblDateChanged, dateConstraints(11));

		// OK/Cancel button row, centered and spanning both columns.
		GridBagConstraints buttonRow = new GridBagConstraints();
		buttonRow.gridx = 0;
		buttonRow.gridy = 12;
		buttonRow.weightx = 0;
		buttonRow.gridwidth = 2;
		buttonRow.anchor = GridBagConstraints.CENTER;
		buttonRow.insets = new Insets(35, 0, 10, 0);
		container.add(okCancelBox, buttonRow);

		this.pack();
		setLocation(center.x - getSize().width / 2, center.y - getSize().height / 2);
	}

	/** Constraints for a caption label in column 0 (right-aligned). */
	private static GridBagConstraints labelConstraints(int row) {
		GridBagConstraints c = new GridBagConstraints();
		c.gridx = 0;
		c.gridy = row;
		c.anchor = GridBagConstraints.EAST;
		c.insets = new Insets(10, 20, 0, 0);
		return c;
	}

	/** Constraints for an editable text field in column 1 (stretches horizontally). */
	private static GridBagConstraints fieldConstraints(int row) {
		GridBagConstraints c = new GridBagConstraints();
		c.gridx = 1;
		c.gridy = row;
		c.weightx = 1;
		c.fill = GridBagConstraints.HORIZONTAL;
		c.anchor = GridBagConstraints.WEST;
		c.insets = new Insets(10, 20, 0, 20);
		return c;
	}

	/** Constraints for a read-only date label in column 1 (no stretch). */
	private static GridBagConstraints dateConstraints(int row) {
		GridBagConstraints c = new GridBagConstraints();
		c.gridx = 1;
		c.gridy = row;
		c.anchor = GridBagConstraints.WEST;
		c.insets = new Insets(10, 20, 0, 0);
		return c;
	}

	// NOTE: the "Adress" spelling is kept in the public accessors below for
	// backward compatibility with existing callers.

	public String getSurname() {
		return tfSurname.getText();
	}

	public void setSurname(String tfSurname) {
		this.tfSurname.setText(tfSurname);
	}

	public String getPosition() {
		return tfPosition.getText();
	}

	public void setPosition(String tfPosition) {
		this.tfPosition.setText(tfPosition);
	}

	public String getCompany() {
		return tfCompany.getText();
	}

	public void setCompany(String tfCompany) {
		this.tfCompany.setText(tfCompany);
	}

	public String getAdress() {
		return tfAdress.getText();
	}

	public void setAdress(String tfAdress) {
		this.tfAdress.setText(tfAdress);
	}

	public String getTelephone() {
		return tfTelephone.getText();
	}

	public void setTelephone(String tfTelephone) {
		this.tfTelephone.setText(tfTelephone);
	}

	public String getMail() {
		return tfMail.getText();
	}

	public void setMail(String tfMail) {
		this.tfMail.setText(tfMail);
	}

	public void setDateCreated(String dateCreated) {
		this.lblDateCreated.setText(dateCreated);
	}

	public void setDateChanged(String dateChanged) {
		this.lblDateChanged.setText(dateChanged);
	}
}
| |
/* ========================================================================
* PlantUML : a free UML diagram generator
* ========================================================================
*
* (C) Copyright 2009-2017, Arnaud Roques
*
* Project Info: http://plantuml.com
*
* This file is part of PlantUML.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* Original Author: Arnaud Roques
*/
package net.sourceforge.plantuml.graphic;
import java.util.EnumSet;
import java.util.Map;
import net.sourceforge.plantuml.FontParam;
import net.sourceforge.plantuml.ISkinParam;
import net.sourceforge.plantuml.SkinParamUtils;
import net.sourceforge.plantuml.cucadiagram.Stereotype;
import net.sourceforge.plantuml.graphic.color.ColorType;
import net.sourceforge.plantuml.graphic.color.Colors;
import net.sourceforge.plantuml.ugraphic.UFont;
/**
 * Immutable description of how a run of text is rendered: current font,
 * colors, style flags (bold/italic/underline...), sub/superscript position,
 * extra SVG attributes and hyperlink state.  The "mother" font and color keep
 * the initial configuration so that {@link #resetFont()} can restore it.
 * Every mutator returns a new instance; this class is never modified in place.
 */
public class FontConfiguration {
	// Active style flags (bold, italic, underline, ...).
	private final EnumSet<FontStyle> styles;
	private final UFont currentFont;
	// Font/color the configuration started with; restored by resetFont().
	private final UFont motherFont;
	private final HtmlColor motherColor;
	private final HtmlColor hyperlinkColor;
	private final HtmlColor currentColor;
	private final HtmlColor extendedColor;
	// Normal, subscript or superscript rendering.
	private final FontPosition fontPosition;
	private final SvgAttributes svgAttributes;
	// Whether this text is currently inside a hyperlink.
	private final boolean hyperlink;
	private final boolean useUnderlineForHyperlink;
	private final int tabSize;
	/** Convenience constructor with the default tab size of 8. */
	public FontConfiguration(UFont font, HtmlColor color, HtmlColor hyperlinkColor, boolean useUnderlineForHyperlink) {
		this(font, color, hyperlinkColor, useUnderlineForHyperlink, 8);
	}
	/** Full constructor; mother and current font/color start out identical. */
	public FontConfiguration(UFont font, HtmlColor color, HtmlColor hyperlinkColor, boolean useUnderlineForHyperlink,
			int tabSize) {
		this(getStyles(font), font, color, font, color, null, FontPosition.NORMAL, new SvgAttributes(), false,
				hyperlinkColor, useUnderlineForHyperlink, tabSize);
	}
	/** Factory: black text, blue underlined hyperlinks, tab size 8. */
	public static FontConfiguration blackBlueTrue(UFont font) {
		return new FontConfiguration(font, HtmlColorUtils.BLACK, HtmlColorUtils.BLUE, true, 8);
	}
	/** Builds a configuration from the skin parameters for a given font role. */
	public FontConfiguration(ISkinParam skinParam, FontParam fontParam, Stereotype stereo) {
		this(SkinParamUtils.getFont(skinParam, fontParam, stereo), SkinParamUtils.getFontColor(skinParam, fontParam,
				stereo), skinParam.getHyperlinkColor(), skinParam.useUnderlineForHyperlink(), skinParam.getTabSize());
	}
	// ---
	public final boolean useUnderlineForHyperlink() {
		return useUnderlineForHyperlink;
	}
	public final HtmlColor getHyperlinkColor() {
		return hyperlinkColor;
	}
	// ---
	// Derives the initial style set from the font's own bold/italic flags.
	private static EnumSet<FontStyle> getStyles(UFont font) {
		final boolean bold = font.isBold();
		final boolean italic = font.isItalic();
		if (bold && italic) {
			return EnumSet.of(FontStyle.ITALIC, FontStyle.BOLD);
		}
		if (bold) {
			return EnumSet.of(FontStyle.BOLD);
		}
		if (italic) {
			return EnumSet.of(FontStyle.ITALIC);
		}
		return EnumSet.noneOf(FontStyle.class);
	}
	@Override
	public String toString() {
		return styles.toString() + " " + currentColor;
	}
	// Canonical constructor used by all the "wither" methods below.  Note the
	// argument order: mother font/color come before current font/color.
	private FontConfiguration(EnumSet<FontStyle> styles, UFont motherFont, HtmlColor motherColor, UFont currentFont,
			HtmlColor currentColor, HtmlColor extendedColor, FontPosition fontPosition, SvgAttributes svgAttributes,
			boolean hyperlink, HtmlColor hyperlinkColor, boolean useUnderlineForHyperlink, int tabSize) {
		this.styles = styles;
		this.currentFont = currentFont;
		this.motherFont = motherFont;
		this.currentColor = currentColor;
		this.motherColor = motherColor;
		this.extendedColor = extendedColor;
		this.fontPosition = fontPosition;
		this.svgAttributes = svgAttributes;
		this.hyperlink = hyperlink;
		this.hyperlinkColor = hyperlinkColor;
		this.useUnderlineForHyperlink = useUnderlineForHyperlink;
		this.tabSize = tabSize;
	}
	// Replaces both mother and current font; a null font falls back to
	// italicizing the current configuration instead.
	// NOTE(review): the italic fallback for null looks intentional (marking a
	// missing font substitution) — confirm.
	public FontConfiguration forceFont(UFont newFont, HtmlColor htmlColorForStereotype) {
		if (newFont == null) {
			return add(FontStyle.ITALIC);
		}
		FontConfiguration result = new FontConfiguration(styles, newFont, motherColor, newFont, currentColor,
				extendedColor, fontPosition, svgAttributes, hyperlink, hyperlinkColor, useUnderlineForHyperlink,
				tabSize);
		if (htmlColorForStereotype != null) {
			result = result.changeColor(htmlColorForStereotype);
		}
		return result;
	}
	/** Returns a copy with the given SVG attributes merged in. */
	public FontConfiguration changeAttributes(SvgAttributes toBeAdded) {
		return new FontConfiguration(styles, motherFont, motherColor, currentFont, currentColor, extendedColor,
				fontPosition, svgAttributes.add(toBeAdded), hyperlink, hyperlinkColor, useUnderlineForHyperlink,
				tabSize);
	}
	// Returns a copy flagged as being inside a hyperlink.
	private FontConfiguration withHyperlink() {
		return new FontConfiguration(styles, motherFont, motherColor, currentFont, currentColor, extendedColor,
				fontPosition, svgAttributes, true, hyperlinkColor, useUnderlineForHyperlink, tabSize);
	}
	/** Returns a copy with the given current text color. */
	public FontConfiguration changeColor(HtmlColor htmlColor) {
		return new FontConfiguration(styles, motherFont, motherColor, currentFont, htmlColor, extendedColor,
				fontPosition, svgAttributes, hyperlink, hyperlinkColor, useUnderlineForHyperlink, tabSize);
	}
	/** Applies the TEXT color from {@code colors}, if one is defined. */
	public FontConfiguration mute(Colors colors) {
		if (colors == null) {
			throw new IllegalArgumentException();
		}
		final HtmlColor color = colors.getColor(ColorType.TEXT);
		if (color == null) {
			return this;
		}
		return changeColor(color);
	}
	FontConfiguration changeExtendedColor(HtmlColor newExtendedColor) {
		return new FontConfiguration(styles, motherFont, motherColor, currentFont, currentColor, newExtendedColor,
				fontPosition, svgAttributes, hyperlink, hyperlinkColor, useUnderlineForHyperlink, tabSize);
	}
	/** Returns a copy with the current font resized to {@code size} points. */
	public FontConfiguration changeSize(float size) {
		return new FontConfiguration(styles, motherFont, motherColor, currentFont.deriveSize(size), currentColor,
				extendedColor, fontPosition, svgAttributes, hyperlink, hyperlinkColor, useUnderlineForHyperlink,
				tabSize);
	}
	/** Returns a copy with the current font size increased by {@code delta}. */
	public FontConfiguration bigger(double delta) {
		return changeSize((float) (currentFont.getSize() + delta));
	}
	/** Returns a copy rendered at the given position (normal/sub/superscript). */
	public FontConfiguration changeFontPosition(FontPosition fontPosition) {
		return new FontConfiguration(styles, motherFont, motherColor, currentFont, currentColor, extendedColor,
				fontPosition, svgAttributes, hyperlink, hyperlinkColor, useUnderlineForHyperlink, tabSize);
	}
	/** Returns a copy using the given font family at the current style/size. */
	public FontConfiguration changeFamily(String family) {
		return new FontConfiguration(styles, motherFont, motherColor, new UFont(family, currentFont.getStyle(),
				currentFont.getSize()), currentColor, extendedColor, fontPosition, svgAttributes, hyperlink,
				hyperlinkColor, useUnderlineForHyperlink, tabSize);
	}
	/** Restores the mother font/color and clears position and SVG attributes. */
	public FontConfiguration resetFont() {
		return new FontConfiguration(styles, motherFont, motherColor, motherFont, motherColor, null,
				FontPosition.NORMAL, new SvgAttributes(), hyperlink, hyperlinkColor, useUnderlineForHyperlink, tabSize);
	}
	// Returns a copy with the given style flag added (set is cloned).
	FontConfiguration add(FontStyle style) {
		final EnumSet<FontStyle> r = styles.clone();
		r.add(style);
		return new FontConfiguration(r, motherFont, motherColor, currentFont, currentColor, extendedColor,
				fontPosition, svgAttributes, hyperlink, hyperlinkColor, useUnderlineForHyperlink, tabSize);
	}
	public FontConfiguration italic() {
		return add(FontStyle.ITALIC);
	}
	public FontConfiguration bold() {
		return add(FontStyle.BOLD);
	}
	public FontConfiguration underline() {
		return add(FontStyle.UNDERLINE);
	}
	/** Enters hyperlink mode, underlining when the skin asks for it. */
	public FontConfiguration hyperlink() {
		if (useUnderlineForHyperlink) {
			return add(FontStyle.UNDERLINE).withHyperlink();
		}
		return withHyperlink();
	}
	// Returns a copy with the given style flag removed (set is cloned).
	FontConfiguration remove(FontStyle style) {
		final EnumSet<FontStyle> r = styles.clone();
		r.remove(style);
		return new FontConfiguration(r, motherFont, motherColor, currentFont, currentColor, extendedColor,
				fontPosition, svgAttributes, hyperlink, hyperlinkColor, useUnderlineForHyperlink, tabSize);
	}
	/** The effective font: current font with styles and position applied. */
	public UFont getFont() {
		UFont result = currentFont;
		for (FontStyle style : styles) {
			result = style.mutateFont(result);
		}
		return fontPosition.mute(result);
	}
	/** The effective color: hyperlink color wins while inside a hyperlink. */
	public HtmlColor getColor() {
		if (hyperlink) {
			return hyperlinkColor;
		}
		return currentColor;
	}
	public HtmlColor getExtendedColor() {
		return extendedColor;
	}
	public boolean containsStyle(FontStyle style) {
		return styles.contains(style);
	}
	public int getSpace() {
		return fontPosition.getSpace();
	}
	public Map<String, String> getAttributes() {
		return svgAttributes.attributes();
	}
	public double getSize2D() {
		return currentFont.getSize2D();
	}
	public int getTabSize() {
		return tabSize;
	}
}
| |
package com.jayway.jsonpath.internal.filter;
import com.jayway.jsonpath.InvalidPathException;
import com.jayway.jsonpath.JsonPathException;
import com.jayway.jsonpath.Predicate;
import com.jayway.jsonpath.internal.Path;
import com.jayway.jsonpath.internal.path.PathCompiler;
import net.minidev.json.parser.JSONParser;
import java.time.OffsetDateTime;
import java.util.regex.Pattern;
import static com.jayway.jsonpath.internal.filter.ValueNodes.*;
public abstract class ValueNode {
public abstract Class<?> type(Predicate.PredicateContext ctx);
public boolean isPatternNode() {
return false;
}
public PatternNode asPatternNode() {
throw new InvalidPathException("Expected regexp node");
}
public boolean isPathNode() {
return false;
}
public PathNode asPathNode() {
throw new InvalidPathException("Expected path node");
}
public boolean isNumberNode() {
return false;
}
public NumberNode asNumberNode() {
throw new InvalidPathException("Expected number node");
}
public boolean isStringNode() {
return false;
}
public StringNode asStringNode() {
throw new InvalidPathException("Expected string node");
}
public boolean isBooleanNode() {
return false;
}
public BooleanNode asBooleanNode() {
throw new InvalidPathException("Expected boolean node");
}
public boolean isJsonNode() {
return false;
}
public JsonNode asJsonNode() {
throw new InvalidPathException("Expected json node");
}
public boolean isPredicateNode() {
return false;
}
public PredicateNode asPredicateNode() {
throw new InvalidPathException("Expected predicate node");
}
public boolean isValueListNode() {
return false;
}
public ValueListNode asValueListNode() {
throw new InvalidPathException("Expected value list node");
}
public boolean isNullNode() {
return false;
}
public NullNode asNullNode() {
throw new InvalidPathException("Expected null node");
}
public UndefinedNode asUndefinedNode() {
throw new InvalidPathException("Expected undefined node");
}
public boolean isUndefinedNode() {
return false;
}
public boolean isClassNode() {
return false;
}
public ClassNode asClassNode() {
throw new InvalidPathException("Expected class node");
}
//workaround for issue: https://github.com/json-path/JsonPath/issues/613
public boolean isOffsetDateTimeNode(){
return false;
}
public OffsetDateTimeNode asOffsetDateTimeNode(){
throw new InvalidPathException("Expected offsetDateTime node");
}
private static boolean isPath(Object o) {
if(o == null || !(o instanceof String)){
return false;
}
String str = o.toString().trim();
if (str.length() <= 0) {
return false;
}
char c0 = str.charAt(0);
if(c0 == '@' || c0 == '$'){
try {
PathCompiler.compile(str);
return true;
} catch(Exception e){
return false;
}
}
return false;
}
private static boolean isJson(Object o) {
if(o == null || !(o instanceof String)){
return false;
}
String str = o.toString().trim();
if (str.length() <= 1) {
return false;
}
char c0 = str.charAt(0);
char c1 = str.charAt(str.length() - 1);
if ((c0 == '[' && c1 == ']') || (c0 == '{' && c1 == '}')){
try {
new JSONParser(JSONParser.MODE_PERMISSIVE).parse(str);
return true;
} catch(Exception e){
return false;
}
}
return false;
}
//----------------------------------------------------
//
// Factory methods
//
//----------------------------------------------------
public static ValueNode toValueNode(Object o){
if(o == null) return NULL_NODE;
if(o instanceof ValueNode) return (ValueNode)o;
if(o instanceof Class) return createClassNode((Class)o);
else if(isPath(o)) return new PathNode(o.toString(), false, false);
else if(isJson(o)) return createJsonNode(o.toString());
else if(o instanceof String) return createStringNode(o.toString(), true);
else if(o instanceof Character) return createStringNode(o.toString(), false);
else if(o instanceof Number) return createNumberNode(o.toString());
else if(o instanceof Boolean) return createBooleanNode(o.toString());
else if(o instanceof Pattern) return createPatternNode((Pattern)o);
else if (o instanceof OffsetDateTime) return createOffsetDateTimeNode(o.toString()); //workaround for issue: https://github.com/json-path/JsonPath/issues/613
else throw new JsonPathException("Could not determine value type");
}
public static StringNode createStringNode(CharSequence charSequence, boolean escape){
return new StringNode(charSequence, escape);
}
public static ClassNode createClassNode(Class<?> clazz){
return new ClassNode(clazz);
}
public static NumberNode createNumberNode(CharSequence charSequence){
return new NumberNode(charSequence);
}
public static BooleanNode createBooleanNode(CharSequence charSequence){
return Boolean.parseBoolean(charSequence.toString()) ? TRUE : FALSE;
}
public static NullNode createNullNode(){
return NULL_NODE;
}
public static JsonNode createJsonNode(CharSequence json) {
return new JsonNode(json);
}
public static JsonNode createJsonNode(Object parsedJson) {
return new JsonNode(parsedJson);
}
public static PatternNode createPatternNode(CharSequence pattern) {
return new PatternNode(pattern);
}
public static PatternNode createPatternNode(Pattern pattern) {
return new PatternNode(pattern);
}
//workaround for issue: https://github.com/json-path/JsonPath/issues/613
public static OffsetDateTimeNode createOffsetDateTimeNode(CharSequence charSequence){
return new OffsetDateTimeNode(charSequence);
}
public static UndefinedNode createUndefinedNode() {
return UNDEFINED;
}
public static PathNode createPathNode(CharSequence path, boolean existsCheck, boolean shouldExists) {
return new PathNode(path, existsCheck, shouldExists);
}
public static ValueNode createPathNode(Path path) {
return new PathNode(path);
}
}
| |
package net.fortytwo.twitlogic.util.properties;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Properties;
/**
* @author Joshua Shinavier (http://fortytwo.net).
*/
public class TypedProperties extends Properties {
private final DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ");
/**
 * Creates a TypedProperties with the given defaults.
 *
 * @param defaults fallback properties consulted when a key is absent
 */
public TypedProperties(final Properties defaults) {
    super(defaults);
}
/** Creates an empty TypedProperties with no defaults. */
public TypedProperties() {
    super();
}
/**
 * Looks up a property and trims surrounding whitespace.
 *
 * @param name the property key
 * @param required whether absence of the key is an error
 * @return the trimmed value, or null if absent and not required
 * @throws PropertyValueNotFoundException if required and the key is absent
 */
private String getProperty(final String name,
                           final boolean required) throws PropertyException {
    String s = getProperty(name);
    if (null == s) {
        if (required) {
            throw new PropertyValueNotFoundException(name);
        }
    } else {
        s = s.trim();
    }
    return s;
}
// String values ///////////////////////////////////////////////////////////
/**
 * Returns the trimmed value of a required string property.
 *
 * @throws PropertyValueNotFoundException if the property is absent
 */
public String getString(final String name) throws PropertyException {
    return getProperty(name, true);
}
/**
 * Returns the trimmed value of an optional string property, falling back to
 * {@code defaultValue} when the key is absent.
 */
public String getString(final String name,
                        final String defaultValue) throws PropertyException {
    String value = getProperty(name, false);
    if (null == value) {
        return defaultValue;
    }
    return value;
}
/**
 * Sets a string property; a null value removes the key instead.
 */
public void setString(final String name,
                      final String value) {
    if (value != null) {
        setProperty(name, value);
    } else {
        remove(name);
    }
}
// boolean values //////////////////////////////////////////////////////////
/**
 * Returns a required boolean property; any value other than exactly "true"
 * (case-sensitive, after trimming) is treated as false.
 */
public boolean getBoolean(final String name) throws PropertyException {
    String value = getProperty(name, true);
    return "true".equals(value);
}
/**
 * Returns an optional boolean property; absent keys yield the default, and
 * any present value other than exactly "true" is treated as false.
 */
public boolean getBoolean(final String name,
                          final boolean defaultValue) throws PropertyException {
    String value = getProperty(name, false);
    if (null == value) {
        return defaultValue;
    }
    return "true".equals(value);
}
/** Stores a boolean property as the string "true" or "false". */
public void setBoolean(final String name,
                       final boolean value) {
    setProperty(name, Boolean.toString(value));
}
// double values ///////////////////////////////////////////////////////////
/**
 * Returns a required property parsed as a double.
 *
 * @throws PropertyValueNotFoundException if the property is absent
 * @throws InvalidPropertyValueException if the value is not a valid double
 */
public double getDouble(final String name) throws PropertyException {
    String value = getProperty(name, true);
    try {
        // parseDouble avoids the deprecated Double(String) boxing constructor.
        return Double.parseDouble(value);
    }
    catch (NumberFormatException e) {
        throw new InvalidPropertyValueException(name, e);
    }
}
public double getDouble(final String name,
final double defaultValue) throws PropertyException {
String value = getProperty(name, false);
try {
return (null == value)
? defaultValue
: new Double(value);
}
catch (NumberFormatException e) {
throw new InvalidPropertyValueException(name, e);
}
}
public void setDouble(final String name, final double value) {
setProperty(name, "" + value);
}
// float values ////////////////////////////////////////////////////////////
public float getFloat(final String name) throws PropertyException {
String value = getProperty(name, true);
try {
return new Float(value);
}
catch (NumberFormatException e) {
throw new InvalidPropertyValueException(name, e);
}
}
public float getFloat(final String name, final float defaultValue) throws PropertyException {
String value = getProperty(name, false);
try {
return (null == value)
? defaultValue
: new Float(value);
}
catch (NumberFormatException e) {
throw new InvalidPropertyValueException(name, e);
}
}
public void setFloat(final String name, final float value) {
setProperty(name, "" + value);
}
// int values //////////////////////////////////////////////////////////////
public int getInt(final String name) throws PropertyException {
String value = getProperty(name, true);
try {
return new Integer(value);
}
catch (NumberFormatException e) {
throw new InvalidPropertyValueException(name, e);
}
}
public int getInt(final String name, final int defaultValue) throws PropertyException {
String value = getProperty(name, false);
try {
return (null == value)
? defaultValue
: new Integer(value);
}
catch (NumberFormatException e) {
throw new InvalidPropertyValueException(name, e);
}
}
public void setInt(final String name, final int value) {
setProperty(name, "" + value);
}
// long values /////////////////////////////////////////////////////////////
public long getLong(final String name) throws PropertyException {
String value = getProperty(name, true);
try {
return new Long(value);
}
catch (NumberFormatException e) {
throw new InvalidPropertyValueException(name, e);
}
}
public long getLong(final String name, final long defaultValue) throws PropertyException {
String value = getProperty(name, false);
try {
return (null == value)
? defaultValue
: new Long(value);
}
catch (NumberFormatException e) {
throw new InvalidPropertyValueException(name, e);
}
}
public void setLong(final String name, final long value) {
setProperty(name, "" + value);
}
// URI values //////////////////////////////////////////////////////////////
public URI getURI(final String name) throws PropertyException {
String value = getProperty(name, true);
try {
return new URI(value);
}
catch (URISyntaxException e) {
throw new InvalidPropertyValueException(name, e);
}
}
public URI getURI(final String name, final URI defaultValue) throws PropertyException {
String value = getProperty(name, false);
try {
return (null == value)
? defaultValue
: new URI(value);
}
catch (URISyntaxException e) {
throw new InvalidPropertyValueException(name, e);
}
}
public void setURI(final String name, final URI value) {
if (null == value) {
remove(name);
} else {
setProperty(name, value.toString());
}
}
// URL values //////////////////////////////////////////////////////////////
public URL getURL(final String name) throws PropertyException {
String value = getProperty(name, true);
try {
return new URL(value);
}
catch (MalformedURLException e) {
throw new InvalidPropertyValueException(name, e);
}
}
public URL getURL(final String name, final URL defaultValue) throws PropertyException {
String value = getProperty(name, false);
try {
return (null == value)
? defaultValue
: new URL(value);
}
catch (MalformedURLException e) {
throw new InvalidPropertyValueException(name, e);
}
}
public void setURL(final String name, final URL value) {
if (null == value) {
remove(name);
} else {
setProperty(name, value.toString());
}
}
// File values ///////////////////////////////////////////////////////////////
public File getFile(final String name) throws PropertyException {
String value = getProperty(name, true);
return new File(value);
}
public File getFile(final String name, final File defaultValue) throws PropertyException {
String value = getProperty(name, false);
return (null == value)
? defaultValue
: new File(value);
}
public void setFile(final String name, final File value) {
if (null == value) {
remove(name);
} else {
setProperty(name, "" + value.getAbsolutePath());
}
}
// Date values ///////////////////////////////////////////////////////////////
public Date getDate(final String name) throws PropertyException {
String value = getProperty(name, true);
try {
return dateFormat.parse(value);
}
catch (ParseException e) {
throw new InvalidPropertyValueException(name, e);
}
}
public Date getDate(final String name, final Date defaultValue) throws PropertyException {
String value = getProperty(name, false);
try {
return null == value
? defaultValue
: dateFormat.parse(value);
}
catch (ParseException e) {
throw new InvalidPropertyValueException(name, e);
}
}
public void setDate(final String name, final Date value) {
if (null == value) {
remove(name);
} else {
setProperty(name, "" + dateFormat.format(value));
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.tests.unit.core.postoffice.impl;
import org.apache.activemq.artemis.tests.util.UnitTestCase;
import org.junit.Test;
import org.junit.Assert;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.core.postoffice.Address;
import org.apache.activemq.artemis.core.postoffice.impl.AddressImpl;
public class AddressImplTest extends UnitTestCase
{
   /** Wraps a raw address string into an {@link Address}. */
   private static Address address(final String raw)
   {
      return new AddressImpl(new SimpleString(raw));
   }

   /** Asserts that {@code concrete} matches the (possibly wildcarded) {@code pattern}. */
   private static void assertMatch(final String concrete, final String pattern)
   {
      Assert.assertTrue(address(concrete).matches(address(pattern)));
   }

   /** Asserts that {@code concrete} does NOT match {@code pattern}. */
   private static void assertNoMatch(final String concrete, final String pattern)
   {
      Assert.assertFalse(address(concrete).matches(address(pattern)));
   }

   @Test
   public void testNoDots()
   {
      assertMatch("abcde", "abcde");
   }

   @Test
   public void testDotsSameLength2()
   {
      assertMatch("a.b", "a.b");
   }

   @Test
   public void testA()
   {
      // Pattern is far longer than the address; the single '*' cannot absorb the gap.
      assertNoMatch("a.b.c", "a.b.c.d.e.f.g.h.i.j.k.l.m.n.*");
   }

   @Test
   public void testB()
   {
      assertMatch("a.b.c.d", "a.b.c.*");
      assertNoMatch("a.b.x.e", "a.b.c.*");
   }

   @Test
   public void testC()
   {
      assertMatch("a.b.c.d", "a.b.*.d");
      assertNoMatch("a.b.c.x", "a.b.*.d");
   }

   @Test
   public void testD()
   {
      assertMatch("a.b.c.d.e", "a.b.*.d.*");
      assertNoMatch("a.b.c.x.e", "a.b.*.d.*");
   }

   @Test
   public void testE()
   {
      assertMatch("a.b.c.d.e.f", "a.b.*.d.*.f");
      assertNoMatch("a.b.c.x.e.f", "a.b.*.d.*.f");
   }

   @Test
   public void testF()
   {
      // '#' alone matches anything.
      assertMatch("a.b.c.d.e.f", "#");
      assertMatch("a.b.c.x.e.f", "#");
   }

   @Test
   public void testG()
   {
      assertMatch("a.b.c.d.e.f", "a.#");
      assertMatch("a.b.c.x.e.f", "a.#");
   }

   @Test
   public void testH()
   {
      assertMatch("a.b.c.d.e.f", "#.b.#");
      assertMatch("a.b.c.x.e.f", "#.b.#");
   }

   @Test
   public void testI()
   {
      assertMatch("a.b.c.d.e.f", "a.#.b.#");
      assertMatch("a.b.c.x.e.f", "a.#.b.#");
   }

   @Test
   public void testJ()
   {
      assertMatch("a.b.c.d.e.f", "a.#.c.d.e.f");
      assertNoMatch("a.b.c.x.e.f", "a.#.c.d.e.f");
   }

   @Test
   public void testK()
   {
      assertMatch("a.b.c.d.e.f", "a.#.c.d.e.*");
      assertMatch("a.b.c.d.e.x", "a.#.c.d.e.*");
   }

   @Test
   public void testL()
   {
      assertMatch("a.b.c.d.e.f", "a.#.c.d.*.f");
      assertNoMatch("a.b.c.d.e.x", "a.#.c.d.*.f");
   }

   @Test
   public void testM()
   {
      // A trailing '#' may also match zero remaining words.
      assertMatch("a.b.c", "a.b.c.#");
      assertNoMatch("a.b.x.e", "a.b.c.#");
   }

   @Test
   public void testN()
   {
      assertMatch("usd.stock", "*.stock.#");
      assertNoMatch("a.b.x.e", "*.stock.#");
   }

   @Test
   public void testO()
   {
      assertMatch("a.b.c.d", "a.b.c.*");
      assertNoMatch("a.b.x.e", "a.b.c.*");
   }

   @Test
   public void testP()
   {
      // '#' fused into a word ("c#") is not a wildcard.
      assertNoMatch("a.b.c.d", "a.b.c#");
   }

   @Test
   public void testQ()
   {
      assertNoMatch("a.b.c.d", "#a.b.c");
   }

   @Test
   public void testR()
   {
      assertNoMatch("a.b.c.d", "#*a.b.c");
   }

   @Test
   public void testS()
   {
      // '*' fused into a word ("c*") is not a wildcard.
      assertNoMatch("a.b.c.d", "a.b.c*");
   }

   @Test
   public void testT()
   {
      assertNoMatch("a.b.c.d", "*a.b.c");
   }

   @Test
   public void testU()
   {
      // NOTE(review): identical to testT — possibly meant to exercise a different
      // pattern; confirm the original intent before consolidating.
      assertNoMatch("a.b.c.d", "*a.b.c");
   }
}
| |
/*
* The MIT License (MIT)
*
* Copyright (c) 2014-2015 Sri Harsha Chilakapati
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.shc.silenceengine.math;
import com.shc.silenceengine.graphics.cameras.BaseCamera;
import com.shc.silenceengine.math.geom2d.Polygon;
import com.shc.silenceengine.math.geom3d.Polyhedron;
import com.shc.silenceengine.math.geom3d.Sphere;
/**
* This class represents the Frustum, the volume of the camera view. Contains useful functions to check whether a shape
* exists completely inside, or intersects the view of the camera.
*
* @author Sri Harsha Chilakapati
*/
public class Frustum
{
    // The plane locations in the planes array
    public static final int LEFT = 0;
    public static final int RIGHT = 1;
    public static final int TOP = 2;
    public static final int BOTTOM = 3;
    public static final int NEAR = 4;
    public static final int FAR = 5;

    // The 3D frustum corner locations in the frustumCorners array
    public static final int TOP_LEFT_FAR = 0;
    public static final int TOP_RIGHT_FAR = 1;
    public static final int TOP_RIGHT_NEAR = 2;
    public static final int TOP_LEFT_NEAR = 3;
    public static final int BOTTOM_LEFT_FAR = 4;
    public static final int BOTTOM_RIGHT_FAR = 5;
    public static final int BOTTOM_RIGHT_NEAR = 6;
    public static final int BOTTOM_LEFT_NEAR = 7;

    // The 2D frustum corner locations in the frustumPolygonVertices array
    public static final int TOP_LEFT = 0;
    public static final int TOP_RIGHT = 1;
    public static final int BOTTOM_LEFT = 2;
    public static final int BOTTOM_RIGHT = 3;

    // The array of all six clipping planes of the frustum
    private Plane[] planes;

    // The combined view-projection matrix the planes are extracted from
    private Matrix4 frustumMatrix;

    // The frustum corners, polygon vertices, polygon and polyhedron.
    // The Vector2/Vector3 objects are SHARED with frustumPolygon/frustumPolyhedron,
    // so mutating them in update() implicitly updates those shapes too.
    private Vector3[] frustumCorners;
    private Vector2[] frustumPolygonVertices;

    private Polygon frustumPolygon;
    private Polyhedron frustumPolyhedron;

    /**
     * Creates an uninitialized frustum (identity matrix, zeroed planes and
     * corners). Call {@link #update(BaseCamera)} before using any queries.
     */
    public Frustum()
    {
        // Create the planes array
        planes = new Plane[6];

        for (int i = 0; i < planes.length; i++)
            planes[i] = new Plane();

        // Create the frustum matrix and corners array
        frustumMatrix = new Matrix4().initIdentity();
        frustumCorners = new Vector3[8];

        for (int i = 0; i < 8; i++)
            frustumCorners[i] = new Vector3();

        frustumPolygonVertices = new Vector2[4];

        // Create the frustum polygon; it holds references to the same Vector2
        // objects, so later updates to the vertices are reflected automatically.
        frustumPolygon = new Polygon();

        for (int i = 0; i < 4; i++)
        {
            frustumPolygonVertices[i] = new Vector2();
            frustumPolygon.addVertex(frustumPolygonVertices[i]);
        }

        // Create the frustum polyhedron from the shared corner vectors, so the
        // polyhedron tracks corner updates automatically.
        // NOTE(review): the vertex ordering appears to enumerate the six faces
        // (near, right, far, left, bottom, top) as strips with duplicated seam
        // vertices — confirm against Polyhedron's expected winding.
        frustumPolyhedron = new Polyhedron();
        frustumPolyhedron.addVertex(frustumCorners[BOTTOM_LEFT_NEAR]);
        frustumPolyhedron.addVertex(frustumCorners[BOTTOM_RIGHT_NEAR]);
        frustumPolyhedron.addVertex(frustumCorners[TOP_LEFT_NEAR]);
        frustumPolyhedron.addVertex(frustumCorners[TOP_RIGHT_NEAR]);
        frustumPolyhedron.addVertex(frustumCorners[TOP_RIGHT_NEAR]);
        frustumPolyhedron.addVertex(frustumCorners[BOTTOM_RIGHT_NEAR]);
        frustumPolyhedron.addVertex(frustumCorners[TOP_RIGHT_FAR]);
        frustumPolyhedron.addVertex(frustumCorners[BOTTOM_RIGHT_FAR]);
        frustumPolyhedron.addVertex(frustumCorners[BOTTOM_RIGHT_FAR]);
        frustumPolyhedron.addVertex(frustumCorners[BOTTOM_LEFT_FAR]);
        frustumPolyhedron.addVertex(frustumCorners[TOP_RIGHT_FAR]);
        frustumPolyhedron.addVertex(frustumCorners[TOP_LEFT_FAR]);
        frustumPolyhedron.addVertex(frustumCorners[TOP_LEFT_FAR]);
        frustumPolyhedron.addVertex(frustumCorners[BOTTOM_LEFT_FAR]);
        frustumPolyhedron.addVertex(frustumCorners[TOP_LEFT_NEAR]);
        frustumPolyhedron.addVertex(frustumCorners[BOTTOM_LEFT_NEAR]);
        frustumPolyhedron.addVertex(frustumCorners[BOTTOM_LEFT_NEAR]);
        frustumPolyhedron.addVertex(frustumCorners[BOTTOM_LEFT_FAR]);
        frustumPolyhedron.addVertex(frustumCorners[BOTTOM_RIGHT_NEAR]);
        frustumPolyhedron.addVertex(frustumCorners[BOTTOM_RIGHT_FAR]);
        frustumPolyhedron.addVertex(frustumCorners[BOTTOM_RIGHT_FAR]);
        frustumPolyhedron.addVertex(frustumCorners[TOP_LEFT_NEAR]);
        frustumPolyhedron.addVertex(frustumCorners[TOP_LEFT_NEAR]);
        frustumPolyhedron.addVertex(frustumCorners[TOP_RIGHT_NEAR]);
        frustumPolyhedron.addVertex(frustumCorners[TOP_LEFT_FAR]);
        frustumPolyhedron.addVertex(frustumCorners[TOP_RIGHT_FAR]);
    }

    /**
     * Recomputes the frustum from a camera's projection and view matrices.
     *
     * @param camera the camera whose view volume this frustum should describe
     * @return this frustum, for call chaining
     */
    public Frustum update(BaseCamera camera)
    {
        return update(camera.getProjection(), camera.getView());
    }

    /**
     * Recomputes the planes, corners, 2D polygon and polyhedron of the frustum
     * from the given projection and view matrices.
     *
     * @param projection the projection matrix
     * @param view       the view matrix
     * @return this frustum, for call chaining
     */
    public Frustum update(Matrix4 projection, Matrix4 view)
    {
        // Calculate the combined frustum matrix.
        // NOTE(review): the multiplication order (view then projection) depends on
        // Matrix4.multiplySelf's convention — confirm it yields view-projection.
        frustumMatrix.set(view).multiplySelf(projection);

        // Extract the frustum volume planes as sums/differences of the combined
        // matrix's elements (the standard clip-space plane-extraction method).
        planes[LEFT].set(frustumMatrix.get(0, 3) + frustumMatrix.get(0, 0),
                frustumMatrix.get(1, 3) + frustumMatrix.get(1, 0),
                frustumMatrix.get(2, 3) + frustumMatrix.get(2, 0),
                frustumMatrix.get(3, 3) + frustumMatrix.get(3, 0));

        planes[RIGHT].set(frustumMatrix.get(0, 3) - frustumMatrix.get(0, 0),
                frustumMatrix.get(1, 3) - frustumMatrix.get(1, 0),
                frustumMatrix.get(2, 3) - frustumMatrix.get(2, 0),
                frustumMatrix.get(3, 3) - frustumMatrix.get(3, 0));

        planes[TOP].set(frustumMatrix.get(0, 3) - frustumMatrix.get(0, 1),
                frustumMatrix.get(1, 3) - frustumMatrix.get(1, 1),
                frustumMatrix.get(2, 3) - frustumMatrix.get(2, 1),
                frustumMatrix.get(3, 3) - frustumMatrix.get(3, 1));

        planes[BOTTOM].set(frustumMatrix.get(0, 3) + frustumMatrix.get(0, 1),
                frustumMatrix.get(1, 3) + frustumMatrix.get(1, 1),
                frustumMatrix.get(2, 3) + frustumMatrix.get(2, 1),
                frustumMatrix.get(3, 3) + frustumMatrix.get(3, 1));

        planes[NEAR].set(frustumMatrix.get(0, 3) + frustumMatrix.get(0, 2),
                frustumMatrix.get(1, 3) + frustumMatrix.get(1, 2),
                frustumMatrix.get(2, 3) + frustumMatrix.get(2, 2),
                frustumMatrix.get(3, 3) + frustumMatrix.get(3, 2));

        planes[FAR].set(frustumMatrix.get(0, 3) - frustumMatrix.get(0, 2),
                frustumMatrix.get(1, 3) - frustumMatrix.get(1, 2),
                frustumMatrix.get(2, 3) - frustumMatrix.get(2, 2),
                frustumMatrix.get(3, 3) - frustumMatrix.get(3, 2));

        // Find the corner points of the volume as intersections of three planes
        // each (Polyhedron is updated automatically via the shared vectors).
        Plane.intersection(planes[TOP], planes[LEFT], planes[FAR], frustumCorners[TOP_LEFT_FAR]);
        Plane.intersection(planes[TOP], planes[RIGHT], planes[FAR], frustumCorners[TOP_RIGHT_FAR]);
        Plane.intersection(planes[TOP], planes[RIGHT], planes[NEAR], frustumCorners[TOP_RIGHT_NEAR]);
        Plane.intersection(planes[TOP], planes[LEFT], planes[NEAR], frustumCorners[TOP_LEFT_NEAR]);
        Plane.intersection(planes[BOTTOM], planes[LEFT], planes[FAR], frustumCorners[BOTTOM_LEFT_FAR]);
        Plane.intersection(planes[BOTTOM], planes[RIGHT], planes[FAR], frustumCorners[BOTTOM_RIGHT_FAR]);
        Plane.intersection(planes[BOTTOM], planes[RIGHT], planes[NEAR], frustumCorners[BOTTOM_RIGHT_NEAR]);
        Plane.intersection(planes[BOTTOM], planes[LEFT], planes[NEAR], frustumCorners[BOTTOM_LEFT_NEAR]);

        // Calculate the 2D frustum polygon by projecting the near-plane corners
        // onto the XY plane (z dropped).
        frustumPolygonVertices[TOP_LEFT].set(frustumCorners[TOP_LEFT_NEAR].x, frustumCorners[TOP_LEFT_NEAR].y);
        frustumPolygonVertices[TOP_RIGHT].set(frustumCorners[TOP_RIGHT_NEAR].x, frustumCorners[TOP_RIGHT_NEAR].y);
        frustumPolygonVertices[BOTTOM_RIGHT].set(frustumCorners[BOTTOM_RIGHT_NEAR].x, frustumCorners[BOTTOM_RIGHT_NEAR].y);
        frustumPolygonVertices[BOTTOM_LEFT].set(frustumCorners[BOTTOM_LEFT_NEAR].x, frustumCorners[BOTTOM_LEFT_NEAR].y);

        return this;
    }

    /**
     * Tests whether a 2D polygon intersects (or lies inside) this frustum.
     */
    public boolean intersects(Polygon polygon)
    {
        Vector2 center = polygon.getCenter();

        // If the center point is inside the frustum, then the polygon should be intersecting
        // NOTE(review): this check uses z = 0 while the vertex checks below use
        // planes[NEAR].d — confirm the inconsistency is intentional.
        if (isInside(center.x, center.y, 0))
            return true;

        // If there is at least one point that is inside the frustum, then the polygon should be intersecting
        for (Vector2 v : polygon.getVertices())
            if (isInside(v.x + polygon.getPosition().x, v.y + polygon.getPosition().y, planes[NEAR].d))
                return true;

        // Otherwise, there maybe one edge that is intersecting the frustum
        return polygon.intersects(frustumPolygon);
    }

    /**
     * Tests whether a 2D polygon lies completely inside this frustum
     * (all vertices, placed at the near plane's depth, pass the plane tests).
     */
    public boolean isInside(Polygon polygon)
    {
        // Definitely not inside if the center is not in frustum
        if (!isInside(polygon.getCenter().x, polygon.getCenter().y, planes[NEAR].d))
            return false;

        boolean inside = false;

        // Reuse a pooled Vector3 to avoid per-call allocation
        Vector3 temp = Vector3.REUSABLE_STACK.pop();

        for (Vector2 v : polygon.getVertices())
        {
            temp.set(v.x, v.y, planes[NEAR].d).addSelf(polygon.getPosition(), 0);
            inside = isInside(temp);

            if (!inside)
                break;
        }

        Vector3.REUSABLE_STACK.push(temp);

        return inside;
    }

    /**
     * Tests whether a polyhedron intersects this frustum: first its center,
     * then its AABB, then (for spheres) a radius test, then individual
     * vertices, and finally an exact edge-intersection test.
     */
    public boolean intersects(Polyhedron polyhedron)
    {
        Vector3 position = polyhedron.getPosition();

        // If the center point is in the frustum, then it should be intersecting
        if (isInside(position))
            return true;

        // Now check if the AABB of the polyhedron intersects the frustum
        if (intersects(position, polyhedron.getWidth(), polyhedron.getHeight(), polyhedron.getThickness()))
        {
            // Special case, if the polyhedron is a sphere, then there is no need to test fully
            if (polyhedron instanceof Sphere)
                return intersects(position, ((Sphere) polyhedron).getRadius());

            // Check if at least one point of the polyhedron is inside frustum.
            for (Vector3 v : polyhedron.getVertices())
                if (isInside(v.x + position.x, v.y + position.y, v.z + position.z))
                    return true;

            // The only remaining chance for intersection is whether an edge intersects with the frustum.
            return polyhedron.intersects(frustumPolyhedron);
        }

        // If this fails too, then there is no chance that the polyhedron intersects
        return false;
    }

    /**
     * Tests whether a sphere (center + radius) intersects this frustum:
     * the signed distance to every plane must be at least -radius.
     */
    public boolean intersects(Vector3 position, float radius)
    {
        if (isInside(position))
            return true;

        for (Plane plane : planes)
            if (plane.normal.dot(position) + radius + plane.d < 0)
                return false;

        return true;
    }

    /**
     * Tests whether an axis-aligned box (centered at {@code position}) intersects
     * this frustum. The box is rejected only if all eight of its corners lie
     * behind a single plane.
     */
    public boolean intersects(Vector3 position, float width, float height, float thickness)
    {
        if (isInside(position))
            return true;

        float halfWidth = width / 2;
        float halfHeight = height / 2;
        float halfThickness = thickness / 2;

        float x = position.x;
        float y = position.y;
        float z = position.z;

        for (Plane plane : planes)
        {
            if (plane.testPoint(x + halfWidth, y + halfHeight, z + halfThickness) == Plane.Side.BACK &&
                plane.testPoint(x + halfWidth, y + halfHeight, z - halfThickness) == Plane.Side.BACK &&
                plane.testPoint(x + halfWidth, y - halfHeight, z + halfThickness) == Plane.Side.BACK &&
                plane.testPoint(x + halfWidth, y - halfHeight, z - halfThickness) == Plane.Side.BACK &&
                plane.testPoint(x - halfWidth, y + halfHeight, z + halfThickness) == Plane.Side.BACK &&
                plane.testPoint(x - halfWidth, y + halfHeight, z - halfThickness) == Plane.Side.BACK &&
                plane.testPoint(x - halfWidth, y - halfHeight, z + halfThickness) == Plane.Side.BACK &&
                plane.testPoint(x - halfWidth, y - halfHeight, z - halfThickness) == Plane.Side.BACK)
                return false;
        }

        return true;
    }

    /**
     * Tests whether a polyhedron lies completely inside this frustum
     * (its center and every vertex pass the plane tests).
     */
    public boolean isInside(Polyhedron polyhedron)
    {
        // Definitely not inside if the center is not in frustum
        if (!isInside(polyhedron.getPosition()))
            return false;

        boolean inside = false;

        // Reuse a pooled Vector3 to avoid per-call allocation
        Vector3 temp = Vector3.REUSABLE_STACK.pop();

        for (Vector3 v : polyhedron.getVertices())
        {
            temp.set(v).addSelf(polyhedron.getPosition());
            inside = isInside(temp);

            if (!inside)
                break;
        }

        Vector3.REUSABLE_STACK.push(temp);

        return inside;
    }

    /**
     * Tests whether a box centered at {@code point} lies completely inside.
     * NOTE(review): the 'n' term mixes the point's coordinates with the box
     * extents in an unusual way — verify this formula against its callers.
     */
    public boolean isInside(Vector3 point, float width, float height, float thickness)
    {
        if (!isInside(point))
            return false;

        for (Plane plane : planes)
        {
            float m = plane.normal.dot(point);
            float n = width / 2 * Math.abs(point.x - width / 2) +
                      height / 2 * Math.abs(point.y - height / 2) +
                      thickness / 2 * Math.abs(point.z - thickness / 2);

            if (m + n < 0)
                return false;
        }

        return true;
    }

    /**
     * Tests whether a sphere centered at {@code point} lies completely inside.
     * NOTE(review): same concern as the box variant — the 'n' term is not the
     * conventional distance-vs-radius test; confirm intended semantics.
     */
    public boolean isInside(Vector3 point, float radius)
    {
        if (!isInside(point))
            return false;

        for (Plane plane : planes)
        {
            float m = plane.normal.dot(point);
            float n = radius * Math.abs(point.x - radius) +
                      radius * Math.abs(point.y - radius) +
                      radius * Math.abs(point.z - radius);

            if (m + n < 0)
                return false;
        }

        return true;
    }

    /** Tests whether a point lies inside the frustum. */
    public boolean isInside(Vector3 point)
    {
        return isInside(point.x, point.y, point.z);
    }

    /**
     * Tests whether the point (x, y, z) is strictly in FRONT of all six planes.
     * Points lying exactly on a plane are reported as outside.
     */
    public boolean isInside(float x, float y, float z)
    {
        boolean inside = false;

        for (Plane plane : planes)
        {
            if (!(inside = plane.testPoint(x, y, z) == Plane.Side.FRONT))
                break;
        }

        return inside;
    }

    /** Returns the plane with the given index (LEFT..FAR). */
    public Plane getPlane(int id)
    {
        return planes[id];
    }

    /** Returns the 3D corner with the given index (TOP_LEFT_FAR..BOTTOM_LEFT_NEAR).
     *  The returned vector is live — it is mutated by {@link #update}. */
    public Vector3 getCorner(int id)
    {
        return frustumCorners[id];
    }

    /** Returns the 2D near-plane corner with the given index (TOP_LEFT..BOTTOM_RIGHT). */
    public Vector2 getCorner2D(int id)
    {
        return frustumPolygonVertices[id];
    }

    /** Returns the live 2D polygon of the near-plane corners. */
    public Polygon getPolygon()
    {
        return frustumPolygon;
    }

    /** Returns the live 3D polyhedron built from the frustum corners. */
    public Polyhedron getPolyhedron()
    {
        return frustumPolyhedron;
    }

    @Override
    public String toString()
    {
        return "Frustum{" +
               "planeLeft=" + getPlane(LEFT) +
               ", planeRight=" + getPlane(RIGHT) +
               ", planeTop=" + getPlane(TOP) +
               ", planeBottom=" + getPlane(BOTTOM) +
               ", planeNear=" + getPlane(NEAR) +
               ", planeFar=" + getPlane(FAR) +
               '}';
    }
}
| |
/*
* CPAchecker is a tool for configurable software verification.
* This file is part of CPAchecker.
*
* Copyright (C) 2007-2012 Dirk Beyer
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* CPAchecker web page:
* http://cpachecker.sosy-lab.org
*/
package org.sosy_lab.cpachecker.util.predicates;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import org.sosy_lab.common.AbstractMBean;
import org.sosy_lab.common.LogManager;
import org.sosy_lab.common.Triple;
import org.sosy_lab.common.configuration.Configuration;
import org.sosy_lab.common.configuration.InvalidConfigurationException;
import org.sosy_lab.common.configuration.Option;
import org.sosy_lab.common.configuration.Options;
import org.sosy_lab.cpachecker.core.ShutdownNotifier;
import org.sosy_lab.cpachecker.util.predicates.interfaces.BooleanFormula;
import org.sosy_lab.cpachecker.util.predicates.interfaces.Region;
import org.sosy_lab.cpachecker.util.predicates.interfaces.RegionManager;
import org.sosy_lab.cpachecker.util.predicates.interfaces.RegionManager.RegionBuilder;
import org.sosy_lab.cpachecker.util.predicates.interfaces.view.BooleanFormulaManagerView;
import org.sosy_lab.cpachecker.util.predicates.interfaces.view.FormulaManagerView;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.base.Joiner;
import com.google.common.collect.Maps;
/**
* This class stores a mapping between abstract regions and the corresponding
* symbolic formula. It is therefore the bridge between the abstract and the
* symbolic "worlds".
* It is also responsible for the creation of {@link AbstractionPredicate}s.
*/
@Options(prefix = "cpa.predicate")
public final class AbstractionManager {
// JMX management interface exposing the predicate count and the predicate list.
public static interface AbstractionPredicatesMXBean {
  int getNumberOfPredicates();

  String getPredicates();
}

// JMX bean implementation; registers itself with the MBean server on construction.
private class AbstractionPredicatesMBean extends AbstractMBean implements AbstractionPredicatesMXBean {
  public AbstractionPredicatesMBean() {
    super("org.sosy_lab.cpachecker:type=predicate,name=AbstractionPredicates", logger);
    register();
  }

  @Override
  public int getNumberOfPredicates() {
    return numberOfPredicates;
  }

  @Override
  public String getPredicates() {
    // Renders one predicate per line for JMX consoles.
    // TODO this may run into a ConcurrentModificationException
    return Joiner.on('\n').join(absVarToPredicate.values());
  }
}
// Count of predicates created so far; also used to generate unique variable
// names ("PREDn") in makePredicate.
// NOTE(review): incremented with ++ (not atomic) in makePredicate despite being
// volatile — confirm calls are externally synchronized or single-threaded.
private volatile int numberOfPredicates = 0;

private final LogManager logger;
private final RegionManager rmgr;
private final FormulaManagerView fmgr;

// Here we keep the mapping abstract predicate variable -> predicate
private final Map<Region, AbstractionPredicate> absVarToPredicate = Maps.newHashMap();

// and the mapping symbolic variable -> predicate
private final Map<BooleanFormula, AbstractionPredicate> symbVarToPredicate = Maps.newHashMap();

// and the mapping atom -> predicate
private final Map<BooleanFormula, AbstractionPredicate> atomToPredicate = Maps.newHashMap();

@Option(name = "abs.useCache", description = "use caching of region to formula conversions")
private boolean useCache = true;

// Cache for toConcrete() results; null when useCache is disabled.
private final Map<Region, BooleanFormula> toConcreteCache;

private BooleanFormulaManagerView bfmgr;
/**
 * Creates the manager and registers a JMX bean for monitoring the predicates.
 *
 * @param pRmgr   manager for abstract regions
 * @param pFmgr   manager for symbolic formulas
 * @param config  configuration used to inject the options of this class
 * @param pLogger logger
 * @throws InvalidConfigurationException if option injection fails
 */
public AbstractionManager(RegionManager pRmgr, FormulaManagerView pFmgr,
    Configuration config, LogManager pLogger) throws InvalidConfigurationException {
  config.inject(this, AbstractionManager.class);
  logger = pLogger;
  rmgr = pRmgr;
  fmgr = pFmgr;
  bfmgr = pFmgr.getBooleanFormulaManager();

  if (useCache) {
    toConcreteCache = new HashMap<>();
  } else {
    toConcreteCache = null;
  }

  new AbstractionPredicatesMBean(); // don't store it, we wouldn't know when to unregister anyway
}
/** Returns the number of distinct predicates created so far. */
public int getNumberOfPredicates() {
  return numberOfPredicates;
}
/**
 * Creates (or returns the cached) predicate for the given atom. On first use,
 * a fresh Boolean variable "PREDn" and a fresh abstract region are allocated,
 * and all three lookup maps are updated.
 *
 * @param atom the symbolic formula that defines the predicate
 * @return the predicate for this atom (same instance on repeated calls)
 */
public AbstractionPredicate makePredicate(BooleanFormula atom) {
  AbstractionPredicate result = atomToPredicate.get(atom);

  if (result == null) {
    // NOTE(review): ++ on a volatile field is not atomic; concurrent callers
    // could produce duplicate variable names — confirm single-threaded use.
    BooleanFormula symbVar = fmgr.createPredicateVariable("PRED"+numberOfPredicates++);
    Region absVar = rmgr.createPredicate();

    logger.log(Level.FINEST, "Created predicate", absVar,
        "from variable", symbVar, "and atom", atom);

    result = new AbstractionPredicate(absVar, symbVar, atom);
    symbVarToPredicate.put(symbVar, result);
    absVarToPredicate.put(absVar, result);
    atomToPredicate.put(atom, result);
  }

  return result;
}
/**
 * Creates (or returns the cached) predicate that represents "false".
 */
public AbstractionPredicate makeFalsePredicate() {
  return makePredicate(bfmgr.makeBoolean(false));
}
/**
 * Get predicate corresponding to a variable.
 *
 * @param var A symbolic formula representing the variable. The same formula has to be passed to makePredicate earlier.
 * @return a Predicate
 * @throws IllegalArgumentException if the variable was never registered via makePredicate
 */
private AbstractionPredicate getPredicate(BooleanFormula var) {
  AbstractionPredicate result = symbVarToPredicate.get(var);
  if (result == null) { throw new IllegalArgumentException(var
      + " seems not to be a formula corresponding to a single predicate variable."); }
  return result;
}
/**
 * Given an abstract formula (which is a BDD over the predicates), build
 * its concrete representation (which is a symbolic formula corresponding
 * to the BDD, in which each predicate is replaced with its definition).
 *
 * <p>Implemented as an iterative post-order traversal of the BDD with an
 * explicit stack, memoized via {@code cache} (shared across calls when
 * {@code useCache} is set, otherwise local to this call).
 */
public BooleanFormula toConcrete(Region af) {
  if (rmgr instanceof SymbolicRegionManager) {
    // optimization shortcut
    return ((SymbolicRegionManager)rmgr).toFormula(af);
  }

  Map<Region, BooleanFormula> cache;
  if (useCache) {
    cache = toConcreteCache;
  } else {
    cache = new HashMap<>();
  }
  Deque<Region> toProcess = new ArrayDeque<>();

  // Seed the cache with the two terminal nodes.
  cache.put(rmgr.makeTrue(), bfmgr.makeBoolean(true));
  cache.put(rmgr.makeFalse(), bfmgr.makeBoolean(false));

  toProcess.push(af);
  while (!toProcess.isEmpty()) {
    // Peek (not pop): a node is only removed once both children are translated.
    Region n = toProcess.peek();
    if (cache.containsKey(n)) {
      toProcess.pop();
      continue;
    }
    boolean childrenDone = true;
    BooleanFormula m1 = null;
    BooleanFormula m2 = null;

    // Decompose the node into (variable, then-branch, else-branch).
    Triple<Region, Region, Region> parts = rmgr.getIfThenElse(n);
    Region c1 = parts.getSecond();
    Region c2 = parts.getThird();
    if (!cache.containsKey(c1)) {
      toProcess.push(c1);
      childrenDone = false;
    } else {
      m1 = cache.get(c1);
    }
    if (!cache.containsKey(c2)) {
      toProcess.push(c2);
      childrenDone = false;
    } else {
      m2 = cache.get(c2);
    }
    if (childrenDone) {
      assert m1 != null;
      assert m2 != null;
      toProcess.pop();
      Region var = parts.getFirst();

      // The root variable of the node must be a registered predicate variable.
      AbstractionPredicate pred = absVarToPredicate.get(var);
      assert pred != null;
      BooleanFormula atom = pred.getSymbolicAtom();

      // Simplify ITE(atom, m1, m2) when a branch is a constant.
      if (bfmgr.isTrue(m1)) {
        if (bfmgr.isFalse(m2)) {
          // ITE(atom, true, false) <==> atom
          cache.put(n, atom);
        } else {
          // ITE(atom, true, m2) <==> (atom || m2)
          cache.put(n, bfmgr.or(atom, m2));
        }
      } else if (bfmgr.isFalse(m1)) {
        if (bfmgr.isTrue(m2)) {
          // ITE(atom, false, true) <==> !atom
          cache.put(n, bfmgr.not(atom));
        } else {
          // ITE(atom, false, m2) <==> (!atom && m2)
          cache.put(n, bfmgr.and(bfmgr.not(atom), m2));
        }
      } else {
        if (bfmgr.isTrue(m2)) {
          // ITE(atom, m1, true) <==> (!atom || m1)
          cache.put(n, bfmgr.or(bfmgr.not(atom), m1));
        } else if (bfmgr.isFalse(m2)) {
          // ITE(atom, m1, false) <==> (atom && m1)
          cache.put(n, bfmgr.and(atom, m1));
        } else {
          // ITE(atom, m1, m2)
          cache.put(n, bfmgr.ifThenElse(atom, m1, m2));
        }
      }
    }
  }

  BooleanFormula result = cache.get(af);
  assert result != null;
  return result;
}
/**
 * Checks whether the data region represented by f1
 * is a subset of that represented by f2.
 *
 * @param f1 an AbstractFormula
 * @param f2 an AbstractFormula
 * @return true if (f1 => f2), false otherwise
 * @throws InterruptedException if the region manager's check is interrupted
 */
public boolean entails(Region f1, Region f2) throws InterruptedException {
  // Pure delegation; the implication check is done by the region manager.
  return rmgr.entails(f1, f2);
}
/**
 * Returns the set of predicates that occur in a region.
 *
 * <p>Constant sub-regions (true/false) carry no predicate of their own; for
 * them the predicate 'false' is added, so in some cases the result contains
 * the 'false' predicate.
 */
public Set<AbstractionPredicate> extractPredicates(Region af) {
  Set<AbstractionPredicate> result = new HashSet<>();
  Deque<Region> waitlist = new ArrayDeque<>();
  waitlist.push(af);

  while (!waitlist.isEmpty()) {
    Region current = waitlist.pop();

    if (current.isTrue() || current.isFalse()) {
      // Terminal node: report the marker predicate 'false'.
      result.add(this.makeFalsePredicate());
    } else {
      AbstractionPredicate predicate = absVarToPredicate.get(current);
      if (predicate == null) {
        // Inner node: its root variable identifies the predicate,
        // and both children still need to be explored.
        Triple<Region, Region, Region> ite = rmgr.getIfThenElse(current);
        predicate = absVarToPredicate.get(ite.getFirst());
        assert predicate != null;
        waitlist.push(ite.getSecond());
        waitlist.push(ite.getThird());
      }
      result.add(predicate);
    }
  }
  return result;
}
/**
 * Converts a boolean formula into a Region by mapping each predicate atom
 * occurring in it to the predicate's abstract (region) variable.
 */
public Region buildRegionFromFormula(BooleanFormula pF) {
  // atom -> AbstractionPredicate (via the atomToPredicate map) -> abstract variable.
  // NOTE(review): Functions.forMap throws for atoms never registered through
  // makePredicate — presumably rmgr.fromFormula only applies the function to
  // known atoms; confirm against the RegionManager implementation.
  return rmgr.fromFormula(pF, fmgr,
      Functions.compose(new Function<AbstractionPredicate, Region>() {
        @Override
        public Region apply(AbstractionPredicate pInput) {
          return pInput.getAbstractVariable();
        }
      }, Functions.forMap(atomToPredicate)));
}
/** Returns a fresh {@link RegionCreator} facade bound to this manager's region manager. */
public RegionCreator getRegionCreator() {
  return new RegionCreator();
}
/**
 * Thin facade over the enclosing manager's {@link RegionManager} that exposes
 * only region-building operations (and predicate lookup by symbolic variable).
 */
public class RegionCreator {

  /** Returns a new region builder from the underlying region manager. */
  public RegionBuilder newRegionBuilder(ShutdownNotifier pShutdownNotifier) {
    return rmgr.builder(pShutdownNotifier);
  }

  /**
   * @return a representation of logical truth
   */
  public Region makeTrue() {
    return rmgr.makeTrue();
  }

  /**
   * @return a representation of logical falseness
   */
  public Region makeFalse() {
    return rmgr.makeFalse();
  }

  /**
   * Creates a region representing the negation of the argument.
   * @param f an AbstractFormula
   * @return (!f)
   */
  public Region makeNot(Region f) {
    return rmgr.makeNot(f);
  }

  /**
   * Creates a region representing an AND of the two arguments.
   * @param f1 an AbstractFormula
   * @param f2 an AbstractFormula
   * @return (f1 & f2)
   */
  public Region makeAnd(Region f1, Region f2) {
    return rmgr.makeAnd(f1, f2);
  }

  /**
   * Creates a region representing an OR of the two arguments.
   * @param f1 an AbstractFormula
   * @param f2 an AbstractFormula
   * @return (f1 | f2)
   */
  public Region makeOr(Region f1, Region f2) {
    return rmgr.makeOr(f1, f2);
  }

  /**
   * Creates a region representing an equality (bi-implication) of the two arguments.
   * @param f1 an AbstractFormula
   * @param f2 an AbstractFormula
   * @return (f1 <=> f2)
   */
  public Region makeEqual(Region f1, Region f2) {
    return rmgr.makeEqual(f1, f2);
  }

  /**
   * Creates a region representing an if-then-else construct of the three arguments.
   * @param f1 an AbstractFormula
   * @param f2 an AbstractFormula
   * @param f3 an AbstractFormula
   * @return (if f1 then f2 else f3)
   */
  public Region makeIte(Region f1, Region f2, Region f3) {
    return rmgr.makeIte(f1, f2, f3);
  }

  /**
   * Creates a region representing an existential quantification of the two arguments.
   * @param f1 an AbstractFormula
   * @param f2 an AbstractFormula
   * @return (\exists f2: f1)
   */
  public Region makeExists(Region f1, Region f2) {
    return rmgr.makeExists(f1, f2);
  }

  /** Returns the abstract variable of the predicate registered for the given symbolic variable. */
  public Region getPredicate(BooleanFormula var) {
    return AbstractionManager.this.getPredicate(var).getAbstractVariable();
  }
}
}
| |
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0, (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tle.web.login;
import com.tle.common.Check;
import com.tle.common.i18n.CurrentLocale;
import com.tle.common.settings.standard.AutoLogin;
import com.tle.common.usermanagement.user.valuebean.UserBean;
import com.tle.core.services.user.UserService;
import com.tle.core.settings.service.ConfigurationService;
import com.tle.web.freemarker.FreemarkerFactory;
import com.tle.web.freemarker.annotations.ViewFactory;
import com.tle.web.sections.SectionInfo;
import com.tle.web.sections.SectionTree;
import com.tle.web.sections.ajax.AjaxGenerator;
import com.tle.web.sections.ajax.AjaxGenerator.EffectType;
import com.tle.web.sections.ajax.handler.AjaxFactory;
import com.tle.web.sections.annotations.Bookmarked;
import com.tle.web.sections.annotations.EventFactory;
import com.tle.web.sections.annotations.EventHandlerMethod;
import com.tle.web.sections.equella.annotation.PlugKey;
import com.tle.web.sections.equella.component.SelectionsTable;
import com.tle.web.sections.equella.component.model.DynamicSelectionsTableModel;
import com.tle.web.sections.equella.component.model.SelectionsTableSelection;
import com.tle.web.sections.equella.layout.OneColumnLayout;
import com.tle.web.sections.equella.receipt.ReceiptService;
import com.tle.web.sections.equella.utils.SelectUserDialog;
import com.tle.web.sections.equella.utils.SelectedUser;
import com.tle.web.sections.equella.utils.UserLinkSection;
import com.tle.web.sections.equella.utils.UserLinkService;
import com.tle.web.sections.events.RenderEventContext;
import com.tle.web.sections.events.js.EventGenerator;
import com.tle.web.sections.js.JSCallable;
import com.tle.web.sections.js.generic.OverrideHandler;
import com.tle.web.sections.render.GenericTemplateResult;
import com.tle.web.sections.render.Label;
import com.tle.web.sections.render.SectionRenderable;
import com.tle.web.sections.render.TemplateResult;
import com.tle.web.sections.render.TextLabel;
import com.tle.web.sections.standard.Button;
import com.tle.web.sections.standard.Checkbox;
import com.tle.web.sections.standard.Link;
import com.tle.web.sections.standard.annotations.Component;
import com.tle.web.sections.standard.model.HtmlLinkState;
import com.tle.web.settings.menu.SettingsUtils;
import com.tle.web.template.Breadcrumbs;
import com.tle.web.template.Decorations;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import javax.inject.Inject;
/**
 * Admin settings page for login behaviour: SSL login, auto-login via IP
 * address (with a fixed "login as" user and a list of allowed IP patterns),
 * and related flags.
 *
 * @author larry
 */
@SuppressWarnings("nls")
public class RootLoginSettingsSection
    extends OneColumnLayout<RootLoginSettingsSection.LoginSettingsModel> {
  /** Dotted-quad IPv4 address where each octet may also be the wildcard '*'. */
  private static final String WILD_IP_REGEX =
      "^((25[0-5]|2[0-4]\\d|[01]?\\d\\d?|\\*)\\.){3}(25[0-5]|2[0-4]\\d|[01]?\\d\\d?|\\*)$";

  // Compiled once: Pattern is immutable and thread-safe, and the previous code
  // recompiled the regex on every submitted address.
  private static final Pattern WILD_IP_PATTERN = Pattern.compile(WILD_IP_REGEX);

  @PlugKey("login.title")
  private static Label TITLE_LABEL;

  @PlugKey("login.selectthisuser")
  private static Label OK_LABEL;

  @PlugKey("login.ipaddresslist.remove")
  private static Label DELETE_LABEL;

  @PlugKey("login.settings.save.receipt")
  private static Label SAVE_RECEIPT_LABEL;

  @PlugKey("login.ipaddresslist.empty")
  private static Label EMPTY_LABEL;

  @PlugKey("login.user.invalid")
  private static String INVALID_USER;

  @PlugKey("login.ipaddresslist.invalid")
  private static String INVALID_IP_LIST;

  @Inject private LoginSettingsPrivilegeTreeProvider securityProvider;
  @Inject private ConfigurationService configService;
  @Inject private ReceiptService receiptService;
  @Inject private UserService userService;
  @Inject private UserLinkService userLinkService;
  private UserLinkSection userLinkSection;

  @EventFactory private EventGenerator events;
  @ViewFactory private FreemarkerFactory viewFactory;
  @AjaxFactory private AjaxGenerator ajax;

  @Component private Checkbox enableSSLCheck;
  @Component private Checkbox enableViaIpCheck;
  @Component private Checkbox disableAutoLoginCheck;
  @Component private Checkbox disallowUserEditCheck;
  @Component private Checkbox transientDRMCheck;
  @Component private Checkbox enableAnonACL;

  @Inject @Component private SelectUserDialog selectUserDialog;
  @Component @Inject private AddIpAddressDialog addIpAddressDialog;
  @Component private SelectionsTable ipAddressTable;

  @Component
  @PlugKey("ipaddress.dialog.title")
  private Link addIpAddressLink;

  @Component
  @PlugKey("settings.save.button")
  private Button saveButton;

  // AJAX function used by each row of the IP address table to remove itself.
  private JSCallable deleteFunc;

  @Override
  public void registered(String id, SectionTree tree) {
    super.registered(id, tree);
    userLinkSection = userLinkService.register(tree, id);

    // "Login as" user picker: single selection, updates the user area in place.
    selectUserDialog.setAjax(true);
    selectUserDialog.setOkLabel(OK_LABEL);
    selectUserDialog.setMultipleUsers(false);
    JSCallable inplace = ajax.getEffectFunction(EffectType.REPLACE_IN_PLACE);
    selectUserDialog.setOkCallback(
        ajax.getAjaxUpdateDomFunction(
            tree, this, events.getEventHandler("selectLoginAsUser"), inplace, "selecteduser"));

    addIpAddressLink.setClickHandler(addIpAddressDialog.getOpenFunction());
    addIpAddressDialog.setOkCallback(
        ajax.getAjaxUpdateDomFunction(
            tree,
            this,
            events.getEventHandler("addIpAddressToSectionList"),
            inplace,
            "enteredIpAddress"));
    deleteFunc =
        ajax.getAjaxUpdateDomFunction(
            tree, this, events.getEventHandler("removeIpAddress"), "enteredIpAddress");

    // Add the toggleEnabler to the all-controlling enable-via-IP checkbox
    enableViaIpCheck.setClickHandler(
        ajax.getAjaxUpdateDomFunction(
            tree, this, events.getEventHandler("toggleEnabled"), inplace, "innercontrols"));

    saveButton.setClickHandler(events.getNamedHandler("save"));

    // Each row shows one IP pattern with a remove action.
    ipAddressTable.setSelectionsModel(
        new DynamicSelectionsTableModel<String>() {
          @Override
          protected List<String> getSourceList(SectionInfo info) {
            LoginSettingsModel model = getModel(info);
            return model.getIpAddresses();
          }

          @Override
          protected void transform(
              SectionInfo info,
              SelectionsTableSelection selection,
              String thing,
              List<SectionRenderable> actions,
              int index) {
            selection.setName(new TextLabel(thing));
            actions.add(makeRemoveAction(DELETE_LABEL, new OverrideHandler(deleteFunc, thing)));
          }
        });
    ipAddressTable.setAddAction(addIpAddressLink);
    ipAddressTable.setNothingSelectedText(EMPTY_LABEL);
  }

  @Override
  protected TemplateResult setupTemplate(RenderEventContext info) {
    securityProvider.checkAuthorised();
    LoginSettingsModel model = getModel(info);
    if (!model.isLoaded()) {
      // First render: populate the model and controls from stored settings.
      AutoLogin settings = getAutoLoginSettings();
      boolean enabledViaIp = settings.isEnabledViaIp();
      model.setShowInnerControls(enabledViaIp);
      model.setUserId(settings.getUserid());
      model.setUsername(settings.getUsername());
      enableSSLCheck.setChecked(info, settings.isLoginViaSSL());
      enableViaIpCheck.setChecked(info, enabledViaIp);
      disableAutoLoginCheck.setChecked(info, settings.isNotAutomatic());
      disallowUserEditCheck.setChecked(info, settings.isEditDetailsDisallowed());
      transientDRMCheck.setChecked(info, settings.isTransientDrmAcceptances());
      enableAnonACL.setChecked(info, settings.isEnableIpReferAcl());
      model.clearIpAddresses();
      for (String ipAddr : settings.getAddresses()) {
        addIpAddress(info, ipAddr);
      }
      model.setLoaded(true);
    } else {
      model.setShowInnerControls(enableViaIpCheck.isChecked(info));
    }
    if (!Check.isEmpty(model.getUserId())) {
      model.setUserLink(userLinkSection.createLink(info, model.getUserId()));
    }
    return new GenericTemplateResult(
        viewFactory.createNamedResult(BODY, "loginsettings.ftl", this));
  }

  /**
   * @see
   *     com.tle.web.sections.equella.layout.OneColumnLayout#addBreadcrumbsAndTitle(com.tle.web.sections.SectionInfo,
   *     com.tle.web.template.Decorations, com.tle.web.template.Breadcrumbs)
   */
  @Override
  protected void addBreadcrumbsAndTitle(
      SectionInfo info, Decorations decorations, Breadcrumbs crumbs) {
    decorations.setTitle(TITLE_LABEL);
    crumbs.addToStart(SettingsUtils.getBreadcrumb(info));
  }

  /** Loads the current AutoLogin settings from the configuration service. */
  private AutoLogin getAutoLoginSettings() {
    return configService.getProperties(new AutoLogin());
  }

  /** Shows/hides the inner controls depending on the enable-via-IP checkbox. */
  @EventHandlerMethod
  public void toggleEnabled(SectionInfo info) {
    boolean enableIsChecked = enableViaIpCheck.isChecked(info);
    getModel(info).setShowInnerControls(enableIsChecked);
  }

  /** Callback from the user-select dialog: stores the chosen "login as" user. */
  @EventHandlerMethod
  public void selectLoginAsUser(SectionInfo info, String usersJson) throws Exception {
    SelectedUser selectedUser = SelectUserDialog.userFromJsonString(usersJson);
    if (selectedUser != null) {
      String userId = selectedUser.getUuid();
      UserBean userBean = userService.getInformationForUser(userId);
      LoginSettingsModel model = getModel(info);
      model.setUserId(userId);
      model.setUsername(userBean.getUsername());
      model.setLoaded(true);
    }
  }

  /** Callback from the add-IP dialog: adds the entered pattern if it is valid. */
  @EventHandlerMethod
  public void addIpAddressToSectionList(SectionInfo info, String enteredString) {
    if (!Check.isEmpty(enteredString)) {
      // We should be dealing with invalid content in the dialog
      if (WILD_IP_PATTERN.matcher(enteredString).matches()) {
        addIpAddress(info, enteredString);
        getModel(info).setLoaded(true);
      }
    }
  }

  @EventHandlerMethod
  public void removeIpAddress(SectionInfo info, String key) {
    getModel(info).getIpAddresses().remove(key);
  }

  @EventHandlerMethod
  public void save(SectionInfo info) {
    if (saveLoginSettings(info)) {
      receiptService.setReceipt(SAVE_RECEIPT_LABEL);
      getModel(info).setLoaded(false);
    } else {
      // Validation failed: keep the POSTed state so errors can be rendered.
      info.preventGET();
    }
  }

  /**
   * Validates and persists the settings.
   *
   * @return true if the settings were saved, false if validation errors were added to the model
   */
  private boolean saveLoginSettings(SectionInfo info) {
    AutoLogin settings = getAutoLoginSettings();
    LoginSettingsModel model = getModel(info);
    boolean viaIP = enableViaIpCheck.isChecked(info);
    String userId = model.getUserId();
    String username = model.getUsername();
    List<String> newAddresses = model.getIpAddresses();
    if (viaIP) {
      // IP login needs both a fixed user and at least one IP pattern.
      if (Check.isEmpty(userId) || Check.isEmpty(username)) {
        model.addError("user", CurrentLocale.get(INVALID_USER));
      }
      if (Check.isEmpty(newAddresses)) {
        model.addError("iplist", CurrentLocale.get(INVALID_IP_LIST));
      }
    }
    if (model.getErrors().isEmpty()) {
      settings.setEnabledViaIp(viaIP);
      settings.setLoginViaSSL(enableSSLCheck.isChecked(info));
      settings.setUserid(userId);
      settings.setUsername(username);
      settings.setNotAutomatic(disableAutoLoginCheck.isChecked(info));
      settings.setEditDetailsDisallowed(disallowUserEditCheck.isChecked(info));
      settings.setTransientDrmAcceptances(transientDRMCheck.isChecked(info));
      settings.setEnableIpReferAcl(enableAnonACL.isChecked(info));
      List<String> ipAddresses = settings.getAddresses();
      if (!Check.isEmpty(newAddresses)) {
        ipAddresses.clear();
        ipAddresses.addAll(newAddresses);
      }
      model.setLoaded(false);
      configService.setProperties(settings);
      userService.refreshSettings();
      return true;
    }
    return false;
  }

  public void addIpAddress(SectionInfo info, String ipAddress) {
    getModel(info).addIpAddress(ipAddress);
  }

  public Checkbox getEnableSSLCheck() {
    return enableSSLCheck;
  }

  public Checkbox getEnableViaIpCheck() {
    return enableViaIpCheck;
  }

  public SelectUserDialog getSelectUserDialog() {
    return selectUserDialog;
  }

  public Checkbox getDisableAutoLoginCheck() {
    return disableAutoLoginCheck;
  }

  public Checkbox getDisallowUserEditCheck() {
    return disallowUserEditCheck;
  }

  public Checkbox getTransientDRMCheck() {
    return transientDRMCheck;
  }

  public boolean hasIpAddresses(SectionInfo info) {
    List<String> ipAddresses = getModel(info).getIpAddresses();
    return !Check.isEmpty(ipAddresses);
  }

  public Button getSaveButton() {
    return saveButton;
  }

  @Override
  public Object instantiateModel(SectionInfo info) {
    return new LoginSettingsModel();
  }

  @Override
  public Class<LoginSettingsModel> getModelClass() {
    return LoginSettingsModel.class;
  }

  public SelectionsTable getIpAddressTable() {
    return ipAddressTable;
  }

  /** Per-request view model; bookmarked fields survive round-trips in the URL/form state. */
  public static class LoginSettingsModel extends OneColumnLayout.OneColumnLayoutModel {
    private boolean showInnerControls;
    @Bookmarked private boolean loaded;
    @Bookmarked private String userId;
    @Bookmarked private String username;
    private HtmlLinkState userLink;
    @Bookmarked private List<String> ipAddresses;
    // Validation errors keyed by field ("user", "iplist"), read by the template.
    private final Map<String, String> errors = new HashMap<String, String>();

    public boolean isShowInnerControls() {
      return showInnerControls;
    }

    public void setShowInnerControls(boolean showInnerControls) {
      this.showInnerControls = showInnerControls;
    }

    public boolean isLoaded() {
      return loaded;
    }

    public void setLoaded(boolean loaded) {
      this.loaded = loaded;
    }

    public String getUserId() {
      return userId;
    }

    public void setUserId(String userId) {
      this.userId = userId;
    }

    public String getUsername() {
      return username;
    }

    public void setUsername(String username) {
      this.username = username;
    }

    public HtmlLinkState getUserLink() {
      return userLink;
    }

    public void setUserLink(HtmlLinkState userLink) {
      this.userLink = userLink;
    }

    public List<String> getIpAddresses() {
      return ipAddresses;
    }

    public void setIpAddresses(List<String> ipAddresses) {
      this.ipAddresses = ipAddresses;
    }

    public void addIpAddress(String ipAddress) {
      // Lazily created because the list is @Bookmarked and may start null.
      if (ipAddresses == null) {
        ipAddresses = new ArrayList<String>();
      }
      ipAddresses.add(ipAddress);
    }

    public void clearIpAddresses() {
      if (ipAddresses != null) {
        ipAddresses.clear();
      }
    }

    public Map<String, String> getErrors() {
      return errors;
    }

    public void addError(String key, String value) {
      this.errors.put(key, value);
    }
  }

  public Checkbox getEnableAnonACL() {
    return enableAnonACL;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.juli;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.logging.Formatter;
import java.util.logging.LogRecord;
/**
 * Provides same information as default log format but on a single line to make
 * it easier to grep the logs. The only exception is stacktraces which are
 * always preceded by whitespace to make it simple to skip them.
 */
/*
 * Date processing based on AccessLogValve.
 */
public class OneLineFormatter extends Formatter {

    private static final String LINE_SEP = System.getProperty("line.separator");
    // Stack traces are indented by a single space so they can be skipped with
    // e.g. grep -v '^ '.
    private static final String ST_SEP = LINE_SEP + " ";
    // Renamed from the misspelled UNKONWN_THREAD_NAME; the emitted message text
    // is unchanged.
    private static final String UNKNOWN_THREAD_NAME = "Unknown thread with ID ";
    private static final Object threadMxBeanLock = new Object();
    private static volatile ThreadMXBean threadMxBean = null;
    private static final int THREAD_NAME_CACHE_SIZE = 10000;

    // Per-thread bounded LRU cache of LogRecord thread ID -> thread name.
    private static ThreadLocal<LinkedHashMap<Integer,String>> threadNameCache =
            new ThreadLocal<LinkedHashMap<Integer,String>>() {

        @Override
        protected LinkedHashMap<Integer,String> initialValue() {
            return new LinkedHashMap<Integer,String>() {

                private static final long serialVersionUID = 1L;

                @Override
                protected boolean removeEldestEntry(
                        Entry<Integer, String> eldest) {
                    // Evict the oldest entry once the fixed capacity is exceeded.
                    return (size() > THREAD_NAME_CACHE_SIZE);
                }
            };
        }
    };

    /* Timestamp format */
    private static final String timeFormat = "dd-MMM-yyyy HH:mm:ss";

    /**
     * The size of our global date format cache
     */
    private static final int globalCacheSize = 30;

    /**
     * The size of our thread local date format cache
     */
    private static final int localCacheSize = 5;

    /**
     * Global date format cache.
     */
    private static final DateFormatCache globalDateCache =
            new DateFormatCache(globalCacheSize, timeFormat, null);

    /**
     * Thread local date format cache.
     */
    private static final ThreadLocal<DateFormatCache> localDateCache =
            new ThreadLocal<DateFormatCache>() {
        @Override
        protected DateFormatCache initialValue() {
            return new DateFormatCache(localCacheSize, timeFormat, globalDateCache);
        }
    };

    /**
     * Formats the record as a single line:
     * {@code timestamp level [thread] class.method message}, followed by the
     * stack trace (if any) indented on subsequent lines.
     */
    @Override
    public String format(LogRecord record) {
        StringBuilder sb = new StringBuilder();

        // Timestamp
        addTimestamp(sb, record.getMillis());

        // Severity
        sb.append(' ');
        sb.append(record.getLevel());

        // Thread
        sb.append(' ');
        sb.append('[');
        if (Thread.currentThread() instanceof AsyncFileHandler.LoggerThread) {
            // If using the async handler can't get the thread name from the
            // current thread.
            sb.append(getThreadName(record.getThreadID()));
        } else {
            sb.append(Thread.currentThread().getName());
        }
        sb.append(']');

        // Source
        sb.append(' ');
        sb.append(record.getSourceClassName());
        sb.append('.');
        sb.append(record.getSourceMethodName());

        // Message
        sb.append(' ');
        sb.append(formatMessage(record));

        // Stack trace
        if (record.getThrown() != null) {
            sb.append(ST_SEP);
            StringWriter sw = new StringWriter();
            PrintWriter pw = new PrintWriter(sw);
            record.getThrown().printStackTrace(pw);
            pw.close();
            sb.append(sw.getBuffer());
        }

        // New line for next record
        sb.append(LINE_SEP);

        return sb.toString();
    }

    /**
     * Appends the formatted timestamp plus a zero-padded 3-digit millisecond
     * fraction (e.g. ".007").
     */
    protected void addTimestamp(StringBuilder buf, long timestamp) {
        buf.append(localDateCache.get().getFormat(timestamp));
        long frac = timestamp % 1000;
        buf.append('.');
        if (frac < 100) {
            if (frac < 10) {
                buf.append('0');
                buf.append('0');
            } else {
                buf.append('0');
            }
        }
        buf.append(frac);
    }

    /**
     * LogRecord has threadID but no thread name.
     * LogRecord uses an int for thread ID but thread IDs are longs.
     * If the real thread ID > (Integer.MAXVALUE / 2) LogRecord uses it's own
     * ID in an effort to avoid clashes due to overflow.
     * <p>
     * Words fail me to describe what I think of the design decision to use an
     * int in LogRecord for a long value and the resulting mess that follows.
     */
    private static String getThreadName(int logRecordThreadId) {
        Map<Integer,String> cache = threadNameCache.get();
        String result = null;

        // Only synthetic (large) IDs are read back from the cache; small IDs
        // are looked up fresh via the ThreadMXBean each time.
        if (logRecordThreadId > (Integer.MAX_VALUE / 2)) {
            result = cache.get(Integer.valueOf(logRecordThreadId));
        }

        if (result != null) {
            return result;
        }

        if (logRecordThreadId > Integer.MAX_VALUE / 2) {
            result = UNKNOWN_THREAD_NAME + logRecordThreadId;
        } else {
            // Double checked locking OK as threadMxBean is volatile
            if (threadMxBean == null) {
                synchronized (threadMxBeanLock) {
                    if (threadMxBean == null) {
                        threadMxBean = ManagementFactory.getThreadMXBean();
                    }
                }
            }
            ThreadInfo threadInfo =
                    threadMxBean.getThreadInfo(logRecordThreadId);
            if (threadInfo == null) {
                // Thread already terminated (or ID unknown): fall back to the ID.
                return Long.toString(logRecordThreadId);
            }
            result = threadInfo.getThreadName();
        }

        cache.put(Integer.valueOf(logRecordThreadId), result);
        return result;
    }
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.testFramework;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.projectRoots.*;
import com.intellij.openapi.projectRoots.impl.JavaSdkImpl;
import com.intellij.openapi.roots.LanguageLevelModuleExtensionImpl;
import com.intellij.openapi.roots.LanguageLevelProjectExtension;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.roots.OrderRootType;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.JarFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.util.ArrayUtil;
import com.intellij.util.PathUtil;
import com.intellij.util.SystemProperties;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.lang.JavaVersion;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import org.junit.Assert;
import org.junit.Assume;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
@TestOnly
public class IdeaTestUtil extends PlatformTestUtil {
private static final String MOCK_JDK_DIR_NAME_PREFIX = "mockJDK-";
@SuppressWarnings("UseOfSystemOutOrSystemErr")
public static void printDetectedPerformanceTimings() {
System.out.println(Timings.getStatistics());
}
public static void withLevel(@NotNull final Module module, @NotNull LanguageLevel level, @NotNull final Runnable r) {
final LanguageLevelProjectExtension projectExt = LanguageLevelProjectExtension.getInstance(module.getProject());
final LanguageLevel projectLevel = projectExt.getLanguageLevel();
final LanguageLevel moduleLevel = LanguageLevelModuleExtensionImpl.getInstance(module).getLanguageLevel();
try {
projectExt.setLanguageLevel(level);
setModuleLanguageLevel(module, level);
r.run();
}
finally {
setModuleLanguageLevel(module, moduleLevel);
projectExt.setLanguageLevel(projectLevel);
}
}
public static void setModuleLanguageLevel(@NotNull Module module, @Nullable LanguageLevel level) {
final LanguageLevelModuleExtensionImpl
modifiable = (LanguageLevelModuleExtensionImpl)LanguageLevelModuleExtensionImpl.getInstance(module).getModifiableModel(true);
modifiable.setLanguageLevel(level);
modifiable.commit();
}
public static void setModuleLanguageLevel(@NotNull Module module, @NotNull LanguageLevel level, @NotNull Disposable parentDisposable) {
LanguageLevel prev = LanguageLevelModuleExtensionImpl.getInstance(module).getLanguageLevel();
setModuleLanguageLevel(module, level);
Disposer.register(parentDisposable, () -> setModuleLanguageLevel(module, prev));
}
@NotNull
public static Sdk getMockJdk(@NotNull JavaVersion version) {
int mockJdk = version.feature >= 11 ? 11 :
version.feature >= 9 ? 9 :
version.feature >= 7 ? version.feature :
version.feature >= 5 ? 7 :
4;
String path = getPathForJdkNamed(MOCK_JDK_DIR_NAME_PREFIX + (mockJdk < 11 ? "1." : "") + mockJdk).getPath();
return createMockJdk("java " + version, path);
}
@NotNull
private static Sdk createMockJdk(@NotNull String name, @NotNull String path) {
return ((JavaSdkImpl)JavaSdk.getInstance()).createMockJdk(name, path, false);
}
@NotNull
public static Sdk getMockJdk14() {
return getMockJdk(JavaVersion.compose(4));
}
@NotNull
public static Sdk getMockJdk17() {
return getMockJdk(JavaVersion.compose(7));
}
@NotNull
public static Sdk getMockJdk17(@NotNull String name) {
return createMockJdk(name, getMockJdk17Path().getPath());
}
@NotNull
public static Sdk getMockJdk18() {
return getMockJdk(JavaVersion.compose(8));
}
@NotNull
public static Sdk getMockJdk9() {
return getMockJdk(JavaVersion.compose(9));
}
@NotNull
public static File getMockJdk14Path() {
return getPathForJdkNamed(MOCK_JDK_DIR_NAME_PREFIX + "1.4");
}
@NotNull
public static File getMockJdk17Path() {
return getPathForJdkNamed(MOCK_JDK_DIR_NAME_PREFIX + "1.7");
}
@NotNull
public static File getMockJdk18Path() {
return getPathForJdkNamed(MOCK_JDK_DIR_NAME_PREFIX + "1.8");
}
@NotNull
public static File getMockJdk9Path() {
return getPathForJdkNamed(MOCK_JDK_DIR_NAME_PREFIX + "1.9");
}
public static String getMockJdkVersion(@NotNull String path) {
String name = PathUtil.getFileName(path);
if (name.startsWith(MOCK_JDK_DIR_NAME_PREFIX)) {
return "java " + StringUtil.trimStart(name, MOCK_JDK_DIR_NAME_PREFIX);
}
return null;
}
@NotNull
private static File getPathForJdkNamed(@NotNull String name) {
return new File(PathManager.getCommunityHomePath(), "java/" + name);
}
@NotNull
public static Sdk getWebMockJdk17() {
Sdk jdk = getMockJdk17();
jdk=addWebJarsTo(jdk);
return jdk;
}
@NotNull
@Contract(pure=true)
public static Sdk addWebJarsTo(@NotNull Sdk jdk) {
try {
jdk = (Sdk)jdk.clone();
}
catch (CloneNotSupportedException e) {
throw new RuntimeException(e);
}
SdkModificator sdkModificator = jdk.getSdkModificator();
sdkModificator.addRoot(findJar("lib/jsp-api.jar"), OrderRootType.CLASSES);
sdkModificator.addRoot(findJar("lib/servlet-api.jar"), OrderRootType.CLASSES);
sdkModificator.commitChanges();
return jdk;
}
@NotNull
private static VirtualFile findJar(@NotNull String name) {
String path = PathManager.getHomePath() + '/' + name;
VirtualFile file = VfsTestUtil.findFileByCaseSensitivePath(path);
VirtualFile jar = JarFileSystem.getInstance().getJarRootForLocalFile(file);
assert jar != null : "no .jar for: " + path;
return jar;
}
public static void setTestVersion(@NotNull JavaSdkVersion testVersion, @NotNull Module module, @NotNull Disposable parentDisposable) {
Sdk sdk = ModuleRootManager.getInstance(module).getSdk();
Assert.assertNotNull(sdk);
String oldVersionString = sdk.getVersionString();
// hack
((SdkModificator)sdk).setVersionString(testVersion.getDescription());
Assert.assertSame(testVersion, JavaSdk.getInstance().getVersion(sdk));
Disposer.register(parentDisposable, () -> ((SdkModificator)sdk).setVersionString(oldVersionString));
}
@NotNull
public static String requireRealJdkHome() {
String javaHome = SystemProperties.getJavaHome();
List<String> paths =
ContainerUtil.packNullables(javaHome, new File(javaHome).getParent(), System.getenv("JDK_16_x64"), System.getenv("JDK_16"));
for (String path : paths) {
if (JdkUtil.checkForJdk(path)) {
return path;
}
}
//noinspection ConstantConditions
Assume.assumeTrue("Cannot find JDK, checked paths: " + paths, false);
return null;
}
@NotNull
public static File findSourceFile(@NotNull String basePath) {
  // Prefer a Java source; fall back to Groovy; otherwise the base path is bogus.
  File candidate = new File(basePath + ".java");
  if (candidate.exists()) return candidate;
  candidate = new File(basePath + ".groovy");
  if (candidate.exists()) return candidate;
  throw new IllegalArgumentException("No test source for " + basePath);
}
@SuppressWarnings("UnnecessaryFullyQualifiedName")
public static void compileFile(@NotNull File source, @NotNull File out, @NotNull String... options) {
  // Compiles a single .java or .groovy source into 'out', failing loudly on any error.
  Assert.assertTrue("source does not exist: " + source.getPath(), source.isFile());
  List<String> args = new ArrayList<>();
  args.add("-d");
  args.add(out.getAbsolutePath());
  ContainerUtil.addAll(args, options);
  args.add(source.getAbsolutePath());
  String[] argv = ArrayUtil.toStringArray(args);
  if (source.getName().endsWith(".groovy")) {
    // The Groovy compiler reports failures via exceptions rather than exit codes.
    try {
      org.codehaus.groovy.tools.FileSystemCompiler.commandLineCompile(argv);
    }
    catch (Exception e) {
      throw new IllegalStateException(e);
    }
  }
  else {
    int exitCode = com.sun.tools.javac.Main.compile(argv);
    if (exitCode != 0) throw new IllegalStateException("javac failed with exit code " + exitCode);
  }
}
}
| |
/**
*/
package CIM.IEC61970.Informative.MarketOperations.impl;
import CIM.IEC61970.Informative.InfERPSupport.impl.ErpOrganisationImpl;
import CIM.IEC61970.Informative.MarketOperations.Market;
import CIM.IEC61970.Informative.MarketOperations.MarketOperationsPackage;
import CIM.IEC61970.Informative.MarketOperations.Pnode;
import CIM.IEC61970.Informative.MarketOperations.RTO;
import CIM.IEC61970.Informative.MarketOperations.ResourceGroupReq;
import CIM.IEC61970.Informative.MarketOperations.SecurityConstraintSum;
import CIM.IEC61970.Informative.MarketOperations.SecurityConstraints;
import java.util.Collection;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.util.EObjectWithInverseResolvingEList;
import org.eclipse.emf.ecore.util.InternalEList;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>RTO</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link CIM.IEC61970.Informative.MarketOperations.impl.RTOImpl#getMarkets <em>Markets</em>}</li>
* <li>{@link CIM.IEC61970.Informative.MarketOperations.impl.RTOImpl#getSecurityConstraints <em>Security Constraints</em>}</li>
* <li>{@link CIM.IEC61970.Informative.MarketOperations.impl.RTOImpl#getResourceGroupReqs <em>Resource Group Reqs</em>}</li>
* <li>{@link CIM.IEC61970.Informative.MarketOperations.impl.RTOImpl#getSecurityConstraintsLinear <em>Security Constraints Linear</em>}</li>
* <li>{@link CIM.IEC61970.Informative.MarketOperations.impl.RTOImpl#getPnodes <em>Pnodes</em>}</li>
* </ul>
*
* @generated
*/
public class RTOImpl extends ErpOrganisationImpl implements RTO {
	// NOTE(review): this class is EMF generator output (see the @generated tags).
	// Manual changes other than comments are normally overwritten on regeneration,
	// so only documentation is added here.
	/**
	 * The cached value of the '{@link #getMarkets() <em>Markets</em>}' reference list.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getMarkets()
	 * @generated
	 * @ordered
	 */
	protected EList<Market> markets;
	/**
	 * The cached value of the '{@link #getSecurityConstraints() <em>Security Constraints</em>}' reference list.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getSecurityConstraints()
	 * @generated
	 * @ordered
	 */
	protected EList<SecurityConstraints> securityConstraints;
	/**
	 * The cached value of the '{@link #getResourceGroupReqs() <em>Resource Group Reqs</em>}' reference list.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getResourceGroupReqs()
	 * @generated
	 * @ordered
	 */
	protected EList<ResourceGroupReq> resourceGroupReqs;
	/**
	 * The cached value of the '{@link #getSecurityConstraintsLinear() <em>Security Constraints Linear</em>}' reference list.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getSecurityConstraintsLinear()
	 * @generated
	 * @ordered
	 */
	protected EList<SecurityConstraintSum> securityConstraintsLinear;
	/**
	 * The cached value of the '{@link #getPnodes() <em>Pnodes</em>}' reference list.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getPnodes()
	 * @generated
	 * @ordered
	 */
	protected EList<Pnode> pnodes;
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected RTOImpl() {
		super();
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		// Identifies this object's metaclass for EMF's reflective APIs.
		return MarketOperationsPackage.Literals.RTO;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EList<Market> getMarkets() {
		// Lazily created; the list implementation keeps the inverse feature
		// (MarketOperationsPackage.MARKET__RTO) in sync automatically.
		if (markets == null) {
			markets = new EObjectWithInverseResolvingEList<Market>(Market.class, this, MarketOperationsPackage.RTO__MARKETS, MarketOperationsPackage.MARKET__RTO);
		}
		return markets;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EList<SecurityConstraints> getSecurityConstraints() {
		if (securityConstraints == null) {
			securityConstraints = new EObjectWithInverseResolvingEList<SecurityConstraints>(SecurityConstraints.class, this, MarketOperationsPackage.RTO__SECURITY_CONSTRAINTS, MarketOperationsPackage.SECURITY_CONSTRAINTS__RTO);
		}
		return securityConstraints;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EList<ResourceGroupReq> getResourceGroupReqs() {
		// ManyInverse: the opposite feature (RESOURCE_GROUP_REQ__RT_OS) is itself
		// many-valued, i.e. this is a many-to-many reference.
		if (resourceGroupReqs == null) {
			resourceGroupReqs = new EObjectWithInverseResolvingEList.ManyInverse<ResourceGroupReq>(ResourceGroupReq.class, this, MarketOperationsPackage.RTO__RESOURCE_GROUP_REQS, MarketOperationsPackage.RESOURCE_GROUP_REQ__RT_OS);
		}
		return resourceGroupReqs;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EList<SecurityConstraintSum> getSecurityConstraintsLinear() {
		if (securityConstraintsLinear == null) {
			securityConstraintsLinear = new EObjectWithInverseResolvingEList<SecurityConstraintSum>(SecurityConstraintSum.class, this, MarketOperationsPackage.RTO__SECURITY_CONSTRAINTS_LINEAR, MarketOperationsPackage.SECURITY_CONSTRAINT_SUM__RTO);
		}
		return securityConstraintsLinear;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EList<Pnode> getPnodes() {
		if (pnodes == null) {
			pnodes = new EObjectWithInverseResolvingEList<Pnode>(Pnode.class, this, MarketOperationsPackage.RTO__PNODES, MarketOperationsPackage.PNODE__RTO);
		}
		return pnodes;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@SuppressWarnings("unchecked")
	@Override
	public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		// Reflective hook: wires up the inverse end when the opposite side of a
		// bidirectional reference is set.
		switch (featureID) {
		case MarketOperationsPackage.RTO__MARKETS:
			return ((InternalEList<InternalEObject>)(InternalEList<?>)getMarkets()).basicAdd(otherEnd, msgs);
		case MarketOperationsPackage.RTO__SECURITY_CONSTRAINTS:
			return ((InternalEList<InternalEObject>)(InternalEList<?>)getSecurityConstraints()).basicAdd(otherEnd, msgs);
		case MarketOperationsPackage.RTO__RESOURCE_GROUP_REQS:
			return ((InternalEList<InternalEObject>)(InternalEList<?>)getResourceGroupReqs()).basicAdd(otherEnd, msgs);
		case MarketOperationsPackage.RTO__SECURITY_CONSTRAINTS_LINEAR:
			return ((InternalEList<InternalEObject>)(InternalEList<?>)getSecurityConstraintsLinear()).basicAdd(otherEnd, msgs);
		case MarketOperationsPackage.RTO__PNODES:
			return ((InternalEList<InternalEObject>)(InternalEList<?>)getPnodes()).basicAdd(otherEnd, msgs);
		}
		return super.eInverseAdd(otherEnd, featureID, msgs);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		// Reflective hook: unwires the inverse end when the opposite side is cleared.
		switch (featureID) {
		case MarketOperationsPackage.RTO__MARKETS:
			return ((InternalEList<?>)getMarkets()).basicRemove(otherEnd, msgs);
		case MarketOperationsPackage.RTO__SECURITY_CONSTRAINTS:
			return ((InternalEList<?>)getSecurityConstraints()).basicRemove(otherEnd, msgs);
		case MarketOperationsPackage.RTO__RESOURCE_GROUP_REQS:
			return ((InternalEList<?>)getResourceGroupReqs()).basicRemove(otherEnd, msgs);
		case MarketOperationsPackage.RTO__SECURITY_CONSTRAINTS_LINEAR:
			return ((InternalEList<?>)getSecurityConstraintsLinear()).basicRemove(otherEnd, msgs);
		case MarketOperationsPackage.RTO__PNODES:
			return ((InternalEList<?>)getPnodes()).basicRemove(otherEnd, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		// Reflective getter over this class's five reference-list features.
		switch (featureID) {
		case MarketOperationsPackage.RTO__MARKETS:
			return getMarkets();
		case MarketOperationsPackage.RTO__SECURITY_CONSTRAINTS:
			return getSecurityConstraints();
		case MarketOperationsPackage.RTO__RESOURCE_GROUP_REQS:
			return getResourceGroupReqs();
		case MarketOperationsPackage.RTO__SECURITY_CONSTRAINTS_LINEAR:
			return getSecurityConstraintsLinear();
		case MarketOperationsPackage.RTO__PNODES:
			return getPnodes();
		}
		return super.eGet(featureID, resolve, coreType);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@SuppressWarnings("unchecked")
	@Override
	public void eSet(int featureID, Object newValue) {
		// Reflective setter: replaces the list contents wholesale (clear + addAll).
		switch (featureID) {
		case MarketOperationsPackage.RTO__MARKETS:
			getMarkets().clear();
			getMarkets().addAll((Collection<? extends Market>)newValue);
			return;
		case MarketOperationsPackage.RTO__SECURITY_CONSTRAINTS:
			getSecurityConstraints().clear();
			getSecurityConstraints().addAll((Collection<? extends SecurityConstraints>)newValue);
			return;
		case MarketOperationsPackage.RTO__RESOURCE_GROUP_REQS:
			getResourceGroupReqs().clear();
			getResourceGroupReqs().addAll((Collection<? extends ResourceGroupReq>)newValue);
			return;
		case MarketOperationsPackage.RTO__SECURITY_CONSTRAINTS_LINEAR:
			getSecurityConstraintsLinear().clear();
			getSecurityConstraintsLinear().addAll((Collection<? extends SecurityConstraintSum>)newValue);
			return;
		case MarketOperationsPackage.RTO__PNODES:
			getPnodes().clear();
			getPnodes().addAll((Collection<? extends Pnode>)newValue);
			return;
		}
		super.eSet(featureID, newValue);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		// Reflective unset: empties the corresponding list.
		switch (featureID) {
		case MarketOperationsPackage.RTO__MARKETS:
			getMarkets().clear();
			return;
		case MarketOperationsPackage.RTO__SECURITY_CONSTRAINTS:
			getSecurityConstraints().clear();
			return;
		case MarketOperationsPackage.RTO__RESOURCE_GROUP_REQS:
			getResourceGroupReqs().clear();
			return;
		case MarketOperationsPackage.RTO__SECURITY_CONSTRAINTS_LINEAR:
			getSecurityConstraintsLinear().clear();
			return;
		case MarketOperationsPackage.RTO__PNODES:
			getPnodes().clear();
			return;
		}
		super.eUnset(featureID);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		// A feature counts as "set" only when its cache exists and is non-empty;
		// fields are checked directly so no lists are created as a side effect.
		switch (featureID) {
		case MarketOperationsPackage.RTO__MARKETS:
			return markets != null && !markets.isEmpty();
		case MarketOperationsPackage.RTO__SECURITY_CONSTRAINTS:
			return securityConstraints != null && !securityConstraints.isEmpty();
		case MarketOperationsPackage.RTO__RESOURCE_GROUP_REQS:
			return resourceGroupReqs != null && !resourceGroupReqs.isEmpty();
		case MarketOperationsPackage.RTO__SECURITY_CONSTRAINTS_LINEAR:
			return securityConstraintsLinear != null && !securityConstraintsLinear.isEmpty();
		case MarketOperationsPackage.RTO__PNODES:
			return pnodes != null && !pnodes.isEmpty();
		}
		return super.eIsSet(featureID);
	}
} //RTOImpl
| |
// **********************************************************************
//
// <copyright>
//
// BBN Technologies
// 10 Moulton Street
// Cambridge, MA 02138
// (617) 873-8000
//
// Copyright (C) BBNT Solutions LLC. All rights reserved.
//
// </copyright>
// **********************************************************************
//
// $Source: /cvs/distapps/openmap/src/openmap/com/bbn/openmap/layer/dted/DTEDFrame.java,v $
// $RCSfile: DTEDFrame.java,v $
// $Revision: 1.8 $
// $Date: 2008/02/29 00:51:10 $
// $Author: dietrick $
//
// **********************************************************************
package com.bbn.openmap.layer.dted;
import java.awt.Color;
import com.bbn.openmap.dataAccess.dted.DTEDFrame;
import com.bbn.openmap.omGraphics.OMGraphic;
import com.bbn.openmap.omGraphics.OMRaster;
import com.bbn.openmap.omGraphics.OMRasterObject;
import com.bbn.openmap.proj.CADRG;
import com.bbn.openmap.proj.Projection;
import com.bbn.openmap.proj.coords.LatLonPoint;
import com.bbn.openmap.util.Debug;
/**
* The DTEDSubframedFrame is the representation of the DTED (Digital Terrain
* Elevation Data) data from a single dted data file. It keeps track of all the
 * attribute information of its data, and also maintains an array of images
* (DTEDFrameSubframe) that represent views of the elevation posts.
*/
public class DTEDSubframedFrame
        extends DTEDFrame {
    /** The colortable used to create the images. */
    public DTEDFrameColorTable colorTable;
    /** The subframe presentation attributes. */
    public DTEDFrameSubframeInfo subframeInfo; // master
    /**
     * The frame image is divided into 200x200 pixel subframes, with a leftover
     * frame at the end. This is how many horizontal subframes there are.
     */
    public int number_horiz_subframes;
    /**
     * The frame image is divided into 200x200 pixel subframes, with a leftover
     * frame at the end. This is how many vertical subframes there are.
     */
    public int number_vert_subframes;
    /** The image array for the subframes, indexed [horizontal][vertical]. */
    public DTEDFrameSubframe[][] subframes;

    // ////////////////
    // Administrative methods
    // ////////////////

    /**
     * Simplest constructor.
     *
     * @param filePath complete path to the DTED frame.
     */
    public DTEDSubframedFrame(String filePath) {
        this(filePath, null, null, false);
    }

    /**
     * Constructor with colortable and presentation information.
     *
     * @param filePath complete path to the DTED frame.
     * @param cTable the colortable to use for the images.
     * @param info presentation parameters.
     */
    public DTEDSubframedFrame(String filePath, DTEDFrameColorTable cTable, DTEDFrameSubframeInfo info) {
        this(filePath, cTable, info, false);
    }

    /**
     * Constructor that controls how eagerly the file is read.
     *
     * @param filePath complete path to the DTED frame.
     * @param readWholeFile If true, all of the elevation data will be read at
     *        load time. If false, elevation post data will be read in per
     *        longitude column depending on the need. False is recommended for
     *        DTED level 1 and 2.
     */
    public DTEDSubframedFrame(String filePath, boolean readWholeFile) {
        this(filePath, null, null, readWholeFile);
    }

    /**
     * Constructor with colortable and presentation information.
     *
     * @param filePath complete path to the DTED frame.
     * @param cTable the colortable to use for the images.
     * @param info presentation parameters.
     * @param readWholeFile If true, all of the elevation data will be read at
     *        load time. If false, elevation post data will be read in per
     *        longitude column depending on the need. False is recommended for
     *        DTED level 1 and 2.
     */
    public DTEDSubframedFrame(String filePath, DTEDFrameColorTable cTable, DTEDFrameSubframeInfo info, boolean readWholeFile) {
        super(filePath, readWholeFile);
        colorTable = cTable;
        subframeInfo = info;
    }

    /** Sets the colortable used to build subframe images. */
    public void setColorTable(DTEDFrameColorTable c_Table) {
        colorTable = c_Table;
    }

    /** @return the colortable used to build subframe images. */
    public DTEDFrameColorTable getColorTable() {
        return colorTable;
    }

    /**
     * Sets the subframe array. Blows away any images that may already be there.
     */
    public void initSubframes(int numHorizSubframes, int numVertSubframes) {
        number_horiz_subframes = numHorizSubframes;
        number_vert_subframes = numVertSubframes;
        subframes = new DTEDFrameSubframe[numHorizSubframes][numVertSubframes];
        if (Debug.debugging("dted")) {
            Debug.output("///////// DTEDFrame: subframe array initialized, " + numHorizSubframes + "x" + numVertSubframes);
        }
    }

    /**
     * If you just want to get an image for the DTEDFrame, then call this. One
     * OMRaster for the entire DTEDFrame will be returned, with the default
     * rendering parameters (Colored shading) and the default colortable. Use
     * the other getOMRaster method if you want something different. This method
     * actually calls that other method, so read the documentation for that as
     * well.
     *
     * @param proj EqualArc projection to use to create image.
     * @return raster image to display in OpenMap, or null if the frame is
     *         invalid or the view type is NOSHADING.
     */
    public OMGraphic getImage(Projection proj) {
        return getImage(null, null, proj);
    }

    /**
     * If you just want to get an image for the DTEDFrame, then call this. One
     * OMRaster for the entire DTEDFrame will be returned. In the
     * DTEDFrameSubframeInfo, you need to set the color type and all the
     * parameters that are associated with the rendering parameters. The
     * projection parameters of the DFSI (image height, width, pixel intervals)
     * will be set in this method based on the projection. If you want a
     * different sized image, scale the thing you get back from this method, or
     * change the scale of the projection that is passed in. Calling this method
     * will cause the DTEDFrame subframe cache to reset itself to hold one
     * subframe covering the entire frame. Just so you know.
     *
     * @param dfsi the DTEDFrameSubframeInfo describing the subframe; may be
     *        null for colored-shading defaults.
     * @param colortable the colortable to use when building the image; may be
     *        null for the default table.
     * @param proj EqualArc projection to use to create image.
     * @return raster image to display in OpenMap, or null on error.
     */
    public OMGraphic getImage(DTEDFrameSubframeInfo dfsi, DTEDFrameColorTable colortable, Projection proj) {
        if (proj == null) {
            Debug.error("DTEDFrame.getOMRaster: need projection to create image.");
            return null;
        }
        if (colortable == null) {
            colortable = new DTEDFrameColorTable();
        }
        if (dfsi == null) {
            dfsi =
                new DTEDFrameSubframeInfo(DTEDFrameSubframe.COLOREDSHADING, DTEDFrameSubframe.DEFAULT_BANDHEIGHT,
                                          DTEDFrameSubframe.LEVEL_1, // Doesn't matter
                                          DTEDFrameSubframe.DEFAULT_SLOPE_ADJUST);
        }
        // Derive pixel intervals (degrees/pixel) from the CADRG pixel constants,
        // then size a single image covering this 1x1 degree frame.
        CADRG cadrg = CADRG.convertProjection(proj);
        dfsi.xPixInterval = 360 / cadrg.getXPixConstant(); // degrees/pixel
        dfsi.yPixInterval = 90 / cadrg.getYPixConstant();
        dfsi.height = (int) (1 / dfsi.yPixInterval);
        dfsi.width = (int) (1 / dfsi.xPixInterval);
        // Will trigger the right thing in getSubframeOMRaster;
        subframes = null;
        return getSubframeImage(dfsi, colortable, proj);
    }

    /**
     * Return the subframe image as described in the DTEDFrameSubframeInfo. This
     * is called by the DTEDCacheHandler, which has in turn set the
     * DTEDFrameSubframeInfo parameters to match the projection parameters. This
     * turns out to be kinda important.
     *
     * @param dfsi the DTEDFrameSubframeInfo describing the subframe.
     * @param colortable the colortable to use when building the image.
     * @param proj projection used to generate the final image.
     * @return raster image to display in OpenMap, or null if the frame is
     *         invalid, the view type is NOSHADING, or a data record could not
     *         be read.
     */
    public OMGraphic getSubframeImage(DTEDFrameSubframeInfo dfsi, DTEDFrameColorTable colortable, Projection proj) {
        if (!frame_is_valid)
            return null;
        OMGraphic raster = null;
        if (dfsi.viewType == DTEDFrameSubframe.NOSHADING)
            return null;
        if (dfsi.viewType == DTEDFrameSubframe.COLOREDSHADING)
            colortable.setGreyScale(false);
        else
            colortable.setGreyScale(true);
        float lat_origin = dfsi.lat;
        float lon_origin = dfsi.lon;
        if (subframes == null) {
            // Need to set a couple of things up if the DTEDFrameCache
            // isn't being used to set up the subframe information in
            // the dfsi.
            initSubframes(1, 1);
            // NOTE!! The algorithm uses the coordinates of the top
            // left corner as a reference!!!!!!!!
            lat_origin = dsi.lat_origin + 1;
            lon_origin = dsi.lon_origin;
        }
        DTEDFrameSubframe subframe = subframes[dfsi.subx][dfsi.suby];
        if (Debug.debugging("dteddetail")) {
            Debug.output("Subframe lat/lon => lat= " + lat_origin + " vs. " + dfsi.lat + " lon= " + lon_origin + " vs. " + dfsi.lon
                    + " subx = " + dfsi.subx + " suby = " + dfsi.suby);
            Debug.output("Height/width of subframe => height= " + dfsi.height + " width= " + dfsi.width);
        }
        if (subframe != null) {
            // Reuse the cached image if it still matches the projection/params.
            raster = subframe.getImageIfCurrent(proj, dfsi);
            if (raster != null) {
                if (Debug.debugging("dted")) {
                    Debug.output("######## DTEDFrame: returning cached subframe");
                }
                return raster;
            }
            if (Debug.debugging("dted")) {
                Debug.output(" *** DTEDFrame: changing image of cached subframe");
            }
            /*
             * If there is an image, the types are different and it needs to be
             * redrawn
             */
            subframe.dfsi = dfsi.makeClone();
        } else {
            if (Debug.debugging("dted")) {
                Debug.output(" +++ DTEDFrame: creating subframe");
            }
            subframe = new DTEDFrameSubframe(dfsi);
            subframes[dfsi.subx][dfsi.suby] = subframe;
        }
        // lat/lon_post_intervals are *10 too big - // extra 0 in
        // 36000 to counteract
        // start in lower left of subframe
        double start_lat_index = (lat_origin - (double) dsi.sw_lat) * 36000.0 / (double) uhl.lat_post_interval;
        double start_lon_index = (lon_origin - (double) dsi.sw_lon) * 36000.0 / (double) uhl.lon_post_interval;
        double end_lat_index =
            ((lat_origin - ((double) dfsi.height * dfsi.yPixInterval)) - (double) dsi.sw_lat) * 36000.0
                    / (double) uhl.lat_post_interval;
        double end_lon_index =
            ((lon_origin + ((double) dfsi.width * dfsi.xPixInterval)) - (double) dsi.sw_lon) * 36000.0
                    / (double) uhl.lon_post_interval;
        double lat_interval = (start_lat_index - end_lat_index) / (double) dfsi.height;
        double lon_interval = (end_lon_index - start_lon_index) / (double) dfsi.width;
        if (Debug.debugging("dteddetail"))
            Debug.output(" start_lat_index => " + start_lat_index + "\n" + " end_lat_index => " + end_lat_index + "\n"
                    + " start_lon_index => " + start_lon_index + "\n" + " end_lon_index => " + end_lon_index + "\n"
                    + " lat_interval => " + lat_interval + "\n" + " lon_interval => " + lon_interval);
        short e1, e2;
        short xc = 0;
        short yc = 0;
        short xnw = 0;
        short ynw = 0;
        short xse = 0;
        short yse = 0;
        double slope;
        double distance = 1.0;
        float value = 0.0f;
        int assignment = 0;
        double modifier = 0;
        double xw_offset = 0;
        double xe_offset = 0;
        double yn_offset = 0;
        double ys_offset = 0;
        int elevation = 0;
        // Calculations needed once for slope shading
        if (dfsi.viewType == DTEDFrameSubframe.SLOPESHADING
                || (dfsi.viewType == DTEDFrameSubframe.COLOREDSHADING && colortable.colors.length > DTEDFrameColorTable.NUM_ELEVATION_COLORS)) {
            // to get to the right part of the frame, kind of like a
            // subframe indexing thing
            xw_offset = start_lon_index - Math.ceil(lon_interval);
            xe_offset = start_lon_index + Math.ceil(lon_interval);
            yn_offset = start_lat_index + Math.ceil(lat_interval);
            ys_offset = start_lat_index - Math.ceil(lat_interval);
            switch (dfsi.dtedLevel) {
            // larger numbers make less contrast
            case 0:
                modifier = (double) 4;
                break;// 1000 ideal
            case 1:
                modifier = (double) .02;
                break;// 2 ideal
            case 2:
                modifier = (double) .0001;
                break;
            case 3:
                modifier = (double) .000001;
                break;
            default:
                modifier = (double) 1;
            }
            // With more colors, contrast tends to be a little light
            // for the default - brighten it up more
            if (colortable.colors.length > 215)
                modifier /= 10;
            for (int h = dfsi.slopeAdjust; h < 5; h++)
                modifier *= 10;
            distance = Math.sqrt((modifier * lon_interval * lon_interval) + (modifier * lat_interval * lat_interval));
        }
        ImageData imageData = ImageData.getImageData(dfsi.colorModel, dfsi.width, dfsi.height, colortable.colors);
        for (short x = 0; x < dfsi.width; x++) {
            // used for both elevation banding and slope
            xc = (short) (start_lon_index + ((x) * lon_interval));
            if (xc < 0)
                xc = 0;
            if (xc > dsi.num_lon_points - 1)
                xc = (short) (dsi.num_lon_points - 1);
            if ((elevations[xc] == null) && !readDataRecord(xc)) {
                Debug.error("DTEDFrame: Problem reading lat point line in data record");
                return null;
            }
            if (dfsi.viewType == DTEDFrameSubframe.SLOPESHADING
                    || (dfsi.viewType == DTEDFrameSubframe.COLOREDSHADING && colortable.colors.length > DTEDFrameColorTable.NUM_ELEVATION_COLORS)) {
                // This is actually finding the right x post for this
                // pixel, within the subframe measurements.
                xnw = (short) (xw_offset + Math.floor(x * lon_interval));
                xse = (short) (xe_offset + Math.floor(x * lon_interval));
                // trying to smooth out the edge of the frame
                if (xc == 0 || xnw < 0) {
                    xnw = xc;
                    xse = (short) (xnw + 2.0 * Math.ceil(lon_interval));
                }
                if (xc == dsi.num_lon_points - 1 || xse > dsi.num_lon_points - 1) {
                    xse = (short) (dsi.num_lon_points - 1);
                    xnw = (short) (xse - 2.0 * Math.ceil(lon_interval));
                }
                if (((elevations[xnw] == null) && !readDataRecord(xnw)) || ((elevations[xse] == null) && !readDataRecord(xse))) {
                    Debug.error("DTEDFrame: Problem reading lat point line in data record");
                    return null;
                }
            }
            // Now, calculate the data and assign the pixels based on y
            for (short y = 0; y < dfsi.height; y++) {
                // used for elevation band and slope
                yc = (short) (start_lat_index - ((y) * lat_interval));
                if (yc < 0)
                    yc = 0;
                // NOTE(review): yc is clamped at 0 but not at
                // dsi.num_lat_lines - 1; presumably callers keep the subframe
                // inside the frame bounds — confirm, otherwise this can index
                // past the end of elevations[xc].
                elevation = (int) elevations[xc][yc];
                // elevation shading
                if (dfsi.viewType == DTEDFrameSubframe.METERSHADING || dfsi.viewType == DTEDFrameSubframe.FEETSHADING) {
                    // Just use the top two-thirds of the colors
                    if (elevation == 0)
                        assignment = 0; // color water Blue
                    else {
                        if (elevation < 0)
                            elevation *= -1; // Death Valley
                        if (dfsi.viewType == DTEDFrameSubframe.FEETSHADING)
                            elevation = (int) (elevation * 3.2);
                        // Start at the darkest color, and then go up
                        // through the colormap for each band height, then start
                        // back at the darkest when you get to the last color.
                        // To make this more useful, I limit the number of
                        // colors (10) used - if there isn't enough contrast
                        // between the colors, you can't see the bands. The
                        // contrast adjustment in 24-bit color mode (216 colors)
                        // lets you add a few colors.
                        if (colortable.colors.length < 216) {
                            try {
                                assignment = (int) ((elevation / dfsi.bandHeight) % (colortable.colors.length - 6) + 6);
                            } catch (java.lang.ArithmeticException ae) {
                                assignment = 1;
                            }
                        } else {
                            try {
                                assignment =
                                    (int) (((elevation / dfsi.bandHeight) % (10 - 2 * (3 - dfsi.slopeAdjust)) * (colortable.colors.length / (10 - 2 * (3 - dfsi.slopeAdjust)))) + 6);
                            } catch (java.lang.ArithmeticException ae) {
                                assignment = 1;
                            }
                        }
                    }
                    imageData.set(x, y, assignment);
                }
                // Slope shading
                else if (dfsi.viewType == DTEDFrameSubframe.SLOPESHADING
                        || (dfsi.viewType == DTEDFrameSubframe.COLOREDSHADING && colortable.colors.length > DTEDFrameColorTable.NUM_ELEVATION_COLORS)) {
                    // find the y post indexes within the subframe
                    ynw = (short) (yn_offset - Math.floor(y * lat_interval));
                    yse = (short) (ys_offset - Math.floor(y * lat_interval));
                    // trying to smooth out the edge of the frame by
                    // handling the frame limits
                    if (yse < 0)
                        yse = 0;
                    if (yc == dsi.num_lat_lines - 1 || ynw > dsi.num_lat_lines - 1)
                        ynw = (short) (dsi.num_lat_lines - 1);
                    e2 = elevations[xse][yse]; // down & right elevation
                    e1 = elevations[xnw][ynw]; // up and left elevation
                    slope = (e2 - e1) / distance; // slope relative to nw sun
                    // colormap value darker for negative slopes,
                    // brighter for positive slopes
                    if (dfsi.viewType == DTEDFrameSubframe.COLOREDSHADING) {
                        assignment = 1;
                        elevation = (int) (elevation * 3.2);// feet
                        for (int l = 1; l < DTEDFrameColorTable.NUM_ELEVATION_COLORS; l++)
                            if (elevation <= colortable.elevation_color_cutoff[l]) {
                                if (slope < 0)
                                    assignment = (int) (l + DTEDFrameColorTable.NUM_ELEVATION_COLORS);
                                else if (slope > 0)
                                    assignment = (int) (l + (DTEDFrameColorTable.NUM_ELEVATION_COLORS * 2));
                                else
                                    assignment = (int) l;
                                break;
                            }
                        if (elevation == 0)
                            assignment = 0;
                        imageData.set(x, y, assignment);
                    }
                    else {
                        value = (float) (((colortable.colors.length - 1) / 2) + slope);
                        // not water, but close in the colormap - max dark
                        if (slope != 0 && value < 1)
                            value = 1;
                        if (elevation == 0)
                            value = 0; // water?!?
                        if (value > (colortable.colors.length - 1))
                            value = colortable.colors.length - 1; // max bright
                        assignment = (int) value;
                        imageData.set(x, y, assignment);
                    }
                }
                // Subframe outlines - different colors for each side
                // of the frame. This is really for debugging purposes.
                else if (dfsi.viewType == DTEDFrameSubframe.BOUNDARYSHADING) {
                    int c;
                    if (x < 1)
                        c = 1;
                    else if (x > dfsi.width - 2)
                        c = 12;
                    else if (y < 1)
                        c = 1;
                    else if (y > dfsi.height - 2)
                        c = 12;
                    else
                        c = 7;
                    imageData.set(x, y, c);
                } else if (dfsi.viewType == DTEDFrameSubframe.COLOREDSHADING) {
                    assignment = 1;
                    elevation = (int) (elevation * 3.2);// feet
                    for (int l = 1; l < DTEDFrameColorTable.NUM_ELEVATION_COLORS; l++)
                        if (elevation <= colortable.elevation_color_cutoff[l]) {
                            assignment = (int) l;
                            break;
                        }
                    if (elevation == 0)
                        assignment = 0;
                    if (elevation < 0)
                        assignment = 1;
                    if (elevation > 33000)
                        assignment = 1;
                    imageData.set(x, y, assignment);
                }
            }
        }
        imageData.updateData(subframe);
        if (Debug.debugging("dteddetail"))
            Debug.output("DTEDFrame: leaving raster");
        return subframe.getImage(proj);
    }

    /**
     * Simple viewer for a single DTED frame file: renders the frame image in an
     * AWT window. Usage: DTEDSubframedFrame &lt;path-to-frame&gt;
     */
    public static void main(String[] args) {
        Debug.init();
        if (args.length < 1) {
            System.out.println("DTEDFrame: Need a path/filename");
            System.exit(0);
        }
        System.out.println("DTEDFrame: " + args[0]);
        DTEDSubframedFrame df = new DTEDSubframedFrame(args[0]);
        if (!df.frame_is_valid) {
            // Bug fix: previously execution fell through for an invalid frame
            // and dereferenced df.dsi / a null image, throwing a
            // NullPointerException. Exit with an error instead.
            System.out.println("DTEDFrame: " + args[0] + " is not a valid DTED frame file");
            System.exit(1);
        }
        System.out.println(df.uhl);
        System.out.println(df.dsi);
        System.out.println(df.acc);
        // Center the projection on the middle of the 1x1 degree frame.
        float lat = df.dsi.lat_origin + .5f;
        float lon = df.dsi.lon_origin + .5f;
        CADRG crg = new CADRG(new LatLonPoint.Double(lat, lon), 1500000, 600, 600);
        final OMGraphic ras = df.getImage(crg);
        // Pushes the image to the left top of the frame.
        if (ras instanceof OMRaster) {
            crg.setHeight(((OMRaster) ras).getHeight());
            crg.setWidth(((OMRaster) ras).getWidth());
        }
        // Bug fix: guard against a null image (e.g. unreadable data record)
        // instead of throwing a NullPointerException here.
        if (ras != null) {
            ras.generate(crg);
        }
        java.awt.Frame window = new java.awt.Frame(args[0]) {
            public void paint(java.awt.Graphics g) {
                if (ras != null) {
                    ras.render(g);
                }
            }
        };
        window.addWindowListener(new java.awt.event.WindowAdapter() {
            public void windowClosing(java.awt.event.WindowEvent e) {
                // need a shutdown event to notify other gui beans and
                // then exit.
                System.exit(0);
            }
        });
        window.setSize(crg.getWidth(), crg.getHeight());
        window.setVisible(true);
        window.repaint();
    }

    /**
     * Abstraction over the two pixel-storage strategies used when building a
     * subframe image: direct-color int pixels or indexed bytes plus a
     * colortable.
     */
    protected static abstract class ImageData {
        /** Assigns the color (or color index) for pixel (x, y). */
        protected abstract void set(short x, short y, int value);
        /** Pushes the accumulated pixel data into the given subframe. */
        protected abstract void updateData(DTEDFrameSubframe dfs);
        int width = 0;
        int height = 0;
        Color[] colors;

        protected ImageData(int w, int h, Color[] colors) {
            this.width = w;
            this.height = h;
            this.colors = colors;
        }

        /** Factory: picks the storage strategy matching the color model. */
        protected static ImageData getImageData(int colorModel, int width, int height, Color[] colors) {
            if (colorModel == OMRasterObject.COLORMODEL_DIRECT) {
                return new Pixel(width, height, colors);
            } else {
                return new Byte(width, height, colors);
            }
        }

        /** Direct-color storage: one packed RGB int per pixel. */
        protected static class Pixel
                extends ImageData {
            int[] pixels;
            // Debug aid only: a random color per instance (see the
            // commented-out line in set()). Not used in normal rendering.
            int ranColor;

            protected Pixel(int w, int h, Color[] colors) {
                super(w, h, colors);
                pixels = new int[w * h];
                int red = (int) (Math.random() * 255);
                int green = (int) (Math.random() * 255);
                int blue = (int) (Math.random() * 255);
                Color color = new Color(red, green, blue);
                ranColor = color.getRGB();
            }

            protected void set(short x, short y, int value) {
                pixels[(y * width) + x] = colors[value].getRGB();
                // pixels[(y * width) + x] = ranColor;
            }

            protected void updateData(DTEDFrameSubframe dfs) {
                dfs.setPixels(pixels);
            }
        }

        /** Indexed storage: one colortable index byte per pixel. */
        protected static class Byte
                extends ImageData {
            byte[] bytes;

            protected Byte(int w, int h, Color[] colors) {
                super(w, h, colors);
                bytes = new byte[w * h];
            }

            protected void set(short x, short y, int value) {
                bytes[(y * width) + x] = (byte) value;
            }

            protected void updateData(DTEDFrameSubframe dfs) {
                dfs.setBitsAndColors(bytes, colors);
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.endpoint.dsl;
import javax.annotation.Generated;
import org.apache.camel.ExchangePattern;
import org.apache.camel.builder.EndpointConsumerBuilder;
import org.apache.camel.builder.EndpointProducerBuilder;
import org.apache.camel.builder.endpoint.AbstractEndpointBuilder;
import org.apache.camel.spi.ExceptionHandler;
/**
* Expose gRPC endpoints and access external gRPC endpoints.
*
* Generated by camel build tools - do NOT edit this file!
*/
@Generated("org.apache.camel.maven.packaging.EndpointDslMojo")
public interface GrpcEndpointBuilderFactory {
/**
* Builder for endpoint consumers for the gRPC component.
*/
public interface GrpcEndpointConsumerBuilder
        extends
            EndpointConsumerBuilder {
    // NOTE(review): generated by camel build tools (EndpointDslMojo) — do not
    // hand-edit; change the component metadata and regenerate instead.
    default AdvancedGrpcEndpointConsumerBuilder advanced() {
        // Cast-based view switch; presumably the concrete runtime builder
        // implements both the basic and advanced interfaces — verify there.
        return (AdvancedGrpcEndpointConsumerBuilder) this;
    }
    /**
     * The HTTP/2 flow control window size (MiB).
     *
     * The option is a: <code>int</code> type.
     *
     * Default: 1048576
     * Group: common
     */
    default GrpcEndpointConsumerBuilder flowControlWindow(int flowControlWindow) {
        doSetProperty("flowControlWindow", flowControlWindow);
        return this;
    }
    /**
     * The HTTP/2 flow control window size (MiB).
     *
     * The option will be converted to a <code>int</code> type.
     *
     * Default: 1048576
     * Group: common
     */
    default GrpcEndpointConsumerBuilder flowControlWindow(String flowControlWindow) {
        doSetProperty("flowControlWindow", flowControlWindow);
        return this;
    }
    /**
     * The maximum message size allowed to be received/sent (MiB).
     *
     * The option is a: <code>int</code> type.
     *
     * Default: 4194304
     * Group: common
     */
    default GrpcEndpointConsumerBuilder maxMessageSize(int maxMessageSize) {
        doSetProperty("maxMessageSize", maxMessageSize);
        return this;
    }
    /**
     * The maximum message size allowed to be received/sent (MiB).
     *
     * The option will be converted to a <code>int</code> type.
     *
     * Default: 4194304
     * Group: common
     */
    default GrpcEndpointConsumerBuilder maxMessageSize(String maxMessageSize) {
        doSetProperty("maxMessageSize", maxMessageSize);
        return this;
    }
    /**
     * Allows for bridging the consumer to the Camel routing Error Handler,
     * which mean any exceptions occurred while the consumer is trying to
     * pickup incoming messages, or the likes, will now be processed as a
     * message and handled by the routing Error Handler. By default the
     * consumer will use the org.apache.camel.spi.ExceptionHandler to deal
     * with exceptions, that will be logged at WARN or ERROR level and
     * ignored.
     *
     * The option is a: <code>boolean</code> type.
     *
     * Default: false
     * Group: consumer
     */
    default GrpcEndpointConsumerBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
        doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
        return this;
    }
    /**
     * Allows for bridging the consumer to the Camel routing Error Handler,
     * which mean any exceptions occurred while the consumer is trying to
     * pickup incoming messages, or the likes, will now be processed as a
     * message and handled by the routing Error Handler. By default the
     * consumer will use the org.apache.camel.spi.ExceptionHandler to deal
     * with exceptions, that will be logged at WARN or ERROR level and
     * ignored.
     *
     * The option will be converted to a <code>boolean</code> type.
     *
     * Default: false
     * Group: consumer
     */
    default GrpcEndpointConsumerBuilder bridgeErrorHandler(String bridgeErrorHandler) {
        doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
        return this;
    }
    /**
     * This option specifies the top-level strategy for processing service
     * requests and responses in streaming mode. If an aggregation strategy
     * is selected, all requests will be accumulated in the list, then
     * transferred to the flow, and the accumulated responses will be sent
     * to the sender. If a propagation strategy is selected, request is sent
     * to the stream, and the response will be immediately sent back to the
     * sender.
     *
     * The option is a:
     * <code>org.apache.camel.component.grpc.GrpcConsumerStrategy</code>
     * type.
     *
     * Default: PROPAGATION
     * Group: consumer
     */
    default GrpcEndpointConsumerBuilder consumerStrategy(GrpcConsumerStrategy consumerStrategy) {
        doSetProperty("consumerStrategy", consumerStrategy);
        return this;
    }
    /**
     * This option specifies the top-level strategy for processing service
     * requests and responses in streaming mode. If an aggregation strategy
     * is selected, all requests will be accumulated in the list, then
     * transferred to the flow, and the accumulated responses will be sent
     * to the sender. If a propagation strategy is selected, request is sent
     * to the stream, and the response will be immediately sent back to the
     * sender.
     *
     * The option will be converted to a
     * <code>org.apache.camel.component.grpc.GrpcConsumerStrategy</code>
     * type.
     *
     * Default: PROPAGATION
     * Group: consumer
     */
    default GrpcEndpointConsumerBuilder consumerStrategy(String consumerStrategy) {
        doSetProperty("consumerStrategy", consumerStrategy);
        return this;
    }
    /**
     * Determines if onCompleted events should be pushed to the Camel route.
     *
     * The option is a: <code>boolean</code> type.
     *
     * Default: false
     * Group: consumer
     */
    default GrpcEndpointConsumerBuilder forwardOnCompleted(boolean forwardOnCompleted) {
        doSetProperty("forwardOnCompleted", forwardOnCompleted);
        return this;
    }
    /**
     * Determines if onCompleted events should be pushed to the Camel route.
     *
     * The option will be converted to a <code>boolean</code> type.
     *
     * Default: false
     * Group: consumer
     */
    default GrpcEndpointConsumerBuilder forwardOnCompleted(String forwardOnCompleted) {
        doSetProperty("forwardOnCompleted", forwardOnCompleted);
        return this;
    }
    /**
     * Determines if onError events should be pushed to the Camel route.
     * Exceptions will be set as message body.
     *
     * The option is a: <code>boolean</code> type.
     *
     * Default: false
     * Group: consumer
     */
    default GrpcEndpointConsumerBuilder forwardOnError(boolean forwardOnError) {
        doSetProperty("forwardOnError", forwardOnError);
        return this;
    }
    /**
     * Determines if onError events should be pushed to the Camel route.
     * Exceptions will be set as message body.
     *
     * The option will be converted to a <code>boolean</code> type.
     *
     * Default: false
     * Group: consumer
     */
    default GrpcEndpointConsumerBuilder forwardOnError(String forwardOnError) {
        doSetProperty("forwardOnError", forwardOnError);
        return this;
    }
    /**
     * The maximum number of concurrent calls permitted for each incoming
     * server connection.
     *
     * The option is a: <code>int</code> type.
     *
     * Default: 2147483647
     * Group: consumer
     */
    default GrpcEndpointConsumerBuilder maxConcurrentCallsPerConnection(int maxConcurrentCallsPerConnection) {
        doSetProperty("maxConcurrentCallsPerConnection", maxConcurrentCallsPerConnection);
        return this;
    }
    /**
     * The maximum number of concurrent calls permitted for each incoming
     * server connection.
     *
     * The option will be converted to a <code>int</code> type.
     *
     * Default: 2147483647
     * Group: consumer
     */
    default GrpcEndpointConsumerBuilder maxConcurrentCallsPerConnection(String maxConcurrentCallsPerConnection) {
        doSetProperty("maxConcurrentCallsPerConnection", maxConcurrentCallsPerConnection);
        return this;
    }
    /**
     * Lets the route to take control over stream observer. If this value is
     * set to true, then the response observer of gRPC call will be set with
     * the name GrpcConstants.GRPC_RESPONSE_OBSERVER in the Exchange object.
     * Please note that the stream observer's onNext(), onError(),
     * onCompleted() methods should be called in the route.
     *
     * The option is a: <code>boolean</code> type.
     *
     * Default: false
     * Group: consumer
     */
    default GrpcEndpointConsumerBuilder routeControlledStreamObserver(boolean routeControlledStreamObserver) {
        doSetProperty("routeControlledStreamObserver", routeControlledStreamObserver);
        return this;
    }
    /**
     * Lets the route to take control over stream observer. If this value is
     * set to true, then the response observer of gRPC call will be set with
     * the name GrpcConstants.GRPC_RESPONSE_OBSERVER in the Exchange object.
     * Please note that the stream observer's onNext(), onError(),
     * onCompleted() methods should be called in the route.
     *
     * The option will be converted to a <code>boolean</code> type.
     *
     * Default: false
     * Group: consumer
     */
    default GrpcEndpointConsumerBuilder routeControlledStreamObserver(String routeControlledStreamObserver) {
        doSetProperty("routeControlledStreamObserver", routeControlledStreamObserver);
        return this;
    }
    /**
     * Authentication method type in advance to the SSL/TLS negotiation.
     *
     * The option is a:
     * <code>org.apache.camel.component.grpc.GrpcAuthType</code> type.
     *
     * Default: NONE
     * Group: security
     */
    default GrpcEndpointConsumerBuilder authenticationType(GrpcAuthType authenticationType) {
        doSetProperty("authenticationType", authenticationType);
        return this;
    }
    /**
     * Authentication method type in advance to the SSL/TLS negotiation.
     *
     * The option will be converted to a
     * <code>org.apache.camel.component.grpc.GrpcAuthType</code> type.
     *
     * Default: NONE
     * Group: security
     */
    default GrpcEndpointConsumerBuilder authenticationType(String authenticationType) {
        doSetProperty("authenticationType", authenticationType);
        return this;
    }
    /**
     * JSON Web Token sign algorithm.
     *
     * The option is a:
     * <code>org.apache.camel.component.grpc.auth.jwt.JwtAlgorithm</code>
     * type.
     *
     * Default: HMAC256
     * Group: security
     */
    default GrpcEndpointConsumerBuilder jwtAlgorithm(JwtAlgorithm jwtAlgorithm) {
        doSetProperty("jwtAlgorithm", jwtAlgorithm);
        return this;
    }
    /**
     * JSON Web Token sign algorithm.
     *
     * The option will be converted to a
     * <code>org.apache.camel.component.grpc.auth.jwt.JwtAlgorithm</code>
     * type.
     *
     * Default: HMAC256
     * Group: security
     */
    default GrpcEndpointConsumerBuilder jwtAlgorithm(String jwtAlgorithm) {
        doSetProperty("jwtAlgorithm", jwtAlgorithm);
        return this;
    }
    /**
     * JSON Web Token issuer.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     */
    default GrpcEndpointConsumerBuilder jwtIssuer(String jwtIssuer) {
        doSetProperty("jwtIssuer", jwtIssuer);
        return this;
    }
    /**
     * JSON Web Token secret.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     */
    default GrpcEndpointConsumerBuilder jwtSecret(String jwtSecret) {
        doSetProperty("jwtSecret", jwtSecret);
        return this;
    }
    /**
     * JSON Web Token subject.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     */
    default GrpcEndpointConsumerBuilder jwtSubject(String jwtSubject) {
        doSetProperty("jwtSubject", jwtSubject);
        return this;
    }
    /**
     * The X.509 certificate chain file resource in PEM format link.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     */
    default GrpcEndpointConsumerBuilder keyCertChainResource(String keyCertChainResource) {
        doSetProperty("keyCertChainResource", keyCertChainResource);
        return this;
    }
    /**
     * The PKCS#8 private key file password.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     */
    default GrpcEndpointConsumerBuilder keyPassword(String keyPassword) {
        doSetProperty("keyPassword", keyPassword);
        return this;
    }
    /**
     * The PKCS#8 private key file resource in PEM format link.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     */
    default GrpcEndpointConsumerBuilder keyResource(String keyResource) {
        doSetProperty("keyResource", keyResource);
        return this;
    }
    /**
     * Identifies the security negotiation type used for HTTP/2
     * communication.
     *
     * The option is a: <code>io.grpc.netty.NegotiationType</code> type.
     *
     * Default: PLAINTEXT
     * Group: security
     */
    default GrpcEndpointConsumerBuilder negotiationType(NegotiationType negotiationType) {
        doSetProperty("negotiationType", negotiationType);
        return this;
    }
    /**
     * Identifies the security negotiation type used for HTTP/2
     * communication.
     *
     * The option will be converted to a
     * <code>io.grpc.netty.NegotiationType</code> type.
     *
     * Default: PLAINTEXT
     * Group: security
     */
    default GrpcEndpointConsumerBuilder negotiationType(String negotiationType) {
        doSetProperty("negotiationType", negotiationType);
        return this;
    }
    /**
     * Service Account key file in JSON format resource link supported by
     * the Google Cloud SDK.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     */
    default GrpcEndpointConsumerBuilder serviceAccountResource(String serviceAccountResource) {
        doSetProperty("serviceAccountResource", serviceAccountResource);
        return this;
    }
    /**
     * The trusted certificates collection file resource in PEM format for
     * verifying the remote endpoint's certificate.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     */
    default GrpcEndpointConsumerBuilder trustCertCollectionResource(String trustCertCollectionResource) {
        doSetProperty("trustCertCollectionResource", trustCertCollectionResource);
        return this;
    }
}
/**
* Advanced builder for endpoint consumers for the gRPC component.
*/
public interface AdvancedGrpcEndpointConsumerBuilder
        extends
            EndpointConsumerBuilder {
    // NOTE(review): generated by camel build tools (EndpointDslMojo) — do not
    // hand-edit; change the component metadata and regenerate instead.
    default GrpcEndpointConsumerBuilder basic() {
        // Cast-based view switch back to the basic builder; presumably the
        // concrete runtime builder implements both interfaces.
        return (GrpcEndpointConsumerBuilder) this;
    }
    /**
     * To let the consumer use a custom ExceptionHandler. Notice if the
     * option bridgeErrorHandler is enabled then this option is not in use.
     * By default the consumer will deal with exceptions, that will be
     * logged at WARN or ERROR level and ignored.
     *
     * The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
     * type.
     *
     * Group: consumer (advanced)
     */
    default AdvancedGrpcEndpointConsumerBuilder exceptionHandler(ExceptionHandler exceptionHandler) {
        doSetProperty("exceptionHandler", exceptionHandler);
        return this;
    }
    /**
     * To let the consumer use a custom ExceptionHandler. Notice if the
     * option bridgeErrorHandler is enabled then this option is not in use.
     * By default the consumer will deal with exceptions, that will be
     * logged at WARN or ERROR level and ignored.
     *
     * The option will be converted to a
     * <code>org.apache.camel.spi.ExceptionHandler</code> type.
     *
     * Group: consumer (advanced)
     */
    default AdvancedGrpcEndpointConsumerBuilder exceptionHandler(String exceptionHandler) {
        doSetProperty("exceptionHandler", exceptionHandler);
        return this;
    }
    /**
     * Sets the exchange pattern when the consumer creates an exchange.
     *
     * The option is a: <code>org.apache.camel.ExchangePattern</code> type.
     *
     * Group: consumer (advanced)
     */
    default AdvancedGrpcEndpointConsumerBuilder exchangePattern(ExchangePattern exchangePattern) {
        doSetProperty("exchangePattern", exchangePattern);
        return this;
    }
    /**
     * Sets the exchange pattern when the consumer creates an exchange.
     *
     * The option will be converted to a
     * <code>org.apache.camel.ExchangePattern</code> type.
     *
     * Group: consumer (advanced)
     */
    default AdvancedGrpcEndpointConsumerBuilder exchangePattern(String exchangePattern) {
        doSetProperty("exchangePattern", exchangePattern);
        return this;
    }
    /**
     * Whether the endpoint should use basic property binding (Camel 2.x) or
     * the newer property binding with additional capabilities.
     *
     * The option is a: <code>boolean</code> type.
     *
     * Default: false
     * Group: advanced
     */
    default AdvancedGrpcEndpointConsumerBuilder basicPropertyBinding(boolean basicPropertyBinding) {
        doSetProperty("basicPropertyBinding", basicPropertyBinding);
        return this;
    }
    /**
     * Whether the endpoint should use basic property binding (Camel 2.x) or
     * the newer property binding with additional capabilities.
     *
     * The option will be converted to a <code>boolean</code> type.
     *
     * Default: false
     * Group: advanced
     */
    default AdvancedGrpcEndpointConsumerBuilder basicPropertyBinding(String basicPropertyBinding) {
        doSetProperty("basicPropertyBinding", basicPropertyBinding);
        return this;
    }
    /**
     * Sets whether synchronous processing should be strictly used, or Camel
     * is allowed to use asynchronous processing (if supported).
     *
     * The option is a: <code>boolean</code> type.
     *
     * Default: false
     * Group: advanced
     */
    default AdvancedGrpcEndpointConsumerBuilder synchronous(boolean synchronous) {
        doSetProperty("synchronous", synchronous);
        return this;
    }
    /**
     * Sets whether synchronous processing should be strictly used, or Camel
     * is allowed to use asynchronous processing (if supported).
     *
     * The option will be converted to a <code>boolean</code> type.
     *
     * Default: false
     * Group: advanced
     */
    default AdvancedGrpcEndpointConsumerBuilder synchronous(String synchronous) {
        doSetProperty("synchronous", synchronous);
        return this;
    }
}
/**
* Builder for endpoint producers for the gRPC component.
*/
public interface GrpcEndpointProducerBuilder
        extends
            EndpointProducerBuilder {
    // NOTE(review): generated by camel build tools (EndpointDslMojo) — do not
    // hand-edit; change the component metadata and regenerate instead.
    default AdvancedGrpcEndpointProducerBuilder advanced() {
        // Cast-based view switch; presumably the concrete runtime builder
        // implements both the basic and advanced interfaces.
        return (AdvancedGrpcEndpointProducerBuilder) this;
    }
    /**
     * The HTTP/2 flow control window size (MiB).
     *
     * The option is a: <code>int</code> type.
     *
     * Default: 1048576
     * Group: common
     */
    default GrpcEndpointProducerBuilder flowControlWindow(int flowControlWindow) {
        doSetProperty("flowControlWindow", flowControlWindow);
        return this;
    }
    /**
     * The HTTP/2 flow control window size (MiB).
     *
     * The option will be converted to a <code>int</code> type.
     *
     * Default: 1048576
     * Group: common
     */
    default GrpcEndpointProducerBuilder flowControlWindow(String flowControlWindow) {
        doSetProperty("flowControlWindow", flowControlWindow);
        return this;
    }
    /**
     * The maximum message size allowed to be received/sent (MiB).
     *
     * The option is a: <code>int</code> type.
     *
     * Default: 4194304
     * Group: common
     */
    default GrpcEndpointProducerBuilder maxMessageSize(int maxMessageSize) {
        doSetProperty("maxMessageSize", maxMessageSize);
        return this;
    }
    /**
     * The maximum message size allowed to be received/sent (MiB).
     *
     * The option will be converted to a <code>int</code> type.
     *
     * Default: 4194304
     * Group: common
     */
    default GrpcEndpointProducerBuilder maxMessageSize(String maxMessageSize) {
        doSetProperty("maxMessageSize", maxMessageSize);
        return this;
    }
    /**
     * Whether the producer should be started lazy (on the first message).
     * By starting lazy you can use this to allow CamelContext and routes to
     * startup in situations where a producer may otherwise fail during
     * starting and cause the route to fail being started. By deferring this
     * startup to be lazy then the startup failure can be handled during
     * routing messages via Camel's routing error handlers. Beware that when
     * the first message is processed then creating and starting the
     * producer may take a little time and prolong the total processing time
     * of the processing.
     *
     * The option is a: <code>boolean</code> type.
     *
     * Default: false
     * Group: producer
     */
    default GrpcEndpointProducerBuilder lazyStartProducer(boolean lazyStartProducer) {
        doSetProperty("lazyStartProducer", lazyStartProducer);
        return this;
    }
    /**
     * Whether the producer should be started lazy (on the first message).
     * By starting lazy you can use this to allow CamelContext and routes to
     * startup in situations where a producer may otherwise fail during
     * starting and cause the route to fail being started. By deferring this
     * startup to be lazy then the startup failure can be handled during
     * routing messages via Camel's routing error handlers. Beware that when
     * the first message is processed then creating and starting the
     * producer may take a little time and prolong the total processing time
     * of the processing.
     *
     * The option will be converted to a <code>boolean</code> type.
     *
     * Default: false
     * Group: producer
     */
    default GrpcEndpointProducerBuilder lazyStartProducer(String lazyStartProducer) {
        doSetProperty("lazyStartProducer", lazyStartProducer);
        return this;
    }
    /**
     * gRPC method name.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: producer
     */
    default GrpcEndpointProducerBuilder method(String method) {
        doSetProperty("method", method);
        return this;
    }
    /**
     * The mode used to communicate with a remote gRPC server. In SIMPLE
     * mode a single exchange is translated into a remote procedure call. In
     * STREAMING mode all exchanges will be sent within the same request
     * (input and output of the recipient gRPC service must be of type
     * 'stream').
     *
     * The option is a:
     * <code>org.apache.camel.component.grpc.GrpcProducerStrategy</code>
     * type.
     *
     * Default: SIMPLE
     * Group: producer
     */
    default GrpcEndpointProducerBuilder producerStrategy(GrpcProducerStrategy producerStrategy) {
        doSetProperty("producerStrategy", producerStrategy);
        return this;
    }
    /**
     * The mode used to communicate with a remote gRPC server. In SIMPLE
     * mode a single exchange is translated into a remote procedure call. In
     * STREAMING mode all exchanges will be sent within the same request
     * (input and output of the recipient gRPC service must be of type
     * 'stream').
     *
     * The option will be converted to a
     * <code>org.apache.camel.component.grpc.GrpcProducerStrategy</code>
     * type.
     *
     * Default: SIMPLE
     * Group: producer
     */
    default GrpcEndpointProducerBuilder producerStrategy(String producerStrategy) {
        doSetProperty("producerStrategy", producerStrategy);
        return this;
    }
    /**
     * When using STREAMING client mode, it indicates the endpoint where
     * responses should be forwarded.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: producer
     */
    default GrpcEndpointProducerBuilder streamRepliesTo(String streamRepliesTo) {
        doSetProperty("streamRepliesTo", streamRepliesTo);
        return this;
    }
    /**
     * The user agent header passed to the server.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: producer
     */
    default GrpcEndpointProducerBuilder userAgent(String userAgent) {
        doSetProperty("userAgent", userAgent);
        return this;
    }
    /**
     * Authentication method type in advance to the SSL/TLS negotiation.
     *
     * The option is a:
     * <code>org.apache.camel.component.grpc.GrpcAuthType</code> type.
     *
     * Default: NONE
     * Group: security
     */
    default GrpcEndpointProducerBuilder authenticationType(GrpcAuthType authenticationType) {
        doSetProperty("authenticationType", authenticationType);
        return this;
    }
    /**
     * Authentication method type in advance to the SSL/TLS negotiation.
     *
     * The option will be converted to a
     * <code>org.apache.camel.component.grpc.GrpcAuthType</code> type.
     *
     * Default: NONE
     * Group: security
     */
    default GrpcEndpointProducerBuilder authenticationType(String authenticationType) {
        doSetProperty("authenticationType", authenticationType);
        return this;
    }
    /**
     * JSON Web Token sign algorithm.
     *
     * The option is a:
     * <code>org.apache.camel.component.grpc.auth.jwt.JwtAlgorithm</code>
     * type.
     *
     * Default: HMAC256
     * Group: security
     */
    default GrpcEndpointProducerBuilder jwtAlgorithm(JwtAlgorithm jwtAlgorithm) {
        doSetProperty("jwtAlgorithm", jwtAlgorithm);
        return this;
    }
    /**
     * JSON Web Token sign algorithm.
     *
     * The option will be converted to a
     * <code>org.apache.camel.component.grpc.auth.jwt.JwtAlgorithm</code>
     * type.
     *
     * Default: HMAC256
     * Group: security
     */
    default GrpcEndpointProducerBuilder jwtAlgorithm(String jwtAlgorithm) {
        doSetProperty("jwtAlgorithm", jwtAlgorithm);
        return this;
    }
    /**
     * JSON Web Token issuer.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     */
    default GrpcEndpointProducerBuilder jwtIssuer(String jwtIssuer) {
        doSetProperty("jwtIssuer", jwtIssuer);
        return this;
    }
    /**
     * JSON Web Token secret.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     */
    default GrpcEndpointProducerBuilder jwtSecret(String jwtSecret) {
        doSetProperty("jwtSecret", jwtSecret);
        return this;
    }
    /**
     * JSON Web Token subject.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     */
    default GrpcEndpointProducerBuilder jwtSubject(String jwtSubject) {
        doSetProperty("jwtSubject", jwtSubject);
        return this;
    }
    /**
     * The X.509 certificate chain file resource in PEM format link.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     */
    default GrpcEndpointProducerBuilder keyCertChainResource(String keyCertChainResource) {
        doSetProperty("keyCertChainResource", keyCertChainResource);
        return this;
    }
    /**
     * The PKCS#8 private key file password.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     */
    default GrpcEndpointProducerBuilder keyPassword(String keyPassword) {
        doSetProperty("keyPassword", keyPassword);
        return this;
    }
    /**
     * The PKCS#8 private key file resource in PEM format link.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     */
    default GrpcEndpointProducerBuilder keyResource(String keyResource) {
        doSetProperty("keyResource", keyResource);
        return this;
    }
    /**
     * Identifies the security negotiation type used for HTTP/2
     * communication.
     *
     * The option is a: <code>io.grpc.netty.NegotiationType</code> type.
     *
     * Default: PLAINTEXT
     * Group: security
     */
    default GrpcEndpointProducerBuilder negotiationType(NegotiationType negotiationType) {
        doSetProperty("negotiationType", negotiationType);
        return this;
    }
    /**
     * Identifies the security negotiation type used for HTTP/2
     * communication.
     *
     * The option will be converted to a
     * <code>io.grpc.netty.NegotiationType</code> type.
     *
     * Default: PLAINTEXT
     * Group: security
     */
    default GrpcEndpointProducerBuilder negotiationType(String negotiationType) {
        doSetProperty("negotiationType", negotiationType);
        return this;
    }
    /**
     * Service Account key file in JSON format resource link supported by
     * the Google Cloud SDK.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     */
    default GrpcEndpointProducerBuilder serviceAccountResource(String serviceAccountResource) {
        doSetProperty("serviceAccountResource", serviceAccountResource);
        return this;
    }
    /**
     * The trusted certificates collection file resource in PEM format for
     * verifying the remote endpoint's certificate.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     */
    default GrpcEndpointProducerBuilder trustCertCollectionResource(String trustCertCollectionResource) {
        doSetProperty("trustCertCollectionResource", trustCertCollectionResource);
        return this;
    }
}
/**
* Advanced builder for endpoint producers for the gRPC component.
*/
public interface AdvancedGrpcEndpointProducerBuilder
        extends
            EndpointProducerBuilder {
    // NOTE(review): generated by camel build tools (EndpointDslMojo) — do not
    // hand-edit; change the component metadata and regenerate instead.
    default GrpcEndpointProducerBuilder basic() {
        // Cast-based view switch back to the basic builder; presumably the
        // concrete runtime builder implements both interfaces.
        return (GrpcEndpointProducerBuilder) this;
    }
    /**
     * Whether the endpoint should use basic property binding (Camel 2.x) or
     * the newer property binding with additional capabilities.
     *
     * The option is a: <code>boolean</code> type.
     *
     * Default: false
     * Group: advanced
     */
    default AdvancedGrpcEndpointProducerBuilder basicPropertyBinding(boolean basicPropertyBinding) {
        doSetProperty("basicPropertyBinding", basicPropertyBinding);
        return this;
    }
    /**
     * Whether the endpoint should use basic property binding (Camel 2.x) or
     * the newer property binding with additional capabilities.
     *
     * The option will be converted to a <code>boolean</code> type.
     *
     * Default: false
     * Group: advanced
     */
    default AdvancedGrpcEndpointProducerBuilder basicPropertyBinding(String basicPropertyBinding) {
        doSetProperty("basicPropertyBinding", basicPropertyBinding);
        return this;
    }
    /**
     * Sets whether synchronous processing should be strictly used, or Camel
     * is allowed to use asynchronous processing (if supported).
     *
     * The option is a: <code>boolean</code> type.
     *
     * Default: false
     * Group: advanced
     */
    default AdvancedGrpcEndpointProducerBuilder synchronous(boolean synchronous) {
        doSetProperty("synchronous", synchronous);
        return this;
    }
    /**
     * Sets whether synchronous processing should be strictly used, or Camel
     * is allowed to use asynchronous processing (if supported).
     *
     * The option will be converted to a <code>boolean</code> type.
     *
     * Default: false
     * Group: advanced
     */
    default AdvancedGrpcEndpointProducerBuilder synchronous(String synchronous) {
        doSetProperty("synchronous", synchronous);
        return this;
    }
}
/**
* Builder for endpoint for the gRPC component.
*/
public interface GrpcEndpointBuilder
        extends
            GrpcEndpointConsumerBuilder,
            GrpcEndpointProducerBuilder {
    // NOTE(review): generated by camel build tools (EndpointDslMojo) — do not
    // hand-edit; change the component metadata and regenerate instead.
    // Combines the consumer and producer views; re-declares the options that
    // exist in both so the combined builder type is returned fluently.
    default AdvancedGrpcEndpointBuilder advanced() {
        // Cast-based view switch; presumably the concrete runtime builder
        // implements both the basic and advanced interfaces.
        return (AdvancedGrpcEndpointBuilder) this;
    }
    /**
     * The HTTP/2 flow control window size (MiB).
     *
     * The option is a: <code>int</code> type.
     *
     * Default: 1048576
     * Group: common
     */
    default GrpcEndpointBuilder flowControlWindow(int flowControlWindow) {
        doSetProperty("flowControlWindow", flowControlWindow);
        return this;
    }
    /**
     * The HTTP/2 flow control window size (MiB).
     *
     * The option will be converted to a <code>int</code> type.
     *
     * Default: 1048576
     * Group: common
     */
    default GrpcEndpointBuilder flowControlWindow(String flowControlWindow) {
        doSetProperty("flowControlWindow", flowControlWindow);
        return this;
    }
    /**
     * The maximum message size allowed to be received/sent (MiB).
     *
     * The option is a: <code>int</code> type.
     *
     * Default: 4194304
     * Group: common
     */
    default GrpcEndpointBuilder maxMessageSize(int maxMessageSize) {
        doSetProperty("maxMessageSize", maxMessageSize);
        return this;
    }
    /**
     * The maximum message size allowed to be received/sent (MiB).
     *
     * The option will be converted to a <code>int</code> type.
     *
     * Default: 4194304
     * Group: common
     */
    default GrpcEndpointBuilder maxMessageSize(String maxMessageSize) {
        doSetProperty("maxMessageSize", maxMessageSize);
        return this;
    }
    /**
     * Authentication method type in advance to the SSL/TLS negotiation.
     *
     * The option is a:
     * <code>org.apache.camel.component.grpc.GrpcAuthType</code> type.
     *
     * Default: NONE
     * Group: security
     */
    default GrpcEndpointBuilder authenticationType(GrpcAuthType authenticationType) {
        doSetProperty("authenticationType", authenticationType);
        return this;
    }
    /**
     * Authentication method type in advance to the SSL/TLS negotiation.
     *
     * The option will be converted to a
     * <code>org.apache.camel.component.grpc.GrpcAuthType</code> type.
     *
     * Default: NONE
     * Group: security
     */
    default GrpcEndpointBuilder authenticationType(String authenticationType) {
        doSetProperty("authenticationType", authenticationType);
        return this;
    }
    /**
     * JSON Web Token sign algorithm.
     *
     * The option is a:
     * <code>org.apache.camel.component.grpc.auth.jwt.JwtAlgorithm</code>
     * type.
     *
     * Default: HMAC256
     * Group: security
     */
    default GrpcEndpointBuilder jwtAlgorithm(JwtAlgorithm jwtAlgorithm) {
        doSetProperty("jwtAlgorithm", jwtAlgorithm);
        return this;
    }
    /**
     * JSON Web Token sign algorithm.
     *
     * The option will be converted to a
     * <code>org.apache.camel.component.grpc.auth.jwt.JwtAlgorithm</code>
     * type.
     *
     * Default: HMAC256
     * Group: security
     */
    default GrpcEndpointBuilder jwtAlgorithm(String jwtAlgorithm) {
        doSetProperty("jwtAlgorithm", jwtAlgorithm);
        return this;
    }
    /**
     * JSON Web Token issuer.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     */
    default GrpcEndpointBuilder jwtIssuer(String jwtIssuer) {
        doSetProperty("jwtIssuer", jwtIssuer);
        return this;
    }
    /**
     * JSON Web Token secret.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     */
    default GrpcEndpointBuilder jwtSecret(String jwtSecret) {
        doSetProperty("jwtSecret", jwtSecret);
        return this;
    }
    /**
     * JSON Web Token subject.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     */
    default GrpcEndpointBuilder jwtSubject(String jwtSubject) {
        doSetProperty("jwtSubject", jwtSubject);
        return this;
    }
    /**
     * The X.509 certificate chain file resource in PEM format link.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     */
    default GrpcEndpointBuilder keyCertChainResource(String keyCertChainResource) {
        doSetProperty("keyCertChainResource", keyCertChainResource);
        return this;
    }
    /**
     * The PKCS#8 private key file password.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     */
    default GrpcEndpointBuilder keyPassword(String keyPassword) {
        doSetProperty("keyPassword", keyPassword);
        return this;
    }
    /**
     * The PKCS#8 private key file resource in PEM format link.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     */
    default GrpcEndpointBuilder keyResource(String keyResource) {
        doSetProperty("keyResource", keyResource);
        return this;
    }
    /**
     * Identifies the security negotiation type used for HTTP/2
     * communication.
     *
     * The option is a: <code>io.grpc.netty.NegotiationType</code> type.
     *
     * Default: PLAINTEXT
     * Group: security
     */
    default GrpcEndpointBuilder negotiationType(NegotiationType negotiationType) {
        doSetProperty("negotiationType", negotiationType);
        return this;
    }
    /**
     * Identifies the security negotiation type used for HTTP/2
     * communication.
     *
     * The option will be converted to a
     * <code>io.grpc.netty.NegotiationType</code> type.
     *
     * Default: PLAINTEXT
     * Group: security
     */
    default GrpcEndpointBuilder negotiationType(String negotiationType) {
        doSetProperty("negotiationType", negotiationType);
        return this;
    }
    /**
     * Service Account key file in JSON format resource link supported by
     * the Google Cloud SDK.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     */
    default GrpcEndpointBuilder serviceAccountResource(String serviceAccountResource) {
        doSetProperty("serviceAccountResource", serviceAccountResource);
        return this;
    }
    /**
     * The trusted certificates collection file resource in PEM format for
     * verifying the remote endpoint's certificate.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     */
    default GrpcEndpointBuilder trustCertCollectionResource(String trustCertCollectionResource) {
        doSetProperty("trustCertCollectionResource", trustCertCollectionResource);
        return this;
    }
}
/**
* Advanced builder for endpoint for the gRPC component.
*/
    public interface AdvancedGrpcEndpointBuilder
            extends
                AdvancedGrpcEndpointConsumerBuilder,
                AdvancedGrpcEndpointProducerBuilder {
        /**
         * Returns the basic (non-advanced) view of this endpoint builder.
         *
         * @return this builder, narrowed to the basic DSL
         */
        default GrpcEndpointBuilder basic() {
            return (GrpcEndpointBuilder) this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x) or
         * the newer property binding with additional capabilities.
         * 
         * The option is a: <code>boolean</code> type.
         * 
         * Default: false
         * Group: advanced
         *
         * @param basicPropertyBinding true to use the legacy binding
         * @return this builder, for fluent chaining
         */
        default AdvancedGrpcEndpointBuilder basicPropertyBinding(
                boolean basicPropertyBinding) {
            doSetProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x) or
         * the newer property binding with additional capabilities.
         * 
         * The option will be converted to a <code>boolean</code> type.
         * 
         * Default: false
         * Group: advanced
         *
         * @param basicPropertyBinding "true" to use the legacy binding
         * @return this builder, for fluent chaining
         */
        default AdvancedGrpcEndpointBuilder basicPropertyBinding(
                String basicPropertyBinding) {
            doSetProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         * 
         * The option is a: <code>boolean</code> type.
         * 
         * Default: false
         * Group: advanced
         *
         * @param synchronous true to force synchronous processing
         * @return this builder, for fluent chaining
         */
        default AdvancedGrpcEndpointBuilder synchronous(boolean synchronous) {
            doSetProperty("synchronous", synchronous);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         * 
         * The option will be converted to a <code>boolean</code> type.
         * 
         * Default: false
         * Group: advanced
         *
         * @param synchronous "true" to force synchronous processing
         * @return this builder, for fluent chaining
         */
        default AdvancedGrpcEndpointBuilder synchronous(String synchronous) {
            doSetProperty("synchronous", synchronous);
            return this;
        }
    }
/**
* Proxy enum for
* <code>org.apache.camel.component.grpc.GrpcConsumerStrategy</code> enum.
*/
enum GrpcConsumerStrategy {
AGGREGATION,
PROPAGATION;
}
/**
* Proxy enum for
* <code>org.apache.camel.component.grpc.GrpcProducerStrategy</code> enum.
*/
enum GrpcProducerStrategy {
SIMPLE,
STREAMING;
}
/**
* Proxy enum for <code>org.apache.camel.component.grpc.GrpcAuthType</code>
* enum.
*/
enum GrpcAuthType {
NONE,
GOOGLE,
JWT;
}
/**
* Proxy enum for
* <code>org.apache.camel.component.grpc.auth.jwt.JwtAlgorithm</code> enum.
*/
enum JwtAlgorithm {
HMAC256,
HMAC384,
HMAC512;
}
/**
* Proxy enum for <code>io.grpc.netty.NegotiationType</code> enum.
*/
enum NegotiationType {
TLS,
PLAINTEXT_UPGRADE,
PLAINTEXT;
}
    public interface GrpcBuilders {
        /**
         * gRPC (camel-grpc)
         * Expose gRPC endpoints and access external gRPC endpoints.
         * 
         * Category: rpc
         * Since: 2.19
         * Maven coordinates: org.apache.camel:camel-grpc
         * 
         * Syntax: <code>grpc:host:port/service</code>
         * 
         * Path parameter: host (required)
         * The gRPC server host name. This is localhost or 0.0.0.0 when being a
         * consumer or remote server host name when using producer.
         * 
         * Path parameter: port (required)
         * The gRPC local or remote server port
         * 
         * Path parameter: service (required)
         * Fully qualified service name from the protocol buffer descriptor file
         * (package dot service definition name)
         * 
         * @param path host:port/service
         * @return a new endpoint builder for the given path
         */
        default GrpcEndpointBuilder grpc(String path) {
            return GrpcEndpointBuilderFactory.endpointBuilder("grpc", path);
        }
        /**
         * gRPC (camel-grpc)
         * Expose gRPC endpoints and access external gRPC endpoints.
         * 
         * Category: rpc
         * Since: 2.19
         * Maven coordinates: org.apache.camel:camel-grpc
         * 
         * Syntax: <code>grpc:host:port/service</code>
         * 
         * Path parameter: host (required)
         * The gRPC server host name. This is localhost or 0.0.0.0 when being a
         * consumer or remote server host name when using producer.
         * 
         * Path parameter: port (required)
         * The gRPC local or remote server port
         * 
         * Path parameter: service (required)
         * Fully qualified service name from the protocol buffer descriptor file
         * (package dot service definition name)
         * 
         * @param componentName to use a custom component name for the endpoint
         * instead of the default name
         * @param path host:port/service
         * @return a new endpoint builder for the given component name and path
         */
        default GrpcEndpointBuilder grpc(String componentName, String path) {
            return GrpcEndpointBuilderFactory.endpointBuilder(componentName, path);
        }
    }
static GrpcEndpointBuilder endpointBuilder(String componentName, String path) {
class GrpcEndpointBuilderImpl extends AbstractEndpointBuilder implements GrpcEndpointBuilder, AdvancedGrpcEndpointBuilder {
public GrpcEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new GrpcEndpointBuilderImpl(path);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.amqp.processors;
import java.io.IOException;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeoutException;
import com.rabbitmq.client.AMQP.Basic.RecoverOk;
import com.rabbitmq.client.AMQP.BasicProperties;
import com.rabbitmq.client.AMQP.Exchange.BindOk;
import com.rabbitmq.client.AMQP.Exchange.DeclareOk;
import com.rabbitmq.client.AMQP.Exchange.DeleteOk;
import com.rabbitmq.client.AMQP.Exchange.UnbindOk;
import com.rabbitmq.client.AMQP.Queue.PurgeOk;
import com.rabbitmq.client.AMQP.Tx.CommitOk;
import com.rabbitmq.client.AMQP.Tx.RollbackOk;
import com.rabbitmq.client.AMQP.Tx.SelectOk;
import com.rabbitmq.client.AlreadyClosedException;
import com.rabbitmq.client.BuiltinExchangeType;
import com.rabbitmq.client.CancelCallback;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Command;
import com.rabbitmq.client.ConfirmCallback;
import com.rabbitmq.client.ConfirmListener;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.Consumer;
import com.rabbitmq.client.ConsumerShutdownSignalCallback;
import com.rabbitmq.client.DeliverCallback;
import com.rabbitmq.client.Envelope;
import com.rabbitmq.client.GetResponse;
import com.rabbitmq.client.Method;
import com.rabbitmq.client.ReturnCallback;
import com.rabbitmq.client.ReturnListener;
import com.rabbitmq.client.ShutdownListener;
import com.rabbitmq.client.ShutdownSignalException;
/**
* Implementation of {@link Channel} to be used during testing
*/
class TestChannel implements Channel {
private final ExecutorService executorService;
private final Map<String, List<Consumer>> consumerMap = new HashMap<>();
private final Map<String, BlockingQueue<GetResponse>> enqueuedMessages;
private final Map<String, List<String>> routingKeyToQueueMappings;
private final Map<String, String> exchangeToRoutingKeyMappings;
private final List<ReturnListener> returnListeners;
private boolean open;
private boolean corrupted;
private Connection connection;
private long deliveryTag = 0L;
private final BitSet acknowledgments = new BitSet();
private final BitSet nacks = new BitSet();
public TestChannel(Map<String, String> exchangeToRoutingKeyMappings,
Map<String, List<String>> routingKeyToQueueMappings) {
this.enqueuedMessages = new HashMap<>();
this.routingKeyToQueueMappings = routingKeyToQueueMappings;
if (this.routingKeyToQueueMappings != null) {
for (List<String> queues : routingKeyToQueueMappings.values()) {
for (String queue : queues) {
this.enqueuedMessages.put(queue, new ArrayBlockingQueue<GetResponse>(100));
}
}
}
this.exchangeToRoutingKeyMappings = exchangeToRoutingKeyMappings;
this.executorService = Executors.newCachedThreadPool();
this.returnListeners = new ArrayList<>();
this.open = true;
}
    /** Marks the channel as corrupted; subsequent publishes throw IOException. */
    void corruptChannel() {
        this.corrupted = true;
    }
    /** Sets the connection returned by {@link #getConnection()}. */
    void setConnection(Connection connection) {
        this.connection = connection;
    }
    // Shutdown-listener operations are not exercised by these tests and fail fast.
    @Override
    public void addShutdownListener(ShutdownListener listener) {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public void removeShutdownListener(ShutdownListener listener) {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public ShutdownSignalException getCloseReason() {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public void notifyListeners() {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    /** @return true until {@link #close()} is called */
    @Override
    public boolean isOpen() {
        return this.open;
    }
    @Override
    public int getChannelNumber() {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    /** @return the connection set via {@code setConnection}, or null if never set */
    @Override
    public Connection getConnection() {
        return this.connection;
    }
@Override
public void close() throws IOException, TimeoutException {
this.open = false;
}
    @Override
    public void close(int closeCode, String closeMessage) throws IOException, TimeoutException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public void abort() throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public void abort(int closeCode, String closeMessage) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    /** Registers a listener notified asynchronously when a message is unroutable (see discard). */
    @Override
    public void addReturnListener(ReturnListener listener) {
        this.returnListeners.add(listener);
    }
    @Override
    public boolean removeReturnListener(ReturnListener listener) {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public void clearReturnListeners() {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public void addConfirmListener(ConfirmListener listener) {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public boolean removeConfirmListener(ConfirmListener listener) {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public void clearConfirmListeners() {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public Consumer getDefaultConsumer() {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public void setDefaultConsumer(Consumer consumer) {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public void basicQos(int prefetchSize, int prefetchCount, boolean global) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public void basicQos(int prefetchCount, boolean global) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public void basicQos(int prefetchCount) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    /**
     * Publishes a message via the mandatory-flag overload.
     * NOTE(review): delegates with mandatory=true (real AMQP clients default to
     * false) — presumably deliberate so unroutable messages reach the return
     * listeners in tests; confirm with the callers.
     */
    @Override
    public void basicPublish(String exchange, String routingKey, BasicProperties props, byte[] body)
            throws IOException {
        this.basicPublish(exchange, routingKey, true, props, body);
    }
@Override
public void basicPublish(final String exchange, final String routingKey, boolean mandatory,
final BasicProperties props, final byte[] body) throws IOException {
if (this.corrupted) {
throw new IOException("Channel is corrupted");
}
if (!this.open) {
throw new AlreadyClosedException(new ShutdownSignalException(false, false, null, null));
}
if (exchange.equals("")){ // default exchange; routingKey corresponds to a queue.
BlockingQueue<GetResponse> messages = this.getMessageQueue(routingKey);
final Envelope envelope = new Envelope(deliveryTag++, false, exchange, routingKey);
GetResponse response = new GetResponse(envelope, props, body, messages.size());
messages.offer(response);
} else {
String rKey = this.exchangeToRoutingKeyMappings.get(exchange);
if (rKey.equals(routingKey)) {
List<String> queueNames = this.routingKeyToQueueMappings.get(routingKey);
if (queueNames == null || queueNames.isEmpty()) {
this.discard(exchange, routingKey, mandatory, props, body);
} else {
for (String queueName : queueNames) {
BlockingQueue<GetResponse> messages = this.getMessageQueue(queueName);
final Envelope envelope = new Envelope(deliveryTag++, false, exchange, routingKey);
GetResponse response = new GetResponse(envelope, props, body, messages.size());
messages.offer(response);
final List<Consumer> consumers = consumerMap.get(queueName);
if (consumers != null) {
for (final Consumer consumer : consumers) {
consumer.handleDelivery("consumerTag", envelope, props, body);
}
}
}
}
} else {
this.discard(exchange, routingKey, mandatory, props, body);
}
}
}
private void discard(final String exchange, final String routingKey, boolean mandatory, final BasicProperties props,
final byte[] body) {
// NO ROUTE. Invoke return listener async
for (final ReturnListener listener : returnListeners) {
this.executorService.execute(new Runnable() {
@Override
public void run() {
try {
listener.handleReturn(-9, "Rejecting", exchange, routingKey, props, body);
} catch (Exception e) {
throw new IllegalStateException("Failed to send return message", e);
}
}
});
}
}
private BlockingQueue<GetResponse> getMessageQueue(String name) {
BlockingQueue<GetResponse> messages = this.enqueuedMessages.get(name);
if (messages == null) {
messages = new ArrayBlockingQueue<>(100);
this.enqueuedMessages.put(name, messages);
}
return messages;
}
    // Exchange/queue management operations are not exercised by these tests and fail fast.
    @Override
    public void basicPublish(String exchange, String routingKey, boolean mandatory, boolean immediate,
            BasicProperties props, byte[] body) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public DeclareOk exchangeDeclare(String exchange, String type) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public DeclareOk exchangeDeclare(String exchange, String type, boolean durable) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public DeclareOk exchangeDeclare(String exchange, String type, boolean durable, boolean autoDelete,
            Map<String, Object> arguments) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public DeclareOk exchangeDeclare(String exchange, String type, boolean durable, boolean autoDelete,
            boolean internal, Map<String, Object> arguments) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public void exchangeDeclareNoWait(String exchange, String type, boolean durable, boolean autoDelete,
            boolean internal, Map<String, Object> arguments) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public DeclareOk exchangeDeclarePassive(String name) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public DeleteOk exchangeDelete(String exchange, boolean ifUnused) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public void exchangeDeleteNoWait(String exchange, boolean ifUnused) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public DeleteOk exchangeDelete(String exchange) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public BindOk exchangeBind(String destination, String source, String routingKey) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public BindOk exchangeBind(String destination, String source, String routingKey, Map<String, Object> arguments)
            throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public void exchangeBindNoWait(String destination, String source, String routingKey, Map<String, Object> arguments)
            throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public UnbindOk exchangeUnbind(String destination, String source, String routingKey) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public UnbindOk exchangeUnbind(String destination, String source, String routingKey, Map<String, Object> arguments)
            throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public void exchangeUnbindNoWait(String destination, String source, String routingKey,
            Map<String, Object> arguments) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public com.rabbitmq.client.AMQP.Queue.DeclareOk queueDeclare() throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public com.rabbitmq.client.AMQP.Queue.DeclareOk queueDeclare(String queue, boolean durable, boolean exclusive,
            boolean autoDelete, Map<String, Object> arguments) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public void queueDeclareNoWait(String queue, boolean durable, boolean exclusive, boolean autoDelete,
            Map<String, Object> arguments) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public com.rabbitmq.client.AMQP.Queue.DeclareOk queueDeclarePassive(String queue) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public com.rabbitmq.client.AMQP.Queue.DeleteOk queueDelete(String queue) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public com.rabbitmq.client.AMQP.Queue.DeleteOk queueDelete(String queue, boolean ifUnused, boolean ifEmpty)
            throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public void queueDeleteNoWait(String queue, boolean ifUnused, boolean ifEmpty) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public com.rabbitmq.client.AMQP.Queue.BindOk queueBind(String queue, String exchange, String routingKey)
            throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public com.rabbitmq.client.AMQP.Queue.BindOk queueBind(String queue, String exchange, String routingKey,
            Map<String, Object> arguments) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public void queueBindNoWait(String queue, String exchange, String routingKey, Map<String, Object> arguments)
            throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public com.rabbitmq.client.AMQP.Queue.UnbindOk queueUnbind(String queue, String exchange, String routingKey)
            throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public com.rabbitmq.client.AMQP.Queue.UnbindOk queueUnbind(String queue, String exchange, String routingKey,
            Map<String, Object> arguments) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public PurgeOk queuePurge(String queue) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
@Override
public GetResponse basicGet(String queue, boolean autoAck) throws IOException {
BlockingQueue<GetResponse> messages = this.enqueuedMessages.get(queue);
if (messages == null) {
throw new IOException("Queue is not defined");
} else {
return messages.poll();
}
}
@Override
public void basicAck(long deliveryTag, boolean multiple) throws IOException {
acknowledgments.set((int) deliveryTag);
}
public boolean isAck(final int deliveryTag) {
return acknowledgments.get(deliveryTag);
}
@Override
public void basicNack(long deliveryTag, boolean multiple, boolean requeue) throws IOException {
nacks.set((int) deliveryTag);
}
public boolean isNack(final int deliveryTag) {
return nacks.get(deliveryTag);
}
@Override
public void basicReject(long deliveryTag, boolean requeue) throws IOException {
nacks.set((int) deliveryTag);
}
    // Auto-ack defaulting overload is not exercised by these tests and fails fast.
    @Override
    public String basicConsume(String queue, Consumer callback) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
@Override
public String basicConsume(String queue, boolean autoAck, Consumer callback) throws IOException {
final BlockingQueue<GetResponse> messageQueue = enqueuedMessages.get(queue);
if (messageQueue == null) {
throw new IOException("Queue is not defined");
}
consumerMap.computeIfAbsent(queue, q -> new ArrayList<>()).add(callback);
final String consumerTag = UUID.randomUUID().toString();
GetResponse message;
while ((message = messageQueue.poll()) != null) {
callback.handleDelivery(consumerTag, message.getEnvelope(), message.getProps(), message.getBody());
}
return consumerTag;
}
    // Remaining Consumer-based overloads are not exercised by these tests and fail fast.
    @Override
    public String basicConsume(String queue, boolean autoAck, Map<String, Object> arguments, Consumer callback)
            throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public String basicConsume(String queue, boolean autoAck, String consumerTag, Consumer callback)
            throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public String basicConsume(String queue, boolean autoAck, String consumerTag, boolean noLocal, boolean exclusive,
            Map<String, Object> arguments, Consumer callback) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
@Override
public void basicCancel(String consumerTag) throws IOException {
// consumerMap is indexed by queue name so the passed in consumerTag parameter needs to be the name of the test queue
for (Consumer consumer: consumerMap.get(consumerTag)) {
consumer.handleCancel(consumerTag);
}
}
    // Recovery, transaction, confirm, RPC and callback-style consume operations
    // are not exercised by these tests and fail fast if a test reaches them.
    @Override
    public RecoverOk basicRecover() throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public RecoverOk basicRecover(boolean requeue) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public SelectOk txSelect() throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public CommitOk txCommit() throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public RollbackOk txRollback() throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public com.rabbitmq.client.AMQP.Confirm.SelectOk confirmSelect() throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public long getNextPublishSeqNo() {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public boolean waitForConfirms() throws InterruptedException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public boolean waitForConfirms(long timeout) throws InterruptedException, TimeoutException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public void waitForConfirmsOrDie() throws IOException, InterruptedException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public void waitForConfirmsOrDie(long timeout) throws IOException, InterruptedException, TimeoutException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public void asyncRpc(Method method) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public Command rpc(Method method) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public long messageCount(String queue) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public long consumerCount(String queue) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public ReturnListener addReturnListener(ReturnCallback returnCallback) {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public ConfirmListener addConfirmListener(ConfirmCallback ackCallback, ConfirmCallback nackCallback) {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public DeclareOk exchangeDeclare(String exchange, BuiltinExchangeType type) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public DeclareOk exchangeDeclare(String exchange, BuiltinExchangeType type, boolean durable) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public DeclareOk exchangeDeclare(String exchange, BuiltinExchangeType type, boolean durable, boolean autoDelete, Map<String, Object> arguments) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public DeclareOk exchangeDeclare(String exchange, BuiltinExchangeType type, boolean durable, boolean autoDelete, boolean internal, Map<String, Object> arguments) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public void exchangeDeclareNoWait(String exchange, BuiltinExchangeType type, boolean durable, boolean autoDelete, boolean internal, Map<String, Object> arguments) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public String basicConsume(String queue, DeliverCallback deliverCallback, CancelCallback cancelCallback) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public String basicConsume(String queue, DeliverCallback deliverCallback, ConsumerShutdownSignalCallback shutdownSignalCallback) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public String basicConsume(String queue, DeliverCallback deliverCallback, CancelCallback cancelCallback, ConsumerShutdownSignalCallback shutdownSignalCallback) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public String basicConsume(String queue, boolean autoAck, DeliverCallback deliverCallback, CancelCallback cancelCallback) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public String basicConsume(String queue, boolean autoAck, DeliverCallback deliverCallback, ConsumerShutdownSignalCallback shutdownSignalCallback) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public String basicConsume(String queue, boolean autoAck, DeliverCallback deliverCallback, CancelCallback cancelCallback, ConsumerShutdownSignalCallback shutdownSignalCallback)
            throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public String basicConsume(String queue, boolean autoAck, Map<String, Object> arguments, DeliverCallback deliverCallback, CancelCallback cancelCallback) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public String basicConsume(String queue, boolean autoAck, Map<String, Object> arguments, DeliverCallback deliverCallback, ConsumerShutdownSignalCallback shutdownSignalCallback)
            throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public String basicConsume(String queue, boolean autoAck, Map<String, Object> arguments, DeliverCallback deliverCallback, CancelCallback cancelCallback,
            ConsumerShutdownSignalCallback shutdownSignalCallback) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public String basicConsume(String queue, boolean autoAck, String consumerTag, DeliverCallback deliverCallback, CancelCallback cancelCallback) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public String basicConsume(String queue, boolean autoAck, String consumerTag, DeliverCallback deliverCallback, ConsumerShutdownSignalCallback shutdownSignalCallback) throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
    @Override
    public String basicConsume(String queue, boolean autoAck, String consumerTag, DeliverCallback deliverCallback, CancelCallback cancelCallback, ConsumerShutdownSignalCallback shutdownSignalCallback)
            throws IOException {
        throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
    }
@Override
public String basicConsume(String queue, boolean autoAck, String consumerTag, boolean noLocal, boolean exclusive, Map<String, Object> arguments, DeliverCallback deliverCallback,
CancelCallback cancelCallback) throws IOException {
throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
}
@Override
public String basicConsume(String queue, boolean autoAck, String consumerTag, boolean noLocal, boolean exclusive, Map<String, Object> arguments, DeliverCallback deliverCallback,
ConsumerShutdownSignalCallback shutdownSignalCallback) throws IOException {
throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
}
@Override
public String basicConsume(String queue, boolean autoAck, String consumerTag, boolean noLocal, boolean exclusive, Map<String, Object> arguments, DeliverCallback deliverCallback,
CancelCallback cancelCallback, ConsumerShutdownSignalCallback shutdownSignalCallback) throws IOException {
throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
}
@Override
public CompletableFuture<Command> asyncCompletableRpc(Method method) throws IOException {
throw new UnsupportedOperationException("This method is not currently supported as it is not used by current API in testing");
}
}
| |
/**
* Copyright (C) 2013 Mikhail Malakhov <malakhv@live.ru>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* */
package com.malakhv.preference;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.res.TypedArray;
import android.os.Build;
import android.preference.DialogPreference;
import android.preference.Preference;
import android.util.AttributeSet;
import android.view.View;
import android.widget.SeekBar;
import android.widget.TextView;
/**
* A {@link Preference} that displays a {@link SeekBar} as a dialog.
* @author Mikhail.Malakhov [malakhv@live.ru|https://github.com/malakhv]
*/
@SuppressWarnings("unused")
@SuppressWarnings("unused")
public class SeekBarDialog extends DialogPreference {

    /** The default value of progress for {@link SeekBar} in the dialog. */
    public static final int DEFAULT_VALUE = 0;

    /** The default maximum value of progress for {@link SeekBar} in the dialog. */
    public static final int DEFAULT_MAX_VALUE = 100;

    /** The default dialog message formatted text. */
    public static final String DEFAULT_DIALOG_MESSAGE = "%s %%";

    /** The maximum value of progress for {@link SeekBar} in the dialog. */
    private int mMax = DEFAULT_MAX_VALUE;

    /** The current progress value of {@link SeekBar} in the dialog. */
    private int mValue;

    /** The summary of this Preference. */
    private String mSummary = null;

    /** The dialog message of this Preference. */
    private String mDialogMessage = null;

    /** The {@link SeekBar} shown in the dialog. */
    private SeekBar mSeekBar = null;

    /** The {@link TextView} shown in the dialog as a message. */
    private TextView mDialogMessageView = null;

    /** The internal listener for {@link SeekBar} shown in the dialog. */
    private OnSeekBarChangeListener mOnSeekBarChangeListener = new OnSeekBarChangeListener();

    /**
     * Simple constructor to use when creating a preference from code.
     * */
    public SeekBarDialog(Context context) { this(context, null); }

    /**
     * Constructor that is called when inflating a preference from XML.
     * */
    public SeekBarDialog(Context context, AttributeSet attrs) {
        this(context, attrs, android.R.attr.dialogPreferenceStyle);
    }

    /**
     * Perform inflation from XML and apply a class-specific base style from a theme attribute.
     * */
    public SeekBarDialog(Context context, AttributeSet attrs, int defStyleAttr) {
        this(context, attrs, defStyleAttr, 0);
    }

    /**
     * Perform inflation from XML and apply a class-specific base style from a theme attribute or
     * style resource.
     * */
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    public SeekBarDialog(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
        super(context, attrs, defStyleAttr, defStyleRes);
        // Retrieve the SeekBar attributes
        TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.SeekBarDialog,
                defStyleAttr, defStyleRes);
        mMax = a.getInt(R.styleable.SeekBarDialog_android_max, DEFAULT_MAX_VALUE);
        mValue = a.getInt(R.styleable.SeekBarDialog_android_progress, DEFAULT_VALUE);
        mDialogMessage = a.getString(R.styleable.SeekBarDialog_android_dialogMessage);
        if (mDialogMessage == null) mDialogMessage = DEFAULT_DIALOG_MESSAGE;
        // Retrieve the Preference summary attribute since it's private in the Preference class
        mSummary = a.getString(R.styleable.SeekBarDialog_android_summary);
        a.recycle();
        // Set custom layout with SeekBar for the dialog
        this.setDialogLayoutResource(R.layout.seek_bar_dialog);
    }

    /**
     * Gets the value from this {@link Preference}.
     * */
    public int getValue() { return mValue; }

    /**
     * Binds views in the content View of the dialog to data. <p>Make sure to call through to the
     * superclass implementation.</p>
     * @param view The content View of the dialog, if it is custom.
     * */
    @Override
    protected void onBindDialogView(View view) {
        super.onBindDialogView(view);
        // Configure SeekBar. The maximum must be applied before the progress:
        // SeekBar clamps setProgress(...) against its current maximum, so setting
        // progress first would silently truncate any persisted value above the
        // default maximum (100) whenever mMax is larger than that.
        mSeekBar = (SeekBar) view.findViewById(R.id.seek_bar);
        mSeekBar.setMax(mMax);
        mSeekBar.setProgress(mValue);
        mSeekBar.setOnSeekBarChangeListener(mOnSeekBarChangeListener);
        // Configure dialog message view
        mDialogMessageView = (TextView) view.findViewById(android.R.id.message);
    }

    /**
     * Called when the dialog is dismissed and should be used to save data to
     * the {@link SharedPreferences}.
     * @param positiveResult Whether the positive button was clicked (true), or the negative button
     * was clicked or the dialog was canceled (false).
     * */
    @Override
    protected void onDialogClosed(boolean positiveResult) {
        super.onDialogClosed(positiveResult);
        if (positiveResult) { // Need to persist value
            final int value = mSeekBar.getProgress();
            if (callChangeListener(value)) setValue(value);
        }
        // Detach the listener so the dismissed dialog's SeekBar cannot keep
        // pushing progress updates into this preference.
        mSeekBar.setOnSeekBarChangeListener(null);
    }

    /**
     * Set the current progress to the specified value. The value is clamped to
     * the range [0, mMax] before being persisted.
     * */
    public void setValue(int value) {
        final boolean wasBlocking = shouldDisableDependents();
        // Persist value
        if (value < 0) value = 0;
        if (value > mMax) value = mMax;
        boolean changed = mValue != value;
        mValue = value;
        persistInt(mValue);
        // Notify about value was changed
        if (changed) notifyChanged();
        // Resolve dependencies
        final boolean isBlocking = shouldDisableDependents();
        if (isBlocking != wasBlocking) notifyDependencyChange(isBlocking);
    }

    /**
     * Returns the summary of this {@link SeekBarDialog}. If the summary has a
     * {@link String#format String formatting} marker in it (i.e. "%s" or "%1$s"), then
     * the current value will be substituted in its place.
     * */
    @Override
    public CharSequence getSummary() {
        return mSummary != null ? String.format(mSummary, mValue) : super.getSummary();
    }

    /**
     * Sets the summary for this {@link SeekBarDialog} with a CharSequence. If the summary has a
     * {@link String#format String formatting} marker in it (i.e. "%s" or "%1$s"), then
     * the current value will be substituted in its place when it's retrieved.
     */
    @Override
    public void setSummary(CharSequence summary) {
        super.setSummary(summary);
        mSummary = (summary != null ? summary.toString() : null);
    }

    /**
     * Returns the message to be shown on subsequent dialogs. If the message has a
     * {@link String#format String formatting} marker in it (i.e. "%s" or "%1$s"), then
     * the current value will be substituted in its place.
     * */
    @Override
    public CharSequence getDialogMessage() { return getDialogMessage(mValue); }

    /**
     * Returns the message to be shown on subsequent dialogs for specified value. If the message
     * has a {@link String#format String formatting} marker in it (i.e. "%s" or "%1$s"),
     * then the current value will be substituted in its place.
     * */
    public CharSequence getDialogMessage(int value) {
        return mDialogMessage != null ? String.format(mDialogMessage, value)
                : super.getDialogMessage();
    }

    /**
     * Called when a Preference is being inflated and the default value attribute needs to be read.
     * */
    @Override
    protected Object onGetDefaultValue(TypedArray a, int index) {
        return a.getInt(index, DEFAULT_VALUE);
    }

    /**
     * Implement this to set the initial value of the Preference. For more details, please, see
     * method from super class.
     * */
    @Override
    protected void onSetInitialValue(boolean restoreValue, Object defaultValue) {
        // Guard against a missing android:defaultValue attribute: the framework
        // passes null in that case and the previous unconditional (int) cast
        // threw a NullPointerException.
        final int def = (defaultValue instanceof Integer) ? (Integer) defaultValue : DEFAULT_VALUE;
        setValue(restoreValue ? getPersistedInt(mValue) : def);
    }

    /**
     * A callback that notifies clients when the progress level has been changed. For more details,
     * please, see {@link OnSeekBarChangeListener}.
     *
     * */
    private class OnSeekBarChangeListener implements SeekBar.OnSeekBarChangeListener {
        /**
         * Notification that the progress level has changed. Keeps the dialog
         * message in sync with the SeekBar position while the user drags it.
         * */
        @Override
        public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
            if (mDialogMessageView != null) mDialogMessageView.setText(getDialogMessage(progress));
        }

        /** Notification that the user has started a touch gesture. */
        @Override
        public void onStartTrackingTouch(SeekBar seekBar) { /* do nothing */}

        /**
         * Notification that the user has finished a touch gesture.
         */
        @Override
        public void onStopTrackingTouch(SeekBar seekBar) { /* do nothing */ }
    }
}
| |
package com.timgroup.eventstore.archiver;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import com.timgroup.eventstore.api.EventRecord;
import com.timgroup.eventstore.api.EventSource;
import com.timgroup.eventstore.api.Position;
import com.timgroup.eventstore.api.ResolvedEvent;
import com.timgroup.eventsubscription.Deserializer;
import com.timgroup.eventsubscription.Event;
import com.timgroup.eventsubscription.EventSubscription;
import com.timgroup.eventsubscription.SubscriptionBuilder;
import com.timgroup.remotefilestorage.s3.S3UploadableStorageForInputStream;
import com.timgroup.tucker.info.Component;
import com.timgroup.tucker.info.Report;
import com.timgroup.tucker.info.Status;
import com.timgroup.tucker.info.component.SimpleValueComponent;
import java.net.InetAddress;
import java.time.Clock;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicLong;
import static com.timgroup.tucker.info.Status.INFO;
import static java.lang.String.format;
import static java.util.stream.Collectors.toList;
/**
 * Archives events from a live {@link EventSource} into S3: subscribes to the
 * live event stream, batches events according to the configured
 * {@link BatchingPolicy}, and uploads each batch through
 * {@link S3UploadableStorageForInputStream} (via {@link BatchingUploadHandler}).
 *
 * <p>Progress is exposed through Codahale metrics (all names prefixed with the
 * supplied monitoring prefix) and Tucker {@link Component}s.</p>
 */
public class S3Archiver {
    public static final String DEFAULT_MONITORING_PREFIX = "tg-eventstore-s3-archiver";

    private final EventSource liveEventSource;
    private final EventSubscription eventSubscription;
    private final String eventStoreId;
    private final BatchingPolicy batchingPolicy;
    private final S3ArchiveMaxPositionFetcher maxPositionFetcher;
    private final BatchingUploadHandler batchingUploadHandler;
    private final SimpleValueComponent checkpointPositionComponent;

    // Last observed positions, surfaced as gauges in the constructor.
    private final AtomicLong maxPositionInArchive = new AtomicLong();
    private final AtomicLong maxPositionInEventSource = new AtomicLong();

    private final Timer s3ListingTimer;
    private final String monitoringPrefix;

    // Lifecycle state; mutated only by start()/stop().
    private RunState runState = RunState.UNSTARTED;

    /**
     * Wires up the subscription, the batching upload pipeline and all
     * monitoring. The subscription resumes from {@code maxPositionInArchiveOnStartup}
     * (or position 0 when the archive is empty).
     */
    private S3Archiver(EventSource liveEventSource,
                       S3UploadableStorageForInputStream output,
                       String eventStoreId,
                       SubscriptionBuilder subscriptionBuilder,
                       BatchingPolicy batchingPolicy,
                       Optional<Long> maxPositionInArchiveOnStartup,
                       S3ArchiveMaxPositionFetcher maxPositionFetcher,
                       String applicationName,
                       MetricRegistry metricRegistry,
                       String monitoringPrefix,
                       Clock clock)
    {
        this.liveEventSource = liveEventSource;
        this.eventStoreId = eventStoreId;
        this.monitoringPrefix = monitoringPrefix;
        this.batchingPolicy = batchingPolicy;
        S3ArchiveKeyFormat batchS3ObjectKeyFormat = new S3ArchiveKeyFormat(eventStoreId);
        this.maxPositionFetcher = maxPositionFetcher;
        // Report (as INFO) the position we resumed from, for operator visibility.
        this.checkpointPositionComponent = new SimpleValueComponent(this.monitoringPrefix + "-checkpoint-position",
                "Checkpoint position that archiver resumed from on startup");
        this.checkpointPositionComponent.updateValue(INFO, maxPositionInArchiveOnStartup);
        // Metadata attached to every uploaded batch, identifying its producer.
        Map<String, String> appMetadata = new HashMap<>();
        appMetadata.put("event_source", liveEventSource.toString());
        appMetadata.put("app_name", applicationName);
        appMetadata.put("app_version", System.getProperty("timgroup.app.version"));
        appMetadata.put("hostname", hostname());
        Histogram uncompressedSizeMetrics = metricRegistry.histogram(this.monitoringPrefix + ".archive.batch.uncompressed_size_bytes");
        Histogram compressedSizeMetrics = metricRegistry.histogram(this.monitoringPrefix + ".archive.batch.compressed_size_bytes");
        CurrentBatchWriter currentBatchWriter = new CurrentBatchWriter(
                batchingPolicy,
                this::positionFrom,
                batchS3ObjectKeyFormat,
                uncompressedSizeMetrics,
                compressedSizeMetrics);
        Timer s3UploadTimer = metricRegistry.timer(this.monitoringPrefix + ".archive.upload");
        this.batchingUploadHandler = new BatchingUploadHandler(output, currentBatchWriter, clock, appMetadata, monitoringPrefix, s3UploadTimer);
        // Subscribe to the live store, starting just after the archived checkpoint.
        this.eventSubscription = subscriptionBuilder
                .readingFrom(liveEventSource.readAll(), convertPosition(maxPositionInArchiveOnStartup))
                .deserializingUsing(Deserializer.applying(EventRecordHolder::new))
                .publishingTo(batchingUploadHandler)
                .withMaxInitialReplayDuration(Duration.ofMinutes(30))
                .build();
        this.maxPositionInArchive.set(maxPositionInArchiveOnStartup.orElse(0L));
        metricRegistry.gauge(this.monitoringPrefix + ".archive.max_position", () -> maxPositionInArchive::get);
        metricRegistry.gauge(this.monitoringPrefix + ".event_source.max_position", () -> maxPositionInEventSource::get);
        metricRegistry.gauge(this.monitoringPrefix + ".archive.events_awaiting_upload", () -> currentBatchWriter::eventsInCurrentBatch);
        this.s3ListingTimer = metricRegistry.timer(this.monitoringPrefix + ".archive.list");
    }

    /** Best-effort local host name for batch metadata; falls back to "localhost". */
    private static String hostname() {
        try {
            return InetAddress.getLocalHost().getHostName();
        } catch (Exception e) {
            return "localhost";
        }
    }

    /**
     * Converts an archive position (a long, absent meaning "start of stream")
     * into the live store's {@link Position} type. Relies on the store's
     * position codec round-tripping positions as decimal long strings.
     */
    private Position convertPosition(Optional<Long> positionInArchive) {
        Long maxPositionInArchive = positionInArchive.orElse(0L);
        return liveEventSource.readAll().storePositionCodec().deserializePosition(String.valueOf(maxPositionInArchive));
    }

    /** Inverse of {@link #convertPosition}: live-store position -> long. */
    private Long positionFrom(ResolvedEvent eventFromLiveEventSource) {
        return Long.parseLong(liveEventSource.readAll().storePositionCodec().serializePosition(eventFromLiveEventSource.position()));
    }

    /**
     * Factory method: fetches the current max archived position from S3 and
     * builds an archiver that resumes from it.
     */
    public static S3Archiver newS3Archiver(EventSource liveEventSource, S3UploadableStorageForInputStream output,
            String eventStoreId, SubscriptionBuilder subscriptionBuilder, BatchingPolicy batchingPolicy,
            S3ArchiveMaxPositionFetcher maxPositionFetcher, String applicationName, MetricRegistry metricRegistry,
            String monitoringPrefix, Clock clock) {
        return new S3Archiver(liveEventSource, output, eventStoreId, subscriptionBuilder, batchingPolicy, maxPositionFetcher.maxPosition(),
                maxPositionFetcher, applicationName, metricRegistry, monitoringPrefix, clock);
    }

    /** Starts the underlying subscription and marks this archiver RUNNING. */
    public void start() {
        this.eventSubscription.start();
        this.runState = RunState.RUNNING;
    }

    /** Stops the underlying subscription and marks this archiver STOPPED. */
    public void stop() {
        this.eventSubscription.stop();
        this.runState = RunState.STOPPED;
    }

    /**
     * Reads the last event in the live store; also updates the
     * event_source.max_position gauge as a side effect.
     */
    public Optional<ResolvedEvent> lastEventInLiveEventStore() {
        Optional<ResolvedEvent> lastEventInLive = liveEventSource.readAll().readLastEvent();
        Optional<Long> maxPositionInLive = lastEventInLive.map(this::positionFrom);
        maxPositionInLive.ifPresent(maxPositionInEventSource::set);
        return lastEventInLive;
    }

    /**
     * All monitoring components for this archiver (live source, subscription,
     * staleness, checkpoint, upload handler), each capped at WARNING so archiver
     * trouble never takes the hosting application to CRITICAL.
     */
    public Collection<Component> monitoring() {
        List<Component> components = new ArrayList<>();
        components.addAll(liveEventSource.monitoring());
        components.addAll(eventSubscription.statusComponents());
        components.add(new ArchiveStalenessComponent(monitoringPrefix));
        components.add(checkpointPositionComponent);
        components.addAll(batchingUploadHandler.monitoring());
        return components.stream().map(c -> c.withStatusNoWorseThan(Status.WARNING)).collect(toList());
    }

    public String getEventStoreId() {
        return eventStoreId;
    }

    /**
     * Fetches the max archived position from S3 (a timed listing call) and
     * refreshes the archive.max_position gauge.
     */
    public Optional<Long> maxPositionInArchive() {
        try (Timer.Context ignored = this.s3ListingTimer.time()) {
            Optional<Long> maxPosition = this.maxPositionFetcher.maxPosition();
            this.maxPositionInArchive.set(maxPosition.orElse(0L));
            return maxPosition;
        }
    }

    /** Snapshot of run state plus current live and archive positions. */
    public ArchiverState state() {
        return new ArchiverState(this.runState, lastEventInLiveEventStore().map(this::positionFrom), maxPositionInArchive());
    }

    public enum RunState { UNSTARTED, RUNNING, STOPPED }

    /** Minimal {@link Event} wrapper carrying the raw {@link EventRecord}. */
    public static final class EventRecordHolder implements Event {
        @SuppressWarnings("WeakerAccess")
        public final EventRecord record;

        private EventRecordHolder(EventRecord record) {
            this.record = record;
        }
    }

    /**
     * Component reporting whether the archive lags the live store, as judged by
     * the {@link BatchingPolicy}. Reports WARNING when stale, OK otherwise.
     */
    private final class ArchiveStalenessComponent extends Component {
        ArchiveStalenessComponent(String monitoringPrefix) {
            super(monitoringPrefix + "-staleness", "Is archive up to date?");
        }

        @Override
        public Report getReport() {
            Optional<ResolvedEvent> lastEventInLive = lastEventInLiveEventStore();
            Optional<Long> livePosition = lastEventInLive.map(S3Archiver.this::positionFrom);
            Optional<Long> maxPositionInArchive = maxPositionInArchive();
            boolean isStale = batchingPolicy.isStale(maxPositionInArchive, livePosition, lastEventInLive.map(ResolvedEvent::eventRecord));
            String value = format("%s%nmax_position in live=%s%nmax_position in archive=%s",
                    isStale ? "Archive is stale compared to live event store" : "Archive is up to date with live event store",
                    livePosition.map(Object::toString).orElse("[none]"),
                    maxPositionInArchive.map(Object::toString).orElse("[none]")
            );
            return isStale ? new Report(Status.WARNING, value) : new Report(Status.OK, value);
        }
    }
}
| |
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.contentprovidersample.provider;
import android.content.ContentProvider;
import android.content.ContentProviderOperation;
import android.content.ContentProviderResult;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Context;
import android.content.OperationApplicationException;
import android.content.UriMatcher;
import android.database.Cursor;
import android.net.Uri;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import com.example.android.contentprovidersample.data.Cheese;
import com.example.android.contentprovidersample.data.CheeseDao;
import com.example.android.contentprovidersample.data.SampleDatabase;
import java.util.ArrayList;
/**
* A {@link ContentProvider} based on a Room database.
*
* <p>Note that you don't need to implement a ContentProvider unless you want to expose the data
* outside your process or your application already uses a ContentProvider.</p>
*/
public class SampleContentProvider extends ContentProvider {

    /** The authority of this content provider. */
    public static final String AUTHORITY = "com.example.android.contentprovidersample.provider";

    /** The URI for the Cheese table. */
    public static final Uri URI_CHEESE = Uri.parse(
            "content://" + AUTHORITY + "/" + Cheese.TABLE_NAME);

    /** The match code for some items in the Cheese table. */
    private static final int CODE_CHEESE_DIR = 1;

    /** The match code for an item in the Cheese table. */
    private static final int CODE_CHEESE_ITEM = 2;

    /** The URI matcher. */
    private static final UriMatcher MATCHER = new UriMatcher(UriMatcher.NO_MATCH);

    static {
        MATCHER.addURI(AUTHORITY, Cheese.TABLE_NAME, CODE_CHEESE_DIR);
        MATCHER.addURI(AUTHORITY, Cheese.TABLE_NAME + "/*", CODE_CHEESE_ITEM);
    }

    @Override
    public boolean onCreate() {
        // The database is initialized lazily via SampleDatabase.getInstance(...),
        // so there is nothing to do here.
        return true;
    }

    /**
     * Queries either the whole Cheese table (dir URI) or a single row by ID
     * (item URI). The returned cursor is registered for change notifications
     * on {@code uri}.
     */
    @Nullable
    @Override
    public Cursor query(@NonNull Uri uri, @Nullable String[] projection, @Nullable String selection,
            @Nullable String[] selectionArgs, @Nullable String sortOrder) {
        final int code = MATCHER.match(uri);
        if (code == CODE_CHEESE_DIR || code == CODE_CHEESE_ITEM) {
            final Context context = getContext();
            if (context == null) {
                return null;
            }
            CheeseDao cheese = SampleDatabase.getInstance(context).cheese();
            final Cursor cursor;
            if (code == CODE_CHEESE_DIR) {
                cursor = cheese.selectAll();
            } else {
                cursor = cheese.selectById(ContentUris.parseId(uri));
            }
            cursor.setNotificationUri(context.getContentResolver(), uri);
            return cursor;
        } else {
            throw new IllegalArgumentException("Unknown URI: " + uri);
        }
    }

    @Nullable
    @Override
    public String getType(@NonNull Uri uri) {
        switch (MATCHER.match(uri)) {
            case CODE_CHEESE_DIR:
                return "vnd.android.cursor.dir/" + AUTHORITY + "." + Cheese.TABLE_NAME;
            case CODE_CHEESE_ITEM:
                return "vnd.android.cursor.item/" + AUTHORITY + "." + Cheese.TABLE_NAME;
            default:
                throw new IllegalArgumentException("Unknown URI: " + uri);
        }
    }

    /**
     * Inserts a row built from {@code values} and notifies observers of the
     * table URI. Only the dir URI is valid for insertion.
     */
    @Nullable
    @Override
    public Uri insert(@NonNull Uri uri, @Nullable ContentValues values) {
        switch (MATCHER.match(uri)) {
            case CODE_CHEESE_DIR:
                final Context context = getContext();
                if (context == null) {
                    return null;
                }
                final long id = SampleDatabase.getInstance(context).cheese()
                        .insert(Cheese.fromContentValues(values));
                context.getContentResolver().notifyChange(uri, null);
                return ContentUris.withAppendedId(uri, id);
            case CODE_CHEESE_ITEM:
                throw new IllegalArgumentException("Invalid URI, cannot insert with ID: " + uri);
            default:
                throw new IllegalArgumentException("Unknown URI: " + uri);
        }
    }

    /**
     * Deletes the row identified by an item URI and notifies observers.
     * @return the number of rows deleted (0 or 1).
     */
    @Override
    public int delete(@NonNull Uri uri, @Nullable String selection,
            @Nullable String[] selectionArgs) {
        switch (MATCHER.match(uri)) {
            case CODE_CHEESE_DIR:
                // Message fixed: this is the delete path (previously it said "update",
                // a copy-paste from update()).
                throw new IllegalArgumentException("Invalid URI, cannot delete without ID: " + uri);
            case CODE_CHEESE_ITEM:
                final Context context = getContext();
                if (context == null) {
                    return 0;
                }
                final int count = SampleDatabase.getInstance(context).cheese()
                        .deleteById(ContentUris.parseId(uri));
                context.getContentResolver().notifyChange(uri, null);
                return count;
            default:
                throw new IllegalArgumentException("Unknown URI: " + uri);
        }
    }

    /**
     * Updates the row identified by an item URI with {@code values} and
     * notifies observers.
     * @return the number of rows updated (0 or 1).
     */
    @Override
    public int update(@NonNull Uri uri, @Nullable ContentValues values, @Nullable String selection,
            @Nullable String[] selectionArgs) {
        switch (MATCHER.match(uri)) {
            case CODE_CHEESE_DIR:
                throw new IllegalArgumentException("Invalid URI, cannot update without ID" + uri);
            case CODE_CHEESE_ITEM:
                final Context context = getContext();
                if (context == null) {
                    return 0;
                }
                final Cheese cheese = Cheese.fromContentValues(values);
                cheese.id = ContentUris.parseId(uri);
                final int count = SampleDatabase.getInstance(context).cheese()
                        .update(cheese);
                context.getContentResolver().notifyChange(uri, null);
                return count;
            default:
                throw new IllegalArgumentException("Unknown URI: " + uri);
        }
    }

    /**
     * Applies the operations atomically inside a single Room transaction:
     * either all succeed or none are committed.
     */
    @NonNull
    @Override
    public ContentProviderResult[] applyBatch(
            @NonNull ArrayList<ContentProviderOperation> operations)
            throws OperationApplicationException {
        final Context context = getContext();
        if (context == null) {
            return new ContentProviderResult[0];
        }
        final SampleDatabase database = SampleDatabase.getInstance(context);
        database.beginTransaction();
        try {
            final ContentProviderResult[] result = super.applyBatch(operations);
            database.setTransactionSuccessful();
            return result;
        } finally {
            database.endTransaction();
        }
    }

    /**
     * Inserts multiple rows in one call and notifies observers once.
     * @return the number of rows inserted.
     */
    @Override
    public int bulkInsert(@NonNull Uri uri, @NonNull ContentValues[] valuesArray) {
        switch (MATCHER.match(uri)) {
            case CODE_CHEESE_DIR:
                final Context context = getContext();
                if (context == null) {
                    return 0;
                }
                final SampleDatabase database = SampleDatabase.getInstance(context);
                final Cheese[] cheeses = new Cheese[valuesArray.length];
                for (int i = 0; i < valuesArray.length; i++) {
                    cheeses[i] = Cheese.fromContentValues(valuesArray[i]);
                }
                final int inserted = database.cheese().insertAll(cheeses).length;
                // Notify observers, consistent with insert/update/delete; previously
                // bulk inserts left registered cursors stale.
                context.getContentResolver().notifyChange(uri, null);
                return inserted;
            case CODE_CHEESE_ITEM:
                throw new IllegalArgumentException("Invalid URI, cannot insert with ID: " + uri);
            default:
                throw new IllegalArgumentException("Unknown URI: " + uri);
        }
    }
}
| |
package com.paypal.svcs.types.ap;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import com.paypal.svcs.types.ap.FundingPlan;
import com.paypal.svcs.types.ap.PayErrorList;
import com.paypal.svcs.types.ap.PaymentInfoList;
import com.paypal.svcs.types.ap.SenderIdentifier;
import com.paypal.svcs.types.ap.WarningDataList;
import com.paypal.svcs.types.common.ErrorData;
import com.paypal.svcs.types.common.ResponseEnvelope;
/**
* The PayResponse contains the result of the Pay operation.
* The payKey and execution status of the request should always
* be provided.
*/
public class PayResponse{
/**
*
*@Required
*/
private ResponseEnvelope responseEnvelope;
/**
*
*@Required
*/
private String payKey;
/**
*
*@Required
*/
private String paymentExecStatus;
/**
*
*/
private PayErrorList payErrorList;
/**
*
*@Required
*/
private PaymentInfoList paymentInfoList;
/**
*
*/
private SenderIdentifier sender;
/**
*
*/
private FundingPlan defaultFundingPlan;
/**
*
*/
private WarningDataList warningDataList;
/**
*
*/
private List<ErrorData> error = new ArrayList<ErrorData>();
/**
* Default Constructor
*/
public PayResponse (){
}
/**
* Getter for responseEnvelope
*/
public ResponseEnvelope getResponseEnvelope() {
return responseEnvelope;
}
/**
* Setter for responseEnvelope
*/
public void setResponseEnvelope(ResponseEnvelope responseEnvelope) {
this.responseEnvelope = responseEnvelope;
}
/**
* Getter for payKey
*/
public String getPayKey() {
return payKey;
}
/**
* Setter for payKey
*/
public void setPayKey(String payKey) {
this.payKey = payKey;
}
/**
* Getter for paymentExecStatus
*/
public String getPaymentExecStatus() {
return paymentExecStatus;
}
/**
* Setter for paymentExecStatus
*/
public void setPaymentExecStatus(String paymentExecStatus) {
this.paymentExecStatus = paymentExecStatus;
}
/**
* Getter for payErrorList
*/
public PayErrorList getPayErrorList() {
return payErrorList;
}
/**
* Setter for payErrorList
*/
public void setPayErrorList(PayErrorList payErrorList) {
this.payErrorList = payErrorList;
}
/**
* Getter for paymentInfoList
*/
public PaymentInfoList getPaymentInfoList() {
return paymentInfoList;
}
/**
* Setter for paymentInfoList
*/
public void setPaymentInfoList(PaymentInfoList paymentInfoList) {
this.paymentInfoList = paymentInfoList;
}
/**
* Getter for sender
*/
public SenderIdentifier getSender() {
return sender;
}
/**
* Setter for sender
*/
public void setSender(SenderIdentifier sender) {
this.sender = sender;
}
/**
* Getter for defaultFundingPlan
*/
public FundingPlan getDefaultFundingPlan() {
return defaultFundingPlan;
}
/**
* Setter for defaultFundingPlan
*/
public void setDefaultFundingPlan(FundingPlan defaultFundingPlan) {
this.defaultFundingPlan = defaultFundingPlan;
}
/**
* Getter for warningDataList
*/
public WarningDataList getWarningDataList() {
return warningDataList;
}
/**
* Setter for warningDataList
*/
public void setWarningDataList(WarningDataList warningDataList) {
this.warningDataList = warningDataList;
}
/**
* Getter for error
*/
public List<ErrorData> getError() {
return error;
}
/**
* Setter for error
*/
public void setError(List<ErrorData> error) {
this.error = error;
}
public static com.paypal.svcs.types.ap.PayResponse createInstance(Map<String, String> map, String prefix, int index) {
com.paypal.svcs.types.ap.PayResponse payResponse = null;
int i = 0;
if (index != -1) {
if (prefix != null && prefix.length() != 0 && !prefix.endsWith(".")) {
prefix = prefix + "(" + index + ").";
}
} else {
if (prefix != null && prefix.length() != 0 && !prefix.endsWith(".")) {
prefix = prefix + ".";
}
}
ResponseEnvelope responseEnvelope = ResponseEnvelope.createInstance(map, prefix + "responseEnvelope", -1);
if (responseEnvelope != null) {
payResponse = (payResponse == null) ? new com.paypal.svcs.types.ap.PayResponse() : payResponse;
payResponse.setResponseEnvelope(responseEnvelope);
}
if (map.containsKey(prefix + "payKey")) {
payResponse = (payResponse == null) ? new com.paypal.svcs.types.ap.PayResponse() : payResponse;
payResponse.setPayKey(map.get(prefix + "payKey"));
}
if (map.containsKey(prefix + "paymentExecStatus")) {
payResponse = (payResponse == null) ? new com.paypal.svcs.types.ap.PayResponse() : payResponse;
payResponse.setPaymentExecStatus(map.get(prefix + "paymentExecStatus"));
}
PayErrorList payErrorList = PayErrorList.createInstance(map, prefix + "payErrorList", -1);
if (payErrorList != null) {
payResponse = (payResponse == null) ? new com.paypal.svcs.types.ap.PayResponse() : payResponse;
payResponse.setPayErrorList(payErrorList);
}
PaymentInfoList paymentInfoList = PaymentInfoList.createInstance(map, prefix + "paymentInfoList", -1);
if (paymentInfoList != null) {
payResponse = (payResponse == null) ? new com.paypal.svcs.types.ap.PayResponse() : payResponse;
payResponse.setPaymentInfoList(paymentInfoList);
}
SenderIdentifier sender = SenderIdentifier.createInstance(map, prefix + "sender", -1);
if (sender != null) {
payResponse = (payResponse == null) ? new com.paypal.svcs.types.ap.PayResponse() : payResponse;
payResponse.setSender(sender);
}
FundingPlan defaultFundingPlan = FundingPlan.createInstance(map, prefix + "defaultFundingPlan", -1);
if (defaultFundingPlan != null) {
payResponse = (payResponse == null) ? new com.paypal.svcs.types.ap.PayResponse() : payResponse;
payResponse.setDefaultFundingPlan(defaultFundingPlan);
}
WarningDataList warningDataList = WarningDataList.createInstance(map, prefix + "warningDataList", -1);
if (warningDataList != null) {
payResponse = (payResponse == null) ? new com.paypal.svcs.types.ap.PayResponse() : payResponse;
payResponse.setWarningDataList(warningDataList);
}
i = 0;
while(true) {
ErrorData error = ErrorData.createInstance(map, prefix + "error", i);
if (error != null) {
payResponse = (payResponse == null) ? new com.paypal.svcs.types.ap.PayResponse() : payResponse;
payResponse.getError().add(error);
i++;
} else {
break;
}
}
return payResponse;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.mllp.internal;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.Socket;
import java.net.SocketAddress;
import java.net.SocketTimeoutException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import org.apache.camel.Exchange;
import org.apache.camel.ExchangePattern;
import org.apache.camel.Message;
import org.apache.camel.Route;
import org.apache.camel.component.mllp.MllpAcknowledgementDeliveryException;
import org.apache.camel.component.mllp.MllpConstants;
import org.apache.camel.component.mllp.MllpInvalidAcknowledgementException;
import org.apache.camel.component.mllp.MllpInvalidMessageException;
import org.apache.camel.component.mllp.MllpProtocolConstants;
import org.apache.camel.component.mllp.MllpReceiveException;
import org.apache.camel.component.mllp.MllpSocketException;
import org.apache.camel.component.mllp.MllpTcpServerConsumer;
import org.apache.camel.converter.IOConverter;
import org.apache.camel.impl.MDCUnitOfWork;
import org.apache.camel.processor.mllp.Hl7AcknowledgementGenerationException;
import org.apache.camel.util.IOHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;
/**
* Runnable to read the Socket
*/
/**
 * Runnable that services a single MLLP client connection: reads from the client
 * {@link Socket}, assembles complete MLLP-enveloped HL7 messages in a
 * {@link MllpSocketBuffer}, and hands each payload to the {@link MllpTcpServerConsumer}
 * for processing.
 */
public class TcpSocketConsumerRunnable implements Runnable {
    final Socket clientSocket;
    final MllpSocketBuffer mllpBuffer;

    Logger log = LoggerFactory.getLogger(this.getClass());
    MllpTcpServerConsumer consumer;
    // volatile: stop() is invoked from other threads and the run() loop must observe it
    volatile boolean running;

    private final String localAddress;
    private final String remoteAddress;
    private final String combinedAddress;

    /**
     * Creates the runnable and applies the consumer's socket configuration (keep-alive,
     * TCP_NODELAY, buffer sizes, SO_LINGER off, initial receive timeout) to the client socket.
     *
     * @param consumer     the consumer that will process received messages
     * @param clientSocket the already-connected client socket to read from
     * @param mllpBuffer   an optional pre-filled buffer (e.g. from a validation read); a new
     *                     buffer is created when {@code null}
     *
     * @throws IllegalStateException if configuring the socket fails
     */
    public TcpSocketConsumerRunnable(MllpTcpServerConsumer consumer, Socket clientSocket, MllpSocketBuffer mllpBuffer) {
        this.consumer = consumer;
        this.clientSocket = clientSocket;

        SocketAddress localSocketAddress = clientSocket.getLocalSocketAddress();
        localAddress = (localSocketAddress != null) ? localSocketAddress.toString() : null;

        SocketAddress remoteSocketAddress = clientSocket.getRemoteSocketAddress();
        remoteAddress = (remoteSocketAddress != null) ? remoteSocketAddress.toString() : null;

        combinedAddress = MllpSocketBuffer.formatAddressString(remoteSocketAddress, localSocketAddress);

        try {
            if (consumer.getConfiguration().hasKeepAlive()) {
                this.clientSocket.setKeepAlive(consumer.getConfiguration().getKeepAlive());
            }
            if (consumer.getConfiguration().hasTcpNoDelay()) {
                this.clientSocket.setTcpNoDelay(consumer.getConfiguration().getTcpNoDelay());
            }
            if (consumer.getConfiguration().hasReceiveBufferSize()) {
                this.clientSocket.setReceiveBufferSize(consumer.getConfiguration().getReceiveBufferSize());
            }
            if (consumer.getConfiguration().hasSendBufferSize()) {
                this.clientSocket.setSendBufferSize(consumer.getConfiguration().getSendBufferSize());
            }
            this.clientSocket.setSoLinger(false, -1);

            // Initial Read Timeout
            this.clientSocket.setSoTimeout(consumer.getConfiguration().getReceiveTimeout());
        } catch (IOException initializationException) {
            throw new IllegalStateException("Failed to initialize " + this.getClass().getSimpleName(), initializationException);
        }

        this.mllpBuffer = (mllpBuffer != null) ? mllpBuffer : new MllpSocketBuffer(consumer.getEndpoint());
    }

    /**
     * Derive a thread name from the class name, the component URI and the connection information.
     * <p/>
     * The String will be in the format {@code <class name>[endpoint key] - <combined address>}.
     *
     * @param socket the client socket (currently unused; the address information captured in the
     *               constructor is used instead)
     *
     * @return the thread name
     */
    String createThreadName(Socket socket) {
        // Get the URI without options
        String fullEndpointKey = consumer.getEndpoint().getEndpointKey();
        String endpointKey;
        if (fullEndpointKey.contains("?")) {
            endpointKey = fullEndpointKey.substring(0, fullEndpointKey.indexOf('?'));
        } else {
            endpointKey = fullEndpointKey;
        }

        // Now put it all together
        return String.format("%s[%s] - %s", this.getClass().getSimpleName(), endpointKey, combinedAddress);
    }

    /**
     * Main receive loop: reads from the socket until stopped, the socket closes, or an
     * unrecoverable error occurs.  Complete messages are dispatched to the consumer;
     * timeouts, idle connections and partial payloads are handled per the endpoint
     * configuration.  MDC and the thread name are set for the duration of the loop and
     * restored on exit.
     */
    @Override
    public void run() {
        running = true;
        String originalThreadName = Thread.currentThread().getName();
        Thread.currentThread().setName(createThreadName(clientSocket));
        MDC.put(MDCUnitOfWork.MDC_CAMEL_CONTEXT_ID, consumer.getEndpoint().getCamelContext().getName());

        Route route = consumer.getRoute();
        if (route != null) {
            String routeId = route.getId();
            if (routeId != null) {
                MDC.put(MDCUnitOfWork.MDC_ROUTE_ID, route.getId());
            }
        }

        log.debug("Starting {} for {}", this.getClass().getSimpleName(), combinedAddress);
        try {
            byte[] hl7MessageBytes = null;
            if (mllpBuffer.hasCompleteEnvelope()) {
                // If we got a complete message on the validation read, process it
                hl7MessageBytes = mllpBuffer.toMllpPayload();
                mllpBuffer.reset();
                consumer.processMessage(hl7MessageBytes, this);
            }

            while (running && null != clientSocket && clientSocket.isConnected() && !clientSocket.isClosed()) {
                log.debug("Checking for data ....");
                try {
                    mllpBuffer.readFrom(clientSocket);
                    if (mllpBuffer.hasCompleteEnvelope()) {
                        hl7MessageBytes = mllpBuffer.toMllpPayload();
                        if (log.isDebugEnabled()) {
                            log.debug("Received {} byte message {}", hl7MessageBytes.length, Hl7Util.convertToPrintFriendlyString(hl7MessageBytes));
                        }
                        if (mllpBuffer.hasLeadingOutOfBandData()) {
                            // TODO: Move the conversion utilities to the MllpSocketBuffer to avoid a byte[] copy
                            log.warn("Ignoring leading out-of-band data: {}", Hl7Util.convertToPrintFriendlyString(mllpBuffer.getLeadingOutOfBandData()));
                        }
                        if (mllpBuffer.hasTrailingOutOfBandData()) {
                            log.warn("Ignoring trailing out-of-band data: {}", Hl7Util.convertToPrintFriendlyString(mllpBuffer.getTrailingOutOfBandData()));
                        }
                        mllpBuffer.reset();

                        consumer.processMessage(hl7MessageBytes, this);
                    } else if (!mllpBuffer.hasStartOfBlock()) {
                        // Data arrived without an MLLP envelope - discard it
                        byte[] payload = mllpBuffer.toByteArray();
                        log.warn("Ignoring {} byte un-enveloped payload {}", payload.length, Hl7Util.convertToPrintFriendlyString(payload));
                        mllpBuffer.reset();
                    } else if (!mllpBuffer.isEmpty()) {
                        // Envelope started but not yet complete - keep accumulating
                        byte[] payload = mllpBuffer.toByteArray();
                        log.warn("Partial {} byte payload received {}", payload.length, Hl7Util.convertToPrintFriendlyString(payload));
                    }
                } catch (SocketTimeoutException timeoutEx) {
                    if (mllpBuffer.isEmpty()) {
                        if (consumer.getConfiguration().hasIdleTimeout()) {
                            long currentTicks = System.currentTimeMillis();
                            long lastReceivedMessageTicks = consumer.getConsumerRunnables().get(this);
                            long idleTime = currentTicks - lastReceivedMessageTicks;
                            if (idleTime >= consumer.getConfiguration().getIdleTimeout()) {
                                String resetMessage = String.format("Connection idle time %d exceeded idleTimeout %d", idleTime, consumer.getConfiguration().getIdleTimeout());
                                mllpBuffer.resetSocket(clientSocket, resetMessage);
                            }
                        }
                        log.debug("No data received - ignoring timeout");
                    } else {
                        mllpBuffer.resetSocket(clientSocket);
                        // FIX: the original code constructed (and discarded) an
                        // MllpInvalidMessageException here, consuming the buffer with
                        // toByteArrayAndReset() and then calling it a second time for the
                        // handler - which received an empty payload because the first call
                        // had already reset the buffer.  Capture the payload exactly once.
                        consumer.handleMessageTimeout("Timeout receiving complete message payload", mllpBuffer.toByteArrayAndReset(), timeoutEx);
                    }
                } catch (MllpSocketException mllpSocketEx) {
                    mllpBuffer.resetSocket(clientSocket);
                    if (!mllpBuffer.isEmpty()) {
                        consumer.handleMessageException("Exception encountered reading payload", mllpBuffer.toByteArrayAndReset(), mllpSocketEx);
                    } else {
                        log.debug("Ignoring exception encountered checking for data", mllpSocketEx);
                    }
                }
            }
        } catch (Exception unexpectedEx) {
            log.error("Unexpected exception encountered receiving messages", unexpectedEx);
        } finally {
            consumer.getConsumerRunnables().remove(this);
            log.debug("{} for {} completed", this.getClass().getSimpleName(), combinedAddress);

            Thread.currentThread().setName(originalThreadName);
            MDC.remove(MDCUnitOfWork.MDC_ROUTE_ID);
            MDC.remove(MDCUnitOfWork.MDC_CAMEL_CONTEXT_ID);

            mllpBuffer.resetSocket(clientSocket);
        }
    }

    public Socket getSocket() {
        return clientSocket;
    }

    public MllpSocketBuffer getMllpBuffer() {
        return mllpBuffer;
    }

    public void closeSocket() {
        mllpBuffer.closeSocket(clientSocket);
    }

    public void closeSocket(String logMessage) {
        mllpBuffer.closeSocket(clientSocket, logMessage);
    }

    public void resetSocket() {
        mllpBuffer.resetSocket(clientSocket);
    }

    public void resetSocket(String logMessage) {
        mllpBuffer.resetSocket(clientSocket, logMessage);
    }

    /** Signals the receive loop in {@link #run()} to exit. */
    public void stop() {
        running = false;
    }

    public boolean hasLocalAddress() {
        return localAddress != null && !localAddress.isEmpty();
    }

    public String getLocalAddress() {
        return localAddress;
    }

    public boolean hasRemoteAddress() {
        return remoteAddress != null && !remoteAddress.isEmpty();
    }

    public String getRemoteAddress() {
        return remoteAddress;
    }

    public boolean hasCombinedAddress() {
        // FIX: the original returned combinedAddress.isEmpty(), inverting the check
        // relative to hasLocalAddress()/hasRemoteAddress().
        return combinedAddress != null && !combinedAddress.isEmpty();
    }

    public String getCombinedAddress() {
        return combinedAddress;
    }
}
| |
/*
* Licensed to GraphHopper GmbH under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* GraphHopper GmbH licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.graphhopper.reader.dem;
import com.graphhopper.storage.DAType;
import com.graphhopper.storage.DataAccess;
import com.graphhopper.storage.Directory;
import com.graphhopper.storage.GHDirectory;
import com.graphhopper.util.BitUtil;
import com.graphhopper.util.Downloader;
import com.graphhopper.util.Helper;
import gnu.trove.map.hash.TIntObjectHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.net.SocketTimeoutException;
import java.util.zip.ZipInputStream;
/**
* Elevation data from NASA (SRTM).
* <p>
* Important information about SRTM: the coordinates of the lower-left corner of tile N40W118 are 40
* degrees north latitude and 118 degrees west longitude. To be more exact, these coordinates refer
* to the geometric center of the lower left sample, which in the case of SRTM3 data will be about
* 90 meters in extent.
* <p>
*
* @author Peter Karich
*/
/**
 * Elevation data from NASA (SRTM).
 * <p>
 * Important information about SRTM: the coordinates of the lower-left corner of tile N40W118 are 40
 * degrees north latitude and 118 degrees west longitude. To be more exact, these coordinates refer
 * to the geometric center of the lower left sample, which in the case of SRTM3 data will be about
 * 90 meters in extent.
 * <p>
 *
 * @author Peter Karich
 */
public class SRTMProvider implements ElevationProvider {
    private static final BitUtil BIT_UTIL = BitUtil.BIG;
    private final Logger logger = LoggerFactory.getLogger(getClass());
    // SRTM3 tiles are 1201x1201 samples; used when a cached tile carries no width header
    private final int DEFAULT_WIDTH = 1201;
    private final int WIDTH_BYTE_INDEX = 0;
    // use a map as an array is not quite useful if we want to hold only parts of the world
    private final TIntObjectHashMap<HeightTile> cacheData = new TIntObjectHashMap<HeightTile>();
    // maps the integer tile key to the continent directory name on the server
    private final TIntObjectHashMap<String> areas = new TIntObjectHashMap<String>();
    private final double precision = 1e7;
    private final double invPrecision = 1 / precision;
    private Directory dir;
    private DAType daType = DAType.MMAP;
    private Downloader downloader = new Downloader("GraphHopper SRTMReader").setTimeout(10000);
    private File cacheDir = new File("/tmp/srtm");
    // possible alternatives see #451
    // http://mirror.ufs.ac.za/datasets/SRTM3/
    //"http://dds.cr.usgs.gov/srtm/version2_1/SRTM3/"
    private String baseUrl = "https://srtm.kurviger.de/SRTM3/";
    private boolean calcMean = false;

    public SRTMProvider() {
        // move to explicit calls?
        init();
    }

    public static void main(String[] args) throws IOException {
        SRTMProvider provider = new SRTMProvider();

        // 1046
        System.out.println(provider.getEle(47.468668, 14.575127));
        // 1113
        System.out.println(provider.getEle(47.467753, 14.573911));

        // 1946
        System.out.println(provider.getEle(46.468835, 12.578777));

        // 845
        System.out.println(provider.getEle(48.469123, 9.576393));

        // 1113 vs new:
        provider.setCalcMean(true);
        System.out.println(provider.getEle(47.467753, 14.573911));
    }

    @Override
    public void setCalcMean(boolean calcMean) {
        this.calcMean = calcMean;
    }

    /**
     * The URLs are a bit ugly and so we need to find out which area name a certain lat,lon
     * coordinate has.
     *
     * @throws IllegalStateException if the bundled area-name resources cannot be read or
     *                               contain duplicate tile keys
     */
    private SRTMProvider init() {
        try {
            String strs[] = {"Africa", "Australia", "Eurasia", "Islands", "North_America", "South_America"};
            for (String str : strs) {
                InputStream is = getClass().getResourceAsStream(str + "_names.txt");
                for (String line : Helper.readFile(new InputStreamReader(is, Helper.UTF_CS))) {
                    // lines look like e.g. "N47E014"; parse sign + magnitude for lat and lon
                    int lat = Integer.parseInt(line.substring(1, 3));
                    if (line.substring(0, 1).charAt(0) == 'S')
                        lat = -lat;

                    int lon = Integer.parseInt(line.substring(4, 7));
                    if (line.substring(3, 4).charAt(0) == 'W')
                        lon = -lon;

                    int intKey = calcIntKey(lat, lon);
                    String key = areas.put(intKey, str);
                    if (key != null)
                        throw new IllegalStateException("do not overwrite existing! key " + intKey + " " + key + " vs. " + str);
                }
            }
            return this;
        } catch (Exception ex) {
            throw new IllegalStateException("Cannot load area names from classpath", ex);
        }
    }

    // use int key instead of string for lower memory usage
    private int calcIntKey(double lat, double lon) {
        // we could use LinearKeyAlgo but this is simpler as we only need integer precision:
        return (down(lat) + 90) * 1000 + down(lon) + 180;
    }

    public void setDownloader(Downloader downloader) {
        this.downloader = downloader;
    }

    /**
     * Sets the local directory where downloaded SRTM zip files and unpacked tiles are cached.
     *
     * @throws IllegalArgumentException if the path exists but is not a directory
     */
    @Override
    public ElevationProvider setCacheDir(File cacheDir) {
        if (cacheDir.exists() && !cacheDir.isDirectory())
            throw new IllegalArgumentException("Cache path has to be a directory");

        try {
            this.cacheDir = cacheDir.getCanonicalFile();
        } catch (IOException ex) {
            throw new RuntimeException(ex);
        }
        return this;
    }

    @Override
    public ElevationProvider setBaseURL(String baseUrl) {
        if (baseUrl == null || baseUrl.isEmpty())
            throw new IllegalArgumentException("baseUrl cannot be empty");

        this.baseUrl = baseUrl;
        return this;
    }

    @Override
    public ElevationProvider setDAType(DAType daType) {
        this.daType = daType;
        return this;
    }

    /**
     * Rounds toward negative infinity (unlike a plain int cast, which truncates toward
     * zero), with a small tolerance for values just below an integer boundary.
     */
    int down(double val) {
        int intVal = (int) val;
        if (val >= 0 || intVal - val < invPrecision)
            return intVal;
        return intVal - 1;
    }

    /**
     * Builds the server-relative tile path like {@code "Eurasia/N47E014"} for the given
     * coordinate, or {@code null} if the coordinate is not covered by any known area.
     */
    String getFileString(double lat, double lon) {
        int intKey = calcIntKey(lat, lon);
        String str = areas.get(intKey);
        if (str == null)
            return null;

        int minLat = Math.abs(down(lat));
        int minLon = Math.abs(down(lon));
        str += "/";
        if (lat >= 0)
            str += "N";
        else
            str += "S";

        if (minLat < 10)
            str += "0";
        str += minLat;

        if (lon >= 0)
            str += "E";
        else
            str += "W";

        // longitude is always three digits wide
        if (minLon < 10)
            str += "0";
        if (minLon < 100)
            str += "0";
        str += minLon;
        return str;
    }

    /**
     * Returns the elevation in meters for the given coordinate, loading (and if necessary
     * downloading) the containing SRTM tile on first access.  Returns 0 for coordinates
     * outside the known areas (e.g. oceans).
     */
    @Override
    public double getEle(double lat, double lon) {
        // snap the coordinate to the provider's fixed decimal precision
        lat = (int) (lat * precision) / precision;
        lon = (int) (lon * precision) / precision;
        int intKey = calcIntKey(lat, lon);
        HeightTile demProvider = cacheData.get(intKey);
        if (demProvider != null)
            return demProvider.getHeight(lat, lon);

        if (!cacheDir.exists())
            cacheDir.mkdirs();

        String fileDetails = getFileString(lat, lon);
        if (fileDetails == null)
            return 0;

        DataAccess heights = getDirectory().find("dem" + intKey);
        boolean loadExisting = false;
        try {
            loadExisting = heights.loadExisting();
        } catch (Exception ex) {
            logger.warn("cannot load dem" + intKey + ", error:" + ex.getMessage());
        }

        if (!loadExisting)
            updateHeightsFromZipFile(fileDetails, heights);

        // the header stores the number of 2-byte samples; tiles are square so width = sqrt
        int width = (int) (Math.sqrt(heights.getHeader(WIDTH_BYTE_INDEX)) + 0.5);
        if (width == 0)
            width = DEFAULT_WIDTH;

        demProvider = new HeightTile(down(lat), down(lon), width, precision, 1);
        cacheData.put(intKey, demProvider);
        demProvider.setCalcMean(calcMean);
        demProvider.setHeights(heights);
        return demProvider.getHeight(lat, lon);
    }

    /**
     * Decodes the raw .hgt bytes of a tile into the given DataAccess as big-endian shorts,
     * replacing implausible values (outside -1000..12000 m) with Short.MIN_VALUE ("no data").
     */
    private void updateHeightsFromZipFile(String fileDetails, DataAccess heights) throws RuntimeException {
        try {
            byte[] bytes = getByteArrayFromZipFile(fileDetails);
            heights.create(bytes.length);
            for (int bytePos = 0; bytePos < bytes.length; bytePos += 2) {
                short val = BIT_UTIL.toShort(bytes, bytePos);
                if (val < -1000 || val > 12000)
                    val = Short.MIN_VALUE;

                heights.setShort(bytePos, val);
            }
            heights.setHeader(WIDTH_BYTE_INDEX, bytes.length / 2);
            heights.flush();
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    /**
     * Reads the unzipped .hgt bytes for the given tile, downloading (and caching) the zip
     * from {@link #baseUrl} on first access.  The download is retried up to three times on
     * socket timeouts, sleeping 2s between attempts.
     */
    private byte[] getByteArrayFromZipFile(String fileDetails) throws InterruptedException, FileNotFoundException, IOException {
        String zippedURL = baseUrl + "/" + fileDetails + ".hgt.zip";
        File file = new File(cacheDir, new File(zippedURL).getName());
        // get zip file if not already in cacheDir
        if (!file.exists())
            for (int i = 0; i < 3; i++) {
                try {
                    downloader.downloadFile(zippedURL, file.getAbsolutePath());
                    break;
                } catch (SocketTimeoutException ex) {
                    // just try again after a little nap
                    Thread.sleep(2000);
                }
            }

        // FIX: try-with-resources so the streams are closed even when reading fails
        // part-way through (the original only closed them on the success path)
        try (InputStream is = new FileInputStream(file);
             ZipInputStream zis = new ZipInputStream(is)) {
            zis.getNextEntry();
            BufferedInputStream buff = new BufferedInputStream(zis);
            ByteArrayOutputStream os = new ByteArrayOutputStream();
            byte[] buffer = new byte[0xFFFF];
            int len;
            while ((len = buff.read(buffer)) > 0) {
                os.write(buffer, 0, len);
            }
            os.flush();
            return os.toByteArray();
        }
    }

    @Override
    public void release() {
        cacheData.clear();

        // for memory mapped type we create temporary unpacked files which should be removed
        if (dir != null)
            dir.clear();
    }

    @Override
    public String toString() {
        return "SRTM";
    }

    /** Lazily creates the storage directory backing the tile cache. */
    private Directory getDirectory() {
        if (dir != null)
            return dir;

        logger.info(this.toString() + " Elevation Provider, from: " + baseUrl + ", to: " + cacheDir + ", as: " + daType);
        return dir = new GHDirectory(cacheDir.getAbsolutePath(), daType);
    }
}
| |
package com.ilearnrw.reader.types;
/*
* Copyright (c) 2015, iLearnRW. Licensed under Modified BSD Licence. See licence.txt for details.
*/
import java.util.ArrayList;
import com.ilearnrw.reader.R;
import com.ilearnrw.reader.interfaces.OnSettingUpdated;
import com.ilearnrw.reader.types.adapters.ColorOptionsAdapter;
import com.ilearnrw.reader.types.adapters.ColorPresetAdapter;
import com.ilearnrw.reader.utils.Helper;
import android.annotation.SuppressLint;
import android.app.AlertDialog;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.res.TypedArray;
import android.graphics.Color;
import android.os.Parcel;
import android.os.Parcelable;
import android.preference.Preference;
import android.preference.PreferenceManager;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ListView;
import android.widget.AdapterView.OnItemClickListener;
/**
 * Preference that stores three related colors (text, background, highlight) as a single
 * persisted string of the form {@code "<text> <background> <highlight>"}, each value a
 * signed ARGB int.  Clicking the preference opens a dialog offering ready-made presets
 * and per-color customization.
 */
public class ColorOptionPreference extends Preference implements OnSettingUpdated {
    private int mColorText, mColorBackground, mColorHighlight;
    // persisted representation: "<text> <background> <highlight>" (decimal ARGB ints)
    private String mColorValues;
    private Context mContext;
    private ArrayList<Preset> ps;        // ready-made color presets shown in the dialog
    private ArrayList<ColorOption> cops; // per-color options shown in the dialog

    public ColorOptionPreference(Context context) {
        super(context);
        initPreference(context, null);
    }

    public ColorOptionPreference(Context context, AttributeSet attrs) {
        super(context, attrs);
        initPreference(context, attrs);
    }

    public ColorOptionPreference(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        initPreference(context, attrs);
    }

    /** Common constructor setup: widget layout plus the preset and option lists. */
    private void initPreference(Context context, AttributeSet attrs) {
        mContext = context;
        setWidgetLayoutResource(R.layout.widget_preference_color_options);
        ps = new ArrayList<Preset>();
        ps.add(new Preset("Black on White", "#000000", "#FFFFFF", "#FFFF00"));
        ps.add(new Preset("Black on White (low contrast)", "#454545", "#F2F2F2", "#F4F4F4"));
        ps.add(new Preset("White on Black", "#FFFFFF", "#000000", "#698818"));
        ps.add(new Preset("White on Black (low contrast)", "#D2D2D2", "#202020", "#5D693F"));
        ps.add(new Preset("Dark on Cream", "#000000", "#FFFFCC", "#A9BD77"));
        ps.add(new Preset("Dark on Cream (low contrast)", "#505050", "#FFFFCC", "#D1E79A"));
        ps.add(new Preset("Dark on gray", "#454545", "#CCCCCC", "#FFFFAA"));
        ps.add(new Preset("Green on Black", "#00CC00", "#000000", "#100C7A"));
        ps.add(new Preset("Green on Black (low contrast)", "#70DC70", "#202020", "#476687"));
        ps.add(new Preset("Green on Cream", "#008000", "#FFFFCC", "#D5D543"));
        cops = new ArrayList<ColorOption>();
        cops.add(new ColorOption("Text", "#000000", "#000000"));
        cops.add(new ColorOption("Background", "#FFFFCC", "#FFFFCC"));
        cops.add(new ColorOption("Highlight", "#D1E79A", "#D1E79A"));
    }

    /**
     * Serializes the three current colors into the persisted string form.  Extracted
     * because the original code duplicated this expression in three places.
     */
    private String buildColorValues() {
        return Integer.toString(mColorText) + " " + Integer.toString(mColorBackground) + " " + Integer.toString(mColorHighlight);
    }

    @Override
    protected void onBindView(View view) {
        super.onBindView(view);
        // paint the three small swatch boxes in the preference row
        final View textBox = view.findViewById(R.id.color_options_widget_text_box);
        final View bgBox = view.findViewById(R.id.color_options_widget_bg_box);
        final View hlBox = view.findViewById(R.id.color_options_widget_hl_box);
        if (textBox != null)
            textBox.setBackgroundColor(mColorText);
        if (bgBox != null)
            bgBox.setBackgroundColor(mColorBackground);
        if (hlBox != null)
            hlBox.setBackgroundColor(mColorHighlight);
    }

    /** Shows the preset/option picker dialog. */
    @SuppressLint("InflateParams")
    @Override
    protected void onClick() {
        LayoutInflater inflater = LayoutInflater.from(mContext);
        View dialogView = inflater.inflate(R.layout.dialog_coloring_settings, null);
        AlertDialog.Builder builder = new AlertDialog.Builder(mContext);
        builder.setView(dialogView);
        builder.setPositiveButton(mContext.getString(android.R.string.ok), null);
        final AlertDialog alertDialog = builder.create();

        ListView presets = (ListView) dialogView.findViewById(R.id.lv_presets);
        ColorPresetAdapter adapter = new ColorPresetAdapter(mContext, R.layout.row_default, ps);
        presets.setAdapter(adapter);
        presets.setOnItemClickListener(new OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                Preset item = ps.get(position);
                mColorText = Helper.hexToColor(Helper.fixHex(item.getTextColor()));
                mColorBackground = Helper.hexToColor(Helper.fixHex(item.getBackgroundColor()));
                mColorHighlight = Helper.hexToColor(Helper.fixHex(item.getHighlightColor()));
                mColorValues = buildColorValues();
                persistString(mColorValues);
                // Batch the three writes into a single editor commit instead of three
                // separate edit()/apply() round-trips as the original code did.
                SharedPreferences.Editor editor = PreferenceManager.getDefaultSharedPreferences(mContext).edit();
                editor.putInt(mContext.getString(R.string.pref_text_color_title), mColorText);
                editor.putInt(mContext.getString(R.string.pref_background_color_title), mColorBackground);
                editor.putInt(mContext.getString(R.string.pref_highlight_color_title), mColorHighlight);
                editor.apply();
                notifyChanged();
                alertDialog.dismiss();
            }
        });

        ListView options = (ListView) dialogView.findViewById(R.id.lv_options);
        ColorOptionsAdapter coadapter = new ColorOptionsAdapter(mContext, R.layout.row_color_options, cops, this);
        options.setAdapter(coadapter);
        alertDialog.show();
        super.onClick();
    }

    @Override
    protected Object onGetDefaultValue(TypedArray a, int index) {
        return a.getInteger(index, 0);
    }

    @Override
    protected void onSetInitialValue(boolean restorePersistedValue,
            Object defaultValue) {
        if (restorePersistedValue) {
            mColorValues = getPersistedString(mColorValues);
            try {
                String[] values = mColorValues.split(" ");
                mColorText = Integer.parseInt(values[0]);
                mColorBackground = Integer.parseInt(values[1]);
                mColorHighlight = Integer.parseInt(values[2]);
                return;
            } catch (RuntimeException ignored) {
                // FIX: a corrupt or missing persisted value (NumberFormatException,
                // ArrayIndexOutOfBoundsException, NullPointerException) used to crash
                // here; fall through and re-initialize with the defaults instead.
            }
        }
        // defaults: black text on a cream background with a pale-green highlight
        mColorText = Color.argb(255, 0, 0, 0);
        mColorBackground = Color.argb(255, 255, 255, 204);
        mColorHighlight = Color.argb(255, 209, 231, 154);
        mColorValues = buildColorValues();
        persistString(mColorValues);
    }

    @Override
    protected Parcelable onSaveInstanceState() {
        final Parcelable superState = super.onSaveInstanceState();
        // persistent preferences are restored from storage, no extra state needed
        if (isPersistent()) return superState;
        final SavedState state = new SavedState(superState);
        state.text = mColorText;
        state.background = mColorBackground;
        state.hl = mColorHighlight;
        return state;
    }

    @Override
    protected void onRestoreInstanceState(Parcelable state) {
        if (!state.getClass().equals(SavedState.class)) {
            super.onRestoreInstanceState(state);
            return;
        }
        SavedState myState = (SavedState) state;
        super.onRestoreInstanceState(myState.getSuperState());
        mColorText = myState.text;
        mColorBackground = myState.background;
        mColorHighlight = myState.hl;
        notifyChanged();
    }

    /** Returns the persisted "text background highlight" string form. */
    public String getValue() {
        return mColorValues;
    }

    /** Parcelable holder for the three colors when the preference is not persistent. */
    private static class SavedState extends BaseSavedState {
        int text, background, hl;

        public SavedState(Parcelable superState) {
            super(superState);
        }

        public SavedState(Parcel source) {
            super(source);
            text = source.readInt();
            background = source.readInt();
            hl = source.readInt();
        }

        @Override
        public void writeToParcel(Parcel dest, int flags) {
            super.writeToParcel(dest, flags);
            dest.writeInt(text);
            dest.writeInt(background);
            dest.writeInt(hl);
        }

        @SuppressWarnings("unused")
        public static final Parcelable.Creator<SavedState> CREATOR = new Parcelable.Creator<SavedState>() {
            @Override
            public SavedState createFromParcel(Parcel source) {
                return new SavedState(source);
            }

            @Override
            public SavedState[] newArray(int size) {
                return new SavedState[size];
            }
        };
    }

    /**
     * Callback from {@link ColorOptionsAdapter} when one color is changed individually.
     *
     * @param pos   0 = text, 1 = background, 2 = highlight
     * @param color the new ARGB color value
     */
    @Override
    public void onSettingUpdated(Integer pos, Integer color) {
        switch (pos) {
        case 0:
            mColorText = color;
            break;
        case 1:
            mColorBackground = color;
            break;
        case 2:
            mColorHighlight = color;
            break;
        }
        mColorValues = buildColorValues();
        persistString(mColorValues);
        notifyChanged();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.kubernetes.replication_controllers;
import java.util.Map;
import io.fabric8.kubernetes.api.model.DoneableReplicationController;
import io.fabric8.kubernetes.api.model.ReplicationController;
import io.fabric8.kubernetes.api.model.ReplicationControllerBuilder;
import io.fabric8.kubernetes.api.model.ReplicationControllerList;
import io.fabric8.kubernetes.api.model.ReplicationControllerSpec;
import io.fabric8.kubernetes.client.Watch;
import io.fabric8.kubernetes.client.Watcher;
import io.fabric8.kubernetes.client.dsl.FilterWatchListMultiDeletable;
import io.fabric8.kubernetes.client.dsl.MixedOperation;
import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation;
import io.fabric8.kubernetes.client.dsl.RollableScalableResource;
import org.apache.camel.Exchange;
import org.apache.camel.component.kubernetes.AbstractKubernetesEndpoint;
import org.apache.camel.component.kubernetes.KubernetesConstants;
import org.apache.camel.component.kubernetes.KubernetesOperations;
import org.apache.camel.impl.DefaultProducer;
import org.apache.camel.util.MessageHelper;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class KubernetesReplicationControllersProducer extends DefaultProducer {
private static final Logger LOG = LoggerFactory
.getLogger(KubernetesReplicationControllersProducer.class);
/**
 * Creates a producer bound to the given Kubernetes endpoint, which supplies the
 * Kubernetes client and the component configuration.
 *
 * @param endpoint the endpoint this producer sends for
 */
public KubernetesReplicationControllersProducer(AbstractKubernetesEndpoint endpoint) {
    super(endpoint);
}
/**
 * Returns the endpoint narrowed to {@link AbstractKubernetesEndpoint} so callers get
 * typed access to the Kubernetes client and configuration.
 */
@Override
public AbstractKubernetesEndpoint getEndpoint() {
    return (AbstractKubernetesEndpoint) super.getEndpoint();
}
/**
 * Resolves the requested operation and dispatches to the matching handler.
 * <p/>
 * The operation configured on the endpoint takes precedence; when none is configured the
 * {@code KUBERNETES_OPERATION} message header is consulted instead.
 *
 * @throws IllegalArgumentException if the resolved operation is not supported
 */
@Override
public void process(Exchange exchange) throws Exception {
    String configuredOperation = getEndpoint().getKubernetesConfiguration().getOperation();
    String operation = ObjectHelper.isEmpty(configuredOperation)
            ? exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_OPERATION, String.class)
            : configuredOperation;

    switch (operation) {

    case KubernetesOperations.LIST_REPLICATION_CONTROLLERS_OPERATION:
        doList(exchange, operation);
        break;

    case KubernetesOperations.LIST_REPLICATION_CONTROLLERS_BY_LABELS_OPERATION:
        doListReplicationControllersByLabels(exchange, operation);
        break;

    case KubernetesOperations.GET_REPLICATION_CONTROLLER_OPERATION:
        doGetReplicationController(exchange, operation);
        break;

    case KubernetesOperations.CREATE_REPLICATION_CONTROLLER_OPERATION:
        doCreateReplicationController(exchange, operation);
        break;

    case KubernetesOperations.DELETE_REPLICATION_CONTROLLER_OPERATION:
        doDeleteReplicationController(exchange, operation);
        break;

    case KubernetesOperations.SCALE_REPLICATION_CONTROLLER_OPERATION:
        doScaleReplicationController(exchange, operation);
        break;

    default:
        throw new IllegalArgumentException("Unsupported operation "
                + operation);
    }
}
/**
 * Lists replication controllers, scoped to the namespace named in the
 * {@code KUBERNETES_NAMESPACE_NAME} header when present, otherwise across all namespaces.
 * The resulting items are set as the OUT body.
 */
protected void doList(Exchange exchange, String operation) throws Exception {
    String namespaceName = exchange.getIn().getHeader(
            KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);

    ReplicationControllerList rcList = ObjectHelper.isEmpty(namespaceName)
            ? getEndpoint().getKubernetesClient().replicationControllers().inAnyNamespace().list()
            : getEndpoint().getKubernetesClient().replicationControllers().inNamespace(namespaceName).list();

    MessageHelper.copyHeaders(exchange.getIn(), exchange.getOut(), true);
    exchange.getOut().setBody(rcList.getItems());
}
/**
 * Lists replication controllers filtered by the label map carried in the
 * {@code KUBERNETES_REPLICATION_CONTROLLERS_LABELS} header, scoped to the namespace from
 * the {@code KUBERNETES_NAMESPACE_NAME} header when present, otherwise across all
 * namespaces.  The resulting items are set as the OUT body.
 */
protected void doListReplicationControllersByLabels(Exchange exchange,
    String operation) throws Exception {
    Map<String, String> labels = exchange.getIn().getHeader(
            KubernetesConstants.KUBERNETES_REPLICATION_CONTROLLERS_LABELS,
            Map.class);
    String namespaceName = exchange.getIn().getHeader(
            KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);

    // FIX: the original looped over the labels calling withLabel(...) and DISCARDED the
    // returned filtered operation; that only applies the selector if the client mutates
    // the operation in place, which newer fabric8 clients do not.  Chaining withLabels(..)
    // into the list() call applies the selector correctly on all client generations.
    ReplicationControllerList rcList;
    if (!ObjectHelper.isEmpty(namespaceName)) {
        rcList = getEndpoint().getKubernetesClient()
                .replicationControllers().inNamespace(namespaceName)
                .withLabels(labels).list();
    } else {
        rcList = getEndpoint().getKubernetesClient()
                .replicationControllers().inAnyNamespace()
                .withLabels(labels).list();
    }

    MessageHelper.copyHeaders(exchange.getIn(), exchange.getOut(), true);
    exchange.getOut().setBody(rcList.getItems());
}
/**
 * Fetches a single replication controller by name within a namespace and sets it as the
 * OUT body (may be {@code null} if no such controller exists).
 *
 * @throws IllegalArgumentException if the controller-name or namespace header is missing
 */
protected void doGetReplicationController(Exchange exchange,
    String operation) throws Exception {
    String rcName = exchange.getIn().getHeader(
            KubernetesConstants.KUBERNETES_REPLICATION_CONTROLLER_NAME,
            String.class);
    String namespaceName = exchange.getIn().getHeader(
            KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);

    if (ObjectHelper.isEmpty(rcName)) {
        String message = "Get a specific replication controller require specify a replication controller name";
        LOG.error(message);
        throw new IllegalArgumentException(message);
    }
    if (ObjectHelper.isEmpty(namespaceName)) {
        String message = "Get a specific replication controller require specify a namespace name";
        LOG.error(message);
        throw new IllegalArgumentException(message);
    }

    ReplicationController rc = getEndpoint().getKubernetesClient()
            .replicationControllers().inNamespace(namespaceName)
            .withName(rcName).get();

    MessageHelper.copyHeaders(exchange.getIn(), exchange.getOut(), true);
    exchange.getOut().setBody(rc);
}
/**
 * Creates a ReplicationController from the name, namespace, spec and
 * (optional) labels headers, and places the created resource on the out
 * message body.
 *
 * <p>Required headers: KUBERNETES_REPLICATION_CONTROLLER_NAME,
 * KUBERNETES_NAMESPACE_NAME and KUBERNETES_REPLICATION_CONTROLLER_SPEC; an
 * {@link IllegalArgumentException} is thrown (and logged) when any is
 * missing. KUBERNETES_REPLICATION_CONTROLLERS_LABELS is optional and may be
 * null — TODO confirm the builder tolerates null labels.
 *
 * @param exchange  the Camel exchange carrying the request headers
 * @param operation the producer operation name (unused here)
 * @throws Exception if a mandatory header is missing or the client call fails
 */
protected void doCreateReplicationController(Exchange exchange,
        String operation) throws Exception {
    String rcName = exchange.getIn().getHeader(
            KubernetesConstants.KUBERNETES_REPLICATION_CONTROLLER_NAME,
            String.class);
    String namespaceName = exchange.getIn().getHeader(
            KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);
    ReplicationControllerSpec rcSpec = exchange.getIn().getHeader(
            KubernetesConstants.KUBERNETES_REPLICATION_CONTROLLER_SPEC,
            ReplicationControllerSpec.class);
    if (ObjectHelper.isEmpty(rcName)) {
        LOG.error("Create a specific replication controller require specify a replication controller name");
        throw new IllegalArgumentException(
                "Create a specific replication controller require specify a replication controller name");
    }
    if (ObjectHelper.isEmpty(namespaceName)) {
        LOG.error("Create a specific replication controller require specify a namespace name");
        throw new IllegalArgumentException(
                "Create a specific replication controller require specify a namespace name");
    }
    if (ObjectHelper.isEmpty(rcSpec)) {
        LOG.error("Create a specific replication controller require specify a replication controller spec bean");
        throw new IllegalArgumentException(
                "Create a specific replication controller require specify a replication controller spec bean");
    }
    // Raw Map.class lookup — the generic parameters cannot be expressed in a
    // class literal, so this assignment is inherently unchecked.
    Map<String, String> labels = exchange.getIn().getHeader(
            KubernetesConstants.KUBERNETES_REPLICATION_CONTROLLERS_LABELS,
            Map.class);
    ReplicationController rcCreating = new ReplicationControllerBuilder()
            .withNewMetadata().withName(rcName).withLabels(labels)
            .endMetadata().withSpec(rcSpec).build();
    // Initialize directly from the client call instead of declaring a dead
    // null value first (the original `rc = null` was always overwritten).
    ReplicationController rc = getEndpoint().getKubernetesClient()
            .replicationControllers().inNamespace(namespaceName)
            .create(rcCreating);
    MessageHelper.copyHeaders(exchange.getIn(), exchange.getOut(), true);
    exchange.getOut().setBody(rc);
}
/**
 * Deletes the named ReplicationController in the given namespace and places
 * the deletion result flag on the out message body.
 *
 * <p>Required headers: KUBERNETES_REPLICATION_CONTROLLER_NAME and
 * KUBERNETES_NAMESPACE_NAME; an {@link IllegalArgumentException} is thrown
 * (and logged) when either is missing.
 *
 * @param exchange  the Camel exchange carrying the request headers
 * @param operation the producer operation name (unused here)
 * @throws Exception if a mandatory header is missing or the client call fails
 */
protected void doDeleteReplicationController(Exchange exchange,
        String operation) throws Exception {
    // Pull both mandatory headers up front, then validate in header order.
    String rcName = exchange.getIn().getHeader(
            KubernetesConstants.KUBERNETES_REPLICATION_CONTROLLER_NAME,
            String.class);
    String namespaceName = exchange.getIn().getHeader(
            KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);
    if (ObjectHelper.isEmpty(rcName)) {
        LOG.error("Delete a specific replication controller require specify a replication controller name");
        throw new IllegalArgumentException(
                "Delete a specific replication controller require specify a replication controller name");
    }
    if (ObjectHelper.isEmpty(namespaceName)) {
        LOG.error("Delete a specific replication controller require specify a namespace name");
        throw new IllegalArgumentException(
                "Delete a specific replication controller require specify a namespace name");
    }
    boolean deleted = getEndpoint().getKubernetesClient()
            .replicationControllers()
            .inNamespace(namespaceName)
            .withName(rcName)
            .delete();
    MessageHelper.copyHeaders(exchange.getIn(), exchange.getOut(), true);
    exchange.getOut().setBody(deleted);
}
/**
 * Scales the named ReplicationController to the requested replica count,
 * waiting for the scale to take effect, and places the resulting replica
 * count on the out message body.
 *
 * <p>Required headers: KUBERNETES_REPLICATION_CONTROLLER_NAME,
 * KUBERNETES_NAMESPACE_NAME and KUBERNETES_REPLICATION_CONTROLLER_REPLICAS;
 * an {@link IllegalArgumentException} is thrown (and logged) when any is
 * missing.
 *
 * @param exchange  the Camel exchange carrying the request headers
 * @param operation the producer operation name (unused here)
 * @throws Exception if a mandatory header is missing or the client call fails
 */
protected void doScaleReplicationController(Exchange exchange,
        String operation) throws Exception {
    String rcName = exchange.getIn().getHeader(
            KubernetesConstants.KUBERNETES_REPLICATION_CONTROLLER_NAME,
            String.class);
    String namespaceName = exchange.getIn().getHeader(
            KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);
    Integer desiredReplicas = exchange.getIn().getHeader(
            KubernetesConstants.KUBERNETES_REPLICATION_CONTROLLER_REPLICAS,
            Integer.class);
    if (ObjectHelper.isEmpty(rcName)) {
        LOG.error("Scale a specific replication controller require specify a replication controller name");
        throw new IllegalArgumentException(
                "Scale a specific replication controller require specify a replication controller name");
    }
    if (ObjectHelper.isEmpty(namespaceName)) {
        LOG.error("Scale a specific replication controller require specify a namespace name");
        throw new IllegalArgumentException(
                "Scale a specific replication controller require specify a namespace name");
    }
    if (ObjectHelper.isEmpty(desiredReplicas)) {
        LOG.error("Scale a specific replication controller require specify a replicas number");
        throw new IllegalArgumentException(
                "Scale a specific replication controller require specify a replicas number");
    }
    // The second argument (true) makes the client wait until the desired
    // replica count is reached before returning.
    ReplicationController scaled = getEndpoint().getKubernetesClient()
            .replicationControllers()
            .inNamespace(namespaceName)
            .withName(rcName)
            .scale(desiredReplicas, true);
    MessageHelper.copyHeaders(exchange.getIn(), exchange.getOut(), true);
    exchange.getOut().setBody(scaled.getStatus().getReplicas());
}
}
| |
// Copyright 2008 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.opengse;
import com.google.opengse.util.IteratorEnumeration;
import com.google.opengse.httputil.Cookies;
import java.util.*;
import java.io.UnsupportedEncodingException;
import java.io.IOException;
import java.io.BufferedReader;
import javax.servlet.ServletInputStream;
import javax.servlet.http.Cookie;
/**
 * Adapts an OpenGSE {@link HttpRequest} to the
 * {@code javax.servlet.http.HttpServletRequest} contract by delegating to the
 * wrapped request, and to its {@link ConnectionInformation} for
 * connection-level queries. Unsupported operations are inherited from
 * {@link HttpServletRequestUnsupported}.
 *
 * @author Mike Jennings
 */
public class HttpServletRequestAdapter
    extends HttpServletRequestUnsupported {

  /** Fallback used when the wrapped request exposes no connection details. */
  private static final ConnectionInformation NO_CONNECTION_INFO =
      new NoConnectionInformation();

  private final HttpRequest delegate;
  private final ConnectionInformation connectInfo;
  // Lazily created; access only via attributeMap().
  private Map<String, Object> attributes = null;
  // Cached parsed cookies; stays null while parsing keeps failing.
  private ArrayList<Cookie> cookies;

  public HttpServletRequestAdapter(final HttpRequest delegate) {
    super();
    this.delegate = delegate;
    final ConnectionInformation ci = delegate.getConnectionInformation();
    this.connectInfo = (ci != null) ? ci : NO_CONNECTION_INFO;
  }

  @Override
  public Cookie[] getCookies() {
    if (cookies == null) {
      // Parse once and cache; a null parse result means "no cookies known"
      // and is reported as null per the servlet contract.
      cookies = Cookies.parse(getHeaders("Cookie"));
      if (cookies == null) {
        return null;
      }
    }
    return cookies.toArray(new Cookie[cookies.size()]);
  }

  @Override
  public String getHeader(final String name) {
    return delegate.getHeader(name);
  }

  @Override
  public Enumeration<String> getHeaders(final String name) {
    return delegate.getHeaders(name);
  }

  @Override
  public Enumeration<String> getHeaderNames() {
    return delegate.getHeaderNames();
  }

  @Override
  public String getMethod() {
    return delegate.getMethod();
  }

  @Override
  public String getPathTranslated() {
    return delegate.getPathTranslated();
  }

  @Override
  public String getQueryString() {
    return delegate.getQueryString();
  }

  @Override
  public String getRequestURI() {
    return delegate.getRequestURI();
  }

  @Override
  public StringBuffer getRequestURL() {
    return delegate.getRequestURL();
  }

  /** Returns the attribute map, creating it on first use. */
  private Map<String, Object> attributeMap() {
    if (attributes == null) {
      attributes = new HashMap<String, Object>();
    }
    return attributes;
  }

  @Override
  public Object getAttribute(final String name) {
    return attributeMap().get(name);
  }

  @Override
  public Enumeration<String> getAttributeNames() {
    return new IteratorEnumeration<String>(attributeMap().keySet().iterator());
  }

  @Override
  public void setCharacterEncoding(final String env)
      throws UnsupportedEncodingException {
    delegate.setCharacterEncoding(env);
  }

  @Override
  public ServletInputStream getInputStream() throws IOException {
    return delegate.getInputStream();
  }

  @Override
  public String getParameter(final String name) {
    // First value wins when a parameter has several values.
    final String[] values = delegate.getParameterMap().get(name);
    return (values == null) ? null : values[0];
  }

  @Override
  public Enumeration<String> getParameterNames() {
    return new IteratorEnumeration<String>(
        delegate.getParameterMap().keySet().iterator());
  }

  @Override
  public String[] getParameterValues(final String name) {
    return delegate.getParameterMap().get(name);
  }

  @Override
  public Map<String, String[]> getParameterMap() {
    return delegate.getParameterMap();
  }

  @Override
  public String getProtocol() {
    return delegate.getProtocol();
  }

  @Override
  public String getScheme() {
    return delegate.getScheme();
  }

  @Override
  public String getServerName() {
    return connectInfo.getServerName();
  }

  @Override
  public int getServerPort() {
    return connectInfo.getServerPort();
  }

  @Override
  public BufferedReader getReader() throws IOException {
    return delegate.getReader();
  }

  @Override
  public String getRemoteAddr() {
    return connectInfo.getRemoteAddr();
  }

  @Override
  public String getRemoteHost() {
    return connectInfo.getRemoteHost();
  }

  @Override
  public void setAttribute(final String name, final Object value) {
    // Per the servlet contract, a null value is equivalent to removal.
    if (value == null) {
      attributeMap().remove(name);
    } else {
      attributeMap().put(name, value);
    }
  }

  @Override
  public void removeAttribute(final String name) {
    attributeMap().remove(name);
  }

  @Override
  public Locale getLocale() {
    return delegate.getLocale();
  }

  @Override
  public Enumeration<Locale> getLocales() {
    return delegate.getLocales();
  }

  @Override
  public boolean isSecure() {
    return connectInfo.isSecure();
  }

  @Override
  public int getRemotePort() {
    return connectInfo.getRemotePort();
  }

  @Override
  public String getLocalName() {
    return connectInfo.getLocalName();
  }

  @Override
  public String getLocalAddr() {
    return connectInfo.getLocalAddr();
  }

  @Override
  public int getLocalPort() {
    return connectInfo.getLocalPort();
  }

  /**
   * A {@link ConnectionInformation} that reports "unknown" for everything:
   * null names/addresses, -1 for the server port, 0 for the other ports,
   * and an insecure connection.
   */
  private static final class NoConnectionInformation
      implements ConnectionInformation {

    public String getServerName() {
      return null;
    }

    public int getServerPort() {
      return -1;
    }

    public String getRemoteAddr() {
      return null;
    }

    public String getRemoteHost() {
      return null;
    }

    public boolean isSecure() {
      return false;
    }

    public int getRemotePort() {
      return 0;
    }

    public String getLocalName() {
      return null;
    }

    public String getLocalAddr() {
      return null;
    }

    public int getLocalPort() {
      return 0;
    }
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.