gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
// $ANTLR 3.1.1 d:\\work\\eclipse\\xerial\\xerial-storage\\src\\main\\java\\org\\xerial\\amoeba\\query\\impl\\AmoebaQueryTreeParser.g 2009-07-06 17:44:01
package org.xerial.amoeba.query.impl;
import org.antlr.runtime.*;
import org.antlr.runtime.tree.*;import java.util.Stack;
import java.util.List;
import java.util.ArrayList;
// NOTE(review): This class is auto-generated by ANTLR 3.1.1 from
// AmoebaQueryTreeParser.g. Do NOT hand-edit the code lines: the token-type
// constants, FOLLOW bitsets, and rule bodies must stay in exact sync with the
// grammar, and any manual change is lost on regeneration. Comments below are
// review annotations only.
public class AmoebaQueryTreeParser extends TreeParser {
// Token display names, indexed by token type. Slots 0-3 are ANTLR-reserved
// entries (<invalid>, <EOR>, <DOWN>, <UP>); user tokens start at index 4.
public static final String[] tokenNames = new String[] {
"<invalid>", "<EOR>", "<DOWN>", "<UP>", "NUMBER", "STRING", "SELECTION", "CONDITION", "PROJECTION", "PROJECT_ALL", "ORDER_BY", "REF_ALL", "TARGET", "FUNCTION", "AND", "OR", "INPUT", "OUTPUT", "ASSIGN_LABEL", "COMPARE", "ATTRIBUTE", "REF", "VALUE", "CONTAINED_IN", "RELATION", "INSERT", "VALUE_SET", "NEW_VALUE", "NEW_RELATION", "UPDATE", "ATTRIBUTE_DEF", "SORT_TARGET", "ONE_TO_MANY", "ONE_TO_ONE", "OBJECT_DEF", "OBJECT", "LANG", "ML_COMMENT", "LINE_COMMENT", "Dot", "Colon", "Comma", "RParen", "LParen", "Quot", "Apos", "At", "Wildcard", "EQUAL", "NOT_EQUAL", "LESS", "LEQ", "GREATER", "GEQ", "SPLIT", "Select", "From", "Where", "In", "Insert", "Into", "Object", "Relationship", "HasMany", "HasOne", "DIVIDE", "DISTINCT", "RANGE", "DataType", "Letter", "Digit", "Digits", "NameChar", "WhiteSpaceChar", "Quot_s", "Apos_s", "StringLiteral", "Name", "QName", "'by'", "'auto'", "'AUTO'", "'update'", "'set'", "'{'", "'}'", "'as'", "'order'", "'or'", "'and'", "'like'"
};
// Generated token-type constants. Each value is the token's index into
// tokenNames above; T__NN entries are anonymous literal tokens ('by', 'auto',
// etc.). The unordered listing is ANTLR output order -- leave as generated.
public static final int DataType=68;
public static final int OUTPUT=17;
public static final int T__86=86;
public static final int INPUT=16;
public static final int LANG=36;
public static final int Quot_s=74;
public static final int Apos=45;
public static final int Digit=70;
public static final int Relationship=62;
public static final int T__80=80;
public static final int TARGET=12;
public static final int NUMBER=4;
public static final int LEQ=51;
public static final int VALUE_SET=26;
public static final int REF=21;
public static final int PROJECT_ALL=9;
public static final int GEQ=53;
public static final int VALUE=22;
public static final int OR=15;
public static final int T__87=87;
public static final int Letter=69;
public static final int DIVIDE=65;
public static final int AND=14;
public static final int Insert=59;
public static final int LESS=50;
public static final int Comma=41;
public static final int FUNCTION=13;
public static final int Dot=39;
public static final int Into=60;
public static final int Object=61;
public static final int CONDITION=7;
public static final int ATTRIBUTE_DEF=30;
public static final int ONE_TO_ONE=33;
public static final int OBJECT_DEF=34;
public static final int GREATER=52;
public static final int HasMany=63;
public static final int ML_COMMENT=37;
public static final int Colon=40;
public static final int NEW_VALUE=27;
public static final int At=46;
public static final int NOT_EQUAL=49;
public static final int OBJECT=35;
public static final int DISTINCT=66;
public static final int RANGE=67;
public static final int NameChar=72;
public static final int CONTAINED_IN=23;
public static final int T__89=89;
public static final int SPLIT=54;
public static final int ORDER_BY=10;
public static final int Wildcard=47;
public static final int T__79=79;
public static final int STRING=5;
public static final int NEW_RELATION=28;
public static final int Where=57;
public static final int RELATION=24;
public static final int UPDATE=29;
public static final int RParen=42;
public static final int Apos_s=75;
public static final int StringLiteral=76;
public static final int T__88=88;
public static final int LINE_COMMENT=38;
public static final int In=58;
public static final int Name=77;
public static final int LParen=43;
public static final int T__84=84;
public static final int T__90=90;
public static final int From=56;
public static final int HasOne=64;
public static final int Select=55;
public static final int EQUAL=48;
public static final int COMPARE=19;
public static final int REF_ALL=11;
public static final int ONE_TO_MANY=32;
public static final int QName=78;
public static final int SELECTION=6;
public static final int WhiteSpaceChar=73;
public static final int EOF=-1;
public static final int T__85=85;
public static final int T__82=82;
public static final int T__81=81;
public static final int PROJECTION=8;
public static final int ATTRIBUTE=20;
public static final int Quot=44;
public static final int T__83=83;
public static final int SORT_TARGET=31;
public static final int INSERT=25;
public static final int Digits=71;
public static final int ASSIGN_LABEL=18;
// delegates
// delegators
// Convenience constructor: wraps the tree stream with a fresh shared
// recognizer state.
public AmoebaQueryTreeParser(TreeNodeStream input) {
this(input, new RecognizerSharedState());
}
// Constructor allowing an externally shared recognizer state (used when
// several recognizers cooperate).
public AmoebaQueryTreeParser(TreeNodeStream input, RecognizerSharedState state) {
super(input, state);
}
public String[] getTokenNames() { return AmoebaQueryTreeParser.tokenNames; }
public String getGrammarFileName() { return "d:\\work\\eclipse\\xerial\\xerial-storage\\src\\main\\java\\org\\xerial\\amoeba\\query\\impl\\AmoebaQueryTreeParser.g"; }
// $ANTLR start "query"
// d:\\work\\eclipse\\xerial\\xerial-storage\\src\\main\\java\\org\\xerial\\amoeba\\query\\impl\\AmoebaQueryTreeParser.g:21:1: query : selectClause ;
// Rule "query": the start rule; a query is a single selectClause.
// On a recognition error it reports and resynchronizes rather than throwing.
public final void query() throws RecognitionException {
try {
// d:\\work\\eclipse\\xerial\\xerial-storage\\src\\main\\java\\org\\xerial\\amoeba\\query\\impl\\AmoebaQueryTreeParser.g:22:2: ( selectClause )
// d:\\work\\eclipse\\xerial\\xerial-storage\\src\\main\\java\\org\\xerial\\amoeba\\query\\impl\\AmoebaQueryTreeParser.g:22:4: selectClause
{
pushFollow(FOLLOW_selectClause_in_query45);
selectClause();
state._fsp--;
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
}
return ;
}
// $ANTLR end "query"
// $ANTLR start "selectClause"
// d:\\work\\eclipse\\xerial\\xerial-storage\\src\\main\\java\\org\\xerial\\amoeba\\query\\impl\\AmoebaQueryTreeParser.g:26:1: selectClause : ^( Select expr ) ;
// Rule "selectClause": matches a tree of the form ^(Select expr), i.e. a
// Select node (DOWN) containing one expr child (UP).
public final void selectClause() throws RecognitionException {
try {
// d:\\work\\eclipse\\xerial\\xerial-storage\\src\\main\\java\\org\\xerial\\amoeba\\query\\impl\\AmoebaQueryTreeParser.g:27:2: ( ^( Select expr ) )
// d:\\work\\eclipse\\xerial\\xerial-storage\\src\\main\\java\\org\\xerial\\amoeba\\query\\impl\\AmoebaQueryTreeParser.g:27:4: ^( Select expr )
{
match(input,Select,FOLLOW_Select_in_selectClause59);
match(input, Token.DOWN, null);
pushFollow(FOLLOW_expr_in_selectClause61);
expr();
state._fsp--;
match(input, Token.UP, null);
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
}
return ;
}
// $ANTLR end "selectClause"
// $ANTLR start "expr"
// d:\\work\\eclipse\\xerial\\xerial-storage\\src\\main\\java\\org\\xerial\\amoeba\\query\\impl\\AmoebaQueryTreeParser.g:31:1: expr : QName ;
// Rule "expr": currently an expression is just a single QName token.
public final void expr() throws RecognitionException {
try {
// d:\\work\\eclipse\\xerial\\xerial-storage\\src\\main\\java\\org\\xerial\\amoeba\\query\\impl\\AmoebaQueryTreeParser.g:31:5: ( QName )
// d:\\work\\eclipse\\xerial\\xerial-storage\\src\\main\\java\\org\\xerial\\amoeba\\query\\impl\\AmoebaQueryTreeParser.g:31:7: QName
{
match(input,QName,FOLLOW_QName_in_expr74);
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
}
return ;
}
// $ANTLR end "expr"
// Delegated rules
// Generated FOLLOW bitsets used for error recovery; each encodes the token
// set that may follow the referenced rule invocation. Do not edit by hand.
public static final BitSet FOLLOW_selectClause_in_query45 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_Select_in_selectClause59 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_expr_in_selectClause61 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_QName_in_expr74 = new BitSet(new long[]{0x0000000000000002L});
}
| |
/*
* Copyright (C) 2010 Klaus Reimer <k@ailis.de>
* See LICENSE.txt for licensing information.
*/
package de.ailis.gramath;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.DoubleBuffer;
/**
* Base class for vectors with three double elements.
*
* @author Klaus Reimer (k@ailis.de)
*/
public abstract class Vector3d extends Vector
{
    /** Serial version UID. */
    private static final long serialVersionUID = 1L;

    /** The X coordinate. */
    protected double x;

    /** The Y coordinate. */
    protected double y;

    /** The Z coordinate. */
    protected double z;

    /** The buffer representation of the vector (lazily created, cached). */
    private transient DoubleBuffer buffer;

    /** If cached length is valid. */
    private boolean lengthValid = false;

    /** The cached vector length. */
    private double length;

    /** Temporary matrix for internal calculations (one per thread). */
    private static final ThreadLocal<MutableMatrix4d> tmpMatrix = new ThreadLocal<MutableMatrix4d>();

    /**
     * Constructs an uninitialized vector.
     */
    protected Vector3d()
    {
        // Empty
    }

    /**
     * Constructs a new vector with the specified elements.
     *
     * @param x
     *            The X coordinate
     * @param y
     *            The Y coordinate
     * @param z
     *            The Z coordinate
     */
    public Vector3d(final double x, final double y, final double z)
    {
        this.x = x;
        this.y = y;
        this.z = z;
    }

    /**
     * Constructs a new vector from the elements of the specified point. The
     * Z coordinate is set to 0.
     *
     * @param point
     *            The point to copy the elements from.
     */
    public Vector3d(final Point2d point)
    {
        // No float cast: source is double precision and so are the fields.
        this.x = point.x;
        this.y = point.y;
        this.z = 0;
    }

    /**
     * Constructs a new vector from the elements of the specified point. The
     * Z coordinate is set to 0.
     *
     * @param point
     *            The point to copy the elements from.
     */
    public Vector3d(final Point2f point)
    {
        this.x = point.x;
        this.y = point.y;
        this.z = 0;
    }

    /**
     * Constructs a new vector from the elements of the specified point. All
     * three coordinates are copied.
     *
     * @param point
     *            The point to copy the elements from.
     */
    public Vector3d(final Point3d point)
    {
        // No float cast: would silently truncate double precision.
        this.x = point.x;
        this.y = point.y;
        this.z = point.z;
    }

    /**
     * Constructs a new vector from the elements of the specified point. All
     * three coordinates are copied.
     *
     * @param point
     *            The point to copy the elements from.
     */
    public Vector3d(final Point3f point)
    {
        this.x = point.x;
        this.y = point.y;
        this.z = point.z;
    }

    /**
     * Constructs a new vector from the elements of the specified vector. The
     * Z coordinate is set to 0.
     *
     * @param vector
     *            The vector to copy the elements from.
     */
    public Vector3d(final Vector2d vector)
    {
        // No float cast: source is double precision and so are the fields.
        this.x = vector.x;
        this.y = vector.y;
        this.z = 0;
    }

    /**
     * Constructs a new vector from the elements of the specified vector. The
     * Z coordinate is set to 0.
     *
     * @param vector
     *            The vector to copy the elements from.
     */
    public Vector3d(final Vector2f vector)
    {
        this.x = vector.x;
        this.y = vector.y;
        this.z = 0;
    }

    /**
     * Constructs a new vector from the elements of the specified vector.
     *
     * @param vector
     *            The vector to copy the elements from.
     */
    public Vector3d(final Vector3d vector)
    {
        // No float cast: copying a double vector must not lose precision.
        this.x = vector.x;
        this.y = vector.y;
        this.z = vector.z;
    }

    /**
     * Constructs a new vector from the elements of the specified vector.
     *
     * @param vector
     *            The vector to copy the elements from.
     */
    public Vector3d(final Vector3f vector)
    {
        this.x = vector.x;
        this.y = vector.y;
        this.z = vector.z;
    }

    /**
     * Creates a new empty instance of the vector class.
     *
     * @return A new empty instance. Never null.
     */
    protected abstract Vector3d newInstance();

    /**
     * Creates a new empty instance of the vector with the same type as the
     * specified vector. The main purpose of this method is extracting the
     * one-and-only type-check warning into a single method so only this single
     * line needs to be annotated with SuppressWarnings.
     *
     * @param <T>
     *            The vector type
     * @param m
     *            The vector to use as a template
     * @return The new empty vector
     */
    @SuppressWarnings("unchecked")
    private static <T extends Vector3d> T newInstance(final T m)
    {
        return (T) m.newInstance();
    }

    /**
     * Invalidates the vector so cached values will be re-calculated.
     */
    protected final void invalidate()
    {
        this.lengthValid = false;
    }

    /**
     * Creates the cross product of vector a and vector b and stores the result
     * into the specified result vector. It is safe to use one of the source
     * vectors as result vector.
     *
     * @param <T>
     *            The result vector type
     * @param a
     *            The first vector.
     * @param b
     *            The second vector.
     * @param result
     *            The result vector.
     * @return The result vector.
     */
    protected static <T extends Vector3d> T cross(final Vector3d a,
        final Vector3d b, final T result)
    {
        // x and y are computed into temporaries first so the method stays
        // correct when result aliases a or b.
        final double x = a.y * b.z - a.z * b.y;
        final double y = a.z * b.x - a.x * b.z;
        result.z = a.x * b.y - a.y * b.x;
        result.x = x;
        result.y = y;
        result.invalidate();
        return result;
    }

    /**
     * Adds vector a and vector b and stores the result into the specified
     * result vector. It is safe to use one of the source vectors as result
     * vector.
     *
     * @param <T>
     *            The result vector type
     * @param a
     *            The first vector.
     * @param b
     *            The second vector.
     * @param result
     *            The result vector.
     * @return The result vector.
     */
    protected static <T extends Vector3d> T add(final Vector3d a,
        final Vector3d b, final T result)
    {
        result.x = a.x + b.x;
        result.y = a.y + b.y;
        result.z = a.z + b.z;
        result.invalidate();
        return result;
    }

    /**
     * Subtracts vector b from vector a and stores the result into the
     * specified result vector. It is safe to use one of the source vectors as
     * result vector.
     *
     * @param <T>
     *            The result vector type
     * @param a
     *            The first vector.
     * @param b
     *            The second vector.
     * @param result
     *            The result vector.
     * @return The result vector.
     */
    protected static <T extends Vector3d> T sub(final Vector3d a,
        final Vector3d b, final T result)
    {
        result.x = a.x - b.x;
        result.y = a.y - b.y;
        result.z = a.z - b.z;
        result.invalidate();
        return result;
    }

    /**
     * Normalizes the specified vector and stores the result in the specified
     * result vector. It is safe to use the source vector as result vector. If
     * source vector is a null vector then result vector is also a null vector.
     *
     * @param <T>
     *            The result vector type.
     * @param vector
     *            The vector to normalize. Must not be null.
     * @param result
     *            The result vector. Must not be null.
     * @return The result vector. Never null.
     */
    protected static <T extends Vector3d> T normalize(final Vector3d vector,
        final T result)
    {
        final double l = vector.getLength();
        // Guard against division by zero for the null vector.
        result.x = l == 0 ? 0 : vector.x / l;
        result.y = l == 0 ? 0 : vector.y / l;
        result.z = l == 0 ? 0 : vector.z / l;
        result.invalidate();
        return result;
    }

    /**
     * Negates the specified vector and stores the result in the specified
     * result vector. It is safe to use the source vector as result vector.
     *
     * @param <T>
     *            The result vector type.
     * @param vector
     *            The vector to negate. Must not be null.
     * @param result
     *            The result vector. Must not be null.
     * @return The result vector. Never null.
     */
    protected static <T extends Vector3d> T negate(final Vector3d vector,
        final T result)
    {
        result.x = -vector.x;
        result.y = -vector.y;
        result.z = -vector.z;
        result.invalidate();
        return result;
    }

    /**
     * Transforms the specified vector with the specified matrix and stores the
     * result in the specified result vector. It is safe to use the source
     * vector as result vector.
     *
     * @param <T>
     *            The result vector type.
     * @param vector
     *            The vector to transform. Must not be null.
     * @param matrix
     *            The transformation matrix
     * @param result
     *            The result vector. Must not be null.
     * @return The result vector. Never null.
     */
    protected static <T extends Vector3d> T transform(final Vector3d vector,
        final Matrix4d matrix, final T result)
    {
        final double[] m = matrix.m;
        // Copy the source elements first so result may alias vector.
        final double x = vector.x;
        final double y = vector.y;
        final double z = vector.z;
        // Column-major 4x4 matrix: element (row, col) is at m[row + col * 4].
        result.x = m[0 + 0 * 4] * x + m[0 + 1 * 4] * y + m[0 + 2 * 4] * z
            + m[0 + 3 * 4];
        result.y = m[1 + 0 * 4] * x + m[1 + 1 * 4] * y + m[1 + 2 * 4] * z
            + m[1 + 3 * 4];
        result.z = m[2 + 0 * 4] * x + m[2 + 1 * 4] * y + m[2 + 2 * 4] * z
            + m[2 + 3 * 4];
        result.invalidate();
        return result;
    }

    /**
     * Returns the temporary matrix for internal calculations. This matrix is
     * cached in a thread local variable. The matrix content is not defined, so
     * make sure you set all elements before using it.
     *
     * @return The temporary matrix. Never null.
     */
    private static MutableMatrix4d getTmpMatrix()
    {
        MutableMatrix4d matrix = tmpMatrix.get();
        if (matrix == null)
        {
            matrix = new MutableMatrix4d();
            tmpMatrix.set(matrix);
        }
        return matrix;
    }

    /**
     * Rotates the specified vector and stores the result in the specified
     * result vector. It is safe to use the source vector as result vector.
     *
     * @param <T>
     *            The result vector type.
     * @param vector
     *            The vector to transform. Must not be null.
     * @param axis
     *            The rotation axis. Must not be null.
     * @param angle
     *            The rotation angle in anti-clockwise RAD.
     * @param result
     *            The result vector. Must not be null.
     * @return The result vector. Never null.
     */
    protected static <T extends Vector3d> T rotate(final Vector3d vector,
        final Vector3d axis, final double angle, final T result)
    {
        return transform(vector, getTmpMatrix().setRotation(axis, angle), result);
    }

    /**
     * Scales the specified vector and stores the result in the specified result
     * vector. It is safe to use the source vector as result vector.
     *
     * @param <T>
     *            The result vector type.
     * @param vector
     *            The vector to transform. Must not be null.
     * @param s
     *            The scale factor for all axes.
     * @param result
     *            The result vector. Must not be null.
     * @return The result vector. Never null.
     */
    protected static <T extends Vector3d> T scale(final Vector3d vector,
        final double s, final T result)
    {
        result.x = vector.x * s;
        result.y = vector.y * s;
        result.z = vector.z * s;
        result.invalidate();
        return result;
    }

    /**
     * Scales the specified vector and stores the result in the specified result
     * vector. It is safe to use the source vector as result vector.
     *
     * @param <T>
     *            The result vector type.
     * @param vector
     *            The vector to transform. Must not be null.
     * @param sx
     *            The X scale factor.
     * @param sy
     *            The Y scale factor.
     * @param sz
     *            The Z scale factor.
     * @param result
     *            The result vector. Must not be null.
     * @return The result vector. Never null.
     */
    protected static <T extends Vector3d> T scale(final Vector3d vector,
        final double sx, final double sy, final double sz, final T result)
    {
        result.x = vector.x * sx;
        result.y = vector.y * sy;
        result.z = vector.z * sz;
        result.invalidate();
        return result;
    }

    /**
     * Scales the specified vector along the X axis and stores the result in the
     * specified result vector. It is safe to use the source vector as result
     * vector.
     *
     * @param <T>
     *            The result vector type.
     * @param vector
     *            The vector to transform. Must not be null.
     * @param s
     *            The scale factor.
     * @param result
     *            The result vector. Must not be null.
     * @return The result vector. Never null.
     */
    protected static <T extends Vector3d> T scaleX(final Vector3d vector,
        final double s, final T result)
    {
        result.x = vector.x * s;
        result.y = vector.y;
        result.z = vector.z;
        result.invalidate();
        return result;
    }

    /**
     * Scales the specified vector along the Y axis and stores the result in the
     * specified result vector. It is safe to use the source vector as result
     * vector.
     *
     * @param <T>
     *            The result vector type.
     * @param vector
     *            The vector to transform. Must not be null.
     * @param s
     *            The scale factor.
     * @param result
     *            The result vector. Must not be null.
     * @return The result vector. Never null.
     */
    protected static <T extends Vector3d> T scaleY(final Vector3d vector,
        final double s, final T result)
    {
        result.x = vector.x;
        result.y = vector.y * s;
        result.z = vector.z;
        result.invalidate();
        return result;
    }

    /**
     * Scales the specified vector along the Z axis and stores the result in the
     * specified result vector. It is safe to use the source vector as result
     * vector.
     *
     * @param <T>
     *            The result vector type.
     * @param vector
     *            The vector to transform. Must not be null.
     * @param s
     *            The scale factor.
     * @param result
     *            The result vector. Must not be null.
     * @return The result vector. Never null.
     */
    protected static <T extends Vector3d> T scaleZ(final Vector3d vector,
        final double s, final T result)
    {
        result.x = vector.x;
        result.y = vector.y;
        result.z = vector.z * s;
        result.invalidate();
        return result;
    }

    /**
     * Translates the specified vector and stores the result in the specified
     * result vector. It is safe to use the source vector as result vector.
     *
     * @param <T>
     *            The result vector type.
     * @param vector
     *            The vector to transform. Must not be null.
     * @param tx
     *            The X translation.
     * @param ty
     *            The Y translation.
     * @param tz
     *            The Z translation.
     * @param result
     *            The result vector. Must not be null.
     * @return The result vector. Never null.
     */
    protected static <T extends Vector3d> T translate(final Vector3d vector,
        final double tx, final double ty, final double tz, final T result)
    {
        result.x = vector.x + tx;
        result.y = vector.y + ty;
        result.z = vector.z + tz;
        result.invalidate();
        return result;
    }

    /**
     * Translates the specified vector along the X axis and stores the result in
     * the specified result vector. It is safe to use the source vector as
     * result vector.
     *
     * @param <T>
     *            The result vector type.
     * @param vector
     *            The vector to transform. Must not be null.
     * @param t
     *            The translation.
     * @param result
     *            The result vector. Must not be null.
     * @return The result vector. Never null.
     */
    protected static <T extends Vector3d> T translateX(final Vector3d vector,
        final double t, final T result)
    {
        result.x = vector.x + t;
        result.y = vector.y;
        result.z = vector.z;
        result.invalidate();
        return result;
    }

    /**
     * Translates the specified vector along the Y axis and stores the result in
     * the specified result vector. It is safe to use the source vector as
     * result vector.
     *
     * @param <T>
     *            The result vector type.
     * @param vector
     *            The vector to transform. Must not be null.
     * @param t
     *            The translation.
     * @param result
     *            The result vector. Must not be null.
     * @return The result vector. Never null.
     */
    protected static <T extends Vector3d> T translateY(final Vector3d vector,
        final double t, final T result)
    {
        result.x = vector.x;
        result.y = vector.y + t;
        result.z = vector.z;
        result.invalidate();
        return result;
    }

    /**
     * Translates the specified vector along the Z axis and stores the result in
     * the specified result vector. It is safe to use the source vector as
     * result vector.
     *
     * @param <T>
     *            The result vector type.
     * @param vector
     *            The vector to transform. Must not be null.
     * @param t
     *            The translation.
     * @param result
     *            The result vector. Must not be null.
     * @return The result vector. Never null.
     */
    protected static <T extends Vector3d> T translateZ(final Vector3d vector,
        final double t, final T result)
    {
        result.x = vector.x;
        result.y = vector.y;
        result.z = vector.z + t;
        result.invalidate();
        return result;
    }

    /**
     * Projects the specified source vector into the specified result vector by
     * using the specified projection matrix (perspective division by the
     * resulting w component is applied).
     *
     * @param <T>
     *            The vector type.
     * @param vector
     *            The vector to project.
     * @param matrix
     *            The projection matrix.
     * @param result
     *            The result vector.
     * @return The result vector.
     */
    protected static <T extends Vector3d> T project(final T vector,
        final Matrix4d matrix, final T result)
    {
        final double[] m = matrix.m;
        final double x = vector.getX();
        final double y = vector.getY();
        final double z = vector.getZ();
        final double w = x * m[Matrix4.M30] + y * m[Matrix4.M31]
            + z * m[Matrix4.M32] + m[Matrix4.M33];
        result.x = (x * m[Matrix4.M00] + y * m[Matrix4.M01]
            + z * m[Matrix4.M02] + m[Matrix4.M03]) / w;
        result.y = (x * m[Matrix4.M10] + y * m[Matrix4.M11]
            + z * m[Matrix4.M12] + m[Matrix4.M13]) / w;
        result.z = (x * m[Matrix4.M20] + y * m[Matrix4.M21]
            + z * m[Matrix4.M22] + m[Matrix4.M23]) / w;
        // Bug fix: invalidate the cached length like all other mutators do,
        // otherwise getLength() may return a stale value after projection.
        result.invalidate();
        return result;
    }

    /**
     * @see java.lang.Object#toString()
     */
    @Override
    public final String toString()
    {
        return "[ " + this.x + ", " + this.y + ", " + this.z + " ]";
    }

    /**
     * Returns the direct NIO double buffer in native ordering containing the
     * vector elements. The returned double buffer is cached and mutable but
     * modifications do not modify the vector class itself.
     *
     * @return The vector as a direct NIO double buffer. Never null.
     */
    public final DoubleBuffer getBuffer()
    {
        // Lazily allocate a direct buffer for three 8-byte doubles.
        // NOTE(review): the cached buffer is refilled on every call and shared
        // between callers -- not safe for concurrent use; confirm callers are
        // single-threaded.
        if (this.buffer == null)
            this.buffer = ByteBuffer.allocateDirect(3 * 8)
                .order(ByteOrder.nativeOrder()).asDoubleBuffer();
        this.buffer.rewind();
        this.buffer.put(this.x).put(this.y).put(this.z);
        this.buffer.rewind();
        return this.buffer;
    }

    /**
     * Returns the X coordinate.
     *
     * @return The X coordinate.
     */
    public final double getX()
    {
        return this.x;
    }

    /**
     * Returns the Y coordinate.
     *
     * @return The Y coordinate.
     */
    public final double getY()
    {
        return this.y;
    }

    /**
     * Returns the Z coordinate.
     *
     * @return The Z coordinate.
     */
    public final double getZ()
    {
        return this.z;
    }

    /**
     * Returns the length of the vector. This length is automatically cached in
     * the vector and automatically invalidated when the vector is changed.
     *
     * @return The length of the vector.
     */
    public final double getLength()
    {
        if (!this.lengthValid)
        {
            this.length = Math.sqrt(this.x * this.x + this.y
                * this.y + this.z * this.z);
            this.lengthValid = true;
        }
        return this.length;
    }

    /**
     * Returns the normalization of this vector.
     *
     * @return The normalization of this vector. Never null.
     */
    public abstract Vector3d getNormalization();

    /**
     * Returns the dot product between this vector and the specified vector.
     *
     * @param v
     *            The vector to dot-multiplicate this one with. Must not be
     *            null.
     * @return The dot product.
     */
    public final double dot(final Vector3d v)
    {
        return dot(this, v);
    }

    /**
     * Returns the angle between this vector and the specified one.
     *
     * @param v
     *            The other vector. Must not be null.
     * @return The angle in RAD.
     */
    public final double angle(final Vector3d v)
    {
        return Math.acos(getNormalization().dot(v.getNormalization()));
    }

    /**
     * @see de.ailis.gramath.Vector#isNull()
     */
    @Override
    public final boolean isNull()
    {
        return this.x == 0 && this.y == 0 && this.z == 0;
    }

    /**
     * Returns the dot product between the two specified vectors.
     *
     * @param a
     *            The first vector. Must not be null.
     * @param b
     *            The second vector. Must not be null.
     * @return The dot product.
     */
    public static double dot(final Vector3d a, final Vector3d b)
    {
        return a.x * b.x + a.y * b.y + a.z * b.z;
    }

    /**
     * Creates the cross product of the specified vectors and returns a new
     * vector with the result. The returned vector has the same type as the
     * first specified vector.
     *
     * @param <T>
     *            The result vector type
     * @param a
     *            The first vector. Must not be null.
     * @param b
     *            The second vector. Must not be null.
     * @return The result vector. Never null.
     */
    public static <T extends Vector3d> T cross(final T a, final Vector3d b)
    {
        return cross(a, b, newInstance(a));
    }

    /**
     * Adds the specified vectors and returns a new vector with the result. The
     * returned vector has the same type as the first specified vector.
     *
     * @param <T>
     *            The result vector type
     * @param a
     *            The first vector. Must not be null.
     * @param b
     *            The second vector. Must not be null.
     * @return The result vector. Never null.
     */
    public static <T extends Vector3d> T add(final T a, final Vector3d b)
    {
        return add(a, b, newInstance(a));
    }

    /**
     * Subtracts vector b from vector a and returns a new vector with the
     * result. The returned vector has the same type as the first specified
     * vector.
     *
     * @param <T>
     *            The result vector type.
     * @param a
     *            The first vector. Must not be null.
     * @param b
     *            The second vector. Must not be null.
     * @return The result vector. Never null.
     */
    public static <T extends Vector3d> T sub(final T a, final Vector3d b)
    {
        return sub(a, b, newInstance(a));
    }

    /**
     * Negates the specified vector and returns a new vector with the result.
     *
     * @param <T>
     *            The vector type
     * @param vector
     *            The vector to negate. Must not be null.
     * @return The negated vector. Never null.
     */
    public static <T extends Vector3d> T negate(final T vector)
    {
        return negate(vector, newInstance(vector));
    }

    /**
     * Rotates the specified vector and returns a new vector with the result.
     *
     * @param <T>
     *            The vector type.
     * @param vector
     *            The vector to rotate. Must not be null.
     * @param axis
     *            The rotation axis. Must not be null.
     * @param angle
     *            The rotation angle in anti-clockwise RAD.
     * @return The rotated vector. Never null.
     */
    public static <T extends Vector3d> T rotate(final T vector,
        final Vector3d axis,
        final double angle)
    {
        return rotate(vector, axis, angle, newInstance(vector));
    }

    /**
     * Scales the specified vector and returns a new vector with the result.
     *
     * @param <T>
     *            The vector type.
     * @param vector
     *            The vector to scale. Must not be null.
     * @param s
     *            The scale factor.
     * @return The scaled vector. Never null.
     */
    public static <T extends Vector3d> T scale(final T vector,
        final double s)
    {
        return scale(vector, s, newInstance(vector));
    }

    /**
     * Scales the specified vector and returns a new vector with the result.
     *
     * @param <T>
     *            The vector type.
     * @param vector
     *            The vector to scale. Must not be null.
     * @param sx
     *            The X scale factor.
     * @param sy
     *            The Y scale factor.
     * @param sz
     *            The Z scale factor.
     * @return The scaled vector. Never null.
     */
    public static <T extends Vector3d> T scale(final T vector,
        final double sx, final double sy, final double sz)
    {
        return scale(vector, sx, sy, sz, newInstance(vector));
    }

    /**
     * Scales the specified vector along the X axis and returns a new vector
     * with the result.
     *
     * @param <T>
     *            The vector type.
     * @param vector
     *            The vector to scale. Must not be null.
     * @param s
     *            The scale factor.
     * @return The scaled vector. Never null.
     */
    public static <T extends Vector3d> T scaleX(final T vector,
        final double s)
    {
        return scaleX(vector, s, newInstance(vector));
    }

    /**
     * Scales the specified vector along the Y axis and returns a new vector
     * with the result.
     *
     * @param <T>
     *            The vector type.
     * @param vector
     *            The vector to scale. Must not be null.
     * @param s
     *            The scale factor.
     * @return The scaled vector. Never null.
     */
    public static <T extends Vector3d> T scaleY(final T vector,
        final double s)
    {
        return scaleY(vector, s, newInstance(vector));
    }

    /**
     * Scales the specified vector along the Z axis and returns a new vector
     * with the result.
     *
     * @param <T>
     *            The vector type.
     * @param vector
     *            The vector to scale. Must not be null.
     * @param s
     *            The scale factor.
     * @return The scaled vector. Never null.
     */
    public static <T extends Vector3d> T scaleZ(final T vector,
        final double s)
    {
        return scaleZ(vector, s, newInstance(vector));
    }

    /**
     * Translates the specified vector and returns a new vector with the result.
     *
     * @param <T>
     *            The vector type.
     * @param vector
     *            The vector to translate. Must not be null.
     * @param tx
     *            The X translation.
     * @param ty
     *            The Y translation.
     * @param tz
     *            The Z translation.
     * @return The translated vector. Never null.
     */
    public static <T extends Vector3d> T translate(final T vector,
        final double tx, final double ty, final double tz)
    {
        return translate(vector, tx, ty, tz, newInstance(vector));
    }

    /**
     * Translates the specified vector along the X axis and returns a new vector
     * with the result.
     *
     * @param <T>
     *            The vector type.
     * @param vector
     *            The vector to translate. Must not be null.
     * @param t
     *            The translation.
     * @return The translated vector. Never null.
     */
    public static <T extends Vector3d> T translateX(final T vector,
        final double t)
    {
        return translateX(vector, t, newInstance(vector));
    }

    /**
     * Translates the specified vector along the Y axis and returns a new vector
     * with the result.
     *
     * @param <T>
     *            The vector type.
     * @param vector
     *            The vector to translate. Must not be null.
     * @param t
     *            The translation.
     * @return The translated vector. Never null.
     */
    public static <T extends Vector3d> T translateY(final T vector,
        final double t)
    {
        return translateY(vector, t, newInstance(vector));
    }

    /**
     * Translates the specified vector along the Z axis and returns a new vector
     * with the result.
     *
     * @param <T>
     *            The vector type.
     * @param vector
     *            The vector to translate. Must not be null.
     * @param t
     *            The translation.
     * @return The translated vector. Never null.
     */
    public static <T extends Vector3d> T translateZ(final T vector,
        final double t)
    {
        return translateZ(vector, t, newInstance(vector));
    }

    /**
     * Projects the specified vector by using the specified projection matrix
     * and returns a new vector with the result.
     *
     * @param <T>
     *            The vector type.
     * @param vector
     *            The vector to project.
     * @param matrix
     *            The projection matrix.
     * @return The projected vector. Never null.
     */
    public static <T extends Vector3d> T project(final T vector,
        final Matrix4d matrix)
    {
        return project(vector, matrix, newInstance(vector));
    }

    /**
     * Transforms the specified vector with the specified matrix and returns a
     * new vector with the result.
     *
     * @param <T>
     *            The vector type.
     * @param v
     *            The vector to transform.
     * @param m
     *            The transformation matrix.
     * @return The result vector.
     */
    public static <T extends Vector3d> T transform(final T v, final Matrix4d m)
    {
        return transform(v, m, newInstance(v));
    }

    /**
     * @see java.lang.Object#hashCode()
     */
    @Override
    public int hashCode()
    {
        final int prime = 31;
        int result = 1;
        long temp;
        temp = Double.doubleToLongBits(this.x);
        result = prime * result + (int) (temp ^ (temp >>> 32));
        temp = Double.doubleToLongBits(this.y);
        result = prime * result + (int) (temp ^ (temp >>> 32));
        temp = Double.doubleToLongBits(this.z);
        result = prime * result + (int) (temp ^ (temp >>> 32));
        return result;
    }

    /**
     * @see java.lang.Object#equals(java.lang.Object)
     */
    @Override
    public boolean equals(final Object obj)
    {
        if (this == obj) return true;
        if (obj == null) return false;
        if (getClass() != obj.getClass()) return false;
        final Vector3d other = (Vector3d) obj;
        // Bit-wise comparison (consistent with hashCode): distinguishes
        // +0.0/-0.0 and treats NaN as equal to itself.
        if (Double.doubleToLongBits(this.x) != Double.doubleToLongBits(other.x))
            return false;
        if (Double.doubleToLongBits(this.y) != Double.doubleToLongBits(other.y))
            return false;
        if (Double.doubleToLongBits(this.z) != Double.doubleToLongBits(other.z))
            return false;
        return true;
    }
}
| |
/*
* Copyright 2014 Amund Elstad.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.aelstad.keccakj.core;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.util.Arrays;
import com.github.aelstad.keccakj.core.KeccakStateUtils.StateOp;
import com.github.aelstad.keccakj.io.BitInputStream;
import com.github.aelstad.keccakj.io.BitOutputStream;
/**
 * A cryptographic sponge over the Keccak-1600 permutation. Data is absorbed
 * through the stream returned by {@link #getAbsorbStream()} and output is
 * squeezed through the stream returned by {@link #getSqueezeStream()};
 * opening one stream closes the other, applying the required padding and
 * permutation in between.
 * <p>
 * NOTE(review): instances are not thread-safe — {@code ratePos} and the two
 * cached streams are mutated without synchronization.
 */
public class KeccakSponge {
    /** The underlying 1600-bit Keccak permutation state. */
    Keccak1600 keccak1600;
    /** Number of significant domain-separation bits in {@link #domainPadding}. */
    int domainPaddingBitLength;
    /** Domain-separation bits appended before the sponge padding. */
    byte domainPadding;
    /** Current bit offset into the rate portion of the state. */
    private int ratePos;
    /** Lazily-created, reusable squeeze (output) stream. */
    SqueezeStream squeezeStream;
    /** Lazily-created, reusable absorb (input) stream. */
    AbsorbStream absorbStream;
    /** Reads (squeezes) output bits out of the sponge state. */
    private final class SqueezeStream extends BitInputStream {
        // Starts closed; switched open on first read via open().
        private boolean closed = true;
        public SqueezeStream() {
        }
        @Override
        public void close() {
            if(!closed) {
                // Zero the state so previously squeezed material cannot leak.
                keccak1600.clear();
                closed = true;
                ratePos = 0;
            }
        }
        // Switches the sponge into squeezing mode, finishing (padding and
        // permuting) any in-progress absorb phase first.
        void open() {
            if(closed) {
                if(absorbStream != null)
                    absorbStream.close();
                ratePos = 0;
                closed = false;
            }
        }
        @Override
        public long readBits(byte[] bits, long bitOff, long bitLen) {
            open();
            long rv = 0;
            while(bitLen > 0) {
                int remainingBits = keccak1600.remainingBits(ratePos);
                if(remainingBits <= 0) {
                    // Rate exhausted: permute to produce the next output block.
                    keccak1600.permute();
                    ratePos = 0;
                    remainingBits = keccak1600.remainingBits(ratePos);
                }
                int chunk = (int) Math.min(bitLen, remainingBits);
                if((ratePos & 7)==0 && (bitOff&7)==0 && (chunk&7)==0) {
                    // Everything byte-aligned: use the faster byte-wise copy.
                    keccak1600.getBytes(ratePos>>3, bits, (int) (bitOff>>3), chunk>>3);
                } else {
                    keccak1600.getBits(ratePos, bits, bitOff, chunk);
                }
                ratePos += chunk;
                bitLen -= chunk;
                bitOff += chunk;
                rv += chunk;
            }
            return rv;
        }
        @Override
        public long transformBits(byte[] input, long inputOff, byte[] output, long outputOff, long bitLen) {
            // XORs squeezed key-stream bits with the input (stream-cipher style).
            // NOTE(review): unlike readBits, this does not call open(); callers
            // appear to reach it via getSqueezeStream(), which opens the stream
            // first — confirm no other call path exists.
            long rv = 0;
            while(bitLen > 0) {
                int remainingBits = keccak1600.remainingBits(ratePos);
                if(remainingBits <= 0) {
                    // Rate exhausted: permute for the next key-stream block.
                    keccak1600.permute();
                    ratePos = 0;
                    remainingBits = keccak1600.remainingBits(ratePos);
                }
                int chunk = (int) Math.min(bitLen, remainingBits);
                if((ratePos & 7)==0 && (inputOff&7)==0 && (outputOff&7)==0 && (chunk&7)==0) {
                    // Byte-aligned fast path.
                    keccak1600.bytesOp(StateOp.XOR_TRANSFORM, ratePos>>3, output, (int) (outputOff>>3), input, (int) (inputOff>>3), chunk>>3);
                } else {
                    keccak1600.bitsOp(StateOp.XOR_TRANSFORM, ratePos, output, outputOff, input, inputOff, chunk);
                }
                ratePos += chunk;
                bitLen -= chunk;
                inputOff += chunk;
                outputOff += chunk;
                rv += chunk;
            }
            return rv;
        }
    }
    /** Writes (absorbs) message bits into the sponge state. */
    private final class AbsorbStream extends BitOutputStream {
        // Starts open: a fresh sponge is immediately ready to absorb.
        private boolean closed = false;
        @Override
        public void close() {
            if(!closed){
                // Append the domain-separation bits plus sponge padding, then
                // permute so the absorbed data is fully mixed before squeezing.
                keccak1600.pad(domainPadding, domainPaddingBitLength, ratePos);
                keccak1600.permute();
                closed = true;
                ratePos = 0;
            }
        }
        @Override
        public void writeBits(byte[] bits, long bitOff, long bitLen) {
            open();
            while(bitLen > 0) {
                int remainingBits = keccak1600.remainingBits(ratePos);
                if(remainingBits <= 0) {
                    // Rate full: mix the absorbed block into the state.
                    keccak1600.permute();
                    ratePos = 0;
                    remainingBits = keccak1600.remainingBits(ratePos);
                }
                int chunk = (int) Math.min(bitLen, remainingBits);
                if((ratePos & 7)==0 && (bitOff&7)==0 && (chunk&7)==0) {
                    // Byte-aligned fast path.
                    keccak1600.setXorBytes(ratePos>>3, bits, (int) (bitOff>>3), chunk>>3);
                } else {
                    keccak1600.setXorBits(ratePos, bits, bitOff, chunk);
                }
                ratePos += chunk;
                bitLen -= chunk;
                bitOff += chunk;
            }
        }
        // Reopens the stream for a new message: closing an active squeeze
        // stream wipes the state; otherwise the state is cleared directly.
        public void open() {
            if(closed) {
                if(squeezeStream != null) {
                    squeezeStream.close();
                } else {
                    keccak1600.clear();
                    ratePos = 0;
                }
                closed = false;
            }
        }
    }
    /**
     * Creates a sponge with the default number of Keccak rounds.
     *
     * @param capacityInBits sponge capacity; the rate is 1600 minus this value
     * @param domainPadding domain-separation bits
     * @param domainPaddingBitLength number of significant bits in domainPadding
     */
    public KeccakSponge(int capacityInBits, byte domainPadding, int domainPaddingBitLength) {
        this.keccak1600 = new Keccak1600(capacityInBits);
        this.domainPadding = domainPadding;
        this.domainPaddingBitLength = domainPaddingBitLength;
        this.ratePos = 0;
    }
    /**
     * Creates a sponge with an explicit number of Keccak rounds.
     *
     * @param rounds number of permutation rounds
     * @param capacityInBits sponge capacity; the rate is 1600 minus this value
     * @param domainPadding domain-separation bits
     * @param domainPaddingBitLength number of significant bits in domainPadding
     */
    public KeccakSponge(int rounds, int capacityInBits, byte domainPadding, int domainPaddingBitLength) {
        this.keccak1600 = new Keccak1600(capacityInBits, rounds);
        this.domainPadding = domainPadding;
        this.domainPaddingBitLength = domainPaddingBitLength;
        this.ratePos = 0;
    }
    /**
     * Prepares the sponge to absorb a new message.
     * NOTE(review): this is a no-op until getAbsorbStream() has been called
     * at least once — confirm that is the intended contract.
     */
    public void reset() {
        if(absorbStream != null) {
            absorbStream.open();
        }
    }
    /**
     * Returns the shared squeeze stream, switching the sponge into squeezing
     * mode (padding and permuting any pending absorbed data). Never null.
     */
    public BitInputStream getSqueezeStream() {
        if(squeezeStream == null) {
            squeezeStream = new SqueezeStream();
        }
        squeezeStream.open();
        return squeezeStream;
    }
    /**
     * Returns the shared absorb stream, switching the sponge into absorbing
     * mode (resetting the state for a fresh message). Never null.
     */
    public BitOutputStream getAbsorbStream() {
        if(absorbStream == null) {
            absorbStream = new AbsorbStream();
        }
        absorbStream.open();
        return absorbStream;
    }
    /**
     * Wraps {@code target} in a stream that XORs every byte written to it
     * with squeezed sponge output before forwarding — i.e. encrypts or
     * decrypts in stream-cipher fashion.
     *
     * @param target destination for the transformed bytes. Must not be null.
     * @return the transforming stream. Never null.
     */
    public java.io.FilterOutputStream getTransformingSqueezeStream(final java.io.OutputStream target) {
        return new FilterOutputStream(target) {
            // Scratch buffer for chunked transformation; released on close().
            byte[] buf = new byte[4096];
            @Override
            public void write(byte[] b, int off, int len) throws IOException {
                while(len > 0) {
                    int chunk = Math.min(len, buf.length);
                    getSqueezeStream().transform(b, off, buf, 0, chunk);
                    target.write(buf, 0, chunk);
                    off += chunk;
                    len -= chunk;
                }
            }
            @Override
            public void write(byte[] b) throws IOException {
                this.write(b, 0, b.length);
            }
            @Override
            public void write(int b) throws IOException {
                // XOR a single byte with one squeezed key-stream byte.
                target.write(b ^ getSqueezeStream().read());
            }
            @Override
            public void close() throws IOException {
                buf = null;
                getSqueezeStream().close();
                super.close();
            }
        };
    }
    /**
     * Copies {@code len} bits of the state starting at bit {@code boff} into
     * a freshly allocated, byte-rounded array.
     * NOTE(review): the destination offset passed to getBits is {@code boff}
     * rather than 0, while the array is sized for {@code len} bits only —
     * for boff &gt; 0 this looks out of bounds; confirm against the
     * Keccak1600.getBits contract.
     */
    public byte[] getRateBits(int boff, int len)
    {
        // Rounds len up to a whole number of bytes.
        byte[] rv = new byte[(len+ (8 - len & 7)) >> 3];
        keccak1600.getBits(boff, rv, boff, len);
        return rv;
    }
    /** Returns the rate of the sponge in bits. */
    public int getRateBits() {
        return keccak1600.getRateBits();
    }
}
| |
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.accelerometerplay;
import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.BitmapFactory.Options;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.Bundle;
import android.os.PowerManager;
import android.os.PowerManager.WakeLock;
import android.util.DisplayMetrics;
import android.view.Display;
import android.view.Surface;
import android.view.View;
import android.view.WindowManager;
/**
 * This is an example of using the accelerometer to integrate the device's
 * acceleration to a position using the Verlet method. This is illustrated with
 * a very simple particle system comprised of a few iron balls freely moving on
 * an inclined wooden table. The inclination of the virtual table is controlled
 * by the device's accelerometer.
 *
 * @see SensorManager
 * @see SensorEvent
 * @see Sensor
 */
public class AccelerometerPlayActivity extends Activity {
    /** The custom view that runs and renders the particle simulation. */
    private SimulationView mSimulationView;
    /** Gives access to the accelerometer. */
    private SensorManager mSensorManager;
    /** Used to create the wake lock that keeps the screen on. */
    private PowerManager mPowerManager;
    /** Used to obtain the default display for rotation queries. */
    private WindowManager mWindowManager;
    /** The default display; queried for the current screen rotation. */
    private Display mDisplay;
    /** Keeps the screen bright while the activity is resumed. */
    private WakeLock mWakeLock;
    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Get an instance of the SensorManager
        mSensorManager = (SensorManager) getSystemService(SENSOR_SERVICE);
        // Get an instance of the PowerManager
        mPowerManager = (PowerManager) getSystemService(POWER_SERVICE);
        // Get an instance of the WindowManager
        mWindowManager = (WindowManager) getSystemService(WINDOW_SERVICE);
        mDisplay = mWindowManager.getDefaultDisplay();
        // Create a bright wake lock
        mWakeLock = mPowerManager.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, getClass()
                .getName());
        // instantiate our simulation view and set it as the activity's content
        mSimulationView = new SimulationView(this);
        setContentView(mSimulationView);
    }
    @Override
    protected void onResume() {
        super.onResume();
        /*
         * when the activity is resumed, we acquire a wake-lock so that the
         * screen stays on, since the user will likely not be fiddling with the
         * screen or buttons.
         */
        mWakeLock.acquire();
        // Start the simulation
        mSimulationView.startSimulation();
    }
    @Override
    protected void onPause() {
        super.onPause();
        /*
         * When the activity is paused, we make sure to stop the simulation,
         * release our sensor resources and wake locks
         */
        // Stop the simulation
        mSimulationView.stopSimulation();
        // and release our wake-lock
        mWakeLock.release();
    }
    /**
     * View that integrates accelerometer readings into ball positions via
     * time-corrected Verlet integration and draws the result each frame.
     */
    class SimulationView extends View implements SensorEventListener {
        // diameter of the balls in meters
        private static final float sBallDiameter = 0.004f;
        private static final float sBallDiameter2 = sBallDiameter * sBallDiameter;
        // friction of the virtual table and air
        private static final float sFriction = 0.1f;
        /** The accelerometer sensor events are received from. */
        private Sensor mAccelerometer;
        /** Timestamp of the previous integration step (ns); 0 until first step. */
        private long mLastT;
        /** Duration of the previous step (s); used for time correction. */
        private float mLastDeltaT;
        // Display density, used to convert meters to pixels.
        private float mXDpi;
        private float mYDpi;
        private float mMetersToPixelsX;
        private float mMetersToPixelsY;
        /** Scaled ball bitmap (~0.5 cm on screen). */
        private Bitmap mBitmap;
        /** Background (table) bitmap. */
        private Bitmap mWood;
        // Screen-space origin of the simulation's coordinate system.
        private float mXOrigin;
        private float mYOrigin;
        // Latest accelerometer reading, remapped for screen rotation.
        private float mSensorX;
        private float mSensorY;
        /** Sensor-event timestamp of the latest reading (ns). */
        private long mSensorTimeStamp;
        /** CPU time when the latest reading arrived; see onDraw(). */
        private long mCpuTimeStamp;
        // Half extents of the table, in meters.
        private float mHorizontalBound;
        private float mVerticalBound;
        private final ParticleSystem mParticleSystem = new ParticleSystem();
        /*
         * Each of our particle holds its previous and current position, its
         * acceleration. for added realism each particle has its own friction
         * coefficient.
         */
        class Particle {
            private float mPosX;
            private float mPosY;
            private float mAccelX;
            private float mAccelY;
            private float mLastPosX;
            private float mLastPosY;
            private float mOneMinusFriction;
            Particle() {
                // make each particle a bit different by randomizing its
                // coefficient of friction
                final float r = ((float) Math.random() - 0.5f) * 0.2f;
                mOneMinusFriction = 1.0f - sFriction + r;
            }
            /**
             * Advances this particle by one Verlet step.
             *
             * @param sx accelerometer X reading
             * @param sy accelerometer Y reading
             * @param dT duration of this step, in seconds
             * @param dTC ratio dT / previous dT, for time correction
             */
            public void computePhysics(float sx, float sy, float dT, float dTC) {
                // Force of gravity applied to our virtual object
                final float m = 1000.0f; // mass of our virtual object
                final float gx = -sx * m;
                final float gy = -sy * m;
                /*
                 * F = mA <=> A = F / m We could simplify the code by
                 * completely eliminating "m" (the mass) from all the equations,
                 * but it would hide the concepts from this sample code.
                 */
                final float invm = 1.0f / m;
                final float ax = gx * invm;
                final float ay = gy * invm;
                /*
                 * Time-corrected Verlet integration The position Verlet
                 * integrator is defined as x(t+dt) = x(t) + x(t) - x(t-dt) +
                 * a(t).t^2 However, the above equation doesn't handle variable
                 * dt very well, a time-corrected version is needed: x(t+dt) =
                 * x(t) + (x(t) - x(t-dt)) * (dt/dt_prev) + a(t).t^2 We also add
                 * a simple friction term (f) to the equation: x(t+dt) = x(t) +
                 * (1-f) * (x(t) - x(t-dt)) * (dt/dt_prev) + a(t)t^2
                 */
                final float dTdT = dT * dT;
                final float x = mPosX + mOneMinusFriction * dTC * (mPosX - mLastPosX) + mAccelX
                        * dTdT;
                final float y = mPosY + mOneMinusFriction * dTC * (mPosY - mLastPosY) + mAccelY
                        * dTdT;
                mLastPosX = mPosX;
                mLastPosY = mPosY;
                mPosX = x;
                mPosY = y;
                mAccelX = ax;
                mAccelY = ay;
            }
            /*
             * Resolving constraints and collisions with the Verlet integrator
             * can be very simple, we simply need to move a colliding or
             * constrained particle in such way that the constraint is
             * satisfied.
             */
            public void resolveCollisionWithBounds() {
                final float xmax = mHorizontalBound;
                final float ymax = mVerticalBound;
                final float x = mPosX;
                final float y = mPosY;
                // Clamp the position to the table bounds on each axis.
                if (x > xmax) {
                    mPosX = xmax;
                } else if (x < -xmax) {
                    mPosX = -xmax;
                }
                if (y > ymax) {
                    mPosY = ymax;
                } else if (y < -ymax) {
                    mPosY = -ymax;
                }
            }
        }
        /*
         * A particle system is just a collection of particles
         */
        class ParticleSystem {
            static final int NUM_PARTICLES = 15;
            private Particle mBalls[] = new Particle[NUM_PARTICLES];
            ParticleSystem() {
                /*
                 * Initially our particles have no speed or acceleration
                 */
                for (int i = 0; i < mBalls.length; i++) {
                    mBalls[i] = new Particle();
                }
            }
            /*
             * Update the position of each particle in the system using the
             * Verlet integrator.
             */
            private void updatePositions(float sx, float sy, long timestamp) {
                final long t = timestamp;
                // The first two calls only prime mLastT / mLastDeltaT; actual
                // integration needs both a current and a previous step length.
                if (mLastT != 0) {
                    final float dT = (float) (t - mLastT) * (1.0f / 1000000000.0f);
                    if (mLastDeltaT != 0) {
                        final float dTC = dT / mLastDeltaT;
                        final int count = mBalls.length;
                        for (int i = 0; i < count; i++) {
                            Particle ball = mBalls[i];
                            ball.computePhysics(sx, sy, dT, dTC);
                        }
                    }
                    mLastDeltaT = dT;
                }
                mLastT = t;
            }
            /*
             * Performs one iteration of the simulation. First updating the
             * position of all the particles and resolving the constraints and
             * collisions.
             */
            public void update(float sx, float sy, long now) {
                // update the system's positions
                updatePositions(sx, sy, now);
                // We do no more than a limited number of iterations
                final int NUM_MAX_ITERATIONS = 10;
                /*
                 * Resolve collisions, each particle is tested against every
                 * other particle for collision. If a collision is detected the
                 * particle is moved away using a virtual spring of infinite
                 * stiffness.
                 */
                boolean more = true;
                final int count = mBalls.length;
                for (int k = 0; k < NUM_MAX_ITERATIONS && more; k++) {
                    more = false;
                    for (int i = 0; i < count; i++) {
                        Particle curr = mBalls[i];
                        for (int j = i + 1; j < count; j++) {
                            Particle ball = mBalls[j];
                            float dx = ball.mPosX - curr.mPosX;
                            float dy = ball.mPosY - curr.mPosY;
                            float dd = dx * dx + dy * dy;
                            // Check for collisions
                            if (dd <= sBallDiameter2) {
                                /*
                                 * add a little bit of entropy, after nothing is
                                 * perfect in the universe.
                                 */
                                dx += ((float) Math.random() - 0.5f) * 0.0001f;
                                dy += ((float) Math.random() - 0.5f) * 0.0001f;
                                dd = dx * dx + dy * dy;
                                // simulate the spring
                                final float d = (float) Math.sqrt(dd);
                                final float c = (0.5f * (sBallDiameter - d)) / d;
                                // Push the pair apart symmetrically along the
                                // line joining their centers.
                                curr.mPosX -= dx * c;
                                curr.mPosY -= dy * c;
                                ball.mPosX += dx * c;
                                ball.mPosY += dy * c;
                                more = true;
                            }
                        }
                        /*
                         * Finally make sure the particle doesn't intersects
                         * with the walls.
                         */
                        curr.resolveCollisionWithBounds();
                    }
                }
            }
            public int getParticleCount() {
                return mBalls.length;
            }
            public float getPosX(int i) {
                return mBalls[i].mPosX;
            }
            public float getPosY(int i) {
                return mBalls[i].mPosY;
            }
        }
        /** Registers for accelerometer updates; call from onResume(). */
        public void startSimulation() {
            /*
             * It is not necessary to get accelerometer events at a very high
             * rate, by using a slower rate (SENSOR_DELAY_UI), we get an
             * automatic low-pass filter, which "extracts" the gravity component
             * of the acceleration. As an added benefit, we use less power and
             * CPU resources.
             */
            mSensorManager.registerListener(this, mAccelerometer, SensorManager.SENSOR_DELAY_UI);
        }
        /** Unregisters the sensor listener; call from onPause(). */
        public void stopSimulation() {
            mSensorManager.unregisterListener(this);
        }
        /**
         * Creates the view, resolving the accelerometer and pre-scaling the
         * bitmaps to physical (DPI-derived) dimensions.
         * NOTE(review): relies on the enclosing activity having initialized
         * mSensorManager in onCreate() before constructing this view.
         */
        public SimulationView(Context context) {
            super(context);
            mAccelerometer = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
            DisplayMetrics metrics = new DisplayMetrics();
            getWindowManager().getDefaultDisplay().getMetrics(metrics);
            mXDpi = metrics.xdpi;
            mYDpi = metrics.ydpi;
            // 0.0254 meters per inch.
            mMetersToPixelsX = mXDpi / 0.0254f;
            mMetersToPixelsY = mYDpi / 0.0254f;
            // rescale the ball so it's about 0.5 cm on screen
            Bitmap ball = BitmapFactory.decodeResource(getResources(), R.drawable.ball);
            final int dstWidth = (int) (sBallDiameter * mMetersToPixelsX + 0.5f);
            final int dstHeight = (int) (sBallDiameter * mMetersToPixelsY + 0.5f);
            mBitmap = Bitmap.createScaledBitmap(ball, dstWidth, dstHeight, true);
            Options opts = new Options();
            opts.inDither = true;
            opts.inPreferredConfig = Bitmap.Config.RGB_565;
            mWood = BitmapFactory.decodeResource(getResources(), R.drawable.wood, opts);
        }
        @Override
        protected void onSizeChanged(int w, int h, int oldw, int oldh) {
            // compute the origin of the screen relative to the origin of
            // the bitmap
            mXOrigin = (w - mBitmap.getWidth()) * 0.5f;
            mYOrigin = (h - mBitmap.getHeight()) * 0.5f;
            // Table bounds in meters, shrunk by one ball diameter so balls
            // stay fully on screen.
            mHorizontalBound = ((w / mMetersToPixelsX - sBallDiameter) * 0.5f);
            mVerticalBound = ((h / mMetersToPixelsY - sBallDiameter) * 0.5f);
        }
        @Override
        public void onSensorChanged(SensorEvent event) {
            if (event.sensor.getType() != Sensor.TYPE_ACCELEROMETER)
                return;
            /*
             * record the accelerometer data, the event's timestamp as well as
             * the current time. The latter is needed so we can calculate the
             * "present" time during rendering. In this application, we need to
             * take into account how the screen is rotated with respect to the
             * sensors (which always return data in a coordinate space aligned
             * to with the screen in its native orientation).
             */
            switch (mDisplay.getRotation()) {
                case Surface.ROTATION_0:
                    mSensorX = event.values[0];
                    mSensorY = event.values[1];
                    break;
                case Surface.ROTATION_90:
                    mSensorX = -event.values[1];
                    mSensorY = event.values[0];
                    break;
                case Surface.ROTATION_180:
                    mSensorX = -event.values[0];
                    mSensorY = -event.values[1];
                    break;
                case Surface.ROTATION_270:
                    mSensorX = event.values[1];
                    mSensorY = -event.values[0];
                    break;
            }
            mSensorTimeStamp = event.timestamp;
            mCpuTimeStamp = System.nanoTime();
        }
        @Override
        protected void onDraw(Canvas canvas) {
            /*
             * draw the background
             */
            canvas.drawBitmap(mWood, 0, 0, null);
            /*
             * compute the new position of our object, based on accelerometer
             * data and present time.
             */
            final ParticleSystem particleSystem = mParticleSystem;
            // Extrapolate the sensor timestamp to "now" using elapsed CPU time.
            final long now = mSensorTimeStamp + (System.nanoTime() - mCpuTimeStamp);
            final float sx = mSensorX;
            final float sy = mSensorY;
            particleSystem.update(sx, sy, now);
            final float xc = mXOrigin;
            final float yc = mYOrigin;
            final float xs = mMetersToPixelsX;
            final float ys = mMetersToPixelsY;
            final Bitmap bitmap = mBitmap;
            final int count = particleSystem.getParticleCount();
            for (int i = 0; i < count; i++) {
                /*
                 * We transform the canvas so that the coordinate system matches
                 * the sensors coordinate system with the origin in the center
                 * of the screen and the unit is the meter.
                 */
                final float x = xc + particleSystem.getPosX(i) * xs;
                final float y = yc - particleSystem.getPosY(i) * ys;
                canvas.drawBitmap(bitmap, x, y, null);
            }
            // and make sure to redraw asap
            invalidate();
        }
        @Override
        public void onAccuracyChanged(Sensor sensor, int accuracy) {
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.scheduler.adaptive;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.JobStatus;
import org.apache.flink.core.execution.SavepointFormatType;
import org.apache.flink.queryablestate.KvStateID;
import org.apache.flink.runtime.accumulators.AccumulatorSnapshot;
import org.apache.flink.runtime.checkpoint.CheckpointCoordinator;
import org.apache.flink.runtime.checkpoint.CheckpointMetrics;
import org.apache.flink.runtime.checkpoint.CompletedCheckpoint;
import org.apache.flink.runtime.checkpoint.TaskStateSnapshot;
import org.apache.flink.runtime.execution.ExecutionState;
import org.apache.flink.runtime.executiongraph.ArchivedExecutionGraph;
import org.apache.flink.runtime.executiongraph.ExecutionAttemptID;
import org.apache.flink.runtime.executiongraph.ExecutionGraph;
import org.apache.flink.runtime.executiongraph.TaskExecutionStateTransition;
import org.apache.flink.runtime.io.network.partition.ResultPartitionID;
import org.apache.flink.runtime.jobgraph.IntermediateDataSetID;
import org.apache.flink.runtime.jobgraph.JobVertexID;
import org.apache.flink.runtime.jobgraph.OperatorID;
import org.apache.flink.runtime.jobmanager.PartitionProducerDisposedException;
import org.apache.flink.runtime.jobmaster.SerializedInputSplit;
import org.apache.flink.runtime.messages.FlinkJobNotFoundException;
import org.apache.flink.runtime.messages.checkpoint.DeclineCheckpoint;
import org.apache.flink.runtime.operators.coordination.CoordinationRequest;
import org.apache.flink.runtime.operators.coordination.CoordinationResponse;
import org.apache.flink.runtime.operators.coordination.OperatorEvent;
import org.apache.flink.runtime.query.KvStateLocation;
import org.apache.flink.runtime.query.UnknownKvStateLocation;
import org.apache.flink.runtime.scheduler.ExecutionGraphHandler;
import org.apache.flink.runtime.scheduler.KvStateHandler;
import org.apache.flink.runtime.scheduler.OperatorCoordinatorHandler;
import org.apache.flink.runtime.scheduler.stopwithsavepoint.StopWithSavepointTerminationManager;
import org.apache.flink.runtime.state.KeyGroupRange;
import org.apache.flink.util.FlinkException;
import org.apache.flink.util.Preconditions;
import org.apache.flink.util.concurrent.FutureUtils;
import org.slf4j.Logger;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.Executor;
/**
 * Abstract state class which contains an {@link ExecutionGraph} and the required handlers to
 * execute common operations.
 */
abstract class StateWithExecutionGraph implements State {
    /** Callback interface into the owning scheduler's state machine. */
    private final Context context;
    /** The execution graph this state operates on. */
    private final ExecutionGraph executionGraph;
    /** Handles input-split, partition-state and checkpoint-ack operations. */
    private final ExecutionGraphHandler executionGraphHandler;
    /** Handles operator-coordinator events and requests. */
    private final OperatorCoordinatorHandler operatorCoordinatorHandler;
    /** Handles queryable-state (KvState) registration and lookup. */
    private final KvStateHandler kvStateHandler;
    private final Logger logger;
    StateWithExecutionGraph(
            Context context,
            ExecutionGraph executionGraph,
            ExecutionGraphHandler executionGraphHandler,
            OperatorCoordinatorHandler operatorCoordinatorHandler,
            Logger logger) {
        this.context = context;
        this.executionGraph = executionGraph;
        this.executionGraphHandler = executionGraphHandler;
        this.operatorCoordinatorHandler = operatorCoordinatorHandler;
        this.kvStateHandler = new KvStateHandler(executionGraph);
        this.logger = logger;
        // Forward the graph's terminal status to onGloballyTerminalState, but
        // only if this state is still the scheduler's active state when the
        // future completes (runIfState guards against stale callbacks).
        FutureUtils.assertNoException(
                executionGraph
                        .getTerminationFuture()
                        .thenAcceptAsync(
                                jobStatus -> {
                                    if (jobStatus.isGloballyTerminalState()) {
                                        context.runIfState(
                                                this, () -> onGloballyTerminalState(jobStatus));
                                    }
                                },
                                context.getMainThreadExecutor()));
    }
    @VisibleForTesting
    ExecutionGraph getExecutionGraph() {
        return executionGraph;
    }
    /** Returns the ID of the job the execution graph belongs to. */
    JobID getJobId() {
        return executionGraph.getJobID();
    }
    protected OperatorCoordinatorHandler getOperatorCoordinatorHandler() {
        return operatorCoordinatorHandler;
    }
    protected ExecutionGraphHandler getExecutionGraphHandler() {
        return executionGraphHandler;
    }
    @Override
    public void onLeave(Class<? extends State> newState) {
        if (!StateWithExecutionGraph.class.isAssignableFrom(newState)) {
            // we are leaving the StateWithExecutionGraph --> we need to dispose temporary services
            operatorCoordinatorHandler.disposeAllOperatorCoordinators();
        }
    }
    @Override
    public ArchivedExecutionGraph getJob() {
        return ArchivedExecutionGraph.createFrom(executionGraph, getJobStatus());
    }
    @Override
    public void suspend(Throwable cause) {
        executionGraph.suspend(cause);
        // suspend() is expected to move the graph into a terminal state
        // synchronously; fail fast if that invariant is broken.
        Preconditions.checkState(executionGraph.getState().isTerminalState());
        context.goToFinished(ArchivedExecutionGraph.createFrom(executionGraph));
    }
    @Override
    public Logger getLogger() {
        return logger;
    }
    /** Notifies the graph that data of the given result partition is available. */
    void notifyPartitionDataAvailable(ResultPartitionID partitionID) {
        executionGraph.notifyPartitionDataAvailable(partitionID);
    }
    /**
     * Requests the next input split for the given vertex/attempt.
     *
     * @throws IOException if the input split cannot be serialized
     */
    SerializedInputSplit requestNextInputSplit(
            JobVertexID vertexID, ExecutionAttemptID executionAttempt) throws IOException {
        return executionGraphHandler.requestNextInputSplit(vertexID, executionAttempt);
    }
    /** Requests the execution state of the producer of the given partition. */
    ExecutionState requestPartitionState(
            IntermediateDataSetID intermediateResultId, ResultPartitionID resultPartitionId)
            throws PartitionProducerDisposedException {
        return executionGraphHandler.requestPartitionState(intermediateResultId, resultPartitionId);
    }
    /** Forwards a task's checkpoint acknowledgement to the checkpoint machinery. */
    void acknowledgeCheckpoint(
            JobID jobID,
            ExecutionAttemptID executionAttemptID,
            long checkpointId,
            CheckpointMetrics checkpointMetrics,
            TaskStateSnapshot checkpointState) {
        executionGraphHandler.acknowledgeCheckpoint(
                jobID, executionAttemptID, checkpointId, checkpointMetrics, checkpointState);
    }
    /** Forwards a task's checkpoint decline message. */
    void declineCheckpoint(DeclineCheckpoint decline) {
        executionGraphHandler.declineCheckpoint(decline);
    }
    /** Forwards checkpoint metrics reported by a task. */
    void reportCheckpointMetrics(
            ExecutionAttemptID executionAttemptID,
            long checkpointId,
            CheckpointMetrics checkpointMetrics) {
        executionGraphHandler.reportCheckpointMetrics(
                executionAttemptID, checkpointId, checkpointMetrics);
    }
    /** Merges an accumulator snapshot from a task into the execution graph. */
    void updateAccumulators(AccumulatorSnapshot accumulatorSnapshot) {
        executionGraph.updateAccumulators(accumulatorSnapshot);
    }
    /**
     * Looks up the location of a registered KvState.
     *
     * @throws FlinkJobNotFoundException if the job ID does not match
     * @throws UnknownKvStateLocation if no state is registered under the name
     */
    KvStateLocation requestKvStateLocation(JobID jobId, String registrationName)
            throws FlinkJobNotFoundException, UnknownKvStateLocation {
        return kvStateHandler.requestKvStateLocation(jobId, registrationName);
    }
    /** Registers a KvState instance under the given name and key-group range. */
    void notifyKvStateRegistered(
            JobID jobId,
            JobVertexID jobVertexId,
            KeyGroupRange keyGroupRange,
            String registrationName,
            KvStateID kvStateId,
            InetSocketAddress kvStateServerAddress)
            throws FlinkJobNotFoundException {
        kvStateHandler.notifyKvStateRegistered(
                jobId,
                jobVertexId,
                keyGroupRange,
                registrationName,
                kvStateId,
                kvStateServerAddress);
    }
    /** Unregisters a previously registered KvState instance. */
    void notifyKvStateUnregistered(
            JobID jobId,
            JobVertexID jobVertexId,
            KeyGroupRange keyGroupRange,
            String registrationName)
            throws FlinkJobNotFoundException {
        kvStateHandler.notifyKvStateUnregistered(
                jobId, jobVertexId, keyGroupRange, registrationName);
    }
    /**
     * Triggers a savepoint, optionally cancelling the job once the savepoint
     * has completed. The periodic checkpoint scheduler is stopped while a
     * cancel-with-savepoint is in flight and restarted if it fails; both the
     * restart and the cancellation only happen while this state is still the
     * active one (guarded by context.isState).
     *
     * @return future completing with the external savepoint path
     */
    CompletableFuture<String> triggerSavepoint(
            String targetDirectory, boolean cancelJob, SavepointFormatType formatType) {
        final CheckpointCoordinator checkpointCoordinator =
                executionGraph.getCheckpointCoordinator();
        StopWithSavepointTerminationManager.checkSavepointActionPreconditions(
                checkpointCoordinator, targetDirectory, getJobId(), logger);
        logger.info(
                "Triggering {}savepoint for job {}.",
                cancelJob ? "cancel-with-" : "",
                executionGraph.getJobID());
        if (cancelJob) {
            // No periodic checkpoints should race with the savepoint.
            checkpointCoordinator.stopCheckpointScheduler();
        }
        return checkpointCoordinator
                .triggerSavepoint(targetDirectory, formatType)
                .thenApply(CompletedCheckpoint::getExternalPointer)
                .handleAsync(
                        (path, throwable) -> {
                            if (throwable != null) {
                                if (cancelJob && context.isState(this)) {
                                    // Savepoint failed: resume periodic checkpointing.
                                    startCheckpointScheduler(checkpointCoordinator);
                                }
                                throw new CompletionException(throwable);
                            } else if (cancelJob && context.isState(this)) {
                                logger.info(
                                        "Savepoint stored in {}. Now cancelling {}.",
                                        path,
                                        executionGraph.getJobID());
                                cancel();
                            }
                            return path;
                        },
                        context.getMainThreadExecutor());
    }
    /**
     * Triggers a (non-periodic) checkpoint.
     *
     * @return future completing with the external checkpoint path
     * @throws IllegalStateException if the job has no checkpoint coordinator
     *     (i.e. it is not a streaming job)
     */
    CompletableFuture<String> triggerCheckpoint() {
        final CheckpointCoordinator checkpointCoordinator =
                executionGraph.getCheckpointCoordinator();
        final JobID jobID = executionGraph.getJobID();
        if (checkpointCoordinator == null) {
            throw new IllegalStateException(String.format("Job %s is not a streaming job.", jobID));
        }
        logger.info("Triggering a checkpoint for job {}.", jobID);
        return checkpointCoordinator
                .triggerCheckpoint(false)
                .thenApply(CompletedCheckpoint::getExternalPointer)
                .handleAsync(
                        (path, throwable) -> {
                            if (throwable != null) {
                                throw new CompletionException(throwable);
                            }
                            return path;
                        },
                        context.getMainThreadExecutor());
    }
    /** Restarts periodic checkpointing if it is configured for this job. */
    private void startCheckpointScheduler(final CheckpointCoordinator checkpointCoordinator) {
        if (checkpointCoordinator.isPeriodicCheckpointingConfigured()) {
            try {
                checkpointCoordinator.startCheckpointScheduler();
            } catch (IllegalStateException ignored) {
                // Concurrent shut down of the coordinator
            }
        }
    }
    /** Delivers an operator event from a task to the owning operator coordinator. */
    void deliverOperatorEventToCoordinator(
            ExecutionAttemptID taskExecutionId, OperatorID operatorId, OperatorEvent evt)
            throws FlinkException {
        operatorCoordinatorHandler.deliverOperatorEventToCoordinator(
                taskExecutionId, operatorId, evt);
    }
    /** Delivers a coordination request to the given operator's coordinator. */
    CompletableFuture<CoordinationResponse> deliverCoordinationRequestToCoordinator(
            OperatorID operatorId, CoordinationRequest request) throws FlinkException {
        return operatorCoordinatorHandler.deliverCoordinationRequestToCoordinator(
                operatorId, request);
    }
    /**
     * Updates the execution graph with the given task execution state transition.
     *
     * @param taskExecutionStateTransition taskExecutionStateTransition to update the ExecutionGraph
     *     with
     * @return {@code true} if the update was successful; otherwise {@code false}
     */
    abstract boolean updateTaskExecutionState(
            TaskExecutionStateTransition taskExecutionStateTransition);
    /**
     * Callback which is called once the execution graph reaches a globally terminal state.
     *
     * @param globallyTerminalState globally terminal state which the execution graph reached
     */
    abstract void onGloballyTerminalState(JobStatus globallyTerminalState);
    /** Context of the {@link StateWithExecutionGraph} state. */
    interface Context {
        /**
         * Run the given action if the current state equals the expected state.
         *
         * @param expectedState expectedState is the expected state
         * @param action action to run if the current state equals the expected state
         */
        void runIfState(State expectedState, Runnable action);
        /**
         * Checks whether the current state is the expected state.
         *
         * @param expectedState expectedState is the expected state
         * @return {@code true} if the current state equals the expected state; otherwise {@code
         *     false}
         */
        boolean isState(State expectedState);
        /**
         * Gets the main thread executor.
         *
         * @return the main thread executor
         */
        Executor getMainThreadExecutor();
        /**
         * Transitions into the {@link Finished} state.
         *
         * @param archivedExecutionGraph archivedExecutionGraph which is passed to the {@link
         *     Finished} state
         */
        void goToFinished(ArchivedExecutionGraph archivedExecutionGraph);
    }
}
| |
/**
* Copyright (c) 2009/09-2012/08, Regents of the University of Colorado
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* Copyright 2012/09-2013/04, 2013/11-Present, University of Massachusetts Amherst
* Copyright 2013/05-2013/10, IPSoft Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.clearnlp.generation;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.regex.Pattern;
import com.clearnlp.constant.english.ENAux;
import com.clearnlp.constant.universal.UNConstant;
import com.clearnlp.constant.universal.UNPunct;
import com.clearnlp.constituent.CTLibEn;
import com.clearnlp.dependency.DEPArc;
import com.clearnlp.dependency.DEPLib;
import com.clearnlp.dependency.DEPLibEn;
import com.clearnlp.dependency.DEPNode;
import com.clearnlp.dependency.DEPTree;
import com.clearnlp.dependency.srl.ArgInfo;
import com.clearnlp.dependency.srl.SRLArc;
import com.clearnlp.dependency.srl.SRLLib;
import com.clearnlp.dependency.srl.SRLTree;
import com.clearnlp.util.UTRegex;
import com.clearnlp.util.UTString;
import com.clearnlp.util.pair.Pair;
/**
* Designed for Eliza at IPsoft.
* @since 1.4.0
* @author Jinho D. Choi ({@code jdchoi77@gmail.com})
*/
public class LGAnswerGenerator
{
// Whether coreferent forms are used when rendering node text (see LGLibEn.getForms).
final boolean USE_COREF = true;
// Dependents with these labels are detached from the verb before answer generation.
// NOTE(review): DEPLibEn.DEP_CONJ appears twice in this pattern; the similar trio
// in getLongAnswerFromDeclarative uses CONJ/CC/PRECONJ, so the duplicate may have
// been meant as DEP_PRECONJ -- confirm before changing.
final Pattern P_REMOVE = UTRegex.getORPatternExact(DEPLibEn.DEP_CONJ, DEPLibEn.DEP_CC, DEPLibEn.DEP_CONJ);
// Dependents with these labels are always kept on the verb when trimming for short answers.
final Pattern P_PRESERVE = UTRegex.getORPatternExact(DEPLibEn.DEP_AGENT, DEPLibEn.DEP_EXPL, DEPLibEn.DEP_HMOD, DEPLibEn.DEP_HYPH, DEPLibEn.DEP_NEG, DEPLibEn.DEP_PRT, DEPLibEn.DEP_PUNCT);
/** For short answers. */
public LGAnswerGenerator() {}
/**
 * Generates the final answer string for the retrieved trees.
 * NOTE: getShortAnswers is always called first, even in verbose mode, because it
 * also replaces each entry of {@code rTrees} with a cloned, normalized tree
 * (see rTrees.set in getShortAnswers); when {@code verbose}, its string result
 * is discarded and long answers are built from those updated trees instead.
 */
public String getAnswer(List<DEPTree> rTrees, List<ArgInfo> rArgs, String conjunction, String delim, boolean verbose, boolean trivialize)
{
List<Pair<String,String>> answers = getShortAnswers(rTrees, rArgs, conjunction, delim, verbose, trivialize);
if (verbose) answers = getLongAnswers(rTrees, rArgs, conjunction, delim);
return joinAnswers(answers, conjunction, delim);
}
/**
 * Builds one long answer per retrieved tree, rendered from the predicate node.
 * Trees are visited from last to first, mirroring the short-answer pass.
 */
private List<Pair<String,String>> getLongAnswers(List<DEPTree> rTrees, List<ArgInfo> rArgs, String conjunction, String delim)
{
    List<Pair<String,String>> answers = new ArrayList<Pair<String,String>>();
    for (int idx = rTrees.size() - 1; idx >= 0; idx--)
    {
        DEPTree tree = rTrees.get(idx);
        ArgInfo info = rArgs.get(idx);
        answers.add(getAnswerString(tree.get(info.getPredicateId()), delim));
    }
    return answers;
}
/**
 * Builds one short answer per retrieved tree, visiting trees from last to first.
 * SIDE EFFECT: each entry of {@code rTrees} is replaced with the cloned,
 * normalized tree produced by getTrees (rTrees.set below) -- callers rely on this.
 */
public List<Pair<String,String>> getShortAnswers(List<DEPTree> rTrees, List<ArgInfo> rArgs, String conjunction, String delim, boolean verbose, boolean trivialize)
{
List<Pair<String,String>> answers = new ArrayList<Pair<String,String>>();
Pair<DEPTree,SRLTree> p;
ArgInfo rArg;
int i;
for (i=rTrees.size()-1; i>=0; i--)
{
rArg = rArgs.get(i);
p = getTrees(rTrees.get(i), rArg.getPredicateId());
answers.add(getShortAnswer(p.o1, p.o2, rArg, delim, verbose, trivialize));
// publish the normalized clone back to the caller's list
rTrees.set(i, p.o1);
}
return answers;
}
/**
 * Produces a short (answer, preposition) pair for one predicate.
 * First detaches conjunction-related dependents from the predicate, then relinks
 * relative clauses and coordination in the SRL tree before delegating to
 * getShortAnswerAux. Mutates {@code rTree}/{@code sTree} in place.
 */
public Pair<String,String> getShortAnswer(DEPTree rTree, SRLTree sTree, ArgInfo rArg, String delim, boolean verbose, boolean trivialize)
{
removeDependents(rTree.get(DEPLib.ROOT_ID), sTree.getPredicate());
SRLLib.relinkRelativeClause(sTree);
SRLLib.relinkCoordination(sTree);
return getShortAnswerAux(rTree, sTree, rArg, delim, verbose, trivialize);
}
/**
 * Clones and normalizes the dependency tree (dependent links set, "u"/"i"
 * converted, first form lower-cased), and pairs it with the SRL tree rooted at
 * the given predicate. The caller's tree is never modified.
 */
private Pair<DEPTree,SRLTree> getTrees(DEPTree dTree, int predID)
{
    DEPTree copy = dTree.clone();
    copy.setDependents();
    LGLibEn.convertUnI(copy);
    LGLibEn.convertFirstFormToLowerCase(copy);
    SRLTree sTree = copy.getSRLTree(predID);
    return new Pair<DEPTree,SRLTree>(copy, sTree);
}
/**
 * Detaches dependents of the verb whose labels match P_REMOVE, re-attaching
 * them to the root so they are excluded from the generated answer.
 */
private void removeDependents(DEPNode root, DEPNode verb)
{
    List<DEPArc> detached = new ArrayList<DEPArc>();
    for (DEPArc arc : verb.getDependents())
    {
        if (!arc.isLabel(P_REMOVE))
            continue;
        arc.getNode().setHead(root);
        detached.add(arc);
    }
    verb.removeDependents(detached);
}
/**
 * Trims the verb's dependents down to the kept argument nodes (plus preserved
 * labels and subjects), then optionally "trivializes" the verb into a form of
 * "do" when no kept material remains after the verb. No-op in verbose mode.
 */
private void removeDependents(DEPNode root, DEPNode verb, Set<DEPNode> keep, boolean verbose, boolean trivialize)
{
if (verbose) return;
List<DEPArc> remove = new ArrayList<DEPArc>();
// changeDo: stays true only if nothing (except punctuation) survives after the verb
boolean changeDo = true, hasModal = false;
DEPNode dep;
for (DEPArc arc : verb.getDependents())
{
dep = arc.getNode();
if (arc.isLabel(DEPLibEn.P_AUX))
{
// modal/do/have auxiliaries allow dropping the verb form entirely below
if (dep.isPos(CTLibEn.POS_MD) || dep.isLemma(ENAux.DO) || dep.isLemma(ENAux.HAVE))
hasModal = true;
}
else if (!keep.contains(dep) && !arc.isLabel(P_PRESERVE) && !arc.isLabel(DEPLibEn.P_SBJ))
{
// not kept, not preserved, not a subject: detach to root
dep.setHead(root);
remove.add(arc);
}
else if (dep.id > verb.id && !dep.isLabel(DEPLibEn.DEP_PUNCT))
changeDo = false;
}
verb.removeDependents(remove);
// Replace a non-"be" verb with the matching form of "do" (or drop it after a modal).
if (trivialize && changeDo && !verb.isLemma(ENAux.BE))
{
if (hasModal)
verb.form = UNConstant.EMPTY;
else
{
if (verb.isPos(CTLibEn.POS_VB) || verb.isPos(CTLibEn.POS_VBP))
verb.form = ENAux.DO;
else if (verb.isPos(CTLibEn.POS_VBZ))
verb.form = ENAux.DOES;
else if (verb.isPos(CTLibEn.POS_VBD))
verb.form = ENAux.DID;
else if (verb.isPos(CTLibEn.POS_VBN))
verb.form = ENAux.DONE;
else if (verb.isPos(CTLibEn.POS_VBG))
verb.form = ENAux.DOING;
}
verb.lemma = ENAux.DO;
}
}
/**
 * Resolves the answer span: semantic-role info first, then syntactic info,
 * else the whole predicate subtree. Returns null when the requested argument
 * cannot be found (callers must handle null).
 */
private Pair<String,String> getShortAnswerAux(DEPTree rTree, SRLTree sTree, ArgInfo rArg, String delim, boolean verbose, boolean trivialize)
{
DEPNode root = rTree.get(DEPLib.ROOT_ID);
DEPNode pred = sTree.getPredicate();
if (rArg.hasSemanticInfo())
{
// match the base label and its concatenated variant
List<DEPNode> nodes = sTree.getArgumentNodes(getBaseLabels(rArg.getSemanticInfo()));
if (nodes.isEmpty())
return null;
else
{
removeDependents(root, pred, getSubNodeSet(pred, nodes), verbose, trivialize);
stripArgs(pred, nodes);
return getAnswerString(nodes, delim);
}
}
if (rArg.hasSyntacticInfo())
{
DEPNode dep = getNodeFromSyntacticInfo(pred, rArg, delim);
if (dep == null)
return null;
else
{
removeDependents(root, pred, getSubNodeSet(pred, dep), verbose, trivialize);
return getAnswerString(dep, delim);
}
}
// no argument info: answer with the (trimmed) predicate subtree
return getAnswerString(pred, delim);
}
/**
 * Drops a pre-verbal subject argument when a post-verbal concatenated argument
 * also exists, so the answer does not repeat the subject.
 */
private void stripArgs(DEPNode pred, List<DEPNode> args)
{
    if (args.size() < 2)
        return;
    DEPNode first = args.get(0);
    DEPNode second = args.get(1);
    boolean subjectBefore = first.id < pred.id && first.isLabel(DEPLibEn.P_SBJ);
    if (subjectBefore && second.id > pred.id && second.getSHead(pred, SRLLib.P_ARG_CONCATENATION) != null)
        args.remove(first);
}
/**
 * Single-element keep-set: the ancestor of {@code node} that is directly
 * attached to {@code pred}.
 */
private Set<DEPNode> getSubNodeSet(DEPNode pred, DEPNode node)
{
    Set<DEPNode> keep = new HashSet<DEPNode>();
    keep.add(getDependent(pred, node));
    return keep;
}
/**
 * Keep-set for several argument nodes: for each one, its ancestor that is
 * directly attached to {@code pred}.
 */
private Set<DEPNode> getSubNodeSet(DEPNode pred, List<DEPNode> nodes)
{
    Set<DEPNode> keep = new HashSet<DEPNode>();
    for (DEPNode candidate : nodes)
        keep.add(getDependent(pred, candidate));
    return keep;
}
/**
 * Walks up the head chain from {@code node} until reaching the node that is a
 * direct dependent of {@code pred}, and returns it. Assumes {@code pred}
 * dominates {@code node} (otherwise the walk falls off the tree).
 */
private DEPNode getDependent(DEPNode pred, DEPNode node)
{
    DEPNode current = node;
    while (!current.isDependentOf(pred))
        current = current.getHead();
    return current;
}
/**
 * Builds an exact-match pattern for a semantic label's base form and its
 * concatenated variant (e.g. "A1" and "C-A1").
 */
private Pattern getBaseLabels(String label)
{
    String base = SRLLib.getBaseLabel(label);
    String concatenated = SRLLib.PREFIX_CONCATENATION + base;
    return UTRegex.getORPatternExact(base, concatenated);
}
/**
 * Follows the chain of (label, lemma) syntactic constraints popped from
 * {@code rArg}, one level of dependents at a time. SIDE EFFECT: consumes
 * entries from rArg via popNextSyntacticInfo. Returns the matched node at the
 * end of the chain, or null if any step fails to match.
 */
private DEPNode getNodeFromSyntacticInfo(DEPNode head, ArgInfo rArg, String delim)
{
Pair<String,String> p = rArg.popNextSyntacticInfo();
String label = p.o1, lemma = p.o2;
DEPNode dep;
for (DEPArc arc : head.getDependents())
{
dep = arc.getNode();
// a "prep" constraint also matches any IN-tagged dependent
if (dep.isLemma(lemma) && (arc.isLabel(label) || (label.equals(DEPLibEn.DEP_PREP) && dep.isPos(CTLibEn.POS_IN))))
{
if (!rArg.hasSyntacticInfo())
return dep;
else
return getNodeFromSyntacticInfo(dep, rArg, delim);
}
}
return null;
}
/**
 * Renders a single node's subtree and post-processes it (punctuation stripping
 * and leading-preposition extraction) via getAnswerStringPost.
 */
private Pair<String,String> getAnswerString(DEPNode node, String delim)
{
return getAnswerStringPost(LGLibEn.getForms(node, USE_COREF, delim), delim, node.getFirstNode());
}
/**
 * Renders several argument nodes joined by {@code delim}; the leading delimiter
 * introduced by the loop is cut off before post-processing. Assumes
 * {@code nodes} is non-empty (callers check via getArgumentNodes).
 */
private Pair<String,String> getAnswerString(List<DEPNode> nodes, String delim)
{
    StringBuilder joined = new StringBuilder();
    for (DEPNode node : nodes)
        joined.append(delim).append(LGLibEn.getForms(node, USE_COREF, delim));
    String answer = joined.substring(delim.length());
    return getAnswerStringPost(answer, delim, nodes.get(0).getFirstNode());
}
/**
 * Post-processes a rendered answer: strips punctuation, and when the span
 * starts with a preposition, splits that preposition off into the second
 * element of the returned pair (dropped entirely for agentive "by").
 *
 * Fix: the original called {@code answer.substring(0, len)} unconditionally,
 * which throws StringIndexOutOfBoundsException when punctuation stripping
 * leaves an answer shorter than "lemma + delim". A length guard now treats
 * that case as "no leading preposition", which is the only consistent reading.
 */
private Pair<String,String> getAnswerStringPost(String answer, String delim, DEPNode fst)
{
answer = UTString.stripPunctuation(answer);
String prep = UNConstant.EMPTY;
if (fst.isPos(CTLibEn.POS_IN))
{
prep = fst.lemma + delim;
int len = prep.length();
// guard: stripped answer can be shorter than the preposition prefix
if (answer.length() >= len && prep.equals(answer.substring(0, len).toLowerCase()))
{
answer = answer.substring(len);
// the agentive "by" is dropped rather than reported
if (fst.isLabel(DEPLibEn.DEP_AGENT))
prep = UNConstant.EMPTY;
}
else
prep = UNConstant.EMPTY;
}
return new Pair<String,String>(answer.trim(), prep);
}
/**
 * Joins (answer, preposition) pairs into one string: comma-separated, with the
 * conjunction before the last item; a preposition is emitted only when it
 * differs from the previous item's preposition.
 * NOTE(review): entries may be null (getShortAnswerAux can return null), which
 * would NPE at p.o2 below; and an empty list makes the final substring throw.
 * Callers appear to guarantee non-empty, non-null input -- confirm.
 */
private String joinAnswers(List<Pair<String,String>> answers, String conjunction, String delim)
{
StringBuilder build = new StringBuilder();
String prep = UNConstant.EMPTY;
int i, size = answers.size();
Pair<String,String> p;
for (i=0; i<size; i++)
{
p = answers.get(i);
if (i > 0)
{
if (i+1 == size)
{
build.append(delim);
build.append(conjunction);
}
else
build.append(UNPunct.COMMA);
}
// only emit a preposition when it changes between consecutive answers
if (!prep.equals(p.o2))
{
prep = p.o2;
build.append(delim);
build.append(prep);
}
build.append(delim);
build.append(p.o1);
}
// drop the leading delimiter added by the first append
return build.substring(delim.length());
}
/**
 * Legacy entry point: answers a question tree against a retrieved tree using
 * the predicates at the given token ids. Clones both trees, normalizes them,
 * then dispatches to the short- or long-answer path.
 *
 * @deprecated superseded by {@link #getAnswer(List, List, String, String, boolean, boolean)}
 */
@Deprecated
public String getAnswer(DEPTree qdTree, DEPTree rdTree, int qVerbID, int rVerbID, String delim)
{
qdTree = qdTree.clone(); qdTree.setDependents();
rdTree = rdTree.clone(); rdTree.setDependents();
SRLTree qsTree = qdTree.getSRLTree(qVerbID);
SRLTree rsTree = rdTree.getSRLTree(rVerbID);
stripUnnecessaries(rsTree.getPredicate());
SRLLib.relinkRelativeClause(rsTree);
SRLLib.relinkCoordination(rsTree);
LGLibEn.convertUnI(qdTree);
LGLibEn.convertUnI(rdTree);
if (isShortAnswer(qsTree, rsTree))
return getShortAnswer(qdTree, rdTree, qsTree, rsTree, delim);
else
return getLongAnswer(qdTree, qsTree, rsTree.getPredicate(), delim);
}
/**
 * Removes punctuation, complementizer, and clause-marker dependents from the
 * given predicate node.
 */
@Deprecated
private void stripUnnecessaries(DEPNode node)
{
    List<DEPArc> removable = new ArrayList<DEPArc>();
    for (DEPArc arc : node.getDependents())
    {
        boolean drop = arc.isLabel(DEPLibEn.DEP_PUNCT)
                    || arc.isLabel(DEPLibEn.DEP_COMPLM)
                    || arc.isLabel(DEPLibEn.DEP_MARK);
        if (drop)
            removable.add(arc);
    }
    node.removeDependents(removable);
}
/**
 * Decides whether a short answer suffices: always for an A0/A1 passive/active
 * alternation; otherwise a retrieved predicate with an overt subject only
 * takes the short form when it heads a small clause.
 */
@Deprecated
private boolean isShortAnswer(SRLTree qsTree, SRLTree rsTree)
{
    if (matchPassive(qsTree, rsTree, "A0") || matchPassive(qsTree, rsTree, "A1"))
        return true;
    DEPNode rVerb = rsTree.getPredicate();
    DEPNode subject = rVerb.getFirstDependentByLabel(DEPLibEn.P_SBJ);
    return (subject == null) || DEPLibEn.isSmallClause(rVerb);
}
/**
 * Checks the passive/active alternation for one label, trying the
 * referent-prefixed question argument (e.g. "R-A0") before the plain one.
 */
@Deprecated
private boolean matchPassive(SRLTree qsTree, SRLTree rsTree, String label)
{
    SRLArc rArc = rsTree.getFirstArgument(label);
    if (matchPassive(qsTree.getFirstArgument(SRLLib.PREFIX_REFERENT + label), rArc))
        return true;
    return matchPassive(qsTree.getFirstArgument(label), rArc);
}
/**
 * True when one arc's node is a subject and the other's is an agent -- the
 * signature of a passive/active alternation between question and answer.
 */
@Deprecated
private boolean matchPassive(SRLArc qArc, SRLArc rArc)
{
    if (qArc == null || rArc == null)
        return false;
    DEPNode qNode = qArc.getNode();
    DEPNode rNode = rArc.getNode();
    return qNode.isLabel(DEPLibEn.P_SBJ) && rNode.isLabel(DEPLibEn.DEP_AGENT)
        || qNode.isLabel(DEPLibEn.DEP_AGENT) && rNode.isLabel(DEPLibEn.P_SBJ);
}
/**
 * Thin wrapper kept for API symmetry; delegates straight to
 * getShortAnswerFromDeclarative.
 */
@Deprecated
private String getShortAnswer(DEPTree qdTree, DEPTree rdTree, SRLTree qsTree, SRLTree rsTree, String delim)
{
return getShortAnswerFromDeclarative(qdTree, rdTree, qsTree, rsTree, delim);
}
/**
 * Finds the answer span matching the question's referent argument (e.g. the
 * wh-phrase). If the retrieved tree has no argument with the same label, a
 * cascade of fallbacks is tried: locative/temporal modifier classes for
 * "where"/"when", A1<->A2 swaps, ACOMP/ADVMOD for manner, and matching
 * prepositions. If no referent argument exists at all, predicates shared
 * between question and answer are recursed into. Returns null on failure.
 */
@Deprecated
private String getShortAnswerFromDeclarative(DEPTree qdTree, DEPTree rdTree, SRLTree qsTree, SRLTree rsTree, String delim)
{
List<SRLArc> arcs = new ArrayList<SRLArc>();
DEPNode qArg, rArg;
String answer;
// Pass 1: answer the question's referent (R-*) argument directly.
for (SRLArc qArc : qsTree.getArguments())
{
if (qArc.isLabel(SRLLib.P_ARG_REF))
{
arcs = rsTree.getArguments(getBaseLabels(qArc.getLabel()));
if (arcs.isEmpty())
{
// fallback cascade when the exact label is absent in the answer tree
String label = SRLLib.getBaseLabel(qArc.getLabel());
DEPNode node = qArc.getNode();
if (node.isLemma("where") || label.equals(SRLLib.ARGM_LOC) || label.equals(SRLLib.ARGM_DIR) || label.equals(SRLLib.ARGM_GOL))
arcs = rsTree.getArguments(Pattern.compile("^(AM-LOC|AM-DIR|AM-GOL)$"));
else if (node.isLemma("when") || label.equals(SRLLib.ARGM_TMP))
arcs = rsTree.getArguments(Pattern.compile("^AM-TMP$"));
else if (label.equals("A1"))
arcs = rsTree.getArguments(getBaseLabels("A2"));
else if (label.equals("A2"))
arcs = rsTree.getArguments(getBaseLabels("A1"));
else if (label.equals("AM-MNR"))
{
// manner: fall back to adjectival/adverbial dependents
for (SRLArc arc : rsTree.getArguments())
{
rArg = arc.getNode();
if (rArg.isLabel(DEPLibEn.DEP_ACOMP) || rArg.isLabel(DEPLibEn.DEP_ADVMOD))
arcs.add(arc);
}
}
else if (node.isPos(CTLibEn.POS_IN))
{
// preposition question word: match same-lemma prepositions
for (SRLArc arc : rsTree.getArguments())
{
rArg = arc.getNode();
if (rArg.isPos(CTLibEn.POS_IN) && rArg.isLemma(node.lemma))
arcs.add(arc);
}
}
}
return arcs.isEmpty() ? null : getAnswer(arcs, delim);
}
}
// Pass 2: no referent argument -- recurse into predicates shared by both trees.
for (SRLArc qArc : qsTree.getArguments())
{
qArg = qArc.getNode();
if (qArg.getFeat(DEPLibEn.FEAT_PB) != null)
{
arcs = rsTree.getArguments(getBaseLabels(qArc.getLabel()));
for (SRLArc rArc : arcs)
{
rArg = rArc.getNode();
if (rArg.getFeat(DEPLibEn.FEAT_PB) != null && rArg.isLemma(qArg.lemma))
{
answer = getShortAnswer(qdTree, rdTree, qdTree.getSRLTree(qArg), rdTree.getSRLTree(rArg), delim);
if (answer != null) return answer;
}
}
}
}
return null;
}
/**
 * Builds a full-sentence answer by walking the retrieved predicate's
 * dependents (getLongAnswerFromDeclarative), then strips the leading
 * delimiter, punctuation, and capitalizes via getAnswerPost.
 */
@Deprecated
private String getLongAnswer(DEPTree qdTree, SRLTree qsTree, DEPNode rVerb, String delim)
{
StringBuilder build = new StringBuilder();
getLongAnswerFromDeclarative(qdTree, qsTree, rVerb, delim, build);
return getAnswerPost(build, delim);
}
/**
 * Appends the retrieved verb and its relevant dependents to {@code build}, in
 * surface order: the verb form is inserted just before the first post-verbal
 * dependent (or at the end if none). Dependents are kept when unlabeled in
 * SRL, modal/negation, or matched against the question's labels/aspects;
 * shared predicates are expanded recursively.
 */
@Deprecated
private void getLongAnswerFromDeclarative(DEPTree qdTree, SRLTree qsTree, DEPNode rVerb, String delim, StringBuilder build)
{
Set<String> qPreps = getLemmaSet(qsTree, CTLibEn.POS_IN);
Set<String> qLabels = qsTree.getBaseLabelSet();
// bMod: [locative, temporal, manner] aspects asked about by the question
boolean[] bMod = getModifierAspects(qsTree);
boolean notAdded = true;
DEPNode rDep, qDep;
DEPArc rHead;
for (DEPArc rArc : rVerb.getDependents())
{
rDep = rArc.getNode();
rHead = rDep.getSHead(rVerb);
// emit the verb itself before the first dependent that follows it
if (notAdded && rDep.id > rVerb.id)
{
build.append(delim);
build.append(rVerb.form);
notAdded = false;
}
if (rArc.isLabel(DEPLibEn.DEP_CONJ) || rArc.isLabel(DEPLibEn.DEP_CC) || rArc.isLabel(DEPLibEn.DEP_PRECONJ))
continue;
else if (rHead == null || rHead.isLabel(SRLLib.ARGM_MOD) || rHead.isLabel(SRLLib.ARGM_NEG))
{
build.append(delim);
build.append(LGLibEn.getForms(rDep, USE_COREF, delim));
}
else if (containsLabel(qsTree, qLabels, qPreps, SRLLib.getBaseLabel(rHead.getLabel()), rDep, bMod[0], bMod[1], bMod[2]))
{
if (rDep.getFeat(DEPLibEn.FEAT_PB) != null && (qDep = findPredicateInQuestion(qsTree, rHead.getLabel(), rDep.lemma)) != null)
{
// shared predicate: expand its own clause recursively
getLongAnswerFromDeclarative(qdTree, qdTree.getSRLTree(qDep), rDep, delim, build);
}
else
{
build.append(delim);
build.append(LGLibEn.getForms(rDep, USE_COREF, delim));
}
}
}
// verb had no post-verbal dependents: append it last
if (notAdded)
{
build.append(delim);
build.append(rVerb.form);
}
}
/**
 * Collects the lemmas of all argument nodes in the SRL tree that carry the
 * given POS tag.
 */
private Set<String> getLemmaSet(SRLTree sTree, String pos)
{
    Set<String> lemmas = new HashSet<String>();
    for (SRLArc arc : sTree.getArguments())
    {
        DEPNode argument = arc.getNode();
        if (argument.isPos(pos))
            lemmas.add(argument.lemma);
    }
    return lemmas;
}
/**
 * Returns which modifier aspects the question asks about, as
 * {locative, temporal, manner} flags ("where"/"when" lemmas count as
 * locative/temporal respectively).
 */
private boolean[] getModifierAspects(SRLTree qsTree)
{
boolean[] b = {false, false, false};
String label;
DEPNode arg;
for (SRLArc arc : qsTree.getArguments())
{
label = SRLLib.getBaseLabel(arc.getLabel());
arg = arc.getNode();
if (arg.isLemma("where") || isLocative(label))
b[0] = true;
else if (arg.isLemma("when") || isTemporal(label))
b[1] = true;
else if (isManner(label))
b[2] = true;
}
return b;
}
/** Location-like modifier labels: locative, directional, or goal. */
@Deprecated
private boolean isLocative(String label)
{
    if (label.equals(SRLLib.ARGM_LOC))
        return true;
    if (label.equals(SRLLib.ARGM_DIR))
        return true;
    return label.equals(SRLLib.ARGM_GOL);
}
/** Temporal modifier label (AM-TMP). */
@Deprecated
private boolean isTemporal(String label)
{
return label.equals(SRLLib.ARGM_TMP);
}
/** Manner modifier label (AM-MNR). */
@Deprecated
private boolean isManner(String label)
{
return label.equals(SRLLib.ARGM_MNR);
}
/**
 * Decides whether an answer dependent with semantic label {@code rLabel} is
 * relevant to the question: exact label match, matching locative/temporal/
 * manner aspect, A1<->A2 swap, or a preposition the question also uses.
 */
@Deprecated
private boolean containsLabel(SRLTree qsTree, Set<String> qLabels, Set<String> qPreps, String rLabel, DEPNode rDep, boolean qLocative, boolean qTemporal, boolean qManner)
{
if (qLabels.contains(rLabel))
return true;
if (isLocative(rLabel) && qLocative)
return true;
if (isTemporal(rLabel) && qTemporal)
return true;
if ((rDep.isLabel(DEPLibEn.DEP_ACOMP) || rDep.isLabel(DEPLibEn.DEP_ADVMOD)) && qManner)
return true;
// A1/A2 are close enough to substitute for one another
if (rLabel.equals(SRLLib.ARG1) && qLabels.contains(SRLLib.ARG2))
return true;
if (rLabel.equals(SRLLib.ARG2) && qLabels.contains(SRLLib.ARG1))
return true;
if (rDep.isPos(CTLibEn.POS_IN) && qPreps.contains(rDep.lemma))
return true;
return false;
}
/**
 * Finds a predicate argument in the question: prefer the argument bearing the
 * given semantic label (if it is itself a predicate, i.e. has a PropBank
 * feature); otherwise any predicate argument with the requested lemma.
 * Returns null when neither exists.
 */
@Deprecated
private DEPNode findPredicateInQuestion(SRLTree qsTree, String label, String lemma)
{
    SRLArc labeled = qsTree.getFirstArgument(label);
    if (labeled != null && labeled.getNode().getFeat(DEPLibEn.FEAT_PB) != null)
        return labeled.getNode();
    for (SRLArc arc : qsTree.getArguments())
    {
        DEPNode candidate = arc.getNode();
        if (candidate.getFeat(DEPLibEn.FEAT_PB) != null && candidate.isLemma(lemma))
            return candidate;
    }
    return null;
}
/**
 * Renders the given argument arcs as one answer string. When the first arc is
 * an agent, the capitalized leading "By" (produced by getAnswerPost) is
 * stripped so the passive marker does not appear in the answer.
 */
@Deprecated
private String getAnswer(List<SRLArc> arcs, String delim)
{
StringBuilder build = new StringBuilder();
for (SRLArc arc : arcs)
{
build.append(delim);
build.append(LGLibEn.getForms(arc.getNode(), USE_COREF, delim));
}
String s = getAnswerPost(build, delim);
SRLArc arc = arcs.get(0);
if (arc.getNode().isLabel(DEPLibEn.DEP_AGENT) && s.startsWith("By"))
s = s.substring(2).trim();
return s;
}
/**
 * Final clean-up of a built answer: drops the leading delimiter, strips
 * punctuation, and upper-cases the first character.
 */
@Deprecated
private String getAnswerPost(StringBuilder build, String delim)
{
    String answer = build.substring(delim.length());
    answer = UTString.stripPunctuation(answer);
    return UTString.convertFirstCharToUpper(answer);
}
}
| |
/**
* Copyright 2012 Nikita Koksharov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.corundumstudio.socketio.namespace;
import io.netty.util.internal.PlatformDependent;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ConcurrentMap;
import com.corundumstudio.socketio.AckMode;
import com.corundumstudio.socketio.AckRequest;
import com.corundumstudio.socketio.BroadcastOperations;
import com.corundumstudio.socketio.Configuration;
import com.corundumstudio.socketio.MultiTypeArgs;
import com.corundumstudio.socketio.SocketIOClient;
import com.corundumstudio.socketio.SocketIONamespace;
import com.corundumstudio.socketio.annotation.ScannerEngine;
import com.corundumstudio.socketio.listener.ConnectListener;
import com.corundumstudio.socketio.listener.DataListener;
import com.corundumstudio.socketio.listener.DisconnectListener;
import com.corundumstudio.socketio.listener.ExceptionListener;
import com.corundumstudio.socketio.listener.MultiTypeEventListener;
import com.corundumstudio.socketio.protocol.JsonSupport;
import com.corundumstudio.socketio.protocol.Packet;
import com.corundumstudio.socketio.store.StoreFactory;
import com.corundumstudio.socketio.store.pubsub.JoinLeaveMessage;
import com.corundumstudio.socketio.store.pubsub.PubSubStore;
import com.corundumstudio.socketio.transport.NamespaceClient;
/**
* Hub object for all clients in one namespace.
* Namespace shares by different namespace-clients.
*
* @see com.corundumstudio.socketio.transport.NamespaceClient
*/
public class Namespace implements SocketIONamespace {
public static final String DEFAULT_NAME = "";
// Scans annotated listener objects registered via addListeners.
private final ScannerEngine engine = new ScannerEngine();
// Event name -> registered listeners for that event.
private final ConcurrentMap<String, EventEntry<?>> eventListeners = PlatformDependent.newConcurrentHashMap();
private final Queue<ConnectListener> connectListeners = new ConcurrentLinkedQueue<ConnectListener>();
private final Queue<DisconnectListener> disconnectListeners = new ConcurrentLinkedQueue<DisconnectListener>();
// Session id -> client, for every client connected to this namespace.
private final Map<UUID, SocketIOClient> allClients = PlatformDependent.newConcurrentHashMap();
// Bidirectional room membership indexes, kept in sync by join/leave.
private final ConcurrentMap<String, Set<UUID>> roomClients = PlatformDependent.newConcurrentHashMap();
private final ConcurrentMap<UUID, Set<String>> clientRooms = PlatformDependent.newConcurrentHashMap();
private final String name;
private final AckMode ackMode;
private final JsonSupport jsonSupport;
private final StoreFactory storeFactory;
private final ExceptionListener exceptionListener;
/**
 * Creates the namespace with the given name, pulling collaborators from the
 * server configuration.
 */
public Namespace(String name, Configuration configuration) {
// redundant super() call retained for byte-compatibility
super();
this.name = name;
this.jsonSupport = configuration.getJsonSupport();
this.storeFactory = configuration.getStoreFactory();
this.exceptionListener = configuration.getExceptionListener();
this.ackMode = configuration.getAckMode();
}
/** Registers a connected client under its session id. */
public void addClient(SocketIOClient client) {
allClients.put(client.getSessionId(), client);
}
/** Returns this namespace's name ({@link #DEFAULT_NAME} for the root namespace). */
public String getName() {
return name;
}
/**
 * Registers a multi-type listener for the given event name, creating the
 * event entry on first use; {@code putIfAbsent} resolves races between
 * concurrent registrations. Also registers the payload classes with the JSON
 * layer so arguments can be decoded.
 *
 * Fix: added the same {@code @SuppressWarnings({"unchecked", "rawtypes"})}
 * that the structurally identical {@link #addEventListener} carries -- this
 * method uses the raw {@code EventEntry} type in exactly the same way, so the
 * two registration paths now compile warning-consistently.
 */
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
public void addMultiTypeEventListener(String eventName, MultiTypeEventListener listener,
Class<?>... eventClass) {
EventEntry entry = eventListeners.get(eventName);
if (entry == null) {
entry = new EventEntry();
EventEntry<?> oldEntry = eventListeners.putIfAbsent(eventName, entry);
if (oldEntry != null) {
entry = oldEntry;
}
}
entry.addListener(listener);
jsonSupport.addEventMapping(name, eventName, eventClass);
}
/**
 * Registers a typed listener for the given event name, creating the event
 * entry on first use; putIfAbsent resolves races between concurrent
 * registrations. Also registers the payload class with the JSON layer.
 */
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
public <T> void addEventListener(String eventName, Class<T> eventClass, DataListener<T> listener) {
EventEntry entry = eventListeners.get(eventName);
if (entry == null) {
entry = new EventEntry<T>();
EventEntry<?> oldEntry = eventListeners.putIfAbsent(eventName, entry);
if (oldEntry != null) {
entry = oldEntry;
}
}
entry.addListener(listener);
jsonSupport.addEventMapping(name, eventName, eventClass);
}
/**
 * Dispatches an incoming event to every listener registered for its name.
 * Listener exceptions go to the exception listener; with AUTO_SUCCESS_ONLY,
 * a failing listener suppresses the automatic ack (early return), whereas AUTO
 * still acks after a failure.
 */
@SuppressWarnings({"rawtypes", "unchecked"})
public void onEvent(NamespaceClient client, String eventName, List<Object> args, AckRequest ackRequest) {
EventEntry entry = eventListeners.get(eventName);
if (entry == null) {
// no listeners registered for this event
return;
}
try {
Queue<DataListener> listeners = entry.getListeners();
for (DataListener dataListener : listeners) {
Object data = getEventData(args, dataListener);
dataListener.onData(client, data, ackRequest);
}
} catch (Exception e) {
exceptionListener.onEventException(e, args, client);
if (ackMode == AckMode.AUTO_SUCCESS_ONLY) {
return;
}
}
sendAck(ackRequest);
}
/**
 * Sends an empty automatic ack for AUTO / AUTO_SUCCESS_ONLY modes; a no-op if
 * the listener already sent ack data during {@code DataListener#onData}.
 */
private void sendAck(AckRequest ackRequest) {
    boolean autoAck = (ackMode == AckMode.AUTO) || (ackMode == AckMode.AUTO_SUCCESS_ONLY);
    if (!autoAck) {
        return;
    }
    ackRequest.sendAckData(Collections.emptyList());
}
/**
 * Selects the payload for a listener: multi-type listeners receive the whole
 * argument list wrapped in MultiTypeArgs; plain listeners get only the first
 * argument, or null when the event carried none.
 */
private Object getEventData(List<Object> args, DataListener dataListener) {
    if (dataListener instanceof MultiTypeEventListener) {
        return new MultiTypeArgs(args);
    }
    return args.isEmpty() ? null : args.get(0);
}
/** Registers a listener invoked whenever a client leaves this namespace. */
@Override
public void addDisconnectListener(DisconnectListener listener) {
disconnectListeners.add(listener);
}
/**
 * Handles a client disconnect: deregisters it, removes it from the
 * namespace-level room, publishes a LEAVE message to the store, then notifies
 * disconnect listeners (exceptions routed to the exception listener).
 * NOTE(review): the JoinLeaveMessage is built with getName() as both room and
 * namespace here, unlike leaveRoom which passes the actual room -- confirm
 * this is intentional (the namespace itself acts as a room).
 */
public void onDisconnect(SocketIOClient client) {
allClients.remove(client.getSessionId());
leave(getName(), client.getSessionId());
storeFactory.pubSubStore().publish(PubSubStore.LEAVE, new JoinLeaveMessage(client.getSessionId(), getName(), getName()));
try {
for (DisconnectListener listener : disconnectListeners) {
listener.onDisconnect(client);
}
} catch (Exception e) {
exceptionListener.onDisconnectException(e, client);
}
}
/** Registers a listener invoked whenever a client joins this namespace. */
@Override
public void addConnectListener(ConnectListener listener) {
connectListeners.add(listener);
}
/**
 * Handles a client connect: joins it to the namespace-level room, publishes a
 * JOIN message to the store, then notifies connect listeners (exceptions
 * routed to the exception listener).
 */
public void onConnect(SocketIOClient client) {
join(getName(), client.getSessionId());
storeFactory.pubSubStore().publish(PubSubStore.JOIN, new JoinLeaveMessage(client.getSessionId(), getName(), getName()));
try {
for (ConnectListener listener : connectListeners) {
listener.onConnect(client);
}
} catch (Exception e) {
exceptionListener.onConnectException(e, client);
}
}
/** Broadcast operations spanning every client in this namespace. */
@Override
public BroadcastOperations getBroadcastOperations() {
return new BroadcastOperations(allClients.values(), storeFactory);
}
/** Broadcast operations limited to the clients currently in the given room. */
@Override
public BroadcastOperations getRoomOperations(String room) {
return new BroadcastOperations(getRoomClients(room), storeFactory);
}
/** Hash is derived from the namespace name alone, consistent with equals. */
@Override
public int hashCode() {
    int nameHash = (name == null) ? 0 : name.hashCode();
    return 31 + nameHash;
}
/** Two namespaces are equal iff they are the exact same class and share a name. */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
        return false;
    }
    Namespace other = (Namespace) obj;
    if (name == null) {
        return other.name == null;
    }
    return name.equals(other.name);
}
/** Scans the object's own class for annotated listener methods and registers them. */
@Override
public void addListeners(Object listeners) {
addListeners(listeners, listeners.getClass());
}
/** Scans the given class for annotated listener methods on the object and registers them. */
@Override
public void addListeners(Object listeners, Class listenersClass) {
engine.scan(this, listeners, listenersClass);
}
/** Adds the session to the room locally and publishes the JOIN to the store. */
public void joinRoom(String room, UUID sessionId) {
join(room, sessionId);
storeFactory.pubSubStore().publish(PubSubStore.JOIN, new JoinLeaveMessage(sessionId, room, getName()));
}
/** Sends the packet to every client currently in the given room. */
public void dispatch(String room, Packet packet) {
    for (SocketIOClient member : getRoomClients(room)) {
        member.send(packet);
    }
}
/**
 * Adds {@code value} to the concurrent set mapped at {@code key}, creating the
 * set on first use (putIfAbsent resolves creation races). After the add, the
 * mapping is re-read: if a concurrent leave() replaced/removed the set, the
 * whole operation retries recursively so the value is not lost.
 */
private <K, V> void join(ConcurrentMap<K, Set<V>> map, K key, V value) {
Set<V> clients = map.get(key);
if (clients == null) {
clients = Collections.newSetFromMap(PlatformDependent.<V, Boolean>newConcurrentHashMap());
Set<V> oldClients = map.putIfAbsent(key, clients);
if (oldClients != null) {
clients = oldClients;
}
}
clients.add(value);
// object may be changed due to other concurrent call
if (clients != map.get(key)) {
// re-join if queue has been replaced
join(map, key, value);
}
}
/** Updates both membership indexes: room -> sessions and session -> rooms. */
public void join(String room, UUID sessionId) {
join(roomClients, room, sessionId);
join(clientRooms, sessionId, room);
}
/** Removes the session from the room locally and publishes the LEAVE to the store. */
public void leaveRoom(String room, UUID sessionId) {
leave(room, sessionId);
storeFactory.pubSubStore().publish(PubSubStore.LEAVE, new JoinLeaveMessage(sessionId, room, getName()));
}
/**
 * Removes {@code sessionId} from the set mapped at {@code room} and cleans up
 * an emptied mapping. The cleanup uses the conditional
 * {@code remove(key, value)}: it only removes when the mapped set equals the
 * empty set, so a concurrent join that re-populated the set wins the race.
 */
private <K, V> void leave(ConcurrentMap<K, Set<V>> map, K room, V sessionId) {
Set<V> clients = map.get(room);
if (clients == null) {
return;
}
clients.remove(sessionId);
if (clients.isEmpty()) {
map.remove(room, Collections.emptySet());
}
}
/** Updates both membership indexes: room -> sessions and session -> rooms. */
public void leave(String room, UUID sessionId) {
leave(roomClients, room, sessionId);
leave(clientRooms, sessionId, room);
}
/**
 * Read-only view of the rooms joined by the client's session; empty set when
 * the session has no room entry.
 */
public Set<String> getRooms(SocketIOClient client) {
    Set<String> rooms = clientRooms.get(client.getSessionId());
    if (rooms == null) {
        return Collections.emptySet();
    }
    return Collections.unmodifiableSet(rooms);
}
/**
 * Resolves a room's session ids to live clients; sessions with no registered
 * client (already disconnected) are skipped. Returns an empty list for an
 * unknown room.
 */
public Iterable<SocketIOClient> getRoomClients(String room) {
    Set<UUID> sessionIds = roomClients.get(room);
    if (sessionIds == null) {
        return Collections.emptyList();
    }
    List<SocketIOClient> members = new ArrayList<SocketIOClient>();
    for (UUID sessionId : sessionIds) {
        SocketIOClient member = allClients.get(sessionId);
        if (member != null) {
            members.add(member);
        }
    }
    return members;
}
/** Unmodifiable view of every client connected to this namespace. */
public Collection<SocketIOClient> getAllClients() {
return Collections.unmodifiableCollection(allClients.values());
}
/** JSON (de)serialization support configured for this server. */
public JsonSupport getJsonSupport() {
return jsonSupport;
}
/** Looks up a client by session id; null when not connected to this namespace. */
public SocketIOClient getClient(UUID uuid) {
return allClients.get(uuid);
}
}
| |
/*
* Copyright 2011 SpringSource
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.grails.plugins;
import grails.core.GrailsApplication;
import grails.io.IOUtils;
import grails.io.ResourceUtils;
import grails.plugins.exceptions.PluginException;
import grails.util.BuildSettings;
import org.grails.core.io.StaticResourceLoader;
import org.grails.io.support.GrailsResourceUtils;
import org.springframework.core.io.Resource;
import org.springframework.core.io.UrlResource;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.core.io.support.ResourcePatternResolver;
import org.springframework.util.StringUtils;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.nio.charset.Charset;
import java.util.*;
/**
* Models a pre-compiled binary plugin.
*
* @see grails.plugins.GrailsPlugin
*
* @author Graeme Rocher
* @since 2.0
*/
@SuppressWarnings("rawtypes")
public class BinaryGrailsPlugin extends DefaultGrailsPlugin {
// Names/locations of the precompiled-view index shipped inside a binary plugin.
public static final String VIEWS_PROPERTIES = "views.properties";
public static final String RELATIVE_VIEWS_PROPERTIES = "gsp/views.properties";
public static final char UNDERSCORE = '_';
public static final String PROPERTIES_EXTENSION = ".properties";
public static final String DEFAULT_PROPERTIES_ENCODING = "UTF-8";
public static final String PLUGIN_DESCRIPTOR_PATH = "META-INF/grails-plugin.xml";
// Parsed META-INF/grails-plugin.xml for this plugin.
private final BinaryGrailsPluginDescriptor descriptor;
private Class[] providedArtefacts = {};
// View name -> precompiled view class, populated by initializeViewMap.
private final Map<String, Class> precompiledViewMap = new HashMap<String, Class>();
private final Resource baseResource;
private final Resource baseResourcesResource;
// Whether the plugin is packaged as a jar (vs. an exploded project directory).
private final boolean isJar;
// Source project directory when not packaged as a jar; null otherwise.
private final File projectDirectory;
/**
 * Creates a binary plugin instance: resolves the plugin's root resource,
 * determines jar vs. exploded-directory packaging, locates the project
 * directory (marking the plugin as "base" when it lives inside the build's
 * base dir), and initializes provided artefacts and the precompiled view map
 * from the descriptor.
 *
 * @param pluginClass The plugin class
 * @param descriptor The META-INF/grails-plugin.xml descriptor
 * @param application The application
 */
public BinaryGrailsPlugin(Class<?> pluginClass, BinaryGrailsPluginDescriptor descriptor, GrailsApplication application) {
super(pluginClass, application);
this.descriptor = descriptor;
URL rootResource = IOUtils.findRootResource(pluginClass);
if(rootResource == null) {
throw new PluginException("Cannot evaluate plugin location for plugin " + pluginClass);
}
this.baseResource = new UrlResource(rootResource);
try {
this.isJar = ResourceUtils.isJarURL(baseResource.getURL());
} catch (IOException e) {
throw new PluginException("Cannot evaluate plugin location for plugin " + pluginClass, e);
}
this.projectDirectory = isJar ? null : IOUtils.findApplicationDirectoryFile(pluginClass);
if(BuildSettings.BASE_DIR != null && projectDirectory != null) {
try {
if(projectDirectory.getCanonicalPath().startsWith(BuildSettings.BASE_DIR.getCanonicalPath())) {
isBase = true;
}
} catch (IOException e) {
// ignore: canonical-path failure just means the plugin is not marked as base
}
}
URL rootResourcesURL = IOUtils.findRootResourcesURL(pluginClass);
if(rootResourcesURL == null) {
throw new PluginException("Cannot evaluate plugin location for plugin " + pluginClass);
}
this.baseResourcesResource= new UrlResource(rootResourcesURL);
if (descriptor != null) {
// NOTE(review): "getProvidedlassNames" (missing 'C') is the descriptor's
// actual API name -- do not "fix" the call without renaming the callee.
initializeProvidedArtefacts(descriptor.getProvidedlassNames());
initializeViewMap(descriptor);
}
}
public File getProjectDirectory() {
return projectDirectory;
}
protected void initializeViewMap(BinaryGrailsPluginDescriptor descriptor) {
final Resource descriptorResource = descriptor.getResource();
Resource viewsPropertiesResource = null;
try {
viewsPropertiesResource = descriptorResource.createRelative(VIEWS_PROPERTIES);
} catch (IOException e) {
// ignore
}
if (viewsPropertiesResource == null || !viewsPropertiesResource.exists()) {
try {
String urlString = descriptorResource.getURL().toString();
if(urlString.endsWith(PLUGIN_DESCRIPTOR_PATH)) {
urlString = urlString.substring(0, urlString.length() - PLUGIN_DESCRIPTOR_PATH.length());
URL newUrl = new URL(urlString + RELATIVE_VIEWS_PROPERTIES);
viewsPropertiesResource = new UrlResource(newUrl);
}
} catch (IOException e) {
// ignore
}
}
if (viewsPropertiesResource == null || !viewsPropertiesResource.exists()) {
return;
}
Properties viewsProperties = new Properties();
InputStream input = null;
try {
input = viewsPropertiesResource.getInputStream();
viewsProperties.load(input);
for (Object view : viewsProperties.keySet()) {
String viewName = view.toString();
final String viewClassName = viewsProperties.getProperty(viewName);
try {
final Class<?> viewClass = grailsApplication.getClassLoader().loadClass(viewClassName);
precompiledViewMap.put(viewName, viewClass);
} catch (Throwable e) {
throw new PluginException("Failed to initialize view ["+viewName+"] from plugin ["+ getName()+ "] : " + e.getMessage(), e);
}
}
} catch (IOException e) {
LOG.error("Error loading views for binary plugin ["+this+"]: " + e.getMessage(),e);
} finally {
try {
if (input != null) input.close();
} catch (IOException e) {
// ignore
}
}
}
protected void initializeProvidedArtefacts(List<String> classNames) {
List<Class> artefacts = new ArrayList<Class>();
if (!classNames.isEmpty()) {
final ClassLoader classLoader = grailsApplication.getClassLoader();
for (String className : classNames) {
try {
artefacts.add(classLoader.loadClass(className));
} catch (Throwable e) {
throw new PluginException("Failed to initialize class ["+className+"] from plugin ["+ getName()+ "] : " + e.getMessage(), e);
}
}
}
artefacts.addAll(Arrays.asList(super.getProvidedArtefacts()));
providedArtefacts = artefacts.toArray(new Class[artefacts.size()]);
}
@Override
public Class<?>[] getProvidedArtefacts() {
return providedArtefacts;
}
/**
* @return The META-INF/grails-plugin.xml descriptor
*/
public BinaryGrailsPluginDescriptor getBinaryDescriptor() {
return descriptor;
}
/**
* Resolves a static resource contained within this binary plugin
*
* @param path The relative path to the static resource
* @return The resource or null if it doesn't exist
*/
public Resource getResource(String path) {
final Resource descriptorResource = descriptor.getResource();
try {
Resource resource = descriptorResource.createRelative("static" + path);
if (resource.exists()) {
return resource;
}
} catch (IOException e) {
return null;
}
return null;
}
/**
* Obtains all properties for this binary plugin for the given locale.
*
* Note this method does not cache so clients should in general cache the results of this method.
*
* @param locale The locale
* @return The properties or null if non exist
*/
public Properties getProperties(final Locale locale) {
Resource url = this.baseResourcesResource;
Properties properties = null;
if(url != null) {
StaticResourceLoader resourceLoader = new StaticResourceLoader();
resourceLoader.setBaseResource(url);
ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver(resourceLoader);
try {
// first load all properties
Resource[] resources = resolver.getResources('*' + PROPERTIES_EXTENSION);
resources = resources.length > 0 ? filterResources(resources, locale) : resources;
if(resources.length > 0) {
properties = new Properties();
// message bundles are locale specific. The more underscores the locale has the more specific the locale
// so we order by the number of underscores present so that the most specific appears
Arrays.sort(resources, new Comparator<Resource>() {
@Override
public int compare(Resource o1, Resource o2) {
String f1 = o1.getFilename();
String f2 = o2.getFilename();
int firstUnderscoreCount = StringUtils.countOccurrencesOf(f1, "_");
int secondUnderscoreCount = StringUtils.countOccurrencesOf(f2, "_");
if(firstUnderscoreCount == secondUnderscoreCount) {
return 0;
}
else {
return firstUnderscoreCount > secondUnderscoreCount ? 1 : -1;
}
}
});
loadFromResources(properties, resources);
}
} catch (IOException e) {
return null;
}
}
return properties;
}
private Resource[] filterResources(Resource[] resources, Locale locale) {
List<Resource> finalResources = new ArrayList<Resource>(resources.length);
for (Resource resource : resources) {
String fn = resource.getFilename();
if(fn.indexOf(UNDERSCORE) > -1) {
if(fn.endsWith(UNDERSCORE + locale.toString() + PROPERTIES_EXTENSION)) {
finalResources.add(resource);
}
else if(fn.endsWith(UNDERSCORE + locale.getLanguage() + UNDERSCORE + locale.getCountry() + PROPERTIES_EXTENSION)) {
finalResources.add(resource);
}
else if(fn.endsWith(UNDERSCORE + locale.getLanguage() + PROPERTIES_EXTENSION)) {
finalResources.add(resource);
}
}
else {
finalResources.add(resource);
}
}
return finalResources.toArray(new Resource[finalResources.size()]);
}
private void loadFromResources(Properties properties, Resource[] resources) throws IOException {
for (Resource messageResource : resources) {
InputStream inputStream = messageResource.getInputStream();
try {
properties.load(new InputStreamReader(inputStream, Charset.forName(System.getProperty("file.encoding", DEFAULT_PROPERTIES_ENCODING))));
} finally {
try {
inputStream.close();
} catch (IOException e) {
// ignore
}
}
}
}
/**
* Resolves a view for the given view name.
*
* @param viewName The view name
*
* @return The view class which is a subclass of GroovyPage
*/
public Class resolveView(String viewName) {
// this is a workaround for GRAILS-9234; in that scenario the viewName will be
// "/WEB-INF/grails-app/views/plugins/plugin9234-0.1/junk/_book.gsp" with the
// extra "/plugins/plugin9234-0.1". I'm not sure if that's needed elsewhere, so
// removing it here for the lookup
String extraPath = "/plugins/" + getName() + '-' + getVersion() + '/';
viewName = viewName.replace(extraPath, "/");
return precompiledViewMap.get(viewName);
}
}
| |
package org.testcontainers.containers;
import static java.net.HttpURLConnection.HTTP_OK;
import static java.util.stream.Collectors.toSet;
import java.time.Duration;
import java.util.Set;
import java.util.stream.Stream;
import org.testcontainers.containers.wait.strategy.HttpWaitStrategy;
import org.testcontainers.containers.wait.strategy.LogMessageWaitStrategy;
import org.testcontainers.containers.wait.strategy.WaitAllStrategy;
import org.testcontainers.containers.wait.strategy.WaitStrategy;
import org.testcontainers.utility.ComparableVersion;
import org.testcontainers.utility.DockerImageName;
import org.testcontainers.utility.LicenseAcceptance;
import org.testcontainers.utility.MountableFile;
/**
 * Testcontainer for Neo4j.
 *
 * @param <S> "SELF" to be used in the <code>withXXX</code> methods.
 * @author Michael J. Simons
 */
public class Neo4jContainer<S extends Neo4jContainer<S>> extends GenericContainer<S> {
    /**
     * The image defaults to the official Neo4j image: <a href="https://hub.docker.com/_/neo4j/">Neo4j</a>.
     */
    private static final DockerImageName DEFAULT_IMAGE_NAME = DockerImageName.parse("neo4j");
    /**
     * The default tag (version) to use.
     */
    private static final String DEFAULT_TAG = "4.4";
    private static final String ENTERPRISE_TAG = DEFAULT_TAG + "-enterprise";
    /**
     * Default port for the binary Bolt protocol.
     */
    private static final int DEFAULT_BOLT_PORT = 7687;
    /**
     * The port of the transactional HTTPS endpoint: <a href="https://neo4j.com/docs/rest-docs/current/">Neo4j REST API</a>.
     */
    private static final int DEFAULT_HTTPS_PORT = 7473;
    /**
     * The port of the transactional HTTP endpoint: <a href="https://neo4j.com/docs/rest-docs/current/">Neo4j REST API</a>.
     */
    private static final int DEFAULT_HTTP_PORT = 7474;
    /**
     * The official image requires a change of password by default from "neo4j" to something else. This defaults to "password".
     */
    private static final String DEFAULT_ADMIN_PASSWORD = "password";
    // Format for the NEO4J_AUTH environment variable: "neo4j/<password>".
    private static final String AUTH_FORMAT = "neo4j/%s";
    // True when the configured image is the official "neo4j" image (enterprise switch allowed).
    private final boolean standardImage;
    private String adminPassword = DEFAULT_ADMIN_PASSWORD;
    /**
     * Creates a Neo4jContainer using the official Neo4j docker image.
     * @deprecated use {@link #Neo4jContainer(DockerImageName)} instead
     */
    @Deprecated
    public Neo4jContainer() {
        this(DEFAULT_IMAGE_NAME.withTag(DEFAULT_TAG));
    }
    /**
     * Creates a Neo4jContainer using a specific docker image.
     *
     * @param dockerImageName The docker image to use.
     */
    public Neo4jContainer(String dockerImageName) {
        this(DockerImageName.parse(dockerImageName));
    }
    /**
     * Creates a Neo4jContainer using a specific docker image.
     *
     * @param dockerImageName The docker image to use.
     */
    public Neo4jContainer(final DockerImageName dockerImageName) {
        super(dockerImageName);
        this.standardImage = dockerImageName.getUnversionedPart()
            .equals(DEFAULT_IMAGE_NAME.getUnversionedPart());
        dockerImageName.assertCompatibleWith(DEFAULT_IMAGE_NAME);
        // The container is ready once Bolt reports itself enabled in the log AND the
        // HTTP endpoint answers with 200 OK.
        WaitStrategy waitForBolt = new LogMessageWaitStrategy()
            .withRegEx(String.format(".*Bolt enabled on .*:%d\\.\n", DEFAULT_BOLT_PORT));
        WaitStrategy waitForHttp = new HttpWaitStrategy()
            .forPort(DEFAULT_HTTP_PORT)
            .forStatusCodeMatching(response -> response == HTTP_OK);
        this.waitStrategy = new WaitAllStrategy()
            .withStrategy(waitForBolt)
            .withStrategy(waitForHttp)
            .withStartupTimeout(Duration.ofMinutes(2));
        addExposedPorts(DEFAULT_BOLT_PORT, DEFAULT_HTTP_PORT, DEFAULT_HTTPS_PORT);
    }
    @Override
    public Set<Integer> getLivenessCheckPortNumbers() {
        return Stream.of(DEFAULT_BOLT_PORT, DEFAULT_HTTP_PORT, DEFAULT_HTTPS_PORT)
            .map(this::getMappedPort)
            .collect(toSet());
    }
    @Override
    protected void configure() {
        // An empty or null admin password disables authentication entirely.
        boolean emptyAdminPassword = this.adminPassword == null || this.adminPassword.isEmpty();
        String neo4jAuth = emptyAdminPassword ? "none" : String.format(AUTH_FORMAT, this.adminPassword);
        addEnv("NEO4J_AUTH", neo4jAuth);
    }
    /**
     * @return Bolt URL for use with Neo4j's Java-Driver.
     */
    public String getBoltUrl() {
        // Plain concatenation: the previous String.format(...) call received an already
        // concatenated string with no format arguments and would have thrown
        // UnknownFormatConversionException if the host name contained a '%'.
        return "bolt://" + getHost() + ":" + getMappedPort(DEFAULT_BOLT_PORT);
    }
    /**
     * @return URL of the transactional HTTP endpoint.
     */
    public String getHttpUrl() {
        // See getBoltUrl() for why String.format is not used here.
        return "http://" + getHost() + ":" + getMappedPort(DEFAULT_HTTP_PORT);
    }
    /**
     * @return URL of the transactional HTTPS endpoint.
     */
    public String getHttpsUrl() {
        // See getBoltUrl() for why String.format is not used here.
        return "https://" + getHost() + ":" + getMappedPort(DEFAULT_HTTPS_PORT);
    }
    /**
     * Configures the container to use the enterprise edition of the default docker image.
     * <br><br>
     * Please have a look at the <a href="https://neo4j.com/licensing/">Neo4j Licensing page</a>. While the Neo4j
     * Community Edition can be used for free in your projects under the GPL v3 license, Neo4j Enterprise edition
     * needs either a commercial, education or evaluation license.
     *
     * @return This container.
     * @throws IllegalStateException when called on a container configured with a non-official image.
     */
    public S withEnterpriseEdition() {
        if (!standardImage) {
            throw new IllegalStateException(
                String.format("Cannot use enterprise version with alternative image %s.",
                    getDockerImageName()));
        }
        setDockerImageName(DEFAULT_IMAGE_NAME.withTag(ENTERPRISE_TAG).asCanonicalNameString());
        // Enterprise edition requires an explicitly accepted license.
        LicenseAcceptance.assertLicenseAccepted(getDockerImageName());
        addEnv("NEO4J_ACCEPT_LICENSE_AGREEMENT", "yes");
        return self();
    }
    /**
     * Sets the admin password for the default account (which is <pre>neo4j</pre>). A null value or an empty string
     * disables authentication.
     *
     * @param adminPassword The admin password for the default database account.
     * @return This container.
     */
    public S withAdminPassword(final String adminPassword) {
        this.adminPassword = adminPassword;
        return self();
    }
    /**
     * Disables authentication.
     *
     * @return This container.
     */
    public S withoutAuthentication() {
        return withAdminPassword(null);
    }
    /**
     * Copies an existing {@code graph.db} folder into the container. This can either be a classpath resource or a
     * host resource. Please have a look at the factory methods in {@link MountableFile}.
     * <br>
     * If you want to map your database into the container instead of copying them, please use {@code #withClasspathResourceMapping},
     * but this will only work when your test does not run in a container itself.
     * <br>
     * Note: This method only works with Neo4j 3.5.
     * <br>
     * Mapping would work like this:
     * <pre>
     * &#64;Container
     * private static final Neo4jContainer databaseServer = new Neo4jContainer&lt;&gt;()
     *     .withClasspathResourceMapping("/test-graph.db", "/data/databases/graph.db", BindMode.READ_WRITE);
     * </pre>
     *
     * @param graphDb The graph.db folder to copy into the container
     * @throws IllegalArgumentException If the database version is not 3.5.
     * @return This container.
     */
    public S withDatabase(MountableFile graphDb) {
        if (!isNeo4jDatabaseVersionSupportingDbCopy()) {
            throw new IllegalArgumentException(
                "Copying database folder is not supported for Neo4j instances with version 4.0 or higher.");
        }
        return withCopyFileToContainer(graphDb, "/data/databases/graph.db");
    }
    /**
     * Adds plugins to the given directory to the container. If {@code plugins} denotes a directory, than all of that
     * directory is mapped to Neo4j's plugins. Otherwise, single resources are copied over.
     * <br>
     * If you want to map your plugins into the container instead of copying them, please use {@code #withClasspathResourceMapping},
     * but this will only work when your test does not run in a container itself.
     *
     * @param plugins The plugins (a single file or a directory) to copy into the container.
     * @return This container.
     */
    public S withPlugins(MountableFile plugins) {
        return withCopyFileToContainer(plugins, "/var/lib/neo4j/plugins/");
    }
    /**
     * Adds Neo4j configuration properties to the container. The properties can be added as in the official Neo4j
     * configuration, the method automatically translate them into the format required by the Neo4j container.
     *
     * @param key The key to configure, i.e. {@code dbms.security.procedures.unrestricted}
     * @param value The value to set
     * @return This container.
     */
    public S withNeo4jConfig(String key, String value) {
        addEnv(formatConfigurationKey(key), value);
        return self();
    }
    /**
     * @return The admin password for the <code>neo4j</code> account or literal <code>null</code> if auth is disabled.
     */
    public String getAdminPassword() {
        return adminPassword;
    }
    /**
     * Translates a plain Neo4j configuration key into the environment-variable form understood
     * by the official image: existing underscores are doubled first, then dots become single
     * underscores, and the result is prefixed with {@code NEO4J_}.
     * E.g. {@code dbms.security.procedures.unrestricted} -> {@code NEO4J_dbms_security_procedures_unrestricted}.
     */
    private static String formatConfigurationKey(String plainConfigKey) {
        final String prefix = "NEO4J_";
        return String.format("%s%s", prefix, plainConfigKey
            .replaceAll("_", "__")
            .replaceAll("\\.", "_"));
    }
    /**
     * @return true when the configured image version supports copying a graph.db folder
     *         (semantic versions &gt;= 2 and &lt; 4.0); false otherwise, including
     *         non-semantic version tags such as "latest".
     */
    private boolean isNeo4jDatabaseVersionSupportingDbCopy() {
        String usedImageVersion = DockerImageName.parse(getDockerImageName()).getVersionPart();
        ComparableVersion usedComparableVersion = new ComparableVersion(usedImageVersion);
        boolean versionSupportingDbCopy =
            usedComparableVersion.isLessThan("4.0") && usedComparableVersion.isGreaterThanOrEqualTo("2");
        if (versionSupportingDbCopy) {
            return true;
        }
        if (!usedComparableVersion.isSemanticVersion()) {
            logger().warn("Version {} is not a semantic version. The function \"withDatabase\" will fail.", usedImageVersion);
            logger().warn("Copying databases is only supported for Neo4j versions 3.5.x");
        }
        return false;
    }
}
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.slicer;
import com.intellij.codeInsight.PsiEquivalenceUtil;
import com.intellij.concurrency.ConcurrentCollectionFactory;
import com.intellij.ide.util.treeView.AbstractTreeNode;
import com.intellij.ide.util.treeView.AbstractTreeStructure;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Ref;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiJavaReference;
import com.intellij.psi.PsiNamedElement;
import com.intellij.psi.impl.source.tree.AstBufferUtil;
import com.intellij.util.NullableFunction;
import com.intellij.util.PairProcessor;
import com.intellij.util.WalkingState;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.FactoryMap;
import consulo.util.collection.HashingStrategy;
import consulo.util.collection.Sets;
import javax.annotation.Nonnull;
import java.util.*;
/**
 * Analyzes the leaves of a dataflow-slice tree and regroups the tree by leaf value.
 *
 * @author cdr
 */
public class SliceLeafAnalyzer {
    /**
     * Equality for leaf expressions: Java references are compared via their resolved target,
     * named elements by name, and everything else by whitespace/comment-insensitive node text.
     */
    public static final HashingStrategy<PsiElement> LEAF_ELEMENT_EQUALITY = new HashingStrategy<PsiElement>() {
        @Override
        public int hashCode(final PsiElement element) {
            if (element == null) return 0;
            // PSI access must happen inside a read action.
            String text = ApplicationManager.getApplication().runReadAction(new Computable<String>() {
                @Override
                public String compute() {
                    PsiElement elementToCompare = element;
                    if (element instanceof PsiJavaReference) {
                        // Hash references by what they resolve to, so different mentions
                        // of the same target collide.
                        PsiElement resolved = ((PsiJavaReference)element).resolve();
                        if (resolved != null) {
                            elementToCompare = resolved;
                        }
                    }
                    return elementToCompare instanceof PsiNamedElement ? ((PsiNamedElement)elementToCompare).getName()
                        : AstBufferUtil.getTextSkippingWhitespaceComments(elementToCompare.getNode());
                }
            });
            return Comparing.hashcode(text);
        }
        @Override
        public boolean equals(final PsiElement o1, final PsiElement o2) {
            return ApplicationManager.getApplication().runReadAction(new Computable<Boolean>() {
                @Override
                public Boolean compute() {
                    return o1 != null && o2 != null && PsiEquivalenceUtil.areElementsEquivalent(o1, o2);
                }
            });
        }
    };
    /**
     * Recursively copies the tree rooted at oldRoot, keeping only nodes accepted by
     * {@code filter} and pruning subtrees rejected by {@code postProcessor}.
     * Returns null when the root itself is filtered out or rejected.
     */
    static SliceNode filterTree(SliceNode oldRoot, NullableFunction<SliceNode, SliceNode> filter, PairProcessor<SliceNode, List<SliceNode>> postProcessor){
        SliceNode filtered = filter.fun(oldRoot);
        if (filtered == null) return null;
        List<SliceNode> childrenFiltered = new ArrayList<SliceNode>();
        if (oldRoot.myCachedChildren != null) {
            for (SliceNode child : oldRoot.myCachedChildren) {
                SliceNode childFiltered = filterTree(child, filter,postProcessor);
                if (childFiltered != null) {
                    childrenFiltered.add(childFiltered);
                }
            }
        }
        boolean success = postProcessor == null || postProcessor.process(filtered, childrenFiltered);
        if (!success) return null;
        filtered.myCachedChildren = new ArrayList<SliceNode>(childrenFiltered);
        return filtered;
    }
    /**
     * Builds a value-grouped tree from the collected leaves and opens it in a new
     * slice tool window.
     */
    private static void groupByValues(@Nonnull Collection<PsiElement> leaves,
                                      @Nonnull SliceRootNode oldRoot,
                                      @Nonnull Map<SliceNode, Collection<PsiElement>> map) {
        assert oldRoot.myCachedChildren.size() == 1;
        SliceRootNode root = createTreeGroupedByValues(leaves, oldRoot, map);
        SliceNode oldRootStart = oldRoot.myCachedChildren.get(0);
        SliceUsage rootUsage = oldRootStart.getValue();
        String description = SliceManager.getElementDescription(null, rootUsage.getElement(), " (grouped by value)");
        SliceManager.getInstance(root.getProject()).createToolWindow(true, root, true, description);
    }
    /**
     * Creates a copy of the slice tree whose first level is one node per leaf value;
     * under each value node the original tree is filtered down to the paths that reach
     * that leaf.
     */
    @Nonnull
    public static SliceRootNode createTreeGroupedByValues(Collection<PsiElement> leaves, SliceRootNode oldRoot, final Map<SliceNode, Collection<PsiElement>> map) {
        SliceNode oldRootStart = oldRoot.myCachedChildren.get(0);
        SliceRootNode root = oldRoot.copy();
        root.setChanged();
        root.targetEqualUsages.clear();
        root.myCachedChildren = new ArrayList<SliceNode>(leaves.size());
        for (final PsiElement leafExpression : leaves) {
            // Keep only nodes whose leaf set contains this expression; duplicates are dropped.
            SliceNode newNode = filterTree(oldRootStart, new NullableFunction<SliceNode, SliceNode>() {
                @Override
                public SliceNode fun(SliceNode oldNode) {
                    if (oldNode.getDuplicate() != null) return null;
                    if (!node(oldNode, map).contains(leafExpression)) return null;
                    return oldNode.copy();
                }
            }, new PairProcessor<SliceNode, List<SliceNode>>() {
                @Override
                public boolean process(SliceNode node, List<SliceNode> children) {
                    if (!children.isEmpty()) return true;
                    PsiElement element = node.getValue().getElement();
                    if (element == null) return false;
                    return element.getManager().areElementsEquivalent(element, leafExpression); // leaf can be there only if it's filtering expression
                }
            });
            SliceLeafValueRootNode lvNode = new SliceLeafValueRootNode(root.getProject(), leafExpression, root, Collections.singletonList(newNode),
                                                                       oldRoot.getValue().params);
            root.myCachedChildren.add(lvNode);
        }
        return root;
    }
    /**
     * Expands the whole slice tree in a background task, collects its leaf expressions,
     * and (on success) opens the value-grouped view. {@code finish} always runs, whether
     * the task succeeds or is cancelled.
     */
    public static void startAnalyzeValues(@Nonnull final AbstractTreeStructure treeStructure, @Nonnull final Runnable finish) {
        final SliceRootNode root = (SliceRootNode)treeStructure.getRootElement();
        final Ref<Collection<PsiElement>> leafExpressions = Ref.create(null);
        final Map<SliceNode, Collection<PsiElement>> map = createMap();
        ProgressManager.getInstance().run(new Task.Backgroundable(root.getProject(), "Expanding all nodes... (may very well take the whole day)", true) {
            @Override
            public void run(@Nonnull final ProgressIndicator indicator) {
                Collection<PsiElement> l = calcLeafExpressions(root, treeStructure, map);
                leafExpressions.set(l);
            }
            @Override
            public void onCancel() {
                finish.run();
            }
            @Override
            public void onSuccess() {
                try {
                    Collection<PsiElement> leaves = leafExpressions.get();
                    if (leaves == null) return; //cancelled
                    if (leaves.isEmpty()) {
                        Messages.showErrorDialog("Unable to find leaf expressions to group by", "Cannot group");
                        return;
                    }
                    groupByValues(leaves, root, map);
                }
                finally {
                    finish.run();
                }
            }
        });
    }
    /**
     * @return an identity-keyed map that lazily creates, per node, a concurrent set of
     *         leaf expressions using {@link #LEAF_ELEMENT_EQUALITY}.
     */
    public static Map<SliceNode, Collection<PsiElement>> createMap() {
        return FactoryMap.createMap(sliceNode -> ConcurrentCollectionFactory.createConcurrentSet(SliceLeafAnalyzer.LEAF_ELEMENT_EQUALITY), () -> ConcurrentCollectionFactory.createMap(ContainerUtil.<SliceNode>identityStrategy()));
    }
    /** Tree navigation adapter over an AbstractTreeStructure for WalkingState. */
    static class SliceNodeGuide implements WalkingState.TreeGuide<SliceNode> {
        private final AbstractTreeStructure myTreeStructure;
        // use tree structure because it's setting 'parent' fields in the process
        SliceNodeGuide(@Nonnull AbstractTreeStructure treeStructure) {
            myTreeStructure = treeStructure;
        }
        @Override
        public SliceNode getNextSibling(@Nonnull SliceNode element) {
            AbstractTreeNode parent = element.getParent();
            if (parent == null) return null;
            return element.getNext((List)parent.getChildren());
        }
        @Override
        public SliceNode getPrevSibling(@Nonnull SliceNode element) {
            AbstractTreeNode parent = element.getParent();
            if (parent == null) return null;
            return element.getPrev((List)parent.getChildren());
        }
        @Override
        public SliceNode getFirstChild(@Nonnull SliceNode element) {
            Object[] children = myTreeStructure.getChildElements(element);
            return children.length == 0 ? null : (SliceNode)children[0];
        }
        @Override
        public SliceNode getParent(@Nonnull SliceNode element) {
            AbstractTreeNode parent = element.getParent();
            return parent instanceof SliceNode ? (SliceNode)parent : null;
        }
    }
    // Shorthand: the (lazily created) leaf set associated with a node.
    private static Collection<PsiElement> node(SliceNode node, Map<SliceNode, Collection<PsiElement>> map) {
        return map.get(node);
    }
    /**
     * Walks the whole tree depth-first, filling {@code map} with the leaf expressions
     * reachable from each node: duplicates reuse their original's set, childless nodes
     * contribute their own element, and each finished node's set is merged into its parent.
     *
     * @return the leaf expressions collected for the root
     */
    @Nonnull
    public static Collection<PsiElement> calcLeafExpressions(@Nonnull final SliceNode root,
                                                             @Nonnull AbstractTreeStructure treeStructure,
                                                             @Nonnull final Map<SliceNode, Collection<PsiElement>> map) {
        final SliceNodeGuide guide = new SliceNodeGuide(treeStructure);
        WalkingState<SliceNode> walkingState = new WalkingState<SliceNode>(guide) {
            @Override
            public void visit(@Nonnull SliceNode element) {
                element.calculateDupNode();
                node(element, map).clear();
                SliceNode duplicate = element.getDuplicate();
                if (duplicate != null) {
                    node(element, map).addAll(node(duplicate, map));
                }
                else {
                    final SliceUsage sliceUsage = element.getValue();
                    Collection<? extends AbstractTreeNode> children = element.getChildren();
                    if (children.isEmpty()) {
                        PsiElement value = ApplicationManager.getApplication().runReadAction((Computable<PsiElement>) sliceUsage::getElement);
                        // NOTE(review): Set.of(value) throws NullPointerException when value is
                        // null — confirm sliceUsage.getElement() can never return null here.
                        node(element, map).addAll(Sets.newHashSet(Set.of(value), LEAF_ELEMENT_EQUALITY));
                    }
                    super.visit(element);
                }
            }
            @Override
            public void elementFinished(@Nonnull SliceNode element) {
                SliceNode parent = guide.getParent(element);
                if (parent != null) {
                    node(parent, map).addAll(node(element, map));
                }
            }
        };
        walkingState.visit(root);
        return node(root, map);
    }
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.artifact_cache;
import static org.hamcrest.junit.MatcherAssume.assumeThat;
import static org.junit.Assert.assertThat;
import com.facebook.buck.event.BuckEventBus;
import com.facebook.buck.event.BuckEventBusFactory;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.testutil.FakeProjectFilesystem;
import com.facebook.buck.util.environment.Platform;
import com.google.common.util.concurrent.MoreExecutors;
import org.hamcrest.Matchers;
import org.junit.Test;
import java.nio.file.Paths;
import java.util.Optional;
public class ArtifactCachesTest {
@Test
public void testCreateHttpCacheOnly() throws Exception {
ArtifactCacheBuckConfig cacheConfig = ArtifactCacheBuckConfigTest.createFromText(
"[cache]",
"mode = http");
ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
BuckEventBus buckEventBus = BuckEventBusFactory.newInstance();
ArtifactCache artifactCache = new ArtifactCaches(
cacheConfig,
buckEventBus,
projectFilesystem,
Optional.empty(),
MoreExecutors.newDirectExecutorService(),
Optional.empty()).newInstance();
assertThat(stripDecorators(artifactCache), Matchers.instanceOf(HttpArtifactCache.class));
}
@Test
public void testCreateDirCacheOnly() throws Exception {
ArtifactCacheBuckConfig cacheConfig = ArtifactCacheBuckConfigTest.createFromText(
"[cache]",
"mode = dir");
ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
BuckEventBus buckEventBus = BuckEventBusFactory.newInstance();
ArtifactCache artifactCache = new ArtifactCaches(
cacheConfig,
buckEventBus,
projectFilesystem,
Optional.empty(),
MoreExecutors.newDirectExecutorService(),
Optional.empty()).newInstance();
assertThat(stripDecorators(artifactCache), Matchers.instanceOf(DirArtifactCache.class));
}
@Test
public void testCreateMultipleDirCaches() throws Exception {
ArtifactCacheBuckConfig cacheConfig = ArtifactCacheBuckConfigTest.createFromText(
"[cache]",
"dir_cache_names = dir1, dir2",
"[cache#dir1]",
"dir = dir1",
"dir_mode = readwrite",
"[cache#dir2]",
"dir = dir2",
"dir_mode = readonly");
ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
BuckEventBus buckEventBus = BuckEventBusFactory.newInstance();
ArtifactCache artifactCache = stripDecorators(
new ArtifactCaches(
cacheConfig,
buckEventBus,
projectFilesystem,
Optional.empty(),
MoreExecutors.newDirectExecutorService(),
Optional.empty()).newInstance());
assertThat(artifactCache, Matchers.instanceOf(MultiArtifactCache.class));
MultiArtifactCache multiArtifactCache = (MultiArtifactCache) artifactCache;
assertThat(multiArtifactCache.getArtifactCaches().size(), Matchers.equalTo(2));
ArtifactCache c1 = stripDecorators(multiArtifactCache.getArtifactCaches().get(0));
ArtifactCache c2 = stripDecorators(multiArtifactCache.getArtifactCaches().get(1));
assertThat(c1, Matchers.instanceOf(DirArtifactCache.class));
assertThat(c2, Matchers.instanceOf(DirArtifactCache.class));
DirArtifactCache dir1 = (DirArtifactCache) c1;
assertThat(dir1.getCacheDir(), Matchers.equalTo(Paths.get("dir1").toAbsolutePath()));
assertThat(
dir1.getCacheReadMode(),
Matchers.equalTo(CacheReadMode.READWRITE));
DirArtifactCache dir2 = (DirArtifactCache) c2;
assertThat(dir2.getCacheDir(), Matchers.equalTo(Paths.get("dir2").toAbsolutePath()));
assertThat(
dir2.getCacheReadMode(),
Matchers.equalTo(CacheReadMode.READONLY));
}
@Test
public void testCreateBoth() throws Exception {
ArtifactCacheBuckConfig cacheConfig = ArtifactCacheBuckConfigTest.createFromText(
"[cache]",
"mode = dir, http");
ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
BuckEventBus buckEventBus = BuckEventBusFactory.newInstance();
ArtifactCache artifactCache = new ArtifactCaches(
cacheConfig,
buckEventBus,
projectFilesystem,
Optional.empty(),
MoreExecutors.newDirectExecutorService(),
Optional.empty()).newInstance();
assertThat(stripDecorators(artifactCache), Matchers.instanceOf(MultiArtifactCache.class));
}
@Test
public void testCreateDirCacheOnlyWhenOnBlacklistedWifi() throws Exception {
ArtifactCacheBuckConfig cacheConfig = ArtifactCacheBuckConfigTest.createFromText(
"[cache]",
"mode = dir, http",
"blacklisted_wifi_ssids = weevil, evilwifi");
ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
BuckEventBus buckEventBus = BuckEventBusFactory.newInstance();
ArtifactCache artifactCache = new ArtifactCaches(
cacheConfig,
buckEventBus,
projectFilesystem,
Optional.of("evilwifi"),
MoreExecutors.newDirectExecutorService(),
Optional.empty()).newInstance();
assertThat(stripDecorators(artifactCache), Matchers.instanceOf(DirArtifactCache.class));
}
@Test
public void testCreateReadOnlyDirCacheExperimentalCache() throws Exception {
assumeThat(Platform.detect(), Matchers.not(Matchers.equalTo(Platform.WINDOWS)));
ArtifactCacheBuckConfig cacheConfig = ArtifactCacheBuckConfigTest.createFromText(
"[cache]",
"mode = dir, http",
"_exp_propagation = true",
"_exp_propagation_force_control_group = true");
ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
BuckEventBus buckEventBus = BuckEventBusFactory.newInstance();
ArtifactCache artifactCache = new ArtifactCaches(
cacheConfig,
buckEventBus,
projectFilesystem,
Optional.empty(),
MoreExecutors.newDirectExecutorService(),
Optional.empty()).newInstance();
ArtifactCache result = stripDecorators(artifactCache);
assertThat(result, Matchers.instanceOf(RemoteArtifactsInLocalCacheArtifactCache.class));
RemoteArtifactsInLocalCacheArtifactCache experimentalCache =
(RemoteArtifactsInLocalCacheArtifactCache) result;
assertThat(experimentalCache.getLocalCache(), Matchers.instanceOf(MultiArtifactCache.class));
MultiArtifactCache localCache = (MultiArtifactCache) experimentalCache.getLocalCache();
assertThat(localCache.getArtifactCaches().get(0), Matchers.instanceOf(CacheDecorator.class));
CacheDecorator decorator =
(CacheDecorator) localCache.getArtifactCaches().get(0);
assertThat(decorator.getDelegate(), Matchers.instanceOf(DirArtifactCache.class));
assertThat(experimentalCache.getRemoteCache(), Matchers.instanceOf(MultiArtifactCache.class));
MultiArtifactCache remoteCache = (MultiArtifactCache) experimentalCache.getRemoteCache();
assertThat(
remoteCache.getArtifactCaches().get(0),
Matchers.instanceOf(HttpArtifactCache.class));
}
/**
 * Unwraps logging and two-level decorators, returning the innermost cache.
 * Iterative form of the original recursive unwrap; behavior is identical.
 */
private static ArtifactCache stripDecorators(ArtifactCache artifactCache) {
  ArtifactCache current = artifactCache;
  while (true) {
    if (current instanceof LoggingArtifactCacheDecorator) {
      current = ((LoggingArtifactCacheDecorator) current).getDelegate();
    } else if (current instanceof TwoLevelArtifactCacheDecorator) {
      current = ((TwoLevelArtifactCacheDecorator) current).getDelegate();
    } else {
      return current;
    }
  }
}
}
| |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.undertow.conduits;
import io.undertow.UndertowLogger;
import io.undertow.UndertowOptions;
import io.undertow.server.OpenListener;
import org.xnio.Buffers;
import org.xnio.ChannelListeners;
import org.xnio.IoUtils;
import org.xnio.Options;
import org.xnio.StreamConnection;
import org.xnio.XnioExecutor;
import org.xnio.channels.StreamSourceChannel;
import org.xnio.conduits.AbstractStreamSinkConduit;
import org.xnio.conduits.StreamSinkConduit;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.FileChannel;
import java.util.concurrent.TimeUnit;
/**
* Wrapper for write timeout. This should always be the first wrapper applied to the underlying channel.
*
* @author Stuart Douglas
* @see org.xnio.Options#WRITE_TIMEOUT
*/
public final class WriteTimeoutStreamSinkConduit extends AbstractStreamSinkConduit<StreamSinkConduit> {
// Key of the currently scheduled timeout task; null when no task is scheduled.
// NOTE(review): mutated from both caller threads and the I/O thread but not volatile --
// verify all writes actually happen on the I/O thread, otherwise this is a visibility hazard.
private XnioExecutor.Key handle;
private final StreamConnection connection;
// Absolute wall-clock deadline (ms since epoch) for the next write; -1 means disarmed.
private volatile long expireTime = -1;
// Source of the deployment-wide Undertow options (used for IDLE_TIMEOUT).
private final OpenListener openListener;
private static final int FUZZ_FACTOR = 50; //we add 50ms to the timeout to make sure the underlying channel has actually timed out
// Runs on the connection's I/O thread: closes the connection once the deadline has
// passed, or re-schedules itself if the deadline was bumped by a successful write.
private final Runnable timeoutCommand = new Runnable() {
@Override
public void run() {
handle = null;
if (expireTime == -1) {
// Timeout was disarmed after this task was queued; nothing to do.
return;
}
long current = System.currentTimeMillis();
if (current < expireTime) {
//timeout has been bumped, re-schedule
handle = connection.getIoThread().executeAfter(timeoutCommand, (expireTime - current) + FUZZ_FACTOR, TimeUnit.MILLISECONDS);
return;
}
UndertowLogger.REQUEST_LOGGER.tracef("Timing out channel %s due to inactivity", connection.getSinkChannel());
IoUtils.safeClose(connection);
// Wake any resumed read/write listeners so they observe the closed channel promptly.
if (connection.getSourceChannel().isReadResumed()) {
ChannelListeners.invokeChannelListener(connection.getSourceChannel(), connection.getSourceChannel().getReadListener());
}
if (connection.getSinkChannel().isWriteResumed()) {
ChannelListeners.invokeChannelListener(connection.getSinkChannel(), connection.getSinkChannel().getWriteListener());
}
}
};
/**
 * @param delegate     next conduit in the chain
 * @param connection   connection whose writes are guarded by the timeout
 * @param openListener provides the Undertow options map (idle timeout lookup)
 */
public WriteTimeoutStreamSinkConduit(final StreamSinkConduit delegate, StreamConnection connection, OpenListener openListener) {
super(delegate);
this.connection = connection;
this.openListener = openListener;
}
/**
 * Called after every write attempt with the number of bytes written.
 * Enforces an already-expired deadline (close + ClosedChannelException) and
 * otherwise pushes the deadline forward. Note that this method never arms
 * the timer itself; scheduling only happens in handleResumeTimeout().
 *
 * @param ret bytes written by the attempt (0 means the write did not progress)
 * @throws ClosedChannelException if the deadline had already expired
 */
private void handleWriteTimeout(final long ret) throws IOException {
if (!connection.isOpen()) {
return;
}
if (ret == 0 && handle != null) {
// No progress and a timeout task is already pending: leave the deadline alone.
return;
}
Integer timeout = getTimeout();
if (timeout == null || timeout <= 0) {
return;
}
long currentTime = System.currentTimeMillis();
long expireTimeVar = expireTime;
if (expireTimeVar != -1 && currentTime > expireTimeVar) {
// Deadline already passed: hard-close and surface it to the caller.
IoUtils.safeClose(connection);
throw new ClosedChannelException();
}
// Bump the deadline; a pending timeoutCommand will notice and re-schedule.
expireTime = currentTime + timeout;
}
@Override
public int write(final ByteBuffer src) throws IOException {
int ret = super.write(src);
handleWriteTimeout(ret);
return ret;
}
@Override
public long write(final ByteBuffer[] srcs, final int offset, final int length) throws IOException {
long ret = super.write(srcs, offset, length);
handleWriteTimeout(ret);
return ret;
}
@Override
public int writeFinal(ByteBuffer src) throws IOException {
int ret = super.writeFinal(src);
handleWriteTimeout(ret);
// Final write fully flushed: cancel any pending timeout task.
if(!src.hasRemaining()) {
if(handle != null) {
handle.remove();
handle = null;
}
}
return ret;
}
@Override
public long writeFinal(ByteBuffer[] srcs, int offset, int length) throws IOException {
long ret = super.writeFinal(srcs, offset, length);
handleWriteTimeout(ret);
// Final write fully flushed: cancel any pending timeout task.
if(!Buffers.hasRemaining(srcs)) {
if(handle != null) {
handle.remove();
handle = null;
}
}
return ret;
}
@Override
public long transferFrom(final FileChannel src, final long position, final long count) throws IOException {
long ret = super.transferFrom(src, position, count);
handleWriteTimeout(ret);
return ret;
}
@Override
public long transferFrom(final StreamSourceChannel source, final long count, final ByteBuffer throughBuffer) throws IOException {
long ret = super.transferFrom(source, count, throughBuffer);
handleWriteTimeout(ret);
return ret;
}
@Override
public void awaitWritable() throws IOException {
// Bound an otherwise unbounded wait by the configured timeout (plus fuzz).
Integer timeout = getTimeout();
if (timeout != null && timeout > 0) {
super.awaitWritable(timeout + FUZZ_FACTOR, TimeUnit.MILLISECONDS);
} else {
super.awaitWritable();
}
}
@Override
public void awaitWritable(long time, TimeUnit timeUnit) throws IOException {
Integer timeout = getTimeout();
if (timeout != null && timeout > 0) {
// Never wait longer than the configured write timeout (plus fuzz).
long millis = timeUnit.toMillis(time);
super.awaitWritable(Math.min(millis, timeout + FUZZ_FACTOR), TimeUnit.MILLISECONDS);
} else {
super.awaitWritable(time, timeUnit);
}
}
/**
 * Resolves the effective timeout in ms: the channel's WRITE_TIMEOUT option,
 * combined with the deployment-wide IDLE_TIMEOUT (whichever is smaller when
 * both are set). May return null or a non-positive value, meaning "no timeout".
 * NOTE(review): WRITE_TIMEOUT is read from the source channel here -- confirm
 * that is intentional rather than connection.getSinkChannel().
 */
private Integer getTimeout() {
Integer timeout = 0;
try {
timeout = connection.getSourceChannel().getOption(Options.WRITE_TIMEOUT);
} catch (IOException ignore) {}
Integer idleTimeout = openListener.getUndertowOptions().get(UndertowOptions.IDLE_TIMEOUT);
if ((timeout == null || timeout <= 0) && idleTimeout != null) {
timeout = idleTimeout;
} else if (timeout != null && idleTimeout != null && idleTimeout > 0) {
timeout = Math.min(timeout, idleTimeout);
}
return timeout;
}
@Override
public void terminateWrites() throws IOException {
super.terminateWrites();
// Writes are done; cancel any pending timeout task.
if(handle != null) {
handle.remove();
handle = null;
}
}
@Override
public void truncateWrites() throws IOException {
super.truncateWrites();
// Writes are aborted; cancel any pending timeout task.
if(handle != null) {
handle.remove();
handle = null;
}
}
@Override
public void resumeWrites() {
super.resumeWrites();
handleResumeTimeout();
}
@Override
public void suspendWrites() {
super.suspendWrites();
// Read into a local first so the remove+null pair acts on one consistent value.
XnioExecutor.Key handle = this.handle;
if(handle != null) {
handle.remove();
this.handle = null;
}
}
@Override
public void wakeupWrites() {
super.wakeupWrites();
handleResumeTimeout();
}
// Arms the timeout: sets the deadline and schedules timeoutCommand on the I/O
// thread if no task is currently pending. This is the only place the timer starts.
private void handleResumeTimeout() {
Integer timeout = getTimeout();
if (timeout == null || timeout <= 0) {
return;
}
long currentTime = System.currentTimeMillis();
expireTime = currentTime + timeout;
XnioExecutor.Key key = handle;
if (key == null) {
handle = connection.getIoThread().executeAfter(timeoutCommand, timeout, TimeUnit.MILLISECONDS);
}
}
}
| |
package org.apache.lucene.codecs.blockterms;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.TreeMap;
import org.apache.lucene.codecs.BlockTermState;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.FieldsProducer;
import org.apache.lucene.codecs.PostingsReaderBase;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.DocsAndPositionsEnum;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.index.TermState;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.DoubleBarrelLRUCache;
import org.apache.lucene.util.RamUsageEstimator;
/** Handles a terms dict, but decouples all details of
* doc/freqs/positions reading to an instance of {@link
* PostingsReaderBase}. This class is reusable for
* codecs that use a different format for
* docs/freqs/positions (though codecs are also free to
* make their own terms dict impl).
*
* <p>This class also interacts with an instance of {@link
* TermsIndexReaderBase}, to abstract away the specific
* implementation of the terms dict index.
* @lucene.experimental */
public class BlockTermsReader extends FieldsProducer {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(BlockTermsReader.class);
// Open input to the main terms dict file (_X.tis)
private final IndexInput in;
// Reads the terms dict entries, to gather state to
// produce DocsEnum on demand
private final PostingsReaderBase postingsReader;
private final TreeMap<String,FieldReader> fields = new TreeMap<>();
// Reads the terms index
private TermsIndexReaderBase indexReader;
// keeps the dirStart offset
private long dirOffset;
private final int version;
// Used as key for the terms cache
/** Composite (field name, term bytes) key used for the terms cache. */
private static class FieldAndTerm extends DoubleBarrelLRUCache.CloneableKey {
String field;
BytesRef term;

/** Creates an empty key; field/term are filled in by the caller. */
public FieldAndTerm() {
}

/** Copy constructor: deep-copies the term bytes so this key owns them. */
public FieldAndTerm(FieldAndTerm other) {
this.field = other.field;
this.term = BytesRef.deepCopyOf(other.term);
}

@Override
public boolean equals(Object _other) {
// Cache-internal key: only ever compared against other FieldAndTerm
// instances, so the unchecked cast mirrors the original behavior.
final FieldAndTerm that = (FieldAndTerm) _other;
final boolean sameField = that.field.equals(field);
return sameField && term.bytesEquals(that.term);
}

@Override
public FieldAndTerm clone() {
return new FieldAndTerm(this);
}

@Override
public int hashCode() {
return 31 * field.hashCode() + term.hashCode();
}
}
// private String segment;
// Opens the terms dictionary file, validates its header/footer, and reads the
// per-field directory. The read order below must match BlockTermsWriter's write
// order exactly. On any failure before completion the input is closed.
public BlockTermsReader(TermsIndexReaderBase indexReader, Directory dir, FieldInfos fieldInfos, SegmentInfo info, PostingsReaderBase postingsReader, IOContext context,
String segmentSuffix)
throws IOException {
this.postingsReader = postingsReader;
// this.segment = segment;
in = dir.openInput(IndexFileNames.segmentFileName(info.name, segmentSuffix, BlockTermsWriter.TERMS_EXTENSION),
context);
boolean success = false;
try {
version = readHeader(in);
// Have PostingsReader init itself
postingsReader.init(in);
if (version >= BlockTermsWriter.VERSION_CHECKSUM) {
// NOTE: data file is too costly to verify checksum against all the bytes on open,
// but for now we at least verify proper structure of the checksum footer: which looks
// for FOOTER_MAGIC + algorithmID. This is cheap and can detect some forms of corruption
// such as file truncation.
CodecUtil.retrieveChecksum(in);
}
// Read per-field details
seekDir(in, dirOffset);
final int numFields = in.readVInt();
if (numFields < 0) {
throw new CorruptIndexException("invalid number of fields: " + numFields + " (resource=" + in + ")");
}
for(int i=0;i<numFields;i++) {
final int field = in.readVInt();
final long numTerms = in.readVLong();
assert numTerms >= 0;
final long termsStartPointer = in.readVLong();
final FieldInfo fieldInfo = fieldInfos.fieldInfo(field);
// sumTotalTermFreq is only written when term frequencies are indexed; -1 otherwise.
final long sumTotalTermFreq = fieldInfo.getIndexOptions() == IndexOptions.DOCS_ONLY ? -1 : in.readVLong();
final long sumDocFreq = in.readVLong();
final int docCount = in.readVInt();
// longsSize only exists in the META_ARRAY format revision and later.
final int longsSize = version >= BlockTermsWriter.VERSION_META_ARRAY ? in.readVInt() : 0;
if (docCount < 0 || docCount > info.getDocCount()) { // #docs with field must be <= #docs
throw new CorruptIndexException("invalid docCount: " + docCount + " maxDoc: " + info.getDocCount() + " (resource=" + in + ")");
}
if (sumDocFreq < docCount) { // #postings must be >= #docs with field
throw new CorruptIndexException("invalid sumDocFreq: " + sumDocFreq + " docCount: " + docCount + " (resource=" + in + ")");
}
if (sumTotalTermFreq != -1 && sumTotalTermFreq < sumDocFreq) { // #positions must be >= #postings
throw new CorruptIndexException("invalid sumTotalTermFreq: " + sumTotalTermFreq + " sumDocFreq: " + sumDocFreq + " (resource=" + in + ")");
}
FieldReader previous = fields.put(fieldInfo.name, new FieldReader(fieldInfo, numTerms, termsStartPointer, sumTotalTermFreq, sumDocFreq, docCount, longsSize));
if (previous != null) {
throw new CorruptIndexException("duplicate fields: " + fieldInfo.name + " (resource=" + in + ")");
}
}
success = true;
} finally {
if (!success) {
in.close();
}
}
this.indexReader = indexReader;
}
/**
 * Validates the codec header and returns the file's format version.
 * Pre-append-only files store the directory offset right after the header,
 * so it is captured into {@code dirOffset} here for those versions.
 */
private int readHeader(IndexInput input) throws IOException {
final int fileVersion = CodecUtil.checkHeader(input, BlockTermsWriter.CODEC_NAME,
BlockTermsWriter.VERSION_START,
BlockTermsWriter.VERSION_CURRENT);
if (fileVersion < BlockTermsWriter.VERSION_APPEND_ONLY) {
dirOffset = input.readLong();
}
return fileVersion;
}
/**
 * Positions {@code input} at the field directory. For append-only files the
 * directory offset lives near the end of the file (before the checksum footer
 * when present); for older files the caller passes the offset read from the
 * header.
 *
 * @param input     terms dict input to position
 * @param dirOffset header-supplied offset, used only for pre-append-only files
 */
private void seekDir(IndexInput input, long dirOffset) throws IOException {
// Use a local instead of reassigning the parameter: the parameter shadows the
// same-named field, and mutating it made it easy to misread which value wins.
long offset = dirOffset;
if (version >= BlockTermsWriter.VERSION_CHECKSUM) {
input.seek(input.length() - CodecUtil.footerLength() - 8);
offset = input.readLong();
} else if (version >= BlockTermsWriter.VERSION_APPEND_ONLY) {
input.seek(input.length() - 8);
offset = input.readLong();
}
input.seek(offset);
}
@Override
public void close() throws IOException {
// Nested try/finally guarantees all three resources get a close attempt even
// if an earlier close throws: indexReader, then the terms input, then the
// postings reader. The ordering is deliberate; do not flatten.
try {
try {
if (indexReader != null) {
indexReader.close();
}
} finally {
// null so if an app hangs on to us (ie, we are not
// GCable, despite being closed) we still free most
// ram
indexReader = null;
if (in != null) {
in.close();
}
}
} finally {
if (postingsReader != null) {
postingsReader.close();
}
}
}
@Override
public Iterator<String> iterator() {
// Iterate field names through an unmodifiable view so callers cannot
// mutate the underlying fields map via Iterator.remove().
Iterator<String> fieldNames = Collections.unmodifiableSet(fields.keySet()).iterator();
return fieldNames;
}
@Override
public Terms terms(String field) throws IOException {
assert field != null;
// Null when the field was not present in this segment.
final FieldReader reader = fields.get(field);
return reader;
}
@Override
public int size() {
// Number of indexed fields in this segment.
final int fieldCount = fields.size();
return fieldCount;
}
private static final long FIELD_READER_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(FieldReader.class);
private class FieldReader extends Terms implements Accountable {
final long numTerms;
final FieldInfo fieldInfo;
final long termsStartPointer;
final long sumTotalTermFreq;
final long sumDocFreq;
final int docCount;
final int longsSize;
// Holds the per-field statistics read from the directory; all values are
// immutable after construction.
FieldReader(FieldInfo fieldInfo, long numTerms, long termsStartPointer, long sumTotalTermFreq, long sumDocFreq, int docCount, int longsSize) {
// The writer never emits a field with zero terms.
assert numTerms > 0;
this.fieldInfo = fieldInfo;
this.termsStartPointer = termsStartPointer;
this.numTerms = numTerms;
this.docCount = docCount;
this.sumDocFreq = sumDocFreq;
this.sumTotalTermFreq = sumTotalTermFreq;
this.longsSize = longsSize;
}
@Override
public long ramBytesUsed() {
// Shallow size only: FieldReader holds primitives plus refs shared with the parent.
return FIELD_READER_RAM_BYTES_USED;
}
@Override
public Comparator<BytesRef> getComparator() {
// Terms are stored (and must be compared) in UTF8 byte order.
return BytesRef.getUTF8SortedAsUnicodeComparator();
}
@Override
public TermsEnum iterator(TermsEnum reuse) throws IOException {
// NOTE: the reuse argument is ignored; a fresh enum is always created.
return new SegmentTermsEnum();
}
@Override
public boolean hasFreqs() {
return fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) >= 0;
}
@Override
public boolean hasOffsets() {
return fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;
}
@Override
public boolean hasPositions() {
return fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0;
}
@Override
public boolean hasPayloads() {
return fieldInfo.hasPayloads();
}
@Override
public long size() {
// Term count for this field; always > 0 (asserted in the constructor).
return numTerms;
}
@Override
public long getSumTotalTermFreq() {
// -1 when the field indexes docs only (no term frequencies on disk).
return sumTotalTermFreq;
}
@Override
public long getSumDocFreq() throws IOException {
return sumDocFreq;
}
@Override
public int getDocCount() throws IOException {
return docCount;
}
// Iterates through terms in this field
private final class SegmentTermsEnum extends TermsEnum {
private final IndexInput in;
private final BlockTermState state;
private final boolean doOrd;
private final FieldAndTerm fieldTerm = new FieldAndTerm();
private final TermsIndexReaderBase.FieldIndexEnum indexEnum;
private final BytesRefBuilder term = new BytesRefBuilder();
/* This is true if indexEnum is "still" seek'd to the index term
for the current term. We set it to true on seeking, and then it
remains valid until next() is called enough times to load another
terms block: */
private boolean indexIsCurrent;
/* True if we've already called .next() on the indexEnum, to "bracket"
the current block of terms: */
private boolean didIndexNext;
/* Next index term, bracketing the current block of terms; this is
only valid if didIndexNext is true: */
private BytesRef nextIndexTerm;
/* True after seekExact(TermState), do defer seeking. If the app then
calls next() (which is not "typical"), then we'll do the real seek */
private boolean seekPending;
/* How many blocks we've read since last seek. Once this
is >= indexEnum.getDivisor() we set indexIsCurrent to false (since
the index can no long bracket seek-within-block). */
private int blocksSinceSeek;
private byte[] termSuffixes;
private ByteArrayDataInput termSuffixesReader = new ByteArrayDataInput();
/* Common prefix used for all terms in this block. */
private int termBlockPrefix;
/* How many terms in current block */
private int blockTermCount;
private byte[] docFreqBytes;
private final ByteArrayDataInput freqReader = new ByteArrayDataInput();
private int metaDataUpto;
private long[] longs;
private byte[] bytes;
private ByteArrayDataInput bytesReader;
public SegmentTermsEnum() throws IOException {
// Each enum clones the shared terms-dict input so concurrent enums over the
// same reader do not clash on the file pointer.
in = BlockTermsReader.this.in.clone();
in.seek(termsStartPointer);
indexEnum = indexReader.getFieldEnum(fieldInfo);
doOrd = indexReader.supportsOrd();
fieldTerm.field = fieldInfo.name;
state = postingsReader.newTermState();
// -1 sentinels: no term loaded yet / frequency not decoded yet.
state.totalTermFreq = -1;
state.ord = -1;
// Initial scratch sizes; both arrays are grown on demand in nextBlock().
termSuffixes = new byte[128];
docFreqBytes = new byte[64];
//System.out.println("BTR.enum init this=" + this + " postingsReader=" + postingsReader);
longs = new long[longsSize];
}
@Override
public Comparator<BytesRef> getComparator() {
// Must match the on-disk term order: UTF8 byte order.
return BytesRef.getUTF8SortedAsUnicodeComparator();
}
// TODO: we may want an alternate mode here which is
// "if you are about to return NOT_FOUND I won't use
// the terms data from that"; eg FuzzyTermsEnum will
// (usually) just immediately call seek again if we
// return NOT_FOUND so it's a waste for us to fill in
// the term that was actually NOT_FOUND
// Seeks to the smallest term >= target. First tries to prove the target lies in
// the currently loaded block (avoiding an index seek), otherwise asks the terms
// index for the block whose first term is <= target, then linearly scans suffix
// bytes within blocks until the target is found, passed, or the field ends.
@Override
public SeekStatus seekCeil(final BytesRef target) throws IOException {
if (indexEnum == null) {
throw new IllegalStateException("terms index was not loaded");
}
//System.out.println("BTR.seek seg=" + segment + " target=" + fieldInfo.name + ":" + target.utf8ToString() + " " + target + " current=" + term().utf8ToString() + " " + term() + " indexIsCurrent=" + indexIsCurrent + " didIndexNext=" + didIndexNext + " seekPending=" + seekPending + " divisor=" + indexReader.getDivisor() + " this=" + this);
if (didIndexNext) {
if (nextIndexTerm == null) {
//System.out.println(" nextIndexTerm=null");
} else {
//System.out.println(" nextIndexTerm=" + nextIndexTerm.utf8ToString());
}
}
boolean doSeek = true;
// See if we can avoid seeking, because target term
// is after current term but before next index term:
if (indexIsCurrent) {
final int cmp = BytesRef.getUTF8SortedAsUnicodeComparator().compare(term.get(), target);
if (cmp == 0) {
// Already at the requested term
return SeekStatus.FOUND;
} else if (cmp < 0) {
// Target term is after current term
if (!didIndexNext) {
if (indexEnum.next() == -1) {
nextIndexTerm = null;
} else {
nextIndexTerm = indexEnum.term();
}
//System.out.println(" now do index next() nextIndexTerm=" + (nextIndexTerm == null ? "null" : nextIndexTerm.utf8ToString()));
didIndexNext = true;
}
if (nextIndexTerm == null || BytesRef.getUTF8SortedAsUnicodeComparator().compare(target, nextIndexTerm) < 0) {
// Optimization: requested term is within the
// same term block we are now in; skip seeking
// (but do scanning):
doSeek = false;
//System.out.println(" skip seek: nextIndexTerm=" + (nextIndexTerm == null ? "null" : nextIndexTerm.utf8ToString()));
}
}
}
if (doSeek) {
//System.out.println(" seek");
// Ask terms index to find biggest indexed term (=
// first term in a block) that's <= our text:
in.seek(indexEnum.seek(target));
boolean result = nextBlock();
// Block must exist since, at least, the indexed term
// is in the block:
assert result;
indexIsCurrent = true;
didIndexNext = false;
blocksSinceSeek = 0;
if (doOrd) {
// ord of the term just BEFORE the block start; bumped as we scan.
state.ord = indexEnum.ord()-1;
}
term.copyBytes(indexEnum.term());
//System.out.println(" seek: term=" + term.utf8ToString());
} else {
//System.out.println(" skip seek");
if (state.termBlockOrd == blockTermCount && !nextBlock()) {
indexIsCurrent = false;
return SeekStatus.END;
}
}
seekPending = false;
int common = 0;
// Scan within block. We could do this by calling
// _next() and testing the resulting term, but this
// is wasteful. Instead, we first confirm the
// target matches the common prefix of this block,
// and then we scan the term bytes directly from the
// termSuffixesreader's byte[], saving a copy into
// the BytesRef term per term. Only when we return
// do we then copy the bytes into the term.
while(true) {
// First, see if target term matches common prefix
// in this block:
if (common < termBlockPrefix) {
final int cmp = (term.byteAt(common)&0xFF) - (target.bytes[target.offset + common]&0xFF);
if (cmp < 0) {
// TODO: maybe we should store common prefix
// in block header? (instead of relying on
// last term of previous block)
// Target's prefix is after the common block
// prefix, so term cannot be in this block
// but it could be in next block. We
// must scan to end-of-block to set common
// prefix for next block:
if (state.termBlockOrd < blockTermCount) {
while(state.termBlockOrd < blockTermCount-1) {
state.termBlockOrd++;
state.ord++;
termSuffixesReader.skipBytes(termSuffixesReader.readVInt());
}
final int suffix = termSuffixesReader.readVInt();
term.setLength(termBlockPrefix + suffix);
term.grow(term.length());
termSuffixesReader.readBytes(term.bytes(), termBlockPrefix, suffix);
}
state.ord++;
if (!nextBlock()) {
indexIsCurrent = false;
return SeekStatus.END;
}
common = 0;
} else if (cmp > 0) {
// Target's prefix is before the common prefix
// of this block, so we position to start of
// block and return NOT_FOUND:
assert state.termBlockOrd == 0;
final int suffix = termSuffixesReader.readVInt();
term.setLength(termBlockPrefix + suffix);
term.grow(term.length());
termSuffixesReader.readBytes(term.bytes(), termBlockPrefix, suffix);
return SeekStatus.NOT_FOUND;
} else {
common++;
}
continue;
}
// Test every term in this block
while (true) {
state.termBlockOrd++;
state.ord++;
final int suffix = termSuffixesReader.readVInt();
// We know the prefix matches, so just compare the new suffix:
final int termLen = termBlockPrefix + suffix;
int bytePos = termSuffixesReader.getPosition();
boolean next = false;
final int limit = target.offset + (termLen < target.length ? termLen : target.length);
int targetPos = target.offset + termBlockPrefix;
while(targetPos < limit) {
final int cmp = (termSuffixes[bytePos++]&0xFF) - (target.bytes[targetPos++]&0xFF);
if (cmp < 0) {
// Current term is still before the target;
// keep scanning
next = true;
break;
} else if (cmp > 0) {
// Done! Current term is after target. Stop
// here, fill in real term, return NOT_FOUND.
term.setLength(termBlockPrefix + suffix);
term.grow(term.length());
termSuffixesReader.readBytes(term.bytes(), termBlockPrefix, suffix);
//System.out.println(" NOT_FOUND");
return SeekStatus.NOT_FOUND;
}
}
if (!next && target.length <= termLen) {
term.setLength(termBlockPrefix + suffix);
term.grow(term.length());
termSuffixesReader.readBytes(term.bytes(), termBlockPrefix, suffix);
if (target.length == termLen) {
// Done! Exact match. Stop here, fill in
// real term, return FOUND.
//System.out.println(" FOUND");
return SeekStatus.FOUND;
} else {
//System.out.println(" NOT_FOUND");
return SeekStatus.NOT_FOUND;
}
}
if (state.termBlockOrd == blockTermCount) {
// Must pre-fill term for next block's common prefix
term.setLength(termBlockPrefix + suffix);
term.grow(term.length());
termSuffixesReader.readBytes(term.bytes(), termBlockPrefix, suffix);
break;
} else {
termSuffixesReader.skipBytes(suffix);
}
}
// The purpose of the terms dict index is to seek
// the enum to the closest index term before the
// term we are looking for. So, we should never
// cross another index term (besides the first
// one) while we are scanning:
assert indexIsCurrent;
if (!nextBlock()) {
//System.out.println(" END");
indexIsCurrent = false;
return SeekStatus.END;
}
common = 0;
}
}
// Advances to the next term, first replaying a deferred seekExact(TermState)
// by reloading that term's block and stepping back to its position.
@Override
public BytesRef next() throws IOException {
//System.out.println("BTR.next() seekPending=" + seekPending + " pendingSeekCount=" + state.termBlockOrd);
// If seek was previously called and the term was cached,
// usually caller is just going to pull a D/&PEnum or get
// docFreq, etc. But, if they then call next(),
// this method catches up all internal state so next()
// works properly:
if (seekPending) {
assert !indexIsCurrent;
in.seek(state.blockFilePointer);
final int pendingSeekCount = state.termBlockOrd;
boolean result = nextBlock();
// Preserve the ord across the replay: nextBlock()/_next() mutate state.ord.
final long savOrd = state.ord;
// Block must exist since seek(TermState) was called w/ a
// TermState previously returned by this enum when positioned
// on a real term:
assert result;
while(state.termBlockOrd < pendingSeekCount) {
BytesRef nextResult = _next();
assert nextResult != null;
}
seekPending = false;
state.ord = savOrd;
}
return _next();
}
/* Decodes only the term bytes of the next term. If caller then asks for
metadata, ie docFreq, totalTermFreq or pulls a D/&PEnum, we then (lazily)
decode all metadata up to the current term. Returns null at end of field. */
private BytesRef _next() throws IOException {
//System.out.println("BTR._next seg=" + segment + " this=" + this + " termCount=" + state.termBlockOrd + " (vs " + blockTermCount + ")");
if (state.termBlockOrd == blockTermCount && !nextBlock()) {
//System.out.println(" eof");
indexIsCurrent = false;
return null;
}
// TODO: cutover to something better for these ints! simple64?
final int suffix = termSuffixesReader.readVInt();
//System.out.println(" suffix=" + suffix);
// Rebuild the term: keep the block's shared prefix, append the new suffix bytes.
term.setLength(termBlockPrefix + suffix);
term.grow(term.length());
termSuffixesReader.readBytes(term.bytes(), termBlockPrefix, suffix);
state.termBlockOrd++;
// NOTE: meaningless in the non-ord case
state.ord++;
//System.out.println(" return term=" + fieldInfo.name + ":" + term.utf8ToString() + " " + term + " tbOrd=" + state.termBlockOrd);
return term.get();
}
@Override
public BytesRef term() {
// Current term; only meaningful after a successful next()/seek.
return term.get();
}
@Override
public int docFreq() throws IOException {
//System.out.println("BTR.docFreq");
// Metadata is decoded lazily; catch up to the current term first.
decodeMetaData();
//System.out.println(" return " + state.docFreq);
return state.docFreq;
}
@Override
public long totalTermFreq() throws IOException {
// Lazily decode metadata; remains -1 for docs-only fields (never decoded).
decodeMetaData();
return state.totalTermFreq;
}
@Override
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
//System.out.println("BTR.docs this=" + this);
// Postings metadata (file pointers etc.) must be decoded before the
// postings reader can position itself.
decodeMetaData();
//System.out.println("BTR.docs: state.docFreq=" + state.docFreq);
return postingsReader.docs(fieldInfo, state, liveDocs, reuse, flags);
}
@Override
public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse, int flags) throws IOException {
if (fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) {
// Positions were not indexed:
return null;
}
decodeMetaData();
return postingsReader.docsAndPositions(fieldInfo, state, liveDocs, reuse, flags);
}
@Override
public void seekExact(BytesRef target, TermState otherState) {
//System.out.println("BTR.seekExact termState target=" + target.utf8ToString() + " " + target + " this=" + this);
assert otherState != null && otherState instanceof BlockTermState;
assert !doOrd || ((BlockTermState) otherState).ord < numTerms;
// Copy the caller-supplied state but defer the actual file seek until
// next() is called (seekPending); most callers only pull postings.
state.copyFrom(otherState);
seekPending = true;
indexIsCurrent = false;
term.copyBytes(target);
}
@Override
public TermState termState() throws IOException {
//System.out.println("BTR.termState this=" + this);
// Decode pending metadata so the cloned state is fully populated.
decodeMetaData();
TermState ts = state.clone();
//System.out.println(" return ts=" + ts);
return ts;
}
// Positions the enum exactly on the term with the given ord: seek the index to
// the containing block, load it, then scan forward term by term.
@Override
public void seekExact(long ord) throws IOException {
//System.out.println("BTR.seek by ord ord=" + ord);
if (indexEnum == null) {
throw new IllegalStateException("terms index was not loaded");
}
assert ord < numTerms;
// TODO: if ord is in same terms block and
// after current ord, we should avoid this seek just
// like we do in the seek(BytesRef) case
in.seek(indexEnum.seek(ord));
boolean result = nextBlock();
// Block must exist since ord < numTerms:
assert result;
indexIsCurrent = true;
didIndexNext = false;
blocksSinceSeek = 0;
seekPending = false;
// ord of the term just BEFORE the block start; incremented by _next() below.
state.ord = indexEnum.ord()-1;
assert state.ord >= -1: "ord=" + state.ord;
term.copyBytes(indexEnum.term());
// Now, scan:
int left = (int) (ord - state.ord);
while(left > 0) {
final BytesRef term = _next();
assert term != null;
left--;
assert indexIsCurrent;
}
}
@Override
public long ord() {
if (!doOrd) {
// The terms index implementation in use does not track ords.
throw new UnsupportedOperationException();
}
return state.ord;
}
/* Does initial decode of next block of terms; this
doesn't actually decode the docFreq, totalTermFreq,
postings details (frq/prx offset, etc.) metadata;
it just loads them as byte[] blobs which are then
decoded on-demand if the metadata is ever requested
for any term in this block. This enables terms-only
intensive consumes (eg certain MTQs, respelling) to
not pay the price of decoding metadata they won't
use. Returns false when there are no more blocks
(a zero term count marks end-of-field). */
private boolean nextBlock() throws IOException {
// TODO: we still lazy-decode the byte[] for each
// term (the suffix), but, if we decoded
// all N terms up front then seeking could do a fast
// bsearch w/in the block...
//System.out.println("BTR.nextBlock() fp=" + in.getFilePointer() + " this=" + this);
state.blockFilePointer = in.getFilePointer();
blockTermCount = in.readVInt();
//System.out.println(" blockTermCount=" + blockTermCount);
if (blockTermCount == 0) {
return false;
}
termBlockPrefix = in.readVInt();
// term suffixes:
int len = in.readVInt();
// Grow scratch buffers only when needed; they are reused across blocks.
if (termSuffixes.length < len) {
termSuffixes = new byte[ArrayUtil.oversize(len, 1)];
}
//System.out.println(" termSuffixes len=" + len);
in.readBytes(termSuffixes, 0, len);
termSuffixesReader.reset(termSuffixes, 0, len);
// docFreq, totalTermFreq
len = in.readVInt();
if (docFreqBytes.length < len) {
docFreqBytes = new byte[ArrayUtil.oversize(len, 1)];
}
//System.out.println(" freq bytes len=" + len);
in.readBytes(docFreqBytes, 0, len);
freqReader.reset(docFreqBytes, 0, len);
// metadata
len = in.readVInt();
if (bytes == null) {
// Lazily created on the first block so terms-only consumers that never
// reach here pay nothing.
bytes = new byte[ArrayUtil.oversize(len, 1)];
bytesReader = new ByteArrayDataInput();
} else if (bytes.length < len) {
bytes = new byte[ArrayUtil.oversize(len, 1)];
}
in.readBytes(bytes, 0, len);
bytesReader.reset(bytes, 0, len);
metaDataUpto = 0;
state.termBlockOrd = 0;
// After getDivisor() blocks the index can no longer bracket a
// seek-within-block, so indexIsCurrent must drop to false.
blocksSinceSeek++;
indexIsCurrent = indexIsCurrent && (blocksSinceSeek < indexReader.getDivisor());
//System.out.println(" indexIsCurrent=" + indexIsCurrent);
return true;
}
/**
 * Lazily decodes per-term metadata (docFreq, totalTermFreq and the opaque
 * postings pointers) for the current block, catching up from the last
 * decoded entry to the term the enumeration currently points at
 * (state.termBlockOrd). No-op while a seek is pending — the seek will
 * reposition and re-decode as needed.
 */
private void decodeMetaData() throws IOException {
//System.out.println("BTR.decodeMetadata mdUpto=" + metaDataUpto + " vs termCount=" + state.termBlockOrd + " state=" + state);
if (!seekPending) {
// TODO: cutover to random-access API
// here.... really stupid that we have to decode N
// wasted term metadata just to get to the N+1th
// that we really need...
// lazily catch up on metadata decode:
final int limit = state.termBlockOrd;
// The first entry decoded in a block is passed to the postings reader
// with absolute=true; later entries with absolute=false (presumably
// delta-coded by the postings format — confirm against the writer).
boolean absolute = metaDataUpto == 0;
// TODO: better API would be "jump straight to term=N"???
while (metaDataUpto < limit) {
//System.out.println("  decode mdUpto=" + metaDataUpto);
// TODO: we could make "tiers" of metadata, ie,
// decode docFreq/totalTF but don't decode postings
// metadata; this way caller could get
// docFreq/totalTF w/o paying decode cost for
// postings
// TODO: if docFreq were bulk decoded we could
// just skipN here:
// docFreq, totalTermFreq
state.docFreq = freqReader.readVInt();
//System.out.println("    dF=" + state.docFreq);
// totalTermFreq is written as a delta on top of docFreq, and is only
// present when the field indexes more than just docs.
if (fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) {
state.totalTermFreq = state.docFreq + freqReader.readVLong();
//System.out.println("    totTF=" + state.totalTermFreq);
}
// metadata: a fixed number of vLongs followed by opaque bytes, both
// interpreted entirely by the postings reader.
for (int i = 0; i < longs.length; i++) {
longs[i] = bytesReader.readVLong();
}
postingsReader.decodeTerm(longs, bytesReader, fieldInfo, state, absolute);
metaDataUpto++;
absolute = false;
}
} else {
//System.out.println("  skip! seekPending");
}
}
}
}
@Override
public long ramBytesUsed() {
// Fixed shallow footprint of this reader instance.
long total = BASE_RAM_BYTES_USED;
if (postingsReader != null) {
total += postingsReader.ramBytesUsed();
}
if (indexReader != null) {
total += indexReader.ramBytesUsed();
}
// Two object references (key and value) per entry in the fields map.
total += fields.size() * 2L * RamUsageEstimator.NUM_BYTES_OBJECT_REF;
for (FieldReader fieldReader : fields.values()) {
total += fieldReader.ramBytesUsed();
}
return total;
}
/**
 * Verifies the integrity of the underlying files: checksums the entire
 * terms file when the format version wrote a checksum footer, then
 * delegates postings verification to the postings reader.
 */
@Override
public void checkIntegrity() throws IOException {
// verify terms
if (version >= BlockTermsWriter.VERSION_CHECKSUM) {
CodecUtil.checksumEntireFile(in);
}
// verify postings
postingsReader.checkIntegrity();
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.rds.model;
import java.io.Serializable;
import java.util.Objects;
import javax.annotation.Generated;
/**
* <p>
* Contains the details for an Amazon RDS DB security group.
* </p>
* <p>
* This data type is used as a response element in the <code>DescribeDBSecurityGroups</code> action.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/rds-2014-10-31/DBSecurityGroup" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DBSecurityGroup implements Serializable, Cloneable {

    /** The Amazon Web Services ID of the owner of this DB security group. */
    private String ownerId;

    /** The name of the DB security group. */
    private String dBSecurityGroupName;

    /** The description of the DB security group. */
    private String dBSecurityGroupDescription;

    /** The VpcId of the DB security group. */
    private String vpcId;

    /** The {@code EC2SecurityGroup} elements; created lazily on first access. */
    private com.amazonaws.internal.SdkInternalList<EC2SecurityGroup> eC2SecurityGroups;

    /** The {@code IPRange} elements; created lazily on first access. */
    private com.amazonaws.internal.SdkInternalList<IPRange> iPRanges;

    /** The Amazon Resource Name (ARN) for the DB security group. */
    private String dBSecurityGroupArn;

    /**
     * Sets the Amazon Web Services ID of the owner of this DB security group.
     *
     * @param ownerId
     *        the owner's Amazon Web Services ID
     */
    public void setOwnerId(String ownerId) {
        this.ownerId = ownerId;
    }

    /**
     * Returns the Amazon Web Services ID of the owner of this DB security group.
     *
     * @return the owner's Amazon Web Services ID
     */
    public String getOwnerId() {
        return this.ownerId;
    }

    /**
     * Fluent variant of {@link #setOwnerId(String)}.
     *
     * @param ownerId
     *        the owner's Amazon Web Services ID
     * @return this object, so that method calls can be chained together
     */
    public DBSecurityGroup withOwnerId(String ownerId) {
        setOwnerId(ownerId);
        return this;
    }

    /**
     * Sets the name of the DB security group.
     *
     * @param dBSecurityGroupName
     *        the DB security group name
     */
    public void setDBSecurityGroupName(String dBSecurityGroupName) {
        this.dBSecurityGroupName = dBSecurityGroupName;
    }

    /**
     * Returns the name of the DB security group.
     *
     * @return the DB security group name
     */
    public String getDBSecurityGroupName() {
        return this.dBSecurityGroupName;
    }

    /**
     * Fluent variant of {@link #setDBSecurityGroupName(String)}.
     *
     * @param dBSecurityGroupName
     *        the DB security group name
     * @return this object, so that method calls can be chained together
     */
    public DBSecurityGroup withDBSecurityGroupName(String dBSecurityGroupName) {
        setDBSecurityGroupName(dBSecurityGroupName);
        return this;
    }

    /**
     * Sets the description of the DB security group.
     *
     * @param dBSecurityGroupDescription
     *        the DB security group description
     */
    public void setDBSecurityGroupDescription(String dBSecurityGroupDescription) {
        this.dBSecurityGroupDescription = dBSecurityGroupDescription;
    }

    /**
     * Returns the description of the DB security group.
     *
     * @return the DB security group description
     */
    public String getDBSecurityGroupDescription() {
        return this.dBSecurityGroupDescription;
    }

    /**
     * Fluent variant of {@link #setDBSecurityGroupDescription(String)}.
     *
     * @param dBSecurityGroupDescription
     *        the DB security group description
     * @return this object, so that method calls can be chained together
     */
    public DBSecurityGroup withDBSecurityGroupDescription(String dBSecurityGroupDescription) {
        setDBSecurityGroupDescription(dBSecurityGroupDescription);
        return this;
    }

    /**
     * Sets the VpcId of the DB security group.
     *
     * @param vpcId
     *        the VPC id
     */
    public void setVpcId(String vpcId) {
        this.vpcId = vpcId;
    }

    /**
     * Returns the VpcId of the DB security group.
     *
     * @return the VPC id
     */
    public String getVpcId() {
        return this.vpcId;
    }

    /**
     * Fluent variant of {@link #setVpcId(String)}.
     *
     * @param vpcId
     *        the VPC id
     * @return this object, so that method calls can be chained together
     */
    public DBSecurityGroup withVpcId(String vpcId) {
        setVpcId(vpcId);
        return this;
    }

    /**
     * Returns the list of {@code EC2SecurityGroup} elements, never {@code null};
     * an empty internal list is created lazily on first access.
     *
     * @return the EC2 security groups
     */
    public java.util.List<EC2SecurityGroup> getEC2SecurityGroups() {
        if (eC2SecurityGroups == null) {
            eC2SecurityGroups = new com.amazonaws.internal.SdkInternalList<EC2SecurityGroup>();
        }
        return eC2SecurityGroups;
    }

    /**
     * Replaces the list of {@code EC2SecurityGroup} elements with a copy of the
     * given collection, or clears it when {@code null} is passed.
     *
     * @param eC2SecurityGroups
     *        the EC2 security groups, may be {@code null}
     */
    public void setEC2SecurityGroups(java.util.Collection<EC2SecurityGroup> eC2SecurityGroups) {
        if (eC2SecurityGroups == null) {
            this.eC2SecurityGroups = null;
            return;
        }
        this.eC2SecurityGroups = new com.amazonaws.internal.SdkInternalList<EC2SecurityGroup>(eC2SecurityGroups);
    }

    /**
     * Appends the given values to the existing list (if any). Use
     * {@link #setEC2SecurityGroups(java.util.Collection)} or
     * {@link #withEC2SecurityGroups(java.util.Collection)} if you want to
     * override the existing values.
     *
     * @param eC2SecurityGroups
     *        the EC2 security groups to append
     * @return this object, so that method calls can be chained together
     */
    public DBSecurityGroup withEC2SecurityGroups(EC2SecurityGroup... eC2SecurityGroups) {
        if (this.eC2SecurityGroups == null) {
            setEC2SecurityGroups(new com.amazonaws.internal.SdkInternalList<EC2SecurityGroup>(eC2SecurityGroups.length));
        }
        for (EC2SecurityGroup ele : eC2SecurityGroups) {
            this.eC2SecurityGroups.add(ele);
        }
        return this;
    }

    /**
     * Fluent variant of {@link #setEC2SecurityGroups(java.util.Collection)}.
     *
     * @param eC2SecurityGroups
     *        the EC2 security groups, may be {@code null}
     * @return this object, so that method calls can be chained together
     */
    public DBSecurityGroup withEC2SecurityGroups(java.util.Collection<EC2SecurityGroup> eC2SecurityGroups) {
        setEC2SecurityGroups(eC2SecurityGroups);
        return this;
    }

    /**
     * Returns the list of {@code IPRange} elements, never {@code null}; an
     * empty internal list is created lazily on first access.
     *
     * @return the IP ranges
     */
    public java.util.List<IPRange> getIPRanges() {
        if (iPRanges == null) {
            iPRanges = new com.amazonaws.internal.SdkInternalList<IPRange>();
        }
        return iPRanges;
    }

    /**
     * Replaces the list of {@code IPRange} elements with a copy of the given
     * collection, or clears it when {@code null} is passed.
     *
     * @param iPRanges
     *        the IP ranges, may be {@code null}
     */
    public void setIPRanges(java.util.Collection<IPRange> iPRanges) {
        if (iPRanges == null) {
            this.iPRanges = null;
            return;
        }
        this.iPRanges = new com.amazonaws.internal.SdkInternalList<IPRange>(iPRanges);
    }

    /**
     * Appends the given values to the existing list (if any). Use
     * {@link #setIPRanges(java.util.Collection)} or
     * {@link #withIPRanges(java.util.Collection)} if you want to override the
     * existing values.
     *
     * @param iPRanges
     *        the IP ranges to append
     * @return this object, so that method calls can be chained together
     */
    public DBSecurityGroup withIPRanges(IPRange... iPRanges) {
        if (this.iPRanges == null) {
            setIPRanges(new com.amazonaws.internal.SdkInternalList<IPRange>(iPRanges.length));
        }
        for (IPRange ele : iPRanges) {
            this.iPRanges.add(ele);
        }
        return this;
    }

    /**
     * Fluent variant of {@link #setIPRanges(java.util.Collection)}.
     *
     * @param iPRanges
     *        the IP ranges, may be {@code null}
     * @return this object, so that method calls can be chained together
     */
    public DBSecurityGroup withIPRanges(java.util.Collection<IPRange> iPRanges) {
        setIPRanges(iPRanges);
        return this;
    }

    /**
     * Sets the Amazon Resource Name (ARN) for the DB security group.
     *
     * @param dBSecurityGroupArn
     *        the ARN
     */
    public void setDBSecurityGroupArn(String dBSecurityGroupArn) {
        this.dBSecurityGroupArn = dBSecurityGroupArn;
    }

    /**
     * Returns the Amazon Resource Name (ARN) for the DB security group.
     *
     * @return the ARN
     */
    public String getDBSecurityGroupArn() {
        return this.dBSecurityGroupArn;
    }

    /**
     * Fluent variant of {@link #setDBSecurityGroupArn(String)}.
     *
     * @param dBSecurityGroupArn
     *        the ARN
     * @return this object, so that method calls can be chained together
     */
    public DBSecurityGroup withDBSecurityGroupArn(String dBSecurityGroupArn) {
        setDBSecurityGroupArn(dBSecurityGroupArn);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getOwnerId() != null)
            sb.append("OwnerId: ").append(getOwnerId()).append(",");
        if (getDBSecurityGroupName() != null)
            sb.append("DBSecurityGroupName: ").append(getDBSecurityGroupName()).append(",");
        if (getDBSecurityGroupDescription() != null)
            sb.append("DBSecurityGroupDescription: ").append(getDBSecurityGroupDescription()).append(",");
        if (getVpcId() != null)
            sb.append("VpcId: ").append(getVpcId()).append(",");
        if (getEC2SecurityGroups() != null)
            sb.append("EC2SecurityGroups: ").append(getEC2SecurityGroups()).append(",");
        if (getIPRanges() != null)
            sb.append("IPRanges: ").append(getIPRanges()).append(",");
        if (getDBSecurityGroupArn() != null)
            sb.append("DBSecurityGroupArn: ").append(getDBSecurityGroupArn());
        sb.append("}");
        return sb.toString();
    }

    /**
     * Two instances are equal when all seven properties are equal (null-safe).
     * Note: list getters create empty lists lazily, so a null list and an
     * empty list compare equal — matching the previous generated behavior.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof DBSecurityGroup))
            return false;
        DBSecurityGroup other = (DBSecurityGroup) obj;
        return Objects.equals(getOwnerId(), other.getOwnerId())
                && Objects.equals(getDBSecurityGroupName(), other.getDBSecurityGroupName())
                && Objects.equals(getDBSecurityGroupDescription(), other.getDBSecurityGroupDescription())
                && Objects.equals(getVpcId(), other.getVpcId())
                && Objects.equals(getEC2SecurityGroups(), other.getEC2SecurityGroups())
                && Objects.equals(getIPRanges(), other.getIPRanges())
                && Objects.equals(getDBSecurityGroupArn(), other.getDBSecurityGroupArn());
    }

    /**
     * Hash code over the same seven properties used by {@link #equals(Object)}.
     * {@link Objects#hash} applies the identical 31-based accumulation the
     * previous hand-rolled implementation used, so hash values are unchanged.
     */
    @Override
    public int hashCode() {
        return Objects.hash(getOwnerId(), getDBSecurityGroupName(), getDBSecurityGroupDescription(), getVpcId(), getEC2SecurityGroups(), getIPRanges(),
                getDBSecurityGroupArn());
    }

    /** Shallow clone via {@link Object#clone()}; this class is {@code Cloneable}. */
    @Override
    public DBSecurityGroup clone() {
        try {
            return (DBSecurityGroup) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.ui;
import com.intellij.ide.DataManager;
import com.intellij.ide.ui.UISettings;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.CommonShortcuts;
import com.intellij.openapi.actionSystem.DataProvider;
import com.intellij.openapi.actionSystem.ex.ActionManagerEx;
import com.intellij.openapi.actionSystem.ex.ActionUtil;
import com.intellij.openapi.actionSystem.impl.MouseGestureManager;
import com.intellij.openapi.application.impl.ApplicationInfoImpl;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.project.ProjectManagerAdapter;
import com.intellij.openapi.project.ProjectManagerListener;
import com.intellij.openapi.ui.popup.util.PopupUtil;
import com.intellij.openapi.util.ActionCallback;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.WindowStateService;
import com.intellij.openapi.wm.*;
import com.intellij.openapi.wm.ex.LayoutFocusTraversalPolicyExt;
import com.intellij.openapi.wm.ex.WindowManagerEx;
import com.intellij.openapi.wm.impl.IdeFrameImpl;
import com.intellij.openapi.wm.impl.IdeGlassPaneImpl;
import com.intellij.openapi.wm.impl.IdeMenuBar;
import com.intellij.ui.AppUIUtil;
import com.intellij.ui.BalloonLayout;
import com.intellij.ui.FocusTrackback;
import com.intellij.ui.FrameState;
import com.intellij.util.ImageLoader;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.io.File;
import java.util.Map;
/**
 * Hosts an arbitrary {@link JComponent} in its own top-level window — a
 * {@link JFrame}, or a {@link JDialog} when constructed with
 * {@code isDialog=true} — handling initial focus transfer (with type-ahead),
 * optional Esc-to-close behavior, window bounds persistence via
 * {@link WindowStateService}, and disposal tied to the owning project closing.
 */
public class FrameWrapper implements Disposable, DataProvider {
// Key under which WindowStateService persists/restores bounds; null disables persistence.
private String myDimensionKey = null;
// Content shown in the window; must be set via setComponent() before show().
private JComponent myComponent = null;
private JComponent myPreferredFocus = null;
private String myTitle = "";
// Default window icon; replaced if setImage() is called (see myImageWasChanged).
private Image myImage = ImageLoader.loadFromResource(ApplicationInfoImpl.getShadowInstance().getIconUrl());
private boolean myCloseOnEsc = false;
// Lazily created by getFrame(): a MyJFrame or MyJDialog depending on myIsDialog.
private Window myFrame;
// Extra values served through the window's DataProvider (see setData()).
private final Map<String, Object> myDataMap = ContainerUtil.newHashMap();
private Project myProject;
private final ProjectManagerListener myProjectListener = new MyProjectManagerListener();
private FocusTrackback myFocusTrackback;
private FocusWatcher myFocusWatcher;
// Notified (done or rejected) once initial focus settles after show(); used for type-ahead.
private ActionCallback myFocusedCallback;
private boolean myDisposing;
private boolean myDisposed;
protected StatusBar myStatusBar;
// True once show() ran; gates saving window bounds on dispose().
private boolean myShown;
private boolean myIsDialog;
// True once setImage() was called; otherwise the standard IDE icon is applied on show().
private boolean myImageWasChanged;
public FrameWrapper(Project project) {
this(project, null);
}
public FrameWrapper(Project project, @Nullable @NonNls String dimensionServiceKey) {
this(project, dimensionServiceKey, false);
}
public FrameWrapper(Project project, @Nullable @NonNls String dimensionServiceKey, boolean isDialog) {
myDimensionKey = dimensionServiceKey;
myIsDialog = isDialog;
if (project != null) {
setProject(project);
}
}
public void setDimensionKey(String dimensionKey) {
myDimensionKey = dimensionKey;
}
/** Registers a value returned by the window's {@code getData(dataId)} lookups. */
public void setData(String dataId, Object data) {
myDataMap.put(dataId, data);
}
/**
 * Associates this wrapper with {@code project}: exposes it via the data
 * context and subscribes a listener that closes this window when the
 * project starts closing; the listener is removed on dispose.
 */
public void setProject(@NotNull final Project project) {
myProject = project;
setData(CommonDataKeys.PROJECT.getName(), project);
ProjectManager.getInstance().addProjectManagerListener(project, myProjectListener);
Disposer.register(this, new Disposable() {
@Override
public void dispose() {
ProjectManager.getInstance().removeProjectManagerListener(project, myProjectListener);
}
});
}
public void show() {
show(true);
}
/**
 * Builds (if needed) and shows the window: installs the status bar, sets up
 * focus tracking and the window-opened focus request, content, title and
 * icon, and optionally restores persisted bounds.
 *
 * @param restoreBounds whether to restore the window bounds saved under the
 *                      dimension key (or fall back to suggested child bounds)
 */
public void show(boolean restoreBounds) {
myFocusedCallback = new ActionCallback();
if (myProject != null) {
// Swallow keystrokes until initial focus lands (type-ahead).
IdeFocusManager.getInstance(myProject).typeAheadUntil(myFocusedCallback);
}
final Window frame = getFrame();
if (myStatusBar != null) {
myStatusBar.install((IdeFrame)frame);
}
myFocusTrackback = new FocusTrackback(this, IdeFocusManager.findInstance().getFocusOwner(), true);
if (frame instanceof JFrame) {
((JFrame)frame).setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
} else {
((JDialog)frame).setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
}
// On first open, move focus to the preferred component (or a computed
// focus target inside myComponent) and complete the type-ahead callback.
final WindowAdapter focusListener = new WindowAdapter() {
public void windowOpened(WindowEvent e) {
IdeFocusManager fm = IdeFocusManager.getInstance(myProject);
JComponent toFocus = getPreferredFocusedComponent();
if (toFocus == null) {
toFocus = fm.getFocusTargetFor(myComponent);
}
if (toFocus != null) {
fm.requestFocus(toFocus, true).notify(myFocusedCallback);
} else {
myFocusedCallback.setRejected();
}
}
};
frame.addWindowListener(focusListener);
Disposer.register(this, new Disposable() {
@Override
public void dispose() {
frame.removeWindowListener(focusListener);
}
});
if (myCloseOnEsc) addCloseOnEsc((RootPaneContainer)frame);
((RootPaneContainer)frame).getContentPane().add(myComponent, BorderLayout.CENTER);
if (frame instanceof JFrame) {
((JFrame)frame).setTitle(myTitle);
} else {
((JDialog)frame).setTitle(myTitle);
}
if (myImageWasChanged) {
frame.setIconImage(myImage);
}
else {
AppUIUtil.updateWindowIcon(myFrame);
}
if (restoreBounds) {
loadFrameState();
}
// Restore focus to the previous owner when focus leaves this window's content.
myFocusWatcher = new FocusWatcher() {
protected void focusLostImpl(final FocusEvent e) {
myFocusTrackback.consume();
}
};
myFocusWatcher.install(myComponent);
myShown = true;
frame.setVisible(true);
if (UIUtil.isUnderAlloyLookAndFeel() && frame instanceof JFrame) {
//please ask [kb] before remove it
((JFrame)frame).setMaximizedBounds(null);
}
}
public void close() {
Disposer.dispose(this);
}
/**
 * Tears everything down: saves bounds (if shown and keyed), restores focus,
 * uninstalls the focus watcher, clears references, then hides and disposes
 * the window and the status bar. Safe to call more than once.
 */
public void dispose() {
if (isDisposed()) return;
Window frame = myFrame;
StatusBar statusBar = myStatusBar;
if (myShown && myDimensionKey != null) {
WindowStateService.getInstance().saveStateFor(myProject, myDimensionKey, frame);
}
// References are cleared before disposing the window, presumably to break
// cycles and avoid leaks — confirm before reordering.
myFrame = null;
myPreferredFocus = null;
myProject = null;
myDataMap.clear();
if (myFocusTrackback != null) {
myFocusTrackback.restoreFocus();
}
if (myComponent != null && myFocusWatcher != null) {
myFocusWatcher.deinstall(myComponent);
}
myFocusWatcher = null;
myFocusedCallback = null;
myFocusTrackback = null;
myComponent = null;
myImage = null;
myDisposed = true;
if (frame != null) {
JRootPane rootPane = ((RootPaneContainer)frame).getRootPane();
if (rootPane != null) {
DialogWrapper.unregisterKeyboardActions(rootPane);
}
frame.setVisible(false);
if (frame instanceof JFrame) {
FocusTrackback.release((JFrame)frame);
}
if (frame instanceof IdeFrame) {
MouseGestureManager.getInstance().remove((IdeFrame)frame);
}
frame.dispose();
}
if (statusBar != null) {
Disposer.dispose(statusBar);
}
}
public boolean isDisposed() {
return myDisposed;
}
/**
 * Registers Esc (and every "close active window" shortcut) on the root pane
 * to hide and close this wrapper, unless a popup consumes the Esc first.
 */
private void addCloseOnEsc(final RootPaneContainer frame) {
JRootPane rootPane = frame.getRootPane();
ActionListener closeAction = new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
if (!PopupUtil.handleEscKeyEvent()) {
// if you remove this line problems will start happen on Mac OS X
// 2 projects opened, call Cmd+D on the second opened project and then Esc.
// Weird situation: 2nd IdeFrame will be active, but focus will be somewhere inside the 1st IdeFrame
// App is unusable until Cmd+Tab, Cmd+tab
FrameWrapper.this.myFrame.setVisible(false);
close();
}
}
};
rootPane.registerKeyboardAction(closeAction, KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0), JComponent.WHEN_IN_FOCUSED_WINDOW);
ActionUtil.registerForEveryKeyboardShortcut(rootPane, closeAction, CommonShortcuts.getCloseActiveWindow());
}
/**
 * Lazily creates and returns the window. Must not be called after dispose.
 *
 * @return the backing {@link JFrame} or {@link JDialog}
 */
public Window getFrame() {
assert !myDisposed : "Already disposed!";
if (myFrame == null) {
final IdeFrame parent = WindowManager.getInstance().getIdeFrame(myProject);
myFrame = myIsDialog ? createJDialog(parent) : createJFrame(parent);
}
return myFrame;
}
/** Factory for the frame flavor; the anonymous subclass forwards north-extension lookups to this wrapper. */
protected JFrame createJFrame(IdeFrame parent) {
return new MyJFrame(parent) {
@Override
public IdeRootPaneNorthExtension getNorthExtension(String key) {
return FrameWrapper.this.getNorthExtension(key);
}
};
}
/** Factory for the dialog flavor. */
protected JDialog createJDialog(IdeFrame parent) {
return new MyJDialog(parent);
}
/** Override to supply a north extension for a given key; default is none. */
protected IdeRootPaneNorthExtension getNorthExtension(String key) {
return null;
}
@Override
public Object getData(@NonNls String dataId) {
if (CommonDataKeys.PROJECT.is(dataId)) {
return myProject;
}
return null;
}
public void setComponent(JComponent component) {
myComponent = component;
}
public void setPreferredFocusedComponent(JComponent preferedFocus) {
myPreferredFocus = preferedFocus;
}
public JComponent getPreferredFocusedComponent() {
return myPreferredFocus;
}
/** Enables Esc-to-close; takes effect in show(). */
public void closeOnEsc() {
myCloseOnEsc = true;
}
/** Replaces the window icon; suppresses the default IDE icon on show(). */
public void setImage(Image image) {
myImageWasChanged = true;
myImage = image;
}
/**
 * Restores window bounds saved under the dimension key; when nothing was
 * saved, falls back to bounds suggested by the project's IDE frame.
 */
protected void loadFrameState() {
final Window frame = getFrame();
if (myDimensionKey != null && !WindowStateService.getInstance().loadStateFor(myProject, myDimensionKey, frame)) {
final IdeFrame ideFrame = WindowManagerEx.getInstanceEx().getIdeFrame(myProject);
if (ideFrame != null) {
frame.setBounds(ideFrame.suggestChildFrameBounds());
}
}
((RootPaneContainer)frame).getRootPane().revalidate();
}
public void setTitle(String title) {
myTitle = title;
}
/** Ties {@code disposable}'s lifetime to this wrapper. */
public void addDisposable(@NotNull Disposable disposable) {
Disposer.register(this, disposable);
}
/** Replaces the status bar, disposing any previous one. */
protected void setStatusBar(StatusBar statusBar) {
if (myStatusBar != null) {
Disposer.dispose(myStatusBar);
}
myStatusBar = statusBar;
}
/**
 * The JFrame flavor of the wrapper window: a child IdeFrame that delegates
 * status bar, project and child-bounds queries to its parent frame and
 * serves this wrapper's data context.
 */
private class MyJFrame extends JFrame implements DataProvider, IdeFrame.Child {
private final IdeFrame myParent;
private String myFrameTitle;
private String myFileTitle;
private File myFile;
private MyJFrame(IdeFrame parent) throws HeadlessException {
// NOTE(review): 'this' escapes to FrameState before construction completes — confirm intended.
FrameState.setFrameStateListener(this);
myParent = parent;
setGlassPane(new IdeGlassPaneImpl(getRootPane(), true));
// Put the menu bar on the frame itself on macOS, or on Linux/Unity when
// the Jayatana global-menu agent is present.
boolean setMenuOnFrame = SystemInfo.isMac;
if (SystemInfo.isLinux && "Unity".equals(System.getenv("XDG_CURRENT_DESKTOP"))) {
try {
Class.forName("com.jarego.jayatana.Agent");
setMenuOnFrame = true;
}
catch (ClassNotFoundException e) {
// ignore
}
}
if (setMenuOnFrame) {
setJMenuBar(new IdeMenuBar(ActionManagerEx.getInstanceEx(), DataManager.getInstance()));
}
MouseGestureManager.getInstance().add(this);
setFocusTraversalPolicy(new LayoutFocusTraversalPolicyExt());
setDefaultCloseOperation(DISPOSE_ON_CLOSE);
}
@Override
public JComponent getComponent() {
return getRootPane();
}
@Override
public StatusBar getStatusBar() {
// Prefer the wrapper's own status bar; fall back to the parent frame's.
return myStatusBar != null ? myStatusBar : myParent.getStatusBar();
}
@Override
public Rectangle suggestChildFrameBounds() {
return myParent.suggestChildFrameBounds();
}
@Override
public Project getProject() {
return myParent.getProject();
}
@Override
public void setFrameTitle(String title) {
myFrameTitle = title;
updateTitle();
}
@Override
public void setFileTitle(String fileTitle, File ioFile) {
myFileTitle = fileTitle;
myFile = ioFile;
updateTitle();
}
@Override
public IdeRootPaneNorthExtension getNorthExtension(String key) {
return null;
}
@Override
public BalloonLayout getBalloonLayout() {
return null;
}
private void updateTitle() {
IdeFrameImpl.updateTitle(this, myFrameTitle, myFileTitle, myFile);
}
@Override
public IdeFrame getParentFrame() {
return myParent;
}
// Disposing the frame disposes the whole wrapper exactly once (guarded by myDisposing).
public void dispose() {
if (myDisposing) return;
myDisposing = true;
Disposer.dispose(FrameWrapper.this);
super.dispose();
}
public Object getData(String dataId) {
if (IdeFrame.KEY.getName().equals(dataId)) {
return this;
}
// Fall back to the wrapper's data, then to values registered via setData().
Object data = FrameWrapper.this.getData(dataId);
return data != null ? data : myDataMap.get(dataId);
}
@Override
public void paint(Graphics g) {
UISettings.setupAntialiasing(g);
super.paint(g);
}
}
/**
 * The JDialog flavor of the wrapper window; mirrors MyJFrame but has no
 * status bar and uses the small-window macOS style.
 */
private class MyJDialog extends JDialog implements DataProvider, IdeFrame.Child {
private final IdeFrame myParent;
private MyJDialog(IdeFrame parent) throws HeadlessException {
super((JFrame)parent);
myParent = parent;
setGlassPane(new IdeGlassPaneImpl(getRootPane()));
getRootPane().putClientProperty("Window.style", "small");
setBackground(UIUtil.getPanelBackground());
MouseGestureManager.getInstance().add(this);
setFocusTraversalPolicy(new LayoutFocusTraversalPolicyExt());
setDefaultCloseOperation(DISPOSE_ON_CLOSE);
}
@Override
public JComponent getComponent() {
return getRootPane();
}
@Override
public StatusBar getStatusBar() {
return null;
}
@Nullable
@Override
public BalloonLayout getBalloonLayout() {
return null;
}
@Override
public Rectangle suggestChildFrameBounds() {
return myParent.suggestChildFrameBounds();
}
@Override
public Project getProject() {
return myParent.getProject();
}
@Override
public void setFrameTitle(String title) {
setTitle(title);
}
@Override
public void setFileTitle(String fileTitle, File ioFile) {
setTitle(fileTitle);
}
@Override
public IdeRootPaneNorthExtension getNorthExtension(String key) {
return null;
}
@Override
public IdeFrame getParentFrame() {
return myParent;
}
// Disposing the dialog disposes the whole wrapper exactly once (guarded by myDisposing).
public void dispose() {
if (myDisposing) return;
myDisposing = true;
Disposer.dispose(FrameWrapper.this);
super.dispose();
}
public Object getData(String dataId) {
if (IdeFrame.KEY.getName().equals(dataId)) {
return this;
}
// Fall back to the wrapper's data, then to values registered via setData().
Object data = FrameWrapper.this.getData(dataId);
return data != null ? data : myDataMap.get(dataId);
}
@Override
public void paint(Graphics g) {
UISettings.setupAntialiasing(g);
super.paint(g);
}
}
public void setLocation(Point location) {
getFrame().setLocation(location);
}
public void setSize(Dimension size) {
getFrame().setSize(size);
}
/** Closes this wrapper when its associated project is closing. */
private class MyProjectManagerListener extends ProjectManagerAdapter {
public void projectClosing(Project project) {
if (project == myProject) {
close();
}
}
}
}
| |
/*
* Copyright 2010-2013 Ning, Inc.
* Copyright 2014 Groupon, Inc
* Copyright 2014 The Billing Project, LLC
*
* The Billing Project licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.jaxrs.resources;
/**
 * Shared JAX-RS constants for the Kill Bill API: URL path fragments, custom HTTP
 * header names, path-matching patterns, and query-parameter names used by the
 * resource classes.
 *
 * Note: fields of an interface are implicitly {@code public static final}; the
 * modifiers are now spelled out uniformly (several were previously missing
 * {@code final}) with no change in semantics.
 */
public interface JaxrsResource {
    public static final String API_PREFIX = "";
    public static final String API_VERSION = "/1.0";
    public static final String API_POSTFIX = "/kb";
    /** Common prefix of every versioned endpoint, e.g. {@code /1.0/kb}. */
    public static final String PREFIX = API_PREFIX + API_VERSION + API_POSTFIX;
    public static final String TIMELINE = "timeline";
    public static final String REGISTER_NOTIFICATION_CALLBACK = "registerNotificationCallback";
    public static final String UPLOAD_PLUGIN_CONFIG = "uploadPluginConfig";
    public static final String USER_KEY_VALUE = "userKeyValue";
    public static final String SEARCH = "search";
    /*
     * Multi-Tenancy headers
     */
    public static final String HDR_API_KEY = "X-Killbill-ApiKey";
    public static final String HDR_API_SECRET = "X-Killbill-ApiSecret";
    /*
     * Metadata Additional headers
     */
    public static final String HDR_CREATED_BY = "X-Killbill-CreatedBy";
    public static final String HDR_REASON = "X-Killbill-Reason";
    public static final String HDR_COMMENT = "X-Killbill-Comment";
    public static final String HDR_PAGINATION_CURRENT_OFFSET = "X-Killbill-Pagination-CurrentOffset";
    public static final String HDR_PAGINATION_NEXT_OFFSET = "X-Killbill-Pagination-NextOffset";
    public static final String HDR_PAGINATION_TOTAL_NB_RECORDS = "X-Killbill-Pagination-TotalNbRecords";
    public static final String HDR_PAGINATION_MAX_NB_RECORDS = "X-Killbill-Pagination-MaxNbRecords";
    public static final String HDR_PAGINATION_NEXT_PAGE_URI = "X-Killbill-Pagination-NextPageUri";
    /*
     * Patterns (used in @Path regular expressions)
     */
    public static final String STRING_PATTERN = "\\w+";
    public static final String UUID_PATTERN = "\\w+-\\w+-\\w+-\\w+-\\w+";
    public static final String NUMBER_PATTERN = "[0-9]+";
    public static final String ANYTHING_PATTERN = ".*";
    /*
     * Query parameters
     */
    public static final String QUERY_EXTERNAL_KEY = "externalKey";
    public static final String QUERY_API_KEY = "apiKey";
    public static final String QUERY_REQUESTED_DT = "requestedDate";
    public static final String QUERY_CALL_COMPLETION = "callCompletion";
    public static final String QUERY_USE_REQUESTED_DATE_FOR_BILLING = "useRequestedDateForBilling";
    public static final String QUERY_CALL_TIMEOUT = "callTimeoutSec";
    public static final String QUERY_TARGET_DATE = "targetDate";
    public static final String QUERY_BILLING_POLICY = "billingPolicy";
    public static final String QUERY_ENTITLEMENT_POLICY = "entitlementPolicy";
    public static final String QUERY_SEARCH_OFFSET = "offset";
    public static final String QUERY_SEARCH_LIMIT = "limit";
    public static final String QUERY_ACCOUNT_WITH_BALANCE = "accountWithBalance";
    public static final String QUERY_ACCOUNT_WITH_BALANCE_AND_CBA = "accountWithBalanceAndCBA";
    public static final String QUERY_ACCOUNT_ID = "accountId";
    public static final String QUERY_INVOICE_WITH_ITEMS = "withItems";
    public static final String QUERY_UNPAID_INVOICES_ONLY = "unpaidInvoicesOnly";
    public static final String QUERY_PAYMENT_EXTERNAL = "externalPayment";
    public static final String QUERY_PAYMENT_AMOUNT = "paymentAmount";
    public static final String QUERY_PAYMENT_WITH_REFUNDS_AND_CHARGEBACKS = "withRefundsAndChargebacks";
    public static final String QUERY_PAYMENT_PLUGIN_NAME = "pluginName";
    public static final String QUERY_PAYMENT_METHOD_ID = "paymentMethodId";
    public static final String QUERY_PAYMENT_CONTROL_PLUGIN_NAME = "controlPluginName";
    public static final String QUERY_TAGS = "tagList";
    public static final String QUERY_TAGS_INCLUDED_DELETED = "includedDeleted";
    public static final String QUERY_CUSTOM_FIELDS = "customFieldList";
    // NOTE: intentionally the same wire value as QUERY_PAYMENT_PLUGIN_NAME.
    public static final String QUERY_PAYMENT_METHOD_PLUGIN_NAME = "pluginName";
    public static final String QUERY_WITH_PLUGIN_INFO = "withPluginInfo";
    public static final String QUERY_PAYMENT_METHOD_IS_DEFAULT = "isDefault";
    public static final String QUERY_PAY_ALL_UNPAID_INVOICES = "payAllUnpaidInvoices";
    public static final String QUERY_PAY_INVOICE = "payInvoice";
    public static final String QUERY_PLUGIN_PROPERTY = "pluginProperty";
    public static final String QUERY_START_DATE = "startDate";
    public static final String QUERY_END_DATE = "endDate";
    public static final String QUERY_DELETE_IF_EXISTS = "deleteIfExists";
    public static final String QUERY_BUNDLE_TRANSFER_ADDON = "transferAddOn";
    public static final String QUERY_BUNDLE_TRANSFER_CANCEL_IMM = "cancelImmediately";
    public static final String QUERY_DELETE_DEFAULT_PM_WITH_AUTO_PAY_OFF = "deleteDefaultPmWithAutoPayOff";
    public static final String QUERY_AUDIT = "audit";
    public static final String QUERY_NOTIFICATION_CALLBACK = "cb";
    public static final String PAGINATION = "pagination";
    /*
     * Resource names and their full paths
     */
    public static final String ADMIN = "admin";
    public static final String ADMIN_PATH = PREFIX + "/" + ADMIN;
    public static final String ACCOUNTS = "accounts";
    public static final String ACCOUNTS_PATH = PREFIX + "/" + ACCOUNTS;
    public static final String ANALYTICS = "analytics";
    public static final String ANALYTICS_PATH = PREFIX + "/" + ANALYTICS;
    public static final String BUNDLES = "bundles";
    public static final String BUNDLES_PATH = PREFIX + "/" + BUNDLES;
    public static final String SECURITY = "security";
    public static final String SECURITY_PATH = PREFIX + "/" + SECURITY;
    public static final String SUBSCRIPTIONS = "subscriptions";
    public static final String SUBSCRIPTIONS_PATH = PREFIX + "/" + SUBSCRIPTIONS;
    public static final String ENTITLEMENTS = "entitlements";
    public static final String ENTITLEMENTS_PATH = PREFIX + "/" + ENTITLEMENTS;
    public static final String TAG_DEFINITIONS = "tagDefinitions";
    public static final String TAG_DEFINITIONS_PATH = PREFIX + "/" + TAG_DEFINITIONS;
    public static final String INVOICES = "invoices";
    public static final String INVOICES_PATH = PREFIX + "/" + INVOICES;
    public static final String CHARGES = "charges";
    // Charges are nested under invoices.
    public static final String CHARGES_PATH = PREFIX + "/" + INVOICES + "/" + CHARGES;
    public static final String PAYMENTS = "payments";
    public static final String PAYMENTS_PATH = PREFIX + "/" + PAYMENTS;
    public static final String PAYMENT_TRANSACTIONS = "paymentTransactions";
    public static final String PAYMENT_TRANSACTIONS_PATH = PREFIX + "/" + PAYMENT_TRANSACTIONS;
    public static final String PAYMENT_GATEWAYS = "paymentGateways";
    public static final String PAYMENT_GATEWAYS_PATH = PREFIX + "/" + PAYMENT_GATEWAYS;
    public static final String REFUNDS = "refunds";
    public static final String PAYMENT_METHODS = "paymentMethods";
    public static final String PAYMENT_METHODS_PATH = PREFIX + "/" + PAYMENT_METHODS;
    public static final String PAYMENT_METHODS_DEFAULT_PATH_POSTFIX = "setDefault";
    public static final String CREDITS = "credits";
    public static final String CREDITS_PATH = PREFIX + "/" + CREDITS;
    public static final String INVOICE_PAYMENTS = "invoicePayments";
    public static final String INVOICE_PAYMENTS_PATH = PREFIX + "/" + INVOICE_PAYMENTS;
    public static final String DRY_RUN = "dryRun";
    public static final String CHARGEBACKS = "chargebacks";
    public static final String CHARGEBACKS_PATH = PREFIX + "/" + CHARGEBACKS;
    public static final String TAGS = "tags";
    public static final String TAGS_PATH = PREFIX + "/" + TAGS;
    public static final String CUSTOM_FIELDS = "customFields";
    public static final String CUSTOM_FIELDS_PATH = PREFIX + "/" + CUSTOM_FIELDS;
    public static final String EMAILS = "emails";
    public static final String EMAIL_NOTIFICATIONS = "emailNotifications";
    public static final String CATALOG = "catalog";
    public static final String CATALOG_PATH = PREFIX + "/" + CATALOG;
    public static final String OVERDUE = "overdue";
    public static final String OVERDUE_PATH = PREFIX + "/" + OVERDUE;
    public static final String TENANTS = "tenants";
    public static final String TENANTS_PATH = PREFIX + "/" + TENANTS;
    public static final String USAGES = "usages";
    public static final String USAGES_PATH = PREFIX + "/" + USAGES;
    public static final String EXPORT = "export";
    public static final String EXPORT_PATH = PREFIX + "/" + EXPORT;
    public static final String PLUGINS = "plugins";
    // No PREFIX here!
    public static final String PLUGINS_PATH = "/" + PLUGINS;
    public static final String TEST = "test";
    public static final String TEST_PATH = PREFIX + "/" + TEST;
    public static final String CBA_REBALANCING = "cbaRebalancing";
    public static final String PAUSE = "pause";
    public static final String RESUME = "resume";
    public static final String AUTHORIZATION = "authorization";
    public static final String CAPTURE = "capture";
    public static final String HOSTED = "hosted";
    public static final String FORM = "form";
    public static final String NOTIFICATION = "notification";
    public static final String INVOICE_TEMPLATE = "template";
    public static final String INVOICE_MP_TEMPLATE = "manualPayTemplate";
    public static final String INVOICE_TRANSLATION = "translation";
    public static final String INVOICE_CATALOG_TRANSLATION = "catalogTranslation";
    public static final String UPCOMING_INVOICE_TARGET_DATE = "upcomingInvoiceTargetDate";
    public static final String COMBO = "combo";
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.tools;
import java.io.IOException;
import java.io.PrintStream;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.lang.management.MemoryUsage;
import java.lang.management.RuntimeMXBean;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import javax.management.JMX;
import javax.management.MBeanServerConnection;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import javax.management.openmbean.CompositeData;
import javax.management.openmbean.OpenDataException;
import javax.management.openmbean.TabularData;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXServiceURL;
import org.apache.cassandra.concurrent.Stage;
import org.apache.cassandra.db.ColumnFamilyStoreMBean;
import org.apache.cassandra.db.HintedHandOffManager;
import org.apache.cassandra.db.HintedHandOffManagerMBean;
import org.apache.cassandra.db.compaction.CompactionManager;
import org.apache.cassandra.db.compaction.CompactionManagerMBean;
import org.apache.cassandra.gms.FailureDetector;
import org.apache.cassandra.gms.FailureDetectorMBean;
import org.apache.cassandra.gms.Gossiper;
import org.apache.cassandra.gms.GossiperMBean;
import org.apache.cassandra.locator.EndpointSnitchInfoMBean;
import org.apache.cassandra.metrics.CassandraMetricsRegistry;
import org.apache.cassandra.metrics.ColumnFamilyMetrics.Sampler;
import org.apache.cassandra.metrics.StorageMetrics;
import org.apache.cassandra.metrics.ThreadPoolMetrics;
import org.apache.cassandra.net.MessagingService;
import org.apache.cassandra.net.MessagingServiceMBean;
import org.apache.cassandra.service.CacheService;
import org.apache.cassandra.service.CacheServiceMBean;
import org.apache.cassandra.service.GCInspector;
import org.apache.cassandra.service.GCInspectorMXBean;
import org.apache.cassandra.service.StorageProxy;
import org.apache.cassandra.service.StorageProxyMBean;
import org.apache.cassandra.service.StorageServiceMBean;
import org.apache.cassandra.streaming.StreamManagerMBean;
import org.apache.cassandra.streaming.StreamState;
import org.apache.cassandra.streaming.management.StreamStateCompositeData;
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.Uninterruptibles;
/**
* JMX client operations for Cassandra.
*/
public class NodeProbe implements AutoCloseable
{
// JMX service URL template; brackets allow IPv6 literals for the host.
private static final String fmtUrl = "service:jmx:rmi:///jndi/rmi://[%s]:%d/jmxrmi";
// ObjectName of the StorageService MBean, the main management entry point.
private static final String ssObjName = "org.apache.cassandra.db:type=StorageService";
// Default Cassandra JMX port.
private static final int defaultPort = 7199;
final String host;
final int port;
// Optional JMX credentials; null username means unauthenticated connection.
private String username;
private String password;
// Live JMX connection plus the MBean/MXBean proxies wired up by connect().
private JMXConnector jmxc;
private MBeanServerConnection mbeanServerConn;
private CompactionManagerMBean compactionProxy;
private StorageServiceMBean ssProxy;
private GossiperMBean gossProxy;
private MemoryMXBean memProxy;
private GCInspectorMXBean gcProxy;
private RuntimeMXBean runtimeProxy;
private StreamManagerMBean streamProxy;
public MessagingServiceMBean msProxy;
private FailureDetectorMBean fdProxy;
private CacheServiceMBean cacheService;
private StorageProxyMBean spProxy;
private HintedHandOffManagerMBean hhProxy;
// Set to true when a keyspace operation (cleanup/scrub/verify/upgrade) reports failure.
private boolean failed;
/**
 * Creates a NodeProbe using the specified JMX host, port, username, and password.
 *
 * @param host hostname or IP address of the JMX agent
 * @param port TCP port of the remote JMX agent
 * @param username JMX username; must be non-null and non-empty
 * @param password JMX password; must be non-null and non-empty
 * @throws IOException on connection failures
 */
public NodeProbe(String host, int port, String username, String password) throws IOException
{
// Precondition is only enforced when assertions are enabled (-ea).
assert username != null && !username.isEmpty() && password != null && !password.isEmpty()
: "neither username nor password can be blank";
this.host = host;
this.port = port;
this.username = username;
this.password = password;
// Connects eagerly: construction fails if the JMX agent is unreachable.
connect();
}
/**
 * Creates a NodeProbe using the specified JMX host and port, without credentials.
 *
 * @param host hostname or IP address of the JMX agent
 * @param port TCP port of the remote JMX agent
 * @throws IOException on connection failures
 */
public NodeProbe(String host, int port) throws IOException
{
this.host = host;
this.port = port;
// Connects eagerly: construction fails if the JMX agent is unreachable.
connect();
}
/**
 * Creates a NodeProbe using the specified JMX host and the default port (7199),
 * without credentials.
 *
 * @param host hostname or IP address of the JMX agent
 * @throws IOException on connection failures
 */
public NodeProbe(String host) throws IOException
{
    // Delegate so the field assignment + connect() sequence lives in one place.
    this(host, defaultPort);
}
/**
 * Create a connection to the JMX agent and setup the M[X]Bean proxies.
 *
 * @throws IOException on connection failures
 */
private void connect() throws IOException
{
JMXServiceURL jmxUrl = new JMXServiceURL(String.format(fmtUrl, host, port));
Map<String,Object> env = new HashMap<String,Object>();
// Credentials are optional; pass them only when a username was supplied.
if (username != null)
{
String[] creds = { username, password };
env.put(JMXConnector.CREDENTIALS, creds);
}
jmxc = JMXConnectorFactory.connect(jmxUrl, env);
mbeanServerConn = jmxc.getMBeanServerConnection();
try
{
// Wire up a proxy for every Cassandra MBean this tool talks to.
ObjectName name = new ObjectName(ssObjName);
ssProxy = JMX.newMBeanProxy(mbeanServerConn, name, StorageServiceMBean.class);
name = new ObjectName(MessagingService.MBEAN_NAME);
msProxy = JMX.newMBeanProxy(mbeanServerConn, name, MessagingServiceMBean.class);
name = new ObjectName(StreamManagerMBean.OBJECT_NAME);
streamProxy = JMX.newMBeanProxy(mbeanServerConn, name, StreamManagerMBean.class);
name = new ObjectName(CompactionManager.MBEAN_OBJECT_NAME);
compactionProxy = JMX.newMBeanProxy(mbeanServerConn, name, CompactionManagerMBean.class);
name = new ObjectName(FailureDetector.MBEAN_NAME);
fdProxy = JMX.newMBeanProxy(mbeanServerConn, name, FailureDetectorMBean.class);
name = new ObjectName(CacheService.MBEAN_NAME);
cacheService = JMX.newMBeanProxy(mbeanServerConn, name, CacheServiceMBean.class);
name = new ObjectName(StorageProxy.MBEAN_NAME);
spProxy = JMX.newMBeanProxy(mbeanServerConn, name, StorageProxyMBean.class);
name = new ObjectName(HintedHandOffManager.MBEAN_NAME);
hhProxy = JMX.newMBeanProxy(mbeanServerConn, name, HintedHandOffManagerMBean.class);
name = new ObjectName(GCInspector.MBEAN_NAME);
gcProxy = JMX.newMBeanProxy(mbeanServerConn, name, GCInspectorMXBean.class);
name = new ObjectName(Gossiper.MBEAN_NAME);
gossProxy = JMX.newMBeanProxy(mbeanServerConn, name, GossiperMBean.class);
}
catch (MalformedObjectNameException e)
{
// All names above are compile-time constants; a failure here is a coding error.
throw new RuntimeException(
"Invalid ObjectName? Please report this as a bug.", e);
}
// Platform (JVM-level) MXBeans: heap usage and process uptime.
memProxy = ManagementFactory.newPlatformMXBeanProxy(mbeanServerConn,
ManagementFactory.MEMORY_MXBEAN_NAME, MemoryMXBean.class);
runtimeProxy = ManagementFactory.newPlatformMXBeanProxy(
mbeanServerConn, ManagementFactory.RUNTIME_MXBEAN_NAME, RuntimeMXBean.class);
}
/** Closes the underlying JMX connection. */
public void close() throws IOException
{
jmxc.close();
}
// The following delegate to StorageService; a non-zero return code indicates
// that at least one table failed the operation.
public int forceKeyspaceCleanup(String keyspaceName, String... columnFamilies) throws IOException, ExecutionException, InterruptedException
{
return ssProxy.forceKeyspaceCleanup(keyspaceName, columnFamilies);
}
public int scrub(boolean disableSnapshot, boolean skipCorrupted, String keyspaceName, String... columnFamilies) throws IOException, ExecutionException, InterruptedException
{
return ssProxy.scrub(disableSnapshot, skipCorrupted, keyspaceName, columnFamilies);
}
public int verify(boolean extendedVerify, String keyspaceName, String... columnFamilies) throws IOException, ExecutionException, InterruptedException
{
return ssProxy.verify(extendedVerify, keyspaceName, columnFamilies);
}
public int upgradeSSTables(String keyspaceName, boolean excludeCurrentVersion, String... columnFamilies) throws IOException, ExecutionException, InterruptedException
{
return ssProxy.upgradeSSTables(keyspaceName, excludeCurrentVersion, columnFamilies);
}
// PrintStream variants of the operations above: on failure they record it in
// the 'failed' flag (see isFailed()) and print a diagnostic instead of throwing.
public void forceKeyspaceCleanup(PrintStream out, String keyspaceName, String... columnFamilies) throws IOException, ExecutionException, InterruptedException
{
if (forceKeyspaceCleanup(keyspaceName, columnFamilies) != 0)
{
failed = true;
out.println("Aborted cleaning up at least one table in keyspace "+keyspaceName+", check server logs for more information.");
}
}
public void scrub(PrintStream out, boolean disableSnapshot, boolean skipCorrupted, String keyspaceName, String... columnFamilies) throws IOException, ExecutionException, InterruptedException
{
if (scrub(disableSnapshot, skipCorrupted, keyspaceName, columnFamilies) != 0)
{
failed = true;
out.println("Aborted scrubbing at least one table in keyspace "+keyspaceName+", check server logs for more information.");
}
}
public void verify(PrintStream out, boolean extendedVerify, String keyspaceName, String... columnFamilies) throws IOException, ExecutionException, InterruptedException
{
if (verify(extendedVerify, keyspaceName, columnFamilies) != 0)
{
failed = true;
out.println("Aborted verifying at least one table in keyspace "+keyspaceName+", check server logs for more information.");
}
}
/**
 * PrintStream variant of {@link #upgradeSSTables(String, boolean, String...)}:
 * on failure it records it in the 'failed' flag and prints a diagnostic
 * instead of throwing. Fixes the "atleast" typo in the user-facing message.
 */
public void upgradeSSTables(PrintStream out, String keyspaceName, boolean excludeCurrentVersion, String... columnFamilies) throws IOException, ExecutionException, InterruptedException
{
    // Non-zero return code means at least one table failed to upgrade.
    if (upgradeSSTables(keyspaceName, excludeCurrentVersion, columnFamilies) != 0)
    {
        failed = true;
        out.println("Aborted upgrading sstables for at least one table in keyspace "+keyspaceName+", check server logs for more information.");
    }
}
// Triggers a major compaction on the given keyspace/tables via StorageService.
public void forceKeyspaceCompaction(boolean splitOutput, String keyspaceName, String... columnFamilies) throws IOException, ExecutionException, InterruptedException
{
ssProxy.forceKeyspaceCompaction(splitOutput, keyspaceName, columnFamilies);
}
// Flushes memtables to disk for the given keyspace/tables.
public void forceKeyspaceFlush(String keyspaceName, String... columnFamilies) throws IOException, ExecutionException, InterruptedException
{
ssProxy.forceKeyspaceFlush(keyspaceName, columnFamilies);
}
/**
 * Runs a repair on the given keyspace, streaming progress to {@code out}.
 * Despite the name this call is synchronous: it blocks in runner.run().
 *
 * @param out stream for progress and error output
 * @param keyspace keyspace to repair
 * @param options repair options, passed through to StorageService
 * @throws IOException wrapping any failure during the repair run
 */
public void repairAsync(final PrintStream out, final String keyspace, Map<String, String> options) throws IOException
{
RepairRunner runner = new RepairRunner(out, ssProxy, keyspace, options);
try
{
// Register for both JMX connection events and StorageService notifications
// before starting, so no progress notification is missed.
jmxc.addConnectionNotificationListener(runner, null, null);
ssProxy.addNotificationListener(runner, null, null);
runner.run();
}
catch (Exception e)
{
throw new IOException(e) ;
}
finally
{
try
{
// Best-effort deregistration; failures are reported but not rethrown.
ssProxy.removeNotificationListener(runner);
jmxc.removeConnectionNotificationListener(runner);
}
catch (Throwable e)
{
out.println("Exception occurred during clean-up. " + e);
}
}
}
/**
 * Samples partition activity on a table: starts each requested sampler, waits
 * {@code duration} milliseconds, then collects the top {@code count} results.
 */
public Map<Sampler, CompositeData> getPartitionSample(String ks, String cf, int capacity, int duration, int count, List<Sampler> samplers) throws OpenDataException
{
    ColumnFamilyStoreMBean cfstore = getCfsProxy(ks, cf);
    // Kick off every sampler before the sampling window begins.
    for (Sampler s : samplers)
    {
        cfstore.beginLocalSampling(s.name(), capacity);
    }
    // Let the samplers accumulate data for the requested window.
    Uninterruptibles.sleepUninterruptibly(duration, TimeUnit.MILLISECONDS);
    Map<Sampler, CompositeData> samples = Maps.newHashMap();
    for (Sampler s : samplers)
    {
        samples.put(s, cfstore.finishLocalSampling(s.name(), count));
    }
    return samples;
}
// Cache invalidation, delegated to the CacheService MBean.
public void invalidateCounterCache()
{
cacheService.invalidateCounterCache();
}
public void invalidateKeyCache()
{
cacheService.invalidateKeyCache();
}
public void invalidateRowCache()
{
cacheService.invalidateRowCache();
}
// Flushes memtables and stops accepting writes; used before shutdown.
public void drain() throws IOException, InterruptedException, ExecutionException
{
ssProxy.drain();
}
// Ring-state getters, delegated to StorageService.
public Map<String, String> getTokenToEndpointMap()
{
return ssProxy.getTokenToEndpointMap();
}
public List<String> getLiveNodes()
{
return ssProxy.getLiveNodes();
}
public List<String> getJoiningNodes()
{
return ssProxy.getJoiningNodes();
}
public List<String> getLeavingNodes()
{
return ssProxy.getLeavingNodes();
}
public List<String> getMovingNodes()
{
return ssProxy.getMovingNodes();
}
public List<String> getUnreachableNodes()
{
return ssProxy.getUnreachableNodes();
}
public Map<String, String> getLoadMap()
{
return ssProxy.getLoadMap();
}
public Map<InetAddress, Float> getOwnership()
{
return ssProxy.getOwnership();
}
public Map<InetAddress, Float> effectiveOwnership(String keyspace) throws IllegalStateException
{
return ssProxy.effectiveOwnership(keyspace);
}
/** Returns a fresh proxy for the Caches MBean (key/row/counter caches). */
public CacheServiceMBean getCacheServiceMBean()
{
String cachePath = "org.apache.cassandra.db:type=Caches";
try
{
return JMX.newMBeanProxy(mbeanServerConn, new ObjectName(cachePath), CacheServiceMBean.class);
}
catch (MalformedObjectNameException e)
{
// The name is a constant; a failure here is a coding error.
throw new RuntimeException(e);
}
}
// Returns accumulated GC statistics and resets the counters on the server.
public double[] getAndResetGCStats()
{
return gcProxy.getAndResetStats();
}
/** Iterates over (table name, MBean proxy) pairs for all column family stores. */
public Iterator<Map.Entry<String, ColumnFamilyStoreMBean>> getColumnFamilyStoreMBeanProxies()
{
try
{
return new ColumnFamilyStoreMBeanIterator(mbeanServerConn);
}
catch (MalformedObjectNameException e)
{
throw new RuntimeException("Invalid ObjectName? Please report this as a bug.", e);
}
catch (IOException e)
{
throw new RuntimeException("Could not retrieve list of stat mbeans.", e);
}
}
public CompactionManagerMBean getCompactionManagerProxy()
{
return compactionProxy;
}
// Tokens owned by the local node.
public List<String> getTokens()
{
return ssProxy.getTokens();
}
// Tokens owned by the given endpoint; wraps the checked UnknownHostException.
public List<String> getTokens(String endpoint)
{
try
{
return ssProxy.getTokens(endpoint);
}
catch (UnknownHostException e)
{
throw new RuntimeException(e);
}
}
public String getLocalHostId()
{
return ssProxy.getLocalHostId();
}
public Map<String, String> getHostIdMap()
{
return ssProxy.getHostIdMap();
}
public String getLoadString()
{
return ssProxy.getLoadString();
}
public String getReleaseVersion()
{
return ssProxy.getReleaseVersion();
}
public int getCurrentGenerationNumber()
{
return ssProxy.getCurrentGenerationNumber();
}
// JVM-level metrics via the platform MXBeans set up in connect().
public long getUptime()
{
return runtimeProxy.getUptime();
}
public MemoryUsage getHeapMemoryUsage()
{
return memProxy.getHeapMemoryUsage();
}
/**
 * Take a snapshot of all the keyspaces, optionally specifying only a specific column family.
 *
 * @param snapshotName the name of the snapshot.
 * @param columnFamily the column family to snapshot, or all on null
 * @param keyspaces the keyspaces to snapshot
 * @throws IOException if a column family is given with other than exactly one keyspace
 */
public void takeSnapshot(String snapshotName, String columnFamily, String... keyspaces) throws IOException
{
    // No specific table requested: snapshot the listed keyspaces wholesale.
    if (columnFamily == null)
    {
        ssProxy.takeSnapshot(snapshotName, keyspaces);
        return;
    }
    // A single table snapshot requires exactly one owning keyspace.
    if (keyspaces.length != 1)
    {
        throw new IOException("When specifying the table for a snapshot, you must specify one and only one keyspace");
    }
    ssProxy.takeColumnFamilySnapshot(keyspaces[0], columnFamily, snapshotName);
}
/**
 * Take a snapshot of all column family from different keyspaces.
 *
 * @param snapshotName
 * the name of the snapshot.
 * @param columnFamilyList
 * list of columnfamily from different keyspace in the form of ks1.cf1 ks2.cf2
 * @throws IOException if the list is null or empty
 */
public void takeMultipleColumnFamilySnapshot(String snapshotName, String... columnFamilyList)
throws IOException
{
if (null != columnFamilyList && columnFamilyList.length != 0)
{
ssProxy.takeMultipleColumnFamilySnapshot(snapshotName, columnFamilyList);
}
else
{
throw new IOException("The column family List for a snapshot should not be empty or null");
}
}
/**
 * Remove all the existing snapshots with the given tag for the given keyspaces.
 */
public void clearSnapshot(String tag, String... keyspaces) throws IOException
{
ssProxy.clearSnapshot(tag, keyspaces);
}
public Map<String, TabularData> getSnapshotDetails()
{
return ssProxy.getSnapshotDetails();
}
public long trueSnapshotsSize()
{
return ssProxy.trueSnapshotsSize();
}
// Ring-membership operations, delegated to StorageService.
public boolean isJoined()
{
return ssProxy.isJoined();
}
public void joinRing() throws IOException
{
ssProxy.joinRing();
}
public void decommission() throws InterruptedException
{
ssProxy.decommission();
}
public void move(String newToken) throws IOException
{
ssProxy.move(newToken);
}
public void removeNode(String token)
{
ssProxy.removeNode(token);
}
public String getRemovalStatus()
{
return ssProxy.getRemovalStatus();
}
public void forceRemoveCompletion()
{
ssProxy.forceRemoveCompletion();
}
// Forcibly removes the endpoint from gossip without re-replicating data.
public void assassinateEndpoint(String address) throws UnknownHostException
{
gossProxy.assassinateEndpoint(address);
}
/**
 * Set the compaction threshold
 *
 * @param ks keyspace containing the table
 * @param cf table (column family) to configure
 * @param minimumCompactionThreshold minimum compaction threshold
 * @param maximumCompactionThreshold maximum compaction threshold
 */
public void setCompactionThreshold(String ks, String cf, int minimumCompactionThreshold, int maximumCompactionThreshold)
{
ColumnFamilyStoreMBean cfsProxy = getCfsProxy(ks, cf);
cfsProxy.setCompactionThresholds(minimumCompactionThreshold, maximumCompactionThreshold);
}
public void disableAutoCompaction(String ks, String ... columnFamilies) throws IOException
{
ssProxy.disableAutoCompaction(ks, columnFamilies);
}
public void enableAutoCompaction(String ks, String ... columnFamilies) throws IOException
{
ssProxy.enableAutoCompaction(ks, columnFamilies);
}
public void setIncrementalBackupsEnabled(boolean enabled)
{
ssProxy.setIncrementalBackupsEnabled(enabled);
}
public boolean isIncrementalBackupsEnabled()
{
return ssProxy.isIncrementalBackupsEnabled();
}
/**
 * Sets the key, row, and counter cache capacities (in MB) on the server.
 * Reuses {@link #getCacheServiceMBean()} instead of duplicating the Caches
 * ObjectName lookup; the exception behavior is identical (the constant name
 * is wrapped in a RuntimeException should it ever be malformed).
 */
public void setCacheCapacities(int keyCacheCapacity, int rowCacheCapacity, int counterCacheCapacity)
{
    CacheServiceMBean cacheMBean = getCacheServiceMBean();
    cacheMBean.setKeyCacheCapacityInMB(keyCacheCapacity);
    cacheMBean.setRowCacheCapacityInMB(rowCacheCapacity);
    cacheMBean.setCounterCacheCapacityInMB(counterCacheCapacity);
}
/**
 * Sets how many keys each cache (key, row, counter) persists on save.
 * Reuses {@link #getCacheServiceMBean()} instead of duplicating the Caches
 * ObjectName lookup; the exception behavior is identical.
 */
public void setCacheKeysToSave(int keyCacheKeysToSave, int rowCacheKeysToSave, int counterCacheKeysToSave)
{
    CacheServiceMBean cacheMBean = getCacheServiceMBean();
    cacheMBean.setKeyCacheKeysToSave(keyCacheKeysToSave);
    cacheMBean.setRowCacheKeysToSave(rowCacheKeysToSave);
    cacheMBean.setCounterCacheKeysToSave(counterCacheKeysToSave);
}
public void setHintedHandoffThrottleInKB(int throttleInKB)
{
ssProxy.setHintedHandoffThrottleInKB(throttleInKB);
}
// Replica endpoints that own the given key in the given table.
public List<InetAddress> getEndpoints(String keyspace, String cf, String key)
{
return ssProxy.getNaturalEndpoints(keyspace, cf, key);
}
// SSTable files on this node that contain the given key.
public List<String> getSSTables(String keyspace, String cf, String key)
{
ColumnFamilyStoreMBean cfsProxy = getCfsProxy(keyspace, cf);
return cfsProxy.getSSTablesForKey(key);
}
/** Returns the state of all streaming sessions currently known to this node. */
public Set<StreamState> getStreamStatus()
{
    // Decode each CompositeData snapshot from the stream manager into a StreamState.
    Set<StreamState> states = Sets.newHashSet();
    for (CompositeData stream : streamProxy.getCurrentStreams())
    {
        states.add(StreamStateCompositeData.fromCompositeData(stream));
    }
    return states;
}
// Current node mode as reported by StorageService (e.g. NORMAL, JOINING, ...).
public String getOperationMode()
{
return ssProxy.getOperationMode();
}
public boolean isStarting()
{
return ssProxy.isStarting();
}
/**
 * Truncates (deletes all data from) the given table.
 *
 * @param keyspaceName keyspace containing the table
 * @param cfName table (column family) to truncate
 * @throws RuntimeException wrapping a timeout or I/O failure
 */
public void truncate(String keyspaceName, String cfName)
{
    try
    {
        ssProxy.truncate(keyspaceName, cfName);
    }
    catch (TimeoutException | IOException e)
    {
        // Both failure modes were previously wrapped identically; multi-catch
        // removes the duplicated handler (Java 7+, already required by
        // this class implementing AutoCloseable).
        throw new RuntimeException("Error while executing truncate", e);
    }
}
/** Returns a fresh proxy for the EndpointSnitchInfo MBean (DC/rack lookups). */
public EndpointSnitchInfoMBean getEndpointSnitchInfoProxy()
{
try
{
return JMX.newMBeanProxy(mbeanServerConn, new ObjectName("org.apache.cassandra.db:type=EndpointSnitchInfo"), EndpointSnitchInfoMBean.class);
}
catch (MalformedObjectNameException e)
{
// The name is a constant; a failure here is a coding error.
throw new RuntimeException(e);
}
}
/**
 * Looks up the ColumnFamilyStore MBean for the given keyspace/table.
 * NOTE(review): on lookup failure this calls System.exit(1) rather than
 * throwing — acceptable for a CLI tool, surprising for library use.
 */
public ColumnFamilyStoreMBean getCfsProxy(String ks, String cf)
{
ColumnFamilyStoreMBean cfsProxy = null;
try
{
// A dotted name denotes a secondary index table, registered under a different type.
String type = cf.contains(".") ? "IndexColumnFamilies" : "ColumnFamilies";
Set<ObjectName> beans = mbeanServerConn.queryNames(
new ObjectName("org.apache.cassandra.db:type=*" + type +",keyspace=" + ks + ",columnfamily=" + cf), null);
if (beans.isEmpty())
throw new MalformedObjectNameException("couldn't find that bean");
assert beans.size() == 1;
for (ObjectName bean : beans)
cfsProxy = JMX.newMBeanProxy(mbeanServerConn, bean, ColumnFamilyStoreMBean.class);
}
catch (MalformedObjectNameException mone)
{
System.err.println("ColumnFamilyStore for " + ks + "/" + cf + " not found.");
System.exit(1);
}
catch (IOException e)
{
System.err.println("ColumnFamilyStore for " + ks + "/" + cf + " not found: " + e);
System.exit(1);
}
return cfsProxy;
}
public StorageProxyMBean getSpProxy()
{
return spProxy;
}
/** Returns this node's own endpoint address, resolved via its first token. */
public String getEndpoint()
{
// Try to find the endpoint using the local token, doing so in a crazy manner
// to maintain backwards compatibility with the MBean interface
String stringToken = ssProxy.getTokens().get(0);
Map<String, String> tokenToEndpoint = ssProxy.getTokenToEndpointMap();
for (Map.Entry<String, String> pair : tokenToEndpoint.entrySet())
{
if (pair.getKey().equals(stringToken))
{
return pair.getValue();
}
}
throw new RuntimeException("Could not find myself in the endpoint list, something is very wrong! Is the Cassandra node fully started?");
}
// Datacenter of the local node per the snitch; "Unknown" if resolution fails.
public String getDataCenter()
{
try
{
return getEndpointSnitchInfoProxy().getDatacenter(getEndpoint());
}
catch (UnknownHostException e)
{
return "Unknown";
}
}
// Rack of the local node per the snitch; "Unknown" if resolution fails.
public String getRack()
{
try
{
return getEndpointSnitchInfoProxy().getRack(getEndpoint());
}
catch (UnknownHostException e)
{
return "Unknown";
}
}
public List<String> getKeyspaces()
{
return ssProxy.getKeyspaces();
}
public List<String> getNonSystemKeyspaces()
{
return ssProxy.getNonSystemKeyspaces();
}
public String getClusterName()
{
return ssProxy.getClusterName();
}
public String getPartitioner()
{
return ssProxy.getPartitionerName();
}
// Hinted handoff controls, delegated to StorageProxy / HintedHandOffManager.
public void disableHintedHandoff()
{
spProxy.setHintedHandoffEnabled(false);
}
public void enableHintedHandoff()
{
spProxy.setHintedHandoffEnabled(true);
}
public boolean isHandoffEnabled()
{
return spProxy.getHintedHandoffEnabled();
}
// Enables handoff only for the given comma-separated list of datacenters.
public void enableHintedHandoff(String dcNames)
{
spProxy.setHintedHandoffEnabledByDCList(dcNames);
}
public void pauseHintsDelivery()
{
hhProxy.pauseHintsDelivery(true);
}
public void resumeHintsDelivery()
{
hhProxy.pauseHintsDelivery(false);
}
// Deletes all hints queued for the given endpoint.
public void truncateHints(final String host)
{
hhProxy.deleteHintsForEndpoint(host);
}
/**
 * Deletes all stored hints on this node.
 *
 * @throws RuntimeException wrapping any failure of the underlying operation
 */
public void truncateHints()
{
    try
    {
        hhProxy.truncateAllHints();
    }
    catch (ExecutionException e)
    {
        throw new RuntimeException("Error while executing truncate hints", e);
    }
    catch (InterruptedException e)
    {
        // Restore the interrupt status before rethrowing so callers/executors
        // can still observe the interruption (previously it was swallowed).
        Thread.currentThread().interrupt();
        throw new RuntimeException("Error while executing truncate hints", e);
    }
}
// Native (CQL) transport controls.
public void stopNativeTransport()
{
ssProxy.stopNativeTransport();
}
public void startNativeTransport()
{
ssProxy.startNativeTransport();
}
public boolean isNativeTransportRunning()
{
return ssProxy.isNativeTransportRunning();
}
// Gossip controls.
public void stopGossiping()
{
ssProxy.stopGossiping();
}
public void startGossiping()
{
ssProxy.startGossiping();
}
public boolean isGossipRunning()
{
return ssProxy.isGossipRunning();
}
// Thrift (RPC) server controls.
public void stopThriftServer()
{
ssProxy.stopRPCServer();
}
public void startThriftServer()
{
ssProxy.startRPCServer();
}
public boolean isThriftServerRunning()
{
return ssProxy.isRPCServerRunning();
}
// Shuts down the remote Cassandra daemon process.
public void stopCassandraDaemon()
{
ssProxy.stopDaemon();
}
public boolean isInitialized()
{
return ssProxy.isInitialized();
}
// Throughput settings (MB/s), delegated to StorageService.
public void setCompactionThroughput(int value)
{
ssProxy.setCompactionThroughputMbPerSec(value);
}
public int getCompactionThroughput()
{
return ssProxy.getCompactionThroughputMbPerSec();
}
public int getStreamThroughput()
{
return ssProxy.getStreamThroughputMbPerSec();
}
// NOTE(review): reads the local StorageMetrics counter, not the remote node's.
public int getExceptionCount()
{
return (int)StorageMetrics.exceptions.getCount();
}
public Map<String, Integer> getDroppedMessages()
{
return msProxy.getDroppedMessages();
}
    /** Loads freshly placed SSTables for the given table from disk. */
    public void loadNewSSTables(String ksName, String cfName)
    {
        ssProxy.loadNewSSTables(ksName, cfName);
    }
    /**
     * Rebuilds the named secondary indexes of a table.
     *
     * @param idxNames index names to rebuild
     */
    public void rebuildIndex(String ksName, String cfName, String... idxNames)
    {
        ssProxy.rebuildSecondaryIndex(ksName, cfName, idxNames);
    }
    /** @return a dump of all endpoint states known to the failure detector */
    public String getGossipInfo()
    {
        return fdProxy.getAllEndpointStates();
    }
    /** Stops compactions identified by the given string (passed through to the compaction manager). */
    public void stop(String string)
    {
        compactionProxy.stopCompaction(string);
    }
    /** Sets the streaming throughput cap in MB/s. */
    public void setStreamThroughput(int value)
    {
        ssProxy.setStreamThroughputMbPerSec(value);
    }
    /** Sets the request trace probability (range defined by the MBean). */
    public void setTraceProbability(double value)
    {
        ssProxy.setTraceProbability(value);
    }
    /** @return the current schema version string */
    public String getSchemaVersion()
    {
        return ssProxy.getSchemaVersion();
    }
    /** Describes the token ring for the given keyspace via JMX. */
    public List<String> describeRing(String keyspaceName) throws IOException
    {
        return ssProxy.describeRingJMX(keyspaceName);
    }
    /** Rebuilds data by streaming from other nodes, optionally restricted to one source DC. */
    public void rebuild(String sourceDc)
    {
        ssProxy.rebuild(sourceDc);
    }
    /** @return a sample of keys from the local node */
    public List<String> sampleKeyRange()
    {
        return ssProxy.sampleKeyRange();
    }
    /** Drops the local schema and requests a resynchronization from other nodes. */
    public void resetLocalSchema() throws IOException
    {
        ssProxy.resetLocalSchema();
    }
    /** @return true if a previous operation on this probe recorded a failure */
    public boolean isFailed()
    {
        return failed;
    }
    /** @return number of read-repair attempts observed */
    public long getReadRepairAttempted()
    {
        return spProxy.getReadRepairAttempted();
    }
    /** @return number of blocking read repairs performed */
    public long getReadRepairRepairedBlocking()
    {
        return spProxy.getReadRepairRepairedBlocking();
    }
    /** @return number of background read repairs performed */
    public long getReadRepairRepairedBackground()
    {
        return spProxy.getReadRepairRepairedBackground();
    }
// JMX getters for the o.a.c.metrics API below.
/**
* Retrieve cache metrics based on the cache type (KeyCache, RowCache, or CounterCache)
* @param cacheType KeyCach, RowCache, or CounterCache
* @param metricName Capacity, Entries, HitRate, Size, Requests or Hits.
*/
public Object getCacheMetric(String cacheType, String metricName)
{
try
{
switch(metricName)
{
case "Capacity":
case "Entries":
case "HitRate":
case "Size":
return JMX.newMBeanProxy(mbeanServerConn,
new ObjectName("org.apache.cassandra.metrics:type=Cache,scope=" + cacheType + ",name=" + metricName),
CassandraMetricsRegistry.JmxGaugeMBean.class).getValue();
case "Requests":
case "Hits":
return JMX.newMBeanProxy(mbeanServerConn,
new ObjectName("org.apache.cassandra.metrics:type=Cache,scope=" + cacheType + ",name=" + metricName),
CassandraMetricsRegistry.JmxMeterMBean.class).getCount();
default:
throw new RuntimeException("Unknown cache metric name.");
}
}
catch (MalformedObjectNameException e)
{
throw new RuntimeException(e);
}
}
    /**
     * Retrieves a thread-pool metric for the given stage.
     *
     * @param stage      stage whose JMX type/name identify the pool
     * @param metricName name of the metric to fetch
     */
    public Object getThreadPoolMetric(Stage stage, String metricName)
    {
        return ThreadPoolMetrics.getJmxMetric(mbeanServerConn, stage.getJmxType(), stage.getJmxName(), metricName);
    }
/**
* Retrieve ColumnFamily metrics
* @param ks Keyspace for which stats are to be displayed.
* @param cf ColumnFamily for which stats are to be displayed.
* @param metricName View {@link org.apache.cassandra.metrics.ColumnFamilyMetrics}.
*/
public Object getColumnFamilyMetric(String ks, String cf, String metricName)
{
try
{
String type = cf.contains(".") ? "IndexColumnFamily": "ColumnFamily";
ObjectName oName = new ObjectName(String.format("org.apache.cassandra.metrics:type=%s,keyspace=%s,scope=%s,name=%s", type, ks, cf, metricName));
switch(metricName)
{
case "BloomFilterDiskSpaceUsed":
case "BloomFilterFalsePositives":
case "BloomFilterFalseRatio":
case "BloomFilterOffHeapMemoryUsed":
case "IndexSummaryOffHeapMemoryUsed":
case "CompressionMetadataOffHeapMemoryUsed":
case "CompressionRatio":
case "EstimatedColumnCountHistogram":
case "EstimatedRowSizeHistogram":
case "KeyCacheHitRate":
case "LiveSSTableCount":
case "MaxRowSize":
case "MeanRowSize":
case "MemtableColumnsCount":
case "MemtableLiveDataSize":
case "MemtableOffHeapSize":
case "MinRowSize":
case "RecentBloomFilterFalsePositives":
case "RecentBloomFilterFalseRatio":
case "SnapshotsSize":
return JMX.newMBeanProxy(mbeanServerConn, oName, CassandraMetricsRegistry.JmxGaugeMBean.class).getValue();
case "LiveDiskSpaceUsed":
case "MemtableSwitchCount":
case "SpeculativeRetries":
case "TotalDiskSpaceUsed":
case "WriteTotalLatency":
case "ReadTotalLatency":
case "PendingFlushes":
return JMX.newMBeanProxy(mbeanServerConn, oName, CassandraMetricsRegistry.JmxCounterMBean.class).getCount();
case "CoordinatorReadLatency":
case "CoordinatorScanLatency":
case "ReadLatency":
case "WriteLatency":
return JMX.newMBeanProxy(mbeanServerConn, oName, CassandraMetricsRegistry.JmxTimerMBean.class);
case "LiveScannedHistogram":
case "SSTablesPerReadHistogram":
case "TombstoneScannedHistogram":
return JMX.newMBeanProxy(mbeanServerConn, oName, CassandraMetricsRegistry.JmxHistogramMBean.class);
default:
throw new RuntimeException("Unknown table metric.");
}
}
catch (MalformedObjectNameException e)
{
throw new RuntimeException(e);
}
}
/**
* Retrieve Proxy metrics
* @param scope RangeSlice, Read or Write
*/
public CassandraMetricsRegistry.JmxTimerMBean getProxyMetric(String scope)
{
try
{
return JMX.newMBeanProxy(mbeanServerConn,
new ObjectName("org.apache.cassandra.metrics:type=ClientRequest,scope=" + scope + ",name=Latency"),
CassandraMetricsRegistry.JmxTimerMBean.class);
}
catch (MalformedObjectNameException e)
{
throw new RuntimeException(e);
}
}
/**
* Retrieve Proxy metrics
* @param metricName CompletedTasks, PendingTasks, BytesCompacted or TotalCompactionsCompleted.
*/
public Object getCompactionMetric(String metricName)
{
try
{
switch(metricName)
{
case "BytesCompacted":
return JMX.newMBeanProxy(mbeanServerConn,
new ObjectName("org.apache.cassandra.metrics:type=Compaction,name=" + metricName),
CassandraMetricsRegistry.JmxCounterMBean.class);
case "CompletedTasks":
case "PendingTasks":
return JMX.newMBeanProxy(mbeanServerConn,
new ObjectName("org.apache.cassandra.metrics:type=Compaction,name=" + metricName),
CassandraMetricsRegistry.JmxGaugeMBean.class).getValue();
case "TotalCompactionsCompleted":
return JMX.newMBeanProxy(mbeanServerConn,
new ObjectName("org.apache.cassandra.metrics:type=Compaction,name=" + metricName),
CassandraMetricsRegistry.JmxMeterMBean.class);
default:
throw new RuntimeException("Unknown compaction metric.");
}
}
catch (MalformedObjectNameException e)
{
throw new RuntimeException(e);
}
}
    /**
     * Retrieve Storage metrics
     * @param metricName Exceptions, Load, TotalHints or TotalHintsInProgress.
     * @return the counter's current count
     */
    public long getStorageMetric(String metricName)
    {
        try
        {
            return JMX.newMBeanProxy(mbeanServerConn,
                    new ObjectName("org.apache.cassandra.metrics:type=Storage,name=" + metricName),
                    CassandraMetricsRegistry.JmxCounterMBean.class).getCount();
        }
        catch (MalformedObjectNameException e)
        {
            throw new RuntimeException(e);
        }
    }
    /**
     * Flattens a histogram metric into an array:
     * [50th, 75th, 95th, 98th, 99th percentile, min, max].
     * Note: despite the name, the last two entries are min/max, not percentiles.
     */
    public double[] metricPercentilesAsArray(CassandraMetricsRegistry.JmxHistogramMBean metric)
    {
        return new double[]{ metric.get50thPercentile(),
                metric.get75thPercentile(),
                metric.get95thPercentile(),
                metric.get98thPercentile(),
                metric.get99thPercentile(),
                metric.getMin(),
                metric.getMax()};
    }
    /**
     * Flattens a timer metric into an array:
     * [50th, 75th, 95th, 98th, 99th percentile, min, max].
     * Same layout as the histogram overload above is intentional.
     */
    public double[] metricPercentilesAsArray(CassandraMetricsRegistry.JmxTimerMBean metric)
    {
        return new double[]{ metric.get50thPercentile(),
                metric.get75thPercentile(),
                metric.get95thPercentile(),
                metric.get98thPercentile(),
                metric.get99thPercentile(),
                metric.getMin(),
                metric.getMax()};
    }
    /** Returns the recorded compaction history as JMX tabular data. */
    public TabularData getCompactionHistory()
    {
        return compactionProxy.getCompactionHistory();
    }
    /** Reloads trigger classes on the connected node. */
    public void reloadTriggers()
    {
        spProxy.reloadTriggerClasses();
    }
    /**
     * Sets the logging level for the given logger.
     *
     * @param classQualifier logger name (class or package) to adjust
     * @param level          target log level
     * @throws RuntimeException if the remote call fails, e.g. when logback lacks a jmxConfigurator
     */
    public void setLoggingLevel(String classQualifier, String level)
    {
        try
        {
            ssProxy.setLoggingLevel(classQualifier, level);
        }
        catch (Exception e)
        {
            throw new RuntimeException("Error setting log for " + classQualifier +" on level " + level +". Please check logback configuration and ensure to have <jmxConfigurator /> set", e);
        }
    }
    /** Returns the current logger-name to level mapping. */
    public Map<String, String> getLoggingLevels()
    {
        return ssProxy.getLoggingLevels();
    }
    /**
     * Resumes a previously interrupted bootstrap and blocks until it completes.
     * A BootstrapMonitor is registered on both the JMX connection and the storage
     * service proxy so progress notifications are printed to {@code out}.
     *
     * @param out stream used to report progress
     * @throws IOException wrapping any failure while resuming or awaiting bootstrap
     */
    public void resumeBootstrap(PrintStream out) throws IOException
    {
        BootstrapMonitor monitor = new BootstrapMonitor(out);
        try
        {
            // Register for notifications BEFORE triggering the resume, so no event is missed.
            jmxc.addConnectionNotificationListener(monitor, null, null);
            ssProxy.addNotificationListener(monitor, null, null);
            if (ssProxy.resumeBootstrap())
            {
                out.println("Resuming bootstrap");
                // Block until the monitor observes completion.
                monitor.awaitCompletion();
            }
            else
            {
                out.println("Node is already bootstrapped.");
            }
        }
        catch (Exception e)
        {
            throw new IOException(e);
        }
        finally
        {
            try
            {
                // Always unregister the listeners, even on failure.
                ssProxy.removeNotificationListener(monitor);
                jmxc.removeConnectionNotificationListener(monitor);
            }
            catch (Throwable e)
            {
                // Cleanup failures must not mask the primary outcome; just report them.
                out.println("Exception occurred during clean-up. " + e);
            }
        }
    }
}
/**
 * Iterates over all ColumnFamilyStore MBeans (regular tables and secondary-index
 * tables) registered on a node, sorted by keyspace, then table name, then
 * normal-vs-index. Each entry maps keyspace name to its MBean proxy.
 * {@link #remove()} is unsupported.
 */
class ColumnFamilyStoreMBeanIterator implements Iterator<Map.Entry<String, ColumnFamilyStoreMBean>>
{
    private MBeanServerConnection mbeanServerConn;
    Iterator<Entry<String, ColumnFamilyStoreMBean>> mbeans;
    public ColumnFamilyStoreMBeanIterator(MBeanServerConnection mbeanServerConn)
        throws MalformedObjectNameException, NullPointerException, IOException
    {
        this.mbeanServerConn = mbeanServerConn;
        // Gather both regular and index column-family MBeans into one list.
        List<Entry<String, ColumnFamilyStoreMBean>> cfMbeans = getCFSMBeans(mbeanServerConn, "ColumnFamilies");
        cfMbeans.addAll(getCFSMBeans(mbeanServerConn, "IndexColumnFamilies"));
        Collections.sort(cfMbeans, new Comparator<Entry<String, ColumnFamilyStoreMBean>>()
        {
            public int compare(Entry<String, ColumnFamilyStoreMBean> e1, Entry<String, ColumnFamilyStoreMBean> e2)
            {
                //compare keyspace, then CF name, then normal vs. index
                int keyspaceNameCmp = e1.getKey().compareTo(e2.getKey());
                if(keyspaceNameCmp != 0)
                    return keyspaceNameCmp;
                // get CF name and split it for index name
                // Index CF names have the form "table.index", so splitting on '.'
                // yields either [table] or [table, index].
                String e1CF[] = e1.getValue().getColumnFamilyName().split("\\.");
                String e2CF[] = e2.getValue().getColumnFamilyName().split("\\.");
                assert e1CF.length <= 2 && e2CF.length <= 2 : "unexpected split count for table name";
                //if neither are indexes, just compare CF names
                if(e1CF.length == 1 && e2CF.length == 1)
                    return e1CF[0].compareTo(e2CF[0]);
                //check if it's the same CF
                int cfNameCmp = e1CF[0].compareTo(e2CF[0]);
                if(cfNameCmp != 0)
                    return cfNameCmp;
                // if both are indexes (for the same CF), compare them
                if(e1CF.length == 2 && e2CF.length == 2)
                    return e1CF[1].compareTo(e2CF[1]);
                //if length of e1CF is 1, it's not an index, so sort it higher
                return e1CF.length == 1 ? 1 : -1;
            }
        });
        mbeans = cfMbeans.iterator();
    }
    // Queries the MBean server for all CFS beans of the given JMX type
    // ("ColumnFamilies" or "IndexColumnFamilies") and wraps each in a proxy.
    private List<Entry<String, ColumnFamilyStoreMBean>> getCFSMBeans(MBeanServerConnection mbeanServerConn, String type)
            throws MalformedObjectNameException, IOException
    {
        ObjectName query = new ObjectName("org.apache.cassandra.db:type=" + type +",*");
        Set<ObjectName> cfObjects = mbeanServerConn.queryNames(query, null);
        List<Entry<String, ColumnFamilyStoreMBean>> mbeans = new ArrayList<Entry<String, ColumnFamilyStoreMBean>>(cfObjects.size());
        for(ObjectName n : cfObjects)
        {
            String keyspaceName = n.getKeyProperty("keyspace");
            ColumnFamilyStoreMBean cfsProxy = JMX.newMBeanProxy(mbeanServerConn, n, ColumnFamilyStoreMBean.class);
            mbeans.add(new AbstractMap.SimpleImmutableEntry<String, ColumnFamilyStoreMBean>(keyspaceName, cfsProxy));
        }
        return mbeans;
    }
    public boolean hasNext()
    {
        return mbeans.hasNext();
    }
    public Entry<String, ColumnFamilyStoreMBean> next()
    {
        return mbeans.next();
    }
    public void remove()
    {
        throw new UnsupportedOperationException();
    }
}
| |
package umich.edu.csev.rps;
import android.os.AsyncTask;
import android.os.Handler;
import android.os.SystemClock;
import android.support.v7.app.ActionBarActivity;
import android.support.v7.app.ActionBar;
import android.support.v4.app.Fragment;
import android.os.Bundle;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.os.Build;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.ProgressBar;
import android.widget.TextView;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.StatusLine;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.json.JSONArray;
import org.json.JSONObject;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.URI;
// Rock-paper-scissors client activity. Hosts a single PlaceholderFragment that
// polls an HTTP game server for leaderboard stats and submits plays.
public class MainActivity extends ActionBarActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        if (savedInstanceState == null) {
            getSupportFragmentManager().beginTransaction()
                    .add(R.id.container, new PlaceholderFragment())
                    .commit();
        }
    }
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        if (id == R.id.action_settings) {
            return true;
        }
        return super.onOptionsItemSelected(item);
    }
    /**
     * A placeholder fragment containing a simple view.
     */
    public static class PlaceholderFragment extends Fragment {
        View rootView;
        public String urlString = "http://192.168.1.201:8888/mmorps";  // game server base URL (user-editable via settings field)
        int serverHeight = -1;
        public String pairString = "";  // pairing code typed by the user; empty until paired
        int pairHeight = -1;
        ImageButton rockButton;
        ImageButton paperButton;
        ImageButton scissorsButton;
        Button settingsButton;
        ProgressBar spinner;  // shown while a leaderboard refresh is in flight
        TextView textStatus;
        String statusStr = "You need pairing code to play";
        TextView textLeaders;
        String leadersStr;
        private Handler mHandler = new Handler();
        long mStartTime = 0L;
        String rpsGuid = null;
        long checkTime = -1;
        long leaderTime = 1;  // tick countdown until the next stats refresh; -1 disarms it
        String EOL = System.getProperty("line.separator");
        // Parses the stats.php JSON array and rebuilds the leaderboard text;
        // on success re-arms the refresh countdown.
        private void handleStats(String input) {
            try {
                System.out.println("handleStats ="+input);
                JSONArray json = new JSONArray(input);
                System.out.println("array="+json);
                leadersStr = "Leaderboard:"+EOL;
                for(int i = 0 ; i < json.length(); i++){
                    String name = json.getJSONObject(i).getString("name");
                    Integer score = json.getJSONObject(i).getInt("score");
                    Integer games = json.getJSONObject(i).getInt("games");
                    leadersStr = leadersStr + " " + name + "(" + games + ")" + " score=" + score + EOL;
                    System.out.println(leadersStr);
                }
                leaderTime = 15L;  // next refresh ~15 ticks out
            } catch (Exception e) {
                System.out.println("Exception "+e.getMessage());
            }
        }
        // Parses the play.php JSON response; result handling is still stubbed out
        // (the commented block below is a leftover copy of handleStats).
        private void handlePlay(String input) {
            try {
                System.out.println("handlePlay ="+input);
                JSONObject json = new JSONObject(input);
                System.out.println("object="+json);
                /*
                leadersStr = "Leaderboard:"+EOL;
                for(int i = 0 ; i < json.length(); i++){
                    String name = json.getJSONObject(i).getString("name");
                    Integer score = json.getJSONObject(i).getInt("score");
                    Integer games = json.getJSONObject(i).getInt("games");
                    leadersStr = leadersStr + " " + name + "(" + games + ")" + " score=" + score + EOL;
                    System.out.println(leadersStr);
                }
                leaderTime = 15L;
                */
            } catch (Exception e) {
                System.out.println("Exception "+e.getMessage());
            }
        }
        // Background HTTP GET of urlString + "/" + script. Dispatches the body to
        // handleStats/handlePlay on the worker thread, then refreshes the UI in
        // onPostExecute (which runs on the main thread).
        private class MyGETJSON extends AsyncTask<String, Void, String> {
            @Override
            protected String doInBackground(String... params) {
                String script = null;
                for(String whatever : params){
                    System.out.println("P="+whatever);
                    script = whatever;  // last parameter wins; callers pass exactly one
                }
                try {
                    HttpClient httpclient = new DefaultHttpClient();
                    String theUrl = urlString+"/"+script;
                    System.out.println("theUrl="+theUrl);
                    URI website = new URI(theUrl);
                    HttpGet get = new HttpGet();
                    get.setURI(website);
                    HttpResponse response = httpclient.execute(get);
                    StatusLine statusLine = response.getStatusLine();
                    System.out.println("SL="+statusLine);
                    if(statusLine.getStatusCode() == HttpStatus.SC_OK){
                        ByteArrayOutputStream out = new ByteArrayOutputStream();
                        response.getEntity().writeTo(out);
                        out.close();
                        String responseString = out.toString();
                        System.out.println("Response\n");
                        System.out.println(responseString);
                        if ( script.startsWith("stats.php")) handleStats(responseString);
                        if ( script.startsWith("play.php")) handlePlay(responseString);
                    } else {
                        //Closes the connection.
                        response.getEntity().getContent().close();
                        throw new IOException(statusLine.getReasonPhrase());
                    }
                } catch (Exception e) {
                    System.out.println("Exception "+e.getMessage());
                }
                return null;
            }
            @Override
            protected void onPostExecute(String result) {
                // System.out.println("onPostExecute\n"+leadersStr);
                textLeaders.setText(leadersStr);
                textStatus.setText(statusStr);
                if ( checkTime < 1 ) {
                    spinner.setVisibility(View.INVISIBLE);
                }
            }
            @Override
            protected void onPreExecute() {
            }
            @Override
            protected void onProgressUpdate(Void... values) {
            }
        }
        // http://android-developers.blogspot.kr/2007/11/stitch-in-time.html
        // Once-a-second handler task: counts leaderTime down, kicks off a stats
        // refresh when it reaches zero, then reschedules itself.
        private Runnable mUpdateTimeTask = new Runnable() {
            public void run() {
                final long start = mStartTime;
                long current = SystemClock.uptimeMillis();
                // System.out.println("start="+start+" current="+current);
                long millis = current - start;
                int seconds = (int) (millis / 1000);
                int minutes = seconds / 60;
                seconds = seconds % 60;
                /* if (seconds < 10) {
                    textStatus.setText("" + minutes + ":0" + seconds);
                } else {
                    textStatus.setText("" + minutes + ":" + seconds);
                } */
                if ( leaderTime > 0 ) leaderTime = leaderTime -1;
                if ( leaderTime == 0 ) {
                    spinner.setVisibility(View.VISIBLE);
                    leaderTime = -1;  // disarm until handleStats re-arms it
                    System.out.println("I AM TRIGGERED");
                    new MyGETJSON().execute("stats.php");
                    System.out.println("Back");
                }
                // Schedule the next tick on the next whole second after mStartTime.
                mHandler.postAtTime(this,
                        start + (((minutes * 60) + seconds + 1) * 1000));
            }
        };
        public PlaceholderFragment() {
        }
        // Submits one play to the server (0/1/2 appear to map to rock/paper/scissors
        // based on the button handlers below -- confirm against the server), or,
        // when no pairing code is set, reveals the settings fields instead.
        public void doPlay(int playValue, EditText textServer, EditText textPair) {
            if ( pairString.length()< 1 ) {
                statusStr = "Please enter a pairing code";
                textStatus.setText(statusStr);
                textServer.setVisibility(View.VISIBLE);
                textPair.setVisibility(View.VISIBLE);
                settingsButton.setText("^");
            } else {
                String url = "play.php?play="+playValue+"&pair="+pairString;
                System.out.println("url="+url);
                new MyGETJSON().execute(url);
                System.out.println("Back");
            }
        }
        @Override
        public View onCreateView(LayoutInflater inflater, ViewGroup container,
                                 Bundle savedInstanceState) {
            rootView = inflater.inflate(R.layout.fragment_main, container, false);
            // http://stackoverflow.com/questions/12559461/how-to-show-progress-barcircle-in-an-activity-having-a-listview-before-loading
            spinner = (ProgressBar) rootView.findViewById(R.id.spinner);
            System.out.println("Spinner="+spinner);
            // http://stackoverflow.com/questions/4310525/android-on-edittext-changed-listener
            // Keep urlString in sync with the server EditText as the user types.
            final EditText textServer = (EditText) rootView.findViewById(R.id.serverText);
            System.out.println("text="+textServer);
            textServer.setText(urlString);
            textServer.addTextChangedListener(new TextWatcher(){
                public void afterTextChanged(Editable s) {
                    // System.out.println("url onchange="+textServer.getText());
                    urlString = textServer.getText().toString();
                }
                public void beforeTextChanged(CharSequence s, int start, int count, int after){}
                public void onTextChanged(CharSequence s, int start, int before, int count){}
            });
            // Keep pairString in sync with the pairing-code EditText.
            final EditText textPair = (EditText) rootView.findViewById(R.id.pairText);
            System.out.println("text=" + textPair);
            textPair.addTextChangedListener(new TextWatcher() {
                public void afterTextChanged(Editable s) {
                    // System.out.println("pair onchange="+textPair.getText());
                    pairString = textPair.getText().toString();
                }
                public void beforeTextChanged(CharSequence s, int start, int count, int after) {
                }
                public void onTextChanged(CharSequence s, int start, int before, int count) {
                }
            });
            // http://stackoverflow.com/questions/4622517/hide-a-edittext-make-it-visible-by-clicking-a-menu
            // Settings button toggles visibility of the server/pairing fields.
            settingsButton = (Button) rootView.findViewById(R.id.settingsButton);
            settingsButton.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View arg0) {
                    int current = textServer.getVisibility();
                    if ( current == View.GONE ) {
                        textServer.setVisibility(View.VISIBLE);
                        textPair.setVisibility(View.VISIBLE);
                        settingsButton.setText("^");
                    } else {
                        textServer.setVisibility(View.GONE);
                        textPair.setVisibility(View.GONE);
                        settingsButton.setText("+");
                    }
                }
            });
            rockButton = (ImageButton) rootView.findViewById(R.id.rockButton);
            System.out.println("ImageButton = "+rockButton);
            rockButton.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View arg0) {
                    System.out.println("Rock CLicked");
                    doPlay(0, textServer, textPair);
                }
            });
            paperButton = (ImageButton) rootView.findViewById(R.id.paperButton);
            System.out.println("paperButton = "+paperButton);
            paperButton.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View arg0) {
                    System.out.println("paperClicked ");
                    doPlay(1, textServer, textPair);
                }
            });
            scissorsButton = (ImageButton) rootView.findViewById(R.id.scissorsButton);
            System.out.println("scissorsButton = "+scissorsButton);
            scissorsButton.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View arg0) {
                    System.out.println("scissorsClicked");
                    doPlay(2, textServer, textPair);
                }
            });
            textStatus = (TextView) rootView.findViewById(R.id.textStatus);
            textLeaders = (TextView) rootView.findViewById(R.id.textLeaders);
            // http://stackoverflow.com/questions/1748977/making-textview-scrollable-in-android
            // yourTextView.setMovementMethod(new ScrollingMovementMethod())
            // First up my timer...
            mStartTime = SystemClock.uptimeMillis();
            System.out.println("Click="+mStartTime);
            mHandler.removeCallbacks(mUpdateTimeTask);
            mHandler.postDelayed(mUpdateTimeTask, 100);
            return rootView;
        }
    }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.refactoring;
import com.intellij.codeInsight.PsiEquivalenceUtil;
import com.intellij.find.findUsages.FindUsagesHandler;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.usageView.UsageInfo;
import com.intellij.util.containers.ContainerUtil;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.findUsages.PyFindUsagesHandlerFactory;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.refactoring.introduce.IntroduceValidator;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
public class PyRefactoringUtil {
    // Pure utility class; instantiation is disallowed.
    private PyRefactoringUtil() {
    }
    /**
     * Finds all elements in {@code context} that are structurally equivalent to {@code pattern}.
     * Parameters are never reported. String literals get special handling when the original
     * selection broke an AST node (a substring of a literal was selected).
     *
     * @param pattern the element to match against
     * @param context the subtree to search; null yields an empty list
     * @return matching elements in traversal order
     */
    @NotNull
    public static List<PsiElement> getOccurrences(@NotNull final PsiElement pattern, @Nullable final PsiElement context) {
        if (context == null) {
            return Collections.emptyList();
        }
        final List<PsiElement> occurrences = new ArrayList<>();
        final PyElementVisitor visitor = new PyElementVisitor() {
            public void visitElement(@NotNull final PsiElement element) {
                if (element instanceof PyParameter) {
                    // Parameters are never treated as occurrences.
                    return;
                }
                if (PsiEquivalenceUtil.areElementsEquivalent(element, pattern)) {
                    occurrences.add(element);
                    return;
                }
                if (element instanceof PyStringLiteralExpression) {
                    // The pattern may represent a selected substring of a string literal.
                    final Pair<PsiElement, TextRange> selection = pattern.getUserData(PyReplaceExpressionUtil.SELECTION_BREAKS_AST_NODE);
                    if (selection != null) {
                        final String substring = selection.getSecond().substring(pattern.getText());
                        final PyStringLiteralExpression expr = (PyStringLiteralExpression)element;
                        final String text = element.getText();
                        if (text != null && expr.getStringNodes().size() == 1) {
                            final int start = text.indexOf(substring);
                            if (start >= 0) {
                                // Remember which part of this literal matched, for later replacement.
                                element.putUserData(PyReplaceExpressionUtil.SELECTION_BREAKS_AST_NODE, Pair.create(element, TextRange.from(start, substring.length())));
                                occurrences.add(element);
                                return;
                            }
                        }
                    }
                }
                element.acceptChildren(this);
            }
        };
        context.acceptChildren(visitor);
        return occurrences;
    }
    /**
     * Returns the expression exactly spanned by {@code element1..element2}, or a synthetic
     * expression when the selection cuts into a binary expression.
     *
     * @return the selected expression, or null when the selection is not extractable
     *         (not an expression, inside an import statement, or does not reparse)
     */
    @Nullable
    public static PyExpression getSelectedExpression(@NotNull final Project project,
                                                     @NotNull PsiFile file,
                                                     @NotNull final PsiElement element1,
                                                     @NotNull final PsiElement element2) {
        PsiElement parent = PsiTreeUtil.findCommonParent(element1, element2);
        if (parent != null && !(parent instanceof PyElement)) {
            parent = PsiTreeUtil.getParentOfType(parent, PyElement.class);
        }
        if (parent == null) {
            return null;
        }
        // If it is PyIfPart for example, parent if statement, we should deny
        if (!(parent instanceof PyExpression)){
            return null;
        }
        // We cannot extract anything within import statements
        if (PsiTreeUtil.getParentOfType(parent, PyImportStatement.class, PyFromImportStatement.class) != null){
            return null;
        }
        // Exact match: the selection covers the whole parent expression.
        if ((element1 == PsiTreeUtil.getDeepestFirst(parent)) && (element2 == PsiTreeUtil.getDeepestLast(parent))) {
            return (PyExpression) parent;
        }
        // Check if selection breaks AST node in binary expression
        if (parent instanceof PyBinaryExpression) {
            final String selection = file.getText().substring(element1.getTextOffset(), element2.getTextOffset() + element2.getTextLength());
            final PyElementGenerator generator = PyElementGenerator.getInstance(project);
            final LanguageLevel langLevel = LanguageLevel.forElement(element1);
            // Reparse the selected text on its own to see whether it forms a valid expression.
            final PyExpression expression = generator.createFromText(langLevel, PyAssignmentStatement.class, "z=" + selection).getAssignedValue();
            if (!(expression instanceof PyBinaryExpression) || PsiUtilCore.hasErrorElementChild(expression)) {
                return null;
            }
            final String parentText = parent.getText();
            // NOTE(review): the "- 1" offset into the parent's text is not self-explanatory;
            // presumably it rejects selections starting at the parent's first character -- verify before changing.
            final int startOffset = element1.getTextOffset() - parent.getTextOffset() - 1;
            if (startOffset < 0) {
                return null;
            }
            final int endOffset = element2.getTextOffset() + element2.getTextLength() - parent.getTextOffset();
            final String prefix = parentText.substring(0, startOffset);
            final String suffix = parentText.substring(endOffset, parentText.length());
            final TextRange textRange = TextRange.from(startOffset, endOffset - startOffset);
            // Substitute a placeholder identifier for the selection and check the remainder still parses.
            final PsiElement fakeExpression = generator.createExpressionFromText(langLevel, prefix + "python" + suffix);
            if (PsiUtilCore.hasErrorElementChild(fakeExpression)) {
                return null;
            }
            expression.putUserData(PyReplaceExpressionUtil.SELECTION_BREAKS_AST_NODE, Pair.create(parent, textRange));
            return expression;
        }
        return null;
    }
@NotNull
public static Collection<String> collectUsedNames(@Nullable final PsiElement scope) {
if (!(scope instanceof PyClass) && !(scope instanceof PyFile) && !(scope instanceof PyFunction)) {
return Collections.emptyList();
}
final Set<String> variables = new HashSet<String>() {
@Override
public boolean add(String s) {
return s != null && super.add(s);
}
};
scope.acceptChildren(new PyRecursiveElementVisitor() {
@Override
public void visitPyTargetExpression(@NotNull final PyTargetExpression node) {
variables.add(node.getName());
}
@Override
public void visitPyNamedParameter(@NotNull final PyNamedParameter node) {
variables.add(node.getName());
}
@Override
public void visitPyReferenceExpression(PyReferenceExpression node) {
if (!node.isQualified()) {
variables.add(node.getReferencedName());
}
else {
super.visitPyReferenceExpression(node);
}
}
@Override
public void visitPyFunction(@NotNull final PyFunction node) {
variables.add(node.getName());
}
@Override
public void visitPyClass(@NotNull final PyClass node) {
variables.add(node.getName());
}
});
return variables;
}
@Nullable
public static PsiElement findExpressionInRange(@NotNull final PsiFile file, int startOffset, int endOffset) {
PsiElement element1 = file.findElementAt(startOffset);
PsiElement element2 = file.findElementAt(endOffset - 1);
if (element1 instanceof PsiWhiteSpace) {
startOffset = element1.getTextRange().getEndOffset();
element1 = file.findElementAt(startOffset);
}
if (element2 instanceof PsiWhiteSpace) {
endOffset = element2.getTextRange().getStartOffset();
element2 = file.findElementAt(endOffset - 1);
}
if (element1 == null || element2 == null) {
return null;
}
return getSelectedExpression(file.getProject(), file, element1, element2);
}
    /**
     * Finds the run of whole statements covered by {@code [startOffset, endOffset)}.
     * Leading/trailing whitespace and comments around the selection are tolerated;
     * comments inside the range are included in the result.
     *
     * @return the covered statements (and comments), or an empty array when the range
     *         does not align with statement boundaries or spans a function/class
     */
    @NotNull
    public static PsiElement[] findStatementsInRange(@NotNull final PsiFile file, int startOffset, int endOffset) {
        ArrayList<PsiElement> array = new ArrayList<>();
        PsiElement element1 = file.findElementAt(startOffset);
        PsiElement element2 = file.findElementAt(endOffset - 1);
        PsiElement endComment = null;
        boolean startsWithWhitespace = false;
        boolean endsWithWhitespace = false;
        // Move the start forward past leading whitespace.
        if (element1 instanceof PsiWhiteSpace) {
            startOffset = element1.getTextRange().getEndOffset();
            element1 = file.findElementAt(startOffset);
            startsWithWhitespace = true;
        }
        // Move the end back over trailing whitespace.
        if (element2 instanceof PsiWhiteSpace) {
            element2 = PsiTreeUtil.skipWhitespacesBackward(element2);
            endsWithWhitespace = true;
        }
        // Remember the trailing comment run (endComment) to re-append at the end.
        while (element2 instanceof PsiComment) {
            endComment = element2;
            element2 = PsiTreeUtil.skipWhitespacesAndCommentsBackward(element2);
            endsWithWhitespace = true;
        }
        // Leading comments go straight into the result.
        while (element1 instanceof PsiComment) {
            array.add(element1);
            element1 = PsiTreeUtil.skipWhitespacesForward(element1);
            startsWithWhitespace = true;
        }
        if (element1 == null || element2 == null) {
            return PsiElement.EMPTY_ARRAY;
        }
        PsiElement parent = PsiTreeUtil.findCommonParent(element1, element2);
        if (parent == null) {
            return PsiElement.EMPTY_ARRAY;
        }
        // Walk up to the enclosing statement list (or a single statement's parent).
        while (true) {
            if (parent instanceof PyStatement) {
                parent = parent.getParent();
                break;
            }
            if (parent instanceof PyStatementList) {
                break;
            }
            if (parent == null || parent instanceof PsiFile) {
                return PsiElement.EMPTY_ARRAY;
            }
            parent = parent.getParent();
        }
        // Normalize element1 to a direct child of parent.
        if (!parent.equals(element1)) {
            while (!parent.equals(element1.getParent())) {
                element1 = element1.getParent();
            }
        }
        // Reject selections whose start does not align with a statement boundary.
        if (startOffset != element1.getTextRange().getStartOffset() && !startsWithWhitespace) {
            return PsiElement.EMPTY_ARRAY;
        }
        // Normalize element2 to a direct child of parent.
        if (!parent.equals(element2)) {
            while (!parent.equals(element2.getParent())) {
                element2 = element2.getParent();
            }
        }
        // Reject selections whose end does not align with a statement boundary.
        if (endOffset != element2.getTextRange().getEndOffset() && !endsWithWhitespace) {
            return PsiElement.EMPTY_ARRAY;
        }
        // Whole functions/classes cannot be extracted as a statement range.
        if (element1 instanceof PyFunction || element1 instanceof PyClass) {
            return PsiElement.EMPTY_ARRAY;
        }
        if (element2 instanceof PyFunction || element2 instanceof PyClass) {
            return PsiElement.EMPTY_ARRAY;
        }
        // Collect parent's children between element1 and element2, inclusive.
        PsiElement[] children = parent.getChildren();
        boolean flag = false;
        for (PsiElement child : children) {
            if (child.equals(element1)) {
                flag = true;
            }
            if (flag && !(child instanceof PsiWhiteSpace)) {
                array.add(child);
            }
            if (child.equals(element2)) {
                break;
            }
        }
        // Re-append the trailing comment run skipped earlier.
        while (endComment instanceof PsiComment) {
            array.add(endComment);
            endComment = PsiTreeUtil.skipWhitespacesForward(endComment);
        }
        // Anything other than statements, whitespace, or comments invalidates the range.
        for (PsiElement element : array) {
            if (!(element instanceof PyStatement || element instanceof PsiWhiteSpace || element instanceof PsiComment)) {
                return PsiElement.EMPTY_ARRAY;
            }
        }
        return PsiUtilCore.toPsiElementArray(array);
    }
public static boolean areConflictingMethods(PyFunction pyFunction, PyFunction pyFunction1) {
final PyParameter[] firstParams = pyFunction.getParameterList().getParameters();
final PyParameter[] secondParams = pyFunction1.getParameterList().getParameters();
final String firstName = pyFunction.getName();
final String secondName = pyFunction1.getName();
return Comparing.strEqual(firstName, secondName) && firstParams.length == secondParams.length;
}
    /**
     * Collects all code usages of the given element via the Python find-usages handler.
     * Non-code usages are filtered out.
     *
     * @param element            the named element whose usages are wanted
     * @param forHighlightUsages passed through to the handler factory
     * @return the collected code usages
     */
    @NotNull
    public static List<UsageInfo> findUsages(@NotNull PsiNamedElement element, boolean forHighlightUsages) {
        final List<UsageInfo> usages = new ArrayList<>();
        final FindUsagesHandler handler = new PyFindUsagesHandlerFactory().createFindUsagesHandler(element, forHighlightUsages);
        assert handler != null;
        // Usages are searched for the element itself plus any secondary elements the handler reports.
        final List<PsiElement> elementsToProcess = new ArrayList<>();
        Collections.addAll(elementsToProcess, handler.getPrimaryElements());
        Collections.addAll(elementsToProcess, handler.getSecondaryElements());
        for (PsiElement e : elementsToProcess) {
            handler.processElementUsages(e, usageInfo -> {
                if (!usageInfo.isNonCodeUsage) {
                    usages.add(usageInfo);
                }
                return true;
            }, FindUsagesHandler.createFindUsagesOptions(element.getProject(), null));
        }
        return usages;
    }
/**
 * Picks the shortest name that is unique within the scope of {@code scopeAnchor},
 * drawing candidates from {@link NameSuggesterUtil#generateNamesByType(String)}.
 * When every candidate clashes, a numeric suffix is appended until the name
 * becomes unique.
 *
 * @param typeName    type name the candidate names are derived from
 * @param scopeAnchor PSI element that determines the scope to check against
 * @return a name unique in the scope of {@code scopeAnchor}
 */
@NotNull
public static String selectUniqueNameFromType(@NotNull String typeName, @NotNull PsiElement scopeAnchor) {
    return selectUniqueName(typeName, true, scopeAnchor);
}
/**
 * Picks the shortest name that is unique within the scope of {@code scopeAnchor},
 * drawing candidates from {@link NameSuggesterUtil#generateNames(String)}.
 * When every candidate clashes, a numeric suffix is appended until the name
 * becomes unique.
 *
 * @param templateName template the candidate names are derived from
 * @param scopeAnchor  PSI element that determines the scope to check against
 * @return a name unique in the scope of {@code scopeAnchor}
 */
@NotNull
public static String selectUniqueName(@NotNull String templateName, @NotNull PsiElement scopeAnchor) {
    return selectUniqueName(templateName, false, scopeAnchor);
}
/**
 * Shared implementation for the public {@code selectUniqueName*} variants:
 * generates candidate names (by type or by template), returns the first one
 * valid in the anchor's scope, and otherwise falls back to appending a
 * numeric suffix to the shortest candidate.
 *
 * @param templateName   seed string for name generation
 * @param templateIsType whether the seed is a type name rather than a plain template
 * @param scopeAnchor    PSI element that determines the scope to check against
 * @return a name unique in the scope of {@code scopeAnchor}
 */
@NotNull
private static String selectUniqueName(@NotNull String templateName, boolean templateIsType, @NotNull PsiElement scopeAnchor) {
    final Collection<String> candidates = templateIsType
            ? NameSuggesterUtil.generateNamesByType(templateName)
            : NameSuggesterUtil.generateNames(templateName);
    for (String candidate : candidates) {
        if (isValidNewName(candidate, scopeAnchor)) {
            return candidate;
        }
    }
    // The generators always yield at least one candidate, so the first item is non-null.
    //noinspection ConstantConditions
    return appendNumberUntilValid(ContainerUtil.getFirstItem(candidates), scopeAnchor);
}
/**
 * Appends increasing numbers, starting from 1, to {@code name} until the
 * result is unique within the scope of {@code scopeAnchor}. The unsuffixed
 * name itself is tried first.
 *
 * @param name        initial name
 * @param scopeAnchor PSI element that determines the scope to check against
 * @return a unique name, possibly with a numeric suffix appended
 */
@NotNull
public static String appendNumberUntilValid(@NotNull String name, @NotNull PsiElement scopeAnchor) {
    String candidate = name;
    for (int suffix = 1; !isValidNewName(candidate, scopeAnchor); suffix++) {
        candidate = name + suffix;
    }
    return candidate;
}
/**
 * A name is acceptable when it is not already defined in the anchor's scope
 * and is not a reserved Python word.
 *
 * @param name        candidate name
 * @param scopeAnchor PSI element that determines the scope to check against
 * @return {@code true} if the name can be introduced safely
 */
public static boolean isValidNewName(@NotNull String name, @NotNull PsiElement scopeAnchor) {
    return !IntroduceValidator.isDefinedInScope(name, scopeAnchor) && !PyNames.isReserved(name);
}
}
| |
/*
* Copyright 2000-2016 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.tests.components.grid.basicfeatures.escalator;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.junit.Before;
import org.junit.ComparisonFailure;
import org.junit.Test;
import org.openqa.selenium.By;
import org.openqa.selenium.Keys;
import org.openqa.selenium.WebElement;
import com.vaadin.client.WidgetUtil;
import com.vaadin.shared.Range;
import com.vaadin.testbench.TestBenchElement;
import com.vaadin.testbench.elements.NotificationElement;
import com.vaadin.testbench.parallel.BrowserUtil;
import com.vaadin.tests.components.grid.basicfeatures.EscalatorBasicClientFeaturesTest;
/**
 * TestBench tests for Escalator row spacers: opening/closing spacers, their
 * effect on row positions and scrolling, DOM placement, and focus handling.
 * <p>
 * Fixes applied: two {@code assertEquals} calls had their (expected, actual)
 * arguments swapped, which would produce misleading failure messages; a typo
 * in an assertion message ("amout") was corrected.
 */
@SuppressWarnings("boxing")
public class EscalatorSpacerTest extends EscalatorBasicClientFeaturesTest {

    //@formatter:off
    // separate strings made so that eclipse can show the concatenated string by hovering the mouse over the constant

    // translate3d(0px, 40px, 123px);
    // translate3d(24px, 15.251px, 0);
    // translate(0, 40px);
    private final static String TRANSLATE_VALUE_REGEX =
            "translate(?:3d|)" // "translate" or "translate3d"
            + "\\(" // literal "("
            + "(" // start capturing the x argument
            + "[0-9]+" // the integer part of the value
            + "(?:" // start of the subpixel part of the value
            + "\\.[0-9]" // if we have a period, there must be at least one number after it
            + "[0-9]*" // any amount of accuracy afterwards is fine
            + ")?" // the subpixel part is optional
            + ")"
            + "(?:px)?" // we don't care if the values are suffixed by "px" or not.
            + ", "
            + "(" // start capturing the y argument
            + "[0-9]+" // the integer part of the value
            + "(?:" // start of the subpixel part of the value
            + "\\.[0-9]" // if we have a period, there must be at least one number after it
            + "[0-9]*" // any amount of accuracy afterwards is fine
            + ")?" // the subpixel part is optional
            + ")"
            + "(?:px)?" // we don't care if the values are suffixed by "px" or not.
            + "(?:, .*?)?" // the possible z argument, uninteresting (translate doesn't have one, translate3d does)
            + "\\)" // literal ")"
            + ";?"; // optional ending semicolon

    // 40px;
    // 12.34px
    private final static String PIXEL_VALUE_REGEX =
            "(" // capture the pixel value
            + "[0-9]+" // the pixel argument
            + "(?:" // start of the subpixel part of the value
            + "\\.[0-9]" // if we have a period, there must be at least one number after it
            + "[0-9]*" // any amount of accuracy afterwards is fine
            + ")?" // the subpixel part is optional
            + ")"
            + "(?:px)?" // optional "px" string
            + ";?"; // optional semicolon
    //@formatter:on

    // also matches "-webkit-transform";
    private final static Pattern TRANSFORM_CSS_PATTERN = Pattern
            .compile("transform: (.*?);");
    private final static Pattern TOP_CSS_PATTERN = Pattern.compile(
            "top: ([0-9]+(?:\\.[0-9]+)?(?:px)?);?", Pattern.CASE_INSENSITIVE);
    private final static Pattern LEFT_CSS_PATTERN = Pattern.compile(
            "left: ([0-9]+(?:\\.[0-9]+)?(?:px)?);?", Pattern.CASE_INSENSITIVE);

    private final static Pattern TRANSLATE_VALUE_PATTERN = Pattern
            .compile(TRANSLATE_VALUE_REGEX);
    private final static Pattern PIXEL_VALUE_PATTERN = Pattern
            .compile(PIXEL_VALUE_REGEX, Pattern.CASE_INSENSITIVE);

    /** Opens the test page with a fixed 20px row height so pixel math is stable. */
    @Before
    public void before() {
        setDebug(true);
        openTestURL("theme=reindeer");
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS, "Set 20px default height");
        populate();
    }

    @Test
    public void openVisibleSpacer() {
        assertFalse("No spacers should be shown at the start",
                spacersAreFoundInDom());
        selectMenuPath(FEATURES, SPACERS, ROW_1, SET_100PX);
        assertNotNull("Spacer should be shown after setting it", getSpacer(1));
    }

    @Test
    public void closeVisibleSpacer() {
        selectMenuPath(FEATURES, SPACERS, ROW_1, SET_100PX);
        selectMenuPath(FEATURES, SPACERS, ROW_1, REMOVE);
        assertNull("Spacer should not exist after removing it", getSpacer(1));
    }

    @Test
    public void spacerPushesVisibleRowsDown() {
        double oldTop = getElementTop(getBodyRow(2));
        selectMenuPath(FEATURES, SPACERS, ROW_1, SET_100PX);
        double newTop = getElementTop(getBodyRow(2));
        assertGreater("Row below a spacer was not pushed down", newTop, oldTop);
    }

    @Test
    public void addingRowAboveSpacerPushesItDown() {
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS, REMOVE_ALL_ROWS);
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS, ADD_ONE_ROW_TO_BEGINNING);
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS, ADD_ONE_ROW_TO_BEGINNING);
        selectMenuPath(FEATURES, SPACERS, ROW_1, SET_100PX);
        double oldTop = getElementTop(getSpacer(1));
        // Inserting at the beginning shifts the spacer's row index from 1 to 2.
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS, ADD_ONE_ROW_TO_BEGINNING);
        double newTop = getElementTop(getSpacer(2));
        assertGreater("Spacer should've been pushed down (oldTop: " + oldTop
                + ", newTop: " + newTop + ")", newTop, oldTop);
    }

    @Test
    public void addingRowBelowSpacerDoesNotPushItDown() {
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS, REMOVE_ALL_ROWS);
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS, ADD_ONE_ROW_TO_BEGINNING);
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS, ADD_ONE_ROW_TO_BEGINNING);
        selectMenuPath(FEATURES, SPACERS, ROW_1, SET_100PX);
        double oldTop = getElementTop(getSpacer(1));
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS, ADD_ONE_ROW_TO_END);
        double newTop = getElementTop(getSpacer(1));
        assertEquals("Spacer should've not been pushed down", newTop, oldTop,
                WidgetUtil.PIXEL_EPSILON);
    }

    @Test
    public void addingRowBelowSpacerIsActuallyRenderedBelowWhenEscalatorIsEmpty() {
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS, REMOVE_ALL_ROWS);
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS, ADD_ONE_ROW_TO_BEGINNING);
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS, ADD_ONE_ROW_TO_BEGINNING);
        selectMenuPath(FEATURES, SPACERS, ROW_1, SET_100PX);
        double spacerTop = getElementTop(getSpacer(1));
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS, ADD_ONE_ROW_TO_END);
        double rowTop = getElementTop(getBodyRow(2));
        // The spacer is 100px tall, so the next row starts 100px below its top.
        assertEquals("Next row should've been rendered below the spacer",
                spacerTop + 100, rowTop, WidgetUtil.PIXEL_EPSILON);
    }

    @Test
    public void addSpacerAtBottomThenScrollThere() {
        selectMenuPath(FEATURES, SPACERS, ROW_99, SET_100PX);
        // 999999 scrolls all the way to the bottom.
        scrollVerticallyTo(999999);
        assertFalse("Did not expect a notification",
                $(NotificationElement.class).exists());
    }

    @Test
    public void scrollToBottomThenAddSpacerThere() {
        scrollVerticallyTo(999999);
        long oldBottomScrollTop = getScrollTop();
        selectMenuPath(FEATURES, SPACERS, ROW_99, SET_100PX);
        assertEquals(
                "Adding a spacer underneath the current viewport should "
                        + "not scroll anywhere",
                oldBottomScrollTop, getScrollTop());
        assertFalse("Got an unexpected notification",
                $(NotificationElement.class).exists());
        scrollVerticallyTo(999999);
        assertFalse("Got an unexpected notification",
                $(NotificationElement.class).exists());
        assertGreater("Adding a spacer should've made the scrollbar scroll "
                + "further", getScrollTop(), oldBottomScrollTop);
    }

    @Test
    public void removingRowAboveSpacerMovesSpacerUp() {
        selectMenuPath(FEATURES, SPACERS, ROW_1, SET_100PX);
        WebElement spacer = getSpacer(1);
        double originalElementTop = getElementTop(spacer);
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS,
                REMOVE_ONE_ROW_FROM_BEGINNING);
        assertLessThan("spacer should've moved up", getElementTop(spacer),
                originalElementTop);
        assertNull("No spacer for row 1 should be found after removing the "
                + "top row", getSpacer(1));
    }

    @Test
    public void removingSpacedRowRemovesSpacer() {
        selectMenuPath(FEATURES, SPACERS, ROW_1, SET_100PX);
        assertTrue("Spacer should've been found in the DOM",
                spacersAreFoundInDom());
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS,
                REMOVE_ONE_ROW_FROM_BEGINNING);
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS,
                REMOVE_ONE_ROW_FROM_BEGINNING);
        assertFalse("No spacers should be in the DOM after removing "
                + "associated spacer", spacersAreFoundInDom());
    }

    @Test
    public void spacersAreFixedInViewport_firstFreezeThenScroll() {
        selectMenuPath(FEATURES, FROZEN_COLUMNS, FREEZE_1_COLUMN);
        selectMenuPath(FEATURES, SPACERS, ROW_1, SET_100PX);
        assertEquals(
                "Spacer's left position should've been 0 at the " + "beginning",
                0d, getElementLeft(getSpacer(1)), WidgetUtil.PIXEL_EPSILON);
        int scrollTo = 10;
        scrollHorizontallyTo(scrollTo);
        assertEquals(
                "Spacer's left position should've been " + scrollTo
                        + " after scrolling " + scrollTo + "px",
                scrollTo, getElementLeft(getSpacer(1)),
                WidgetUtil.PIXEL_EPSILON);
    }

    @Test
    public void spacersAreFixedInViewport_firstScrollThenFreeze() {
        selectMenuPath(FEATURES, FROZEN_COLUMNS, FREEZE_1_COLUMN);
        int scrollTo = 10;
        scrollHorizontallyTo(scrollTo);
        selectMenuPath(FEATURES, SPACERS, ROW_1, SET_100PX);
        assertEquals(
                "Spacer's left position should've been " + scrollTo
                        + " after scrolling " + scrollTo + "px",
                scrollTo, getElementLeft(getSpacer(1)),
                WidgetUtil.PIXEL_EPSILON);
    }

    @Test
    public void addingMinusOneSpacerDoesNotScrollWhenScrolledAtTop() {
        scrollVerticallyTo(5);
        selectMenuPath(FEATURES, SPACERS, ROW_MINUS1, SET_100PX);
        assertEquals(
                "No scroll adjustment should've happened when adding the -1 spacer",
                5, getScrollTop());
    }

    @Test
    public void removingMinusOneSpacerScrolls() {
        scrollVerticallyTo(5);
        selectMenuPath(FEATURES, SPACERS, ROW_MINUS1, SET_100PX);
        selectMenuPath(FEATURES, SPACERS, ROW_MINUS1, REMOVE);
        assertEquals("Scroll adjustment should've happened when removing the "
                + "-1 spacer", 0, getScrollTop());
    }

    @Test
    public void scrollToRowWorksProperlyWithSpacers() throws Exception {
        selectMenuPath(FEATURES, SPACERS, ROW_MINUS1, SET_100PX);
        selectMenuPath(FEATURES, SPACERS, ROW_1, SET_100PX);
        /*
         * we check for row -3 instead of -1, because escalator has two rows
         * buffered underneath the footer
         */
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS, SCROLL_TO, ROW_75);
        Thread.sleep(500);
        assertEquals("Row 75: 0,75", getBodyCell(-3, 0).getText());
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS, SCROLL_TO, ROW_25);
        Thread.sleep(500);
        try {
            assertEquals("Row 25: 0,25", getBodyCell(0, 0).getText());
        } catch (ComparisonFailure retryForIE10andIE11) {
            /*
             * This seems to be some kind of subpixel/off-by-one-pixel error.
             * Everything's scrolled correctly, but Escalator still loads one
             * row above to the DOM, underneath the header. It's there, but it's
             * not visible. We'll allow for that one pixel error.
             */
            assertEquals("Row 24: 0,24", getBodyCell(0, 0).getText());
        }
    }

    @Test
    public void scrollToSpacerFromAbove() throws Exception {
        selectMenuPath(FEATURES, SPACERS, ROW_50, SET_100PX);
        selectMenuPath(FEATURES, SPACERS, ROW_50, SCROLL_HERE_ANY_0PADDING);

        // Browsers might vary with a few pixels.
        Range allowableScrollRange = Range.between(765, 780);
        int scrollTop = (int) getScrollTop();
        assertTrue("Scroll position was not " + allowableScrollRange + ", but "
                + scrollTop, allowableScrollRange.contains(scrollTop));
    }

    @Test
    public void scrollToSpacerFromBelow() throws Exception {
        selectMenuPath(FEATURES, SPACERS, ROW_50, SET_100PX);
        scrollVerticallyTo(999999);
        selectMenuPath(FEATURES, SPACERS, ROW_50, SCROLL_HERE_ANY_0PADDING);

        // Browsers might vary with a few pixels.
        Range allowableScrollRange = Range.between(1015, 1025);
        int scrollTop = (int) getScrollTop();
        assertTrue("Scroll position was not " + allowableScrollRange + ", but "
                + scrollTop, allowableScrollRange.contains(scrollTop));
    }

    @Test
    public void scrollToSpacerAlreadyInViewport() throws Exception {
        selectMenuPath(FEATURES, SPACERS, ROW_50, SET_100PX);
        scrollVerticallyTo(1000);
        selectMenuPath(FEATURES, SPACERS, ROW_50, SCROLL_HERE_ANY_0PADDING);
        // FIX: JUnit expects (expected, actual); the arguments were swapped.
        assertEquals(1000, getScrollTop());
    }

    @Test
    public void scrollToRowAndSpacerFromAbove() throws Exception {
        selectMenuPath(FEATURES, SPACERS, ROW_50, SET_100PX);
        selectMenuPath(FEATURES, SPACERS, ROW_50,
                SCROLL_HERE_SPACERBELOW_ANY_0PADDING);

        // Browsers might vary with a few pixels.
        Range allowableScrollRange = Range.between(765, 780);
        int scrollTop = (int) getScrollTop();
        assertTrue("Scroll position was not " + allowableScrollRange + ", but "
                + scrollTop, allowableScrollRange.contains(scrollTop));
    }

    @Test
    public void scrollToRowAndSpacerFromBelow() throws Exception {
        selectMenuPath(FEATURES, SPACERS, ROW_50, SET_100PX);
        scrollVerticallyTo(999999);
        selectMenuPath(FEATURES, SPACERS, ROW_50,
                SCROLL_HERE_SPACERBELOW_ANY_0PADDING);

        // Browsers might vary with a few pixels.
        Range allowableScrollRange = Range.between(995, 1005);
        int scrollTop = (int) getScrollTop();
        assertTrue("Scroll position was not " + allowableScrollRange + ", but "
                + scrollTop, allowableScrollRange.contains(scrollTop));
    }

    @Test
    public void scrollToRowAndSpacerAlreadyInViewport() throws Exception {
        selectMenuPath(FEATURES, SPACERS, ROW_50, SET_100PX);
        scrollVerticallyTo(950);
        selectMenuPath(FEATURES, SPACERS, ROW_50,
                SCROLL_HERE_SPACERBELOW_ANY_0PADDING);
        // FIX: JUnit expects (expected, actual); the arguments were swapped.
        assertEquals(950, getScrollTop());
    }

    @Test
    public void domCanBeSortedWithFocusInSpacer() throws InterruptedException {
        // Firefox behaves badly with focus-related tests - skip it.
        if (BrowserUtil.isFirefox(super.getDesiredCapabilities())) {
            return;
        }
        selectMenuPath(FEATURES, SPACERS, FOCUSABLE_UPDATER);
        selectMenuPath(FEATURES, SPACERS, ROW_1, SET_100PX);
        WebElement inputElement = getEscalator()
                .findElement(By.tagName("input"));
        inputElement.click();
        scrollVerticallyTo(30);

        // Sleep needed because of all the JS we're doing, and to let
        // the DOM reordering to take place.
        Thread.sleep(500);

        assertFalse("Error message detected",
                $(NotificationElement.class).exists());
    }

    @Test
    public void spacersAreInsertedInCorrectDomPosition() {
        selectMenuPath(FEATURES, SPACERS, ROW_1, SET_100PX);
        WebElement tbody = getEscalator().findElement(By.tagName("tbody"));
        WebElement spacer = getChild(tbody, 2);
        String cssClass = spacer.getAttribute("class");
        assertTrue(
                "element index 2 was not a spacer (class=\"" + cssClass + "\")",
                cssClass.contains("-spacer"));
    }

    @Test
    public void spacersAreInCorrectDomPositionAfterScroll() {
        selectMenuPath(FEATURES, SPACERS, ROW_1, SET_100PX);
        scrollVerticallyTo(32); // roughly one row's worth
        WebElement tbody = getEscalator().findElement(By.tagName("tbody"));
        WebElement spacer = getChild(tbody, 1);
        String cssClass = spacer.getAttribute("class");
        assertTrue(
                "element index 1 was not a spacer (class=\"" + cssClass + "\")",
                cssClass.contains("-spacer"));
    }

    @Test
    public void spacerScrolledIntoViewGetsFocus() {
        selectMenuPath(FEATURES, SPACERS, FOCUSABLE_UPDATER);
        selectMenuPath(FEATURES, SPACERS, ROW_50, SET_100PX);
        selectMenuPath(FEATURES, SPACERS, ROW_50, SCROLL_HERE_ANY_0PADDING);
        tryToTabIntoFocusUpdaterElement();
        assertEquals("input", getFocusedElement().getTagName());
    }

    @Test
    public void spacerScrolledOutOfViewDoesNotGetFocus() {
        selectMenuPath(FEATURES, SPACERS, FOCUSABLE_UPDATER);
        selectMenuPath(FEATURES, SPACERS, ROW_1, SET_100PX);
        selectMenuPath(FEATURES, SPACERS, ROW_50, SCROLL_HERE_ANY_0PADDING);
        tryToTabIntoFocusUpdaterElement();
        assertNotEquals("input", getFocusedElement().getTagName());
    }

    @Test
    public void spacerOpenedInViewGetsFocus() {
        selectMenuPath(FEATURES, SPACERS, FOCUSABLE_UPDATER);
        selectMenuPath(FEATURES, SPACERS, ROW_1, SET_100PX);
        tryToTabIntoFocusUpdaterElement();
        WebElement focusedElement = getFocusedElement();
        assertEquals("input", focusedElement.getTagName());
    }

    @Test
    public void spacerOpenedOutOfViewDoesNotGetFocus() {
        selectMenuPath(FEATURES, SPACERS, FOCUSABLE_UPDATER);
        selectMenuPath(FEATURES, SPACERS, ROW_50, SET_100PX);
        tryToTabIntoFocusUpdaterElement();
        assertNotEquals("input", getFocusedElement().getTagName());
    }

    @Test
    public void spacerOpenedInViewAndScrolledOutAndBackAgainGetsFocus() {
        selectMenuPath(FEATURES, SPACERS, FOCUSABLE_UPDATER);
        selectMenuPath(FEATURES, SPACERS, ROW_1, SET_100PX);
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS, SCROLL_TO, ROW_50);
        selectMenuPath(FEATURES, SPACERS, ROW_1, SCROLL_HERE_ANY_0PADDING);
        tryToTabIntoFocusUpdaterElement();
        assertEquals("input", getFocusedElement().getTagName());
    }

    @Test
    public void spacerOpenedOutOfViewAndScrolledInAndBackAgainDoesNotGetFocus() {
        selectMenuPath(FEATURES, SPACERS, FOCUSABLE_UPDATER);
        selectMenuPath(FEATURES, SPACERS, ROW_50, SET_100PX);
        selectMenuPath(FEATURES, SPACERS, ROW_50, SCROLL_HERE_ANY_0PADDING);
        selectMenuPath(COLUMNS_AND_ROWS, BODY_ROWS, SCROLL_TO, ROW_0);
        tryToTabIntoFocusUpdaterElement();
        assertNotEquals("input", getFocusedElement().getTagName());
    }

    /** Focuses the menu bar and presses TAB, which should land in the spacer's input. */
    private void tryToTabIntoFocusUpdaterElement() {
        ((TestBenchElement) findElement(By.className("gwt-MenuBar"))).focus();
        WebElement focusedElement = getFocusedElement();
        focusedElement.sendKeys(Keys.TAB);
    }

    /** Returns the {@code childIndex}:th DOM child of {@code parent} via JavaScript. */
    private WebElement getChild(WebElement parent, int childIndex) {
        return (WebElement) executeScript(
                "return arguments[0].children[" + childIndex + "];", parent);
    }

    /**
     * Parses an element's inline style into {@code [x/left, y/top]} pixel
     * coordinates, preferring a CSS transform over left/top properties.
     *
     * @throws IllegalArgumentException if neither form can be parsed
     */
    private static double[] getElementDimensions(WebElement element) {
        /*
         * we need to parse the style attribute, since using getCssValue gets a
         * normalized value that is harder to parse.
         */
        String style = element.getAttribute("style");

        String transform = getTransformFromStyle(style);
        if (transform != null) {
            return getTranslateValues(transform);
        }

        double[] result = new double[] { -1, -1 };
        String left = getLeftFromStyle(style);
        if (left != null) {
            result[0] = getPixelValue(left);
        }
        String top = getTopFromStyle(style);
        if (top != null) {
            result[1] = getPixelValue(top);
        }
        if (result[0] != -1 && result[1] != -1) {
            return result;
        } else {
            throw new IllegalArgumentException("Could not parse the position "
                    + "information from the CSS \"" + style + "\"");
        }
    }

    private static double getElementTop(WebElement element) {
        return getElementDimensions(element)[1];
    }

    private static double getElementLeft(WebElement element) {
        return getElementDimensions(element)[0];
    }

    private static String getTransformFromStyle(String style) {
        return getFromStyle(TRANSFORM_CSS_PATTERN, style);
    }

    private static String getTopFromStyle(String style) {
        return getFromStyle(TOP_CSS_PATTERN, style);
    }

    private static String getLeftFromStyle(String style) {
        return getFromStyle(LEFT_CSS_PATTERN, style);
    }

    /** Returns the first capture group of {@code pattern} in {@code style}, or null. */
    private static String getFromStyle(Pattern pattern, String style) {
        Matcher matcher = pattern.matcher(style);
        if (matcher.find()) {
            assertEquals("wrong amount of groups matched in " + style, 1,
                    matcher.groupCount());
            return matcher.group(1);
        } else {
            return null;
        }
    }

    /**
     * @return {@code [0] == x}, {@code [1] == y}
     */
    private static double[] getTranslateValues(String translate) {
        Matcher matcher = TRANSLATE_VALUE_PATTERN.matcher(translate);
        assertTrue("no matches for " + translate + " against "
                + TRANSLATE_VALUE_PATTERN, matcher.find());
        // FIX: corrected "amout" -> "amount" in the assertion message.
        assertEquals("wrong amount of groups matched in " + translate, 2,
                matcher.groupCount());

        return new double[] { Double.parseDouble(matcher.group(1)),
                Double.parseDouble(matcher.group(2)) };
    }

    /** Extracts the numeric part of a pixel value such as {@code "12.34px;"}. */
    private static double getPixelValue(String top) {
        Matcher matcher = PIXEL_VALUE_PATTERN.matcher(top);
        assertTrue(
                "no matches for \"" + top + "\" against " + PIXEL_VALUE_PATTERN,
                matcher.find());
        assertEquals("wrong amount of groups matched in " + top, 1,
                matcher.groupCount());
        return Double.parseDouble(matcher.group(1));
    }
}
| |
package uk.me.graphe.client;
/**
 * A drawable representation of a graph vertex: a styled bounding box with a
 * label, highlight flag, and an RGBA color.
 * <p>
 * Fix applied: {@link #getColor()} previously built a defensive copy but then
 * returned the internal {@code mColor} array, leaking mutable internal state;
 * it now returns the copy as the existing copy code clearly intended.
 */
public class VertexDrawable {

    private int mLeft;
    private int mTop;
    private int mWidth;
    private int mHeight;
    private int mStyle = UNDEFINED_STYLE;

    public static final int UNDEFINED_STYLE = -1;
    public static final int FILLED_CIRCLE_STYLE = 0x01;
    public static final int STROKED_TERM_STYLE = 0x02;
    public static final int STROKED_SQUARE_STYLE = 0x03;
    public static final int STROKED_DIAMOND_STYLE = 0x04;
    public static final int COLORED_FILLED_CIRCLE = 0x05;

    private String mLabel;
    private boolean mHilighted;
    // RGBA components, each expected in [0, 1]; defaults to opaque black.
    private float[] mColor = new float[] { 0, 0, 0, 1 };

    /**
     * create a new VertexDrawable
     *
     * @param left
     *            left of the bounding box
     * @param top
     *            top of the bounding box
     * @param width
     *            width of the bounding box
     * @param height
     *            height of the bounding box
     * @param label
     *            label of the vertex
     */
    public VertexDrawable(int left, int top, int width, int height, String label) {
        this.mLeft = left;
        this.mTop = top;
        this.mWidth = width;
        this.mHeight = height;
        this.mLabel = label;
    }

    /**
     * create a new VertexDrawable with an explicit style
     *
     * @param style one of the *_STYLE constants of this class
     */
    public VertexDrawable(int left, int top, int width, int height, String label, int style) {
        this(left, top, width, height, label);
        mStyle = style;
    }

    /**
     * gets the left of the bounding box of the vertex
     *
     * @return gets the left of the bounding box of the vertex
     */
    public int getLeft() {
        return mLeft;
    }

    /**
     * gets the top of the bounding box of the vertex
     *
     * @return gets the top of the bounding box of the vertex
     */
    public int getTop() {
        return mTop;
    }

    /**
     * gets the width of the bounding box of the vertex
     *
     * @return gets the width of the bounding box of the vertex
     */
    public int getWidth() {
        return mWidth;
    }

    /**
     * gets the height of the bounding box of the vertex
     *
     * @return gets the height of the bounding box of the vertex
     */
    public int getHeight() {
        return mHeight;
    }

    /**
     * gets the string label of this vertex
     *
     * @return a string representing the label of this vertex
     */
    public String getLabel() {
        return mLabel;
    }

    /**
     * updates the bounding rectangle of the drawable
     *
     * @param left
     *            the left of the bounding rectangle
     * @param top
     *            the top of the bounding rectangle
     * @param width
     *            the width of the bounding rectangle
     * @param height
     *            the height of the bounding rectangle
     */
    public void updateBoundingRectangle(int left, int top, int width, int height) {
        mHeight = height;
        mWidth = width;
        mTop = top;
        mLeft = left;
    }

    /**
     * resizes the drawable in place, keeping its center point fixed
     *
     * @param width
     *            the width of the bounding rectangle
     * @param height
     *            the height of the bounding rectangle
     */
    public void updateSize(int width, int height) {
        // Recompute left/top from the current center so the vertex stays centered.
        mLeft = getCenterX() - width / 2;
        mTop = getCenterY() - height / 2;
        mHeight = height;
        mWidth = width;
    }

    /**
     * gets the center x of this vd
     *
     * @return the x coordinate of the bounding box center
     */
    public int getCenterX() {
        return mLeft + mWidth / 2;
    }

    /**
     * gets the center y of this vd
     *
     * @return the y coordinate of the bounding box center
     */
    public int getCenterY() {
        return mTop + mHeight / 2;
    }

    /**
     * determines if a vd contains a specific co-ordinate
     *
     * @param x the x coordinate to test
     * @param y the y coordinate to test
     * @return true if (x, y) lies inside or on the edge of the bounding box
     */
    public boolean contains(int x, int y) {
        return x >= mLeft && x <= mLeft + mWidth && y >= mTop && y <= mTop + mHeight;
    }

    // Intentionally a no-op; kept for interface compatibility with callers.
    public void setCoords() {
    }

    /**
     * sets the style and resizes the box to that style's default dimensions
     *
     * @param style one of the *_STYLE constants of this class
     */
    public void setStyle(int style) {
        mStyle = style;
        // Square, terminal and diamond styles all share the same 2:1 default box.
        if (style == STROKED_SQUARE_STYLE || style == STROKED_TERM_STYLE
                || style == STROKED_DIAMOND_STYLE) {
            mWidth = 2 * Graphemeui.VERTEX_SIZE;
            mHeight = Graphemeui.VERTEX_SIZE;
        } else if (style == FILLED_CIRCLE_STYLE) {
            mWidth = Graphemeui.VERTEX_SIZE;
            mHeight = Graphemeui.VERTEX_SIZE;
        }
    }

    public int getStyle() {
        return mStyle;
    }

    public boolean isHilighted() {
        return mHilighted;
    }

    public void setHilighted(boolean h) {
        mHilighted = h;
    }

    /**
     * gets a copy of this vertex's RGBA color
     *
     * @return a defensive copy; mutating it does not affect this drawable
     */
    public float[] getColor() {
        // FIX: the original built this copy but returned the internal array,
        // letting callers mutate the drawable's state.
        return mColor.clone();
    }

    /**
     * sets this vertex's RGBA color
     *
     * @param color an array with at least four components (r, g, b, a)
     */
    public void setColor(float[] color) {
        // Copy component-wise so later mutation of the argument has no effect.
        for (int i = 0; i < 4; i++) {
            mColor[i] = color[i];
        }
    }

    /** renames the vertex label */
    public void rename(String name) {
        mLabel = name;
    }
}
| |
/**
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rx.internal.operators;
import java.util.*;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicLong;
import rx.*;
import rx.Observable;
import rx.Observable.Operator;
import rx.exceptions.*;
import rx.internal.util.*;
import rx.internal.util.atomic.*;
import rx.internal.util.unsafe.*;
import rx.subscriptions.CompositeSubscription;
/**
* Flattens a list of {@link Observable}s into one {@code Observable}, without any transformation.
* <p>
* <img width="640" height="380" src="https://raw.githubusercontent.com/wiki/ReactiveX/RxJava/images/rx-operators/merge.png" alt="">
* <p>
* You can combine the items emitted by multiple {@code Observable}s so that they act like a single {@code Observable}, by using the merge operation.
* <p>
* The {@code instance(true)} call behaves like {@link OperatorMerge} except that if any of the merged Observables notify of
* an error via {@code onError}, {@code mergeDelayError} will refrain from propagating that error
* notification until all of the merged Observables have finished emitting items.
* <p>
* <img width="640" src="https://github.com/ReactiveX/RxJava/wiki/images/rx-operators/mergeDelayError.png" alt="">
* <p>
* Even if multiple merged Observables send {@code onError} notifications, {@code mergeDelayError} will
* only invoke the {@code onError} method of its Observers once.
* <p>
* This operation allows an Observer to receive all successfully emitted items from all of the
* source Observables without being interrupted by an error notification from one of them.
* <p>
* <em>Note:</em> If this is used on an Observable that never completes, it will never call {@code onError} and will effectively swallow errors.
* @param <T>
* the type of the items emitted by both the source and merged {@code Observable}s
*/
public final class OperatorMerge<T> implements Operator<T, Observable<? extends T>> {
final boolean delayErrors;
final int maxConcurrent;
/**
 * Lazy initialization via the inner-class holder idiom: the singleton for the
 * non-error-delaying merge is created only when this holder class is first loaded.
 */
static final class HolderNoDelay {
/** A singleton instance: delayErrors = false, unlimited concurrency. */
static final OperatorMerge<Object> INSTANCE = new OperatorMerge<Object>(false, Integer.MAX_VALUE);
}
/**
 * Lazy initialization via the inner-class holder idiom: the singleton for the
 * error-delaying merge is created only when this holder class is first loaded.
 */
static final class HolderDelayErrors {
/** A singleton instance: delayErrors = true, unlimited concurrency. */
static final OperatorMerge<Object> INSTANCE = new OperatorMerge<Object>(true, Integer.MAX_VALUE);
}
/**
 * Returns the shared, stateless merge operator with unlimited concurrency.
 *
 * @param <T> the common value type
 * @param delayErrors should the merge delay errors?
 * @return a singleton instance of this stateless operator.
 */
@SuppressWarnings("unchecked")
public static <T> OperatorMerge<T> instance(boolean delayErrors) {
    return (OperatorMerge<T>) (delayErrors ? HolderDelayErrors.INSTANCE : HolderNoDelay.INSTANCE);
}
/**
 * Creates (or reuses) an operator instance with the given delayError and
 * maxConcurrency settings. An unlimited {@code maxConcurrent} falls back to
 * the shared singleton returned by {@link #instance(boolean)}.
 *
 * @param <T> the value type
 * @param delayErrors whether error notifications are delayed until all sources finish
 * @param maxConcurrent the maximum number of concurrent subscriptions or Integer.MAX_VALUE for unlimited
 * @return the Operator instance with the given settings
 * @throws IllegalArgumentException if {@code maxConcurrent} is not positive
 */
public static <T> OperatorMerge<T> instance(boolean delayErrors, int maxConcurrent) {
    if (maxConcurrent <= 0) {
        throw new IllegalArgumentException("maxConcurrent > 0 required but it was " + maxConcurrent);
    }
    return maxConcurrent == Integer.MAX_VALUE
            ? OperatorMerge.<T>instance(delayErrors)
            : new OperatorMerge<T>(delayErrors, maxConcurrent);
}
/**
 * Constructs the operator with the given settings.
 *
 * @param delayErrors whether error notifications are held back until all merged sources terminate
 * @param maxConcurrent maximum number of simultaneously subscribed inner Observables
 */
OperatorMerge(boolean delayErrors, int maxConcurrent) {
this.delayErrors = delayErrors;
this.maxConcurrent = maxConcurrent;
}
/**
 * Wires up the merge machinery for one downstream subscriber: a
 * {@code MergeSubscriber} that consumes the source Observables, and a
 * {@code MergeProducer} that tracks the child's requests.
 *
 * @param child the downstream subscriber receiving the merged items
 * @return the subscriber to be given the stream of source Observables
 */
@Override
public Subscriber<Observable<? extends T>> call(final Subscriber<? super T> child) {
    final MergeSubscriber<T> mergeSubscriber = new MergeSubscriber<T>(child, delayErrors, maxConcurrent);
    final MergeProducer<T> mergeProducer = new MergeProducer<T>(mergeSubscriber);
    mergeSubscriber.producer = mergeProducer;
    // Tie the merge subscriber's lifecycle to the child, then expose the producer.
    child.add(mergeSubscriber);
    child.setProducer(mergeProducer);
    return mergeSubscriber;
}
/**
 * Producer that accounts for the downstream's outstanding request amount
 * (it IS the counter, by extending AtomicLong) and triggers the merge
 * subscriber's emission loop when new credit arrives.
 */
static final class MergeProducer<T> extends AtomicLong implements Producer {
/** Serialization id (AtomicLong is Serializable). */
private static final long serialVersionUID = -1214379189873595503L;

/** The subscriber whose emission loop consumes the requested amount. */
final MergeSubscriber<T> subscriber;

public MergeProducer(MergeSubscriber<T> subscriber) {
this.subscriber = subscriber;
}

@Override
public void request(long n) {
if (n > 0) {
// Long.MAX_VALUE means "unbounded" -- no further accounting needed.
if (get() == Long.MAX_VALUE) {
return;
}
// Adds with a cap at Long.MAX_VALUE to avoid overflow.
BackpressureUtils.getAndAddRequest(this, n);
// Drain whatever is available now that credit has been granted.
subscriber.emit();
} else
if (n < 0) {
// n == 0 is deliberately a no-op per the Producer contract.
throw new IllegalArgumentException("n >= 0 required");
}
}

/**
 * Deducts {@code n} emitted items from the outstanding request count.
 *
 * @param n the number of items delivered downstream
 * @return the remaining requested amount
 */
public long produced(int n) {
return addAndGet(-n);
}
}
/**
 * The subscriber that observes Observables.
 * <p>
 * Serializes emissions from the main source (scalar fast-path) and from all
 * inner subscribers through the {@code emitting}/{@code missed} flags guarded
 * by {@code this}.
 * @param <T> the value type
 */
static final class MergeSubscriber<T> extends Subscriber<Observable<? extends T>> {
    final Subscriber<? super T> child;
    final boolean delayErrors;
    final int maxConcurrent;
    MergeProducer<T> producer;
    /** Queue holding scalar values that could not be emitted on the fast path; created lazily. */
    volatile Queue<Object> queue;
    /** Tracks the active subscriptions to sources. */
    volatile CompositeSubscription subscriptions;
    /** Due to the emission loop, we need to store errors somewhere if !delayErrors. */
    volatile ConcurrentLinkedQueue<Throwable> errors;
    final NotificationLite<T> nl;
    volatile boolean done;
    /** Guarded by this. */
    boolean emitting;
    /** Guarded by this. */
    boolean missed;
    final Object innerGuard;
    /** Copy-on-write array, guarded by innerGuard. */
    volatile InnerSubscriber<?>[] innerSubscribers;
    /** Used to generate unique InnerSubscriber IDs. Modified from onNext only. */
    long uniqueId;
    /** Which was the last InnerSubscriber that emitted? Accessed if emitting == true. */
    long lastId;
    /** What was its index in the innerSubscribers array? Accessed if emitting == true. */
    int lastIndex;
    /** An empty array to avoid creating new empty arrays in removeInner. */
    static final InnerSubscriber<?>[] EMPTY = new InnerSubscriber<?>[0];
    /** Number of scalar emissions after which upstream is re-requested in a batch. */
    final int scalarEmissionLimit;
    int scalarEmissionCount;
    public MergeSubscriber(Subscriber<? super T> child, boolean delayErrors, int maxConcurrent) {
        this.child = child;
        this.delayErrors = delayErrors;
        this.maxConcurrent = maxConcurrent;
        this.nl = NotificationLite.instance();
        this.innerGuard = new Object();
        this.innerSubscribers = EMPTY;
        if (maxConcurrent == Integer.MAX_VALUE) {
            scalarEmissionLimit = Integer.MAX_VALUE;
            request(Long.MAX_VALUE);
        } else {
            // Replenish in batches of half the concurrency limit (at least 1).
            scalarEmissionLimit = Math.max(1, maxConcurrent >> 1);
            request(maxConcurrent);
        }
    }
    /** Lazily creates the error queue via double-checked locking on {@code this}. */
    Queue<Throwable> getOrCreateErrorQueue() {
        ConcurrentLinkedQueue<Throwable> q = errors;
        if (q == null) {
            synchronized (this) {
                q = errors;
                if (q == null) {
                    q = new ConcurrentLinkedQueue<Throwable>();
                    errors = q;
                }
            }
        }
        return q;
    }
    /** Lazily creates the composite tracking inner subscriptions; added to this subscriber outside the lock. */
    CompositeSubscription getOrCreateComposite() {
        CompositeSubscription c = subscriptions;
        if (c == null) {
            boolean shouldAdd = false;
            synchronized (this) {
                c = subscriptions;
                if (c == null) {
                    c = new CompositeSubscription();
                    subscriptions = c;
                    shouldAdd = true;
                }
            }
            if (shouldAdd) {
                add(c);
            }
        }
        return c;
    }
    @Override
    public void onNext(Observable<? extends T> t) {
        if (t == null) {
            return;
        }
        // Fast paths for the empty and scalar observables avoid a full subscription.
        if (t == Observable.empty()) {
            emitEmpty();
        } else
        if (t instanceof ScalarSynchronousObservable) {
            tryEmit(((ScalarSynchronousObservable<? extends T>)t).get());
        } else {
            InnerSubscriber<T> inner = new InnerSubscriber<T>(this, uniqueId++);
            addInner(inner);
            t.unsafeSubscribe(inner);
            emit();
        }
    }
    /** Accounts for an empty source and replenishes upstream in batches. */
    void emitEmpty() {
        int produced = scalarEmissionCount + 1;
        if (produced == scalarEmissionLimit) {
            scalarEmissionCount = 0;
            this.requestMore(produced);
        } else {
            scalarEmissionCount = produced;
        }
    }
    /** Delivers the collected error(s) to the child, composing multiple errors into one. */
    private void reportError() {
        List<Throwable> list = new ArrayList<Throwable>(errors);
        if (list.size() == 1) {
            child.onError(list.get(0));
        } else {
            child.onError(new CompositeException(list));
        }
    }
    @Override
    public void onError(Throwable e) {
        getOrCreateErrorQueue().offer(e);
        done = true;
        emit();
    }
    @Override
    public void onCompleted() {
        done = true;
        emit();
    }
    /** Registers an inner subscriber by growing the copy-on-write array under innerGuard. */
    void addInner(InnerSubscriber<T> inner) {
        getOrCreateComposite().add(inner);
        synchronized (innerGuard) {
            InnerSubscriber<?>[] a = innerSubscribers;
            int n = a.length;
            InnerSubscriber<?>[] b = new InnerSubscriber<?>[n + 1];
            System.arraycopy(a, 0, b, 0, n);
            b[n] = inner;
            innerSubscribers = b;
        }
    }
    /** Releases an inner subscriber's queue and removes it from the copy-on-write array. */
    void removeInner(InnerSubscriber<T> inner) {
        RxRingBuffer q = inner.queue;
        if (q != null) {
            q.release();
        }
        // subscription is non-null here because the very first addInner will create it before
        // this can be called
        subscriptions.remove(inner);
        synchronized (innerGuard) {
            InnerSubscriber<?>[] a = innerSubscribers;
            int n = a.length;
            int j = -1;
            // locate the inner
            for (int i = 0; i < n; i++) {
                if (inner.equals(a[i])) {
                    j = i;
                    break;
                }
            }
            if (j < 0) {
                return;
            }
            if (n == 1) {
                innerSubscribers = EMPTY;
                return;
            }
            InnerSubscriber<?>[] b = new InnerSubscriber<?>[n - 1];
            System.arraycopy(a, 0, b, 0, j);
            System.arraycopy(a, j + 1, b, j, n - j - 1);
            innerSubscribers = b;
        }
    }
    /**
     * Tries to emit the value directly to the child if
     * no concurrent emission is happening at the moment.
     * <p>
     * Since the scalar-value queue optimization applies
     * to both the main source and the inner subscribers,
     * we handle things in a shared manner.
     *
     * @param subscriber the inner subscriber the value originates from
     * @param value the value to emit
     */
    void tryEmit(InnerSubscriber<T> subscriber, T value) {
        boolean success = false;
        long r = producer.get();
        if (r != 0L) {
            synchronized (this) {
                // if nobody is emitting and child has available requests
                r = producer.get();
                if (!emitting && r != 0L) {
                    emitting = true;
                    success = true;
                }
            }
        }
        if (success) {
            RxRingBuffer subscriberQueue = subscriber.queue;
            if (subscriberQueue == null || subscriberQueue.isEmpty()) {
                emitScalar(subscriber, value, r);
            } else {
                // older values are queued; preserve order by enqueueing and draining
                queueScalar(subscriber, value);
                emitLoop();
            }
        } else {
            queueScalar(subscriber, value);
            emit();
        }
    }
    protected void queueScalar(InnerSubscriber<T> subscriber, T value) {
        /*
         * If the attempt to make a fast-path emission failed
         * due to lack of requests or an ongoing emission,
         * enqueue the value and try the slow emission path.
         */
        RxRingBuffer q = subscriber.queue;
        if (q == null) {
            q = RxRingBuffer.getSpscInstance();
            subscriber.add(q);
            subscriber.queue = q;
        }
        try {
            q.onNext(nl.next(value));
        } catch (MissingBackpressureException ex) {
            subscriber.unsubscribe();
            subscriber.onError(ex);
            return;
        } catch (IllegalStateException ex) {
            // the ring buffer may throw if it was released concurrently
            if (!subscriber.isUnsubscribed()) {
                subscriber.unsubscribe();
                subscriber.onError(ex);
            }
            return;
        }
    }
    /** Fast-path emission of a single inner value while holding the emission right. */
    protected void emitScalar(InnerSubscriber<T> subscriber, T value, long r) {
        boolean skipFinal = false;
        try {
            try {
                child.onNext(value);
            } catch (Throwable t) {
                if (!delayErrors) {
                    Exceptions.throwIfFatal(t);
                    skipFinal = true;
                    subscriber.unsubscribe();
                    subscriber.onError(t);
                    return;
                }
                getOrCreateErrorQueue().offer(t);
            }
            if (r != Long.MAX_VALUE) {
                producer.produced(1);
            }
            subscriber.requestMore(1);
            // check if some state changed while emitting
            synchronized (this) {
                skipFinal = true;
                if (!missed) {
                    emitting = false;
                    return;
                }
                missed = false;
            }
        } finally {
            if (!skipFinal) {
                synchronized (this) {
                    emitting = false;
                }
            }
        }
        /*
         * In the synchronized block below request(1) we check
         * if there was a concurrent emission attempt and if there was,
         * we stay in emission mode and enter the emission loop
         * which will take care all the queued up state and
         * emission possibilities.
         */
        emitLoop();
    }
    public void requestMore(long n) {
        request(n);
    }
    /**
     * Tries to emit the value directly to the child if
     * no concurrent emission is happening at the moment.
     * <p>
     * Since the scalar-value queue optimization applies
     * to both the main source and the inner subscribers,
     * we handle things in a shared manner.
     *
     * @param value the scalar value from the main source to emit
     */
    void tryEmit(T value) {
        boolean success = false;
        long r = producer.get();
        if (r != 0L) {
            synchronized (this) {
                // if nobody is emitting and child has available requests
                r = producer.get();
                if (!emitting && r != 0L) {
                    emitting = true;
                    success = true;
                }
            }
        }
        if (success) {
            Queue<Object> mainQueue = queue;
            if (mainQueue == null || mainQueue.isEmpty()) {
                emitScalar(value, r);
            } else {
                queueScalar(value);
                emitLoop();
            }
        } else {
            queueScalar(value);
            emit();
        }
    }
    protected void queueScalar(T value) {
        /*
         * If the attempt to make a fast-path emission failed
         * due to lack of requests or an ongoing emission,
         * enqueue the value and try the slow emission path.
         */
        Queue<Object> q = this.queue;
        if (q == null) {
            int mc = maxConcurrent;
            // Pick the cheapest SPSC queue implementation for the capacity needed.
            if (mc == Integer.MAX_VALUE) {
                q = new SpscUnboundedAtomicArrayQueue<Object>(RxRingBuffer.SIZE);
            } else {
                if (Pow2.isPowerOfTwo(mc)) {
                    if (UnsafeAccess.isUnsafeAvailable()) {
                        q = new SpscArrayQueue<Object>(mc);
                    } else {
                        q = new SpscAtomicArrayQueue<Object>(mc);
                    }
                } else {
                    q = new SpscExactAtomicArrayQueue<Object>(mc);
                }
            }
            this.queue = q;
        }
        if (!q.offer(nl.next(value))) {
            // bounded queue overflow indicates a backpressure violation upstream
            unsubscribe();
            onError(OnErrorThrowable.addValueAsLastCause(new MissingBackpressureException(), value));
            return;
        }
    }
    /** Fast-path emission of a single main-source scalar while holding the emission right. */
    protected void emitScalar(T value, long r) {
        boolean skipFinal = false;
        try {
            try {
                child.onNext(value);
            } catch (Throwable t) {
                if (!delayErrors) {
                    Exceptions.throwIfFatal(t);
                    skipFinal = true;
                    this.unsubscribe();
                    this.onError(t);
                    return;
                }
                getOrCreateErrorQueue().offer(t);
            }
            if (r != Long.MAX_VALUE) {
                producer.produced(1);
            }
            // replenish the main source in batches of scalarEmissionLimit
            int produced = scalarEmissionCount + 1;
            if (produced == scalarEmissionLimit) {
                scalarEmissionCount = 0;
                this.requestMore(produced);
            } else {
                scalarEmissionCount = produced;
            }
            // check if some state changed while emitting
            synchronized (this) {
                skipFinal = true;
                if (!missed) {
                    emitting = false;
                    return;
                }
                missed = false;
            }
        } finally {
            if (!skipFinal) {
                synchronized (this) {
                    emitting = false;
                }
            }
        }
        /*
         * In the synchronized block below request(1) we check
         * if there was a concurrent emission attempt and if there was,
         * we stay in emission mode and enter the emission loop
         * which will take care all the queued up state and
         * emission possibilities.
         */
        emitLoop();
    }
    /** Enters the emission loop if nobody else is emitting; otherwise flags a missed round. */
    void emit() {
        synchronized (this) {
            if (emitting) {
                missed = true;
                return;
            }
            emitting = true;
        }
        emitLoop();
    }
    /**
     * The standard emission loop serializing events and requests.
     */
    void emitLoop() {
        boolean skipFinal = false;
        try {
            final Subscriber<? super T> child = this.child;
            for (;;) {
                // eagerly check if child unsubscribed or we reached a terminal state.
                if (checkTerminate()) {
                    skipFinal = true;
                    return;
                }
                Queue<Object> svq = queue;
                long r = producer.get();
                boolean unbounded = r == Long.MAX_VALUE;
                // count the number of 'completed' sources to replenish them in batches
                int replenishMain = 0;
                // try emitting as many scalars as possible
                if (svq != null) {
                    for (;;) {
                        int scalarEmission = 0;
                        Object o = null;
                        while (r > 0) {
                            o = svq.poll();
                            // eagerly check if child unsubscribed or we reached a terminal state.
                            if (checkTerminate()) {
                                skipFinal = true;
                                return;
                            }
                            if (o == null) {
                                break;
                            }
                            T v = nl.getValue(o);
                            // if child throws, report bounce it back immediately
                            try {
                                child.onNext(v);
                            } catch (Throwable t) {
                                if (!delayErrors) {
                                    Exceptions.throwIfFatal(t);
                                    skipFinal = true;
                                    unsubscribe();
                                    child.onError(t);
                                    return;
                                }
                                getOrCreateErrorQueue().offer(t);
                            }
                            replenishMain++;
                            scalarEmission++;
                            r--;
                        }
                        if (scalarEmission > 0) {
                            if (unbounded) {
                                r = Long.MAX_VALUE;
                            } else {
                                r = producer.produced(scalarEmission);
                            }
                        }
                        if (r == 0L || o == null) {
                            break;
                        }
                    }
                }
                /*
                 * We need to read done before innerSubscribers because innerSubscribers are added
                 * before done is set to true. If it were the other way around, we could read an empty
                 * innerSubscribers, get paused and then read a done flag but an async producer
                 * might have added more subscribers between the two.
                 */
                boolean d = done;
                // re-read svq because it could have been created
                // asynchronously just before done was set to true.
                svq = queue;
                // read the current set of inner subscribers
                InnerSubscriber<?>[] inner = innerSubscribers;
                int n = inner.length;
                // check if upstream is done, there are no scalar values
                // and no active inner subscriptions
                if (d && (svq == null || svq.isEmpty()) && n == 0) {
                    Queue<Throwable> e = errors;
                    if (e == null || e.isEmpty()) {
                        child.onCompleted();
                    } else {
                        reportError();
                    }
                    skipFinal = true;
                    return;
                }
                boolean innerCompleted = false;
                if (n > 0) {
                    // let's continue the round-robin emission from last location
                    long startId = lastId;
                    int index = lastIndex;
                    // in case there were changes in the array or the index
                    // no longer points to the inner with the cached id
                    if (n <= index || inner[index].id != startId) {
                        if (n <= index) {
                            index = 0;
                        }
                        // try locating the inner with the cached index
                        int j = index;
                        for (int i = 0; i < n; i++) {
                            if (inner[j].id == startId) {
                                break;
                            }
                            // wrap around in round-robin fashion
                            j++;
                            if (j == n) {
                                j = 0;
                            }
                        }
                        // if we found it again, j will point to it
                        // otherwise, we continue with the replacement at j
                        index = j;
                        lastIndex = j;
                        lastId = inner[j].id;
                    }
                    int j = index;
                    // loop through all sources once to avoid delaying any new sources too much
                    for (int i = 0; i < n; i++) {
                        // eagerly check if child unsubscribed or we reached a terminal state.
                        if (checkTerminate()) {
                            skipFinal = true;
                            return;
                        }
                        @SuppressWarnings("unchecked")
                        InnerSubscriber<T> is = (InnerSubscriber<T>)inner[j];
                        Object o = null;
                        for (;;) {
                            int produced = 0;
                            while (r > 0) {
                                // eagerly check if child unsubscribed or we reached a terminal state.
                                if (checkTerminate()) {
                                    skipFinal = true;
                                    return;
                                }
                                RxRingBuffer q = is.queue;
                                if (q == null) {
                                    break;
                                }
                                o = q.poll();
                                if (o == null) {
                                    break;
                                }
                                T v = nl.getValue(o);
                                // if child throws, report bounce it back immediately
                                try {
                                    child.onNext(v);
                                } catch (Throwable t) {
                                    skipFinal = true;
                                    Exceptions.throwIfFatal(t);
                                    try {
                                        child.onError(t);
                                    } finally {
                                        unsubscribe();
                                    }
                                    return;
                                }
                                r--;
                                produced++;
                            }
                            if (produced > 0) {
                                if (!unbounded) {
                                    r = producer.produced(produced);
                                } else {
                                    r = Long.MAX_VALUE;
                                }
                                is.requestMore(produced);
                            }
                            // if we run out of requests or queued values, break
                            if (r == 0 || o == null) {
                                break;
                            }
                        }
                        boolean innerDone = is.done;
                        RxRingBuffer innerQueue = is.queue;
                        if (innerDone && (innerQueue == null || innerQueue.isEmpty())) {
                            removeInner(is);
                            if (checkTerminate()) {
                                skipFinal = true;
                                return;
                            }
                            replenishMain++;
                            innerCompleted = true;
                        }
                        // if we run out of requests, don't try the other sources
                        if (r == 0) {
                            break;
                        }
                        // wrap around in round-robin fashion
                        j++;
                        if (j == n) {
                            j = 0;
                        }
                    }
                    // if we run out of requests or just completed a round, save the index and id
                    lastIndex = j;
                    lastId = inner[j].id;
                }
                if (replenishMain > 0) {
                    request(replenishMain);
                }
                // if one or more inner completed, loop again to see if we can terminate the whole stream
                if (innerCompleted) {
                    continue;
                }
                // in case there were updates to the state, we loop again
                synchronized (this) {
                    if (!missed) {
                        skipFinal = true;
                        emitting = false;
                        break;
                    }
                    missed = false;
                }
            }
        } finally {
            if (!skipFinal) {
                synchronized (this) {
                    emitting = false;
                }
            }
        }
    }
    /**
     * Check if the operator reached some terminal state: child unsubscribed,
     * an error was reported and we don't delay errors.
     * @return true if the child unsubscribed or there are errors available and merge doesn't delay errors.
     */
    boolean checkTerminate() {
        if (child.isUnsubscribed()) {
            return true;
        }
        Queue<Throwable> e = errors;
        if (!delayErrors && (e != null && !e.isEmpty())) {
            try {
                reportError();
            } finally {
                unsubscribe();
            }
            return true;
        }
        return false;
    }
}
/**
 * Subscribes to one inner Observable, prefetching a ring-buffer's worth of
 * items and forwarding them to the parent MergeSubscriber.
 * @param <T> the value type
 */
static final class InnerSubscriber<T> extends Subscriber<T> {
    final MergeSubscriber<T> parent;
    /** Unique id used by the parent's round-robin bookkeeping. */
    final long id;
    volatile boolean done;
    /** Lazily created queue for values that could not be emitted immediately. */
    volatile RxRingBuffer queue;
    /** Remaining prefetched amount before a batch re-request is issued. */
    int outstanding;
    /** Re-request threshold: replenish once 3/4 of the prefetch is consumed. */
    static final int LIMIT = RxRingBuffer.SIZE / 4;
    public InnerSubscriber(MergeSubscriber<T> parent, long id) {
        this.parent = parent;
        this.id = id;
    }
    @Override
    public void onStart() {
        // Prefetch a full ring-buffer's worth of items from this source.
        outstanding = RxRingBuffer.SIZE;
        request(RxRingBuffer.SIZE);
    }
    @Override
    public void onNext(T t) {
        parent.tryEmit(this, t);
    }
    @Override
    public void onError(Throwable e) {
        done = true;
        parent.getOrCreateErrorQueue().offer(e);
        parent.emit();
    }
    @Override
    public void onCompleted() {
        done = true;
        parent.emit();
    }
    /** Accounts for n consumed items and re-requests in a batch once below LIMIT. */
    public void requestMore(long n) {
        int r = outstanding - (int)n;
        if (r > LIMIT) {
            outstanding = r;
            return;
        }
        // top the outstanding count back up to a full buffer
        outstanding = RxRingBuffer.SIZE;
        int k = RxRingBuffer.SIZE - r;
        if (k > 0) {
            request(k);
        }
    }
}}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.zk;
import com.google.common.collect.ImmutableMap;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.function.BooleanSupplier;
import com.google.common.collect.ImmutableList;
import org.I0Itec.zkclient.IZkDataListener;
import org.I0Itec.zkclient.ZkClient;
import org.I0Itec.zkclient.ZkConnection;
import org.I0Itec.zkclient.exception.ZkInterruptedException;
import org.I0Itec.zkclient.exception.ZkNodeExistsException;
import org.apache.commons.lang3.reflect.FieldUtils;
import org.apache.samza.SamzaException;
import org.apache.samza.config.MapConfig;
import org.apache.samza.container.TaskName;
import org.apache.samza.job.model.ContainerModel;
import org.apache.samza.job.model.JobModel;
import org.apache.samza.runtime.LocationId;
import org.apache.samza.testUtils.EmbeddedZookeeper;
import org.apache.samza.util.NoOpMetricsRegistry;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.rules.Timeout;
import org.mockito.Mockito;
public class TestZkUtils {
private static EmbeddedZookeeper zkServer = null;
private static final ZkKeyBuilder KEY_BUILDER = new ZkKeyBuilder("test");
private ZkClient zkClient = null;
private static final int SESSION_TIMEOUT_MS = 500;
private static final int CONNECTION_TIMEOUT_MS = 1000;
private ZkUtils zkUtils;
@Rule
// Declared public to honor junit contract.
public final ExpectedException expectedException = ExpectedException.none();
@Rule
public Timeout testTimeOutInMillis = new Timeout(120000);
@Before
public void testSetup() {
try {
zkServer = new EmbeddedZookeeper();
zkServer.setup();
zkClient = new ZkClient(
new ZkConnection("127.0.0.1:" + zkServer.getPort(), SESSION_TIMEOUT_MS),
CONNECTION_TIMEOUT_MS);
} catch (Exception e) {
Assert.fail("Client connection setup failed. Aborting tests..");
}
try {
zkClient.createPersistent(KEY_BUILDER.getProcessorsPath(), true);
} catch (ZkNodeExistsException e) {
// Do nothing
}
zkUtils = getZkUtils();
zkUtils.connect();
}
@After
public void testTeardown() {
if (zkClient != null) {
try {
zkUtils.close();
} finally {
zkServer.teardown();
}
}
}
private ZkUtils getZkUtils() {
return new ZkUtils(KEY_BUILDER, zkClient, CONNECTION_TIMEOUT_MS,
SESSION_TIMEOUT_MS, new NoOpMetricsRegistry());
}
@Test
public void testRegisterProcessorId() {
String assignedPath = zkUtils.registerProcessorAndGetId(new ProcessorData("host", "1"));
Assert.assertTrue(assignedPath.startsWith(KEY_BUILDER.getProcessorsPath()));
// Calling registerProcessorId again should return the same ephemeralPath as long as the session is valid
Assert.assertTrue(zkUtils.registerProcessorAndGetId(new ProcessorData("host", "1")).equals(assignedPath));
}
@Test
public void testGetActiveProcessors() {
Assert.assertEquals(0, zkUtils.getSortedActiveProcessorsZnodes().size());
zkUtils.registerProcessorAndGetId(new ProcessorData("processorData", "1"));
Assert.assertEquals(1, zkUtils.getSortedActiveProcessorsZnodes().size());
}
@Test
public void testGetActiveProcessorIdShouldReturnEmptyForNonExistingZookeeperNodes() {
List<String> processorsIDs = zkUtils.getActiveProcessorsIDs(ImmutableList.of("node1", "node2"));
Assert.assertEquals(0, processorsIDs.size());
}
@Test
public void testReadAfterWriteTaskLocality() {
zkUtils.writeTaskLocality(new TaskName("task-1"), new LocationId("LocationId-1"));
zkUtils.writeTaskLocality(new TaskName("task-2"), new LocationId("LocationId-2"));
Map<TaskName, LocationId> taskLocality = ImmutableMap.of(new TaskName("task-1"), new LocationId("LocationId-1"),
new TaskName("task-2"), new LocationId("LocationId-2"));
Assert.assertEquals(taskLocality, zkUtils.readTaskLocality());
}
@Test
public void testReadWhenTaskLocalityDoesNotExist() {
Map<TaskName, LocationId> taskLocality = zkUtils.readTaskLocality();
Assert.assertEquals(0, taskLocality.size());
}
@Test
public void testWriteTaskLocalityShouldUpdateTheExistingValue() {
zkUtils.writeTaskLocality(new TaskName("task-1"), new LocationId("LocationId-1"));
Map<TaskName, LocationId> taskLocality = ImmutableMap.of(new TaskName("task-1"), new LocationId("LocationId-1"));
Assert.assertEquals(taskLocality, zkUtils.readTaskLocality());
zkUtils.writeTaskLocality(new TaskName("task-1"), new LocationId("LocationId-2"));
taskLocality = ImmutableMap.of(new TaskName("task-1"), new LocationId("LocationId-2"));
Assert.assertEquals(taskLocality, zkUtils.readTaskLocality());
}
@Test
public void testReadTaskLocalityShouldReturnAllTheExistingLocalityValue() {
zkUtils.writeTaskLocality(new TaskName("task-1"), new LocationId("LocationId-1"));
zkUtils.writeTaskLocality(new TaskName("task-2"), new LocationId("LocationId-2"));
zkUtils.writeTaskLocality(new TaskName("task-3"), new LocationId("LocationId-3"));
zkUtils.writeTaskLocality(new TaskName("task-4"), new LocationId("LocationId-4"));
zkUtils.writeTaskLocality(new TaskName("task-5"), new LocationId("LocationId-5"));
Map<TaskName, LocationId> taskLocality = ImmutableMap.of(new TaskName("task-1"), new LocationId("LocationId-1"),
new TaskName("task-2"), new LocationId("LocationId-2"),
new TaskName("task-3"), new LocationId("LocationId-3"),
new TaskName("task-4"), new LocationId("LocationId-4"),
new TaskName("task-5"), new LocationId("LocationId-5"));
Assert.assertEquals(taskLocality, zkUtils.readTaskLocality());
}
@Test
public void testGetAllProcessorNodesShouldReturnEmptyForNonExistingZookeeperNodes() {
List<ZkUtils.ProcessorNode> processorsIDs = zkUtils.getAllProcessorNodes();
Assert.assertEquals(0, processorsIDs.size());
}
@Test
public void testZKProtocolVersion() {
// first time connect, version should be set to ZkUtils.ZK_PROTOCOL_VERSION
ZkLeaderElector le = new ZkLeaderElector("1", zkUtils);
zkUtils.validateZkVersion();
String root = zkUtils.getKeyBuilder().getRootPath();
String ver = zkUtils.getZkClient().readData(root);
Assert.assertEquals(ZkUtils.ZK_PROTOCOL_VERSION, ver);
// do it again (in case original value was null
zkUtils.validateZkVersion();
ver = zkUtils.getZkClient().readData(root);
Assert.assertEquals(ZkUtils.ZK_PROTOCOL_VERSION, ver);
// now negative case
zkUtils.getZkClient().writeData(root, "2.0");
try {
zkUtils.validateZkVersion();
Assert.fail("Expected to fail because of version mismatch 2.0 vs 1.0");
} catch (SamzaException e) {
// expected
}
// validate future values, let's say that current version should be 3.0
try {
Field f = zkUtils.getClass().getDeclaredField("ZK_PROTOCOL_VERSION");
FieldUtils.removeFinalModifier(f);
f.set(null, "3.0");
} catch (Exception e) {
System.out.println(e);
Assert.fail();
}
try {
zkUtils.validateZkVersion();
Assert.fail("Expected to fail because of version mismatch 2.0 vs 3.0");
} catch (SamzaException e) {
// expected
}
}
@Test
public void testGetProcessorsIDs() {
Assert.assertEquals(0, zkUtils.getSortedActiveProcessorsIDs().size());
zkUtils.registerProcessorAndGetId(new ProcessorData("host1", "1"));
List<String> l = zkUtils.getSortedActiveProcessorsIDs();
Assert.assertEquals(1, l.size());
new ZkUtils(KEY_BUILDER, zkClient, CONNECTION_TIMEOUT_MS, SESSION_TIMEOUT_MS, new NoOpMetricsRegistry()).registerProcessorAndGetId(
new ProcessorData("host2", "2"));
l = zkUtils.getSortedActiveProcessorsIDs();
Assert.assertEquals(2, l.size());
Assert.assertEquals(" ID1 didn't match", "1", l.get(0));
Assert.assertEquals(" ID2 didn't match", "2", l.get(1));
}
@Test
public void testSubscribeToJobModelVersionChange() {
ZkKeyBuilder keyBuilder = new ZkKeyBuilder("test");
String root = keyBuilder.getRootPath();
zkClient.deleteRecursive(root);
class Result {
String res = "";
public String getRes() {
return res;
}
public void updateRes(String newRes) {
res = newRes;
}
}
Assert.assertFalse(zkUtils.exists(root));
// create the paths
zkUtils.validatePaths(new String[]{root, keyBuilder.getJobModelVersionPath(), keyBuilder.getProcessorsPath()});
Assert.assertTrue(zkUtils.exists(root));
Assert.assertTrue(zkUtils.exists(keyBuilder.getJobModelVersionPath()));
Assert.assertTrue(zkUtils.exists(keyBuilder.getProcessorsPath()));
final Result res = new Result();
// define the callback
IZkDataListener dataListener = new IZkDataListener() {
@Override
public void handleDataChange(String dataPath, Object data)
throws Exception {
res.updateRes((String) data);
}
@Override
public void handleDataDeleted(String dataPath)
throws Exception {
Assert.fail("Data wasn't deleted;");
}
};
// subscribe
zkClient.subscribeDataChanges(keyBuilder.getJobModelVersionPath(), dataListener);
zkClient.subscribeDataChanges(keyBuilder.getProcessorsPath(), dataListener);
// update
zkClient.writeData(keyBuilder.getJobModelVersionPath(), "newVersion");
// verify
Assert.assertTrue(testWithDelayBackOff(() -> "newVersion".equals(res.getRes()), 2, 1000));
// update again
zkClient.writeData(keyBuilder.getProcessorsPath(), "newProcessor");
Assert.assertTrue(testWithDelayBackOff(() -> "newProcessor".equals(res.getRes()), 2, 1000));
}
/**
* Create two duplicate processors with same processorId.
* Second creation should fail with exception.
*/
@Test
public void testRegisterProcessorAndGetIdShouldFailForDuplicateProcessorRegistration() {
final String testHostName = "localhost";
final String testProcessId = "testProcessorId";
ProcessorData processorData1 = new ProcessorData(testHostName, testProcessId);
// Register processor 1 which is not duplicate, this registration should succeed.
zkUtils.registerProcessorAndGetId(processorData1);
ZkUtils zkUtils1 = getZkUtils();
zkUtils1.connect();
ProcessorData duplicateProcessorData = new ProcessorData(testHostName, testProcessId);
// Registration of the duplicate processor should fail.
expectedException.expect(SamzaException.class);
zkUtils1.registerProcessorAndGetId(duplicateProcessorData);
}
@Test
public void testPublishNewJobModel() {
ZkKeyBuilder keyBuilder = new ZkKeyBuilder("test");
String root = keyBuilder.getRootPath();
zkClient.deleteRecursive(root);
String version = "1";
String oldVersion = "0";
zkUtils.validatePaths(new String[]{root, keyBuilder.getJobModelPathPrefix(), keyBuilder.getJobModelVersionPath()});
zkUtils.publishJobModelVersion(oldVersion, version);
Assert.assertEquals(version, zkUtils.getJobModelVersion());
String newerVersion = Long.toString(Long.valueOf(version) + 1);
zkUtils.publishJobModelVersion(version, newerVersion);
Assert.assertEquals(newerVersion, zkUtils.getJobModelVersion());
try {
zkUtils.publishJobModelVersion(oldVersion, "10"); //invalid new version
Assert.fail("publish invalid version should've failed");
} catch (SamzaException e) {
// expected
}
// create job model
Map<String, String> configMap = new HashMap<>();
Map<String, ContainerModel> containers = new HashMap<>();
MapConfig config = new MapConfig(configMap);
JobModel jobModel = new JobModel(config, containers);
zkUtils.publishJobModel(version, jobModel);
Assert.assertEquals(jobModel, zkUtils.getJobModel(version));
}
@Test
public void testCleanUpZkJobModels() {
String root = zkUtils.getKeyBuilder().getJobModelPathPrefix();
System.out.println("root=" + root);
zkUtils.getZkClient().createPersistent(root, true);
// generate multiple version
for (int i = 101; i < 110; i++) {
zkUtils.publishJobModel(String.valueOf(i), null);
}
// clean all of the versions except 5 most recent ones
zkUtils.deleteOldJobModels(5);
Assert.assertEquals(Arrays.asList("105", "106", "107", "108", "109"), zkUtils.getZkClient().getChildren(root));
}
@Test
public void testCleanUpZkBarrierVersion() {
String root = zkUtils.getKeyBuilder().getJobModelVersionBarrierPrefix();
zkUtils.getZkClient().createPersistent(root, true);
ZkBarrierForVersionUpgrade barrier = new ZkBarrierForVersionUpgrade(root, zkUtils, null, null);
for (int i = 200; i < 210; i++) {
barrier.create(String.valueOf(i), new ArrayList<>(Arrays.asList(i + "a", i + "b", i + "c")));
}
zkUtils.deleteOldBarrierVersions(5);
List<String> zNodeIds = zkUtils.getZkClient().getChildren(root);
Collections.sort(zNodeIds);
Assert.assertEquals(Arrays.asList("barrier_205", "barrier_206", "barrier_207", "barrier_208", "barrier_209"),
zNodeIds);
}
@Test
public void testCleanUpZk() {
String pathA = "/path/testA";
String pathB = "/path/testB";
zkUtils.getZkClient().createPersistent(pathA, true);
zkUtils.getZkClient().createPersistent(pathB, true);
// Create 100 nodes
for (int i = 0; i < 20; i++) {
String p1 = pathA + "/" + i;
zkUtils.getZkClient().createPersistent(p1, true);
zkUtils.getZkClient().createPersistent(p1 + "/something1", true);
zkUtils.getZkClient().createPersistent(p1 + "/something2", true);
String p2 = pathB + "/some_" + i;
zkUtils.getZkClient().createPersistent(p2, true);
zkUtils.getZkClient().createPersistent(p2 + "/something1", true);
zkUtils.getZkClient().createPersistent(p2 + "/something2", true);
}
List<String> zNodeIds = new ArrayList<>();
// empty list
zkUtils.deleteOldVersionPath(pathA, zNodeIds, 10, new Comparator<String>() {
@Override
public int compare(String o1, String o2) {
return o1.compareTo(o2);
}
});
zNodeIds = zkUtils.getZkClient().getChildren(pathA);
zkUtils.deleteOldVersionPath(pathA, zNodeIds, 10, new Comparator<String>() {
@Override
public int compare(String o1, String o2) {
return Integer.valueOf(o1) - Integer.valueOf(o2);
}
});
for (int i = 0; i < 10; i++) {
// should be gone
String p1 = pathA + "/" + i;
Assert.assertFalse("path " + p1 + " exists", zkUtils.getZkClient().exists(p1));
}
for (int i = 10; i < 20; i++) {
// should be gone
String p1 = pathA + "/" + i;
Assert.assertTrue("path " + p1 + " exists", zkUtils.getZkClient().exists(p1));
}
zNodeIds = zkUtils.getZkClient().getChildren(pathB);
zkUtils.deleteOldVersionPath(pathB, zNodeIds, 1, new Comparator<String>() {
@Override
public int compare(String o1, String o2) {
return Integer.valueOf(o1.substring(o1.lastIndexOf("_") + 1)) - Integer
.valueOf(o2.substring(o2.lastIndexOf("_") + 1));
}
});
for (int i = 0; i < 19; i++) {
// should be gone
String p1 = pathB + "/" + i;
Assert.assertFalse("path " + p1 + " exists", zkUtils.getZkClient().exists(p1));
}
for (int i = 19; i < 20; i++) {
// should be gone
String p1 = pathB + "/some_" + i;
Assert.assertTrue("path " + p1 + " exists", zkUtils.getZkClient().exists(p1));
}
}
public static boolean testWithDelayBackOff(BooleanSupplier cond, long startDelayMs, long maxDelayMs) {
long delay = startDelayMs;
while (delay < maxDelayMs) {
if (cond.getAsBoolean())
return true;
try {
Thread.sleep(delay);
} catch (InterruptedException e) {
return false;
}
delay *= 2;
}
return false;
}
public static void sleepMs(long delay) {
try {
Thread.sleep(delay);
} catch (InterruptedException e) {
Assert.fail("Sleep was interrupted");
}
}
@Test
public void testgetNextJobModelVersion() {
// Set up the Zk base paths for testing.
ZkKeyBuilder keyBuilder = new ZkKeyBuilder("test");
String root = keyBuilder.getRootPath();
zkClient.deleteRecursive(root);
zkUtils.validatePaths(new String[]{root, keyBuilder.getJobModelPathPrefix(), keyBuilder.getJobModelVersionPath()});
String version = "1";
String oldVersion = "0";
// Set zkNode JobModelVersion to 1.
zkUtils.publishJobModelVersion(oldVersion, version);
Assert.assertEquals(version, zkUtils.getJobModelVersion());
// Publish JobModel with a higher version (2).
zkUtils.publishJobModel("2", new JobModel(new MapConfig(), new HashMap<>()));
// Get on the JobModel version should return 2, taking into account the published version 2.
Assert.assertEquals("3", zkUtils.getNextJobModelVersion(zkUtils.getJobModelVersion()));
}
@Test
public void testDeleteProcessorNodeShouldDeleteTheCorrectProcessorNode() {
  // Register two processors through two independent ZkUtils clients.
  String firstProcessorId = "processorId1";
  String secondProcessorId = "processorId2";
  ZkUtils firstClient = getZkUtils();
  ZkUtils secondClient = getZkUtils();
  firstClient.registerProcessorAndGetId(new ProcessorData("host1", firstProcessorId));
  secondClient.registerProcessorAndGetId(new ProcessorData("host2", secondProcessorId));
  // Deleting the first processor's node must leave only the second one registered.
  firstClient.deleteProcessorNode(firstProcessorId);
  List<String> remaining = firstClient.getSortedActiveProcessorsIDs();
  Assert.assertEquals(ImmutableList.of(secondProcessorId), remaining);
}
@Test
public void testCloseShouldRetryOnceOnInterruptedException() {
  ZkClient mockZkClient = Mockito.mock(ZkClient.class);
  ZkUtils utils = new ZkUtils(KEY_BUILDER, mockZkClient, CONNECTION_TIMEOUT_MS, SESSION_TIMEOUT_MS, new NoOpMetricsRegistry());
  // The first close() attempt is interrupted; the retry is allowed to succeed.
  Mockito.doThrow(new ZkInterruptedException(new InterruptedException()))
      .doAnswer(invocation -> null)
      .when(mockZkClient).close();
  utils.close();
  // close() must have been invoked exactly twice: the interrupted attempt plus one retry.
  Mockito.verify(mockZkClient, Mockito.times(2)).close();
}
@Test
public void testCloseShouldTearDownZkConnectionOnInterruptedException() throws Exception {
// Latch is never counted down: the spawned thread blocks on it until interrupted.
CountDownLatch latch = new CountDownLatch(1);
// Establish connection with the zookeeper server.
ZkClient zkClient = new ZkClient("127.0.0.1:" + zkServer.getPort());
ZkUtils zkUtils = new ZkUtils(KEY_BUILDER, zkClient, CONNECTION_TIMEOUT_MS, SESSION_TIMEOUT_MS, new NoOpMetricsRegistry());
Thread threadToInterrupt = new Thread(() -> {
try {
latch.await();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
// close() runs with the interrupt flag set; it should still tear down the client.
zkUtils.close();
});
threadToInterrupt.start();
// Peek at ZkClient's private "_closed" flag via reflection to observe teardown.
Field field = ZkClient.class.getDeclaredField("_closed");
field.setAccessible(true);
Assert.assertFalse(field.getBoolean(zkClient));
threadToInterrupt.interrupt();
threadToInterrupt.join();
// Despite the interruption, close() must have marked the client as closed.
Assert.assertTrue(field.getBoolean(zkClient));
}
}
| |
import objectdraw.*;
import java.awt.*;
import java.awt.image.*;
import javax.swing.*;
import javax.swing.event.*;
import java.awt.event.*;
/**
* A program to illustrate the use of matrices by implementing some simple
* transformations on the pixels of a digital image
*/
public class ImageEdit extends FrameWindowController implements ChangeListener, ActionListener {
    /**
     * Space to leave between the original and transformed images
     */
    private static final double SPACER = 5;

    /**
     * range for JSlider
     */
    private static final int ADJUSTMENTRANGE = 200;

    /**
     * maximum range of pixels used for blur
     */
    private static final int MAXBLURRANGE = 5;

    /** largest possible brightness: length of the (255,255,255) RGB vector */
    private static final double MAXBRIGHTNESS = Math.sqrt(3*255*255);

    /**
     * the "current" blur range (to avoid time consuming recomputation)
     */
    private int oldRange;

    /**
     * the original image
     */
    private Image source;

    /**
     * the original and transformed views of the image
     */
    private VisibleImage orig, preview;

    /**
     * the scaling factor used when displaying images
     */
    private double origScale;

    /**
     * width and height of original in pixels
     */
    private int origWidth, origHeight;

    /**
     * the color values for the original pixels, indexed [column][row]
     */
    Color sourcePixels[][];

    /**
     * the GUI controls
     */
    private JSlider adjustment;
    private JComboBox optionSelection;

    /**
     * Initialize the user interface and display the image loaded by the constructor
     */
    public void begin() {
        // create the transformation selection menu
        optionSelection = new JComboBox();
        optionSelection.addItem("Convert to grayscale");
        optionSelection.addItem("Vertical inversion");
        optionSelection.addItem("Horizontal reversal");
        optionSelection.addItem("Rectangular averaging");
        optionSelection.addItem("Rotate");
        getContentPane().add(optionSelection, BorderLayout.SOUTH);
        optionSelection.addActionListener(this);

        // create the slider
        adjustment = new JSlider(SwingConstants.VERTICAL, 0, ADJUSTMENTRANGE, ADJUSTMENTRANGE);
        getContentPane().add(adjustment, BorderLayout.EAST);
        adjustment.addChangeListener(this);
        validate();

        // get the image's pixels and store them in a 2-D array
        sourcePixels = getPixelMap(source);

        // display image scaled to fit on display
        orig = new VisibleImage(source, 0, 0, canvas);
        origScale = getScaleFactor(source);
        orig.setWidth(origWidth * origScale);
        orig.setHeight(origHeight * origScale);
        updatePreview();
    }

    /**
     * Construct the editor on the named image file.
     * Parameter image is the file name of the image to edit
     */
    public ImageEdit(String image) {
        source = getImage(image);
    }

    /**
     * convert all the pixels in an image into grayscale
     */
    private void adjustSaturation() {
        Color transformed[][] = new Color[sourcePixels.length][sourcePixels[0].length];
        for (int col = 0; col < transformed.length; col++) {
            for (int row = 0; row < transformed[col].length; row++) {
                transformed[col][row] = grayscalePixel(sourcePixels[col][row]);
            }
        }
        displayPreview(transformed);
    }

    /**
     * convert a single pixel into its grayscale equivalent
     * Parameter c is the color of pixel to be changed to greyscale
     */
    private Color grayscalePixel(Color c) {
        int red = c.getRed();
        int green = c.getGreen();
        int blue = c.getBlue();
        // brightness is the length of the RGB vector, rescaled into 0..255
        int brightness = (int) (255 * Math.sqrt(red*red + green*green + blue*blue) /
                                MAXBRIGHTNESS);
        return new Color(brightness, brightness, brightness, c.getAlpha());
    }

    /**
     * flip an image vertically (mirror the rows)
     */
    private void invert() {
        Color transformed[][] = new Color[sourcePixels.length][sourcePixels[0].length];
        for (int col = 0; col < transformed.length; col++) {
            for (int row = 0; row < transformed[col].length; row++) {
                transformed[col][row] = sourcePixels[col][sourcePixels[0].length - 1 - row];
            }
        }
        displayPreview(transformed);
    }

    /**
     * flip an image horizontally (mirror the columns)
     */
    private void flip() {
        Color transformed[][] = new Color[sourcePixels.length][sourcePixels[0].length];
        for (int col = 0; col < transformed.length; col++) {
            for (int row = 0; row < transformed[col].length; row++) {
                transformed[col][row] = sourcePixels[sourcePixels.length - 1 - col][row];
            }
        }
        displayPreview(transformed);
    }

    /**
     * blur an image by replacing each pixel with the average of the color values
     * of its neighbors in a 2*range+1 by 2*range+1 square.
     * Parameter range is the distance to reach out for neighbors to include
     */
    private void blur(int range) {
        Color transformed[][] = new Color[sourcePixels.length][sourcePixels[0].length];
        for (int col = 0; col < transformed.length; col++) {
            for (int row = 0; row < transformed[col].length; row++) {
                int counted = 0;
                double redSum, greenSum, blueSum;
                redSum = greenSum = blueSum = 0;
                // average over the neighborhood, clipped to the image borders
                for (int nearbyCol = Math.max(0, col - range);
                     nearbyCol < Math.min(transformed.length, col + range + 1);
                     nearbyCol++) {
                    for (int nearbyRow = Math.max(0, row - range);
                         nearbyRow < Math.min(transformed[nearbyCol].length, row + range + 1);
                         nearbyRow++) {
                        redSum += sourcePixels[nearbyCol][nearbyRow].getRed();
                        blueSum += sourcePixels[nearbyCol][nearbyRow].getBlue();
                        greenSum += sourcePixels[nearbyCol][nearbyRow].getGreen();
                        counted++;
                    }
                }
                transformed[col][row] = new Color((int) (Math.round(redSum / counted)),
                                                  (int) (Math.round(greenSum / counted)),
                                                  (int) (Math.round(blueSum / counted)),
                                                  sourcePixels[col][row].getAlpha());
            }
        }
        displayPreview(transformed);
    }

    /**
     * rotate the image by the angle selected on the slider
     */
    private void rotate() {
        // angle of rotation
        double theta = 2 * Math.PI * (1.0 * adjustment.getValue() / ADJUSTMENTRANGE);
        // work in square based on maximum image dimension
        int maxDim = Math.max(origWidth, origHeight);
        Color transformed[][] = new Color[maxDim][maxDim];
        // center about which to rotate; divide as doubles so odd dimensions
        // are not off-center by half a pixel (was integer division)
        double transCenter = maxDim / 2.0;
        for (int row = 0; row < maxDim; row++) {
            for (int col = 0; col < maxDim; col++) {
                // find pixel position (r,c) in original corresponding to (row,col)
                // in the rotated image (inverse rotation)
                double transDx = (row - transCenter);
                double transDy = (col - transCenter);
                int r = (int) Math.round(transCenter + (transDx * Math.cos(theta) - transDy * Math.sin(theta)));
                int c = (int) Math.round(transCenter + (transDx * Math.sin(theta) + transDy * Math.cos(theta)));
                // extract the desired pixel if in range. Bounds are checked on
                // r and c before indexing (the original indexed
                // sourcePixels[row], which overruns when the image is taller
                // than it is wide, and tested r >= 0 too late).
                if (r >= 0 && r < sourcePixels.length && c >= 0 && c < sourcePixels[r].length) {
                    transformed[row][col] = sourcePixels[r][c];
                } else {
                    transformed[row][col] = Color.white;
                }
            }
        }
        displayPreview(transformed);
    }

    /**
     * update the preview of the transformed image
     * Parameter transformed is a 2-dimensional array of pixels
     */
    private void displayPreview(Color transformed[][]) {
        Image stillPic = makeImage(transformed);
        if (preview != null) {
            preview.removeFromCanvas();
        }
        // place the preview SPACER pixels to the right of the original
        preview = new VisibleImage(stillPic, orig.getWidth() + SPACER, 0, canvas);
        preview.setWidth(origScale * preview.getWidth());
        preview.setHeight(origScale * preview.getHeight());
    }

    /**
     * update the transformed image depending on menu selection
     */
    private void updatePreview() {
        Object option = optionSelection.getSelectedItem();
        if (option.equals("Convert to grayscale")) {
            adjustSaturation();
        } else if (option.equals("Vertical inversion")) {
            invert();
        } else if (option.equals("Horizontal reversal")) {
            flip();
        } else if (option.equals("Rectangular averaging")) {
            int range = (int) Math.round(MAXBLURRANGE * (adjustment.getValue() + 0.0) / (ADJUSTMENTRANGE));
            // only re-blur when the effective range actually changed
            if (range != oldRange) {
                blur(range);
                oldRange = range;
            }
        } else if (option.equals("Rotate")) {
            rotate();
        }
    }

    /**
     * update when the slider moves
     * Parameter e is the state of the slider
     */
    public void stateChanged(ChangeEvent e) {
        updatePreview();
    }

    /**
     * update when a menu item is selected (and reset the slider)
     * Parameter e is the selection of the combo box
     */
    public void actionPerformed(ActionEvent e) {
        // force the next "Rectangular averaging" update to recompute
        oldRange = MAXBLURRANGE + 1;
        adjustment.setValue(0);
        updatePreview();
    }

    /**
     * determine how much to scale image to fit two copies in display
     * Parameter source is the Image to be displayed
     */
    private double getScaleFactor(Image source) {
        double horizScale, vertScale;
        origWidth = source.getWidth(null);
        origHeight = source.getHeight(null);
        if (origWidth < (canvas.getWidth() - SPACER) / 2) {
            horizScale = 1;
        } else {
            horizScale = (canvas.getWidth() - SPACER) / 2 / origWidth;
        }
        if (origHeight < canvas.getHeight()) {
            vertScale = 1;
        } else {
            // divide as doubles: int/int division would truncate the scale to 0
            vertScale = (double) canvas.getHeight() / origHeight;
        }
        return Math.min(horizScale, vertScale);
    }

    /**
     * convert an integer color value into an object of the Color class
     * Parameter pixel is the pixel to convert to a Color
     */
    public Color int2Color(int pixel) {
        // packed ARGB layout: alpha in the top byte, then red, green, blue
        int alpha = (pixel >> 24) & 0xff;
        int red = (pixel >> 16) & 0xff;
        int green = (pixel >> 8) & 0xff;
        int blue = (pixel) & 0xff;
        return new Color(red, green, blue, alpha);
    }

    /**
     * get the 2D array of Colors that represent an Image
     * Parameter img is the image to convert to a 2-d array of colors
     */
    public Color[][] getPixelMap(Image img) {
        // force Java to load the image
        MediaTracker mediaTracker = new MediaTracker(this);
        mediaTracker.addImage(img, 0);
        try {
            mediaTracker.waitForID(0);
        } catch (InterruptedException ie) {
            System.out.println(ie);
        }
        // get a 1D array of the pixels
        int width = img.getWidth(null);
        int height = img.getHeight(null);
        int[] pixels = new int[width * height];
        PixelGrabber pg = new PixelGrabber(img, 0, 0, width, height, pixels, 0, width);
        try {
            pg.grabPixels();
        } catch (InterruptedException e) {
            System.err.println("interrupted waiting for pixels!");
        }
        if ((pg.getStatus() & ImageObserver.ABORT) != 0) {
            System.err.println("image fetch aborted or errored");
        }
        // regroup the row-major 1D pixels into a [column][row] 2D array
        Color result[][] = new Color[width][height];
        for (int col = 0; col < height; col++) {
            for (int row = 0; row < width; row++) {
                result[row][col] = int2Color(pixels[col * width + row]);
            }
        }
        return result;
    }

    /**
     * Convert a 2D array of Colors into an Image
     * Parameter pixelMap is the 2-d array of pixels
     */
    public Image makeImage(Color[][] pixelMap) {
        int width = pixelMap.length;
        int height = pixelMap[0].length;
        // place the pixel values in an appropriate row-major 1D array
        int pix[] = new int[width * height];
        int index = 0;
        for (int col = 0; col < height; col++) {
            for (int row = 0; row < width; row++) {
                int red = pixelMap[row][col].getRed();
                int green = pixelMap[row][col].getGreen();
                int blue = pixelMap[row][col].getBlue();
                int alpha = pixelMap[row][col].getAlpha();
                // repack into ARGB order
                pix[index++] =
                    (alpha << 24) | (red << 16) | (green << 8) | blue;
            }
        }
        // create an image from the 1D array
        return createImage(new MemoryImageSource(width, height, pix, 0, width));
    }
}
| |
/******************************************************************************
* Confidential Proprietary *
* (c) Copyright Haifeng Li 2011, All Rights Reserved *
******************************************************************************/
package smile.classification;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Callable;
import smile.math.Math;
import smile.math.DifferentiableMultivariateFunction;
import smile.util.MulticoreExecutor;
/**
* Logistic regression. Logistic regression (logit model) is a generalized
* linear model used for binomial regression. Logistic regression applies
* maximum likelihood estimation after transforming the dependent into
* a logit variable. A logit is the natural log of the odds of the dependent
* equaling a certain value or not (usually 1 in binary logistic models,
* the highest value in multinomial models). In this way, logistic regression
* estimates the odds of a certain event (value) occurring.
* <p>
* Goodness-of-fit tests such as the likelihood ratio test are available
* as indicators of model appropriateness, as is the Wald statistic to test
* the significance of individual independent variables.
* <p>
* Logistic regression has many analogies to ordinary least squares (OLS)
* regression. Unlike OLS regression, however, logistic regression does not
* assume linearity of relationship between the raw values of the independent
* variables and the dependent, does not require normally distributed variables,
* does not assume homoscedasticity, and in general has less stringent
* requirements.
* <p>
* Compared with linear discriminant analysis, logistic regression has several
* advantages:
* <ul>
* <li> It is more robust: the independent variables don't have to be normally
* distributed, or have equal variance in each group
* <li> It does not assume a linear relationship between the independent
* variables and dependent variable.
* <li> It may handle nonlinear effects since one can add explicit interaction
* and power terms.
* </ul>
* However, it requires much more data to achieve stable, meaningful results.
* <p>
* Logistic regression also has strong connections with neural network and
* maximum entropy modeling. For example, binary logistic regression is
* equivalent to a one-layer, single-output neural network with a logistic
* activation function trained under log loss. Similarly, multinomial logistic
* regression is equivalent to a one-layer, softmax-output neural network.
* <p>
* Logistic regression estimation also obeys the maximum entropy principle, and
* thus logistic regression is sometimes called "maximum entropy modeling",
* and the resulting classifier the "maximum entropy classifier".
*
* @see NeuralNetwork
* @see Maxent
* @see LDA
*
* @author Haifeng Li
*/
public class LogisticRegression implements Classifier<double[]> {
    /**
     * The dimension of input space.
     */
    private int p;

    /**
     * The number of classes.
     */
    private int k;

    /**
     * The log-likelihood of learned model.
     */
    private double L;

    /**
     * The linear weights for binary logistic regression.
     */
    private double[] w;

    /**
     * The linear weights for multi-class logistic regression.
     */
    private double[][] W;

    /**
     * Trainer for logistic regression.
     */
    public static class Trainer extends ClassifierTrainer<double[]> {
        /**
         * Regularization factor. λ > 0 gives a "regularized" estimate
         * of linear weights which often has superior generalization
         * performance, especially when the dimensionality is high.
         */
        private double lambda = 0.0;
        /**
         * The tolerance for BFGS stopping iterations.
         */
        private double tol = 1E-5;
        /**
         * The maximum number of BFGS iterations.
         */
        private int maxIter = 500;

        /**
         * Constructor.
         */
        public Trainer() {
        }

        /**
         * Sets the regularization factor. λ > 0 gives a "regularized"
         * estimate of linear weights which often has superior generalization
         * performance, especially when the dimensionality is high.
         *
         * @param lambda regularization factor.
         */
        public void setRegularizationFactor(double lambda) {
            this.lambda = lambda;
        }

        /**
         * Sets the tolerance for BFGS stopping iterations.
         *
         * @param tol the tolerance for stopping iterations.
         */
        public void setTolerance(double tol) {
            if (tol <= 0.0) {
                throw new IllegalArgumentException("Invalid tolerance: " + tol);
            }
            this.tol = tol;
        }

        /**
         * Sets the maximum number of iterations.
         *
         * @param maxIter the maximum number of iterations.
         */
        public void setMaxNumIteration(int maxIter) {
            if (maxIter <= 0) {
                throw new IllegalArgumentException("Invalid maximum number of iterations: " + maxIter);
            }
            this.maxIter = maxIter;
        }

        @Override
        public LogisticRegression train(double[][] x, int[] y) {
            return new LogisticRegression(x, y, lambda, tol, maxIter);
        }
    }

    /**
     * Constructor. No regularization.
     *
     * @param x training samples.
     * @param y training labels in [0, k), where k is the number of classes.
     */
    public LogisticRegression(double[][] x, int[] y) {
        this(x, y, 0.0);
    }

    /**
     * Constructor.
     *
     * @param x training samples.
     * @param y training labels in [0, k), where k is the number of classes.
     * @param lambda λ > 0 gives a "regularized" estimate of linear
     * weights which often has superior generalization performance, especially
     * when the dimensionality is high.
     */
    public LogisticRegression(double[][] x, int[] y, double lambda) {
        this(x, y, lambda, 1E-5, 500);
    }

    /**
     * Constructor.
     *
     * @param x training samples.
     * @param y training labels in [0, k), where k is the number of classes.
     * @param lambda λ > 0 gives a "regularized" estimate of linear
     * weights which often has superior generalization performance, especially
     * when the dimensionality is high.
     * @param tol the tolerance for stopping iterations.
     * @param maxIter the maximum number of iterations.
     */
    public LogisticRegression(double[][] x, int[] y, double lambda, double tol, int maxIter) {
        if (x.length != y.length) {
            throw new IllegalArgumentException(String.format("The sizes of X and Y don't match: %d != %d", x.length, y.length));
        }
        if (lambda < 0.0) {
            throw new IllegalArgumentException("Invalid regularization factor: " + lambda);
        }
        if (tol <= 0.0) {
            throw new IllegalArgumentException("Invalid tolerance: " + tol);
        }
        if (maxIter <= 0) {
            throw new IllegalArgumentException("Invalid maximum number of iterations: " + maxIter);
        }

        // class label set: labels must be the contiguous range 0 .. k-1.
        int[] labels = Math.unique(y);
        Arrays.sort(labels);
        for (int i = 0; i < labels.length; i++) {
            if (labels[i] < 0) {
                throw new IllegalArgumentException("Negative class label: " + labels[i]);
            }
            if (i > 0 && labels[i] - labels[i-1] > 1) {
                // Parenthesized so the missing label is computed arithmetically;
                // the original appended the character "1" to the message string.
                throw new IllegalArgumentException("Missing class: " + (labels[i-1] + 1));
            }
        }

        k = labels.length;
        if (k < 2) {
            throw new IllegalArgumentException("Only one class.");
        }

        p = x[0].length;
        if (k == 2) {
            BinaryObjectiveFunction func = new BinaryObjectiveFunction(x, y, lambda);
            // w holds the p linear weights plus the intercept in slot p.
            w = new double[p + 1];

            L = 0.0;
            try {
                // L-BFGS with memory 5; negate because min() minimizes -loglikelihood.
                L = -Math.min(func, 5, w, tol, maxIter);
            } catch (Exception ex) {
                // If L-BFGS doesn't work, let's try BFGS.
                L = -Math.min(func, w, tol, maxIter);
            }
        } else {
            MultiClassObjectiveFunction func = new MultiClassObjectiveFunction(x, y, k, lambda);
            // one (p+1)-sized weight block per class, flattened for the optimizer.
            w = new double[k * (p + 1)];

            L = 0.0;
            try {
                L = -Math.min(func, 5, w, tol, maxIter);
            } catch (Exception ex) {
                // If L-BFGS doesn't work, let's try BFGS.
                L = -Math.min(func, w, tol, maxIter);
            }

            // reshape the flat solution into per-class weight rows.
            W = new double[k][p+1];
            for (int i = 0, m = 0; i < k; i++) {
                for (int j = 0; j <= p; j++, m++) {
                    W[i][j] = w[m];
                }
            }
            w = null;
        }
    }

    /**
     * Returns natural log(1+exp(x)) without overflow.
     */
    private static double log1pe(double x) {
        double y = 0.0;
        if (x > 15) {
            // for large x, log(1+e^x) ~ x; avoids overflow in exp.
            y = x;
        } else {
            y += Math.log1p(Math.exp(x));
        }

        return y;
    }

    /**
     * Binary-class logistic regression objective function.
     */
    static class BinaryObjectiveFunction implements DifferentiableMultivariateFunction {

        /**
         * Training instances.
         */
        double[][] x;
        /**
         * Training labels.
         */
        int[] y;
        /**
         * Regularization factor.
         */
        double lambda;
        /**
         * Parallel computing of objective function.
         */
        List<FTask> ftasks = null;
        /**
         * Parallel computing of objective function and gradient.
         */
        List<GTask> gtasks = null;

        /**
         * Constructor.
         */
        BinaryObjectiveFunction(double[][] x, int[] y, double lambda) {
            this.x = x;
            this.y = y;
            this.lambda = lambda;

            // split the data into per-thread chunks only when both the data set
            // and the thread pool are large enough to pay off.
            int n = x.length;
            int m = MulticoreExecutor.getThreadPoolSize();
            if (n >= 1000 && m >= 2) {
                ftasks = new ArrayList<FTask>(m + 1);
                gtasks = new ArrayList<GTask>(m + 1);
                int step = n / m;
                if (step < 100) {
                    step = 100;
                }

                int start = 0;
                int end = step;
                for (int i = 0; i < m - 1; i++) {
                    ftasks.add(new FTask(start, end));
                    gtasks.add(new GTask(start, end));
                    start += step;
                    end += step;
                }
                ftasks.add(new FTask(start, n));
                gtasks.add(new GTask(start, n));
            }
        }

        /**
         * Task to calculate the objective function.
         */
        class FTask implements Callable<Double> {

            /**
             * The parameter vector.
             */
            double[] w;
            /**
             * The start index of data portion for this task.
             */
            int start;
            /**
             * The end index of data portion for this task.
             */
            int end;

            FTask(int start, int end) {
                this.start = start;
                this.end = end;
            }

            @Override
            public Double call() {
                double f = 0.0;

                for (int i = start; i < end; i++) {
                    double wx = dot(x[i], w);
                    f += log1pe(wx) - y[i] * wx;
                }

                return f;
            }
        }

        @Override
        public double f(double[] w) {
            double f = Double.NaN;
            int p = w.length - 1;

            if (ftasks != null) {
                for (FTask task : ftasks) {
                    task.w = w;
                }

                try {
                    f = 0.0;
                    for (double fi : MulticoreExecutor.run(ftasks)) {
                        f += fi;
                    }
                } catch (Exception ex) {
                    System.err.println(ex);
                    f = Double.NaN;
                }
            }

            // serial fallback when no task list exists or the parallel run failed.
            if (Double.isNaN(f)) {
                f = 0.0;
                int n = x.length;
                for (int i = 0; i < n; i++) {
                    double wx = dot(x[i], w);
                    f += log1pe(wx) - y[i] * wx;
                }
            }

            if (lambda != 0.0) {
                // L2 penalty over the weights; the intercept w[p] is not penalized.
                double w2 = 0.0;
                for (int i = 0; i < p; i++) {
                    w2 += w[i] * w[i];
                }

                f += 0.5 * lambda * w2;
            }

            return f;
        }

        /**
         * Task to calculate the objective function and gradient.
         */
        class GTask implements Callable<double[]> {

            /**
             * The parameter vector.
             */
            double[] w;
            /**
             * The start index of data portion for this task.
             */
            int start;
            /**
             * The end index of data portion for this task.
             */
            int end;

            GTask(int start, int end) {
                this.start = start;
                this.end = end;
            }

            @Override
            public double[] call() {
                double f = 0.0;
                int p = w.length - 1;
                // g packs the partial gradient in [0, w.length) and the partial
                // objective value in the extra last slot.
                double[] g = new double[w.length + 1];

                for (int i = start; i < end; i++) {
                    double wx = dot(x[i], w);
                    f += log1pe(wx) - y[i] * wx;
                    double yi = y[i] - Math.logistic(wx);
                    for (int j = 0; j < p; j++) {
                        g[j] -= yi * x[i][j];
                    }
                    g[p] -= yi;
                }

                g[w.length] = f;
                return g;
            }
        }

        @Override
        public double f(double[] w, double[] g) {
            double f = Double.NaN;
            int p = w.length - 1;
            Arrays.fill(g, 0.0);

            if (gtasks != null) {
                for (GTask task : gtasks) {
                    task.w = w;
                }

                try {
                    f = 0.0;
                    for (double[] gi : MulticoreExecutor.run(gtasks)) {
                        f += gi[w.length];
                        for (int i = 0; i < w.length; i++) {
                            g[i] += gi[i];
                        }
                    }
                } catch (Exception ex) {
                    System.err.println(ex);
                    f = Double.NaN;
                }
            }

            // serial fallback when no task list exists or the parallel run failed.
            if (Double.isNaN(f)) {
                f = 0.0;
                int n = x.length;
                for (int i = 0; i < n; i++) {
                    double wx = dot(x[i], w);
                    f += log1pe(wx) - y[i] * wx;
                    double yi = y[i] - Math.logistic(wx);
                    for (int j = 0; j < p; j++) {
                        g[j] -= yi * x[i][j];
                    }
                    g[p] -= yi;
                }
            }

            if (lambda != 0.0) {
                // L2 penalty and its gradient; the intercept w[p] is not penalized.
                double w2 = 0.0;
                for (int i = 0; i < p; i++) {
                    w2 += w[i] * w[i];
                }

                f += 0.5 * lambda * w2;

                for (int j = 0; j < p; j++) {
                    g[j] += lambda * w[j];
                }
            }

            return f;
        }
    }

    /**
     * Returns natural log without underflow.
     */
    private static double log(double x) {
        double y = 0.0;
        if (x < 1E-300) {
            y = -690.7755;
        } else {
            y = Math.log(x);
        }
        return y;
    }

    /**
     * Multi-class logistic regression objective function.
     */
    static class MultiClassObjectiveFunction implements DifferentiableMultivariateFunction {

        /**
         * Training instances.
         */
        double[][] x;
        /**
         * Training labels.
         */
        int[] y;
        /**
         * The number of classes.
         */
        int k;
        /**
         * Regularization factor.
         */
        double lambda;
        /**
         * Parallel computing of objective function.
         */
        List<FTask> ftasks = null;
        /**
         * Parallel computing of objective function and gradient.
         */
        List<GTask> gtasks = null;

        /**
         * Constructor.
         */
        MultiClassObjectiveFunction(double[][] x, int[] y, int k, double lambda) {
            this.x = x;
            this.y = y;
            this.k = k;
            this.lambda = lambda;

            // split the data into per-thread chunks only when both the data set
            // and the thread pool are large enough to pay off.
            int n = x.length;
            int m = MulticoreExecutor.getThreadPoolSize();
            if (n >= 1000 && m >= 2) {
                ftasks = new ArrayList<FTask>(m + 1);
                gtasks = new ArrayList<GTask>(m + 1);
                int step = n / m;
                if (step < 100) {
                    step = 100;
                }

                int start = 0;
                int end = step;
                for (int i = 0; i < m - 1; i++) {
                    ftasks.add(new FTask(start, end));
                    gtasks.add(new GTask(start, end));
                    start += step;
                    end += step;
                }
                ftasks.add(new FTask(start, n));
                gtasks.add(new GTask(start, n));
            }
        }

        /**
         * Task to calculate the objective function.
         */
        class FTask implements Callable<Double> {

            /**
             * The parameter vector.
             */
            double[] w;
            /**
             * The start index of data portion for this task.
             */
            int start;
            /**
             * The end index of data portion for this task.
             */
            int end;

            FTask(int start, int end) {
                this.start = start;
                this.end = end;
            }

            @Override
            public Double call() {
                double f = 0.0;
                int p = x[0].length;
                double[] prob = new double[k];

                for (int i = start; i < end; i++) {
                    for (int j = 0; j < k; j++) {
                        prob[j] = dot(x[i], w, j * (p + 1));
                    }

                    softmax(prob);

                    f -= log(prob[y[i]]);
                }

                return f;
            }
        }

        @Override
        public double f(double[] w) {
            double f = Double.NaN;
            int p = x[0].length;
            double[] prob = new double[k];

            if (ftasks != null) {
                for (FTask task : ftasks) {
                    task.w = w;
                }

                try {
                    f = 0.0;
                    for (double fi : MulticoreExecutor.run(ftasks)) {
                        f += fi;
                    }
                } catch (Exception ex) {
                    System.err.println(ex);
                    f = Double.NaN;
                }
            }

            // serial fallback when no task list exists or the parallel run failed.
            if (Double.isNaN(f)) {
                f = 0.0;
                int n = x.length;
                for (int i = 0; i < n; i++) {
                    for (int j = 0; j < k; j++) {
                        prob[j] = dot(x[i], w, j * (p + 1));
                    }

                    softmax(prob);

                    f -= log(prob[y[i]]);
                }
            }

            if (lambda != 0.0) {
                // L2 penalty over each class's weights; intercepts are not penalized.
                double w2 = 0.0;
                for (int i = 0; i < k; i++) {
                    for (int j = 0; j < p; j++) {
                        w2 += Math.sqr(w[i*(p+1) + j]);
                    }
                }

                f += 0.5 * lambda * w2;
            }

            return f;
        }

        /**
         * Task to calculate the objective function and gradient.
         */
        class GTask implements Callable<double[]> {

            /**
             * The parameter vector.
             */
            double[] w;
            /**
             * The start index of data portion for this task.
             */
            int start;
            /**
             * The end index of data portion for this task.
             */
            int end;

            GTask(int start, int end) {
                this.start = start;
                this.end = end;
            }

            @Override
            public double[] call() {
                double f = 0.0;
                // g packs the partial gradient in [0, w.length) and the partial
                // objective value in the extra last slot.
                double[] g = new double[w.length+1];

                int p = x[0].length;
                double[] prob = new double[k];

                for (int i = start; i < end; i++) {
                    for (int j = 0; j < k; j++) {
                        prob[j] = dot(x[i], w, j * (p + 1));
                    }

                    softmax(prob);

                    f -= log(prob[y[i]]);

                    double yi = 0.0;
                    for (int j = 0; j < k; j++) {
                        yi = (y[i] == j ? 1.0 : 0.0) - prob[j];

                        for (int l = 0, pos = j * (p + 1); l < p; l++) {
                            g[pos + l] -= yi * x[i][l];
                        }
                        g[j * (p + 1) + p] -= yi;
                    }
                }

                g[w.length] = f;
                return g;
            }
        }

        @Override
        public double f(double[] w, double[] g) {
            double f = Double.NaN;
            int p = x[0].length;
            double[] prob = new double[k];
            Arrays.fill(g, 0.0);

            if (gtasks != null) {
                for (GTask task : gtasks) {
                    task.w = w;
                }

                try {
                    f = 0.0;
                    for (double[] gi : MulticoreExecutor.run(gtasks)) {
                        f += gi[w.length];
                        for (int i = 0; i < w.length; i++) {
                            g[i] += gi[i];
                        }
                    }
                } catch (Exception ex) {
                    System.err.println(ex);
                    f = Double.NaN;
                }
            }

            // serial fallback when no task list exists or the parallel run failed.
            if (Double.isNaN(f)) {
                f = 0.0;
                int n = x.length;
                for (int i = 0; i < n; i++) {
                    for (int j = 0; j < k; j++) {
                        prob[j] = dot(x[i], w, j * (p + 1));
                    }

                    softmax(prob);

                    f -= log(prob[y[i]]);

                    double yi = 0.0;
                    for (int j = 0; j < k; j++) {
                        yi = (y[i] == j ? 1.0 : 0.0) - prob[j];

                        for (int l = 0, pos = j * (p + 1); l < p; l++) {
                            g[pos + l] -= yi * x[i][l];
                        }
                        g[j * (p + 1) + p] -= yi;
                    }
                }
            }

            if (lambda != 0.0) {
                // L2 penalty and its gradient; intercepts are not penalized.
                double w2 = 0.0;
                for (int i = 0; i < k; i++) {
                    for (int j = 0; j < p; j++) {
                        int pos = i * (p+1) + j;
                        w2 += w[pos] * w[pos];
                        g[pos] += lambda * w[pos];
                    }
                }

                f += 0.5 * lambda * w2;
            }

            return f;
        }
    }

    /**
     * Calculate softmax function without overflow.
     */
    private static void softmax(double[] prob) {
        double max = Double.NEGATIVE_INFINITY;
        for (int i = 0; i < prob.length; i++) {
            if (prob[i] > max) {
                max = prob[i];
            }
        }

        double Z = 0.0;
        for (int i = 0; i < prob.length; i++) {
            double p = Math.exp(prob[i] - max);
            prob[i] = p;
            Z += p;
        }

        for (int i = 0; i < prob.length; i++) {
            prob[i] /= Z;
        }
    }

    /**
     * Returns the dot product between weight vector and x (augmented with 1).
     */
    private static double dot(double[] x, double[] w) {
        int i = 0;
        double dot = 0.0;

        for (; i < x.length; i++) {
            dot += x[i] * w[i];
        }

        // w[x.length] is the intercept term of the augmented vector.
        return dot + w[i];
    }

    /**
     * Returns the dot product between weight vector and x (augmented with 1).
     */
    private static double dot(double[] x, double[] w, int pos) {
        int i = 0;
        double dot = 0.0;

        for (; i < x.length; i++) {
            dot += x[i] * w[pos+i];
        }

        // w[pos + x.length] is the intercept term of this class's weight block.
        return dot + w[pos+i];
    }

    /**
     * Returns the log-likelihood of model.
     */
    public double loglikelihood() {
        return L;
    }

    @Override
    public int predict(double[] x) {
        return predict(x, null);
    }

    @Override
    public int predict(double[] x, double[] posteriori) {
        if (x.length != p) {
            throw new IllegalArgumentException(String.format("Invalid input vector size: %d, expected: %d", x.length, p));
        }

        if (posteriori != null && posteriori.length != k) {
            throw new IllegalArgumentException(String.format("Invalid posteriori vector size: %d, expected: %d", posteriori.length, k));
        }

        if (k == 2) {
            double f = 1.0 / (1.0 + Math.exp(-dot(x, w)));

            if (posteriori != null) {
                // f is the probability of the positive class (label 1), so it
                // belongs in slot 1. The original assigned f to slot 0, which
                // contradicted the label returned below.
                posteriori[0] = 1.0 - f;
                posteriori[1] = f;
            }

            if (f < 0.5) {
                return 0;
            } else {
                return 1;
            }
        } else {
            int label = -1;
            double max = Double.NEGATIVE_INFINITY;
            for (int i = 0; i < k; i++) {
                double prob = dot(x, W[i]);
                if (prob > max) {
                    max = prob;
                    label = i;
                }

                if (posteriori != null) {
                    posteriori[i] = prob;
                }
            }

            if (posteriori != null) {
                // turn the raw scores into probabilities with a stable softmax.
                double Z = 0.0;
                for (int i = 0; i < k; i++) {
                    posteriori[i] = Math.exp(posteriori[i] - max);
                    Z += posteriori[i];
                }

                for (int i = 0; i < k; i++) {
                    posteriori[i] /= Z;
                }
            }

            return label;
        }
    }
}
| |
package br.ufsc.ftsm.method.lcss;
import java.util.ArrayDeque;
import java.util.HashMap;
import java.util.Map;
import java.util.Queue;
import br.ufsc.core.trajectory.Trajectory;
import br.ufsc.ftsm.base.TrajectorySimilarityCalculator;
import br.ufsc.utils.Distance;
/**
 * LCSS (Longest Common SubSequence) trajectory similarity computed with an
 * FTSM-style pruning tree over the shorter trajectory, followed by an
 * FTSE-style pass that turns the per-point matching lists into the LCSS length.
 */
public class FTSMOLCSS extends TrajectorySimilarityCalculator<Trajectory> {

    // maximum spatial distance at which two points are considered a match
    double threshold;

    /**
     * Parameter threshold is the point-matching distance threshold.
     */
    public FTSMOLCSS(double threshold) {
        this.threshold = threshold;
    }

    /**
     * Returns the LCSS similarity of R and S, normalized to [0, 1] by the
     * length of the shorter trajectory.
     */
    public double getSimilarity(Trajectory R, Trajectory S) {
        // FTSM: the search tree is built over T1, the shorter trajectory.
        Trajectory T1;
        Trajectory T2;
        if (R.length() <= S.length()) {
            T1 = R;
            T2 = S;
        } else {
            T1 = S;
            T2 = R;
        }

        // dist[i] = cumulative arc length of T1 from point 0 to point i;
        // used below to bound how far a segment's points can lie from its midpoint.
        double dist[] = new double[T1.length()];
        // M maps each T1 index to the (1-based) T2 indices that match it within threshold.
        Map<Integer, ArrayDeque<Integer>> M = new HashMap<Integer, ArrayDeque<Integer>>();
        M.put(0, new ArrayDeque<Integer>());
        for (int i = 1; i < T1.length(); i++) {
            dist[i] = dist[i - 1] + Distance.euclidean(T1.getPoint(i), T1.getPoint(i - 1));
            M.put(i, new ArrayDeque<Integer>());
        }

        Queue<NodeLCSSNWBB> queue = new ArrayDeque<>();
        Queue<IntervalLCSSNWBB> toCompare = new ArrayDeque<>();
        toCompare.add(new IntervalLCSSNWBB(0, (T2.length() - 1)));
        // The root covers all of T1 and must initially be compared against all of T2.
        NodeLCSSNWBB root = new NodeLCSSNWBB(0, (T1.length() / 2), (T1.length() - 1), toCompare);
        queue.add(root);
        while (!queue.isEmpty()) {
            NodeLCSSNWBB node = queue.poll();
            if (!node.isLeaf) {
                // Arc length bounds point distance, so no point of the segment
                // [begin, end] can match a T2 point farther than this radius
                // from the segment midpoint.
                double radius = Math.max(dist[node.mid] - dist[node.begin], (dist[node.end] - dist[node.mid]))
                        + threshold;
                Queue<IntervalLCSSNWBB> matchingList = new ArrayDeque<>();
                // Collect the maximal runs of consecutive T2 indices that survive
                // the radius test; only these are handed down to the children.
                for (IntervalLCSSNWBB interval : node.toCompare) {
                    int k = interval.begin;
                    int start = -1;
                    int end = -1;
                    while (k <= interval.end) {
                        if (Distance.euclidean(T2.getPoint(k), T1.getPoint(node.mid)) <= radius) {
                            if (start == -1) {
                                start = k;
                            }
                        } else {
                            if (start != -1) {
                                end = k - 1;
                                matchingList.add(new IntervalLCSSNWBB(start, end));
                            }
                            start = -1;
                            end = -1;
                        }
                        k++;
                    }
                    // close a run that extends to the end of the interval
                    if (start != -1) {
                        end = k - 1;
                        matchingList.add(new IntervalLCSSNWBB(start, end));
                    }
                }
                if (!matchingList.isEmpty()) {
                    int total = node.end - node.begin;
                    // Small segments expand directly into one leaf per point;
                    // larger ones are split into up to 8 child partitions.
                    if (total <= 8) {
                        for (int i = 0; i <= total; i++) {
                            int mid = node.begin + i;
                            queue.add(new NodeLCSSNWBB(mid, mid, mid, matchingList));
                        }
                    } else {
                        // ceiling division: partition [begin, end] into 8 chunks
                        int partitionSize = (total + 8 - 1) / 8;
                        for (int i = node.begin; i <= node.end; i += partitionSize) {
                            int end = Math.min(i + partitionSize, (node.end + 1));
                            int mid = (i + end) / 2;
                            queue.add(new NodeLCSSNWBB(i, mid, end - 1, matchingList));
                        }
                    }
                }
            } else {
                // Leaf: record the exact matches of T1.getPoint(node.mid),
                // stored 1-based for the FTSE pass below.
                for (IntervalLCSSNWBB interval : node.toCompare) {
                    int k = interval.begin;
                    while (k <= interval.end) {
                        if (Distance.euclidean(T2.getPoint(k), T1.getPoint(node.mid)) <= threshold) {
                            M.get(node.mid).add(k + 1);
                        }
                        k++;
                    }
                }
            }
        }

        // Based on the FTSE algorithm: compute LCSS from the matching lists.
        double result = 0;
        int n2 = T1.length();
        // matches[c] is the smallest T2 index (1-based) ending a common
        // subsequence of length c seen so far; sentinel m2+1 means "none yet".
        int[] matches = new int[n2 + 1];
        matches[0] = 0;
        int m2 = T2.length();
        for (int i = 1; i < n2 + 1; i++) {
            matches[i] = m2 + 1;
        }
        int max = 0;
        for (int j = 1; j <= n2; j++) {
            int c = 0;
            int temp = matches[0];
            for (Integer k : M.get(j - 1)) {
                if (temp < k) {
                    while (matches[c] < k) {
                        c++;
                    }
                    temp = matches[c];
                    matches[c] = k;
                    if (c > max) {
                        max = c;
                    }
                }
            }
            result = max;
        }
        // Normalize by the shorter trajectory's length.
        return result / Math.min(T1.length(), T2.length());
    }
}
/**
 * Closed index interval [begin, end] over the points of a trajectory, used by
 * the FTSM tree to track candidate matching ranges of the longer trajectory.
 */
class IntervalLCSSNWBB {
    // Inclusive lower and upper bounds of the interval.
    int begin;
    int end;

    public IntervalLCSSNWBB(int begin, int end) {
        this.end = end;
        this.begin = begin;
    }
}
/**
 * Node of the FTSM segment tree built over the shorter trajectory. It covers
 * points [begin, end] with representative midpoint {@code mid}, and carries
 * the intervals of the longer trajectory that may still contain matches for
 * this segment.
 */
class NodeLCSSNWBB {
    // Inclusive range of covered point indices in the shorter trajectory.
    int begin;
    int end;
    // Representative point used for radius-based pruning.
    int mid;
    // True when the node covers exactly one point.
    boolean isLeaf;
    // Candidate intervals of the longer trajectory still to be compared.
    Queue<IntervalLCSSNWBB> toCompare;

    public NodeLCSSNWBB(int begin, int mid, int end, Queue<IntervalLCSSNWBB> toCompare) {
        this.mid = mid;
        this.begin = begin;
        this.end = end;
        this.toCompare = toCompare;
        // Idiom fix: plain boolean expression instead of "cond ? true : false".
        isLeaf = end == begin;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.lockmgr;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.DriverState;
import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData;
import org.apache.hadoop.hive.ql.metadata.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;
/**
 * Shared lock manager for a dedicated Hive server. All locks are managed in
 * memory, in a trie of {@link Node}s keyed by the components of each
 * {@link HiveLockObject}'s path. Acquisition and release are retried a
 * configurable number of times with a configurable sleep between attempts.
 */
public class EmbeddedLockManager implements HiveLockManager {
  private static final Logger LOG = LoggerFactory.getLogger("EmbeddedHiveLockManager");

  // Root of the in-memory lock trie; never replaced, only its contents change.
  private final Node root = new Node();

  private HiveLockManagerCtx ctx;

  // Retry settings; overwritten from the Hive configuration in refresh().
  private long sleepTime = 1000;
  private int numRetriesForLock = 0;
  private int numRetriesForUnLock = 0;

  public EmbeddedLockManager() {
  }

  @Override
  public void setContext(HiveLockManagerCtx ctx) throws LockException {
    this.ctx = ctx;
    refresh();
  }

  @Override
  public HiveLock lock(HiveLockObject key, HiveLockMode mode, boolean keepAlive)
      throws LockException {
    LOG.debug("Acquiring lock for {} with mode {} {}", key.getName(), mode,
        key.getData().getLockMode());
    return lock(key, mode, numRetriesForLock, sleepTime);
  }

  @Override
  public List<HiveLock> lock(List<HiveLockObj> objs, boolean keepAlive, DriverState driverState)
      throws LockException {
    return lock(objs, numRetriesForLock, sleepTime);
  }

  @Override
  public void unlock(HiveLock hiveLock) throws LockException {
    unlock(hiveLock, numRetriesForUnLock, sleepTime);
  }

  @Override
  public void releaseLocks(List<HiveLock> hiveLocks) {
    releaseLocks(hiveLocks, numRetriesForUnLock, sleepTime);
  }

  @Override
  public List<HiveLock> getLocks(boolean verifyTablePartitions, boolean fetchData)
      throws LockException {
    return getLocks(verifyTablePartitions, fetchData, ctx.getConf());
  }

  @Override
  public List<HiveLock> getLocks(HiveLockObject key, boolean verifyTablePartitions,
      boolean fetchData) throws LockException {
    return getLocks(key, verifyTablePartitions, fetchData, ctx.getConf());
  }

  @Override
  public void prepareRetry() {
  }

  /** Reloads retry counts and the sleep interval from the current configuration. */
  @Override
  public void refresh() {
    HiveConf conf = ctx.getConf();
    sleepTime = conf.getTimeVar(
        HiveConf.ConfVars.HIVE_LOCK_SLEEP_BETWEEN_RETRIES, TimeUnit.MILLISECONDS);
    numRetriesForLock = conf.getIntVar(HiveConf.ConfVars.HIVE_LOCK_NUMRETRIES);
    numRetriesForUnLock = conf.getIntVar(HiveConf.ConfVars.HIVE_UNLOCK_NUMRETRIES);
  }

  /**
   * Attempts to acquire a single lock, retrying up to numRetriesForLock times.
   *
   * @return the acquired lock, or null if it could not be acquired
   */
  public HiveLock lock(HiveLockObject key, HiveLockMode mode, int numRetriesForLock, long sleepTime)
      throws LockException {
    for (int i = 0; i <= numRetriesForLock; i++) {
      if (i > 0) {
        sleep(sleepTime);
      }
      HiveLock lock = lockPrimitive(key, mode);
      if (lock != null) {
        return lock;
      }
    }
    return null;
  }

  private void sleep(long sleepTime) {
    try {
      Thread.sleep(sleepTime);
    } catch (InterruptedException e) {
      // Fix: restore the interrupt flag instead of silently swallowing the
      // interruption, so callers up the stack can still observe it.
      Thread.currentThread().interrupt();
    }
  }

  /**
   * Attempts to acquire all the given locks atomically (sorted to avoid
   * deadlocks), retrying the whole batch up to numRetriesForLock times.
   *
   * @return the acquired locks, or null if the batch could not be acquired
   */
  private List<HiveLock> lock(List<HiveLockObj> objs, int numRetriesForLock, long sleepTime)
      throws LockException {
    sortLocks(objs);
    if (LOG.isDebugEnabled()) {
      for (HiveLockObj obj : objs) {
        LOG.debug("Acquiring lock for {} with mode {}", obj.getObj().getName(),
            obj.getMode());
      }
    }
    for (int i = 0; i <= numRetriesForLock; i++) {
      if (i > 0) {
        sleep(sleepTime);
      }
      List<HiveLock> locks = lockPrimitive(objs, numRetriesForLock, sleepTime);
      if (locks != null) {
        return locks;
      }
    }
    return null;
  }

  // Single non-blocking lock attempt against the trie.
  private HiveLock lockPrimitive(HiveLockObject key, HiveLockMode mode) throws LockException {
    if (root.lock(key.getPaths(), key.getData(), mode == HiveLockMode.EXCLUSIVE)) {
      return new SimpleHiveLock(key, mode);
    }
    return null;
  }

  // Single attempt to acquire a whole batch; on any failure the locks already
  // acquired in this attempt are released and null is returned.
  private List<HiveLock> lockPrimitive(List<HiveLockObj> objs, int numRetriesForLock,
      long sleepTime) throws LockException {
    List<HiveLock> locks = new ArrayList<HiveLock>();
    for (HiveLockObj obj : objs) {
      HiveLock lock = lockPrimitive(obj.getObj(), obj.getMode());
      if (lock == null) {
        releaseLocks(locks, numRetriesForLock, sleepTime);
        return null;
      }
      locks.add(lock);
    }
    return locks;
  }

  // Sorts by name; for equal names, EXCLUSIVE locks come before SHARED so a
  // writer is not starved by interleaved readers.
  private void sortLocks(List<HiveLockObj> objs) {
    Collections.sort(objs, new Comparator<HiveLockObj>() {
      @Override
      public int compare(HiveLockObj o1, HiveLockObj o2) {
        int cmp = o1.getName().compareTo(o2.getName());
        if (cmp == 0) {
          if (o1.getMode() == o2.getMode()) {
            return cmp;
          }
          // EXCLUSIVE locks occur before SHARED locks
          if (o1.getMode() == HiveLockMode.EXCLUSIVE) {
            return -1;
          }
          return +1;
        }
        return cmp;
      }
    });
  }

  /**
   * Releases the given lock, retrying up to numRetriesForUnLock times.
   *
   * @throws LockException if the lock could not be released after all retries
   */
  public void unlock(HiveLock hiveLock, int numRetriesForUnLock, long sleepTime)
      throws LockException {
    String[] paths = hiveLock.getHiveLockObject().getPaths();
    HiveLockObjectData data = hiveLock.getHiveLockObject().getData();
    for (int i = 0; i <= numRetriesForUnLock; i++) {
      if (i > 0) {
        sleep(sleepTime);
      }
      if (root.unlock(paths, data)) {
        return;
      }
    }
    throw new LockException("Failed to release lock " + hiveLock);
  }

  /** Best-effort release of all given locks; failures are logged, not thrown. */
  public void releaseLocks(List<HiveLock> hiveLocks, int numRetriesForUnLock, long sleepTime) {
    for (HiveLock locked : hiveLocks) {
      try {
        unlock(locked, numRetriesForUnLock, sleepTime);
      } catch (LockException e) {
        LOG.info("Failed to unlock ", e);
      }
    }
  }

  public List<HiveLock> getLocks(boolean verifyTablePartitions, boolean fetchData, HiveConf conf)
      throws LockException {
    return root.getLocks(verifyTablePartitions, fetchData, conf);
  }

  public List<HiveLock> getLocks(HiveLockObject key, boolean verifyTablePartitions,
      boolean fetchData, HiveConf conf) throws LockException {
    return root.getLocks(key.getPaths(), verifyTablePartitions, fetchData, conf);
  }

  // from ZooKeeperHiveLockManager
  // Re-resolves the lock's path against the metastore (db/table/partition)
  // when verification is requested; returns null if the table no longer exists.
  private HiveLockObject verify(boolean verify, String[] names, HiveLockObjectData data,
      HiveConf conf) throws LockException {
    if (!verify) {
      return new HiveLockObject(names, data);
    }
    String database = names[0];
    String table = names[1];
    try {
      Hive db = Hive.get(conf);
      Table tab = db.getTable(database, table, false);
      if (tab == null) {
        return null;
      }
      if (names.length == 2) {
        return new HiveLockObject(tab, data);
      }
      // Remaining path components are "key=value" partition specs.
      Map<String, String> partSpec = new HashMap<String, String>();
      for (int indx = 2; indx < names.length; indx++) {
        String[] partVals = names[indx].split("=");
        partSpec.put(partVals[0], partVals[1]);
      }
      Partition partn;
      try {
        partn = db.getPartition(tab, partSpec, false);
      } catch (HiveException e) {
        partn = null;
      }
      if (partn == null) {
        return new HiveLockObject(new DummyPartition(tab, null, partSpec), data);
      }
      return new HiveLockObject(partn, data);
    } catch (Exception e) {
      throw new LockException(e);
    }
  }

  @Override
  public void close() {
    // Drop the whole trie; taken under the root's lock so concurrent
    // traversals see a consistent state.
    root.lock.lock();
    try {
      root.datas = null;
      root.children = null;
    } finally {
      root.lock.unlock();
    }
  }

  /**
   * One node of the lock trie. Each node may hold lock data (keyed by query
   * id) and child nodes (keyed by the next path component). All access to a
   * node's state is guarded by its own ReentrantLock; lock acquisition uses
   * tryLock so contended attempts fail fast and are retried by the caller.
   */
  private class Node {
    private boolean exclusive;
    private Map<String, Node> children;
    private Map<String, HiveLockObjectData> datas;
    private final ReentrantLock lock = new ReentrantLock();

    public Node() {
    }

    // Records lock data on this node; caller must hold this node's lock.
    public void set(HiveLockObjectData data, boolean exclusive) {
      this.exclusive = exclusive;
      if (datas == null) {
        datas = new HashMap<String, HiveLockObjectData>(3);
      }
      datas.put(data.getQueryId(), data);
    }

    public boolean lock(String[] paths, HiveLockObjectData data, boolean exclusive) {
      return lock(paths, 0, data, exclusive);
    }

    public boolean unlock(String[] paths, HiveLockObjectData data) {
      return unlock(paths, 0, data);
    }

    private List<HiveLock> getLocks(boolean verify, boolean fetchData, HiveConf conf)
        throws LockException {
      if (!root.hasChild()) {
        return Collections.emptyList();
      }
      List<HiveLock> locks = new ArrayList<HiveLock>();
      getLocks(new Stack<String>(), verify, fetchData, locks, conf);
      return locks;
    }

    private List<HiveLock> getLocks(String[] paths, boolean verify, boolean fetchData,
        HiveConf conf) throws LockException {
      if (!root.hasChild()) {
        return Collections.emptyList();
      }
      List<HiveLock> locks = new ArrayList<HiveLock>();
      getLocks(paths, 0, verify, fetchData, locks, conf);
      return locks;
    }

    // Non-blocking recursive acquire; returns false when any node on the path
    // is contended or the lock conflicts with an existing holder.
    private boolean lock(String[] paths, int index, HiveLockObjectData data, boolean exclusive) {
      if (!lock.tryLock()) {
        return false;
      }
      try {
        if (index == paths.length) {
          // Conflict if this node is exclusively held, or an exclusive
          // request meets any existing holder.
          if (this.exclusive || exclusive && hasLock()) {
            return false;
          }
          set(data, exclusive);
          return true;
        }
        Node child;
        if (children == null) {
          children = new HashMap<String, Node>(3);
          children.put(paths[index], child = new Node());
        } else {
          child = children.get(paths[index]);
          if (child == null) {
            children.put(paths[index], child = new Node());
          }
        }
        return child.lock(paths, index + 1, data, exclusive);
      } finally {
        lock.unlock();
      }
    }

    // Non-blocking recursive release; prunes empty child nodes on the way out.
    private boolean unlock(String[] paths, int index, HiveLockObjectData data) {
      if (!lock.tryLock()) {
        return false;
      }
      try {
        if (index == paths.length) {
          if (hasLock()) {
            datas.remove(data.getQueryId());
          }
          return true;
        }
        Node child = children == null ? null : children.get(paths[index]);
        if (child == null) {
          return true; // should not happen
        }
        if (child.unlock(paths, index + 1, data)) {
          if (!child.hasLock() && !child.hasChild()) {
            children.remove(paths[index]);
          }
          return true;
        }
        return false;
      } finally {
        lock.unlock();
      }
    }

    // Depth-first collection of every held lock in the trie.
    private void getLocks(Stack<String> names, boolean verify,
        boolean fetchData, List<HiveLock> locks, HiveConf conf) throws LockException {
      lock.lock();
      try {
        if (hasLock()) {
          getLocks(names.toArray(new String[names.size()]), verify, fetchData, locks, conf);
        }
        if (children != null) {
          for (Map.Entry<String, Node> entry : children.entrySet()) {
            names.push(entry.getKey());
            entry.getValue().getLocks(names, verify, fetchData, locks, conf);
            names.pop();
          }
        }
      } finally {
        lock.unlock();
      }
    }

    // Collects locks held at exactly the given path.
    private void getLocks(String[] paths, int index, boolean verify,
        boolean fetchData, List<HiveLock> locks, HiveConf conf) throws LockException {
      lock.lock();
      try {
        if (index == paths.length) {
          getLocks(paths, verify, fetchData, locks, conf);
          return;
        }
        // Fix: a leaf node has children == null; guard against the NPE the
        // previous unconditional children.get(...) could throw.
        Node child = children == null ? null : children.get(paths[index]);
        if (child != null) {
          child.getLocks(paths, index + 1, verify, fetchData, locks, conf);
        }
      } finally {
        lock.unlock();
      }
    }

    // Materializes this node's lock data into HiveLock instances.
    private void getLocks(String[] paths, boolean verify, boolean fetchData, List<HiveLock> locks,
        HiveConf conf) throws LockException {
      HiveLockMode lockMode = getLockMode();
      if (fetchData) {
        for (HiveLockObjectData data : datas.values()) {
          HiveLockObject lock = verify(verify, paths, data, conf);
          if (lock != null) {
            locks.add(new SimpleHiveLock(lock, lockMode));
          }
        }
      } else {
        HiveLockObject lock = verify(verify, paths, null, conf);
        if (lock != null) {
          locks.add(new SimpleHiveLock(lock, lockMode));
        }
      }
    }

    private HiveLockMode getLockMode() {
      return exclusive ? HiveLockMode.EXCLUSIVE : HiveLockMode.SHARED;
    }

    private boolean hasLock() {
      return datas != null && !datas.isEmpty();
    }

    private boolean hasChild() {
      return children != null && !children.isEmpty();
    }
  }

  /** Immutable in-memory lock handle pairing a lock object with its mode. */
  private static class SimpleHiveLock extends HiveLock {
    private final HiveLockObject lockObj;
    private final HiveLockMode lockMode;

    public SimpleHiveLock(HiveLockObject lockObj, HiveLockMode lockMode) {
      this.lockObj = lockObj;
      this.lockMode = lockMode;
    }

    @Override
    public HiveLockObject getHiveLockObject() {
      return lockObj;
    }

    @Override
    public HiveLockMode getHiveLockMode() {
      return lockMode;
    }

    @Override
    public String toString() {
      return lockMode + "=" + lockObj.getDisplayName() + "(" + lockObj.getData() + ")";
    }

    @Override
    public boolean equals(Object o) {
      if (!(o instanceof SimpleHiveLock)) {
        return false;
      }
      SimpleHiveLock simpleLock = (SimpleHiveLock) o;
      return lockObj.equals(simpleLock.getHiveLockObject()) &&
          lockMode == simpleLock.getHiveLockMode();
    }

    // Fix: equals was overridden without hashCode, breaking the contract for
    // hash-based collections; keep it consistent with equals.
    @Override
    public int hashCode() {
      return 31 * lockObj.hashCode() + lockMode.hashCode();
    }
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.common.block;
import org.openjdk.jol.info.ClassLayout;
import javax.annotation.Nullable;
import java.util.Optional;
import java.util.function.BiConsumer;
import static io.airlift.slice.SizeOf.sizeOf;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
/**
 * Block of array values, stored columnarly: one flat {@code values} block for
 * all elements, plus {@code offsets} delimiting each position's slice and an
 * optional per-position null flag array.
 */
public class ArrayBlock
        extends AbstractArrayBlock
{
    private static final int INSTANCE_SIZE = ClassLayout.parseClass(ArrayBlock.class).instanceSize();

    // First logical position inside offsets/valueIsNull used by this block.
    private final int arrayOffset;
    private final int positionCount;
    // null means the block contains no null arrays.
    private final boolean[] valueIsNull;
    private final Block values;
    // Position i covers element indices [offsets[arrayOffset + i], offsets[arrayOffset + i + 1]).
    private final int[] offsets;

    // Lazily computed; -1 means "not computed yet". The benign race on these
    // volatiles is acceptable because recomputation is idempotent.
    private volatile long sizeInBytes;
    private volatile long logicalSizeInBytes;
    private final long retainedSizeInBytes;

    /**
     * Create an array block directly from columnar nulls, values, and offsets into the values.
     * A null array must have no entries.
     */
    public static Block fromElementBlock(int positionCount, Optional<boolean[]> valueIsNull, int[] arrayOffset, Block values)
    {
        validateConstructorArguments(0, positionCount, valueIsNull.orElse(null), arrayOffset, values);
        // for performance reasons per element checks are only performed on the public construction
        for (int i = 0; i < positionCount; i++) {
            int offset = arrayOffset[i];
            int length = arrayOffset[i + 1] - offset;
            if (length < 0) {
                throw new IllegalArgumentException(format("Offset is not monotonically ascending. offsets[%s]=%s, offsets[%s]=%s", i, arrayOffset[i], i + 1, arrayOffset[i + 1]));
            }
            if (valueIsNull.isPresent() && valueIsNull.get()[i] && length != 0) {
                throw new IllegalArgumentException("A null array must have zero entries");
            }
        }
        return new ArrayBlock(0, positionCount, valueIsNull.orElse(null), arrayOffset, values);
    }

    /**
     * Create an array block directly without per element validations.
     */
    static ArrayBlock createArrayBlockInternal(int arrayOffset, int positionCount, @Nullable boolean[] valueIsNull, int[] offsets, Block values)
    {
        validateConstructorArguments(arrayOffset, positionCount, valueIsNull, offsets, values);
        return new ArrayBlock(arrayOffset, positionCount, valueIsNull, offsets, values);
    }

    // Shared structural checks for both construction paths; throws on any
    // inconsistency between the columnar arrays and the position count.
    private static void validateConstructorArguments(int arrayOffset, int positionCount, @Nullable boolean[] valueIsNull, int[] offsets, Block values)
    {
        if (arrayOffset < 0) {
            throw new IllegalArgumentException("arrayOffset is negative");
        }
        if (positionCount < 0) {
            throw new IllegalArgumentException("positionCount is negative");
        }
        if (valueIsNull != null && valueIsNull.length - arrayOffset < positionCount) {
            throw new IllegalArgumentException("isNull length is less than positionCount");
        }
        requireNonNull(offsets, "offsets is null");
        if (offsets.length - arrayOffset < positionCount + 1) {
            throw new IllegalArgumentException("offsets length is less than positionCount");
        }
        requireNonNull(values, "values is null");
    }

    /**
     * Use createArrayBlockInternal or fromElementBlock instead of this method. The caller of this method is assumed to have
     * validated the arguments with validateConstructorArguments.
     */
    private ArrayBlock(int arrayOffset, int positionCount, @Nullable boolean[] valueIsNull, int[] offsets, Block values)
    {
        // caller must check arguments with validateConstructorArguments
        this.arrayOffset = arrayOffset;
        this.positionCount = positionCount;
        this.valueIsNull = valueIsNull;
        this.offsets = offsets;
        // Consistency fix: include a message like the sibling checks in
        // validateConstructorArguments do.
        this.values = requireNonNull(values, "values is null");

        sizeInBytes = -1;
        logicalSizeInBytes = -1;
        retainedSizeInBytes = INSTANCE_SIZE + values.getRetainedSizeInBytes() + sizeOf(offsets) + sizeOf(valueIsNull);
    }

    @Override
    public int getPositionCount()
    {
        return positionCount;
    }

    @Override
    public long getSizeInBytes()
    {
        if (sizeInBytes < 0) {
            calculateSize();
        }
        return sizeInBytes;
    }

    @Override
    public long getLogicalSizeInBytes()
    {
        if (logicalSizeInBytes < 0) {
            calculateLogicalSize();
        }
        return logicalSizeInBytes;
    }

    // Region size of the used slice of values, plus per-position overhead
    // (one offset int and one null byte).
    private void calculateSize()
    {
        int valueStart = offsets[arrayOffset];
        int valueEnd = offsets[arrayOffset + positionCount];
        sizeInBytes = values.getRegionSizeInBytes(valueStart, valueEnd - valueStart) + ((Integer.BYTES + Byte.BYTES) * (long) this.positionCount);
    }

    private void calculateLogicalSize()
    {
        int valueStart = offsets[arrayOffset];
        int valueEnd = offsets[arrayOffset + positionCount];
        logicalSizeInBytes = values.getRegionLogicalSizeInBytes(valueStart, valueEnd - valueStart) + ((Integer.BYTES + Byte.BYTES) * (long) this.positionCount);
    }

    @Override
    public long getRetainedSizeInBytes()
    {
        return retainedSizeInBytes;
    }

    @Override
    public void retainedBytesForEachPart(BiConsumer<Object, Long> consumer)
    {
        consumer.accept(values, values.getRetainedSizeInBytes());
        consumer.accept(offsets, sizeOf(offsets));
        consumer.accept(valueIsNull, sizeOf(valueIsNull));
        consumer.accept(this, (long) INSTANCE_SIZE);
    }

    @Override
    protected Block getRawElementBlock()
    {
        return values;
    }

    @Override
    protected int[] getOffsets()
    {
        return offsets;
    }

    @Override
    public int getOffsetBase()
    {
        return arrayOffset;
    }

    @Override
    @Nullable
    protected boolean[] getValueIsNull()
    {
        return valueIsNull;
    }

    @Override
    public String toString()
    {
        return format("ArrayBlock(%d){positionCount=%d}", hashCode(), getPositionCount());
    }

    @Override
    public Block getLoadedBlock()
    {
        Block loadedValuesBlock = values.getLoadedBlock();

        // Already fully loaded; avoid allocating an identical copy.
        if (loadedValuesBlock == values) {
            return this;
        }
        return createArrayBlockInternal(
                arrayOffset,
                positionCount,
                valueIsNull,
                offsets,
                loadedValuesBlock);
    }

    // True only when this block IS a single-value block already (one position,
    // no sharing with a larger offsets array), so it can be returned as-is.
    private boolean isSinglePositionBlock(int position)
    {
        return position == 0 && positionCount == 1 && offsets.length == 2;
    }

    @Override
    public Block getSingleValueBlock(int position)
    {
        if (isSinglePositionBlock(position)) {
            return this;
        }
        return getSingleValueBlockInternal(position);
    }
}
| |
/**
* Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.integration.tool.portfolio.xml.v1_0.jaxb;
import java.math.BigDecimal;
import java.util.Map;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlEnumValue;
import javax.xml.bind.annotation.XmlRootElement;
import org.joda.beans.Bean;
import org.joda.beans.BeanBuilder;
import org.joda.beans.BeanDefinition;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.direct.DirectBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import org.threeten.bp.YearMonth;
import com.opengamma.financial.security.option.OptionType;
import com.opengamma.integration.tool.portfolio.xml.v1_0.conversion.ListedFutureOptionSecurityExtractor;
import com.opengamma.integration.tool.portfolio.xml.v1_0.conversion.ListedSecurityExtractor;
@BeanDefinition
@XmlRootElement(name = "futureOptionSecurity")
public class FutureOptionSecurityDefinition extends ListedSecurityDefinition {
public enum ListedFutureOptionType {
@XmlEnumValue(value = "equityIndexFutureOption")
EQUITY_INDEX_FUTURE_OPTION,
@XmlEnumValue(value = "equityDividendFutureOption")
EQUITY_DIVIDEND_FUTURE_OPTION
}
@XmlAttribute(name = "type", required = true)
@PropertyDefinition
private ListedFutureOptionType _listedFutureOptionType;
@XmlElement(name = "optionType", required = true)
@PropertyDefinition
private OptionType _optionType;
@XmlElement(name = "strike", required = true)
@PropertyDefinition
private BigDecimal _strike;
@XmlElement(name = "futureExpiry", required = true)
@PropertyDefinition
private YearMonth _futureExpiry;
/**
* At some point we may want to allow mid curve future options where
* option expiry is significantly earlier than the underlying future
*/
@XmlElement(name = "optionExpiry")
@PropertyDefinition
private YearMonth _optionExpiry;
@XmlElement(name = "exerciseType", required = true)
@PropertyDefinition
private ExerciseType _exerciseType;
@XmlElement(name = "margined", required = true)
@PropertyDefinition
private boolean _isMargined;
public ListedSecurityExtractor getSecurityExtractor() {
return new ListedFutureOptionSecurityExtractor(this);
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code FutureOptionSecurityDefinition}.
* @return the meta-bean, not null
*/
public static FutureOptionSecurityDefinition.Meta meta() {
return FutureOptionSecurityDefinition.Meta.INSTANCE;
}
static {
JodaBeanUtils.registerMetaBean(FutureOptionSecurityDefinition.Meta.INSTANCE);
}
@Override
public FutureOptionSecurityDefinition.Meta metaBean() {
return FutureOptionSecurityDefinition.Meta.INSTANCE;
}
//-----------------------------------------------------------------------
/**
* Gets the listedFutureOptionType.
* @return the value of the property
*/
public ListedFutureOptionType getListedFutureOptionType() {
return _listedFutureOptionType;
}
/**
* Sets the listedFutureOptionType.
* @param listedFutureOptionType the new value of the property
*/
public void setListedFutureOptionType(ListedFutureOptionType listedFutureOptionType) {
this._listedFutureOptionType = listedFutureOptionType;
}
/**
* Gets the the {@code listedFutureOptionType} property.
* @return the property, not null
*/
public final Property<ListedFutureOptionType> listedFutureOptionType() {
return metaBean().listedFutureOptionType().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the optionType.
* @return the value of the property
*/
public OptionType getOptionType() {
return _optionType;
}
/**
* Sets the optionType.
* @param optionType the new value of the property
*/
public void setOptionType(OptionType optionType) {
this._optionType = optionType;
}
/**
* Gets the the {@code optionType} property.
* @return the property, not null
*/
public final Property<OptionType> optionType() {
return metaBean().optionType().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the strike.
* @return the value of the property
*/
public BigDecimal getStrike() {
return _strike;
}
/**
* Sets the strike.
* @param strike the new value of the property
*/
public void setStrike(BigDecimal strike) {
this._strike = strike;
}
/**
* Gets the the {@code strike} property.
* @return the property, not null
*/
public final Property<BigDecimal> strike() {
return metaBean().strike().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the futureExpiry.
* @return the value of the property
*/
public YearMonth getFutureExpiry() {
return _futureExpiry;
}
/**
* Sets the futureExpiry.
* @param futureExpiry the new value of the property
*/
public void setFutureExpiry(YearMonth futureExpiry) {
this._futureExpiry = futureExpiry;
}
/**
* Gets the the {@code futureExpiry} property.
* @return the property, not null
*/
public final Property<YearMonth> futureExpiry() {
return metaBean().futureExpiry().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets at some point we may want to allow mid curve future options where
* option expiry is significantly earlier than the underlying future
* @return the value of the property
*/
public YearMonth getOptionExpiry() {
return _optionExpiry;
}
/**
* Sets at some point we may want to allow mid curve future options where
* option expiry is significantly earlier than the underlying future
* @param optionExpiry the new value of the property
*/
public void setOptionExpiry(YearMonth optionExpiry) {
this._optionExpiry = optionExpiry;
}
/**
* Gets the the {@code optionExpiry} property.
* option expiry is significantly earlier than the underlying future
* @return the property, not null
*/
public final Property<YearMonth> optionExpiry() {
return metaBean().optionExpiry().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the exerciseType.
* @return the value of the property
*/
public ExerciseType getExerciseType() {
return _exerciseType;
}
/**
* Sets the exerciseType.
* @param exerciseType the new value of the property
*/
public void setExerciseType(ExerciseType exerciseType) {
this._exerciseType = exerciseType;
}
/**
* Gets the the {@code exerciseType} property.
* @return the property, not null
*/
public final Property<ExerciseType> exerciseType() {
return metaBean().exerciseType().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the isMargined.
* @return the value of the property
*/
public boolean isIsMargined() {
return _isMargined;
}
/**
* Sets the isMargined.
* @param isMargined the new value of the property
*/
public void setIsMargined(boolean isMargined) {
this._isMargined = isMargined;
}
/**
* Gets the the {@code isMargined} property.
* @return the property, not null
*/
public final Property<Boolean> isMargined() {
return metaBean().isMargined().createProperty(this);
}
//-----------------------------------------------------------------------
@Override
public FutureOptionSecurityDefinition clone() {
return JodaBeanUtils.cloneAlways(this);
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj != null && obj.getClass() == this.getClass()) {
FutureOptionSecurityDefinition other = (FutureOptionSecurityDefinition) obj;
return JodaBeanUtils.equal(getListedFutureOptionType(), other.getListedFutureOptionType()) &&
JodaBeanUtils.equal(getOptionType(), other.getOptionType()) &&
JodaBeanUtils.equal(getStrike(), other.getStrike()) &&
JodaBeanUtils.equal(getFutureExpiry(), other.getFutureExpiry()) &&
JodaBeanUtils.equal(getOptionExpiry(), other.getOptionExpiry()) &&
JodaBeanUtils.equal(getExerciseType(), other.getExerciseType()) &&
(isIsMargined() == other.isIsMargined()) &&
super.equals(obj);
}
return false;
}
@Override
public int hashCode() {
int hash = 7;
hash = hash * 31 + JodaBeanUtils.hashCode(getListedFutureOptionType());
hash = hash * 31 + JodaBeanUtils.hashCode(getOptionType());
hash = hash * 31 + JodaBeanUtils.hashCode(getStrike());
hash = hash * 31 + JodaBeanUtils.hashCode(getFutureExpiry());
hash = hash * 31 + JodaBeanUtils.hashCode(getOptionExpiry());
hash = hash * 31 + JodaBeanUtils.hashCode(getExerciseType());
hash = hash * 31 + JodaBeanUtils.hashCode(isIsMargined());
return hash ^ super.hashCode();
}
@Override
public String toString() {
StringBuilder buf = new StringBuilder(256);
buf.append("FutureOptionSecurityDefinition{");
int len = buf.length();
toString(buf);
if (buf.length() > len) {
buf.setLength(buf.length() - 2);
}
buf.append('}');
return buf.toString();
}
@Override
protected void toString(StringBuilder buf) {
super.toString(buf);
buf.append("listedFutureOptionType").append('=').append(JodaBeanUtils.toString(getListedFutureOptionType())).append(',').append(' ');
buf.append("optionType").append('=').append(JodaBeanUtils.toString(getOptionType())).append(',').append(' ');
buf.append("strike").append('=').append(JodaBeanUtils.toString(getStrike())).append(',').append(' ');
buf.append("futureExpiry").append('=').append(JodaBeanUtils.toString(getFutureExpiry())).append(',').append(' ');
buf.append("optionExpiry").append('=').append(JodaBeanUtils.toString(getOptionExpiry())).append(',').append(' ');
buf.append("exerciseType").append('=').append(JodaBeanUtils.toString(getExerciseType())).append(',').append(' ');
buf.append("isMargined").append('=').append(JodaBeanUtils.toString(isIsMargined())).append(',').append(' ');
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code FutureOptionSecurityDefinition}.
*/
public static class Meta extends ListedSecurityDefinition.Meta {
// Joda-Bean AUTOGENERATED meta-bean: exposes one MetaProperty per bean
// property plus reflective get/set dispatch. Do not hand-edit the switch
// case constants below — each must equal String.hashCode() of the
// corresponding property name; regenerate instead.
/**
* The singleton instance of the meta-bean.
*/
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code listedFutureOptionType} property.
*/
private final MetaProperty<ListedFutureOptionType> _listedFutureOptionType = DirectMetaProperty.ofReadWrite(
this, "listedFutureOptionType", FutureOptionSecurityDefinition.class, ListedFutureOptionType.class);
/**
* The meta-property for the {@code optionType} property.
*/
private final MetaProperty<OptionType> _optionType = DirectMetaProperty.ofReadWrite(
this, "optionType", FutureOptionSecurityDefinition.class, OptionType.class);
/**
* The meta-property for the {@code strike} property.
*/
private final MetaProperty<BigDecimal> _strike = DirectMetaProperty.ofReadWrite(
this, "strike", FutureOptionSecurityDefinition.class, BigDecimal.class);
/**
* The meta-property for the {@code futureExpiry} property.
*/
private final MetaProperty<YearMonth> _futureExpiry = DirectMetaProperty.ofReadWrite(
this, "futureExpiry", FutureOptionSecurityDefinition.class, YearMonth.class);
/**
* The meta-property for the {@code optionExpiry} property.
*/
private final MetaProperty<YearMonth> _optionExpiry = DirectMetaProperty.ofReadWrite(
this, "optionExpiry", FutureOptionSecurityDefinition.class, YearMonth.class);
/**
* The meta-property for the {@code exerciseType} property.
*/
private final MetaProperty<ExerciseType> _exerciseType = DirectMetaProperty.ofReadWrite(
this, "exerciseType", FutureOptionSecurityDefinition.class, ExerciseType.class);
/**
* The meta-property for the {@code isMargined} property.
*/
private final MetaProperty<Boolean> _isMargined = DirectMetaProperty.ofReadWrite(
this, "isMargined", FutureOptionSecurityDefinition.class, Boolean.TYPE);
/**
* The meta-properties.
*/
// Combines this class's properties with those inherited from
// ListedSecurityDefinition.Meta (passed in as the parent map).
private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap(
this, (DirectMetaPropertyMap) super.metaPropertyMap(),
"listedFutureOptionType",
"optionType",
"strike",
"futureExpiry",
"optionExpiry",
"exerciseType",
"isMargined");
/**
* Restricted constructor.
*/
protected Meta() {
}
@Override
protected MetaProperty<?> metaPropertyGet(String propertyName) {
// Dispatch on the precomputed hash of the property name; unmatched
// names fall through to the superclass for inherited properties.
switch (propertyName.hashCode()) {
case 1200280399: // listedFutureOptionType
return _listedFutureOptionType;
case 1373587791: // optionType
return _optionType;
case -891985998: // strike
return _strike;
case 797235414: // futureExpiry
return _futureExpiry;
case 1032553992: // optionExpiry
return _optionExpiry;
case -466331342: // exerciseType
return _exerciseType;
case -549878249: // isMargined
return _isMargined;
}
return super.metaPropertyGet(propertyName);
}
@Override
public BeanBuilder<? extends FutureOptionSecurityDefinition> builder() {
return new DirectBeanBuilder<FutureOptionSecurityDefinition>(new FutureOptionSecurityDefinition());
}
@Override
public Class<? extends FutureOptionSecurityDefinition> beanType() {
return FutureOptionSecurityDefinition.class;
}
@Override
public Map<String, MetaProperty<?>> metaPropertyMap() {
return _metaPropertyMap$;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code listedFutureOptionType} property.
* @return the meta-property, not null
*/
public final MetaProperty<ListedFutureOptionType> listedFutureOptionType() {
return _listedFutureOptionType;
}
/**
* The meta-property for the {@code optionType} property.
* @return the meta-property, not null
*/
public final MetaProperty<OptionType> optionType() {
return _optionType;
}
/**
* The meta-property for the {@code strike} property.
* @return the meta-property, not null
*/
public final MetaProperty<BigDecimal> strike() {
return _strike;
}
/**
* The meta-property for the {@code futureExpiry} property.
* @return the meta-property, not null
*/
public final MetaProperty<YearMonth> futureExpiry() {
return _futureExpiry;
}
/**
* The meta-property for the {@code optionExpiry} property.
* @return the meta-property, not null
*/
public final MetaProperty<YearMonth> optionExpiry() {
return _optionExpiry;
}
/**
* The meta-property for the {@code exerciseType} property.
* @return the meta-property, not null
*/
public final MetaProperty<ExerciseType> exerciseType() {
return _exerciseType;
}
/**
* The meta-property for the {@code isMargined} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> isMargined() {
return _isMargined;
}
//-----------------------------------------------------------------------
@Override
protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
// 'quiet' is not consulted here; it is forwarded to the superclass
// which handles inherited and unknown property names.
switch (propertyName.hashCode()) {
case 1200280399: // listedFutureOptionType
return ((FutureOptionSecurityDefinition) bean).getListedFutureOptionType();
case 1373587791: // optionType
return ((FutureOptionSecurityDefinition) bean).getOptionType();
case -891985998: // strike
return ((FutureOptionSecurityDefinition) bean).getStrike();
case 797235414: // futureExpiry
return ((FutureOptionSecurityDefinition) bean).getFutureExpiry();
case 1032553992: // optionExpiry
return ((FutureOptionSecurityDefinition) bean).getOptionExpiry();
case -466331342: // exerciseType
return ((FutureOptionSecurityDefinition) bean).getExerciseType();
case -549878249: // isMargined
return ((FutureOptionSecurityDefinition) bean).isIsMargined();
}
return super.propertyGet(bean, propertyName, quiet);
}
@Override
protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
// Mirrors propertyGet: hash-dispatched setters, superclass fallback.
switch (propertyName.hashCode()) {
case 1200280399: // listedFutureOptionType
((FutureOptionSecurityDefinition) bean).setListedFutureOptionType((ListedFutureOptionType) newValue);
return;
case 1373587791: // optionType
((FutureOptionSecurityDefinition) bean).setOptionType((OptionType) newValue);
return;
case -891985998: // strike
((FutureOptionSecurityDefinition) bean).setStrike((BigDecimal) newValue);
return;
case 797235414: // futureExpiry
((FutureOptionSecurityDefinition) bean).setFutureExpiry((YearMonth) newValue);
return;
case 1032553992: // optionExpiry
((FutureOptionSecurityDefinition) bean).setOptionExpiry((YearMonth) newValue);
return;
case -466331342: // exerciseType
((FutureOptionSecurityDefinition) bean).setExerciseType((ExerciseType) newValue);
return;
case -549878249: // isMargined
((FutureOptionSecurityDefinition) bean).setIsMargined((Boolean) newValue);
return;
}
super.propertySet(bean, propertyName, newValue, quiet);
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
| |
/*******************************************************************************
* Copyright (c) Intel Corporation
* Copyright (c) 2017
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package org.osc.core.broker.model.image;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.osc.core.broker.util.ValidateUtil;
import org.osc.core.broker.util.VersionUtil.Version;
import org.osc.core.common.virtualization.OpenstackSoftwareVersion;
import org.osc.core.common.virtualization.VirtualizationType;
import org.osc.sdk.controller.TagEncapsulationType;
/**
 * Bean describing the metadata bundled with an appliance image
 * (deserialized from {@link #META_FILE_NAME}). Carries versioning,
 * sizing and virtualization information, plus free-form image and
 * config-drive properties.
 */
public class ImageMetadata {

    /** Name of the metadata file contained in the image payload. */
    public static final String META_FILE_NAME = "meta.json";

    private String metaDataVersion;
    private String model;
    private String managerType;
    private String managerVersion;
    private String virtualizationType;
    private String virtualizationVersion;
    private String softwareVersion;
    private String imageName;
    private Version minIscVersion;
    private Integer minCpus;
    private Integer memoryInMb;
    private Integer diskSizeInGb;
    private boolean additionalNicForInspection;

    // Diamond operator for consistency with the map fields below.
    private List<String> encapsulationTypes = new ArrayList<>();

    /**
     * The key value pair properties will be included as part of the glance image properties.
     */
    private Map<String, String> imageProperties = new HashMap<>();

    /**
     * The config properties will be included in the config-drive content file.
     */
    private Map<String, String> configProperties = new HashMap<>();

    public ImageMetadata() {
    }

    public String getMetaDataVersion() {
        return this.metaDataVersion;
    }

    public String getModel() {
        return this.model;
    }

    public String getManagerVersion() {
        return this.managerVersion;
    }

    public String getSoftwareVersion() {
        return this.softwareVersion;
    }

    public Integer getMinCpus() {
        return this.minCpus;
    }

    public Integer getMemoryInMb() {
        return this.memoryInMb;
    }

    public Integer getDiskSizeInGb() {
        return this.diskSizeInGb;
    }

    public Version getMinIscVersion() {
        return this.minIscVersion;
    }

    /** Raw string form of the manager type (same value as {@link #getManagerType()}). */
    public String getManagerTypeString() {
        return this.managerType;
    }

    /** Raw string form, as read from the metadata file; see {@link #getVirtualizationType()} for the parsed enum. */
    public String getVirtualizationTypeString() {
        return this.virtualizationType;
    }

    /** Raw string form; see {@link #getOpenstackVirtualizationVersion()} for the parsed enum. */
    public String getVirtualizationVersionString() {
        return this.virtualizationVersion;
    }

    public String getManagerType() {
        return this.managerType;
    }

    /** Parses the virtualization-type string into its enum value. */
    public VirtualizationType getVirtualizationType() {
        return VirtualizationType.fromText(this.virtualizationType);
    }

    /** Parses the virtualization-version string into its enum value. */
    public OpenstackSoftwareVersion getOpenstackVirtualizationVersion() {
        return OpenstackSoftwareVersion.fromText(this.virtualizationVersion);
    }

    /**
     * Converts the raw encapsulation-type strings into their enum equivalents.
     *
     * @return a new list containing one parsed {@link TagEncapsulationType} per entry
     */
    public List<TagEncapsulationType> getEncapsulationTypes() {
        // Presized to avoid intermediate growth; one output element per input.
        List<TagEncapsulationType> typesEnum = new ArrayList<>(this.encapsulationTypes.size());
        for (String type : this.encapsulationTypes) {
            typesEnum.add(TagEncapsulationType.fromText(type));
        }
        return typesEnum;
    }

    public void setMetaDataVersion(String metaDataVersion) {
        this.metaDataVersion = metaDataVersion;
    }

    public void setModel(String model) {
        this.model = model;
    }

    public void setManagerType(String managerType) {
        this.managerType = managerType;
    }

    public void setManagerVersion(String managerVersion) {
        this.managerVersion = managerVersion;
    }

    public void setVirtualizationType(String virtualizationType) {
        this.virtualizationType = virtualizationType;
    }

    public void setVirtualizationVersion(String virtualizationVersion) {
        this.virtualizationVersion = virtualizationVersion;
    }

    public void setSoftwareVersion(String softwareVersion) {
        this.softwareVersion = softwareVersion;
    }

    public String getImageName() {
        return this.imageName;
    }

    public void setImageName(String imageName) {
        this.imageName = imageName;
    }

    public void setMinIscVersion(Version minIscVersion) {
        this.minIscVersion = minIscVersion;
    }

    public void setMinCpus(int minCpus) {
        this.minCpus = minCpus;
    }

    public void setMemoryInMb(int memoryInMb) {
        this.memoryInMb = memoryInMb;
    }

    public void setDiskSizeInGb(int diskSizeInGb) {
        this.diskSizeInGb = diskSizeInGb;
    }

    public Map<String, String> getImageProperties() {
        return this.imageProperties;
    }

    public void setImageProperties(Map<String, String> glanceProperties) {
        this.imageProperties = glanceProperties;
    }

    public Map<String, String> getConfigProperties() {
        return this.configProperties;
    }

    public void setConfigProperties(Map<String, String> configProperties) {
        this.configProperties = configProperties;
    }

    public boolean hasAdditionalNicForInspection() {
        return this.additionalNicForInspection;
    }

    /**
     * Validates that all mandatory metadata fields are non-null.
     *
     * @param dto the metadata to validate
     * @throws Exception if any required field is missing (propagated from
     *         {@code ValidateUtil.checkForNullFields})
     */
    public static void checkForNullFields(ImageMetadata dto) throws Exception {
        Map<String, Object> notNullFieldsMap = new HashMap<>();
        notNullFieldsMap.put("Image Name", dto.getImageName());
        notNullFieldsMap.put("Software Version", dto.getSoftwareVersion());
        notNullFieldsMap.put("Manager Version", dto.getManagerVersion());
        notNullFieldsMap.put("Model", dto.getModel());
        notNullFieldsMap.put("Metadata Version", dto.getMetaDataVersion());
        notNullFieldsMap.put("Virtualization Type", dto.getVirtualizationTypeString());
        notNullFieldsMap.put("Virtualization Version", dto.getVirtualizationVersionString());
        notNullFieldsMap.put("Manager Type", dto.getManagerTypeString());
        notNullFieldsMap.put("Minimum OSC Version", dto.getMinIscVersion());
        notNullFieldsMap.put("Disk Size", dto.getDiskSizeInGb());
        notNullFieldsMap.put("Memory", dto.getMemoryInMb());
        notNullFieldsMap.put("Minimum CPU", dto.getMinCpus());
        ValidateUtil.checkForNullFields(notNullFieldsMap);
    }
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package io.github.flyboy8869.doctracker;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.Transferable;
import java.awt.datatransfer.UnsupportedFlavorException;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Files;
import static java.nio.file.LinkOption.NOFOLLOW_LINKS;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.TreeMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.DefaultListModel;
import javax.swing.JFileChooser;
import javax.swing.event.ListSelectionListener;
import javax.swing.JOptionPane;
import javax.swing.TransferHandler;
import javax.swing.event.ListSelectionEvent;
/**
*
* @author charles
*/
/**
 * Simple Swing application that tracks documents per assembly.
 * Assemblies and their documents are kept in a sorted map and persisted
 * to a flat file ("doctracker.data") on exit. Documents can be added via
 * a file chooser or by dragging files onto the document list.
 *
 * @author charles
 */
public class DocTracker extends javax.swing.JFrame {

    /**
     * Creates new form DocTracker: wires list models, selection listeners,
     * the drag-and-drop handler and the window-close hook, then loads any
     * previously saved data.
     */
    public DocTracker() {
        initComponents();
        jlstDocuments.setTransferHandler(new ListTransferHandler());
        jlstAssemblies.setModel(new DefaultListModel<String>());
        jlstDocuments.setModel(new DefaultListModel<String>());
        jlstAssemblies.addListSelectionListener(new ListSelectionListener() {
            @Override
            public void valueChanged(ListSelectionEvent e) {
                if (e.getValueIsAdjusting() == false) {
                    if (jlstAssemblies.getSelectedIndex() != -1) {
                        // Show appropriate documents for selected assembly
                        updateDocList((String) jlstAssemblies.getSelectedValue());
                    }
                }
            }
        });
        jlstDocuments.addListSelectionListener(new ListSelectionListener() {
            @Override
            public void valueChanged(ListSelectionEvent e) {
                if (e.getValueIsAdjusting() == false) {
                    // Show the full path of the selected document in the status bar.
                    jlblStatus.setText(getDocumentLongName());
                }
            }
        });
        windowHandlers = new ApplicationTermination();
        addWindowListener(windowHandlers);
        loadData();
    }

    /** Persists dirty data when the window is closed. */
    class ApplicationTermination extends WindowAdapter {
        @Override
        public void windowClosing(WindowEvent e) {
            System.out.println("closing application...");
            saveData();
        }

        /**
         * Writes the assembly/document map to "doctracker.data", one line per
         * assembly in the form {@code assembly:short=long, short=long}.
         * Does nothing unless the data changed since the last load/save.
         */
        public void saveData() {
            if (!dataIsDirty) {
                return;
            }
            System.out.println("saving data...");
            // try-with-resources closes (and flushes) the writer even if a
            // write fails part-way through; the original leaked it on error.
            try (BufferedWriter bw = new BufferedWriter(new FileWriter("doctracker.data"))) {
                for (String assembly : assemblies.keySet()) {
                    StringBuilder sb = new StringBuilder();
                    sb.append(assembly);
                    sb.append(":");
                    // Serialize the doc map via its toString() and strip the
                    // surrounding braces; loadData() parses this format back.
                    HashMap<String, String> docMap = assemblies.get(assembly);
                    String docMapString = docMap.toString();
                    docMapString = docMapString.replace("{", "");
                    docMapString = docMapString.replace("}", "");
                    sb.append(docMapString);
                    sb.append('\n');
                    bw.write(sb.toString());
                }
            } catch (IOException ex) {
                Logger.getLogger(DocTracker.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
    }

    /** Accepts file drops on the document list and adds them to the selected assembly. */
    class ListTransferHandler extends TransferHandler {
        @Override
        public boolean importData(TransferSupport support) {
            if (!canImport(support)) {
                return false;
            }
            Transferable transferable = support.getTransferable();
            Object obj;
            ArrayList<File> files = new ArrayList<>();
            try {
                obj = transferable.getTransferData(DataFlavor.javaFileListFlavor);
            } catch (UnsupportedFlavorException | IOException ex) {
                return false;
            }
            // javaFileListFlavor is specified to deliver a java.util.List of
            // File; casting to ArrayList (as before) risked a ClassCastException
            // depending on the platform's Transferable implementation.
            for (Object o : (java.util.List<?>) obj) {
                files.add((File) o);
            }
            addDocuments(files.toArray(new File[files.size()]));
            return true;
        }

        @Override
        public boolean canImport(TransferSupport support) {
            // this handler only supports drop not paste operations
            if (!support.isDrop()) {
                return false;
            }
            // BUG FIX: importData() reads javaFileListFlavor, so that is the
            // flavor that must be required here (was stringFlavor, which did
            // not match what importData actually consumes).
            if (!support.isDataFlavorSupported(DataFlavor.javaFileListFlavor)) {
                return false;
            }
            boolean copySupported = (TransferHandler.COPY & support.getSourceDropActions()) == TransferHandler.COPY;
            if (copySupported) {
                support.setDropAction(TransferHandler.COPY);
                return true;
            }
            return false;
        }
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        jFileChooser = new javax.swing.JFileChooser();
        jScrollPane1 = new javax.swing.JScrollPane();
        jLabel1 = new javax.swing.JLabel();
        jScrollPane2 = new javax.swing.JScrollPane();
        jlblStatus = new javax.swing.JLabel();
        jMenuBar1 = new javax.swing.JMenuBar();
        jmFile = new javax.swing.JMenu();
        jsmiFileAdd = new javax.swing.JMenu();
        jmiFileAddAssembly = new javax.swing.JMenuItem();
        jmiFileAddDocument = new javax.swing.JMenuItem();
        jSeparator1 = new javax.swing.JPopupMenu.Separator();
        jmiFileExit = new javax.swing.JMenuItem();
        jmSettings = new javax.swing.JMenu();
        jFileChooser.setMultiSelectionEnabled(true);
        setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
        jlstAssemblies.setSelectionMode(javax.swing.ListSelectionModel.SINGLE_SELECTION);
        jScrollPane1.setViewportView(jlstAssemblies);
        jLabel1.setText("Assembly:");
        jlblDocuments.setText("Documents for:");
        jlstDocuments.setDropMode(javax.swing.DropMode.ON);
        jScrollPane2.setViewportView(jlstDocuments);
        jmFile.setText("File");
        jsmiFileAdd.setText("Add");
        jmiFileAddAssembly.setText("Add Assembly");
        jmiFileAddAssembly.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jmiFileAddAssemblyActionPerformed(evt);
            }
        });
        jsmiFileAdd.add(jmiFileAddAssembly);
        jmiFileAddDocument.setText("Add Document");
        jmiFileAddDocument.setEnabled(false);
        jmiFileAddDocument.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jmiFileAddDocumentActionPerformed(evt);
            }
        });
        jsmiFileAdd.add(jmiFileAddDocument);
        jmFile.add(jsmiFileAdd);
        jmFile.add(jSeparator1);
        jmiFileExit.setText("Exit");
        jmiFileExit.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jmiFileExitActionPerformed(evt);
            }
        });
        jmFile.add(jmiFileExit);
        jMenuBar1.add(jmFile);
        jmSettings.setText("Settings");
        jMenuBar1.add(jmSettings);
        setJMenuBar(jMenuBar1);
        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(jlblStatus, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                    .addGroup(layout.createSequentialGroup()
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                            .addComponent(jLabel1)
                            .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 247, javax.swing.GroupLayout.PREFERRED_SIZE))
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                            .addGroup(layout.createSequentialGroup()
                                .addComponent(jlblDocuments)
                                .addGap(0, 0, Short.MAX_VALUE))
                            .addComponent(jScrollPane2, javax.swing.GroupLayout.DEFAULT_SIZE, 456, Short.MAX_VALUE))))
                .addContainerGap())
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(jLabel1)
                    .addComponent(jlblDocuments))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 287, Short.MAX_VALUE)
                    .addComponent(jScrollPane2))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(jlblStatus)
                .addContainerGap())
        );
        pack();
    }// </editor-fold>//GEN-END:initComponents

    private void jmiFileExitActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jmiFileExitActionPerformed
        windowHandlers.saveData();
        System.exit(0);
    }//GEN-LAST:event_jmiFileExitActionPerformed

    private void jmiFileAddAssemblyActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jmiFileAddAssemblyActionPerformed
        String assembly = JOptionPane.showInputDialog(rootPane, "Enter an assembly:");
        if ((assembly != null) && (!assembly.isEmpty())) {
            DefaultListModel<String> assembliesListModel = (DefaultListModel<String>) jlstAssemblies.getModel();
            if (addAssemblyToModelSorted(assembliesListModel, assembly)) {
                assemblies.put(assembly, new HashMap<String, String>());
                jlstAssemblies.setSelectedIndex(assembliesListModel.indexOf(assembly));
                updateDocList(assembly);
                this.jmiFileAddDocument.setEnabled(true);
                dataIsDirty = true;
            }
        }
    }//GEN-LAST:event_jmiFileAddAssemblyActionPerformed

    private void jmiFileAddDocumentActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jmiFileAddDocumentActionPerformed
        int retVal = jFileChooser.showOpenDialog(this);
        if (retVal == JFileChooser.APPROVE_OPTION) {
            File[] files = jFileChooser.getSelectedFiles();
            addDocuments(files);
        }
    }//GEN-LAST:event_jmiFileAddDocumentActionPerformed

    /**
     * Adds the given files to the currently selected assembly and refreshes
     * the document list. Ignores the call when no assembly is selected.
     */
    private void addDocuments(File[] files) {
        String selectedAssembly = (String) jlstAssemblies.getSelectedValue();
        if (selectedAssembly == null) {
            // BUG FIX: a drop before any assembly exists used to reach
            // TreeMap.get(null), which throws NullPointerException.
            return;
        }
        HashMap<String, String> assemblyDocs = assemblies.get(selectedAssembly);
        for (File file : files) {
            if (addDocumentToMap(assemblyDocs, file.getPath())) {
                updateDocList(selectedAssembly);
                dataIsDirty = true;
            }
        }
    }

    /**
     * Inserts an assembly name into the list model, keeping it sorted.
     *
     * @param model the list model to insert into
     * @param assembly the assembly name
     * @return true if inserted, false if it was already present
     */
    boolean addAssemblyToModelSorted(DefaultListModel<String> model, String assembly) {
        Object[] ass = model.toArray();
        boolean retVal = false;
        int index = Arrays.binarySearch(ass, assembly);
        if (index < 0) {
            // binarySearch returns -(insertionPoint) - 1 for a miss.
            model.add(-(index) - 1, assembly);
            retVal = true;
        }
        return retVal;
    }

    /**
     * Adds a document path to an assembly's map keyed by its file name.
     *
     * @param list map of short name -> full path for one assembly
     * @param item full path of the document
     * @return true if added, false if a document with that name already exists
     */
    private boolean addDocumentToMap(HashMap<String, String> list, String item) {
        boolean retcode = false;
        // BUG FIX: derive the short name via File rather than splitting on
        // "/", which failed for Windows-style '\\' separators.
        String shortName = new File(item).getName();
        if (!list.containsKey(shortName)) {
            list.put(shortName, item);
            retcode = true;
        }
        return retcode;
    }

    /**
     * @return the full path of the currently selected document, or an empty
     *         string when no assembly/document is selected
     */
    private String getDocumentLongName() {
        String currentAssembly = (String) jlstAssemblies.getSelectedValue();
        String currentSelection = (String) jlstDocuments.getSelectedValue();
        if (currentAssembly == null || currentSelection == null) {
            // BUG FIX: assemblies is a TreeMap, so get(null) would throw NPE.
            return "";
        }
        HashMap<String, String> docList = assemblies.get(currentAssembly);
        if (docList == null) {
            return "";
        }
        String longName = docList.get(currentSelection);
        return longName == null ? "" : longName;
    }

    /**
     * Loads assembly and document data into their respective lists.
     * Silently returns if the data file does not exist.
     */
    private void loadData() {
        if (!Files.exists(Paths.get("doctracker.data"), NOFOLLOW_LINKS)) {
            System.out.println("doctracker.data does not exist...");
            return;
        }
        // try-with-resources: the reader used to be leaked on every path.
        try (BufferedReader br = new BufferedReader(new FileReader("doctracker.data"))) {
            StringTokenizer st;
            DefaultListModel<String> model = (DefaultListModel<String>) jlstAssemblies.getModel();
            String[] data;
            String assembly;
            HashMap<String, String> docs;
            String line;
            while ((line = br.readLine()) != null) {
                if (line.isEmpty()) {
                    continue;
                }
                docs = new HashMap<>();
                // Format written by saveData(): "assembly:short=long, short=long"
                data = line.split(":");
                assembly = data[0];
                if (data.length > 1) {
                    st = new StringTokenizer(data[1]);
                    while (st.hasMoreTokens()) {
                        String token = st.nextToken(",");
                        String[] elements = token.split("=");
                        docs.put(elements[0].trim(), elements[1].trim());
                    }
                }
                if ((addAssemblyToModelSorted(model, assembly))) {
                    assemblies.put(assembly, docs);
                }
            }
            jlstAssemblies.setSelectedIndex(0);
            jmiFileAddDocument.setEnabled(true);
            dataIsDirty = false;
        } catch (IOException ex) {
            Logger.getLogger(DocTracker.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Rebuilds the document list for the given assembly, inserting entries
     * in case-insensitive sorted order, and updates the header label.
     */
    private void updateDocList(String assembly) {
        HashMap<String, String> docList = assemblies.get(assembly);
        DefaultListModel<String> docListModel = (DefaultListModel<String>) jlstDocuments.getModel();
        docListModel.clear();
        jlblDocuments.setText("Documents for " + assembly + ":");
        // check for null after clearing the model so the display
        // updates correctly i.e., shows no documents for an empty assembly list
        if (docList == null) {
            return;
        }
        for (String shortName : docList.keySet()) {
            // Re-snapshot the (lower-cased) model each iteration because each
            // insertion shifts the binary-search insertion points.
            ArrayList<String> docs = new ArrayList<>();
            for (Object obj : docListModel.toArray()) {
                docs.add(((String) obj).toLowerCase());
            }
            int index = Arrays.binarySearch(docs.toArray(new String[docs.size()]), shortName.toLowerCase());
            if (index < 0) {
                docListModel.add(-(index) - 1, shortName);
            }
        }
    }

    /**
     * @param args the command line arguments
     */
    public static void main(String args[]) {
        /* Set the Nimbus look and feel */
        //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
        /* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
         * For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
         */
        try {
            for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
                if ("Nimbus".equals(info.getName())) {
                    javax.swing.UIManager.setLookAndFeel(info.getClassName());
                    break;
                }
            }
        } catch (ClassNotFoundException ex) {
            java.util.logging.Logger.getLogger(DocTracker.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (InstantiationException ex) {
            java.util.logging.Logger.getLogger(DocTracker.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (IllegalAccessException ex) {
            java.util.logging.Logger.getLogger(DocTracker.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (javax.swing.UnsupportedLookAndFeelException ex) {
            java.util.logging.Logger.getLogger(DocTracker.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        }
        //</editor-fold>
        /* Create and display the form */
        java.awt.EventQueue.invokeLater(new Runnable() {
            @Override
            public void run() {
                new DocTracker().setVisible(true);
            }
        });
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JFileChooser jFileChooser;
    private javax.swing.JLabel jLabel1;
    private javax.swing.JMenuBar jMenuBar1;
    private javax.swing.JScrollPane jScrollPane1;
    private javax.swing.JScrollPane jScrollPane2;
    private javax.swing.JPopupMenu.Separator jSeparator1;
    final javax.swing.JLabel jlblDocuments = new javax.swing.JLabel();
    private javax.swing.JLabel jlblStatus;
    final javax.swing.JList jlstAssemblies = new javax.swing.JList();
    final javax.swing.JList jlstDocuments = new javax.swing.JList();
    private javax.swing.JMenu jmFile;
    private javax.swing.JMenu jmSettings;
    private javax.swing.JMenuItem jmiFileAddAssembly;
    private javax.swing.JMenuItem jmiFileAddDocument;
    private javax.swing.JMenuItem jmiFileExit;
    private javax.swing.JMenu jsmiFileAdd;
    // End of variables declaration//GEN-END:variables
    // Sorted map of assembly name -> (document short name -> full path).
    final Map<String, HashMap<String, String>> assemblies = new TreeMap<>();
    //final JFileChooser fileChooser = new JFileChooser();
    private final ApplicationTermination windowHandlers;
    private boolean dataIsDirty;
}
| |
package com.adobe.epubcheck.ctc.xml;
import static com.adobe.epubcheck.opf.OPFChecker30.isBlessedAudioType;
import static com.adobe.epubcheck.opf.OPFChecker30.isCommonVideoType;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Locale;
import java.util.Stack;
import org.xml.sax.Attributes;
import org.xml.sax.Locator;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
import com.adobe.epubcheck.api.EPUBLocation;
import com.adobe.epubcheck.api.Report;
import com.adobe.epubcheck.messages.MessageId;
import com.adobe.epubcheck.util.EPUBVersion;
import com.adobe.epubcheck.util.EpubConstants;
import com.adobe.epubcheck.util.NamespaceHelper;
/**
* === WARNING ==========================================<br/>
* This class is scheduled to be refactored and integrated<br/>
* in another package.<br/>
* Please keep changes minimal (bug fixes only) until then.<br/>
* ========================================================<br/>
*/
public class HTMLTagsAnalyseHandler extends DefaultHandler
{
private String fileName;
private Report report;
private final HashSet<String> html5SpecTags;
private final HashSet<String> html4SpecTags;
private final HashSet<String> nonTextTagsAlt;
private final HashSet<String> nonTextTagsTitle;
private final HashSet<String> headerTags;
private final Stack<String> tagStack;
private int html4SpecTagsCounter = 0;
private int html5SpecTagsCounter = 0;
private final ArrayList<Integer> listItemCounters;
private HashMap<String, ControlMark> formInputMarks;
private Locator locator;
private boolean hasTitle;
private boolean inTitle;
private boolean inFigure;
private boolean inBlockQuote;
private NamespaceHelper namespaceHelper = new NamespaceHelper();
private final int HAS_INPUT = 1;
private final int HAS_LABEL = 2;
private boolean hasViewport = false;
private boolean isFixed = false;
private int landmarkNavCount = 0;
private EPUBVersion version;
/**
 * @return the current value of the landmark-nav counter (maintained
 *         while parsing; incremented outside this accessor)
 */
public int getLandmarkNavCount()
{
  return this.landmarkNavCount;
}
/**
 * @return how many HTML5-only elements have been seen so far
 *         (counter incremented in startElement)
 */
public int getHtml5SpecTagsCounter()
{
  return this.html5SpecTagsCounter;
}
/**
 * Builds the tag lookup sets used during parsing and initializes the
 * per-document bookkeeping structures (tag stack, list-item counters,
 * form-control map).
 */
public HTMLTagsAnalyseHandler()
{
  // Elements introduced by HTML5 (used to recognize HTML5-only markup).
  String[] html5Only =
  {
    "article", "aside", "audio", "bdi", "canvas", "command", "datalist", "details", "dialog", "embed", "figcaption",
    "figure", "footer", "header", "hgroup", "keygen", "mark", "meter", "nav", "output", "progress", "rp", "rt", "ruby",
    "section", "source", "summary", "time", "track", "wbr", "video"
  };
  // Elements that were dropped after HTML4.
  String[] html4Only =
  {
    "acronym", "applet", "basefont", "big", "center", "dir", "font", "frame", "frameset", "noframes", "strike"
  };
  // Non-text elements expected to carry an alt attribute.
  String[] altRequired =
  {
    "img", "area", // images
  };
  // Non-text elements expected to carry a title attribute.
  String[] titleRequired =
  {
    "map", "figure", // images
    "audio", // audio
    "video", // video
  };
  // Heading elements.
  String[] headings =
  {
    "h1", "h2", "h3", "h4", "h5", "h6", // headers
  };
  this.html5SpecTags = new HashSet<String>();
  this.html4SpecTags = new HashSet<String>();
  this.nonTextTagsAlt = new HashSet<String>();
  this.nonTextTagsTitle = new HashSet<String>();
  this.headerTags = new HashSet<String>();
  Collections.addAll(this.html5SpecTags, html5Only);
  Collections.addAll(this.html4SpecTags, html4Only);
  Collections.addAll(this.nonTextTagsAlt, altRequired);
  Collections.addAll(this.nonTextTagsTitle, titleRequired);
  Collections.addAll(this.headerTags, headings);
  this.tagStack = new Stack<String>();
  this.listItemCounters = new ArrayList<Integer>();
  this.formInputMarks = new HashMap<String, ControlMark>();
}
/** @return the name of the file currently being analysed */
String getFileName()
{
  return this.fileName;
}
/** Records the name of the file being analysed (used in report locations). */
public void setFileName(final String fileName)
{
  this.fileName = fileName;
}
/** Sets the EPUB version the document is being validated against. */
public void setVersion(final EPUBVersion version)
{
  this.version = version;
}
/** @return the EPUB version the document is being validated against */
public EPUBVersion getVersion()
{
  return this.version;
}
// Per-form-control bookkeeping: tracks what has been seen for a given
// control id so label/input pairing can be checked.
private class ControlMark
{
public String controlId; // value of the control's id attribute
public int mark; // bit set: HAS_INPUT is OR-ed in when an <input> with this id is seen (HAS_LABEL presumably set elsewhere — outside this view)
public EPUBLocation location; // location at which the control was last seen
}
/** SAX callback: keeps the locator so reports can cite line/column. */
public void setDocumentLocator(final Locator locator)
{
  this.locator = locator;
}
/** Sets the report that validation messages are written to. */
public void setReport(final Report report)
{
  this.report = report;
}
/** @return whether the document uses a fixed layout */
public boolean isFixed()
{
  return this.isFixed;
}
/** Marks the document as fixed-layout (or not). */
public void setIsFixed(final boolean isFixed)
{
  this.isFixed = isFixed;
}
@Override
// DTD notation declarations are echoed to stdout.
// NOTE(review): writing to System.out looks like leftover debug output —
// confirm whether this should go through the Report instead.
public void notationDecl (String name, String publicId, String systemId)
throws SAXException
{
System.out.printf("%1$s : %2$s : %3$s ", name, publicId, systemId);
}
@Override
// Unparsed entity declarations are echoed to stdout.
// NOTE(review): writing to System.out looks like leftover debug output —
// confirm whether this should go through the Report instead.
public void unparsedEntityDecl (String name, String publicId, String systemId, String notationName)
throws SAXException
{
System.out.printf("%1$s : %2$s : %3$s : %4$s", name, publicId, systemId, notationName);
}
@Override
// SAX callback: forwards each namespace prefix declaration to the
// NamespaceHelper, which validates it and reports problems at the
// current locator position.
public void startPrefixMapping (String prefix, String uri) throws SAXException
{
namespaceHelper.declareNamespace(prefix, uri, EPUBLocation.create(fileName, locator.getLineNumber(), locator.getColumnNumber(), prefix), report);
}
/**
 * Returns true when the element on top of the open-element stack has the
 * given lower-case name. Safe when the stack is empty (e.g. while handling
 * the document's root element): it simply returns false instead of letting
 * {@link java.util.Stack#peek()} throw EmptyStackException.
 */
private boolean parentElementIs(String name)
{
    return !tagStack.isEmpty() && name.compareTo(tagStack.peek()) == 0;
}

/**
 * SAX element-start callback: runs all per-element accessibility and
 * conformance checks (media types, list-item counting, form labeling,
 * title/viewport bookkeeping, alt/title attributes, epub:type pagebreaks)
 * and finally pushes the element onto the open-element stack.
 */
public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException
{
    namespaceHelper.onStartElement(fileName, locator, uri, qName, attributes, report);
    // All element-name comparisons below are done on the lower-cased name.
    String tagName = qName.toLowerCase(Locale.ROOT);
    if (html5SpecTags.contains(tagName))
    {
        html5SpecTagsCounter++;
    }
    if (html4SpecTags.contains(tagName))
    {
        html4SpecTagsCounter++;
    }
    if (("source".compareTo(tagName) == 0) && parentElementIs("video"))
    {
        // <source> inside <video> must declare a recognized video MIME type.
        String mimeType = attributes.getValue("type");
        if (mimeType == null || !isCommonVideoType(mimeType))
        {
            if (mimeType == null)
            {
                mimeType = "null";
            }
            report.message(MessageId.OPF_036, EPUBLocation.create(this.getFileName(), locator.getLineNumber(), locator.getColumnNumber()), mimeType);
        }
    }
    else if (("source".compareTo(tagName) == 0) && parentElementIs("audio"))
    {
        // <source> inside <audio> must declare a blessed audio MIME type.
        String mimeType = attributes.getValue("type");
        if (mimeType == null || !isBlessedAudioType(mimeType))
        {
            if (mimeType == null)
            {
                mimeType = "null";
            }
            report.message(MessageId.OPF_056, EPUBLocation.create(this.getFileName(), locator.getLineNumber(), locator.getColumnNumber()), mimeType);
        }
    }
    else if (("ul".compareTo(tagName) == 0) || ("ol".compareTo(tagName) == 0) || ("dl".compareTo(tagName) == 0))
    {
        // BUG FIX: this compared against "Dl", which can never equal the
        // lower-cased tagName, so <dl> lists never opened an item counter and
        // the "dh" branch below incremented the counter of an enclosing list.
        // Now paired with the matching "dl" pop in endElement().
        listItemCounters.add(0);
    }
    else if (("li".compareTo(tagName) == 0) &&
        (parentElementIs("ul") || parentElementIs("ol")))
    {
        // Count one item in the innermost open list.
        listItemCounters.set(listItemCounters.size() - 1, 1 + listItemCounters.get(listItemCounters.size() - 1));
    }
    else if (("dh".compareTo(tagName) == 0) && parentElementIs("dl"))
    {
        // NOTE(review): "dh" is not an HTML element (definition lists use
        // dt/dd) — possibly intended to be "dt"; left unchanged pending
        // confirmation.
        listItemCounters.set(listItemCounters.size() - 1, 1 + listItemCounters.get(listItemCounters.size() - 1));
    }
    else if ("input".compareTo(tagName) == 0)
    {
        String id = attributes.getValue("id");
        String type = attributes.getValue("type");
        if (id != null)
        {
            ControlMark mark = formInputMarks.get(id);
            if (mark == null)
            {
                mark = new ControlMark();
                mark.controlId = id;
            }
            // The input's location always wins over a previously seen label's.
            mark.location = EPUBLocation.create(this.getFileName(), locator.getLineNumber(), locator.getColumnNumber(), id);
            mark.mark |= HAS_INPUT;
            formInputMarks.put(id, mark);
        }
        else if (type == null || "submit".compareToIgnoreCase(type) != 0) // submit buttons don't need a label
        {
            report.message(MessageId.HTM_028, EPUBLocation.create(this.fileName, locator.getLineNumber(), locator.getColumnNumber()), tagName);
        }
    }
    else if ("label".compareTo(tagName) == 0)
    {
        String id = attributes.getValue("for");
        if (id != null)
        {
            ControlMark mark = formInputMarks.get(id);
            if (mark == null)
            {
                mark = new ControlMark();
                mark.controlId = id;
                // only set the location if we are creating the entry here. This location will be overwritten
                // by the input control location, but if there is no input that overrides it, the label location will
                // be the one reported.
                mark.location = EPUBLocation.create(this.getFileName(), locator.getLineNumber(), locator.getColumnNumber(), id);
            }
            mark.mark |= HAS_LABEL;
            formInputMarks.put(id, mark);
        }
        else
        {
            // A label without a "for" attribute labels nothing.
            report.message(MessageId.HTM_029, EPUBLocation.create(this.getFileName(), locator.getLineNumber(), locator.getColumnNumber(), tagName));
        }
    }
    else if ("form".compareTo(tagName) == 0)
    {
        // Each form tracks its own input/label pairing.
        this.formInputMarks = new HashMap<String, ControlMark>();
    }
    else if ("html".compareTo(tagName) == 0)
    {
        String ns = attributes.getValue("xmlns");
        if (ns == null || EpubConstants.HtmlNamespaceUri.compareTo(ns) != 0)
        {
            report.message(MessageId.HTM_049, EPUBLocation.create(this.getFileName(), locator.getLineNumber(), locator.getColumnNumber(), tagName));
        }
    }
    else if ("body".compareTo(tagName) == 0)
    {
        // A non-empty title attribute on <body> also satisfies the title check.
        String title = attributes.getValue("title");
        if (title != null && title.length() > 0)
        {
            hasTitle = true;
        }
    }
    else if (("title".compareTo(tagName) == 0) && parentElementIs("head"))
    {
        inTitle = true;
    }
    else if ("nav".compareTo(tagName) == 0)
    {
        String epubPrefix = namespaceHelper.findPrefixForUri(EpubConstants.EpubTypeNamespaceUri);
        String type = attributes.getValue(epubPrefix+":type");
        if (type != null && "landmarks".compareToIgnoreCase(type) == 0)
        {
            ++landmarkNavCount;
        }
    }
    else if ("blockquote".compareTo(tagName) == 0)
    {
        inBlockQuote = true;
    }
    else if ("figure".compareTo(tagName) == 0)
    {
        inFigure = true;
    }
    else if ("meta".compareTo(tagName) == 0)
    {
        String nameAttribute = attributes.getValue("name");
        if (nameAttribute != null && "viewport".compareTo(nameAttribute) == 0)
        {
            hasViewport = true;
            // Fixed-layout documents must specify both width and height.
            String contentAttribute = attributes.getValue("content");
            if (isFixed && (contentAttribute == null || !(contentAttribute.contains("width") && contentAttribute.contains("height"))))
            {
                report.message(MessageId.HTM_047, EPUBLocation.create(this.getFileName(), locator.getLineNumber(), locator.getColumnNumber(), tagName));
            }
        }
    }
    if (headerTags.contains(tagName))
    {
        // Headings inside blockquote/figure are an accessibility issue.
        if (inBlockQuote || inFigure)
        {
            report.message(MessageId.ACC_010, EPUBLocation.create(getFileName(), locator.getLineNumber(), locator.getColumnNumber(), tagName));
        }
    }
    if (nonTextTagsAlt.contains(tagName))
    {
        if (null != this.getFileName() && null == attributes.getValue("alt"))
        {
            report.message(MessageId.ACC_001, EPUBLocation.create(this.getFileName(), locator.getLineNumber(), locator.getColumnNumber(), tagName));
        }
    }
    if (nonTextTagsTitle.contains(tagName))
    {
        if (null != this.getFileName() && null == attributes.getValue("title"))
        {
            report.message(MessageId.ACC_003, EPUBLocation.create(this.getFileName(), locator.getLineNumber(), locator.getColumnNumber(), tagName));
        }
    }
    String epubPrefix = namespaceHelper.findPrefixForUri(EpubConstants.EpubTypeNamespaceUri);
    if (epubPrefix != null)
    {
        String typeAttr = attributes.getValue(epubPrefix+":type");
        if (typeAttr != null)
        {
            if (typeAttr.contains("pagebreak"))
            {
                report.message(MessageId.HTM_050, EPUBLocation.create(this.getFileName(), locator.getLineNumber(), locator.getColumnNumber(), "pagebreak"));
            }
        }
    }
    tagStack.push(tagName);
}

/**
 * SAX element-end callback: pops the open-element stack and finalizes the
 * checks opened in startElement (empty-list reporting, unlabeled-input
 * reporting at end of body, missing title / viewport at end of head).
 */
public void endElement(String uri, String localName, String qName) throws SAXException
{
    namespaceHelper.onEndElement(report);
    String tagName = qName.toLowerCase(Locale.ROOT);
    // The parser guarantees balanced events for well-formed XML, so the
    // popped element should always match tagName.
    String top = tagStack.pop();
    if (top.compareTo(tagName) == 0)
    {
        if (("ul".compareTo(tagName) == 0) || ("ol".compareTo(tagName) == 0) || ("dl".compareTo(tagName) == 0))
        {
            // BUG FIX: was "Dl" — could never match the lower-cased tagName,
            // leaking the counter pushed for <dl> in startElement().
            Integer count = listItemCounters.remove(listItemCounters.size() - 1);
            if (count < 1)
            {
                report.message(MessageId.HTM_027,
                    EPUBLocation.create(this.getFileName(), locator.getLineNumber(), locator.getColumnNumber(), qName)
                );
            }
        }
        if ("body".compareTo(tagName) == 0)
        {
            // Report every input that never got a matching <label for=...>.
            for (String id : formInputMarks.keySet())
            {
                ControlMark mark = formInputMarks.get(id);
                if (((mark.mark & HAS_LABEL) != HAS_LABEL) && (mark.mark & HAS_INPUT) == HAS_INPUT)
                {
                    report.message(MessageId.ACC_002, mark.location, id);
                }
            }
        }
        if (inTitle && "title".compareTo(tagName) == 0)
        {
            inTitle = false;
        }
        else if ("head".compareTo(tagName) == 0)
        {
            if (!hasTitle)
            {
                report.message(MessageId.HTM_033, EPUBLocation.create(this.getFileName(), locator.getLineNumber(), locator.getColumnNumber()));
            }
            if (isFixed() && !hasViewport)
            {
                report.message(MessageId.HTM_046, EPUBLocation.create(this.getFileName(), locator.getLineNumber(), locator.getColumnNumber()));
            }
        }
        else if ("blockquote".compareTo(tagName) == 0)
        {
            inBlockQuote = false;
        }
        else if ("figure".compareTo(tagName) == 0)
        {
            inFigure = false;
        }
    }
}
/**
 * SAX character-data callback: any non-empty text seen while inside the
 * head's &lt;title&gt; element marks the document as having a title.
 */
public void characters(char[] chars, int offset, int count) throws SAXException
{
    boolean titleText = (count > 0) && inTitle;
    if (titleText)
    {
        hasTitle = true;
    }
}
}
| |
/*
* Copyright 2018 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dialogflow.v2beta1.stub;
import static com.google.cloud.dialogflow.v2beta1.AgentsClient.SearchAgentsPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.grpc.ProtoOperationTransformers;
import com.google.api.gax.longrunning.OperationSnapshot;
import com.google.api.gax.longrunning.OperationTimedPollAlgorithm;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.dialogflow.v2beta1.Agent;
import com.google.cloud.dialogflow.v2beta1.ExportAgentRequest;
import com.google.cloud.dialogflow.v2beta1.ExportAgentResponse;
import com.google.cloud.dialogflow.v2beta1.GetAgentRequest;
import com.google.cloud.dialogflow.v2beta1.ImportAgentRequest;
import com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest;
import com.google.cloud.dialogflow.v2beta1.SearchAgentsRequest;
import com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse;
import com.google.cloud.dialogflow.v2beta1.TrainAgentRequest;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import com.google.protobuf.Struct;
import java.io.IOException;
import java.util.List;
import javax.annotation.Generated;
import org.threeten.bp.Duration;
// AUTO-GENERATED DOCUMENTATION AND CLASS
/**
* Settings class to configure an instance of {@link AgentsStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (dialogflow.googleapis.com) and default port (443) are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object. For
* example, to set the total timeout of getAgent to 30 seconds:
*
* <pre>
* <code>
* AgentsStubSettings.Builder agentsSettingsBuilder =
* AgentsStubSettings.newBuilder();
* agentsSettingsBuilder.getAgentSettings().getRetrySettings().toBuilder()
* .setTotalTimeout(Duration.ofSeconds(30));
* AgentsStubSettings agentsSettings = agentsSettingsBuilder.build();
* </code>
* </pre>
*/
@Generated("by gapic-generator")
@BetaApi
public class AgentsStubSettings extends StubSettings<AgentsStubSettings> {
  // NOTE: auto-generated by the GAPIC generator — prefer regenerating from the
  // service config over hand-editing; manual changes will be overwritten.
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build();

  // Immutable per-RPC settings, built from the corresponding Builder fields.
  private final UnaryCallSettings<GetAgentRequest, Agent> getAgentSettings;
  private final PagedCallSettings<
          SearchAgentsRequest, SearchAgentsResponse, SearchAgentsPagedResponse>
      searchAgentsSettings;
  private final UnaryCallSettings<TrainAgentRequest, Operation> trainAgentSettings;
  private final OperationCallSettings<TrainAgentRequest, Empty, Struct> trainAgentOperationSettings;
  private final UnaryCallSettings<ExportAgentRequest, Operation> exportAgentSettings;
  private final OperationCallSettings<ExportAgentRequest, ExportAgentResponse, Struct>
      exportAgentOperationSettings;
  private final UnaryCallSettings<ImportAgentRequest, Operation> importAgentSettings;
  private final OperationCallSettings<ImportAgentRequest, Empty, Struct>
      importAgentOperationSettings;
  private final UnaryCallSettings<RestoreAgentRequest, Operation> restoreAgentSettings;
  private final OperationCallSettings<RestoreAgentRequest, Empty, Struct>
      restoreAgentOperationSettings;

  /** Returns the object with the settings used for calls to getAgent. */
  public UnaryCallSettings<GetAgentRequest, Agent> getAgentSettings() {
    return getAgentSettings;
  }

  /** Returns the object with the settings used for calls to searchAgents. */
  public PagedCallSettings<SearchAgentsRequest, SearchAgentsResponse, SearchAgentsPagedResponse>
      searchAgentsSettings() {
    return searchAgentsSettings;
  }

  /** Returns the object with the settings used for calls to trainAgent. */
  public UnaryCallSettings<TrainAgentRequest, Operation> trainAgentSettings() {
    return trainAgentSettings;
  }

  /** Returns the object with the settings used for calls to trainAgent. */
  public OperationCallSettings<TrainAgentRequest, Empty, Struct> trainAgentOperationSettings() {
    return trainAgentOperationSettings;
  }

  /** Returns the object with the settings used for calls to exportAgent. */
  public UnaryCallSettings<ExportAgentRequest, Operation> exportAgentSettings() {
    return exportAgentSettings;
  }

  /** Returns the object with the settings used for calls to exportAgent. */
  public OperationCallSettings<ExportAgentRequest, ExportAgentResponse, Struct>
      exportAgentOperationSettings() {
    return exportAgentOperationSettings;
  }

  /** Returns the object with the settings used for calls to importAgent. */
  public UnaryCallSettings<ImportAgentRequest, Operation> importAgentSettings() {
    return importAgentSettings;
  }

  /** Returns the object with the settings used for calls to importAgent. */
  public OperationCallSettings<ImportAgentRequest, Empty, Struct> importAgentOperationSettings() {
    return importAgentOperationSettings;
  }

  /** Returns the object with the settings used for calls to restoreAgent. */
  public UnaryCallSettings<RestoreAgentRequest, Operation> restoreAgentSettings() {
    return restoreAgentSettings;
  }

  /** Returns the object with the settings used for calls to restoreAgent. */
  public OperationCallSettings<RestoreAgentRequest, Empty, Struct> restoreAgentOperationSettings() {
    return restoreAgentOperationSettings;
  }

  /** Creates the gRPC transport stub; only the gRPC transport is supported. */
  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public AgentsStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcAgentsStub.create(this);
    } else {
      throw new UnsupportedOperationException(
          "Transport not supported: " + getTransportChannelProvider().getTransportName());
    }
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return "dialogflow.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder().setScopesToApply(DEFAULT_SERVICE_SCOPES);
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder();
  }

  /** Returns the default transport channel provider (gRPC). */
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  @BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(AgentsStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  /** Builds the immutable settings from the given (recursive) builder. */
  protected AgentsStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
    getAgentSettings = settingsBuilder.getAgentSettings().build();
    searchAgentsSettings = settingsBuilder.searchAgentsSettings().build();
    trainAgentSettings = settingsBuilder.trainAgentSettings().build();
    trainAgentOperationSettings = settingsBuilder.trainAgentOperationSettings().build();
    exportAgentSettings = settingsBuilder.exportAgentSettings().build();
    exportAgentOperationSettings = settingsBuilder.exportAgentOperationSettings().build();
    importAgentSettings = settingsBuilder.importAgentSettings().build();
    importAgentOperationSettings = settingsBuilder.importAgentOperationSettings().build();
    restoreAgentSettings = settingsBuilder.restoreAgentSettings().build();
    restoreAgentOperationSettings = settingsBuilder.restoreAgentOperationSettings().build();
  }

  // Describes how the searchAgents pagination tokens/page sizes/resources map
  // onto the request and response protos.
  private static final PagedListDescriptor<SearchAgentsRequest, SearchAgentsResponse, Agent>
      SEARCH_AGENTS_PAGE_STR_DESC =
          new PagedListDescriptor<SearchAgentsRequest, SearchAgentsResponse, Agent>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public SearchAgentsRequest injectToken(SearchAgentsRequest payload, String token) {
              return SearchAgentsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public SearchAgentsRequest injectPageSize(SearchAgentsRequest payload, int pageSize) {
              return SearchAgentsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(SearchAgentsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(SearchAgentsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Agent> extractResources(SearchAgentsResponse payload) {
              return payload.getAgentsList();
            }
          };

  // Wraps searchAgents responses into the paged-response surface type.
  private static final PagedListResponseFactory<
          SearchAgentsRequest, SearchAgentsResponse, SearchAgentsPagedResponse>
      SEARCH_AGENTS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              SearchAgentsRequest, SearchAgentsResponse, SearchAgentsPagedResponse>() {
            @Override
            public ApiFuture<SearchAgentsPagedResponse> getFuturePagedResponse(
                UnaryCallable<SearchAgentsRequest, SearchAgentsResponse> callable,
                SearchAgentsRequest request,
                ApiCallContext context,
                ApiFuture<SearchAgentsResponse> futureResponse) {
              PageContext<SearchAgentsRequest, SearchAgentsResponse, Agent> pageContext =
                  PageContext.create(callable, SEARCH_AGENTS_PAGE_STR_DESC, request, context);
              return SearchAgentsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  /** Builder for AgentsStubSettings. */
  public static class Builder extends StubSettings.Builder<AgentsStubSettings, Builder> {
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final UnaryCallSettings.Builder<GetAgentRequest, Agent> getAgentSettings;
    private final PagedCallSettings.Builder<
            SearchAgentsRequest, SearchAgentsResponse, SearchAgentsPagedResponse>
        searchAgentsSettings;
    private final UnaryCallSettings.Builder<TrainAgentRequest, Operation> trainAgentSettings;
    private final OperationCallSettings.Builder<TrainAgentRequest, Empty, Struct>
        trainAgentOperationSettings;
    private final UnaryCallSettings.Builder<ExportAgentRequest, Operation> exportAgentSettings;
    private final OperationCallSettings.Builder<ExportAgentRequest, ExportAgentResponse, Struct>
        exportAgentOperationSettings;
    private final UnaryCallSettings.Builder<ImportAgentRequest, Operation> importAgentSettings;
    private final OperationCallSettings.Builder<ImportAgentRequest, Empty, Struct>
        importAgentOperationSettings;
    private final UnaryCallSettings.Builder<RestoreAgentRequest, Operation> restoreAgentSettings;
    private final OperationCallSettings.Builder<RestoreAgentRequest, Empty, Struct>
        restoreAgentOperationSettings;

    // Named sets of status codes considered retryable per method category.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "idempotent",
          ImmutableSet.copyOf(
              Lists.<StatusCode.Code>newArrayList(
                  StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE)));
      definitions.put("non_idempotent", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Named retry/backoff parameter sets shared by all methods.
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(100L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelay(Duration.ofMillis(60000L))
              .setInitialRpcTimeout(Duration.ofMillis(20000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(20000L))
              .setTotalTimeout(Duration.ofMillis(600000L))
              .build();
      definitions.put("default", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this((ClientContext) null);
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);

      getAgentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      searchAgentsSettings = PagedCallSettings.newBuilder(SEARCH_AGENTS_PAGE_STR_FACT);
      trainAgentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      trainAgentOperationSettings = OperationCallSettings.newBuilder();
      exportAgentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      exportAgentOperationSettings = OperationCallSettings.newBuilder();
      importAgentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      importAgentOperationSettings = OperationCallSettings.newBuilder();
      restoreAgentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      restoreAgentOperationSettings = OperationCallSettings.newBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              getAgentSettings,
              searchAgentsSettings,
              trainAgentSettings,
              exportAgentSettings,
              importAgentSettings,
              restoreAgentSettings);
      initDefaults(this);
    }

    private static Builder createDefault() {
      Builder builder = new Builder((ClientContext) null);
      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setEndpoint(getDefaultEndpoint());
      return initDefaults(builder);
    }

    // Applies the retry/polling defaults to every per-method settings builder.
    private static Builder initDefaults(Builder builder) {
      builder
          .getAgentSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"));

      builder
          .searchAgentsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"));

      builder
          .trainAgentSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"));

      builder
          .exportAgentSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"));

      builder
          .importAgentSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"));

      builder
          .restoreAgentSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"));

      builder
          .trainAgentOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings.<TrainAgentRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(Empty.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(Struct.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelay(Duration.ofMillis(500L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelay(Duration.ofMillis(5000L))
                      .setInitialRpcTimeout(Duration.ZERO) // ignored
                      .setRpcTimeoutMultiplier(1.0) // ignored
                      .setMaxRpcTimeout(Duration.ZERO) // ignored
                      .setTotalTimeout(Duration.ofMillis(300000L))
                      .build()));

      builder
          .exportAgentOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings.<ExportAgentRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(ExportAgentResponse.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(Struct.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelay(Duration.ofMillis(500L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelay(Duration.ofMillis(5000L))
                      .setInitialRpcTimeout(Duration.ZERO) // ignored
                      .setRpcTimeoutMultiplier(1.0) // ignored
                      .setMaxRpcTimeout(Duration.ZERO) // ignored
                      .setTotalTimeout(Duration.ofMillis(300000L))
                      .build()));

      builder
          .importAgentOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings.<ImportAgentRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(Empty.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(Struct.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelay(Duration.ofMillis(500L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelay(Duration.ofMillis(5000L))
                      .setInitialRpcTimeout(Duration.ZERO) // ignored
                      .setRpcTimeoutMultiplier(1.0) // ignored
                      .setMaxRpcTimeout(Duration.ZERO) // ignored
                      .setTotalTimeout(Duration.ofMillis(300000L))
                      .build()));

      builder
          .restoreAgentOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<RestoreAgentRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(Empty.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(Struct.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelay(Duration.ofMillis(500L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelay(Duration.ofMillis(5000L))
                      .setInitialRpcTimeout(Duration.ZERO) // ignored
                      .setRpcTimeoutMultiplier(1.0) // ignored
                      .setMaxRpcTimeout(Duration.ZERO) // ignored
                      .setTotalTimeout(Duration.ofMillis(300000L))
                      .build()));

      return builder;
    }

    protected Builder(AgentsStubSettings settings) {
      super(settings);

      getAgentSettings = settings.getAgentSettings.toBuilder();
      searchAgentsSettings = settings.searchAgentsSettings.toBuilder();
      trainAgentSettings = settings.trainAgentSettings.toBuilder();
      trainAgentOperationSettings = settings.trainAgentOperationSettings.toBuilder();
      exportAgentSettings = settings.exportAgentSettings.toBuilder();
      exportAgentOperationSettings = settings.exportAgentOperationSettings.toBuilder();
      importAgentSettings = settings.importAgentSettings.toBuilder();
      importAgentOperationSettings = settings.importAgentOperationSettings.toBuilder();
      restoreAgentSettings = settings.restoreAgentSettings.toBuilder();
      restoreAgentOperationSettings = settings.restoreAgentOperationSettings.toBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              getAgentSettings,
              searchAgentsSettings,
              trainAgentSettings,
              exportAgentSettings,
              importAgentSettings,
              restoreAgentSettings);
    }

    // NEXT_MAJOR_VER: remove 'throws Exception'
    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) throws Exception {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to getAgent. */
    public UnaryCallSettings.Builder<GetAgentRequest, Agent> getAgentSettings() {
      return getAgentSettings;
    }

    /** Returns the builder for the settings used for calls to searchAgents. */
    public PagedCallSettings.Builder<
            SearchAgentsRequest, SearchAgentsResponse, SearchAgentsPagedResponse>
        searchAgentsSettings() {
      return searchAgentsSettings;
    }

    /** Returns the builder for the settings used for calls to trainAgent. */
    public UnaryCallSettings.Builder<TrainAgentRequest, Operation> trainAgentSettings() {
      return trainAgentSettings;
    }

    /** Returns the builder for the settings used for calls to trainAgent. */
    public OperationCallSettings.Builder<TrainAgentRequest, Empty, Struct>
        trainAgentOperationSettings() {
      return trainAgentOperationSettings;
    }

    /** Returns the builder for the settings used for calls to exportAgent. */
    public UnaryCallSettings.Builder<ExportAgentRequest, Operation> exportAgentSettings() {
      return exportAgentSettings;
    }

    /** Returns the builder for the settings used for calls to exportAgent. */
    public OperationCallSettings.Builder<ExportAgentRequest, ExportAgentResponse, Struct>
        exportAgentOperationSettings() {
      return exportAgentOperationSettings;
    }

    /** Returns the builder for the settings used for calls to importAgent. */
    public UnaryCallSettings.Builder<ImportAgentRequest, Operation> importAgentSettings() {
      return importAgentSettings;
    }

    /** Returns the builder for the settings used for calls to importAgent. */
    public OperationCallSettings.Builder<ImportAgentRequest, Empty, Struct>
        importAgentOperationSettings() {
      return importAgentOperationSettings;
    }

    /** Returns the builder for the settings used for calls to restoreAgent. */
    public UnaryCallSettings.Builder<RestoreAgentRequest, Operation> restoreAgentSettings() {
      return restoreAgentSettings;
    }

    /** Returns the builder for the settings used for calls to restoreAgent. */
    public OperationCallSettings.Builder<RestoreAgentRequest, Empty, Struct>
        restoreAgentOperationSettings() {
      return restoreAgentOperationSettings;
    }

    @Override
    public AgentsStubSettings build() throws IOException {
      return new AgentsStubSettings(this);
    }
  }
}
| |
package com.badr.infodota.hero.fragment;
import android.app.Activity;
import android.content.Context;
import android.media.MediaPlayer;
import android.os.Bundle;
import android.os.Environment;
import android.support.v4.app.Fragment;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.EditText;
import android.widget.Filter;
import android.widget.ListView;
import android.widget.Toast;
import com.badr.infodota.R;
import com.badr.infodota.base.service.LocalSpiceService;
import com.badr.infodota.hero.adapter.HeroResponsesAdapter;
import com.badr.infodota.hero.api.Hero;
import com.badr.infodota.hero.api.responses.HeroResponse;
import com.badr.infodota.hero.api.responses.HeroResponsesSection;
import com.badr.infodota.hero.task.HeroResponseLoadRequest;
import com.badr.infodota.hero.task.MusicLoadRequest;
import com.octo.android.robospice.SpiceManager;
import com.octo.android.robospice.persistence.exception.SpiceException;
import com.octo.android.robospice.request.listener.RequestListener;
import java.io.File;
/**
 * Fragment that shows a hero's voice responses: a filterable list with
 * tap-to-play via {@link MediaPlayer} plus bulk select/download of response files.
 * <p>
 * User: ABadretdinov
 * Date: 05.02.14
 * Time: 17:53
 */
public class HeroResponses extends Fragment implements RequestListener<HeroResponsesSection.List> {
    /** Player for the most recently loaded response; released in {@link #onDestroy()}. */
    private MediaPlayer mMediaPlayer;
    /** List adapter; {@code null} until {@link #onRequestSuccess} delivers the data. */
    private HeroResponsesAdapter mAdapter;
    /** Text filter obtained from the adapter; {@code null} until the adapter exists. */
    private Filter mFilter;
    private EditText searchView;
    private ListView listView;
    // NOTE(review): assigned only in newInstance() and not stored in the arguments
    // Bundle, so it is lost if the system recreates this fragment — confirm callers
    // tolerate that.
    private Hero hero;
    private final SpiceManager mSpiceManager = new SpiceManager(LocalSpiceService.class);

    /** Factory method; use instead of the default constructor so {@code hero} is set. */
    public static HeroResponses newInstance(Hero hero) {
        HeroResponses fragment = new HeroResponses();
        fragment.hero = hero;
        return fragment;
    }

    @Override
    public void onStart() {
        // Start the request manager once and kick off loading this hero's responses.
        if (!mSpiceManager.isStarted()) {
            Activity activity = getActivity();
            if (activity != null) {
                mSpiceManager.start(activity);
                mSpiceManager.execute(
                        new HeroResponseLoadRequest(activity.getApplicationContext(), hero.getDotaId()), this);
            }
        }
        super.onStart();
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.hero_responses, container, false);
        listView = (ListView) view.findViewById(android.R.id.list);
        searchView = (EditText) view.findViewById(R.id.search);
        return view;
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // Re-attach the adapter after view re-creation if data was already loaded.
        if (mAdapter != null) {
            listView.setAdapter(mAdapter);
        }
        searchView.addTextChangedListener(new TextWatcher() {
            @Override
            public void beforeTextChanged(CharSequence s, int start, int count, int after) {
            }

            @Override
            public void onTextChanged(CharSequence s, int start, int before, int count) {
            }

            @Override
            public void afterTextChanged(Editable s) {
                // Filter only once the adapter (and thus the filter) exists.
                if (mFilter != null) {
                    mFilter.filter(s);
                }
            }
        });
        final View root = getView();
        if (root != null) {
            root.findViewById(R.id.select_to_download).setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    // Fix: the adapter is created asynchronously; guard against clicks
                    // arriving before the responses have loaded (previously NPE'd).
                    if (mAdapter != null) {
                        mAdapter.changeEditMode(true);
                        root.findViewById(R.id.buttons_holder).setVisibility(View.VISIBLE);
                    }
                }
            });
            root.findViewById(R.id.cancel).setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    if (mAdapter != null) {
                        mAdapter.changeEditMode(false);
                        root.findViewById(R.id.buttons_holder).setVisibility(View.GONE);
                    }
                }
            });
            root.findViewById(R.id.download).setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    if (mAdapter != null) {
                        mAdapter.startLoadingFiles();
                        root.findViewById(R.id.buttons_holder).setVisibility(View.GONE);
                    }
                }
            });
            root.findViewById(R.id.invert).setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    if (mAdapter != null) {
                        mAdapter.inverseChecked();
                    }
                }
            });
            root.findViewById(R.id.clear).setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    searchView.setText("");
                }
            });
            setOnClickListener();
        }
    }

    /** Releases the media player and drops the reference. */
    private void killMediaPlayer() {
        if (mMediaPlayer != null) {
            try {
                mMediaPlayer.release();
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                // Fix: clear the field — a released MediaPlayer must never be reused
                // (it was previously still handed to new MusicLoadRequest instances).
                mMediaPlayer = null;
            }
        }
    }

    /** Installs the item-click handler: toggle selection in edit mode, otherwise load & play. */
    private void setOnClickListener() {
        listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                // Clicks can only arrive once the ListView has an adapter, so mAdapter
                // is non-null here.
                if (mAdapter.isEditMode()) {
                    mAdapter.setItemClicked(position);
                } else {
                    Object object = mAdapter.getItem(position);
                    if (object instanceof HeroResponse) {
                        HeroResponse heroResponse = (HeroResponse) object;
                        mAdapter.addToPlayLoading(position);
                        mSpiceManager.execute(new MusicLoadRequest(mMediaPlayer, heroResponse),
                                new MusicLoadRequestListener(position));
                    }
                }
            }
        });
    }

    @Override
    public void onDestroy() {
        if (mSpiceManager.isStarted()) {
            mSpiceManager.shouldStop();
        }
        killMediaPlayer();
        super.onDestroy();
    }

    @Override
    public void onRequestFailure(SpiceException spiceException) {
        // NOTE(review): response-list load failures are silently ignored; consider
        // surfacing an error (e.g. a Toast) so the user is not left with an empty list.
    }

    @Override
    public void onRequestSuccess(HeroResponsesSection.List heroResponses) {
        // Responses are cached under <external-storage>/Music/dota2/<heroId>/.
        File musicFolder = new File(Environment.getExternalStorageDirectory() + File.separator + "Music"
                + File.separator + "dota2" + File.separator + hero.getDotaId() + File.separator);
        String musicPath = musicFolder.getAbsolutePath();
        // presumably getActivity() is non-null here because the request was started
        // while attached — TODO confirm against the fragment lifecycle.
        mAdapter = new HeroResponsesAdapter(getActivity(), heroResponses, musicPath);
        mFilter = mAdapter.getFilter();
        listView.setAdapter(mAdapter);
    }

    /** Listener for a single response's media load; updates the row at {@code mPosition}. */
    public class MusicLoadRequestListener implements RequestListener<MediaPlayer> {
        private final int mPosition;

        public MusicLoadRequestListener(int position) {
            this.mPosition = position;
        }

        @Override
        public void onRequestFailure(SpiceException spiceException) {
            Context context = getActivity();
            if (context != null) {
                Toast.makeText(context, getString(R.string.loading_response_error), Toast.LENGTH_SHORT).show();
            }
        }

        @Override
        public void onRequestSuccess(MediaPlayer mediaPlayer) {
            mMediaPlayer = mediaPlayer;
            mAdapter.loaded(mPosition);
        }
    }
}
| |
/*
* Shaheed Ahmed Dewan Sagar
* AUST-12-01-04-085
* sdewan64@gmail.com
*/
package client;
import common.CommonInformation;
import java.awt.Color;
import java.awt.Container;
import java.awt.Font;
import java.awt.GridLayout;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Random;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.BorderFactory;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.Timer;
/**
*
* @author Shaheed Ahmed Dewan Sagar
* AUST-12.01.04.085
* sdewan64@gmail.com
*/
/**
 * Swing UI for a networked 10x10 snakes-and-ladders board: renders the board
 * squares, the player's pawn, dice rolls, and game status, and forwards
 * roll/start commands to the server over {@code dataOutputStream}.
 * <p>
 * Board Image Credited to : lmoptics
 */
public class Board extends JFrame implements ActionListener{
    /** Player id; also selects this player's pawn image ("images/circle/&lt;id&gt;.png"). */
    private final int id;
    private final DataInputStream dataInputStream;
    private final DataOutputStream dataOutputStream;
    private static final int BOARD_SIZE = 10;
    private Container container;
    private JPanel boardPanel;
    private JPanel dataPanel;
    private final JButton[][] squares = new JButton[BOARD_SIZE][BOARD_SIZE];
    private JLabel statusLabel;
    private JLabel showNumberLabel;
    private JLabel showCurrentPosition;
    private JButton rollButton;
    private JButton startGameButton;
    private JButton dice;
    // Fix: no-arg Random() is already seeded; the old explicit
    // setSeed(System.currentTimeMillis()) was redundant and weaker.
    private final Random random = new Random();
    /** Square labels row by row, top-left = 100, direction alternating per row. */
    int[] numbers = {
        100,99,98,97,96,95,94,93,92,91,
        81,82,83,84,85,86,87,88,89,90,
        80,79,78,77,76,75,74,73,72,71,
        61,62,63,64,65,66,67,68,69,70,
        60,59,58,57,56,55,54,53,52,51,
        41,42,43,44,45,46,47,48,49,50,
        40,39,38,37,36,35,34,33,32,31,
        21,22,23,24,25,26,27,28,29,30,
        20,19,18,17,16,15,14,13,12,11,
        1,2,3,4,5,6,7,8,9,10
    };

    /**
     * Creates and shows the game window.
     *
     * @param id this player's id (used for pawn images)
     * @param dataInputStream stream from the server (held for later use)
     * @param dataOutputStream stream to the server (roll / start commands)
     */
    public Board(int id,DataInputStream dataInputStream,DataOutputStream dataOutputStream){
        super(CommonInformation.frameHeader);
        this.id = id;
        this.dataInputStream = dataInputStream;
        this.dataOutputStream = dataOutputStream;
        initializeBoard();
        initializeDataPanel();
        setWindowProperties();
    }

    /** Builds the 10x10 grid of image buttons in the left half of the window. */
    private void initializeBoard() {
        container = getContentPane();
        container.setLayout(new GridLayout(1,2,10,10));
        boardPanel = new JPanel(new GridLayout(10,10,0,0));
        int index = 0;
        for(int i=0;i<BOARD_SIZE;i++){
            for(int j=0;j<BOARD_SIZE;j++){
                squares[i][j] = new JButton(String.valueOf(numbers[index]));
                squares[i][j].addActionListener(this);
                squares[i][j].setMargin(new Insets(0,0,0,0));
                squares[i][j].setBorder(null);
                squares[i][j].setBorderPainted(false);
                squares[i][j].setContentAreaFilled(false);
                squares[i][j].setFocusPainted(false);
                ImageIcon image = new ImageIcon("images/board/"+numbers[index++]+".jpg");
                squares[i][j].setIcon(image);
                squares[i][j].setRolloverIcon(image);
                boardPanel.add(squares[i][j]);
            }
        }
        boardPanel.setBorder(BorderFactory.createEmptyBorder());
        container.add(boardPanel);
    }

    /** Builds the right-hand control panel: start/pawn row, status, dice row, roll row. */
    private void initializeDataPanel() {
        dataPanel = new JPanel(new GridLayout(4,1));
        JPanel firstPanel = new JPanel(new GridLayout(1,2));
        startGameButton = new JButton("Start Game");
        startGameButton.addActionListener(this);
        startGameButton.setEnabled(false);
        startGameButton.setBorder(null);
        JButton selfGuti = new JButton("Your Pawn");
        selfGuti.setBorder(null);
        selfGuti.setIcon(new ImageIcon("images/circle/"+id+".png"));
        selfGuti.setBackground(new Color(238, 238, 238));
        selfGuti.setHorizontalTextPosition(JButton.LEFT);
        firstPanel.add(startGameButton);
        firstPanel.add(selfGuti);
        statusLabel = new JLabel("Waiting to Connect...");
        statusLabel.setFont(new Font("Serif", Font.BOLD, 28));
        statusLabel.setForeground(new Color(44, 62, 80));
        JPanel mediumPanel = new JPanel(new GridLayout(1,2));
        showNumberLabel = new JLabel("Rolled : ");
        showNumberLabel.setFont(new Font("Serif", Font.BOLD, 14));
        showNumberLabel.setForeground(Color.MAGENTA);
        mediumPanel.add(showNumberLabel);
        dice = new JButton();
        dice.setBorder(null);
        dice.setBackground(new Color(238, 238, 238));
        mediumPanel.add(dice);
        JPanel lastPanel = new JPanel(new GridLayout(1,2));
        rollButton = new JButton("Roll");
        rollButton.addActionListener(this);
        rollButton.setEnabled(false);
        rollButton.setBorder(null);
        lastPanel.add(rollButton);
        showCurrentPosition = new JLabel("Current Position : 0");
        lastPanel.add(showCurrentPosition);
        statusLabel.setHorizontalAlignment(JLabel.CENTER);
        showNumberLabel.setHorizontalAlignment(JLabel.CENTER);
        rollButton.setHorizontalAlignment(JButton.CENTER);
        startGameButton.setHorizontalAlignment(JButton.CENTER);
        showCurrentPosition.setHorizontalAlignment(JLabel.CENTER);
        dataPanel.add(firstPanel);
        dataPanel.add(statusLabel);
        dataPanel.add(mediumPanel);
        dataPanel.add(lastPanel);
        boardPanel.setBackground(new Color(52, 152, 219));
        container.add(dataPanel);
    }

    /** Sizes, centers, and shows the frame. */
    private void setWindowProperties() {
        setSize(1200, 600);
        setLocationRelativeTo(null);
        setResizable(false);
        setDefaultCloseOperation(EXIT_ON_CLOSE);
        setVisible(true);
    }

    @Override
    public void actionPerformed(ActionEvent actionEvent) {
        if(actionEvent.getSource() == rollButton){
            // Disable until the server grants the next turn (see roll()).
            rollButton.setEnabled(false);
            int roller = random.nextInt(6)+1;
            dice.setIcon(new ImageIcon("images/dice/"+roller+".png"));
            showNumberLabel.setText("Rolled : "+roller);
            try {
                dataOutputStream.writeUTF("roll:"+roller);
            } catch (IOException ex) {
                Logger.getLogger(Board.class.getName()).log(Level.SEVERE, null, ex);
            }
        }else if(actionEvent.getSource() == startGameButton){
            try {
                dataOutputStream.writeUTF("Start Game");
                startGameButton.setEnabled(false);
            } catch (IOException ex) {
                Logger.getLogger(Board.class.getName()).log(Level.SEVERE, null, ex);
            }
        }else{
            // A board square was clicked; log its label.
            square:
            for(int i=0;i<BOARD_SIZE;i++){
                for(int j=0;j<BOARD_SIZE;j++){
                    if(actionEvent.getSource() == squares[i][j]){
                        System.out.println(squares[i][j].getText());
                        // Fix: plain break only left the inner loop and kept scanning rows.
                        break square;
                    }
                }
            }
        }
    }

    /** Manual smoke test only; the null streams will NPE on any server interaction. */
    public static void main(String[] args){
        new Board(1,new DataInputStream(null),new DataOutputStream(null));
    }

    public void enableStartGameButton(){
        startGameButton.setEnabled(true);
    }

    public void enableRollButton(){
        rollButton.setEnabled(true);
    }

    public void connected(){
        setStatus("Waiting for game to start...");
    }

    public void setStatus(String msg){
        statusLabel.setText(msg);
    }

    /** Called by the client when it is this player's turn. */
    public void roll(){
        setStatus("Your Turn");
        rollButton.setEnabled(true);
    }

    /**
     * Moves {@code player}'s pawn: draws it on square {@code number} and restores
     * the plain board image on square {@code oldPosition}. Shows the snake dialog
     * when this player moved backwards.
     */
    public void reposition(int player,int number,int oldPosition){
        if(player == id){
            showCurrentPosition.setText("Current Position : "+number);
            if(oldPosition>number){
                JOptionPane.showMessageDialog(this, "Snake got you.");
            }
        }
        place:
        for(int i=0;i<BOARD_SIZE;i++){
            for(int j=0;j<BOARD_SIZE;j++){
                if(squares[i][j].getText().equals(String.valueOf(number))){
                    squares[i][j].setIcon(new ImageIcon("images/circle/"+player+".png"));
                    break place;
                }
            }
        }
        // Fix: if the pawn did not actually move, do not erase the icon just drawn.
        if(oldPosition != number){
            erase:
            for(int i=0;i<BOARD_SIZE;i++){
                for(int j=0;j<BOARD_SIZE;j++){
                    if(squares[i][j].getText().equals(String.valueOf(oldPosition))){
                        squares[i][j].setIcon(new ImageIcon("images/board/"+oldPosition+".jpg"));
                        break erase;
                    }
                }
            }
        }
    }
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.fileEditor;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.editor.Caret;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.List;
/**
 * Per-project service for managing file editors: opening and closing files,
 * querying open/selected files and editors, and attaching auxiliary components
 * above or below an editor.
 * <p>
 * Unless a method's javadoc says otherwise, callers should assume EDT-only
 * semantics where noted on the individual methods below.
 */
public abstract class FileEditorManager {
// Client-property keys read when opening a file. NOTE(review): their exact
// semantics are defined at the usage sites (not visible in this file) — the key
// names suggest window-selection behavior; confirm before relying on them.
public static final Key<Boolean> USE_CURRENT_WINDOW = Key.create("OpenFile.searchForOpen");
public static final Key<Boolean> USE_MAIN_WINDOW = Key.create("OpenFile.useMainWindow");
/** Returns the {@link FileEditorManager} component of the given {@code project}. */
public static FileEditorManager getInstance(@NotNull Project project) {
return project.getComponent(FileEditorManager.class);
}
/**
 * @param file file to open. File should be valid.
 * Must be called from <a href="https://docs.oracle.com/javase/tutorial/uiswing/concurrency/dispatch.html">EDT</a>.
 * @return array of opened editors
 */
public abstract FileEditor @NotNull [] openFile(@NotNull VirtualFile file, boolean focusEditor);
/**
 * Opens a file.
 * Must be called from <a href="https://docs.oracle.com/javase/tutorial/uiswing/concurrency/dispatch.html">EDT</a>.
 *
 * @param file file to open
 * @param focusEditor {@code true} if need to focus
 * @return array of opened editors
 */
public FileEditor @NotNull [] openFile(@NotNull VirtualFile file, boolean focusEditor, boolean searchForOpen) {
throw new UnsupportedOperationException("Not implemented");
}
/**
 * Closes all editors opened for the file.
 * Must be called from <a href="https://docs.oracle.com/javase/tutorial/uiswing/concurrency/dispatch.html">EDT</a>.
 *
 * @param file file to be closed.
 */
public abstract void closeFile(@NotNull VirtualFile file);
/**
 * Works as {@link #openFile(VirtualFile, boolean)} but forces opening of text editor (see {@link TextEditor}).
 * If several text editors are opened, including the default one, default text editor is focused (if requested) and returned.
 * Must be called from <a href="https://docs.oracle.com/javase/tutorial/uiswing/concurrency/dispatch.html">EDT</a>.
 *
 * @return opened text editor. The method returns {@code null} in case if text editor wasn't opened.
 */
public abstract @Nullable Editor openTextEditor(@NotNull OpenFileDescriptor descriptor, boolean focusEditor);
/**
 * @deprecated use {@link #openTextEditor(OpenFileDescriptor, boolean)}
 */
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2021.3")
public void navigateToTextEditor(@NotNull OpenFileDescriptor descriptor, boolean focusEditor) {
openTextEditor(descriptor, focusEditor);
}
/**
 * @return currently selected text editor. The method returns {@code null} in case
 * there is no selected editor at all or selected editor is not a text one.
 * Must be called from <a href="https://docs.oracle.com/javase/tutorial/uiswing/concurrency/dispatch.html">EDT</a>.
 */
public abstract @Nullable Editor getSelectedTextEditor();
/**
 * @return currently selected TEXT editors including ones which were opened by guests during a collaborative development session
 * The method returns an empty array in case there are no selected editors or none of them is a text one.
 * Must be called from <a href="https://docs.oracle.com/javase/tutorial/uiswing/concurrency/dispatch.html">EDT</a>.
 */
@ApiStatus.Experimental
public Editor @NotNull [] getSelectedTextEditorWithRemotes() {
Editor editor = getSelectedTextEditor();
return editor != null ? new Editor[]{editor} : Editor.EMPTY_ARRAY;
}
/**
 * @return {@code true} if {@code file} is opened, {@code false} otherwise
 */
public abstract boolean isFileOpen(@NotNull VirtualFile file);
/**
 * @return all opened files. Order of files in the array corresponds to the order of editor tabs.
 */
public abstract VirtualFile @NotNull [] getOpenFiles();
/** Returns {@code true} when at least one file is currently open. */
public boolean hasOpenFiles() {
return getOpenFiles().length > 0;
}
/**
 * @return files currently selected. The method returns empty array if there are no selected files.
 * If more than one file is selected (split), the file with most recent focused editor is returned first.
 */
public abstract VirtualFile @NotNull [] getSelectedFiles();
/**
 * @return editors currently selected. The method returns empty array if no editors are open.
 */
public abstract FileEditor @NotNull [] getSelectedEditors();
/**
 * @return editors currently selected including ones which were opened by guests during a collaborative development session
 * The method returns an empty array if no editors are open.
 */
@ApiStatus.Experimental
public FileEditor @NotNull [] getSelectedEditorWithRemotes() {
return getSelectedEditors();
}
/**
 * @return currently selected file editor or {@code null} if there is no selected editor at all.
 */
public @Nullable FileEditor getSelectedEditor() {
VirtualFile[] files = getSelectedFiles();
return files.length == 0 ? null : getSelectedEditor(files[0]);
}
/**
 * @return editor which is currently selected for given file.
 * The method returns {@code null} if {@code file} is not opened.
 */
public abstract @Nullable FileEditor getSelectedEditor(@NotNull VirtualFile file);
/**
 * @return current editors for the specified {@code file}
 */
public abstract FileEditor @NotNull [] getEditors(@NotNull VirtualFile file);
/**
 * @return all editors for the specified {@code file}
 */
public abstract FileEditor @NotNull [] getAllEditors(@NotNull VirtualFile file);
/**
 * @return all open editors
 */
public abstract FileEditor @NotNull [] getAllEditors();
/**
 * Adds the specified component above the editor and paints a separator line below it.
 * If a separator line is not needed, set the client property to {@code true}:
 * <pre> component.putClientProperty(SEPARATOR_DISABLED, true); </pre>
 * Otherwise, a separator line will be painted by a
 * {@link com.intellij.openapi.editor.colors.EditorColors#SEPARATOR_ABOVE_COLOR SEPARATOR_ABOVE_COLOR} or
 * {@link com.intellij.openapi.editor.colors.EditorColors#TEARLINE_COLOR TEARLINE_COLOR} if it is not set.
 * <p>
 * This method allows to add several components above the editor.
 * To change an order of components the specified component may implement the
 * {@link com.intellij.openapi.util.Weighted Weighted} interface.
 */
public abstract void addTopComponent(final @NotNull FileEditor editor, final @NotNull JComponent component);
public abstract void removeTopComponent(final @NotNull FileEditor editor, final @NotNull JComponent component);
/**
 * Adds the specified component below the editor and paints a separator line above it.
 * If a separator line is not needed, set the client property to {@code true}:
 * <pre> component.putClientProperty(SEPARATOR_DISABLED, true); </pre>
 * Otherwise, a separator line will be painted by a
 * {@link com.intellij.openapi.editor.colors.EditorColors#SEPARATOR_BELOW_COLOR SEPARATOR_BELOW_COLOR} or
 * {@link com.intellij.openapi.editor.colors.EditorColors#TEARLINE_COLOR TEARLINE_COLOR} if it is not set.
 * <p>
 * This method allows to add several components below the editor.
 * To change an order of components the specified component may implement the
 * {@link com.intellij.openapi.util.Weighted Weighted} interface.
 */
public abstract void addBottomComponent(final @NotNull FileEditor editor, final @NotNull JComponent component);
public abstract void removeBottomComponent(final @NotNull FileEditor editor, final @NotNull JComponent component);
/**
 * Client-property key consumed by {@link #addTopComponent}/{@link #addBottomComponent}:
 * set it to {@code true} on the component to suppress its separator line.
 */
public static final Key<Boolean> SEPARATOR_DISABLED = Key.create("FileEditorSeparatorDisabled");
/**
 * Adds specified {@code listener}
 *
 * @param listener listener to be added
 * @deprecated Use {@link com.intellij.util.messages.MessageBus} instead: see {@link FileEditorManagerListener#FILE_EDITOR_MANAGER}
 */
@Deprecated
public void addFileEditorManagerListener(@NotNull FileEditorManagerListener listener) {
}
/**
 * Removes specified {@code listener}
 *
 * @param listener listener to be removed
 * @deprecated Use {@link FileEditorManagerListener#FILE_EDITOR_MANAGER} instead
 */
@Deprecated
public void removeFileEditorManagerListener(@NotNull FileEditorManagerListener listener) {
}
/**
 * Must be called from <a href="https://docs.oracle.com/javase/tutorial/uiswing/concurrency/dispatch.html">EDT</a>.
 *
 * @return opened file editors
 */
public @NotNull List<FileEditor> openEditor(@NotNull OpenFileDescriptor descriptor, boolean focusEditor) {
return openFileEditor(descriptor, focusEditor);
}
public abstract @NotNull List<FileEditor> openFileEditor(@NotNull FileEditorNavigatable descriptor, boolean focusEditor);
/**
 * Returns the project with which the file editor manager is associated.
 *
 * @return the project instance.
 */
public abstract @NotNull Project getProject();
public abstract void registerExtraEditorDataProvider(@NotNull EditorDataProvider provider, Disposable parentDisposable);
/**
 * Returns data associated with given editor/caret context. Data providers are registered via
 * {@link #registerExtraEditorDataProvider(EditorDataProvider, Disposable)} method.
 */
public abstract @Nullable Object getData(@NotNull String dataId, @NotNull Editor editor, @NotNull Caret caret);
/**
 * Selects a specified file editor tab for the specified editor.
 *
 * @param file a file to switch the file editor tab for. The function does nothing if the file is not currently open in the editor.
 * @param fileEditorProviderId the ID of the file editor to open; matches the return value of
 * {@link FileEditorProvider#getEditorTypeId()}
 */
public abstract void setSelectedEditor(@NotNull VirtualFile file, @NotNull String fileEditorProviderId);
/**
 * {@link FileEditorManager} supports asynchronous opening of text editors, i.e. when one of 'openFile' methods returns, returned
 * editor might not be fully initialized yet. This method allows to delay (if needed) execution of given runnable until editor is
 * fully loaded.
 */
public abstract void runWhenLoaded(@NotNull Editor editor, @NotNull Runnable runnable);
}
| |
/*
* Copyright Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ehcache.resilience;
import java.util.HashMap;
import java.util.Map;
import org.ehcache.exceptions.BulkCacheLoadingException;
import org.ehcache.exceptions.BulkCacheWritingException;
import org.ehcache.exceptions.CacheAccessException;
import org.ehcache.exceptions.CacheLoadingException;
import org.ehcache.exceptions.CacheWritingException;
import org.ehcache.exceptions.RethrowingCacheAccessException;
import static java.util.Collections.emptyMap;
/**
 * A {@link ResilienceStrategy} that reacts to every {@link CacheAccessException}
 * by obliterating the affected key(s) — or the whole cache — through the supplied
 * {@link RecoveryCache}, then behaving as if the entry were absent (returning
 * {@code null}/miss-like values) and/or rethrowing the associated loader or
 * writer exception.
 * <p>
 * Invariant of every {@code *Failure} method: cleanup runs first, then the
 * fallback value is returned or the secondary exception is rethrown.
 *
 * @author Chris Dennis
 */
public abstract class RobustResilienceStrategy<K, V> implements ResilienceStrategy<K, V> {
// Used to wipe entries whose state is unknown after a failure.
private final RecoveryCache<K> cache;
/** @param cache the recovery cache used to obliterate suspect entries */
public RobustResilienceStrategy(RecoveryCache<K> cache) {
this.cache = cache;
}
/** Read failed: wipe {@code key} and report a miss. */
@Override
public V getFailure(K key, CacheAccessException e) {
cleanup(key, e);
return null;
}
/** Read failed but the loader succeeded: wipe {@code key}, return the loaded value. */
@Override
public V getFailure(K key, V loaded, CacheAccessException e) {
cleanup(key, e);
return loaded;
}
/** Read and loader both failed: wipe {@code key}, rethrow the loader failure. */
@Override
public V getFailure(K key, CacheAccessException e, CacheLoadingException f) {
cleanup(key, e);
throw f;
}
@Override
public boolean containsKeyFailure(K key, CacheAccessException e) {
cleanup(key, e);
return false;
}
@Override
public void putFailure(K key, V value, CacheAccessException e) {
cleanup(key, e);
}
@Override
public void putFailure(K key, V value, CacheAccessException e, CacheWritingException f) {
cleanup(key, e);
throw f;
}
@Override
public void removeFailure(K key, CacheAccessException e) {
cleanup(key, e);
}
@Override
public void removeFailure(K key, CacheAccessException e, CacheWritingException f) {
cleanup(key, e);
throw f;
}
/** Clear failed: wipe the whole cache. */
@Override
public void clearFailure(CacheAccessException e) {
cleanup(e);
}
@Override
public abstract void iteratorFailure(CacheAccessException e);
/** {@code knownToBeAbsent} is ignored: the entry is wiped either way and absence reported. */
@Override
public V putIfAbsentFailure(K key, V value, CacheAccessException e, boolean knownToBeAbsent) {
cleanup(key, e);
return null;
}
@Override
public V putIfAbsentFailure(K key, V value, CacheAccessException e, CacheWritingException f) {
cleanup(key, e);
throw f;
}
@Override
public V putIfAbsentFailure(K key, V value, CacheAccessException e, CacheLoadingException f) {
cleanup(key, e);
throw f;
}
/** Reports the pre-failure knowledge ({@code knownToBePresent}) after wiping the key. */
@Override
public boolean removeFailure(K key, V value, CacheAccessException e, boolean knownToBePresent) {
cleanup(key, e);
return knownToBePresent;
}
@Override
public boolean removeFailure(K key, V value, CacheAccessException e, CacheWritingException f) {
cleanup(key, e);
throw f;
}
@Override
public boolean removeFailure(K key, V value, CacheAccessException e, CacheLoadingException f) {
cleanup(key, e);
throw f;
}
@Override
public V replaceFailure(K key, V value, CacheAccessException e) {
cleanup(key, e);
return null;
}
@Override
public V replaceFailure(K key, V value, CacheAccessException e, CacheWritingException f) {
cleanup(key, e);
throw f;
}
@Override
public V replaceFailure(K key, V value, CacheAccessException e, CacheLoadingException f) {
cleanup(key, e);
throw f;
}
/** Reports the pre-failure knowledge ({@code knownToMatch}) after wiping the key. */
@Override
public boolean replaceFailure(K key, V value, V newValue, CacheAccessException e, boolean knownToMatch) {
cleanup(key, e);
return knownToMatch;
}
@Override
public boolean replaceFailure(K key, V value, V newValue, CacheAccessException e, CacheWritingException f) {
cleanup(key, e);
throw f;
}
@Override
public boolean replaceFailure(K key, V value, V newValue, CacheAccessException e, CacheLoadingException f) {
cleanup(key, e);
throw f;
}
/** Bulk read failed: wipe all {@code keys} and report each one as a miss (null value). */
@Override
public Map<K, V> getAllFailure(Iterable<? extends K> keys, CacheAccessException e) {
cleanup(keys, e);
HashMap<K, V> result = new HashMap<K, V>();
for (K key : keys) {
result.put(key, null);
}
return result;
}
@Override
public Map<K, V> getAllFailure(Iterable<? extends K> keys, Map<K, V> loaded, CacheAccessException e) {
cleanup(keys, e);
return loaded;
}
@Override
public Map<K, V> getAllFailure(Iterable<? extends K> keys, CacheAccessException e, BulkCacheLoadingException f) {
cleanup(keys, e);
throw f;
}
@Override
public void putAllFailure(Map<? extends K, ? extends V> entries, CacheAccessException e) {
cleanup(entries.keySet(), e);
}
@Override
public void putAllFailure(Map<? extends K, ? extends V> entries, CacheAccessException e, BulkCacheWritingException f) {
cleanup(entries.keySet(), e);
throw f;
}
@Override
public Map<K, V> removeAllFailure(Iterable<? extends K> entries, CacheAccessException e) {
cleanup(entries, e);
return emptyMap();
}
@Override
public Map<K, V> removeAllFailure(Iterable<? extends K> entries, CacheAccessException e, BulkCacheWritingException f) {
cleanup(entries, e);
throw f;
}
/** Obliterates the whole cache, then reports recovered/inconsistent to the hooks below. */
private void cleanup(CacheAccessException from) {
filterException(from);
try {
cache.obliterate();
} catch (CacheAccessException e) {
inconsistent(from, e);
return;
}
recovered(from);
}
/** Obliterates the given keys, then reports recovered/inconsistent to the hooks below. */
private void cleanup(Iterable<? extends K> keys, CacheAccessException from) {
filterException(from);
try {
cache.obliterate(keys);
} catch (CacheAccessException e) {
inconsistent(keys, from, e);
return;
}
recovered(keys, from);
}
/** Obliterates a single key, then reports recovered/inconsistent to the hooks below. */
private void cleanup(K key, CacheAccessException from) {
filterException(from);
try {
cache.obliterate(key);
} catch (CacheAccessException e) {
inconsistent(key, from, e);
return;
}
recovered(key, from);
}
/**
 * If {@code cae} is a {@link RethrowingCacheAccessException}, rethrows its cause
 * before any cleanup happens; otherwise returns normally.
 */
@Deprecated
void filterException(CacheAccessException cae) throws RuntimeException {
if (cae instanceof RethrowingCacheAccessException) {
throw ((RethrowingCacheAccessException) cae).getCause();
}
}
// Hooks invoked after cleanup: either the suspect entries were wiped successfully
// (recovered) or wiping itself failed too (inconsistent).
protected abstract void recovered(K key, CacheAccessException from);
protected abstract void recovered(Iterable<? extends K> keys, CacheAccessException from);
protected abstract void recovered(CacheAccessException from);
protected abstract void inconsistent(K key, CacheAccessException because, CacheAccessException ... cleanup);
protected abstract void inconsistent(Iterable<? extends K> keys, CacheAccessException because, CacheAccessException ... cleanup);
protected abstract void inconsistent(CacheAccessException because, CacheAccessException ... cleanup);
}
| |
/*******************************************************************************
* Copyright 2013 Technology Blueprint Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package uk.co.techblue.jboss.controller.as7;
import static uk.co.techblue.jboss.controller.ControllerConstants.*;
import java.io.IOException;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import javax.security.auth.callback.CallbackHandler;
import org.jboss.as.controller.client.ModelControllerClient;
import org.jboss.as.controller.client.ModelControllerClientConfiguration;
import org.jboss.as.controller.client.OperationBuilder;
import org.jboss.as.controller.client.helpers.ClientConstants;
import org.jboss.as.controller.client.impl.ClientConfigurationImpl;
import org.jboss.dmr.ModelNode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import uk.co.techblue.jboss.auth.AuthenticationCallbackHandler;
import uk.co.techblue.jboss.controller.ControllerOperationExecutor;
import uk.co.techblue.jboss.controller.exception.ControllerOperationException;
import uk.co.techblue.jboss.controller.vo.ControllerClientConfig;
import uk.co.techblue.jboss.controller.vo.JndiDataSource;
import uk.co.techblue.jboss.util.StringUtils;
// TODO: Auto-generated Javadoc
/**
* The service to execute operations on JBoss AS 7 management model controller.
*
* @author <a href="mailto:ajay.deshwal@techblue.co.uk">Ajay Deshwal</a>
* @follower <a href="mailto:yjmyzz@126.com">Jimmy Yang</a>
*/
public class JBoss7ControllerOpeartionExecutor implements ControllerOperationExecutor {
/**
* The logger instance.
*/
private final Logger logger = LoggerFactory.getLogger(getClass());
/**
* The main method. ONLY FOR TESTING!
*
* @param args the arguments
*/
public static void main(String[] args) {
// NOTE(review): manual smoke test against a local JBoss controller. As written
// it REMOVES the "java:/mysql-testjboss7" datasource on 127.0.0.1 when run —
// do not execute against a shared environment.
try {
final JndiDataSource dataSource = new JndiDataSource("java:/mysql-testjboss7", "jdbc:mysql://localhost:3306/test",
"com.mysql", "root", "root");
dataSource.setMaxPoolSize(10);
dataSource.setPoolPrefill(true);
final ControllerClientConfig clientConfig = new ControllerClientConfig("127.0.0.1");
// clientConfig.setUserName("ajay");
// clientConfig.setPassword("ajay");
// List<ModelNode> dataSources = new JBoss7ControllerOpeartionExecutor().getDatasources(clientConfig, "",
// DatasourceStatus.ALL);
// for (ModelNode dataSource : dataSources) {
// System.out.println(dataSource.asProperty().getName());
// }
// new JBoss7ControllerOpeartionExecutor().disableDataSource(clientConfig, "java:/mysql-testjboss7","");
new JBoss7ControllerOpeartionExecutor().removeDatasource(clientConfig, "java:/mysql-testjboss7", "");
// new JBoss7ControllerOpeartionExecutor().createDatasource(clientConfig, dataSource, true);
// new JBoss7ControllerOpeartionExecutor().isDatasourceExists(clientConfig, dataSource.getJndiName());
// new JBoss7ControllerOpeartionExecutor().createDatasource(clientConfig, dataSource, true, "ha");
// new JBoss7ControllerOpeartionExecutor().isDatasourceExists(clientConfig, dataSource.getJndiName(), "full-ha");
} catch (Exception e) {
e.printStackTrace();
}
}
/*
 * (non-Javadoc)
 *
 * @see uk.co.techblue.jboss.controller.ControllerOperationExecutor#createDatasource(uk.co.techblue.jboss.controller.vo.
 * ControllerClientConfig, uk.co.techblue.jboss.controller.vo.JndiDataSource, boolean, java.lang.String[])
 */
/**
 * Creates the datasource in each of the given server profiles, or in the standalone
 * configuration when no profile is supplied.
 *
 * @param controllerClientConfig the controller client configuration
 * @param dataSource the datasource definition to add
 * @param enable whether to enable the datasource after creation
 * @param serverProfileNames the server profiles to target; may be empty for standalone mode
 * @throws ControllerOperationException if any creation fails
 */
public void createDatasource(final ControllerClientConfig controllerClientConfig, final JndiDataSource dataSource,
        final boolean enable, final String... serverProfileNames) throws ControllerOperationException {
    // No profiles supplied: fall back to the standalone (blank profile) variant.
    if (serverProfileNames == null || serverProfileNames.length == 0) {
        createDatasource(controllerClientConfig, dataSource, enable, "");
        return;
    }
    for (final String profileName : serverProfileNames) {
        createDatasource(controllerClientConfig, dataSource, enable, profileName);
    }
}
/*
 * (non-Javadoc)
 *
 * @see uk.co.techblue.jboss.controller.ControllerOperationExecutor#createDatasource(uk.co.techblue.jboss.controller.vo.
 * ControllerClientConfig, uk.co.techblue.jboss.controller.vo.JndiDataSource, boolean, java.lang.String)
 */
/**
 * Creates a datasource in the given server profile and optionally enables it afterwards.
 *
 * @param controllerClientConfig the controller client configuration
 * @param dataSource the datasource definition to add
 * @param enable whether to enable the datasource right after it has been added
 * @param serverProfileName the server profile to add the datasource to; blank for standalone mode
 * @throws ControllerOperationException if the management operation fails or the connection cannot be established
 */
private void createDatasource(final ControllerClientConfig controllerClientConfig, final JndiDataSource dataSource,
        final boolean enable, final String serverProfileName) throws ControllerOperationException {
    final String jndiName = dataSource.getJndiName();
    final ModelNode request = new ModelNode();
    request.get(ClientConstants.OP).set(ClientConstants.ADD);
    // A profile address is only valid in domain mode; omit it for standalone servers.
    if (StringUtils.isNotBlank(serverProfileName)) {
        request.get(ClientConstants.OP_ADDR).add(ADDRESS_PROFILE, serverProfileName);
    }
    request.get(ClientConstants.OP_ADDR).add(ClientConstants.SUBSYSTEM, DATASOURCE_SUBSYSTEM);
    request.get(ClientConstants.OP_ADDR).add(ADDRESS_DATASOURCE, jndiName);
    addDatasourceProperties(request, dataSource);
    final ModelControllerClient client = createControllerClient(controllerClientConfig);
    ModelNode response = null;
    try {
        logger.info("Adding datasource '{}' ...", jndiName);
        response = client.execute(new OperationBuilder(request).build());
    } catch (IOException ioe) {
        throw new ControllerOperationException("An error occurred while executing operation on JBoss controller", ioe);
    } finally {
        try {
            client.close();
        } catch (IOException ioe) {
            // Closing failure is logged only; the operation outcome is decided by the response.
            logger.error(
                    "An error occurred when closing JBoss Controller connection with host "
                            + controllerClientConfig.getHost() + " at port " + controllerClientConfig.getPort()
                            + " while adding datasource '" + jndiName + "'", ioe);
        }
    }
    if (!isOperationSuccess(response)) {
        if (!response.isDefined()) {
            throw new ControllerOperationException(
                    "A subsystem undefined response status received while adding datasource '" + jndiName
                            + "'. Most probably the " + DATASOURCE_SUBSYSTEM + " subsystem is not defined.");
        }
        logger.error("Operation rolled back:" + response.get(RESPONSE_PROPERTY_ROLLEDBACK));
        // Fixed: the closing quote after the datasource name was missing from this message.
        throw new ControllerOperationException("An error occurred while adding datasource '" + jndiName + "'.\n"
                + response.get(ClientConstants.FAILURE_DESCRIPTION).asString());
    }
    logger.info("Datasource '{}' added successfully!", jndiName);
    if (enable) {
        enableDataSource(controllerClientConfig, jndiName, serverProfileName);
    }
}
/*
 * (non-Javadoc)
 *
 * @see uk.co.techblue.jboss.controller.ControllerOperationExecutor#removeDatasource(uk.co.techblue.jboss.controller.vo.
 * ControllerClientConfig, uk.co.techblue.jboss.controller.vo.JndiDataSource, java.lang.String[])
 */
/**
 * Removes the datasource from each of the given server profiles, or from the standalone
 * configuration when no profile is supplied.
 *
 * @param controllerClientConfig the controller client configuration
 * @param datasourceName the name of the datasource to remove
 * @param serverProfileNames the server profiles to target; may be empty for standalone mode
 * @throws ControllerOperationException if any removal fails
 */
public void removeDatasource(final ControllerClientConfig controllerClientConfig, final String datasourceName,
        final String... serverProfileNames) throws ControllerOperationException {
    // No profiles supplied: fall back to the standalone (blank profile) variant.
    if (serverProfileNames == null || serverProfileNames.length == 0) {
        removeDatasource(controllerClientConfig, datasourceName, "");
        return;
    }
    for (final String profileName : serverProfileNames) {
        removeDatasource(controllerClientConfig, datasourceName, profileName);
    }
}
/**
 * Removes a datasource from the given server profile.
 *
 * @param controllerClientConfig the controller client configuration
 * @param datasourceName the name of the datasource to remove
 * @param serverProfileName the server profile to remove the datasource from; blank for standalone mode
 * @throws ControllerOperationException if the management operation fails or the connection cannot be established
 */
private void removeDatasource(final ControllerClientConfig controllerClientConfig, final String datasourceName,
        final String serverProfileName) throws ControllerOperationException {
    final ModelNode request = new ModelNode();
    // DEPLOYMENT_REMOVE_OPERATION resolves to the generic "remove" operation name.
    request.get(ClientConstants.OP).set(ClientConstants.DEPLOYMENT_REMOVE_OPERATION);
    // A profile address is only valid in domain mode; omit it for standalone servers.
    if (StringUtils.isNotBlank(serverProfileName)) {
        request.get(ClientConstants.OP_ADDR).add(ADDRESS_PROFILE, serverProfileName);
    }
    request.get(ClientConstants.OP_ADDR).add(ClientConstants.SUBSYSTEM, DATASOURCE_SUBSYSTEM);
    request.get(ClientConstants.OP_ADDR).add(ADDRESS_DATASOURCE, datasourceName);
    final ModelControllerClient client = createControllerClient(controllerClientConfig);
    ModelNode response = null;
    try {
        logger.info("Removing datasource '{}' ...", datasourceName);
        response = client.execute(new OperationBuilder(request).build());
    } catch (IOException ioe) {
        // Fixed typo: "datatsource" -> "datasource".
        throw new ControllerOperationException("An error occurred while removing datasource '" + datasourceName
                + "' from JBoss model controller", ioe);
    } finally {
        try {
            client.close();
        } catch (IOException ioe) {
            logger.error(
                    "An error occurred when closing JBoss Controller connection with host "
                            + controllerClientConfig.getHost() + " at port " + controllerClientConfig.getPort()
                            + " while removing datasource '" + datasourceName + "'", ioe);
        }
    }
    if (!isOperationSuccess(response)) {
        if (!response.isDefined()) {
            throw new ControllerOperationException(
                    "A subsystem undefined response status received while removing datasource '" + datasourceName
                            + "'. Most probably the " + DATASOURCE_SUBSYSTEM + " subsystem is not defined.");
        }
        logger.error("Operation rolled back:" + response.get(RESPONSE_PROPERTY_ROLLEDBACK));
        // Fixed: the closing quote after the datasource name was missing from this message.
        throw new ControllerOperationException("An error occurred while removing datasource '" + datasourceName + "'.\n"
                + response.get(ClientConstants.FAILURE_DESCRIPTION).asString());
    }
    logger.info("Datasource '{}' removed successfully!", datasourceName);
}
/**
 * Populates the datasource ADD operation request with the attributes of the given
 * datasource definition. Nullable optional attributes are only set when present;
 * mandatory attributes are copied unconditionally.
 *
 * @param request the ADD operation request to populate
 * @param dataSource the datasource definition whose attributes are copied into the request
 */
private void addDatasourceProperties(final ModelNode request, final JndiDataSource dataSource) {
    // DS ATTRIBUTES
    request.get(DS_PROPERTY_JNDINAME).set(dataSource.getJndiName());
    request.get(DS_PROPERTY_USEJAVACONTEXT).set(dataSource.isUseJavaContext());
    request.get(DS_PROPERTY_SHARE_PREPARED_STATEMENTS).set(dataSource.isSharePreparedStatements());
    request.get(DS_PROPERTY_PREPARED_STATEMENTS_CACHE_SIZE).set(dataSource.getStatementCacheSize());
    request.get(DS_PROPERTY_POOLNAME).set(dataSource.getPoolName());
    // CONNECTION PROPERTIES
    request.get(DS_PROPERTY_CONNECTIONURL).set(dataSource.getConnectionURL());
    setPropertyIfNotNull(request, DS_PROPERTY_NEWCONNECTIONSQL, dataSource.getNewConnectionSQL());
    // NOTE(review): throws NPE if the transaction isolation is unset — presumably
    // JndiDataSource guarantees a default; confirm before relying on it.
    request.get(DS_PROPERTY_TRANSACTIONISOLATION).set(dataSource.getTransactionIsolation().name());
    request.get(DS_PROPERTY_USE_CCM).set(dataSource.isUseCacheConnectionManager());
    request.get(DS_PROPERTY_JTA_INTEGRATION).set(dataSource.isUseJTA());
    // SECURITY
    request.get(DS_PROPERTY_DRIVERNAME).set(dataSource.getDriverName());
    request.get(DS_PROPERTY_USERNAME).set(dataSource.getUserName());
    request.get(DS_PROPERTY_PASSWORD).set(dataSource.getPassword());
    setPropertyIfNotNull(request, DS_PROPERTY_SECURITYDOMAIN, dataSource.getSecurityDomain());
    // POOL PROPERTIES
    request.get(DS_PROPERTY_MINPOOLSIZE).set(dataSource.getMinPoolSize());
    request.get(DS_PROPERTY_MAXPOOLSIZE).set(dataSource.getMaxPoolSize());
    request.get(DS_PROPERTY_POOLPREFILL).set(dataSource.isPoolPrefill());
    request.get(DS_PROPERTY_POOL_STRICT_MINIMUM).set(dataSource.isPoolStrictMinimum());
    // VALIDATION ATTRIBUTES
    setPropertyIfNotNull(request, DS_PROPERTY_VALID_CONNECTION_SQL, dataSource.getCheckValidSQL());
    setPropertyIfNotNull(request, DS_PROPERTY_VALID_CONNCHECKER_CLASSNAME, dataSource.getValidConnectionCheckerClassName());
    setPropertyIfNotNull(request, DS_PROPERTY_EXCEPTION_SORTER_CLASSNAME, dataSource.getExceptionSorterClassName());
    setPropertyIfNotNull(request, DS_PROPERTY_STALE_CONNCHECKER_CLASSNAME, dataSource.getStaleConnectionCheckerClassName());
    request.get(DS_PROPERTY_BG_VALIDATION).set(dataSource.isBackgroundValidation());
    // Only set the validation interval when it is a meaningful positive value.
    if (dataSource.getBackgroundValidationMillisec() > 0) {
        request.get(DS_PROPERTY_BG_VALIDATION_MILLIS).set(dataSource.getBackgroundValidationMillisec());
    }
    request.get(DS_PROPERTY_VALIDATE_ON_MATCH).set(dataSource.isValidateOnMatch());
}
/**
 * Sets the given property on the request, skipping it entirely when the value is {@code null}.
 *
 * @param request the operation request to update
 * @param propertyName the property name to set
 * @param propertyValue the property value; ignored when {@code null}
 */
private void setPropertyIfNotNull(ModelNode request, String propertyName, String propertyValue) {
    if (propertyValue == null) {
        return;
    }
    request.get(propertyName).set(propertyValue);
}
/**
 * Creates a model controller client connected to the host and port of the given
 * client configuration, authenticating via the configured credentials.
 *
 * @param controllerClientConfig the controller client configuration
 * @return the connected model controller client
 * @throws ControllerOperationException if the configured host cannot be resolved
 */
private ModelControllerClient createControllerClient(final ControllerClientConfig controllerClientConfig)
        throws ControllerOperationException {
    try {
        final CallbackHandler callbackHandler = getAuthCallbackHandler(controllerClientConfig);
        final ModelControllerClientConfiguration clientConfiguration = ClientConfigurationImpl.create(
                controllerClientConfig.getHost(), controllerClientConfig.getPort(), callbackHandler,
                controllerClientConfig.getSaslOptions());
        return ModelControllerClient.Factory.create(clientConfiguration);
    } catch (UnknownHostException uhe) {
        throw new ControllerOperationException(
                "Tried establishing connection with JBoss controller process. Unable to connect to host: "
                        + controllerClientConfig.getHost() + " at port " + controllerClientConfig.getPort(), uhe);
    }
}
/**
 * Builds the authentication callback handler from the credentials held in the
 * controller client configuration.
 *
 * @param controllerClient the controller client configuration carrying the credentials
 * @return the authentication callback handler
 */
private CallbackHandler getAuthCallbackHandler(final ControllerClientConfig controllerClient) {
    final String userName = controllerClient.getUserName();
    final String password = controllerClient.getPassword();
    return new AuthenticationCallbackHandler(userName, password);
}
/**
 * Checks whether the given IP address belongs to the local machine: either a wildcard or
 * loopback address, or an address bound to one of this host's network interfaces.
 *
 * @param address the address to check
 * @return true if the address is local to this machine
 */
@SuppressWarnings("unused")
private static boolean isLocalIpAddress(final InetAddress address) {
    // Wildcard and loopback addresses are always local.
    if (address.isAnyLocalAddress() || address.isLoopbackAddress()) {
        return true;
    }
    // Otherwise the address is local only if it is bound to an interface on this host.
    try {
        return NetworkInterface.getByInetAddress(address) != null;
    } catch (SocketException se) {
        // Interface lookup failed; conservatively treat the address as non-local.
        return false;
    }
}
/*
 * (non-Javadoc)
 *
 * @see uk.co.techblue.jboss.controller.ControllerOperationExecutor#isDatasourceExists(uk.co.techblue.jboss.controller.vo.
 * ControllerClientConfig, java.lang.String)
 */
/**
 * Checks whether a datasource with the given name exists, without targeting a server profile.
 *
 * @param controllerClientConfig the controller client configuration
 * @param dataSourceName the datasource name to look up
 * @return true if the datasource exists
 * @throws ControllerOperationException if the lookup fails
 */
public boolean isDatasourceExists(final ControllerClientConfig controllerClientConfig, final String dataSourceName)
        throws ControllerOperationException {
    // Delegate to the profile-aware overload with no server profile selected.
    final String noProfile = null;
    return isDatasourceExists(controllerClientConfig, dataSourceName, noProfile);
}
/*
 * (non-Javadoc)
 *
 * @see uk.co.techblue.jboss.controller.ControllerOperationExecutor#isDatasourceExists(uk.co.techblue.jboss.controller.vo.
 * ControllerClientConfig, java.lang.String, java.lang.String)
 */
/**
 * Checks whether a datasource with the given name exists in the given server profile.
 *
 * @param controllerClientConfig the controller client configuration
 * @param dataSourceName the datasource name to look up
 * @param serverProfileName the server profile to search; blank/null for standalone mode
 * @return true if a datasource with exactly that name is registered in the datasource subsystem
 * @throws ControllerOperationException if the datasource subsystem cannot be read
 */
public boolean isDatasourceExists(final ControllerClientConfig controllerClientConfig, final String dataSourceName,
        final String serverProfileName) throws ControllerOperationException {
    logger.info("Checking if datasource '{}' exists...", dataSourceName);
    final List<ModelNode> datasources = getDatasources(controllerClientConfig, serverProfileName, DatasourceStatus.ALL);
    if (datasources != null && !datasources.isEmpty()) {
        for (final ModelNode dataSource : datasources) {
            final String existingSourceName = dataSource.asProperty().getName();
            if (existingSourceName.equals(dataSourceName)) {
                // Fixed: the name was previously concatenated onto the message instead of
                // being passed as the SLF4J placeholder argument.
                logger.info("Datasource '{}' exists in datasource subsystem!", dataSourceName);
                return true;
            }
        }
    } else {
        throw new ControllerOperationException(
                "A subsystem undefined response status received while checking if datasource '" + dataSourceName
                        + "' exists. Most probably the " + DATASOURCE_SUBSYSTEM + " subsystem is not defined.");
    }
    logger.info("Datasource '{}' does not exist in datasource subsystem!", dataSourceName);
    return false;
}
/*
 * (non-Javadoc)
 *
 * @see uk.co.techblue.jboss.controller.ControllerOperationExecutor#enableDataSource(uk.co.techblue.jboss.controller.vo.
 * ControllerClientConfig, java.lang.String, java.lang.String[])
 */
/**
 * Enables the datasource in each of the given server profiles, or in the standalone
 * configuration when no profile is supplied.
 *
 * @param controllerClientConfig the controller client configuration
 * @param datasourceName the name of the datasource to enable
 * @param serverProfileNames the server profiles to target; may be empty for standalone mode
 * @throws ControllerOperationException if any enable operation fails
 */
public void enableDataSource(final ControllerClientConfig controllerClientConfig, final String datasourceName,
        final String... serverProfileNames) throws ControllerOperationException {
    // No profiles supplied: fall back to the standalone (blank profile) variant.
    if (serverProfileNames == null || serverProfileNames.length == 0) {
        enableDataSource(controllerClientConfig, datasourceName, "");
        return;
    }
    for (final String profileName : serverProfileNames) {
        enableDataSource(controllerClientConfig, datasourceName, profileName);
    }
}
/*
 * (non-Javadoc)
 *
 * @see uk.co.techblue.jboss.controller.ControllerOperationExecutor#enableDataSource(uk.co.techblue.jboss.controller.vo.
 * ControllerClientConfig, java.lang.String, java.lang.String)
 */
/**
 * Enables an already-deployed datasource within the given server profile.
 *
 * @param controllerClientConfig the controller client configuration
 * @param datasourceName the name of the datasource to enable
 * @param serverProfileName the server profile containing the datasource; blank for standalone mode
 * @throws ControllerOperationException if the enable operation fails or the connection cannot be established
 */
private void enableDataSource(final ControllerClientConfig controllerClientConfig, final String datasourceName,
        final String serverProfileName) throws ControllerOperationException {
    final ModelNode request = new ModelNode();
    request.get(ClientConstants.OP).set(OPERATION_ENABLE);
    // A profile address is only valid in domain mode; omit it for standalone servers.
    if (StringUtils.isNotBlank(serverProfileName)) {
        request.get(ClientConstants.OP_ADDR).add(ADDRESS_PROFILE, serverProfileName);
    }
    request.get(ClientConstants.OP_ADDR).add(ClientConstants.SUBSYSTEM, DATASOURCE_SUBSYSTEM);
    request.get(ClientConstants.OP_ADDR).add(ADDRESS_DATASOURCE, datasourceName);
    final ModelControllerClient controllerClient = createControllerClient(controllerClientConfig);
    ModelNode result = null;
    try {
        logger.info("Enabling datasource '{}' ...", datasourceName);
        result = controllerClient.execute(new OperationBuilder(request).build());
    } catch (IOException ioe) {
        throw new ControllerOperationException(
                "An error occurred while executing operation on JBoss controller to enable datasource '" + datasourceName
                        + "'", ioe);
    } finally {
        try {
            controllerClient.close();
        } catch (IOException ioe) {
            logger.error(
                    "An error occurred while closing JBoss Controller connection with host "
                            + controllerClientConfig.getHost() + " at port " + controllerClientConfig.getPort()
                            + " during the process of enabling datasource '" + datasourceName + "'", ioe);
        }
    }
    if (isOperationSuccess(result)) {
        logger.info("Datasource '{}' enabled successfully!", datasourceName);
        return;
    }
    if (!result.isDefined()) {
        throw new ControllerOperationException(
                "A subsystem undefined response status recieved while enabling datasource '" + datasourceName
                        + "'. Most probably the " + DATASOURCE_SUBSYSTEM + " subsystem is not defined.");
    }
    throw new ControllerOperationException("An error thrown from JBoss controller while enabling datasource:'"
            + datasourceName + "'.\n" + result.get(ClientConstants.FAILURE_DESCRIPTION).asString());
}
/**
 * Checks whether a controller operation response reports success.
 *
 * @param response the operation response; may be {@code null} or undefined
 * @return true only when the response is present, defined, and its outcome equals "success"
 */
private boolean isOperationSuccess(final ModelNode response) {
    // Defensive null check: treat a missing response the same as an undefined one.
    if (response == null || !response.isDefined()) {
        return false;
    }
    return ClientConstants.SUCCESS.equals(response.get(ClientConstants.OUTCOME).asString());
}
/*
 * (non-Javadoc)
 *
 * @see uk.co.techblue.jboss.controller.ControllerOperationExecutor#getDatasources(uk.co.techblue.jboss.controller.vo.
 * ControllerClientConfig, java.lang.String, uk.co.techblue.jboss.controller.ControllerOperationExecutor.DatasourceStatus)
 */
/**
 * Reads the datasource subsystem and returns its datasources, optionally filtered by
 * enabled/disabled status.
 *
 * @param controllerClientConfig the controller client configuration
 * @param serverProfileName the server profile to read; blank/null for standalone mode
 * @param datasourceStatus the status filter to apply
 * @return the matching datasource nodes
 * @throws ControllerOperationException if the subsystem cannot be read or is undefined
 */
@Override
public List<ModelNode> getDatasources(final ControllerClientConfig controllerClientConfig, final String serverProfileName,
        final DatasourceStatus datasourceStatus) throws ControllerOperationException {
    final ModelNode request = new ModelNode();
    request.get(ClientConstants.OP).set(ClientConstants.READ_RESOURCE_OPERATION);
    if (StringUtils.isNotBlank(serverProfileName)) {
        request.get(ClientConstants.OP_ADDR).add(ADDRESS_PROFILE, serverProfileName);
    }
    request.get(ClientConstants.RECURSIVE).set(false);
    request.get(ClientConstants.OP_ADDR).add(ClientConstants.SUBSYSTEM, DATASOURCE_SUBSYSTEM);
    final ModelControllerClient client = createControllerClient(controllerClientConfig);
    ModelNode result = null;
    try {
        result = client.execute(new OperationBuilder(request).build());
    } catch (IOException ioe) {
        throw new ControllerOperationException(
                "An error occurred while executing operation on JBoss controller to get the datasources", ioe);
    } finally {
        try {
            client.close();
        } catch (IOException ioe) {
            logger.error("An error occurred while closing JBoss Controller client connection with host "
                    + controllerClientConfig.getHost() + " at port " + controllerClientConfig.getPort()
                    + " during the process of getting all the datasources", ioe);
        }
    }
    // The same message applies to an undefined response and an undefined datasource node.
    final String undefinedMessage = "A subsystem undefined response status recieved while getting datasources. Most probably the "
            + DATASOURCE_SUBSYSTEM + " subsystem is not defined.";
    if (!isOperationSuccess(result)) {
        if (!result.isDefined()) {
            throw new ControllerOperationException(undefinedMessage);
        }
        throw new ControllerOperationException("An error thrown from JBoss controller while getting datasources.\n"
                + result.get(ClientConstants.FAILURE_DESCRIPTION).asString());
    }
    final ModelNode datasources = result.get(ClientConstants.RESULT).get(ADDRESS_DATASOURCE);
    if (!datasources.isDefined()) {
        throw new ControllerOperationException(undefinedMessage);
    }
    return datasourceStatus == DatasourceStatus.ALL
            ? datasources.asList()
            : getFilteredDataSources(controllerClientConfig, datasourceStatus, datasources.asList());
}
/**
 * Filters the given datasource nodes down to those matching the requested status,
 * querying the controller for the enabled state of each one.
 *
 * @param controllerClientConfig the controller client configuration
 * @param datasourceStatus the status to filter by (ENABLED or DISABLED)
 * @param datasourceList the datasource nodes to filter
 * @return the matching nodes, or {@code null} when the input list is null or empty
 * @throws ControllerOperationException if a status lookup fails
 */
private List<ModelNode> getFilteredDataSources(final ControllerClientConfig controllerClientConfig,
        final DatasourceStatus datasourceStatus, final List<ModelNode> datasourceList) throws ControllerOperationException {
    if (datasourceList == null || datasourceList.isEmpty()) {
        return null;
    }
    final List<ModelNode> filtered = new ArrayList<ModelNode>();
    for (final ModelNode candidate : datasourceList) {
        // Each status check issues a separate read against the controller.
        final boolean enabled = isDatasourceEnabled(controllerClientConfig, "", candidate.asProperty().getName());
        if (datasourceStatus == DatasourceStatus.ENABLED && enabled) {
            filtered.add(candidate);
        } else if (datasourceStatus == DatasourceStatus.DISABLED && !enabled) {
            filtered.add(candidate);
        }
    }
    return filtered;
}
/*
 * (non-Javadoc)
 *
 * @see uk.co.techblue.jboss.controller.ControllerOperationExecutor#isDatasourceEnabled(uk.co.techblue.jboss.controller.vo.
 * ControllerClientConfig, java.lang.String, java.lang.String)
 */
/**
 * Reads the "enabled" attribute of the given datasource.
 *
 * @param controllerClientConfig the controller client configuration
 * @param serverProfileName the server profile containing the datasource; blank for standalone mode
 * @param datasource the datasource name
 * @return true if the datasource is enabled
 * @throws ControllerOperationException if the attribute cannot be read
 */
public boolean isDatasourceEnabled(final ControllerClientConfig controllerClientConfig, final String serverProfileName,
        final String datasource) throws ControllerOperationException {
    final ModelNode request = new ModelNode();
    request.get(ClientConstants.OP).set(ClientConstants.READ_ATTRIBUTE_OPERATION);
    if (StringUtils.isNotBlank(serverProfileName)) {
        request.get(ClientConstants.OP_ADDR).add(ADDRESS_PROFILE, serverProfileName);
    }
    request.get(ClientConstants.RECURSIVE).set(false);
    request.get(ClientConstants.OP_ADDR).add(ClientConstants.SUBSYSTEM, DATASOURCE_SUBSYSTEM);
    request.get(ClientConstants.OP_ADDR).add(ADDRESS_DATASOURCE, datasource);
    request.get(ClientConstants.NAME).set(ATTRIBUTE_ENABLED);
    final ModelControllerClient client = createControllerClient(controllerClientConfig);
    ModelNode result = null;
    try {
        result = client.execute(new OperationBuilder(request).build());
    } catch (IOException ioe) {
        throw new ControllerOperationException(
                "An error occurred while executing operation on JBoss controller to get the datasource status", ioe);
    } finally {
        try {
            client.close();
        } catch (IOException ioe) {
            logger.error("An error occurred while closing JBoss Controller client connection with host "
                    + controllerClientConfig.getHost() + " at port " + controllerClientConfig.getPort()
                    + " during the process of getting the status of datasource '" + datasource + "'", ioe);
        }
    }
    if (isOperationSuccess(result)) {
        return result.get(ClientConstants.RESULT).asBoolean();
    }
    if (!result.isDefined()) {
        throw new ControllerOperationException(
                "A subsystem undefined response status recieved while getting the status of datasource '" + datasource
                        + "'. Most probably the " + DATASOURCE_SUBSYSTEM + " subsystem is not defined.");
    }
    throw new ControllerOperationException(
            "An error thrown from JBoss controller while getting the status of datasource '" + datasource + "'.\n"
                    + result.get(ClientConstants.FAILURE_DESCRIPTION).asString());
}
/*
 * (non-Javadoc)
 *
 * @see uk.co.techblue.jboss.controller.ControllerOperationExecutor#disableDataSource(uk.co.techblue.jboss.controller.vo.
 * ControllerClientConfig, java.lang.String, java.lang.String[])
 */
/**
 * Disables the datasource in each of the given server profiles, or in the standalone
 * configuration when no profile is supplied.
 *
 * @param controllerClientConfig the controller client configuration
 * @param datasourceName the name of the datasource to disable
 * @param serverProfileNames the server profiles to target; may be empty for standalone mode
 * @throws ControllerOperationException if any disable operation fails
 */
@Override
public void disableDataSource(ControllerClientConfig controllerClientConfig, String datasourceName,
        String... serverProfileNames) throws ControllerOperationException {
    // No profiles supplied: fall back to the standalone (blank profile) variant.
    if (serverProfileNames == null || serverProfileNames.length == 0) {
        disableDataSource(controllerClientConfig, datasourceName, "");
        return;
    }
    for (final String profileName : serverProfileNames) {
        disableDataSource(controllerClientConfig, datasourceName, profileName);
    }
}
/**
 * Disables an already-deployed datasource within the given server profile.
 *
 * @param controllerClientConfig the controller client configuration
 * @param datasourceName the name of the datasource to disable
 * @param serverProfileName the server profile containing the datasource; blank for standalone mode
 * @throws ControllerOperationException if the disable operation fails or the connection cannot be established
 */
private void disableDataSource(final ControllerClientConfig controllerClientConfig, final String datasourceName,
        final String serverProfileName) throws ControllerOperationException {
    final ModelNode request = new ModelNode();
    request.get(ClientConstants.OP).set(OPERATION_DISABLE);
    // A profile address is only valid in domain mode; omit it for standalone servers.
    if (StringUtils.isNotBlank(serverProfileName)) {
        request.get(ClientConstants.OP_ADDR).add(ADDRESS_PROFILE, serverProfileName);
    }
    request.get(ClientConstants.OP_ADDR).add(ClientConstants.SUBSYSTEM, DATASOURCE_SUBSYSTEM);
    request.get(ClientConstants.OP_ADDR).add(ADDRESS_DATASOURCE, datasourceName);
    final ModelControllerClient controllerClient = createControllerClient(controllerClientConfig);
    ModelNode result = null;
    try {
        logger.info("Disabling datasource '{}' ...", datasourceName);
        result = controllerClient.execute(new OperationBuilder(request).build());
    } catch (IOException ioe) {
        throw new ControllerOperationException(
                "An error occurred while executing operation on JBoss controller to disable datasource '" + datasourceName
                        + "'", ioe);
    } finally {
        try {
            controllerClient.close();
        } catch (IOException ioe) {
            logger.error(
                    "An error occurred while closing JBoss Controller connection with host "
                            + controllerClientConfig.getHost() + " at port " + controllerClientConfig.getPort()
                            + " during the process of disabling datasource '" + datasourceName + "'", ioe);
        }
    }
    if (isOperationSuccess(result)) {
        logger.info("Datasource '{}' disabled successfully!", datasourceName);
        return;
    }
    if (!result.isDefined()) {
        throw new ControllerOperationException(
                "A subsystem undefined response status recieved while disabling datasource '" + datasourceName
                        + "'. Most probably the " + DATASOURCE_SUBSYSTEM + " subsystem is not defined.");
    }
    throw new ControllerOperationException("An error thrown from JBoss controller while disabling datasource:'"
            + datasourceName + "'.\n" + result.get(ClientConstants.FAILURE_DESCRIPTION).asString());
}
/*
 * (non-Javadoc)
 *
 * @see uk.co.techblue.jboss.controller.ControllerOperationExecutor#enableDataSources(uk.co.techblue.jboss.controller.vo.
 * ControllerClientConfig, java.util.List, java.lang.String[])
 */
/**
 * Enables every datasource in the given list across the requested server profiles.
 *
 * @param controllerClientConfig the controller client configuration
 * @param dataSourceNames the datasource names to enable; must be non-null and non-empty
 * @param serverProfileNames the server profiles to target; may be empty for standalone mode
 * @throws ControllerOperationException if any enable operation fails
 */
@Override
public void enableDataSources(final ControllerClientConfig controllerClientConfig, final List<String> dataSourceNames,
        final String... serverProfileNames) throws ControllerOperationException {
    if (dataSourceNames == null || dataSourceNames.isEmpty()) {
        throw new IllegalArgumentException("Datasource list cannot be blank or null.");
    }
    // Enable each datasource in turn; the first failure aborts the batch.
    for (final String name : dataSourceNames) {
        enableDataSource(controllerClientConfig, name, serverProfileNames);
    }
}
/*
 * (non-Javadoc)
 *
 * @see uk.co.techblue.jboss.controller.ControllerOperationExecutor#disableDataSources(uk.co.techblue.jboss.controller.vo.
 * ControllerClientConfig, java.util.List, java.lang.String[])
 */
/**
 * Disables every datasource in the given list across the requested server profiles.
 *
 * @param controllerClientConfig the controller client configuration
 * @param dataSourceNames the datasource names to disable; must be non-null and non-empty
 * @param serverProfileNames the server profiles to target; may be empty for standalone mode
 * @throws ControllerOperationException if any disable operation fails
 */
@Override
public void disableDataSources(final ControllerClientConfig controllerClientConfig, final List<String> dataSourceNames,
        final String... serverProfileNames) throws ControllerOperationException {
    if (dataSourceNames == null || dataSourceNames.isEmpty()) {
        throw new IllegalArgumentException("Datasource list cannot be blank or null.");
    }
    // Disable each datasource in turn; the first failure aborts the batch.
    for (final String name : dataSourceNames) {
        disableDataSource(controllerClientConfig, name, serverProfileNames);
    }
}
/* (non-Javadoc)
 * @see uk.co.techblue.jboss.controller.ControllerOperationExecutor#createDatasources(uk.co.techblue.jboss.controller.vo.ControllerClientConfig, java.util.List, boolean, java.lang.String[])
 */
/**
 * Creates the given datasources; if any creation fails, the datasources added so far are
 * removed (best-effort rollback) and the original failure is rethrown.
 *
 * @param controllerClientConfig the controller client configuration
 * @param dataSources the datasource definitions to add; must be non-null and non-empty
 * @param enable whether to enable each datasource after creation
 * @param serverProfileNames the server profiles to target; may be empty for standalone mode
 * @throws ControllerOperationException if any creation fails (after attempting rollback)
 */
public void createDatasources(final ControllerClientConfig controllerClientConfig, final List<JndiDataSource> dataSources,
        final boolean enable, final String... serverProfileNames) throws ControllerOperationException {
    if (dataSources == null || dataSources.isEmpty()) {
        throw new IllegalArgumentException("Datasource list cannot be blank or null.");
    }
    final List<String> addedDatasourceNames = new ArrayList<String>();
    for (final JndiDataSource dataSource : dataSources) {
        try {
            createDatasource(controllerClientConfig, dataSource, enable, serverProfileNames);
            addedDatasourceNames.add(dataSource.getName());
        } catch (ControllerOperationException coe) {
            // Best-effort rollback of the datasources added so far; the original failure
            // is always rethrown even if the rollback itself fails.
            try {
                if (!addedDatasourceNames.isEmpty()) {
                    removeDatasources(controllerClientConfig, addedDatasourceNames, serverProfileNames);
                }
            } catch (ControllerOperationException rollbackFailure) {
                // Fixed typo: "deployemnt" -> "deployment".
                logger.error("An error occurred while rolling back the datasource deployment", rollbackFailure);
            }
            throw coe;
        }
    }
}
/* (non-Javadoc)
 * @see uk.co.techblue.jboss.controller.ControllerOperationExecutor#removeDatasources(uk.co.techblue.jboss.controller.vo.ControllerClientConfig, java.util.List, java.lang.String[])
 */
/**
 * Removes every datasource in the given list across the requested server profiles.
 *
 * @param controllerClientConfig the controller client configuration
 * @param datasourceNames the datasource names to remove; must be non-null and non-empty
 * @param serverProfileNames the server profiles to target; may be empty for standalone mode
 * @throws ControllerOperationException if any removal fails
 */
public void removeDatasources(final ControllerClientConfig controllerClientConfig, final List<String> datasourceNames,
        final String... serverProfileNames) throws ControllerOperationException {
    if (datasourceNames == null || datasourceNames.isEmpty()) {
        throw new IllegalArgumentException("Datasource list cannot be blank or null.");
    }
    // Remove each datasource in turn; the first failure aborts the batch.
    for (final String name : datasourceNames) {
        removeDatasource(controllerClientConfig, name, serverProfileNames);
    }
}
/**
 * Reads the server configurations registered under the given host of a managed domain.
 *
 * @param controllerClientConfig the controller client configuration
 * @param host the domain host whose servers are listed
 * @return the server configuration nodes
 * @throws ControllerOperationException if the host resource cannot be read or has no server configs
 */
@Override
public List<ModelNode> getServers(ControllerClientConfig controllerClientConfig, String host) throws ControllerOperationException {
    final ModelNode request = new ModelNode();
    request.get(ClientConstants.OP).set(ClientConstants.READ_RESOURCE_OPERATION);
    request.get(ClientConstants.RECURSIVE).set(false);
    request.get(ClientConstants.OP_ADDR).add(ClientConstants.HOST, host);
    final ModelControllerClient client = createControllerClient(controllerClientConfig);
    ModelNode result = null;
    try {
        result = client.execute(new OperationBuilder(request).build());
    } catch (IOException ioe) {
        throw new ControllerOperationException(
                "An error occurred while executing operation on JBoss controller to get the servers", ioe);
    } finally {
        try {
            client.close();
        } catch (IOException ioe) {
            logger.error("An error occurred while closing JBoss Controller client connection with host "
                    + controllerClientConfig.getHost() + " at port " + controllerClientConfig.getPort()
                    + " during the process of getting all the servers", ioe);
        }
    }
    if (!isOperationSuccess(result)) {
        if (!result.isDefined()) {
            throw new ControllerOperationException(
                    "undefined response status recieved while getting servers. Most probably the "
                            + ClientConstants.HOST + " " + ClientConstants.SERVER_CONFIG + " is not defined.");
        }
        throw new ControllerOperationException("An error thrown from JBoss controller while getting servers.\n"
                + result.get(ClientConstants.FAILURE_DESCRIPTION).asString());
    }
    final ModelNode servers = result.get(ClientConstants.RESULT).get(ClientConstants.SERVER_CONFIG);
    if (!servers.isDefined()) {
        throw new ControllerOperationException(
                "undefined response status recieved while getting servers. Most probably the "
                        + ClientConstants.HOST + " " + ClientConstants.SERVER_CONFIG + " is not defined.");
    }
    return servers.asList();
}
/**
 * Starts the named server configuration on the given domain host.
 *
 * @param controllerClientConfig the controller client configuration
 * @param host the domain host owning the server
 * @param serverName the server configuration name to start
 * @return the operation result node
 * @throws ControllerOperationException if the start operation fails
 */
@Override
public ModelNode startServer(ControllerClientConfig controllerClientConfig, String host, String serverName) throws ControllerOperationException {
    final ModelNode request = new ModelNode();
    request.get(ClientConstants.OP).set(OPERATION_START);
    request.get(ClientConstants.OP_ADDR).add(ClientConstants.HOST, host);
    request.get(ClientConstants.OP_ADDR).add(ClientConstants.SERVER_CONFIG, serverName);
    final ModelControllerClient client = createControllerClient(controllerClientConfig);
    ModelNode result = null;
    try {
        result = client.execute(new OperationBuilder(request).build());
    } catch (IOException ioe) {
        throw new ControllerOperationException(
                "An error occurred while executing operation on JBoss controller to start server", ioe);
    } finally {
        try {
            client.close();
        } catch (IOException ioe) {
            logger.error("An error occurred while closing JBoss Controller client connection with host "
                    + controllerClientConfig.getHost() + " at port " + controllerClientConfig.getPort()
                    + " during the process of start server", ioe);
        }
    }
    if (isOperationSuccess(result)) {
        return result.get(ClientConstants.RESULT);
    }
    if (!result.isDefined()) {
        throw new ControllerOperationException(
                "undefined response status recieved while starting server. Most probably the "
                        + ClientConstants.HOST + " " + serverName + " is not defined.");
    }
    throw new ControllerOperationException("An error thrown from JBoss controller while starting server.\n"
            + result.get(ClientConstants.FAILURE_DESCRIPTION).asString());
}
/**
 * Stops the named server configuration on the given domain host.
 *
 * @param controllerClientConfig the controller client configuration
 * @param host the domain host owning the server
 * @param serverName the server configuration name to stop
 * @return the operation result node
 * @throws ControllerOperationException if the stop operation fails
 */
@Override
public ModelNode stopServer(ControllerClientConfig controllerClientConfig, String host, String serverName) throws ControllerOperationException {
    final ModelNode request = new ModelNode();
    request.get(ClientConstants.OP).set(OPERATION_STOP);
    request.get(ClientConstants.OP_ADDR).add(ClientConstants.HOST, host);
    request.get(ClientConstants.OP_ADDR).add(ClientConstants.SERVER_CONFIG, serverName);
    final ModelControllerClient controllerClient = createControllerClient(controllerClientConfig);
    ModelNode response = null;
    try {
        response = controllerClient.execute(new OperationBuilder(request).build());
    } catch (IOException ioe) {
        // Fixed: messages below previously said "start"/"starting" (copy-paste from startServer).
        throw new ControllerOperationException(
                "An error occurred while executing operation on JBoss controller to stop server", ioe);
    } finally {
        try {
            controllerClient.close();
        } catch (IOException ioe) {
            logger.error("An error occurred while closing JBoss Controller client connection with host "
                    + controllerClientConfig.getHost() + " at port " + controllerClientConfig.getPort()
                    + " during the process of stop server", ioe);
        }
    }
    if (!isOperationSuccess(response)) {
        if (!response.isDefined()) {
            throw new ControllerOperationException(
                    "undefined response status received while stopping server. Most probably the "
                            + ClientConstants.HOST + " " + serverName + " is not defined.");
        }
        throw new ControllerOperationException("An error thrown from JBoss controller while stopping server.\n"
                + response.get(ClientConstants.FAILURE_DESCRIPTION).asString());
    }
    return response.get(ClientConstants.RESULT);
}
/**
 * Reads the runtime "server-state" attribute of a managed server.
 *
 * @param controllerClientConfig connection settings for the domain controller
 * @param host                   name of the host controller owning the server
 * @param serverName             name of the running server to query
 * @return the "result" node of the controller response (the state value)
 * @throws ControllerOperationException if the operation fails (e.g. the server is
 *         stopped so the runtime resource does not exist), the response is undefined,
 *         or an I/O error occurs while talking to the controller
 */
@Override
public ModelNode getServerState(ControllerClientConfig controllerClientConfig, String host, String serverName) throws ControllerOperationException {
    final ModelNode request = new ModelNode();
    request.get(ClientConstants.OP).set(ClientConstants.READ_ATTRIBUTE_OPERATION);
    request.get(ClientConstants.OP_ADDR).add(ClientConstants.HOST, host);
    // Addressed at the runtime "server" resource (unlike start/stop, which use "server-config").
    request.get(ClientConstants.OP_ADDR).add(ClientConstants.SERVER, serverName);
    request.get(ClientConstants.NAME).set(ATTRIBUTE_SERVER_STATE);
    final ModelControllerClient controllerClient = createControllerClient(controllerClientConfig);
    ModelNode response = null;
    try {
        response = controllerClient.execute(new OperationBuilder(request).build());
    } catch (IOException ioe) {
        throw new ControllerOperationException(
                "An error occurred while executing operation on JBoss controller to get server state", ioe);
    } finally {
        // Always release the controller connection; log (not rethrow) close failures.
        try {
            controllerClient.close();
        } catch (IOException ioe) {
            logger.error("An error occurred while closing JBoss Controller client connection with host "
                    + controllerClientConfig.getHost() + " at port " + controllerClientConfig.getPort()
                    + " during the process of get server state", ioe);
        }
    }
    if (!isOperationSuccess(response)) {
        if (!response.isDefined()) {
            // Message fixed: "received" typo, and the missing resource is the server, not the host.
            throw new ControllerOperationException(
                    "undefined response status received while getting server state. Most probably the "
                            + ClientConstants.SERVER + " " + serverName + " is not defined.");
        }
        // Message fixed: added the missing space after "state."
        throw new ControllerOperationException("An error thrown from JBoss controller while getting server state. Most probably the server is stopped.\n"
                + response.get(ClientConstants.FAILURE_DESCRIPTION).asString());
    }
    return response.get(ClientConstants.RESULT);
}
/**
 * Reads the "status" attribute of a server configuration via the domain controller.
 *
 * @param controllerClientConfig connection settings for the domain controller
 * @param host                   name of the host controller owning the server
 * @param serverName             name of the server-config to query
 * @return the "result" node of the controller response (the status value)
 * @throws ControllerOperationException if the operation fails, the response is
 *         undefined, or an I/O error occurs while talking to the controller
 */
@Override
public ModelNode getServerStatus(ControllerClientConfig controllerClientConfig, String host, String serverName) throws ControllerOperationException {
    // Build a read-attribute request against /host=<host>/server-config=<serverName>.
    final ModelNode operation = new ModelNode();
    operation.get(ClientConstants.OP).set(ClientConstants.READ_ATTRIBUTE_OPERATION);
    final ModelNode address = operation.get(ClientConstants.OP_ADDR);
    address.add(ClientConstants.HOST, host);
    address.add(ClientConstants.SERVER_CONFIG, serverName);
    operation.get(ClientConstants.NAME).set(ClientConstants.STATUS);
    final ModelControllerClient client = createControllerClient(controllerClientConfig);
    ModelNode reply = null;
    try {
        reply = client.execute(new OperationBuilder(operation).build());
    } catch (IOException ioe) {
        throw new ControllerOperationException(
                "An error occurred while executing operation on JBoss controller to get server status", ioe);
    } finally {
        // Always release the controller connection; log (not rethrow) close failures.
        try {
            client.close();
        } catch (IOException ioe) {
            logger.error("An error occurred while closing JBoss Controller client connection with host "
                    + controllerClientConfig.getHost() + " at port " + controllerClientConfig.getPort()
                    + " during the process of get server status", ioe);
        }
    }
    if (!isOperationSuccess(reply)) {
        if (!reply.isDefined()) {
            throw new ControllerOperationException(
                    "undefined response status recieved while getting server status. Most probably the "
                            + ClientConstants.HOST + " " + serverName + " is not defined.");
        }
        throw new ControllerOperationException("An error thrown from JBoss controller while getting server status.\n"
                + reply.get(ClientConstants.FAILURE_DESCRIPTION).asString());
    }
    return reply.get(ClientConstants.RESULT);
}
/**
 * Starts every server belonging to the given server group.
 *
 * @param controllerClientConfig connection settings for the domain controller
 * @param serverGroup            name of the server group whose servers should be started
 * @return the "outcome" node of the controller response
 * @throws ControllerOperationException if the operation fails, the response is
 *         undefined, or an I/O error occurs while talking to the controller
 */
@Override
public ModelNode startServerGroup(ControllerClientConfig controllerClientConfig, String serverGroup) throws ControllerOperationException {
    final ModelNode request = new ModelNode();
    request.get(ClientConstants.OP).set(OPERATION_START_SERVERS);
    request.get(ClientConstants.OP_ADDR).add(ClientConstants.SERVER_GROUP, serverGroup);
    final ModelControllerClient controllerClient = createControllerClient(controllerClientConfig);
    ModelNode response = null;
    try {
        response = controllerClient.execute(new OperationBuilder(request).build());
    } catch (IOException ioe) {
        // Message grammar fixed ("all server" -> "all servers").
        throw new ControllerOperationException(
                "An error occurred while executing operation on JBoss controller to start all servers of group", ioe);
    } finally {
        // Always release the controller connection; log (not rethrow) close failures.
        try {
            controllerClient.close();
        } catch (IOException ioe) {
            logger.error("An error occurred while closing JBoss Controller client connection with host "
                    + controllerClientConfig.getHost() + " at port " + controllerClientConfig.getPort()
                    + " during the process of starting all servers of group", ioe);
        }
    }
    if (!isOperationSuccess(response)) {
        if (!response.isDefined()) {
            // Message fixed: "received" typo, and the missing resource is the server
            // group, not the host.
            throw new ControllerOperationException(
                    "undefined response status received while starting all servers of group. Most probably the "
                            + ClientConstants.SERVER_GROUP + " " + serverGroup + " is not defined.");
        }
        throw new ControllerOperationException("An error thrown from JBoss controller while starting all servers of group.\n"
                + response.get(ClientConstants.FAILURE_DESCRIPTION).asString());
    }
    // NOTE(review): returns the OUTCOME node while the single-server operations return
    // RESULT — kept as-is for backward compatibility; confirm this is intentional.
    return response.get(ClientConstants.OUTCOME);
}
/**
 * Stops every server belonging to the given server group.
 *
 * @param controllerClientConfig connection settings for the domain controller
 * @param serverGroup            name of the server group whose servers should be stopped
 * @return the "outcome" node of the controller response
 * @throws ControllerOperationException if the operation fails, the response is
 *         undefined, or an I/O error occurs while talking to the controller
 */
@Override
public ModelNode stopServerGroup(ControllerClientConfig controllerClientConfig, String serverGroup) throws ControllerOperationException {
    final ModelNode request = new ModelNode();
    request.get(ClientConstants.OP).set(OPERATION_STOP_SERVERS);
    request.get(ClientConstants.OP_ADDR).add(ClientConstants.SERVER_GROUP, serverGroup);
    final ModelControllerClient controllerClient = createControllerClient(controllerClientConfig);
    ModelNode response = null;
    try {
        response = controllerClient.execute(new OperationBuilder(request).build());
    } catch (IOException ioe) {
        // Message fixed: previously said "start" (copy-paste from startServerGroup).
        throw new ControllerOperationException(
                "An error occurred while executing operation on JBoss controller to stop all servers of group", ioe);
    } finally {
        // Always release the controller connection; log (not rethrow) close failures.
        try {
            controllerClient.close();
        } catch (IOException ioe) {
            logger.error("An error occurred while closing JBoss Controller client connection with host "
                    + controllerClientConfig.getHost() + " at port " + controllerClientConfig.getPort()
                    + " during the process of stopping all servers of group", ioe);
        }
    }
    if (!isOperationSuccess(response)) {
        if (!response.isDefined()) {
            // Message fixed: "stopping" instead of "starting", "received" typo, and the
            // missing resource is the server group, not the host.
            throw new ControllerOperationException(
                    "undefined response status received while stopping all servers of group. Most probably the "
                            + ClientConstants.SERVER_GROUP + " " + serverGroup + " is not defined.");
        }
        throw new ControllerOperationException("An error thrown from JBoss controller while stopping all servers of group.\n"
                + response.get(ClientConstants.FAILURE_DESCRIPTION).asString());
    }
    // NOTE(review): returns the OUTCOME node while the single-server operations return
    // RESULT — kept as-is for backward compatibility; confirm this is intentional.
    return response.get(ClientConstants.OUTCOME);
}
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.codeInsight.intentions;
import com.intellij.codeInsight.intention.impl.BaseIntentionAction;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiWhiteSpace;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.IncorrectOperationException;
import com.jetbrains.python.PyBundle;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.PyTokenTypes;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.impl.PyBuiltinCache;
import com.jetbrains.python.psi.impl.PyPsiUtils;
import com.jetbrains.python.psi.impl.PyStringLiteralExpressionImpl;
import com.jetbrains.python.psi.types.PyClassType;
import com.jetbrains.python.psi.types.PyType;
import com.jetbrains.python.psi.types.PyTypeChecker;
import com.jetbrains.python.psi.types.TypeEvalContext;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static com.jetbrains.python.psi.PyUtil.sure;
/**
* Replaces expressions like <code>"%s" % values</code> with likes of <code>"{0:s}".format(values)</code>.
* <br/>
* Author: Alexey.Ivanov, dcheryasov
*/
public class ConvertFormatOperatorToMethodIntention extends BaseIntentionAction {
  // Matches a single %-style format specifier:
  //   optional "(key)" mapping key        -> group 1
  //   flag characters [-#0+ ]             -> group 2
  //   width and precision ("*" or digits) -> group 3
  //   ignored length modifier [hlL]
  //   conversion character                -> group 4
  private static final Pattern FORMAT_PATTERN =
    Pattern.compile("%(?:\\((\\w+)\\))?([-#0+ ]*)((?:\\*|\\d+)?(?:\\.(?:\\*|\\d+))?)?[hlL]?([diouxXeEfFgGcrs%])");
  // groups: %:ignored, 1:key 2:mods 3:width-and-precision x:len 4:conversion-type
  // Matches a single literal brace, which must be doubled to survive str.format().
  private static final Pattern BRACE_PATTERN = Pattern.compile("(\\{|\\})");
  /**
   * copy source to target, doubling every brace.
   * Braces that belong to a {@code \N&#123;...&#125;} named-unicode escape are kept single
   * (see PY-977), since doubling them would break the escape.
   */
  private static void appendDoublingBraces(CharSequence source, StringBuilder target) {
    int index = 0;
    Matcher scanner = BRACE_PATTERN.matcher(source);
    boolean skipClosingBrace = false;
    while (scanner.find(index)) {
      if (scanner.start() > 1) {
        // handle escaping sequences PY-977: a "{" immediately preceded by "\N" opens a
        // named-unicode escape; copy it verbatim and remember to keep its "}" single too.
        if ("{".equals(scanner.group(0)) && "\\N".equals(source.subSequence(scanner.start()-2, scanner.start()).toString())) {
          skipClosingBrace = true;
          target.append(source.subSequence(index, scanner.end()));
          index = scanner.end();
          continue;
        }
      }
      if (skipClosingBrace && "}".equals(scanner.group(0))) {
        // closing brace of the \N{...} escape: copy verbatim, do not double
        skipClosingBrace = false;
        target.append(source.subSequence(index, scanner.end()));
        index = scanner.end();
        continue;
      }
      // ordinary brace: copy the preceding text, then emit the brace doubled
      target.append(source.subSequence(index, scanner.start()));
      if ("{".equals(scanner.group(0))) target.append("{{");
      else target.append("}}");
      index = scanner.end();
    }
    // copy the tail after the last brace (or the whole source if none matched)
    target.append(source.subSequence(index, source.length()));
  }
  /**
   * Converts format expressions inside a string.
   * @param stringLiteralExpression the string literal on the left of the "%" operator
   * @param prefix string prefix (e.g. "u") prepended to fragments that lack their own prefix
   * @return a pair of string builder with resulting string expression and a flag which is true if formats inside use mapping by name.
   */
  private static Pair<StringBuilder, Boolean> convertFormat(PyStringLiteralExpression stringLiteralExpression, String prefix) {
    // python string may be made of several literals, all different
    List<StringBuilder> constants = new ArrayList<StringBuilder>();
    boolean usesNamedFormat = false;
    final List<ASTNode> stringNodes = stringLiteralExpression.getStringNodes();
    sure(stringNodes);
    sure(stringNodes.size() > 0);
    for (ASTNode stringNode : stringNodes) {
      // preserve prefixes and quote form
      CharSequence text = stringNode.getChars();
      int openPos = 0;
      boolean hasPrefix = false;
      final int prefixLength = PyStringLiteralExpressionImpl.getPrefixLength(String.valueOf(text));
      if (prefixLength != 0) hasPrefix = true;
      openPos += prefixLength;
      char quote = text.charAt(openPos);
      sure("\"'".indexOf(quote) >= 0);
      if (text.length() - openPos >= 6) {
        // triple-quoted?
        if (text.charAt(openPos+1) == quote && text.charAt(openPos+2) == quote) {
          openPos += 2;
        }
      }
      int index = openPos + 1; // from quote to first in-string char
      StringBuilder out = new StringBuilder(text.subSequence(0, openPos+1));
      if (!hasPrefix) out.insert(0, prefix);
      int position_count = 0; // next positional index to emit for unnamed specs
      Matcher scanner = FORMAT_PATTERN.matcher(text);
      while (scanner.find(index)) {
        // store previous non-format part (with braces doubled)
        appendDoublingBraces(text.subSequence(index, scanner.start()), out);
        //out.append(text.subSequence(index, scanner.start()));
        // unpack format
        final String f_key = scanner.group(1);
        final String f_modifier = scanner.group(2);
        final String f_width = scanner.group(3);
        String f_conversion = scanner.group(4);
        // convert to format()'s
        if ("%%".equals(scanner.group(0))) {
          // shortcut to put a literal %
          out.append("%");
        }
        else {
          sure(f_conversion);
          sure(!"%".equals(f_conversion)); // a padded percent literal; can't bother to autoconvert, and in 3k % is different.
          out.append("{");
          if (f_key != null) {
            // "%(name)s" -> "{name...}"
            out.append(f_key);
            usesNamedFormat = true;
          }
          else {
            // unnamed spec -> next positional index "{0...}", "{1...}", ...
            out.append(position_count);
            position_count += 1;
          }
          if ("r".equals(f_conversion)) out.append("!r");
          // don't convert %s -> !s, for %s is the normal way to output the default representation
          out.append(":");
          if (f_modifier != null) {
            // in strict order
            if (has(f_modifier, '-')) out.append("<"); // left align
            else if ("s".equals(f_conversion) && !StringUtil.isEmptyOrSpaces(f_width)) {
              // "%20s" aligns right, "{0:20s}" aligns left; to preserve align, make it explicit
              out.append(">");
            }
            if (has(f_modifier, '+')) out.append("+"); // signed
            else if (has(f_modifier, ' ')) out.append(" "); // default-signed
            if (has(f_modifier, '#')) out.append("#"); // alt numbers
            if (has(f_modifier, '0')) out.append("0"); // padding
            // anything else can't be here
          }
          if (f_width != null) {
            out.append(f_width);
          }
          // map conversion chars that differ between % and format()
          if ("i".equals(f_conversion) || "u".equals(f_conversion)) out.append("d");
          else if ("r".equals(f_conversion)) out.append("s"); // we want our raw string as a string
          else out.append(f_conversion);
          //
          out.append("}");
        }
        index = scanner.end();
      }
      // store non-format final part
      //out.append(text.subSequence(index, text.length()-1));
      appendDoublingBraces(text.subSequence(index, text.length()), out);
      constants.add(out);
    }
    // form the entire literal filling possible gaps between constants.
    // we assume that a string literal begins with its first constant, without a gap.
    TextRange full_range = stringLiteralExpression.getTextRange();
    int full_start = full_range.getStartOffset();
    CharSequence full_text = stringLiteralExpression.getNode().getChars();
    TextRange prev_range = stringNodes.get(0).getTextRange();
    int fragment_no = 1; // look at second and further fragments
    while (fragment_no < stringNodes.size()) {
      TextRange next_range = stringNodes.get(fragment_no).getTextRange();
      int left = prev_range.getEndOffset() - full_start;
      int right = next_range.getStartOffset() - full_start;
      if (left < right) {
        // copy the whitespace/comment gap between adjacent fragments
        constants.get(fragment_no-1).append(full_text.subSequence(left, right));
      }
      fragment_no += 1;
      prev_range = next_range;
    }
    final int left = prev_range.getEndOffset() - full_start;
    final int right = full_range.getEndOffset() - full_start;
    if (left < right) {
      // the barely possible case of last dangling "\"
      constants.get(constants.size()-1).append(full_text.subSequence(left, right));
    }
    // join everything
    StringBuilder result = new StringBuilder();
    for (StringBuilder one : constants) result.append(one);
    return new Pair<StringBuilder, Boolean>(result, usesNamedFormat);
  }
  /** @return true if {@code what} occurs anywhere in {@code where}. */
  private static boolean has(String where, char what) {
    return where.indexOf(what) >= 0;
  }
  @NotNull
  public String getFamilyName() {
    return PyBundle.message("INTN.format.operator.to.method");
  }
  /**
   * Available when the caret is inside a binary "%" expression whose left operand is a
   * string literal, in a Python file of language level 2.6 or newer (str.format exists).
   */
  public boolean isAvailable(@NotNull Project project, Editor editor, PsiFile file) {
    if (!(file instanceof PyFile)) {
      return false;
    }
    PyBinaryExpression binaryExpression =
      PsiTreeUtil.getParentOfType(file.findElementAt(editor.getCaretModel().getOffset()), PyBinaryExpression.class, false);
    if (binaryExpression == null) {
      return false;
    }
    final LanguageLevel languageLevel = LanguageLevel.forElement(binaryExpression);
    if (languageLevel.isOlderThan(LanguageLevel.PYTHON26)) {
      return false;
    }
    if (binaryExpression.getLeftExpression() instanceof PyStringLiteralExpression && binaryExpression.getOperator() == PyTokenTypes.PERC) {
      setText(PyBundle.message("INTN.replace.with.method"));
      return true;
    }
    return false;
  }
  /**
   * Performs the conversion: rewrites the left literal via {@link #convertFormat},
   * appends ".format(...)" whose argument list depends on the right-hand side
   * (dict literal becomes "**" unpacking; a call resolving to dict() reuses its
   * argument list; any other expression is passed through verbatim), and replaces
   * the whole "%" expression in place.
   */
  public void invoke(@NotNull Project project, Editor editor, PsiFile file) throws IncorrectOperationException {
    final PsiElement elementAt = file.findElementAt(editor.getCaretModel().getOffset());
    final PyBinaryExpression element = PsiTreeUtil.getParentOfType(elementAt, PyBinaryExpression.class, false);
    if (element == null) return;
    final PyElementGenerator elementGenerator = PyElementGenerator.getInstance(project);
    final PyExpression rightExpression = sure(element).getRightExpression();
    if (rightExpression == null) {
      return;
    }
    final PyExpression rhs = PyPsiUtils.flattenParens(rightExpression);
    if (rhs == null) return;
    final String paramText = sure(rhs).getText();
    final TypeEvalContext context = TypeEvalContext.userInitiated(file.getProject(), file);
    final PyType rhsType = context.getType(rhs);
    String prefix = "";
    // keep unicode-ness: if the argument is unicode, force a "u" prefix on the literal
    if (PyTypeChecker.match(PyBuiltinCache.getInstance(rhs).getObjectType("unicode"), rhsType, context)) {
      prefix = "u";
    }
    final PyStringLiteralExpression leftExpression = (PyStringLiteralExpression)element.getLeftExpression();
    final Pair<StringBuilder, Boolean> converted = convertFormat(leftExpression, prefix);
    final StringBuilder target = converted.getFirst();
    final String separator = getSeparator(leftExpression);
    target.append(separator).append(".format");
    if (rhs instanceof PyDictLiteralExpression) target.append("(**").append(paramText).append(")");
    else if (rhs instanceof PyCallExpression) { // potential dict(foo=1) -> format(foo=1)
      final PyCallExpression callExpression = (PyCallExpression)rhs;
      final PyExpression callee = callExpression.getCallee();
      if (callee instanceof PyReferenceExpression) {
        PsiElement maybeDict = ((PyReferenceExpression)callee).getReference().resolve();
        if (maybeDict instanceof PyFunction) {
          PyFunction dictInit = (PyFunction)maybeDict;
          if (PyNames.INIT.equals(dictInit.getName())) {
            final PyClassType dictType = PyBuiltinCache.getInstance(file).getDictType();
            if (dictType != null && dictType.getPyClass() == dictInit.getContainingClass()) {
              // the call really is dict(...): reuse its argument list as format()'s
              target.append(sure(sure(callExpression.getArgumentList()).getNode()).getChars());
            }
          }
          else { // just a call, reuse
            target.append("(");
            if (converted.getSecond()) target.append("**"); // map-by-name formatting was detected
            target.append(paramText).append(")");
          }
        }
      }
    }
    else target.append("(").append(paramText).append(")"); // tuple is ok as is
    // Correctly handle multiline implicitly concatenated string literals (PY-9176)
    target.insert(0, '(').append(')');
    final PyExpression parenthesized = elementGenerator.createExpressionFromText(LanguageLevel.forElement(element), target.toString());
    element.replace(sure(((PyParenthesizedExpression)parenthesized).getContainedExpression()));
  }
  /**
   * Detects nontrivial whitespace around the "%" operator (e.g. a line break between the
   * literal and "%") so it can be preserved in front of ".format".
   */
  private static String getSeparator(PyStringLiteralExpression leftExpression) {
    String separator = ""; // detect nontrivial whitespace around the "%"
    Pair<String, PsiElement> crop = collectWhitespace(leftExpression);
    String maybeSeparator = crop.getFirst();
    if (maybeSeparator != null && !maybeSeparator.isEmpty() && !" ".equals(maybeSeparator))
      separator = maybeSeparator;
    else { // after "%"
      crop = collectWhitespace(crop.getSecond());
      maybeSeparator = crop.getFirst();
      if (maybeSeparator != null && !maybeSeparator.isEmpty() && !" ".equals(maybeSeparator))
        separator = maybeSeparator;
    }
    return separator;
  }
  /**
   * Collects the run of whitespace siblings immediately following {@code start}.
   * @return the concatenated whitespace text and the first non-whitespace sibling
   *         (or null if the sibling chain ended)
   */
  private static Pair<String, PsiElement> collectWhitespace(PsiElement start) {
    StringBuilder sb = new StringBuilder();
    PsiElement seeker = start;
    while (seeker != null) {
      seeker = seeker.getNextSibling();
      if (seeker != null && seeker instanceof PsiWhiteSpace) sb.append(seeker.getText());
      else break;
    }
    return Pair.create(sb.toString(), seeker);
  }
}
| |
/**Copyright 2015 Research Studios Austria Forschungsgesellschaft mBH
*
* This file is part of easyrec.
*
* easyrec is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* easyrec is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with easyrec. If not, see <http://www.gnu.org/licenses/>.
*/
package org.easyrec.plugin.aggregator.store.dao.impl;
import com.google.common.collect.Lists;
import com.google.common.primitives.Ints;
import gnu.trove.map.hash.TObjectIntHashMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.easyrec.model.core.ItemVO;
import org.easyrec.store.dao.BaseActionDAO;
import org.easyrec.utils.spring.store.dao.annotation.DAO;
import org.springframework.jdbc.core.ResultSetExtractor;
import org.springframework.jdbc.core.support.JdbcDaoSupport;
import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Date;
import java.util.List;
import org.easyrec.model.core.ActionVO;
import org.easyrec.plugin.aggregator.model.AggregatorConfigurationInt;
import org.easyrec.plugin.aggregator.store.dao.AggregatorActionDAO;
import static org.easyrec.store.dao.BaseActionDAO.DEFAULT_ACTIONINFO_COLUMN_NAME;
import static org.easyrec.store.dao.BaseActionDAO.DEFAULT_ACTION_TIME_COLUMN_NAME;
import static org.easyrec.store.dao.BaseActionDAO.DEFAULT_ACTION_TYPE_COLUMN_NAME;
import static org.easyrec.store.dao.BaseActionDAO.DEFAULT_ID_COLUMN_NAME;
import static org.easyrec.store.dao.BaseActionDAO.DEFAULT_IP_COLUMN_NAME;
import static org.easyrec.store.dao.BaseActionDAO.DEFAULT_ITEM_COLUMN_NAME;
import static org.easyrec.store.dao.BaseActionDAO.DEFAULT_ITEM_TYPE_COLUMN_NAME;
import static org.easyrec.store.dao.BaseActionDAO.DEFAULT_RATING_VALUE_COLUMN_NAME;
import static org.easyrec.store.dao.BaseActionDAO.DEFAULT_SESSION_COLUMN_NAME;
import static org.easyrec.store.dao.BaseActionDAO.DEFAULT_TENANT_COLUMN_NAME;
import static org.easyrec.store.dao.BaseActionDAO.DEFAULT_USER_COLUMN_NAME;
import org.easyrec.utils.spring.store.dao.DaoUtils;
import org.springframework.jdbc.core.RowMapper;
/**
* This class provides methods to access data in a datamining/rulemining database.
* <p/>
* <p><b>Company: </b>
* SAT, Research Studios Austria</p>
* <p/>
* <p><b>Copyright: </b>
* (c) 2006</p>
* <p/>
* <p><b>last modified:</b><br/>
* $Author: szavrel $<br/>
* $Date: 2011-02-11 18:35:47 +0100 (Fr, 11 Feb 2011) $<br/>
* $Revision: 17681 $</p>
*
* @author Stephan Zavrel
*/
@DAO
public class AggregatorActionDAOMysqlImpl extends JdbcDaoSupport implements AggregatorActionDAO {
    // Stateless and reused across queries, so it can be final.
    private final ActionVORowMapper actionVORowMapper = new ActionVORowMapper();

    //////////////////////////////////////////////////////////////////////////////
    // constructor
    public AggregatorActionDAOMysqlImpl(DataSource dataSource) {
        setDataSource(dataSource);
    }

    /**
     * Returns the distinct ids of non-anonymous users that have actions for the
     * configured tenant, optionally restricted by action type and last-run time.
     */
    @Override
    public List<Integer> getUsersWithActions(AggregatorConfigurationInt configuration) {
        //Todo: only get non-anonymous users! -> done using idMappingDAO later
        //Todo: move to iterator
        List<Object> args = Lists.newArrayList();
        List<Integer> argt = Lists.newArrayList();
        // get all Baskets
        StringBuilder query = new StringBuilder();
        query.append("SELECT DISTINCT(a.").append(BaseActionDAO.DEFAULT_USER_COLUMN_NAME);
        query.append(") FROM ").append(BaseActionDAO.DEFAULT_TABLE_NAME).append(" a, idmapping i");
        // bind the tenant id as a parameter, consistent with the other criteria
        // (previously the numeric value was concatenated directly into the SQL)
        query.append(" WHERE ").append(BaseActionDAO.DEFAULT_TENANT_COLUMN_NAME).append("=?");
        args.add(configuration.getTenantId());
        argt.add(Types.INTEGER);
        if (configuration.getActionType() != null) {
            query.append(" AND ").append(BaseActionDAO.DEFAULT_ACTION_TYPE_COLUMN_NAME).append("=?");
            args.add(configuration.getActionType());
            argt.add(Types.INTEGER);
        }
        if (configuration.getLastRun() != null) {
            query.append(" AND ").append(BaseActionDAO.DEFAULT_ACTION_TIME_COLUMN_NAME).append(">=?");
            args.add(configuration.getLastRun());
            argt.add(Types.TIMESTAMP);
        }
        // filter anonymous users
        query.append(" AND a.userId=i.intId AND a.sessionId!=i.stringId");
        List<Integer> baskets = getJdbcTemplate().queryForList(query.toString(), args.toArray(),
                Ints.toArray(argt), Integer.class);
        return baskets;
    }

    /**
     * Loads all actions of one user for the configured tenant, ordered by item id
     * ascending (so consumers can detect item changes in a single pass).
     */
    @Override
    public List<ActionVO<Integer,Integer>> getActionsForUsers(Integer userId, AggregatorConfigurationInt configuration) {
        //sort by itemID, then check on itemID,typeID change look in profile -> this way only 1 query is needed
        List<Object> args = Lists.newArrayList();
        List<Integer> argt = Lists.newArrayList();
        StringBuilder query = new StringBuilder();
        query.append("SELECT * FROM ").append(BaseActionDAO.DEFAULT_TABLE_NAME);
        // bind tenant id and user id as parameters instead of inlining the values
        query.append(" WHERE ").append(BaseActionDAO.DEFAULT_TENANT_COLUMN_NAME).append("=?")
                .append(" AND ").append(BaseActionDAO.DEFAULT_USER_COLUMN_NAME).append("=?");
        args.add(configuration.getTenantId());
        argt.add(Types.INTEGER);
        args.add(userId);
        argt.add(Types.INTEGER);
        if (configuration.getActionType() != null) {
            query.append(" AND ").append(BaseActionDAO.DEFAULT_ACTION_TYPE_COLUMN_NAME).append("=?");
            args.add(configuration.getActionType());
            argt.add(Types.INTEGER);
        }
        // delta updates don't work since we only store Top x of every field
        // if (configuration.getLastRun() != null) {
        //     query.append(" AND ").append(BaseActionDAO.DEFAULT_ACTION_TIME_COLUMN_NAME).append(">=?");
        //     args.add(configuration.getLastRun());
        //     argt.add(Types.TIMESTAMP);
        // }
        query.append(" ORDER BY ").append(BaseActionDAO.DEFAULT_ITEM_COLUMN_NAME).append(" ASC");
        return getJdbcTemplate().query(query.toString(), args.toArray(), Ints.toArray(argt), actionVORowMapper);
        // only if config for itemprofile, load the item profile
    }

    /**
     * Counts all actions of a tenant, optionally restricted to one action type.
     * The {@code lastRun} parameter is deliberately ignored (see comment below).
     */
    @Override
    public int getNumberOfActions(Integer tenantId, Integer actionType, Date lastRun) {
        List<Object> args = Lists.newArrayList();
        List<Integer> argt = Lists.newArrayList();
        StringBuilder query = new StringBuilder("SELECT count(1) as cnt FROM ");
        query.append(BaseActionDAO.DEFAULT_TABLE_NAME);
        query.append(" WHERE ").append(BaseActionDAO.DEFAULT_TENANT_COLUMN_NAME).append("=?");
        args.add(tenantId);
        argt.add(Types.INTEGER);
        if (actionType!=null) {
            query.append(" AND ").append(BaseActionDAO.DEFAULT_ACTION_TYPE_COLUMN_NAME).append("=?");
            args.add(actionType);
            argt.add(Types.INTEGER);
        }
        // we always need to consider all actions since we don't store the complete user profile
        // if (lastRun != null) {
        //     query.append(" AND ").append(BaseActionDAO.DEFAULT_ACTION_TIME_COLUMN_NAME).append(">=?");
        //     args.add(lastRun);
        //     argt.add(Types.TIMESTAMP);
        // }
        return getJdbcTemplate().queryForInt(query.toString(), args.toArray(), Ints.toArray(argt));
    }

    /** Maps one row of the action table to an {@link ActionVO}. */
    private class ActionVORowMapper implements RowMapper<ActionVO<Integer, Integer>> {
        @Override
        public ActionVO<Integer, Integer> mapRow(ResultSet rs, int rowNum)
                throws SQLException {
            ActionVO<Integer, Integer> actionVO =
                    new ActionVO<>(
                            DaoUtils.getLong(rs, DEFAULT_ID_COLUMN_NAME),
                            DaoUtils.getInteger(rs, DEFAULT_TENANT_COLUMN_NAME),
                            DaoUtils.getInteger(rs, DEFAULT_USER_COLUMN_NAME),
                            DaoUtils.getStringIfPresent(rs, DEFAULT_SESSION_COLUMN_NAME),
                            DaoUtils.getStringIfPresent(rs, DEFAULT_IP_COLUMN_NAME),
                            new ItemVO<>(DaoUtils.getInteger(rs, DEFAULT_TENANT_COLUMN_NAME),
                                    DaoUtils.getInteger(rs, DEFAULT_ITEM_COLUMN_NAME),
                                    DaoUtils.getInteger(rs, DEFAULT_ITEM_TYPE_COLUMN_NAME)),
                            DaoUtils.getInteger(rs, DEFAULT_ACTION_TYPE_COLUMN_NAME),
                            DaoUtils.getInteger(rs, DEFAULT_RATING_VALUE_COLUMN_NAME),
                            DaoUtils.getStringIfPresent(rs, DEFAULT_ACTIONINFO_COLUMN_NAME),
                            DaoUtils.getDate(rs, DEFAULT_ACTION_TIME_COLUMN_NAME));
            return actionVO;
        }
    }

    /**
     * Extracts (item, count) pairs from a result set into a map; additionally remembers
     * the count of the last row as {@code minSupp} (rows are expected to be ordered so
     * that the last count is the minimum support — see "optimization" note below).
     */
    private static class ActionResultSetExtractor
            implements ResultSetExtractor<TObjectIntHashMap<ItemVO<Integer, Integer>>> {
        private int minSupp;
        // logging
        private final Log logger = LogFactory.getLog(this.getClass());

        @Override
        public TObjectIntHashMap<ItemVO<Integer, Integer>> extractData(ResultSet rs) {
            TObjectIntHashMap<ItemVO<Integer, Integer>> map = new TObjectIntHashMap<>();
            int itemId, itemTypeId, tenantId, cnt = 0;
            try {
                while (rs.next()) {
                    itemId = rs.getInt(BaseActionDAO.DEFAULT_ITEM_COLUMN_NAME);
                    itemTypeId = rs.getInt(BaseActionDAO.DEFAULT_ITEM_TYPE_COLUMN_NAME);
                    tenantId = rs.getInt(BaseActionDAO.DEFAULT_TENANT_COLUMN_NAME);
                    cnt = rs.getInt("cnt");
                    map.put(new ItemVO<>(tenantId, itemId, itemTypeId), cnt);
                }
                // optimization: replaces former adjustSupport method
                minSupp = cnt;
            } catch (SQLException e) {
                logger.error("An error occured during ResultSet extraction", e);
                throw new RuntimeException(e);
            }
            return map;
        }

        public Integer getMinSupp() {
            return this.minSupp;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.codec.prefixtree;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.codec.prefixtree.encode.other.LongEncoder;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.vint.UVIntTool;
import org.apache.hadoop.hbase.util.vint.UVLongTool;
/**
 * Information about the block. Stored at the beginning of the byte[]. Contains things
 * like minimum timestamp and width of FInts in the row tree.
 *
 * Most fields stored in VInts that get decoded on the first access of each new block.
 *
 * The three serialization members must stay in lock-step: {@link #calculateNumMetaBytes()},
 * {@link #writeVariableBytesToOutputStream(OutputStream)} and the two read methods all
 * enumerate the same fields in the same order with the same encodings.
 */
@InterfaceAudience.Private
public class PrefixTreeBlockMeta {
  /******************* static fields ********************/
  public static final int VERSION = 0;
  public static final int MAX_FAMILY_LENGTH = Byte.MAX_VALUE;// hard-coded in KeyValue
  // Upper bound on the serialized size of this meta section, used for buffer sizing.
  public static final int
    NUM_LONGS = 2,
    NUM_INTS = 28,
    NUM_SHORTS = 0,//keyValueTypeWidth not persisted
    NUM_SINGLE_BYTES = 2,
    MAX_BYTES = Bytes.SIZEOF_LONG * NUM_LONGS
        + Bytes.SIZEOF_SHORT * NUM_SHORTS
        + Bytes.SIZEOF_INT * NUM_INTS
        + NUM_SINGLE_BYTES;
  /**************** transient fields *********************/
  // Position of this meta section inside the backing array; not persisted.
  protected int arrayOffset;
  protected int bufferOffset;
  /**************** persisted fields **********************/
  // PrefixTree version to allow future format modifications
  protected int version;
  protected int numMetaBytes;
  protected int numKeyValueBytes;
  protected boolean includesMvccVersion;//probably don't need this explicitly, but only 1 byte
  // split the byte[] into 6 sections for the different data types
  protected int numRowBytes;
  protected int numFamilyBytes;
  protected int numQualifierBytes;
  protected int numTimestampBytes;
  protected int numMvccVersionBytes;
  protected int numValueBytes;
  protected int numTagsBytes;
  // number of bytes in each section of fixed width FInts
  protected int nextNodeOffsetWidth;
  protected int familyOffsetWidth;
  protected int qualifierOffsetWidth;
  protected int timestampIndexWidth;
  protected int mvccVersionIndexWidth;
  protected int valueOffsetWidth;
  protected int valueLengthWidth;
  protected int tagsOffsetWidth;
  // used to pre-allocate structures for reading
  protected int rowTreeDepth;
  protected int maxRowLength;
  protected int maxQualifierLength;
  protected int maxTagsLength;
  // the timestamp from which the deltas are calculated
  protected long minTimestamp;
  protected int timestampDeltaWidth;
  protected long minMvccVersion;
  protected int mvccVersionDeltaWidth;
  protected boolean allSameType;
  protected byte allTypes;
  protected int numUniqueRows;
  protected int numUniqueFamilies;
  protected int numUniqueQualifiers;
  protected int numUniqueTags;
  /***************** constructors ********************/
  public PrefixTreeBlockMeta() {
  }
  /**
   * Construct by deserializing all persisted fields from the given stream.
   * @param is stream positioned at the start of a serialized PtBlockMeta
   */
  public PrefixTreeBlockMeta(InputStream is) throws IOException{
    this.version = VERSION;
    this.arrayOffset = 0;
    this.bufferOffset = 0;
    readVariableBytesFromInputStream(is);
  }
  /**
   * @param buffer positioned at start of PtBlockMeta
   */
  public PrefixTreeBlockMeta(ByteBuffer buffer) {
    initOnBlock(buffer);
  }
  /**
   * Re-point this meta at a new block and decode its persisted fields.
   * @param buffer positioned at start of PtBlockMeta; must be array-backed
   */
  public void initOnBlock(ByteBuffer buffer) {
    arrayOffset = buffer.arrayOffset();
    bufferOffset = buffer.position();
    readVariableBytesFromArray(buffer.array(), arrayOffset + bufferOffset);
  }
  /**************** operate on each field **********************/
  /**
   * @return the number of bytes the persisted fields occupy, using the same variable-length
   *         encodings (and field order) as {@link #writeVariableBytesToOutputStream(OutputStream)}
   */
  public int calculateNumMetaBytes(){
    int numBytes = 0;
    numBytes += UVIntTool.numBytes(version);
    // Must match the write path, which emits numMetaBytes via UVIntTool.writeBytes.
    numBytes += UVIntTool.numBytes(numMetaBytes);
    numBytes += UVIntTool.numBytes(numKeyValueBytes);
    ++numBytes;//os.write(getIncludesMvccVersion());
    numBytes += UVIntTool.numBytes(numRowBytes);
    numBytes += UVIntTool.numBytes(numFamilyBytes);
    numBytes += UVIntTool.numBytes(numQualifierBytes);
    numBytes += UVIntTool.numBytes(numTagsBytes);
    numBytes += UVIntTool.numBytes(numTimestampBytes);
    numBytes += UVIntTool.numBytes(numMvccVersionBytes);
    numBytes += UVIntTool.numBytes(numValueBytes);
    numBytes += UVIntTool.numBytes(nextNodeOffsetWidth);
    numBytes += UVIntTool.numBytes(familyOffsetWidth);
    numBytes += UVIntTool.numBytes(qualifierOffsetWidth);
    numBytes += UVIntTool.numBytes(tagsOffsetWidth);
    numBytes += UVIntTool.numBytes(timestampIndexWidth);
    numBytes += UVIntTool.numBytes(mvccVersionIndexWidth);
    numBytes += UVIntTool.numBytes(valueOffsetWidth);
    numBytes += UVIntTool.numBytes(valueLengthWidth);
    numBytes += UVIntTool.numBytes(rowTreeDepth);
    numBytes += UVIntTool.numBytes(maxRowLength);
    numBytes += UVIntTool.numBytes(maxQualifierLength);
    numBytes += UVIntTool.numBytes(maxTagsLength);
    numBytes += UVLongTool.numBytes(minTimestamp);
    numBytes += UVIntTool.numBytes(timestampDeltaWidth);
    numBytes += UVLongTool.numBytes(minMvccVersion);
    numBytes += UVIntTool.numBytes(mvccVersionDeltaWidth);
    ++numBytes;//os.write(getAllSameTypeByte());
    ++numBytes;//os.write(allTypes);
    numBytes += UVIntTool.numBytes(numUniqueRows);
    numBytes += UVIntTool.numBytes(numUniqueFamilies);
    numBytes += UVIntTool.numBytes(numUniqueQualifiers);
    numBytes += UVIntTool.numBytes(numUniqueTags);
    return numBytes;
  }
  /**
   * Serialize all persisted fields to the stream. Field order and encodings must match
   * the read methods and {@link #calculateNumMetaBytes()}.
   */
  public void writeVariableBytesToOutputStream(OutputStream os) throws IOException{
    UVIntTool.writeBytes(version, os);
    UVIntTool.writeBytes(numMetaBytes, os);
    UVIntTool.writeBytes(numKeyValueBytes, os);
    os.write(getIncludesMvccVersionByte());
    UVIntTool.writeBytes(numRowBytes, os);
    UVIntTool.writeBytes(numFamilyBytes, os);
    UVIntTool.writeBytes(numQualifierBytes, os);
    UVIntTool.writeBytes(numTagsBytes, os);
    UVIntTool.writeBytes(numTimestampBytes, os);
    UVIntTool.writeBytes(numMvccVersionBytes, os);
    UVIntTool.writeBytes(numValueBytes, os);
    UVIntTool.writeBytes(nextNodeOffsetWidth, os);
    UVIntTool.writeBytes(familyOffsetWidth, os);
    UVIntTool.writeBytes(qualifierOffsetWidth, os);
    UVIntTool.writeBytes(tagsOffsetWidth, os);
    UVIntTool.writeBytes(timestampIndexWidth, os);
    UVIntTool.writeBytes(mvccVersionIndexWidth, os);
    UVIntTool.writeBytes(valueOffsetWidth, os);
    UVIntTool.writeBytes(valueLengthWidth, os);
    UVIntTool.writeBytes(rowTreeDepth, os);
    UVIntTool.writeBytes(maxRowLength, os);
    UVIntTool.writeBytes(maxQualifierLength, os);
    UVIntTool.writeBytes(maxTagsLength, os);
    UVLongTool.writeBytes(minTimestamp, os);
    UVIntTool.writeBytes(timestampDeltaWidth, os);
    UVLongTool.writeBytes(minMvccVersion, os);
    UVIntTool.writeBytes(mvccVersionDeltaWidth, os);
    os.write(getAllSameTypeByte());
    os.write(allTypes);
    UVIntTool.writeBytes(numUniqueRows, os);
    UVIntTool.writeBytes(numUniqueFamilies, os);
    UVIntTool.writeBytes(numUniqueQualifiers, os);
    UVIntTool.writeBytes(numUniqueTags, os);
  }
  /**
   * Deserialize all persisted fields from the stream, mirroring
   * {@link #writeVariableBytesToOutputStream(OutputStream)}.
   */
  public void readVariableBytesFromInputStream(InputStream is) throws IOException{
    version = UVIntTool.getInt(is);
    numMetaBytes = UVIntTool.getInt(is);
    numKeyValueBytes = UVIntTool.getInt(is);
    setIncludesMvccVersion((byte) is.read());
    numRowBytes = UVIntTool.getInt(is);
    numFamilyBytes = UVIntTool.getInt(is);
    numQualifierBytes = UVIntTool.getInt(is);
    numTagsBytes = UVIntTool.getInt(is);
    numTimestampBytes = UVIntTool.getInt(is);
    numMvccVersionBytes = UVIntTool.getInt(is);
    numValueBytes = UVIntTool.getInt(is);
    nextNodeOffsetWidth = UVIntTool.getInt(is);
    familyOffsetWidth = UVIntTool.getInt(is);
    qualifierOffsetWidth = UVIntTool.getInt(is);
    tagsOffsetWidth = UVIntTool.getInt(is);
    timestampIndexWidth = UVIntTool.getInt(is);
    mvccVersionIndexWidth = UVIntTool.getInt(is);
    valueOffsetWidth = UVIntTool.getInt(is);
    valueLengthWidth = UVIntTool.getInt(is);
    rowTreeDepth = UVIntTool.getInt(is);
    maxRowLength = UVIntTool.getInt(is);
    maxQualifierLength = UVIntTool.getInt(is);
    maxTagsLength = UVIntTool.getInt(is);
    minTimestamp = UVLongTool.getLong(is);
    timestampDeltaWidth = UVIntTool.getInt(is);
    minMvccVersion = UVLongTool.getLong(is);
    mvccVersionDeltaWidth = UVIntTool.getInt(is);
    setAllSameType((byte) is.read());
    allTypes = (byte) is.read();
    numUniqueRows = UVIntTool.getInt(is);
    numUniqueFamilies = UVIntTool.getInt(is);
    numUniqueQualifiers = UVIntTool.getInt(is);
    numUniqueTags = UVIntTool.getInt(is);
  }
  /**
   * Deserialize all persisted fields directly from a byte array, mirroring
   * {@link #writeVariableBytesToOutputStream(OutputStream)}.
   * @param bytes backing array containing a serialized PtBlockMeta
   * @param offset absolute position in {@code bytes} where the meta section starts
   */
  public void readVariableBytesFromArray(byte[] bytes, int offset) {
    int position = offset;
    version = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(version);
    numMetaBytes = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(numMetaBytes);
    numKeyValueBytes = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(numKeyValueBytes);
    setIncludesMvccVersion(bytes[position]);
    ++position;
    numRowBytes = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(numRowBytes);
    numFamilyBytes = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(numFamilyBytes);
    numQualifierBytes = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(numQualifierBytes);
    numTagsBytes = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(numTagsBytes);
    numTimestampBytes = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(numTimestampBytes);
    numMvccVersionBytes = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(numMvccVersionBytes);
    numValueBytes = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(numValueBytes);
    nextNodeOffsetWidth = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(nextNodeOffsetWidth);
    familyOffsetWidth = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(familyOffsetWidth);
    qualifierOffsetWidth = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(qualifierOffsetWidth);
    tagsOffsetWidth = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(tagsOffsetWidth);
    timestampIndexWidth = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(timestampIndexWidth);
    mvccVersionIndexWidth = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(mvccVersionIndexWidth);
    valueOffsetWidth = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(valueOffsetWidth);
    valueLengthWidth = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(valueLengthWidth);
    rowTreeDepth = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(rowTreeDepth);
    maxRowLength = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(maxRowLength);
    maxQualifierLength = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(maxQualifierLength);
    maxTagsLength = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(maxTagsLength);
    minTimestamp = UVLongTool.getLong(bytes, position);
    position += UVLongTool.numBytes(minTimestamp);
    timestampDeltaWidth = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(timestampDeltaWidth);
    minMvccVersion = UVLongTool.getLong(bytes, position);
    position += UVLongTool.numBytes(minMvccVersion);
    mvccVersionDeltaWidth = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(mvccVersionDeltaWidth);
    setAllSameType(bytes[position]);
    ++position;
    allTypes = bytes[position];
    ++position;
    numUniqueRows = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(numUniqueRows);
    numUniqueFamilies = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(numUniqueFamilies);
    numUniqueQualifiers = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(numUniqueQualifiers);
    numUniqueTags = UVIntTool.getInt(bytes, position);
    position += UVIntTool.numBytes(numUniqueTags);
  }
  //TODO method that can read directly from ByteBuffer instead of InputStream
  /*************** methods *************************/
  /**
   * @return bytes per cell used to store the KeyValue type: 0 when all cells share one type
   *         (stored once in {@link #allTypes}), otherwise 1 byte per cell
   */
  public int getKeyValueTypeWidth() {
    return allSameType ? 0 : 1;
  }
  public byte getIncludesMvccVersionByte() {
    return includesMvccVersion ? (byte) 1 : (byte) 0;
  }
  // Any non-zero byte decodes as true.
  public void setIncludesMvccVersion(byte includesMvccVersionByte) {
    includesMvccVersion = includesMvccVersionByte != 0;
  }
  public byte getAllSameTypeByte() {
    return allSameType ? (byte) 1 : (byte) 0;
  }
  // Any non-zero byte decodes as true.
  public void setAllSameType(byte allSameTypeByte) {
    allSameType = allSameTypeByte != 0;
  }
  // A zero-width index means every cell shares the single minimum value.
  public boolean isAllSameTimestamp() {
    return timestampIndexWidth == 0;
  }
  public boolean isAllSameMvccVersion() {
    return mvccVersionIndexWidth == 0;
  }
  /** Copy the timestamp compression parameters computed by the encoder. */
  public void setTimestampFields(LongEncoder encoder){
    this.minTimestamp = encoder.getMin();
    this.timestampIndexWidth = encoder.getBytesPerIndex();
    this.timestampDeltaWidth = encoder.getBytesPerDelta();
    this.numTimestampBytes = encoder.getTotalCompressedBytes();
  }
  /** Copy the mvccVersion compression parameters computed by the encoder. */
  public void setMvccVersionFields(LongEncoder encoder){
    this.minMvccVersion = encoder.getMin();
    this.mvccVersionIndexWidth = encoder.getBytesPerIndex();
    this.mvccVersionDeltaWidth = encoder.getBytesPerDelta();
    this.numMvccVersionBytes = encoder.getTotalCompressedBytes();
  }
  /*************** Object methods *************************/
  /**
   * Generated by Eclipse
   */
  @Override
  public boolean equals(Object obj) {
    if (this == obj)
      return true;
    if (obj == null)
      return false;
    if (getClass() != obj.getClass())
      return false;
    PrefixTreeBlockMeta other = (PrefixTreeBlockMeta) obj;
    if (allSameType != other.allSameType)
      return false;
    if (allTypes != other.allTypes)
      return false;
    if (arrayOffset != other.arrayOffset)
      return false;
    if (bufferOffset != other.bufferOffset)
      return false;
    if (valueLengthWidth != other.valueLengthWidth)
      return false;
    if (valueOffsetWidth != other.valueOffsetWidth)
      return false;
    if (familyOffsetWidth != other.familyOffsetWidth)
      return false;
    if (includesMvccVersion != other.includesMvccVersion)
      return false;
    if (maxQualifierLength != other.maxQualifierLength)
      return false;
    if (maxTagsLength != other.maxTagsLength)
      return false;
    if (maxRowLength != other.maxRowLength)
      return false;
    if (mvccVersionDeltaWidth != other.mvccVersionDeltaWidth)
      return false;
    if (mvccVersionIndexWidth != other.mvccVersionIndexWidth)
      return false;
    if (minMvccVersion != other.minMvccVersion)
      return false;
    if (minTimestamp != other.minTimestamp)
      return false;
    if (nextNodeOffsetWidth != other.nextNodeOffsetWidth)
      return false;
    if (numValueBytes != other.numValueBytes)
      return false;
    if (numFamilyBytes != other.numFamilyBytes)
      return false;
    if (numMvccVersionBytes != other.numMvccVersionBytes)
      return false;
    if (numMetaBytes != other.numMetaBytes)
      return false;
    if (numQualifierBytes != other.numQualifierBytes)
      return false;
    if (numTagsBytes != other.numTagsBytes)
      return false;
    if (numRowBytes != other.numRowBytes)
      return false;
    if (numTimestampBytes != other.numTimestampBytes)
      return false;
    if (numUniqueFamilies != other.numUniqueFamilies)
      return false;
    if (numUniqueQualifiers != other.numUniqueQualifiers)
      return false;
    if (numUniqueTags != other.numUniqueTags)
      return false;
    if (numUniqueRows != other.numUniqueRows)
      return false;
    if (numKeyValueBytes != other.numKeyValueBytes)
      return false;
    if (qualifierOffsetWidth != other.qualifierOffsetWidth)
      return false;
    if(tagsOffsetWidth != other.tagsOffsetWidth)
      return false;
    if (rowTreeDepth != other.rowTreeDepth)
      return false;
    if (timestampDeltaWidth != other.timestampDeltaWidth)
      return false;
    if (timestampIndexWidth != other.timestampIndexWidth)
      return false;
    if (version != other.version)
      return false;
    return true;
  }
  /**
   * Generated by Eclipse
   */
  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + (allSameType ? 1231 : 1237);
    result = prime * result + allTypes;
    result = prime * result + arrayOffset;
    result = prime * result + bufferOffset;
    result = prime * result + valueLengthWidth;
    result = prime * result + valueOffsetWidth;
    result = prime * result + familyOffsetWidth;
    result = prime * result + (includesMvccVersion ? 1231 : 1237);
    result = prime * result + maxQualifierLength;
    result = prime * result + maxTagsLength;
    result = prime * result + maxRowLength;
    result = prime * result + mvccVersionDeltaWidth;
    result = prime * result + mvccVersionIndexWidth;
    result = prime * result + (int) (minMvccVersion ^ (minMvccVersion >>> 32));
    result = prime * result + (int) (minTimestamp ^ (minTimestamp >>> 32));
    result = prime * result + nextNodeOffsetWidth;
    result = prime * result + numValueBytes;
    result = prime * result + numFamilyBytes;
    result = prime * result + numMvccVersionBytes;
    result = prime * result + numMetaBytes;
    result = prime * result + numQualifierBytes;
    result = prime * result + numTagsBytes;
    result = prime * result + numRowBytes;
    result = prime * result + numTimestampBytes;
    result = prime * result + numUniqueFamilies;
    result = prime * result + numUniqueQualifiers;
    result = prime * result + numUniqueTags;
    result = prime * result + numUniqueRows;
    result = prime * result + numKeyValueBytes;
    result = prime * result + qualifierOffsetWidth;
    result = prime * result + tagsOffsetWidth;
    result = prime * result + rowTreeDepth;
    result = prime * result + timestampDeltaWidth;
    result = prime * result + timestampIndexWidth;
    result = prime * result + version;
    return result;
  }
  /**
   * Generated by Eclipse
   */
  @Override
  public String toString() {
    StringBuilder builder = new StringBuilder();
    builder.append("PtBlockMeta [arrayOffset=");
    builder.append(arrayOffset);
    builder.append(", bufferOffset=");
    builder.append(bufferOffset);
    builder.append(", version=");
    builder.append(version);
    builder.append(", numMetaBytes=");
    builder.append(numMetaBytes);
    builder.append(", numKeyValueBytes=");
    builder.append(numKeyValueBytes);
    builder.append(", includesMvccVersion=");
    builder.append(includesMvccVersion);
    builder.append(", numRowBytes=");
    builder.append(numRowBytes);
    builder.append(", numFamilyBytes=");
    builder.append(numFamilyBytes);
    builder.append(", numQualifierBytes=");
    builder.append(numQualifierBytes);
    builder.append(", numTimestampBytes=");
    builder.append(numTimestampBytes);
    builder.append(", numMvccVersionBytes=");
    builder.append(numMvccVersionBytes);
    builder.append(", numValueBytes=");
    builder.append(numValueBytes);
    builder.append(", numTagBytes=");
    builder.append(numTagsBytes);
    builder.append(", nextNodeOffsetWidth=");
    builder.append(nextNodeOffsetWidth);
    builder.append(", familyOffsetWidth=");
    builder.append(familyOffsetWidth);
    builder.append(", qualifierOffsetWidth=");
    builder.append(qualifierOffsetWidth);
    builder.append(", tagOffsetWidth=");
    builder.append(tagsOffsetWidth);
    builder.append(", timestampIndexWidth=");
    builder.append(timestampIndexWidth);
    builder.append(", mvccVersionIndexWidth=");
    builder.append(mvccVersionIndexWidth);
    builder.append(", valueOffsetWidth=");
    builder.append(valueOffsetWidth);
    builder.append(", valueLengthWidth=");
    builder.append(valueLengthWidth);
    builder.append(", rowTreeDepth=");
    builder.append(rowTreeDepth);
    builder.append(", maxRowLength=");
    builder.append(maxRowLength);
    builder.append(", maxQualifierLength=");
    builder.append(maxQualifierLength);
    builder.append(", maxTagLength=");
    builder.append(maxTagsLength);
    builder.append(", minTimestamp=");
    builder.append(minTimestamp);
    builder.append(", timestampDeltaWidth=");
    builder.append(timestampDeltaWidth);
    builder.append(", minMvccVersion=");
    builder.append(minMvccVersion);
    builder.append(", mvccVersionDeltaWidth=");
    builder.append(mvccVersionDeltaWidth);
    builder.append(", allSameType=");
    builder.append(allSameType);
    builder.append(", allTypes=");
    builder.append(allTypes);
    builder.append(", numUniqueRows=");
    builder.append(numUniqueRows);
    builder.append(", numUniqueFamilies=");
    builder.append(numUniqueFamilies);
    builder.append(", numUniqueQualifiers=");
    builder.append(numUniqueQualifiers);
    builder.append(", numUniqueTags=");
    builder.append(numUniqueTags);
    builder.append("]");
    return builder.toString();
  }
  /************** absolute getters *******************/
  // Each section's absolute offset is the previous section's offset plus its length,
  // in the fixed order: meta, row, family, qualifier, tags, timestamp, mvcc, value.
  public int getAbsoluteMetaOffset() {
    return arrayOffset + bufferOffset;
  }
  public int getAbsoluteRowOffset() {
    return getAbsoluteMetaOffset() + numMetaBytes;
  }
  public int getAbsoluteFamilyOffset() {
    return getAbsoluteRowOffset() + numRowBytes;
  }
  public int getAbsoluteQualifierOffset() {
    return getAbsoluteFamilyOffset() + numFamilyBytes;
  }
  public int getAbsoluteTagsOffset() {
    return getAbsoluteQualifierOffset() + numQualifierBytes;
  }
  public int getAbsoluteTimestampOffset() {
    return getAbsoluteTagsOffset() + numTagsBytes;
  }
  public int getAbsoluteMvccVersionOffset() {
    return getAbsoluteTimestampOffset() + numTimestampBytes;
  }
  public int getAbsoluteValueOffset() {
    return getAbsoluteMvccVersionOffset() + numMvccVersionBytes;
  }
  /*************** get/set ***************************/
  public int getTimestampDeltaWidth() {
    return timestampDeltaWidth;
  }
  public void setTimestampDeltaWidth(int timestampDeltaWidth) {
    this.timestampDeltaWidth = timestampDeltaWidth;
  }
  public int getValueOffsetWidth() {
    return valueOffsetWidth;
  }
  public int getTagsOffsetWidth() {
    return tagsOffsetWidth;
  }
  public void setValueOffsetWidth(int dataOffsetWidth) {
    this.valueOffsetWidth = dataOffsetWidth;
  }
  public void setTagsOffsetWidth(int dataOffsetWidth) {
    this.tagsOffsetWidth = dataOffsetWidth;
  }
  public int getValueLengthWidth() {
    return valueLengthWidth;
  }
  public void setValueLengthWidth(int dataLengthWidth) {
    this.valueLengthWidth = dataLengthWidth;
  }
  public int getMaxRowLength() {
    return maxRowLength;
  }
  public void setMaxRowLength(int maxRowLength) {
    this.maxRowLength = maxRowLength;
  }
  public long getMinTimestamp() {
    return minTimestamp;
  }
  public void setMinTimestamp(long minTimestamp) {
    this.minTimestamp = minTimestamp;
  }
  public byte getAllTypes() {
    return allTypes;
  }
  public void setAllTypes(byte allTypes) {
    this.allTypes = allTypes;
  }
  public boolean isAllSameType() {
    return allSameType;
  }
  public void setAllSameType(boolean allSameType) {
    this.allSameType = allSameType;
  }
  public int getNextNodeOffsetWidth() {
    return nextNodeOffsetWidth;
  }
  public void setNextNodeOffsetWidth(int nextNodeOffsetWidth) {
    this.nextNodeOffsetWidth = nextNodeOffsetWidth;
  }
  public int getNumRowBytes() {
    return numRowBytes;
  }
  public void setNumRowBytes(int numRowBytes) {
    this.numRowBytes = numRowBytes;
  }
  public int getNumTimestampBytes() {
    return numTimestampBytes;
  }
  public void setNumTimestampBytes(int numTimestampBytes) {
    this.numTimestampBytes = numTimestampBytes;
  }
  public int getNumValueBytes() {
    return numValueBytes;
  }
  public int getNumTagsBytes() {
    return numTagsBytes;
  }
  public void setNumTagsBytes(int numTagBytes){
    this.numTagsBytes = numTagBytes;
  }
  public void setNumValueBytes(int numValueBytes) {
    this.numValueBytes = numValueBytes;
  }
  public int getNumMetaBytes() {
    return numMetaBytes;
  }
  public void setNumMetaBytes(int numMetaBytes) {
    this.numMetaBytes = numMetaBytes;
  }
  public int getArrayOffset() {
    return arrayOffset;
  }
  public void setArrayOffset(int arrayOffset) {
    this.arrayOffset = arrayOffset;
  }
  public int getBufferOffset() {
    return bufferOffset;
  }
  public void setBufferOffset(int bufferOffset) {
    this.bufferOffset = bufferOffset;
  }
  public int getNumKeyValueBytes() {
    return numKeyValueBytes;
  }
  public void setNumKeyValueBytes(int numKeyValueBytes) {
    this.numKeyValueBytes = numKeyValueBytes;
  }
  public int getRowTreeDepth() {
    return rowTreeDepth;
  }
  public void setRowTreeDepth(int rowTreeDepth) {
    this.rowTreeDepth = rowTreeDepth;
  }
  public int getNumMvccVersionBytes() {
    return numMvccVersionBytes;
  }
  public void setNumMvccVersionBytes(int numMvccVersionBytes) {
    this.numMvccVersionBytes = numMvccVersionBytes;
  }
  public int getMvccVersionDeltaWidth() {
    return mvccVersionDeltaWidth;
  }
  public void setMvccVersionDeltaWidth(int mvccVersionDeltaWidth) {
    this.mvccVersionDeltaWidth = mvccVersionDeltaWidth;
  }
  public long getMinMvccVersion() {
    return minMvccVersion;
  }
  public void setMinMvccVersion(long minMvccVersion) {
    this.minMvccVersion = minMvccVersion;
  }
  public int getNumFamilyBytes() {
    return numFamilyBytes;
  }
  public void setNumFamilyBytes(int numFamilyBytes) {
    this.numFamilyBytes = numFamilyBytes;
  }
  public int getFamilyOffsetWidth() {
    return familyOffsetWidth;
  }
  public void setFamilyOffsetWidth(int familyOffsetWidth) {
    this.familyOffsetWidth = familyOffsetWidth;
  }
  public int getNumUniqueRows() {
    return numUniqueRows;
  }
  public void setNumUniqueRows(int numUniqueRows) {
    this.numUniqueRows = numUniqueRows;
  }
  public int getNumUniqueFamilies() {
    return numUniqueFamilies;
  }
  public void setNumUniqueFamilies(int numUniqueFamilies) {
    this.numUniqueFamilies = numUniqueFamilies;
  }
  public int getNumUniqueQualifiers() {
    return numUniqueQualifiers;
  }
  public void setNumUniqueQualifiers(int numUniqueQualifiers) {
    this.numUniqueQualifiers = numUniqueQualifiers;
  }
  public void setNumUniqueTags(int numUniqueTags) {
    this.numUniqueTags = numUniqueTags;
  }
  public int getNumUniqueTags() {
    return numUniqueTags;
  }
  public int getNumQualifierBytes() {
    return numQualifierBytes;
  }
  public void setNumQualifierBytes(int numQualifierBytes) {
    this.numQualifierBytes = numQualifierBytes;
  }
  public int getQualifierOffsetWidth() {
    return qualifierOffsetWidth;
  }
  public void setQualifierOffsetWidth(int qualifierOffsetWidth) {
    this.qualifierOffsetWidth = qualifierOffsetWidth;
  }
  public int getMaxQualifierLength() {
    return maxQualifierLength;
  }
  // TODO : decide on some max value for this ? INTEGER_MAX?
  public void setMaxQualifierLength(int maxQualifierLength) {
    this.maxQualifierLength = maxQualifierLength;
  }
  public int getMaxTagsLength() {
    return this.maxTagsLength;
  }
  public void setMaxTagsLength(int maxTagLength) {
    this.maxTagsLength = maxTagLength;
  }
  public int getTimestampIndexWidth() {
    return timestampIndexWidth;
  }
  public void setTimestampIndexWidth(int timestampIndexWidth) {
    this.timestampIndexWidth = timestampIndexWidth;
  }
  public int getMvccVersionIndexWidth() {
    return mvccVersionIndexWidth;
  }
  public void setMvccVersionIndexWidth(int mvccVersionIndexWidth) {
    this.mvccVersionIndexWidth = mvccVersionIndexWidth;
  }
  public int getVersion() {
    return version;
  }
  public void setVersion(int version) {
    this.version = version;
  }
  public boolean isIncludesMvccVersion() {
    return includesMvccVersion;
  }
  public void setIncludesMvccVersion(boolean includesMvccVersion) {
    this.includesMvccVersion = includesMvccVersion;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.jta;
import static org.apache.geode.distributed.ConfigurationProperties.CACHE_XML_FILE;
import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
import static org.apache.geode.test.util.ResourceUtils.createTempFileFromResource;
import static org.junit.Assert.fail;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;
import javax.naming.Context;
import javax.transaction.RollbackException;
import javax.transaction.Status;
import javax.transaction.Synchronization;
import javax.transaction.Transaction;
import javax.transaction.TransactionManager;
import javax.transaction.UserTransaction;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.distributed.DistributedSystem;
import org.apache.geode.internal.datasource.GemFireBasicDataSource;
import org.apache.geode.internal.datasource.GemFireTransactionDataSource;
/**
 * JTA integration tests for Geode's global transaction support: data-source lookup,
 * rollback-only marking, resource enlistment, synchronization registration, and
 * transaction suspend/resume.
 */
public class GlobalTransactionJUnitTest {
  private static Properties props = null;
  private static DistributedSystem ds1 = null;
  private static Cache cache = null;
  private static UserTransaction utx = null;
  private static TransactionManager tm = null;
  @Before
  public void setUp() throws Exception {
    props = new Properties();
    props.setProperty(MCAST_PORT, "0");
    // cachejta.xml declares the JNDI data sources used by the tests below.
    String path =
        createTempFileFromResource(GlobalTransactionJUnitTest.class, "/jta/cachejta.xml")
            .getAbsolutePath();
    props.setProperty(CACHE_XML_FILE, path);
    ds1 = DistributedSystem.connect(props);
    cache = CacheFactory.create(ds1);
    utx = new UserTransactionImpl();
    tm = TransactionManagerImpl.getTransactionManager();
  }
  @After
  public void tearDown() throws Exception {
    ds1.disconnect();
  }
  @Test
  public void testGetSimpleDataSource() throws Exception {
    Context ctx = cache.getJNDIContext();
    GemFireBasicDataSource ds = (GemFireBasicDataSource) ctx.lookup("java:/SimpleDataSource");
    Connection conn = ds.getConnection();
    if (conn == null) {
      fail(
          "DataSourceFactoryTest-testGetSimpleDataSource() Error in creating the GemFireBasicDataSource");
    }
  }
  @Test
  public void testSetRollbackOnly() throws Exception {
    utx.begin();
    utx.setRollbackOnly();
    Transaction txn = tm.getTransaction();
    if (txn.getStatus() != Status.STATUS_MARKED_ROLLBACK) {
      utx.rollback();
      fail("testSetRollbackonly failed");
    }
    utx.rollback();
  }
  @Test
  public void testEnlistResource() throws Exception {
    try {
      boolean exceptionoccurred = false;
      utx.begin();
      try {
        Context ctx = cache.getJNDIContext();
        GemFireTransactionDataSource ds =
            (GemFireTransactionDataSource) ctx.lookup("java:/XAPooledDataSource");
        // Obtaining a connection inside an active transaction enlists the XA resource.
        ds.getConnection();
      } catch (SQLException e) {
        exceptionoccurred = true;
      }
      if (exceptionoccurred) {
        fail("SQLException occurred while trying to enlist resource");
      }
      utx.rollback();
    } catch (Exception e) {
      try {
        utx.rollback();
      } catch (Exception e1) {
        e1.printStackTrace();
      }
      // Print before fail(): fail() throws AssertionError, so anything after it is unreachable.
      e.printStackTrace();
      fail("exception in testEnlistResource due to " + e);
    }
  }
  @Test
  public void testRegisterSynchronization() throws Exception {
    try {
      boolean exceptionoccurred = false;
      utx.begin();
      try {
        Transaction txn = tm.getTransaction();
        Synchronization sync = new SyncImpl();
        txn.registerSynchronization(sync);
      } catch (RollbackException e) {
        exceptionoccurred = true;
      }
      if (exceptionoccurred) {
        fail("exception occurred while trying to register synchronization ");
      }
      utx.rollback();
    } catch (Exception e) {
      try {
        utx.rollback();
      } catch (Exception e1) {
        e1.printStackTrace();
      }
      // Print before fail(): fail() throws AssertionError, so anything after it is unreachable.
      e.printStackTrace();
      fail("exception in testRegisterSynchronization due to " + e);
    }
  }
  @Test
  public void testEnlistResourceAfterRollBack() throws Exception {
    try {
      boolean exceptionoccurred = false;
      utx.begin();
      utx.setRollbackOnly();
      Context ctx = cache.getJNDIContext();
      try {
        GemFireTransactionDataSource ds =
            (GemFireTransactionDataSource) ctx.lookup("java:/XAPooledDataSource");
        // Enlistment must be refused once the transaction is marked rollback-only.
        ds.getConnection();
      } catch (SQLException e) {
        exceptionoccurred = true;
      }
      if (!exceptionoccurred) {
        fail("SQLException not occurred although the transaction was marked for rollback");
      }
      utx.rollback();
    } catch (Exception e) {
      try {
        utx.rollback();
      } catch (Exception e1) {
        e1.printStackTrace();
      }
      // Print before fail(): fail() throws AssertionError, so anything after it is unreachable.
      e.printStackTrace();
      fail("exception in testEnlistResourceAfterRollBack due to " + e);
    }
  }
  @Test
  public void testRegisterSynchronizationAfterRollBack() throws Exception {
    try {
      boolean exceptionoccurred = false;
      utx.begin();
      utx.setRollbackOnly();
      Context ctx = cache.getJNDIContext();
      try {
        Transaction txn = tm.getTransaction();
        Synchronization sync = new SyncImpl();
        // Registration must be refused once the transaction is marked rollback-only.
        txn.registerSynchronization(sync);
      } catch (RollbackException e) {
        exceptionoccurred = true;
      }
      if (!exceptionoccurred) {
        fail("RollbackException not occurred although the transaction was marked for rollback");
      }
      utx.rollback();
    } catch (Exception e) {
      try {
        utx.rollback();
      } catch (Exception e1) {
        e1.printStackTrace();
      }
      // Print before fail(): fail() throws AssertionError, so anything after it is unreachable.
      e.printStackTrace();
      fail("exception in testRegisterSynchronizationAfterRollBack due to " + e);
    }
  }
  @Test
  public void testSuspend() throws Exception {
    utx.begin();
    tm.suspend();
    // After suspend() the calling thread must no longer be associated with a transaction.
    Transaction txn1 = tm.getTransaction();
    if (txn1 != null) {
      fail("suspend failed to suspend the transaction");
    }
  }
  @Test
  public void testResume() throws Exception {
    utx.begin();
    Transaction txn = tm.getTransaction();
    Transaction txn1 = tm.suspend();
    tm.resume(txn1);
    // resume() must re-associate the exact same transaction with this thread.
    if (txn != tm.getTransaction()) {
      fail("resume failed ");
    }
    utx.commit();
  }
}
| |
package org.jai.search.setup;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import java.io.IOException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.jai.search.model.ElasticSearchIndexConfig;
import org.jai.search.model.ElasticSearchReservedWords;
import org.jai.search.model.SearchDocumentFieldName;
import org.jai.search.model.SearchFacetName;
import org.jai.search.util.SearchDateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Builds the Elasticsearch index settings (custom token filters and analyzers) and the
 * per-document-type mappings for a given {@link ElasticSearchIndexConfig}.
 */
public class IndexSchemaBuilder
{
    private static final Logger logger = LoggerFactory.getLogger(IndexSchemaBuilder.class);

    /**
     * Generates the index-level settings: locale-specific stopword, snowball, word-delimiter and
     * synonym filters, shingle and edge-ngram filters, and the custom analyzers composed from them.
     *
     * @param config index configuration supplying filter/analyzer names and the supported locale
     * @return the generated {@link Settings}
     * @throws IOException if building the JSON settings source fails
     */
    public Settings getSettingForIndex(ElasticSearchIndexConfig config) throws IOException
    {
        logger.debug("Generating settings for index: {}", config.getIndexAliasName());
        Settings settings = Settings.builder().loadFromSource(jsonBuilder()
                .startObject()
                // disable dynamic mapping adding, set it to false
                .field(ElasticSearchReservedWords.INDEX_MAPPER_DYNAMIC.getText(), false)
                // Analysis settings: custom token filters first, then analyzers composed from them.
                .startObject(ElasticSearchReservedWords.ANALYSIS.getText())
                .startObject(ElasticSearchReservedWords.FILTER.getText())
                // Locale-specific stopword filter backed by a file under stopwords/.
                .startObject(config.getStopwordsCustomFilterName())
                .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.STOP.getText())
                .field(ElasticSearchReservedWords.STOPWORDS_PATH.getText(), "stopwords/stop_" + config.getSupportedLocale().getText())
                .endObject()
                // Snowball stemmer for the configured language.
                .startObject(config.getSnowballCustomFilterName())
                .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.SNOWBALL.getText())
                .field(ElasticSearchReservedWords.LANGUAGE.getText(), config.getSupportedLocale().getLang())
                .endObject()
                // Word-delimiter filter with protected words and a custom type table.
                .startObject(config.getWorddelimiterCustomFilterName())
                .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.WORD_DELIMITER.getText())
                .field(ElasticSearchReservedWords.PROTECTED_WORDS_PATH.getText(), "worddelimiters/protectedwords_" + config.getSupportedLocale().getText())
                .field(ElasticSearchReservedWords.TYPE_TABLE_PATH.getText(), "worddelimiters/typetable")
                .field("split_on_numerics", "true")
                .field("generate_number_parts", "true")
                .field("preserve_original", "true")
                .endObject()
                // Locale-specific synonyms, case-insensitive and expanded in both directions.
                .startObject(config.getSynonymsCustomFilterName())
                .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.SYNONYM.getText())
                .field(ElasticSearchReservedWords.SYNONYMS_PATH.getText(), "synonyms/synonyms_" + config.getSupportedLocale().getText())
                .field(ElasticSearchReservedWords.SYNONYMS_IGNORE_CASE.getText(), true)
                .field(ElasticSearchReservedWords.SYNONYMS_EXPAND.getText(), true)
                .endObject()
                // Shingles of 2..4 tokens.
                .startObject(config.getShingleTokenFilterName())
                .field("type", "shingle")
                .field("min_shingle_size", 2)
                .field("max_shingle_size", 4)
                .endObject()
                // Edge n-grams of 4..30 characters for prefix matching.
                .startObject(config.getNGramTokenFilterName())
                .field("type", "edgeNGram")
                .field("min_gram", 4)
                .field("max_gram", 30)
                .endObject()
                .endObject()
                .startObject(ElasticSearchReservedWords.ANALYZER.getText())
                // Standard text analyzer: standard tokenizer + lowercase, stopwords, synonyms, stemming.
                .startObject(config.getStandardTextAnalyzerName())
                .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.CUSTOM.getText())
                .field(ElasticSearchReservedWords.TOKENIZER.getText(), ElasticSearchReservedWords.STANDARD.getText())
                .field(ElasticSearchReservedWords.FILTER.getText(), new String[]{ElasticSearchReservedWords.LOWERCASE.getText(),
                        config.getStopwordsCustomFilterName(),
                        config.getSynonymsCustomFilterName(),
                        config.getSnowballCustomFilterName()
                })
                .endObject()
                // Free-text analyzer: whitespace tokenizer, adds word-delimiter handling and HTML stripping.
                .startObject(config.getCustomFreeTextAnalyzerName())
                .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.CUSTOM.getText())
                .field(ElasticSearchReservedWords.TOKENIZER.getText(), ElasticSearchReservedWords.WHITESPACE.getText())
                .field(ElasticSearchReservedWords.FILTER.getText(), new String[]{ElasticSearchReservedWords.LOWERCASE.getText(),
                        config.getWorddelimiterCustomFilterName(),
                        config.getStopwordsCustomFilterName(),
                        config.getSynonymsCustomFilterName(),
                        config.getSnowballCustomFilterName()
                })
                .field(ElasticSearchReservedWords.CHAR_FILTER.getText(), ElasticSearchReservedWords.HTML_STRIP.getText())
                .endObject()
                // Auto-suggestion analyzer: keyword tokenizer keeps the whole input as one token.
                .startObject(config.getAutoSuggestionAnalyzerName())
                .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.CUSTOM.getText())
                .field(ElasticSearchReservedWords.TOKENIZER.getText(), ElasticSearchReservedWords.KEYWORD.getText())
                .field(ElasticSearchReservedWords.FILTER.getText(), new String[]{ElasticSearchReservedWords.LOWERCASE.getText()
                })
                .endObject()
                // Facet analyzer: standard tokenizer + lowercase, stemming and synonyms.
                .startObject(config.getCustomFacetAnalyzerName())
                .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.CUSTOM.getText())
                .field(ElasticSearchReservedWords.TOKENIZER.getText(), ElasticSearchReservedWords.STANDARD.getText())
                .field(ElasticSearchReservedWords.FILTER.getText(), new String[]{ElasticSearchReservedWords.LOWERCASE.getText(),
                        config.getSnowballCustomFilterName(),
                        config.getSynonymsCustomFilterName()
                })
                .endObject()
                .endObject()
                .endObject()
                .endObject().string()).build();
        if (logger.isDebugEnabled())
        {
            // Guarded: getAsMap() materializes the whole settings map and is only needed for tracing.
            logger.debug("Generated settings for index {} is: {}", config.getIndexAliasName(), settings.getAsMap());
        }
        return settings;
    }

    /**
     * Generates the mapping for one document type of the given index. Dynamic field addition is set
     * to "strict" so documents carrying undeclared fields are rejected.
     *
     * @param elasticSearchIndexConfig index configuration providing document type names and analyzer names
     * @param documentType the document type the mapping is generated for
     * @param parentRelationship whether a _parent link to the enclosing type should be added
     * @return the builder containing the generated mapping
     * @throws IOException if building the JSON mapping fails
     */
    public XContentBuilder getDocumentTypeMapping(ElasticSearchIndexConfig elasticSearchIndexConfig, String documentType, boolean parentRelationship) throws IOException
    {
        XContentBuilder builder = jsonBuilder().prettyPrint().startObject().startObject(documentType);
        // Disable dynamic mapping of fields: undeclared fields cause an error.
        builder.field(ElasticSearchReservedWords.DYNAMIC.getText(), "strict");
        if (documentType.equals(elasticSearchIndexConfig.getDocumentType()))
        {
            // Used for parent-child relationship: products are children of groups.
            if (parentRelationship)
            {
                builder.startObject("_parent").field("type", elasticSearchIndexConfig.getGroupDocumentType()).endObject();
            }
            builder.startObject(ElasticSearchReservedWords.PROPERTIES.getText());
            addProductBooleanFieldMappingForNEW(builder);
            addLiveDateMapping(builder);
            addCustomBoostFactorMapping(builder);
            addProductDynamicValues(builder);
            addContentInformationFieldsMapping(builder, elasticSearchIndexConfig);
            addSpecifications(builder, elasticSearchIndexConfig);
        }
        else if (documentType.equals(elasticSearchIndexConfig.getGroupDocumentType()))
        {
            builder.startObject(ElasticSearchReservedWords.PROPERTIES.getText());
            builder.startObject(SearchDocumentFieldName.TITLEPG.getFieldName())
                    .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.KEYWORD.getText())
                    .field(ElasticSearchReservedWords.STORE.getText(), ElasticSearchReservedWords.TRUE.getText())
                    .field(ElasticSearchReservedWords.INDEX.getText(), ElasticSearchReservedWords.TRUE.getText())
                    .endObject()
                    .startObject(SearchDocumentFieldName.DESCRIPTIONPG.getFieldName())
                    .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.KEYWORD.getText())
                    .field(ElasticSearchReservedWords.STORE.getText(), ElasticSearchReservedWords.TRUE.getText())
                    .field(ElasticSearchReservedWords.INDEX.getText(), ElasticSearchReservedWords.TRUE.getText())
                    .endObject();
        }
        else if (documentType.equals(elasticSearchIndexConfig.getPropertiesDocumentType()))
        {
            // Used for parent-child relationship: properties are children of products.
            if (parentRelationship)
            {
                builder.startObject("_parent").field("type", elasticSearchIndexConfig.getDocumentType()).endObject();
            }
            builder.startObject(ElasticSearchReservedWords.PROPERTIES.getText());
            builder.startObject(SearchDocumentFieldName.SIZE.getFieldName())
                    .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.KEYWORD.getText())
                    .field(ElasticSearchReservedWords.STORE.getText(), ElasticSearchReservedWords.TRUE.getText())
                    .field(ElasticSearchReservedWords.INDEX.getText(), ElasticSearchReservedWords.TRUE.getText())
                    .endObject()
                    .startObject(SearchDocumentFieldName.COLOR.getFieldName())
                    .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.KEYWORD.getText())
                    .field(ElasticSearchReservedWords.STORE.getText(), ElasticSearchReservedWords.TRUE.getText())
                    .field(ElasticSearchReservedWords.INDEX.getText(), ElasticSearchReservedWords.TRUE.getText())
                    .endObject();
        }
        else
        {
            // Unknown document type: still open "properties" so the endObject() calls below stay
            // balanced and a valid (empty) mapping is produced instead of a broken JSON structure.
            builder.startObject(ElasticSearchReservedWords.PROPERTIES.getText());
        }
        // end properties
        builder.endObject()
                // end the two enclosing objects: the document type object and the root object
                .endObject()
                .endObject();
        if (logger.isDebugEnabled())
        {
            // Guarded: string() serializes the whole builder; only pay that cost when tracing.
            // NOTE(review): some XContentBuilder versions close the builder on string() — confirm
            // the returned builder is still usable when debug logging is enabled.
            logger.debug("Generated mapping for document type {} is: {}", elasticSearchIndexConfig, builder.prettyPrint().string());
        }
        return builder;
    }

    /**
     * Adds the nested "specifications" object (resolution, memory) to the product mapping.
     */
    private void addSpecifications(XContentBuilder builder, ElasticSearchIndexConfig elasticSearchIndexConfig) throws IOException
    {
        builder.startObject(SearchDocumentFieldName.SPECIFICATIONS.getFieldName())
                .startObject(ElasticSearchReservedWords.PROPERTIES.getText())
                .startObject(SearchDocumentFieldName.RESOLUTION.getFieldName())
                .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.KEYWORD.getText())
                .field(ElasticSearchReservedWords.INDEX.getText(), ElasticSearchReservedWords.TRUE.getText())
                .field(ElasticSearchReservedWords.STORE.getText(), ElasticSearchReservedWords.TRUE.getText())
                .endObject()
                .startObject(SearchDocumentFieldName.MEMORY.getFieldName())
                .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.KEYWORD.getText())
                .field(ElasticSearchReservedWords.INDEX.getText(), ElasticSearchReservedWords.TRUE.getText())
                .field(ElasticSearchReservedWords.STORE.getText(), ElasticSearchReservedWords.TRUE.getText())
                .endObject()
                .endObject()
                // Specifications are indexed as a nested type so each entry is queried independently.
                .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.NESTED.getText())
                .endObject();
    }

    /**
     * Adds the boolean "sold out" flag mapping to the product document type.
     */
    private void addProductBooleanFieldMappingForNEW(XContentBuilder builder) throws IOException
    {
        builder.startObject(SearchDocumentFieldName.SOLD_OUT.getFieldName())
                .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.BOOLEAN.getText())
                .field(ElasticSearchReservedWords.INDEX.getText(), ElasticSearchReservedWords.TRUE.getText())
                .field(ElasticSearchReservedWords.STORE.getText(), ElasticSearchReservedWords.TRUE.getText())
                .endObject();
    }

    /**
     * Adds the availability date mapping using the application-wide search date format.
     */
    private void addLiveDateMapping(XContentBuilder builder) throws IOException
    {
        builder.startObject(SearchDocumentFieldName.AVAILABLE_DATE.getFieldName())
                .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.DATE.getText())
                .field(ElasticSearchReservedWords.FORMAT.getText(), SearchDateUtils.SEARCH_DATE_FORMAT_YYYY_MM_DD_T_HH_MM_SSSZZ)
                .field(ElasticSearchReservedWords.STORE.getText(), ElasticSearchReservedWords.TRUE.getText())
                .field(ElasticSearchReservedWords.INDEX.getText(), ElasticSearchReservedWords.TRUE.getText())
                .endObject();
    }

    /**
     * Adds the per-document float boost factor mapping.
     */
    private void addCustomBoostFactorMapping(XContentBuilder builder) throws IOException
    {
        builder.startObject(SearchDocumentFieldName.BOOSTFACTOR.getFieldName())
                .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.FLOAT.getText())
                .field(ElasticSearchReservedWords.STORE.getText(), ElasticSearchReservedWords.TRUE.getText())
                .field(ElasticSearchReservedWords.INDEX.getText(), ElasticSearchReservedWords.TRUE.getText())
                .endObject();
    }

    /**
     * Adds dynamic product value mappings (currently only the price as a double).
     */
    private void addProductDynamicValues(XContentBuilder builder) throws IOException
    {
        builder.startObject(SearchDocumentFieldName.PRICE.getFieldName())
                .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.DOUBLE.getText())
                .field(ElasticSearchReservedWords.INDEX.getText(), ElasticSearchReservedWords.TRUE.getText())
                .endObject();
    }

    /**
     * Adds the content information fields: analyzed title and description, the nested category
     * facet hierarchy (facet, filter and suggest sub-fields per level) and the completion-based
     * keywords field.
     *
     * @return the same builder for chaining
     */
    private XContentBuilder addContentInformationFieldsMapping(XContentBuilder builder, ElasticSearchIndexConfig elasticSearchIndexConfig) throws IOException
    {
        builder.startObject(SearchDocumentFieldName.TITLE.getFieldName())
                .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.TEXT.getText())
                .field(ElasticSearchReservedWords.STORE.getText(), ElasticSearchReservedWords.TRUE.getText())
                .field(ElasticSearchReservedWords.INDEX.getText(), ElasticSearchReservedWords.TRUE.getText())
                .field(ElasticSearchReservedWords.ANALYZER.getText(), elasticSearchIndexConfig.getCustomFreeTextAnalyzerName())
                .field(ElasticSearchReservedWords.FIELD_DATA.getText(), ElasticSearchReservedWords.TRUE.getText())
                .endObject()
                .startObject(SearchDocumentFieldName.DESCRIPTION.getFieldName())
                .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.TEXT.getText())
                .field(ElasticSearchReservedWords.STORE.getText(), ElasticSearchReservedWords.TRUE.getText())
                .field(ElasticSearchReservedWords.INDEX.getText(), ElasticSearchReservedWords.TRUE.getText())
                .field(ElasticSearchReservedWords.ANALYZER.getText(), elasticSearchIndexConfig.getCustomFreeTextAnalyzerName())
                .field(ElasticSearchReservedWords.FIELD_DATA.getText(), ElasticSearchReservedWords.TRUE.getText())
                .endObject()
                // Content categories are nested so each category entry is matched as a unit.
                .startObject(SearchDocumentFieldName.CATEGORIES_ARRAY.getFieldName())
                .field(ElasticSearchReservedWords.TYPE.getText(), "nested")
                .startObject(ElasticSearchReservedWords.PROPERTIES.getText());
        // Hierarchy depth is a global constant; hoisted out of the facet loop.
        int hierarchyLevel = SearchFacetName.getSupportedFacetParentChildHierarchyLevel();
        for (SearchFacetName facetName : SearchFacetName.categoryFacetValues())
        {
            // One field group per hierarchy level: sequenced name, facet, facet filter and suggest.
            for (int i = 1; i <= hierarchyLevel; i++)
            {
                builder
                        .startObject(facetName.getFacetSequencedFieldNameAtLevel(i))
                        .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.KEYWORD.getText())
                        .field(ElasticSearchReservedWords.INDEX.getText(), ElasticSearchReservedWords.TRUE.getText())
                        .endObject()
                        .startObject(facetName.getFacetFieldNameAtLevel(i) + "." + SearchDocumentFieldName.FACET.getFieldName())
                        .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.KEYWORD.getText())
                        .field(ElasticSearchReservedWords.STORE.getText(), ElasticSearchReservedWords.TRUE.getText())
                        .field(ElasticSearchReservedWords.INDEX.getText(), ElasticSearchReservedWords.TRUE.getText())
                        .endObject()
                        .startObject(facetName.getFacetFieldNameAtLevel(i) + "." + SearchDocumentFieldName.FACETFILTER.getFieldName())
                        .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.KEYWORD.getText())
                        .field(ElasticSearchReservedWords.INDEX.getText(), ElasticSearchReservedWords.TRUE.getText())
                        .endObject()
                        .startObject(facetName.getFacetFieldNameAtLevel(i) + "." + SearchDocumentFieldName.SUGGEST.getFieldName())
                        .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.TEXT.getText())
                        .field(ElasticSearchReservedWords.STORE.getText(), ElasticSearchReservedWords.TRUE.getText())
                        .field(ElasticSearchReservedWords.INDEX.getText(), ElasticSearchReservedWords.TRUE.getText())
                        .field(ElasticSearchReservedWords.ANALYZER.getText(), elasticSearchIndexConfig.getAutoSuggestionAnalyzerName())
                        .field(ElasticSearchReservedWords.FIELD_DATA.getText(), ElasticSearchReservedWords.TRUE.getText())
                        .endObject();
            }
        }
        builder.endObject()
                .endObject()
                // Keywords power the completion suggester.
                .startObject(SearchDocumentFieldName.KEYWORDS.getFieldName())
                .field(ElasticSearchReservedWords.TYPE.getText(), ElasticSearchReservedWords.COMPLETION.getText())
                .field(ElasticSearchReservedWords.ANALYZER.getText(), elasticSearchIndexConfig.getAutoSuggestionAnalyzerName())
                .endObject();
        return builder;
    }
}
| |
/**
* Licensed to the Austrian Association for Software Tool Integration (AASTI)
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. The AASTI licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openengsb.core.ekb.api;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.UUID;
import org.openengsb.core.api.model.OpenEngSBModel;
/**
 * The EKBCommit class contains all necessary information to do a commit to the EDB through the EKB PersistInterface.
 * Models to insert, update and delete are collected in separate lists; the add/set methods return this commit so
 * calls can be chained fluently.
 */
public class EKBCommit {
    // Models to be inserted into the EDB.
    private List<OpenEngSBModel> inserts;
    // Models to be updated in the EDB.
    private List<OpenEngSBModel> updates;
    // Models to be deleted from the EDB.
    private List<OpenEngSBModel> deletes;
    // Origin of the commit: domain, connector and instance identifiers.
    private String domainId;
    private String connectorId;
    private String instanceId;
    // Revision of this commit and of its parent commit.
    private UUID revisionNumber;
    private UUID parentRevisionNumber;

    public EKBCommit() {
        inserts = new ArrayList<OpenEngSBModel>();
        updates = new ArrayList<OpenEngSBModel>();
        deletes = new ArrayList<OpenEngSBModel>();
    }

    /**
     * Adds a model to the list of models which shall be inserted into the EDB. If the given object is not a model, an
     * IllegalArgumentException is thrown. A null argument is silently ignored.
     */
    public EKBCommit addInsert(Object insert) {
        if (insert != null) {
            checkIfModel(insert);
            inserts.add((OpenEngSBModel) insert);
        }
        return this;
    }

    /**
     * Adds a collection of models which shall be inserted into the EDB. If one of the given objects is not a model, an
     * IllegalArgumentException is thrown. A null collection is silently ignored.
     */
    public EKBCommit addInserts(Collection<?> inserts) {
        if (inserts != null) {
            for (Object insert : inserts) {
                checkIfModel(insert);
                this.inserts.add((OpenEngSBModel) insert);
            }
        }
        return this;
    }

    /**
     * Adds a model to the list of models which shall be updated in the EDB. If the given object is not a model, an
     * IllegalArgumentException is thrown. A null argument is silently ignored.
     */
    public EKBCommit addUpdate(Object update) {
        if (update != null) {
            checkIfModel(update);
            updates.add((OpenEngSBModel) update);
        }
        return this;
    }

    /**
     * Adds a collection of models which shall be updated in the EDB. If one of the given objects is not a model, an
     * IllegalArgumentException is thrown. A null collection is silently ignored.
     */
    public EKBCommit addUpdates(Collection<?> updates) {
        if (updates != null) {
            for (Object update : updates) {
                checkIfModel(update);
                this.updates.add((OpenEngSBModel) update);
            }
        }
        return this;
    }

    /**
     * Adds a model to the list of models which shall be deleted from the EDB. If the given object is not a model, an
     * IllegalArgumentException is thrown. A null argument is silently ignored.
     */
    public EKBCommit addDelete(Object delete) {
        if (delete != null) {
            checkIfModel(delete);
            deletes.add((OpenEngSBModel) delete);
        }
        return this;
    }

    /**
     * Adds a collection of models which shall be deleted from the EDB. If one of the given objects is not a model, an
     * IllegalArgumentException is thrown. A null collection is silently ignored.
     */
    public EKBCommit addDeletes(Collection<?> deletes) {
        if (deletes != null) {
            for (Object delete : deletes) {
                checkIfModel(delete);
                this.deletes.add((OpenEngSBModel) delete);
            }
        }
        return this;
    }

    /**
     * Defines the id of the domain from where the commit comes from.
     */
    public EKBCommit setDomainId(String domainId) {
        this.domainId = domainId;
        return this;
    }

    /**
     * Defines the id of the connector from where the commit comes from.
     */
    public EKBCommit setConnectorId(String connectorId) {
        this.connectorId = connectorId;
        return this;
    }

    /**
     * Defines the id of the instance from where the commit comes from.
     */
    public EKBCommit setInstanceId(String instanceId) {
        this.instanceId = instanceId;
        return this;
    }

    /**
     * Returns the list of OpenEngSBModels which shall be inserted into the EDB.
     */
    public List<OpenEngSBModel> getInserts() {
        return inserts;
    }

    /**
     * Returns the list of OpenEngSBModels which shall be updated in the EDB.
     */
    public List<OpenEngSBModel> getUpdates() {
        return updates;
    }

    /**
     * Returns the list of OpenEngSBModels which shall be deleted from the EDB.
     */
    public List<OpenEngSBModel> getDeletes() {
        return deletes;
    }

    /**
     * Returns the id of the domain from where the commit comes from.
     */
    public String getDomainId() {
        return domainId;
    }

    /**
     * Returns the id of the connector from where the commit comes from.
     */
    public String getConnectorId() {
        return connectorId;
    }

    /**
     * Returns the id of the instance from where the commit comes from.
     */
    public String getInstanceId() {
        return instanceId;
    }

    /**
     * Checks if an object is an OpenEngSBModel and throws an IllegalArgumentException if the object is no model.
     * Null values (e.g. null elements inside collections passed to the bulk add methods) are rejected with a
     * descriptive IllegalArgumentException instead of failing with a NullPointerException.
     */
    private void checkIfModel(Object model) {
        // instanceof is null-safe: a null model fails the check instead of throwing NPE on model.getClass().
        if (!(model instanceof OpenEngSBModel)) {
            throw new IllegalArgumentException("Only models can be committed, but got: "
                    + (model == null ? "null" : model.getClass().getName()));
        }
    }

    public UUID getParentRevisionNumber() {
        return parentRevisionNumber;
    }

    // NOTE(review): unlike the other setters this one is not fluent; kept as-is for interface stability.
    public void setParentRevisionNumber(UUID parentRevisionNumber) {
        this.parentRevisionNumber = parentRevisionNumber;
    }

    public UUID getRevisionNumber() {
        return revisionNumber;
    }

    // NOTE(review): unlike the other setters this one is not fluent; kept as-is for interface stability.
    public void setRevisionNumber(UUID revisionNumber) {
        this.revisionNumber = revisionNumber;
    }
}
| |
package com.dianping.cat.message;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.PooledByteBufAllocator;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.LinkedBlockingQueue;
import junit.framework.Assert;
import org.codehaus.plexus.PlexusContainer;
import org.junit.Test;
import org.unidal.helper.Files;
import org.unidal.helper.Reflects;
import org.unidal.lookup.ComponentTestCase;
import com.dianping.cat.Cat;
import com.dianping.cat.configuration.ClientConfigManager;
import com.dianping.cat.configuration.client.entity.ClientConfig;
import com.dianping.cat.configuration.client.entity.Server;
import com.dianping.cat.message.internal.DefaultTransaction;
import com.dianping.cat.message.io.MessageSender;
import com.dianping.cat.message.io.TransportManager;
import com.dianping.cat.message.spi.MessageCodec;
import com.dianping.cat.message.spi.MessageTree;
/**
 * Tests for the CAT message producer API (events, heartbeats, transactions). Outgoing message
 * trees are captured in an in-memory queue by MockTransportManager and rendered to a compact
 * one-line-per-message text form by MockMessageCodec for comparison against expected output.
 */
public class MessageTest extends ComponentTestCase {
    // Captures every message tree "sent" through the mocked transport.
    private Queue<MessageTree> m_queue = new LinkedBlockingQueue<MessageTree>();

    // Drains the queue, encodes each captured tree and compares the concatenated text
    // representation against the expected string.
    private void checkMessage(String expected) {
        StringBuilder sb = new StringBuilder(1024);
        MessageCodec codec = new MockMessageCodec(sb);
        while (true) {
            MessageTree tree = m_queue.poll();
            if (tree != null) {
                codec.encode(tree);
            } else {
                break;
            }
        }
        Assert.assertEquals(expected, sb.toString());
    }

    // Builds an in-memory client configuration (domain "cat", a local server, max message
    // size 8) instead of reading cat-config.xml from disk.
    protected ClientConfig getConfigurationFile() {
        try {
            ClientConfig config = new ClientConfig();
            config.setDomain("cat");
            config.addServer(new Server("localhost"));
            config.setMaxMessageSize(8);
            return config;
        } catch (Exception e) {
            throw new RuntimeException("Unable to create cat-config.xml file!");
        }
    }

    @Override
    public void setUp() throws Exception {
        super.setUp();
        // Replace the real transport with one that writes into m_queue.
        defineComponent(TransportManager.class, MockTransportManager.class);
        MockTransportManager transportManager = (MockTransportManager) lookup(TransportManager.class);
        transportManager.setQueue(m_queue);
        ClientConfig config = getConfigurationFile();
        Cat.initialize(config);
        ClientConfigManager configManager = lookup(ClientConfigManager.class);
        configManager.initialize(config);
        // Discard anything produced during initialization so tests see only their own messages.
        m_queue.clear();
        // Point the Cat singleton at this test's Plexus container via reflection (the setter is not public).
        Reflects.forMethod().invokeDeclaredMethod(Cat.getInstance(), "setContainer", PlexusContainer.class,
              getContainer());
    }

    // A completed event is encoded as a single "E type name status data" line.
    @Test
    public void testEvent() throws InterruptedException {
        Event event = Cat.getProducer().newEvent("Review", "New");
        event.addData("id", 12345);
        event.addData("user", "john");
        event.setStatus(Message.SUCCESS);
        event.complete();
        checkMessage("E Review New 0 id=12345&user=john\n");
    }

    // A completed heartbeat is encoded as a single "H type name status data" line.
    @Test
    public void testHeartbeat() {
        Heartbeat heartbeat = Cat.getProducer().newHeartbeat("System", "Status");
        heartbeat.addData("ip", "192.168.10.111");
        heartbeat.addData("host", "host-1");
        heartbeat.addData("load", "2.1");
        heartbeat.addData("cpu", "0.12,0.10");
        heartbeat.addData("memory.total", "2G");
        heartbeat.addData("memory.free", "456M");
        heartbeat.setStatus(Message.SUCCESS);
        heartbeat.complete();
        checkMessage("H System Status 0 ip=192.168.10.111&host=host-1&load=2.1&cpu=0.12,0.10&memory.total=2G&memory.free=456M\n");
    }

    // Moving the root transaction's timestamp one hour back forces the tree to be truncated
    // by duration; expected output comes from a fixture file on the test classpath.
    @Test
    public void testMessageTruncatedForDuration() throws IOException {
        Transaction t = Cat.newTransaction("URL", "MyPage");
        try {
            // do your business here
            t.addData("k1", "v1");
            for (int i = 0; i < 3; i++) {
                Cat.logEvent("Event", "Name" + i);
            }
            Transaction t1 = Cat.newTransaction("URL1", "MyPage");
            t1.setStatus(Message.SUCCESS);
            t1.complete();
            // move root transaction to one hour ago
            ((DefaultTransaction) t).setTimestamp(t.getTimestamp() - 3600 * 1000L + 1);
            Transaction t2 = Cat.newTransaction("URL2", "MyPage");
            for (int i = 0; i < 3; i++) {
                Cat.logEvent("Event2", "Name" + i);
            }
            t2.setStatus(Message.SUCCESS);
            t2.complete();
            t.setStatus(Message.SUCCESS);
        } catch (Exception e) {
            t.setStatus(e);
        } finally {
            t.complete();
        }
        String expected = Files.forIO().readFrom(getClass().getResourceAsStream("message-truncated-for-duration.txt"),
              "utf-8");
        checkMessage(expected);
    }

    // Exceeding the configured max message size (8, see getConfigurationFile) forces the tree
    // to be truncated by size; expected output comes from a fixture file on the test classpath.
    @Test
    public void testMessageTruncatedForSize() throws IOException {
        Transaction t = Cat.newTransaction("URL", "MyPage");
        try {
            t.addData("k1", "v1");
            for (int i = 0; i < 20; i++) {
                Thread.sleep(1); // make sure total duration is larger than 1 millsecond
                Transaction t0 = Cat.newTransaction("URL0", "MyPage" + i);
                t0.setStatus(Message.SUCCESS);
                t0.complete();
            }
            Transaction t1 = Cat.newTransaction("URL1", "MyPageT1");
            Transaction t2 = Cat.newTransaction("URL2", "MyPageT2");
            for (int i = 0; i < 20; i++) {
                Cat.logEvent("Event", "Name" + i);
            }
            t2.complete();
            t1.complete();
            t.setStatus(Message.SUCCESS);
        } catch (Exception e) {
            t.setStatus(e);
        } finally {
            t.complete();
        }
        String expected = Files.forIO().readFrom(getClass().getResourceAsStream("message-truncated-for-size.txt"),
              "utf-8");
        checkMessage(expected);
    }

    // A childless completed transaction is encoded as a single "A type name status data" line.
    @Test
    public void testTransaction() throws Exception {
        Transaction t = Cat.newTransaction("URL", "MyPage");
        try {
            // do your business here
            t.addData("k1", "v1");
            t.addData("k2", "v2");
            t.addData("k3", "v3");
            t.setStatus(Message.SUCCESS);
        } catch (Exception e) {
            t.setStatus(e);
        } finally {
            t.complete();
        }
        checkMessage("A URL MyPage 0 k1=v1&k2=v2&k3=v3\n");
    }

    /**
     * Codec that renders messages into the shared StringBuilder as a compact one-line-per-message
     * text format used by the assertions; the ByteBuf return values are ignored by the tests.
     */
    protected static class MockMessageCodec implements MessageCodec {
        private StringBuilder m_sb;

        public MockMessageCodec(StringBuilder sb) {
            m_sb = sb;
        }

        @Override
        public MessageTree decode(ByteBuf buf) {
            throw new UnsupportedOperationException();
        }

        @Override
        public ByteBuf encode(MessageTree tree) {
            return encodeMessage(tree.getMessage());
        }

        // Renders an event as "E type name status data"; data is omitted for the two
        // synthetic event types emitted by truncation.
        private void encodeEvent(Event e, ByteBuf buf) {
            m_sb.append('E');
            m_sb.append(' ').append(e.getType());
            m_sb.append(' ').append(e.getName());
            m_sb.append(' ').append(e.getStatus());
            if (!e.getType().equals("RemoteCall") && !e.getType().equals("TruncatedTransaction")) {
                m_sb.append(' ').append(e.getData());
            }
            m_sb.append('\n');
        }

        // Renders a heartbeat as "H type name status data".
        private void encodeHeartbeat(Heartbeat h, ByteBuf buf) {
            m_sb.append('H');
            m_sb.append(' ').append(h.getType());
            m_sb.append(' ').append(h.getName());
            m_sb.append(' ').append(h.getStatus());
            m_sb.append(' ').append(h.getData());
            m_sb.append('\n');
        }

        // Dispatches on the concrete message type; the returned buffer is unused by the text checks.
        private ByteBuf encodeMessage(Message message) {
            ByteBuf buf = PooledByteBufAllocator.DEFAULT.buffer(4 * 1024);
            if (message instanceof Transaction) {
                encodeTransaction((Transaction) message, buf);
            } else if (message instanceof Event) {
                encodeEvent((Event) message, buf);
            } else if (message instanceof Heartbeat) {
                encodeHeartbeat((Heartbeat) message, buf);
            }
            return buf;
        }

        // Renders a childless transaction as one "A ..." line; one with children as a
        // "t ..." header, the recursively encoded children, then a "T ..." footer.
        private void encodeTransaction(Transaction t, ByteBuf buf) {
            List<Message> children = t.getChildren();
            if (children.isEmpty()) {
                m_sb.append('A');
                m_sb.append(' ').append(t.getType());
                m_sb.append(' ').append(t.getName());
                m_sb.append(' ').append(t.getStatus());
                m_sb.append(' ').append(t.getData());
                m_sb.append('\n');
            } else {
                m_sb.append('t');
                m_sb.append(' ').append(t.getType());
                m_sb.append(' ').append(t.getName());
                m_sb.append('\n');
                for (Message message : children) {
                    encodeMessage(message);
                }
                m_sb.append('T');
                m_sb.append(' ').append(t.getType());
                m_sb.append(' ').append(t.getName());
                m_sb.append(' ').append(t.getStatus());
                m_sb.append(' ').append(t.getData());
                m_sb.append('\n');
            }
        }

        @Override
        public void reset() {
        }

        @Override
        public void decode(ByteBuf buf, MessageTree tree) {
        }

        @Override
        public void encode(MessageTree tree, ByteBuf buf) {
        }
    }

    /**
     * Transport manager whose sender offers every message tree into the queue supplied via
     * setQueue instead of sending it over the network.
     */
    public static class MockTransportManager implements TransportManager {
        private MessageSender m_sender;

        public MockTransportManager() {
        }

        @Override
        public MessageSender getSender() {
            return m_sender;
        }

        public void setQueue(final Queue<MessageTree> queue) {
            m_sender = new MessageSender() {
                @Override
                public void send(MessageTree tree) {
                    queue.offer(tree);
                }

                @Override
                public void shutdown() {
                }

                @Override
                public void initialize(List<InetSocketAddress> addresses) {
                }
            };
        }
    }
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import com.facebook.buck.cli.BuckConfig;
import com.facebook.buck.cli.BuildTargetNodeToBuildRuleTransformer;
import com.facebook.buck.cli.FakeBuckConfig;
import com.facebook.buck.io.MorePaths;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.BuildTargetSourcePath;
import com.facebook.buck.rules.FakeBuildRule;
import com.facebook.buck.rules.FakeBuildRuleParamsBuilder;
import com.facebook.buck.rules.FakeSourcePath;
import com.facebook.buck.rules.PathSourcePath;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.testutil.AllExistingProjectFilesystem;
import com.facebook.buck.testutil.FakeProjectFilesystem;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedSet;
import org.hamcrest.Matchers;
import org.junit.Assume;
import org.junit.Test;
import org.junit.experimental.runners.Enclosed;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
@SuppressWarnings("PMD.TestClassWithoutTestCases")
@RunWith(Enclosed.class)
public class CxxSourceRuleFactoryTest {
private static final ProjectFilesystem PROJECT_FILESYSTEM = new FakeProjectFilesystem();
private static final CxxPlatform CXX_PLATFORM = DefaultCxxPlatforms.build(
new CxxBuckConfig(FakeBuckConfig.builder().build()));
private static <T> void assertContains(ImmutableList<T> container, Iterable<T> items) {
for (T item : items) {
assertThat(container, Matchers.hasItem(item));
}
}
// Unit tests for CxxSourceRuleFactory covering dependency propagation,
// platform preprocessor flags, PIC handling, prefix headers, and that
// requesting the same rule twice yields the same build target.
public static class CxxSourceRuleFactoryTests {
// Builds a FakeBuildRule for the given target string with the given declared deps.
private static FakeBuildRule createFakeBuildRule(
String target,
SourcePathResolver resolver,
BuildRule... deps) {
return new FakeBuildRule(
new FakeBuildRuleParamsBuilder(BuildTargetFactory.newInstance(target))
.setDeclaredDeps(ImmutableSortedSet.copyOf(deps))
.build(),
resolver);
}
// Rules registered on the CxxPreprocessorInput must surface as deps of both
// the preprocess rule and the combined preprocess-and-compile rule.
@Test
public void createPreprocessBuildRulePropagatesCxxPreprocessorDeps() {
BuildTarget target = BuildTargetFactory.newInstance("//foo:bar");
BuildRuleParams params = new FakeBuildRuleParamsBuilder(target).build();
BuildRuleResolver resolver =
new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer());
SourcePathResolver pathResolver = new SourcePathResolver(resolver);
FakeBuildRule dep = resolver.addToIndex(new FakeBuildRule("//:dep1", pathResolver));
CxxPreprocessorInput cxxPreprocessorInput =
CxxPreprocessorInput.builder()
.addRules(dep.getBuildTarget())
.build();
CxxSourceRuleFactory cxxSourceRuleFactory = CxxSourceRuleFactory.builder()
.setParams(params)
.setResolver(resolver)
.setPathResolver(pathResolver)
.setCxxPlatform(CXX_PLATFORM)
.addCxxPreprocessorInput(cxxPreprocessorInput)
.setPicType(CxxSourceRuleFactory.PicType.PDC)
.build();
String name = "foo/bar.cpp";
SourcePath input = new PathSourcePath(PROJECT_FILESYSTEM, target.getBasePath().resolve(name));
CxxSource cxxSource = CxxSource.of(
CxxSource.Type.CXX,
input,
ImmutableList.<String>of());
BuildRule cxxPreprocess =
cxxSourceRuleFactory.requirePreprocessBuildRule(
name,
cxxSource);
assertEquals(ImmutableSortedSet.<BuildRule>of(dep), cxxPreprocess.getDeps());
cxxPreprocess =
cxxSourceRuleFactory.requirePreprocessAndCompileBuildRule(
name,
cxxSource,
CxxPreprocessMode.SEPARATE);
assertEquals(ImmutableSortedSet.<BuildRule>of(dep), cxxPreprocess.getDeps());
}
// "cxxppflags" configured on the platform must appear in the preprocessor
// command of both rule flavors.
@Test
public void preprocessFlagsFromPlatformArePropagated() {
BuildTarget target = BuildTargetFactory.newInstance("//foo:bar");
BuildRuleParams params = new FakeBuildRuleParamsBuilder(target).build();
BuildRuleResolver resolver =
new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer());
SourcePathResolver pathResolver = new SourcePathResolver(resolver);
ImmutableList<String> platformFlags = ImmutableList.of("-some", "-flags");
CxxPlatform platform = DefaultCxxPlatforms.build(
new CxxBuckConfig(
FakeBuckConfig.builder().setSections(
ImmutableMap.of(
"cxx", ImmutableMap.of("cxxppflags", Joiner.on(" ").join(platformFlags))))
.build()));
CxxPreprocessorInput cxxPreprocessorInput = CxxPreprocessorInput.EMPTY;
CxxSourceRuleFactory cxxSourceRuleFactory = CxxSourceRuleFactory.builder()
.setParams(params)
.setResolver(resolver)
.setPathResolver(pathResolver)
.setCxxPlatform(platform)
.addCxxPreprocessorInput(cxxPreprocessorInput)
.setPicType(CxxSourceRuleFactory.PicType.PDC)
.build();
String name = "source.cpp";
CxxSource cxxSource = CxxSource.of(
CxxSource.Type.CXX,
new FakeSourcePath(name),
ImmutableList.<String>of());
// Verify that platform flags make it to the preprocess rule's command.
CxxPreprocessAndCompile cxxPreprocess =
cxxSourceRuleFactory.requirePreprocessBuildRule(
name,
cxxSource);
assertNotEquals(
-1,
Collections.indexOfSubList(
cxxPreprocess.getPreprocessorDelegate().get().getCommand(CxxToolFlags.of()),
platformFlags));
// ... and to the combined preprocess-and-compile rule's preprocessor command.
CxxPreprocessAndCompile cxxPreprocessAndCompile =
cxxSourceRuleFactory.requirePreprocessAndCompileBuildRule(
name,
cxxSource,
CxxPreprocessMode.SEPARATE);
assertNotEquals(
-1,
Collections.indexOfSubList(
cxxPreprocessAndCompile.getPreprocessorDelegate().get().getCommand(CxxToolFlags.of()),
platformFlags));
}
// A BuildTargetSourcePath input must make its producing rule a dep of the
// compile rule and of the preprocess-and-compile rule.
@Test
public void createCompileBuildRulePropagatesBuildRuleSourcePathDeps() {
BuildTarget target = BuildTargetFactory.newInstance("//foo:bar");
BuildRuleParams params = new FakeBuildRuleParamsBuilder(target).build();
BuildRuleResolver resolver =
new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer());
FakeBuildRule dep = createFakeBuildRule("//:test", new SourcePathResolver(resolver));
resolver.addToIndex(dep);
SourcePath input = new BuildTargetSourcePath(dep.getBuildTarget());
CxxSourceRuleFactory cxxSourceRuleFactory = CxxSourceRuleFactory.builder()
.setParams(params)
.setResolver(resolver)
.setPathResolver(new SourcePathResolver(resolver))
.setCxxPlatform(CXX_PLATFORM)
.setPicType(CxxSourceRuleFactory.PicType.PDC)
.build();
String nameCompile = "foo/bar.ii";
CxxSource cxxSourceCompile = CxxSource.of(
CxxSource.Type.CXX_CPP_OUTPUT,
input,
ImmutableList.<String>of());
CxxPreprocessAndCompile cxxCompile =
cxxSourceRuleFactory.requireCompileBuildRule(
nameCompile,
cxxSourceCompile);
assertEquals(ImmutableSortedSet.<BuildRule>of(dep), cxxCompile.getDeps());
String namePreprocessAndCompile = "foo/bar.cpp";
CxxSource cxxSourcePreprocessAndCompile = CxxSource.of(
CxxSource.Type.CXX,
input,
ImmutableList.<String>of());
CxxPreprocessAndCompile cxxPreprocessAndCompile =
cxxSourceRuleFactory.requirePreprocessAndCompileBuildRule(
namePreprocessAndCompile,
cxxSourcePreprocessAndCompile,
CxxPreprocessMode.SEPARATE);
assertEquals(ImmutableSortedSet.<BuildRule>of(dep), cxxPreprocessAndCompile.getDeps());
}
// PicType.PIC must add "-fPIC" to the compiler command (and PDC must not),
// for both the compile-only and preprocess-and-compile rule flavors.
@Test
@SuppressWarnings("PMD.UseAssertTrueInsteadOfAssertEquals")
public void createCompileBuildRulePicOption() {
BuildTarget target = BuildTargetFactory.newInstance("//foo:bar");
BuildRuleParams params = new FakeBuildRuleParamsBuilder(target).build();
BuildRuleResolver resolver =
new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer());
CxxSourceRuleFactory.Builder cxxSourceRuleFactoryBuilder = CxxSourceRuleFactory.builder()
.setParams(params)
.setResolver(resolver)
.setPathResolver(new SourcePathResolver(resolver))
.setCxxPlatform(CXX_PLATFORM);
CxxSourceRuleFactory cxxSourceRuleFactoryPDC = cxxSourceRuleFactoryBuilder
.setPicType(CxxSourceRuleFactory.PicType.PDC)
.build();
CxxSourceRuleFactory cxxSourceRuleFactoryPIC = cxxSourceRuleFactoryBuilder
.setPicType(CxxSourceRuleFactory.PicType.PIC)
.build();
String name = "foo/bar.ii";
CxxSource cxxSource = CxxSource.of(
CxxSource.Type.CXX_CPP_OUTPUT,
new FakeSourcePath(name),
ImmutableList.<String>of());
// Verify building a non-PIC compile rule does *not* have the "-fPIC" flag and has the
// expected compile target.
CxxPreprocessAndCompile noPicCompile =
cxxSourceRuleFactoryPDC.requireCompileBuildRule(name, cxxSource);
assertFalse(noPicCompile.makeMainStep().getCommand().contains("-fPIC"));
assertEquals(
cxxSourceRuleFactoryPDC.createCompileBuildTarget(name),
noPicCompile.getBuildTarget());
// Verify building a PIC compile rule *does* have the "-fPIC" flag and has the
// expected compile target.
CxxPreprocessAndCompile picCompile =
cxxSourceRuleFactoryPIC.requireCompileBuildRule(name, cxxSource);
assertTrue(picCompile.makeMainStep().getCommand().contains("-fPIC"));
assertEquals(
cxxSourceRuleFactoryPIC.createCompileBuildTarget(name),
picCompile.getBuildTarget());
name = "foo/bar.cpp";
cxxSource = CxxSource.of(
CxxSource.Type.CXX,
new FakeSourcePath(name),
ImmutableList.<String>of());
// Verify building a non-PIC compile rule does *not* have the "-fPIC" flag and has the
// expected compile target.
CxxPreprocessAndCompile noPicPreprocessAndCompile =
cxxSourceRuleFactoryPDC.requirePreprocessAndCompileBuildRule(
name,
cxxSource,
CxxPreprocessMode.SEPARATE);
assertFalse(noPicPreprocessAndCompile.makeMainStep().getCommand().contains("-fPIC"));
assertEquals(
cxxSourceRuleFactoryPDC.createCompileBuildTarget(name),
noPicPreprocessAndCompile.getBuildTarget());
// Verify building a PIC compile rule *does* have the "-fPIC" flag and has the
// expected compile target.
CxxPreprocessAndCompile picPreprocessAndCompile =
cxxSourceRuleFactoryPIC.requirePreprocessAndCompileBuildRule(
name,
cxxSource,
CxxPreprocessMode.SEPARATE);
assertTrue(picPreprocessAndCompile.makeMainStep().getCommand().contains("-fPIC"));
assertEquals(
cxxSourceRuleFactoryPIC.createCompileBuildTarget(name),
picPreprocessAndCompile.getBuildTarget());
}
// A configured prefix header must be forwarded to the compiler as
// "-include <resolved path>".
@Test
public void checkPrefixHeaderIsIncluded() {
BuildRuleResolver buildRuleResolver =
new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer());
BuildTarget target = BuildTargetFactory.newInstance("//:target");
BuildRuleParams params = new FakeBuildRuleParamsBuilder(target).build();
ProjectFilesystem filesystem = new AllExistingProjectFilesystem();
BuckConfig buckConfig = FakeBuckConfig.builder().setFilesystem(filesystem).build();
CxxPlatform platform = DefaultCxxPlatforms.build(new CxxBuckConfig(buckConfig));
String prefixHeaderName = "test.pch";
SourcePath prefixHeaderSourcePath = new FakeSourcePath(filesystem, prefixHeaderName);
CxxSourceRuleFactory cxxSourceRuleFactory = CxxSourceRuleFactory.builder()
.setParams(params)
.setResolver(buildRuleResolver)
.setPathResolver(new SourcePathResolver(buildRuleResolver))
.setCxxPlatform(platform)
.setPrefixHeader(prefixHeaderSourcePath)
.setPicType(CxxSourceRuleFactory.PicType.PDC)
.build();
String objcSourceName = "test.m";
CxxSource objcSource = CxxSource.of(
CxxSource.Type.OBJC,
new FakeSourcePath(objcSourceName),
ImmutableList.<String>of());
CxxPreprocessAndCompile objcPreprocessAndCompile =
cxxSourceRuleFactory.requirePreprocessAndCompileBuildRule(
objcSourceName,
objcSource,
CxxPreprocessMode.SEPARATE);
ImmutableList<String> explicitPrefixHeaderRelatedFlags = ImmutableList.of(
"-include", filesystem.resolve(prefixHeaderName).toString());
CxxPreprocessAndCompileStep step = objcPreprocessAndCompile.makeMainStep();
assertContains(step.getCommand(), explicitPrefixHeaderRelatedFlags);
}
// Requiring the same source/rule twice must return the same build target
// (the rule is fetched from the resolver, not rebuilt).
@Test
public void duplicateRuleFetchedFromResolver() {
BuildRuleResolver buildRuleResolver =
new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer());
BuildTarget target = BuildTargetFactory.newInstance("//:target");
BuildRuleParams params = new FakeBuildRuleParamsBuilder(target).build();
ProjectFilesystem filesystem = new AllExistingProjectFilesystem();
BuckConfig buckConfig = FakeBuckConfig.builder().setFilesystem(filesystem).build();
CxxPlatform platform = DefaultCxxPlatforms.build(new CxxBuckConfig(buckConfig));
CxxSourceRuleFactory cxxSourceRuleFactory = CxxSourceRuleFactory.builder()
.setParams(params)
.setResolver(buildRuleResolver)
.setPathResolver(new SourcePathResolver(buildRuleResolver))
.setCxxPlatform(platform)
.setPicType(CxxSourceRuleFactory.PicType.PDC)
.build();
String objcSourceName = "test.m";
CxxSource objcSource = CxxSource.of(
CxxSource.Type.OBJC,
new FakeSourcePath(objcSourceName),
ImmutableList.<String>of());
CxxPreprocessAndCompile objcCompile =
cxxSourceRuleFactory.requirePreprocessAndCompileBuildRule(
objcSourceName,
objcSource,
CxxPreprocessMode.SEPARATE);
// Make sure we can get the same build rule twice.
CxxPreprocessAndCompile objcCompile2 =
cxxSourceRuleFactory.requirePreprocessAndCompileBuildRule(
objcSourceName,
objcSource,
CxxPreprocessMode.SEPARATE);
assertEquals(objcCompile.getBuildTarget(), objcCompile2.getBuildTarget());
}
}
// Parameterized over source file extensions: checks that compile and
// preprocess rules receive the flags appropriate to each CxxSource.Type
// (c vs c++ vs assembler, and their ObjC variants).
@RunWith(Parameterized.class)
public static class CorrectFlagsAreUsedForCompileAndPreprocessBuildRules {
// Fake tool paths referenced from the [cxx] buckconfig section below.
private static final SourcePath as = new FakeSourcePath("as");
private static final SourcePath cc = new FakeSourcePath("cc");
private static final SourcePath cpp = new FakeSourcePath("cpp");
private static final SourcePath cxx = new FakeSourcePath("cxx");
private static final SourcePath cxxpp = new FakeSourcePath("cxxpp");
// Per-tool flag sets; each is duplicated so indexOfSubList-style checks
// cannot match a single stray occurrence.
private static final ImmutableList<String> asflags = ImmutableList.of("-asflag", "-asflag");
private static final ImmutableList<String> cflags = ImmutableList.of("-cflag", "-cflag");
private static final ImmutableList<String> cxxflags = ImmutableList.of("-cxxflag", "-cxxflag");
private static final ImmutableList<String> asppflags =
ImmutableList.of("-asppflag", "-asppflag");
private static final ImmutableList<String> cppflags = ImmutableList.of("-cppflag", "-cppflag");
private static final ImmutableList<String> cxxppflags =
ImmutableList.of("-cxxppflag", "-cxxppflag");
// Flags passed explicitly to the factory / preprocessor input, as opposed
// to the platform-level ones above.
private static final ImmutableList<String> explicitCompilerFlags =
ImmutableList.of("-explicit-compilerflag");
private static final ImmutableList<String> explicitCppflags =
ImmutableList.of("-explicit-cppflag");
private static final ImmutableList<String> explicitCxxppflags =
ImmutableList.of("-explicit-cxxppflag");
private static final ImmutableList<String> empty = ImmutableList.of();
// Rows: {source name, expected type-specific compiler flags, expected
// explicit compiler flags, expected type-specific pp flags, expected
// explicit pp flags}.
@Parameterized.Parameters(name = "{0}")
public static Collection<Object[]> data() {
return Arrays.asList(new Object[][]{
{"test.i", cflags, explicitCompilerFlags, empty, empty},
{"test.c", cflags, explicitCompilerFlags, cppflags, explicitCppflags},
{"test.ii", cxxflags, explicitCompilerFlags, empty, empty},
{"test.cpp", cxxflags, explicitCompilerFlags, cxxppflags, explicitCxxppflags},
// asm do not have compiler specific flags, nor (non-as) file type specific flags.
{"test.s", empty, empty, empty, empty},
{"test.S", empty, empty, asppflags, explicitCppflags},
// ObjC
{"test.mi", cflags, explicitCompilerFlags, empty, empty},
{"test.m", cflags, explicitCompilerFlags, cppflags, explicitCppflags},
{"test.mii", cxxflags, explicitCompilerFlags, empty, empty},
{"test.mm", cxxflags, explicitCompilerFlags, cxxppflags, explicitCxxppflags},
});
}
@Parameterized.Parameter(0)
public String sourceName;
@Parameterized.Parameter(1)
public ImmutableList<String> expectedTypeSpecificFlags;
@Parameterized.Parameter(2)
public ImmutableList<String> expectedCompilerFlags;
@Parameterized.Parameter(3)
public ImmutableList<String> expectedTypeSpecificPreprocessorFlags;
@Parameterized.Parameter(4)
public ImmutableList<String> expectedPreprocessorFlags;
// Some common boilerplate.
private BuildRuleResolver buildRuleResolver =
new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer());
private SourcePathResolver sourcePathResolver = new SourcePathResolver(buildRuleResolver);
private BuildTarget target = BuildTargetFactory.newInstance("//:target");
private BuildRuleParams params = new FakeBuildRuleParamsBuilder(target).build();
private Joiner space = Joiner.on(" ");
// Preprocess rule: expects the type-specific and explicit preprocessor
// flags, plus any per-file flags, to appear in the preprocessor command.
// Skipped for non-preprocessable types.
@Test
public void forPreprocess() {
CxxSource.Type sourceType =
CxxSource.Type.fromExtension(MorePaths.getFileExtension(Paths.get(sourceName))).get();
Assume.assumeTrue(sourceType.isPreprocessable());
CxxPreprocessorInput cxxPreprocessorInput =
CxxPreprocessorInput.builder()
.putAllPreprocessorFlags(CxxSource.Type.C, explicitCppflags)
.putAllPreprocessorFlags(CxxSource.Type.OBJC, explicitCppflags)
.putAllPreprocessorFlags(CxxSource.Type.ASSEMBLER_WITH_CPP, explicitCppflags)
.putAllPreprocessorFlags(CxxSource.Type.CXX, explicitCxxppflags)
.putAllPreprocessorFlags(CxxSource.Type.OBJCXX, explicitCxxppflags)
.build();
BuckConfig buckConfig = FakeBuckConfig.builder()
.setSections(
ImmutableMap.of(
"cxx", ImmutableMap.<String, String>builder()
.put("asppflags", space.join(asppflags))
.put("cpp", sourcePathResolver.deprecatedGetPath(cpp).toString())
.put("cppflags", space.join(cppflags))
.put("cxxpp", sourcePathResolver.deprecatedGetPath(cxxpp).toString())
.put("cxxppflags", space.join(cxxppflags))
.build()))
.setFilesystem(PROJECT_FILESYSTEM)
.build();
CxxPlatform platform = DefaultCxxPlatforms.build(new CxxBuckConfig(buckConfig));
CxxSourceRuleFactory cxxSourceRuleFactory = CxxSourceRuleFactory.builder()
.setParams(params)
.setResolver(buildRuleResolver)
.setPathResolver(sourcePathResolver)
.setCxxPlatform(platform)
.addCxxPreprocessorInput(cxxPreprocessorInput)
.setPicType(CxxSourceRuleFactory.PicType.PDC)
.build();
List<String> perFileFlags = ImmutableList.of("-per-file-flag", "-and-another-per-file-flag");
CxxSource cSource = CxxSource.of(sourceType, new FakeSourcePath(sourceName), perFileFlags);
CxxPreprocessAndCompile cPreprocess =
cxxSourceRuleFactory.requirePreprocessBuildRule(sourceName, cSource);
ImmutableList<String> cPreprocessCommand =
cPreprocess.getPreprocessorDelegate().get().getCommand(CxxToolFlags.of());
assertContains(cPreprocessCommand, expectedTypeSpecificPreprocessorFlags);
assertContains(cPreprocessCommand, expectedPreprocessorFlags);
assertContains(cPreprocessCommand, perFileFlags);
}
// Compile rule: expects explicit and type-specific compiler flags, asflags,
// and per-file flags in the main step's command. Preprocessable types go
// through the combined preprocess-and-compile rule.
@Test
public void forCompile() {
CxxSource.Type sourceType =
CxxSource.Type.fromExtension(MorePaths.getFileExtension(Paths.get(sourceName))).get();
BuckConfig buckConfig = FakeBuckConfig.builder()
.setSections(
ImmutableMap.of(
"cxx", ImmutableMap.<String, String>builder()
.put("as", sourcePathResolver.deprecatedGetPath(as).toString())
.put("asflags", space.join(asflags))
.put("cc", sourcePathResolver.deprecatedGetPath(cc).toString())
.put("cflags", space.join(cflags))
.put("cxx", sourcePathResolver.deprecatedGetPath(cxx).toString())
.put("cxxflags", space.join(cxxflags))
.build()))
.setFilesystem(PROJECT_FILESYSTEM)
.build();
CxxPlatform platform = DefaultCxxPlatforms.build(
new CxxBuckConfig(buckConfig));
CxxSourceRuleFactory cxxSourceRuleFactory = CxxSourceRuleFactory.builder()
.setParams(params)
.setResolver(buildRuleResolver)
.setPathResolver(sourcePathResolver)
.setCxxPlatform(platform)
.setCompilerFlags(explicitCompilerFlags)
.setPicType(CxxSourceRuleFactory.PicType.PDC)
.build();
List<String> perFileFlags = ImmutableList.of("-per-file-flag");
CxxSource source = CxxSource.of(sourceType, new FakeSourcePath(sourceName), perFileFlags);
CxxPreprocessAndCompile rule;
if (source.getType().isPreprocessable()) {
rule = cxxSourceRuleFactory.requirePreprocessAndCompileBuildRule(
sourceName,
source,
CxxPreprocessMode.SEPARATE);
} else {
rule = cxxSourceRuleFactory.requireCompileBuildRule(sourceName, source);
}
ImmutableList<String> command = rule.makeMainStep().getCommand();
assertContains(command, expectedCompilerFlags);
assertContains(command, expectedTypeSpecificFlags);
assertContains(command, asflags);
assertContains(command, perFileFlags);
}
}
// Parameterized over already-preprocessed source extensions: the compile rule
// must tell the compiler the source language explicitly via "-x <language>".
@RunWith(Parameterized.class)
public static class LanguageFlagsArePassed {
  // Each row maps a file name to the language string expected after "-x".
  @Parameterized.Parameters(name = "{0} -> {1}")
  public static Collection<Object[]> data() {
    return Arrays.asList(new Object[][]{
        {"foo/bar.ii", "c++-cpp-output"},
        {"foo/bar.mi", "objective-c-cpp-output"},
        {"foo/bar.mii", "objective-c++-cpp-output"},
        {"foo/bar.i", "cpp-output"},
    });
  }
  @Parameterized.Parameter(0)
  public String name;
  @Parameterized.Parameter(1)
  public String expected;
  @Test
  public void test() {
    // Minimal factory wired against the shared default platform.
    BuildRuleResolver resolver =
        new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer());
    SourcePathResolver pathResolver = new SourcePathResolver(resolver);
    BuildTarget target = BuildTargetFactory.newInstance("//:target");
    BuildRuleParams params = new FakeBuildRuleParamsBuilder(target).build();
    CxxSourceRuleFactory factory = CxxSourceRuleFactory.builder()
        .setParams(params)
        .setResolver(resolver)
        .setPathResolver(pathResolver)
        .setCxxPlatform(CXX_PLATFORM)
        .setPicType(CxxSourceRuleFactory.PicType.PDC)
        .build();
    // Derive the source type from the file extension, then build a compile
    // rule and inspect its command line.
    SourcePath input = new PathSourcePath(PROJECT_FILESYSTEM, target.getBasePath().resolve(name));
    CxxSource source = CxxSource.of(
        CxxSource.Type.fromExtension(MorePaths.getFileExtension(Paths.get(name))).get(),
        input,
        ImmutableList.<String>of());
    CxxPreprocessAndCompile compileRule = factory.createCompileBuildRule(name, source);
    assertThat(compileRule.makeMainStep().getCommand(), Matchers.hasItems("-x", expected));
  }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.tests.integration.mqtt.imported;
import javax.jms.ConnectionFactory;
import javax.net.ssl.KeyManager;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import java.io.File;
import java.io.IOException;
import java.security.ProtectionDomain;
import java.security.SecureRandom;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import io.netty.handler.codec.mqtt.MqttMessage;
import io.netty.handler.codec.mqtt.MqttPublishMessage;
import org.apache.activemq.artemis.api.core.ActiveMQException;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.TransportConfiguration;
import org.apache.activemq.artemis.core.config.Configuration;
import org.apache.activemq.artemis.core.protocol.mqtt.MQTTInterceptor;
import org.apache.activemq.artemis.core.protocol.mqtt.MQTTProtocolManager;
import org.apache.activemq.artemis.core.protocol.mqtt.MQTTSessionState;
import org.apache.activemq.artemis.core.remoting.impl.AbstractAcceptor;
import org.apache.activemq.artemis.core.remoting.impl.netty.TransportConstants;
import org.apache.activemq.artemis.core.security.Role;
import org.apache.activemq.artemis.core.server.ActiveMQServer;
import org.apache.activemq.artemis.core.settings.HierarchicalRepository;
import org.apache.activemq.artemis.core.settings.impl.AddressSettings;
import org.apache.activemq.artemis.jms.client.ActiveMQConnectionFactory;
import org.apache.activemq.artemis.spi.core.protocol.ProtocolManager;
import org.apache.activemq.artemis.spi.core.protocol.RemotingConnection;
import org.apache.activemq.artemis.spi.core.remoting.Acceptor;
import org.apache.activemq.artemis.spi.core.security.ActiveMQJAASSecurityManager;
import org.apache.activemq.artemis.tests.util.ActiveMQTestBase;
import org.fusesource.mqtt.client.MQTT;
import org.fusesource.mqtt.client.Tracer;
import org.fusesource.mqtt.codec.MQTTFrame;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.util.Collections.singletonList;
public class MQTTTestSupport extends ActiveMQTestBase {
// Broker under test; created in startBroker() and nulled in stopBroker().
protected ActiveMQServer server;
private static final Logger LOG = LoggerFactory.getLogger(MQTTTestSupport.class);
// Port the MQTT acceptor listens on (default is the standard MQTT TCP port).
protected int port = 1883;
// Core JMS connection factory, created in setUp() via createJMSConnection().
protected ConnectionFactory cf;
// Collects asynchronous failures from tests; cleared on every setUp().
protected LinkedList<Throwable> exceptions = new LinkedList<>();
protected boolean persistent;
protected String protocolConfig;
protected String protocolScheme;
protected boolean useSSL;
protected static final int NUM_MESSAGES = 250;
// MQTT QoS levels.
public static final int AT_MOST_ONCE = 0;
public static final int AT_LEAST_ONCE = 1;
public static final int EXACTLY_ONCE = 2;
// Test credentials installed by configureBrokerSecurity() when
// isSecurityEnabled() returns true.
protected String noprivUser = "noprivs";
protected String noprivPass = "noprivs";
protected String browseUser = "browser";
protected String browsePass = "browser";
protected String guestUser = "guest";
protected String guestPass = "guest";
protected String fullUser = "user";
protected String fullPass = "pass";
// Exposes the current test method's name (used for queue/topic names).
@Rule
public TestName name = new TestName();
// Defaults to plain (non-SSL) MQTT.
public MQTTTestSupport() {
this.protocolScheme = "mqtt";
this.useSSL = false;
}
/**
 * Resolves the module base directory from this class's code-source location,
 * so keystore resources under src/test/resources can be located.
 */
public File basedir() throws IOException {
ProtectionDomain protectionDomain = getClass().getProtectionDomain();
return new File(new File(protectionDomain.getCodeSource().getLocation().getPath()), "../..").getCanonicalFile();
}
// Returns the currently executing test method's name.
@Override
public String getName() {
return name.getMethodName();
}
// Accessor for the broker under test.
public ActiveMQServer getServer() {
return server;
}
@Override
@Before
public void setUp() throws Exception {
// Install JVM-wide SSL key/trust store properties *before* the broker and
// any SSL clients start; tearDown() clears them again.
String basedir = basedir().getPath();
System.setProperty("javax.net.ssl.trustStore", basedir + "/src/test/resources/client.keystore");
System.setProperty("javax.net.ssl.trustStorePassword", "password");
System.setProperty("javax.net.ssl.trustStoreType", "jks");
System.setProperty("javax.net.ssl.keyStore", basedir + "/src/test/resources/server.keystore");
System.setProperty("javax.net.ssl.keyStorePassword", "password");
System.setProperty("javax.net.ssl.keyStoreType", "jks");
exceptions.clear();
startBroker();
createJMSConnection();
}
@Override
@After
public void tearDown() throws Exception {
  // Remove every SSL system property installed by setUp() so subsequent
  // tests start from a clean JVM-wide state, then stop the broker.
  String[] sslProps = {
      "javax.net.ssl.trustStore",
      "javax.net.ssl.trustStorePassword",
      "javax.net.ssl.trustStoreType",
      "javax.net.ssl.keyStore",
      "javax.net.ssl.keyStorePassword",
      "javax.net.ssl.keyStoreType",
  };
  for (String prop : sslProps) {
    System.clearProperty(prop);
  }
  stopBroker();
  super.tearDown();
}
/**
 * Creates and configures the broker: base test setup, server instance,
 * CORE and MQTT acceptors, permissive address settings, and (optionally)
 * security. Does not start the server; see startBroker().
 */
public void configureBroker() throws Exception {
// TODO Add SSL
super.setUp();
server = createServerForMQTT();
addCoreConnector();
addMQTTConnector();
AddressSettings addressSettings = new AddressSettings();
// Effectively unlimited address size so paging does not interfere with tests.
addressSettings.setMaxSizeBytes(999999999);
addressSettings.setAutoCreateQueues(true);
addressSettings.setAutoCreateAddresses(true);
configureBrokerSecurity(server);
// Apply the settings above to every address.
server.getAddressSettingsRepository().addMatch("#", addressSettings);
}
/**
 * Copied from org.apache.activemq.artemis.tests.integration.amqp.AmqpClientTestSupport#configureBrokerSecurity()
 *
 * Registers the test users/roles and enables broker security, but only when
 * isSecurityEnabled() returns true; otherwise security is explicitly disabled.
 */
protected void configureBrokerSecurity(ActiveMQServer server) {
if (isSecurityEnabled()) {
ActiveMQJAASSecurityManager securityManager = (ActiveMQJAASSecurityManager) server.getSecurityManager();
// User additions
securityManager.getConfiguration().addUser(noprivUser, noprivPass);
securityManager.getConfiguration().addRole(noprivUser, "nothing");
securityManager.getConfiguration().addUser(browseUser, browsePass);
securityManager.getConfiguration().addRole(browseUser, "browser");
securityManager.getConfiguration().addUser(guestUser, guestPass);
securityManager.getConfiguration().addRole(guestUser, "guest");
securityManager.getConfiguration().addUser(fullUser, fullPass);
securityManager.getConfiguration().addRole(fullUser, "full");
// Configure roles
// Each Role takes a set of boolean permission flags; see the Role
// constructor for the flag order. "full" grants everything.
HierarchicalRepository<Set<Role>> securityRepository = server.getSecurityRepository();
HashSet<Role> value = new HashSet<>();
value.add(new Role("nothing", false, false, false, false, false, false, false, false, false, false));
value.add(new Role("browser", false, false, false, false, false, false, false, true, false, false));
value.add(new Role("guest", false, true, false, false, false, false, false, true, false, false));
value.add(new Role("full", true, true, true, true, true, true, true, true, true, true));
// Roles apply only to this test's queue name.
securityRepository.addMatch(getQueueName(), value);
server.getConfiguration().setSecurityEnabled(true);
} else {
server.getConfiguration().setSecurityEnabled(false);
}
}
// Configures, starts, and waits (up to 10s) for the broker to become active.
public void startBroker() throws Exception {
configureBroker();
server.start();
server.waitForActivation(10, TimeUnit.SECONDS);
}
// Creates the core JMS connection factory pointed at the Netty connector.
public void createJMSConnection() throws Exception {
cf = new ActiveMQConnectionFactory(false, new TransportConfiguration(ActiveMQTestBase.NETTY_CONNECTOR_FACTORY));
}
// Builds a persistent server with MQTT in/out interceptors installed and
// DLA/EXPIRY addresses configured for every address.
// NOTE(review): "MQTTOutoingInterceptor" looks misspelled ("Outoing") — it
// must match that class's actual name elsewhere in this file; confirm before
// renaming.
private ActiveMQServer createServerForMQTT() throws Exception {
Configuration defaultConfig = createDefaultConfig(true).setIncomingInterceptorClassNames(singletonList(MQTTIncomingInterceptor.class.getName())).setOutgoingInterceptorClassNames(singletonList(MQTTOutoingInterceptor.class.getName()));
AddressSettings addressSettings = new AddressSettings();
addressSettings.setDeadLetterAddress(SimpleString.toSimpleString("DLA"));
addressSettings.setExpiryAddress(SimpleString.toSimpleString("EXPIRY"));
defaultConfig.getAddressesSettings().put("#", addressSettings);
return createServer(true, defaultConfig);
}
/**
 * Adds the CORE (Netty) acceptor on port 5445 so core JMS clients can
 * connect alongside MQTT. Overrides can adjust the configuration as needed.
 */
protected void addCoreConnector() throws Exception {
  Map<String, Object> params = new HashMap<>();
  params.put(TransportConstants.PORT_PROP_NAME, "" + 5445);
  params.put(TransportConstants.PROTOCOLS_PROP_NAME, "CORE");
  TransportConfiguration transportConfiguration = new TransportConfiguration(NETTY_ACCEPTOR_FACTORY, params);
  server.getConfiguration().getAcceptorConfigurations().add(transportConfiguration);
  // Previously this logged the MQTT protocol scheme, which was misleading
  // for the CORE acceptor.
  LOG.info("Added CORE connector to broker");
}
/**
 * Adds the primary MQTT acceptor. Overrides of this method can add
 * additional configuration options or add multiple MQTT transport
 * connectors as needed; the port variable is always supposed to be assigned
 * the primary MQTT connector's port.
 */
protected void addMQTTConnector() throws Exception {
  // The acceptor is fully configured via the URI below; the HashMap of
  // transport params the old code built here was never used, so it has
  // been removed.
  server.getConfiguration().addAcceptorConfiguration("MQTT", "tcp://localhost:" + port + "?protocols=MQTT;anycastPrefix=anycast:;multicastPrefix=multicast:");
  LOG.info("Added connector {} to broker", getProtocolScheme());
}
/**
 * Stops the broker if it is running and clears the reference. Safe to call
 * when the broker was never started or was already stopped: the old code
 * dereferenced {@code server} unconditionally and threw a
 * NullPointerException if setUp() had failed before assigning it.
 */
public void stopBroker() throws Exception {
  if (server != null && server.isStarted()) {
    server.stop();
    server = null;
  }
}
// Per-test queue name: fully-qualified class name plus test method name.
protected String getQueueName() {
return getClass().getName() + "." + name.getMethodName();
}
// Per-test topic name; currently identical to getQueueName().
protected String getTopicName() {
return getClass().getName() + "." + name.getMethodName();
}
/**
 * Initialize an MQTTClientProvider instance. By default this method uses the port that's
 * assigned to be the TCP based port using the base version of addMQTTConnector. A subclass
 * can either change the value of port or override this method to assign the correct port.
 *
 * @param provider the MQTTClientProvider instance to initialize.
 * @throws Exception if an error occurs during initialization.
 */
protected void initializeConnection(MQTTClientProvider provider) throws Exception {
if (!isUseSSL()) {
provider.connect("tcp://localhost:" + port);
} else {
// Trust-all SSL context (DefaultTrustManager) — acceptable only because
// this is test code.
SSLContext ctx = SSLContext.getInstance("TLS");
ctx.init(new KeyManager[0], new TrustManager[]{new DefaultTrustManager()}, new SecureRandom());
provider.setSslContext(ctx);
provider.connect("ssl://localhost:" + port);
}
}
// Simple accessors for the transport configuration used by the tests.
public String getProtocolScheme() {
return protocolScheme;
}
public void setProtocolScheme(String scheme) {
this.protocolScheme = scheme;
}
public boolean isUseSSL() {
return this.useSSL;
}
public void setUseSSL(boolean useSSL) {
this.useSSL = useSSL;
}
public boolean isPersistent() {
return persistent;
}
public int getPort() {
return this.port;
}
// Feature toggles; subclasses override to enable scheduling or security.
public boolean isSchedulerSupportEnabled() {
return false;
}
public boolean isSecurityEnabled() {
return false;
}
// A unit of work that may throw; used with within() for retry-until-deadline.
protected interface Task {
void run() throws Exception;
}
/**
 * Repeatedly runs {@code task} until it completes without throwing, or the
 * time budget is exhausted. On timeout the last failure is rethrown as-is
 * when it is a RuntimeException or Error, otherwise wrapped in a
 * RuntimeException. Between attempts it sleeps a tenth of the budget (capped
 * by the remaining time).
 */
protected void within(int time, TimeUnit unit, Task task) throws InterruptedException {
  final long budgetMs = unit.toMillis(time);
  final long giveUpAt = System.currentTimeMillis() + budgetMs;
  for (;;) {
    try {
      task.run();
      return;
    } catch (Throwable failure) {
      long msLeft = giveUpAt - System.currentTimeMillis();
      if (msLeft <= 0) {
        if (failure instanceof RuntimeException) {
          throw (RuntimeException) failure;
        }
        if (failure instanceof Error) {
          throw (Error) failure;
        }
        throw new RuntimeException(failure);
      }
      Thread.sleep(Math.min(budgetMs / 10, msLeft));
    }
  }
}
// Factory for the MQTT client provider; subclasses may substitute another.
protected MQTTClientProvider getMQTTClientProvider() {
return new FuseMQTTClientProvider();
}
// Convenience: anonymous, non-clean-session MQTT 3.1.1 connection.
protected MQTT createMQTTConnection() throws Exception {
MQTT client = createMQTTConnection(null, false);
client.setVersion("3.1.1");
return client;
}
/**
 * Creates an MQTT client configured for either SSL or plain TCP, depending
 * on this test's transport mode.
 *
 * @param clientId optional client id (null for none)
 * @param clean    whether to request a clean session
 */
protected MQTT createMQTTConnection(String clientId, boolean clean) throws Exception {
  return isUseSSL()
      ? createMQTTSslConnection(clientId, clean)
      : createMQTTTcpConnection(clientId, clean);
}
// Builds a plain-TCP MQTT client pinned to protocol version 3.1.1, with
// tracing enabled and no automatic reconnects.
private MQTT createMQTTTcpConnection(String clientId, boolean clean) throws Exception {
MQTT mqtt = new MQTT();
mqtt.setConnectAttemptsMax(1);
mqtt.setReconnectAttemptsMax(0);
mqtt.setTracer(createTracer());
mqtt.setVersion("3.1.1");
if (clientId != null) {
mqtt.setClientId(clientId);
}
mqtt.setCleanSession(clean);
mqtt.setHost("localhost", port);
return mqtt;
}
/**
 * Looks up the broker-side MQTT session-state map through the "MQTT" acceptor.
 * Returns an empty map when the acceptor or its protocol manager is not of the
 * expected concrete type.
 */
public Map<String, MQTTSessionState> getSessions() {
    Acceptor acceptor = server.getRemotingService().getAcceptor("MQTT");
    if (!(acceptor instanceof AbstractAcceptor)) {
        return Collections.emptyMap();
    }
    ProtocolManager protocolManager = ((AbstractAcceptor) acceptor).getProtocolMap().get("MQTT");
    if (!(protocolManager instanceof MQTTProtocolManager)) {
        return Collections.emptyMap();
    }
    return ((MQTTProtocolManager) protocolManager).getSessionStates();
}
/**
 * Builds an SSL MQTT client aimed at this broker, trusting any broker
 * certificate via the test-only {@link DefaultTrustManager}.
 *
 * @param clientId client identifier, or {@code null} for an anonymous client
 * @param clean    whether to request a clean session
 */
private MQTT createMQTTSslConnection(String clientId, boolean clean) throws Exception {
    MQTT mqtt = new MQTT();
    mqtt.setConnectAttemptsMax(1);
    mqtt.setReconnectAttemptsMax(0);
    mqtt.setTracer(createTracer());
    // FIX: pin MQTT 3.1.1 to match createMQTTTcpConnection(); previously only
    // the TCP path set the version, so SSL clients silently used the library
    // default protocol version.
    mqtt.setVersion("3.1.1");
    mqtt.setHost("ssl://localhost:" + port);
    if (clientId != null) {
        mqtt.setClientId(clientId);
    }
    mqtt.setCleanSession(clean);
    SSLContext ctx = SSLContext.getInstance("TLS");
    ctx.init(new KeyManager[0], new TrustManager[]{new DefaultTrustManager()}, new SecureRandom());
    mqtt.setSslContext(ctx);
    return mqtt;
}
/**
 * Builds a client-side Tracer that logs every MQTT frame sent and received,
 * to aid debugging of failing tests.
 */
protected Tracer createTracer() {
    return new Tracer() {
        @Override
        public void onReceive(MQTTFrame frame) {
            LOG.info("Client Received:\n" + frame);
        }
        @Override
        public void onSend(MQTTFrame frame) {
            LOG.info("Client Sent:\n" + frame);
        }
        @Override
        public void debug(String message, Object... args) {
            // Tracer debug lines are forwarded to the test log at INFO level.
            LOG.info(String.format(message, args));
        }
    };
}
/**
 * Trust manager that accepts every certificate chain.
 * NOTE(review): intentionally insecure — suitable only for tests; never use
 * in production code.
 */
static class DefaultTrustManager implements X509TrustManager {
    @Override
    public void checkClientTrusted(X509Certificate[] x509Certificates, String s) throws CertificateException {
        // Accept all client certificates.
    }
    @Override
    public void checkServerTrusted(X509Certificate[] x509Certificates, String s) throws CertificateException {
        // Accept all server certificates.
    }
    @Override
    public X509Certificate[] getAcceptedIssuers() {
        return new X509Certificate[0];
    }
}
/**
 * Interceptor that counts inbound MQTT PUBLISH packets.
 *
 * FIX: the counter is now an AtomicInteger — the previous bare {@code static int}
 * was incremented from broker connection threads without synchronization, so
 * increments could be lost under concurrency.  (Fully-qualified name used to
 * avoid touching the file's import block.)
 */
public static class MQTTIncomingInterceptor implements MQTTInterceptor {
    private static final java.util.concurrent.atomic.AtomicInteger messageCount =
            new java.util.concurrent.atomic.AtomicInteger();
    @Override
    public boolean intercept(MqttMessage packet, RemotingConnection connection) throws ActiveMQException {
        if (packet.getClass() == MqttPublishMessage.class) {
            messageCount.incrementAndGet();
        }
        return true; // always pass the packet through; this interceptor only observes
    }
    /** Resets the counter between tests. */
    public static void clear() {
        messageCount.set(0);
    }
    /** @return number of PUBLISH packets seen since the last {@link #clear()}. */
    public static int getMessageCount() {
        return messageCount.get();
    }
}
/**
 * Interceptor that counts outbound MQTT PUBLISH packets.
 * (Class name keeps the historical "Outoing" spelling — renaming would break callers.)
 *
 * FIX: the counter is now an AtomicInteger — the previous bare {@code static int}
 * was incremented from broker connection threads without synchronization, so
 * increments could be lost under concurrency.
 */
public static class MQTTOutoingInterceptor implements MQTTInterceptor {
    private static final java.util.concurrent.atomic.AtomicInteger messageCount =
            new java.util.concurrent.atomic.AtomicInteger();
    @Override
    public boolean intercept(MqttMessage packet, RemotingConnection connection) throws ActiveMQException {
        if (packet.getClass() == MqttPublishMessage.class) {
            messageCount.incrementAndGet();
        }
        return true; // always pass the packet through; this interceptor only observes
    }
    /** Resets the counter between tests. */
    public static void clear() {
        messageCount.set(0);
    }
    /** @return number of PUBLISH packets seen since the last {@link #clear()}. */
    public static int getMessageCount() {
        return messageCount.get();
    }
}
}
| |
import edu.princeton.cs.algs4.In;
import edu.princeton.cs.algs4.StdOut;
import edu.princeton.cs.algs4.Queue;
import java.util.Iterator;
import java.lang.IllegalArgumentException;
import java.lang.Math;
/**
 * Immutable n-by-n sliding-puzzle board (8-puzzle and generalizations).
 * Tiles are 1..n*n-1 in row-major goal order; 0 marks the blank square.
 */
public class Board
{
    private final int[][] board;
    // Base-(n*n) positional encoding of all tiles.  Equal boards always get
    // equal hashes, but the value overflows long once dimension >= 5, so hash
    // equality alone cannot prove board equality (see equals()).
    private long hash = 0;
    private int blankI; // row of the blank (0) tile
    private int blankJ; // column of the blank (0) tile
    private final int dimension;
    private int manhattan = -1; // cached Manhattan distance; -1 means "not computed yet"

    // Fold every tile into the rolling positional hash (row-major order).
    private void rollHash()
    {
        int sq = this.dimension * this.dimension;
        for (int i = 0; i < this.dimension; i++)
        {
            for (int j = 0; j < this.dimension; j++)
            {
                this.hash = this.board[i][j] + (sq * this.hash);
            }
        }
    }
    // construct a board from an n-by-n array of blocks
    // (where blocks[i][j] = block in row i, column j); the input is copied
    // defensively so later mutation of blocks cannot affect this board
    public Board(int[][] blocks)
    {
        if (blocks == null)
        {
            throw new IllegalArgumentException();
        }
        this.dimension = blocks.length;
        this.board = new int[this.dimension][this.dimension];
        for (int i = 0; i < this.dimension; i++)
        {
            for (int j = 0; j < this.dimension; j++)
            {
                this.board[i][j] = blocks[i][j];
                if (blocks[i][j] == 0)
                {
                    this.blankI = i;
                    this.blankJ = j;
                }
            }
        }
        rollHash();
    }
    // board dimension n
    public int dimension()
    {
        return this.dimension;
    }
    // number of blocks out of place (the blank tile is never counted)
    public int hamming()
    {
        int dist = 0;
        for (int i = 0; i < this.dimension; i++)
        {
            for (int j = 0; j < this.dimension; j++)
            {
                if (this.board[i][j] != 0 &&
                    this.board[i][j] != (this.dimension * i) + j + 1
                ) {
                    dist++;
                }
            }
        }
        return dist;
    }
    // Goal row of tile a.  Integer division already floors for positive
    // operands, so the previous Math.floor call was redundant.
    private int getI(int a)
    {
        return (a - 1) / this.dimension;
    }
    // Goal column of tile a.
    private int getJ(int a)
    {
        return (a - 1) % this.dimension;
    }
    // sum of Manhattan distances between blocks and goal; cached after the
    // first call (the board is immutable so the cache never goes stale)
    public int manhattan()
    {
        if (this.manhattan < 0)
        {
            this.manhattan = 0;
            for (int i = 0; i < this.dimension; i++)
            {
                for (int j = 0; j < this.dimension; j++)
                {
                    if (this.board[i][j] != 0)
                    {
                        int correct = (this.dimension * i) + j + 1;
                        if (this.board[i][j] != correct)
                        {
                            this.manhattan += Math.abs(getI(this.board[i][j]) - i);
                            this.manhattan += Math.abs(getJ(this.board[i][j]) - j);
                        }
                    }
                }
            }
        }
        return this.manhattan;
    }
    // is this board the goal board?
    public boolean isGoal()
    {
        return hamming() == 0;
    }
    // a board that is obtained by exchanging any pair of blocks
    // the idea is the fact that a board is solvable if has odd inversion,
    // if we swap any tile (except zero tile) left or right, then the
    // inversion will changed from odd to even or otherwise.
    // One of origin board or its twin must be solvable and the other not.
    public Board twin()
    {
        int[][] b = new int[this.dimension][this.dimension];
        int k = 0;
        if (this.board[k][0] == 0 || this.board[k][1] == 0)
        { // If 0,0 or 0,1 contains 0, then change swap index to next row
            k = 1;
        }
        for (int i = 0; i < this.dimension; i++)
        {
            for (int j = 0; j < this.dimension; j++)
            {
                if (i == k && j == 0)
                {
                    b[i][j] = this.board[k][1];
                }
                else if (i == k && j == 1)
                {
                    b[i][j] = this.board[k][0];
                }
                else
                {
                    b[i][j] = this.board[i][j];
                }
            }
        }
        return new Board(b);
    }
    // does this board equal y?
    public boolean equals(Object y)
    {
        if (y == this) return true;
        if (y == null) return false;
        if (y.getClass() != this.getClass()) return false;
        Board that = (Board) y;
        if (this.dimension != that.dimension) return false;
        // Equal boards always have equal rolled hashes, so this is a safe fast reject.
        if (this.hash != that.hash) return false;
        // FIX: the rolled hash overflows long for dimension >= 5, so two
        // distinct boards can collide; confirm equality tile by tile.
        for (int i = 0; i < this.dimension; i++)
        {
            for (int j = 0; j < this.dimension; j++)
            {
                if (this.board[i][j] != that.board[i][j]) return false;
            }
        }
        return true;
    }
    // FIX: equals() was overridden without hashCode(), breaking the
    // Object contract for hash-based collections.  Equal boards have equal
    // rolled hashes, so deriving hashCode from it is consistent with equals.
    public int hashCode()
    {
        return (int) (this.hash ^ (this.hash >>> 32));
    }
    // Return the board obtained by swapping the blank with the tile at (x, y);
    // (x, y) must be orthogonally adjacent to the blank.
    private Board getNeighbor(int x, int y)
    {
        int[][] neighbor = new int[this.dimension][this.dimension];
        for (int i = 0; i < this.dimension; i++)
        {
            for (int j = 0; j < this.dimension; j++)
            {
                if (i == this.blankI && j == this.blankJ)
                {
                    neighbor[i][j] = this.board[x][y];
                }
                else if (i == x && j == y)
                {
                    neighbor[i][j] = this.board[this.blankI][this.blankJ];
                }
                else
                {
                    neighbor[i][j] = this.board[i][j];
                }
            }
        }
        return new Board(neighbor);
    }
    // all neighboring boards, each obtained by sliding one adjacent tile into
    // the blank square (comments below fixed: they previously described the
    // opposite directions)
    public Iterable<Board> neighbors()
    {
        Queue<Board> neighbors = new Queue<>();
        if (this.blankI != this.dimension - 1)
        { // swap blank with the tile below it
            neighbors.enqueue(getNeighbor(this.blankI + 1, this.blankJ));
        }
        if (this.blankJ != this.dimension - 1)
        { // swap blank with the tile to its right
            neighbors.enqueue(getNeighbor(this.blankI, this.blankJ + 1));
        }
        if (this.blankI != 0)
        { // swap blank with the tile above it
            neighbors.enqueue(getNeighbor(this.blankI - 1, this.blankJ));
        }
        if (this.blankJ != 0)
        { // swap blank with the tile to its left
            neighbors.enqueue(getNeighbor(this.blankI, this.blankJ - 1));
        }
        return neighbors;
    }
    // string representation of this board
    // (in the output format specified below)
    public String toString()
    {
        StringBuilder s = new StringBuilder();
        s.append(this.dimension + "\n");
        for (int i = 0; i < this.dimension; i++)
        {
            for (int j = 0; j < this.dimension; j++)
            {
                s.append(String.format("%2d ", this.board[i][j]));
            }
            s.append("\n");
        }
        return s.toString();
    }
    // unit tests (not graded)
    public static void main(String[] args)
    {
        // create initial board from file
        In in = new In(args[0]);
        int n = in.readInt();
        int[][] blocks = new int[n][n];
        for (int i = 0; i < n; i++)
            for (int j = 0; j < n; j++)
                blocks[i][j] = in.readInt();
        Board initial = new Board(blocks);
        // FIX: the board was constructed but never used; print it so the
        // smoke test actually exercises the constructor and toString().
        StdOut.println(initial);
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.hive.metastore.thrift;
import com.facebook.presto.hive.HiveBucketProperty;
import com.facebook.presto.hive.HiveType;
import com.facebook.presto.hive.metastore.Column;
import com.facebook.presto.hive.metastore.Database;
import com.facebook.presto.hive.metastore.HiveColumnStatistics;
import com.facebook.presto.hive.metastore.HivePrivilegeInfo;
import com.facebook.presto.hive.metastore.Partition;
import com.facebook.presto.hive.metastore.PrincipalPrivileges;
import com.facebook.presto.hive.metastore.PrincipalType;
import com.facebook.presto.hive.metastore.Storage;
import com.facebook.presto.hive.metastore.StorageFormat;
import com.facebook.presto.hive.metastore.Table;
import com.facebook.presto.spi.PrestoException;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData;
import org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
import org.apache.hadoop.hive.metastore.api.Date;
import org.apache.hadoop.hive.metastore.api.DateColumnStatsData;
import org.apache.hadoop.hive.metastore.api.Decimal;
import org.apache.hadoop.hive.metastore.api.DecimalColumnStatsData;
import org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.LongColumnStatsData;
import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.StringColumnStatsData;
import javax.annotation.Nullable;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.time.LocalDate;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalDouble;
import java.util.OptionalLong;
import java.util.Set;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_INVALID_METADATA;
import static com.facebook.presto.hive.metastore.HivePrivilegeInfo.parsePrivilege;
import static com.google.common.base.Strings.emptyToNull;
import static com.google.common.base.Strings.nullToEmpty;
import static java.lang.String.format;
import static java.util.stream.Collectors.toList;
/**
 * Static helpers converting between Presto's Hive metastore model classes
 * ({@code com.facebook.presto.hive.metastore}) and the Thrift-generated
 * {@code org.apache.hadoop.hive.metastore.api} classes.
 */
public final class ThriftMetastoreUtil
{
    private ThriftMetastoreUtil() {}

    /** Converts a Presto {@link Database} to its Thrift metastore representation. */
    public static org.apache.hadoop.hive.metastore.api.Database toMetastoreApiDatabase(Database database)
    {
        org.apache.hadoop.hive.metastore.api.Database result = new org.apache.hadoop.hive.metastore.api.Database();
        result.setName(database.getDatabaseName());
        database.getLocation().ifPresent(result::setLocationUri);
        result.setOwnerName(database.getOwnerName());
        result.setOwnerType(toMetastoreApiPrincipalType(database.getOwnerType()));
        database.getComment().ifPresent(result::setDescription);
        result.setParameters(database.getParameters());
        return result;
    }

    /** Converts a Presto {@link Table} plus its privileges to the Thrift representation. */
    public static org.apache.hadoop.hive.metastore.api.Table toMetastoreApiTable(Table table, PrincipalPrivileges privileges)
    {
        org.apache.hadoop.hive.metastore.api.Table result = new org.apache.hadoop.hive.metastore.api.Table();
        result.setDbName(table.getDatabaseName());
        result.setTableName(table.getTableName());
        result.setOwner(table.getOwner());
        result.setTableType(table.getTableType());
        result.setParameters(table.getParameters());
        result.setPartitionKeys(table.getPartitionColumns().stream().map(ThriftMetastoreUtil::toMetastoreApiFieldSchema).collect(toList()));
        result.setSd(makeStorageDescriptor(table.getTableName(), table.getDataColumns(), table.getStorage()));
        result.setPrivileges(toMetastoreApiPrincipalPrivilegeSet(table.getOwner(), privileges));
        result.setViewOriginalText(table.getViewOriginalText().orElse(null));
        result.setViewExpandedText(table.getViewExpandedText().orElse(null));
        return result;
    }

    /**
     * Converts user and role privilege multimaps into a Thrift
     * {@link PrincipalPrivilegeSet}; the group-privilege slot is left empty.
     */
    public static PrincipalPrivilegeSet toMetastoreApiPrincipalPrivilegeSet(String grantee, PrincipalPrivileges privileges)
    {
        ImmutableMap.Builder<String, List<PrivilegeGrantInfo>> userPrivileges = ImmutableMap.builder();
        for (Map.Entry<String, Collection<HivePrivilegeInfo>> entry : privileges.getUserPrivileges().asMap().entrySet()) {
            userPrivileges.put(entry.getKey(), entry.getValue().stream()
                    .map(privilegeInfo -> toMetastoreApiPrivilegeGrantInfo(grantee, privilegeInfo))
                    .collect(toList()));
        }
        ImmutableMap.Builder<String, List<PrivilegeGrantInfo>> rolePrivileges = ImmutableMap.builder();
        for (Map.Entry<String, Collection<HivePrivilegeInfo>> entry : privileges.getRolePrivileges().asMap().entrySet()) {
            rolePrivileges.put(entry.getKey(), entry.getValue().stream()
                    .map(privilegeInfo -> toMetastoreApiPrivilegeGrantInfo(grantee, privilegeInfo))
                    .collect(toList()));
        }
        return new PrincipalPrivilegeSet(userPrivileges.build(), ImmutableMap.of(), rolePrivileges.build());
    }

    /**
     * Builds a Thrift grant record for {@code grantee}; the grantor is recorded
     * as the grantee itself with USER principal type and create-time 0.
     */
    public static PrivilegeGrantInfo toMetastoreApiPrivilegeGrantInfo(String grantee, HivePrivilegeInfo privilegeInfo)
    {
        return new PrivilegeGrantInfo(
                privilegeInfo.getHivePrivilege().name().toLowerCase(),
                0,
                grantee,
                org.apache.hadoop.hive.metastore.api.PrincipalType.USER, privilegeInfo.isGrantOption());
    }

    /**
     * Maps a Presto {@link PrincipalType} to the Thrift enum.
     *
     * @throws IllegalArgumentException for types other than USER and ROLE
     */
    public static org.apache.hadoop.hive.metastore.api.PrincipalType toMetastoreApiPrincipalType(PrincipalType principalType)
    {
        switch (principalType) {
            case USER:
                return org.apache.hadoop.hive.metastore.api.PrincipalType.USER;
            case ROLE:
                return org.apache.hadoop.hive.metastore.api.PrincipalType.ROLE;
            default:
                throw new IllegalArgumentException("Unsupported principal type: " + principalType);
        }
    }

    /** Converts a Presto {@link Partition} to its Thrift metastore representation. */
    public static org.apache.hadoop.hive.metastore.api.Partition toMetastoreApiPartition(Partition partition)
    {
        org.apache.hadoop.hive.metastore.api.Partition result = new org.apache.hadoop.hive.metastore.api.Partition();
        result.setDbName(partition.getDatabaseName());
        result.setTableName(partition.getTableName());
        result.setValues(partition.getValues());
        result.setSd(makeStorageDescriptor(partition.getTableName(), partition.getColumns(), partition.getStorage()));
        result.setParameters(partition.getParameters());
        return result;
    }

    /**
     * Parses a list of Thrift grant records into Presto privilege infos;
     * a null list yields an empty set.
     */
    public static Set<HivePrivilegeInfo> toGrants(List<PrivilegeGrantInfo> userGrants)
    {
        if (userGrants == null) {
            return ImmutableSet.of();
        }
        ImmutableSet.Builder<HivePrivilegeInfo> privileges = ImmutableSet.builder();
        for (PrivilegeGrantInfo userGrant : userGrants) {
            privileges.addAll(parsePrivilege(userGrant));
        }
        return privileges.build();
    }

    /**
     * Converts a Thrift database to the Presto model.  A missing owner falls
     * back to the "PUBLIC" role; missing parameters become an empty map.
     */
    public static Database fromMetastoreApiDatabase(org.apache.hadoop.hive.metastore.api.Database database)
    {
        String ownerName = "PUBLIC";
        PrincipalType ownerType = PrincipalType.ROLE;
        if (database.getOwnerName() != null) {
            ownerName = database.getOwnerName();
            ownerType = fromMetastoreApiPrincipalType(database.getOwnerType());
        }
        Map<String, String> parameters = database.getParameters();
        if (parameters == null) {
            parameters = ImmutableMap.of();
        }
        return Database.builder()
                .setDatabaseName(database.getName())
                .setLocation(Optional.ofNullable(database.getLocationUri()))
                .setOwnerName(ownerName)
                .setOwnerType(ownerType)
                .setComment(Optional.ofNullable(database.getDescription()))
                .setParameters(parameters)
                .build();
    }

    /**
     * Converts a Thrift table to the Presto model.
     *
     * @throws PrestoException with {@code HIVE_INVALID_METADATA} when the table
     *         has no storage descriptor
     */
    public static Table fromMetastoreApiTable(org.apache.hadoop.hive.metastore.api.Table table)
    {
        StorageDescriptor storageDescriptor = table.getSd();
        if (storageDescriptor == null) {
            throw new PrestoException(HIVE_INVALID_METADATA, "Table is missing storage descriptor");
        }
        Table.Builder tableBuilder = Table.builder()
                .setDatabaseName(table.getDbName())
                .setTableName(table.getTableName())
                .setOwner(nullToEmpty(table.getOwner()))
                .setTableType(table.getTableType())
                .setDataColumns(storageDescriptor.getCols().stream()
                        .map(ThriftMetastoreUtil::fromMetastoreApiFieldSchema)
                        .collect(toList()))
                .setPartitionColumns(table.getPartitionKeys().stream()
                        .map(ThriftMetastoreUtil::fromMetastoreApiFieldSchema)
                        .collect(toList()))
                .setParameters(table.getParameters() == null ? ImmutableMap.of() : table.getParameters())
                .setViewOriginalText(Optional.ofNullable(emptyToNull(table.getViewOriginalText())))
                .setViewExpandedText(Optional.ofNullable(emptyToNull(table.getViewExpandedText())));
        fromMetastoreApiStorageDescriptor(storageDescriptor, tableBuilder.getStorageBuilder(), table.getTableName());
        return tableBuilder.build();
    }

    /**
     * Converts a Thrift partition to the Presto model.
     *
     * @throws PrestoException with {@code HIVE_INVALID_METADATA} when the
     *         partition has no storage descriptor
     */
    public static Partition fromMetastoreApiPartition(org.apache.hadoop.hive.metastore.api.Partition partition)
    {
        StorageDescriptor storageDescriptor = partition.getSd();
        if (storageDescriptor == null) {
            throw new PrestoException(HIVE_INVALID_METADATA, "Partition does not contain a storage descriptor: " + partition);
        }
        Partition.Builder partitionBuilder = Partition.builder()
                .setDatabaseName(partition.getDbName())
                .setTableName(partition.getTableName())
                .setValues(partition.getValues())
                .setColumns(storageDescriptor.getCols().stream()
                        .map(ThriftMetastoreUtil::fromMetastoreApiFieldSchema)
                        .collect(toList()))
                .setParameters(partition.getParameters());
        fromMetastoreApiStorageDescriptor(storageDescriptor, partitionBuilder.getStorageBuilder(), format("%s.%s", partition.getTableName(), partition.getValues()));
        return partitionBuilder.build();
    }

    /**
     * Converts Thrift column statistics to the Presto model, dispatching on
     * which union member of the stats data is set.
     *
     * @throws PrestoException with {@code HIVE_INVALID_METADATA} when no known
     *         stats member is set
     */
    public static HiveColumnStatistics fromMetastoreApiColumnStatistics(ColumnStatisticsObj columnStatistics)
    {
        if (columnStatistics.getStatsData().isSetLongStats()) {
            LongColumnStatsData longStatsData = columnStatistics.getStatsData().getLongStats();
            return new HiveColumnStatistics<>(
                    longStatsData.isSetLowValue() ? Optional.of(longStatsData.getLowValue()) : Optional.empty(),
                    longStatsData.isSetHighValue() ? Optional.of(longStatsData.getHighValue()) : Optional.empty(),
                    OptionalLong.empty(),
                    OptionalDouble.empty(),
                    OptionalLong.empty(),
                    OptionalLong.empty(),
                    OptionalLong.of(longStatsData.getNumNulls()),
                    OptionalLong.of(longStatsData.getNumDVs()));
        }
        else if (columnStatistics.getStatsData().isSetDoubleStats()) {
            DoubleColumnStatsData doubleStatsData = columnStatistics.getStatsData().getDoubleStats();
            return new HiveColumnStatistics<>(
                    doubleStatsData.isSetLowValue() ? Optional.of(doubleStatsData.getLowValue()) : Optional.empty(),
                    doubleStatsData.isSetHighValue() ? Optional.of(doubleStatsData.getHighValue()) : Optional.empty(),
                    OptionalLong.empty(),
                    OptionalDouble.empty(),
                    OptionalLong.empty(),
                    OptionalLong.empty(),
                    OptionalLong.of(doubleStatsData.getNumNulls()),
                    OptionalLong.of(doubleStatsData.getNumDVs()));
        }
        else if (columnStatistics.getStatsData().isSetDecimalStats()) {
            DecimalColumnStatsData decimalStatsData = columnStatistics.getStatsData().getDecimalStats();
            return new HiveColumnStatistics<>(
                    decimalStatsData.isSetLowValue() ? fromMetastoreDecimal(decimalStatsData.getLowValue()) : Optional.empty(),
                    decimalStatsData.isSetHighValue() ? fromMetastoreDecimal(decimalStatsData.getHighValue()) : Optional.empty(),
                    OptionalLong.empty(),
                    OptionalDouble.empty(),
                    OptionalLong.empty(),
                    OptionalLong.empty(),
                    OptionalLong.of(decimalStatsData.getNumNulls()),
                    OptionalLong.of(decimalStatsData.getNumDVs()));
        }
        else if (columnStatistics.getStatsData().isSetBooleanStats()) {
            BooleanColumnStatsData booleanStatsData = columnStatistics.getStatsData().getBooleanStats();
            // Distinct-value count for booleans is derived from which of
            // true/false actually occur.
            return new HiveColumnStatistics<>(
                    Optional.empty(),
                    Optional.empty(),
                    OptionalLong.empty(),
                    OptionalDouble.empty(),
                    OptionalLong.of(booleanStatsData.getNumTrues()),
                    OptionalLong.of(booleanStatsData.getNumFalses()),
                    OptionalLong.of(booleanStatsData.getNumNulls()),
                    OptionalLong.of((booleanStatsData.getNumFalses() > 0 ? 1 : 0) + (booleanStatsData.getNumTrues() > 0 ? 1 : 0)));
        }
        else if (columnStatistics.getStatsData().isSetDateStats()) {
            DateColumnStatsData dateStatsData = columnStatistics.getStatsData().getDateStats();
            return new HiveColumnStatistics<>(
                    dateStatsData.isSetLowValue() ? fromMetastoreDate(dateStatsData.getLowValue()) : Optional.empty(),
                    dateStatsData.isSetHighValue() ? fromMetastoreDate(dateStatsData.getHighValue()) : Optional.empty(),
                    OptionalLong.empty(),
                    OptionalDouble.empty(),
                    OptionalLong.empty(),
                    OptionalLong.empty(),
                    OptionalLong.of(dateStatsData.getNumNulls()),
                    OptionalLong.of(dateStatsData.getNumDVs()));
        }
        else if (columnStatistics.getStatsData().isSetStringStats()) {
            StringColumnStatsData stringStatsData = columnStatistics.getStatsData().getStringStats();
            return new HiveColumnStatistics<>(
                    Optional.empty(),
                    Optional.empty(),
                    OptionalLong.of(stringStatsData.getMaxColLen()),
                    OptionalDouble.of(stringStatsData.getAvgColLen()),
                    OptionalLong.empty(),
                    OptionalLong.empty(),
                    OptionalLong.of(stringStatsData.getNumNulls()),
                    OptionalLong.of(stringStatsData.getNumDVs()));
        }
        else if (columnStatistics.getStatsData().isSetBinaryStats()) {
            BinaryColumnStatsData binaryStatsData = columnStatistics.getStatsData().getBinaryStats();
            return new HiveColumnStatistics<>(
                    Optional.empty(),
                    Optional.empty(),
                    OptionalLong.of(binaryStatsData.getMaxColLen()),
                    OptionalDouble.of(binaryStatsData.getAvgColLen()),
                    OptionalLong.empty(),
                    OptionalLong.empty(),
                    OptionalLong.of(binaryStatsData.getNumNulls()),
                    OptionalLong.empty());
        }
        else {
            throw new PrestoException(HIVE_INVALID_METADATA, "Invalid column statistics data: " + columnStatistics);
        }
    }

    /**
     * Converts a Thrift {@link Date} (days since epoch) to a {@link LocalDate};
     * empty when null.  FIX: annotated {@code @Nullable} for consistency with
     * {@link #fromMetastoreDecimal} — the body already handles null.
     */
    public static Optional<LocalDate> fromMetastoreDate(@Nullable Date date)
    {
        if (date == null) {
            return Optional.empty();
        }
        return Optional.of(LocalDate.ofEpochDay(date.getDaysSinceEpoch()));
    }

    /** Converts a Thrift {@link Decimal} (unscaled bytes + scale) to a {@link BigDecimal}; empty when null. */
    public static Optional<BigDecimal> fromMetastoreDecimal(@Nullable Decimal decimal)
    {
        if (decimal == null) {
            return Optional.empty();
        }
        return Optional.of(new BigDecimal(new BigInteger(decimal.getUnscaled()), decimal.getScale()));
    }

    /**
     * Maps a Thrift principal type to the Presto enum.
     *
     * @throws IllegalArgumentException for types other than USER and ROLE
     */
    public static PrincipalType fromMetastoreApiPrincipalType(org.apache.hadoop.hive.metastore.api.PrincipalType principalType)
    {
        switch (principalType) {
            case USER:
                return PrincipalType.USER;
            case ROLE:
                return PrincipalType.ROLE;
            default:
                throw new IllegalArgumentException("Unsupported principal type: " + principalType);
        }
    }

    /** Converts a Presto column to a Thrift {@link FieldSchema}. */
    public static FieldSchema toMetastoreApiFieldSchema(Column column)
    {
        return new FieldSchema(column.getName(), column.getType().getHiveTypeName().toString(), column.getComment().orElse(null));
    }

    /** Converts a Thrift {@link FieldSchema} to a Presto column; empty comments become absent. */
    public static Column fromMetastoreApiFieldSchema(FieldSchema fieldSchema)
    {
        return new Column(fieldSchema.getName(), HiveType.valueOf(fieldSchema.getType()), Optional.ofNullable(emptyToNull(fieldSchema.getComment())));
    }

    /**
     * Populates a Presto {@link Storage.Builder} from a Thrift storage descriptor.
     *
     * @param tablePartitionName used only in error reporting for bucket parsing
     * @throws PrestoException with {@code HIVE_INVALID_METADATA} when SerDe info is missing
     */
    public static void fromMetastoreApiStorageDescriptor(StorageDescriptor storageDescriptor, Storage.Builder builder, String tablePartitionName)
    {
        SerDeInfo serdeInfo = storageDescriptor.getSerdeInfo();
        if (serdeInfo == null) {
            throw new PrestoException(HIVE_INVALID_METADATA, "Table storage descriptor is missing SerDe info");
        }
        builder.setStorageFormat(StorageFormat.createNullable(serdeInfo.getSerializationLib(), storageDescriptor.getInputFormat(), storageDescriptor.getOutputFormat()))
                .setLocation(nullToEmpty(storageDescriptor.getLocation()))
                .setBucketProperty(HiveBucketProperty.fromStorageDescriptor(storageDescriptor, tablePartitionName))
                .setSorted(storageDescriptor.isSetSortCols() && !storageDescriptor.getSortCols().isEmpty())
                .setSkewed(storageDescriptor.isSetSkewedInfo() && storageDescriptor.getSkewedInfo().isSetSkewedColNames() && !storageDescriptor.getSkewedInfo().getSkewedColNames().isEmpty())
                .setSerdeParameters(serdeInfo.getParameters() == null ? ImmutableMap.of() : serdeInfo.getParameters());
    }

    /**
     * Builds a Thrift storage descriptor for writes.
     *
     * @throws IllegalArgumentException for sorted or skewed storage, which
     *         this code path does not support writing
     */
    private static StorageDescriptor makeStorageDescriptor(String tableName, List<Column> columns, Storage storage)
    {
        if (storage.isSorted() || storage.isSkewed()) {
            throw new IllegalArgumentException("Writing to sorted and/or skewed table/partition is not supported");
        }
        SerDeInfo serdeInfo = new SerDeInfo();
        serdeInfo.setName(tableName);
        serdeInfo.setSerializationLib(storage.getStorageFormat().getSerDeNullable());
        serdeInfo.setParameters(storage.getSerdeParameters());
        StorageDescriptor sd = new StorageDescriptor();
        sd.setLocation(emptyToNull(storage.getLocation()));
        sd.setCols(columns.stream()
                .map(ThriftMetastoreUtil::toMetastoreApiFieldSchema)
                .collect(toList()));
        sd.setSerdeInfo(serdeInfo);
        sd.setInputFormat(storage.getStorageFormat().getInputFormatNullable());
        sd.setOutputFormat(storage.getStorageFormat().getOutputFormatNullable());
        sd.setParameters(ImmutableMap.of());
        Optional<HiveBucketProperty> bucketProperty = storage.getBucketProperty();
        if (bucketProperty.isPresent()) {
            sd.setNumBuckets(bucketProperty.get().getBucketCount());
            sd.setBucketCols(bucketProperty.get().getBucketedBy());
        }
        return sd;
    }
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* <p>
* Describes the configuration of Spot Instances in an EC2 Fleet request.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/SpotOptionsRequest" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class SpotOptionsRequest implements Serializable, Cloneable {
/**
* <p>
* Indicates how to allocate the target Spot Instance capacity across the Spot Instance pools specified by the EC2
* Fleet.
* </p>
* <p>
* If the allocation strategy is <code>lowestPrice</code>, EC2 Fleet launches instances from the Spot Instance pools
* with the lowest price. This is the default allocation strategy.
* </p>
* <p>
* If the allocation strategy is <code>diversified</code>, EC2 Fleet launches instances from all the Spot Instance
* pools that you specify.
* </p>
* <p>
* If the allocation strategy is <code>capacityOptimized</code>, EC2 Fleet launches instances from Spot Instance
* pools that are optimally chosen based on the available Spot Instance capacity.
* </p>
*/
private String allocationStrategy;
/**
* <p>
* The behavior when a Spot Instance is interrupted. The default is <code>terminate</code>.
* </p>
*/
private String instanceInterruptionBehavior;
/**
* <p>
* The number of Spot pools across which to allocate your target Spot capacity. Valid only when Spot
* <b>AllocationStrategy</b> is set to <code>lowest-price</code>. EC2 Fleet selects the cheapest Spot pools and
* evenly allocates your target Spot capacity across the number of Spot pools that you specify.
* </p>
*/
private Integer instancePoolsToUseCount;
/**
* <p>
* Indicates that the fleet uses a single instance type to launch all Spot Instances in the fleet.
* </p>
*/
private Boolean singleInstanceType;
/**
* <p>
* Indicates that the fleet launches all Spot Instances into a single Availability Zone.
* </p>
*/
private Boolean singleAvailabilityZone;
/**
* <p>
* The minimum target capacity for Spot Instances in the fleet. If the minimum target capacity is not reached, the
* fleet launches no instances.
* </p>
*/
private Integer minTargetCapacity;
/**
* <p>
* The maximum amount per hour for Spot Instances that you're willing to pay.
* </p>
*/
private String maxTotalPrice;
/**
 * <p>
 * Sets how to allocate the target Spot Instance capacity across the Spot Instance pools specified by the EC2
 * Fleet: <code>lowestPrice</code> (the default) launches instances from the lowest-priced pools,
 * <code>diversified</code> launches instances from all the pools that you specify, and
 * <code>capacityOptimized</code> launches instances from pools optimally chosen based on available capacity.
 * </p>
 *
 * @param allocationStrategy
 *        Indicates how to allocate the target Spot Instance capacity across the Spot Instance pools specified by
 *        the EC2 Fleet.
 * @see SpotAllocationStrategy
 */
public void setAllocationStrategy(String allocationStrategy) {
    this.allocationStrategy = allocationStrategy;
}
/**
 * <p>
 * Returns how the target Spot Instance capacity is allocated across the Spot Instance pools specified by the EC2
 * Fleet: <code>lowestPrice</code> (the default), <code>diversified</code>, or <code>capacityOptimized</code>.
 * </p>
 *
 * @return Indicates how to allocate the target Spot Instance capacity across the Spot Instance pools specified by
 *         the EC2 Fleet.
 * @see SpotAllocationStrategy
 */
public String getAllocationStrategy() {
    return allocationStrategy;
}
/**
 * <p>
 * Fluent variant of {@link #setAllocationStrategy(String)}: sets how to allocate the target Spot Instance
 * capacity across the Spot Instance pools specified by the EC2 Fleet (<code>lowestPrice</code> — the default,
 * <code>diversified</code>, or <code>capacityOptimized</code>).
 * </p>
 *
 * @param allocationStrategy
 *        Indicates how to allocate the target Spot Instance capacity across the Spot Instance pools specified by
 *        the EC2 Fleet.
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see SpotAllocationStrategy
 */
public SpotOptionsRequest withAllocationStrategy(String allocationStrategy) {
    setAllocationStrategy(allocationStrategy);
    return this;
}
/**
* <p>
* Indicates how to allocate the target Spot Instance capacity across the Spot Instance pools specified by the EC2
* Fleet.
* </p>
* <p>
* If the allocation strategy is <code>lowestPrice</code>, EC2 Fleet launches instances from the Spot Instance pools
* with the lowest price. This is the default allocation strategy.
* </p>
* <p>
* If the allocation strategy is <code>diversified</code>, EC2 Fleet launches instances from all the Spot Instance
* pools that you specify.
* </p>
* <p>
* If the allocation strategy is <code>capacityOptimized</code>, EC2 Fleet launches instances from Spot Instance
* pools that are optimally chosen based on the available Spot Instance capacity.
* </p>
*
* @param allocationStrategy
* Indicates how to allocate the target Spot Instance capacity across the Spot Instance pools specified by
* the EC2 Fleet.</p>
* <p>
* If the allocation strategy is <code>lowestPrice</code>, EC2 Fleet launches instances from the Spot
* Instance pools with the lowest price. This is the default allocation strategy.
* </p>
* <p>
* If the allocation strategy is <code>diversified</code>, EC2 Fleet launches instances from all the Spot
* Instance pools that you specify.
* </p>
* <p>
* If the allocation strategy is <code>capacityOptimized</code>, EC2 Fleet launches instances from Spot
* Instance pools that are optimally chosen based on the available Spot Instance capacity.
* @return Returns a reference to this object so that method calls can be chained together.
* @see SpotAllocationStrategy
*/
public SpotOptionsRequest withAllocationStrategy(SpotAllocationStrategy allocationStrategy) {
this.allocationStrategy = allocationStrategy.toString();
return this;
}
/**
* <p>
* The behavior when a Spot Instance is interrupted. The default is <code>terminate</code>.
* </p>
*
* @param instanceInterruptionBehavior
* The behavior when a Spot Instance is interrupted. The default is <code>terminate</code>.
* @see SpotInstanceInterruptionBehavior
*/
public void setInstanceInterruptionBehavior(String instanceInterruptionBehavior) {
this.instanceInterruptionBehavior = instanceInterruptionBehavior;
}
/**
* <p>
* The behavior when a Spot Instance is interrupted. The default is <code>terminate</code>.
* </p>
*
* @return The behavior when a Spot Instance is interrupted. The default is <code>terminate</code>.
* @see SpotInstanceInterruptionBehavior
*/
public String getInstanceInterruptionBehavior() {
return this.instanceInterruptionBehavior;
}
/**
* <p>
* The behavior when a Spot Instance is interrupted. The default is <code>terminate</code>.
* </p>
*
* @param instanceInterruptionBehavior
* The behavior when a Spot Instance is interrupted. The default is <code>terminate</code>.
* @return Returns a reference to this object so that method calls can be chained together.
* @see SpotInstanceInterruptionBehavior
*/
public SpotOptionsRequest withInstanceInterruptionBehavior(String instanceInterruptionBehavior) {
setInstanceInterruptionBehavior(instanceInterruptionBehavior);
return this;
}
/**
* <p>
* The behavior when a Spot Instance is interrupted. The default is <code>terminate</code>.
* </p>
*
* @param instanceInterruptionBehavior
* The behavior when a Spot Instance is interrupted. The default is <code>terminate</code>.
* @return Returns a reference to this object so that method calls can be chained together.
* @see SpotInstanceInterruptionBehavior
*/
public SpotOptionsRequest withInstanceInterruptionBehavior(SpotInstanceInterruptionBehavior instanceInterruptionBehavior) {
this.instanceInterruptionBehavior = instanceInterruptionBehavior.toString();
return this;
}
/**
* <p>
* The number of Spot pools across which to allocate your target Spot capacity. Valid only when Spot
* <b>AllocationStrategy</b> is set to <code>lowest-price</code>. EC2 Fleet selects the cheapest Spot pools and
* evenly allocates your target Spot capacity across the number of Spot pools that you specify.
* </p>
*
* @param instancePoolsToUseCount
* The number of Spot pools across which to allocate your target Spot capacity. Valid only when Spot
* <b>AllocationStrategy</b> is set to <code>lowest-price</code>. EC2 Fleet selects the cheapest Spot pools
* and evenly allocates your target Spot capacity across the number of Spot pools that you specify.
*/
public void setInstancePoolsToUseCount(Integer instancePoolsToUseCount) {
this.instancePoolsToUseCount = instancePoolsToUseCount;
}
/**
* <p>
* The number of Spot pools across which to allocate your target Spot capacity. Valid only when Spot
* <b>AllocationStrategy</b> is set to <code>lowest-price</code>. EC2 Fleet selects the cheapest Spot pools and
* evenly allocates your target Spot capacity across the number of Spot pools that you specify.
* </p>
*
* @return The number of Spot pools across which to allocate your target Spot capacity. Valid only when Spot
* <b>AllocationStrategy</b> is set to <code>lowest-price</code>. EC2 Fleet selects the cheapest Spot pools
* and evenly allocates your target Spot capacity across the number of Spot pools that you specify.
*/
public Integer getInstancePoolsToUseCount() {
return this.instancePoolsToUseCount;
}
/**
* <p>
* The number of Spot pools across which to allocate your target Spot capacity. Valid only when Spot
* <b>AllocationStrategy</b> is set to <code>lowest-price</code>. EC2 Fleet selects the cheapest Spot pools and
* evenly allocates your target Spot capacity across the number of Spot pools that you specify.
* </p>
*
* @param instancePoolsToUseCount
* The number of Spot pools across which to allocate your target Spot capacity. Valid only when Spot
* <b>AllocationStrategy</b> is set to <code>lowest-price</code>. EC2 Fleet selects the cheapest Spot pools
* and evenly allocates your target Spot capacity across the number of Spot pools that you specify.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public SpotOptionsRequest withInstancePoolsToUseCount(Integer instancePoolsToUseCount) {
setInstancePoolsToUseCount(instancePoolsToUseCount);
return this;
}
/**
* <p>
* Indicates that the fleet uses a single instance type to launch all Spot Instances in the fleet.
* </p>
*
* @param singleInstanceType
* Indicates that the fleet uses a single instance type to launch all Spot Instances in the fleet.
*/
public void setSingleInstanceType(Boolean singleInstanceType) {
this.singleInstanceType = singleInstanceType;
}
/**
* <p>
* Indicates that the fleet uses a single instance type to launch all Spot Instances in the fleet.
* </p>
*
* @return Indicates that the fleet uses a single instance type to launch all Spot Instances in the fleet.
*/
public Boolean getSingleInstanceType() {
return this.singleInstanceType;
}
/**
* <p>
* Indicates that the fleet uses a single instance type to launch all Spot Instances in the fleet.
* </p>
*
* @param singleInstanceType
* Indicates that the fleet uses a single instance type to launch all Spot Instances in the fleet.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public SpotOptionsRequest withSingleInstanceType(Boolean singleInstanceType) {
setSingleInstanceType(singleInstanceType);
return this;
}
/**
* <p>
* Indicates that the fleet uses a single instance type to launch all Spot Instances in the fleet.
* </p>
*
* @return Indicates that the fleet uses a single instance type to launch all Spot Instances in the fleet.
*/
public Boolean isSingleInstanceType() {
return this.singleInstanceType;
}
/**
* <p>
* Indicates that the fleet launches all Spot Instances into a single Availability Zone.
* </p>
*
* @param singleAvailabilityZone
* Indicates that the fleet launches all Spot Instances into a single Availability Zone.
*/
public void setSingleAvailabilityZone(Boolean singleAvailabilityZone) {
this.singleAvailabilityZone = singleAvailabilityZone;
}
/**
* <p>
* Indicates that the fleet launches all Spot Instances into a single Availability Zone.
* </p>
*
* @return Indicates that the fleet launches all Spot Instances into a single Availability Zone.
*/
public Boolean getSingleAvailabilityZone() {
return this.singleAvailabilityZone;
}
/**
* <p>
* Indicates that the fleet launches all Spot Instances into a single Availability Zone.
* </p>
*
* @param singleAvailabilityZone
* Indicates that the fleet launches all Spot Instances into a single Availability Zone.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public SpotOptionsRequest withSingleAvailabilityZone(Boolean singleAvailabilityZone) {
setSingleAvailabilityZone(singleAvailabilityZone);
return this;
}
/**
* <p>
* Indicates that the fleet launches all Spot Instances into a single Availability Zone.
* </p>
*
* @return Indicates that the fleet launches all Spot Instances into a single Availability Zone.
*/
public Boolean isSingleAvailabilityZone() {
return this.singleAvailabilityZone;
}
/**
* <p>
* The minimum target capacity for Spot Instances in the fleet. If the minimum target capacity is not reached, the
* fleet launches no instances.
* </p>
*
* @param minTargetCapacity
* The minimum target capacity for Spot Instances in the fleet. If the minimum target capacity is not
* reached, the fleet launches no instances.
*/
public void setMinTargetCapacity(Integer minTargetCapacity) {
this.minTargetCapacity = minTargetCapacity;
}
/**
* <p>
* The minimum target capacity for Spot Instances in the fleet. If the minimum target capacity is not reached, the
* fleet launches no instances.
* </p>
*
* @return The minimum target capacity for Spot Instances in the fleet. If the minimum target capacity is not
* reached, the fleet launches no instances.
*/
public Integer getMinTargetCapacity() {
return this.minTargetCapacity;
}
/**
* <p>
* The minimum target capacity for Spot Instances in the fleet. If the minimum target capacity is not reached, the
* fleet launches no instances.
* </p>
*
* @param minTargetCapacity
* The minimum target capacity for Spot Instances in the fleet. If the minimum target capacity is not
* reached, the fleet launches no instances.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public SpotOptionsRequest withMinTargetCapacity(Integer minTargetCapacity) {
setMinTargetCapacity(minTargetCapacity);
return this;
}
/**
* <p>
* The maximum amount per hour for Spot Instances that you're willing to pay.
* </p>
*
* @param maxTotalPrice
* The maximum amount per hour for Spot Instances that you're willing to pay.
*/
public void setMaxTotalPrice(String maxTotalPrice) {
this.maxTotalPrice = maxTotalPrice;
}
/**
* <p>
* The maximum amount per hour for Spot Instances that you're willing to pay.
* </p>
*
* @return The maximum amount per hour for Spot Instances that you're willing to pay.
*/
public String getMaxTotalPrice() {
return this.maxTotalPrice;
}
/**
* <p>
* The maximum amount per hour for Spot Instances that you're willing to pay.
* </p>
*
* @param maxTotalPrice
* The maximum amount per hour for Spot Instances that you're willing to pay.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public SpotOptionsRequest withMaxTotalPrice(String maxTotalPrice) {
setMaxTotalPrice(maxTotalPrice);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getAllocationStrategy() != null)
sb.append("AllocationStrategy: ").append(getAllocationStrategy()).append(",");
if (getInstanceInterruptionBehavior() != null)
sb.append("InstanceInterruptionBehavior: ").append(getInstanceInterruptionBehavior()).append(",");
if (getInstancePoolsToUseCount() != null)
sb.append("InstancePoolsToUseCount: ").append(getInstancePoolsToUseCount()).append(",");
if (getSingleInstanceType() != null)
sb.append("SingleInstanceType: ").append(getSingleInstanceType()).append(",");
if (getSingleAvailabilityZone() != null)
sb.append("SingleAvailabilityZone: ").append(getSingleAvailabilityZone()).append(",");
if (getMinTargetCapacity() != null)
sb.append("MinTargetCapacity: ").append(getMinTargetCapacity()).append(",");
if (getMaxTotalPrice() != null)
sb.append("MaxTotalPrice: ").append(getMaxTotalPrice());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof SpotOptionsRequest == false)
return false;
SpotOptionsRequest other = (SpotOptionsRequest) obj;
if (other.getAllocationStrategy() == null ^ this.getAllocationStrategy() == null)
return false;
if (other.getAllocationStrategy() != null && other.getAllocationStrategy().equals(this.getAllocationStrategy()) == false)
return false;
if (other.getInstanceInterruptionBehavior() == null ^ this.getInstanceInterruptionBehavior() == null)
return false;
if (other.getInstanceInterruptionBehavior() != null && other.getInstanceInterruptionBehavior().equals(this.getInstanceInterruptionBehavior()) == false)
return false;
if (other.getInstancePoolsToUseCount() == null ^ this.getInstancePoolsToUseCount() == null)
return false;
if (other.getInstancePoolsToUseCount() != null && other.getInstancePoolsToUseCount().equals(this.getInstancePoolsToUseCount()) == false)
return false;
if (other.getSingleInstanceType() == null ^ this.getSingleInstanceType() == null)
return false;
if (other.getSingleInstanceType() != null && other.getSingleInstanceType().equals(this.getSingleInstanceType()) == false)
return false;
if (other.getSingleAvailabilityZone() == null ^ this.getSingleAvailabilityZone() == null)
return false;
if (other.getSingleAvailabilityZone() != null && other.getSingleAvailabilityZone().equals(this.getSingleAvailabilityZone()) == false)
return false;
if (other.getMinTargetCapacity() == null ^ this.getMinTargetCapacity() == null)
return false;
if (other.getMinTargetCapacity() != null && other.getMinTargetCapacity().equals(this.getMinTargetCapacity()) == false)
return false;
if (other.getMaxTotalPrice() == null ^ this.getMaxTotalPrice() == null)
return false;
if (other.getMaxTotalPrice() != null && other.getMaxTotalPrice().equals(this.getMaxTotalPrice()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getAllocationStrategy() == null) ? 0 : getAllocationStrategy().hashCode());
hashCode = prime * hashCode + ((getInstanceInterruptionBehavior() == null) ? 0 : getInstanceInterruptionBehavior().hashCode());
hashCode = prime * hashCode + ((getInstancePoolsToUseCount() == null) ? 0 : getInstancePoolsToUseCount().hashCode());
hashCode = prime * hashCode + ((getSingleInstanceType() == null) ? 0 : getSingleInstanceType().hashCode());
hashCode = prime * hashCode + ((getSingleAvailabilityZone() == null) ? 0 : getSingleAvailabilityZone().hashCode());
hashCode = prime * hashCode + ((getMinTargetCapacity() == null) ? 0 : getMinTargetCapacity().hashCode());
hashCode = prime * hashCode + ((getMaxTotalPrice() == null) ? 0 : getMaxTotalPrice().hashCode());
return hashCode;
}
@Override
public SpotOptionsRequest clone() {
try {
return (SpotOptionsRequest) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
}
| |
<<<<<<< HEAD
/*
Part of the G4P library for Processing
http://www.lagers.org.uk/g4p/index.html
http://sourceforge.net/projects/g4p/files/?source=navbar
Copyright (c) 2012 Peter Lager
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General
Public License along with this library; if not, write to the
Free Software Foundation, Inc., 59 Temple Place, Suite 330,
Boston, MA 02111-1307 USA
*/
package g4p_controls;
import g4p_controls.HotSpot.HSalpha;
import processing.core.PApplet;
import processing.core.PImage;
import processing.event.MouseEvent;
/**
* Buttons created from this class have 2 or more toggle states. If the number of states
* is N then the button's value will be in the range 0 to N-1. Most toggle buttons will
* have just two states and these have values 0 and 1. <br>
* Clicking on the button advances the state by one, restarting at zero after the last
* state. <br>
* Each state must have its own 'picture' and the user must supply these as a tiled image
* where the pictures are tiled in 1D or 2D arrangement without 'empty space' around the
* tiles. <br>
* If for any reason the library is unable to use the specified graphics then it will
* provide a default two state toggle switch. <br>
* It is also possible to provide an over-button image set for when the mouse moves
* over the button - this is optional. <br>
* The button control will always be resized to suit the state picture size (tile size). <br>
* The mouse is considered to be over the button if its position is over an opaque pixel
* in the state picture. Since transparent pixels are not included then the button shape
* can be different for each state. <br>
*
*
*
* Three types of event can be generated :- <br>
* <b> GEvent.PRESSED GEvent.RELEASED GEvent.CLICKED </b><br>
*
* To simplify event handling the button only fires off CLICKED events
* when the mouse button is pressed and released over the button face
* (the default behaviour). <br>
*
* Using <pre>button1.fireAllEvents(true);</pre> enables the other 2 events
* for button <b>button1</b>. A PRESSED event is created if the mouse button
* is pressed down over the button face, the CLICKED event is then generated
* if the mouse button is released over the button face. Releasing the
* button off the button face creates a RELEASED event. This is included for
* completeness since it is unlikely you will need to detect these events
* for this type of control. <br>
*
*
* @author Peter Lager
*
*/
public class GImageToggleButton extends GAbstractControl {

	// Default 2-state toggle picture shared by all buttons; lazily loaded on first use
	private static PImage toggle = null;
	// Resource name of the library's default toggle picture
	private static final String TOGGLE = "toggle.png";

	// Number of toggle states N; state values run 0 .. N-1
	protected int nbrStates = 2;
	// Current state value of this button
	protected int stateValue = 0;

	// One picture per state for the normal appearance
	protected PImage[] offImage;
	// Optional per-state pictures shown while the mouse is over the button (may stay null)
	protected PImage[] overImage;

	// Current interaction status (e.g. OFF_CONTROL / OVER_CONTROL / PRESS_CONTROL)
	protected int status;

	// When true, PRESSED and RELEASED events are fired as well as CLICKED
	protected boolean reportAllButtonEvents = false;

	/**
	 * Create the library default image-toggle-button at the stated position. <br>
	 *
	 * @param theApplet the sketch this control belongs to
	 * @param p0 horizontal position of the control
	 * @param p1 vertical position of the control
	 */
	public GImageToggleButton(PApplet theApplet, float p0, float p1){
		this(theApplet, p0, p1, null, null, 1, 1);
	}

	/**
	 * Create an image-toggle-button. <br>
	 * Single row of tiles.
	 *
	 * @param theApplet the sketch this control belongs to
	 * @param p0 horizontal position of the control
	 * @param p1 vertical position of the control
	 * @param offPicture the filename of bitmap containing toggle state pictures
	 * @param nbrCols number of tiles horizontally
	 */
	public GImageToggleButton(PApplet theApplet, float p0, float p1, String offPicture, int nbrCols){
		this(theApplet, p0, p1, offPicture, null, nbrCols, 1);
	}

	/**
	 * Create an image-toggle-button. <br>
	 *
	 * @param theApplet the sketch this control belongs to
	 * @param p0 horizontal position of the control
	 * @param p1 vertical position of the control
	 * @param offPicture the filename of bitmap containing toggle state pictures
	 * @param nbrCols number of tiles horizontally
	 * @param nbrRows number of tiles vertically
	 */
	public GImageToggleButton(PApplet theApplet, float p0, float p1, String offPicture, int nbrCols, int nbrRows){
		this(theApplet, p0, p1, offPicture, null, nbrCols, nbrRows);
	}

	/**
	 * Create an image-toggle-button. <br>
	 * Single row of tiles.
	 *
	 * @param theApplet the sketch this control belongs to
	 * @param p0 horizontal position of the control
	 * @param p1 vertical position of the control
	 * @param offPicture the filename of bitmap containing toggle state pictures
	 * @param overPicture the filename of bitmap containing mouse-over button toggle state pictures
	 * @param nbrCols number of tiles horizontally
	 */
	public GImageToggleButton(PApplet theApplet, float p0, float p1, String offPicture, String overPicture, int nbrCols){
		this(theApplet, p0, p1, offPicture, overPicture, nbrCols, 1);
	}

	/**
	 * Create an image-toggle-button. <br>
	 * If the off-picture data is invalid the library default 2-state toggle image is
	 * used instead. The control is always resized to match the tile size.
	 *
	 * @param theApplet the sketch this control belongs to
	 * @param p0 horizontal position of the control
	 * @param p1 vertical position of the control
	 * @param offPicture the filename of bitmap containing toggle state pictures
	 * @param overPicture the filename of bitmap containing mouse-over button toggle state pictures
	 * @param nbrCols number of tiles horizontally
	 * @param nbrRows number of tiles vertically
	 */
	public GImageToggleButton(PApplet theApplet, float p0, float p1, String offPicture, String overPicture, int nbrCols, int nbrRows){
		super(theApplet, p0, p1);
		// Attempt to get off-control image data
		PImage temp = null;
		if(nbrCols < 1 || nbrRows < 1 || offPicture == null || null == (temp = ImageManager.loadImage(winApp, offPicture))){
			// Invalid data use default
			nbrStates = 2;
			if(toggle == null)
				toggle = ImageManager.loadImage(winApp, TOGGLE);
			offImage = ImageManager.makeTiles1D(winApp, toggle, 2, 1);
		}
		else {
			// Off-control image data valid
			nbrStates = nbrCols * nbrRows;
			offImage = ImageManager.makeTiles1D(winApp, temp, nbrCols, nbrRows);
			// Now check for over-control image data
			if(overPicture != null && null != (temp = ImageManager.loadImage(winApp, overPicture))){
				overImage = ImageManager.makeTiles1D(winApp, temp, nbrCols, nbrRows);
			}
		}
		// The control will always be resized to match the image size
		resize(offImage[0].width, offImage[0].height);
		//========================================================================
		// Setup the hotspots
		// HSalpha makes only the opaque pixels of the current state picture clickable
		hotspots = new HotSpot[]{
			new HSalpha(1, 0, 0, offImage[stateValue], PApplet.CORNER)
		};
		//========================================================================
		z = Z_SLIPPY;
		// Now register control with applet
		createEventHandler(G4P.sketchWindow, "handleToggleButtonEvents",
				new Class<?>[]{ GImageToggleButton.class, GEvent.class },
				new String[]{ "button", "event" }
		);
		registeredMethods = DRAW_METHOD | MOUSE_METHOD;
		cursorOver = HAND;
		G4P.registerControl(this);
	}

	// Draw the picture for the current state, honouring rotation and alpha;
	// the over-image set is used when present and the mouse is over the button.
	public void draw(){
		if(!visible) return;
		winApp.pushStyle();
		winApp.pushMatrix();
		// Perform the rotation
		winApp.translate(cx, cy);
		winApp.rotate(rotAngle);
		// Move matrix to line up with top-left corner
		winApp.translate(-halfWidth, -halfHeight);
		// Draw buffer
		winApp.imageMode(PApplet.CORNER);
		if(alphaLevel < 255)
			winApp.tint(TINT_FOR_ALPHA, alphaLevel);
		if(status == OVER_CONTROL && overImage != null)
			winApp.image(overImage[stateValue], 0, 0);
		else
			winApp.image(offImage[stateValue], 0, 0);
		winApp.popMatrix();
		winApp.popStyle();
	}

	/**
	 *
	 * When a mouse button is clicked on a GImageToggleButton it generates the GEvent.CLICKED event. If
	 * you also want the button to generate GEvent.PRESSED and GEvent.RELEASED events
	 * then you need the following statement.<br>
	 * <pre>btnName.fireAllEvents(true); </pre><br>
	 * <pre>
	 * public void handleToggleButtonEvents(GImageToggleButton button, GEvent event) {
	 *   if(button == btnName && event == GEvent.CLICKED){
	 *     int buttonState = btnName.stateValue();
	 *   }
	 * }
	 * </pre> <br>
	 * Where <pre><b>btnName</b></pre> is the GImageToggleButton identifier (variable name) <br><br>
	 *
	 */
	public void mouseEvent(MouseEvent event){
		if(!visible || !enabled || !available) return;

		calcTransformedOrigin(winApp.mouseX, winApp.mouseY);
		currSpot = whichHotSpot(ox, oy);
		// currSpot >= 0 means the pointer is over an opaque pixel of the current state picture
		if(currSpot >= 0 || focusIsWith == this)
			cursorIsOver = this;
		else if(cursorIsOver == this)
			cursorIsOver = null;

		switch(event.getAction()){
		case MouseEvent.PRESS:
			if(focusIsWith != this && currSpot >= 0 && z > focusObjectZ()){
				dragging = false;
				status = PRESS_CONTROL;
				takeFocus();
				if(reportAllButtonEvents)
					fireEvent(this, GEvent.PRESSED);
			}
			break;
		case MouseEvent.CLICK:
			// No need to test for isOver() since if the component has focus
			// and the mouse has not moved since MOUSE_PRESSED otherwise we
			// would not get the Java MouseEvent.MOUSE_CLICKED event
			if(focusIsWith == this){
				status = OFF_CONTROL;
				loseFocus(null);
				dragging = false;
				nextState();
				fireEvent(this, GEvent.CLICKED);
			}
			break;
		case MouseEvent.RELEASE:
			// if the mouse has moved then release focus otherwise
			// MOUSE_CLICKED will handle it
			if(focusIsWith == this && dragging){
				if(currSpot >= 0){
					// Released over the button face - still counts as a click
					nextState();
					fireEvent(this, GEvent.CLICKED);
				}
				else {
					if(reportAllButtonEvents){
						fireEvent(this, GEvent.RELEASED);
					}
				}
				dragging = false;
				loseFocus(null);
				status = OFF_CONTROL;
			}
			break;
		case MouseEvent.MOVE:
			// If dragged state will stay as PRESSED
			if(currSpot >= 0)
				status = OVER_CONTROL;
			else
				status = OFF_CONTROL;
			break;
		case MouseEvent.DRAG:
			dragging = (focusIsWith == this);
			break;
		}
	}

	/**
	 * Advance to the next state and adjust the hotspot to use the current image
	 */
	private void nextState(){
		stateValue++;
		stateValue %= nbrStates;  // wrap back to 0 after the last state
		hotspots[0].adjust(0,0,offImage[stateValue]);
	}

	/**
	 * Get the current state value of the button.
	 * @deprecated use getState()
	 */
	@Deprecated
	public int stateValue(){
		return stateValue;
	}

	/**
	 * Get the current state value of the button.
	 */
	public int getState(){
		return stateValue;
	}

	/**
	 * Change the current toggle state. <br>
	 * If the parameter is not a valid toggle state value then it
	 * is ignored and the button's state value is unchanged.
	 * @deprecated use setState(int)
	 * @param newState the new state value (0 .. nbrStates-1)
	 */
	@Deprecated
	public void stateValue(int newState){
		if(newState >= 0 && newState < nbrStates && newState != stateValue){
			stateValue = newState;
			hotspots[0].adjust(0,0,offImage[stateValue]);
			bufferInvalid = true;
		}
	}

	/**
	 * Change the current toggle state. <br>
	 * If the parameter is not a valid toggle state value then it
	 * is ignored and the button's state value is unchanged.
	 * @param newState the new state value (0 .. nbrStates-1)
	 */
	public void setState(int newState){
		if(newState >= 0 && newState < nbrStates && newState != stateValue){
			stateValue = newState;
			hotspots[0].adjust(0,0,offImage[stateValue]);
			bufferInvalid = true;
		}
	}

	/**
	 * If the parameter is true all 3 event types are generated, if false
	 * only CLICKED events are generated (default behaviour). <br>
	 * For this toggle control I can't see the need for anything but
	 * CLICKED events
	 * @param all true to fire PRESSED and RELEASED as well as CLICKED
	 */
	public void fireAllEvents(boolean all){
		reportAllButtonEvents = all;
	}
}
=======
/*
Part of the G4P library for Processing
http://www.lagers.org.uk/g4p/index.html
http://sourceforge.net/projects/g4p/files/?source=navbar
Copyright (c) 2012 Peter Lager
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General
Public License along with this library; if not, write to the
Free Software Foundation, Inc., 59 Temple Place, Suite 330,
Boston, MA 02111-1307 USA
*/
package g4p_controls;
import g4p_controls.HotSpot.HSalpha;
import processing.core.PApplet;
import processing.core.PImage;
import processing.event.MouseEvent;
/**
* Buttons created from this class have 2 or more toggle states. If the number of states
* is N then the button's value will be in the range 0 to N-1. Most toggle buttons will
* have just two states and these have values 0 and 1. <br>
* Clicking on the button advances the state by one, restarting at zero after the last
* state. <br>
* Each state must have its own 'picture' and the user must supply these as a tiled image
* where the pictures are tiled in 1D or 2D arrangement without 'empty space' around the
* tiles. <br>
* If for any reason the library is unable to use the specified graphics then it will
* provide a default two state toggle switch. <br>
* It is also possible to provide an over-button image set for when the mouse moves
* over the button - this is optional. <br>
* The button control will always be resized to suit the state picture size (tile size). <br>
* The mouse is considered to be over the button if its position is over an opaque pixel
* in the state picture. Since transparent pixels are not included then the button shape
* can be different for each state. <br>
*
*
*
* Three types of event can be generated :- <br>
* <b> GEvent.PRESSED GEvent.RELEASED GEvent.CLICKED </b><br>
*
* To simplify event handling the button only fires off CLICKED events
* when the mouse button is pressed and released over the button face
* (the default behaviour). <br>
*
* Using <pre>button1.fireAllEvents(true);</pre> enables the other 2 events
* for button <b>button1</b>. A PRESSED event is created if the mouse button
* is pressed down over the button face, the CLICKED event is then generated
* if the mouse button is released over the button face. Releasing the
* button off the button face creates a RELEASED event. This is included for
* completeness since it is unlikely you will need to detect these events
* for this type of control. <br>
*
*
* @author Peter Lager
*
*/
public class GImageToggleButton extends GAbstractControl {

	// Library default toggle image, shared by all instances and lazily loaded
	// the first time a button falls back to it.
	private static PImage toggle = null;
	// Filename of the library default two-state toggle picture.
	private static final String TOGGLE = "toggle.png";

	// Number of toggle states (one per image tile); defaults to 2.
	protected int nbrStates = 2;
	// Current state index, always in the range [0, nbrStates).
	protected int stateValue = 0;

	// One image per state for the normal (mouse-off) appearance.
	protected PImage[] offImage;
	// Optional one-image-per-state set shown while the mouse is over the
	// control; may be null, in which case offImage is always drawn.
	protected PImage[] overImage;

	// Current interaction status (e.g. OFF_CONTROL / OVER_CONTROL / PRESS_CONTROL).
	protected int status;
	// When true, PRESSED and RELEASED events are fired in addition to CLICKED.
	protected boolean reportAllButtonEvents = false;

	/**
	 * Create the library default image-toggle-button at the stated position. <br>
	 *
	 * @param theApplet
	 * @param p0 horizontal position of the control
	 * @param p1 vertical position of the control
	 */
	public GImageToggleButton(PApplet theApplet, float p0, float p1){
		this(theApplet, p0, p1, null, null, 1, 1);
	}

	/**
	 * Create an image-toggle-button. <br>
	 * Single row of tiles.
	 *
	 * @param theApplet
	 * @param p0 horizontal position of the control
	 * @param p1 vertical position of the control
	 * @param offPicture the filename of bitmap containing toggle state pictures
	 * @param nbrCols number of tiles horizontally
	 */
	public GImageToggleButton(PApplet theApplet, float p0, float p1, String offPicture, int nbrCols){
		this(theApplet, p0, p1, offPicture, null, nbrCols, 1);
	}

	/**
	 * Create an image-toggle-button. <br>
	 *
	 * @param theApplet
	 * @param p0 horizontal position of the control
	 * @param p1 vertical position of the control
	 * @param offPicture the filename of bitmap containing toggle state pictures
	 * @param nbrCols number of tiles horizontally
	 * @param nbrRows number of tiles vertically
	 */
	public GImageToggleButton(PApplet theApplet, float p0, float p1, String offPicture, int nbrCols, int nbrRows){
		this(theApplet, p0, p1, offPicture, null, nbrCols, nbrRows);
	}

	/**
	 * Create an image-toggle-button. <br>
	 * Single row of tiles.
	 *
	 * @param theApplet
	 * @param p0 horizontal position of the control
	 * @param p1 vertical position of the control
	 * @param offPicture the filename of bitmap containing toggle state pictures
	 * @param overPicture the filename of bitmap containing mouse-over button toggle state pictures
	 * @param nbrCols number of tiles horizontally
	 */
	public GImageToggleButton(PApplet theApplet, float p0, float p1, String offPicture, String overPicture, int nbrCols){
		this(theApplet, p0, p1, offPicture, overPicture, nbrCols, 1);
	}

	/**
	 * Create an image-toggle-button. <br>
	 * The supplied bitmap is split into nbrCols x nbrRows tiles, one tile per
	 * toggle state. If the off-picture data is invalid the library default
	 * two-state toggle image is used instead.
	 *
	 * @param theApplet
	 * @param p0 horizontal position of the control
	 * @param p1 vertical position of the control
	 * @param offPicture the filename of bitmap containing toggle state pictures
	 * @param overPicture the filename of bitmap containing mouse-over button toggle state pictures
	 * @param nbrCols number of tiles horizontally
	 * @param nbrRows number of tiles vertically
	 */
	public GImageToggleButton(PApplet theApplet, float p0, float p1, String offPicture, String overPicture, int nbrCols, int nbrRows){
		super(theApplet, p0, p1);
		// Attempt to get off-control image data
		PImage temp = null;
		if(nbrCols < 1 || nbrRows < 1 || offPicture == null || null == (temp = ImageManager.loadImage(winApp, offPicture))){
			// Invalid data use default
			nbrStates = 2;
			if(toggle == null)
				toggle = ImageManager.loadImage(winApp, TOGGLE);
			offImage = ImageManager.makeTiles1D(winApp, toggle, 2, 1);
		}
		else {
			// Off-control image data valid
			nbrStates = nbrCols * nbrRows;
			offImage = ImageManager.makeTiles1D(winApp, temp, nbrCols, nbrRows);
			// Now check for over-control image data; overImage stays null
			// when no valid over-picture was supplied
			if(overPicture != null && null != (temp = ImageManager.loadImage(winApp, overPicture))){
				overImage = ImageManager.makeTiles1D(winApp, temp, nbrCols, nbrRows);
			}
		}
		// The control will always be resized to match the image size
		resize(offImage[0].width, offImage[0].height);
		//========================================================================
		// Setup the hotspots (alpha hotspot follows the current state image)
		hotspots = new HotSpot[]{
				new HSalpha(1, 0, 0, offImage[stateValue], PApplet.CORNER)
		};
		//========================================================================
		z = Z_SLIPPY;
		// Now register control with applet
		createEventHandler(G4P.sketchWindow, "handleToggleButtonEvents",
				new Class<?>[]{ GImageToggleButton.class, GEvent.class },
				new String[]{ "button", "event" }
		);
		registeredMethods = DRAW_METHOD | MOUSE_METHOD;
		cursorOver = HAND;
		G4P.registerControl(this);
	}

	/**
	 * Draw the image for the current toggle state, applying the control's
	 * rotation and alpha level. Uses the over-image set (when available)
	 * while the mouse is over the control.
	 */
	public void draw(){
		if(!visible) return;
		winApp.pushStyle();
		winApp.pushMatrix();
		// Perform the rotation
		winApp.translate(cx, cy);
		winApp.rotate(rotAngle);
		// Move matrix to line up with top-left corner
		winApp.translate(-halfWidth, -halfHeight);
		// Draw buffer
		winApp.imageMode(PApplet.CORNER);
		if(alphaLevel < 255)
			winApp.tint(TINT_FOR_ALPHA, alphaLevel);
		if(status == OVER_CONTROL && overImage != null)
			winApp.image(overImage[stateValue], 0, 0);
		else
			winApp.image(offImage[stateValue], 0, 0);
		winApp.popMatrix();
		winApp.popStyle();
	}

	/**
	 * When a mouse button is clicked on a GImageToggleButton it generates the GEvent.CLICKED event. If
	 * you also want the button to generate GEvent.PRESSED and GEvent.RELEASED events
	 * then you need the following statement.<br>
	 * <pre>btnName.fireAllEvents(true); </pre><br>
	 * <pre>
	 * void handleToggleButtonEvents(GImageToggleButton button, GEvent event) {
	 *   if(button == btnName && event == GEvent.CLICKED){
	 *     int buttonState = btnName.stateValue();
	 *   }
	 * }
	 * </pre> <br>
	 * Where <pre><b>btnName</b></pre> is the GImageToggleButton identifier (variable name) <br><br>
	 *
	 * @param event the mouse event forwarded by Processing
	 */
	public void mouseEvent(MouseEvent event){
		if(!visible || !enabled || !available) return;
		calcTransformedOrigin(winApp.mouseX, winApp.mouseY);
		currSpot = whichHotSpot(ox, oy);
		if(currSpot >= 0 || focusIsWith == this)
			cursorIsOver = this;
		else if(cursorIsOver == this)
			cursorIsOver = null;
		switch(event.getAction()){
		case MouseEvent.PRESS:
			if(focusIsWith != this && currSpot >= 0 && z > focusObjectZ()){
				dragging = false;
				status = PRESS_CONTROL;
				takeFocus();
				if(reportAllButtonEvents)
					fireEvent(this, GEvent.PRESSED);
			}
			break;
		case MouseEvent.CLICK:
			// No need to test for isOver() since if the component has focus
			// and the mouse has not moved since MOUSE_PRESSED otherwise we
			// would not get the Java MouseEvent.MOUSE_CLICKED event
			if(focusIsWith == this){
				status = OFF_CONTROL;
				loseFocus(null);
				dragging = false;
				nextState();
				fireEvent(this, GEvent.CLICKED);
			}
			break;
		case MouseEvent.RELEASE:
			// if the mouse has moved then release focus otherwise
			// MOUSE_CLICKED will handle it
			if(focusIsWith == this && dragging){
				if(currSpot >= 0){
					nextState();
					fireEvent(this, GEvent.CLICKED);
				}
				else {
					// Released away from the control: no state change
					if(reportAllButtonEvents){
						fireEvent(this, GEvent.RELEASED);
					}
				}
				dragging = false;
				loseFocus(null);
				status = OFF_CONTROL;
			}
			break;
		case MouseEvent.MOVE:
			// If dragged state will stay as PRESSED
			if(currSpot >= 0)
				status = OVER_CONTROL;
			else
				status = OFF_CONTROL;
			break;
		case MouseEvent.DRAG:
			dragging = (focusIsWith == this);
			break;
		}
	}

	/**
	 * Advance to the next state and adjust the hotspot to use the current image
	 */
	private void nextState(){
		stateValue++;
		stateValue %= nbrStates; // wrap round to state 0 after the last state
		hotspots[0].adjust(0,0,offImage[stateValue]);
	}

	/**
	 * Get the current state value of the button.
	 * @deprecated use getState()
	 */
	@Deprecated
	public int stateValue(){
		return stateValue;
	}

	/**
	 * Get the current state value of the button.
	 */
	public int getState(){
		return stateValue;
	}

	/**
	 * Change the current toggle state. <br>
	 * If the parameter is not a valid toggle state value then it
	 * is ignored and the button's state value is unchanged.
	 * @deprecated use setState(int)
	 * @param newState
	 */
	@Deprecated
	public void stateValue(int newState){
		if(newState >= 0 && newState < nbrStates && newState != stateValue){
			stateValue = newState;
			hotspots[0].adjust(0,0,offImage[stateValue]);
			bufferInvalid = true;
		}
	}

	/**
	 * Change the current toggle state. <br>
	 * If the parameter is not a valid toggle state value then it
	 * is ignored and the button's state value is unchanged.
	 * @param newState
	 */
	public void setState(int newState){
		if(newState >= 0 && newState < nbrStates && newState != stateValue){
			stateValue = newState;
			hotspots[0].adjust(0,0,offImage[stateValue]);
			bufferInvalid = true;
		}
	}

	/**
	 * If the parameter is true all 3 event types are generated, if false
	 * only CLICKED events are generated (default behaviour). <br>
	 * For this toggle control I can't see the need for anything but
	 * CLICKED events
	 * @param all
	 */
	public void fireAllEvents(boolean all){
		reportAllButtonEvents = all;
	}
}
>>>>>>> origin/master
| |
// Copyright (C) 2020 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.plugins.codeowners.backend;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static java.util.Objects.requireNonNull;
import com.google.auto.value.AutoValue;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
import java.util.Arrays;
import java.util.Set;
/**
* A code owner set defines a set of code owners for a set of path expressions.
*
* <p>The code owners own the files that match any of the path expressions.
*
* <p>Code owner sets are stored in {@link CodeOwnerConfig}s which define code owners for the folder
* in which they are stored.
*
* <p>The path expressions are relative to the folder of the {@link CodeOwnerConfig} that contains
* the owner set.
*
* <p>If the set of path expressions is empty the code owners apply for all files in the folder of
* the {@link CodeOwnerConfig} (including files in sub folders).
*/
@AutoValue
public abstract class CodeOwnerSet {
  /**
   * Gets whether global code owners (code owners from code owner sets without path expression in
   * the same code owner config) and code owners from parent code owner configs (code owner configs
   * in parent folders) should be ignored.
   *
   * <p>Code owner configs are organized hierarchically, e.g. the code owner config at "/" is the
   * parent config of the code owner config at "/foo" which in turn is the parent config of the code
   * owner config at "/foo/bar". Code owners from the parent config can be ignored by setting {@link
   * CodeOwnerConfig#ignoreParentCodeOwners()} on code owner config level.
   *
   * <p>In addition there are 2 hierarchy levels within each code owner config. 1. global code
   * owners applying to all files in the folder (represented by code owner sets without path
   * expressions), 2. per file code owners (represented by code owner sets with path expressions).
   * On per file level it is possible to ignore the global code owners (code owner sets without path
   * expressions) by setting {@link #ignoreGlobalAndParentCodeOwners()} on the code owner set. If
   * {@link #ignoreGlobalAndParentCodeOwners()} is set, implicitly for matching files also all code
   * owners inherited from parent code owner configs are ignored.
   *
   * <p>If a matching code owner set ignores global and parent code owners, matching sibling code
   * owner sets (other code owner sets with matching path expressions in the same code owner config)
   * are still honored.
   */
  public abstract boolean ignoreGlobalAndParentCodeOwners();

  /** Path expressions that match the files that are owned by the {@link #codeOwners()}. */
  public abstract ImmutableSet<String> pathExpressions();

  /** Gets references to the code owner configs that should be imported. */
  public abstract ImmutableSet<CodeOwnerConfigReference> imports();

  /** Gets the code owners of this code owner set. */
  public abstract ImmutableSet<CodeOwnerReference> codeOwners();

  /** Gets the annotations of the {@link #codeOwners()}. */
  public abstract ImmutableMultimap<CodeOwnerReference, CodeOwnerAnnotation> annotations();

  /**
   * Creates a builder from this code owner set.
   *
   * @return builder that was created from this code owner set
   */
  public abstract Builder toBuilder();

  /** Creates a builder for a {@link CodeOwnerSet}. */
  public static CodeOwnerSet.Builder builder() {
    return new AutoValue_CodeOwnerSet.Builder()
        .setIgnoreGlobalAndParentCodeOwners(false)
        .setPathExpressions(ImmutableSet.of());
  }

  /**
   * Creates a {@link CodeOwnerSet} instance without path expressions.
   *
   * @param codeOwners the code owners of the code owner set
   */
  public static CodeOwnerSet createWithoutPathExpressions(
      ImmutableSet<CodeOwnerReference> codeOwners) {
    return builder().setCodeOwners(codeOwners).build();
  }

  /**
   * Creates a {@link CodeOwnerSet} instance without path expressions.
   *
   * @param emails the emails of the code owners
   */
  public static CodeOwnerSet createWithoutPathExpressions(String... emails) {
    return createWithoutPathExpressions(
        Arrays.stream(emails).map(CodeOwnerReference::create).collect(toImmutableSet()));
  }

  @AutoValue.Builder
  public abstract static class Builder {
    /**
     * Sets whether global code owners (code owners from code owner sets without path expression in
     * the same code owner config) and code owners from parent code owner configs (code owner
     * configs in parent folders) should be ignored.
     *
     * @param ignoreGlobalAndParentCodeOwners whether global code owners and code owners from parent
     *     code owner configs should be ignored
     * @return the Builder instance for chaining calls
     * @see CodeOwnerSet#ignoreGlobalAndParentCodeOwners()
     */
    public abstract Builder setIgnoreGlobalAndParentCodeOwners(
        boolean ignoreGlobalAndParentCodeOwners);

    /**
     * Sets that global code owners (code owners from code owner sets without path expression in the
     * same code owner config) and code owners from parent code owner configs (code owner configs in
     * parent folders) should be ignored.
     *
     * @return the Builder instance for chaining calls
     * @see CodeOwnerSet#ignoreGlobalAndParentCodeOwners()
     */
    public Builder setIgnoreGlobalAndParentCodeOwners() {
      return setIgnoreGlobalAndParentCodeOwners(true);
    }

    /**
     * Sets the path expressions that match the files that are owned by the code owners.
     *
     * @param pathExpressions the path expressions
     * @return the Builder instance for chaining calls
     */
    public abstract Builder setPathExpressions(ImmutableSet<String> pathExpressions);

    /** Gets a builder to add path expressions. */
    abstract ImmutableSet.Builder<String> pathExpressionsBuilder();

    /**
     * Adds a path expression.
     *
     * @param pathExpression path expression that should be added
     * @return the Builder instance for chaining calls
     */
    public Builder addPathExpression(String pathExpression) {
      pathExpressionsBuilder().add(requireNonNull(pathExpression, "pathExpression"));
      return this;
    }

    /**
     * Sets the imports of this code owner set.
     *
     * @param codeOwnerConfigReferences references to the code owner configs that should be imported
     * @return the Builder instance for chaining calls
     */
    public abstract Builder setImports(
        ImmutableSet<CodeOwnerConfigReference> codeOwnerConfigReferences);

    /** Gets a builder to add imports. */
    abstract ImmutableSet.Builder<CodeOwnerConfigReference> importsBuilder();

    /**
     * Adds an import.
     *
     * @param codeOwnerConfigReference reference to the code owner config that should be imported
     * @return the Builder instance for chaining calls
     */
    public Builder addImport(CodeOwnerConfigReference codeOwnerConfigReference) {
      importsBuilder().add(requireNonNull(codeOwnerConfigReference, "codeOwnerConfigReference"));
      return this;
    }

    /**
     * Sets the code owners of this code owner set.
     *
     * @param codeOwners the code owners of this code owner set
     * @return the Builder instance for chaining calls
     */
    public abstract Builder setCodeOwners(ImmutableSet<CodeOwnerReference> codeOwners);

    /** Gets a builder to add code owner references. */
    abstract ImmutableSet.Builder<CodeOwnerReference> codeOwnersBuilder();

    /**
     * Adds a code owner.
     *
     * @param codeOwnerReference reference to the code owner
     * @return the Builder instance for chaining calls
     */
    public Builder addCodeOwner(CodeOwnerReference codeOwnerReference) {
      codeOwnersBuilder().add(requireNonNull(codeOwnerReference, "codeOwnerReference"));
      return this;
    }

    /** Gets a builder to add code owner annotations. */
    abstract ImmutableMultimap.Builder<CodeOwnerReference, CodeOwnerAnnotation>
        annotationsBuilder();

    /**
     * Adds an annotation for a code owner.
     *
     * @param email email of the code owner for which the annotation should be added
     * @param annotation annotation that should be added
     * @return the Builder instance for chaining calls
     */
    public Builder addAnnotation(String email, CodeOwnerAnnotation annotation) {
      return addAnnotations(CodeOwnerReference.create(email), ImmutableSet.of(annotation));
    }

    /**
     * Adds annotations for a code owner.
     *
     * @param codeOwnerReference reference to the code owner for which the annotations should be
     *     added
     * @param annotations annotations that should be added
     * @return the Builder instance for chaining calls
     */
    public Builder addAnnotations(
        CodeOwnerReference codeOwnerReference, Set<CodeOwnerAnnotation> annotations) {
      requireNonNull(codeOwnerReference, "codeOwnerReference");
      requireNonNull(annotations, "annotations");
      annotationsBuilder().putAll(codeOwnerReference, annotations);
      return this;
    }

    /**
     * Adds a code owner for the given email.
     *
     * @param email email of the code owner
     * @return the Builder instance for chaining calls
     */
    public Builder addCodeOwnerEmail(String email) {
      return addCodeOwner(CodeOwnerReference.create(requireNonNull(email, "codeOwnerEmail")));
    }

    /**
     * Builds the {@link CodeOwnerSet} instance.
     *
     * <p>Validates the invariants of a code owner set: ignoring global/parent code owners and
     * importing other configs are only allowed for code owner sets that have path expressions,
     * and imports must use the {@code GLOBAL_CODE_OWNER_SETS_ONLY} import mode.
     */
    public CodeOwnerSet build() {
      CodeOwnerSet codeOwnerSet = autoBuild();
      checkState(
          !(codeOwnerSet.ignoreGlobalAndParentCodeOwners()
              && codeOwnerSet.pathExpressions().isEmpty()),
          "ignoreGlobalAndParentCodeOwners = true is not allowed for code owner set without path"
              + " expressions");
      checkState(
          !(!codeOwnerSet.imports().isEmpty() && codeOwnerSet.pathExpressions().isEmpty()),
          "imports are not allowed for code owner set without path expressions");
      checkState(
          codeOwnerSet.imports().stream()
              .allMatch(
                  codeOwnerConfigReference ->
                      CodeOwnerConfigImportMode.GLOBAL_CODE_OWNER_SETS_ONLY.equals(
                          codeOwnerConfigReference.importMode())),
          // Fixed duplicated word in the original message ("must have have")
          "imports in code owner set must have import mode %s",
          CodeOwnerConfigImportMode.GLOBAL_CODE_OWNER_SETS_ONLY.name());
      return codeOwnerSet;
    }

    abstract CodeOwnerSet autoBuild();
  }
}
| |
package org.dainst.gazetteer.search;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.geo.builders.ShapeBuilders;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.GeoBoundingBoxQueryBuilder;
import org.elasticsearch.index.query.GeoDistanceQueryBuilder;
import org.elasticsearch.index.query.GeoPolygonQueryBuilder;
import org.elasticsearch.index.query.GeoShapeQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.Operator;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder.FilterFunctionBuilder;
import org.elasticsearch.index.query.functionscore.ScriptScoreFunctionBuilder;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.GeoDistanceSortBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.locationtech.jts.geom.Coordinate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Fluent builder around an Elasticsearch bool query for searching places in the
 * "gazetteer" index. Configure the query via the chainable methods, then call
 * {@link #execute()} (or the scroll variants) to obtain the matching place ids.
 */
public class ElasticSearchPlaceQuery {

    private static final Logger logger = LoggerFactory.getLogger(ElasticSearchPlaceQuery.class);

    private RestHighLevelClient client;
    private SearchSourceBuilder searchSourceBuilder;
    private BoolQueryBuilder queryBuilder;
    // Total hits of the last executed query; -1 until a query has been run
    private long totalHits = -1;
    private Aggregations aggregations;
    private boolean childrenBoost = false;
    private String lastScrollId;

    public ElasticSearchPlaceQuery(RestHighLevelClient client) {
        this.client = client;
        this.searchSourceBuilder = new SearchSourceBuilder();
        this.queryBuilder = QueryBuilders.boolQuery();
    }

    /**
     * Adds a general-purpose search over the "all" field. A null, empty or "*"
     * query matches everything.
     *
     * @param query the user supplied query string
     * @return this query for chaining
     */
    public ElasticSearchPlaceQuery metaSearch(String query) {
        if (query == null || "".equals(query) || "*".equals(query))
            listAll();
        // _id can't be added to all, so it's appended here, titles are
        // added in order to boost them and prevent their score from being
        // diminished by norms when occurring together with other fields in all
        else {
            String queryString = "(" + query + ")";
            queryString += " OR _id:\"" + query + "\"";
            if (!query.contains(":")) {
                queryString += " OR prefName.title:\"" + query + "\"^2";
                queryString += " OR names.title:\"" + query + "\"";
            }
            queryBuilder.must(QueryBuilders.queryStringQuery(queryString).defaultField("all")
                    .defaultOperator(Operator.AND));
        }
        return this;
    }

    /** Adds a raw JSON query (Elasticsearch query DSL) as a must clause. */
    public ElasticSearchPlaceQuery extendedSearch(String jsonQuery) {
        queryBuilder.must(QueryBuilders.wrapperQuery(jsonQuery));
        return this;
    }

    /** Adds a Lucene query-string search over the "all" field. */
    public ElasticSearchPlaceQuery queryStringSearch(String query) {
        queryBuilder.must(QueryBuilders.queryStringQuery(query).defaultField("all"));
        return this;
    }

    /** Adds a fuzzy match on the "all" field. */
    public ElasticSearchPlaceQuery fuzzySearch(String query) {
        queryBuilder.must(QueryBuilders.fuzzyQuery("all", query));
        return this;
    }

    /** Adds an autocomplete (prefix) match on the preferred and alternative titles. */
    public ElasticSearchPlaceQuery prefixSearch(String query) {
        // autocomplete fields are indexed lower-cased
        query = query.toLowerCase();
        queryBuilder.must(QueryBuilders.boolQuery().should(QueryBuilders.termQuery("prefName.title.autocomplete", query))
                .should(QueryBuilders.termQuery("names.title.autocomplete", query)));
        return this;
    }

    /**
     * Restricts results to places within the given distance of a point.
     *
     * @param lon longitude of the center point
     * @param lat latitude of the center point
     * @param distance radius in kilometers
     * @return this query for chaining
     */
    public ElasticSearchPlaceQuery geoDistanceSearch(double lon, double lat, int distance) {
        GeoDistanceQueryBuilder geoDistanceQueryBuilder = QueryBuilders.geoDistanceQuery("prefLocation.coordinates");
        geoDistanceQueryBuilder.distance(Integer.toString(distance) + "km");
        geoDistanceQueryBuilder.point(lat, lon);
        queryBuilder.must(geoDistanceQueryBuilder);
        return this;
    }

    /** Enables score boosting of places that have many children (see getSearchRequest()). */
    public void addBoostForChildren() {
        childrenBoost = true;
    }

    /** Adds a sort on the given field; any order value other than "asc" sorts descending. */
    public ElasticSearchPlaceQuery addSort(String field, String order) {
        if ("asc".equals(order))
            searchSourceBuilder.sort(field, SortOrder.ASC);
        else
            searchSourceBuilder.sort(field, SortOrder.DESC);
        return this;
    }

    /** Adds a terms aggregation (top 50 buckets) named after the given field. */
    public ElasticSearchPlaceQuery addTermsAggregation(String field) {
        TermsAggregationBuilder aggregation = AggregationBuilders.terms(field).field(field).size(50);
        searchSourceBuilder.aggregation(aggregation);
        return this;
    }

    /** Sorts results by ascending distance from the given point. */
    public ElasticSearchPlaceQuery addGeoDistanceSort(double lon, double lat) {
        GeoDistanceSortBuilder sortBuilder = SortBuilders.geoDistanceSort("prefLocation.coordinates", new GeoPoint(lat, lon));
        sortBuilder.order(SortOrder.ASC);
        searchSourceBuilder.sort(sortBuilder);
        return this;
    }

    /** Adds a query-string filter clause (does not influence scoring). */
    public ElasticSearchPlaceQuery addFilter(String filterQuery) {
        queryBuilder.filter(QueryBuilders.queryStringQuery(filterQuery));
        return this;
    }

    /** Restricts results to places inside the given bounding box. */
    public ElasticSearchPlaceQuery addBBoxFilter(double northLat, double eastLon, double southLat, double westLon) {
        GeoBoundingBoxQueryBuilder boundingBoxQueryBuilder = QueryBuilders
                .geoBoundingBoxQuery("prefLocation.coordinates")
                .setCorners(northLat, westLon, southLat, eastLon);
        queryBuilder.filter(boundingBoxQueryBuilder);
        return this;
    }

    /**
     * Restricts results to places whose coordinates lie inside the polygon or
     * whose shape intersects it.
     *
     * @param coordinates polygon vertices as [lon, lat] pairs
     * @return this query for chaining
     */
    public ElasticSearchPlaceQuery addPolygonFilter(double[][] coordinates) {
        List<GeoPoint> points = new ArrayList<GeoPoint>();
        List<Coordinate> coords = new ArrayList<Coordinate>();
        for (double[] lngLat : coordinates) {
            // GeoPoint expects (lat, lon) while the input is (lon, lat)
            points.add(new GeoPoint(lngLat[1], lngLat[0]));
            coords.add(new Coordinate(lngLat[0], lngLat[1]));
        }
        try {
            GeoPolygonQueryBuilder geoPolygonQueryBuilder = QueryBuilders.geoPolygonQuery("prefLocation.coordinates", points);
            GeoShapeQueryBuilder geoShapeQueryBuilder = QueryBuilders.geoShapeQuery("prefLocation.shape", ShapeBuilders.newPolygon(coords));
            geoShapeQueryBuilder.relation(ShapeRelation.INTERSECTS);
            BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
            boolQueryBuilder.should(geoPolygonQueryBuilder).should(geoShapeQueryBuilder);
            queryBuilder.filter(boolQueryBuilder);
        } catch (IOException e) {
            // deepToString renders the nested array contents; plain concatenation
            // would only print the array's identity hash
            logger.error("Failed to create polygon filter query for coordinates: {}",
                    java.util.Arrays.deepToString(coordinates), e);
        }
        return this;
    }

    /** Makes the query match all documents. */
    public void listAll() {
        queryBuilder.must(QueryBuilders.matchAllQuery());
    }

    /** Sets the result offset (pagination). */
    public ElasticSearchPlaceQuery offset(int offset) {
        searchSourceBuilder.from(offset);
        return this;
    }

    /** Sets the maximum number of results (pagination). */
    public ElasticSearchPlaceQuery limit(int limit) {
        searchSourceBuilder.size(limit);
        return this;
    }

    /** Returns the total hit count of the last executed query, or -1 if none was run. */
    public long getHits() {
        return totalHits;
    }

    /** Executes the query without starting a scroll context. */
    public String[] execute() {
        return execute(false);
    }

    /**
     * Executes the query.
     *
     * @param startScroll if true, a scroll context valid for 30 minutes is
     *     opened and its id made available via {@link #getScrollId()}
     * @return the ids of the matching places, or an empty array on failure
     */
    public String[] execute(boolean startScroll) {
        SearchRequest request = getSearchRequest();
        if (startScroll) request.scroll(TimeValue.timeValueMinutes(30L));
        // Pass the builder itself so toString() is only evaluated when
        // debug logging is actually enabled
        logger.debug("Query: {}", queryBuilder);
        try {
            SearchResponse response = client.search(request, RequestOptions.DEFAULT);
            aggregations = response.getAggregations();
            if (startScroll) lastScrollId = response.getScrollId();
            return responseAsList(response);
        } catch (IOException e) {
            logger.error("Error while executing search query", e);
            return new String[0];
        }
    }

    /**
     * Fetches the next batch of results for an existing scroll context.
     *
     * @param scrollId the scroll id returned by a previous execution
     * @return the ids of the next batch of places, or an empty array on failure
     */
    public String[] execute(String scrollId) {
        SearchScrollRequest request = new SearchScrollRequest(scrollId);
        request.scroll(TimeValue.timeValueMinutes(30L));
        try {
            SearchResponse response = client.scroll(request, RequestOptions.DEFAULT);
            aggregations = response.getAggregations();
            lastScrollId = response.getScrollId();
            return responseAsList(response);
        } catch (IOException e) {
            logger.error("Error while executing search query", e);
            return new String[0];
        }
    }

    /** Returns the scroll id of the last scrolled execution, or null. */
    public String getScrollId() {
        return lastScrollId;
    }

    /** Returns the aggregations of the last executed query, or null. */
    public Aggregations getTermsAggregations() {
        return aggregations;
    }

    /** Builds the final search request, optionally wrapping the query in the children boost. */
    private SearchRequest getSearchRequest() {
        if (childrenBoost)
            searchSourceBuilder.query(addChildrenBoostScriptFunction(queryBuilder));
        else
            searchSourceBuilder.query(queryBuilder);
        SearchRequest request = new SearchRequest("gazetteer");
        request.source(searchSourceBuilder);
        request.types("place");
        return request;
    }

    /** Extracts the document ids from a search response and records the total hit count. */
    private String[] responseAsList(SearchResponse response) {
        SearchHits hits = response.getHits();
        totalHits = hits.getTotalHits();
        String[] result = new String[hits.getHits().length];
        for (int i = 0; i < result.length; i++) {
            result[i] = hits.getAt(i).getId();
        }
        return result;
    }

    /** Wraps the query in a function score so that places with many children rank higher. */
    private FunctionScoreQueryBuilder addChildrenBoostScriptFunction(BoolQueryBuilder query) {
        // places with many children should get a higher score
        Script script = new Script(ScriptType.INLINE, "painless",
                "_score + (1.0 - 1.0 / ( 0.001 * doc['children'].value + 1.0 ) )",
                new HashMap<String, Object>());
        FilterFunctionBuilder[] functions = {
                new FunctionScoreQueryBuilder.FilterFunctionBuilder(new ScriptScoreFunctionBuilder(script))
        };
        return QueryBuilders.functionScoreQuery(query, functions);
    }
}
| |
/*
* Copyright 2015 - 2017 Xyanid
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*/
package de.saxsys.svgfx.core;
import javafx.scene.Group;
import javafx.scene.paint.LinearGradient;
import javafx.scene.shape.Ellipse;
import javafx.scene.shape.Rectangle;
import javafx.scene.shape.SVGPath;
import org.junit.Test;
import org.xml.sax.SAXParseException;
import java.io.IOException;
import java.net.URL;
import static de.saxsys.svgfx.core.TestUtil.MINIMUM_DEVIATION;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
/**
* @author Xyanid on 05.10.2015.
*/
@SuppressWarnings ("ConstantConditions")
public final class BlackBoxTest {
// region Description
@Test
public void anAbsoluteGradientWithNoTransformAPathWithNoTransformAGroupWithNoTransformWillCreateTheCorrectGradientCoordinates()
throws SAXParseException, IOException {
final LinearGradient gradient = getLinearGradient("de/saxsys/svgfx/core/parent-0-path-0-gradient-0-absolute.svg");
assertLinearGradient(gradient);
}
@Test
public void aRelativeGradientWithNoTransformAPathWithNoTransformAGroupWithNoTransformWillCreateTheCorrectGradientCoordinates()
throws SAXParseException, IOException {
final LinearGradient gradient = getLinearGradient("de/saxsys/svgfx/core/parent-0-path-0-gradient-0-relative.svg");
assertLinearGradient(gradient);
}
@Test
public void anAbsoluteGradientWithTransformAPathWithNoTransformAGroupWithNoTransformWillCreateTheCorrectGradientCoordinates()
throws SAXParseException, IOException {
final LinearGradient gradient = getLinearGradient("de/saxsys/svgfx/core/parent-0-path-0-gradient-1-absolute.svg");
assertLinearGradient(gradient);
}
@Test
public void aRelativeGradientWithTransformAPathWithNoTransformAGroupWithNoTransformWillCreateTheCorrectGradientCoordinates()
throws SAXParseException, IOException {
final LinearGradient gradient = getLinearGradient("de/saxsys/svgfx/core/parent-0-path-0-gradient-1-relative.svg");
assertLinearGradient(gradient);
}
@Test
public void anAbsoluteGradientWithNoTransformAPathWithTransformAGroupWithNoTransformWillCreateTheCorrectGradientCoordinates()
throws SAXParseException, IOException {
final LinearGradient gradient = getLinearGradient("de/saxsys/svgfx/core/parent-0-path-1-gradient-0-absolute.svg");
assertLinearGradient(gradient);
}
@Test
public void aRelativeGradientWithNoTransformAPathWithTransformAGroupWithNoTransformWillCreateTheCorrectGradientCoordinates()
throws SAXParseException, IOException {
final LinearGradient gradient = getLinearGradient("de/saxsys/svgfx/core/parent-0-path-1-gradient-0-relative.svg");
assertLinearGradient(gradient);
}
@Test
public void anAbsoluteGradientWithTransformAPathWithTransformAGroupWithNoTransformWillCreateTheCorrectGradientCoordinates()
throws SAXParseException, IOException {
final LinearGradient gradient = getLinearGradient("de/saxsys/svgfx/core/parent-0-path-1-gradient-1-absolute.svg");
assertLinearGradient(gradient);
}
@Test
public void aRelativeGradientWithTransformAPathWithTransformAGroupWithNoTransformWillCreateTheCorrectGradientCoordinates()
throws SAXParseException, IOException {
final LinearGradient gradient = getLinearGradient("de/saxsys/svgfx/core/parent-0-path-1-gradient-1-relative.svg");
assertLinearGradient(gradient);
}
@Test
public void anAbsoluteGradientWithNoTransformAPathWithNoTransformAGroupWithTransformWillCreateTheCorrectGradientCoordinates()
throws SAXParseException, IOException {
final LinearGradient gradient = getLinearGradient("de/saxsys/svgfx/core/parent-1-path-0-gradient-0-absolute.svg");
assertLinearGradient(gradient);
}
@Test
public void aRelativeGradientWithNoTransformAPathWithNoTransformAGroupWithTransformWillCreateTheCorrectGradientCoordinates()
throws SAXParseException, IOException {
final LinearGradient gradient = getLinearGradient("de/saxsys/svgfx/core/parent-1-path-0-gradient-0-relative.svg");
assertLinearGradient(gradient);
}
@Test
public void anAbsoluteGradientWithTransformAPathWithNoTransformAGroupWithTransformWillCreateTheCorrectGradientCoordinates()
throws SAXParseException, IOException {
final LinearGradient gradient = getLinearGradient("de/saxsys/svgfx/core/parent-1-path-0-gradient-1-absolute.svg");
assertLinearGradient(gradient);
}
@Test
public void aRelativeGradientWithTransformAPathWithNoTransformAGroupWithTransformWillCreateTheCorrectGradientCoordinates()
throws SAXParseException, IOException {
final LinearGradient gradient = getLinearGradient("de/saxsys/svgfx/core/parent-1-path-0-gradient-1-relative.svg");
assertLinearGradient(gradient);
}
@Test
public void anAbsoluteGradientWithNoTransformAPathWithTransformAGroupWithTransformWillCreateTheCorrectGradientCoordinates()
throws SAXParseException, IOException {
final LinearGradient gradient = getLinearGradient("de/saxsys/svgfx/core/parent-1-path-1-gradient-0-absolute.svg");
assertLinearGradient(gradient);
}
@Test
public void aRelativeGradientWithNoTransformAPathWithTransformAGroupWithTransformWillCreateTheCorrectGradientCoordinates()
throws SAXParseException, IOException {
final LinearGradient gradient = getLinearGradient("de/saxsys/svgfx/core/parent-1-path-1-gradient-0-relative.svg");
assertLinearGradient(gradient);
}
@Test
public void anAbsoluteGradientWithTransformAPathWithTransformAGroupWithTransformWillCreateTheCorrectGradientCoordinates()
throws SAXParseException, IOException {
final LinearGradient gradient = getLinearGradient("de/saxsys/svgfx/core/parent-1-path-1-gradient-1-absolute.svg");
assertLinearGradient(gradient);
}
@Test
public void aRelativeGradientWithTransformAPathWithTransformAGroupWithTransformWillCreateTheCorrectGradientCoordinates()
throws SAXParseException, IOException {
final LinearGradient gradient = getLinearGradient("de/saxsys/svgfx/core/parent-1-path-1-gradient-1-relative.svg");
assertLinearGradient(gradient);
}
/**
 * Ensures that when parsing, the hierarchy is correctly understood, leading to the desired result.
 */
@Test
public void theHierarchyOfAnSVGFileIsCorrectlyParsed()
        throws SAXParseException, IOException {
    final Group result = getResult("de/saxsys/svgfx/core/correctHierarchy.svg");
    assertNotNull(result);
    assertEquals(1, result.getChildren().size());
    final Group group = (Group) result.getChildren().get(0);
    assertEquals(3, group.getChildren().size());
    // Each child must exist and be of the expected concrete node type; the casts act
    // as implicit type assertions (a ClassCastException fails the test). The original
    // code never asserted svgPath/ellipse, leaving them as dead locals.
    final Rectangle rectangle = (Rectangle) group.getChildren().get(0);
    assertNotNull(rectangle);
    final SVGPath svgPath = (SVGPath) group.getChildren().get(1);
    assertNotNull(svgPath);
    final Ellipse ellipse = (Ellipse) group.getChildren().get(2);
    assertNotNull(ellipse);
}
// endregion
// region Private
private LinearGradient getLinearGradient(final String file) throws SAXParseException, IOException {
    // The tested documents contain a single group whose first child is the path
    // carrying the linear-gradient stroke under test.
    final Group group = (Group) getResult(file).getChildren().get(0);
    final SVGPath strokedPath = (SVGPath) group.getChildren().get(0);
    return (LinearGradient) strokedPath.getStroke();
}
/**
 * Parses the given classpath SVG resource and returns the resulting scene-graph root.
 */
private Group getResult(final String file) throws SAXParseException, IOException {
    final URL url = getClass().getClassLoader().getResource(file);
    // Fail with a clear assertion instead of an opaque NullPointerException
    // when the test resource is missing from the classpath.
    assertNotNull(url);
    final SVGParser parser = new SVGParser();
    parser.parse(url.getFile());
    return parser.getResult();
}
private void assertLinearGradient(final LinearGradient gradient) {
    // The expected normalized gradient runs horizontally across the shape:
    // from (0.0, 0.5) to (1.0, 0.5), within MINIMUM_DEVIATION tolerance.
    final double[] expected = {0.0d, 0.5d, 1.0d, 0.5d};
    final double[] actual = {gradient.getStartX(), gradient.getStartY(), gradient.getEndX(), gradient.getEndY()};
    for (int i = 0; i < expected.length; i++) {
        assertEquals(expected[i], actual[i], MINIMUM_DEVIATION);
    }
}
// endregion
}
| |
/*
* Copyright (c) 2010-2016 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.web.component.data;
import com.evolveum.midpoint.gui.api.page.PageBase;
import com.evolveum.midpoint.gui.api.util.WebComponentUtil;
import com.evolveum.midpoint.model.api.ModelInteractionService;
import com.evolveum.midpoint.model.api.ModelService;
import com.evolveum.midpoint.model.api.TaskService;
import com.evolveum.midpoint.model.api.WorkflowService;
import com.evolveum.midpoint.prism.PrismContext;
import com.evolveum.midpoint.prism.query.ObjectPaging;
import com.evolveum.midpoint.prism.query.ObjectQuery;
import com.evolveum.midpoint.prism.query.OrderDirection;
import com.evolveum.midpoint.repo.api.RepositoryService;
import com.evolveum.midpoint.schema.SchemaConstantsGenerated;
import com.evolveum.midpoint.task.api.TaskManager;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import com.evolveum.midpoint.web.page.PageDialog;
import com.evolveum.midpoint.web.security.MidPointApplication;
import com.evolveum.midpoint.wf.api.WorkflowManager;
import org.apache.commons.lang.Validate;
import org.apache.wicket.Component;
import org.apache.wicket.extensions.markup.html.repeater.data.sort.SortOrder;
import org.apache.wicket.extensions.markup.html.repeater.util.SortParam;
import org.apache.wicket.extensions.markup.html.repeater.util.SortableDataProvider;
import org.apache.wicket.model.AbstractReadOnlyModel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
import java.io.Serializable;
import java.util.*;
/**
 * Base class for midPoint sortable Wicket data providers. Adds lazily created
 * loaded-data storage, an {@link ObjectQuery} used by subclasses for loading and
 * counting, and an optional per-query cache of result counts.
 *
 * @author lazyman
 */
public abstract class BaseSortableDataProvider<T extends Serializable> extends SortableDataProvider<T, String> {

    private static final Trace LOGGER = TraceManager.getTrace(BaseSortableDataProvider.class);

    // Component (a page, or a component placed on a page) used to resolve the owning PageBase.
    private Component component;
    // Data loaded by the provider; lazily created, see getAvailableData().
    private List<T> availableData;
    // Query used by subclasses when loading data and computing size.
    private ObjectQuery query;

    // Cached result counts keyed by the current query. After cacheCleanupThreshold
    // seconds a cached size is considered stale and is replaced by a fresh value.
    private Map<Serializable, CachedSize> cache = new HashMap<Serializable, CachedSize>();
    private int cacheCleanupThreshold = 60;
    private boolean useCache;

    public BaseSortableDataProvider(Component component) {
        this(component, false, true);
    }

    public BaseSortableDataProvider(Component component, boolean useCache) {
        this(component, useCache, true);
    }

    /**
     * @param component non-null component used to locate the owning page
     * @param useCache whether {@link #size()} results are cached per query
     * @param useDefaultSortingField whether to default to ascending sort on "name"
     */
    public BaseSortableDataProvider(Component component, boolean useCache, boolean useDefaultSortingField) {
        Validate.notNull(component, "Component must not be null.");
        this.component = component;
        this.useCache = useCache;
        if (useDefaultSortingField) {
            setSort("name", SortOrder.ASCENDING);
        }
    }

    // Single place for the application lookup/cast; all service accessors delegate
    // here instead of repeating the cast (the original duplicated it eight times).
    private static MidPointApplication getApplication() {
        return (MidPointApplication) MidPointApplication.get();
    }

    protected ModelService getModel() {
        return getApplication().getModel();
    }

    protected RepositoryService getRepositoryService() {
        return getApplication().getRepositoryService();
    }

    protected TaskManager getTaskManager() {
        return getApplication().getTaskManager();
    }

    protected PrismContext getPrismContext() {
        return getApplication().getPrismContext();
    }

    protected TaskService getTaskService() {
        return getApplication().getTaskService();
    }

    protected ModelInteractionService getModelInteractionService() {
        return getApplication().getModelInteractionService();
    }

    protected WorkflowService getWorkflowService() {
        return getApplication().getWorkflowService();
    }

    protected WorkflowManager getWorkflowManager() {
        return getApplication().getWorkflowManager();
    }

    /** Returns the loaded data, creating the backing list on first access. */
    public List<T> getAvailableData() {
        if (availableData == null) {
            availableData = new ArrayList<T>();
        }
        return availableData;
    }

    @Override
    public IModel<T> model(T object) {
        return new Model<T>(object);
    }

    /**
     * Resolves the {@link PageBase} the provider's component lives on.
     *
     * @throws IllegalStateException when the component is not itself a PageBase and
     *         is not placed on a PageBase or PageDialog
     */
    protected PageBase getPage() {
        if (component instanceof PageBase) {
            return (PageBase) component;
        }
        if (component.getPage() instanceof PageBase) {
            return (PageBase) component.getPage();
        }
        if (component.getPage() instanceof PageDialog) {
            return ((PageDialog) component.getPage()).getPageBase();
        }
        throw new IllegalStateException("Component is not instance of '" + PageBase.class.getName()
                + "' or is not placed on page of that instance.");
    }

    public ObjectQuery getQuery() {
        return query;
    }

    public void setQuery(ObjectQuery query) {
        this.query = query;
    }

    /**
     * Flag method for {@link TablePanel}. If true, a navigation panel with paging
     * "X to Y from Z results" is shown, otherwise only simple "previous and next"
     * paging is used.
     *
     * @return By default it returns true.
     */
    public IModel<Boolean> isSizeAvailableModel() {
        return new AbstractReadOnlyModel<Boolean>() {

            @Override
            public Boolean getObject() {
                return true;
            }
        };
    }

    /**
     * Translates Wicket paging/sorting state into an {@link ObjectPaging}. Uses the
     * typed SortParam&lt;String&gt; from getSort(), so no raw types or casts are needed.
     */
    protected ObjectPaging createPaging(long first, long count) {
        SortParam<String> sortParam = getSort();
        if (sortParam == null) {
            return ObjectPaging.createPaging(WebComponentUtil.safeLongToInteger(first), WebComponentUtil.safeLongToInteger(count));
        }
        OrderDirection order = sortParam.isAscending() ? OrderDirection.ASCENDING : OrderDirection.DESCENDING;
        return ObjectPaging.createPaging(WebComponentUtil.safeLongToInteger(first), WebComponentUtil.safeLongToInteger(count),
                sortParam.getProperty(), SchemaConstantsGenerated.NS_COMMON, order);
    }

    /** Drops all cached sizes and any loaded data. */
    public void clearCache() {
        cache.clear();
        getAvailableData().clear();
    }

    public int getCacheCleanupThreshold() {
        return cacheCleanupThreshold;
    }

    public void setCacheCleanupThreshold(int cacheCleanupThreshold) {
        Validate.isTrue(cacheCleanupThreshold > 0, "Cache cleanup threshold must be bigger than zero.");
        this.cacheCleanupThreshold = cacheCleanupThreshold;
    }

    @Override
    public Iterator<? extends T> iterator(long first, long count) {
        Iterator<? extends T> iterator = internalIterator(first, count);
        // Remember the paging that was used so subclasses/pages can restore it later.
        saveProviderPaging(getQuery(), createPaging(first, count));
        return iterator;
    }

    // Hook for subclasses that want to persist paging state; no-op by default.
    protected void saveProviderPaging(ObjectQuery query, ObjectPaging paging) {
    }

    protected abstract Iterator<? extends T> internalIterator(long first, long count);

    @Override
    public long size() {
        LOGGER.trace("begin::size()");
        long size;
        if (!useCache) {
            size = internalSize();
        } else {
            CachedSize cachedSize = getCachedSize(cache);
            if (cachedSize == null || isExpired(cachedSize)) {
                // No cached value, or a stale one - recompute and cache it.
                size = internalSize();
                addCachedSize(cache, new CachedSize(size, System.currentTimeMillis()));
            } else {
                LOGGER.trace("Size returning from cache.");
                size = cachedSize.getSize();
            }
        }
        LOGGER.trace("end::size(): {}", size);
        return size;
    }

    // True when the cached value is older than cacheCleanupThreshold seconds.
    // 1000L forces long arithmetic, avoiding int overflow for very large thresholds.
    private boolean isExpired(CachedSize cachedSize) {
        return System.currentTimeMillis() - cachedSize.getTimestamp() > cacheCleanupThreshold * 1000L;
    }

    protected abstract int internalSize();

    protected CachedSize getCachedSize(Map<Serializable, CachedSize> cache) {
        return cache.get(query);
    }

    protected void addCachedSize(Map<Serializable, CachedSize> cache, CachedSize newSize) {
        cache.put(query, newSize);
    }

    /**
     * Value holder for a cached result count and the time it was computed.
     */
    public static class CachedSize implements Serializable {

        private long timestamp;
        private long size;

        private CachedSize(long size, long timestamp) {
            this.size = size;
            this.timestamp = timestamp;
        }

        public long getSize() {
            return size;
        }

        public long getTimestamp() {
            return timestamp;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            CachedSize that = (CachedSize) o;
            return size == that.size && timestamp == that.timestamp;
        }

        @Override
        public int hashCode() {
            int result = (int) (timestamp ^ (timestamp >>> 32));
            result = 31 * result + (int) (size ^ (size >>> 32));
            return result;
        }

        @Override
        public String toString() {
            return "CachedSize(size=" + size + ", timestamp=" + timestamp + ")";
        }
    }
}
| |
package com.planet_ink.coffee_mud.Common.interfaces;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.XMLLibrary;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.Iterator;
import java.util.List;
/*
Copyright 2005-2016 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * A CoffeeShop is an object for storing the inventory of a shopkeeper, banker,
 * auctionhouse, merchant, or other object that implements the ShopKeeper interface
 * for the purpose of selling goods and services.
 *
 * ShopKeepers maintain two types of inventory: the base inventory and the stock
 * inventory. The stock or store inventory is the list of items the shopkeeper
 * currently has for sale, the amounts, base prices, etc.
 * The base inventory is used only for shopkeepers who only buy things like
 * they originally had in stock, and so the base inventory is always populated with
 * a single copy of the original store inventory, to be used as a base of comparison
 * for situations where the stock is empty, but someone is wanting to sell.
 *
 * @see com.planet_ink.coffee_mud.core.interfaces.ShopKeeper
 * @see com.planet_ink.coffee_mud.core.interfaces.ShopKeeper#isSold(int)
 * @see com.planet_ink.coffee_mud.core.interfaces.ShopKeeper#DEAL_INVENTORYONLY
 */
public interface CoffeeShop extends CMCommon
{
/**
 * Returns whether an item sufficiently like the given item originally
 * existed in this shop's inventory when it was created. Applies only
 * to shops whose whatIsSold method returns ONLY_INVENTORY.
 * @see com.planet_ink.coffee_mud.core.interfaces.ShopKeeper#isSold(int)
 * @see com.planet_ink.coffee_mud.core.interfaces.ShopKeeper#DEAL_INVENTORYONLY
 * @param thisThang the thing to compare against the base inventory
 * @return whether the item, or one just like it, is in the base inventory
 */
public boolean inEnumerableInventory(Environmental thisThang);
/**
 * Adds a new item to the store inventory. Use this method when an item is sold
 * to the store, as pricing and other information will have to be derived.
 * @see com.planet_ink.coffee_mud.Common.interfaces.CoffeeShop#addStoreInventory(Environmental, int, int)
 * @param thisThang the thing to sell
 * @return the core store inventory item added
 */
public Environmental addStoreInventory(Environmental thisThang);
/**
 * Returns the number of items in the store's base inventory. Only really useful
 * for historical reasons, or if the shop sells inventory only.
 * @see com.planet_ink.coffee_mud.core.interfaces.ShopKeeper#isSold(int)
 * @see com.planet_ink.coffee_mud.core.interfaces.ShopKeeper#DEAL_INVENTORYONLY
 * @return the number of items in the base inventory
 */
public int enumerableStockSize();
/**
 * Returns the number of items this shop currently has for sale. Does not
 * take the number of duplicates into account. For that, call totalStockSizeIncludingDuplicates.
 * @see com.planet_ink.coffee_mud.Common.interfaces.CoffeeShop#totalStockSizeIncludingDuplicates()
 * @return the number of items for sale.
 */
public int totalStockSize();
/**
 * Destroys all the items in this shop.
 */
public void destroyStoreInventory();
/**
 * Returns an iterator of all the Environmental objects this shop has for sale.
 * Will only return one of each item, even if multiple are available.
 * @return an iterator of objects for sale.
 */
public Iterator<Environmental> getStoreInventory();
/**
 * Returns an iterator of all the Environmental objects this shop has for sale
 * which match the given search string.
 * Will only return one of each item, even if multiple are available.
 * @param srchStr the item to hunt for.
 * @return an iterator of objects for sale.
 */
public Iterator<Environmental> getStoreInventory(String srchStr);
/**
 * Returns an iterator of all the Environmental objects this shop has in its base
 * inventory. Only useful for historical reasons, or if the shop sells inventory
 * only.
 * @see com.planet_ink.coffee_mud.core.interfaces.ShopKeeper#isSold(int)
 * @see com.planet_ink.coffee_mud.core.interfaces.ShopKeeper#DEAL_INVENTORYONLY
 * @return an iterator of objects in the base inventory
 */
public Iterator<Environmental> getEnumerableInventory();
/**
 * Clears both the base and stock/store inventories.
 */
public void emptyAllShelves();
/**
 * Adds a new item to the store inventory so the shopkeeper can sell it. All items
 * added go cumulatively into the store inventory, and one copy is kept in the
 * base inventory for historical reasons. This method is called when multiple items
 * need to be added, or if the price is available. This method is usually used to
 * build an original shop inventory.
 * @param thisThang the item/mob/ability to sell
 * @param number the number of items to sell
 * @param price the price of the item (in base currency) or -1 to have it determined
 * @return the actual object stored in the inventory
 */
public Environmental addStoreInventory(Environmental thisThang, int number, int price);
/**
 * Total weight, in pounds, of all items in the store inventory, taking the number in
 * stock into account.
 * @return the total weight in pounds
 */
public int totalStockWeight();
/**
 * The number of items in the store inventory, taking the number in stock into account.
 * Call this method to see how crowded the shop really is, as opposed to totalStockSize.
 * @see com.planet_ink.coffee_mud.Common.interfaces.CoffeeShop#totalStockSize()
 * @return the total number of all items in stock
 */
public int totalStockSizeIncludingDuplicates();
/**
 * Removes all items like the given item from the base and store inventory.
 * @see com.planet_ink.coffee_mud.core.interfaces.ShopKeeper#isSold(int)
 * @param thisThang the item like which to remove
 */
public void delAllStoreInventory(Environmental thisThang);
/**
 * Returns whether an item with the given name is presently in this store's
 * stock inventory, and available for sale.
 * @see com.planet_ink.coffee_mud.core.interfaces.ShopKeeper#isSold(int)
 * @param name the name of the item to search for
 * @param mob the mob who is interested (stock can differ depending on customer)
 * @return whether the item is available
 */
public boolean doIHaveThisInStock(String name, MOB mob);
/**
 * Returns the base stock price (not the final price by any means) that the shop
 * will use as a foundation for determining the given item's price. -1 would mean
 * that the shopkeeper uses the valuation of the item as a basis, whereas another
 * value is in base gold. Best to get the likeThis item from getStoreInventory().
 * @see com.planet_ink.coffee_mud.Common.interfaces.CoffeeShop#getStoreInventory()
 * @param likeThis the item like which to compare
 * @return the stock price of the item given.
 */
public int stockPrice(Environmental likeThis);
/**
 * Returns the number of items like the one given that the shopkeeper presently
 * has in stock and available for sale.
 * @see com.planet_ink.coffee_mud.Common.interfaces.CoffeeShop#getStoreInventory()
 * @param likeThis the item like which to compare
 * @return the number currently in stock.
 */
public int numberInStock(Environmental likeThis);
/**
 * Searches this shop's stock of items for sale for one matching the given name.
 * @see com.planet_ink.coffee_mud.Common.interfaces.CoffeeShop#getStoreInventory()
 * @param name the name of the item to search for
 * @param mob the mob who is interested (stock can differ depending on customer)
 * @return the available item, if found
 */
public Environmental getStock(String name, MOB mob);
/**
 * Searches this shop's stock of items for sale for one matching the given name.
 * If one is found, it copies the item, removes one from the available stock, and
 * returns the copy.
 * @see com.planet_ink.coffee_mud.Common.interfaces.CoffeeShop#getStoreInventory()
 * @param name the name of the item to search for
 * @param mob the mob who is interested (stock can differ depending on customer)
 * @return the available item, if found
 */
public Environmental removeStock(String name, MOB mob);
/**
 * Searches this shop's stock of items for sale for one matching the given name.
 * If one is found, it copies the item, removes one from the available stock, and
 * prepares it for sale by adding it to a list along with any necessary accessories,
 * such as necessary keys, or if a container, any contents of the container.
 * @see com.planet_ink.coffee_mud.Common.interfaces.CoffeeShop#getStoreInventory()
 * @param named the name of the item to search for
 * @param mob the mob who is interested (stock can differ depending on customer)
 * @return the available items, if found, as a list of Environmental objects
 */
public List<Environmental> removeSellableProduct(String named, MOB mob);
/**
 * Generates an XML document of all available shop inventory, prices, and availability.
 * @see com.planet_ink.coffee_mud.Common.interfaces.CoffeeShop#getStoreInventory()
 * @see com.planet_ink.coffee_mud.Common.interfaces.CoffeeShop#buildShopFromXML(String)
 * @return an XML document.
 */
public String makeXML();
/**
 * Repopulates this shop's inventory from a given XML document, restoring store inventory,
 * base inventory, prices, and availability.
 * @see com.planet_ink.coffee_mud.Common.interfaces.CoffeeShop#makeXML()
 * @param text the XML document to restore from
 */
public void buildShopFromXML(String text);
/**
 * A method for quickly making wholesale changes to a shopkeeper's inventory.
 * getStoreInventory should be called to get the list of items. The items can
 * then be modified, and this method called to properly "resubmit" them to
 * the shopkeeper.
 * @param shopItems the items for inventory
 */
public void resubmitInventory(List<Environmental> shopItems);
/**
 * Initializes this shop object with its host ShopKeeper.
 * @param SK the shopkeeper that hosts this object
 * @return always this
 */
public CoffeeShop build(ShopKeeper SK);
/**
 * Returns the ShopKeeper that is hosting this shop.
 * @return the ShopKeeper that is hosting this shop
 */
public ShopKeeper shopKeeper();
/**
 * Returns whether the whatIsSold code applies to the shopkeeper hosting this shop.
 * @see ShopKeeper#DEAL_DESCS
 * @param code the whatIsSold code
 * @return whether the whatIsSold code applies to the shopkeeper hosting this shop.
 */
public boolean isSold(int code);
/**
 * Class for representing a shelf product, holding
 * an item prototype, the number in stock, and the
 * price. A price of -1 means to use the item's
 * calculated value (common).
 * @author Bo Zimmerman
 */
public static class ShelfProduct
{
// The prototype item on this shelf.
public Environmental product;
// How many of the prototype are currently in stock.
public int number;
// Base price; -1 means derive the price from the item's own value.
public int price;
public ShelfProduct(Environmental E, int number, int price)
{
this.product=E;this.number=number;this.price=price;
}
}
}
| |
package gudusoft.gsqlparser.sql2xml.generator.ansi;
import gudusoft.gsqlparser.TCustomSqlStatement;
import gudusoft.gsqlparser.stmt.TAlterTableStatement;
import gudusoft.gsqlparser.stmt.TCreateTableSqlStatement;
import gudusoft.gsqlparser.stmt.TDeleteSqlStatement;
import gudusoft.gsqlparser.stmt.TInsertSqlStatement;
import gudusoft.gsqlparser.stmt.TMergeSqlStatement;
import gudusoft.gsqlparser.stmt.TSelectSqlStatement;
import gudusoft.gsqlparser.stmt.TUpdateSqlStatement;
import java.util.ArrayList;
import java.util.List;
/**
 * Helpers for classifying SQL statements and function names while generating
 * ANSI SQL XML. Function-name lookups are case-insensitive.
 */
public class Utility
{
    /**
     * Returns whether the statement is a "direct SQL data statement":
     * SELECT/INSERT/UPDATE/DELETE/MERGE that is not cursor-dependent.
     */
    public static boolean isDirect_sql_data_statement( TCustomSqlStatement stmt )
    {
        // Cursor-dependent data statements (see isSql_data_statement) are excluded first.
        if ( isSql_data_statement( stmt ) )
            return false;
        return stmt instanceof TSelectSqlStatement
                || stmt instanceof TInsertSqlStatement
                || stmt instanceof TUpdateSqlStatement
                || stmt instanceof TDeleteSqlStatement
                || stmt instanceof TMergeSqlStatement;
    }

    /**
     * Returns whether the statement is a cursor-dependent data statement.
     * NOTE(review): currently only DELETE ... WHERE CURRENT OF is recognized;
     * confirm whether positioned UPDATE should also be covered.
     */
    public static boolean isSql_data_statement( TCustomSqlStatement stmt )
    {
        if ( stmt instanceof TDeleteSqlStatement )
        {
            // isCurerntOf is the (misspelled) vendor API name - do not "fix" it here.
            return stmt.getWhereClause( ) != null
                    && stmt.getWhereClause( ).isCurerntOf( );
        }
        return false;
    }

    /** Returns whether the statement is an SQL schema statement (CREATE/ALTER TABLE). */
    public static boolean isSql_schema_statement( TCustomSqlStatement stmt )
    {
        return stmt instanceof TCreateTableSqlStatement
                || stmt instanceof TAlterTableStatement;
    }

    /** Returns whether the statement can be directly executed (data or schema statement). */
    public static boolean isDirectly_executable_statement(
            TCustomSqlStatement stmt )
    {
        return stmt instanceof TSelectSqlStatement
                || stmt instanceof TInsertSqlStatement
                || stmt instanceof TUpdateSqlStatement
                || stmt instanceof TDeleteSqlStatement
                || stmt instanceof TMergeSqlStatement
                || stmt instanceof TCreateTableSqlStatement
                || stmt instanceof TAlterTableStatement;
    }

    // Case-insensitive membership test shared by all function-name lookups below.
    private static boolean containsIgnoreCase( List<String> names, String functionName )
    {
        for ( String name : names )
        {
            if ( name.equalsIgnoreCase( functionName ) )
            {
                return true;
            }
        }
        return false;
    }

    // ANSI SQL numeric value function names.
    // NOTE(review): "SORT" appeared TWICE in the original list; the duplicate was
    // removed (membership semantics unchanged). It is most likely a typo for the
    // standard function "SQRT" - confirm before renaming, as that would change behavior.
    static List<String> numericValueFunctions = new ArrayList<String>( );
    static
    {
        numericValueFunctions.add( "POSITION" );
        numericValueFunctions.add( "OCCURRENCES_REGEX" );
        numericValueFunctions.add( "POSITION_REGEX" );
        numericValueFunctions.add( "EXTRACT" );
        numericValueFunctions.add( "CHAR_LENGTH" );
        numericValueFunctions.add( "CHARACTER_LENGTH" );
        numericValueFunctions.add( "OCTET_LENGTH" );
        numericValueFunctions.add( "CARDINALITY" );
        numericValueFunctions.add( "ABS" );
        numericValueFunctions.add( "MOD" );
        numericValueFunctions.add( "LN" );
        numericValueFunctions.add( "EXP" );
        numericValueFunctions.add( "POWER" );
        numericValueFunctions.add( "SORT" );
        numericValueFunctions.add( "FLOOR" );
        numericValueFunctions.add( "CEIL" );
        numericValueFunctions.add( "CEILING" );
        numericValueFunctions.add( "WIDTH_BUCKET" );
    }

    public static boolean isNumericValueFunction( String functionName )
    {
        return containsIgnoreCase( numericValueFunctions, functionName );
    }

    // ANSI SQL aggregate function names.
    // NOTE(review): "SORT", "CEIL", "CEILING" and "WIDTH_BUCKET" are not aggregates
    // and look copy-pasted from the numeric list; kept to preserve behavior - verify.
    static List<String> aggregateFunctions = new ArrayList<String>( );
    static
    {
        aggregateFunctions.add( "COUNT" );
        aggregateFunctions.add( "AVG" );
        aggregateFunctions.add( "MAX" );
        aggregateFunctions.add( "MIN" );
        aggregateFunctions.add( "SUM" );
        aggregateFunctions.add( "EVERY" );
        aggregateFunctions.add( "ANY" );
        aggregateFunctions.add( "SOME" );
        aggregateFunctions.add( "STDDEV_POP" );
        aggregateFunctions.add( "STDDEV_SAMP" );
        aggregateFunctions.add( "VAR_SAMP" );
        aggregateFunctions.add( "VAR_POP" );
        aggregateFunctions.add( "COLLECT" );
        aggregateFunctions.add( "FUSION" );
        aggregateFunctions.add( "INTERSECTION" );
        aggregateFunctions.add( "SORT" );
        aggregateFunctions.add( "CEIL" );
        aggregateFunctions.add( "CEILING" );
        aggregateFunctions.add( "WIDTH_BUCKET" );
    }

    public static boolean isAggregateFunction( String functionName )
    {
        return containsIgnoreCase( aggregateFunctions, functionName );
    }

    /**
     * Returns whether the string parses as a number.
     * Double.parseDouble accepts every token Integer.parseInt or Float.parseFloat
     * would accept, so the original three-stage parse collapses to a single attempt.
     * A null argument now returns false instead of throwing.
     */
    public static boolean isNumber( String string )
    {
        if ( string == null )
            return false;
        try
        {
            Double.parseDouble( string );
            return true;
        }
        catch ( NumberFormatException ignored )
        {
            return false;
        }
    }

    // ANSI SQL string value function names.
    static List<String> stringValueFunctions = new ArrayList<String>( );
    static
    {
        stringValueFunctions.add( "SUBSTRING" );
        stringValueFunctions.add( "SUBSTRING_REGEX" );
        stringValueFunctions.add( "UPPER" );
        stringValueFunctions.add( "LOWER" );
        stringValueFunctions.add( "CONVERT" );
        stringValueFunctions.add( "TRANSLATE" );
        stringValueFunctions.add( "TRANSLATE_REGEX" );
        stringValueFunctions.add( "TRIM" );
        stringValueFunctions.add( "OVERLAY" );
        stringValueFunctions.add( "NORMALIZE" );
        stringValueFunctions.add( "SPECIFICTYPE" );
    }

    public static boolean isStringValueFunction( String functionName )
    {
        return containsIgnoreCase( stringValueFunctions, functionName );
    }

    /**
     * Returns whether the token is a quoted character string literal
     * ('...' or national N'...'). Null-safe.
     */
    public static boolean isString( String string )
    {
        if ( string == null )
            return false;
        if ( string.startsWith( "'" ) && string.endsWith( "'" ) )
            return true;
        return isNationalString( string );
    }

    /**
     * Returns whether the token is a national character string literal (N'...').
     * Fix: the original dereferenced a null argument here, so isString(null)
     * threw a NullPointerException; both methods now return false for null.
     */
    public static boolean isNationalString( String string )
    {
        return string != null
                && string.toUpperCase( ).startsWith( "N'" )
                && string.endsWith( "'" );
    }

    // NOTE(review): Unicode string literals (U&'...') are not implemented yet;
    // this intentionally always returns false.
    public static boolean isUnicodeString( String string )
    {
        return false;
    }

    /** Returns whether the token is a datetime literal, e.g. DATE '2001-01-01'. */
    public static boolean isDateTypeValue( String string )
    {
        if ( string == null )
            return false;
        // NOTE(review): the "(d)" alternative matches a bare "d" prefix - confirm intent.
        return string.trim( )
                .matches( "(?i)((Date)|(d)|(TIME)|(TIMESTAMP))\\s*'.+?'" );
    }

    /** Returns whether the token is an interval literal, e.g. INTERVAL '1' DAY. */
    public static boolean isIntervalTypeValue( String string )
    {
        if ( string == null )
            return false;
        return string.trim( ).matches( "(?i)(INTERVAL)\\s*.+?" );
    }

    // ANSI SQL datetime value function names (no-argument niladic functions).
    static List<String> dateTypeValueFunctions = new ArrayList<String>( );
    static
    {
        dateTypeValueFunctions.add( "CURRENT_DATE" );
        dateTypeValueFunctions.add( "CURRENT_TIME" );
        dateTypeValueFunctions.add( "CURRENT_TIMESTAMP" );
        dateTypeValueFunctions.add( "LOCALTIME" );
        dateTypeValueFunctions.add( "LOCALTIMESTAMP" );
    }

    public static boolean isDateTypeValueFunction( String functionName )
    {
        return containsIgnoreCase( dateTypeValueFunctions, functionName );
    }

    // ANSI SQL window (rank) function names.
    static List<String> windowFunctions = new ArrayList<String>( );
    static
    {
        windowFunctions.add( "RANK" );
        windowFunctions.add( "DENSE_RANK" );
        windowFunctions.add( "PERCENT_RANK" );
        windowFunctions.add( "CUME_DIST" );
        windowFunctions.add( "ROW_NUMBER" );
    }

    public static boolean isWindowFunction( String functionName )
    {
        return containsIgnoreCase( windowFunctions, functionName );
    }

    // ANSI SQL CASE-abbreviation function names.
    static List<String> caseAbbreviationFunctions = new ArrayList<String>( );
    static
    {
        caseAbbreviationFunctions.add( "NULLIF" );
        caseAbbreviationFunctions.add( "COALESCE" );
    }

    public static boolean isCaseAbbreviationFunction( String functionName )
    {
        return containsIgnoreCase( caseAbbreviationFunctions, functionName );
    }
}
| |
/*
* Copyright 2014 http://Bither.net
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.bither.ui.base.dialog;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.DialogInterface.OnDismissListener;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.KeyEvent;
import android.view.View;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.view.inputmethod.EditorInfo;
import android.widget.Button;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.TextView;
import net.bither.R;
import net.bither.bitherj.crypto.SecureCharSequence;
import net.bither.bitherj.utils.Utils;
import net.bither.model.Check;
import net.bither.model.Check.CheckListener;
import net.bither.model.Check.ICheckAction;
import net.bither.bitherj.crypto.PasswordSeed;
import net.bither.preference.AppSharedPreference;
import net.bither.ui.base.keyboard.password.PasswordEntryKeyboardView;
import net.bither.ui.base.listener.ICheckPasswordListener;
import net.bither.ui.base.listener.IDialogPasswordListener;
import net.bither.util.CheckUtil;
import java.util.ArrayList;
import java.util.concurrent.ExecutorService;
public class DialogPasswordWithOther extends Dialog implements OnDismissListener,
TextView.OnEditorActionListener {
// Root content view; animated (shaken) when a wrong password is entered.
private View container;
// Input area vs. progress area - toggled while the password is being checked.
private LinearLayout llInput;
private LinearLayout llChecking;
private TextView tvTitle;
private TextView tvError;
private EditText etPassword;
// Confirmation field; only visible when a new password is being set (see configureCheckPre).
private EditText etPasswordConfirm;
private Button btnOk;
private Button btnCancel;
// Custom on-screen keyboard bound to both password fields.
private PasswordEntryKeyboardView kv;
// Persisted password seed; null when no password has been configured yet.
private PasswordSeed passwordSeed;
// Notified with the entered password after a successful dismiss.
private IDialogPasswordListener listener;
private ICheckPasswordListener checkPasswordListener;
// Set once the password has been accepted; drives onDismiss behavior.
private boolean passwordEntered = false;
// When true, an existing password is verified instead of asking for confirmation.
private boolean checkPre = true;
private boolean cancelable = true;
private ExecutorService executor;
/**
 * Creates the password dialog and wires its views.
 *
 * @param context  host context used for theming
 * @param listener notified with the entered password when the dialog is
 *                 dismissed after successful entry (may be null)
 */
public DialogPasswordWithOther(Context context, IDialogPasswordListener listener) {
super(context, R.style.password_dialog);
setContentView(R.layout.dialog_password_with_other);
this.listener = listener;
setOnDismissListener(this);
// Load the stored seed first: its presence decides whether a confirm field is shown.
passwordSeed = getPasswordSeed();
initView();
}
private void initView() {
container = findViewById(R.id.fl_container);
llInput = (LinearLayout) findViewById(R.id.ll_input);
llChecking = (LinearLayout) findViewById(R.id.ll_checking);
tvTitle = (TextView) findViewById(R.id.tv_title);
tvError = (TextView) findViewById(R.id.tv_error);
etPassword = (EditText) findViewById(R.id.et_password);
etPasswordConfirm = (EditText) findViewById(R.id.et_password_confirm);
btnOk = (Button) findViewById(R.id.btn_ok);
btnCancel = (Button) findViewById(R.id.btn_cancel);
kv = (PasswordEntryKeyboardView) findViewById(R.id.kv);
etPassword.addTextChangedListener(passwordWatcher);
etPasswordConfirm.addTextChangedListener(passwordWatcher);
etPassword.setOnEditorActionListener(this);
etPasswordConfirm.setOnEditorActionListener(this);
configureCheckPre();
configureEditTextActionId();
btnOk.setOnClickListener(okClick);
btnCancel.setOnClickListener(cancelClick);
btnOk.setEnabled(false);
passwordCheck.setCheckListener(passwordCheckListener);
kv.registerEditText(etPassword, etPasswordConfirm);
}
private PasswordSeed getPasswordSeed() {
return AppSharedPreference.getInstance().getPasswordSeed();
}
private void configureCheckPre() {
if (checkPre) {
if (passwordSeed != null) {
etPasswordConfirm.setVisibility(View.GONE);
} else {
etPasswordConfirm.setVisibility(View.VISIBLE);
}
} else {
etPasswordConfirm.setVisibility(View.GONE);
}
}
@Override
public void onDismiss(DialogInterface dialog) {
if (passwordEntered && listener != null) {
listener.onPasswordEntered(new SecureCharSequence(etPassword.getText()));
etPassword.setText("");
etPasswordConfirm.setText("");
}
}
private void checkValid() {
btnOk.setEnabled(true);
}
private void shake() {
Animation shake = AnimationUtils.loadAnimation(getContext(), R.anim.password_wrong_warning);
container.startAnimation(shake);
}
public void setCheckPre(boolean check) {
checkPre = check;
configureCheckPre();
}
public void setCheckPasswordListener(ICheckPasswordListener checkPasswordListener) {
this.checkPasswordListener = checkPasswordListener;
}
public void show() {
if (checkPre) {
if (etPasswordConfirm.getVisibility() != View.VISIBLE) {
setTitle(R.string.add_address_generate_address_password_label);
} else {
setTitle(R.string.add_address_generate_address_password_set_label);
}
}
if (cancelable) {
btnCancel.setVisibility(View.VISIBLE);
} else {
btnCancel.setVisibility(View.GONE);
}
super.show();
}
public void setTitle(int resource) {
tvTitle.setText(resource);
}
private View.OnClickListener okClick = new View.OnClickListener() {
@Override
public void onClick(View v) {
SecureCharSequence password = new SecureCharSequence(etPassword.getText());
SecureCharSequence passwordConfirm = new SecureCharSequence(etPasswordConfirm.getText());
if (passwordSeed == null && !password.equals(passwordConfirm) && checkPre) {
password.wipe();
passwordConfirm.wipe();
tvError.setText(R.string.add_address_generate_address_password_not_same);
tvError.setVisibility(View.VISIBLE);
etPasswordConfirm.requestFocus();
return;
}
password.wipe();
passwordConfirm.wipe();
if ((passwordSeed != null && checkPre) || checkPasswordListener != null) {
ArrayList<Check> checks = new ArrayList<Check>();
checks.add(passwordCheck);
executor = CheckUtil.runChecks(checks, 1);
} else {
passwordEntered = true;
dismiss();
}
}
};
private CheckListener passwordCheckListener = new CheckListener() {
@Override
public void onCheckBegin(Check check) {
llChecking.setVisibility(View.VISIBLE);
llInput.setVisibility(View.INVISIBLE);
kv.hideKeyboard();
}
@Override
public void onCheckEnd(Check check, boolean success) {
if (executor != null) {
executor.shutdown();
executor = null;
}
if (success) {
passwordEntered = true;
dismiss();
} else {
llChecking.setVisibility(View.GONE);
llInput.setVisibility(View.VISIBLE);
etPassword.setText("");
checkValid();
tvError.setText(R.string.password_wrong);
tvError.setVisibility(View.VISIBLE);
shake();
kv.showKeyboard();
}
}
};
private TextWatcher passwordWatcher = new TextWatcher() {
private SecureCharSequence password;
private SecureCharSequence passwordConfirm;
@Override
public void beforeTextChanged(CharSequence s, int start, int count, int after) {
if (password != null) {
password.wipe();
}
if (passwordConfirm != null) {
passwordConfirm.wipe();
}
password = new SecureCharSequence(etPassword.getText());
passwordConfirm = new SecureCharSequence(etPasswordConfirm.getText());
}
@Override
public void onTextChanged(CharSequence s, int start, int before, int count) {
}
@Override
public void afterTextChanged(Editable s) {
tvError.setVisibility(View.GONE);
SecureCharSequence p = new SecureCharSequence(etPassword.getText());
if (p.length() > 0) {
if (!Utils.validPassword(p)) {
etPassword.setText(password);
}
}
p.wipe();
if (etPasswordConfirm.getVisibility() == View.VISIBLE) {
SecureCharSequence pc = new SecureCharSequence(etPasswordConfirm.getText());
if (pc.length() > 0) {
if (!Utils.validPassword(pc)) {
etPasswordConfirm.setText(passwordConfirm);
}
}
pc.wipe();
}
checkValid();
password.wipe();
passwordConfirm.wipe();
}
};
@Override
public void setCancelable(boolean flag) {
this.cancelable = flag;
super.setCancelable(flag);
}
public void setTitle(String title) {
tvTitle.setText(title);
}
private View.OnClickListener cancelClick = new View.OnClickListener() {
@Override
public void onClick(View v) {
passwordEntered = false;
dismiss();
}
};
private Check passwordCheck = new Check("", new ICheckAction() {
@Override
public boolean check() {
SecureCharSequence password = new SecureCharSequence(etPassword.getText());
if (checkPasswordListener != null) {
boolean result = checkPasswordListener.checkPassword(password);
password.wipe();
return result;
} else if (passwordSeed != null) {
boolean result = passwordSeed.checkPassword(password);
password.wipe();
return result;
} else {
return true;
}
}
});
private void configureEditTextActionId() {
if (etPasswordConfirm.getVisibility() == View.VISIBLE) {
etPassword.setImeActionLabel(null, EditorInfo.IME_ACTION_NEXT);
} else {
etPassword.setImeActionLabel(null, EditorInfo.IME_ACTION_DONE);
}
etPasswordConfirm.setImeActionLabel(null, EditorInfo.IME_ACTION_DONE);
}
@Override
public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
if (v == etPassword) {
if (etPasswordConfirm.getVisibility() == View.VISIBLE) {
return false;
} else if (btnOk.isEnabled()) {
okClick.onClick(btnOk);
return true;
}
}
if (v == etPasswordConfirm && btnOk.isEnabled()) {
okClick.onClick(btnOk);
return true;
}
return false;
}
}
| |
/* Generated By:JavaCC: Do not edit this line. RSLParserTokenManager.java */
/*
* Copyright 1999-2006 University of Chicago
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.globus.rsl;
import java.io.StringReader;
import java.util.LinkedList;
import java.util.ListIterator;
/**
 * Token manager (lexer) for the RSL parser.
 *
 * NOTE(review): this class is JavaCC-generated ("Do not edit this line" header
 * above) — fix problems in the grammar and regenerate rather than editing the
 * state tables by hand. The jj* methods implement the generated DFA (for
 * string literals / fixed tokens) and NFA (for the remaining token classes).
 */
public class RSLParserTokenManager implements RSLParserConstants
{
    // For the still-active literal DFAs in active0 after a failed match at pos,
    // returns the NFA state to resume from, or -1 when none applies.
    private final int jjStopStringLiteralDfa_0(int pos, long active0)
    {
        switch (pos)
        {
            case 0:
                if ((active0 & 0x8000L) != 0L)
                    return 0;
                if ((active0 & 0x8L) != 0L)
                    return 7;
                return -1;
            default :
                return -1;
        }
    }

    // Hands over from the literal DFA to the NFA at position pos + 1.
    private final int jjStartNfa_0(int pos, long active0)
    {
        return jjMoveNfa_0(jjStopStringLiteralDfa_0(pos, active0), pos + 1);
    }

    // Records a definite match of token `kind` ending at `pos`.
    private final int jjStopAtPos(int pos, int kind)
    {
        jjmatchedKind = kind;
        jjmatchedPos = pos;
        return pos + 1;
    }

    // Records a match of `kind` at `pos`, then continues the NFA from `state`.
    private final int jjStartNfaWithStates_0(int pos, int kind, int state)
    {
        jjmatchedKind = kind;
        jjmatchedPos = pos;
        try { curChar = input_stream.readChar(); }
        catch(java.io.IOException e) { return pos + 1; }
        return jjMoveNfa_0(state, pos + 1);
    }

    // Literal-token DFA, first character. Single-char tokens stop immediately;
    // prefixes of two-char tokens ("!=", ">=", "<=", "$(") fall through to
    // jjMoveStringLiteralDfa1_0; anything else goes straight to the NFA.
    private final int jjMoveStringLiteralDfa0_0()
    {
        switch(curChar)
        {
            case 9:
                return jjStopAtPos(0, 2);
            case 10:
                return jjStopAtPos(0, 4);
            case 13:
                return jjStartNfaWithStates_0(0, 3, 7);
            case 32:
                return jjStopAtPos(0, 1);
            case 33:
                return jjMoveStringLiteralDfa1_0(0x80L);
            case 35:
                return jjStopAtPos(0, 27);
            case 36:
                return jjMoveStringLiteralDfa1_0(0x20000L);
            case 38:
                return jjStopAtPos(0, 12);
            case 40:
                return jjStartNfaWithStates_0(0, 15, 0);
            case 41:
                return jjStopAtPos(0, 16);
            case 43:
                return jjStopAtPos(0, 14);
            case 60:
                jjmatchedKind = 10;
                return jjMoveStringLiteralDfa1_0(0x800L);
            case 61:
                return jjStopAtPos(0, 6);
            case 62:
                jjmatchedKind = 8;
                return jjMoveStringLiteralDfa1_0(0x200L);
            case 124:
                return jjStopAtPos(0, 13);
            default :
                return jjMoveNfa_0(6, 0);
        }
    }

    // Literal-token DFA, second character (completes "$(", "!=", ">=", "<=").
    private final int jjMoveStringLiteralDfa1_0(long active0)
    {
        try { curChar = input_stream.readChar(); }
        catch(java.io.IOException e) {
            jjStopStringLiteralDfa_0(0, active0);
            return 1;
        }
        switch(curChar)
        {
            case 40:
                if ((active0 & 0x20000L) != 0L)
                    return jjStopAtPos(1, 17);
                break;
            case 61:
                if ((active0 & 0x80L) != 0L)
                    return jjStopAtPos(1, 7);
                else if ((active0 & 0x200L) != 0L)
                    return jjStopAtPos(1, 9);
                else if ((active0 & 0x800L) != 0L)
                    return jjStopAtPos(1, 11);
                break;
            default :
                break;
        }
        return jjStartNfa_0(0, active0);
    }

    // Adds `state` to the active NFA set, at most once per round.
    private final void jjCheckNAdd(int state)
    {
        if (jjrounds[state] != jjround)
        {
            jjstateSet[jjnewStateCnt++] = state;
            jjrounds[state] = jjround;
        }
    }

    // Adds jjnextStates[start..end] without the per-round duplicate check.
    private final void jjAddStates(int start, int end)
    {
        do {
            jjstateSet[jjnewStateCnt++] = jjnextStates[start];
        } while (start++ != end);
    }

    private final void jjCheckNAddTwoStates(int state1, int state2)
    {
        jjCheckNAdd(state1);
        jjCheckNAdd(state2);
    }

    // Adds jjnextStates[start..end] with duplicate checking.
    private final void jjCheckNAddStates(int start, int end)
    {
        do {
            jjCheckNAdd(jjnextStates[start]);
        } while (start++ != end);
    }

    private final void jjCheckNAddStates(int start)
    {
        jjCheckNAdd(jjnextStates[start]);
        jjCheckNAdd(jjnextStates[start + 1]);
    }

    // Bit vector covering characters 128..255 (any high-half character matches).
    static final long[] jjbitVec0 = {
        0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL
    };

    // The main NFA simulation: advances the active state set one input
    // character at a time, recording the longest match found. The 64-bit masks
    // test (1L << curChar) membership in a character class for the current
    // 64-character band.
    private final int jjMoveNfa_0(int startState, int curPos)
    {
        int[] nextStates;
        int startsAt = 0;
        jjnewStateCnt = 111;
        int i = 1;
        jjstateSet[0] = startState;
        int j, kind = 0x7fffffff;
        for (;;)
        {
            if (++jjround == 0x7fffffff)
                ReInitRounds();
            if (curChar < 64)
            {
                long l = 1L << curChar;
                MatchLoop: do
                {
                    switch(jjstateSet[--i])
                    {
                        case 6:
                            if ((0x8ffff420fffff9ffL & l) != 0L)
                            {
                                if (kind > 24)
                                    kind = 24;
                                jjCheckNAdd(7);
                            }
                            else if (curChar == 39)
                                jjCheckNAddStates(0, 4);
                            else if (curChar == 34)
                                jjCheckNAddStates(5, 9);
                            else if (curChar == 40)
                                jjstateSet[jjnewStateCnt++] = 0;
                            break;
                        case 0:
                            if (curChar == 42)
                                jjCheckNAddTwoStates(1, 2);
                            break;
                        case 1:
                            if ((0xfffffbffffffffffL & l) != 0L)
                                jjCheckNAddTwoStates(1, 2);
                            break;
                        case 2:
                            if (curChar == 42)
                                jjCheckNAddStates(10, 12);
                            break;
                        case 3:
                            if ((0xfffff9ffffffffffL & l) != 0L)
                                jjCheckNAddTwoStates(4, 2);
                            break;
                        case 4:
                            if ((0xfffffbffffffffffL & l) != 0L)
                                jjCheckNAddTwoStates(4, 2);
                            break;
                        case 5:
                            if (curChar == 41 && kind > 5)
                                kind = 5;
                            break;
                        case 7:
                            if ((0x8ffff420fffff9ffL & l) == 0L)
                                break;
                            if (kind > 24)
                                kind = 24;
                            jjCheckNAdd(7);
                            break;
                        case 37:
                            if (curChar == 34)
                                jjCheckNAddStates(5, 9);
                            break;
                        case 39:
                            if (curChar == 34 && kind > 19)
                                kind = 19;
                            break;
                        case 70:
                            if ((0xfffffffbffffffffL & l) != 0L)
                                jjCheckNAddStates(13, 15);
                            break;
                        case 71:
                            if (curChar == 34)
                                jjCheckNAddStates(13, 15);
                            break;
                        case 72:
                            if (curChar == 34)
                                jjstateSet[jjnewStateCnt++] = 71;
                            break;
                        case 73:
                            if (curChar == 34 && kind > 25)
                                kind = 25;
                            break;
                        case 74:
                            if (curChar == 39)
                                jjCheckNAddStates(0, 4);
                            break;
                        case 76:
                            if (curChar == 39 && kind > 20)
                                kind = 20;
                            break;
                        case 107:
                            if ((0xffffff7fffffffffL & l) != 0L)
                                jjCheckNAddStates(16, 18);
                            break;
                        case 108:
                            if (curChar == 39)
                                jjCheckNAddStates(16, 18);
                            break;
                        case 109:
                            if (curChar == 39)
                                jjstateSet[jjnewStateCnt++] = 108;
                            break;
                        case 110:
                            if (curChar == 39 && kind > 26)
                                kind = 26;
                            break;
                        default : break;
                    }
                } while(i != startsAt);
            }
            else if (curChar < 128)
            {
                long l = 1L << (curChar & 077);
                MatchLoop: do
                {
                    switch(jjstateSet[--i])
                    {
                        case 6:
                            if ((0xefffffffbfffffffL & l) != 0L)
                            {
                                if (kind > 24)
                                    kind = 24;
                                jjCheckNAdd(7);
                            }
                            if ((0x4000000040000L & l) != 0L)
                                jjAddStates(19, 20);
                            break;
                        case 1:
                            jjCheckNAddTwoStates(1, 2);
                            break;
                        case 3:
                        case 4:
                            jjCheckNAddTwoStates(4, 2);
                            break;
                        case 7:
                            if ((0xefffffffbfffffffL & l) == 0L)
                                break;
                            if (kind > 24)
                                kind = 24;
                            jjCheckNAdd(7);
                            break;
                        case 8:
                            if ((0x4000000040000L & l) != 0L)
                                jjAddStates(19, 20);
                            break;
                        case 9:
                            if ((0x400000004000L & l) != 0L && kind > 18)
                                kind = 18;
                            break;
                        case 10:
                        case 24:
                            if ((0x800000008000L & l) != 0L)
                                jjCheckNAdd(9);
                            break;
                        case 11:
                            if ((0x20000000200L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 10;
                            break;
                        case 12:
                            if ((0x10000000100000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 11;
                            break;
                        case 13:
                            if ((0x20000000200000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 12;
                            break;
                        case 14:
                            if ((0x10000000100000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 13;
                            break;
                        case 15:
                            if ((0x20000000200L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 14;
                            break;
                        case 16:
                            if ((0x10000000100000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 15;
                            break;
                        case 17:
                            if ((0x8000000080000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 16;
                            break;
                        case 18:
                            if ((0x400000004L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 17;
                            break;
                        case 19:
                            if ((0x20000000200000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 18;
                            break;
                        case 20:
                            if ((0x8000000080000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 19;
                            break;
                        case 21:
                            if (curChar == 95)
                                jjstateSet[jjnewStateCnt++] = 20;
                            break;
                        case 22:
                            if ((0x100000001000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 21;
                            break;
                        case 23:
                            if ((0x8000000080000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 22;
                            break;
                        case 25:
                            if ((0x20000000200L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 24;
                            break;
                        case 26:
                            if ((0x10000000100000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 25;
                            break;
                        case 27:
                            if ((0x20000000200000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 26;
                            break;
                        case 28:
                            if ((0x10000000100000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 27;
                            break;
                        case 29:
                            if ((0x20000000200L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 28;
                            break;
                        case 30:
                            if ((0x10000000100000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 29;
                            break;
                        case 31:
                            if ((0x8000000080000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 30;
                            break;
                        case 32:
                            if ((0x400000004L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 31;
                            break;
                        case 33:
                            if ((0x20000000200000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 32;
                            break;
                        case 34:
                            if ((0x8000000080000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 33;
                            break;
                        case 35:
                            if ((0x100000001000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 34;
                            break;
                        case 36:
                            if ((0x8000000080000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 35;
                            break;
                        case 38:
                        case 55:
                            if ((0x400000004000L & l) != 0L)
                                jjCheckNAdd(39);
                            break;
                        case 40:
                            if ((0x800000008000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 38;
                            break;
                        case 41:
                            if ((0x20000000200L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 40;
                            break;
                        case 42:
                            if ((0x10000000100000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 41;
                            break;
                        case 43:
                            if ((0x20000000200000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 42;
                            break;
                        case 44:
                            if ((0x10000000100000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 43;
                            break;
                        case 45:
                            if ((0x20000000200L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 44;
                            break;
                        case 46:
                            if ((0x10000000100000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 45;
                            break;
                        case 47:
                            if ((0x8000000080000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 46;
                            break;
                        case 48:
                            if ((0x400000004L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 47;
                            break;
                        case 49:
                            if ((0x20000000200000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 48;
                            break;
                        case 50:
                            if ((0x8000000080000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 49;
                            break;
                        case 51:
                            if (curChar == 95)
                                jjstateSet[jjnewStateCnt++] = 50;
                            break;
                        case 52:
                            if ((0x100000001000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 51;
                            break;
                        case 53:
                            if ((0x8000000080000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 52;
                            break;
                        case 54:
                            if ((0x4000000040000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 53;
                            break;
                        case 56:
                            if ((0x800000008000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 55;
                            break;
                        case 57:
                            if ((0x20000000200L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 56;
                            break;
                        case 58:
                            if ((0x10000000100000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 57;
                            break;
                        case 59:
                            if ((0x20000000200000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 58;
                            break;
                        case 60:
                            if ((0x10000000100000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 59;
                            break;
                        case 61:
                            if ((0x20000000200L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 60;
                            break;
                        case 62:
                            if ((0x10000000100000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 61;
                            break;
                        case 63:
                            if ((0x8000000080000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 62;
                            break;
                        case 64:
                            if ((0x400000004L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 63;
                            break;
                        case 65:
                            if ((0x20000000200000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 64;
                            break;
                        case 66:
                            if ((0x8000000080000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 65;
                            break;
                        case 67:
                            if ((0x100000001000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 66;
                            break;
                        case 68:
                            if ((0x8000000080000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 67;
                            break;
                        case 69:
                            if ((0x4000000040000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 68;
                            break;
                        case 70:
                            jjAddStates(13, 15);
                            break;
                        case 75:
                        case 92:
                            if ((0x400000004000L & l) != 0L)
                                jjCheckNAdd(76);
                            break;
                        case 77:
                            if ((0x800000008000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 75;
                            break;
                        case 78:
                            if ((0x20000000200L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 77;
                            break;
                        case 79:
                            if ((0x10000000100000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 78;
                            break;
                        case 80:
                            if ((0x20000000200000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 79;
                            break;
                        case 81:
                            if ((0x10000000100000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 80;
                            break;
                        case 82:
                            if ((0x20000000200L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 81;
                            break;
                        case 83:
                            if ((0x10000000100000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 82;
                            break;
                        case 84:
                            if ((0x8000000080000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 83;
                            break;
                        case 85:
                            if ((0x400000004L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 84;
                            break;
                        case 86:
                            if ((0x20000000200000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 85;
                            break;
                        case 87:
                            if ((0x8000000080000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 86;
                            break;
                        case 88:
                            if (curChar == 95)
                                jjstateSet[jjnewStateCnt++] = 87;
                            break;
                        case 89:
                            if ((0x100000001000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 88;
                            break;
                        case 90:
                            if ((0x8000000080000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 89;
                            break;
                        case 91:
                            if ((0x4000000040000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 90;
                            break;
                        case 93:
                            if ((0x800000008000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 92;
                            break;
                        case 94:
                            if ((0x20000000200L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 93;
                            break;
                        case 95:
                            if ((0x10000000100000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 94;
                            break;
                        case 96:
                            if ((0x20000000200000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 95;
                            break;
                        case 97:
                            if ((0x10000000100000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 96;
                            break;
                        case 98:
                            if ((0x20000000200L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 97;
                            break;
                        case 99:
                            if ((0x10000000100000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 98;
                            break;
                        case 100:
                            if ((0x8000000080000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 99;
                            break;
                        case 101:
                            if ((0x400000004L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 100;
                            break;
                        case 102:
                            if ((0x20000000200000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 101;
                            break;
                        case 103:
                            if ((0x8000000080000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 102;
                            break;
                        case 104:
                            if ((0x100000001000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 103;
                            break;
                        case 105:
                            if ((0x8000000080000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 104;
                            break;
                        case 106:
                            if ((0x4000000040000L & l) != 0L)
                                jjstateSet[jjnewStateCnt++] = 105;
                            break;
                        case 107:
                            jjAddStates(16, 18);
                            break;
                        default : break;
                    }
                } while(i != startsAt);
            }
            else
            {
                // Characters >= 128: index the jjbitVec0 table by 64-char band.
                int i2 = (curChar & 0xff) >> 6;
                long l2 = 1L << (curChar & 077);
                MatchLoop: do
                {
                    switch(jjstateSet[--i])
                    {
                        case 6:
                        case 7:
                            if ((jjbitVec0[i2] & l2) == 0L)
                                break;
                            if (kind > 24)
                                kind = 24;
                            jjCheckNAdd(7);
                            break;
                        case 1:
                            if ((jjbitVec0[i2] & l2) != 0L)
                                jjCheckNAddTwoStates(1, 2);
                            break;
                        case 3:
                        case 4:
                            if ((jjbitVec0[i2] & l2) != 0L)
                                jjCheckNAddTwoStates(4, 2);
                            break;
                        case 70:
                            if ((jjbitVec0[i2] & l2) != 0L)
                                jjAddStates(13, 15);
                            break;
                        case 107:
                            if ((jjbitVec0[i2] & l2) != 0L)
                                jjAddStates(16, 18);
                            break;
                        default : break;
                    }
                } while(i != startsAt);
            }
            if (kind != 0x7fffffff)
            {
                jjmatchedKind = kind;
                jjmatchedPos = curPos;
                kind = 0x7fffffff;
            }
            ++curPos;
            // Swap the current/next state sets; stop when no state survived.
            if ((i = jjnewStateCnt) == (startsAt = 111 - (jjnewStateCnt = startsAt)))
                return curPos;
            try { curChar = input_stream.readChar(); }
            catch(java.io.IOException e) { return curPos; }
        }
    }

    // Shared epsilon-transition targets referenced by jj(Check)NAddStates.
    static final int[] jjnextStates = {
        91, 106, 107, 109, 110, 54, 69, 70, 72, 73, 2, 3, 5, 70, 72, 73,
        107, 109, 110, 23, 36,
    };

    // Fixed images for literal tokens (octal escapes); null for variable tokens.
    public static final String[] jjstrLiteralImages = {
        "", null, null, null, null, null, "\75", "\41\75", "\76", "\76\75", "\74",
        "\74\75", "\46", "\174", "\53", "\50", "\51", "\44\50", null, null, null, null, null,
        null, null, null, null, "\43", };

    public static final String[] lexStateNames = {
        "DEFAULT",
    };

    // Bit sets (indexed by token kind): real tokens, skipped, and special tokens.
    static final long[] jjtoToken = {
        0xf1fffc1L,
    };
    static final long[] jjtoSkip = {
        0x3eL,
    };
    static final long[] jjtoSpecial = {
        0x1eL,
    };

    private ASCII_CharStream input_stream;
    // Per-state round markers preventing duplicate insertion within one round.
    private final int[] jjrounds = new int[111];
    // Double-buffered active-state sets (current half / next half).
    private final int[] jjstateSet = new int[222];
    protected char curChar;

    public RSLParserTokenManager(ASCII_CharStream stream)
    {
        if (ASCII_CharStream.staticFlag)
            throw new Error("ERROR: Cannot use a static CharStream class with a non-static lexical analyzer.");
        input_stream = stream;
    }

    public RSLParserTokenManager(ASCII_CharStream stream, int lexState)
    {
        this(stream);
        SwitchTo(lexState);
    }

    // Reinitialises the token manager on a fresh input stream.
    public void ReInit(ASCII_CharStream stream)
    {
        jjmatchedPos = jjnewStateCnt = 0;
        curLexState = defaultLexState;
        input_stream = stream;
        ReInitRounds();
    }

    private final void ReInitRounds()
    {
        int i;
        jjround = 0x80000001;
        for (i = 111; i-- > 0;)
            jjrounds[i] = 0x80000000;
    }

    public void ReInit(ASCII_CharStream stream, int lexState)
    {
        ReInit(stream);
        SwitchTo(lexState);
    }

    // Only the DEFAULT state (0) exists; anything else is rejected.
    public void SwitchTo(int lexState)
    {
        if (lexState >= 1 || lexState < 0)
            throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE);
        else
            curLexState = lexState;
    }

    // Materialises the Token for the last match, including position info.
    private final Token jjFillToken()
    {
        Token t = Token.newToken(jjmatchedKind);
        t.kind = jjmatchedKind;
        String im = jjstrLiteralImages[jjmatchedKind];
        t.image = (im == null) ? input_stream.GetImage() : im;
        t.beginLine = input_stream.getBeginLine();
        t.beginColumn = input_stream.getBeginColumn();
        t.endLine = input_stream.getEndLine();
        t.endColumn = input_stream.getEndColumn();
        return t;
    }

    int curLexState = 0;
    int defaultLexState = 0;
    int jjnewStateCnt;
    int jjround;
    int jjmatchedPos;
    int jjmatchedKind;

    /**
     * Returns the next token, chaining skipped "special" tokens (whitespace
     * classes) onto it; throws TokenMgrError on an unmatchable character.
     */
    public final Token getNextToken()
    {
        int kind;
        Token specialToken = null;
        Token matchedToken;
        int curPos = 0;
        EOFLoop :
        for (;;)
        {
            try
            {
                curChar = input_stream.BeginToken();
            }
            catch(java.io.IOException e)
            {
                // End of input: return the EOF token (kind 0).
                jjmatchedKind = 0;
                matchedToken = jjFillToken();
                matchedToken.specialToken = specialToken;
                return matchedToken;
            }
            jjmatchedKind = 0x7fffffff;
            jjmatchedPos = 0;
            curPos = jjMoveStringLiteralDfa0_0();
            if (jjmatchedKind != 0x7fffffff)
            {
                // Push back characters read past the longest match.
                if (jjmatchedPos + 1 < curPos)
                    input_stream.backup(curPos - jjmatchedPos - 1);
                if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
                {
                    matchedToken = jjFillToken();
                    matchedToken.specialToken = specialToken;
                    return matchedToken;
                }
                else
                {
                    // Skipped match; if "special", chain it for the next real token.
                    if ((jjtoSpecial[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
                    {
                        matchedToken = jjFillToken();
                        if (specialToken == null)
                            specialToken = matchedToken;
                        else
                        {
                            matchedToken.specialToken = specialToken;
                            specialToken = (specialToken.next = matchedToken);
                        }
                    }
                    continue EOFLoop;
                }
            }
            // No token matched: build a detailed lexical error.
            int error_line = input_stream.getEndLine();
            int error_column = input_stream.getEndColumn();
            String error_after = null;
            boolean EOFSeen = false;
            try { input_stream.readChar(); input_stream.backup(1); }
            catch (java.io.IOException e1) {
                EOFSeen = true;
                error_after = curPos <= 1 ? "" : input_stream.GetImage();
                if (curChar == '\n' || curChar == '\r') {
                    error_line++;
                    error_column = 0;
                }
                else
                    error_column++;
            }
            if (!EOFSeen) {
                input_stream.backup(1);
                error_after = curPos <= 1 ? "" : input_stream.GetImage();
            }
            throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR);
        }
    }
}
| |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.dnd;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.ui.MessageType;
import com.intellij.openapi.ui.popup.Balloon;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.ui.JBColor;
import com.intellij.ui.awt.RelativePoint;
import consulo.awt.TargetAWT;
import consulo.ui.image.Image;
import javax.swing.*;
import java.awt.*;
import java.util.ArrayList;
import java.util.List;
public class Highlighters implements DnDEvent.DropTargetHighlightingType {
private static final List<DropTargetHighlighter> ourHightlighters = new ArrayList<DropTargetHighlighter>();
private static final ArrayList<DropTargetHighlighter> ourCurrentHighlighters = new ArrayList<DropTargetHighlighter>();
static {
ourHightlighters.add(new RectangleHighlighter());
ourHightlighters.add(new FilledRectangleHighlighter());
ourHightlighters.add(new HorizontalLinesHighlighter());
ourHightlighters.add(new TextHighlighter());
ourHightlighters.add(new ErrorTextHighlighter());
ourHightlighters.add(new VerticalLinesHighlighter());
}
static void show(int aType, JLayeredPane aPane, Rectangle aRectangle, DnDEvent aEvent) {
List<DropTargetHighlighter> toShow = new ArrayList<DropTargetHighlighter>();
for (DropTargetHighlighter ourHightlighter : ourHightlighters) {
DropTargetHighlighter each = (DropTargetHighlighter)ourHightlighter;
if ((each.getMask() & aType) != 0) {
toShow.add(each);
}
}
for (int i = 0; i < toShow.size(); i++) {
DropTargetHighlighter each = toShow.get(i);
each.show(aPane, aRectangle, aEvent);
}
ourCurrentHighlighters.addAll(toShow);
}
static void hideAllBut(int aType) {
for (int i = 0; i < ourCurrentHighlighters.size(); i++) {
final DropTargetHighlighter each = ourCurrentHighlighters.get(i);
if ((each.getMask() & aType) == 0) {
each.vanish();
ourCurrentHighlighters.remove(each);
}
}
}
static void hide() {
for (int i = 0; i < ourCurrentHighlighters.size(); i++) {
(ourCurrentHighlighters.get(i)).vanish();
}
ourCurrentHighlighters.clear();
}
static void hide(int aType) {
for (int i = 0; i < ourCurrentHighlighters.size(); i++) {
final DropTargetHighlighter each = ourCurrentHighlighters.get(i);
if ((each.getMask() & aType) != 0) {
each.vanish();
ourCurrentHighlighters.remove(each);
}
}
}
static boolean isVisibleExcept(int type) {
int resultType = type;
for (int i = 0; i < ourCurrentHighlighters.size(); i++) {
final DropTargetHighlighter each = ourCurrentHighlighters.get(i);
resultType = resultType | each.getMask();
}
return type != resultType;
}
static boolean isVisible() {
return ourCurrentHighlighters.size() > 0;
}
private static abstract class AbstractComponentHighlighter extends JPanel implements DropTargetHighlighter {
protected AbstractComponentHighlighter() {
setOpaque(false);
setLayout(new BorderLayout());
}
public final void show(JLayeredPane aPane, Rectangle aRectangle, DnDEvent aEvent) {
if (getParent() != aPane) {
vanish();
aPane.add(this, getLayer());
}
_show(aPane, aRectangle, aEvent);
}
protected Integer getLayer() {
return JLayeredPane.MODAL_LAYER;
}
public void vanish() {
final Container parent = getParent();
Rectangle bounds = getBounds();
if (parent != null) {
parent.remove(this);
parent.repaint(bounds.x, bounds.y, bounds.width, bounds.height);
}
}
protected abstract void _show(JLayeredPane aPane, Rectangle aRectangle, DnDEvent aEvent);
}
public abstract static class BaseTextHighlighter implements DropTargetHighlighter {
private Balloon myCurrentBalloon;
private MessageType myMessageType;
public BaseTextHighlighter(MessageType type) {
myMessageType = type;
}
public void show(JLayeredPane aPane, Rectangle aRectangle, DnDEvent aEvent) {
if (!Registry.is("ide.dnd.textHints")) return;
final String result = aEvent.getExpectedDropResult();
if (result != null && result.length() > 0) {
RelativePoint point = null;
for (DropTargetHighlighter each : ourHightlighters) {
if (each instanceof AbstractComponentHighlighter) {
Rectangle rec = ((AbstractComponentHighlighter)each).getBounds();
point = new RelativePoint(aPane, new Point(rec.x + rec.width, rec.y + rec.height / 2));
break;
}
}
if (point == null) {
point = new RelativePoint(aPane, new Point(aRectangle.x + aRectangle.width, aRectangle.y + aRectangle.height / 2));
}
myCurrentBalloon = JBPopupFactory.getInstance().createHtmlTextBalloonBuilder(result, myMessageType, null).createBalloon();
myCurrentBalloon.show(point, Balloon.Position.atRight);
}
}
public void vanish() {
if (myCurrentBalloon != null) {
myCurrentBalloon.hide();
myCurrentBalloon = null;
}
}
protected Integer getLayer() {
return JLayeredPane.POPUP_LAYER;
}
}
public static class TextHighlighter extends BaseTextHighlighter {
public TextHighlighter() {
super(MessageType.INFO);
}
public int getMask() {
return TEXT;
}
}
private static class ErrorTextHighlighter extends BaseTextHighlighter {
public ErrorTextHighlighter() {
super(MessageType.ERROR);
}
public int getMask() {
return ERROR_TEXT;
}
}
private static class FilledRectangleHighlighter extends AbstractComponentHighlighter {
public FilledRectangleHighlighter() {
super();
setOpaque(true);
setBorder(BorderFactory.createLineBorder(JBColor.RED));
setBackground(JBColor.RED);
}
protected void _show(JLayeredPane aPane, Rectangle aRectangle, DnDEvent aEvent) {
setBounds(aRectangle);
}
public int getMask() {
return FILLED_RECTANGLE;
}
}
private static class RectangleHighlighter extends AbstractComponentHighlighter {
public RectangleHighlighter() {
super();
setOpaque(false);
setBorder(BorderFactory.createLineBorder(JBColor.RED));
}
protected void _show(JLayeredPane aPane, Rectangle aRectangle, DnDEvent aEvent) {
setBounds(aRectangle);
}
public int getMask() {
return RECTANGLE;
}
}
private static class HorizontalLinesHighlighter extends AbstractComponentHighlighter {
protected void _show(JLayeredPane aPane, Rectangle aRectangle, DnDEvent aEvent) {
final Rectangle rectangle = new Rectangle(aRectangle.x - AllIcons.Ide.Dnd.Left.getWidth(), aRectangle.y - AllIcons.Ide.Dnd.Left
.getHeight(), aRectangle.width + AllIcons.Ide.Dnd.Left.getWidth() + AllIcons.Ide.Dnd.Right.getWidth(), aRectangle.height + AllIcons.Ide.Dnd.Left
.getHeight());
setBounds(rectangle);
}
protected void paintComponent(Graphics g) {
TargetAWT.to(AllIcons.Ide.Dnd.Left).paintIcon(this, g, 0, (getHeight() / 2));
TargetAWT.to(AllIcons.Ide.Dnd.Right).paintIcon(this, g, getWidth() - AllIcons.Ide.Dnd.Right.getWidth(), (getHeight() / 2));
}
public int getMask() {
return H_ARROWS;
}
}
/**
 * Drop-target highlighter that paints top/bottom arrow icons above and below
 * the target rectangle (mask V_ARROWS).
 */
private static class VerticalLinesHighlighter extends AbstractComponentHighlighter {
private static final Image TOP = AllIcons.Ide.Dnd.Top;
private static final Image BOTTOM = AllIcons.Ide.Dnd.Bottom;
protected void _show(JLayeredPane aPane, Rectangle aRectangle, DnDEvent aEvent) {
// Grow the bounds upward by the top icon's height and extend the height so both
// icons fit outside the target area.
final Rectangle rectangle = new Rectangle(aRectangle.x, aRectangle.y - TOP.getHeight(), aRectangle.width, aRectangle.height + TOP.getHeight() + BOTTOM
.getHeight());
setBounds(rectangle);
}
protected void paintComponent(Graphics g) {
// Icons are horizontally centered; top icon at y=0, bottom icon flush with the bottom edge.
TargetAWT.to(TOP).paintIcon(this, g, (getWidth() - TOP.getWidth()) / 2, 0);
TargetAWT.to(BOTTOM).paintIcon(this, g, (getWidth() - BOTTOM.getWidth()) / 2, getHeight() - BOTTOM.getHeight());
}
public int getMask() {
return V_ARROWS;
}
}
}
| |
package se.lth.cs.docforia.io.multipart;
/*
* Copyright 2016 Marcus Klang
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.google.protobuf.ByteString;
import it.unimi.dsi.fastutil.ints.*;
import it.unimi.dsi.fastutil.objects.*;
import se.lth.cs.docforia.*;
import se.lth.cs.docforia.data.BinaryCoreWriter;
import se.lth.cs.docforia.data.CoreRef;
import se.lth.cs.docforia.data.DataRef;
import se.lth.cs.docforia.data.StringRef;
import se.lth.cs.docforia.io.mem.Output;
import se.lth.cs.docforia.memstore.MemoryCoreEdgeLayer;
import se.lth.cs.docforia.memstore.MemoryCoreNodeLayer;
import java.io.Serializable;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
/**
* Customizable Memory Document encoder
* <p>
* <b>Remarks:</b> Once core properties has been initiated, instances of this class are thread safe for encoding.
*/
public class MultipartWriterV1 implements Serializable {
// Property keys that are serialized into the header's core-property table
// instead of the generic document properties (see buildHeader/buildProperties).
private ObjectOpenHashSet<String> coreproperties = new ObjectOpenHashSet<>();
/**
 * Identity of a property column: the CoreRef type-id byte plus the property key.
 * Used as a hash key when grouping node/edge property values into columns.
 */
protected static class Property {
public byte type;
public String key;
public Property(byte type, String key) {
this.type = type;
this.key = key;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Property property = (Property) o;
if (type != property.type) return false;
return key != null ? key.equals(property.key) : property.key == null;
}
@Override
public int hashCode() {
int result = key != null ? key.hashCode() : 0;
result = 31 * result + (int) type;
return result;
}
}
/**
 * Mutable state shared between the header, node-layer and edge-layer builders
 * during a single {@link #encode} pass.
 */
private static class BuildContext {
// Layer name -> numeric id, in insertion order (node-layer ids come first;
// edge-layer ids continue after them, see buildHeader).
Object2IntLinkedOpenHashMap<String> nodeLayers = new Object2IntLinkedOpenHashMap<>();
Object2IntLinkedOpenHashMap<String> edgeLayers = new Object2IntLinkedOpenHashMap<>();
// Layer name -> total node count written for that layer (all variants).
Object2IntOpenHashMap<String> numNodesLayer = new Object2IntOpenHashMap<>();
// Node -> its running index within its layer, assigned by buildNodeLayer.
Object2IntOpenHashMap<NodeRef> nodeIndex = new Object2IntOpenHashMap<>();
}
/**
 * Default constructor; no keys are treated as core properties until
 * {@link #addCoreProperty(String)} is called.
 */
public MultipartWriterV1() {
}
/**
 * Marks a document property key as a "core" property, which is then stored in
 * the header's core-property table rather than the generic properties message.
 */
public void addCoreProperty(String propertyKey) {
coreproperties.add(propertyKey);
}
// Distinct node-layer names in store order.
// NOTE(review): only consecutive duplicates are collapsed — this assumes the
// store yields variants of the same layer contiguously; confirm that contract.
private static ArrayList<String> nodeLayers(Document doc) {
ArrayList<String> layers = new ArrayList<>();
String lastLayer = "";
for (DocumentNodeLayer nodeLayer : doc.store().nodeLayers()) {
if(!lastLayer.equals(nodeLayer.getLayer())) {
layers.add(nodeLayer.getLayer());
lastLayer = nodeLayer.getLayer();
}
}
return layers;
}
// Distinct edge-layer names in store order (same consecutive-duplicate caveat
// as nodeLayers above).
private static ArrayList<String> edgeLayers(Document doc) {
ArrayList<String> layers = new ArrayList<>();
String lastLayer = "";
for (DocumentEdgeLayer nodeLayer : doc.store().edgeLayers()) {
if(!lastLayer.equals(nodeLayer.getLayer())) {
layers.add(nodeLayer.getLayer());
lastLayer = nodeLayer.getLayer();
}
}
return layers;
}
/**
 * Collects the ids of every node layer referenced by the head or tail of any
 * edge in the given edge layer, across all of its variants.
 */
private static IntAVLTreeSet findEdgeLayerDependencies(Document doc, Object2IntLinkedOpenHashMap<String> layer2id, String edgeLayer) {
IntAVLTreeSet uniqueLayers = new IntAVLTreeSet();
for (String variant : doc.engine().edgeLayerVariants(edgeLayer)) {
for (EdgeRef edgeRef : doc.engine().edges(edgeLayer, variant)) {
EdgeStore store = edgeRef.get();
uniqueLayers.add(layer2id.getInt(store.getHead().layer().getLayer()));
uniqueLayers.add(layer2id.getInt(store.getTail().layer().getLayer()));
}
}
return uniqueLayers;
}
/**
 * Converts one (key, value) property into a protobuf Properties.Entry.
 * Primitive CoreRef types go into their typed protobuf fields; any other
 * CoreRef type is serialized via BinaryCoreWriter into the binary field.
 * @throws UnsupportedOperationException if the value is not a CoreRef
 */
private MultipartBinary.Properties.Entry convert(String key, DataRef value) {
MultipartBinary.Properties.Entry.Builder entry = MultipartBinary.Properties.Entry.newBuilder();
entry.setKey(key);
MultipartBinary.Value.Builder valueBuilder = MultipartBinary.Value.newBuilder();
if(!(value instanceof CoreRef)) {
throw new UnsupportedOperationException("Non Core-Ref properties are not supported, when converting property with key: " + key + ", data: " + value.stringValue());
}
CoreRef corevalue = (CoreRef)value;
// The protobuf ValueType enum numbers mirror the CoreRef type-id bytes.
valueBuilder.setType(MultipartBinary.ValueType.valueOf(Byte.toUnsignedInt(corevalue.id().value)));
switch (corevalue.id()) {
case STRING:
valueBuilder.setStringValue(corevalue.stringValue());
break;
case INT:
valueBuilder.setIntValue(corevalue.intValue());
break;
case LONG:
valueBuilder.setLongValue(corevalue.longValue());
break;
case BOOLEAN:
valueBuilder.setBoolValue(corevalue.booleanValue());
break;
case FLOAT:
valueBuilder.setFloatValue(corevalue.floatValue());
break;
case DOUBLE:
valueBuilder.setDoubleValue(corevalue.doubleValue());
break;
default:
// Complex types: serialize through the binary core writer (buffer cap 1 GB).
BinaryCoreWriter writer = new BinaryCoreWriter(new Output(32, 1<<30));
corevalue.write(writer);
valueBuilder.setBinaryValue(ByteString.copyFrom(writer.getWriter().getBuffer(), 0, writer.getWriter().position()));
break;
}
entry.setValue(valueBuilder);
return entry.build();
}
/**
 * Builds (or incrementally extends) the header: document metadata, the
 * node/edge layer id tables, per-edge-layer node-layer dependencies, and the
 * core properties.
 * @param prevheader header from a previous encode to merge into, may be null
 * @param context receives the layer-name -> id assignments used by the layer builders
 */
private MultipartBinary.Header buildHeader(Document doc, MultipartBinary.Header prevheader, BuildContext context) {
// NOTE(review): headerWriter is never used in this method — dead local.
Output headerWriter = new Output(256, 256<<20);
MultipartBinary.Header.Builder builder = prevheader != null ?
MultipartBinary.Header.newBuilder(prevheader) : MultipartBinary.Header.newBuilder();
if(doc.hasProperty(Document.PROP_URI))
builder.addAllUri(Arrays.asList(doc.getStringArrayProperty(Document.PROP_URI)));
if(doc.id() != null)
builder.setId(doc.id());
if(doc.language() != null)
builder.setLang(doc.language());
if(doc.type() != null)
builder.setType(doc.type());
builder.setLength(doc.length());
// Re-seed the node-layer id table from the previous header so ids stay stable
// across incremental encodes.
// NOTE(review): only node layers are restored — prevheader's edge layers are not
// loaded into context.edgeLayers; confirm this asymmetry is intended.
if(prevheader != null) {
List<String> nodeLayerList = prevheader.getNodeLayerList();
for (int i = 0; i < nodeLayerList.size(); i++) {
context.nodeLayers.add(nodeLayerList.get(i), i);
}
}
//Layers - find ids and dependencies
// i counts node layers first and keeps counting through the edge loop, so it is
// offset by the node-layer count when used as the edgeLayerDeps index below.
// NOTE(review): verify this offset matches what the multipart reader expects.
int i = 0;
for (String s : nodeLayers(doc)) {
if(!context.nodeLayers.containsKey(s)) {
context.nodeLayers.put(s, builder.getNodeLayerCount());
builder.addNodeLayer(s);
}
i++;
}
for (String s : edgeLayers(doc)) {
if(!context.edgeLayers.containsKey(s)) {
// Edge-layer ids continue after the node-layer id range.
context.edgeLayers.put(s, builder.getNodeLayerCount()+builder.getEdgeLayerCount());
builder.addEdgeLayer(s);
}
IntAVLTreeSet deps = findEdgeLayerDependencies(doc, context.nodeLayers, s);
if(builder.getEdgeLayerDepsCount() <= i) {
builder.addEdgeLayerDeps(MultipartBinary.Header.Dependency.newBuilder().addAllNodeLayerId(deps).build());
} else {
builder.setEdgeLayerDeps(i, MultipartBinary.Header.Dependency.newBuilder().addAllNodeLayerId(deps).build());
}
i++;
}
// Core properties are read straight off the document into the header.
MultipartBinary.Properties.Builder coreprops = MultipartBinary.Properties.newBuilder();
for (String coreproperty : coreproperties) {
DataRef property = doc.getRefProperty(coreproperty);
coreprops.addEntry(convert(coreproperty, property));
}
builder.setCoreproperties(coreprops.build());
return builder.build();
}
/**
 * Writes one property value, escaping its first byte so it can never be
 * confused with the column-layout markers 0x00 (skip) and 0xFE (run-length
 * skip): when the value would start with 0x00, 0xFE or 0xFF, a single 0xFF
 * escape byte is emitted first. Strings are written directly; every other
 * type goes through BinaryCoreWriter.
 */
private void writeEscapedProperty(Output output, CoreRef ref) {
Output data = new Output(32,2<<29);
switch (ref.id()) {
case STRING:
StringRef stringRef = (StringRef)ref;
data.writeString(stringRef.stringValue());
break;
default:
BinaryCoreWriter writer = new BinaryCoreWriter(data);
ref.write(writer);
break;
}
// NOTE(review): assumes every value serializes to at least one byte.
if(data.getBuffer()[0] == 0) {
output.writeByte((byte)0xFF);
}
else if(data.getBuffer()[0] == (byte)0xFF) {
output.writeByte((byte)0xFF);
}
else if(data.getBuffer()[0] == (byte)0xFE) {
output.writeByte((byte)0xFF);
}
data.writeTo(output);
}
/**
 * Encodes one property column (all values for a given key/type across a
 * layer) using null-length encoding: gaps of 3 or more missing indices are
 * written as 0xFE + varint(gap), shorter gaps as individual 0x00 bytes, and
 * each present value via {@link #writeEscapedProperty}.
 */
private MultipartBinary.PropertyColumn encodePropertyColumn(Property prop, Int2ObjectArrayMap<CoreRef> values) {
MultipartBinary.PropertyColumn.Builder colBuilder = MultipartBinary.PropertyColumn.newBuilder();
colBuilder.setKey(prop.key);
colBuilder.setType(MultipartBinary.ValueType.valueOf(Byte.toUnsignedInt(prop.type)));
//1. Write layout description, NLE (Null-Length-Encoding), min 3 delta
Output writer = new Output(32, 1<<30); //Max 1 GB
//0x00 == skip, 0xFF == escaped byte follows
int lastIndex = 0;
for (Int2ObjectMap.Entry<CoreRef> entry : values.int2ObjectEntrySet()) {
if(entry.getIntKey() - lastIndex >= 3) {
writer.writeByte((byte)0xFE);
writer.writeVarInt(entry.getIntKey() - lastIndex, true);
} else {
for(int i = 0; i < entry.getIntKey() - lastIndex; i++) {
writer.writeByte(0);
}
}
writeEscapedProperty(writer, entry.getValue());
lastIndex = entry.getIntKey()+1;
}
colBuilder.setData(ByteString.copyFrom(writer.getBuffer(), 0, writer.position()));
return colBuilder.build();
}
/**
 * Serializes one node layer: per-variant node counts and delta-encoded
 * annotation ranges plus columnar property data. Also records every node's
 * layer-global index and the layer's total node count into the context for
 * the edge encoder.
 */
private MultipartBinary.NodeLayer buildNodeLayer(Document doc, BuildContext context, String nodeLayer) {
MultipartBinary.NodeLayer.Builder nodeLayerBuilder = MultipartBinary.NodeLayer.newBuilder();
nodeLayerBuilder.setIdx(context.nodeLayers.getInt(nodeLayer));
// Known core layers are referenced by numeric id; everything else by name.
MemoryCoreNodeLayer memoryCoreNodeLayer = MemoryCoreNodeLayer.fromLayerName(nodeLayer);
if(memoryCoreNodeLayer == MemoryCoreNodeLayer.UNKNOWN) {
nodeLayerBuilder.setUserdefined(nodeLayer);
} else {
nodeLayerBuilder.setId(memoryCoreNodeLayer.id);
}
// nodeCounter runs across ALL variants of this layer, giving each node a
// layer-global index that buildEdgeLayer relies on.
int nodeCounter = 0;
Object2ObjectOpenHashMap<Property, Int2ObjectArrayMap<CoreRef>> property2indicies = new Object2ObjectOpenHashMap<>();
for (Optional<String> variant : doc.engine().nodeLayerAllVariants(nodeLayer)) {
MultipartBinary.NodeLayer.Variant.Builder variantBuilder = MultipartBinary.NodeLayer.Variant.newBuilder();
if(variant.isPresent())
variantBuilder.setName(variant.get());
DocumentNodeLayer nodeRefs = doc.store().nodeLayer(nodeLayer, variant.orElse(null));
variantBuilder.setNumNodes(nodeRefs.size());
int currentRange = 0;
for (NodeRef nodeRef : nodeRefs) {
NodeStore store = nodeRef.get();
// Group property values into (type,key) columns keyed by node index.
for (Map.Entry<String, DataRef> entry : store.properties()) {
if(!(entry.getValue() instanceof CoreRef)) {
throw new UnsupportedOperationException(
"In layer " + nodeLayer + "#" + variant + ", node "
+ nodeCounter + " contains a property "
+ entry.getKey() + " that is not a CoreRef this is unsupported");
}
CoreRef corevalue = (CoreRef)entry.getValue();
Property prop = new Property(corevalue.id().value, entry.getKey());
Int2ObjectArrayMap<CoreRef> columns = property2indicies.get(prop);
if(columns == null) {
property2indicies.put(prop, columns = new Int2ObjectArrayMap<>());
}
columns.put(nodeCounter, corevalue);
}
// Annotation ranges are stored as deltas from the previous boundary position.
if(store.isAnnotation()) {
variantBuilder.addRanges(store.getStart() - currentRange);
currentRange = store.getStart();
variantBuilder.addRanges(store.getEnd() - currentRange);
currentRange = store.getEnd();
}
// NOTE(review): the NodeStore is used as the NodeRef key here — presumably
// NodeStore implements NodeRef; confirm.
context.nodeIndex.put(store, nodeCounter);
nodeCounter++;
}
nodeLayerBuilder.addVariants(variantBuilder);
}
context.numNodesLayer.put(nodeLayer, nodeCounter);
for (Object2ObjectMap.Entry<Property, Int2ObjectArrayMap<CoreRef>> entry : property2indicies.object2ObjectEntrySet()) {
nodeLayerBuilder.addProperties(encodePropertyColumn(entry.getKey(), entry.getValue()));
}
return nodeLayerBuilder.build();
}
/**
 * Orders layers by layer name, then by variant name with the null variant
 * sorting first.
 */
private static int compareLayers(DocumentNodeLayer x, DocumentNodeLayer y) {
String xlayer = x.getLayer();
String ylayer = y.getLayer();
int result = xlayer.compareTo(ylayer);
if(result == 0)
{
String xvariant = x.getVariant();
String yvariant = y.getVariant();
if(xvariant == null && yvariant == null)
return 0;
else if(xvariant == null)
return -1;
else if(yvariant == null)
return 1;
else
return xvariant.compareTo(yvariant);
}
else
return result;
}
/** Orders edges by head layer, then tail layer (see {@link #compareLayers}). */
private static int compareEdges(EdgeRef x, EdgeRef y) {
EdgeStore xv = x.get();
EdgeStore yv = y.get();
int result = compareLayers(xv.getHead().layer(), yv.getHead().layer());
if(result == 0)
return compareLayers(xv.getTail().layer(), yv.getTail().layer());
else
return result;
}
/**
 * Serializes one edge layer. Per variant, edges are sorted by (head layer,
 * tail layer); every referenced node layer gets a contiguous block of node-id
 * space (recorded as "nodestarts"/"nodelayers"), and each edge's head/tail
 * node id (block start + node index) is delta-encoded against the previous
 * edge's. Property values are stored as columns indexed by the edge counter,
 * which runs across all variants.
 */
private MultipartBinary.EdgeLayer buildEdgeLayer(Document doc, BuildContext context, String edgeLayer) {
MultipartBinary.EdgeLayer.Builder edgeLayerBuilder = MultipartBinary.EdgeLayer.newBuilder();
edgeLayerBuilder.setIdx(context.edgeLayers.getInt(edgeLayer));
MemoryCoreEdgeLayer memoryCoreEdgeLayer = MemoryCoreEdgeLayer.fromLayerName(edgeLayer);
if(memoryCoreEdgeLayer == MemoryCoreEdgeLayer.UNKNOWN) {
edgeLayerBuilder.setUserdefined(edgeLayer);
} else {
edgeLayerBuilder.setId(memoryCoreEdgeLayer.id);
}
Object2ObjectOpenHashMap<Property, Int2ObjectArrayMap<CoreRef>> property2indicies = new Object2ObjectOpenHashMap<>();
int edgeCounter = 0;
for (Optional<String> variant : doc.engine().edgeLayerAllVariants(edgeLayer)) {
MultipartBinary.EdgeLayer.Variant.Builder variantBuilder = MultipartBinary.EdgeLayer.Variant.newBuilder();
if(variant.isPresent())
variantBuilder.setName(variant.get());
DocumentEdgeLayer edgeRefs = doc.store().edgeLayer(edgeLayer, variant.orElse(null));
List<EdgeRef> edges = StreamSupport.stream(edgeRefs.spliterator(), false).collect(Collectors.toList());
Collections.sort(edges, MultipartWriterV1::compareEdges);
// Allocate a contiguous node-id block for each node layer referenced by a
// head (first pass) then by a tail (second pass), in first-seen order.
IntArrayList layerids = new IntArrayList();
Int2IntOpenHashMap layerstarts = new Int2IntOpenHashMap();
int allocatedStarts = 0;
for (EdgeRef edge : edges) {
EdgeStore edgeStore = edge.get();
String headLayer = edgeStore.getHead().layer().getLayer();
if(!layerstarts.containsKey(context.nodeLayers.getInt(headLayer))) {
int layerid = context.nodeLayers.getInt(headLayer);
layerids.add(layerid);
layerstarts.put(layerid, allocatedStarts);
allocatedStarts += context.numNodesLayer.getInt(headLayer);
}
}
for (EdgeRef edge : edges) {
EdgeStore edgeStore = edge.get();
String tailLayer = edgeStore.getTail().layer().getLayer();
if(!layerstarts.containsKey(context.nodeLayers.getInt(tailLayer))) {
int layerid = context.nodeLayers.getInt(tailLayer);
layerids.add(layerid);
layerstarts.put(layerid, allocatedStarts);
allocatedStarts += context.numNodesLayer.getInt(tailLayer);
}
}
int lastHead = 0;
int lastTail = 0;
for (EdgeRef edge : edges) {
EdgeStore store = edge.get();
// Group edge property values into (type,key) columns keyed by edge index.
for (Map.Entry<String, DataRef> entry : store.properties()) {
if(!(entry.getValue() instanceof CoreRef)) {
throw new UnsupportedOperationException(
"In edge layer " + edgeLayer + "#" + variant + ", node "
+ edgeCounter + " contains a property "
+ entry.getKey() + " that is not a CoreRef this is unsupported");
}
CoreRef corevalue = (CoreRef)entry.getValue();
Property prop = new Property(corevalue.id().value, entry.getKey());
Int2ObjectArrayMap<CoreRef> columns = property2indicies.get(prop);
if(columns == null) {
property2indicies.put(prop, columns = new Int2ObjectArrayMap<>());
}
columns.put(edgeCounter, corevalue);
}
// Head/tail node ids = layer block start + node index, delta-encoded
// against the previous edge's head/tail.
int layerId = context.nodeLayers.getInt(store.getHead().layer().getLayer());
int id = context.nodeIndex.getInt(store.getHead())+layerstarts.get(layerId);
variantBuilder.addHead(id - lastHead);
lastHead = id;
layerId = context.nodeLayers.getInt(store.getTail().layer().getLayer());
id = context.nodeIndex.getInt(store.getTail())+layerstarts.get(layerId);
variantBuilder.addTail(id - lastTail);
lastTail = id;
edgeCounter++;
}
// Emit the (block start, node layer id) tables in allocation order.
IntListIterator iter = layerids.iterator();
while(iter.hasNext()) {
int layerid;
variantBuilder.addNodestarts(layerstarts.get(layerid = iter.nextInt()));
variantBuilder.addNodelayers(layerid);
}
edgeLayerBuilder.addVariants(variantBuilder.build());
}
for (Object2ObjectMap.Entry<Property, Int2ObjectArrayMap<CoreRef>> entry : property2indicies.object2ObjectEntrySet()) {
edgeLayerBuilder.addProperties(encodePropertyColumn(entry.getKey(), entry.getValue()));
}
return edgeLayerBuilder.build();
}
/**
 * Collects the document-level properties, excluding core properties (those
 * live in the header) and the built-in PROP_ALL keys.
 */
private MultipartBinary.Properties buildProperties(Document doc, BuildContext context) {
MultipartBinary.Properties.Builder propBuilder = MultipartBinary.Properties.newBuilder();
for (Map.Entry<String, DataRef> entry : doc.properties()) {
if(coreproperties.contains(entry.getKey()) || Document.PROP_ALL.contains(entry.getKey()))
continue;
propBuilder.addEntry(convert(entry.getKey(), entry.getValue()));
}
return propBuilder.build();
}
/**
 * Encode given document.
 * @param doc the document to encode
 * @param messages previous messages to merge, may be null
 * @return the populated (new or merged) MultipartMessages
 */
public MultipartMessages encode(Document doc, MultipartMessages messages) {
MultipartMessages output = messages != null ? messages : new MultipartMessages();
BuildContext context = new BuildContext();
// Header first: it assigns the layer ids the layer builders depend on.
output.header = buildHeader(doc, output.header, context);
output.text = doc.text();
output.properties = buildProperties(doc, context);
// Node layers must be built before edge layers: the edge encoder needs
// context.nodeIndex and context.numNodesLayer to be populated.
for (String nodeLayer : nodeLayers(doc)) {
MultipartBinary.NodeLayer nodeLayerMessage = buildNodeLayer(doc, context, nodeLayer);
output.nodeLayers.put(nodeLayer, nodeLayerMessage);
}
for (String edgeLayer : edgeLayers(doc)) {
MultipartBinary.EdgeLayer edgeLayerMessage = buildEdgeLayer(doc, context, edgeLayer);
output.edgeLayers.put(edgeLayer, edgeLayerMessage);
}
return output;
}
}
| |
package org.zstack.configuration;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Configurable;
import org.zstack.core.cascade.CascadeConstant;
import org.zstack.core.cascade.CascadeFacade;
import org.zstack.core.cloudbus.CloudBus;
import org.zstack.core.db.DatabaseFacade;
import org.zstack.core.errorcode.ErrorFacade;
import org.zstack.header.core.NopeCompletion;
import org.zstack.header.core.workflow.*;
import org.zstack.header.errorcode.SysErrors;
import org.zstack.core.workflow.*;
import org.zstack.header.configuration.*;
import org.zstack.header.core.Completion;
import org.zstack.header.errorcode.ErrorCode;
import org.zstack.header.message.APIDeleteMessage;
import org.zstack.header.message.APIMessage;
import org.zstack.header.message.Message;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/**
* Created with IntelliJ IDEA.
* User: frank
* Time: 9:09 PM
* To change this template use File | Settings | File Templates.
*/
@Configurable(preConstruction = true, autowire = Autowire.BY_TYPE)
public class InstanceOfferingBase implements InstanceOffering {
    /** Message bus used for replies, events and error reporting. */
    @Autowired
    protected CloudBus bus;
    /** Database facade for persisting and deleting offering rows. */
    @Autowired
    protected DatabaseFacade dbf;
    /** Cascade facade driving dependent-resource checks and deletions. */
    @Autowired
    protected CascadeFacade casf;
    /** Error-code factory. */
    @Autowired
    protected ErrorFacade errf;

    /** The offering this handler instance operates on. */
    protected InstanceOfferingVO self;

    public InstanceOfferingBase(InstanceOfferingVO vo) {
        self = vo;
    }

    /** Inventory (API view) of {@link #self}. */
    protected InstanceOfferingInventory getInventory() {
        return InstanceOfferingInventory.valueOf(self);
    }

    /**
     * Entry point: routes API and local messages to their handlers. Any
     * exception is logged with a message dump and converted into an error
     * reply matching the message type.
     */
    @Override
    public void handleMessage(Message msg) {
        try {
            if (msg instanceof APIMessage) {
                handleApiMessage((APIMessage) msg);
            } else {
                handleLocalMessage(msg);
            }
        } catch (Exception e) {
            bus.logExceptionWithMessageDump(msg, e);
            bus.replyErrorByMessageType(msg, e);
        }
    }

    /** Hook for subclasses to clean up extra state before deletion; no-op by default. */
    @Override
    public void deleteHook() {
    }

    private void handleLocalMessage(Message msg) {
        if (!(msg instanceof InstanceOfferingDeletionMsg)) {
            bus.dealWithUnknownMessage(msg);
            return;
        }
        handle((InstanceOfferingDeletionMsg) msg);
    }

    private void handle(InstanceOfferingDeletionMsg msg) {
        // Give subclasses a chance to clean up, then remove the row and reply.
        deleteHook();
        InstanceOfferingDeletionReply reply = new InstanceOfferingDeletionReply();
        dbf.removeByPrimaryKey(msg.getInstanceOfferingUuid(), InstanceOfferingVO.class);
        bus.reply(msg, reply);
    }

    private void handleApiMessage(APIMessage msg) {
        if (msg instanceof APIChangeInstanceOfferingStateMsg) {
            handle((APIChangeInstanceOfferingStateMsg) msg);
        } else if (msg instanceof APIDeleteInstanceOfferingMsg) {
            handle((APIDeleteInstanceOfferingMsg) msg);
        } else if (msg instanceof APIUpdateInstanceOfferingMsg) {
            handle((APIUpdateInstanceOfferingMsg) msg);
        } else {
            bus.dealWithUnknownMessage(msg);
        }
    }

    /**
     * Applies the non-null fields of the update message onto {@link #self}.
     *
     * @return {@code self} when at least one field changed, otherwise {@code null}
     */
    protected InstanceOfferingVO updateInstanceOffering(APIUpdateInstanceOfferingMsg msg) {
        boolean changed = false;
        if (msg.getName() != null) {
            self.setName(msg.getName());
            changed = true;
        }
        if (msg.getDescription() != null) {
            self.setDescription(msg.getDescription());
            changed = true;
        }
        if (msg.getAllocatorStrategy() != null) {
            self.setAllocatorStrategy(msg.getAllocatorStrategy());
            changed = true;
        }
        return changed ? self : null;
    }

    private void handle(APIUpdateInstanceOfferingMsg msg) {
        InstanceOfferingVO changed = updateInstanceOffering(msg);
        if (changed != null) {
            // Persist only when something actually changed.
            self = dbf.updateAndRefresh(changed);
        }
        APIUpdateInstanceOfferingEvent evt = new APIUpdateInstanceOfferingEvent(msg.getId());
        evt.setInventory(getInventory());
        bus.publish(evt);
    }

    /**
     * Builds one cascade step for the deletion flow chain: runs the given
     * cascade action and advances the chain on success, fails it otherwise.
     */
    private Flow cascadeFlow(final String cascadeCode, final String issuer, final List<InstanceOfferingInventory> ctx) {
        return new NoRollbackFlow() {
            @Override
            public void run(final FlowTrigger trigger, Map data) {
                casf.asyncCascade(cascadeCode, issuer, ctx, new Completion(trigger) {
                    @Override
                    public void success() {
                        trigger.next();
                    }

                    @Override
                    public void fail(ErrorCode errorCode) {
                        trigger.fail(errorCode);
                    }
                });
            }
        };
    }

    /**
     * Deletes the offering through a cascade flow chain. Permissive mode first
     * runs a deletion check, then the delete; otherwise a force-delete is run
     * directly. On completion a cleanup cascade fires and the event is
     * published (with an error code attached on failure).
     */
    private void handle(APIDeleteInstanceOfferingMsg msg) {
        final APIDeleteInstanceOfferingEvent evt = new APIDeleteInstanceOfferingEvent(msg.getId());
        final String issuer = InstanceOfferingVO.class.getSimpleName();
        final List<InstanceOfferingInventory> ctx = InstanceOfferingInventory.valueOf(Arrays.asList(self));
        FlowChain chain = FlowChainBuilder.newSimpleFlowChain();
        chain.setName(String.format("delete-instance-offering-%s", msg.getUuid()));
        if (msg.getDeletionMode() == APIDeleteMessage.DeletionMode.Permissive) {
            // Permissive: verify nothing blocks the deletion before deleting.
            chain.then(cascadeFlow(CascadeConstant.DELETION_CHECK_CODE, issuer, ctx));
            chain.then(cascadeFlow(CascadeConstant.DELETION_DELETE_CODE, issuer, ctx));
        } else {
            // Enforcing: delete unconditionally.
            chain.then(cascadeFlow(CascadeConstant.DELETION_FORCE_DELETE_CODE, issuer, ctx));
        }
        chain.done(new FlowDoneHandler(msg) {
            @Override
            public void handle(Map data) {
                casf.asyncCascadeFull(CascadeConstant.DELETION_CLEANUP_CODE, issuer, ctx, new NopeCompletion());
                bus.publish(evt);
            }
        }).error(new FlowErrorHandler(msg) {
            @Override
            public void handle(ErrorCode errCode, Map data) {
                evt.setError(errf.instantiateErrorCode(SysErrors.DELETE_RESOURCE_ERROR, errCode));
                bus.publish(evt);
            }
        }).start();
    }

    private void handle(APIChangeInstanceOfferingStateMsg msg) {
        InstanceOfferingStateEvent stateEvent = InstanceOfferingStateEvent.valueOf(msg.getStateEvent());
        self.setState(stateEvent == InstanceOfferingStateEvent.disable
                ? InstanceOfferingState.Disabled
                : InstanceOfferingState.Enabled);
        self = dbf.updateAndRefresh(self);
        APIChangeInstanceOfferingStateEvent evt = new APIChangeInstanceOfferingStateEvent(msg.getId());
        evt.setInventory(getInventory());
        bus.publish(evt);
    }
}
| |
package com.google.android.finsky.utils;
import android.animation.ObjectAnimator;
import android.animation.TimeInterpolator;
import android.annotation.TargetApi;
import android.app.Activity;
import android.app.UiModeManager;
import android.content.Context;
import android.content.pm.PackageManager;
import android.content.res.Resources;
import android.graphics.Color;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.os.Build.VERSION;
import android.os.Handler;
import android.os.Looper;
import android.support.v4.view.accessibility.AccessibilityEventCompat;
import android.support.v4.view.accessibility.AccessibilityRecordCompat;
import android.text.TextUtils;
import android.util.DisplayMetrics;
import android.util.Pair;
import android.view.Display;
import android.view.TouchDelegate;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.view.ViewParent;
import android.view.WindowManager;
import android.view.accessibility.AccessibilityEvent;
import android.view.accessibility.AccessibilityManager;
import android.view.animation.Animation;
import android.view.inputmethod.InputMethodManager;
import android.widget.EditText;
import android.widget.TextView;
import com.google.android.finsky.FinskyApp;
import com.google.android.finsky.api.DfeApiConfig;
import com.google.android.finsky.api.model.Document;
import com.google.android.finsky.experiments.FinskyExperiments;
import com.google.android.finsky.protos.Common.Image;
import com.google.android.finsky.protos.Containers.ContainerMetadata;
import com.google.android.finsky.protos.DocV2;
import com.google.android.finsky.protos.EditorialSeriesContainer;
import com.google.android.finsky.protos.NextBanner;
import com.google.android.finsky.protos.SeriesAntenna;
import com.google.android.play.utils.config.GservicesValue;
import java.util.List;
public final class UiUtils
{
// Cached Android TV detection result; computed lazily by isAndroidTv().
private static Boolean sIsAndroidTv = null;
// Shared scratch rect for isVisibleOnScreen().
// NOTE(review): not synchronized — assumed to be touched only on the UI thread; confirm.
private static Rect sTempRect = new Rect();
/**
 * Expands a small view's touch target (via a TouchDelegate installed on its
 * parent) to at least paramInt x paramInt pixels. paramRect1 is a working
 * rect, paramRect2 caches the last applied target to avoid reinstalling an
 * identical delegate.
 * NOTE(review): decompiled control flow — the do/while-with-leading-return
 * shape is a decompiler artifact and will not recompile as-is.
 */
public static void ensureMinimumTouchTargetSize(View paramView, Rect paramRect1, Rect paramRect2, int paramInt)
{
ViewParent localViewParent = paramView.getParent();
// Effectively: bail out unless the parent is a View (needed to host the delegate).
if (!(localViewParent instanceof View)) {}
View localView;
do
{
return;
localView = (View)localViewParent;
// Invisible or already large enough: clear the cached rect and any delegate.
if ((paramView.getVisibility() != 0) || ((paramView.getWidth() >= paramInt) && (paramView.getHeight() >= paramInt)))
{
paramRect1.setEmpty();
localView.setTouchDelegate(null);
return;
}
// Compute the enlarged hit rect into paramRect1.
getTouchTarget(paramView, paramRect1, paramInt, paramInt);
} while (paramRect1.equals(paramRect2)); // unchanged target: keep the existing delegate
paramRect2.set(paramRect1);
localView.setTouchDelegate(new TouchDelegate(paramRect1, paramView));
}
/**
 * After paramLong ms, fades the given cluster view out over 150 ms and then
 * notifies the listener (if any).
 * NOTE(review): decompiled output — `this.val$cluster` and
 * `UiUtils.3.this.val$listener` are synthetic captured-variable references
 * that will not recompile; in the original source these were the captured
 * view and listener parameters.
 */
public static void fadeOutCluster(View paramView, final ClusterFadeOutListener paramClusterFadeOutListener, long paramLong)
{
new Handler(Looper.myLooper()).postDelayed(new Runnable()
{
public final void run()
{
Animation localAnimation = PlayAnimationUtils.getFadeOutAnimation(this.val$cluster.getContext(), 150L, new PlayAnimationUtils.AnimationListenerAdapter()
{
public final void onAnimationEnd(Animation paramAnonymous2Animation)
{
// Fade finished: inform the listener, if one was supplied.
if (UiUtils.3.this.val$listener != null) {
UiUtils.3.this.val$listener.onClusterFadeOutFinish();
}
}
});
this.val$cluster.startAnimation(localAnimation);
}
}, paramLong);
}
/**
 * Parses a color string, returning paramInt as the fallback for empty or
 * invalid input.
 * NOTE(review): decompiled control flow — the empty `if {}` / `while {}`
 * bodies encode: parse only when non-empty; on IllegalArgumentException,
 * log-and-rethrow only when showStagingData is enabled, otherwise swallow
 * and keep the fallback.
 */
private static int getColor(String paramString, int paramInt)
{
if (paramString.length() > 0) {}
try
{
int i = Color.parseColor(paramString.trim());
paramInt = i;
}
catch (IllegalArgumentException localIllegalArgumentException)
{
// Bad colors are only surfaced when staging data is shown.
while (!((Boolean)DfeApiConfig.showStagingData.get()).booleanValue()) {}
FinskyLog.wtf("Bad color: " + paramString, new Object[] { localIllegalArgumentException });
throw localIllegalArgumentException;
}
return paramInt;
}
/**
 * Column count for details-page card grids. Defaults to the featured grid
 * column count; layouts where bool resource 2131427339 is set use a fixed
 * integer resource instead.
 */
public static int getDetailsCardColumnCount(Resources paramResources)
{
    int columnCount = getFeaturedGridColumnCount(paramResources, 1.0D);
    if (paramResources.getBoolean(2131427339)) {
        columnCount = paramResources.getInteger(2131623941);
    }
    return columnCount;
}
/**
 * Column count for featured grids. When bool resource 2131427340 is set the
 * count is derived from the available content width scaled by paramDouble and
 * divided by the cell width (capped at 5 columns); otherwise a fixed integer
 * resource is used.
 */
public static int getFeaturedGridColumnCount(Resources paramResources, double paramDouble)
{
    if (!paramResources.getBoolean(2131427340)) {
        return paramResources.getInteger(2131623939);
    }
    final int contentWidth = getGridColumnContentWidth(paramResources);
    final int cellWidth = paramResources.getDimensionPixelSize(2131493088);
    final int scaledWidth = (int) (paramDouble * contentWidth);
    return Math.min(scaledWidth / cellWidth, 5);
}
// Fill color parsed from an image proto's fillColorRgb, falling back to paramInt.
// NOTE(review): declared as Common.Image but only Common.Image is imported (as
// `Image`); `Common` itself is not in scope here — decompiler artifact, verify
// the type reference before reuse.
public static int getFillColor(Common.Image paramImage, int paramInt)
{
return getColor(paramImage.fillColorRgb, paramInt);
}
/** Fill color parsed from an editorial series container's theme color, falling back to paramInt. */
public static int getFillColor(EditorialSeriesContainer paramEditorialSeriesContainer, int paramInt)
{
    final String themeColor = paramEditorialSeriesContainer.colorThemeArgb;
    return getColor(themeColor, paramInt);
}
/** Fill color parsed from a series antenna's theme color, falling back to paramInt. */
public static int getFillColor(SeriesAntenna paramSeriesAntenna, int paramInt)
{
    final String themeColor = paramSeriesAntenna.colorThemeArgb;
    return getColor(themeColor, paramInt);
}
/** Usable grid content width: screen width minus the horizontal padding on both sides. */
public static int getGridColumnContentWidth(Resources paramResources)
{
    final int screenWidth = paramResources.getDisplayMetrics().widthPixels;
    return screenWidth - getGridHorizontalPadding(paramResources) * 2;
}
/**
 * Horizontal grid padding: at least the minimum padding dimension, or enough
 * to center content at its maximum width (2131492882) on wide screens.
 */
public static int getGridHorizontalPadding(Resources paramResources)
{
    final int screenWidth = paramResources.getDisplayMetrics().widthPixels;
    final int minPadding = paramResources.getDimensionPixelSize(2131492924);
    final int centeringPadding = (screenWidth - paramResources.getDimensionPixelSize(2131492882)) / 2;
    return Math.max(minPadding, centeringPadding);
}
/**
 * Returns the "more results" label for a cluster header, or null when no
 * footer should be shown (no click listener, no container metadata indicating
 * more content, or an antenna cluster).
 * NOTE(review): heavily mangled decompiler output (labels, inverted for(;;)
 * loops, `Containers.ContainerMetadata` without a matching import); treat the
 * control flow below as approximate and recover the original source before
 * modifying.
 */
public static String getMoreResultsStringForCluster(Context paramContext, Document paramDocument, int paramInt, View.OnClickListener paramOnClickListener, String paramString, boolean paramBoolean)
{
String str1;
if (paramOnClickListener == null) {
str1 = null;
}
for (;;)
{
return str1;
// Default label; replaced by a count-specific label when the container is
// ordered and reports an estimated result count.
str1 = paramContext.getString(2131362331);
Containers.ContainerMetadata localContainerMetadata = paramDocument.mDocument.containerMetadata;
int i;
if (localContainerMetadata != null) {
if (localContainerMetadata.ordered)
{
i = 1;
int m = (int)localContainerMetadata.estimatedResults;
if (m > 0)
{
Object[] arrayOfObject = new Object[1];
arrayOfObject[0] = Integer.valueOf(m);
str1 = paramContext.getString(2131362330, arrayOfObject);
}
}
}
while (i == 0)
{
return null;
// Compare total child count against the number actually shown to decide
// whether there is anything "more" to link to.
int j = paramDocument.getChildCount();
int k = Math.min(j, paramInt);
label107:
String str2;
if (paramString != null)
{
str2 = localContainerMetadata.nextPageUrl;
if (!TextUtils.isEmpty(paramString)) {
break label138;
}
i = 0;
}
for (;;)
{
break;
paramString = localContainerMetadata.browseUrl;
break label107;
label138:
if ((j <= k) && (paramBoolean) && (TextUtils.isEmpty(str2))) {
i = 0;
} else {
i = 1;
}
}
boolean bool = paramDocument.hasAntennaInfo();
i = 0;
if (bool) {
i = 1;
}
}
}
}
/**
 * Dimensions of the default display as a (shortSide, longSide) pair in pixels,
 * i.e. the portrait-orientation width and height regardless of rotation.
 */
public static Pair<Integer, Integer> getPortraitScreenDimensions(Context paramContext)
{
    final WindowManager windowManager = (WindowManager) paramContext.getSystemService("window");
    final DisplayMetrics metrics = new DisplayMetrics();
    windowManager.getDefaultDisplay().getMetrics(metrics);
    final int shortSide = Math.min(metrics.widthPixels, metrics.heightPixels);
    final int longSide = Math.max(metrics.widthPixels, metrics.heightPixels);
    return new Pair(Integer.valueOf(shortSide), Integer.valueOf(longSide));
}
/**
 * Column count for regular grids: content width divided by the cell width
 * (capped at 5) when bool resource 2131427340 is set, otherwise a fixed
 * integer resource.
 */
public static int getRegularGridColumnCount(Resources paramResources)
{
    if (!paramResources.getBoolean(2131427340)) {
        return paramResources.getInteger(2131623940);
    }
    final int cellWidth = paramResources.getDimensionPixelSize(2131493088);
    return Math.min(getGridColumnContentWidth(paramResources) / cellWidth, 5);
}
/**
 * Height of the system status bar in pixels: read from the platform
 * "status_bar_height" dimen when available, otherwise from a bundled
 * fallback dimension.
 */
public static int getStatusBarHeight(Context paramContext)
{
    final Resources resources = paramContext.getResources();
    final int resourceId = resources.getIdentifier("status_bar_height", "dimen", "android");
    if (resourceId > 0) {
        return resources.getDimensionPixelSize(resourceId);
    }
    return resources.getDimensionPixelSize(2131493522);
}
/**
 * Column count for a stream quick-link row, derived from the featured grid
 * column count and the link (paramInt1) plus extra (paramInt2) counts.
 * NOTE(review): decompiled output — `k` is only assigned inside the
 * `paramInt1 > i` branch, so this method as written would not recompile
 * ("variable k might not have been initialized"); treat the structure as
 * approximate and recover the original source before modifying.
 */
public static int getStreamQuickLinkColumnCount(Resources paramResources, int paramInt1, int paramInt2)
{
int i = getFeaturedGridColumnCount(paramResources, 1.0D);
int k;
if (paramInt1 > i) {
// Search downward from i for a column count that divides paramInt1 evenly
// (or nearly: remainder gap <= 1), never going below 2 columns.
for (k = i;; k--)
{
int m = paramInt1 % k;
if ((m == 0) || (k - m <= 1) || (k <= 2)) {
break;
}
}
}
int j = paramInt1 + paramInt2;
if ((j == 1) && (i == 2))
{
k = i;
return k;
}
return Math.min(i, Math.max((int)Math.ceil(i / 2.0F), j));
}
/** Text color parsed from a banner's colorTextArgb, falling back to paramInt. */
public static int getTextColor(NextBanner paramNextBanner, int paramInt)
{
    final String textColor = paramNextBanner.colorTextArgb;
    return getColor(textColor, paramInt);
}
/**
 * Computes paramView's hit rect expanded to at least paramInt1 (height) by
 * paramInt2 (width), centered on the view; the result is written into
 * paramRect.
 * NOTE(review): decompiled `for (;;)`/fall-through shape — logically this is
 * getHitRect followed by Rect.inset with negative offsets for each dimension
 * in which the view is smaller than the minimum.
 */
public static void getTouchTarget(View paramView, Rect paramRect, int paramInt1, int paramInt2)
{
// Half the missing height/width (positive when the view is too small).
int i = (paramInt1 - paramView.getHeight()) / 2;
int j = (paramInt2 - paramView.getWidth()) / 2;
paramView.getHitRect(paramRect);
if (j > 0) {}
for (int k = -j;; k = 0)
{
int m = 0;
if (i > 0) {
m = -i;
}
// Negative inset values grow the rect.
paramRect.inset(k, m);
return;
}
}
/** Hides the soft keyboard associated with paramView's window. */
public static void hideKeyboard(Activity paramActivity, View paramView)
{
    final InputMethodManager inputMethodManager =
            (InputMethodManager) paramActivity.getSystemService("input_method");
    inputMethodManager.hideSoftInputFromWindow(paramView.getWindowToken(), 0);
}
/**
 * Blends each ARGB channel 85% toward 216.75 (keeping 15% of the original
 * value), producing a strongly washed-out variant of the input color.
 * The obfuscated name suffix is kept to preserve the public interface.
 */
public static int interpolateColor$4868c7be(int paramInt)
{
    final int alpha = (paramInt >> 24) & 0xFF;
    final int red = (paramInt >> 16) & 0xFF;
    final int green = (paramInt >> 8) & 0xFF;
    final int blue = paramInt & 0xFF;
    // channel' = 216.75 + 0.15 * channel, truncated to int.
    final int alphaOut = (int) (216.75F + 0.15F * alpha);
    final int redOut = (int) (216.75F + 0.15F * red);
    final int greenOut = (int) (216.75F + 0.15F * green);
    final int blueOut = (int) (216.75F + 0.15F * blue);
    return blueOut | (alphaOut << 24) | (redOut << 16) | (greenOut << 8);
}
/**
 * True when accessibility should change UI behavior: touch exploration on
 * API 14+, otherwise whether any accessibility service is enabled.
 * NOTE(review): references Build.VERSION although only
 * android.os.Build.VERSION is imported above — decompiler artifact; verify
 * imports before reuse.
 */
@TargetApi(14)
public static boolean isAccessibilityEnabled(Context paramContext)
{
    final AccessibilityManager accessibilityManager =
            (AccessibilityManager) paramContext.getSystemService("accessibility");
    return Build.VERSION.SDK_INT >= 14
            ? accessibilityManager.isTouchExplorationEnabled()
            : accessibilityManager.isEnabled();
}
/**
 * Lazily determines (and caches in sIsAndroidTv) whether this device is an
 * Android TV: it must declare the leanback system feature AND report
 * television UI mode.
 *
 * NOTE(review): the empty finally block is a decompiler artifact — the
 * original source presumably used a synchronized block to guard the lazy
 * initialization; confirm before editing.
 */
public static boolean isAndroidTv()
{
try
{
if (sIsAndroidTv == null)
{
FinskyApp localFinskyApp = FinskyApp.get();
// TV devices declare the leanback feature.
boolean bool2 = localFinskyApp.getPackageManager().hasSystemFeature("android.software.leanback");
boolean bool3 = false;
if (bool2)
{
UiModeManager localUiModeManager = (UiModeManager)localFinskyApp.getSystemService("uimode");
bool3 = false;
if (localUiModeManager != null)
{
int i = localUiModeManager.getCurrentModeType();
bool3 = false;
// 4 == Configuration.UI_MODE_TYPE_TELEVISION
if (i == 4) {
bool3 = true;
}
}
}
sIsAndroidTv = Boolean.valueOf(bool3);
}
boolean bool1 = sIsAndroidTv.booleanValue();
return bool1;
}
finally {}
}
/**
 * Returns whether a color is perceptually bright.
 *
 * <p>Integer luma approximation: the weights 21/72/7 sum to 100, and the
 * threshold 12800 corresponds to a luma of 128 on the 0-255 scale.
 */
public static boolean isColorBright(int paramInt)
{
    int weightedLuma =
        21 * Color.red(paramInt) + 72 * Color.green(paramInt) + 7 * Color.blue(paramInt);
    return weightedLuma >= 12800;
}
/** Returns whether the SVG experiment (flag id 12603159) is enabled for this app instance. */
public static boolean isSvgExperimentEnabled()
{
return FinskyApp.get().getExperiments().isEnabled(12603159L);
}
/**
 * Returns whether any part of the view is currently visible within its window.
 * NOTE(review): writes into the shared static scratch rect sTempRect —
 * presumably only ever called on the UI thread; confirm before calling
 * from elsewhere.
 */
public static boolean isVisibleOnScreen(View paramView)
{
return paramView.getGlobalVisibleRect(sTempRect);
}
/**
 * Plays a horizontal "shake" animation on the text view; no-op before API 11
 * (property animators are unavailable there).
 *
 * The custom interpolator is a damped sine wave: three full oscillations
 * whose amplitude decays linearly to zero.
 */
@TargetApi(11)
public static void playShakeAnimationIfPossible(Context paramContext, TextView paramTextView)
{
if (Build.VERSION.SDK_INT < 11) {
return;
}
ObjectAnimator localObjectAnimator = ObjectAnimator.ofFloat(paramTextView, "translationX", new float[] { 0.0F, 1.0F });
localObjectAnimator.setInterpolator(new TimeInterpolator()
{
public final float getInterpolation(float paramAnonymousFloat)
{
// NOTE(review): decompiled code — this.val$shakeDelta is a captured local
// lost by the decompiler (likely a pixel amplitude computed before the
// animator was built). Confirm against the original source.
return (1.0F - paramAnonymousFloat) * this.val$shakeDelta * (float)Math.sin(3.0F * (3.141593F * (2.0F * paramAnonymousFloat)));
}
});
localObjectAnimator.start();
}
/**
 * Convenience overload of the four-argument
 * sendAccessibilityEventWithText with live-region announcements disabled.
 */
@TargetApi(16)
public static void sendAccessibilityEventWithText(Context paramContext, CharSequence paramCharSequence, View paramView)
{
sendAccessibilityEventWithText(paramContext, paramCharSequence, paramView, false);
}
/**
 * Announces the given text through accessibility services, if enabled.
 *
 * <p>When paramBoolean is set and the platform supports it (API 19+), the view
 * is marked as a polite live region instead of sending an explicit event.
 * Otherwise an AccessibilityEvent carrying the text is dispatched — type
 * TYPE_ANNOUNCEMENT (16384) on API 16+, falling back to TYPE_VIEW_FOCUSED (8).
 *
 * <p>Rewritten from a decompiler artifact: the original
 * {@code if (SDK >= 16) {} for (int i = 16384;; i = 8)} encodes the
 * conditional {@code i = (SDK >= 16) ? 16384 : 8}.
 *
 * @param paramContext context used to reach the AccessibilityManager
 * @param paramCharSequence the text to announce
 * @param paramView source view for the event; may be null
 * @param paramBoolean whether to prefer marking the view as a live region
 */
@TargetApi(16)
public static void sendAccessibilityEventWithText(Context paramContext, CharSequence paramCharSequence, View paramView, boolean paramBoolean)
{
    if (!isAccessibilityEnabled(paramContext)) {
        return;
    }
    if (paramBoolean && Build.VERSION.SDK_INT >= 19)
    {
        // 1 == ACCESSIBILITY_LIVE_REGION_POLITE: the framework announces changes.
        paramView.setAccessibilityLiveRegion(1);
        return;
    }
    int eventType = (Build.VERSION.SDK_INT >= 16) ? 16384 : 8;
    AccessibilityEvent event = AccessibilityEvent.obtain(eventType);
    event.getText().add(paramCharSequence);
    event.setEnabled(true);
    if (paramView != null) {
        AccessibilityEventCompat.asRecord(event).setSource(paramView);
    }
    ((AccessibilityManager) paramContext.getSystemService("accessibility")).sendAccessibilityEvent(event);
}
/**
 * Sets the view's background, using whichever API the platform provides:
 * setBackground on API 16+, the deprecated setBackgroundDrawable before that.
 */
public static void setBackground(View paramView, Drawable paramDrawable)
{
    if (Build.VERSION.SDK_INT >= 16) {
        paramView.setBackground(paramDrawable);
    } else {
        paramView.setBackgroundDrawable(paramDrawable);
    }
}
/**
 * Shows an inline error on the text view and announces it through
 * accessibility services using the "label: error" template resource.
 */
public static void setErrorOnTextView(TextView paramTextView, String paramString1, String paramString2)
{
    paramTextView.setError(paramString2);
    String announcement =
        paramTextView.getResources().getString(2131361813, new Object[] { paramString1, paramString2 });
    sendAccessibilityEventWithText(paramTextView.getContext(), announcement, paramTextView, false);
}
/**
 * Focuses the edit text and shows the soft keyboard after a 300 ms delay
 * (the delay lets window focus settle before the IME request).
 *
 * NOTE(review): decompiled code — this.val$imm is a captured local lost by
 * the decompiler; the original presumably obtained the InputMethodManager
 * from paramActivity before posting the runnable. Confirm against the
 * original source; as written this block does not compile.
 */
public static void showKeyboard(Activity paramActivity, final EditText paramEditText)
{
paramEditText.requestFocus();
paramEditText.postDelayed(new Runnable()
{
public final void run()
{
this.val$imm.showSoftInput(paramEditText, 1);
}
}, 300L);
}
/**
 * Makes the container visible if any of its children is visible; otherwise
 * applies the caller-supplied visibility (paramInt).
 */
public static void syncContainerVisibility(ViewGroup paramViewGroup, int paramInt)
{
    // 0 == View.VISIBLE
    boolean anyChildVisible = false;
    for (int idx = 0, count = paramViewGroup.getChildCount(); idx < count; idx++)
    {
        if (paramViewGroup.getChildAt(idx).getVisibility() == 0)
        {
            anyChildVisible = true;
            break;
        }
    }
    paramViewGroup.setVisibility(anyChildVisible ? 0 : paramInt);
}
/** Listener notified when a cluster's fade-out animation has finished. */
public interface ClusterFadeOutListener
{
// Nested interfaces are implicitly static and their methods implicitly
// public abstract, so the redundant modifiers were dropped.
/** Called once the cluster fade-out completes. */
void onClusterFadeOutFinish();
}
}
/* Location: F:\apktool\apktool\Google_Play_Store6.0.5\classes-dex2jar.jar
* Qualified Name: com.google.android.finsky.utils.UiUtils
* JD-Core Version: 0.7.0.1
*/
| |
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.extractor.amr;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.ParserException;
import com.google.android.exoplayer2.extractor.ConstantBitrateSeekMap;
import com.google.android.exoplayer2.extractor.Extractor;
import com.google.android.exoplayer2.extractor.ExtractorInput;
import com.google.android.exoplayer2.extractor.ExtractorOutput;
import com.google.android.exoplayer2.extractor.ExtractorsFactory;
import com.google.android.exoplayer2.extractor.PositionHolder;
import com.google.android.exoplayer2.extractor.SeekMap;
import com.google.android.exoplayer2.extractor.TrackOutput;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util;
import java.io.EOFException;
import java.io.IOException;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.Arrays;
/**
* Extracts data from the AMR container format (either AMR or AMR-WB). This follows RFC-4867,
* section 5.
*
* <p>This extractor only supports single-channel AMR container formats.
*/
public final class AmrExtractor implements Extractor {
  /** Factory for {@link AmrExtractor} instances. */
  public static final ExtractorsFactory FACTORY = () -> new Extractor[] {new AmrExtractor()};
  /**
   * Flags controlling the behavior of the extractor. Possible flag value is {@link
   * #FLAG_ENABLE_CONSTANT_BITRATE_SEEKING}.
   */
  @Documented
  @Retention(RetentionPolicy.SOURCE)
  @IntDef(
      flag = true,
      value = {FLAG_ENABLE_CONSTANT_BITRATE_SEEKING})
  public @interface Flags {}
  /**
   * Flag to force enable seeking using a constant bitrate assumption in cases where seeking would
   * otherwise not be possible.
   */
  public static final int FLAG_ENABLE_CONSTANT_BITRATE_SEEKING = 1;
  /**
   * The frame size in bytes, including header (1 byte), for each of the 16 frame types for AMR
   * narrow band.
   */
  private static final int[] frameSizeBytesByTypeNb = {
    13,
    14,
    16,
    18,
    20,
    21,
    27,
    32,
    6, // AMR SID
    7, // GSM-EFR SID
    6, // TDMA-EFR SID
    6, // PDC-EFR SID
    1, // Future use
    1, // Future use
    1, // Future use
    1 // No data
  };
  /**
   * The frame size in bytes, including header (1 byte), for each of the 16 frame types for AMR wide
   * band.
   */
  private static final int[] frameSizeBytesByTypeWb = {
    18,
    24,
    33,
    37,
    41,
    47,
    51,
    59,
    61,
    6, // AMR-WB SID
    1, // Future use
    1, // Future use
    1, // Future use
    1, // Future use
    1, // speech lost
    1 // No data
  };
  // File signatures ("magic numbers") for the two container variants.
  private static final byte[] amrSignatureNb = Util.getUtf8Bytes("#!AMR\n");
  private static final byte[] amrSignatureWb = Util.getUtf8Bytes("#!AMR-WB\n");
  /** Theoretical maximum frame size for a AMR frame. */
  private static final int MAX_FRAME_SIZE_BYTES = frameSizeBytesByTypeWb[8];
  /**
   * The required number of samples in the stream with same sample size to classify the stream as a
   * constant-bitrate-stream.
   */
  private static final int NUM_SAME_SIZE_CONSTANT_BIT_RATE_THRESHOLD = 20;
  private static final int SAMPLE_RATE_WB = 16_000;
  private static final int SAMPLE_RATE_NB = 8_000;
  // Every AMR frame represents 20 ms of audio.
  private static final int SAMPLE_TIME_PER_FRAME_US = 20_000;
  // Single-byte scratch buffer used to peek frame headers.
  private final byte[] scratch;
  private final @Flags int flags;
  // Whether the stream is AMR-WB (true) or AMR-NB (false); set when the header is read.
  private boolean isWideBand;
  // State of the sample currently being read.
  private long currentSampleTimeUs;
  private int currentSampleSize;
  private int currentSampleBytesRemaining;
  private boolean hasOutputSeekMap;
  // Position/size of the first sample, used for constant-bitrate estimation.
  private long firstSamplePosition;
  private int firstSampleSize;
  private int numSamplesWithSameSize;
  // Base timestamp applied after a seek (non-zero only with a constant-bitrate seek map).
  private long timeOffsetUs;
  private ExtractorOutput extractorOutput;
  private TrackOutput trackOutput;
  private @Nullable SeekMap seekMap;
  private boolean hasOutputFormat;
  public AmrExtractor() {
    this(/* flags= */ 0);
  }
  /** @param flags Flags that control the extractor's behavior. */
  public AmrExtractor(@Flags int flags) {
    this.flags = flags;
    scratch = new byte[1];
    firstSampleSize = C.LENGTH_UNSET;
  }
  // Extractor implementation.
  @Override
  public boolean sniff(ExtractorInput input) throws IOException, InterruptedException {
    return readAmrHeader(input);
  }
  @Override
  public void init(ExtractorOutput extractorOutput) {
    this.extractorOutput = extractorOutput;
    trackOutput = extractorOutput.track(/* id= */ 0, C.TRACK_TYPE_AUDIO);
    extractorOutput.endTracks();
  }
  @Override
  public int read(ExtractorInput input, PositionHolder seekPosition)
      throws IOException, InterruptedException {
    // The signature must be consumed before samples can be read.
    if (input.getPosition() == 0) {
      if (!readAmrHeader(input)) {
        throw new ParserException("Could not find AMR header.");
      }
    }
    maybeOutputFormat();
    int sampleReadResult = readSample(input);
    maybeOutputSeekMap(input.getLength(), sampleReadResult);
    return sampleReadResult;
  }
  @Override
  public void seek(long position, long timeUs) {
    currentSampleTimeUs = 0;
    currentSampleSize = 0;
    currentSampleBytesRemaining = 0;
    if (position != 0 && seekMap instanceof ConstantBitrateSeekMap) {
      timeOffsetUs = ((ConstantBitrateSeekMap) seekMap).getTimeUsAtPosition(position);
    } else {
      timeOffsetUs = 0;
    }
  }
  @Override
  public void release() {
    // Do nothing
  }
  /* package */ static int frameSizeBytesByTypeNb(int frameType) {
    return frameSizeBytesByTypeNb[frameType];
  }
  /* package */ static int frameSizeBytesByTypeWb(int frameType) {
    return frameSizeBytesByTypeWb[frameType];
  }
  /* package */ static byte[] amrSignatureNb() {
    return Arrays.copyOf(amrSignatureNb, amrSignatureNb.length);
  }
  /* package */ static byte[] amrSignatureWb() {
    return Arrays.copyOf(amrSignatureWb, amrSignatureWb.length);
  }
  // Internal methods.
  /**
   * Peeks the AMR header from the beginning of the input, and consumes it if it exists.
   *
   * @param input The {@link ExtractorInput} from which data should be peeked/read.
   * @return Whether the AMR header has been read.
   */
  private boolean readAmrHeader(ExtractorInput input) throws IOException, InterruptedException {
    if (peekAmrSignature(input, amrSignatureNb)) {
      isWideBand = false;
      input.skipFully(amrSignatureNb.length);
      return true;
    } else if (peekAmrSignature(input, amrSignatureWb)) {
      isWideBand = true;
      input.skipFully(amrSignatureWb.length);
      return true;
    }
    return false;
  }
  /** Peeks from the beginning of the input to see if the given AMR signature exists. */
  private boolean peekAmrSignature(ExtractorInput input, byte[] amrSignature)
      throws IOException, InterruptedException {
    input.resetPeekPosition();
    byte[] header = new byte[amrSignature.length];
    input.peekFully(header, 0, amrSignature.length);
    return Arrays.equals(header, amrSignature);
  }
  /** Outputs the single audio track's format, once per extractor lifetime. */
  private void maybeOutputFormat() {
    if (!hasOutputFormat) {
      hasOutputFormat = true;
      String mimeType = isWideBand ? MimeTypes.AUDIO_AMR_WB : MimeTypes.AUDIO_AMR_NB;
      int sampleRate = isWideBand ? SAMPLE_RATE_WB : SAMPLE_RATE_NB;
      trackOutput.format(
          Format.createAudioSampleFormat(
              /* id= */ null,
              mimeType,
              /* codecs= */ null,
              /* bitrate= */ Format.NO_VALUE,
              MAX_FRAME_SIZE_BYTES,
              /* channelCount= */ 1,
              sampleRate,
              /* pcmEncoding= */ Format.NO_VALUE,
              /* initializationData= */ null,
              /* drmInitData= */ null,
              /* selectionFlags= */ 0,
              /* language= */ null));
    }
  }
  /**
   * Reads (a portion of) the next frame into the track output; a single frame
   * may span multiple calls, tracked via currentSampleBytesRemaining.
   */
  private int readSample(ExtractorInput extractorInput) throws IOException, InterruptedException {
    if (currentSampleBytesRemaining == 0) {
      try {
        currentSampleSize = peekNextSampleSize(extractorInput);
      } catch (EOFException e) {
        return RESULT_END_OF_INPUT;
      }
      currentSampleBytesRemaining = currentSampleSize;
      if (firstSampleSize == C.LENGTH_UNSET) {
        firstSamplePosition = extractorInput.getPosition();
        firstSampleSize = currentSampleSize;
      }
      // Count equally-sized samples to detect a constant-bitrate stream.
      if (firstSampleSize == currentSampleSize) {
        numSamplesWithSameSize++;
      }
    }
    int bytesAppended =
        trackOutput.sampleData(
            extractorInput, currentSampleBytesRemaining, /* allowEndOfInput= */ true);
    if (bytesAppended == C.RESULT_END_OF_INPUT) {
      return RESULT_END_OF_INPUT;
    }
    currentSampleBytesRemaining -= bytesAppended;
    if (currentSampleBytesRemaining > 0) {
      return RESULT_CONTINUE;
    }
    // Frame complete: emit its metadata and advance the timestamp by one frame.
    trackOutput.sampleMetadata(
        timeOffsetUs + currentSampleTimeUs,
        C.BUFFER_FLAG_KEY_FRAME,
        currentSampleSize,
        /* offset= */ 0,
        /* encryptionData= */ null);
    currentSampleTimeUs += SAMPLE_TIME_PER_FRAME_US;
    return RESULT_CONTINUE;
  }
  /** Peeks the next frame's 1-byte header and returns the frame size it implies. */
  private int peekNextSampleSize(ExtractorInput extractorInput)
      throws IOException, InterruptedException {
    extractorInput.resetPeekPosition();
    extractorInput.peekFully(scratch, /* offset= */ 0, /* length= */ 1);
    byte frameHeader = scratch[0];
    if ((frameHeader & 0x83) > 0) {
      // The padding bits are at bit-1 positions in the following pattern: 1000 0011
      // Padding bits must be 0.
      throw new ParserException("Invalid padding bits for frame header " + frameHeader);
    }
    int frameType = (frameHeader >> 3) & 0x0f;
    return getFrameSizeInBytes(frameType);
  }
  private int getFrameSizeInBytes(int frameType) throws ParserException {
    if (!isValidFrameType(frameType)) {
      throw new ParserException(
          "Illegal AMR " + (isWideBand ? "WB" : "NB") + " frame type " + frameType);
    }
    return isWideBand ? frameSizeBytesByTypeWb[frameType] : frameSizeBytesByTypeNb[frameType];
  }
  private boolean isValidFrameType(int frameType) {
    return frameType >= 0
        && frameType <= 15
        && (isWideBandValidFrameType(frameType) || isNarrowBandValidFrameType(frameType));
  }
  private boolean isWideBandValidFrameType(int frameType) {
    // For wide band, type 10-13 are for future use.
    return isWideBand && (frameType < 10 || frameType > 13);
  }
  private boolean isNarrowBandValidFrameType(int frameType) {
    // For narrow band, type 12-14 are for future use.
    return !isWideBand && (frameType < 12 || frameType > 14);
  }
  /**
   * Outputs a seek map once: unseekable unless constant-bitrate seeking is
   * both enabled and justified by the observed sample sizes.
   */
  private void maybeOutputSeekMap(long inputLength, int sampleReadResult) {
    if (hasOutputSeekMap) {
      return;
    }
    if ((flags & FLAG_ENABLE_CONSTANT_BITRATE_SEEKING) == 0
        || inputLength == C.LENGTH_UNSET
        || (firstSampleSize != C.LENGTH_UNSET && firstSampleSize != currentSampleSize)) {
      seekMap = new SeekMap.Unseekable(C.TIME_UNSET);
      extractorOutput.seekMap(seekMap);
      hasOutputSeekMap = true;
    } else if (numSamplesWithSameSize >= NUM_SAME_SIZE_CONSTANT_BIT_RATE_THRESHOLD
        || sampleReadResult == RESULT_END_OF_INPUT) {
      seekMap = getConstantBitrateSeekMap(inputLength);
      extractorOutput.seekMap(seekMap);
      hasOutputSeekMap = true;
    }
  }
  private SeekMap getConstantBitrateSeekMap(long inputLength) {
    int bitrate = getBitrateFromFrameSize(firstSampleSize, SAMPLE_TIME_PER_FRAME_US);
    return new ConstantBitrateSeekMap(inputLength, firstSamplePosition, bitrate, firstSampleSize);
  }
  /**
   * Returns the stream bitrate, given a frame size and the duration of that frame in microseconds.
   *
   * @param frameSize The size of each frame in the stream.
   * @param durationUsPerFrame The duration of the given frame in microseconds.
   * @return The stream bitrate.
   */
  private static int getBitrateFromFrameSize(int frameSize, long durationUsPerFrame) {
    return (int) ((frameSize * C.BITS_PER_BYTE * C.MICROS_PER_SECOND) / durationUsPerFrame);
  }
}
| |
package org.luaj.vm2.lib;
import java.lang.ref.WeakReference;
import org.luaj.vm2.Lua;
import org.luaj.vm2.LuaBoolean;
import org.luaj.vm2.LuaClosure;
import org.luaj.vm2.LuaError;
import org.luaj.vm2.LuaFunction;
import org.luaj.vm2.LuaNil;
import org.luaj.vm2.LuaNumber;
import org.luaj.vm2.LuaString;
import org.luaj.vm2.LuaTable;
import org.luaj.vm2.LuaThread;
import org.luaj.vm2.LuaValue;
import org.luaj.vm2.Print;
import org.luaj.vm2.Prototype;
import org.luaj.vm2.Varargs;
/**
* Subclass of {@link LibFunction} which implements the lua standard {@code debug}
* library.
* <p>
* The debug library in luaj tries to emulate the behavior of the corresponding C-based lua library.
* To do this, it must maintain a separate stack of calls to {@link LuaClosure} and {@link LibFunction}
* instances.
* Especially when lua-to-java bytecode compiling is being used
* via a {@link LuaCompiler} such as {@link LuaJC},
* this cannot be done in all cases.
* <p>
* Typically, this library is included as part of a call to {@link JsePlatform#debugGlobals()}
* <p>
* To instantiate and use it directly,
* link it into your globals table via {@link LuaValue#load(LuaValue)} using code such as:
* <pre> {@code
* LuaTable _G = new LuaTable();
* _G.load(new DebugLib());
* } </pre>
* Doing so will ensure the library is properly initialized
* and loaded into the globals table.
* <p>
* @see LibFunction
* @see <a href="http://www.lua.org/manual/5.1/manual.html#5.9">http://www.lua.org/manual/5.1/manual.html#5.9</a>
*/
public final class LibDebug extends LibFunctionV
{
// Diagnostic switches sampled once at class load: -DCALLS traces function
// calls, -DTRACE prints interpreter state per executed bytecode.
public static final boolean CALLS = (System.getProperty("CALLS") != null);
public static final boolean TRACE = (System.getProperty("TRACE") != null);
// leave this unset to allow obfuscators to
// remove it in production builds
public static boolean DEBUG_ENABLED;
// Lua-visible function names bound into the debug table (see init()).
static final String[] NAMES = {
"debug",
"getfenv",
"gethook",
"getinfo",
"getlocal",
"getmetatable",
"getregistry",
"getupvalue",
"setfenv",
"sethook",
"setlocal",
"setmetatable",
"setupvalue",
"traceback",
};
// Opcode constants dispatched in invoke(); NAMES are bound starting at DEBUG.
private static final int INIT = 0;
private static final int DEBUG = 1;
private static final int GETFENV = 2;
private static final int GETHOOK = 3;
private static final int GETINFO = 4;
private static final int GETLOCAL = 5;
private static final int GETMETATABLE = 6;
private static final int GETREGISTRY = 7;
private static final int GETUPVALUE = 8;
private static final int SETFENV = 9;
private static final int SETHOOK = 10;
private static final int SETLOCAL = 11;
private static final int SETMETATABLE = 12;
private static final int SETUPVALUE = 13;
private static final int TRACEBACK = 14;
/* maximum stack for a Lua function */
private static final int MAXSTACK = 250;
// Interned strings: "what"/"namewhat" values and the keys used when building
// debug.getinfo result tables and hook-event arguments.
private static final LuaString LUA = valueOf("Lua");
private static final LuaString JAVA = valueOf("Java");
private static final LuaString QMARK = valueOf("?");
private static final LuaString GLOBAL = valueOf("global");
private static final LuaString LOCAL = valueOf("local");
private static final LuaString METHOD = valueOf("method");
private static final LuaString UPVALUE = valueOf("upvalue");
private static final LuaString FIELD = valueOf("field");
@SuppressWarnings("hiding")
private static final LuaString CALL = valueOf("call");
private static final LuaString LINE = valueOf("line");
private static final LuaString COUNT = valueOf("count");
private static final LuaString RETURN = valueOf("return");
private static final LuaString FUNC = valueOf("func");
private static final LuaString NUPS = valueOf("nups");
private static final LuaString NAME = valueOf("name");
private static final LuaString NAMEWHAT = valueOf("namewhat");
private static final LuaString WHAT = valueOf("what");
private static final LuaString SOURCE = valueOf("source");
private static final LuaString SHORT_SRC = valueOf("short_src");
private static final LuaString LINEDEFINED = valueOf("linedefined");
private static final LuaString LASTLINEDEFINED = valueOf("lastlinedefined");
private static final LuaString CURRENTLINE = valueOf("currentline");
private static final LuaString ACTIVELINES = valueOf("activelines");
/**
 * Builds the debug table, binds all library functions into it, installs it
 * into the environment and the package loader's LOADED table, and turns on
 * DEBUG_ENABLED so closures start reporting call/bytecode events here.
 */
private LuaTable init()
{
DEBUG_ENABLED = true;
LuaTable t = new LuaTable();
bind(t, LibDebug.class, NAMES, DEBUG);
_env.set("debug", t);
LibPackage.instance.LOADED.set("debug", t);
return t;
}
/**
 * Dispatches a bound library call to its implementation using the opcode
 * assigned at bind time; unknown opcodes return NONE.
 */
@Override
public Varargs invoke(Varargs args)
{
switch(_opcode)
{
case INIT:
return init();
case DEBUG:
return _debug();
case GETFENV:
return _getfenv(args);
case GETHOOK:
return _gethook(args);
case GETINFO:
return _getinfo(args, this);
case GETLOCAL:
return _getlocal(args);
case GETMETATABLE:
return _getmetatable(args);
case GETREGISTRY:
return _getregistry();
case GETUPVALUE:
return _getupvalue(args);
case SETFENV:
return _setfenv(args);
case SETHOOK:
return _sethook(args);
case SETLOCAL:
return _setlocal(args);
case SETMETATABLE:
return _setmetatable(args);
case SETUPVALUE:
return _setupvalue(args);
case TRACEBACK:
return _traceback(args);
default:
return NONE;
}
}
// ------------------------ Debug Info management --------------------------
//
// when DEBUG_ENABLED is set to true, these functions will be called
// by Closure instances as they process bytecodes.
//
// Each thread will get a DebugState attached to it by the debug library
// which will track function calls, hook functions, etc.
//
/** Snapshot of one call-stack frame: the function, its stack, varargs and program counter. */
static class DebugInfo
{
// The function occupying this frame (NIL when the frame is unused).
LuaValue _func;
// Non-null only when _func is a Lua closure; Java functions leave this null.
LuaClosure _closure;
LuaValue[] _stack;
Varargs _varargs, _extras;
int _pc, _top;
private DebugInfo()
{
_func = NIL;
}
private DebugInfo(LuaValue func)
{
// -1 marks "no bytecode executed yet" for this frame.
_pc = -1;
setfunction(func);
}
// Records the stack and varargs for the frame about to be entered.
void setargs(Varargs varargs, LuaValue[] stack)
{
_varargs = varargs;
_stack = stack;
}
// Installs the frame's function, caching the closure view when applicable.
void setfunction(LuaValue func)
{
_func = func;
_closure = (func instanceof LuaClosure ? (LuaClosure)func : null);
}
// Resets the frame for reuse when it is popped.
void clear()
{
_func = NIL;
_closure = null;
_stack = null;
_varargs = _extras = null;
_pc = _top = 0;
}
/** Records the interpreter position after each executed bytecode. */
public void bytecode(int pc, Varargs extras, int top)
{
_pc = pc;
_top = top;
_extras = extras;
}
/** Returns the current source line for this frame, or -1 when unavailable. */
public int currentline()
{
if(_closure == null) return -1;
int[] li = _closure._p.lineinfo;
return li == null || _pc < 0 || _pc >= li.length ? -1 : li[_pc];
}
/** Returns {name, namewhat} for the function at this frame, or null when unknown. */
public LuaString[] getfunckind()
{
if(_closure == null || _pc < 0) return null;
// Operand A of the current instruction — the register holding the callee.
int stackpos = (_closure._p.code[_pc] >> 6) & 0xff;
return getobjname(this, stackpos);
}
/** Returns "source:line" for this frame, stripping the "@"/"=" source prefix. */
public String sourceline()
{
if(_closure == null) return _func.tojstring();
String s = _closure._p.source.tojstring();
int line = currentline();
return (s.startsWith("@") || s.startsWith("=") ? s.substring(1) : s) + ":" + line;
}
/** Returns a human-readable "function <name>" label for tracebacks. */
public String tracename()
{
// if ( func != null )
// return func.tojstring();
LuaString[] kind = getfunckind();
if(kind == null)
return "function ?";
return "function " + kind[0].tojstring();
}
/** Returns the name of the 1-based local variable at the current pc, or null. */
public LuaString getlocalname(int index)
{
if(_closure == null) return null;
return _closure._p.getlocalname(index, _pc);
}
public String tojstring()
{
return tracename() + " " + sourceline();
}
}
/**
 * DebugState is associated with a Thread: it mirrors the thread's call stack
 * as DebugInfo frames and holds the thread's hook configuration. The thread
 * is held via WeakReference so an orphaned state cannot pin it.
 */
static class DebugState
{
private final WeakReference<LuaThread> thread_ref;
// Current call depth; also the index of the next free DebugInfo slot.
private int debugCalls;
private DebugInfo[] debugInfo = new DebugInfo[LuaThread.MAX_CALLSTACK + 1];
private LuaValue hookfunc;
// Which hook events are enabled; inhook suppresses re-entrant hook calls.
private boolean hookcall, hookline, hookrtrn, inhook;
private int hookcount, hookcodes;
// Last line reported to the line hook, to fire only on line changes.
private int line;
DebugState(LuaThread thread)
{
thread_ref = new WeakReference<LuaThread>(thread);
}
/** Returns (lazily creating) the DebugInfo slot for the next call. */
public DebugInfo nextInfo()
{
DebugInfo di = debugInfo[debugCalls];
if(di == null)
debugInfo[debugCalls] = di = new DebugInfo();
return di;
}
/** Grows the tracked stack to the given depth and returns the top frame. */
public DebugInfo pushInfo(int calls)
{
while(debugCalls < calls)
{
nextInfo();
++debugCalls;
}
return debugInfo[debugCalls - 1];
}
/** Pops (and clears) frames until the tracked depth matches calls. */
public void popInfo(int calls)
{
while(debugCalls > calls)
debugInfo[--debugCalls].clear();
}
/**
 * Invokes the installed hook function with the given event type and
 * argument, pushing a synthetic frame for it. Errors inside the hook are
 * printed and swallowed; inhook prevents the hook from re-triggering itself.
 */
void callHookFunc(DebugState ds, LuaString type, LuaValue arg)
{
if(inhook || hookfunc == null)
return;
inhook = true;
try
{
int n = debugCalls;
ds.nextInfo().setargs(arg, null);
ds.pushInfo(n + 1).setfunction(hookfunc);
try
{
hookfunc.call(type, arg);
}
finally
{
ds.popInfo(n);
}
}
catch(Exception e)
{
e.printStackTrace();
}
finally
{
inhook = false;
}
}
/** Replaces the hook configuration wholesale (debug.sethook semantics). */
public void sethook(LuaValue func, boolean call, boolean line, boolean rtrn, int count)
{
hookcount = count;
hookcall = call;
hookline = line;
hookrtrn = rtrn;
hookfunc = func;
}
/**
 * Returns the top frame. On an empty stack the array access throws, and a
 * fresh frame is created at depth 0 — the catch is used as the empty-stack
 * path rather than an explicit bounds check.
 */
DebugInfo getDebugInfo()
{
try
{
return debugInfo[debugCalls - 1];
}
catch(Exception e)
{
if(debugCalls <= 0)
return debugInfo[debugCalls++] = new DebugInfo();
return null;
}
}
/** Returns the frame at the given 0-based level from the top, or null. */
DebugInfo getDebugInfo(int level)
{
return level < 0 || level >= debugCalls ? null : debugInfo[debugCalls - level - 1];
}
/** Finds the innermost frame running func, or a detached frame wrapping it. */
public DebugInfo findDebugInfo(LuaValue func)
{
for(int i = debugCalls; --i >= 0;)
{
if(debugInfo[i]._func == func)
{
return debugInfo[i];
}
}
return new DebugInfo(func);
}
public String tojstring()
{
LuaThread thread = thread_ref.get();
return thread != null ? LibDebug.traceback(thread, 0) : "orphaned thread";
}
}
/** Returns the DebugState attached to the thread, creating and attaching one on first use. */
static DebugState getDebugState(LuaThread thread)
{
if(thread._debugState == null)
thread._debugState = new DebugState(thread);
return (DebugState)thread._debugState;
}
/** Returns the DebugState of the currently running thread. */
static DebugState getDebugState()
{
return getDebugState(LuaThread.getRunning());
}
/**
 * Called by Closures to set up stack and arguments to next call.
 * No-op while a hook function is executing, so hook frames are not recorded.
 */
public static void debugSetupCall(Varargs args, LuaValue[] stack)
{
DebugState ds = getDebugState();
if(ds.inhook)
return;
ds.nextInfo().setargs(args, stack);
}
/**
 * Called by Closures and recursing java functions on entry: records the new
 * frame and fires the "call" hook when one is installed. No-op while a hook
 * is already executing.
 *
 * @param thread the thread for the call
 * @param calls the number of calls in the call stack
 * @param func the function called
 */
public static void debugOnCall(LuaThread thread, int calls, LuaFunction func)
{
    DebugState state = getDebugState();
    if(state.inhook)
        return;
    DebugInfo frame = state.pushInfo(calls);
    frame.setfunction(func);
    if(CALLS)
        System.out.println("calling " + func);
    if(state.hookcall)
        state.callHookFunc(state, CALL, LuaValue.NIL);
}
/** Called by Closures and recursing java functions on return: fires the
 * "return" hook when installed, then pops the frame.
 * @param thread the thread for the call
 * @param calls the number of calls in the call stack
 *
 * NOTE(review): the hook check reads the state of the passed-in thread, but
 * the finally block pops from the RUNNING thread's state (no-arg
 * getDebugState()). Presumably these are always the same thread in practice —
 * confirm before changing either side.
 */
public static void debugOnReturn(LuaThread thread, int calls)
{
DebugState ds = getDebugState(thread);
if(ds.inhook)
return;
if(CALLS) System.out.println("returning");
try
{
if(ds.hookrtrn)
ds.callHookFunc(ds, RETURN, LuaValue.NIL);
}
finally
{
getDebugState().popInfo(calls);
}
}
/** Called by Closures on bytecode execution: records the interpreter position
 * on the top frame and fires the "count" and "line" hooks as configured. */
public static void debugBytecode(int pc, Varargs extras, int top)
{
DebugState ds = getDebugState();
if(ds.inhook)
return;
DebugInfo di = ds.getDebugInfo();
if(TRACE) Print.printState(di._closure, pc, di._stack, top, di._varargs);
di.bytecode(pc, extras, top);
// Count hook: fire every hookcount executed instructions.
if(ds.hookcount > 0)
{
if(++ds.hookcodes >= ds.hookcount)
{
ds.hookcodes = 0;
ds.callHookFunc(ds, COUNT, LuaValue.NIL);
}
}
// Line hook: fire only when the source line changes.
if(ds.hookline)
{
int newline = di.currentline();
if(newline != ds.line)
{
int c = di._closure._p.code[pc];
// Skip OP_JMP instructions with a negative sBx offset (backward jumps) —
// presumably to avoid re-firing the line hook on loop back-edges; confirm.
if((c & 0x3f) != Lua.OP_JMP || ((c >>> 14) - 0x1ffff) >= 0)
{
ds.line = newline;
ds.callHookFunc(ds, LINE, LuaValue.valueOf(newline));
}
}
}
}
// ------------------- library function implementations -----------------
// j2se subclass may wish to override and provide actual console here.
// the j2me platform has no System.in to provide a console.
/**
 * debug.debug(): interactive console entry point. This base implementation
 * is a no-op; a j2se subclass may override it to provide an actual console.
 */
static Varargs _debug()
{
return NONE;
}
/**
 * debug.gethook([thread]) -> hook function, mask string ("c"/"l"/"r"), count.
 * Defaults to the running thread when no thread argument is given.
 */
static Varargs _gethook(Varargs args)
{
int a = 1;
// Optional leading thread argument; a++ consumes it only when present.
LuaThread thread = args.arg(a).isthread() ? args.arg(a++).checkthread() : LuaThread.getRunning();
DebugState ds = getDebugState(thread);
return varargsOf(
ds.hookfunc,
valueOf((ds.hookcall ? "c" : "") + (ds.hookline ? "l" : "") + (ds.hookrtrn ? "r" : "")),
valueOf(ds.hookcount));
}
/**
 * debug.sethook([thread,] func, mask [, count]): installs a hook on the
 * thread. The mask string selects events: 'c' = call, 'l' = line,
 * 'r' = return; count > 0 additionally enables the instruction-count hook.
 */
static Varargs _sethook(Varargs args)
{
int a = 1;
// Optional leading thread argument; a++ consumes arguments in order.
LuaThread thread = args.arg(a).isthread() ? args.arg(a++).checkthread() : LuaThread.getRunning();
LuaValue func = args.arg(a++).optfunction(null);
String str = args.optjstring(a++, "");
int count = args.optint(a++, 0);
boolean call = false, line = false, rtrn = false;
for(int i = 0; i < str.length(); i++)
switch(str.charAt(i))
{
case 'c':
call = true;
break;
case 'l':
line = true;
break;
case 'r':
rtrn = true;
break;
}
getDebugState(thread).sethook(func, call, line, rtrn, count);
return NONE;
}
/** debug.getfenv(o): returns the object's environment table, or nil when it has none. */
static Varargs _getfenv(Varargs args)
{
    LuaValue env = args.arg1().getfenv();
    return (env == null) ? LuaValue.NIL : env;
}
/** debug.setfenv(o, table): sets the object's environment table and returns the object. */
static Varargs _setfenv(Varargs args)
{
    LuaValue target = args.arg1();
    target.setfenv(args.checktable(2));
    return target;
}
/**
 * debug.getinfo([thread,] f [, what]): builds an info table describing either
 * a stack level (numeric f) or a function value. The what string selects
 * which fields are filled: 'S' source, 'l' current line, 'u' upvalue count,
 * 'n' name, 'f' the function itself, 'L' active lines (not populated here).
 *
 * @param level0func the function reported for level 0 (the library entry itself)
 */
protected static Varargs _getinfo(Varargs args, LuaValue level0func)
{
int a = 1;
// Optional leading thread argument; a++ consumes arguments in order.
LuaThread thread = args.arg(a).isthread() ? args.arg(a++).checkthread() : LuaThread.getRunning();
LuaValue func = args.arg(a++);
String what = args.optjstring(a++, "nSluf");
// find the stack info
DebugState ds = getDebugState(thread);
DebugInfo di = null;
if(func.isnumber())
{
int level = func.checkint();
di = level > 0 ?
ds.getDebugInfo(level - 1) :
new DebugInfo(level0func);
}
else
{
di = ds.findDebugInfo(func.checkfunction());
}
if(di == null)
return NIL;
// start a table
LuaTable info = new LuaTable();
LuaClosure c = di._closure;
for(int i = 0, j = what.length(); i < j; i++)
{
switch(what.charAt(i))
{
case 'S':
{
// Source info: real prototype data for Lua closures, synthesized for Java functions.
if(c != null)
{
Prototype p = c._p;
info.set(WHAT, LUA);
info.set(SOURCE, p.source);
info.set(SHORT_SRC, valueOf(sourceshort(p)));
info.set(LINEDEFINED, valueOf(p.linedefined));
info.set(LASTLINEDEFINED, valueOf(p.lastlinedefined));
}
else
{
String shortName = di._func.tojstring();
LuaString name = LuaString.valueOf("[Java] " + shortName);
info.set(WHAT, JAVA);
info.set(SOURCE, name);
info.set(SHORT_SRC, valueOf(shortName));
info.set(LINEDEFINED, LuaValue.MINUSONE);
info.set(LASTLINEDEFINED, LuaValue.MINUSONE);
}
break;
}
case 'l':
{
int line = di.currentline();
info.set(CURRENTLINE, valueOf(line));
break;
}
case 'u':
{
info.set(NUPS, valueOf(c != null ? c._p.nups : 0));
break;
}
case 'n':
{
// Name fields fall back to "?" / "" when the name cannot be determined.
LuaString[] kind = di.getfunckind();
info.set(NAME, kind != null ? kind[0] : QMARK);
info.set(NAMEWHAT, kind != null ? kind[1] : EMPTYSTRING);
break;
}
case 'f':
{
info.set(FUNC, di._func);
break;
}
case 'L':
{
// Active-lines table is created but never populated in this implementation.
LuaTable lines = new LuaTable();
info.set(ACTIVELINES, lines);
// if ( di.luainfo != null ) {
// int line = di.luainfo.currentline();
// if ( line >= 0 )
// lines.set(1, IntValue.valueOf(line));
// }
break;
}
}
}
return info;
}
/**
 * Returns a short display form of a prototype's source name: strips the
 * leading "@"/"=" prefix lua uses for file/custom sources, and maps the
 * "\033" binary-chunk marker to "binary string".
 */
public static String sourceshort(Prototype p)
{
    String src = p.source.tojstring();
    // The prefixes are mutually exclusive, so check order does not matter.
    if(src.startsWith("\033"))
        return "binary string";
    if(src.startsWith("@") || src.startsWith("="))
        return src.substring(1);
    return src;
}
/**
 * debug.getlocal([thread,] level, local): returns the name and value of the
 * 1-based local variable at the given stack level, or nil when out of range.
 */
static Varargs _getlocal(Varargs args)
{
int a = 1;
// Optional leading thread argument; a++ consumes arguments in order.
LuaThread thread = args.arg(a).isthread() ? args.arg(a++).checkthread() : LuaThread.getRunning();
int level = args.checkint(a++);
int local = args.checkint(a++);
DebugState ds = getDebugState(thread);
DebugInfo di = ds.getDebugInfo(level - 1);
LuaString name = (di != null ? di.getlocalname(local) : null);
if(name != null)
{
@SuppressWarnings("null")
LuaValue value = di._stack[local - 1];
return varargsOf(name, value);
}
return NIL;
}
/**
 * debug.setlocal([thread,] level, local, value): assigns the 1-based local
 * variable at the given stack level; returns its name, or nil when out of range.
 */
@SuppressWarnings("null")
static Varargs _setlocal(Varargs args)
{
int a = 1;
// Optional leading thread argument; a++ consumes arguments in order.
LuaThread thread = args.arg(a).isthread() ? args.arg(a++).checkthread() : LuaThread.getRunning();
int level = args.checkint(a++);
int local = args.checkint(a++);
LuaValue value = args.arg(a++);
DebugState ds = getDebugState(thread);
DebugInfo di = ds.getDebugInfo(level - 1);
LuaString name = (di != null ? di.getlocalname(local) : null);
if(name != null)
{
di._stack[local - 1] = value;
return name;
}
return NIL;
}
/** debug.getmetatable(o): returns the object's metatable, or nil when none is set. */
static LuaValue _getmetatable(Varargs args)
{
    LuaValue mt = args.arg(1).getmetatable();
    return (mt == null) ? NIL : mt;
}
/**
 * debug.setmetatable(o, table): installs a metatable without invoking the
 * __metatable protection. For the primitive types the metatable is stored in
 * the corresponding per-type static field (shared by all values of that type);
 * other types carry their own. Returns true on success, or (false, message)
 * when a LuaError is raised.
 */
static Varargs _setmetatable(Varargs args)
{
LuaValue object = args.arg(1);
try
{
LuaValue mt = args.arg(2).opttable(null);
switch(object.type())
{
case TNIL:
LuaNil.s_metatable = mt;
break;
case TNUMBER:
LuaNumber.s_metatable = mt;
break;
case TBOOLEAN:
LuaBoolean.s_metatable = mt;
break;
case TSTRING:
LuaString.s_metatable = mt;
break;
case TFUNCTION:
LuaFunction.s_metatable = mt;
break;
case TTHREAD:
LuaThread.s_metatable = mt;
break;
default:
object.setmetatable(mt);
}
return LuaValue.TRUE;
}
catch(LuaError e)
{
return varargsOf(FALSE, valueOf(e.toString()));
}
}
/** debug.getregistry(): luaj has no C registry, so a fresh empty table is returned on each call. */
static Varargs _getregistry()
{
return new LuaTable();
}
/**
 * Resolve the name of upvalue {@code up} (1-based) of a closure.
 *
 * @param c  closure to inspect
 * @param up 1-based upvalue index
 * @return the declared upvalue name, a synthetic ".N" name when the
 *         prototype has no name for it, or null if the index is out of range
 */
static LuaString findupvalue(LuaClosure c, int up)
{
    if(c._upValues == null || up <= 0 || up > c._upValues.length)
        return null;
    Prototype p = c._p;
    if(p.upvalues != null && up <= p.upvalues.length)
        return p.upvalues[up - 1];
    return LuaString.valueOf("." + up); // no debug name recorded
}
/**
 * debug.getupvalue(func, up): returns the name and value of upvalue
 * {@code up} of the given function, or NIL when the function is not a
 * Lua closure or the index is out of range.
 */
static Varargs _getupvalue(Varargs args)
{
    LuaValue func = args.checkfunction(1);
    int up = args.checkint(2);
    if(!(func instanceof LuaClosure))
        return NIL; // java functions expose no upvalues
    LuaClosure closure = (LuaClosure)func;
    LuaString name = findupvalue(closure, up);
    if(name == null)
        return NIL;
    return varargsOf(name, closure._upValues[up - 1].getValue());
}
/**
 * debug.setupvalue(func, up, value): assigns {@code value} to upvalue
 * {@code up} of the given function. Returns the upvalue's name on success,
 * NIL when the function is not a Lua closure or the index is out of range.
 */
static LuaValue _setupvalue(Varargs args)
{
    LuaValue func = args.checkfunction(1);
    int up = args.checkint(2);
    LuaValue value = args.arg(3);
    if(!(func instanceof LuaClosure))
        return NIL; // java functions expose no upvalues
    LuaClosure closure = (LuaClosure)func;
    LuaString name = findupvalue(closure, up);
    if(name == null)
        return NIL;
    closure._upValues[up - 1].setValue(value);
    return name;
}
/**
 * debug.traceback([thread,] [message [, level]]): builds a stack traceback
 * string for the given (or current) thread, prefixed by {@code message}
 * when one is supplied. {@code level} defaults to 1.
 */
static LuaValue _traceback(Varargs args)
{
    int argIndex = 1;
    LuaThread thread;
    if(args.arg(argIndex).isthread())
        thread = args.arg(argIndex++).checkthread();
    else
        thread = LuaThread.getRunning(); // no explicit thread: use current
    String message = args.optjstring(argIndex++, null);
    int level = args.optint(argIndex++, 1);
    String tb = LibDebug.traceback(thread, level - 1);
    if(message == null)
        return valueOf(tb);
    return valueOf(message + "\n" + tb);
}
// =================== public utilities ====================
/**
 * Get a traceback as a string for the current thread.
 *
 * @param level 0-based level to start reporting on
 * @return String containing the stack trace of the currently running thread
 */
public static String traceback(int level)
{
    return traceback(LuaThread.getRunning(), level);
}
/**
 * Get a traceback for a particular thread.
 *
 * Each stack frame is rendered as "\n\t&lt;source:line&gt; in &lt;name&gt;"; note the
 * name appended after "in " for a frame is taken from the NEXT frame up the
 * stack (its caller), and the outermost frame is labeled "main chunk".
 *
 * @param thread LuaThread to provide stack trace for
 * @param level 0-based level to start reporting on
 * @return String containing the stack trace.
 */
public static String traceback(LuaThread thread, int level)
{
    StringBuffer sb = new StringBuffer();
    DebugState ds = getDebugState(thread);
    sb.append("stack traceback:");
    DebugInfo di = ds.getDebugInfo(level);
    if(di != null)
    {
        sb.append("\n\t");
        sb.append(di.sourceline());
        sb.append(" in ");
        // each iteration closes the previous frame's "in " with this frame's
        // name, then opens a new location line for this frame
        while((di = ds.getDebugInfo(++level)) != null)
        {
            sb.append(di.tracename());
            sb.append("\n\t");
            sb.append(di.sourceline());
            sb.append(" in ");
        }
        sb.append("main chunk");
    }
    return sb.toString();
}
/**
 * Get file and line for the nearest calling closure.
 *
 * Walks the current thread's debug frames from innermost outward and returns
 * the source/line of the first Lua closure found; falls back to
 * {@link #fileline(int)} at level 0 when no closure is on the stack.
 *
 * @return String identifying the file and line of the nearest lua closure,
 *         or the function name of the Java call if no closure is being called.
 */
public static String fileline()
{
    DebugState ds = getDebugState(LuaThread.getRunning());
    int frameCount = ds.debugCalls;
    for(int level = 0; level < frameCount; level++)
    {
        DebugInfo di = ds.getDebugInfo(level);
        if(di == null)
            continue;
        if(di._func.isclosure())
            return di.sourceline();
    }
    return fileline(0);
}
/**
 * Get file and line for a particular level, even if it is a java function.
 *
 * @param level 0-based index of level to get
 * @return String containing file and line info if available, null otherwise
 */
public static String fileline(int level)
{
    DebugState ds = getDebugState(LuaThread.getRunning());
    DebugInfo di = ds.getDebugInfo(level);
    if(di == null)
        return null;
    return di.sourceline();
}
// =======================================================
/**
 * Internal sanity check mirroring C Lua's lua_assert macro.
 *
 * @param x condition expected to hold
 * @throws RuntimeException when the condition is false
 */
static void lua_assert(boolean x)
{
    if(x)
        return;
    throw new RuntimeException("lua_assert failed");
}
/**
 * Try to recover a name and a "namewhat" classification for the value at
 * stack slot {@code stackpos} of the given frame, mirroring getobjname in
 * C Lua's ldebug.c: first the prototype's local-variable debug info is
 * consulted, then symbolic execution identifies the instruction that
 * produced the slot.
 *
 * @param di       debug info of the frame to inspect
 * @param stackpos stack slot within that frame
 * @return a two-element array { name, namewhat } where namewhat is one of
 *         LOCAL / GLOBAL / FIELD / UPVALUE / METHOD, or null when the frame
 *         is not a Lua closure or no useful name was found
 */
static LuaString[] getobjname(DebugInfo di, int stackpos)
{
    LuaString name;
    if(di._closure != null)
    { /* a Lua function? */
        Prototype p = di._closure._p;
        int pc = di._pc; // currentpc(L, ci);
        int i;// Instruction i;
        name = p.getlocalname(stackpos + 1, pc);
        if(name != null) /* is a local? */
            return new LuaString[] { name, LOCAL };
        i = symbexec(p, pc, stackpos); /* try symbolic execution */
        lua_assert(pc != -1);
        // classify by the opcode that last wrote the slot
        switch(Lua.GET_OPCODE(i))
        {
            case Lua.OP_GETGLOBAL:
            {
                int g = Lua.GETARG_Bx(i); /* global index */
                // lua_assert(p.k[g].isString());
                return new LuaString[] { p.k[g].strvalue(), GLOBAL };
            }
            case Lua.OP_MOVE:
            {
                int a = Lua.GETARG_A(i);
                int b = Lua.GETARG_B(i); /* move from `b' to `a' */
                if(b < a)
                    return getobjname(di, b); /* get name for `b' */
                break;
            }
            case Lua.OP_GETTABLE:
            {
                int k = Lua.GETARG_C(i); /* key index */
                name = kname(p, k);
                return new LuaString[] { name, FIELD };
            }
            case Lua.OP_GETUPVAL:
            {
                int u = Lua.GETARG_B(i); /* upvalue index */
                name = u < p.upvalues.length ? p.upvalues[u] : QMARK;
                return new LuaString[] { name, UPVALUE };
            }
            case Lua.OP_SELF:
            {
                int k = Lua.GETARG_C(i); /* key index */
                name = kname(p, k);
                return new LuaString[] { name, METHOD };
            }
            default:
                break;
        }
    }
    return null; /* no useful name found */
}
/**
 * Resolve the name of a table key operand: when {@code c} encodes a constant
 * that is a string, return that string; otherwise return the "?" placeholder.
 *
 * @param p prototype providing the constant table
 * @param c raw RK operand
 * @return the string constant, or QMARK when not a string constant
 */
static LuaString kname(Prototype p, int c)
{
    if(!Lua.ISK(c))
        return QMARK; // register operand, not a constant
    LuaValue k = p.k[Lua.INDEXK(c)];
    return k.isstring() ? k.strvalue() : QMARK;
}
/**
 * Check that a register index is within the prototype's declared stack size.
 *
 * @param pt prototype whose maxstacksize bounds the register file
 * @param r  register index to validate
 * @return true when the register is in range
 */
static boolean checkreg(Prototype pt, int r)
{
    return r < pt.maxstacksize;
}
/**
 * Validate a prototype's global invariants before symbolic execution,
 * mirroring precheck in C Lua's ldebug.c: stack-size bound, vararg flag
 * consistency, upvalue count, lineinfo length, and a final OP_RETURN.
 *
 * @param pt prototype to validate
 * @return true when all checks pass; vararg-flag inconsistencies raise
 *         via lua_assert instead of returning false
 */
static boolean precheck(Prototype pt)
{
    if(pt.maxstacksize > MAXSTACK)
        return false;
    lua_assert(pt.numparams + (pt.is_vararg & Lua.VARARG_HASARG) <= pt.maxstacksize);
    lua_assert((pt.is_vararg & Lua.VARARG_NEEDSARG) == 0
            || (pt.is_vararg & Lua.VARARG_HASARG) != 0);
    if(pt.upvalues.length > pt.nups)
        return false;
    // lineinfo is either absent or parallel to the code array
    if(pt.lineinfo.length != pt.code.length && pt.lineinfo.length != 0)
        return false;
    // every well-formed chunk ends with a return instruction
    if(Lua.GET_OPCODE(pt.code[pt.code.length - 1]) != Lua.OP_RETURN)
        return false;
    return true;
}
/**
 * Check that the instruction following an "open call" (one producing a
 * variable number of results) is allowed to consume those results: only
 * CALL, TAILCALL, RETURN and SETLIST with B == 0 qualify.
 *
 * @param pt prototype containing the code
 * @param pc program counter of the open instruction
 * @return true when the following instruction is a valid consumer
 */
static boolean checkopenop(Prototype pt, int pc)
{
    int i = pt.code[pc + 1];
    int op = Lua.GET_OPCODE(i);
    if(op == Lua.OP_CALL || op == Lua.OP_TAILCALL
            || op == Lua.OP_RETURN || op == Lua.OP_SETLIST)
        return Lua.GETARG_B(i) == 0;
    return false; /* invalid instruction after an open call */
}
/**
 * Validate one instruction operand against its declared argument mode,
 * following checkArgMode in C Lua's ldebug.c
 * (original C signature: checkArgMode(Proto *pt, int r, enum OpArgMask mode)).
 *
 * @param pt   prototype providing the constant table and stack-size bound
 * @param r    raw operand value
 * @param mode one of Lua.OpArgN / Lua.OpArgU / Lua.OpArgR / Lua.OpArgK
 * @return true when the operand is valid for the mode, false otherwise
 */
static boolean checkArgMode(Prototype pt, int r, int mode)
{
    switch(mode)
    {
        case Lua.OpArgN:
            // unused operand must be zero
            if(!(r == 0)) return false;
            break;
        case Lua.OpArgU:
            // operand is used but carries no register/constant semantics
            break;
        case Lua.OpArgR:
            // BUG FIX: the original discarded checkreg's result, so an
            // out-of-range register operand passed validation. In C Lua,
            // checkreg is a macro that aborts the check with failure.
            if(!checkreg(pt, r)) return false;
            break;
        case Lua.OpArgK:
            // RK operand: constant index must be in the constant table,
            // register index must be within the stack size
            if(!(Lua.ISK(r) ? Lua.INDEXK(r) < pt.k.length : r < pt.maxstacksize)) return false;
            break;
    }
    return true;
}
/**
 * Symbolically execute the prototype's bytecode up to {@code lastpc},
 * validating every instruction on the way and remembering the last
 * instruction that wrote register {@code reg}. Java port of symbexec in
 * C Lua's ldebug.c.
 *
 * @param pt     prototype whose code is walked
 * @param lastpc program counter (exclusive) at which to stop
 * @param reg    register to trace; Lua.NO_REG means "validate only"
 * @return the last instruction that changed `reg' (or the final return when
 *         none did), or 0 if any validation check fails
 */
static int symbexec(Prototype pt, int lastpc, int reg)
{
    int pc;
    int last; /* stores position of last instruction that changed `reg' */
    last = pt.code.length - 1; /*
                                * points to final return (a `neutral'
                                * instruction)
                                */
    if(!(precheck(pt))) return 0;
    for(pc = 0; pc < lastpc; pc++)
    {
        int i = pt.code[pc];
        int op = Lua.GET_OPCODE(i);
        int a = Lua.GETARG_A(i);
        int b = 0;
        int c = 0;
        if(!(op < Lua.NUM_OPCODES)) return 0;
        if(!checkreg(pt, a)) return 0;
        // validate operands according to the instruction's encoding mode
        switch(Lua.getOpMode(op))
        {
            case Lua.iABC:
            {
                b = Lua.GETARG_B(i);
                c = Lua.GETARG_C(i);
                if(!(checkArgMode(pt, b, Lua.getBMode(op)))) return 0;
                if(!(checkArgMode(pt, c, Lua.getCMode(op)))) return 0;
                break;
            }
            case Lua.iABx:
            {
                b = Lua.GETARG_Bx(i);
                if(Lua.getBMode(op) == Lua.OpArgK)
                    if(!(b < pt.k.length)) return 0;
                break;
            }
            case Lua.iAsBx:
            {
                b = Lua.GETARG_sBx(i);
                if(Lua.getBMode(op) == Lua.OpArgR)
                {
                    // signed jump offset: target must be inside the code array
                    int dest = pc + 1 + b;
                    if(!(0 <= dest && dest < pt.code.length)) return 0;
                    if(dest > 0)
                    {
                        /* cannot jump to a setlist count */
                        int d = pt.code[dest - 1];
                        if((Lua.GET_OPCODE(d) == Lua.OP_SETLIST && Lua.GETARG_C(d) == 0)) return 0;
                    }
                }
                break;
            }
        }
        if(Lua.testAMode(op))
        {
            if(a == reg)
                last = pc; /* change register `a' */
        }
        if(Lua.testTMode(op))
        {
            /* test instructions must be followed by a JMP */
            if(!(pc + 2 < pt.code.length)) return 0; /* check skip */
            if(!(Lua.GET_OPCODE(pt.code[pc + 1]) == Lua.OP_JMP)) return 0;
        }
        // per-opcode structural checks, mirroring the C original
        switch(op)
        {
            case Lua.OP_LOADBOOL:
            {
                if(!(c == 0 || pc + 2 < pt.code.length)) return 0; /* check its jump */
                break;
            }
            case Lua.OP_LOADNIL:
            {
                if(a <= reg && reg <= b)
                    last = pc; /* set registers from `a' to `b' */
                break;
            }
            case Lua.OP_GETUPVAL:
            case Lua.OP_SETUPVAL:
            {
                if(!(b < pt.nups)) return 0;
                break;
            }
            case Lua.OP_GETGLOBAL:
            case Lua.OP_SETGLOBAL:
            {
                // global name must be a string constant
                if(!(pt.k[b].isstring())) return 0;
                break;
            }
            case Lua.OP_SELF:
            {
                if(!checkreg(pt, a + 1)) return 0;
                if(reg == a + 1)
                    last = pc;
                break;
            }
            case Lua.OP_CONCAT:
            {
                if(!(b < c)) return 0; /* at least two operands */
                break;
            }
            case Lua.OP_TFORLOOP:
            {
                if(!(c >= 1)) return 0; /* at least one result (control variable) */
                if(!checkreg(pt, a + 2 + c)) return 0; /* space for results */
                if(reg >= a + 2)
                    last = pc; /* affect all regs above its base */
                break;
            }
            case Lua.OP_FORLOOP:
            case Lua.OP_FORPREP:
                if(!checkreg(pt, a + 3)) return 0;
                //$FALL-THROUGH$
            case Lua.OP_JMP:
            {
                int dest = pc + 1 + b;
                /* not full check and jump is forward and do not skip `lastpc'? */
                if(reg != Lua.NO_REG && pc < dest && dest <= lastpc)
                    pc += b; /* do the jump */
                break;
            }
            case Lua.OP_CALL:
            case Lua.OP_TAILCALL:
            {
                if(b != 0)
                {
                    if(!checkreg(pt, a + b - 1)) return 0;
                }
                c--; /* c = num. returns */
                if(c == Lua.LUA_MULTRET)
                {
                    if(!(checkopenop(pt, pc))) return 0;
                }
                else if(c != 0)
                    if(!checkreg(pt, a + c - 1)) return 0;
                if(reg >= a)
                    last = pc; /* affect all registers above base */
                break;
            }
            case Lua.OP_RETURN:
            {
                b--; /* b = num. returns */
                if(b > 0)
                    if(!checkreg(pt, a + b - 1)) return 0;
                break;
            }
            case Lua.OP_SETLIST:
            {
                if(b > 0)
                    if(!checkreg(pt, a + b)) return 0;
                if(c == 0)
                    pc++; // real count is stored in the next "instruction" slot
                break;
            }
            case Lua.OP_CLOSURE:
            {
                int nup, j;
                if(!(b < pt.p.length)) return 0;
                nup = pt.p[b].nups;
                if(!(pc + nup < pt.code.length)) return 0;
                // each upvalue is captured by a following pseudo-instruction
                for(j = 1; j <= nup; j++)
                {
                    int op1 = Lua.GET_OPCODE(pt.code[pc + j]);
                    if(!(op1 == Lua.OP_GETUPVAL || op1 == Lua.OP_MOVE)) return 0;
                }
                if(reg != Lua.NO_REG) /* tracing? */
                    pc += nup; /* do not 'execute' these pseudo-instructions */
                break;
            }
            case Lua.OP_VARARG:
            {
                if(!((pt.is_vararg & Lua.VARARG_ISVARARG) != 0
                        && (pt.is_vararg & Lua.VARARG_NEEDSARG) == 0)) return 0;
                b--;
                if(b == Lua.LUA_MULTRET)
                    if(!(checkopenop(pt, pc))) return 0;
                if(!checkreg(pt, a + b - 1)) return 0;
                break;
            }
            default:
                break;
        }
    }
    return pt.code[last];
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cocoon.components.elementprocessor.impl.poi.hssf.elements;
import org.apache.cocoon.components.elementprocessor.types.Attribute;
import org.apache.cocoon.components.elementprocessor.types.BooleanConverter;
import org.apache.cocoon.components.elementprocessor.types.BooleanResult;
import org.apache.cocoon.components.elementprocessor.types.NumericConverter;
import org.apache.cocoon.components.elementprocessor.types.NumericResult;
import org.apache.cocoon.components.elementprocessor.types.Validator;
import java.io.IOException;
/**
* Implementation of ElementProcessor to handle the "ColInfo" tag;
* on end of element it applies the column's width to the sheet
*
* This element has several attributes and has no content
*
* @version $Id$
*/
public class EPColInfo extends BaseElementProcessor {

    // names of the attributes carried by the "ColInfo" element
    private static final String _no_attribute = "No";
    private static final String _unit_attribute = "Unit";
    private static final String _margin_a_attribute = "MarginA";
    private static final String _margin_b_attribute = "MarginB";
    private static final String _hard_size_attribute = "HardSize";
    private static final String _hidden_attribute = "Hidden";
    private static final String _collapsed_attribute = "Collapsed";
    private static final String _outline_level_attribute = "OutlineLevel";
    private static final String _count_attribute = "Count";

    // margins must lie in the range 0..7
    private static final Validator _margin_validator = new Validator() {
        public IOException validate(final Number number) {
            int val = number.intValue();
            if (val >= 0 && val <= 7) {
                return null;
            }
            return new IOException("\"" + number + "\" is not a legal value");
        }
    };

    // defaults applied when the XML omits these attributes
    private static final Attribute[] _implied_attributes =
    {
        new Attribute(_hard_size_attribute, "0"),
        new Attribute(_hidden_attribute, "0"),
        new Attribute(_collapsed_attribute, "0"),
        new Attribute(_outline_level_attribute, "0"),
        new Attribute(_count_attribute, "1")
    };

    // lazily parsed attribute values; each stays null until first requested
    private NumericResult _no;            // column number
    private NumericResult _unit;          // size, in points
    private NumericResult _margin_a;      // top margin, in points
    private NumericResult _margin_b;      // bottom margin, in points
    private BooleanResult _hard_size;     // true if size is explicitly set
    private BooleanResult _hidden;        // true if column is hidden
    private BooleanResult _collapsed;     // true if column is collapsed
    private NumericResult _outline_level; // outline level
    private NumericResult _count;         // rle count

    /**
     * constructor; registers the implied attribute defaults. The cached
     * results start out null (the fields' default value) and are filled in
     * on first access by the getters below.
     */
    public EPColInfo() {
        super(_implied_attributes);
        _no = null;
        _unit = null;
        _margin_a = null;
        _margin_b = null;
        _hard_size = null;
        _hidden = null;
        _collapsed = null;
        _outline_level = null;
        _count = null;
    }

    /**
     * @return column number
     * @exception IOException if the "No" attribute is missing or malformed
     */
    public int getColumnNo() throws IOException {
        if (_no == null) {
            String raw = getValue(_no_attribute);
            _no = NumericConverter.extractNonNegativeInteger(raw);
        }
        return _no.intValue();
    }

    /**
     * @return column size in points
     * @exception IOException if the "Unit" attribute is missing or malformed
     */
    public double getPoints() throws IOException {
        if (_unit == null) {
            String raw = getValue(_unit_attribute);
            _unit = NumericConverter.extractDouble(raw);
        }
        return _unit.doubleValue();
    }

    /**
     * @return top margin
     * @exception IOException if the "MarginA" attribute is missing or invalid
     */
    public int getTopMargin() throws IOException {
        if (_margin_a == null) {
            String raw = getValue(_margin_a_attribute);
            _margin_a = NumericConverter.extractInteger(raw, _margin_validator);
        }
        return _margin_a.intValue();
    }

    /**
     * @return bottom margin
     * @exception IOException if the "MarginB" attribute is missing or invalid
     */
    public int getBottomMargin() throws IOException {
        if (_margin_b == null) {
            String raw = getValue(_margin_b_attribute);
            _margin_b = NumericConverter.extractInteger(raw, _margin_validator);
        }
        return _margin_b.intValue();
    }

    /**
     * @return hard size
     * @exception IOException if the "HardSize" attribute is malformed
     */
    public boolean getHardSize() throws IOException {
        if (_hard_size == null) {
            String raw = getValue(_hard_size_attribute);
            _hard_size = BooleanConverter.extractBoolean(raw);
        }
        return _hard_size.booleanValue();
    }

    /**
     * @return hidden state
     * @exception IOException if the "Hidden" attribute is malformed
     */
    public boolean getHidden() throws IOException {
        if (_hidden == null) {
            String raw = getValue(_hidden_attribute);
            _hidden = BooleanConverter.extractBoolean(raw);
        }
        return _hidden.booleanValue();
    }

    /**
     * @return collapsed state
     * @exception IOException if the "Collapsed" attribute is malformed
     */
    public boolean getCollapsed() throws IOException {
        if (_collapsed == null) {
            String raw = getValue(_collapsed_attribute);
            _collapsed = BooleanConverter.extractBoolean(raw);
        }
        return _collapsed.booleanValue();
    }

    /**
     * @return outline level
     * @exception IOException if the "OutlineLevel" attribute is malformed
     */
    public int getOutlineLevel() throws IOException {
        if (_outline_level == null) {
            String raw = getValue(_outline_level_attribute);
            _outline_level = NumericConverter.extractInteger(raw);
        }
        return _outline_level.intValue();
    }

    /**
     * @return rle count
     * @exception IOException if the "Count" attribute is malformed
     */
    public int getRLECount() throws IOException {
        if (_count == null) {
            String raw = getValue(_count_attribute);
            _count = NumericConverter.extractInteger(raw);
        }
        return _count.intValue();
    }

    /**
     * Set this column's width on the sheet.
     * @exception IOException
     */
    public void endProcessing() throws IOException {
        getSheet().setColumnWidth(getColumnNo(), getPoints());
    }
} // end public class EPColInfo
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.jvm.java;
import static com.facebook.buck.jvm.common.ResourceValidator.validateResources;
import com.facebook.buck.maven.AetherUtil;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.Flavor;
import com.facebook.buck.model.Flavored;
import com.facebook.buck.model.HasTests;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.BuildRuleType;
import com.facebook.buck.rules.BuildRules;
import com.facebook.buck.rules.BuildTargetSourcePath;
import com.facebook.buck.rules.Description;
import com.facebook.buck.rules.Hint;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.SourcePaths;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.infer.annotation.SuppressFieldNotInitialized;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import java.nio.file.Path;
/**
 * Buck {@link Description} for {@code java_library} build targets. Supports
 * the SRC_JAR and MAVEN_JAR flavors in addition to the plain library rule.
 */
public class JavaLibraryDescription implements Description<JavaLibraryDescription.Arg>, Flavored {
    public static final BuildRuleType TYPE = BuildRuleType.of("java_library");
    // only these flavors may be attached to a java_library target
    public static final ImmutableSet<Flavor> SUPPORTED_FLAVORS = ImmutableSet.of(
        JavaLibrary.SRC_JAR,
        JavaLibrary.MAVEN_JAR);
    // javac options used when the target does not override them
    @VisibleForTesting
    final JavacOptions defaultOptions;

    public JavaLibraryDescription(JavacOptions defaultOptions) {
        this.defaultOptions = defaultOptions;
    }

    @Override
    public BuildRuleType getBuildRuleType() {
        return TYPE;
    }

    @Override
    public boolean hasFlavors(ImmutableSet<Flavor> flavors) {
        return SUPPORTED_FLAVORS.containsAll(flavors);
    }

    @Override
    public Arg createUnpopulatedConstructorArg() {
        return new Arg();
    }

    /**
     * Builds the rule for a java_library target. Depending on the flavors
     * present this produces a source jar, a Maven-published (uber) jar, or a
     * plain DefaultJavaLibrary; an abi-calculation rule and (when applicable)
     * a GWT module rule are registered as side effects.
     */
    @Override
    public <A extends Arg> BuildRule createBuildRule(
        TargetGraph targetGraph,
        BuildRuleParams params,
        BuildRuleResolver resolver,
        A args) {
        SourcePathResolver pathResolver = new SourcePathResolver(resolver);
        BuildTarget target = params.getBuildTarget();
        // We know that the flavour we're being asked to create is valid, since the check is done when
        // creating the action graph from the target graph.
        ImmutableSortedSet<Flavor> flavors = target.getFlavors();
        BuildRuleParams paramsWithMavenFlavor = null;
        if (flavors.contains(JavaLibrary.MAVEN_JAR)) {
            paramsWithMavenFlavor = params;
            // Maven rules will depend upon their vanilla versions, so the latter have to be constructed
            // without the maven flavor to prevent output-path conflict
            params = params.copyWithBuildTarget(
                params.getBuildTarget().withoutFlavors(ImmutableSet.of(JavaLibrary.MAVEN_JAR)));
        }
        if (flavors.contains(JavaLibrary.SRC_JAR)) {
            // source jars get the "sources" classifier appended to their maven coords
            args.mavenCoords = args.mavenCoords.transform(
                new Function<String, String>() {
                    @Override
                    public String apply(String input) {
                        return AetherUtil.addClassifier(input, AetherUtil.CLASSIFIER_SOURCES);
                    }
                });
            if (!flavors.contains(JavaLibrary.MAVEN_JAR)) {
                return new JavaSourceJar(
                    params,
                    pathResolver,
                    args.srcs.get(),
                    args.mavenCoords);
            } else {
                return MavenUberJar.SourceJar.create(
                    Preconditions.checkNotNull(paramsWithMavenFlavor),
                    pathResolver,
                    args.srcs.get(),
                    args.mavenCoords);
            }
        }
        JavacOptions javacOptions = JavacOptionsFactory.create(
            defaultOptions,
            params,
            resolver,
            pathResolver,
            args
        );
        BuildTarget abiJarTarget = params.getBuildTarget().withAppendedFlavor(CalculateAbi.FLAVOR);
        ImmutableSortedSet<BuildRule> exportedDeps = resolver.getAllRules(args.exportedDeps.get());
        DefaultJavaLibrary defaultJavaLibrary =
            resolver.addToIndex(
                new DefaultJavaLibrary(
                    // exported rules of deps and the javac-option inputs are extra deps
                    params.appendExtraDeps(
                        Iterables.concat(
                            BuildRules.getExportedRules(
                                Iterables.concat(
                                    params.getDeclaredDeps().get(),
                                    exportedDeps,
                                    resolver.getAllRules(args.providedDeps.get()))),
                            pathResolver.filterBuildRuleInputs(
                                javacOptions.getInputs(pathResolver)))),
                    pathResolver,
                    args.srcs.get(),
                    validateResources(
                        pathResolver,
                        params.getProjectFilesystem(),
                        args.resources.get()),
                    javacOptions.getGeneratedSourceFolderName(),
                    args.proguardConfig.transform(
                        SourcePaths.toSourcePath(params.getProjectFilesystem())),
                    args.postprocessClassesCommands.get(),
                    exportedDeps,
                    resolver.getAllRules(args.providedDeps.get()),
                    new BuildTargetSourcePath(abiJarTarget),
                    javacOptions.trackClassUsage(),
                    /* additionalClasspathEntries */ ImmutableSet.<Path>of(),
                    new JavacToJarStepFactory(javacOptions, JavacOptionsAmender.IDENTITY),
                    args.resourcesRoot,
                    args.mavenCoords,
                    args.tests.get()));
        resolver.addToIndex(
            CalculateAbi.of(
                abiJarTarget,
                pathResolver,
                params,
                new BuildTargetSourcePath(defaultJavaLibrary.getBuildTarget())));
        addGwtModule(
            resolver,
            pathResolver,
            params,
            args);
        if (!flavors.contains(JavaLibrary.MAVEN_JAR)) {
            return defaultJavaLibrary;
        } else {
            return MavenUberJar.create(
                defaultJavaLibrary,
                Preconditions.checkNotNull(paramsWithMavenFlavor),
                pathResolver,
                args.mavenCoords);
        }
    }

    /**
     * Creates a {@link BuildRule} with the {@link JavaLibrary#GWT_MODULE_FLAVOR}, if appropriate.
     * <p>
     * If {@code arg.srcs} or {@code arg.resources} is non-empty, then the return value will not be
     * absent.
     */
    @VisibleForTesting
    static Optional<GwtModule> addGwtModule(
        BuildRuleResolver resolver,
        SourcePathResolver pathResolver,
        BuildRuleParams javaLibraryParams,
        Arg arg) {
        BuildTarget libraryTarget = javaLibraryParams.getBuildTarget();
        // nothing to package and the target is unflavored: no GWT module needed
        if (arg.srcs.get().isEmpty() &&
            arg.resources.get().isEmpty() &&
            !libraryTarget.isFlavored()) {
            return Optional.absent();
        }
        BuildTarget gwtModuleBuildTarget = BuildTarget.of(
            libraryTarget.getUnflavoredBuildTarget(),
            ImmutableSet.of(JavaLibrary.GWT_MODULE_FLAVOR));
        ImmutableSortedSet<SourcePath> filesForGwtModule = ImmutableSortedSet
            .<SourcePath>naturalOrder()
            .addAll(arg.srcs.get())
            .addAll(arg.resources.get())
            .build();
        // If any of the srcs or resources are BuildTargetSourcePaths, then their respective BuildRules
        // must be included as deps.
        ImmutableSortedSet<BuildRule> deps =
            ImmutableSortedSet.copyOf(pathResolver.filterBuildRuleInputs(filesForGwtModule));
        GwtModule gwtModule = new GwtModule(
            javaLibraryParams.copyWithChanges(
                gwtModuleBuildTarget,
                Suppliers.ofInstance(deps),
                /* extraDeps */ Suppliers.ofInstance(ImmutableSortedSet.<BuildRule>of())),
            pathResolver,
            filesForGwtModule);
        resolver.addToIndex(gwtModule);
        return Optional.of(gwtModule);
    }

    /**
     * Constructor arg populated from the java_library rule's BUCK-file attributes.
     */
    @SuppressFieldNotInitialized
    public static class Arg extends JvmLibraryArg implements HasTests {
        public Optional<ImmutableSortedSet<SourcePath>> srcs;
        public Optional<ImmutableSortedSet<SourcePath>> resources;
        public Optional<Path> proguardConfig;
        public Optional<ImmutableList<String>> postprocessClassesCommands;
        @Hint(isInput = false)
        public Optional<Path> resourcesRoot;
        public Optional<String> mavenCoords;
        public Optional<Boolean> autodeps;
        public Optional<ImmutableSortedSet<String>> generatedSymbols;
        public Optional<ImmutableSortedSet<BuildTarget>> providedDeps;
        public Optional<ImmutableSortedSet<BuildTarget>> exportedDeps;
        public Optional<ImmutableSortedSet<BuildTarget>> deps;
        @Hint(isDep = false) public Optional<ImmutableSortedSet<BuildTarget>> tests;
        @Override
        public ImmutableSortedSet<BuildTarget> getTests() {
            return tests.get();
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.indices.validate.query;
import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.QuerySourceBuilder;
import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
/**
* A request to validate a specific query.
* <p/>
* <p>The request requires the query source to be set either using {@link #source(QuerySourceBuilder)},
* or {@link #source(byte[])}.
*/
public class ValidateQueryRequest extends BroadcastOperationRequest<ValidateQueryRequest> {
    // content type used when rendering builder/map sources to bytes
    private static final XContentType contentType = Requests.CONTENT_TYPE;
    // the query source to validate
    private BytesReference source;
    // true while source may still reference a shared/reusable buffer
    private boolean sourceUnsafe;
    // whether a detailed explanation of the query was requested
    private boolean explain;
    // document types to run against; empty means all types
    private String[] types = Strings.EMPTY_ARRAY;
    // package-private; not assigned in this class — presumably set by the
    // executing action at request time (TODO confirm against caller)
    long nowInMillis;

    ValidateQueryRequest() {
        this(Strings.EMPTY_ARRAY);
    }

    /**
     * Constructs a new validate request against the provided indices. No indices provided means it will
     * run against all indices.
     */
    public ValidateQueryRequest(String... indices) {
        super(indices);
        indicesOptions(IndicesOptions.fromOptions(false, false, true, false));
    }

    /**
     * Validates the request; no checks beyond those of the superclass.
     */
    @Override
    public ActionRequestValidationException validate() {
        ActionRequestValidationException validationException = super.validate();
        return validationException;
    }

    /**
     * Copies an unsafe source into an owned buffer before the request starts
     * executing, so it cannot be clobbered by buffer reuse.
     */
    @Override
    protected void beforeStart() {
        if (sourceUnsafe) {
            source = source.copyBytesArray();
            sourceUnsafe = false;
        }
    }

    /**
     * The source to execute.
     */
    BytesReference source() {
        return source;
    }

    /**
     * Sets the source from a query source builder, rendered with the default
     * content type.
     */
    public ValidateQueryRequest source(QuerySourceBuilder sourceBuilder) {
        this.source = sourceBuilder.buildAsBytes(contentType);
        this.sourceUnsafe = false;
        return this;
    }

    /**
     * The source to execute in the form of a map.
     */
    public ValidateQueryRequest source(Map source) {
        try {
            XContentBuilder builder = XContentFactory.contentBuilder(contentType);
            builder.map(source);
            return source(builder);
        } catch (IOException e) {
            throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
        }
    }

    /**
     * Sets the source from an already-populated content builder.
     */
    public ValidateQueryRequest source(XContentBuilder builder) {
        this.source = builder.bytes();
        this.sourceUnsafe = false;
        return this;
    }

    /**
     * The query source to validate. It is preferable to use either {@link #source(byte[])}
     * or {@link #source(QuerySourceBuilder)}.
     */
    public ValidateQueryRequest source(String source) {
        this.source = new BytesArray(source);
        this.sourceUnsafe = false;
        return this;
    }

    /**
     * The source to validate.
     */
    public ValidateQueryRequest source(byte[] source) {
        return source(source, 0, source.length, false);
    }

    /**
     * The source to validate.
     */
    public ValidateQueryRequest source(byte[] source, int offset, int length, boolean unsafe) {
        return source(new BytesArray(source, offset, length), unsafe);
    }

    /**
     * The source to validate.
     */
    public ValidateQueryRequest source(BytesReference source, boolean unsafe) {
        this.source = source;
        this.sourceUnsafe = unsafe;
        return this;
    }

    /**
     * The types of documents the query will run against. Defaults to all types.
     */
    public String[] types() {
        return this.types;
    }

    /**
     * The types of documents the query will run against. Defaults to all types.
     */
    public ValidateQueryRequest types(String... types) {
        this.types = types;
        return this;
    }

    /**
     * Indicate if detailed information about query is requested
     */
    public void explain(boolean explain) {
        this.explain = explain;
    }

    /**
     * Indicates if detailed information about query is requested
     */
    public boolean explain() {
        return explain;
    }

    /**
     * Reads the request from the wire: source bytes, type list, explain flag —
     * order must mirror {@link #writeTo(StreamOutput)} exactly.
     */
    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        sourceUnsafe = false;
        source = in.readBytesReference();
        int typesSize = in.readVInt();
        if (typesSize > 0) {
            types = new String[typesSize];
            for (int i = 0; i < typesSize; i++) {
                types[i] = in.readString();
            }
        }
        explain = in.readBoolean();
    }

    /**
     * Writes the request to the wire — order must mirror
     * {@link #readFrom(StreamInput)} exactly.
     */
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeBytesReference(source);
        out.writeVInt(types.length);
        for (String type : types) {
            out.writeString(type);
        }
        out.writeBoolean(explain);
    }

    /**
     * Human-readable form of the request; the source is rendered as JSON
     * when possible, "_na_" otherwise.
     */
    @Override
    public String toString() {
        String sSource = "_na_";
        try {
            sSource = XContentHelper.convertToJson(source, false);
        } catch (Exception e) {
            // ignore — best-effort rendering for logging only
        }
        return "[" + Arrays.toString(indices) + "]" + Arrays.toString(types) + ", source[" + sSource + "], explain:" + explain;
    }
}
| |
/*
* Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.client.impl;
import com.hazelcast.cache.impl.JCacheDetector;
import com.hazelcast.client.impl.operations.ClientDisconnectionOperation;
import com.hazelcast.client.impl.operations.GetConnectedClientsOperation;
import com.hazelcast.client.impl.operations.OnJoinClientOperation;
import com.hazelcast.client.impl.protocol.ClientExceptions;
import com.hazelcast.client.impl.protocol.ClientMessage;
import com.hazelcast.client.impl.protocol.MessageTaskFactory;
import com.hazelcast.client.impl.protocol.task.AuthenticationCustomCredentialsMessageTask;
import com.hazelcast.client.impl.protocol.task.AuthenticationMessageTask;
import com.hazelcast.client.impl.protocol.task.GetPartitionsMessageTask;
import com.hazelcast.client.impl.protocol.task.MessageTask;
import com.hazelcast.client.impl.protocol.task.PingMessageTask;
import com.hazelcast.client.impl.protocol.task.map.AbstractMapQueryMessageTask;
import com.hazelcast.config.Config;
import com.hazelcast.core.Client;
import com.hazelcast.core.ClientListener;
import com.hazelcast.core.ClientType;
import com.hazelcast.core.Member;
import com.hazelcast.instance.MemberImpl;
import com.hazelcast.instance.Node;
import com.hazelcast.internal.cluster.ClusterService;
import com.hazelcast.internal.util.RuntimeAvailableProcessors;
import com.hazelcast.logging.ILogger;
import com.hazelcast.nio.Address;
import com.hazelcast.nio.Connection;
import com.hazelcast.nio.ConnectionListener;
import com.hazelcast.nio.tcp.TcpIpConnection;
import com.hazelcast.security.SecurityContext;
import com.hazelcast.spi.CoreService;
import com.hazelcast.spi.EventPublishingService;
import com.hazelcast.spi.EventService;
import com.hazelcast.spi.ExecutionService;
import com.hazelcast.spi.ManagedService;
import com.hazelcast.spi.MemberAttributeServiceEvent;
import com.hazelcast.spi.MembershipAwareService;
import com.hazelcast.spi.MembershipServiceEvent;
import com.hazelcast.spi.NodeEngine;
import com.hazelcast.spi.Operation;
import com.hazelcast.spi.OperationService;
import com.hazelcast.spi.PreJoinAwareService;
import com.hazelcast.spi.ProxyService;
import com.hazelcast.spi.UrgentSystemOperation;
import com.hazelcast.spi.impl.NodeEngineImpl;
import com.hazelcast.spi.impl.PartitionSpecificRunnable;
import com.hazelcast.spi.impl.executionservice.InternalExecutionService;
import com.hazelcast.spi.impl.operationservice.InternalOperationService;
import com.hazelcast.spi.partition.IPartitionService;
import com.hazelcast.spi.properties.GroupProperty;
import com.hazelcast.spi.serialization.SerializationService;
import com.hazelcast.transaction.TransactionManagerService;
import com.hazelcast.util.ConcurrencyUtil;
import com.hazelcast.util.ConstructorFunction;
import com.hazelcast.util.executor.ExecutorType;
import javax.security.auth.login.LoginException;
import java.net.InetSocketAddress;
import java.util.Collection;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import static com.hazelcast.spi.ExecutionService.CLIENT_MANAGEMENT_EXECUTOR;
import static com.hazelcast.util.SetUtil.createHashSet;
/**
* Engine running on the member (node) side that handles client requests and
* manages client listeners.
*/
@SuppressWarnings("checkstyle:classdataabstractioncoupling")
public class ClientEngineImpl implements ClientEngine, CoreService, PreJoinAwareService,
ManagedService, MembershipAwareService, EventPublishingService<ClientEvent, ClientListener> {
/**
* Service name to be used in requests.
*/
public static final String SERVICE_NAME = "hz:core:clientEngine";
// Executor sizing: queue capacity and thread counts scale with available cores.
private static final int EXECUTOR_QUEUE_CAPACITY_PER_CORE = 100000;
private static final int THREADS_PER_CORE = 20;
private static final int QUERY_THREADS_PER_CORE = 1;
// Lazily creates the per-client counter that tracks the newest authentication correlation ID.
private static final ConstructorFunction<String, AtomicLong> LAST_AUTH_CORRELATION_ID_CONSTRUCTOR_FUNC =
new ConstructorFunction<String, AtomicLong>() {
@Override
public AtomicLong createNew(String arg) {
return new AtomicLong();
}
};
private final Node node;
private final NodeEngineImpl nodeEngine;
// General-purpose executor for partition-agnostic client message tasks.
private final Executor executor;
// Single-threaded executor serializing client connect/disconnect management (see newClientsManagementExecutor).
private final ExecutorService clientManagementExecutor;
// Dedicated executor for map query tasks, isolated from regular client traffic.
private final Executor queryExecutor;
private final SerializationService serializationService;
// client UUID -> member UUID
private final ConcurrentMap<String, String> ownershipMappings = new ConcurrentHashMap<String, String>();
// client UUID -> last authentication correlation ID
private final ConcurrentMap<String, AtomicLong> lastAuthenticationCorrelationIds
= new ConcurrentHashMap<String, AtomicLong>();
private final ClientEndpointManagerImpl endpointManager;
private final ILogger logger;
private final ConnectionListener connectionListener = new ConnectionListenerImpl();
private final MessageTaskFactory messageTaskFactory;
private final ClientExceptions clientExceptions;
// Grace period (seconds) before a disconnected client's server-side resources are destroyed.
private final int endpointRemoveDelaySeconds;
private final ClientPartitionListenerService partitionListenerService;
public ClientEngineImpl(Node node) {
this.logger = node.getLogger(ClientEngine.class);
this.node = node;
this.serializationService = node.getSerializationService();
this.nodeEngine = node.nodeEngine;
this.endpointManager = new ClientEndpointManagerImpl(nodeEngine);
this.executor = newClientExecutor();
this.queryExecutor = newClientQueryExecutor();
this.clientManagementExecutor = newClientsManagementExecutor();
this.messageTaskFactory = new CompositeMessageTaskFactory(nodeEngine);
this.clientExceptions = initClientExceptionFactory();
this.endpointRemoveDelaySeconds = node.getProperties().getInteger(GroupProperty.CLIENT_ENDPOINT_REMOVE_DELAY_SECONDS);
this.partitionListenerService = new ClientPartitionListenerService(nodeEngine);
}
// JCache-specific exception factories are only enabled when JCache is on the classpath.
private ClientExceptions initClientExceptionFactory() {
boolean jcacheAvailable = JCacheDetector.isJCacheAvailable(nodeEngine.getConfigClassLoader());
return new ClientExceptions(jcacheAvailable);
}
private ExecutorService newClientsManagementExecutor() {
//CLIENT_MANAGEMENT_EXECUTOR is a single threaded executor to ensure that disconnect/auth are executed in correct order.
InternalExecutionService executionService = nodeEngine.getExecutionService();
return executionService.register(CLIENT_MANAGEMENT_EXECUTOR, 1, Integer.MAX_VALUE, ExecutorType.CACHED);
}
public ExecutorService getClientManagementExecutor() {
return clientManagementExecutor;
}
// Builds the generic client executor; thread count is configurable, otherwise cores * THREADS_PER_CORE.
private Executor newClientExecutor() {
final ExecutionService executionService = nodeEngine.getExecutionService();
int coreSize = RuntimeAvailableProcessors.get();
int threadCount = node.getProperties().getInteger(GroupProperty.CLIENT_ENGINE_THREAD_COUNT);
if (threadCount <= 0) {
threadCount = coreSize * THREADS_PER_CORE;
}
logger.finest("Creating new client executor with threadCount=" + threadCount);
return executionService.register(ExecutionService.CLIENT_EXECUTOR,
threadCount, coreSize * EXECUTOR_QUEUE_CAPACITY_PER_CORE,
ExecutorType.CONCRETE);
}
// Builds the query executor; thread count is configurable, otherwise cores * QUERY_THREADS_PER_CORE.
private Executor newClientQueryExecutor() {
final ExecutionService executionService = nodeEngine.getExecutionService();
int coreSize = RuntimeAvailableProcessors.get();
int threadCount = node.getProperties().getInteger(GroupProperty.CLIENT_ENGINE_QUERY_THREAD_COUNT);
if (threadCount <= 0) {
threadCount = coreSize * QUERY_THREADS_PER_CORE;
}
logger.finest("Creating new client query executor with threadCount=" + threadCount);
return executionService.register(ExecutionService.CLIENT_QUERY_EXECUTOR,
threadCount, coreSize * EXECUTOR_QUEUE_CAPACITY_PER_CORE,
ExecutorType.CONCRETE);
}
//needed for testing purposes
public ConnectionListener getConnectionListener() {
return connectionListener;
}
@Override
public SerializationService getSerializationService() {
return serializationService;
}
@Override
public int getClientEndpointCount() {
return endpointManager.size();
}
// Routes an incoming client message: partition-aware tasks go straight to the
// operation service; partition-agnostic tasks run on the urgent path, the query
// executor, or the generic client executor.
@Override
public void accept(ClientMessage clientMessage) {
int partitionId = clientMessage.getPartitionId();
Connection connection = clientMessage.getConnection();
MessageTask messageTask = messageTaskFactory.create(clientMessage, connection);
InternalOperationService operationService = nodeEngine.getOperationService();
if (partitionId < 0) {
if (isUrgent(messageTask)) {
operationService.execute(new PriorityPartitionSpecificRunnable(messageTask));
} else if (isQuery(messageTask)) {
queryExecutor.execute(messageTask);
} else {
executor.execute(messageTask);
}
} else {
operationService.execute(messageTask);
}
}
// Ping, partition-table and authentication tasks bypass the regular client executors.
private boolean isUrgent(MessageTask messageTask) {
Class clazz = messageTask.getClass();
return clazz == PingMessageTask.class
|| clazz == GetPartitionsMessageTask.class
|| clazz == AuthenticationMessageTask.class
|| clazz == AuthenticationCustomCredentialsMessageTask.class
;
}
private boolean isQuery(MessageTask messageTask) {
return messageTask instanceof AbstractMapQueryMessageTask;
}
@Override
public IPartitionService getPartitionService() {
return nodeEngine.getPartitionService();
}
@Override
public ClusterService getClusterService() {
return nodeEngine.getClusterService();
}
@Override
public EventService getEventService() {
return nodeEngine.getEventService();
}
@Override
public ProxyService getProxyService() {
return nodeEngine.getProxyService();
}
@Override
public Address getMasterAddress() {
return node.getMasterAddress();
}
@Override
public Address getThisAddress() {
return node.getThisAddress();
}
@Override
public String getThisUuid() {
return node.getThisUuid();
}
@Override
public MemberImpl getLocalMember() {
return node.getLocalMember();
}
@Override
public Config getConfig() {
return node.getConfig();
}
@Override
public ILogger getLogger(Class clazz) {
return node.getLogger(clazz);
}
public ClientEndpointManager getEndpointManager() {
return endpointManager;
}
public ClientExceptions getClientExceptions() {
return clientExceptions;
}
@Override
public SecurityContext getSecurityContext() {
return node.securityContext;
}
// Assigns the remote socket address as the endpoint of a TCP/IP client connection.
public void bind(final ClientEndpoint endpoint) {
final Connection conn = endpoint.getConnection();
if (conn instanceof TcpIpConnection) {
InetSocketAddress socketAddress = conn.getRemoteSocketAddress();
//socket address can be null if connection closed before bind
if (socketAddress != null) {
Address address = new Address(socketAddress);
((TcpIpConnection) conn).setEndPoint(address);
}
}
}
@Override
public void dispatchEvent(ClientEvent event, ClientListener listener) {
if (event.getEventType() == ClientEventType.CONNECTED) {
listener.clientConnected(event);
} else {
listener.clientDisconnected(event);
}
}
@Override
public void memberAdded(MembershipServiceEvent event) {
}
// When a remote member dies, schedule cleanup of client endpoints it owned after
// the configured grace period (gives clients time to reconnect elsewhere).
@Override
public void memberRemoved(MembershipServiceEvent event) {
if (event.getMember().localMember()) {
return;
}
final String deadMemberUuid = event.getMember().getUuid();
try {
nodeEngine.getExecutionService().schedule(new DestroyEndpointTask(deadMemberUuid),
endpointRemoveDelaySeconds, TimeUnit.SECONDS);
} catch (RejectedExecutionException e) {
// Scheduler may reject during shutdown; cleanup is best-effort here.
if (logger.isFinestEnabled()) {
logger.finest(e);
}
}
}
@Override
public void memberAttributeChanged(MemberAttributeServiceEvent event) {
}
public Collection<Client> getClients() {
final Collection<ClientEndpoint> endpoints = endpointManager.getEndpoints();
final Set<Client> clients = createHashSet(endpoints.size());
for (ClientEndpoint endpoint : endpoints) {
clients.add(endpoint);
}
return clients;
}
@Override
public void init(NodeEngine nodeEngine, Properties properties) {
node.getConnectionManager().addConnectionListener(connectionListener);
ClientHeartbeatMonitor heartbeatMonitor = new ClientHeartbeatMonitor(
endpointManager, getLogger(ClientHeartbeatMonitor.class), nodeEngine.getExecutionService(), node.getProperties());
heartbeatMonitor.start();
}
@Override
public void reset() {
clear("Resetting clientEngine");
}
@Override
public void shutdown(boolean terminate) {
clear("Shutting down clientEngine");
}
// Destroys every endpoint, closes its connection, and wipes ownership state.
// Used by both reset() and shutdown().
private void clear(String reason) {
for (ClientEndpoint ce : endpointManager.getEndpoints()) {
ClientEndpointImpl endpoint = (ClientEndpointImpl) ce;
try {
endpoint.destroy();
} catch (LoginException e) {
logger.finest(e.getMessage());
}
try {
final Connection conn = endpoint.getConnection();
if (conn.isAlive()) {
conn.close(reason, null);
}
} catch (Exception e) {
logger.finest(e);
}
}
endpointManager.clear();
ownershipMappings.clear();
}
// Records the correlation ID of an authentication request; returns false when a
// newer (greater) correlation ID has already been seen for this client.
public boolean trySetLastAuthenticationCorrelationId(String clientUuid, long newCorrelationId) {
AtomicLong lastCorrelationId = ConcurrencyUtil.getOrPutIfAbsent(lastAuthenticationCorrelationIds,
clientUuid,
LAST_AUTH_CORRELATION_ID_CONSTRUCTOR_FUNC);
return ConcurrencyUtil.setIfEqualOrGreaterThan(lastCorrelationId, newCorrelationId);
}
public String addOwnershipMapping(String clientUuid, String ownerUuid) {
return ownershipMappings.put(clientUuid, ownerUuid);
}
public boolean removeOwnershipMapping(String clientUuid, String memberUuid) {
lastAuthenticationCorrelationIds.remove(clientUuid);
return ownershipMappings.remove(clientUuid, memberUuid);
}
public String getOwnerUuid(String clientUuid) {
return ownershipMappings.get(clientUuid);
}
public TransactionManagerService getTransactionManagerService() {
return node.nodeEngine.getTransactionManagerService();
}
public ClientPartitionListenerService getPartitionListenerService() {
return partitionListenerService;
}
private final class ConnectionListenerImpl implements ConnectionListener {
@Override
public void connectionAdded(Connection conn) {
//no-op
//unfortunately we can't do the endpoint creation here, because this event is only called when the
//connection is bound, but we need to use the endpoint connection before that.
}
// Removes the endpoint for a closed client connection; if this member owned the
// client's owner connection, schedules a delayed cluster-wide disconnect.
@Override
public void connectionRemoved(Connection connection) {
if (!connection.isClient() || !nodeEngine.isRunning()) {
return;
}
final ClientEndpointImpl endpoint = (ClientEndpointImpl) endpointManager.getEndpoint(connection);
if (endpoint == null) {
logger.finest("connectionRemoved: No endpoint for connection:" + connection);
return;
}
endpointManager.removeEndpoint(endpoint);
if (!endpoint.isOwnerConnection()) {
logger.finest("connectionRemoved: Not the owner conn:" + connection + " for endpoint " + endpoint);
return;
}
String localMemberUuid = node.getThisUuid();
final String clientUuid = endpoint.getUuid();
String ownerUuid = ownershipMappings.get(clientUuid);
if (localMemberUuid.equals(ownerUuid)) {
final long authenticationCorrelationId = endpoint.getAuthenticationCorrelationId();
try {
nodeEngine.getExecutionService().schedule(new Runnable() {
@Override
public void run() {
callDisconnectionOperation(clientUuid, authenticationCorrelationId);
}
}, endpointRemoveDelaySeconds, TimeUnit.SECONDS);
} catch (RejectedExecutionException e) {
if (logger.isFinestEnabled()) {
logger.finest(e);
}
}
}
}
// Broadcasts a ClientDisconnectionOperation to all members, unless ownership moved
// or the client re-authenticated in the meantime.
private void callDisconnectionOperation(String clientUuid, long authenticationCorrelationId) {
Collection<Member> memberList = nodeEngine.getClusterService().getMembers();
OperationService operationService = nodeEngine.getOperationService();
String memberUuid = getLocalMember().getUuid();
String ownerMember = ownershipMappings.get(clientUuid);
if (!memberUuid.equals(ownerMember)) {
// do nothing if the owner already changed (double checked locking)
return;
}
// NOTE(review): lastAuthenticationCorrelationIds.get(clientUuid) may return null if
// removeOwnershipMapping ran concurrently for this client - verify this race is benign.
if (lastAuthenticationCorrelationIds.get(clientUuid).get() > authenticationCorrelationId) {
//a new authentication already made for that client. This check is needed to detect
// "a disconnected client is reconnected back to same node"
return;
}
for (Member member : memberList) {
ClientDisconnectionOperation op = new ClientDisconnectionOperation(clientUuid, memberUuid);
operationService.createInvocationBuilder(SERVICE_NAME, op, member.getAddress()).invoke();
}
}
}
// Cleans up (locally) all clients that were owned by a member that left the cluster.
private class DestroyEndpointTask implements Runnable {
private final String deadMemberUuid;
DestroyEndpointTask(String deadMemberUuid) {
this.deadMemberUuid = deadMemberUuid;
}
@Override
public void run() {
InternalOperationService service = nodeEngine.getOperationService();
Address thisAddr = getLocalMember().getAddress();
for (Map.Entry<String, String> entry : ownershipMappings.entrySet()) {
String clientUuid = entry.getKey();
String memberUuid = entry.getValue();
if (deadMemberUuid.equals(memberUuid)) {
ClientDisconnectionOperation op = new ClientDisconnectionOperation(clientUuid, memberUuid);
service.createInvocationBuilder(ClientEngineImpl.SERVICE_NAME, op, thisAddr).invoke();
}
}
}
}
// Ships the ownership mappings of still-live members to a joining member; returns
// null when there is nothing to transfer.
@Override
public Operation getPreJoinOperation() {
Set<Member> members = nodeEngine.getClusterService().getMembers();
HashSet<String> liveMemberUUIDs = new HashSet<String>();
for (Member member : members) {
liveMemberUUIDs.add(member.getUuid());
}
Map<String, String> liveMappings = new HashMap<String, String>(ownershipMappings);
liveMappings.values().retainAll(liveMemberUUIDs);
return liveMappings.isEmpty() ? null : new OnJoinClientOperation(liveMappings);
}
// Collects connected-client counts per client type from every member.
@SuppressWarnings("checkstyle:methodlength")
@Override
public Map<ClientType, Integer> getConnectedClientStats() {
int numberOfCppClients = 0;
int numberOfDotNetClients = 0;
int numberOfJavaClients = 0;
int numberOfNodeJSClients = 0;
int numberOfPythonClients = 0;
int numberOfGoClients = 0;
int numberOfOtherClients = 0;
OperationService operationService = node.nodeEngine.getOperationService();
Map<String, ClientType> clientsMap = new HashMap<String, ClientType>();
for (Member member : node.getClusterService().getMembers()) {
Address target = member.getAddress();
Operation clientInfoOperation = new GetConnectedClientsOperation();
Future<Map<String, ClientType>> future
= operationService.invokeOnTarget(SERVICE_NAME, clientInfoOperation, target);
try {
Map<String, ClientType> endpoints = future.get();
if (endpoints == null) {
continue;
}
//Merge connected clients according to their UUID
for (Map.Entry<String, ClientType> entry : endpoints.entrySet()) {
clientsMap.put(entry.getKey(), entry.getValue());
}
} catch (Exception e) {
logger.warning("Cannot get client information from: " + target.toString(), e);
}
}
//Now we are regrouping according to the client type
for (ClientType clientType : clientsMap.values()) {
switch (clientType) {
case JAVA:
numberOfJavaClients++;
break;
case CSHARP:
numberOfDotNetClients++;
break;
case CPP:
numberOfCppClients++;
break;
case NODEJS:
numberOfNodeJSClients++;
break;
case PYTHON:
numberOfPythonClients++;
break;
case GO:
numberOfGoClients++;
break;
default:
numberOfOtherClients++;
}
}
final Map<ClientType, Integer> resultMap = new EnumMap<ClientType, Integer>(ClientType.class);
resultMap.put(ClientType.CPP, numberOfCppClients);
resultMap.put(ClientType.CSHARP, numberOfDotNetClients);
resultMap.put(ClientType.JAVA, numberOfJavaClients);
resultMap.put(ClientType.NODEJS, numberOfNodeJSClients);
resultMap.put(ClientType.PYTHON, numberOfPythonClients);
resultMap.put(ClientType.GO, numberOfGoClients);
resultMap.put(ClientType.OTHER, numberOfOtherClients);
return resultMap;
}
// Returns client UUID -> latest statistics string for every endpoint that reported stats.
@Override
public Map<String, String> getClientStatistics() {
Collection<ClientEndpoint> clientEndpoints = endpointManager.getEndpoints();
Map<String, String> statsMap = new HashMap<String, String>(clientEndpoints.size());
for (ClientEndpoint e : clientEndpoints) {
String statistics = e.getClientStatistics();
if (null != statistics) {
statsMap.put(e.getUuid(), statistics);
}
}
return statsMap;
}
// Wraps an urgent MessageTask so the operation service runs it ahead of normal work
// (marker interface UrgentSystemOperation conveys the priority).
private static class PriorityPartitionSpecificRunnable implements PartitionSpecificRunnable, UrgentSystemOperation {
private final MessageTask task;
public PriorityPartitionSpecificRunnable(MessageTask task) {
this.task = task;
}
@Override
public void run() {
task.run();
}
@Override
public int getPartitionId() {
return task.getPartitionId();
}
@Override
public String toString() {
return "PriorityPartitionSpecificRunnable:{ " + task + "}";
}
}
}
| |
/**
*
* Copyright 2005-2008 Jive Software.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smackx.commands;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.WeakHashMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.jivesoftware.smack.ConnectionCreationListener;
import org.jivesoftware.smack.Manager;
import org.jivesoftware.smack.SmackException;
import org.jivesoftware.smack.SmackException.NoResponseException;
import org.jivesoftware.smack.SmackException.NotConnectedException;
import org.jivesoftware.smack.XMPPConnection;
import org.jivesoftware.smack.XMPPConnectionRegistry;
import org.jivesoftware.smack.XMPPException;
import org.jivesoftware.smack.XMPPException.XMPPErrorException;
import org.jivesoftware.smack.iqrequest.AbstractIqRequestHandler;
import org.jivesoftware.smack.iqrequest.IQRequestHandler.Mode;
import org.jivesoftware.smack.packet.IQ;
import org.jivesoftware.smack.packet.XMPPError;
import org.jivesoftware.smack.util.StringUtils;
import org.jivesoftware.smackx.commands.AdHocCommand.Action;
import org.jivesoftware.smackx.commands.AdHocCommand.Status;
import org.jivesoftware.smackx.commands.packet.AdHocCommandData;
import org.jivesoftware.smackx.disco.AbstractNodeInformationProvider;
import org.jivesoftware.smackx.disco.ServiceDiscoveryManager;
import org.jivesoftware.smackx.disco.packet.DiscoverInfo;
import org.jivesoftware.smackx.disco.packet.DiscoverItems;
import org.jivesoftware.smackx.xdata.Form;
/**
* An AdHocCommandManager is responsible for keeping the list of available
* commands offered by a service and for processing commands requests.
*
* Pass in an XMPPConnection instance to
* {@link #getAddHocCommandsManager(org.jivesoftware.smack.XMPPConnection)} in order to
* get an instance of this class.
*
* @author Gabriel Guardincerri
*/
public class AdHocCommandManager extends Manager {
// XEP-0050 Ad-Hoc Commands namespace.
public static final String NAMESPACE = "http://jabber.org/protocol/commands";
private static final Logger LOGGER = Logger.getLogger(AdHocCommandManager.class.getName());
/**
* The session time out in seconds.
*/
private static final int SESSION_TIMEOUT = 2 * 60;
/**
* Map an XMPPConnection with its AdHocCommandManager. This map has a key-value
* pair for every active connection. Weak keys let an entry disappear once the
* connection itself is garbage collected.
*/
private static Map<XMPPConnection, AdHocCommandManager> instances = new WeakHashMap<>();
/**
 * Eagerly attaches an AdHocCommandManager to every newly created connection so
 * that the command infrastructure is registered from the start.
 */
static {
    XMPPConnectionRegistry.addConnectionCreationListener(new ConnectionCreationListener() {
        @Override
        public void connectionCreated(XMPPConnection connection) {
            getAddHocCommandsManager(connection);
        }
    });
}
/**
 * Returns the {@code AdHocCommandManager} bound to the given connection,
 * creating and caching a new one on first use.
 *
 * @param connection the XMPP connection.
 * @return the AdHocCommandManager associated with the connection.
 */
public static synchronized AdHocCommandManager getAddHocCommandsManager(XMPPConnection connection) {
    AdHocCommandManager manager = instances.get(connection);
    if (manager != null) {
        return manager;
    }
    manager = new AdHocCommandManager(connection);
    instances.put(connection, manager);
    return manager;
}
/**
* Map a command node with its AdHocCommandInfo. Note: Key=command node,
* Value=command. Command node matches the node attribute sent by command
* requesters.
*/
private final Map<String, AdHocCommandInfo> commands = new ConcurrentHashMap<String, AdHocCommandInfo>();
/**
* Map a command session ID with the instance LocalCommand. The LocalCommand
* is the object that holds all the information about the current state of
* the command execution. Note: Key=session ID, Value=LocalCommand. Session
* ID matches the sessionid attribute sent by command responders.
*/
private final Map<String, LocalCommand> executingCommands = new ConcurrentHashMap<String, LocalCommand>();
// Discovery manager used to advertise the registered commands of this connection.
private final ServiceDiscoveryManager serviceDiscoveryManager;
/**
* Thread that reaps stale sessions. Lazily started on the first multi-stage session.
*/
// FIXME The session sweeping is horrible implemented. The thread will never stop running. A different approach must
// be implemented. For example one that does stop reaping sessions and the thread if there are no more, and restarts
// the reaping process on demand. Or for every command a scheduled task should be created that removes the session
// if it's timed out. See SMACK-624.
private Thread sessionsSweeper;
/**
 * Creates a command manager bound to {@code connection}: advertises the
 * AdHoc-Commands feature, exposes the registered commands through service
 * discovery, and installs the IQ request handler that executes commands.
 *
 * @param connection the XMPP connection this manager serves.
 */
private AdHocCommandManager(XMPPConnection connection) {
    super(connection);
    this.serviceDiscoveryManager = ServiceDiscoveryManager.getInstanceFor(connection);
    // Add the feature to the service discovery manager to show that this
    // connection supports the AdHoc-Commands protocol. This information will
    // be used when another client tries to discover whether this client
    // supports AdHoc-Commands or not.
    // (Reuses the field instead of repeated getInstanceFor() lookups; the
    // manager is cached per connection, so it is the same instance.)
    serviceDiscoveryManager.addFeature(NAMESPACE);
    // Set the NodeInformationProvider that will provide information about
    // which AdHoc-Commands are registered, whenever a disco request is
    // received.
    serviceDiscoveryManager.setNodeInformationProvider(NAMESPACE,
            new AbstractNodeInformationProvider() {
                @Override
                public List<DiscoverItems.Item> getNodeItems() {
                    List<DiscoverItems.Item> answer = new ArrayList<DiscoverItems.Item>();
                    Collection<AdHocCommandInfo> commandsList = getRegisteredCommands();
                    for (AdHocCommandInfo info : commandsList) {
                        DiscoverItems.Item item = new DiscoverItems.Item(
                                info.getOwnerJID());
                        item.setName(info.getName());
                        item.setNode(info.getNode());
                        answer.add(item);
                    }
                    return answer;
                }
            });
    // The IQ handler that processes incoming AdHoc-Command set requests.
    connection.registerIQRequestHandler(new AbstractIqRequestHandler(AdHocCommandData.ELEMENT,
            AdHocCommandData.NAMESPACE, IQ.Type.set, Mode.async) {
        @Override
        public IQ handleIQRequest(IQ iqRequest) {
            AdHocCommandData requestData = (AdHocCommandData) iqRequest;
            try {
                return processAdHocCommand(requestData);
            }
            catch (NoResponseException | NotConnectedException e) {
                // Fixed typo in the log message ("exceptino" -> "exception").
                LOGGER.log(Level.INFO, "processAdHocCommand threw exception", e);
                return null;
            }
        }
    });
    // Lazily created by processAdHocCommand once the first multi-stage
    // session is registered. See SMACK-624.
    sessionsSweeper = null;
}
/**
 * Registers a new command with this command manager, which is related to a
 * connection. The <tt>node</tt> is a unique identifier of that command for
 * the connection related to this command manager. The <tt>name</tt> is the
 * human readable name of the command. The <tt>clazz</tt> is the class of
 * the command, which must extend {@link LocalCommand} and have a default
 * constructor.
 *
 * @param node the unique identifier of the command.
 * @param name the human readable name of the command.
 * @param clazz the class of the command, which must extend {@link LocalCommand}.
 */
public void registerCommand(String node, String name, final Class<? extends LocalCommand> clazz) {
    // Wrap the class in a factory that reflectively instantiates it.
    LocalCommandFactory factory = new LocalCommandFactory() {
        public LocalCommand getInstance() throws InstantiationException, IllegalAccessException {
            return clazz.newInstance();
        }
    };
    registerCommand(node, name, factory);
}
/**
 * Registers a new command with this command manager, which is related to a
 * connection. The <tt>node</tt> is a unique identifier of that command for
 * the connection related to this command manager. The <tt>name</tt> is the
 * human readable name of the command. The <tt>factory</tt> generates new
 * instances of the command.
 *
 * @param node the unique identifier of the command.
 * @param name the human readable name of the command.
 * @param factory a factory to create new instances of the command.
 */
public void registerCommand(String node, final String name, LocalCommandFactory factory) {
    commands.put(node, new AdHocCommandInfo(node, name, connection().getUser(), factory));
    // Advertise the added command through service discovery.
    serviceDiscoveryManager.setNodeInformationProvider(node,
            new AbstractNodeInformationProvider() {
                @Override
                public List<String> getNodeFeatures() {
                    List<String> features = new ArrayList<String>();
                    features.add(NAMESPACE);
                    // TODO: check if this service is provided by the
                    // TODO: current connection.
                    features.add("jabber:x:data");
                    return features;
                }
                @Override
                public List<DiscoverInfo.Identity> getNodeIdentities() {
                    List<DiscoverInfo.Identity> identities = new ArrayList<DiscoverInfo.Identity>();
                    identities.add(new DiscoverInfo.Identity(
                            "automation", name, "command-node"));
                    return identities;
                }
            });
}
/**
 * Discovers the commands offered by a specific JID. The <code>jid</code> is a
 * full JID.
 *
 * @param jid the full JID to retrieve the commands for.
 * @return the discovered items.
 * @throws XMPPException if the operation failed for some reason.
 * @throws SmackException if there was no response from the server.
 */
public DiscoverItems discoverCommands(String jid) throws XMPPException, SmackException {
    // Ad-hoc commands are published under the well-known commands node.
    final String commandsNode = NAMESPACE;
    return serviceDiscoveryManager.discoverItems(jid, commandsNode);
}
/**
 * Publishes the locally registered commands to a specific JID as disco items
 * under the commands node.
 *
 * @param jid the full JID to publish the commands to.
 * @throws XMPPException if the operation failed for some reason.
 * @throws SmackException if there was no response from the server.
 */
public void publishCommands(String jid) throws XMPPException, SmackException {
    // Build one disco item per registered command.
    DiscoverItems items = new DiscoverItems();
    for (AdHocCommandInfo info : getRegisteredCommands()) {
        DiscoverItems.Item item = new DiscoverItems.Item(info.getOwnerJID());
        item.setName(info.getName());
        item.setNode(info.getNode());
        items.addItem(item);
    }
    serviceDiscoveryManager.publishItems(jid, NAMESPACE, items);
}
/**
 * Returns a command that represents an instance of a command in a remote
 * host. It is used to execute remote commands. The concept is similar to
 * RMI: every invocation on this command is equivalent to an invocation on
 * the remote command.
 *
 * @param jid the full JID of the host of the remote command.
 * @param node the identifier of the command.
 * @return a local proxy equivalent to the remote command.
 */
public RemoteCommand getRemoteCommand(String jid, String node) {
    XMPPConnection conn = connection();
    return new RemoteCommand(conn, node, jid);
}
/**
* Process the AdHoc-Command stanza(/packet) that request the execution of some
* action of a command. If this is the first request, this method checks,
* before executing the command, if:
* <ul>
* <li>The requested command exists</li>
* <li>The requester has permissions to execute it</li>
* <li>The command has more than one stage, if so, it saves the command and
* session ID for further use</li>
* </ul>
*
* <br>
* <br>
* If this is not the first request, this method checks, before executing
* the command, if:
* <ul>
* <li>The session ID of the request was stored</li>
* <li>The session life do not exceed the time out</li>
* <li>The action to execute is one of the available actions</li>
* </ul>
*
* @param requestData
* the stanza(/packet) to process.
* @throws NotConnectedException
* @throws NoResponseException
*/
private IQ processAdHocCommand(AdHocCommandData requestData) throws NoResponseException, NotConnectedException {
// Creates the response with the corresponding data
AdHocCommandData response = new AdHocCommandData();
response.setTo(requestData.getFrom());
response.setStanzaId(requestData.getStanzaId());
response.setNode(requestData.getNode());
response.setId(requestData.getTo());
String sessionId = requestData.getSessionID();
String commandNode = requestData.getNode();
if (sessionId == null) {
// A new execution request has been received. Check that the
// command exists
if (!commands.containsKey(commandNode)) {
// Requested command does not exist so return
// item_not_found error.
return respondError(response, XMPPError.Condition.item_not_found);
}
// Create new session ID
sessionId = StringUtils.randomString(15);
try {
// Create a new instance of the command with the
// corresponding sessioid
LocalCommand command = newInstanceOfCmd(commandNode, sessionId);
response.setType(IQ.Type.result);
command.setData(response);
// Check that the requester has enough permission.
// Answer forbidden error if requester permissions are not
// enough to execute the requested command
if (!command.hasPermission(requestData.getFrom())) {
return respondError(response, XMPPError.Condition.forbidden);
}
Action action = requestData.getAction();
// If the action is unknown then respond an error.
if (action != null && action.equals(Action.unknown)) {
return respondError(response, XMPPError.Condition.bad_request,
AdHocCommand.SpecificErrorCondition.malformedAction);
}
// If the action is not execute, then it is an invalid action.
if (action != null && !action.equals(Action.execute)) {
return respondError(response, XMPPError.Condition.bad_request,
AdHocCommand.SpecificErrorCondition.badAction);
}
// Increase the state number, so the command knows in witch
// stage it is
command.incrementStage();
// Executes the command
command.execute();
if (command.isLastStage()) {
// If there is only one stage then the command is completed
response.setStatus(Status.completed);
}
else {
// Else it is still executing, and is registered to be
// available for the next call
response.setStatus(Status.executing);
executingCommands.put(sessionId, command);
// See if the session reaping thread is started. If not, start it.
if (sessionsSweeper == null) {
sessionsSweeper = new Thread(new Runnable() {
public void run() {
while (true) {
for (String sessionId : executingCommands.keySet()) {
LocalCommand command = executingCommands.get(sessionId);
// Since the command could be removed in the meanwhile
// of getting the key and getting the value - by a
// processed packet. We must check if it still in the
// map.
if (command != null) {
long creationStamp = command.getCreationDate();
// Check if the Session data has expired (default is
// 10 minutes)
// To remove it from the session list it waits for
// the double of the of time out time. This is to
// let
// the requester know why his execution request is
// not accepted. If the session is removed just
// after the time out, then whe the user request to
// continue the execution he will recieved an
// invalid session error and not a time out error.
if (System.currentTimeMillis() - creationStamp > SESSION_TIMEOUT * 1000 * 2) {
// Remove the expired session
executingCommands.remove(sessionId);
}
}
}
try {
Thread.sleep(1000);
}
catch (InterruptedException ie) {
// Ignore.
}
}
}
});
sessionsSweeper.setDaemon(true);
sessionsSweeper.start();
}
}
// Sends the response packet
return response;
}
catch (XMPPErrorException e) {
// If there is an exception caused by the next, complete,
// prev or cancel method, then that error is returned to the
// requester.
XMPPError error = e.getXMPPError();
// If the error type is cancel, then the execution is
// canceled therefore the status must show that, and the
// command be removed from the executing list.
if (XMPPError.Type.CANCEL.equals(error.getType())) {
response.setStatus(Status.canceled);
executingCommands.remove(sessionId);
}
return respondError(response, error);
}
}
else {
LocalCommand command = executingCommands.get(sessionId);
// Check that a command exists for the specified sessionID
// This also handles if the command was removed in the meanwhile
// of getting the key and the value of the map.
if (command == null) {
return respondError(response, XMPPError.Condition.bad_request,
AdHocCommand.SpecificErrorCondition.badSessionid);
}
// Check if the Session data has expired (default is 10 minutes)
long creationStamp = command.getCreationDate();
if (System.currentTimeMillis() - creationStamp > SESSION_TIMEOUT * 1000) {
// Remove the expired session
executingCommands.remove(sessionId);
// Answer a not_allowed error (session-expired)
return respondError(response, XMPPError.Condition.not_allowed,
AdHocCommand.SpecificErrorCondition.sessionExpired);
}
/*
* Since the requester could send two requests for the same
* executing command i.e. the same session id, all the execution of
* the action must be synchronized to avoid inconsistencies.
*/
synchronized (command) {
Action action = requestData.getAction();
// If the action is unknown the respond an error
if (action != null && action.equals(Action.unknown)) {
return respondError(response, XMPPError.Condition.bad_request,
AdHocCommand.SpecificErrorCondition.malformedAction);
}
// If the user didn't specify an action or specify the execute
// action then follow the actual default execute action
if (action == null || Action.execute.equals(action)) {
action = command.getExecuteAction();
}
// Check that the specified action was previously
// offered
if (!command.isValidAction(action)) {
return respondError(response, XMPPError.Condition.bad_request,
AdHocCommand.SpecificErrorCondition.badAction);
}
try {
// TODO: Check that all the required fields of the form are
// TODO: filled, if not throw an exception. This will simplify the
// TODO: construction of new commands
// Since all errors were passed, the response is now a
// result
response.setType(IQ.Type.result);
// Set the new data to the command.
command.setData(response);
if (Action.next.equals(action)) {
command.incrementStage();
command.next(new Form(requestData.getForm()));
if (command.isLastStage()) {
// If it is the last stage then the command is
// completed
response.setStatus(Status.completed);
}
else {
// Otherwise it is still executing
response.setStatus(Status.executing);
}
}
else if (Action.complete.equals(action)) {
command.incrementStage();
command.complete(new Form(requestData.getForm()));
response.setStatus(Status.completed);
// Remove the completed session
executingCommands.remove(sessionId);
}
else if (Action.prev.equals(action)) {
command.decrementStage();
command.prev();
}
else if (Action.cancel.equals(action)) {
command.cancel();
response.setStatus(Status.canceled);
// Remove the canceled session
executingCommands.remove(sessionId);
}
return response;
}
catch (XMPPErrorException e) {
// If there is an exception caused by the next, complete,
// prev or cancel method, then that error is returned to the
// requester.
XMPPError error = e.getXMPPError();
// If the error type is cancel, then the execution is
// canceled therefore the status must show that, and the
// command be removed from the executing list.
if (XMPPError.Type.CANCEL.equals(error.getType())) {
response.setStatus(Status.canceled);
executingCommands.remove(sessionId);
}
return respondError(response, error);
}
}
}
}
/**
 * Responds with an error of the given XMPP condition (no command-specific
 * sub-condition).
 *
 * <p>Made {@code static} for consistency with the other two
 * {@code respondError} overloads; the method touches no instance state.
 * The previous {@code @throws NotConnectedException} tag was stale — this
 * method declares no checked exceptions.
 *
 * @param response the response to send.
 * @param condition the condition of the error.
 * @return the response, configured as an error packet.
 */
private static IQ respondError(AdHocCommandData response,
        XMPPError.Condition condition) {
    return respondError(response, new XMPPError(condition));
}
/**
 * Responds with an error carrying both a generic XMPP condition and the
 * ad-hoc-command-specific sub-condition (XEP-0050).
 *
 * @param response the response to send.
 * @param condition the condition of the error.
 * @param specificCondition the adhoc command error condition.
 * @return the response, configured as an error packet.
 */
private static IQ respondError(AdHocCommandData response, XMPPError.Condition condition,
        AdHocCommand.SpecificErrorCondition specificCondition)
{
    // Attach the command-specific error as an extension of the generic XMPP error.
    XMPPError error = new XMPPError(condition, new AdHocCommandData.SpecificError(specificCondition));
    return respondError(response, error);
}
/**
 * Marks the given response as an error packet and attaches the supplied
 * XMPP error to it.
 *
 * @param response the response to send.
 * @param error the error to send.
 * @return the same response instance, now of type {@code error}.
 */
private static IQ respondError(AdHocCommandData response, XMPPError error) {
    response.setError(error);
    response.setType(IQ.Type.error);
    return response;
}
/**
 * Creates a new instance of a command to be used by a new execution request.
 *
 * @param commandNode the command node that identifies it.
 * @param sessionID the session id of this execution.
 * @return the command instance to execute.
 * @throws XMPPErrorException if there is problem creating the new instance,
 *         or if no command is registered under {@code commandNode}.
 */
private LocalCommand newInstanceOfCmd(String commandNode, String sessionID) throws XMPPErrorException
{
    AdHocCommandInfo commandInfo = commands.get(commandNode);
    // Guard against requests for a node that was never registered. The
    // original code dereferenced the map result unconditionally and would
    // have thrown a NullPointerException instead of reporting a proper
    // XMPP error to the requester.
    if (commandInfo == null) {
        throw new XMPPErrorException(new XMPPError(
            XMPPError.Condition.item_not_found));
    }
    LocalCommand command;
    try {
        command = (LocalCommand) commandInfo.getCommandInstance();
        command.setSessionID(sessionID);
        command.setName(commandInfo.getName());
        command.setNode(commandInfo.getNode());
    }
    catch (InstantiationException | IllegalAccessException e) {
        // The factory could not build the command instance; surface it to
        // the requester as a server-side failure. (The two identical catch
        // blocks were collapsed into a multi-catch.)
        throw new XMPPErrorException(new XMPPError(
                XMPPError.Condition.internal_server_error));
    }
    return command;
}
/**
 * Returns every command registered with this manager (a manager is tied to
 * a single connection).
 *
 * @return the registered commands.
 */
private Collection<AdHocCommandInfo> getRegisteredCommands() {
    return this.commands.values();
}
/**
 * Value holder describing a registered ad-hoc command: its node, human
 * readable name, owner JID and the factory used to create execution
 * instances.
 */
private static class AdHocCommandInfo {
    // All fields are assigned exactly once in the constructor; marked final
    // so the holder is immutable (there are no setters).
    private final String node;
    private final String name;
    private final String ownerJID;
    private final LocalCommandFactory factory;

    public AdHocCommandInfo(String node, String name, String ownerJID,
            LocalCommandFactory factory)
    {
        this.node = node;
        this.name = name;
        this.ownerJID = ownerJID;
        this.factory = factory;
    }

    /**
     * Creates a fresh command instance for a new execution request.
     *
     * @return a new command produced by the configured factory.
     * @throws InstantiationException if the factory cannot instantiate the command.
     * @throws IllegalAccessException if the command class is not accessible.
     */
    public LocalCommand getCommandInstance() throws InstantiationException,
            IllegalAccessException
    {
        return factory.getInstance();
    }

    /** @return the human readable command name. */
    public String getName() {
        return name;
    }

    /** @return the node that identifies this command. */
    public String getNode() {
        return node;
    }

    /** @return the JID of the command's owner. */
    public String getOwnerJID() {
        return ownerJID;
    }
}
}
| |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.framework.main.datatree;
import static org.junit.Assert.*;
import java.util.Set;
import javax.swing.*;
import org.junit.Assert;
import org.junit.Test;
import docking.AbstractErrDialog;
import docking.ActionContext;
import docking.action.DockingActionIf;
import docking.widgets.OptionDialog;
import docking.widgets.table.GTable;
import docking.widgets.tree.GTreeNode;
import ghidra.framework.main.projectdata.actions.VersionControlAction;
import ghidra.framework.model.DomainFile;
import ghidra.framework.model.DomainFolder;
import ghidra.program.model.listing.CodeUnit;
import ghidra.program.model.listing.Program;
import ghidra.program.model.symbol.SourceType;
import ghidra.program.model.symbol.SymbolTable;
import ghidra.util.task.TaskMonitor;
import resources.MultiIcon;
import resources.ResourceManager;
/**
* Tests for version control (not multi user).
*/
/**
 * Tests for version control (not multi user).
 *
 * <p>These tests drive the front-end GUI: they select tree nodes, invoke
 * docking actions, and interact with the resulting dialogs. The exact
 * ordering of action invocations, dialog waits and task waits is
 * significant, so the flows are kept sequential and explicit.
 */
public class VersionControlAction2Test extends AbstractVersionControlActionTest {

    @Test
    public void testActionsEnabledForFile() throws Exception {
        // For an unversioned file, only "Add to Version Control" should be
        // enabled, and "Find Checkouts" should not appear in the popup.
        GTreeNode node = getNode(PROGRAM_C);
        selectNode(node);
        Set<DockingActionIf> actions = frontEnd.getFrontEndActions();
        for (DockingActionIf action : actions) {
            // Only version-control actions are of interest here.
            if (!(action instanceof VersionControlAction)) {
                continue;
            }
            String actionName = action.getName();
            ActionContext context = getDomainFileActionContext(node);
            if (actionName.equals("Add to Version Control")) {
                assertTrue(action.isEnabledForContext(context));
            }
            else {
                assertTrue(!action.isEnabledForContext(context));
            }
            if (actionName.equals("Find Checkouts")) {
                assertTrue(!action.isAddToPopup(context));
            }
            else {
                assertTrue(action.isAddToPopup(context));
            }
        }
    }

    @Test
    public void testActionsEnabledForFolder() throws Exception {
        // Build a folder containing one file, then verify that for a folder
        // selection only "Find Checkouts" is enabled.
        DomainFolder rootFolder = frontEnd.getRootFolder();
        DomainFolder f = rootFolder.createFolder("myFolder");
        waitForSwing();
        Program p = frontEnd.buildProgram(this);
        f.createFile("Sample", p, TaskMonitor.DUMMY);
        p.release(this);
        waitForSwing();
        GTreeNode node = getFolderNode("myFolder");
        assertNotNull(node);
        expandNode(node);
        selectNode(node);
        Set<DockingActionIf> actions = frontEnd.getFrontEndActions();
        for (DockingActionIf action : actions) {
            if (!(action instanceof VersionControlAction)) {
                continue;
            }
            if (action.getName().equals("Find Checkouts")) {
                assertTrue(action.isEnabledForContext(getDomainFileActionContext(node)));
            }
            else {
                assertTrue(!action.isEnabledForContext(getDomainFileActionContext(node)));
            }
        }
    }

    @Test
    public void testAddToVersionControlKeepCheckedOut() throws Exception {
        // Add a file to version control through the dialog, leaving the
        // "keep checked out" option at its default; file must end up
        // versioned AND checked out.
        GTreeNode node = getNode(PROGRAM_C);
        selectNode(node);
        DockingActionIf action = getAction("Add to Version Control");
        performAction(action, getDomainFileActionContext(node), false);
        VersionControlDialog dialog = waitForDialogComponent(VersionControlDialog.class);
        assertNotNull(dialog);
        JTextArea textArea = findComponent(dialog, JTextArea.class);
        assertNotNull(textArea);
        runSwing(() -> textArea.setText("This is a test"));
        pressButtonByText(dialog, "OK");
        waitForTasks();
        DomainFile df = ((DomainFileNode) node).getDomainFile();
        assertTrue(df.isVersioned());
        assertTrue(df.isCheckedOut());
        assertEquals(1, df.getLatestVersion());
    }

    @Test
    public void testAddToVersionControl() throws Exception {
        // Add without keeping checked out; file is versioned but not checked out.
        GTreeNode node = getNode(PROGRAM_C);
        frontEnd.addToVersionControl(node, false);
        DomainFile df = ((DomainFileNode) node).getDomainFile();
        assertTrue(!df.isCheckedOut());
        assertEquals(1, df.getLatestVersion());
    }

    @Test
    public void testAddMultipleToVersionControl() throws Exception {
        // Add two files at once using "Apply to All"; both must end up
        // versioned and not checked out.
        GTreeNode nodeA = getNode(PROGRAM_A);
        GTreeNode nodeC = getNode(PROGRAM_C);
        selectNodes(nodeA, nodeC);
        DockingActionIf action = getAction("Add to Version Control");
        SwingUtilities.invokeLater(
            () -> action.actionPerformed(getDomainFileActionContext(nodeA, nodeC)));
        waitForSwing();
        VersionControlDialog dialog = waitForDialogComponent(VersionControlDialog.class);
        assertNotNull(dialog);
        JTextArea textArea = findComponent(dialog, JTextArea.class);
        assertNotNull(textArea);
        JCheckBox cb = findComponent(dialog, JCheckBox.class);
        assertNotNull(cb);
        runSwing(() -> {
            textArea.setText("This is a test");
            cb.setSelected(false);
        });
        pressButtonByText(dialog, "Apply to All");
        waitForTasks();
        DomainFile df = ((DomainFileNode) nodeC).getDomainFile();
        assertTrue(df.isVersioned());
        assertTrue(!df.isCheckedOut());
        assertEquals(1, df.getLatestVersion());
        df = ((DomainFileNode) nodeA).getDomainFile();
        assertTrue(df.isVersioned());
        assertTrue(!df.isCheckedOut());
        assertEquals(1, df.getLatestVersion());
    }

    @Test
    public void testCheckOut() throws Exception {
        // add program to version control
        GTreeNode node = getNode(PROGRAM_A);
        addToVersionControl(node, false);
        selectNode(node);
        DockingActionIf action = getAction("CheckOut");
        SwingUtilities.invokeLater(() -> action.actionPerformed(getDomainFileActionContext(node)));
        waitForSwing();
        waitForTasks();
        DomainFile df = ((DomainFileNode) node).getDomainFile();
        assertTrue(df.isCheckedOut());
        // The checked-out file's icon should be a MultiIcon that includes
        // the checkout overlay image.
        Icon icon = df.getIcon(false);
        assertTrue(icon instanceof MultiIcon);
        Icon[] icons = ((MultiIcon) icon).getIcons();
        Icon checkOutIcon = ResourceManager.loadImage("images/checkex.png");
        boolean found = false;
        for (Icon element : icons) {
            if (checkOutIcon.equals(element)) {
                found = true;
                break;
            }
        }
        if (!found) {
            Assert.fail("Did not find checkout icon!");
        }
    }

    @Test
    public void testCheckIn() throws Exception {
        // Check out a versioned file, modify it (add a label), then check it
        // back in through the check-in dialog.
        GTreeNode node = getNode(PROGRAM_A);
        addToVersionControl(node, false);
        selectNode(node);
        DockingActionIf action = getAction("CheckOut");
        runSwing(() -> action.actionPerformed(getDomainFileActionContext(node)), false);
        waitForSwing();
        waitForTasks();
        Program program = (Program) ((DomainFileNode) node).getDomainFile()
                .getDomainObject(this,
                    true, false, TaskMonitor.DUMMY);
        int transactionID = program.startTransaction("test");
        try {
            SymbolTable symTable = program.getSymbolTable();
            symTable.createLabel(program.getMinAddress().getNewAddress(0x010001000), "fred",
                SourceType.USER_DEFINED);
        }
        finally {
            // Always close the transaction, then save so the change is
            // available for check-in.
            program.endTransaction(transactionID, true);
            program.save(null, TaskMonitor.DUMMY);
        }
        program.release(this);
        DockingActionIf checkInAction = getAction("CheckIn");
        runSwing(() -> checkInAction.actionPerformed(getDomainFileActionContext(node)), false);
        waitForSwing();
        VersionControlDialog dialog = waitForDialogComponent(VersionControlDialog.class);
        assertNotNull(dialog);
        JTextArea textArea = findComponent(dialog, JTextArea.class);
        assertNotNull(textArea);
        JCheckBox cb = findComponent(dialog, JCheckBox.class);
        assertNotNull(cb);
        runSwing(() -> {
            textArea.setText("This is a test");
            cb.setSelected(false);
        });
        pressButtonByText(dialog, "OK");
        waitForTasks();
        DomainFile df = ((DomainFileNode) node).getDomainFile();
        assertTrue(!df.isCheckedOut());
    }

    @Test
    public void testDeleteVersionCheckedOut() throws Exception {
        // cannot delete a version that is checked out
        setErrorGUIEnabled(true);// expect an error dialog
        // create 3 versions of the program
        doCreateVersions();
        GTreeNode node = getNode(PROGRAM_A);
        selectNode(node);
        DockingActionIf historyAction = getAction("Show History");
        runSwing(() -> historyAction.actionPerformed(getDomainFileActionContext(node)));
        VersionHistoryDialog dialog = waitForDialogComponent(VersionHistoryDialog.class);
        DockingActionIf deleteAction = getDeleteAction(dialog);
        GTable table = findComponent(dialog, GTable.class);
        runSwing(() -> table.selectRow(0));
        performAction(deleteAction, false);
        // cannot delete a file that is checked out
        AbstractErrDialog d = waitForErrorDialog();
        assertEquals("File version has one or more checkouts.", d.getMessage());
        close(d);
    }

    @Test
    public void testDeleteVersionNotFirstOrLast() throws Exception {
        // can delete only the first or last version of the file
        setErrorGUIEnabled(true);// expect an error dialog
        doCreateVersions();
        GTreeNode node = getNode(PROGRAM_A);
        selectNode(node);
        DockingActionIf historyAction = getAction("Show History");
        runSwing(() -> historyAction.actionPerformed(getDomainFileActionContext(node)));
        VersionHistoryDialog dialog = waitForDialogComponent(VersionHistoryDialog.class);
        DockingActionIf deleteAction = getDeleteAction(dialog);
        GTable table = findComponent(dialog, GTable.class);
        // Row 1 is a middle version (neither first nor last).
        runSwing(() -> table.selectRow(1));
        performAction(deleteAction, false);
        // can delete only the first or last version of the file
        AbstractErrDialog d = waitForErrorDialog();
        assertEquals("Only first and last version may be deleted.", d.getMessage());
        close(d);
        close(dialog);
    }

    @Test
    public void testDeleteVersion() throws Exception {
        // Undo the checkout first so the delete is allowed, then delete the
        // first version from the history dialog and confirm the row count drops.
        doCreateVersions();
        GTreeNode node = getNode(PROGRAM_A);
        selectNode(node);
        DockingActionIf undoAction = getAction("UndoCheckOut");
        performAction(undoAction, getDomainFileActionContext(node), true);
        selectNode(node);
        DockingActionIf historyAction = getAction("Show History");
        performAction(historyAction, getDomainFileActionContext(node), true);
        VersionHistoryDialog dialog = waitForDialogComponent(VersionHistoryDialog.class);
        DockingActionIf deleteAction = getDeleteAction(dialog);
        GTable table = findComponent(dialog, GTable.class);
        runSwing(() -> table.selectRow(0));
        int rowCount = table.getRowCount();
        performAction(deleteAction, false);
        OptionDialog confirmDialog = waitForDialogComponent(OptionDialog.class);
        assertNotNull(confirmDialog);
        pressButtonByText(confirmDialog, "Delete");
        waitForTasks();
        assertEquals(rowCount - 1, table.getRowCount());
        close(dialog);
    }

    @Test
    public void testFindCheckoutsInSubFolder() throws Exception {
        // A checkout two folders deep must still be located when "Find
        // Checkouts" is run from the top-level folder.
        DomainFolder rootFolder = frontEnd.getRootFolder();
        DomainFolder folder = rootFolder.createFolder("myFolder_1");
        folder = folder.createFolder("myFolder_2");
        Program p = frontEnd.buildProgram(this);
        folder.createFile("My_Program", p, TaskMonitor.DUMMY);
        p.release(this);
        waitForSwing();
        GTreeNode node = getFolderNode("myFolder_1");
        assertNotNull(node);
        node = node.getChild("myFolder_2");
        assertNotNull(node);
        node = node.getChild("My_Program");
        assertNotNull(node);
        addToVersionControl(node, true);
        selectNode(getFolderNode("myFolder_1"));
        DockingActionIf action = getAction("Find Checkouts");
        performFrontEndAction(action);
        FindCheckoutsDialog dialog = waitForDialogComponent(FindCheckoutsDialog.class);
        assertNotNull(dialog);
        GTable table = findComponent(dialog.getComponent(), GTable.class);
        assertNotNull(table);
        waitForBusyTable(table);
        FindCheckoutsTableModel model = (FindCheckoutsTableModel) table.getModel();
        assertEquals(1, model.getRowCount());
        CheckoutInfo checkoutInfo = model.getRowObject(0);
        DomainFile file = checkoutInfo.getFile();
        DomainFolder parent = file.getParent();
        assertEquals("/myFolder_1/myFolder_2", parent.getPathname());
        pressButtonByText(dialog, "Dismiss");
    }

    @Test
    public void testCheckInFromFindDialog() throws Exception {
        // verify that you get the Check In dialog to popup
        GTreeNode node = getNode(PROGRAM_B);
        addToVersionControl(node, true);
        node = getNode(PROGRAM_C);
        addToVersionControl(node, true);
        DomainFile df = ((DomainFileNode) node).getDomainFile();
        Program p = (Program) df.getDomainObject(this, true, false, TaskMonitor.DUMMY);
        // Make a change so there is something to check in.
        editProgram(p, program -> {
            CodeUnit cu = program.getListing().getCodeUnitAt(program.getMinAddress());
            cu.setComment(CodeUnit.PLATE_COMMENT, "my Plate Comment");
        });
        p.release(this);
        GTreeNode rootNode = frontEnd.getRootNode();
        selectNode(rootNode);
        DockingActionIf action = getAction("Find Checkouts");
        performFrontEndAction(action);
        FindCheckoutsDialog dialog = waitForDialogComponent(FindCheckoutsDialog.class);
        assertNotNull(dialog);
        GTable table = findComponent(dialog.getComponent(), GTable.class);
        assertNotNull(table);
        waitForBusyTable(table);
        FindCheckoutsTableModel model = (FindCheckoutsTableModel) table.getModel();
        assertEquals(2, model.getRowCount());
        selectInTable(table, node);
        DockingActionIf checkInAction = getAction("CheckIn");
        ActionContext context = dialog.getActionContext(null);
        assertTrue(checkInAction.isEnabledForContext(context));
        performAction(checkInAction, context, false);
        VersionControlDialog d = waitForDialogComponent(VersionControlDialog.class);
        pressButtonByText(d, "Cancel");
        pressButtonByText(dialog, "Dismiss");
        waitForTasks();
    }
}
| |
/*
* Copyright (c) 2008-2016 Computer Network Information Center (CNIC), Chinese Academy of Sciences.
*
* This file is part of Duckling project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
/**
*
*/
package net.duckling.vmt.service.rest;
import java.util.List;
import java.util.Map;
import net.duckling.cloudy.common.CommonUtils;
import net.duckling.falcon.api.serialize.JSONMapper;
import net.duckling.vmt.api.IRestGroupService;
import net.duckling.vmt.api.domain.ErrorCode;
import net.duckling.vmt.api.domain.TreeNode;
import net.duckling.vmt.api.domain.VmtGroup;
import net.duckling.vmt.api.domain.VmtUser;
import net.duckling.vmt.api.util.NameRuleUtils;
import net.duckling.vmt.common.adapter.VmtGroup2LdapGroupAdapter;
import net.duckling.vmt.common.adapter.VmtUser2LdapUserAdapter;
import net.duckling.vmt.common.spring.BeanFactory;
import net.duckling.vmt.common.util.LdapUtils;
import net.duckling.vmt.domain.ldap.LdapGroup;
import net.duckling.vmt.domain.ldap.LdapUser;
import net.duckling.vmt.service.IAttributeService;
import net.duckling.vmt.service.ICommonService;
import net.duckling.vmt.service.IGroupService;
import net.duckling.vmt.service.INodeService;
import net.duckling.vmt.service.ISearchService;
import net.duckling.vmt.service.IUserService;
import net.duckling.vmt.service.IVmtIndexService;
import net.duckling.vmt.service.mq.MQMessageSenderExt;
import org.apache.log4j.Logger;
import cn.vlabs.duckling.api.umt.rmi.userv7.UserService;
import cn.vlabs.rest.ServiceException;
import cn.vlabs.rest.server.annotation.RestMethod;
/**
* The Service of the Group Object
* @author lvly
* @since 2013-5-20
*/
/**
 * The Service of the Group Object
 * @author lvly
 * @since 2013-5-20
 */
public class RestGroupServiceImpl implements IRestGroupService{
    private static final Logger LOG=Logger.getLogger(RestGroupServiceImpl.class);
    private IGroupService groupService;
    private IAttributeService attrService;
    private ICommonService commonService;
    private INodeService nodeService;
    private IUserService userService;
    private UserService umtService;
    private ISearchService searchService;
    private IVmtIndexService indexService;
    private MQMessageSenderExt sender;

    /**
     * Wires all collaborating services from the Spring bean factory.
     */
    public RestGroupServiceImpl(){
        groupService=BeanFactory.getBean(IGroupService.class);
        attrService=BeanFactory.getBean(IAttributeService.class);
        commonService=BeanFactory.getBean(ICommonService.class);
        nodeService=BeanFactory.getBean(INodeService.class);
        userService=BeanFactory.getBean(IUserService.class);
        umtService=BeanFactory.getBean(UserService.class);
        searchService=BeanFactory.getBean(ISearchService.class);
        indexService=BeanFactory.getBean(IVmtIndexService.class);
        sender=BeanFactory.getBean(MQMessageSenderExt.class);
    }

    /**
     * Creates a new group after validating creator, name and symbol, then
     * notifies the message queue and synchronously builds the index entry.
     *
     * @param group the group to create; creator, name and symbol are required.
     * @return the DN of the newly created LDAP group.
     * @throws ServiceException if a required field is missing, the symbol is
     *         already used, or name/symbol violate the naming rules.
     */
    @Override
    @RestMethod("create")
    public String create(VmtGroup group) throws ServiceException{
        if(group==null){
            throw new ServiceException(ErrorCode.FILELD_REQUIRED,"can not create null group");
        }
        if(CommonUtils.isNull(group.getCreator())){
            throw new ServiceException(ErrorCode.FILELD_REQUIRED,"must be specify a creator like[10000201]");
        }
        if(CommonUtils.isNull(group.getName())){
            throw new ServiceException(ErrorCode.FILELD_REQUIRED,"the group name required");
        }
        if(CommonUtils.isNull(group.getSymbol())){
            throw new ServiceException(ErrorCode.FILELD_REQUIRED,"the group symbol required");
        }
        if(hasSymbolUsed(group.getSymbol())){
            throw new ServiceException(ErrorCode.SYMBOL_USED, "the symbol ["+group.getSymbol()+"] is used");
        }
        if(!NameRuleUtils.isDDLNameMatch(group.getName())){
            throw new ServiceException(ErrorCode.PATTERN_ERROR,"the name is not match to ddlNameRule");
        }
        if(!NameRuleUtils.isSymbolMatch(group.getSymbol())){
            throw new ServiceException(ErrorCode.PATTERN_ERROR,"the ortsymbol is not match to symbolrule");
        }
        // The creator automatically becomes the only initial administrator.
        group.setAdmins(new String[]{group.getCreator()});
        LdapGroup ldapGroup=VmtGroup2LdapGroupAdapter.convert(group);
        groupService.createGroup(ldapGroup,true);
        LOG.info("API-Group.create:"+JSONMapper.getJSONString(group));
        sender.sendCreateGroupMessage(ldapGroup);
        indexService.buildAIndexSynchronous(ldapGroup.getDn());
        return ldapGroup.getDn();
    }

    /**
     * Clears the telephone attribute on every user in the list; used when a
     * group hides its members' mobile numbers.
     *
     * @param users the users to scrub; may be null or empty.
     */
    protected void setTelephoneNull(List<LdapUser> users){
        // BUG FIX: the original guard was inverted (it only iterated when the
        // list was null/empty), so telephone numbers were never actually
        // hidden — and a null list would have thrown a NullPointerException
        // from the for-each. Iterate only when there are users.
        if(!CommonUtils.isNull(users)){
            for(LdapUser u:users){
                u.setTelephone(null);
            }
        }
    }

    /**
     * Returns the administrators of the given group, hiding mobile numbers
     * when the group requests it.
     *
     * @param groupDn the (encoded) DN of the group.
     * @return the group's administrators.
     * @throws ServiceException if the DN is missing, not a group, or absent.
     */
    @Override
    @RestMethod("getAdmins")
    public List<VmtUser> getAdmins(String groupDn) throws ServiceException {
        String decodeDN =LdapUtils.decode(groupDn);
        checkGroup(decodeDN);
        LdapGroup group=groupService.getGroupByDN(decodeDN);
        // NOTE(review): this passes the still-encoded groupDn while the other
        // service calls in this class use the decoded DN — confirm that
        // searchUsersByUmtId expects the encoded form.
        List<LdapUser> users=userService.searchUsersByUmtId(groupDn, group.getAdmins());
        if(group.isHideMobile()){
            setTelephoneNull(users);
        }
        return VmtUser2LdapUserAdapter.convert(users);
    }

    /**
     * Validates that the decoded DN is non-null, belongs to the group
     * subtree, and exists in the directory.
     *
     * @param decodeDN the decoded DN to check.
     * @throws ServiceException if any of the checks fail.
     */
    private void checkGroup(String decodeDN)throws ServiceException{
        if(CommonUtils.isNull(decodeDN)){
            throw new ServiceException(ErrorCode.DN_NOT_EXISTS,"this dn["+decodeDN+"] is null");
        }
        if(!LdapUtils.isGroupSub(decodeDN)){
            throw new ServiceException(ErrorCode.NOT_A_GROUP,"this dn is not a group");
        }
        if(!commonService.isExist(decodeDN)){
            throw new ServiceException(ErrorCode.DN_NOT_EXISTS,"this dn["+decodeDN+"] is not exists");
        }
    }

    /**
     * Renames a group and propagates the new display name to its subtree.
     *
     * @param groupDN the (encoded) DN of the group.
     * @param newName the new group name.
     * @throws ServiceException if the DN is not a valid existing group.
     */
    @Override
    @RestMethod("rename")
    public void rename(String groupDN,String newName) throws ServiceException{
        String decodeDN=LdapUtils.decode(groupDN);
        checkGroup(decodeDN);
        // Remember the old name so the MQ notification can carry it.
        String beforeName=CommonUtils.first(attrService.get(decodeDN, "vmt-name"));
        attrService.update(decodeDN, "vmt-name", newName);
        sender.sendUpdateMessage(groupDN,beforeName);
        nodeService.updateSonAndSelfDisplayName(decodeDN, newName);
        LOG.info("API-Group.rename:"+groupDN+","+newName);
    }

    /**
     * Deletes a group: notifies listeners, unbinds the LDAP entry and drops
     * its index entry.
     *
     * @param groupDN the (encoded) DN of the group.
     * @throws ServiceException if the DN is not a valid existing group.
     */
    @Override
    @RestMethod("delete")
    public void delete(String groupDN)throws ServiceException{
        String decodeDN=LdapUtils.decode(groupDN);
        // checkGroup already verifies that the decoded DN exists. The original
        // code re-checked existence with the still-encoded DN, which could
        // wrongly reject valid deletes whenever the encoded and decoded forms
        // differ; that redundant check has been removed.
        checkGroup(decodeDN);
        sender.sendUnbindMessage(groupDN);
        commonService.unbind(decodeDN);
        indexService.deleteIndex(decodeDN);
        LOG.info("API-Group.delete:"+groupDN);
    }

    /**
     * Promotes an existing group member (identified by UMT id) to admin.
     * No-op if the user is already an admin.
     *
     * @param groupDN the (encoded) DN of the group.
     * @param umtId the UMT id of the member to promote.
     * @throws ServiceException if the group is invalid or the user is not a member.
     */
    @Override
    @RestMethod("addAdminByUmtId")
    public void addAdminByUmtId(String groupDN,String umtId) throws ServiceException{
        String decodeDN=LdapUtils.decode(groupDN);
        checkGroup(decodeDN);
        if(!userService.isExistsSubTree(decodeDN, umtId,true)){
            throw new ServiceException(ErrorCode.USER_NOT_EXISTS,"user not exits in this group:"+umtId);
        }
        LdapGroup ldapGroup=groupService.getGroupByDN(decodeDN);
        if(!CommonUtils.isEqualsContain(ldapGroup.getAdmins(), umtId)){
            userService.addAdmin(decodeDN, umtId);
            sender.sendUpdateMessage(groupDN);
        }
    }

    /**
     * Promotes a member identified by CSTNET id by first translating it to a
     * UMT id.
     */
    @Override
    @RestMethod("addAdminByCstnetId")
    public void addAdminByCstnetId(String groupDN, String cstnetId) throws ServiceException {
        String[] umtIds=umtService.generateUmtId(new String[]{cstnetId});
        addAdminByUmtId(groupDN, umtIds[0]);
    }

    /**
     * Demotes an admin (identified by UMT id). No-op if the user is not
     * currently an admin.
     */
    @Override
    @RestMethod("removeAdminByUmtId")
    public void removeAdminByUmtId(String groupDN,String umtId) throws ServiceException{
        String decodeDN=LdapUtils.decode(groupDN);
        checkGroup(decodeDN);
        LdapGroup ldapGroup=groupService.getGroupByDN(decodeDN);
        if(CommonUtils.isEqualsContain(ldapGroup.getAdmins(), umtId)){
            userService.removeAdmin(decodeDN, umtId);
            sender.sendUpdateMessage(groupDN);
        }
    }

    /**
     * Demotes an admin identified by CSTNET id by first translating it to a
     * UMT id.
     */
    @Override
    @RestMethod("removeAdminByCstnetId")
    public void removeAdminByCstnetId(String groupDN, String cstnetId) throws ServiceException {
        String[] umtIds=umtService.generateUmtId(new String[]{cstnetId});
        removeAdminByUmtId(groupDN, umtIds[0]);
    }

    /**
     * Returns the groups the given user belongs to.
     */
    @Override
    @RestMethod("getSbGroup")
    public List<VmtGroup> getSbGroup(String umtId) throws ServiceException {
        return VmtGroup2LdapGroupAdapter.convert(groupService.getMyGroups(umtId));
    }

    /**
     * Returns whether the given symbol is already taken by a group.
     */
    @Override
    @RestMethod("hasSymbolUsed")
    public boolean hasSymbolUsed(String symbol) throws ServiceException {
        return groupService.isSymbolUsed(symbol);
    }

    /**
     * Returns the user's third-party groups, resolving the CSTNET id to a
     * UMT id first.
     */
    @Override
    @RestMethod("getMyThirdPartyGroupByCstnetId")
    public List<VmtGroup> getMyThirdPartyGroupByCstnetId(String from,
            String cstnetId) throws ServiceException {
        String umtId=umtService.generateUmtId(new String[]{cstnetId})[0];
        return getMyThirdPartyGroupByUmtId(from,umtId);
    }

    /**
     * Returns the user's third-party groups for a recognized origin.
     *
     * @param from origin system; must be one of the known FROM_* values.
     * @param umtId the user's UMT id; required.
     * @throws ServiceException if the origin is unknown or umtId is missing.
     */
    @Override
    @RestMethod("getMyThirdPartyGroupByUmtId")
    public List<VmtGroup> getMyThirdPartyGroupByUmtId(String from, String umtId)
            throws ServiceException {
        checkFrom(from);
        if(CommonUtils.isNull(umtId)){
            throw new ServiceException(ErrorCode.FILELD_REQUIRED,"umtId required!");
        }
        return VmtGroup2LdapGroupAdapter.convert(groupService.getThirdPartyGroupByUmtId(from,umtId));
    }

    /**
     * Updates the description of the group identified by its symbol.
     *
     * @throws ServiceException if the symbol is missing or no group matches it.
     */
    @Override
    @RestMethod("update")
    public void update(VmtGroup group) throws ServiceException {
        if(CommonUtils.isNull(group.getSymbol())){
            throw new ServiceException(ErrorCode.FILELD_REQUIRED,"symbol required");
        }
        LdapGroup lg=groupService.getGroupBySymbol(group.getSymbol(),null);
        if(lg==null){
            throw new ServiceException(ErrorCode.SYMBOL_NOT_FOUND,"group not found,that symbol is "+group.getSymbol());
        }
        attrService.update(lg.getDn(),"vmt-description", group.getDescription());
        sender.sendUpdateMessage(lg.getDn());
    }

    /**
     * Rejects origin values other than the supported third-party systems.
     */
    private void checkFrom(String from) throws ServiceException{
        if(!VmtGroup.FROM_CORE_MAIL.equals(from)&&
                !VmtGroup.FROM_DDL.equals(from)&&
                !VmtGroup.FROM_DCHAT.equals(from)){
            throw new ServiceException(ErrorCode.FILELD_EXPECT,"unexpected from value["+from+"]");
        }
    }

    /**
     * Returns whether the given name is already taken by a group.
     */
    @Override
    @RestMethod("hasNameUsed")
    public boolean hasNameUsed(String groupName) throws ServiceException {
        return groupService.isNameUsed(groupName);
    }

    /**
     * Looks up a group by its symbol.
     *
     * @throws ServiceException if the symbol is missing.
     */
    @Override
    @RestMethod("getGroupBySymbol")
    public VmtGroup getGroupBySymbol(String symbol) throws ServiceException {
        if(CommonUtils.isNull(symbol)){
            throw new ServiceException(ErrorCode.FILELD_REQUIRED,"the symbol is required");
        }
        return VmtGroup2LdapGroupAdapter.convert(groupService.getGroupBySymbol(symbol,null));
    }

    /**
     * Returns the group's membership as a tree: the group as root with one
     * child node per user, hiding telephones when the group requests it.
     *
     * @return the member tree, or null when the group has no members.
     */
    @Override
    @RestMethod("getMember")
    public TreeNode getMember(String groupDn) throws ServiceException {
        String decodeDN=LdapUtils.decode(groupDn);
        checkGroup(decodeDN);
        LdapGroup group=groupService.getGroupByDN(decodeDN);
        List<?> result=searchService.searchByListLocalData(decodeDN);
        TreeNode node=new TreeNode(VmtGroup2LdapGroupAdapter.convert(group));
        if(CommonUtils.isNull(result)){
            return null;
        }else{
            for(Object obj:result){
                // The local-data search can return mixed entry types; only
                // users become children of the tree.
                if(obj instanceof LdapUser){
                    VmtUser vu=VmtUser2LdapUserAdapter.convert((LdapUser)(obj));
                    if(vu!=null){
                        if(group.isHideMobile()){
                            vu.setTelephone(null);
                        }
                        node.addChildren(new TreeNode(vu));
                    }
                }
            }
        }
        return node;
    }

    /**
     * Searches an attribute's values for the given DN.
     */
    @Override
    @RestMethod("searchUserAttribute")
    public Map<String, String> searchUserAttribute(String dn,
            String attributeName) {
        return this.attrService.search(dn, attributeName);
    }
}
| |
/*
* Copyright (c) 2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.maven.capp.model;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.zip.ZipOutputStream;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.model.Dependency;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.project.MavenProject;
import org.wso2.developerstudio.eclipse.utils.archive.ArchiveManipulator;
import org.wso2.developerstudio.eclipse.utils.file.FileUtils;
import org.wso2.maven.capp.utils.CAppMavenUtils;
import org.wso2.maven.capp.utils.CAppArtifactPriorityMapping;
public class CAppArtifactDependency implements Comparable<CAppArtifactDependency> {

    /** The carbon application (C-App) artifact this dependency resolves to. */
    private CAppArtifact cAppArtifact;

    /** Maven view of this dependency; created lazily by {@link #getMavenDependency()}. */
    private Dependency mavenDependency;

    /** Files backing the artifact; resolved lazily by {@link #getCappArtifactFile()}. */
    private File[] artifactFiles;

    /** Marks placeholder dependencies that carry no real artifact. */
    private boolean dummyDependency = false;

    /**
     * Creates a dependency from an existing Maven dependency by resolving its POM
     * and wrapping the resulting project as a {@link CAppArtifact}.
     *
     * @param mavenDependency the Maven dependency to resolve
     * @param serverRole the server role the artifact is deployed to
     * @throws MojoExecutionException if the dependency POM cannot be resolved
     */
    public CAppArtifactDependency(Dependency mavenDependency, String serverRole) throws MojoExecutionException {
        setMavenDependency(mavenDependency);
        Artifact resolvedArtifactPom = CAppMavenUtils.getResolvedArtifactPom(mavenDependency,
                CAppMavenUtils.getArtifactFactory(), CAppMavenUtils.getRemoteRepositories(),
                CAppMavenUtils.getLocalRepository(), CAppMavenUtils.getResolver());
        MavenProject mavenProject = CAppMavenUtils.getMavenProject(resolvedArtifactPom.getFile());
        setcAppArtifact(new CAppArtifact(mavenProject, serverRole));
    }

    /**
     * Creates a dependency directly from a Maven project.
     *
     * @param project the project providing the artifact
     * @param type the Maven dependency type (e.g. "zip", "xml")
     * @param serverRole the server role the artifact is deployed to
     */
    public CAppArtifactDependency(MavenProject project, String type, String serverRole) {
        setcAppArtifact(new CAppArtifact(project, serverRole));
        getMavenDependency().setType(type);
    }

    public String getDependencyId() {
        return cAppArtifact.getId();
    }

    public String getName() {
        return cAppArtifact.getName();
    }

    public String getVersion() {
        return cAppArtifact.getVersion();
    }

    /**
     * Builds the Maven {@link Dependency} equivalent of the wrapped artifact,
     * using the capp scope so C-App packaging can recognize it.
     */
    private Dependency createMavenDependency() {
        Dependency dependency = new Dependency();
        MavenProject project = cAppArtifact.getProject();
        dependency.setGroupId(project.getGroupId());
        dependency.setArtifactId(project.getArtifactId());
        dependency.setVersion(project.getVersion());
        dependency.setScope(CAppMavenUtils.CAPP_SCOPE_PREFIX);
        return dependency;
    }

    public void setArtifactFiles(File[] artifactFiles) {
        this.artifactFiles = artifactFiles;
    }

    /**
     * Resolves (once) and returns the file(s) backing this artifact.
     * Registry resources ("registry/resource") are extracted and all extracted
     * files are returned; other artifact types yield the resolved Maven artifact
     * file itself. The result is cached in {@link #artifactFiles}.
     *
     * @return the resolved artifact files
     * @throws MojoExecutionException if the Maven artifact cannot be resolved
     * @throws IOException if extraction or system-path generation fails
     */
    public File[] getCappArtifactFile() throws MojoExecutionException, IOException {
        if (artifactFiles == null) {
            File mavenArtifact = resolveArtifact().getFile();
            if (getcAppArtifact().getType().equals("registry/resource")) {
                File tempDirectory = FileUtils.createTempDirectory();
                FileUtils.extract(mavenArtifact, tempDirectory);
                File[] extractedFiles = tempDirectory.listFiles();
                if (extractedFiles == null) {
                    // listFiles() returns null on I/O error; fail loudly instead of NPE-ing.
                    throw new IOException("Unable to list extracted registry resource files in " + tempDirectory);
                }
                // Record the XML descriptor among the extracted files as the artifact file.
                for (File file : extractedFiles) {
                    if (file.isFile() && file.getName().toLowerCase().endsWith(".xml")) {
                        getcAppArtifact().setFile(file.getName());
                    }
                }
                artifactFiles = extractedFiles;
            } else {
                getcAppArtifact().setFile(mavenArtifact.getName());
                artifactFiles = new File[] { mavenArtifact };
            }
        }
        return artifactFiles;
    }

    /**
     * Resolves the Maven artifact for this dependency. When the dependency has an
     * explicit scope, the generated system path (or, failing that, the dependency's
     * own system path) is used; otherwise the capp scope with no system path is used.
     */
    private Artifact resolveArtifact() throws MojoExecutionException, IOException {
        String scope = getMavenDependency().getScope();
        String systemPath = null;
        if (scope == null) {
            scope = CAppMavenUtils.CAPP_SCOPE_PREFIX;
        } else {
            String generatedSystemPath = getArtifactSystemPath();
            systemPath = (generatedSystemPath != null) ? generatedSystemPath
                    : getMavenDependency().getSystemPath();
        }
        return CAppMavenUtils.getResolvedArtifact(getMavenDependency(), CAppMavenUtils.getArtifactFactory(),
                CAppMavenUtils.getRemoteRepositories(), CAppMavenUtils.getLocalRepository(),
                CAppMavenUtils.getResolver(), scope, systemPath);
    }

    /**
     * Derives the on-disk system path of the artifact from the dependency's POM
     * system path. XML artifacts map to the built file under the target directory;
     * ZIP artifacts (registry resources) are assembled into a zip next to the POM.
     *
     * @return the generated system path, or {@code null} when the dependency has no
     *         system path or an unsupported type
     * @throws IOException if copying resources or creating the zip fails
     */
    private String getArtifactSystemPath() throws IOException {
        Dependency artifactDependency = getMavenDependency();
        if (null != artifactDependency.getSystemPath()) {
            // Generate system path for xml based artifacts
            if (CAppMavenUtils.XML_DEPENDENCY_TYPE.equals(artifactDependency.getType())) {
                String artifactName = artifactDependency.getArtifactId().concat(
                        "-" + artifactDependency.getVersion() + "." + CAppMavenUtils.XML_DEPENDENCY_TYPE);
                String pomFileSystemPath = artifactDependency.getSystemPath();
                return pomFileSystemPath.substring(0, pomFileSystemPath.lastIndexOf(CAppMavenUtils.POM_FILE_NAME))
                        .concat(CAppMavenUtils.TARGET_DIR_NAME + File.separator + artifactName);
            } else if (CAppMavenUtils.ZIP_DEPENDENCY_TYPE.equals(artifactDependency.getType())) {
                // Generate system path for registry resources
                String artifactName = artifactDependency.getArtifactId().concat("-" + artifactDependency.getVersion());
                String pomFileSystemPath = artifactDependency.getSystemPath();
                String baseDirPath = pomFileSystemPath.substring(0,
                        pomFileSystemPath.lastIndexOf(CAppMavenUtils.POM_FILE_NAME));
                String resourcesDirPath = baseDirPath.concat(CAppMavenUtils.RESOURCES_DIR_NAME);
                File resourcesDir = new File(resourcesDirPath);
                String registryInfoFilePath = baseDirPath.concat(CAppMavenUtils.REGISTRY_INFO_FILE_NAME);
                File registryInfoFile = new File(registryInfoFilePath);
                String destinationDirPath = baseDirPath.concat(artifactName);
                // Copy contents to destination directory
                FileUtils.copyDirectory(resourcesDir,
                        new File(destinationDirPath.concat(File.separator + CAppMavenUtils.RESOURCES_DIR_NAME)));
                FileUtils.copy(registryInfoFile,
                        new File(destinationDirPath.concat(File.separator + CAppMavenUtils.REGISTRY_INFO_FILE_NAME)));
                // Create zip file in the same directory
                String zipFilePath = destinationDirPath.concat("." + CAppMavenUtils.ZIP_DEPENDENCY_TYPE);
                File zipFile = new File(zipFilePath);
                ArchiveManipulator archiveManipulator = new ArchiveManipulator();
                archiveManipulator.archiveDir(zipFile, new File(destinationDirPath));
                // Delete the temp directory
                File destinationDir = new File(destinationDirPath);
                if (destinationDir.exists()) {
                    FileUtils.deleteDir(destinationDir);
                }
                return zipFilePath;
            }
        }
        return null;
    }

    @Override
    public String toString() {
        return getCaption();
    }

    /** @return a human-readable "name - version" caption for this dependency. */
    public String getCaption() {
        return getName() + " - " + getVersion();
    }

    public String getType() {
        return getMavenDependency().getType();
    }

    private void setcAppArtifact(CAppArtifact cAppArtifact) {
        this.cAppArtifact = cAppArtifact;
    }

    public CAppArtifact getcAppArtifact() {
        return cAppArtifact;
    }

    public String getServerRole() {
        return cAppArtifact.getServerRole();
    }

    /** Lazily creates the Maven dependency representation on first access. */
    public Dependency getMavenDependency() {
        if (mavenDependency == null) {
            mavenDependency = createMavenDependency();
        }
        return mavenDependency;
    }

    public void setMavenDependency(Dependency mavenDependency) {
        this.mavenDependency = mavenDependency;
    }

    public void setDummyDependency(boolean dummyDependency) {
        this.dummyDependency = dummyDependency;
    }

    public boolean isDummyDependency() {
        return dummyDependency;
    }

    /**
     * Orders dependencies by the priority of their artifact type so that
     * higher-priority artifact types are processed first.
     */
    @Override
    public int compareTo(CAppArtifactDependency compareArtifactDependency) {
        // Priorities come from a small fixed mapping, so plain subtraction cannot overflow.
        return CAppArtifactPriorityMapping.getPriority(this.getcAppArtifact().getType())
                - CAppArtifactPriorityMapping.getPriority(compareArtifactDependency.getcAppArtifact().getType());
    }
}
| |
package ftc.evlib.vision.framegrabber;
import android.graphics.Bitmap;
import android.support.annotation.IdRes;
import com.google.common.collect.ImmutableList;
import com.qualcomm.robotcore.util.RobotLog;
import com.vuforia.Frame;
import com.vuforia.HINT;
import com.vuforia.Image;
import com.vuforia.Matrix34F;
import com.vuforia.PIXEL_FORMAT;
import com.vuforia.State;
import com.vuforia.Tool;
import com.vuforia.Vec2F;
import com.vuforia.Vec3F;
import com.vuforia.Vuforia;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;
import org.firstinspires.ftc.robotcore.internal.VuforiaLocalizerImpl;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import ftc.electronvolts.util.BasicResultReceiver;
import ftc.electronvolts.util.ResultReceiver;
import ftc.electronvolts.util.Vector2D;
import ftc.evlib.vision.processors.BeaconName;
import static ftc.evlib.vision.framegrabber.GlobalFrameGrabber.frameGrabber;
/**
* This file was adapted by the electronVolts, FTC team 7393
* source: https://www.dropbox.com/s/lbezww0y8pi1ibg/VuforiaLocalizerImplSubclass.txt?dl=0
* Credit to team 3491 FIX IT
* Date Created: 10/6/16
*
* This initializes Vuforia and feeds the frames to the FrameGrabber
*
* @see GlobalFrameGrabber
* @see RealFrameGrabber
*/
public class VuforiaFrameFeeder extends VuforiaLocalizerImpl {
// Shared beacon-name -> trackable lookup, populated in the main constructor.
// NOTE(review): static and mutable — this assumes a single active feeder
// instance; confirm two feeders are never created concurrently.
public static Map<BeaconName, VuforiaTrackable> beacons = null;
// Set to true once the RealFrameGrabber has been created from the first frame.
private boolean frameGrabberInitialized = false;
// Dimensions the delivered frames are scaled to before reaching the FrameGrabber.
private final int widthRequest, heightRequest;
// Dimensions of the raw camera image as reported by Vuforia (updated per frame).
private int width, height;
/**
* Initialize vuforia
* This method quits immediately, but the ResultReceiver takes a few seconds before it has the result
*
* @param licenseKey the Vuforia PTC licence key from https://developer.vuforia.com/license-manager
* @param cameraMonitorViewIdParent the place in the layout where the camera display is
* @param widthRequest the width to scale the image to for the FrameGrabber
* @param heightRequest the height to scale the image to for the FrameGrabber
* @return the ResultReceiver that will contain the VuforiaFrameFeeder object
*/
public static ResultReceiver<VuforiaFrameFeeder> initInNewThread(final String licenseKey, final @IdRes int cameraMonitorViewIdParent, final int widthRequest, final int heightRequest) {
//create the ResultReceiver that will store the VuforiaFrameFeeder
final ResultReceiver<VuforiaFrameFeeder> receiver = new BasicResultReceiver<>();
//start the init in a new thread
new Thread(new Runnable() {
@Override
public void run() {
receiver.setValue(VuforiaFrameFeeder.init(licenseKey, cameraMonitorViewIdParent, widthRequest, heightRequest));
}
}).start();
//exit immediately
return receiver;
}
/**
* Initialize vuforia
* This method takes a few seconds to complete
*
* @param licenseKey the Vuforia PTC licence key from https://developer.vuforia.com/license-manager
* @param cameraMonitorViewIdParent the place in the layout where the camera display is
* @param widthRequest the width to scale the image to for the FrameGrabber
* @param heightRequest the height to scale the image to for the FrameGrabber
* @return the VuforiaFrameFeeder object
*/
public static VuforiaFrameFeeder init(String licenseKey, @IdRes int cameraMonitorViewIdParent, int widthRequest, int heightRequest) {
//display the camera on the screen
Parameters params = new Parameters(cameraMonitorViewIdParent);
//do not display the camera on the screen
// VuforiaLocalizer.Parameters params = new VuforiaLocalizer.Parameters();
params.cameraDirection = CameraDirection.BACK;
params.vuforiaLicenseKey = licenseKey;
params.cameraMonitorFeedback = Parameters.CameraMonitorFeedback.AXES;
//create a new VuforiaFrameFeeder object (this takes a few seconds)
VuforiaFrameFeeder vuforia = new VuforiaFrameFeeder(params, widthRequest, heightRequest);
//there are 4 beacons, so set the max image targets to 4
Vuforia.setHint(HINT.HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, 4);
return vuforia;
}
// How the camera is mounted on the robot; forwarded to the RealFrameGrabber.
private final FrameGrabber.CameraOrientation cameraOrientation;
// Whether the output frame should skip rotation for on-phone display.
private final boolean ignoreOrientationForDisplay;
public VuforiaFrameFeeder(Parameters params, int widthRequest, int heightRequest) {
this(params, widthRequest, heightRequest, FrameGrabber.CameraOrientation.PORTRAIT_UP, false);
}
public VuforiaFrameFeeder(Parameters params, FrameGrabber.CameraOrientation cameraOrientation, int widthRequest, int heightRequest) {
this(params, widthRequest, heightRequest, cameraOrientation, false);
}
// NOTE(review): this overload declares heightRequest BEFORE widthRequest,
// unlike every other constructor; it forwards them by name so behavior is
// consistent, but confirm callers pass arguments in this declared order.
public VuforiaFrameFeeder(Parameters params, boolean ignoreOrientationForDisplay, int heightRequest, int widthRequest) {
this(params, widthRequest, heightRequest, FrameGrabber.CameraOrientation.PORTRAIT_UP, ignoreOrientationForDisplay);
}
// Frame wrapper that exposes Vuforia's native delete() so image memory can be
// released proactively rather than waiting for the garbage collector.
class CloseableFrame extends Frame {
public CloseableFrame(Frame other) { // clone the frame so we can be useful beyond callback
super(other);
}
public void close() {
super.delete();
}
}
public class VuforiaCallbackSubclass extends VuforiaCallback {
@Override
public synchronized void Vuforia_onUpdate(State state) {
super.Vuforia_onUpdate(state);
// We wish to accomplish two things: (a) get a clone of the Frame so we can use
// it beyond the callback, and (b) get a variant that will allow us to proactively
// reduce memory pressure rather than relying on the garbage collector (which here
// has been observed to interact poorly with the image data which is allocated on a
// non-garbage-collected heap). Note that both of this concerns are independent of
// how the Frame is obtained in the first place.
CloseableFrame closeableFrame = new CloseableFrame(state.getFrame());
RobotLog.vv(TAG, "received Vuforia frame#=%d", closeableFrame.getIndex());
//get the frames from vuforia
long num = closeableFrame.getNumImages();
// Scan the frame's images for the RGB565 variant (the format requested in the
// constructor via Vuforia.setFrameFormat).
for (int i = 0; i < num; i++) {
if (closeableFrame.getImage(i).getFormat() == PIXEL_FORMAT.RGB565) {
Image rgb = closeableFrame.getImage(i);
width = rgb.getWidth();
height = rgb.getHeight();
//if this is the first frame
if (!frameGrabberInitialized) {
//initialize the FrameGrabber
new RealFrameGrabber(TAG, widthRequest, heightRequest, cameraOrientation, ignoreOrientationForDisplay, true);
frameGrabberInitialized = true;
}
//convert the vuforia image into a Bitmap
Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
bitmap.copyPixelsFromBuffer(rgb.getPixels());
//scale the Bitmap to the requested size
// NOTE(review): the arguments appear swapped relative to their names
// (dstWidth=heightRequest, dstHeight=widthRequest) — possibly intentional
// to account for camera rotation; confirm against the FrameGrabber.
Bitmap resized = Bitmap.createScaledBitmap(bitmap, heightRequest, widthRequest, false);
//pass the resized Bitmap to the FrameGrabber
frameGrabber.receiveFrame(resized);
//TODO display resulting image on the screen
}
}
// Release the cloned frame's native memory immediately.
closeableFrame.close();
}
}
// Converts a Vuforia Vec2F to a Vector2D, swapping the component order:
// data[1] becomes x and data[0] becomes y.
public static Vector2D vef2FToVector2D(Vec2F vec2F) {
return new Vector2D(vec2F.getData()[1], vec2F.getData()[0]);
}
/**
* The width of the beacon target images
*/
private static final int BEACON_TARGET_WIDTH = 127 * 2;
/**
* The height of the beacon target images
*/
private static final int BEACON_TARGET_HEIGHT = 92 * 2;
/**
* Projects the four corners of a beacon target image into camera coordinates.
*
* @param rawPose the pose of the beacon image VuforiaTrackable object
* @return the list of 4 Vector2D objects that represent the points of the corners of the beacon image
*/
public List<Vector2D> getImageCorners(Matrix34F rawPose) {
//top left, top right, bottom left, bottom right
List<Vec2F> vec2fList = ImmutableList.of(
Tool.projectPoint(getCameraCalibration(), rawPose, new Vec3F(-BEACON_TARGET_WIDTH / 2, BEACON_TARGET_HEIGHT / 2, 0)), //top left
Tool.projectPoint(getCameraCalibration(), rawPose, new Vec3F(BEACON_TARGET_WIDTH / 2, BEACON_TARGET_HEIGHT / 2, 0)), //top right
Tool.projectPoint(getCameraCalibration(), rawPose, new Vec3F(-BEACON_TARGET_WIDTH / 2, -BEACON_TARGET_HEIGHT / 2, 0)), //bottom left
Tool.projectPoint(getCameraCalibration(), rawPose, new Vec3F(BEACON_TARGET_WIDTH / 2, -BEACON_TARGET_HEIGHT / 2, 0)) //bottom right
);
// Log.i(TAG, "unscaled frame size: " + new Vector2D(width, height));
//
// Log.i(TAG, "unscaled tl: " + vef2FToVector2D(vec2fList.get(0)));
// Log.i(TAG, "unscaled tr: " + vef2FToVector2D(vec2fList.get(1)));
// Log.i(TAG, "unscaled bl: " + vef2FToVector2D(vec2fList.get(2)));
// Log.i(TAG, "unscaled br: " + vef2FToVector2D(vec2fList.get(3)));
//get average width from the top width and bottom width
// double w = ((vec2fList.get(1).getData()[1] - vec2fList.get(0).getData()[1]) + (vec2fList.get(3).getData()[1] - vec2fList.get(2).getData()[1])) / 2;
//same for height
// double h = ((vec2fList.get(2).getData()[0] - vec2fList.get(0).getData()[0]) + (vec2fList.get(3).getData()[0] - vec2fList.get(1).getData()[0])) / 2;
// Log.i(TAG, "beacon picture size: " + new Vector2D(w, h));
//convert the Vec2F list to a Vector2D list and scale it to match the requested frame size
List<Vector2D> corners = new ArrayList<>();
for (Vec2F vec2f : vec2fList) {
corners.add(new Vector2D(
(height - vec2f.getData()[1]) * widthRequest / height,
vec2f.getData()[0] * heightRequest / width
));
}
return corners;
}
/**
* Starts AR with this class's own callback installed, and registers the four
* beacon trackables from the "FTC_2016-17" asset.
*
* @param parameters the vuforia parameters
* @param widthRequest the width to resize the input frame to
* @param heightRequest the height to resize the input frame to
* @param cameraOrientation the orientation of the camera on the robot
* @param ignoreOrientationForDisplay whether or not to rotate the output frame to display on the phone
*/
public VuforiaFrameFeeder(Parameters parameters, int widthRequest, int heightRequest, FrameGrabber.CameraOrientation cameraOrientation, boolean ignoreOrientationForDisplay) {
super(parameters);
this.widthRequest = widthRequest;
this.heightRequest = heightRequest;
this.cameraOrientation = cameraOrientation;
this.ignoreOrientationForDisplay = ignoreOrientationForDisplay;
// Stop the AR session started by the superclass so the callback can be
// replaced, then restart it with our frame-feeding callback installed.
stopAR();
clearGlSurface();
this.vuforiaCallback = new VuforiaCallbackSubclass();
startAR();
// Optional: set the pixel format(s) that you want to have in the callback
Vuforia.setFrameFormat(PIXEL_FORMAT.RGB565, true);
//load the beacon trackables
VuforiaTrackables beaconsVuforiaTrackables = loadTrackablesFromAsset("FTC_2016-17");
//set the names of the beacon trackables
beaconsVuforiaTrackables.get(0).setName("Wheels");
beaconsVuforiaTrackables.get(1).setName("Tools");
beaconsVuforiaTrackables.get(2).setName("Legos");
beaconsVuforiaTrackables.get(3).setName("Gears");
//create a Map to connect BeaconName to beacon VuforiaTrackable
beacons = new HashMap<>();
//add the beacon trackables to the Map
beacons.put(BeaconName.WHEELS, beaconsVuforiaTrackables.get(0));
beacons.put(BeaconName.TOOLS, beaconsVuforiaTrackables.get(1));
beacons.put(BeaconName.LEGOS, beaconsVuforiaTrackables.get(2));
beacons.put(BeaconName.GEARS, beaconsVuforiaTrackables.get(3));
}
// Removes the GL camera-preview views from the parent layout on the UI thread.
public void clearGlSurface() {
if (this.glSurfaceParent != null) {
appUtil.synchronousRunOnUiThread(new Runnable() {
@Override
public void run() {
glSurfaceParent.removeAllViews();
glSurfaceParent.getOverlay().clear();
glSurface = null;
}
});
}
}
}
| |
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import org.eclipse.draw2d.IFigure;
import org.eclipse.draw2d.Shape;
import org.eclipse.draw2d.StackLayout;
import org.eclipse.draw2d.geometry.Dimension;
import org.eclipse.gef.EditPart;
import org.eclipse.gef.EditPolicy;
import org.eclipse.gef.Request;
import org.eclipse.gef.commands.Command;
import org.eclipse.gef.editpolicies.LayoutEditPolicy;
import org.eclipse.gef.editpolicies.NonResizableEditPolicy;
import org.eclipse.gef.requests.CreateRequest;
import org.eclipse.gmf.runtime.diagram.ui.editparts.AbstractBorderItemEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles;
import org.eclipse.gmf.runtime.emf.type.core.IElementType;
import org.eclipse.gmf.runtime.gef.ui.figures.DefaultSizeNodeFigure;
import org.eclipse.gmf.runtime.gef.ui.figures.NodeFigure;
import org.eclipse.gmf.runtime.notation.View;
import org.eclipse.swt.graphics.Color;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractEndpointInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.EastPointerShape;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies.TemplateEndpointInputConnectorItemSemanticEditPolicy;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbElementTypes;
/**
* @generated NOT
*/
public class TemplateEndpointInputConnectorEditPart extends AbstractEndpointInputConnectorEditPart {
/**
* @generated
*/
public static final int VISUAL_ID = 3717;
/**
* @generated
*/
protected IFigure contentPane;
/**
* @generated
*/
protected IFigure primaryShape;
// This edit part always represents an input (east-pointing) connector.
public final boolean isInput = true;
// Cached node figure, captured in createNodeFigure() for external lookup.
public NodeFigure figure_;
public NodeFigure getNodeFigureInput() {
return figure_;
}
/**
* @generated
*/
public TemplateEndpointInputConnectorEditPart(View view) {
super(view);
}
/**
* Installs the standard drag/semantic/layout policies and removes the
* connection-handles role so no connection handles appear on this connector.
*
* @generated NOT
*/
protected void createDefaultEditPolicies() {
super.createDefaultEditPolicies();
installEditPolicy(EditPolicy.PRIMARY_DRAG_ROLE, getPrimaryDragEditPolicy());
installEditPolicy(EditPolicyRoles.SEMANTIC_ROLE, new TemplateEndpointInputConnectorItemSemanticEditPolicy());
installEditPolicy(EditPolicy.LAYOUT_ROLE, createLayoutEditPolicy());
// XXX need an SCR to runtime to have another abstract superclass that would let children add reasonable
// editpolicies
removeEditPolicy(org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles.CONNECTION_HANDLES_ROLE);
}
/**
* Builds a layout policy whose children fall back to a non-resizable policy
* and which allows neither moving nor creating children.
*
* @generated
*/
protected LayoutEditPolicy createLayoutEditPolicy() {
org.eclipse.gmf.runtime.diagram.ui.editpolicies.LayoutEditPolicy lep = new org.eclipse.gmf.runtime.diagram.ui.editpolicies.LayoutEditPolicy() {
protected EditPolicy createChildEditPolicy(EditPart child) {
EditPolicy result = child.getEditPolicy(EditPolicy.PRIMARY_DRAG_ROLE);
if (result == null) {
result = new NonResizableEditPolicy();
}
return result;
}
protected Command getMoveChildrenCommand(Request request) {
return null;
}
protected Command getCreateCommand(CreateRequest request) {
return null;
}
};
return lep;
}
/**
* @generated
*/
protected IFigure createNodeShape() {
return primaryShape = new EastPointerFigure();
}
/**
* @generated
*/
public EastPointerFigure getPrimaryShape() {
return (EastPointerFigure) primaryShape;
}
/**
* @generated
*/
protected NodeFigure createNodePlate() {
DefaultSizeNodeFigure result = new DefaultSizeNodeFigure(12, 10);
// FIXME: workaround for #154536
result.getBounds().setSize(result.getPreferredSize());
return result;
}
/**
* Creates figure for this edit part.
*
* Body of this method does not depend on settings in generation model
* so you may safely remove <i>generated</i> tag and modify it.
*
* @generated NOT
*/
protected NodeFigure createNodeFigure() {
NodeFigure figure = createNodePlate();
figure.setLayoutManager(new StackLayout());
IFigure shape = createNodeShapeForward();
figure.add(shape);
contentPane = setupContentPane(shape);
// Cache the figure so getNodeFigureInput() can return it later.
figure_ = figure;
createNodeShapeReverse();
return figure;
}
/**
* Default implementation treats passed figure as content pane.
* Respects layout one may have set for generated figure.
*
* @param nodeShape instance of generated figure class
* @generated
*/
protected IFigure setupContentPane(IFigure nodeShape) {
return nodeShape; // use nodeShape itself as contentPane
}
/**
* @generated
*/
public IFigure getContentPane() {
if (contentPane != null) {
return contentPane;
}
return super.getContentPane();
}
/**
* @generated
*/
protected void setForegroundColor(Color color) {
if (primaryShape != null) {
primaryShape.setForegroundColor(color);
}
}
/**
* @generated
*/
protected void setBackgroundColor(Color color) {
if (primaryShape != null) {
primaryShape.setBackgroundColor(color);
}
}
/**
* @generated
*/
protected void setLineWidth(int width) {
if (primaryShape instanceof Shape) {
((Shape) primaryShape).setLineWidth(width);
}
}
/**
* @generated
*/
protected void setLineType(int style) {
if (primaryShape instanceof Shape) {
((Shape) primaryShape).setLineStyle(style);
}
}
/**
* Lists the relationship types that may terminate on this connector
* (only the generic ESB link).
*
* @generated
*/
public List<IElementType> getMARelTypesOnTarget() {
ArrayList<IElementType> types = new ArrayList<IElementType>(1);
types.add(EsbElementTypes.EsbLink_4001);
return types;
}
/**
* Lists every output-connector element type that may act as the source of an
* EsbLink targeting this input connector.
*
* @generated
*/
public List<IElementType> getMATypesForSource(IElementType relationshipType) {
LinkedList<IElementType> types = new LinkedList<IElementType>();
if (relationshipType == EsbElementTypes.EsbLink_4001) {
types.add(EsbElementTypes.ProxyOutputConnector_3002);
types.add(EsbElementTypes.PropertyMediatorOutputConnector_3034);
types.add(EsbElementTypes.PropertyGroupMediatorOutputConnector_3790);
types.add(EsbElementTypes.ThrottleMediatorOutputConnector_3122);
types.add(EsbElementTypes.ThrottleMediatorOnAcceptOutputConnector_3581);
types.add(EsbElementTypes.ThrottleMediatorOnRejectOutputConnector_3582);
types.add(EsbElementTypes.FilterMediatorOutputConnector_3534);
types.add(EsbElementTypes.FilterMediatorPassOutputConnector_3011);
types.add(EsbElementTypes.FilterMediatorFailOutputConnector_3012);
types.add(EsbElementTypes.LogMediatorOutputConnector_3019);
types.add(EsbElementTypes.EnrichMediatorOutputConnector_3037);
types.add(EsbElementTypes.XSLTMediatorOutputConnector_3040);
types.add(EsbElementTypes.SwitchCaseBranchOutputConnector_3043);
types.add(EsbElementTypes.SwitchDefaultBranchOutputConnector_3044);
types.add(EsbElementTypes.SwitchMediatorOutputConnector_3499);
types.add(EsbElementTypes.SequenceOutputConnector_3050);
types.add(EsbElementTypes.EventMediatorOutputConnector_3053);
types.add(EsbElementTypes.EntitlementMediatorOutputConnector_3056);
types.add(EsbElementTypes.ClassMediatorOutputConnector_3059);
types.add(EsbElementTypes.SpringMediatorOutputConnector_3062);
types.add(EsbElementTypes.ScriptMediatorOutputConnector_3065);
types.add(EsbElementTypes.FaultMediatorOutputConnector_3068);
types.add(EsbElementTypes.XQueryMediatorOutputConnector_3071);
types.add(EsbElementTypes.CommandMediatorOutputConnector_3074);
types.add(EsbElementTypes.DBLookupMediatorOutputConnector_3077);
types.add(EsbElementTypes.DBReportMediatorOutputConnector_3080);
types.add(EsbElementTypes.SmooksMediatorOutputConnector_3083);
types.add(EsbElementTypes.SendMediatorOutputConnector_3086);
types.add(EsbElementTypes.SendMediatorEndpointOutputConnector_3539);
types.add(EsbElementTypes.HeaderMediatorOutputConnector_3101);
types.add(EsbElementTypes.CloneMediatorOutputConnector_3104);
types.add(EsbElementTypes.CloneMediatorTargetOutputConnector_3133);
types.add(EsbElementTypes.CacheMediatorOutputConnector_3107);
types.add(EsbElementTypes.CacheMediatorOnHitOutputConnector_3618);
types.add(EsbElementTypes.IterateMediatorOutputConnector_3110);
types.add(EsbElementTypes.IterateMediatorTargetOutputConnector_3606);
types.add(EsbElementTypes.CalloutMediatorOutputConnector_3116);
types.add(EsbElementTypes.TransactionMediatorOutputConnector_3119);
types.add(EsbElementTypes.RMSequenceMediatorOutputConnector_3125);
types.add(EsbElementTypes.RuleMediatorOutputConnector_3128);
types.add(EsbElementTypes.RuleMediatorChildMediatorsOutputConnector_3640);
types.add(EsbElementTypes.OAuthMediatorOutputConnector_3131);
types.add(EsbElementTypes.AggregateMediatorOutputConnector_3113);
types.add(EsbElementTypes.AggregateMediatorOnCompleteOutputConnector_3132);
types.add(EsbElementTypes.StoreMediatorOutputConnector_3590);
types.add(EsbElementTypes.BuilderMediatorOutputConector_3593);
types.add(EsbElementTypes.CallTemplateMediatorOutputConnector_3596);
types.add(EsbElementTypes.PayloadFactoryMediatorOutputConnector_3599);
types.add(EsbElementTypes.EnqueueMediatorOutputConnector_3602);
types.add(EsbElementTypes.URLRewriteMediatorOutputConnector_3622);
types.add(EsbElementTypes.ValidateMediatorOutputConnector_3625);
types.add(EsbElementTypes.ValidateMediatorOnFailOutputConnector_3626);
types.add(EsbElementTypes.RouterMediatorOutputConnector_3630);
types.add(EsbElementTypes.RouterMediatorTargetOutputConnector_3631);
types.add(EsbElementTypes.ConditionalRouterMediatorOutputConnector_3637);
types.add(EsbElementTypes.ConditionalRouterMediatorAdditionalOutputConnector_3638);
types.add(EsbElementTypes.BAMMediatorOutputConnector_3682);
types.add(EsbElementTypes.BeanMediatorOutputConnector_3685);
types.add(EsbElementTypes.EJBMediatorOutputConnector_3688);
types.add(EsbElementTypes.DefaultEndPointOutputConnector_3022);
types.add(EsbElementTypes.AddressEndPointOutputConnector_3031);
types.add(EsbElementTypes.FailoverEndPointOutputConnector_3090);
types.add(EsbElementTypes.FailoverEndPointWestOutputConnector_3097);
types.add(EsbElementTypes.RecipientListEndPointOutputConnector_3694);
types.add(EsbElementTypes.RecipientListEndPointWestOutputConnector_3695);
types.add(EsbElementTypes.WSDLEndPointOutputConnector_3093);
types.add(EsbElementTypes.NamedEndpointOutputConnector_3662);
types.add(EsbElementTypes.LoadBalanceEndPointOutputConnector_3096);
types.add(EsbElementTypes.LoadBalanceEndPointWestOutputConnector_3098);
types.add(EsbElementTypes.APIResourceEndpointOutputConnector_3676);
types.add(EsbElementTypes.AddressingEndpointOutputConnector_3691);
types.add(EsbElementTypes.HTTPEndPointOutputConnector_3711);
types.add(EsbElementTypes.TemplateEndpointOutputConnector_3718);
types.add(EsbElementTypes.MessageOutputConnector_3047);
types.add(EsbElementTypes.MergeNodeOutputConnector_3016);
types.add(EsbElementTypes.SequencesOutputConnector_3617);
types.add(EsbElementTypes.DefaultEndPointOutputConnector_3645);
types.add(EsbElementTypes.AddressEndPointOutputConnector_3648);
types.add(EsbElementTypes.FailoverEndPointOutputConnector_3651);
types.add(EsbElementTypes.FailoverEndPointWestOutputConnector_3652);
types.add(EsbElementTypes.RecipientListEndPointOutputConnector_3698);
types.add(EsbElementTypes.RecipientListEndPointWestOutputConnector_3699);
types.add(EsbElementTypes.WSDLEndPointOutputConnector_3655);
types.add(EsbElementTypes.LoadBalanceEndPointOutputConnector_3658);
types.add(EsbElementTypes.LoadBalanceEndPointWestOutputConnector_3659);
types.add(EsbElementTypes.HTTPEndPointOutputConnector_3714);
types.add(EsbElementTypes.APIResourceOutputConnector_3671);
types.add(EsbElementTypes.ComplexEndpointsOutputConnector_3679);
types.add(EsbElementTypes.JsonTransformMediatorOutputConnector_3793);
}
return types;
}
/**
* East-pointing arrow figure (12x10 DP) drawn for this input connector.
*
* @generated
*/
public class EastPointerFigure extends EastPointerShape {
/**
* @generated
*/
public EastPointerFigure() {
this.setBackgroundColor(THIS_BACK);
this.setPreferredSize(new Dimension(getMapMode().DPtoLP(12), getMapMode().DPtoLP(10)));
}
}
/**
* Background color of the connector figure (dark gray).
*
* @generated
*/
static final Color THIS_BACK = new Color(null, 50, 50, 50);
}
| |
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.datastore.snippets;
import static java.util.Calendar.DECEMBER;
import static java.util.Calendar.JANUARY;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import com.google.cloud.datastore.Cursor;
import com.google.cloud.datastore.Datastore;
import com.google.cloud.datastore.DatastoreException;
import com.google.cloud.datastore.DateTime;
import com.google.cloud.datastore.Entity;
import com.google.cloud.datastore.EntityQuery;
import com.google.cloud.datastore.FullEntity;
import com.google.cloud.datastore.IncompleteKey;
import com.google.cloud.datastore.Key;
import com.google.cloud.datastore.KeyFactory;
import com.google.cloud.datastore.PathElement;
import com.google.cloud.datastore.ProjectionEntity;
import com.google.cloud.datastore.Query;
import com.google.cloud.datastore.Query.ResultType;
import com.google.cloud.datastore.QueryResults;
import com.google.cloud.datastore.ReadOption;
import com.google.cloud.datastore.StringValue;
import com.google.cloud.datastore.StructuredQuery;
import com.google.cloud.datastore.StructuredQuery.CompositeFilter;
import com.google.cloud.datastore.StructuredQuery.OrderBy;
import com.google.cloud.datastore.StructuredQuery.PropertyFilter;
import com.google.cloud.datastore.Transaction;
import com.google.cloud.datastore.testing.LocalDatastoreHelper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterators;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
/**
* Contains Cloud Datastore snippets demonstrating concepts for documentation.
*/
@RunWith(JUnit4.class)
public class ConceptsTest {
private static final LocalDatastoreHelper HELPER = LocalDatastoreHelper.create(1.0);
private static final FullEntity<IncompleteKey> TEST_FULL_ENTITY = FullEntity.builder().build();
private Datastore datastore;
private KeyFactory keyFactory;
private Key taskKey;
private Entity testEntity;
private DateTime startDate;
private DateTime endDate;
private DateTime includedDate;
@Rule
public ExpectedException thrown = ExpectedException.none();
/**
* Starts the local Datastore emulator.
*
* @throws IOException if there are errors starting the local Datastore
* @throws InterruptedException if there are errors starting the local Datastore
*/
@BeforeClass
public static void beforeClass() throws IOException, InterruptedException {
HELPER.start();
}
/**
 * Initializes Datastore and cleans out any residual values. Also initializes global variables
 * used for testing.
 */
@Before
public void setUp() {
// Use a dedicated namespace so testNamespaceRunQuery can assert on it ("ghijklmnop").
datastore = HELPER.options().toBuilder().namespace("ghijklmnop").build().service();
// Remove every entity left over from a previous test: fetch all keys, then batch-delete.
StructuredQuery<Key> query = Query.keyQueryBuilder().build();
QueryResults<Key> result = datastore.run(query);
datastore.delete(Iterators.toArray(result, Key.class));
keyFactory = datastore.newKeyFactory().kind("Task");
taskKey = keyFactory.newKey("some-arbitrary-key");
testEntity = Entity.builder(taskKey, TEST_FULL_ENTITY).build();
// Dates used by the range-query tests: includedDate (1999-12-31) falls strictly
// between startDate (1990-01-01) and endDate (2000-01-01).
Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
calendar.set(1990, JANUARY, 1);
startDate = DateTime.copyFrom(calendar);
calendar.set(2000, JANUARY, 1);
endDate = DateTime.copyFrom(calendar);
calendar.set(1999, DECEMBER, 31);
includedDate = DateTime.copyFrom(calendar);
}
/**
* Stops the local Datastore emulator.
*
* @throws IOException if there are errors stopping the local Datastore
* @throws InterruptedException if there are errors stopping the local Datastore
*/
@AfterClass
public static void afterClass() throws IOException, InterruptedException {
HELPER.stop();
}
/** Asserts that {@code taskKey} is usable by putting a minimal entity under it. */
private void assertValidKey(Key taskKey) {
datastore.put(Entity.builder(taskKey, TEST_FULL_ENTITY).build());
}
@Test
public void testIncompleteKey() {
// [START incomplete_key]
KeyFactory keyFactory = datastore.newKeyFactory().kind("Task");
Key taskKey = datastore.allocateId(keyFactory.newKey());
// [END incomplete_key]
assertValidKey(taskKey);
}
@Test
public void testNamedKey() {
// [START named_key]
Key taskKey = datastore.newKeyFactory().kind("Task").newKey("sampleTask");
// [END named_key]
assertValidKey(taskKey);
}
@Test
public void testKeyWithParent() {
// [START key_with_parent]
Key taskKey = datastore.newKeyFactory()
.ancestors(PathElement.of("TaskList", "default"))
.kind("Task")
.newKey("sampleTask");
// [END key_with_parent]
assertValidKey(taskKey);
}
@Test
public void testKeyWithMultilevelParent() {
// [START key_with_multilevel_parent]
KeyFactory keyFactory = datastore.newKeyFactory()
.ancestors(PathElement.of("User", "Alice"), PathElement.of("TaskList", "default"))
.kind("Task");
Key taskKey = keyFactory.newKey("sampleTask");
// [END key_with_multilevel_parent]
assertValidKey(taskKey);
}
/** Asserts that {@code original} round-trips through Datastore unchanged. */
private void assertValidEntity(Entity original) {
datastore.put(original);
assertEquals(original, datastore.get(original.key()));
}
@Test
public void testEntityWithParent() {
// [START entity_with_parent]
Key taskKey = datastore.newKeyFactory()
.ancestors(PathElement.of("TaskList", "default"))
.kind("Task")
.newKey("sampleTask");
Entity task = Entity.builder(taskKey)
.set("category", "Personal")
.set("done", false)
.set("priority", 4)
.set("description", "Learn Cloud Datastore")
.build();
// [END entity_with_parent]
assertValidEntity(task);
}
@Test
public void testProperties() {
// [START properties]
Entity task = Entity.builder(taskKey)
.set("category", "Personal")
.set("created", DateTime.now())
.set("done", false)
.set("priority", 4)
.set("percent_complete", 10.0)
.set("description",
StringValue.builder("Learn Cloud Datastore").excludeFromIndexes(true).build())
.build();
// [END properties]
assertValidEntity(task);
}
@Test
public void testArrayValue() {
// [START array_value]
Entity task = Entity.builder(taskKey)
.set("tags", "fun", "programming")
.set("collaborators", "alice", "bob")
.build();
// [END array_value]
assertValidEntity(task);
}
@Test
public void testBasicEntity() {
// [START basic_entity]
Entity task = Entity.builder(taskKey)
.set("category", "Personal")
.set("done", false)
.set("priority", 4)
.set("description", "Learn Cloud Datastore")
.build();
// [END basic_entity]
assertValidEntity(task);
}
@Test
public void testUpsert() {
// [START upsert]
Entity task = Entity.builder(keyFactory.newKey("sampleTask")).build();
datastore.put(task);
// [END upsert]
assertEquals(task, datastore.get(task.key()));
}
@Test
public void testInsert() {
// [START insert]
Key taskKey = datastore.add(FullEntity.builder(keyFactory.newKey()).build()).key();
// [END insert]
assertEquals(FullEntity.builder(taskKey).build(), datastore.get(taskKey));
}
@Test
public void testLookup() {
datastore.put(testEntity);
// [START lookup]
Entity task = datastore.get(taskKey);
// [END lookup]
assertEquals(testEntity, task);
}
@Test
public void testUpdate() {
datastore.put(testEntity);
// [START update]
Entity task = Entity.builder(datastore.get(taskKey)).set("priority", 5).build();
datastore.update(task);
// [END update]
assertEquals(task, datastore.get(taskKey));
}
@Test
public void testDelete() {
datastore.put(testEntity);
// [START delete]
datastore.delete(taskKey);
// [END delete]
assertNull(datastore.get(taskKey));
}
/**
 * Seeds the datastore with two "Task" entities stored under the given keys and returns
 * them in the same order as the keys.
 */
private List<Entity> setUpBatchTests(Key taskKey1, Key taskKey2) {
Entity learnTask = Entity.builder(taskKey1)
.set("category", "Personal").set("done", false).set("priority", 4)
.set("description", "Learn Cloud Datastore")
.build();
Entity integrateTask = Entity.builder(taskKey2)
.set("category", "Personal").set("done", false).set("priority", 5)
.set("description", "Integrate Cloud Datastore")
.build();
datastore.put(learnTask, integrateTask);
return ImmutableList.of(learnTask, integrateTask);
}
/** Demonstrates adding several entities in one call; both builders use the same FullEntity API. */
@Test
public void testBatchUpsert() {
// [START batch_upsert]
FullEntity<IncompleteKey> task1 = FullEntity.builder(keyFactory.newKey())
.set("category", "Personal")
.set("done", false)
.set("priority", 4)
.set("description", "Learn Cloud Datastore")
.build();
// Use FullEntity.builder here as well (task2 previously used Entity.builder):
// keyFactory.newKey() yields an IncompleteKey, and FullEntity is the builder intended
// for entities without a complete key, matching the declared FullEntity<IncompleteKey>.
FullEntity<IncompleteKey> task2 = FullEntity.builder(keyFactory.newKey())
.set("category", "Personal")
.set("done", false)
.set("priority", 5)
.set("description", "Integrate Cloud Datastore")
.build();
List<Entity> tasks = datastore.add(task1, task2);
Key taskKey1 = tasks.get(0).key();
Key taskKey2 = tasks.get(1).key();
// [END batch_upsert]
assertEquals(Entity.builder(taskKey1, task1).build(), datastore.get(taskKey1));
assertEquals(Entity.builder(taskKey2, task2).build(), datastore.get(taskKey2));
}
@Test
public void testBatchLookup() {
Key taskKey1 = keyFactory.newKey(1);
Key taskKey2 = keyFactory.newKey(2);
List<Entity> expectedTasks = setUpBatchTests(taskKey1, taskKey2);
// [START batch_lookup]
Iterator<Entity> tasks = datastore.get(taskKey1, taskKey2);
// [END batch_lookup]
assertEquals(expectedTasks.get(0), tasks.next());
assertEquals(expectedTasks.get(1), tasks.next());
}
@Test
public void testBatchDelete() {
Key taskKey1 = keyFactory.newKey(1);
Key taskKey2 = keyFactory.newKey(2);
setUpBatchTests(taskKey1, taskKey2);
// [START batch_delete]
datastore.delete(taskKey1, taskKey2);
// [END batch_delete]
assertNull(datastore.get(taskKey1));
assertNull(datastore.get(taskKey2));
}
/** Seeds one fully-populated "Task" entity (under ancestor TaskList/default) for the query tests. */
private void setUpQueryTests() {
Key taskKey = datastore.newKeyFactory()
.kind("Task")
.ancestors(PathElement.of("TaskList", "default"))
.newKey("someTask");
datastore.put(Entity.builder(taskKey)
.set("category", "Personal")
.set("done", false)
.set("completed", false)
.set("priority", 4)
// includedDate lies inside the (startDate, endDate) window used by the range-query tests.
.set("created", includedDate)
.set("percent_complete", 10.0)
// Excluded from indexes, so testUnindexedPropertyQuery finds nothing when filtering on it.
.set("description",
StringValue.builder("Learn Cloud Datastore").excludeFromIndexes(true).build())
// NOTE(review): "l" looks like a possible typo for "learn" — confirm before relying on tag values.
.set("tag", "fun", "l", "programming")
.build());
}
/** Runs {@code query} and asserts it yields exactly one result, which is returned. */
private <V> V assertValidQuery(Query<V> query) {
QueryResults<V> results = datastore.run(query);
V result = results.next();
assertFalse(results.hasNext());
return result;
}
/** Asserts that running {@code query} fails with a {@link DatastoreException}. */
private <V> void assertInvalidQuery(Query<V> query) {
thrown.expect(DatastoreException.class);
datastore.run(query);
}
@Test
public void testBasicQuery() {
setUpQueryTests();
// [START basic_query]
Query<Entity> query = Query.entityQueryBuilder()
.kind("Task")
.filter(CompositeFilter.and(
PropertyFilter.eq("done", false), PropertyFilter.ge("priority", 4)))
.orderBy(OrderBy.desc("priority"))
.build();
// [END basic_query]
assertValidQuery(query);
}
@Test
public void testRunQuery() {
setUpQueryTests();
Query<Entity> query = Query.entityQueryBuilder().kind("Task").build();
// [START run_query]
QueryResults<Entity> tasks = datastore.run(query);
// [END run_query]
assertNotNull(tasks.next());
assertFalse(tasks.hasNext());
}
@Test
public void testPropertyFilter() {
setUpQueryTests();
// [START property_filter]
Query<Entity> query =
Query.entityQueryBuilder().kind("Task").filter(PropertyFilter.eq("done", false)).build();
// [END property_filter]
assertValidQuery(query);
}
@Test
public void testCompositeFilter() {
setUpQueryTests();
// [START composite_filter]
Query<Entity> query = Query.entityQueryBuilder()
.kind("Task")
.filter(
CompositeFilter.and(PropertyFilter.eq("done", false), PropertyFilter.eq("priority", 4)))
.build();
// [END composite_filter]
assertValidQuery(query);
}
@Test
public void testKeyFilter() {
setUpQueryTests();
// [START key_filter]
Query<Entity> query = Query.entityQueryBuilder()
.kind("Task")
.filter(PropertyFilter.gt("__key__", keyFactory.newKey("someTask")))
.build();
// [END key_filter]
assertValidQuery(query);
}
@Test
public void testAscendingSort() {
setUpQueryTests();
// [START ascending_sort]
Query<Entity> query =
Query.entityQueryBuilder().kind("Task").orderBy(OrderBy.asc("created")).build();
// [END ascending_sort]
assertValidQuery(query);
}
@Test
public void testDescendingSort() {
setUpQueryTests();
// [START descending_sort]
Query<Entity> query =
Query.entityQueryBuilder().kind("Task").orderBy(OrderBy.desc("created")).build();
// [END descending_sort]
assertValidQuery(query);
}
@Test
public void testMultiSort() {
setUpQueryTests();
// [START multi_sort]
Query<Entity> query = Query.entityQueryBuilder()
.kind("Task")
.orderBy(OrderBy.desc("priority"), OrderBy.asc("created"))
.build();
// [END multi_sort]
assertValidQuery(query);
}
@Test
public void testKindlessQuery() {
Key lastSeenKey = keyFactory.newKey("a");
setUpQueryTests();
// [START kindless_query]
Query<Entity> query =
Query.entityQueryBuilder().filter(PropertyFilter.gt("__key__", lastSeenKey)).build();
// [END kindless_query]
assertValidQuery(query);
}
@Test
public void testAncestorQuery() {
setUpQueryTests();
// [START ancestor_query]
Query<Entity> query = Query.entityQueryBuilder()
.kind("Task")
.filter(PropertyFilter.hasAncestor(
datastore.newKeyFactory().kind("TaskList").newKey("default")))
.build();
// [END ancestor_query]
assertValidQuery(query);
}
@Test
public void testProjectionQuery() {
setUpQueryTests();
// [START projection_query]
Query<ProjectionEntity> query = Query.projectionEntityQueryBuilder()
.kind("Task")
.projection("priority", "percent_complete")
.build();
// [END projection_query]
assertValidQuery(query);
}
@Test
public void testRunProjectionQuery() {
setUpQueryTests();
Query<ProjectionEntity> query = Query.projectionEntityQueryBuilder()
.kind("Task")
.projection("priority", "percent_complete")
.build();
// [START run_query_projection]
List<Long> priorities = new LinkedList<>();
List<Double> percentCompletes = new LinkedList<>();
QueryResults<ProjectionEntity> tasks = datastore.run(query);
while (tasks.hasNext()) {
ProjectionEntity task = tasks.next();
priorities.add(task.getLong("priority"));
percentCompletes.add(task.getDouble("percent_complete"));
}
// [END run_query_projection]
assertEquals(ImmutableList.of(4L), priorities);
assertEquals(ImmutableList.of(10.0), percentCompletes);
}
@Test
public void testKeysOnlyQuery() {
setUpQueryTests();
// [START keys_only_query]
Query<Key> query = Query.keyQueryBuilder().kind("Task").build();
// [END keys_only_query]
assertValidQuery(query);
}
@Test
public void testRunKeysOnlyQuery() {
setUpQueryTests();
Query<Key> query = Query.keyQueryBuilder().kind("Task").build();
// [START run_keys_only_query]
QueryResults<Key> taskKeys = datastore.run(query);
// [END run_keys_only_query]
assertNotNull(taskKeys.next());
assertFalse(taskKeys.hasNext());
}
@Test
public void testDistinctQuery() {
setUpQueryTests();
// [START distinct_query]
Query<ProjectionEntity> query = Query.projectionEntityQueryBuilder()
.kind("Task")
.projection("category", "priority")
.distinctOn("category", "priority")
.orderBy(OrderBy.asc("category"), OrderBy.asc("priority"))
.build();
// [END distinct_query]
assertValidQuery(query);
}
@Test
public void testDistinctOnQuery() {
setUpQueryTests();
// [START distinct_on_query]
Query<ProjectionEntity> query = Query.projectionEntityQueryBuilder()
.kind("Task")
.projection("category", "priority")
.distinctOn("category")
.orderBy(OrderBy.asc("category"), OrderBy.asc("priority"))
.build();
// [END distinct_on_query]
assertValidQuery(query);
}
@Test
public void testArrayValueInequalityRange() {
setUpQueryTests();
// [START array_value_inequality_range]
Query<Entity> query = Query.entityQueryBuilder()
.kind("Task")
.filter(CompositeFilter.and(
PropertyFilter.gt("tag", "learn"), PropertyFilter.lt("tag", "math")))
.build();
// [END array_value_inequality_range]
QueryResults<Entity> results = datastore.run(query);
assertFalse(results.hasNext());
}
@Test
public void testArrayValueEquality() {
setUpQueryTests();
// [START array_value_equality]
Query<Entity> query = Query.entityQueryBuilder()
.kind("Task")
.filter(CompositeFilter.and(
PropertyFilter.eq("tag", "fun"), PropertyFilter.eq("tag", "programming")))
.build();
// [END array_value_equality]
assertValidQuery(query);
}
@Test
public void testInequalityRange() {
setUpQueryTests();
// [START inequality_range]
Query<Entity> query = Query.entityQueryBuilder()
.kind("Task")
.filter(CompositeFilter.and(
PropertyFilter.gt("created", startDate), PropertyFilter.lt("created", endDate)))
.build();
// [END inequality_range]
assertValidQuery(query);
}
@Test
public void testInequalityInvalid() {
// [START inequality_invalid]
Query<Entity> query = Query.entityQueryBuilder()
.kind("Task")
.filter(CompositeFilter.and(
PropertyFilter.gt("created", startDate), PropertyFilter.gt("priority", 3)))
.build();
// [END inequality_invalid]
assertInvalidQuery(query);
}
@Test
public void testEqualAndInequalityRange() {
setUpQueryTests();
// [START equal_and_inequality_range]
Query<Entity> query = Query.entityQueryBuilder()
.kind("Task")
.filter(CompositeFilter.and(PropertyFilter.eq("priority", 4),
PropertyFilter.gt("created", startDate), PropertyFilter.lt("created", endDate)))
.build();
// [END equal_and_inequality_range]
assertValidQuery(query);
}
@Test
public void testInequalitySort() {
setUpQueryTests();
// [START inequality_sort]
Query<Entity> query = Query.entityQueryBuilder()
.kind("Task")
.filter(PropertyFilter.gt("priority", 3))
.orderBy(OrderBy.asc("priority"), OrderBy.asc("created"))
.build();
// [END inequality_sort]
assertValidQuery(query);
}
@Test
public void testInequalitySortInvalidNotSame() {
// [START inequality_sort_invalid_not_same]
Query<Entity> query = Query.entityQueryBuilder()
.kind("Task")
.filter(PropertyFilter.gt("priority", 3))
.orderBy(OrderBy.asc("created"))
.build();
// [END inequality_sort_invalid_not_same]
assertInvalidQuery(query);
}
@Test
public void testInequalitySortInvalidNotFirst() {
// [START inequality_sort_invalid_not_first]
Query<Entity> query = Query.entityQueryBuilder()
.kind("Task")
.filter(PropertyFilter.gt("priority", 3))
.orderBy(OrderBy.asc("created"), OrderBy.asc("priority"))
.build();
// [END inequality_sort_invalid_not_first]
assertInvalidQuery(query);
}
@Test
public void testLimit() {
setUpQueryTests();
// [START limit]
Query<Entity> query = Query.entityQueryBuilder().kind("Task").limit(5).build();
// [END limit]
assertValidQuery(query);
}
@Test
public void testCursorPaging() {
setUpQueryTests();
datastore.put(testEntity);
Cursor nextPageCursor = cursorPaging(1, null);
assertNotNull(nextPageCursor);
nextPageCursor = cursorPaging(1, nextPageCursor);
assertNotNull(nextPageCursor);
}
/**
 * Fetches one page of "Task" entities of size {@code pageSize}, starting at
 * {@code pageCursor} (or from the beginning when null), and returns the cursor
 * positioned after the last consumed result.
 */
private Cursor cursorPaging(int pageSize, Cursor pageCursor) {
// [START cursor_paging]
EntityQuery.Builder queryBuilder = Query.entityQueryBuilder().kind("Task").limit(pageSize);
if (pageCursor != null) {
queryBuilder.startCursor(pageCursor);
}
QueryResults<Entity> tasks = datastore.run(queryBuilder.build());
while (tasks.hasNext()) {
Entity task = tasks.next();
// do something with the task
}
Cursor nextPageCursor = tasks.cursorAfter();
// [END cursor_paging]
return nextPageCursor;
}
@Test
public void testEventualConsistentQuery() {
setUpQueryTests();
// [START eventual_consistent_query]
Query<Entity> query = Query.entityQueryBuilder()
.kind("Task")
.filter(PropertyFilter.hasAncestor(
datastore.newKeyFactory().kind("TaskList").newKey("default")))
.build();
datastore.run(query, ReadOption.eventualConsistency());
// [END eventual_consistent_query]
assertValidQuery(query);
}
@Test
public void testUnindexedPropertyQuery() {
setUpQueryTests();
// [START unindexed_property_query]
Query<Entity> query = Query.entityQueryBuilder()
.kind("Task")
.filter(PropertyFilter.eq("description", "A task description"))
.build();
// [END unindexed_property_query]
QueryResults<Entity> results = datastore.run(query);
assertFalse(results.hasNext());
}
@Test
public void testExplodingProperties() {
// [START exploding_properties]
Entity task = Entity.builder(taskKey)
.set("tags", "fun", "programming", "learn")
.set("collaborators", "alice", "bob", "charlie")
.set("created", DateTime.now())
.build();
// [END exploding_properties]
assertValidEntity(task);
}
private List<Key> setUpTransferTests() {
KeyFactory keyFactory = datastore.newKeyFactory().kind("People");
Key from = keyFactory.newKey("from");
Key to = keyFactory.newKey("to");
datastore.put(Entity.builder(from).set("balance", 100).build());
datastore.put(Entity.builder(to).set("balance", 0).build());
return ImmutableList.of(from, to);
}
private void assertSuccessfulTransfer(Key from, Key to) {
assertEquals(90, datastore.get(from).getLong("balance"));
assertEquals(10, datastore.get(to).getLong("balance"));
}
@Test
public void testTransactionalUpdate() {
List<Key> keys = setUpTransferTests();
transferFunds(keys.get(0), keys.get(1), 10);
assertSuccessfulTransfer(keys.get(0), keys.get(1));
}
/**
 * Atomically moves {@code amount} from {@code fromKey}'s "balance" to {@code toKey}'s
 * "balance" inside a single transaction, rolling back if the commit was not reached.
 */
// [START transactional_update]
void transferFunds(Key fromKey, Key toKey, long amount) {
Transaction txn = datastore.newTransaction();
try {
List<Entity> entities = txn.fetch(fromKey, toKey);
Entity from = entities.get(0);
Entity updatedFrom =
Entity.builder(from).set("balance", from.getLong("balance") - amount).build();
Entity to = entities.get(1);
Entity updatedTo = Entity.builder(to).set("balance", to.getLong("balance") + amount).build();
txn.put(updatedFrom, updatedTo);
txn.commit();
} finally {
// active() is still true when an exception prevented commit() from running.
if (txn.active()) {
txn.rollback();
}
}
}
// [END transactional_update]
/** Demonstrates retrying a transactional update up to five times before giving up. */
@Test
public void testTransactionalRetry() {
List<Key> keys = setUpTransferTests();
Key fromKey = keys.get(0);
Key toKey = keys.get(1);
// [START transactional_retry]
int retries = 5;
while (true) {
try {
transferFunds(fromKey, toKey, 10);
break;
} catch (DatastoreException e) {
// Out of retries: surface the last failure to the caller.
if (retries == 0) {
throw e;
}
--retries;
}
}
// Retry handling can also be configured and automatically applied using google-cloud-java.
// [END transactional_retry]
assertSuccessfulTransfer(keys.get(0), keys.get(1));
}
@Test
public void testTransactionalGetOrCreate() {
// [START transactional_get_or_create]
Entity task;
Transaction txn = datastore.newTransaction();
try {
task = txn.get(taskKey);
if (task == null) {
task = Entity.builder(taskKey).build();
txn.put(task);
txn.commit();
}
} finally {
if (txn.active()) {
txn.rollback();
}
}
// [END transactional_get_or_create]
assertEquals(task, datastore.get(taskKey));
}
@Test
public void testTransactionalSingleEntityGroupReadOnly() {
setUpQueryTests();
Key taskListKey = datastore.newKeyFactory().kind("TaskList").newKey("default");
Entity taskListEntity = Entity.builder(taskListKey).build();
datastore.put(taskListEntity);
// [START transactional_single_entity_group_read_only]
Entity taskList;
QueryResults<Entity> tasks;
Transaction txn = datastore.newTransaction();
try {
taskList = txn.get(taskListKey);
Query<Entity> query = Query.entityQueryBuilder()
.kind("Task")
.filter(PropertyFilter.hasAncestor(taskListKey))
.build();
tasks = txn.run(query);
txn.commit();
} finally {
if (txn.active()) {
txn.rollback();
}
}
// [END transactional_single_entity_group_read_only]
assertEquals(taskListEntity, taskList);
assertNotNull(tasks.next());
assertFalse(tasks.hasNext());
}
/** Queries the __namespace__ metadata kind for namespaces in the key range ["g", "h"). */
@Test
public void testNamespaceRunQuery() {
setUpQueryTests();
// [START namespace_run_query]
KeyFactory keyFactory = datastore.newKeyFactory().kind("__namespace__");
Key startNamespace = keyFactory.newKey("g");
Key endNamespace = keyFactory.newKey("h");
Query<Key> query = Query.keyQueryBuilder()
.kind("__namespace__")
.filter(CompositeFilter.and(
PropertyFilter.gt("__key__", startNamespace),
PropertyFilter.lt("__key__", endNamespace)))
.build();
List<String> namespaces = new ArrayList<>();
QueryResults<Key> results = datastore.run(query);
while (results.hasNext()) {
namespaces.add(results.next().name());
}
// [END namespace_run_query]
// "ghijklmnop" (set in setUp) is the only namespace inside the queried range.
assertEquals(ImmutableList.of("ghijklmnop"), namespaces);
}
@Test
public void testKindRunQuery() {
setUpQueryTests();
// [START kind_run_query]
Query<Key> query = Query.keyQueryBuilder().kind("__kind__").build();
List<String> kinds = new ArrayList<>();
QueryResults<Key> results = datastore.run(query);
while (results.hasNext()) {
kinds.add(results.next().name());
}
// [END kind_run_query]
assertEquals(ImmutableList.of("Task"), kinds);
}
/** Queries the __property__ metadata kind and groups property names by their parent kind. */
@Test
public void testPropertyRunQuery() {
setUpQueryTests();
// [START property_run_query]
Query<Key> query = Query.keyQueryBuilder().kind("__property__").build();
QueryResults<Key> keys = datastore.run(query);
Map<String, Collection<String>> propertiesByKind = new HashMap<>();
while (keys.hasNext()) {
Key key = keys.next();
String kind = key.parent().name();
String propertyName = key.name();
Collection<String> properties = propertiesByKind.get(kind);
if (properties == null) {
properties = new HashSet<>();
propertiesByKind.put(kind, properties);
}
properties.add(propertyName);
}
// [END property_run_query]
// "done" was previously listed twice; ImmutableSet.of silently ignores duplicate
// elements, so the repeat was harmless but misleading. List each property once.
Map<String, ImmutableSet<String>> expected = ImmutableMap.of("Task", ImmutableSet.of(
"done", "category", "completed", "priority", "created", "percent_complete", "tag"));
assertEquals(expected, propertiesByKind);
}
/**
 * Queries __property__ entities under the __kind__/Task ancestor and collects each
 * property's index representations (e.g. STRING, INT64).
 */
@Test
public void testPropertyByKindRunQuery() {
setUpQueryTests();
// [START property_by_kind_run_query]
Key key = datastore.newKeyFactory().kind("__kind__").newKey("Task");
Query<Entity> query = Query.entityQueryBuilder()
.kind("__property__")
.filter(PropertyFilter.hasAncestor(key))
.build();
QueryResults<Entity> results = datastore.run(query);
Map<String, Collection<String>> representationsByProperty = new HashMap<>();
while (results.hasNext()) {
Entity result = results.next();
String propertyName = result.key().name();
List<StringValue> representations = result.getList("property_representation");
Collection<String> currentRepresentations = representationsByProperty.get(propertyName);
if (currentRepresentations == null) {
currentRepresentations = new HashSet<>();
representationsByProperty.put(propertyName, currentRepresentations);
}
for (StringValue value : representations) {
currentRepresentations.add(value.get());
}
}
// [END property_by_kind_run_query]
// Note: "created" is a DateTime property, but its metadata representation is INT64.
Map<String, Collection<String>> expected = ImmutableMap.<String, Collection<String>>builder()
.put("category", Collections.singleton("STRING"))
.put("done", Collections.singleton("BOOLEAN"))
.put("completed", Collections.singleton("BOOLEAN"))
.put("priority", Collections.singleton("INT64"))
.put("created", Collections.singleton("INT64"))
.put("percent_complete", Collections.singleton("DOUBLE"))
.put("tag", Collections.singleton("STRING"))
.build();
assertEquals(expected, representationsByProperty);
}
/** Queries __property__ keys at or after __kind__/Task/priority and groups names by kind. */
@Test
public void testPropertyFilteringRunQuery() {
setUpQueryTests();
// [START property_filtering_run_query]
Key startKey = datastore.newKeyFactory()
.kind("__property__")
.ancestors(PathElement.of("__kind__", "Task"))
.newKey("priority");
Query<Key> query = Query.keyQueryBuilder()
.kind("__property__")
.filter(PropertyFilter.ge("__key__", startKey))
.build();
Map<String, Collection<String>> propertiesByKind = new HashMap<>();
QueryResults<Key> keys = datastore.run(query);
while (keys.hasNext()) {
Key key = keys.next();
String kind = key.parent().name();
String propertyName = key.name();
Collection<String> properties = propertiesByKind.get(kind);
if (properties == null) {
// Diamond operator for consistency with the identical loop in testPropertyRunQuery.
properties = new HashSet<>();
propertiesByKind.put(kind, properties);
}
properties.add(propertyName);
}
// [END property_filtering_run_query]
// Of Task's properties, only "priority" and "tag" sort at or after "priority".
Map<String, ImmutableSet<String>> expected =
ImmutableMap.of("Task", ImmutableSet.of("priority", "tag"));
assertEquals(expected, propertiesByKind);
}
@Test
public void testGqlRunQuery() {
setUpQueryTests();
// [START gql_run_query]
Query<Entity> query =
Query.gqlQueryBuilder(ResultType.ENTITY, "select * from Task order by created asc").build();
// [END gql_run_query]
assertValidQuery(query);
}
@Test
public void testGqlNamedBindingQuery() {
setUpQueryTests();
// [START gql_named_binding_query]
Query<Entity> query =
Query.gqlQueryBuilder(
ResultType.ENTITY,
"select * from Task where completed = @completed and priority = @priority")
.setBinding("completed", false)
.setBinding("priority", 4)
.build();
// [END gql_named_binding_query]
assertValidQuery(query);
}
@Test
public void testGqlPositionalBindingQuery() {
setUpQueryTests();
// [START gql_positional_binding_query]
Query<Entity> query = Query.gqlQueryBuilder(
ResultType.ENTITY, "select * from Task where completed = @1 and priority = @2")
.addBinding(false)
.addBinding(4)
.build();
// [END gql_positional_binding_query]
assertValidQuery(query);
}
@Test
public void testGqlLiteralQuery() {
setUpQueryTests();
// [START gql_literal_query]
Query<Entity> query = Query.gqlQueryBuilder(
ResultType.ENTITY, "select * from Task where completed = false and priority = 4")
.allowLiteral(true)
.build();
// [END gql_literal_query]
assertValidQuery(query);
}
}
| |
/**
* Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.web.analytics.blotter;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertTrue;
import java.math.BigDecimal;
import java.util.Set;
import org.joda.beans.MetaBean;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import org.threeten.bp.LocalDate;
import org.threeten.bp.LocalTime;
import org.threeten.bp.OffsetTime;
import org.threeten.bp.ZoneOffset;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.opengamma.core.id.ExternalSchemes;
import com.opengamma.financial.security.equity.EquitySecurity;
import com.opengamma.id.ExternalId;
import com.opengamma.id.ExternalIdBundle;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.portfolio.ManageablePortfolio;
import com.opengamma.master.portfolio.ManageablePortfolioNode;
import com.opengamma.master.portfolio.PortfolioDocument;
import com.opengamma.master.portfolio.impl.InMemoryPortfolioMaster;
import com.opengamma.master.position.ManageablePosition;
import com.opengamma.master.position.ManageableTrade;
import com.opengamma.master.position.PositionDocument;
import com.opengamma.master.position.impl.InMemoryPositionMaster;
import com.opengamma.master.security.SecurityDocument;
import com.opengamma.master.security.impl.InMemorySecurityMaster;
import com.opengamma.util.money.Currency;
import com.opengamma.util.test.TestGroup;
/**
* Test.
*/
@Test(groups = TestGroup.UNIT)
public class FungibleTradeBuilderTest {
private static final ExternalIdBundle APPLE_BUNDLE;
private static final ExternalIdBundle INTEL_BUNDLE;
private static final EquitySecurity APPLE_SECURITY;
private static final EquitySecurity INTEL_SECURITY;
static {
// Two equities identified by Bloomberg tickers; used as the securities traded in the tests.
APPLE_BUNDLE = ExternalIdBundle.of(ExternalSchemes.BLOOMBERG_TICKER, "AAPL US Equity");
INTEL_BUNDLE = ExternalIdBundle.of(ExternalSchemes.BLOOMBERG_TICKER, "INTC US Equity");
APPLE_SECURITY = new EquitySecurity("exch", "exchCode", "Apple", Currency.USD);
INTEL_SECURITY = new EquitySecurity("exch", "exchCode", "Intel", Currency.USD);
APPLE_SECURITY.setExternalIdBundle(APPLE_BUNDLE);
INTEL_SECURITY.setExternalIdBundle(INTEL_BUNDLE);
}
private InMemoryPortfolioMaster _portfolioMaster;
private InMemoryPositionMaster _positionMaster;
private FungibleTradeBuilder _tradeBuilder;
private ManageablePortfolioNode _savedNode;
private ManageablePosition _savedPosition;
private ManageablePortfolio _savedPortfolio;
/**
 * Builds in-memory masters seeded with one portfolio (root -> node) holding a single
 * position of 20 Apple shares with one existing trade, then creates the trade builder
 * under test against those masters.
 */
@BeforeMethod
public void setUp() throws Exception {
_portfolioMaster = new InMemoryPortfolioMaster();
_positionMaster = new InMemoryPositionMaster();
final InMemorySecurityMaster securityMaster = new InMemorySecurityMaster();
securityMaster.add(new SecurityDocument(APPLE_SECURITY));
securityMaster.add(new SecurityDocument(INTEL_SECURITY));
// Existing position: 20 x Apple with one trade, so tests can exercise both the
// "existing position" and "no existing position" paths.
final BigDecimal quantity = BigDecimal.valueOf(20);
final ManageablePosition position = new ManageablePosition(quantity, APPLE_SECURITY.getExternalIdBundle());
position.addTrade(new ManageableTrade(quantity,
APPLE_BUNDLE,
LocalDate.of(2012, 12, 1),
OffsetTime.of(LocalTime.of(9, 30), ZoneOffset.UTC),
ExternalId.of(AbstractTradeBuilder.CPTY_SCHEME, "existingCpty")));
_savedPosition = _positionMaster.add(new PositionDocument(position)).getPosition();
final ManageablePortfolioNode root = new ManageablePortfolioNode("root");
final ManageablePortfolioNode node = new ManageablePortfolioNode("node");
node.addPosition(_savedPosition.getUniqueId());
root.addChildNode(node);
final ManageablePortfolio portfolio = new ManageablePortfolio("portfolio", root);
_savedPortfolio = _portfolioMaster.add(new PortfolioDocument(portfolio)).getPortfolio();
final Set<MetaBean> metaBeans = Sets.<MetaBean>newHashSet(ManageableTrade.meta());
_tradeBuilder = new FungibleTradeBuilder(_positionMaster,
_portfolioMaster,
securityMaster,
metaBeans,
BlotterUtils.getStringConvert());
// The child node trades are added to in the tests.
_savedNode = _savedPortfolio.getRootNode().getChildNodes().get(0);
}
/**
 * Builds the {@code BeanDataSource} describing a fungible trade in the security with the
 * given Bloomberg ticker; {@code id} is the trade's unique ID, or null for a new trade.
 */
private static BeanDataSource createTradeData(final String securityTicker, final String id) {
final String securityId = ExternalId.of(ExternalSchemes.BLOOMBERG_TICKER, securityTicker).toString();
return BlotterTestUtils.beanData("uniqueId", id,
"type", FungibleTradeBuilder.TRADE_TYPE_NAME,
"tradeDate", "2012-12-21",
"tradeTime", "14:25",
"securityIdBundle", securityId,
"premium", "1234",
"premiumCurrency", "USD",
"premiumDate", "2012-12-22",
"premiumTime", "13:30",
"quantity", "30",
"counterparty", "cptyName",
"attributes", Maps.newHashMap());
}
/**
 * Adds a trade to a portfolio node that doesn't have an existing position in the trade's
 * security. The node has a position in a different security of the same type, so the
 * builder must create a brand-new position rather than merge into the existing one.
 */
@Test
public void addTradeNoExistingPosition() {
final UniqueId tradeId = _tradeBuilder.addTrade(createTradeData("INTC US Equity", null), _savedNode.getUniqueId());
final ManageableTrade testTrade = _positionMaster.getTrade(tradeId);
// Values below mirror the literals supplied by createTradeData.
assertEquals(LocalDate.of(2012, 12, 21), testTrade.getTradeDate());
assertEquals(OffsetTime.of(LocalTime.of(14, 25), ZoneOffset.UTC), testTrade.getTradeTime());
assertEquals(INTEL_BUNDLE, testTrade.getSecurityLink().getExternalId());
assertEquals(1234d, testTrade.getPremium());
assertEquals(Currency.USD, testTrade.getPremiumCurrency());
assertEquals(LocalDate.of(2012, 12, 22), testTrade.getPremiumDate());
assertEquals(OffsetTime.of(LocalTime.of(13, 30), ZoneOffset.UTC), testTrade.getPremiumTime());
assertEquals(BigDecimal.valueOf(30), testTrade.getQuantity());
assertEquals(ExternalId.of(AbstractTradeBuilder.CPTY_SCHEME, "cptyName"), testTrade.getCounterpartyExternalId());
assertTrue(testTrade.getAttributes().isEmpty());
// A new position must exist holding exactly this one trade.
final ManageablePosition testPosition = _positionMaster.get(testTrade.getParentPositionId()).getPosition();
assertEquals(INTEL_BUNDLE, testPosition.getSecurityLink().getExternalId());
assertEquals(testTrade.getQuantity(), testPosition.getQuantity());
assertEquals(1, testPosition.getTrades().size());
assertEquals(testTrade, testPosition.getTrades().get(0));
// The node should now reference both the original (Apple) and the new (Intel) positions.
final ManageablePortfolio testPortfolio = _portfolioMaster.get(_savedNode.getPortfolioId()).getPortfolio();
final ManageablePortfolioNode testNode = testPortfolio.getRootNode().getChildNodes().get(0);
assertEquals(2, testNode.getPositionIds().size());
assertEquals(_savedPosition.getUniqueId().getObjectId(), testNode.getPositionIds().get(0));
assertEquals(testPosition.getUniqueId().getObjectId(), testNode.getPositionIds().get(1));
}
/**
 * Adds a trade to a portfolio node that already holds a position in the trade's
 * security: the trade is appended to that position and its quantity is adjusted;
 * no new position is created.
 */
@Test
public void addTradeToExistingPosition() {
    final UniqueId newTradeId = _tradeBuilder.addTrade(createTradeData("AAPL US Equity", null), _savedNode.getUniqueId());
    final ManageableTrade trade = _positionMaster.getTrade(newTradeId);
    // the stored trade mirrors the submitted data exactly
    assertEquals(LocalDate.of(2012, 12, 21), trade.getTradeDate());
    assertEquals(OffsetTime.of(LocalTime.of(14, 25), ZoneOffset.UTC), trade.getTradeTime());
    assertEquals(APPLE_BUNDLE, trade.getSecurityLink().getExternalId());
    assertEquals(1234d, trade.getPremium());
    assertEquals(Currency.USD, trade.getPremiumCurrency());
    assertEquals(LocalDate.of(2012, 12, 22), trade.getPremiumDate());
    assertEquals(OffsetTime.of(LocalTime.of(13, 30), ZoneOffset.UTC), trade.getPremiumTime());
    assertEquals(BigDecimal.valueOf(30), trade.getQuantity());
    assertEquals(ExternalId.of(AbstractTradeBuilder.CPTY_SCHEME, "cptyName"), trade.getCounterpartyExternalId());
    assertTrue(trade.getAttributes().isEmpty());
    // the existing Apple position absorbed the trade: 20 (setup) + 30 (new) = 50
    final ManageablePosition position = _positionMaster.get(trade.getParentPositionId()).getPosition();
    assertEquals(APPLE_BUNDLE, position.getSecurityLink().getExternalId());
    assertEquals(BigDecimal.valueOf(50), position.getQuantity());
    assertEquals(2, position.getTrades().size());
    assertEquals(trade, position.getTrades().get(1));
    // the node still references only the original position
    final ManageablePortfolio portfolio = _portfolioMaster.get(_savedNode.getPortfolioId()).getPortfolio();
    final ManageablePortfolioNode node = portfolio.getRootNode().getChildNodes().get(0);
    assertEquals(1, node.getPositionIds().size());
    assertEquals(_savedPosition.getUniqueId().getObjectId(), node.getPositionIds().get(0));
}
/**
 * Updates an existing trade in place; every field of the stored trade should
 * reflect the new data (the owning position's quantity is adjusted elsewhere).
 */
@Test
public void updateTrade() {
    final ManageableTrade original = _savedPosition.getTrades().get(0);
    final BeanDataSource updateData = createTradeData("AAPL US Equity", original.getUniqueId().toString());
    final UniqueId updatedId = _tradeBuilder.updateTrade(updateData);
    final ManageableTrade updated = _positionMaster.getTrade(updatedId);
    // every field now matches the update data
    assertEquals(LocalDate.of(2012, 12, 21), updated.getTradeDate());
    assertEquals(OffsetTime.of(LocalTime.of(14, 25), ZoneOffset.UTC), updated.getTradeTime());
    assertEquals(APPLE_BUNDLE, updated.getSecurityLink().getExternalId());
    assertEquals(1234d, updated.getPremium());
    assertEquals(Currency.USD, updated.getPremiumCurrency());
    assertEquals(LocalDate.of(2012, 12, 22), updated.getPremiumDate());
    assertEquals(OffsetTime.of(LocalTime.of(13, 30), ZoneOffset.UTC), updated.getPremiumTime());
    assertEquals(BigDecimal.valueOf(30), updated.getQuantity());
    assertEquals(ExternalId.of(AbstractTradeBuilder.CPTY_SCHEME, "cptyName"), updated.getCounterpartyExternalId());
    assertTrue(updated.getAttributes().isEmpty());
}
/**
 * Updates a position that has no trades: the trade is added and the position's
 * quantity is overwritten to match the trade's quantity.
 */
@Test
public void updatePositionWithNoTrades() {
    final ManageablePosition emptyPosition = new ManageablePosition(BigDecimal.valueOf(42), APPLE_SECURITY.getExternalIdBundle());
    final ManageablePosition stored = _positionMaster.add(new PositionDocument(emptyPosition)).getPosition();
    assertEquals(BigDecimal.valueOf(42), stored.getQuantity());
    _tradeBuilder.updatePosition(createTradeData("AAPL US Equity", null), stored.getUniqueId());
    final ManageablePosition updated = _positionMaster.get(stored.getUniqueId().getObjectId(),
        VersionCorrection.LATEST).getPosition();
    // quantity now tracks the single added trade (30), not the original 42
    assertEquals(BigDecimal.valueOf(30), updated.getQuantity());
    assertEquals(1, updated.getTrades().size());
    final ManageableTrade addedTrade = updated.getTrades().get(0);
    // the stored trade mirrors the submitted data exactly
    assertEquals(LocalDate.of(2012, 12, 21), addedTrade.getTradeDate());
    assertEquals(OffsetTime.of(LocalTime.of(14, 25), ZoneOffset.UTC), addedTrade.getTradeTime());
    assertEquals(APPLE_BUNDLE, addedTrade.getSecurityLink().getExternalId());
    assertEquals(1234d, addedTrade.getPremium());
    assertEquals(Currency.USD, addedTrade.getPremiumCurrency());
    assertEquals(LocalDate.of(2012, 12, 22), addedTrade.getPremiumDate());
    assertEquals(OffsetTime.of(LocalTime.of(13, 30), ZoneOffset.UTC), addedTrade.getPremiumTime());
    assertEquals(BigDecimal.valueOf(30), addedTrade.getQuantity());
    assertEquals(ExternalId.of(AbstractTradeBuilder.CPTY_SCHEME, "cptyName"), addedTrade.getCounterpartyExternalId());
    assertTrue(addedTrade.getAttributes().isEmpty());
}
/**
 * Attempting to update a trade while changing its underlying security must be
 * rejected with an IllegalArgumentException.
 */
@Test(expectedExceptions = IllegalArgumentException.class)
public void updateTradeChangeSecurity() {
    final ManageableTrade existing = _savedPosition.getTrades().get(0);
    // existing trade is in Apple; switching the ticker to Intel should fail
    _tradeBuilder.updateTrade(createTradeData("INTC US Equity", existing.getUniqueId().toString()));
}
}
| |
package com.github.yuthura.bianca.test;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.net.URL;
import java.sql.*;
import java.sql.Date;
import java.util.Calendar;
import java.util.*;
/**
 * Test double for {@link PreparedStatement} that records every bound parameter in a
 * publicly inspectable map and throws {@code UntestableOperationException} for every
 * operation that would require a live database.
 *
 * <p>All parameter setters store the supplied value under its 1-based parameter index;
 * the two {@code setNull} variants store {@code null}. Execution, metadata and
 * statement-level methods are unsupported.
 */
public class TestPreparedStatement implements PreparedStatement {

    /** Bound parameters keyed by 1-based index; a {@code null} value represents SQL NULL. */
    public final Map<Integer, Object> params;

    public TestPreparedStatement() {
        params = new HashMap<>();
    }

    /**
     * Records a bound parameter. Primitive setters rely on autoboxing, which performs the
     * same {@code Integer.valueOf} conversion the individual setters previously spelled out.
     */
    private void set(final int parameterIndex, final Object value) {
        params.put(parameterIndex, value);
    }

    // ---------------------------------------------------------------------
    // Unsupported Statement / Wrapper operations
    // ---------------------------------------------------------------------

    @Override
    public ResultSet executeQuery(String sql) throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public int executeUpdate(String sql) throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public void close() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public int getMaxFieldSize() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public void setMaxFieldSize(int max) throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public int getMaxRows() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public void setMaxRows(int max) throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public void setEscapeProcessing(boolean enable) throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public int getQueryTimeout() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public void setQueryTimeout(int seconds) throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public void cancel() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public SQLWarning getWarnings() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public void clearWarnings() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public void setCursorName(String name) throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public boolean execute(String sql) throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public ResultSet getResultSet() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public int getUpdateCount() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public boolean getMoreResults() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public void setFetchDirection(int direction) throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public int getFetchDirection() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public void setFetchSize(int rows) throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public int getFetchSize() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public int getResultSetConcurrency() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public int getResultSetType() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public void addBatch(String sql) throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public void clearBatch() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public int[] executeBatch() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public Connection getConnection() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public boolean getMoreResults(int current) throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public ResultSet getGeneratedKeys() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public int executeUpdate(String sql, int[] columnIndexes) throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public int executeUpdate(String sql, String[] columnNames) throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public boolean execute(String sql, int autoGeneratedKeys) throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public boolean execute(String sql, int[] columnIndexes) throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public boolean execute(String sql, String[] columnNames) throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public int getResultSetHoldability() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public boolean isClosed() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public void setPoolable(boolean poolable) throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public boolean isPoolable() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public void closeOnCompletion() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public boolean isCloseOnCompletion() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public <T> T unwrap(Class<T> iface) throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public boolean isWrapperFor(Class<?> iface) throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public ResultSet executeQuery() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public int executeUpdate() throws SQLException {
        throw new UntestableOperationException();
    }

    // ---------------------------------------------------------------------
    // Parameter setters — all recorded in {@link #params}
    // ---------------------------------------------------------------------

    @Override
    public void setNull(int parameterIndex, int sqlType) throws SQLException {
        set(parameterIndex, null);
    }

    @Override
    public void setBoolean(int parameterIndex, boolean x) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setByte(int parameterIndex, byte x) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setShort(int parameterIndex, short x) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setInt(int parameterIndex, int x) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setLong(int parameterIndex, long x) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setFloat(int parameterIndex, float x) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setDouble(int parameterIndex, double x) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setString(int parameterIndex, String x) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setBytes(int parameterIndex, byte[] x) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setDate(int parameterIndex, Date x) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setTime(int parameterIndex, Time x) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setTimestamp(int parameterIndex, Timestamp x) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setAsciiStream(int parameterIndex, InputStream x, int length) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setUnicodeStream(int parameterIndex, InputStream x, int length) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setBinaryStream(int parameterIndex, InputStream x, int length) throws SQLException {
        set(parameterIndex, x);
    }

    /** Discards all recorded parameters. */
    @Override
    public void clearParameters() throws SQLException {
        params.clear();
    }

    @Override
    public void setObject(int parameterIndex, Object x, int targetSqlType) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setObject(int parameterIndex, Object x) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public boolean execute() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public void addBatch() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public void setCharacterStream(int parameterIndex, Reader reader, int length) throws SQLException {
        set(parameterIndex, reader);
    }

    @Override
    public void setRef(int parameterIndex, Ref x) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setBlob(int parameterIndex, Blob x) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setClob(int parameterIndex, Clob x) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setArray(int parameterIndex, Array x) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public ResultSetMetaData getMetaData() throws SQLException {
        throw new UntestableOperationException();
    }

    // Calendar-qualified setters record only the value; the Calendar is ignored.

    @Override
    public void setDate(int parameterIndex, Date x, Calendar cal) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setTime(int parameterIndex, Time x, Calendar cal) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setNull(int parameterIndex, int sqlType, String typeName) throws SQLException {
        set(parameterIndex, null);
    }

    @Override
    public void setURL(int parameterIndex, URL x) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public ParameterMetaData getParameterMetaData() throws SQLException {
        throw new UntestableOperationException();
    }

    @Override
    public void setRowId(int parameterIndex, RowId x) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setNString(int parameterIndex, String value) throws SQLException {
        set(parameterIndex, value);
    }

    @Override
    public void setNCharacterStream(int parameterIndex, Reader value, long length) throws SQLException {
        set(parameterIndex, value);
    }

    @Override
    public void setNClob(int parameterIndex, NClob value) throws SQLException {
        set(parameterIndex, value);
    }

    @Override
    public void setClob(int parameterIndex, Reader reader, long length) throws SQLException {
        set(parameterIndex, reader);
    }

    @Override
    public void setBlob(int parameterIndex, InputStream inputStream, long length) throws SQLException {
        set(parameterIndex, inputStream);
    }

    @Override
    public void setNClob(int parameterIndex, Reader reader, long length) throws SQLException {
        set(parameterIndex, reader);
    }

    @Override
    public void setSQLXML(int parameterIndex, SQLXML xmlObject) throws SQLException {
        set(parameterIndex, xmlObject);
    }

    @Override
    public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setAsciiStream(int parameterIndex, InputStream x, long length) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setBinaryStream(int parameterIndex, InputStream x, long length) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setCharacterStream(int parameterIndex, Reader reader, long length) throws SQLException {
        set(parameterIndex, reader);
    }

    @Override
    public void setAsciiStream(int parameterIndex, InputStream x) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setBinaryStream(int parameterIndex, InputStream x) throws SQLException {
        set(parameterIndex, x);
    }

    @Override
    public void setCharacterStream(int parameterIndex, Reader reader) throws SQLException {
        set(parameterIndex, reader);
    }

    @Override
    public void setNCharacterStream(int parameterIndex, Reader value) throws SQLException {
        set(parameterIndex, value);
    }

    @Override
    public void setClob(int parameterIndex, Reader reader) throws SQLException {
        set(parameterIndex, reader);
    }

    @Override
    public void setBlob(int parameterIndex, InputStream inputStream) throws SQLException {
        set(parameterIndex, inputStream);
    }

    @Override
    public void setNClob(int parameterIndex, Reader reader) throws SQLException {
        set(parameterIndex, reader);
    }
}
| |
package com.newrelic.plugins.mysql.instance;
import static com.newrelic.plugins.mysql.util.Constants.*;
import java.sql.Connection;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import com.newrelic.metrics.publish.Agent;
import com.newrelic.metrics.publish.util.Logger;
import com.newrelic.plugins.mysql.MetricMeta;
import com.newrelic.plugins.mysql.MySQL;
/**
* This class creates a specific MySQL agent that is used to obtain a MySQL database connection,
* gather requested metrics and report to New Relic
*
* @author Ronald Bradford me@ronaldbradford.com
*
*/
public class MySQLAgent extends Agent {
private static final Logger logger = Logger.getLogger(MySQLAgent.class);
private static final String GUID = "com.newrelic.plugins.mysql.instance";
private static final String version = "2.0.0";
public static final String AGENT_DEFAULT_HOST = "localhost"; // Default values for MySQL Agent
public static final String AGENT_DEFAULT_USER = "newrelic";
public static final String AGENT_DEFAULT_PASSWD = "f63c225f4abe9e13";
public static final String AGENT_DEFAULT_PROPERTIES = "";
public static final String AGENT_DEFAULT_METRICS = "status,newrelic";
private final String name; // Agent Name
private final String host; // MySQL Connection parameters
private final String user;
private final String passwd;
private final String properties;
private String agentInfo;
private final Set<String> metrics;
// Definition of MySQL meta data (counter, unit, type etc)
private final Map<String, MetricMeta> metricsMeta = new HashMap<String, MetricMeta>();
// Definition of categories of metrics
private Map<String, Object> metricCategories = new HashMap<String, Object>();
private final MySQL m; // Per agent MySQL Object
private boolean firstReport = true;
/**
 * Creates a new MySQL Agent bound to a single MySQL instance.
 *
 * @param name human-readable name for this agent
 * @param host MySQL instance host:port
 * @param user MySQL user
 * @param passwd MySQL user password
 * @param properties extra JDBC connection properties (may be empty)
 * @param metrics set of metric category names this agent should monitor
 * @param metricCategories definition of each metric category (SQL to run, result handling, etc.)
 */
public MySQLAgent(String name, String host, String user, String passwd, String properties, Set<String> metrics, Map<String, Object> metricCategories) {
    super(GUID, version);
    this.name = name; // Set local attributes for new class object
    this.host = host;
    this.user = user;
    this.passwd = passwd;
    this.properties = properties;
    this.metrics = metrics;
    this.metricCategories = metricCategories;
    this.m = new MySQL(); // per-agent MySQL helper; caches the DB connection across polls
    createMetaData(); // Define incremental counters that are value/sec etc
    logger.debug("MySQL Agent initialized: ", formatAgentParams(name, host, user, properties, metrics));
}
/**
 * Renders the agent's configuration parameters as a single line for logging.
 *
 * @param name agent name
 * @param host MySQL instance host:port
 * @param user MySQL user
 * @param properties JDBC connection properties
 * @param metrics metric categories being monitored
 * @return the parameters as "key: value" pairs separated by " | "
 */
private String formatAgentParams(String name, String host, String user, String properties, Set<String> metrics) {
    // The trailing " | " is kept deliberately to preserve the historical log format.
    return String.format("name: %s | host: %s | user: %s | properties: %s | metrics: %s | ",
            name, host, user, properties, metrics);
}
/**
 * Runs once per poll cycle: obtains the (cached) database connection, gathers all
 * configured metrics and reports them to New Relic.
 */
@Override
public void pollCycle() {
    final Connection conn = m.getConnection(host, user, passwd, properties);
    if (conn == null) {
        // Without a valid database connection nothing can be gathered this cycle.
        return;
    }
    logger.debug("Gathering MySQL metrics. ", getAgentInfo());
    reportMetrics(gatherMetrics(conn));
    firstReport = false;
}
/**
 * Runs the SQL for every configured (and enabled) metric category and collects
 * the results, then appends the derived "newrelic" metrics.
 *
 * @param c an open MySQL database connection
 * @return map of metric name to gathered value
 */
private Map<String, Float> gatherMetrics(Connection c) {
    final Map<String, Float> results = new HashMap<String, Float>();
    for (Map.Entry<String, Object> entry : getMetricCategories().entrySet()) {
        final String category = entry.getKey();
        @SuppressWarnings("unchecked")
        final Map<String, String> attributes = (Map<String, String>) entry.getValue();
        if (isReportingForCategory(category)) {
            results.putAll(MySQL.runSQL(c, category, attributes.get(SQL), attributes.get(RESULT)));
        }
    }
    results.putAll(newRelicMetrics(results));
    return results;
}
/**
 * Derives the custom "newrelic" metrics from the raw gathered metrics (mainly the
 * "status" category). Each derived group is only computed when all of its input
 * metrics are present (checked via areRequiredMetricsPresent).
 *
 * @param existing metrics gathered so far this poll cycle, keyed by "category/name"
 * @return map of additional derived metrics (empty unless both the "newrelic" and
 *         "status" categories are enabled)
 */
protected Map<String, Float> newRelicMetrics(Map<String, Float> existing) {
    Map<String, Float> derived = new HashMap<String, Float>();
    if (!isReportingForCategory(NEW_RELIC_CATEGORY)) {
        return derived; // Only calculate newrelic category if specified.
    }
    if (!isReportingForCategory(STATUS_CATEGORY)) {
        return derived; // "status" category is a pre-requisite for newrelic metrics
    }
    logger.debug("Adding New Relic derived metrics");
    /* read and write volume */
    if (areRequiredMetricsPresent("Reads", existing, "status/com_select", "status/qcache_hits")) {
        derived.put("newrelic/volume_reads", existing.get("status/com_select") + existing.get("status/qcache_hits"));
    }
    if (areRequiredMetricsPresent("Writes", existing, "status/com_insert", "status/com_update", "status/com_delete", "status/com_replace",
            "status/com_insert_select", "status/com_update_multi", "status/com_delete_multi", "status/com_replace_select")) {
        derived.put("newrelic/volume_writes", existing.get("status/com_insert") + existing.get("status/com_insert_select")
                + existing.get("status/com_update") + existing.get("status/com_update_multi")
                + existing.get("status/com_delete") + existing.get("status/com_delete_multi")
                + existing.get("status/com_replace") + existing.get("status/com_replace_select"));
    }
    /* read and write throughput */
    if (areRequiredMetricsPresent("Read Throughput", existing, "status/bytes_sent")) {
        derived.put("newrelic/bytes_reads", existing.get("status/bytes_sent"));
    }
    if (areRequiredMetricsPresent("Write Throughput", existing, "status/bytes_received")) {
        derived.put("newrelic/bytes_writes", existing.get("status/bytes_received"));
    }
    /* Connection management */
    if (areRequiredMetricsPresent("Connection Management", existing, "status/threads_connected", "status/threads_running", "status/threads_cached")) {
        Float threads_connected = existing.get("status/threads_connected");
        Float threads_running = existing.get("status/threads_running");
        derived.put("newrelic/connections_connected", threads_connected);
        derived.put("newrelic/connections_running", threads_running);
        derived.put("newrelic/connections_cached", existing.get("status/threads_cached"));
        Float pct_connection_utilization = 0.0f;
        if (threads_connected > 0) { // guard against divide-by-zero
            pct_connection_utilization = (threads_running / threads_connected) * 100.0f;
        }
        derived.put("newrelic/pct_connection_utilization", pct_connection_utilization);
    }
    /* InnoDB Metrics */
    if (areRequiredMetricsPresent("InnoDB", existing, "status/innodb_pages_created", "status/innodb_pages_read", "status/innodb_pages_written",
            "status/innodb_buffer_pool_read_requests", "status/innodb_buffer_pool_reads", "status/innodb_data_fsyncs", "status/innodb_os_log_fsyncs")) {
        derived.put("newrelic/innodb_bp_pages_created", existing.get("status/innodb_pages_created"));
        derived.put("newrelic/innodb_bp_pages_read", existing.get("status/innodb_pages_read"));
        derived.put("newrelic/innodb_bp_pages_written", existing.get("status/innodb_pages_written"));
        /* Innodb Specific Metrics */
        Float innodb_read_requests = existing.get("status/innodb_buffer_pool_read_requests");
        Float innodb_reads = existing.get("status/innodb_buffer_pool_reads");
        Float pct_innodb_buffer_pool_hit_ratio = 0.0f;
        if (innodb_read_requests + innodb_reads > 0) { // guard against divide-by-zero
            pct_innodb_buffer_pool_hit_ratio = (innodb_read_requests / (innodb_read_requests + innodb_reads)) * 100.0f;
        }
        derived.put("newrelic/pct_innodb_buffer_pool_hit_ratio", pct_innodb_buffer_pool_hit_ratio);
        derived.put("newrelic/innodb_fsyncs_data", existing.get("status/innodb_data_fsyncs"));
        derived.put("newrelic/innodb_fsyncs_os_log", existing.get("status/innodb_os_log_fsyncs"));
    }
    /* InnoDB Buffer Metrics */
    if (areRequiredMetricsPresent("InnoDB Buffers", existing, "status/innodb_buffer_pool_pages_total", "status/innodb_buffer_pool_pages_data",
            "status/innodb_buffer_pool_pages_misc", "status/innodb_buffer_pool_pages_dirty", "status/innodb_buffer_pool_pages_free")) {
        Float pages_total = existing.get("status/innodb_buffer_pool_pages_total");
        Float pages_data = existing.get("status/innodb_buffer_pool_pages_data");
        Float pages_misc = existing.get("status/innodb_buffer_pool_pages_misc");
        Float pages_dirty = existing.get("status/innodb_buffer_pool_pages_dirty");
        Float pages_free = existing.get("status/innodb_buffer_pool_pages_free");
        derived.put("newrelic/innodb_buffer_pool_pages_clean", pages_data - pages_dirty);
        derived.put("newrelic/innodb_buffer_pool_pages_dirty", pages_dirty);
        derived.put("newrelic/innodb_buffer_pool_pages_misc", pages_misc);
        derived.put("newrelic/innodb_buffer_pool_pages_free", pages_free);
        // whatever is left over once data/free/misc pages are accounted for
        derived.put("newrelic/innodb_buffer_pool_pages_unassigned", pages_total - pages_data - pages_free - pages_misc);
    }
    /* Query Cache */
    if (areRequiredMetricsPresent("Query Cache", existing, "status/qcache_hits", "status/com_select", "status/qcache_free_blocks",
            "status/qcache_total_blocks", "status/qcache_inserts", "status/qcache_not_cached")) {
        Float qc_hits = existing.get("status/qcache_hits");
        Float reads = existing.get("status/com_select");
        Float free = existing.get("status/qcache_free_blocks");
        Float total = existing.get("status/qcache_total_blocks");
        derived.put("newrelic/query_cache_hits", qc_hits);
        derived.put("newrelic/query_cache_misses", existing.get("status/qcache_inserts"));
        derived.put("newrelic/query_cache_not_cached", existing.get("status/qcache_not_cached"));
        Float pct_query_cache_hit_utilization = 0.0f;
        if (qc_hits + reads > 0) { // guard against divide-by-zero
            pct_query_cache_hit_utilization = (qc_hits / (qc_hits + reads)) * 100.0f;
        }
        derived.put("newrelic/pct_query_cache_hit_utilization", pct_query_cache_hit_utilization);
        Float pct_query_cache_memory_in_use = 0.0f;
        if (total > 0) { // guard against divide-by-zero
            pct_query_cache_memory_in_use = 100.0f - ((free / total) * 100.0f);
        }
        derived.put("newrelic/pct_query_cache_memory_in_use", pct_query_cache_memory_in_use);
    }
    /* Temp Table */
    if (areRequiredMetricsPresent("Temp Tables", existing, "status/created_tmp_tables", "status/created_tmp_disk_tables")) {
        Float tmp_tables = existing.get("status/created_tmp_tables");
        Float tmp_tables_disk = existing.get("status/created_tmp_disk_tables");
        Float pct_tmp_tables_written_to_disk = 0.0f;
        if (tmp_tables > 0) { // guard against divide-by-zero
            pct_tmp_tables_written_to_disk = (tmp_tables_disk / tmp_tables) * 100.0f;
        }
        derived.put("newrelic/pct_tmp_tables_written_to_disk", pct_tmp_tables_written_to_disk);
    }
    /* Replication specifics */
    // "slave" category is a pre-requisite for these metrics
    if (isReportingForCategory("slave")) {
        if (areRequiredMetricsPresent("newrelic/replication_lag", existing, "slave/seconds_behind_master")) {
            derived.put("newrelic/replication_lag", existing.get("slave/seconds_behind_master"));
        }
        if (areRequiredMetricsPresent("newrelic/replication_status", existing, "slave/slave_io_running", "slave/slave_sql_running")) {
            int slave_io_thread_running = existing.get("slave/slave_io_running").intValue();
            int slave_sql_thread_running = existing.get("slave/slave_sql_running").intValue();
            /* both need to be YES, which is 1 */
            Float replication_status = 1.0f; // Default as in ERROR
            if (slave_io_thread_running + slave_sql_thread_running == 2) {
                replication_status = 0.0f;
            }
            derived.put("newrelic/replication_status", replication_status);
        }
        if (areRequiredMetricsPresent("newrelic/slave_relay_log_bytes", existing, "slave/relay_log_pos")) {
            derived.put("newrelic/slave_relay_log_bytes", existing.get("slave/relay_log_pos"));
        }
        if (areRequiredMetricsPresent("newrelic/master_log_lag_bytes", existing, "slave/read_master_log_pos", "slave/exec_master_log_pos")) {
            derived.put("newrelic/master_log_lag_bytes", existing.get("slave/read_master_log_pos")
                    - existing.get("slave/exec_master_log_pos"));
        }
    } else { // This is a hack because the NR UI can't handle it missing for graphs
        derived.put("newrelic/replication_lag", 0.0f);
        derived.put("newrelic/replication_status", 0.0f);
        derived.put("newrelic/slave_relay_log_bytes", 0.0f);
        derived.put("newrelic/master_log_lag_bytes", 0.0f);
    }
    return derived;
}
/**
 * Report the collected metrics to New Relic.
 *
 * Only metrics with known meta data (from metric.category.json or the
 * defaults registered in createMetaData) are reported; counters are passed
 * through their EpochCounter processor, plain gauges are reported as-is.
 *
 * @param results map of metric name to current value
 */
public void reportMetrics(Map<String, Float> results) {
    int count = 0;
    logger.debug("Collected ", results.size(), " MySQL metrics. ", getAgentInfo());
    logger.debug(results);
    Iterator<String> iter = results.keySet().iterator();
    while (iter.hasNext()) { // Iterate over current metrics
        String rawKey = iter.next();
        // Meta data is keyed on the lower-cased name, but the value must be
        // fetched with the ORIGINAL key: looking it up with the lower-cased
        // key (as before) silently yields null for mixed-case metric names.
        String key = rawKey.toLowerCase();
        Float val = results.get(rawKey);
        MetricMeta md = getMetricMeta(key);
        if (md != null) { // Metric meta data exists (from metric.category.json)
            logger.debug(METRIC_LOG_PREFIX, key, SPACE, md, EQUALS, val);
            count++;
            if (md.isCounter()) { // Metric is a counter: report the per-interval delta
                reportMetric(key, md.getUnit(), md.getCounter().process(val));
            } else { // Metric is a fixed number (gauge)
                reportMetric(key, md.getUnit(), val);
            }
        } else { // md == null: no meta data, metric is not reported
            if (firstReport) { // Provide some feedback of available metrics for future reporting
                logger.debug("Not reporting identified metric ", key);
            }
        }
    }
    logger.debug("Reported to New Relic ", count, " metrics. ", getAgentInfo());
}
/**
 * Whether this agent instance has been configured to report the
 * given metric category.
 *
 * @param metricCategory name of the category (e.g. "slave")
 * @return true when the category is part of the configured metrics
 */
boolean isReportingForCategory(String metricCategory) {
    return this.metrics.contains(metricCategory);
}
/**
 * Lazily build and cache a short description of this agent
 * (name and version) for use in log messages.
 *
 * @return human readable agent identification string
 */
private String getAgentInfo() {
    if (agentInfo == null) {
        // Built once and cached; name and version never change at runtime.
        agentInfo = "Agent Name: " + name + ". Agent Version: " + version;
    }
    return agentInfo;
}
/**
 * This method creates the metric meta data that is derived from the provided
 * configuration and New Relic specific metrics.
 *
 * Configured categories contribute "value_metrics" (gauges) and
 * "counter_metrics" (rates); the remainder are hard-coded defaults used by
 * the standard dashboards.
 */
private void createMetaData() {
    Map<String, Object> categories = getMetricCategories(); // Get current Metric Categories
    Iterator<String> iter = categories.keySet().iterator();
    while (iter.hasNext()) {
        String category = iter.next();
        @SuppressWarnings("unchecked")
        Map<String, String> attributes = (Map<String, String>) categories.get(category);
        // The two attribute lists share identical parsing; only the
        // counter flag of the resulting MetricMeta differs.
        registerConfiguredMetrics(category, attributes.get("value_metrics"), false);
        registerConfiguredMetrics(category, attributes.get("counter_metrics"), true);
    }
    /* Define New Relic specific metrics used for default dashboards */
    addMetricMeta("newrelic/volume_reads", new MetricMeta(true, "Queries/Second"));
    addMetricMeta("newrelic/volume_writes", new MetricMeta(true, "Queries/Second"));
    addMetricMeta("newrelic/bytes_reads", new MetricMeta(true, "Bytes/Second"));
    addMetricMeta("newrelic/bytes_writes", new MetricMeta(true, "Bytes/Second"));
    addMetricMeta("newrelic/connections_connected", new MetricMeta(false, "Connections"));
    addMetricMeta("newrelic/connections_running", new MetricMeta(false, "Connections"));
    addMetricMeta("newrelic/connections_cached", new MetricMeta(false, "Connections"));
    addMetricMeta("newrelic/innodb_bp_pages_created", new MetricMeta(true, "Pages/Second"));
    addMetricMeta("newrelic/innodb_bp_pages_read", new MetricMeta(true, "Pages/Second"));
    addMetricMeta("newrelic/innodb_bp_pages_written", new MetricMeta(true, "Pages/Second"));
    // NOTE(review): "Queries/Seconds" (plural) differs from the
    // "Queries/Second" unit used elsewhere — left unchanged because the unit
    // string may be referenced by existing dashboards; confirm before fixing.
    addMetricMeta("newrelic/query_cache_hits", new MetricMeta(true, "Queries/Seconds"));
    addMetricMeta("newrelic/query_cache_misses", new MetricMeta(true, "Queries/Seconds"));
    addMetricMeta("newrelic/query_cache_not_cached", new MetricMeta(true, "Queries/Seconds"));
    addMetricMeta("newrelic/replication_lag", new MetricMeta(false, "Seconds"));
    addMetricMeta("newrelic/replication_status", new MetricMeta(false, "State"));
    addMetricMeta("newrelic/pct_connection_utilization", new MetricMeta(false, "Percent"));
    addMetricMeta("newrelic/pct_innodb_buffer_pool_hit_ratio", new MetricMeta(false, "Percent"));
    addMetricMeta("newrelic/pct_query_cache_hit_utilization", new MetricMeta(false, "Percent"));
    addMetricMeta("newrelic/pct_query_cache_memory_in_use", new MetricMeta(false, "Percent"));
    addMetricMeta("newrelic/pct_tmp_tables_written_to_disk", new MetricMeta(false, "Percent"));
    addMetricMeta("newrelic/innodb_fsyncs_data", new MetricMeta(true, "Fsyncs/Second"));
    addMetricMeta("newrelic/innodb_fsyncs_os_log", new MetricMeta(true, "Fsyncs/Second"));
    addMetricMeta("newrelic/slave_relay_log_bytes", new MetricMeta(true, "Bytes/Second"));
    addMetricMeta("newrelic/master_log_lag_bytes", new MetricMeta(true, "Bytes/Second"));
    /* Define improved metric values for certain general metrics */
    addMetricMeta("status/aborted_clients", new MetricMeta(true, "Connections/Second"));
    addMetricMeta("status/aborted_connects", new MetricMeta(true, "Connections/Second"));
    addMetricMeta("status/bytes_sent", new MetricMeta(true, "Bytes/Second"));
    addMetricMeta("status/bytes_received", new MetricMeta(true, "Bytes/Second"));
    addMetricMeta("status/com_select", new MetricMeta(true, "Selects/Second"));
    addMetricMeta("status/com_insert", new MetricMeta(true, "Inserts/Second"));
    addMetricMeta("status/com_insert_select", new MetricMeta(true, "Inserts/Second"));
    addMetricMeta("status/com_update", new MetricMeta(true, "Updates/Second"));
    addMetricMeta("status/com_update_multi", new MetricMeta(true, "Updates/Second"));
    addMetricMeta("status/com_delete", new MetricMeta(true, "Deletes/Second"));
    addMetricMeta("status/com_delete_multi", new MetricMeta(true, "Deletes/Second"));
    addMetricMeta("status/com_replace", new MetricMeta(true, "Replaces/Second"));
    addMetricMeta("status/com_replace_select", new MetricMeta(true, "Replaces/Second"));
    addMetricMeta("status/slow_queries", new MetricMeta(true, "Queries/Second"));
    addMetricMeta("status/created_tmp_tables", new MetricMeta(true, "Queries/Second"));
    addMetricMeta("status/created_tmp_disk_tables", new MetricMeta(true, "Queries/Second"));
    addMetricMeta("status/innodb_buffer_pool_pages_flushed", new MetricMeta(true, "Pages/Second"));
    addMetricMeta("newrelic/innodb_buffer_pool_pages_clean", new MetricMeta(false, "Pages"));
    addMetricMeta("newrelic/innodb_buffer_pool_pages_dirty", new MetricMeta(false, "Pages"));
    addMetricMeta("newrelic/innodb_buffer_pool_pages_misc", new MetricMeta(false, "Pages"));
    addMetricMeta("newrelic/innodb_buffer_pool_pages_free", new MetricMeta(false, "Pages"));
    addMetricMeta("newrelic/innodb_buffer_pool_pages_unassigned", new MetricMeta(false, "Pages"));
    addMetricMeta("status/innodb_data_fsyncs", new MetricMeta(true, "Fsyncs/Second"));
    addMetricMeta("status/innodb_os_log_fsyncs", new MetricMeta(true, "Fsyncs/Second"));
    addMetricMeta("status/innodb_os_log_written", new MetricMeta(true, "Bytes/Second"));
    /* Query Cache Units */
    addMetricMeta("status/qcache_free_blocks", new MetricMeta(false, "Blocks"));
    addMetricMeta("status/qcache_free_memory", new MetricMeta(false, "Bytes"));
    addMetricMeta("status/qcache_hits", new MetricMeta(true, "Queries/Second"));
    addMetricMeta("status/qcache_inserts", new MetricMeta(true, "Queries/Second"));
    addMetricMeta("status/qcache_lowmem_prunes", new MetricMeta(true, "Queries/Second"));
    addMetricMeta("status/qcache_not_cached", new MetricMeta(true, "Queries/Second"));
    addMetricMeta("status/qcache_queries_in_cache", new MetricMeta(false, "Queries"));
    addMetricMeta("status/qcache_total_blocks", new MetricMeta(false, "Blocks"));
    addMetricMeta("innodb_status/history_list_length", new MetricMeta(false, "Pages"));
    addMetricMeta("innodb_status/queries_inside_innodb", new MetricMeta(false, "Queries"));
    addMetricMeta("innodb_status/queries_in_queue", new MetricMeta(false, "Queries"));
    addMetricMeta("innodb_status/checkpoint_age", new MetricMeta(false, "Bytes"));
    addMetricMeta("master/position", new MetricMeta(true, "Bytes/Second"));
    addMetricMeta("slave/relay_log_pos", new MetricMeta(true, "Bytes/Second"));
}

/**
 * Register meta data for every metric in a configured comma separated list.
 *
 * @param category category name the metrics belong to
 * @param metricList comma separated metric names (may be null when the
 *        category does not define the attribute; then nothing is registered)
 * @param isCounter true to register the metrics as counters, false as gauges
 */
private void registerConfiguredMetrics(String category, String metricList, boolean isCounter) {
    if (metricList == null) {
        return;
    }
    // Normalize: lower-case, strip spaces, split on commas, de-duplicate.
    Set<String> metrics = new HashSet<String>(Arrays.asList(metricList.toLowerCase().replaceAll(SPACE, EMPTY_STRING).split(COMMA)));
    for (String s : metrics) {
        addMetricMeta(category + SEPARATOR + s, new MetricMeta(isCounter));
    }
}
/**
 * Store the given metric meta data in the map of all metric meta
 * information for this agent. Keys are normalized to lower case.
 *
 * @param key metric name
 * @param mm meta data describing unit and counter behavior
 */
private void addMetricMeta(String key, MetricMeta mm) {
    String normalizedKey = key.toLowerCase();
    metricsMeta.put(normalizedKey, mm);
}
/**
 * This provides a lazy instantiation of a MySQL metric where no meta data was
 * defined and means new metrics can be captured automatically.
 *
 * InnoDB mutex metrics have dynamic names, so their meta data is created on
 * first sight as an "Operations/Second" counter.
 *
 * @param key metric to look up
 * @return MetricMeta structure of information about the metric, or null when
 *         the metric is unknown and not a mutex metric
 */
private MetricMeta getMetricMeta(String key) {
    // The map is keyed lower-case (see addMetricMeta); normalize once so the
    // existence check and the lookup agree. Previously containsKey() used the
    // raw key, which would re-create (and reset) the MetricMeta on every call
    // for a mixed-case mutex metric name.
    String lowerKey = key.toLowerCase();
    if (key.startsWith(INNODB_MUTEX_CATEGORY) && !metricsMeta.containsKey(lowerKey)) { // This is a catch all for dynamic name metrics
        addMetricMeta(key, new MetricMeta(true, "Operations/Second"));
    }
    return metricsMeta.get(lowerKey); // Look for existing meta data on metric
}
/**
 * Utility to validate that all required data points are present before
 * constructing a derived (atomic) metric.
 *
 * @param category display name used in the log when a key is missing
 * @param map map of available data points
 * @param keys keys that must all be present
 * @return true if every expected key is present, otherwise false
 */
private boolean areRequiredMetricsPresent(String category, Map<String, Float> map, String... keys) {
    for (String requiredKey : keys) {
        if (map.containsKey(requiredKey)) {
            continue;
        }
        // Only report missing category data on the first run so as not to clutter the log
        if (firstReport) {
            logger.debug("Not reporting on '", category, "' due to missing data field '", requiredKey, "'");
        }
        return false;
    }
    return true;
}
/**
 * Return the human readable name for this agent.
 *
 * @return the configured agent name
 */
@Override
public String getComponentHumanLabel() {
    return this.name;
}
/**
 * Return the map of metric categories parsed from the configuration.
 *
 * @return map of category name to its attribute map
 */
public Map<String, Object> getMetricCategories() {
    return this.metricCategories;
}
}
| |
/*
* Copyright 2002-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.test.context.support;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.BeanUtils;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextException;
import org.springframework.context.ApplicationContextInitializer;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.core.GenericTypeResolver;
import org.springframework.core.annotation.AnnotationAwareOrderComparator;
import org.springframework.core.env.PropertySource;
import org.springframework.core.io.ClassPathResource;
import org.springframework.test.context.ContextConfigurationAttributes;
import org.springframework.test.context.ContextCustomizer;
import org.springframework.test.context.ContextLoader;
import org.springframework.test.context.MergedContextConfiguration;
import org.springframework.test.context.SmartContextLoader;
import org.springframework.test.context.util.TestContextResourceUtils;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.ObjectUtils;
import org.springframework.util.ResourceUtils;
/**
 * Abstract application context loader providing the common basis for all
 * concrete {@link ContextLoader} implementations, using a
 * <em>Template Method</em> approach for {@link #processLocations processing}
 * resource locations.
 *
 * <p>Since Spring 3.1 this class also serves as the basis for concrete
 * {@link SmartContextLoader} implementations: for backwards compatibility
 * with the {@code ContextLoader} SPI,
 * {@link #processContextConfiguration(ContextConfigurationAttributes)}
 * delegates to {@link #processLocations(Class, String...)}.
 *
 * @author Sam Brannen
 * @author Juergen Hoeller
 * @author Phillip Webb
 * @since 2.5
 * @see #generateDefaultLocations
 * @see #getResourceSuffixes
 * @see #modifyLocations
 * @see #prepareContext
 * @see #customizeContext
 */
public abstract class AbstractContextLoader implements SmartContextLoader {

	private static final String[] EMPTY_STRING_ARRAY = new String[0];

	private static final Log logger = LogFactory.getLog(AbstractContextLoader.class);


	// --- SmartContextLoader -----------------------------------------------

	/**
	 * Process the supplied configuration attributes by running their declared
	 * resource locations through {@link #processLocations(Class, String...)}
	 * (keeping backwards compatibility with the {@link ContextLoader} SPI)
	 * and storing the result back into the attributes.
	 * <p>May be overridden in subclasses, e.g. to process annotated classes
	 * instead of resource locations.
	 * @since 3.1
	 * @see #processLocations(Class, String...)
	 */
	@Override
	public void processContextConfiguration(ContextConfigurationAttributes configAttributes) {
		Class<?> declaringClass = configAttributes.getDeclaringClass();
		String[] declaredLocations = configAttributes.getLocations();
		configAttributes.setLocations(processLocations(declaringClass, declaredLocations));
	}

	/**
	 * Prepare the {@link ConfigurableApplicationContext} created by this
	 * {@code SmartContextLoader} <i>before</i> bean definitions are read:
	 * activates the bean definition profiles from the merged configuration,
	 * registers property-source files and inlined properties in the context's
	 * {@code Environment}, and instantiates and invokes all declared
	 * {@link ApplicationContextInitializer ApplicationContextInitializers}
	 * (sorted via {@code Ordered}/{@code @Order} where applicable).
	 * @param context the newly created application context
	 * @param mergedConfig the merged context configuration
	 * @since 3.2
	 * @see TestPropertySourceUtils#addPropertiesFilesToEnvironment
	 * @see TestPropertySourceUtils#addInlinedPropertiesToEnvironment
	 * @see ApplicationContextInitializer#initialize(ConfigurableApplicationContext)
	 * @see #loadContext(MergedContextConfiguration)
	 * @see ConfigurableApplicationContext#setId
	 */
	protected void prepareContext(ConfigurableApplicationContext context, MergedContextConfiguration mergedConfig) {
		// Order matters: profiles and property sources must be in place before
		// any declared context initializers run.
		context.getEnvironment().setActiveProfiles(mergedConfig.getActiveProfiles());
		TestPropertySourceUtils.addPropertiesFilesToEnvironment(context, mergedConfig.getPropertySourceLocations());
		TestPropertySourceUtils.addInlinedPropertiesToEnvironment(context, mergedConfig.getPropertySourceProperties());
		invokeApplicationContextInitializers(context, mergedConfig);
	}

	@SuppressWarnings("unchecked")
	private void invokeApplicationContextInitializers(ConfigurableApplicationContext context,
			MergedContextConfiguration mergedConfig) {

		Set<Class<? extends ApplicationContextInitializer<? extends ConfigurableApplicationContext>>> declaredClasses =
				mergedConfig.getContextInitializerClasses();
		if (declaredClasses.isEmpty()) {
			// No ApplicationContextInitializers have been declared -> nothing to do.
			return;
		}

		Class<?> contextClass = context.getClass();
		List<ApplicationContextInitializer<ConfigurableApplicationContext>> instances = new ArrayList<>();
		for (Class<? extends ApplicationContextInitializer<? extends ConfigurableApplicationContext>> declaredClass : declaredClasses) {
			// Reject initializers whose generic context type is incompatible
			// with the actual context created by this loader.
			Class<?> requiredContextClass =
					GenericTypeResolver.resolveTypeArgument(declaredClass, ApplicationContextInitializer.class);
			if (requiredContextClass != null && !requiredContextClass.isInstance(context)) {
				throw new ApplicationContextException(String.format(
						"Could not apply context initializer [%s] since its generic parameter [%s] " +
						"is not assignable from the type of application context used by this " +
						"context loader: [%s]", declaredClass.getName(), requiredContextClass.getName(),
						contextClass.getName()));
			}
			instances.add((ApplicationContextInitializer<ConfigurableApplicationContext>) BeanUtils.instantiateClass(declaredClass));
		}

		AnnotationAwareOrderComparator.sort(instances);
		for (ApplicationContextInitializer<ConfigurableApplicationContext> initializer : instances) {
			initializer.initialize(context);
		}
	}

	/**
	 * Customize the {@link ConfigurableApplicationContext} created by this
	 * {@code ContextLoader} <em>after</em> bean definitions have been loaded
	 * into the context but <em>before</em> the context has been refreshed, by
	 * delegating to every {@link ContextCustomizer} registered with the
	 * supplied merged configuration.
	 * @param context the newly created application context
	 * @param mergedConfig the merged context configuration
	 * @since 4.3
	 */
	protected void customizeContext(ConfigurableApplicationContext context, MergedContextConfiguration mergedConfig) {
		for (ContextCustomizer customizer : mergedConfig.getContextCustomizers()) {
			customizer.customizeContext(context, mergedConfig);
		}
	}


	// --- ContextLoader -------------------------------------------------------

	/**
	 * If the supplied {@code locations} are {@code null} or empty and
	 * {@link #isGenerateDefaultLocations()} returns {@code true}, default
	 * locations are {@link #generateDefaultLocations(Class) generated} for the
	 * given class and the configured {@linkplain #getResourceSuffixes()
	 * resource suffixes}; otherwise the supplied locations are
	 * {@linkplain #modifyLocations modified} as necessary and returned.
	 * @param clazz the class with which the locations are associated
	 * @param locations the unmodified locations (can be {@code null} or empty)
	 * @return a processed array of application context resource locations
	 * @since 2.5
	 * @see #isGenerateDefaultLocations()
	 * @see #generateDefaultLocations(Class)
	 * @see #modifyLocations(Class, String...)
	 * @see org.springframework.test.context.ContextLoader#processLocations(Class, String...)
	 * @see #processContextConfiguration(ContextConfigurationAttributes)
	 */
	@Override
	public final String[] processLocations(Class<?> clazz, String... locations) {
		if (ObjectUtils.isEmpty(locations) && isGenerateDefaultLocations()) {
			return generateDefaultLocations(clazz);
		}
		return modifyLocations(clazz, locations);
	}

	/**
	 * Generate the default classpath resource locations array for the supplied
	 * class: for {@code com.example.MyTest} this yields
	 * {@code "classpath:com/example/MyTest<suffix>"} for the first configured
	 * {@linkplain #getResourceSuffixes() resource suffix} whose generated
	 * location actually exists on the classpath. Per the
	 * {@link SmartContextLoader} SPI contract (since Spring 3.1), existence is
	 * verified preemptively; when no candidate exists, a message is logged and
	 * an empty array is returned.
	 * <p>Subclasses can override this method to implement a different
	 * default location generation strategy.
	 * @param clazz the class for which the default locations are to be generated
	 * @return an array of default application context resource locations
	 * @since 2.5
	 * @see #getResourceSuffixes()
	 */
	protected String[] generateDefaultLocations(Class<?> clazz) {
		Assert.notNull(clazz, "Class must not be null");

		String[] suffixes = getResourceSuffixes();
		for (String suffix : suffixes) {
			Assert.hasText(suffix, "Resource suffix must not be empty");
			String resourcePath = ClassUtils.convertClassNameToResourcePath(clazz.getName()) + suffix;
			String prefixedResourcePath = ResourceUtils.CLASSPATH_URL_PREFIX + resourcePath;
			ClassPathResource candidate = new ClassPathResource(resourcePath);
			if (candidate.exists()) {
				if (logger.isInfoEnabled()) {
					logger.info(String.format("Detected default resource location \"%s\" for test class [%s]",
							prefixedResourcePath, clazz.getName()));
				}
				// First existing candidate wins.
				return new String[] {prefixedResourcePath};
			}
			if (logger.isDebugEnabled()) {
				logger.debug(String.format("Did not detect default resource location for test class [%s]: " +
						"%s does not exist", clazz.getName(), candidate));
			}
		}

		if (logger.isInfoEnabled()) {
			logger.info(String.format("Could not detect default resource locations for test class [%s]: " +
					"no resource found for suffixes %s.", clazz.getName(), ObjectUtils.nullSafeToString(suffixes)));
		}
		return EMPTY_STRING_ARRAY;
	}

	/**
	 * Generate a modified version of the supplied locations array and return it,
	 * by default delegating to
	 * {@link TestContextResourceUtils#convertToClasspathResourcePaths}.
	 * <p>Subclasses can override this method to implement a different
	 * location modification strategy.
	 * @param clazz the class with which the locations are associated
	 * @param locations the resource locations to be modified
	 * @return an array of modified application context resource locations
	 * @since 2.5
	 */
	protected String[] modifyLocations(Class<?> clazz, String... locations) {
		return TestContextResourceUtils.convertToClasspathResourcePaths(clazz, locations);
	}

	/**
	 * Determine whether <em>default</em> resource locations (or, since Spring
	 * 3.1, default configuration classes) should be detected when the
	 * {@code locations} (or {@code classes}) supplied to
	 * {@link #processLocations(Class, String...)} (or
	 * {@link #processContextConfiguration(ContextConfigurationAttributes)})
	 * are {@code null} or empty.
	 * <p>Can be overridden by subclasses to change the default behavior.
	 * @return always {@code true} by default
	 * @since 2.5
	 */
	protected boolean isGenerateDefaultLocations() {
		return true;
	}

	/**
	 * Get the suffixes to append to {@link ApplicationContext} resource
	 * locations when detecting default locations. By default this wraps the
	 * single value returned by {@link #getResourceSuffix()}; override to
	 * support multiple suffixes.
	 * @return the resource suffixes; never {@code null} or empty
	 * @since 4.1
	 * @see #generateDefaultLocations(Class)
	 */
	protected String[] getResourceSuffixes() {
		return new String[] {getResourceSuffix()};
	}

	/**
	 * Get the suffix to append to {@link ApplicationContext} resource
	 * locations when detecting default locations. Subclasses must return a
	 * single suffix here, or provide a no-op implementation and override
	 * {@link #getResourceSuffixes()} to supply multiple custom suffixes.
	 * @return the resource suffix; never {@code null} or empty
	 * @since 2.5
	 * @see #generateDefaultLocations(Class)
	 * @see #getResourceSuffixes()
	 */
	protected abstract String getResourceSuffix();

}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.stress;
import java.io.File;
import java.io.IOError;
import java.net.URI;
import java.util.*;
import java.util.concurrent.*;
import javax.inject.Inject;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.Uninterruptibles;
import io.airlift.airline.*;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.cql3.statements.schema.CreateTableStatement;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.Directories;
import org.apache.cassandra.db.SystemKeyspace;
import org.apache.cassandra.db.commitlog.CommitLog;
import org.apache.cassandra.db.compaction.CompactionManager;
import org.apache.cassandra.db.lifecycle.LifecycleTransaction;
import org.apache.cassandra.dht.IPartitioner;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.io.sstable.StressCQLSSTableWriter;
import org.apache.cassandra.io.sstable.Component;
import org.apache.cassandra.io.sstable.Descriptor;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.locator.InetAddressAndPort;
import org.apache.cassandra.locator.TokenMetadata;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.stress.generate.PartitionGenerator;
import org.apache.cassandra.stress.generate.SeedManager;
import org.apache.cassandra.stress.operations.userdefined.SchemaInsert;
import org.apache.cassandra.stress.settings.StressSettings;
import org.apache.cassandra.tools.nodetool.CompactionStats;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.JVMStabilityInspector;
/**
* Tool that allows fast route to loading data for arbitrary schemas to disk
* and compacting them.
*/
public abstract class CompactionStress implements Runnable
{
// Airline-injected standard help option (-h/--help).
@Inject
public HelpOption helpOption;
// Path or URI of the stress profile yaml describing the table schema and generators.
@Option(name = { "-p", "--profile" }, description = "Path to stress yaml file", required = true)
String profile;
// One or more data directories holding the sstables to operate on.
@Option(name = { "-d", "--datadir" }, description = "Data directory (can be used many times to specify multiple data dirs)", required = true)
List<String> dataDirs;
// Number of local tokens generated for the offline token metadata (see generateTokens).
@Option(name = {"-v", "--vnodes"}, description = "number of local tokens to generate (default 256)")
Integer numTokens = 256;
// Minimal daemon initialization so storage subsystems used offline work;
// runs once when the class is loaded, before any sub-command executes.
static
{
DatabaseDescriptor.daemonInitialization();
CommitLog.instance.start();
}
/**
 * Validate each configured data directory (must exist, be a directory, and
 * be writable) and return them as files. Exits the process with a distinct
 * status code on the first invalid entry.
 */
List<File> getDataDirectories()
{
    List<File> directories = new ArrayList<>(dataDirs.size());
    for (String path : dataDirs)
    {
        File dir = new File(path);
        if (!dir.exists())
        {
            System.err.println("Invalid output dir (missing): " + dir);
            System.exit(1);
        }
        if (!dir.isDirectory())
        {
            System.err.println("Invalid output dir (not a directory): " + dir);
            System.exit(2);
        }
        if (!dir.canWrite())
        {
            System.err.println("Invalid output dir (no write permissions): " + dir);
            System.exit(3);
        }
        directories.add(dir);
    }
    return directories;
}
/**
 * Create an offline ColumnFamilyStore for the profile's table and, when
 * requested, open and register any sstables already present in the data
 * directories.
 *
 * @param stressProfile profile providing the seed string and CREATE TABLE statement
 * @param loadSSTables  when true, existing sstables in the data dirs are opened
 *                      (without validation) and added to the cfs
 * @return the offline ColumnFamilyStore
 */
ColumnFamilyStore initCf(StressProfile stressProfile, boolean loadSSTables)
{
// Seed token metadata deterministically so repeated runs lay out data identically.
generateTokens(stressProfile.seedStr, StorageService.instance.getTokenMetadata(), numTokens);
CreateTableStatement.Raw createStatement = stressProfile.getCreateStatement();
List<File> dataDirectories = getDataDirectories();
ColumnFamilyStore cfs = StressCQLSSTableWriter.Builder.createOfflineTable(createStatement, Collections.EMPTY_LIST, dataDirectories);
if (loadSSTables)
{
// Temporary files are skipped; listing errors are ignored (offline tool).
Directories.SSTableLister lister = cfs.getDirectories().sstableLister(Directories.OnTxnErr.IGNORE).skipTemporary(true);
List<SSTableReader> sstables = new ArrayList<>();
//Offline open sstables
for (Map.Entry<Descriptor, Set<Component>> entry : lister.list().entrySet())
{
Set<Component> components = entry.getValue();
// A Data component is required to open an sstable; skip incomplete sets.
if (!components.contains(Component.DATA))
continue;
try
{
SSTableReader sstable = SSTableReader.openNoValidation(entry.getKey(), components, cfs);
sstables.add(sstable);
}
catch (Exception e)
{
// Best effort: report the unreadable sstable and continue with the rest.
JVMStabilityInspector.inspectThrowable(e);
System.err.println(String.format("Error Loading %s: %s", entry.getKey(), e.getMessage()));
}
}
cfs.disableAutoCompaction();
// We want to add the SSTables without firing their indexing by any eventual unsupported 2i
if (cfs.indexManager.hasIndexes())
throw new IllegalStateException("CompactionStress does not support secondary indexes");
//Register with cfs
cfs.addSSTables(sstables);
}
return cfs;
}
/**
 * Load the stress profile from the configured path. A local file path takes
 * precedence; otherwise the argument is interpreted as a URI. Exits the
 * process with status 4 when the profile cannot be read.
 */
StressProfile getStressProfile()
{
    try
    {
        File yamlFile = new File(profile);
        URI profileUri = yamlFile.exists() ? yamlFile.toURI() : URI.create(profile);
        return StressProfile.load(profileUri);
    }
    catch (IOError e)
    {
        e.printStackTrace();
        System.err.print("Invalid profile URI : " + profile);
        System.exit(4);
    }
    return null; // unreachable in practice (System.exit above), required by the compiler
}
/**
 * Populate tokenMetadata consistently across runs.
 *
 * We need consistency to write and compact the same data offline
 * in the case of a range aware sstable writer.
 *
 * @param seed seed string; its hashCode seeds the RNG so token layout is reproducible
 * @param tokenMetadata metadata to (re)populate for the local endpoint
 * @param numTokens number of tokens to generate
 */
private void generateTokens(String seed, TokenMetadata tokenMetadata, Integer numTokens)
{
// Deterministic RNG: same seed string => same tokens on every run.
Random random = new Random(seed.hashCode());
IPartitioner p = tokenMetadata.partitioner;
tokenMetadata.clearUnsafe();
// NOTE(review): the outer loop generates and assigns a fresh batch of numTokens
// tokens for the SAME local endpoint on each iteration, so it appears only the
// final batch matters (O(n^2) token generation). Left untouched because removing
// the outer loop would change which random tokens end up assigned, breaking the
// cross-run consistency this method promises — confirm before simplifying.
for (int i = 1; i <= numTokens; i++)
{
InetAddressAndPort addr = FBUtilities.getBroadcastAddressAndPort();
List<Token> tokens = Lists.newArrayListWithCapacity(numTokens);
for (int j = 0; j < numTokens; ++j)
tokens.add(p.getRandomToken(random));
tokenMetadata.updateNormalTokens(tokens, addr);
}
}
// Entry point implemented by each sub-command (e.g. the nested Compaction and DataWriter classes).
public abstract void run();
/**
 * Sub-command that compacts previously written data in the configured
 * data directories, either via a single maximal compaction or by driving
 * background compactions until no work remains.
 */
@Command(name = "compact", description = "Compact data in directory")
public static class Compaction extends CompactionStress
{
    // Fixed: the description previously said "(default true)" while the
    // initializer below has always been false.
    @Option(name = {"-m", "--maximal"}, description = "Force maximal compaction (default false)")
    Boolean maximal = false;

    @Option(name = {"-t", "--threads"}, description = "Number of compactor threads to use for bg compactions (default 4)")
    Integer threads = 4;

    public void run()
    {
        //Setup
        SystemKeyspace.finishStartup(); //needed for early-open

        CompactionManager.instance.setMaximumCompactorThreads(threads);
        CompactionManager.instance.setCoreCompactorThreads(threads);
        CompactionManager.instance.setRate(0); // 0 disables compaction throughput throttling

        StressProfile stressProfile = getStressProfile();
        ColumnFamilyStore cfs = initCf(stressProfile, true);
        cfs.getCompactionStrategyManager().compactionLogger.enable();

        List<Future<?>> futures = new ArrayList<>(threads);
        if (maximal)
        {
            futures = CompactionManager.instance.submitMaximal(cfs, FBUtilities.nowInSeconds(), false);
        }
        else
        {
            cfs.enableAutoCompaction();
            cfs.getCompactionStrategyManager().enable();
            for (int i = 0; i < threads; i++)
                futures.addAll(CompactionManager.instance.submitBackground(cfs));
        }

        long working;
        //Report compaction stats while working
        while ((working = futures.stream().filter(f -> !f.isDone()).count()) > 0 || CompactionManager.instance.getActiveCompactions() > 0 || (!maximal && cfs.getCompactionStrategyManager().getEstimatedRemainingTasks() > 0))
        {
            //Re-up any bg jobs so `threads` submissions stay in flight
            if (!maximal)
            {
                for (long i = working; i < threads; i++)
                    futures.addAll(CompactionManager.instance.submitBackground(cfs));
            }

            reportCompactionStats();
            Uninterruptibles.sleepUninterruptibly(10, TimeUnit.SECONDS);
        }

        System.out.println("Finished! Shutting down...");
        CompactionManager.instance.forceShutdown();

        //Wait for cleanup to finish before forcing
        Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS);
        LifecycleTransaction.removeUnfinishedLeftovers(cfs);
    }
}
// Prints a snapshot of the pending-compaction count followed by the
// active-compaction table, separated by a banner line.
void reportCompactionStats()
{
    System.out.println("========");
    String pending = String.format("Pending compactions: %d\n", CompactionManager.instance.getPendingTasks());
    System.out.println(pending);
    CompactionStats.reportCompactionTable(CompactionManager.instance.getCompactions(), 0, true, System.out);
}
/**
 * Sub-command that generates sstables directly on disk (bypassing the
 * write path) until roughly {@code totalSizeGb} of raw data exists, using
 * one offline sstable writer per thread.
 */
@Command(name = "write", description = "write data directly to disk")
public static class DataWriter extends CompactionStress
{
    // Bytes per gigabyte. Fixed: previously 1024 * 1014 * 1024 (typo for
    // 1024^3), which under-counted a GB by roughly 1%.
    private static final double BYTES_IN_GB = 1024.0 * 1024.0 * 1024.0;

    @Option(name = { "-g", "--gbsize"}, description = "Total GB size on disk you wish to write", required = true)
    Integer totalSizeGb;

    @Option(name = { "-t", "--threads" }, description = "Number of sstable writer threads (default 2)")
    Integer threads = 2;

    @Option(name = { "-c", "--partition-count"}, description = "Number of partitions to loop over (default 1000000)")
    Integer partitions = 1000000;

    @Option(name = { "-b", "--buffer-size-mb"}, description = "Buffer in MB writes before writing new sstable (default 128)")
    Integer bufferSize = 128;

    @Option(name = { "-r", "--range-aware"}, description = "Splits the local ranges in number of data directories and makes sure we never write the same token in two different directories (default true)")
    Boolean makeRangeAware = true;

    public void run()
    {
        StressProfile stressProfile = getStressProfile();
        ColumnFamilyStore cfs = initCf(stressProfile, false);
        Directories directories = cfs.getDirectories();

        StressSettings settings = StressSettings.parse(new String[]{ "write", "-pop seq=1.." + partitions });
        SeedManager seedManager = new SeedManager(settings);
        PartitionGenerator generator = stressProfile.getOfflineGenerator();
        WorkManager workManager = new WorkManager.FixedWorkManager(Long.MAX_VALUE);

        ExecutorService executorService = Executors.newFixedThreadPool(threads);
        CountDownLatch finished = new CountDownLatch(threads);

        for (int i = 0; i < threads; i++)
        {
            //Every thread needs its own writer
            final SchemaInsert insert = stressProfile.getOfflineInsert(null, generator, seedManager, settings);
            final StressCQLSSTableWriter tableWriter = insert.createWriter(cfs, bufferSize, makeRangeAware);
            executorService.submit(() -> {
                try
                {
                    insert.runOffline(tableWriter, workManager);
                }
                catch (Exception e)
                {
                    e.printStackTrace();
                }
                finally
                {
                    // Always release the writer and signal completion, even on failure
                    FileUtils.closeQuietly(tableWriter);
                    finished.countDown();
                }
            });
        }

        // Poll on-disk size until the target is reached or all writers finish early
        double currentSizeGB;
        while ((currentSizeGB = directories.getRawDiretoriesSize() / BYTES_IN_GB) < totalSizeGb)
        {
            if (finished.getCount() == 0)
                break;

            System.out.println(String.format("Written %.2fGB of %dGB", currentSizeGB, totalSizeGb));
            Uninterruptibles.sleepUninterruptibly(3, TimeUnit.SECONDS);
        }

        workManager.stop();
        Uninterruptibles.awaitUninterruptibly(finished);

        currentSizeGB = directories.getRawDiretoriesSize() / BYTES_IN_GB;
        System.out.println(String.format("Finished writing %.2fGB", currentSizeGB));
    }
}
/**
 * Command-line entry point: builds the "compaction-stress" CLI, dispatches
 * to the selected sub-command, and exits 6 on any failure, 0 on success.
 */
public static void main(String[] args)
{
    Cli<Runnable> stress = Cli.<Runnable>builder("compaction-stress")
                              .withDescription("benchmark for compaction")
                              .withDefaultCommand(Help.class)
                              .withCommands(Help.class, DataWriter.class, Compaction.class)
                              .build();

    try
    {
        stress.parse(args).run();
    }
    catch (Throwable t)
    {
        t.printStackTrace();
        System.exit(6);
    }

    System.exit(0);
}
}
| |
/**
* Copyright ${license.git.copyrightYears} the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mybatis.generator.internal.db;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;
/**
* This class contains a somewhat comprehensive list of SQL reserved words.
* Since different databases have different reserved words, this list is
* inclusive of many different databases - so it may include words that are not
* reserved in some databases.
*
* This list is based on the list from Drupal Handbook:
* http://drupal.org/node/141051 With additions for DB2
*
* @author Jeff Butler
*
*/
public class SqlReservedWords {
private static Set<String> RESERVED_WORDS;
static {
String[] words = { "A", //$NON-NLS-1$
"ABORT", //$NON-NLS-1$
"ABS", //$NON-NLS-1$
"ABSOLUTE", //$NON-NLS-1$
"ACCESS", //$NON-NLS-1$
"ACTION", //$NON-NLS-1$
"ADA", //$NON-NLS-1$
"ADD", // DB2 //$NON-NLS-1$
"ADMIN", //$NON-NLS-1$
"AFTER", // DB2 //$NON-NLS-1$
"AGGREGATE", //$NON-NLS-1$
"ALIAS", // DB2 //$NON-NLS-1$
"ALL", // DB2 //$NON-NLS-1$
"ALLOCATE", // DB2 //$NON-NLS-1$
"ALLOW", // DB2 //$NON-NLS-1$
"ALSO", //$NON-NLS-1$
"ALTER", // DB2 //$NON-NLS-1$
"ALWAYS", //$NON-NLS-1$
"ANALYSE", //$NON-NLS-1$
"ANALYZE", //$NON-NLS-1$
"AND", // DB2 //$NON-NLS-1$
"ANY", // DB2 //$NON-NLS-1$
"APPLICATION", // DB2 //$NON-NLS-1$
"ARE", //$NON-NLS-1$
"ARRAY", //$NON-NLS-1$
"AS", // DB2 //$NON-NLS-1$
"ASC", //$NON-NLS-1$
"ASENSITIVE", //$NON-NLS-1$
"ASSERTION", //$NON-NLS-1$
"ASSIGNMENT", //$NON-NLS-1$
"ASSOCIATE", // DB2 //$NON-NLS-1$
"ASUTIME", // DB2 //$NON-NLS-1$
"ASYMMETRIC", //$NON-NLS-1$
"AT", //$NON-NLS-1$
"ATOMIC", //$NON-NLS-1$
"ATTRIBUTE", //$NON-NLS-1$
"ATTRIBUTES", //$NON-NLS-1$
"AUDIT", // DB2 //$NON-NLS-1$
"AUTHORIZATION", // DB2 //$NON-NLS-1$
"AUTO_INCREMENT", //$NON-NLS-1$
"AUX", // DB2 //$NON-NLS-1$
"AUXILIARY", // DB2 //$NON-NLS-1$
"AVG", //$NON-NLS-1$
"AVG_ROW_LENGTH", //$NON-NLS-1$
"BACKUP", //$NON-NLS-1$
"BACKWARD", //$NON-NLS-1$
"BEFORE", // DB2 //$NON-NLS-1$
"BEGIN", // DB2 //$NON-NLS-1$
"BERNOULLI", //$NON-NLS-1$
"BETWEEN", // DB2 //$NON-NLS-1$
"BIGINT", //$NON-NLS-1$
"BINARY", // DB2 //$NON-NLS-1$
"BIT", //$NON-NLS-1$
"BIT_LENGTH", //$NON-NLS-1$
"BITVAR", //$NON-NLS-1$
"BLOB", //$NON-NLS-1$
"BOOL", //$NON-NLS-1$
"BOOLEAN", //$NON-NLS-1$
"BOTH", //$NON-NLS-1$
"BREADTH", //$NON-NLS-1$
"BREAK", //$NON-NLS-1$
"BROWSE", //$NON-NLS-1$
"BUFFERPOOL", // DB2 //$NON-NLS-1$
"BULK", //$NON-NLS-1$
"BY", // DB2 //$NON-NLS-1$
"C", //$NON-NLS-1$
"CACHE", // DB2 //$NON-NLS-1$
"CALL", // DB2 //$NON-NLS-1$
"CALLED", // DB2 //$NON-NLS-1$
"CAPTURE", // DB2 //$NON-NLS-1$
"CARDINALITY", // DB2 //$NON-NLS-1$
"CASCADE", //$NON-NLS-1$
"CASCADED", // DB2 //$NON-NLS-1$
"CASE", // DB2 //$NON-NLS-1$
"CAST", // DB2 //$NON-NLS-1$
"CATALOG", //$NON-NLS-1$
"CATALOG_NAME", //$NON-NLS-1$
"CCSID", // DB2 //$NON-NLS-1$
"CEIL", //$NON-NLS-1$
"CEILING", //$NON-NLS-1$
"CHAIN", //$NON-NLS-1$
"CHANGE", //$NON-NLS-1$
"CHAR", // DB2 //$NON-NLS-1$
"CHAR_LENGTH", //$NON-NLS-1$
"CHARACTER", // DB2 //$NON-NLS-1$
"CHARACTER_LENGTH", //$NON-NLS-1$
"CHARACTER_SET_CATALOG", //$NON-NLS-1$
"CHARACTER_SET_NAME", //$NON-NLS-1$
"CHARACTER_SET_SCHEMA", //$NON-NLS-1$
"CHARACTERISTICS", //$NON-NLS-1$
"CHARACTERS", //$NON-NLS-1$
"CHECK", // DB2 //$NON-NLS-1$
"CHECKED", //$NON-NLS-1$
"CHECKPOINT", //$NON-NLS-1$
"CHECKSUM", //$NON-NLS-1$
"CLASS", //$NON-NLS-1$
"CLASS_ORIGIN", //$NON-NLS-1$
"CLOB", //$NON-NLS-1$
"CLOSE", // DB2 //$NON-NLS-1$
"CLUSTER", // DB2 //$NON-NLS-1$
"CLUSTERED", //$NON-NLS-1$
"COALESCE", //$NON-NLS-1$
"COBOL", //$NON-NLS-1$
"COLLATE", //$NON-NLS-1$
"COLLATION", //$NON-NLS-1$
"COLLATION_CATALOG", //$NON-NLS-1$
"COLLATION_NAME", //$NON-NLS-1$
"COLLATION_SCHEMA", //$NON-NLS-1$
"COLLECT", //$NON-NLS-1$
"COLLECTION", // DB2 //$NON-NLS-1$
"COLLID", // DB2 //$NON-NLS-1$
"COLUMN", // DB2 //$NON-NLS-1$
"COLUMN_NAME", //$NON-NLS-1$
"COLUMNS", //$NON-NLS-1$
"COMMAND_FUNCTION", //$NON-NLS-1$
"COMMAND_FUNCTION_CODE", //$NON-NLS-1$
"COMMENT", // DB2 //$NON-NLS-1$
"COMMIT", // DB2 //$NON-NLS-1$
"COMMITTED", //$NON-NLS-1$
"COMPLETION", //$NON-NLS-1$
"COMPRESS", //$NON-NLS-1$
"COMPUTE", //$NON-NLS-1$
"CONCAT", // DB2 //$NON-NLS-1$
"CONDITION", // DB2 //$NON-NLS-1$
"CONDITION_NUMBER", //$NON-NLS-1$
"CONNECT", // DB2 //$NON-NLS-1$
"CONNECTION", // DB2 //$NON-NLS-1$
"CONNECTION_NAME", //$NON-NLS-1$
"CONSTRAINT", // DB2 //$NON-NLS-1$
"CONSTRAINT_CATALOG", //$NON-NLS-1$
"CONSTRAINT_NAME", //$NON-NLS-1$
"CONSTRAINT_SCHEMA", //$NON-NLS-1$
"CONSTRAINTS", //$NON-NLS-1$
"CONSTRUCTOR", //$NON-NLS-1$
"CONTAINS", // DB2 //$NON-NLS-1$
"CONTAINSTABLE", //$NON-NLS-1$
"CONTINUE", // DB2 //$NON-NLS-1$
"CONVERSION", //$NON-NLS-1$
"CONVERT", //$NON-NLS-1$
"COPY", //$NON-NLS-1$
"CORR", //$NON-NLS-1$
"CORRESPONDING", //$NON-NLS-1$
"COUNT", // DB2 //$NON-NLS-1$
"COUNT_BIG", // DB2 //$NON-NLS-1$
"COVAR_POP", //$NON-NLS-1$
"COVAR_SAMP", //$NON-NLS-1$
"CREATE", // DB2 //$NON-NLS-1$
"CREATEDB", //$NON-NLS-1$
"CREATEROLE", //$NON-NLS-1$
"CREATEUSER", //$NON-NLS-1$
"CROSS", // DB2 //$NON-NLS-1$
"CSV", //$NON-NLS-1$
"CUBE", //$NON-NLS-1$
"CUME_DIST", //$NON-NLS-1$
"CURRENT", // DB2 //$NON-NLS-1$
"CURRENT_DATE", // DB2 //$NON-NLS-1$
"CURRENT_DEFAULT_TRANSFORM_GROUP", //$NON-NLS-1$
"CURRENT_LC_CTYPE", // DB2 //$NON-NLS-1$
"CURRENT_PATH", // DB2 //$NON-NLS-1$
"CURRENT_ROLE", //$NON-NLS-1$
"CURRENT_SERVER", // DB2 //$NON-NLS-1$
"CURRENT_TIME", // DB2 //$NON-NLS-1$
"CURRENT_TIMESTAMP", // DB2 //$NON-NLS-1$
"CURRENT_TIMEZONE", // DB2 //$NON-NLS-1$
"CURRENT_TRANSFORM_GROUP_FOR_TYPE", //$NON-NLS-1$
"CURRENT_USER", // DB2 //$NON-NLS-1$
"CURSOR", // DB2 //$NON-NLS-1$
"CURSOR_NAME", //$NON-NLS-1$
"CYCLE", // DB2 //$NON-NLS-1$
"DATA", // DB2 //$NON-NLS-1$
"DATABASE", // DB2 //$NON-NLS-1$
"DATABASES", //$NON-NLS-1$
"DATE", //$NON-NLS-1$
"DATETIME", //$NON-NLS-1$
"DATETIME_INTERVAL_CODE", //$NON-NLS-1$
"DATETIME_INTERVAL_PRECISION", //$NON-NLS-1$
"DAY", // DB2 //$NON-NLS-1$
"DAY_HOUR", //$NON-NLS-1$
"DAY_MICROSECOND", //$NON-NLS-1$
"DAY_MINUTE", //$NON-NLS-1$
"DAY_SECOND", //$NON-NLS-1$
"DAYOFMONTH", //$NON-NLS-1$
"DAYOFWEEK", //$NON-NLS-1$
"DAYOFYEAR", //$NON-NLS-1$
"DAYS", // DB2 //$NON-NLS-1$
"DB2GENERAL", // DB2 //$NON-NLS-1$
"DB2GNRL", // DB2 //$NON-NLS-1$
"DB2SQL", // DB2 //$NON-NLS-1$
"DBCC", //$NON-NLS-1$
"DBINFO", // DB2 //$NON-NLS-1$
"DEALLOCATE", //$NON-NLS-1$
"DEC", //$NON-NLS-1$
"DECIMAL", //$NON-NLS-1$
"DECLARE", // DB2 //$NON-NLS-1$
"DEFAULT", // DB2 //$NON-NLS-1$
"DEFAULTS", // DB2 //$NON-NLS-1$
"DEFERRABLE", //$NON-NLS-1$
"DEFERRED", //$NON-NLS-1$
"DEFINED", //$NON-NLS-1$
"DEFINER", //$NON-NLS-1$
"DEFINITION", // DB2 //$NON-NLS-1$
"DEGREE", //$NON-NLS-1$
"DELAY_KEY_WRITE", //$NON-NLS-1$
"DELAYED", //$NON-NLS-1$
"DELETE", // DB2 //$NON-NLS-1$
"DELIMITER", //$NON-NLS-1$
"DELIMITERS", //$NON-NLS-1$
"DENSE_RANK", //$NON-NLS-1$
"DENY", //$NON-NLS-1$
"DEPTH", //$NON-NLS-1$
"DEREF", //$NON-NLS-1$
"DERIVED", //$NON-NLS-1$
"DESC", //$NON-NLS-1$
"DESCRIBE", //$NON-NLS-1$
"DESCRIPTOR", // DB2 //$NON-NLS-1$
"DESTROY", //$NON-NLS-1$
"DESTRUCTOR", //$NON-NLS-1$
"DETERMINISTIC", // DB2 //$NON-NLS-1$
"DIAGNOSTICS", //$NON-NLS-1$
"DICTIONARY", //$NON-NLS-1$
"DISABLE", //$NON-NLS-1$
"DISALLOW", // DB2 //$NON-NLS-1$
"DISCONNECT", // DB2 //$NON-NLS-1$
"DISK", //$NON-NLS-1$
"DISPATCH", //$NON-NLS-1$
"DISTINCT", // DB2 //$NON-NLS-1$
"DISTINCTROW", //$NON-NLS-1$
"DISTRIBUTED", //$NON-NLS-1$
"DIV", //$NON-NLS-1$
"DO", // DB2 //$NON-NLS-1$
"DOMAIN", //$NON-NLS-1$
"DOUBLE", // DB2 //$NON-NLS-1$
"DROP", // DB2 //$NON-NLS-1$
"DSNHATTR", // DB2 //$NON-NLS-1$
"DSSIZE", // DB2 //$NON-NLS-1$
"DUAL", //$NON-NLS-1$
"DUMMY", //$NON-NLS-1$
"DUMP", //$NON-NLS-1$
"DYNAMIC", // DB2 //$NON-NLS-1$
"DYNAMIC_FUNCTION", //$NON-NLS-1$
"DYNAMIC_FUNCTION_CODE", //$NON-NLS-1$
"EACH", // DB2 //$NON-NLS-1$
"EDITPROC", // DB2 //$NON-NLS-1$
"ELEMENT", //$NON-NLS-1$
"ELSE", // DB2 //$NON-NLS-1$
"ELSEIF", // DB2 //$NON-NLS-1$
"ENABLE", //$NON-NLS-1$
"ENCLOSED", //$NON-NLS-1$
"ENCODING", // DB2 //$NON-NLS-1$
"ENCRYPTED", //$NON-NLS-1$
"END", // DB2 //$NON-NLS-1$
"END-EXEC", // DB2 //$NON-NLS-1$
"END-EXEC1", // DB2 //$NON-NLS-1$
"ENUM", //$NON-NLS-1$
"EQUALS", //$NON-NLS-1$
"ERASE", // DB2 //$NON-NLS-1$
"ERRLVL", //$NON-NLS-1$
"ESCAPE", // DB2 //$NON-NLS-1$
"ESCAPED", //$NON-NLS-1$
"EVERY", //$NON-NLS-1$
"EXCEPT", // DB2 //$NON-NLS-1$
"EXCEPTION", // DB2 //$NON-NLS-1$
"EXCLUDE", //$NON-NLS-1$
"EXCLUDING", // DB2 //$NON-NLS-1$
"EXCLUSIVE", //$NON-NLS-1$
"EXEC", //$NON-NLS-1$
"EXECUTE", // DB2 //$NON-NLS-1$
"EXISTING", //$NON-NLS-1$
"EXISTS", // DB2 //$NON-NLS-1$
"EXIT", // DB2 //$NON-NLS-1$
"EXP", //$NON-NLS-1$
"EXPLAIN", //$NON-NLS-1$
"EXTERNAL", // DB2 //$NON-NLS-1$
"EXTRACT", //$NON-NLS-1$
"FALSE", //$NON-NLS-1$
"FENCED", // DB2 //$NON-NLS-1$
"FETCH", // DB2 //$NON-NLS-1$
"FIELDPROC", // DB2 //$NON-NLS-1$
"FIELDS", //$NON-NLS-1$
"FILE", // DB2 //$NON-NLS-1$
"FILLFACTOR", //$NON-NLS-1$
"FILTER", //$NON-NLS-1$
"FINAL", // DB2 //$NON-NLS-1$
"FIRST", //$NON-NLS-1$
"FLOAT", //$NON-NLS-1$
"FLOAT4", //$NON-NLS-1$
"FLOAT8", //$NON-NLS-1$
"FLOOR", //$NON-NLS-1$
"FLUSH", //$NON-NLS-1$
"FOLLOWING", //$NON-NLS-1$
"FOR", // DB2 //$NON-NLS-1$
"FORCE", //$NON-NLS-1$
"FOREIGN", // DB2 //$NON-NLS-1$
"FORTRAN", //$NON-NLS-1$
"FORWARD", //$NON-NLS-1$
"FOUND", //$NON-NLS-1$
"FREE", // DB2 //$NON-NLS-1$
"FREETEXT", //$NON-NLS-1$
"FREETEXTTABLE", //$NON-NLS-1$
"FREEZE", //$NON-NLS-1$
"FROM", // DB2 //$NON-NLS-1$
"FULL", // DB2 //$NON-NLS-1$
"FULLTEXT", //$NON-NLS-1$
"FUNCTION", // DB2 //$NON-NLS-1$
"FUSION", //$NON-NLS-1$
"G", //$NON-NLS-1$
"GENERAL", // DB2 //$NON-NLS-1$
"GENERATED", // DB2 //$NON-NLS-1$
"GET", // DB2 //$NON-NLS-1$
"GLOBAL", // DB2 //$NON-NLS-1$
"GO", // DB2 //$NON-NLS-1$
"GOTO", // DB2 //$NON-NLS-1$
"GRANT", // DB2 //$NON-NLS-1$
"GRANTED", //$NON-NLS-1$
"GRANTS", //$NON-NLS-1$
"GRAPHIC", // DB2 //$NON-NLS-1$
"GREATEST", //$NON-NLS-1$
"GROUP", // DB2 //$NON-NLS-1$
"GROUPING", //$NON-NLS-1$
"HANDLER", // DB2 //$NON-NLS-1$
"HAVING", // DB2 //$NON-NLS-1$
"HEADER", //$NON-NLS-1$
"HEAP", //$NON-NLS-1$
"HIERARCHY", //$NON-NLS-1$
"HIGH_PRIORITY", //$NON-NLS-1$
"HOLD", // DB2 //$NON-NLS-1$
"HOLDLOCK", //$NON-NLS-1$
"HOST", //$NON-NLS-1$
"HOSTS", //$NON-NLS-1$
"HOUR", // DB2 //$NON-NLS-1$
"HOUR_MICROSECOND", //$NON-NLS-1$
"HOUR_MINUTE", //$NON-NLS-1$
"HOUR_SECOND", //$NON-NLS-1$
"HOURS", // DB2 //$NON-NLS-1$
"IDENTIFIED", //$NON-NLS-1$
"IDENTITY", // DB2 //$NON-NLS-1$
"IDENTITY_INSERT", //$NON-NLS-1$
"IDENTITYCOL", //$NON-NLS-1$
"IF", // DB2 //$NON-NLS-1$
"IGNORE", //$NON-NLS-1$
"ILIKE", //$NON-NLS-1$
"IMMEDIATE", // DB2 //$NON-NLS-1$
"IMMUTABLE", //$NON-NLS-1$
"IMPLEMENTATION", //$NON-NLS-1$
"IMPLICIT", //$NON-NLS-1$
"IN", // DB2 //$NON-NLS-1$
"INCLUDE", //$NON-NLS-1$
"INCLUDING", // DB2 //$NON-NLS-1$
"INCREMENT", // DB2 //$NON-NLS-1$
"INDEX", // DB2 //$NON-NLS-1$
"INDICATOR", // DB2 //$NON-NLS-1$
"INFILE", //$NON-NLS-1$
"INFIX", //$NON-NLS-1$
"INHERIT", // DB2 //$NON-NLS-1$
"INHERITS", //$NON-NLS-1$
"INITIAL", //$NON-NLS-1$
"INITIALIZE", //$NON-NLS-1$
"INITIALLY", //$NON-NLS-1$
"INNER", // DB2 //$NON-NLS-1$
"INOUT", // DB2 //$NON-NLS-1$
"INPUT", //$NON-NLS-1$
"INSENSITIVE", // DB2 //$NON-NLS-1$
"INSERT", // DB2 //$NON-NLS-1$
"INSERT_ID", //$NON-NLS-1$
"INSTANCE", //$NON-NLS-1$
"INSTANTIABLE", //$NON-NLS-1$
"INSTEAD", //$NON-NLS-1$
"INT", //$NON-NLS-1$
"INT1", //$NON-NLS-1$
"INT2", //$NON-NLS-1$
"INT3", //$NON-NLS-1$
"INT4", //$NON-NLS-1$
"INT8", //$NON-NLS-1$
"INTEGER", //$NON-NLS-1$
"INTEGRITY", // DB2 //$NON-NLS-1$
"INTERSECT", //$NON-NLS-1$
"INTERSECTION", //$NON-NLS-1$
"INTERVAL", //$NON-NLS-1$
"INTO", // DB2 //$NON-NLS-1$
"INVOKER", //$NON-NLS-1$
"IS", // DB2 //$NON-NLS-1$
"ISAM", //$NON-NLS-1$
"ISNULL", //$NON-NLS-1$
"ISOBID", // DB2 //$NON-NLS-1$
"ISOLATION", // DB2 //$NON-NLS-1$
"ITERATE", // DB2 //$NON-NLS-1$
"JAR", // DB2 //$NON-NLS-1$
"JAVA", // DB2 //$NON-NLS-1$
"JOIN", // DB2 //$NON-NLS-1$
"K", //$NON-NLS-1$
"KEY", // DB2 //$NON-NLS-1$
"KEY_MEMBER", //$NON-NLS-1$
"KEY_TYPE", //$NON-NLS-1$
"KEYS", //$NON-NLS-1$
"KILL", //$NON-NLS-1$
"LABEL", // DB2 //$NON-NLS-1$
"LANCOMPILER", //$NON-NLS-1$
"LANGUAGE", // DB2 //$NON-NLS-1$
"LARGE", //$NON-NLS-1$
"LAST", //$NON-NLS-1$
"LAST_INSERT_ID", //$NON-NLS-1$
"LATERAL", //$NON-NLS-1$
"LC_CTYPE", // DB2 //$NON-NLS-1$
"LEADING", //$NON-NLS-1$
"LEAST", //$NON-NLS-1$
"LEAVE", // DB2 //$NON-NLS-1$
"LEFT", // DB2 //$NON-NLS-1$
"LENGTH", //$NON-NLS-1$
"LESS", //$NON-NLS-1$
"LEVEL", //$NON-NLS-1$
"LIKE", // DB2 //$NON-NLS-1$
"LIMIT", //$NON-NLS-1$
"LINENO", //$NON-NLS-1$
"LINES", //$NON-NLS-1$
"LINKTYPE", // DB2 //$NON-NLS-1$
"LISTEN", //$NON-NLS-1$
"LN", //$NON-NLS-1$
"LOAD", //$NON-NLS-1$
"LOCAL", // DB2 //$NON-NLS-1$
"LOCALE", // DB2 //$NON-NLS-1$
"LOCALTIME", //$NON-NLS-1$
"LOCALTIMESTAMP", //$NON-NLS-1$
"LOCATION", //$NON-NLS-1$
"LOCATOR", // DB2 //$NON-NLS-1$
"LOCATORS", // DB2 //$NON-NLS-1$
"LOCK", // DB2 //$NON-NLS-1$
"LOCKMAX", // DB2 //$NON-NLS-1$
"LOCKSIZE", // DB2 //$NON-NLS-1$
"LOGIN", //$NON-NLS-1$
"LOGS", //$NON-NLS-1$
"LONG", // DB2 //$NON-NLS-1$
"LONGBLOB", //$NON-NLS-1$
"LONGTEXT", //$NON-NLS-1$
"LOOP", // DB2 //$NON-NLS-1$
"LOW_PRIORITY", //$NON-NLS-1$
"LOWER", //$NON-NLS-1$
"M", //$NON-NLS-1$
"MAP", //$NON-NLS-1$
"MATCH", //$NON-NLS-1$
"MATCHED", //$NON-NLS-1$
"MAX", //$NON-NLS-1$
"MAX_ROWS", //$NON-NLS-1$
"MAXEXTENTS", //$NON-NLS-1$
"MAXVALUE", // DB2 //$NON-NLS-1$
"MEDIUMBLOB", //$NON-NLS-1$
"MEDIUMINT", //$NON-NLS-1$
"MEDIUMTEXT", //$NON-NLS-1$
"MEMBER", //$NON-NLS-1$
"MERGE", //$NON-NLS-1$
"MESSAGE_LENGTH", //$NON-NLS-1$
"MESSAGE_OCTET_LENGTH", //$NON-NLS-1$
"MESSAGE_TEXT", //$NON-NLS-1$
"METHOD", //$NON-NLS-1$
"MICROSECOND", // DB2 //$NON-NLS-1$
"MICROSECONDS", // DB2 //$NON-NLS-1$
"MIDDLEINT", //$NON-NLS-1$
"MIN", //$NON-NLS-1$
"MIN_ROWS", //$NON-NLS-1$
"MINUS", //$NON-NLS-1$
"MINUTE", // DB2 //$NON-NLS-1$
"MINUTE_MICROSECOND", //$NON-NLS-1$
"MINUTE_SECOND", //$NON-NLS-1$
"MINUTES", // DB2 //$NON-NLS-1$
"MINVALUE", // DB2 //$NON-NLS-1$
"MLSLABEL", //$NON-NLS-1$
"MOD", //$NON-NLS-1$
"MODE", // DB2 //$NON-NLS-1$
"MODIFIES", // DB2 //$NON-NLS-1$
"MODIFY", //$NON-NLS-1$
"MODULE", //$NON-NLS-1$
"MONTH", // DB2 //$NON-NLS-1$
"MONTHNAME", //$NON-NLS-1$
"MONTHS", // DB2 //$NON-NLS-1$
"MORE", //$NON-NLS-1$
"MOVE", //$NON-NLS-1$
"MULTISET", //$NON-NLS-1$
"MUMPS", //$NON-NLS-1$
"MYISAM", //$NON-NLS-1$
"NAME", //$NON-NLS-1$
"NAMES", //$NON-NLS-1$
"NATIONAL", //$NON-NLS-1$
"NATURAL", //$NON-NLS-1$
"NCHAR", //$NON-NLS-1$
"NCLOB", //$NON-NLS-1$
"NESTING", //$NON-NLS-1$
"NEW", // DB2 //$NON-NLS-1$
"NEW_TABLE", // DB2 //$NON-NLS-1$
"NEXT", //$NON-NLS-1$
"NO", // DB2 //$NON-NLS-1$
"NO_WRITE_TO_BINLOG", //$NON-NLS-1$
"NOAUDIT", //$NON-NLS-1$
"NOCACHE", // DB2 //$NON-NLS-1$
"NOCHECK", //$NON-NLS-1$
"NOCOMPRESS", //$NON-NLS-1$
"NOCREATEDB", //$NON-NLS-1$
"NOCREATEROLE", //$NON-NLS-1$
"NOCREATEUSER", //$NON-NLS-1$
"NOCYCLE", // DB2 //$NON-NLS-1$
"NODENAME", // DB2 //$NON-NLS-1$
"NODENUMBER", // DB2 //$NON-NLS-1$
"NOINHERIT", //$NON-NLS-1$
"NOLOGIN", //$NON-NLS-1$
"NOMAXVALUE", // DB2 //$NON-NLS-1$
"NOMINVALUE", // DB2 //$NON-NLS-1$
"NONCLUSTERED", //$NON-NLS-1$
"NONE", //$NON-NLS-1$
"NOORDER", // DB2 //$NON-NLS-1$
"NORMALIZE", //$NON-NLS-1$
"NORMALIZED", //$NON-NLS-1$
"NOSUPERUSER", //$NON-NLS-1$
"NOT", // DB2 //$NON-NLS-1$
"NOTHING", //$NON-NLS-1$
"NOTIFY", //$NON-NLS-1$
"NOTNULL", //$NON-NLS-1$
"NOWAIT", //$NON-NLS-1$
"NULL", // DB2 //$NON-NLS-1$
"NULLABLE", //$NON-NLS-1$
"NULLIF", //$NON-NLS-1$
"NULLS", // DB2 //$NON-NLS-1$
"NUMBER", //$NON-NLS-1$
"NUMERIC", //$NON-NLS-1$
"NUMPARTS", // DB2 //$NON-NLS-1$
"OBID", // DB2 //$NON-NLS-1$
"OBJECT", //$NON-NLS-1$
"OCTET_LENGTH", //$NON-NLS-1$
"OCTETS", //$NON-NLS-1$
"OF", // DB2 //$NON-NLS-1$
"OFF", //$NON-NLS-1$
"OFFLINE", //$NON-NLS-1$
"OFFSET", //$NON-NLS-1$
"OFFSETS", //$NON-NLS-1$
"OIDS", //$NON-NLS-1$
"OLD", // DB2 //$NON-NLS-1$
"OLD_TABLE", // DB2 //$NON-NLS-1$
"ON", // DB2 //$NON-NLS-1$
"ONLINE", //$NON-NLS-1$
"ONLY", //$NON-NLS-1$
"OPEN", // DB2 //$NON-NLS-1$
"OPENDATASOURCE", //$NON-NLS-1$
"OPENQUERY", //$NON-NLS-1$
"OPENROWSET", //$NON-NLS-1$
"OPENXML", //$NON-NLS-1$
"OPERATION", //$NON-NLS-1$
"OPERATOR", //$NON-NLS-1$
"OPTIMIZATION", // DB2 //$NON-NLS-1$
"OPTIMIZE", // DB2 //$NON-NLS-1$
"OPTION", // DB2 //$NON-NLS-1$
"OPTIONALLY", //$NON-NLS-1$
"OPTIONS", //$NON-NLS-1$
"OR", // DB2 //$NON-NLS-1$
"ORDER", // DB2 //$NON-NLS-1$
"ORDERING", //$NON-NLS-1$
"ORDINALITY", //$NON-NLS-1$
"OTHERS", //$NON-NLS-1$
"OUT", // DB2 //$NON-NLS-1$
"OUTER", // DB2 //$NON-NLS-1$
"OUTFILE", //$NON-NLS-1$
"OUTPUT", //$NON-NLS-1$
"OVER", //$NON-NLS-1$
"OVERLAPS", //$NON-NLS-1$
"OVERLAY", //$NON-NLS-1$
"OVERRIDING", // DB2 //$NON-NLS-1$
"OWNER", //$NON-NLS-1$
"PACK_KEYS", //$NON-NLS-1$
"PACKAGE", // DB2 //$NON-NLS-1$
"PAD", //$NON-NLS-1$
"PARAMETER", // DB2 //$NON-NLS-1$
"PARAMETER_MODE", //$NON-NLS-1$
"PARAMETER_NAME", //$NON-NLS-1$
"PARAMETER_ORDINAL_POSITION", //$NON-NLS-1$
"PARAMETER_SPECIFIC_CATALOG", //$NON-NLS-1$
"PARAMETER_SPECIFIC_NAME", //$NON-NLS-1$
"PARAMETER_SPECIFIC_SCHEMA", //$NON-NLS-1$
"PARAMETERS", //$NON-NLS-1$
"PART", // DB2 //$NON-NLS-1$
"PARTIAL", //$NON-NLS-1$
"PARTITION", // DB2 //$NON-NLS-1$
"PASCAL", //$NON-NLS-1$
"PASSWORD", //$NON-NLS-1$
"PATH", // DB2 //$NON-NLS-1$
"PCTFREE", //$NON-NLS-1$
"PERCENT", //$NON-NLS-1$
"PERCENT_RANK", //$NON-NLS-1$
"PERCENTILE_CONT", //$NON-NLS-1$
"PERCENTILE_DISC", //$NON-NLS-1$
"PIECESIZE", // DB2 //$NON-NLS-1$
"PLACING", //$NON-NLS-1$
"PLAN", // DB2 //$NON-NLS-1$
"PLI", //$NON-NLS-1$
"POSITION", // DB2 //$NON-NLS-1$
"POSTFIX", //$NON-NLS-1$
"POWER", //$NON-NLS-1$
"PRECEDING", //$NON-NLS-1$
"PRECISION", // DB2 //$NON-NLS-1$
"PREFIX", //$NON-NLS-1$
"PREORDER", //$NON-NLS-1$
"PREPARE", // DB2 //$NON-NLS-1$
"PREPARED", //$NON-NLS-1$
"PRESERVE", //$NON-NLS-1$
"PRIMARY", // DB2 //$NON-NLS-1$
"PRINT", //$NON-NLS-1$
"PRIOR", //$NON-NLS-1$
"PRIQTY", // DB2 //$NON-NLS-1$
"PRIVILEGES", // DB2 //$NON-NLS-1$
"PROC", //$NON-NLS-1$
"PROCEDURAL", //$NON-NLS-1$
"PROCEDURE", // DB2 //$NON-NLS-1$
"PROCESS", //$NON-NLS-1$
"PROCESSLIST", //$NON-NLS-1$
"PROGRAM", // DB2 //$NON-NLS-1$
"PSID", // DB2 //$NON-NLS-1$
"PUBLIC", //$NON-NLS-1$
"PURGE", //$NON-NLS-1$
"QUERYNO", // DB2 //$NON-NLS-1$
"QUOTE", //$NON-NLS-1$
"RAID0", //$NON-NLS-1$
"RAISERROR", //$NON-NLS-1$
"RANGE", //$NON-NLS-1$
"RANK", //$NON-NLS-1$
"RAW", //$NON-NLS-1$
"READ", // DB2 //$NON-NLS-1$
"READS", // DB2 //$NON-NLS-1$
"READTEXT", //$NON-NLS-1$
"REAL", //$NON-NLS-1$
"RECHECK", //$NON-NLS-1$
"RECONFIGURE", //$NON-NLS-1$
"RECOVERY", // DB2 //$NON-NLS-1$
"RECURSIVE", //$NON-NLS-1$
"REF", //$NON-NLS-1$
"REFERENCES", // DB2 //$NON-NLS-1$
"REFERENCING", // DB2 //$NON-NLS-1$
"REGEXP", //$NON-NLS-1$
"REGR_AVGX", //$NON-NLS-1$
"REGR_AVGY", //$NON-NLS-1$
"REGR_COUNT", //$NON-NLS-1$
"REGR_INTERCEPT", //$NON-NLS-1$
"REGR_R2", //$NON-NLS-1$
"REGR_SLOPE", //$NON-NLS-1$
"REGR_SXX", //$NON-NLS-1$
"REGR_SXY", //$NON-NLS-1$
"REGR_SYY", //$NON-NLS-1$
"REINDEX", //$NON-NLS-1$
"RELATIVE", //$NON-NLS-1$
"RELEASE", // DB2 //$NON-NLS-1$
"RELOAD", //$NON-NLS-1$
"RENAME", // DB2 //$NON-NLS-1$
"REPEAT", // DB2 //$NON-NLS-1$
"REPEATABLE", //$NON-NLS-1$
"REPLACE", //$NON-NLS-1$
"REPLICATION", //$NON-NLS-1$
"REQUIRE", //$NON-NLS-1$
"RESET", // DB2 //$NON-NLS-1$
"RESIGNAL", // DB2 //$NON-NLS-1$
"RESOURCE", //$NON-NLS-1$
"RESTART", // DB2 //$NON-NLS-1$
"RESTORE", //$NON-NLS-1$
"RESTRICT", // DB2 //$NON-NLS-1$
"RESULT", // DB2 //$NON-NLS-1$
"RESULT_SET_LOCATOR", // DB2 //$NON-NLS-1$
"RETURN", // DB2 //$NON-NLS-1$
"RETURNED_CARDINALITY", //$NON-NLS-1$
"RETURNED_LENGTH", //$NON-NLS-1$
"RETURNED_OCTET_LENGTH", //$NON-NLS-1$
"RETURNED_SQLSTATE", //$NON-NLS-1$
"RETURNS", // DB2 //$NON-NLS-1$
"REVOKE", // DB2 //$NON-NLS-1$
"RIGHT", // DB2 //$NON-NLS-1$
"RLIKE", //$NON-NLS-1$
"ROLE", //$NON-NLS-1$
"ROLLBACK", // DB2 //$NON-NLS-1$
"ROLLUP", //$NON-NLS-1$
"ROUTINE", // DB2 //$NON-NLS-1$
"ROUTINE_CATALOG", //$NON-NLS-1$
"ROUTINE_NAME", //$NON-NLS-1$
"ROUTINE_SCHEMA", //$NON-NLS-1$
"ROW", // DB2 //$NON-NLS-1$
"ROW_COUNT", //$NON-NLS-1$
"ROW_NUMBER", //$NON-NLS-1$
"ROWCOUNT", //$NON-NLS-1$
"ROWGUIDCOL", //$NON-NLS-1$
"ROWID", //$NON-NLS-1$
"ROWNUM", //$NON-NLS-1$
"ROWS", // DB2 //$NON-NLS-1$
"RRN", // DB2 //$NON-NLS-1$
"RULE", //$NON-NLS-1$
"RUN", // DB2 //$NON-NLS-1$
"SAVE", //$NON-NLS-1$
"SAVEPOINT", // DB2 //$NON-NLS-1$
"SCALE", //$NON-NLS-1$
"SCHEMA", // DB2 //$NON-NLS-1$
"SCHEMA_NAME", //$NON-NLS-1$
"SCHEMAS", //$NON-NLS-1$
"SCOPE", //$NON-NLS-1$
"SCOPE_CATALOG", //$NON-NLS-1$
"SCOPE_NAME", //$NON-NLS-1$
"SCOPE_SCHEMA", //$NON-NLS-1$
"SCRATCHPAD", // DB2 //$NON-NLS-1$
"SCROLL", //$NON-NLS-1$
"SEARCH", //$NON-NLS-1$
"SECOND", // DB2 //$NON-NLS-1$
"SECOND_MICROSECOND", //$NON-NLS-1$
"SECONDS", // DB2 //$NON-NLS-1$
"SECQTY", // DB2 //$NON-NLS-1$
"SECTION", //$NON-NLS-1$
"SECURITY", // DB2 //$NON-NLS-1$
"SELECT", // DB2 //$NON-NLS-1$
"SELF", //$NON-NLS-1$
"SENSITIVE", // DB2 //$NON-NLS-1$
"SEPARATOR", //$NON-NLS-1$
"SEQUENCE", //$NON-NLS-1$
"SERIALIZABLE", //$NON-NLS-1$
"SERVER_NAME", //$NON-NLS-1$
"SESSION", //$NON-NLS-1$
"SESSION_USER", //$NON-NLS-1$
"SET", // DB2 //$NON-NLS-1$
"SETOF", //$NON-NLS-1$
"SETS", //$NON-NLS-1$
"SETUSER", //$NON-NLS-1$
"SHARE", //$NON-NLS-1$
"SHOW", //$NON-NLS-1$
"SHUTDOWN", //$NON-NLS-1$
"SIGNAL", // DB2 //$NON-NLS-1$
"SIMILAR", //$NON-NLS-1$
"SIMPLE", // DB2 //$NON-NLS-1$
"SIZE", //$NON-NLS-1$
"SMALLINT", //$NON-NLS-1$
"SOME", // DB2 //$NON-NLS-1$
"SONAME", //$NON-NLS-1$
"SOURCE", // DB2 //$NON-NLS-1$
"SPACE", //$NON-NLS-1$
"SPATIAL", //$NON-NLS-1$
"SPECIFIC", // DB2 //$NON-NLS-1$
"SPECIFIC_NAME", //$NON-NLS-1$
"SPECIFICTYPE", //$NON-NLS-1$
"SQL", // DB2 //$NON-NLS-1$
"SQL_BIG_RESULT", //$NON-NLS-1$
"SQL_BIG_SELECTS", //$NON-NLS-1$
"SQL_BIG_TABLES", //$NON-NLS-1$
"SQL_CALC_FOUND_ROWS", //$NON-NLS-1$
"SQL_LOG_OFF", //$NON-NLS-1$
"SQL_LOG_UPDATE", //$NON-NLS-1$
"SQL_LOW_PRIORITY_UPDATES", //$NON-NLS-1$
"SQL_SELECT_LIMIT", //$NON-NLS-1$
"SQL_SMALL_RESULT", //$NON-NLS-1$
"SQL_WARNINGS", //$NON-NLS-1$
"SQLCA", //$NON-NLS-1$
"SQLCODE", //$NON-NLS-1$
"SQLERROR", //$NON-NLS-1$
"SQLEXCEPTION", //$NON-NLS-1$
"SQLID", // DB2 //$NON-NLS-1$
"SQLSTATE", //$NON-NLS-1$
"SQLWARNING", //$NON-NLS-1$
"SQRT", //$NON-NLS-1$
"SSL", //$NON-NLS-1$
"STABLE", //$NON-NLS-1$
"STANDARD", // DB2 //$NON-NLS-1$
"START", // DB2 //$NON-NLS-1$
"STARTING", //$NON-NLS-1$
"STATE", //$NON-NLS-1$
"STATEMENT", //$NON-NLS-1$
"STATIC", // DB2 //$NON-NLS-1$
"STATISTICS", //$NON-NLS-1$
"STATUS", //$NON-NLS-1$
"STAY", // DB2 //$NON-NLS-1$
"STDDEV_POP", //$NON-NLS-1$
"STDDEV_SAMP", //$NON-NLS-1$
"STDIN", //$NON-NLS-1$
"STDOUT", //$NON-NLS-1$
"STOGROUP", // DB2 //$NON-NLS-1$
"STORAGE", //$NON-NLS-1$
"STORES", // DB2 //$NON-NLS-1$
"STRAIGHT_JOIN", //$NON-NLS-1$
"STRICT", //$NON-NLS-1$
"STRING", //$NON-NLS-1$
"STRUCTURE", //$NON-NLS-1$
"STYLE", // DB2 //$NON-NLS-1$
"SUBCLASS_ORIGIN", //$NON-NLS-1$
"SUBLIST", //$NON-NLS-1$
"SUBMULTISET", //$NON-NLS-1$
"SUBPAGES", // DB2 //$NON-NLS-1$
"SUBSTRING", // DB2 //$NON-NLS-1$
"SUCCESSFUL", //$NON-NLS-1$
"SUM", //$NON-NLS-1$
"SUPERUSER", //$NON-NLS-1$
"SYMMETRIC", //$NON-NLS-1$
"SYNONYM", // DB2 //$NON-NLS-1$
"SYSDATE", //$NON-NLS-1$
"SYSFUN", // DB2 //$NON-NLS-1$
"SYSIBM", // DB2 //$NON-NLS-1$
"SYSID", //$NON-NLS-1$
"SYSPROC", // DB2 //$NON-NLS-1$
"SYSTEM", // DB2 //$NON-NLS-1$
"SYSTEM_USER", //$NON-NLS-1$
"TABLE", // DB2 //$NON-NLS-1$
"TABLE_NAME", //$NON-NLS-1$
"TABLES", //$NON-NLS-1$
"TABLESAMPLE", //$NON-NLS-1$
"TABLESPACE", // DB2 //$NON-NLS-1$
"TEMP", //$NON-NLS-1$
"TEMPLATE", //$NON-NLS-1$
"TEMPORARY", //$NON-NLS-1$
"TERMINATE", //$NON-NLS-1$
"TERMINATED", //$NON-NLS-1$
"TEXT", //$NON-NLS-1$
"TEXTSIZE", //$NON-NLS-1$
"THAN", //$NON-NLS-1$
"THEN", // DB2 //$NON-NLS-1$
"TIES", //$NON-NLS-1$
"TIME", //$NON-NLS-1$
"TIMESTAMP", //$NON-NLS-1$
"TIMEZONE_HOUR", //$NON-NLS-1$
"TIMEZONE_MINUTE", //$NON-NLS-1$
"TINYBLOB", //$NON-NLS-1$
"TINYINT", //$NON-NLS-1$
"TINYTEXT", //$NON-NLS-1$
"TO", // DB2 //$NON-NLS-1$
"TOAST", //$NON-NLS-1$
"TOP", //$NON-NLS-1$
"TOP_LEVEL_COUNT", //$NON-NLS-1$
"TRAILING", //$NON-NLS-1$
"TRAN", //$NON-NLS-1$
"TRANSACTION", // DB2 //$NON-NLS-1$
"TRANSACTION_ACTIVE", //$NON-NLS-1$
"TRANSACTIONS_COMMITTED", //$NON-NLS-1$
"TRANSACTIONS_ROLLED_BACK", //$NON-NLS-1$
"TRANSFORM", //$NON-NLS-1$
"TRANSFORMS", //$NON-NLS-1$
"TRANSLATE", //$NON-NLS-1$
"TRANSLATION", //$NON-NLS-1$
"TREAT", //$NON-NLS-1$
"TRIGGER", // DB2 //$NON-NLS-1$
"TRIGGER_CATALOG", //$NON-NLS-1$
"TRIGGER_NAME", //$NON-NLS-1$
"TRIGGER_SCHEMA", //$NON-NLS-1$
"TRIM", // DB2 //$NON-NLS-1$
"TRUE", //$NON-NLS-1$
"TRUNCATE", //$NON-NLS-1$
"TRUSTED", //$NON-NLS-1$
"TSEQUAL", //$NON-NLS-1$
"TYPE", // DB2 //$NON-NLS-1$
"UESCAPE", //$NON-NLS-1$
"UID", //$NON-NLS-1$
"UNBOUNDED", //$NON-NLS-1$
"UNCOMMITTED", //$NON-NLS-1$
"UNDER", //$NON-NLS-1$
"UNDO", // DB2 //$NON-NLS-1$
"UNENCRYPTED", //$NON-NLS-1$
"UNION", // DB2 //$NON-NLS-1$
"UNIQUE", // DB2 //$NON-NLS-1$
"UNKNOWN", //$NON-NLS-1$
"UNLISTEN", //$NON-NLS-1$
"UNLOCK", //$NON-NLS-1$
"UNNAMED", //$NON-NLS-1$
"UNNEST", //$NON-NLS-1$
"UNSIGNED", //$NON-NLS-1$
"UNTIL", // DB2 //$NON-NLS-1$
"UPDATE", // DB2 //$NON-NLS-1$
"UPDATETEXT", //$NON-NLS-1$
"UPPER", //$NON-NLS-1$
"USAGE", // DB2 //$NON-NLS-1$
"USE", //$NON-NLS-1$
"USER", // DB2 //$NON-NLS-1$
"USER_DEFINED_TYPE_CATALOG", //$NON-NLS-1$
"USER_DEFINED_TYPE_CODE", //$NON-NLS-1$
"USER_DEFINED_TYPE_NAME", //$NON-NLS-1$
"USER_DEFINED_TYPE_SCHEMA", //$NON-NLS-1$
"USING", // DB2 //$NON-NLS-1$
"UTC_DATE", //$NON-NLS-1$
"UTC_TIME", //$NON-NLS-1$
"UTC_TIMESTAMP", //$NON-NLS-1$
"VACUUM", //$NON-NLS-1$
"VALID", //$NON-NLS-1$
"VALIDATE", //$NON-NLS-1$
"VALIDATOR", //$NON-NLS-1$
"VALIDPROC", // DB2 //$NON-NLS-1$
"VALUE", //$NON-NLS-1$
"VALUES", // DB2 //$NON-NLS-1$
"VAR_POP", //$NON-NLS-1$
"VAR_SAMP", //$NON-NLS-1$
"VARBINARY", //$NON-NLS-1$
"VARCHAR", //$NON-NLS-1$
"VARCHAR2", //$NON-NLS-1$
"VARCHARACTER", //$NON-NLS-1$
"VARIABLE", // DB2 //$NON-NLS-1$
"VARIABLES", //$NON-NLS-1$
"VARIANT", // DB2 //$NON-NLS-1$
"VARYING", //$NON-NLS-1$
"VCAT", // DB2 //$NON-NLS-1$
"VERBOSE", //$NON-NLS-1$
"VIEW", // DB2 //$NON-NLS-1$
"VOLATILE", //$NON-NLS-1$
"VOLUMES", // DB2 //$NON-NLS-1$
"WAITFOR", //$NON-NLS-1$
"WHEN", // DB2 //$NON-NLS-1$
"WHENEVER", //$NON-NLS-1$
"WHERE", // DB2 //$NON-NLS-1$
"WHILE", // DB2 //$NON-NLS-1$
"WIDTH_BUCKET", //$NON-NLS-1$
"WINDOW", //$NON-NLS-1$
"WITH", // DB2 //$NON-NLS-1$
"WITHIN", //$NON-NLS-1$
"WITHOUT", //$NON-NLS-1$
"WLM", // DB2 //$NON-NLS-1$
"WORK", //$NON-NLS-1$
"WRITE", // DB2 //$NON-NLS-1$
"WRITETEXT", //$NON-NLS-1$
"X509", //$NON-NLS-1$
"XOR", //$NON-NLS-1$
"YEAR", // DB2 //$NON-NLS-1$
"YEAR_MONTH", //$NON-NLS-1$
"YEARS", // DB2 //$NON-NLS-1$
"ZEROFILL", //$NON-NLS-1$
"ZONE" //$NON-NLS-1$
};
RESERVED_WORDS = new HashSet<String>(words.length);
for (String word : words) {
RESERVED_WORDS.add(word);
}
}
/**
 * Checks whether the given word is a SQL reserved word.
 *
 * @param word the word to test; may be {@code null}
 * @return {@code true} if the upper-cased word is in the reserved-word set;
 *         {@code false} if it is not, or if {@code word} is {@code null}
 */
public static boolean containsWord(String word) {
    // Upper-case with a fixed locale: the default locale's case rules (e.g.
    // Turkish dotless 'i') would otherwise break lookups of words containing 'i'.
    return word != null && RESERVED_WORDS.contains(word.toUpperCase(java.util.Locale.ROOT));
}
/**
 * Utility class - no instances allowed. The private constructor prevents
 * instantiation; all functionality is exposed through static methods.
 */
private SqlReservedWords() {
}
}
| |
package org.act.tstream.daemon.worker.metrics;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.HashMap;
import java.util.Set;
import java.util.HashSet;
import org.act.tstream.callback.RunnableCallback;
import org.apache.log4j.Logger;
import org.act.tstream.client.ConfigExtension;
import org.act.tstream.cluster.StormBase;
import org.act.tstream.cluster.StormClusterState;
import org.act.tstream.cluster.StormMonitor;
import org.act.tstream.daemon.supervisor.Supervisor;
import org.act.tstream.daemon.supervisor.SupervisorInfo;
import org.act.tstream.daemon.worker.WorkerMetricInfo;
import org.act.tstream.metric.MetricDef;
import org.act.tstream.task.TaskMetricInfo;
import org.act.tstream.task.Assignment;
/**
 * Periodically collects task-level and worker-level metrics for every active
 * topology assigned to this supervisor and uploads them through the configured
 * {@link MetricSendClient}.
 * <p>
 * As a {@link RunnableCallback}, {@link #run()} re-schedules itself every
 * {@code frequence} seconds while {@code active} is set; once {@code active}
 * is cleared, {@link #getResult()} returns {@code -1} to stop the loop.
 */
public class UploadSupervMetric extends RunnableCallback {
    private static Logger LOG = Logger.getLogger(UploadSupervMetric.class);

    // Shared shutdown flag: when cleared, run() reports -1 to stop rescheduling.
    private AtomicBoolean active;
    // Value handed back via getResult(): the reschedule interval, or -1 to stop.
    private Integer result;
    // Upload interval in seconds.
    private int frequence;
    private Map conf;
    private String supervisorId;
    // Host name of this supervisor; used to build worker ids of the form "host:port".
    private String hostName;
    private StormClusterState cluster;
    private MetricSendClient client;

    // Accumulators for one upload round; cleared after every send attempt
    // (including the failure path) so the next round starts clean.
    List<Map<String, Object>> jsonMsgTasks = new ArrayList<Map<String, Object>>();
    List<Map<String, Object>> jsonMsgWorkers = new ArrayList<Map<String, Object>>();

    public UploadSupervMetric(Map conf, StormClusterState cluster, String supervisorId,
            AtomicBoolean active, int frequence, MetricSendClient client) {
        this.active = active;
        this.frequence = frequence;
        this.result = null;
        this.conf = conf;
        this.cluster = cluster;
        this.supervisorId = supervisorId;
        this.client = client;
        try {
            SupervisorInfo supervisorInfo = cluster.supervisor_info(supervisorId);
            this.hostName = supervisorInfo.getHostName();
        } catch (Exception e) {
            // Pass the exception to the logger so the stack trace is preserved.
            LOG.error("Failed to get hostname for supervisorID=" + supervisorId, e);
        }
    }

    @Override
    public Object getResult() {
        return result;
    }

    @Override
    public void run() {
        sendMetricsData();
        if (active.get()) {
            this.result = frequence;
        } else {
            this.result = -1;
        }
    }

    /**
     * Builds and uploads the metric messages for all active topologies.
     * Failures for a single topology are logged and skipped; a failure during
     * the upload itself discards the partially-built buffers.
     */
    public void sendMetricsData() {
        try {
            List<String> topologys = cluster.active_storms();
            for (String topologyId : topologys) {
                StormMonitor monitor = null;
                // Default to full (performance) metrics unless the monitor says otherwise.
                boolean metricPerf = true;
                Assignment assignment = null;
                try {
                    monitor = cluster.get_storm_monitor(topologyId);
                    if (monitor != null) metricPerf = monitor.getMetrics();
                    assignment = cluster.assignment_info(topologyId, null);
                } catch (Exception e) {
                    LOG.error("Error when retrieving monitor status and assignment info "
                            + "for " + topologyId, e);
                    continue;
                }
                if (assignment != null) {
                    Set<Integer> taskSet = new HashSet<Integer>();
                    Set<Integer> workerSet = new HashSet<Integer>();
                    // Retrieve task set assigned to this supervisor
                    Set<Integer> tempTaskSet = assignment.getCurrentSuperviosrTasks(supervisorId);
                    taskSet.addAll(tempTaskSet);
                    // Retrieve worker (port) set assigned to this supervisor
                    Set<Integer> tempWorkerSet = assignment.getCurrentSuperviosrWorkers(supervisorId);
                    workerSet.addAll(tempWorkerSet);
                    // Build KV Map for AliMonitor
                    buildTaskJsonMsg(topologyId, taskSet, metricPerf);
                    buildWorkerJsonMsg(topologyId, workerSet, metricPerf);
                }
            }
            if (jsonMsgTasks.size() != 0) {
                if (client instanceof AlimonitorClient) {
                    ((AlimonitorClient) client).setMonitorName(
                            ConfigExtension.getAlmonTaskMetricName(conf));
                    ((AlimonitorClient) client).setCollectionFlag(0);
                    ((AlimonitorClient) client).setErrorInfo("");
                }
                client.send(jsonMsgTasks);
            }
            if (jsonMsgWorkers.size() != 0) {
                if (client instanceof AlimonitorClient) {
                    ((AlimonitorClient) client).setMonitorName(
                            ConfigExtension.getAlmonWorkerMetricName(conf));
                    ((AlimonitorClient) client).setCollectionFlag(0);
                    ((AlimonitorClient) client).setErrorInfo("");
                }
                client.send(jsonMsgWorkers);
            }
            jsonMsgTasks.clear();
            jsonMsgWorkers.clear();
        } catch (Exception e) {
            LOG.error("Failed to upload worker&task metrics data", e);
            jsonMsgTasks.clear();
            jsonMsgWorkers.clear();
        }
    }

    /**
     * Appends one KV map per task to {@link #jsonMsgTasks}.
     *
     * @param topologyId topology the tasks belong to
     * @param taskSet    ids of the tasks to report
     * @param metricPerf when {@code true}, also include timer/histogram data
     */
    public void buildTaskJsonMsg(String topologyId, Set<Integer> taskSet, boolean metricPerf) {
        for (Integer taskId : taskSet) {
            try {
                TaskMetricInfo taskMetric = cluster.get_task_metric(topologyId, taskId);
                if (taskMetric == null) continue;
                // Task KV structure
                Map<String, Object> taskKV = new HashMap<String, Object>();
                taskKV.put("Topology_Name", topologyId);
                taskKV.put("Task_Id", String.valueOf(taskId));
                taskKV.put("Component", taskMetric.getComponent());
                taskKV.putAll(taskMetric.getGaugeData());
                taskKV.putAll(taskMetric.getCounterData());
                taskKV.putAll(taskMetric.getMeterData());
                if (metricPerf) {
                    taskKV.putAll(taskMetric.getTimerData());
                    taskKV.putAll(taskMetric.getHistogramData());
                }
                jsonMsgTasks.add(taskKV);
            } catch (Exception e) {
                // Pass the exception to the logger so the stack trace is preserved.
                LOG.error("Failed to buildTaskJsonMsg, taskID=" + taskId, e);
            }
        }
    }

    /**
     * Appends one KV map per worker to {@link #jsonMsgWorkers}.
     *
     * @param topologyId topology the workers belong to
     * @param workerSet  ports of the workers on this supervisor's host
     * @param metricPerf when {@code true}, also include timer/histogram data
     */
    public void buildWorkerJsonMsg(String topologyId, Set<Integer> workerSet, boolean metricPerf) {
        String workerId = null;
        for (Integer port : workerSet) {
            try {
                // Workers are keyed by "host:port".
                workerId = hostName + ":" + port;
                WorkerMetricInfo workerMetric = cluster.get_worker_metric(topologyId, workerId);
                if (workerMetric == null) continue;
                Map<String, Object> workerKV = new HashMap<String, Object>();
                workerKV.put("Topology_Name", topologyId);
                workerKV.put("Port", String.valueOf(port));
                workerKV.put(MetricDef.MEMORY_USED, workerMetric.getUsedMem());
                workerKV.put(MetricDef.CPU_USED_RATIO, workerMetric.getUsedCpu());
                workerKV.putAll(workerMetric.getGaugeData());
                workerKV.putAll(workerMetric.getCounterData());
                workerKV.putAll(workerMetric.getMeterData());
                if (metricPerf) {
                    workerKV.putAll(workerMetric.getTimerData());
                    workerKV.putAll(workerMetric.getHistogramData());
                }
                jsonMsgWorkers.add(workerKV);
            } catch (Exception e) {
                // Pass the exception to the logger so the stack trace is preserved.
                LOG.error("Failed to buildWorkerJsonMsg, workerId=" + workerId, e);
            }
        }
    }

    public void clean() {
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
/**
 * Tests for the {@code FileUtil} delete and copy-merge helpers, with special
 * attention to symlink handling (links must be removed without following them)
 * and to directories that cannot be fully deleted due to permissions.
 */
public class TestFileUtil {
    private static final Log LOG = LogFactory.getLog(TestFileUtil.class);

    // Root under which every test file/directory is created.
    private static final String TEST_ROOT_DIR = System.getProperty(
            "test.build.data", "/tmp") + "/fu";
    private static final File TEST_DIR = new File(TEST_ROOT_DIR);
    private static String FILE = "x";
    private static String LINK = "y";
    private static String DIR = "dir";
    private File del = new File(TEST_DIR, "del");
    private File tmp = new File(TEST_DIR, "tmp");
    private File dir1 = new File(del, DIR + "1");
    private File dir2 = new File(del, DIR + "2");
    private File partitioned = new File(TEST_DIR, "partitioned");

    /**
     * Creates multiple directories for testing.
     *
     * Contents of them are
     * dir:tmp:
     *   file: x
     * dir:del:
     *   file: x
     *   dir: dir1 : file:x
     *   dir: dir2 : file:x
     *   link: y to tmp/x
     *   link: tmpDir to tmp
     * dir:partitioned:
     *   file: part-r-00000, contents: "foo"
     *   file: part-r-00001, contents: "bar"
     */
    private void setupDirs() throws IOException {
        Assert.assertFalse(del.exists());
        Assert.assertFalse(tmp.exists());
        Assert.assertFalse(partitioned.exists());
        del.mkdirs();
        tmp.mkdirs();
        partitioned.mkdirs();
        new File(del, FILE).createNewFile();
        File tmpFile = new File(tmp, FILE);
        tmpFile.createNewFile();
        // create directories
        dir1.mkdirs();
        dir2.mkdirs();
        new File(dir1, FILE).createNewFile();
        new File(dir2, FILE).createNewFile();
        // create a symlink to file
        File link = new File(del, LINK);
        FileUtil.symLink(tmpFile.toString(), link.toString());
        // create a symlink to dir
        File linkDir = new File(del, "tmpDir");
        FileUtil.symLink(tmp.toString(), linkDir.toString());
        // del now holds: x, dir1, dir2, y, tmpDir
        Assert.assertEquals(5, del.listFiles().length);
        // create files in partitioned directories
        createFile(partitioned, "part-r-00000", "foo");
        createFile(partitioned, "part-r-00001", "bar");
    }

    /**
     * Creates a new file in the specified directory, with the specified name and
     * the specified file contents.  This method will add a newline terminator to
     * the end of the contents string in the destination file.
     * @param directory File non-null destination directory.
     * @param name String non-null file name.
     * @param contents String non-null file contents.
     * @throws IOException if an I/O error occurs.
     */
    private void createFile(File directory, String name, String contents)
            throws IOException {
        File newFile = new File(directory, name);
        PrintWriter pw = new PrintWriter(newFile);
        try {
            pw.println(contents);
        }
        finally {
            pw.close();
        }
    }

    /** Removes everything a test may have created, even on failure. */
    @After
    public void tearDown() throws IOException {
        FileUtil.fullyDelete(del);
        FileUtil.fullyDelete(tmp);
        FileUtil.fullyDelete(partitioned);
    }

    @Test
    public void testFullyDelete() throws IOException {
        setupDirs();
        boolean ret = FileUtil.fullyDelete(del);
        Assert.assertTrue(ret);
        Assert.assertFalse(del.exists());
        // tmp must be untouched: del only contained a symlink into it.
        validateTmpDir();
    }

    /**
     * Tests if fullyDelete deletes
     * (a) symlink to file only and not the file pointed to by symlink.
     * (b) symlink to dir only and not the dir pointed to by symlink.
     * @throws IOException
     */
    @Test
    public void testFullyDeleteSymlinks() throws IOException {
        setupDirs();
        File link = new File(del, LINK);
        Assert.assertEquals(5, del.list().length);
        // Since 'y' is a symlink to the file tmp/x, fullyDelete(y) should
        // delete only the link itself and not tmp/x. See setupDirs for details.
        boolean ret = FileUtil.fullyDelete(link);
        Assert.assertTrue(ret);
        Assert.assertFalse(link.exists());
        Assert.assertEquals(4, del.list().length);
        validateTmpDir();
        File linkDir = new File(del, "tmpDir");
        // Since tmpDir is symlink to tmp, fullyDelete(tmpDir) should not
        // delete contents of tmp. See setupDirs for details.
        ret = FileUtil.fullyDelete(linkDir);
        Assert.assertTrue(ret);
        Assert.assertFalse(linkDir.exists());
        Assert.assertEquals(3, del.list().length);
        validateTmpDir();
    }

    /**
     * Tests if fullyDelete deletes
     * (a) dangling symlink to file properly
     * (b) dangling symlink to directory properly
     * @throws IOException
     */
    @Test
    public void testFullyDeleteDanglingSymlinks() throws IOException {
        setupDirs();
        // delete the directory tmp to make tmpDir a dangling link to dir tmp and
        // to make y as a dangling link to file tmp/x
        boolean ret = FileUtil.fullyDelete(tmp);
        Assert.assertTrue(ret);
        Assert.assertFalse(tmp.exists());
        // dangling symlink to file
        File link = new File(del, LINK);
        Assert.assertEquals(5, del.list().length);
        // Even though 'y' is dangling symlink to file tmp/x, fullyDelete(y)
        // should delete 'y' properly.
        ret = FileUtil.fullyDelete(link);
        Assert.assertTrue(ret);
        Assert.assertEquals(4, del.list().length);
        // dangling symlink to directory
        File linkDir = new File(del, "tmpDir");
        // Even though tmpDir is dangling symlink to tmp, fullyDelete(tmpDir) should
        // delete tmpDir properly.
        ret = FileUtil.fullyDelete(linkDir);
        Assert.assertTrue(ret);
        Assert.assertEquals(3, del.list().length);
    }

    @Test
    public void testFullyDeleteContents() throws IOException {
        setupDirs();
        boolean ret = FileUtil.fullyDeleteContents(del);
        Assert.assertTrue(ret);
        // The directory itself survives; only its contents are removed.
        Assert.assertTrue(del.exists());
        Assert.assertEquals(0, del.listFiles().length);
        validateTmpDir();
    }

    // Asserts that tmp still exists and still contains exactly the file 'x'.
    private void validateTmpDir() {
        Assert.assertTrue(tmp.exists());
        Assert.assertEquals(1, tmp.listFiles().length);
        Assert.assertTrue(new File(tmp, FILE).exists());
    }

    private File xSubDir = new File(del, "xsubdir");
    private File ySubDir = new File(del, "ysubdir");
    static String file1Name = "file1";
    private File file2 = new File(xSubDir, "file2");
    private File file3 = new File(ySubDir, "file3");
    private File zlink = new File(del, "zlink");

    /**
     * Creates a directory which can not be deleted completely.
     *
     * Directory structure. The naming is important in that {@link MyFile}
     * is used to return them in alphabetical order when listed.
     *
     *                     del(+w)
     *                       |
     *    .---------------------------------------,
     *    |            |              |           |
     *  file1(!w)   xsubdir(-w)   ysubdir(+w)   zlink
     *                 |              |
     *               file2          file3
     *
     * @throws IOException
     */
    private void setupDirsAndNonWritablePermissions() throws IOException {
        Assert.assertFalse("The directory del should not have existed!",
                del.exists());
        del.mkdirs();
        new MyFile(del, file1Name).createNewFile();
        // "file1" is non-deletable by default, see MyFile.delete().
        xSubDir.mkdirs();
        file2.createNewFile();
        // Removing write permission makes file2 undeletable (on POSIX,
        // deleting an entry requires write access to its parent directory).
        xSubDir.setWritable(false);
        ySubDir.mkdirs();
        file3.createNewFile();
        Assert.assertFalse("The directory tmp should not have existed!",
                tmp.exists());
        tmp.mkdirs();
        File tmpFile = new File(tmp, FILE);
        tmpFile.createNewFile();
        FileUtil.symLink(tmpFile.toString(), zlink.toString());
    }

    // Validates the return value.
    // Validates the existence of directory "xsubdir" and the file "file1"
    // Sets writable permissions for the non-deleted dir "xsubdir" so that it can
    // be deleted in tearDown().
    private void validateAndSetWritablePermissions(boolean ret) {
        xSubDir.setWritable(true);
        Assert.assertFalse("The return value should have been false!", ret);
        Assert.assertTrue("The file file1 should not have been deleted!",
                new File(del, file1Name).exists());
        Assert.assertTrue(
                "The directory xsubdir should not have been deleted!",
                xSubDir.exists());
        Assert.assertTrue("The file file2 should not have been deleted!",
                file2.exists());
        Assert.assertFalse("The directory ysubdir should have been deleted!",
                ySubDir.exists());
        Assert.assertFalse("The link zlink should have been deleted!",
                zlink.exists());
    }

    @Test
    public void testFailFullyDelete() throws IOException {
        LOG.info("Running test to verify failure of fullyDelete()");
        setupDirsAndNonWritablePermissions();
        boolean ret = FileUtil.fullyDelete(new MyFile(del));
        validateAndSetWritablePermissions(ret);
    }

    /**
     * Extend {@link File}. Same as {@link File} except for two things: (1) This
     * treats file1Name as a very special file which is not delete-able
     * irrespective of it's parent-dir's permissions, a peculiar file instance for
     * testing. (2) It returns the files in alphabetically sorted order when
     * listed.
     *
     */
    public static class MyFile extends File {
        private static final long serialVersionUID = 1L;

        public MyFile(File f) {
            super(f.getAbsolutePath());
        }

        public MyFile(File parent, String child) {
            super(parent, child);
        }

        /**
         * Same as {@link File#delete()} except for file1Name which will never be
         * deleted (hard-coded)
         */
        @Override
        public boolean delete() {
            LOG.info("Trying to delete myFile " + getAbsolutePath());
            boolean bool = false;
            if (getName().equals(file1Name)) {
                bool = false;
            } else {
                bool = super.delete();
            }
            if (bool) {
                LOG.info("Deleted " + getAbsolutePath() + " successfully");
            } else {
                LOG.info("Cannot delete " + getAbsolutePath());
            }
            return bool;
        }

        /**
         * Return the list of files in an alphabetically sorted order
         */
        @Override
        public File[] listFiles() {
            File[] files = super.listFiles();
            // Arrays.asList returns a view, so sorting it sorts 'files' too.
            List<File> filesList = Arrays.asList(files);
            Collections.sort(filesList);
            File[] myFiles = new MyFile[files.length];
            int i=0;
            for(File f : filesList) {
                myFiles[i++] = new MyFile(f);
            }
            return myFiles;
        }
    }

    @Test
    public void testFailFullyDeleteContents() throws IOException {
        LOG.info("Running test to verify failure of fullyDeleteContents()");
        setupDirsAndNonWritablePermissions();
        boolean ret = FileUtil.fullyDeleteContents(new MyFile(del));
        validateAndSetWritablePermissions(ret);
    }

    @Test
    public void testCopyMergeSingleDirectory() throws IOException {
        setupDirs();
        boolean copyMergeResult = copyMerge("partitioned", "tmp/merged");
        Assert.assertTrue("Expected successful copyMerge result.", copyMergeResult);
        File merged = new File(TEST_DIR, "tmp/merged");
        Assert.assertTrue("File tmp/merged must exist after copyMerge.",
                merged.exists());
        BufferedReader rdr = new BufferedReader(new FileReader(merged));
        try {
            Assert.assertEquals("Line 1 of merged file must contain \"foo\".",
                    "foo", rdr.readLine());
            Assert.assertEquals("Line 2 of merged file must contain \"bar\".",
                    "bar", rdr.readLine());
            Assert.assertNull("Expected end of file reading merged file.",
                    rdr.readLine());
        }
        finally {
            rdr.close();
        }
    }

    /**
     * Calls FileUtil.copyMerge using the specified source and destination paths.
     * Both source and destination are assumed to be on the local file system.
     * The call will not delete source on completion and will not add an
     * additional string between files.
     * @param src String non-null source path.
     * @param dst String non-null destination path.
     * @return boolean true if the call to FileUtil.copyMerge was successful.
     * @throws IOException if an I/O error occurs.
     */
    private boolean copyMerge(String src, String dst)
            throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.getLocal(conf);
        final boolean result;
        try {
            Path srcPath = new Path(TEST_ROOT_DIR, src);
            Path dstPath = new Path(TEST_ROOT_DIR, dst);
            boolean deleteSource = false;
            String addString = null;
            result = FileUtil.copyMerge(fs, srcPath, fs, dstPath, deleteSource, conf,
                    addString);
        }
        finally {
            fs.close();
        }
        return result;
    }
}
| |
/*
* Copyright (C) 2010-2018 The Project Lombok Authors.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package lombok.javac.apt;
import java.io.IOException;
import java.lang.reflect.Method;
import java.net.URI;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.tools.FileObject;
import javax.tools.JavaFileManager;
import javax.tools.JavaFileObject;
import javax.tools.JavaFileObject.Kind;
import lombok.core.DiagnosticsReceiver;
import com.sun.tools.javac.file.BaseFileManager;
//Can't use SimpleJavaFileObject so we copy/paste most of its content here, because javac doesn't follow the interface,
//and casts to its own BaseFileObject type. D'oh!
/**
 * Factory for javac-compatible {@link JavaFileObject} wrappers.  javac does not
 * program against the JavaFileObject interface but casts to its own internal
 * BaseFileObject type, whose package moved between javac versions; this class
 * detects which javac flavor is in use and wraps accordingly.
 */
final class LombokFileObjects {
    /** One strategy per supported javac generation (6, 7, 9+). */
    interface Compiler {
        Compiler JAVAC6 = new Compiler() {
            private Method decoderMethod = null;
            // Guards lazy, one-time lookup of decoderMethod (also used as the lock).
            private final AtomicBoolean decoderIsSet = new AtomicBoolean();

            @Override public JavaFileObject wrap(LombokFileObject fileObject) {
                return new Javac6BaseFileObjectWrapper(fileObject);
            }

            @Override public Method getDecoderMethod() {
                synchronized (decoderIsSet) {
                    if (decoderIsSet.get()) return decoderMethod;
                    // javac6 keeps BaseFileObject in the 'util' package.
                    decoderMethod = LombokFileObjects.getDecoderMethod("com.sun.tools.javac.util.BaseFileObject");
                    decoderIsSet.set(true);
                    return decoderMethod;
                }
            }
        };
        Compiler JAVAC7 = new Compiler() {
            private Method decoderMethod = null;
            private final AtomicBoolean decoderIsSet = new AtomicBoolean();

            @Override public JavaFileObject wrap(LombokFileObject fileObject) {
                return new Javac7BaseFileObjectWrapper(fileObject);
            }

            @Override public Method getDecoderMethod() {
                synchronized (decoderIsSet) {
                    if (decoderIsSet.get()) return decoderMethod;
                    // javac7 moved BaseFileObject to the 'file' package.
                    decoderMethod = LombokFileObjects.getDecoderMethod("com.sun.tools.javac.file.BaseFileObject");
                    decoderIsSet.set(true);
                    return decoderMethod;
                }
            }
        };

        JavaFileObject wrap(LombokFileObject fileObject);
        Method getDecoderMethod();
    }

    /**
     * Reflectively looks up {@code getDecoder(boolean)} on the named class.
     *
     * @return the accessible Method, or {@code null} when the class or the
     *         method does not exist in this javac version.
     */
    static Method getDecoderMethod(String className) {
        Method m = null;
        try {
            m = Class.forName(className).getDeclaredMethod("getDecoder", boolean.class);
            m.setAccessible(true);
        } catch (NoSuchMethodException e) {
            // Intentional fallthrough - getDecoder(boolean) is not always present.
        } catch (ClassNotFoundException e) {
            // Intentional fallthrough - getDecoder(boolean) is not always present.
        }
        return m;
    }

    private LombokFileObjects() {}

    // File-manager class names known to wrap a Java 9+ BaseFileManager; for
    // these we can go straight to the Java9Compiler path.
    private static final List<String> KNOWN_JAVA9_FILE_MANAGERS = Arrays.asList(
        "com.google.errorprone.MaskedClassLoader$MaskedFileManager",
        "com.google.devtools.build.buildjar.javac.BlazeJavacMain$ClassloaderMaskingFileManager",
        "com.google.devtools.build.java.turbine.javac.JavacTurbineCompiler$ClassloaderMaskingFileManager",
        "org.netbeans.modules.java.source.parsing.ProxyFileManager",
        "com.sun.tools.javac.api.ClientCodeWrapper$WrappedStandardJavaFileManager",
        "com.sun.tools.javac.main.DelegatingJavaFileManager$DelegatingSJFM" // IntelliJ + JDK10
    );

    /**
     * Picks the wrapping strategy matching the running javac, first by the file
     * manager's class name, then by probing for version-specific javac classes
     * (newest first).  Throws IllegalArgumentException with a description of
     * the unrecognized file manager when every probe fails.
     */
    static Compiler getCompiler(JavaFileManager jfm) {
        String jfmClassName = jfm != null ? jfm.getClass().getName() : "null";
        if (jfmClassName.equals("com.sun.tools.javac.util.DefaultFileManager")) return Compiler.JAVAC6;
        if (jfmClassName.equals("com.sun.tools.javac.util.JavacFileManager")) return Compiler.JAVAC6;
        if (jfmClassName.equals("com.sun.tools.javac.file.JavacFileManager")) {
            // This name exists in javac 7 AND 9+; only 9+ extends BaseFileManager.
            try {
                Class<?> superType = Class.forName("com.sun.tools.javac.file.BaseFileManager");
                if (superType.isInstance(jfm)) {
                    return new Java9Compiler(jfm);
                }
            }
            catch (Throwable e) {}
            return Compiler.JAVAC7;
        }
        if (KNOWN_JAVA9_FILE_MANAGERS.contains(jfmClassName)) {
            try {
                return new Java9Compiler(jfm);
            }
            catch (Throwable e) {}
        }
        // Unknown file manager: probe for marker classes, newest javac first.
        // Class.forName either returns a class or throws, so the null checks
        // are pure defensiveness funneling into the catch below.
        try {
            if (Class.forName("com.sun.tools.javac.file.PathFileObject") == null) throw new NullPointerException();
            return new Java9Compiler(jfm);
        } catch (Throwable e) {}
        try {
            if (Class.forName("com.sun.tools.javac.file.BaseFileObject") == null) throw new NullPointerException();
            return Compiler.JAVAC7;
        } catch (Throwable e) {}
        try {
            if (Class.forName("com.sun.tools.javac.util.BaseFileObject") == null) throw new NullPointerException();
            return Compiler.JAVAC6;
        } catch (Throwable e) {}
        // Nothing matched: build a descriptive error listing the manager's type hierarchy.
        StringBuilder sb = new StringBuilder(jfmClassName);
        if (jfm != null) {
            sb.append(" extends ").append(jfm.getClass().getSuperclass().getName());
            for (Class<?> cls : jfm.getClass().getInterfaces()) {
                sb.append(" implements ").append(cls.getName());
            }
        }
        throw new IllegalArgumentException(sb.toString());
    }

    static JavaFileObject createEmpty(Compiler compiler, String name, Kind kind) {
        return compiler.wrap(new EmptyLombokFileObject(name, kind));
    }

    static JavaFileObject createIntercepting(Compiler compiler, JavaFileObject delegate, String fileName, DiagnosticsReceiver diagnostics) {
        return compiler.wrap(new InterceptingJavaFileObject(delegate, fileName, diagnostics, compiler.getDecoderMethod()));
    }

    /** Strategy for javac 9+, where file objects are Path-based. */
    static class Java9Compiler implements Compiler {
        private final BaseFileManager fileManager;

        public Java9Compiler(JavaFileManager jfm) {
            fileManager = asBaseFileManager(jfm);
        }

        @Override public JavaFileObject wrap(LombokFileObject fileObject) {
            return new Javac9BaseFileObjectWrapper(fileManager, toPath(fileObject), fileObject);
        }

        @Override public Method getDecoderMethod() {
            // Java 9+ wrappers decode via the file manager, not a reflective method.
            return null;
        }

        private static Path toPath(LombokFileObject fileObject) {
            URI uri = fileObject.toUri();
            if (uri.getScheme() == null) {
                // Scheme-less URIs cannot be resolved by Paths.get; force 'file'.
                uri = URI.create("file:///" + uri);
            }
            try {
                return Paths.get(uri);
            } catch (IllegalArgumentException e) {
                throw new IllegalArgumentException("Problems in URI '" + uri + "' (" + fileObject.toUri() + ")", e);
            }
        }

        private static BaseFileManager asBaseFileManager(JavaFileManager jfm) {
            if (jfm instanceof BaseFileManager) {
                return (BaseFileManager) jfm;
            }
            // Foreign file manager: adapt it by delegating every method.
            return new FileManagerWrapper(jfm);
        }

        /** Pure delegation adapter from JavaFileManager to BaseFileManager. */
        static class FileManagerWrapper extends BaseFileManager {
            JavaFileManager manager;

            public FileManagerWrapper(JavaFileManager manager) {
                super(null);
                this.manager = manager;
            }

            @Override
            public int isSupportedOption(String option) {
                return manager.isSupportedOption(option);
            }

            @Override
            public ClassLoader getClassLoader(Location location) {
                return manager.getClassLoader(location);
            }

            @Override
            public Iterable<JavaFileObject> list(Location location, String packageName, Set<Kind> kinds, boolean recurse) throws IOException {
                return manager.list(location, packageName, kinds, recurse);
            }

            @Override
            public String inferBinaryName(Location location, JavaFileObject file) {
                return manager.inferBinaryName(location, file);
            }

            @Override
            public boolean isSameFile(FileObject a, FileObject b) {
                return manager.isSameFile(a, b);
            }

            @Override
            public boolean handleOption(String current, Iterator<String> remaining) {
                return manager.handleOption(current, remaining);
            }

            @Override
            public boolean hasLocation(Location location) {
                return manager.hasLocation(location);
            }

            @Override
            public JavaFileObject getJavaFileForInput(Location location, String className, Kind kind) throws IOException {
                return manager.getJavaFileForInput(location, className, kind);
            }

            @Override
            public JavaFileObject getJavaFileForOutput(Location location, String className, Kind kind, FileObject sibling) throws IOException {
                return manager.getJavaFileForOutput(location, className, kind, sibling);
            }

            @Override
            public FileObject getFileForInput(Location location, String packageName, String relativeName) throws IOException {
                return manager.getFileForInput(location, packageName, relativeName);
            }

            @Override
            public FileObject getFileForOutput(Location location, String packageName, String relativeName, FileObject sibling) throws IOException {
                return manager.getFileForOutput(location, packageName, relativeName, sibling);
            }

            @Override
            public void flush() throws IOException {
                manager.flush();
            }

            @Override
            public void close() throws IOException {
                manager.close();
            }
        }
    }
}
| |
/*
* ModifierUtils.java
*
* Copyright (c) 2006 David Holroyd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.co.badgersinfoil.metaas.impl;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.asdt.core.internal.antlr.AS3Parser;
import uk.co.badgersinfoil.metaas.dom.Visibility;
import uk.co.badgersinfoil.metaas.impl.antlr.LinkedListToken;
import uk.co.badgersinfoil.metaas.impl.antlr.LinkedListTree;
/**
* Helpers for dealing with the modifiers list.
*/
class ModifierUtils {
private static class ModInfo {
public int tokenType;
public Visibility vis;
public String keyword;
public ModInfo(int tokenType, Visibility vis, String keyword) {
this.tokenType = tokenType;
this.vis = vis;
this.keyword = keyword;
}
}
private static Map modinfoByTokenType = new HashMap();
private static Map modinfoByVisibility = new HashMap();
private static Set<String> nonVisibilityModifiers = new HashSet<String>();
static {
mapMod(AS3Parser.PRIVATE, Visibility.PRIVATE, "private");
mapMod(AS3Parser.PUBLIC, Visibility.PUBLIC, "public");
mapMod(AS3Parser.PROTECTED, Visibility.PROTECTED, "protected");
mapMod(AS3Parser.INTERNAL, Visibility.INTERNAL, "internal");
mapMod(Integer.MIN_VALUE, Visibility.DEFAULT, null);
nonVisibilityModifiers.add("native");
nonVisibilityModifiers.add("static");
nonVisibilityModifiers.add("final");
nonVisibilityModifiers.add("enumerable");
nonVisibilityModifiers.add("explicit");
nonVisibilityModifiers.add("override");
nonVisibilityModifiers.add("dynamic");
nonVisibilityModifiers.add("intrinsic");
nonVisibilityModifiers.add("virtual");
}
private static void mapMod(int tokenType, Visibility vis, String keyword) {
ModInfo inf = new ModInfo(tokenType, vis, keyword);
modinfoByTokenType.put(new Integer(tokenType), inf);
modinfoByVisibility.put(vis, inf);
}
private static ModInfo getModInfo(int tokenType) {
return (ModInfo)modinfoByTokenType.get(new Integer(tokenType));
}
private static ModInfo getModInfo(Visibility vis) {
ModInfo result = (ModInfo)modinfoByVisibility.get(vis);
if (result == null) {
throw new IllegalArgumentException("unknown kind of visibility: "+vis);
}
return result;
}
public static Visibility getVisibility(LinkedListTree modifiers) {
for (ASTIterator i=new ASTIterator(modifiers); i.hasNext(); ) {
LinkedListTree mod = i.next();
ModInfo modInfo = getModInfo(mod.getType());
if (modInfo != null) {
return modInfo.vis;
}
}
return Visibility.DEFAULT;
}
public static String getNamespace(LinkedListTree modifiers) {
for (ASTIterator i=new ASTIterator(modifiers); i.hasNext(); ) {
LinkedListTree mod = i.next();
ModInfo modInfo = getModInfo(mod.getType());
if (modInfo == null) {
String modifier = mod.toString();
if (!nonVisibilityModifiers.contains(modifier)) {
return modifier;
}
}
}
return null;
}
private static boolean hasModifierFlag(LinkedListTree modifiers, int type) {
for (ASTIterator i=new ASTIterator(modifiers); i.hasNext(); ) {
LinkedListTree mod = i.next();
if (mod.getType() == type) {
return true;
}
}
return false;
}
public static boolean isDynamic(LinkedListTree modifiers) {
return hasModifierFlag(modifiers, AS3Parser.DYNAMIC);
}
public static boolean isOverride(LinkedListTree modifiers) {
return hasModifierFlag(modifiers, AS3Parser.OVERRIDE);
}
public static boolean isNative(LinkedListTree modifiers) {
return hasModifierFlag(modifiers, AS3Parser.NATIVE);
}
public static boolean isFinal(LinkedListTree modifiers) {
return hasModifierFlag(modifiers, AS3Parser.FINAL);
}
public static boolean isVirtual(LinkedListTree modifiers) {
return hasModifierFlag(modifiers, AS3Parser.VIRTUAL);
}
public static void setDynamic(LinkedListTree modifiers, boolean value) {
setModifierFlag(modifiers, value, AS3Parser.DYNAMIC, "dynamic");
}
public static void setOverride(LinkedListTree modifiers, boolean value) {
setModifierFlag(modifiers, value, AS3Parser.OVERRIDE, "override");
}
public static void setFinal(LinkedListTree modifiers, boolean value) {
setModifierFlag(modifiers, value, AS3Parser.FINAL, "final");
}
private static void setModifierFlag(LinkedListTree modifiers, boolean flag, int type, String text) {
for (ASTIterator i=new ASTIterator(modifiers); i.hasNext(); ) {
LinkedListTree mod = i.next();
if (mod.getType() == type) {
if (flag) return;
else {
i.remove();
if (modifiers.getChildCount() == 0) {
deleteAllChildTokens(modifiers);
}
}
return;
}
}
if (flag) {
LinkedListTree node = ASTUtils.newAST(type, text);
node.appendToken(TokenBuilder.newSpace());
modifiers.addChildWithTokens(node);
}
}
/**
 * Sets the visibility keyword on the given MODIFIERS node: an existing
 * visibility keyword is rewritten in place, removed entirely when the
 * requested visibility has no keyword, or a new keyword node is appended
 * when none was present.
 */
public static void setVisibility(LinkedListTree modifiers, Visibility protection) {
    ModInfo modInfo = getModInfo(protection);
    for (ASTIterator i=new ASTIterator(modifiers); i.hasNext(); ) {
        LinkedListTree mod = i.next();
        if (isVisibilityKeyword(mod)) {
            if (modInfo.keyword == null) {
                // No keyword for the requested visibility: drop the node
                // and, if the MODIFIERS list became empty, its tokens too.
                i.remove();
                if (modifiers.getChildCount() == 0) {
                    deleteAllChildTokens(modifiers);
                }
            } else {
                // Rewrite the existing keyword token in place.
                mod.token.setType(modInfo.tokenType);
                mod.token.setText(modInfo.keyword);
            }
            return;
        }
    }
    // No visibility keyword present yet: append one.
    // NOTE(review): this fall-through also executes when modInfo.keyword is
    // null (the branch handled by removal above), which would create an AST
    // node with a null keyword — confirm callers never reach here with a
    // keyword-less visibility, or add a guard.
    LinkedListTree mod = ASTUtils.newAST(modInfo.tokenType, modInfo.keyword);
    mod.appendToken(TokenBuilder.newSpace());
    modifiers.addChildWithTokens(mod);
}
// True if the given modifier node is a visibility keyword, i.e. its token
// type is one getModInfo() recognizes.
private static boolean isVisibilityKeyword(LinkedListTree mod) {
    return getModInfo(mod.getType()) != null;
}
/**
 * Unlinks every token from the node's start token (inclusive) up to its
 * stop token (exclusive), then detaches the node from the token stream by
 * nulling both boundaries.
 */
private static void deleteAllChildTokens(LinkedListTree modifiers) {
    LinkedListToken tok = modifiers.getStartToken();
    while (tok != null && tok != modifiers.getStopToken()) {
        LinkedListToken following = tok.getNext();
        tok.delete();
        tok = following;
    }
    modifiers.setStartToken(null);
    modifiers.setStopToken(null);
}
/**
 * Constructs a new MODIFIERS node representing the given visibility: the
 * result carries a single "public", "private", "protected" or "internal"
 * child, or no children at all for default visibility.
 */
public static LinkedListTree toModifiers(Visibility visibility) {
    if (Visibility.DEFAULT.equals(visibility)) {
        // Default visibility is expressed by an empty placeholder node.
        return ASTUtils.newPlaceholderAST(AS3Parser.MODIFIERS);
    }
    ModInfo info = getModInfo(visibility);
    LinkedListTree keyword = ASTUtils.newAST(info.tokenType, info.keyword);
    keyword.appendToken(TokenBuilder.newSpace());
    LinkedListTree result = ASTUtils.newImaginaryAST(AS3Parser.MODIFIERS);
    result.addChildWithTokens(keyword);
    return result;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.document.util;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import javax.annotation.Nonnull;
import org.apache.jackrabbit.oak.cache.CacheStats;
import org.apache.jackrabbit.oak.commons.json.JsopBuilder;
import org.apache.jackrabbit.oak.plugins.document.Collection;
import org.apache.jackrabbit.oak.plugins.document.Document;
import org.apache.jackrabbit.oak.plugins.document.DocumentStore;
import org.apache.jackrabbit.oak.plugins.document.DocumentStoreException;
import org.apache.jackrabbit.oak.plugins.document.UpdateOp;
import org.apache.jackrabbit.oak.plugins.document.cache.CacheInvalidationStats;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Implements a <code>DocumentStore</code> wrapper and logs all calls.
 * <p>
 * Every delegated call is logged (method name plus arguments) before being
 * forwarded to the wrapped store; results and timings are logged via
 * {@link #logResult(Callable)}, and any exception is logged and rethrown.
 */
public class LoggingDocumentStoreWrapper implements DocumentStore {
    private static final Logger LOG = LoggerFactory.getLogger(LoggingDocumentStoreWrapper.class);

    // When true, log lines are additionally echoed to stdout.
    private static final boolean DEBUG = Boolean.parseBoolean(System.getProperty("ds.debug", "true"));

    // The wrapped store every call is delegated to.
    final DocumentStore store;

    // When true, the calling thread is prepended to each log line.
    private boolean logThread;

    public LoggingDocumentStoreWrapper(DocumentStore store) {
        this.store = store;
    }

    /**
     * Enables prefixing every log line with the calling thread.
     *
     * @return this instance, for call chaining
     */
    public LoggingDocumentStoreWrapper withThreadNameLogging() {
        this.logThread = true;
        return this;
    }

    @Override
    public <T extends Document> T find(final Collection<T> collection,
                                       final String key) {
        try {
            logMethod("find", collection, key);
            return logResult(new Callable<T>() {
                @Override
                public T call() throws Exception {
                    return store.find(collection, key);
                }
            });
        } catch (Exception e) {
            logException(e);
            throw convert(e);
        }
    }

    @Override
    public <T extends Document> T find(final Collection<T> collection,
                                       final String key,
                                       final int maxCacheAge) {
        try {
            logMethod("find", collection, key, maxCacheAge);
            return logResult(new Callable<T>() {
                @Override
                public T call() throws Exception {
                    return store.find(collection, key, maxCacheAge);
                }
            });
        } catch (Exception e) {
            logException(e);
            throw convert(e);
        }
    }

    @Nonnull
    @Override
    public <T extends Document> List<T> query(final Collection<T> collection,
                                              final String fromKey,
                                              final String toKey,
                                              final int limit) {
        try {
            logMethod("query", collection, fromKey, toKey, limit);
            return logResult(new Callable<List<T>>() {
                @Override
                public List<T> call() throws Exception {
                    return store.query(collection, fromKey, toKey, limit);
                }
            });
        } catch (Exception e) {
            logException(e);
            throw convert(e);
        }
    }

    @Override
    @Nonnull
    public <T extends Document> List<T> query(final Collection<T> collection,
                                              final String fromKey,
                                              final String toKey,
                                              final String indexedProperty,
                                              final long startValue,
                                              final int limit) {
        try {
            logMethod("query", collection, fromKey, toKey, indexedProperty, startValue, limit);
            return logResult(new Callable<List<T>>() {
                @Override
                public List<T> call() throws Exception {
                    return store.query(collection, fromKey, toKey, indexedProperty, startValue, limit);
                }
            });
        } catch (Exception e) {
            logException(e);
            throw convert(e);
        }
    }

    @Override
    public <T extends Document> void remove(Collection<T> collection, String key) {
        try {
            logMethod("remove", collection, key);
            store.remove(collection, key);
        } catch (Exception e) {
            logException(e);
            throw convert(e);
        }
    }

    @Override
    public <T extends Document> void remove(Collection<T> collection, List<String> keys) {
        try {
            logMethod("remove", collection, keys);
            store.remove(collection, keys);
        } catch (Exception e) {
            logException(e);
            throw convert(e);
        }
    }

    @Override
    public <T extends Document> int remove(final Collection<T> collection,
                                           final Map<String, Map<UpdateOp.Key, UpdateOp.Condition>> toRemove) {
        try {
            logMethod("remove", collection, toRemove);
            return logResult(new Callable<Integer>() {
                @Override
                public Integer call() throws Exception {
                    return store.remove(collection, toRemove);
                }
            });
        } catch (Exception e) {
            logException(e);
            throw convert(e);
        }
    }

    @Override
    public <T extends Document> boolean create(final Collection<T> collection,
                                               final List<UpdateOp> updateOps) {
        try {
            logMethod("create", collection, updateOps);
            return logResult(new Callable<Boolean>() {
                @Override
                public Boolean call() throws Exception {
                    return store.create(collection, updateOps);
                }
            });
        } catch (Exception e) {
            logException(e);
            throw convert(e);
        }
    }

    @Override
    public <T extends Document> void update(final Collection<T> collection,
                                            final List<String> keys,
                                            final UpdateOp updateOp) {
        try {
            logMethod("update", collection, keys, updateOp);
            logResult(new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    store.update(collection, keys, updateOp);
                    return null;
                }
            });
        } catch (Exception e) {
            logException(e);
            throw convert(e);
        }
    }

    @Nonnull
    @Override
    public <T extends Document> T createOrUpdate(final Collection<T> collection,
                                                 final UpdateOp update) {
        try {
            logMethod("createOrUpdate", collection, update);
            return logResult(new Callable<T>() {
                @Override
                public T call() throws Exception {
                    return store.createOrUpdate(collection, update);
                }
            });
        } catch (Exception e) {
            logException(e);
            throw convert(e);
        }
    }

    @Override
    public <T extends Document> T findAndUpdate(final Collection<T> collection,
                                                final UpdateOp update) {
        try {
            logMethod("findAndUpdate", collection, update);
            return logResult(new Callable<T>() {
                @Override
                public T call() throws Exception {
                    return store.findAndUpdate(collection, update);
                }
            });
        } catch (Exception e) {
            logException(e);
            throw convert(e);
        }
    }

    @Override
    public CacheInvalidationStats invalidateCache() {
        try {
            logMethod("invalidateCache");
            return store.invalidateCache();
        } catch (Exception e) {
            logException(e);
            throw convert(e);
        }
    }

    @Override
    public CacheInvalidationStats invalidateCache(Iterable<String> keys) {
        try {
            logMethod("invalidateCache", keys);
            return store.invalidateCache(keys);
        } catch (Exception e) {
            logException(e);
            throw convert(e);
        }
    }

    @Override
    public <T extends Document> void invalidateCache(Collection<T> collection, String key) {
        try {
            logMethod("invalidateCache", collection, key);
            store.invalidateCache(collection, key);
        } catch (Exception e) {
            logException(e);
            throw convert(e);
        }
    }

    @Override
    public void dispose() {
        try {
            logMethod("dispose");
            store.dispose();
        } catch (Exception e) {
            logException(e);
            throw convert(e);
        }
    }

    @Override
    public <T extends Document> T getIfCached(final Collection<T> collection,
                                              final String key) {
        try {
            logMethod("getIfCached", collection, key);
            return logResult(new Callable<T>() {
                @Override
                public T call() throws Exception {
                    return store.getIfCached(collection, key);
                }
            });
        } catch (Exception e) {
            logException(e);
            throw convert(e);
        }
    }

    @Override
    public void setReadWriteMode(String readWriteMode) {
        try {
            logMethod("setReadWriteMode", readWriteMode);
            store.setReadWriteMode(readWriteMode);
        } catch (Exception e) {
            logException(e);
            throw convert(e);
        }
    }

    @Override
    public CacheStats getCacheStats() {
        try {
            logMethod("getCacheStats");
            return logResult(new Callable<CacheStats>() {
                @Override
                public CacheStats call() throws Exception {
                    return store.getCacheStats();
                }
            });
        } catch (Exception e) {
            logException(e);
            throw convert(e);
        }
    }

    @Override
    public Map<String, String> getMetadata() {
        // Previously this was the only method that bypassed the logging
        // wrapper; it now follows the same log/delegate/convert pattern as
        // every other delegated call.
        try {
            logMethod("getMetadata");
            return logResult(new Callable<Map<String, String>>() {
                @Override
                public Map<String, String> call() throws Exception {
                    return store.getMetadata();
                }
            });
        } catch (Exception e) {
            logException(e);
            throw convert(e);
        }
    }

    /** Logs the invocation as a pseudo call expression, e.g. {@code ds.find("c", "k");}. */
    private void logMethod(String methodName, Object... args) {
        StringBuilder buff = new StringBuilder("ds");
        buff.append('.').append(methodName).append('(');
        for (int i = 0; i < args.length; i++) {
            if (i > 0) {
                buff.append(", ");
            }
            buff.append(quote(args[i]));
        }
        buff.append(");");
        log(buff.toString());
    }

    /** Renders an argument for logging; strings are JSON-encoded, null is "null". */
    public static String quote(Object o) {
        if (o == null) {
            return "null";
        } else if (o instanceof String) {
            return JsopBuilder.encode((String) o);
        }
        return o.toString();
    }

    /** Rethrows runtime exceptions as-is and wraps anything else in a DocumentStoreException. */
    private RuntimeException convert(Exception e) {
        if (e instanceof RuntimeException) {
            return (RuntimeException) e;
        }
        log("// unexpected exception type: " + e.getClass().getName());
        return new DocumentStoreException("Unexpected exception: " + e.toString(), e);
    }

    private void logException(Exception e) {
        log("// exception: " + e.toString());
    }

    /** Runs the callable and logs its wall-clock duration (microseconds) and result. */
    private <T> T logResult(Callable<T> callable) throws Exception {
        long time = System.nanoTime();
        T result = callable.call();
        time = System.nanoTime() - time;
        log("// " + (time / 1000) + " us\t" + quote(result));
        return result;
    }

    private void log(String message) {
        String out = this.logThread ? (Thread.currentThread() + " " + message) : message;
        if (DEBUG) {
            System.out.println(out);
        }
        LOG.info(out);
    }
}
| |
package mil.nga.giat.geowave.adapter.vector.query.row;
import java.awt.Rectangle;
import java.awt.geom.AffineTransform;
import java.awt.geom.NoninvertibleTransformException;
import java.awt.geom.Point2D;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import mil.nga.giat.geowave.adapter.vector.plugin.GeoWaveGTDataStore;
import mil.nga.giat.geowave.core.geotime.GeometryUtils;
import mil.nga.giat.geowave.core.index.ByteArrayId;
import mil.nga.giat.geowave.core.index.NumericIndexStrategy;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Range;
import org.apache.hadoop.io.Text;
import org.apache.log4j.Logger;
import org.geotools.geometry.DirectPosition2D;
import org.geotools.geometry.jts.ReferencedEnvelope;
import org.geotools.referencing.CRS;
import org.geotools.renderer.lite.RendererUtilities;
import org.opengis.geometry.MismatchedDimensionException;
import org.opengis.referencing.FactoryException;
import org.opengis.referencing.operation.MathTransform;
import org.opengis.referencing.operation.TransformException;
/**
 * This encapsulates mappings of pixels on a spatially referenced image to/from
 * row IDs. It can be given a pixelSize to track larger grids of pixels for
 * performance purposes rather than tracking each individual pixel (eg. a
 * pixelSize of 4 would break the image into a grid of 4x4 pixel cells and map
 * row IDs to/from the larger cells). Also it maintains the painted ranges of
 * Accumulo row IDs which is important to determine if the current row has been
 * marked as painted and should be skipped. It does mark any row IDs outside of
 * the image's ReferencedEnvelope as painted.
 *
 */
public class BasicRowIdStore
{
    private static final Logger LOGGER = Logger.getLogger(BasicRowIdStore.class);
    private final int height;
    // pixel index -> minimum row ID that covers that pixel (grid cell)
    private final Map<Integer, Key> pixelToRowIdMap = new HashMap<Integer, Key>();
    // row ID -> every pixel index it maps to, ordered by row ID
    private final TreeMap<Key, Set<Integer>> rowIdToPixelsMap = new TreeMap<Key, Set<Integer>>();
    // merged ranges of row IDs already painted (or outside the envelope)
    private List<Range> paintedPixelRanges = new ArrayList<Range>();
    // cell edge length in pixels, clamped to >= 1; null means single pixels
    private final Integer pixelSize;
    protected Key currentRow;
    protected Key lastRow;

    public BasicRowIdStore(
            final int width,
            final int height,
            final NumericIndexStrategy indexStrategy,
            final ReferencedEnvelope env,
            final Integer pixelSize )
            throws NoninvertibleTransformException {
        this.height = height;
        this.pixelSize = (pixelSize != null) ? Math.max(
                1,
                pixelSize) : null;
        final AffineTransform worldToScreen = RendererUtilities.worldToScreenTransform(
                env,
                new Rectangle(
                        width,
                        height));
        MathTransform worldToData = null;
        try {
            worldToData = CRS.findMathTransform(
                    env.getCoordinateReferenceSystem(),
                    GeoWaveGTDataStore.DEFAULT_CRS);
        }
        catch (final FactoryException e) {
            LOGGER.warn(
                    "Unable to transform from map projection to default data projection (EPSG:4326)",
                    e);
        }
        // Use the clamped pixel size for the loop increment. The original code
        // used the raw parameter, so a pixelSize <= 0 produced a non-positive
        // increment and an infinite loop.
        final int increment = (this.pixelSize != null) ? this.pixelSize : 1;
        for (int x = 0; x < width; x += increment) {
            for (int y = 0; y < height; y += increment) {
                final Key rowIdKey = getMinRowId(
                        x,
                        y,
                        increment,
                        indexStrategy,
                        worldToScreen,
                        worldToData);
                if (rowIdKey == null) {
                    // closing bracket added to the previously truncated message
                    LOGGER.warn("Row ID cannot be determined for pixel [x=" + x + ", y=" + y + "]");
                    continue;
                }
                final int pixelIndex = getPixelIndex(
                        x,
                        y);
                pixelToRowIdMap.put(
                        pixelIndex,
                        rowIdKey);
                Set<Integer> pixels = rowIdToPixelsMap.get(rowIdKey);
                if (pixels == null) {
                    pixels = new HashSet<Integer>();
                    rowIdToPixelsMap.put(
                            rowIdKey,
                            pixels);
                }
                pixels.add(pixelIndex);
            }
        }
        // mark all rows prior to the first row ID as "painted"; guarded so the
        // degenerate case where no pixel resolved to a row ID does not throw
        // NoSuchElementException from firstKey()
        if (!rowIdToPixelsMap.isEmpty()) {
            paintedPixelRanges.add(new Range(
                    null,
                    rowIdToPixelsMap.firstKey()));
        }
    }

    /**
     * Resolves the minimum row ID for the cell whose upper-left pixel is
     * (x, y); returns null when the index strategy yields no insertion IDs.
     */
    protected Key getMinRowId(
            final int x,
            final int y,
            final int yOffset,
            final NumericIndexStrategy indexStrategy,
            final AffineTransform worldToScreen,
            final MathTransform worldToData )
            throws NoninvertibleTransformException,
            MismatchedDimensionException {
        // add one to the pixel in height because pixel space starts at
        // the top and goes down so to get the lower left spatial
        // coordinate for a pixel you want the [x, y+1] in pixel space
        final Point2D pixel = new Point2D.Double(
                x,
                y + yOffset);
        final Point2D mapProjPt = worldToScreen.inverseTransform(
                pixel,
                null);
        final DirectPosition2D lonLat = new DirectPosition2D();
        if (worldToData != null) {
            try {
                worldToData.transform(
                        new DirectPosition2D(
                                mapProjPt),
                        lonLat);
            }
            catch (final TransformException e) {
                LOGGER.warn(
                        "Unable to transform point from map projection to default data projection (EPSG:4326)",
                        e);
            }
        }
        else {
            // No transform available: assume the map projection already
            // matches the data projection.
            lonLat.setLocation(mapProjPt);
        }
        final List<ByteArrayId> rowIdsForPixel = indexStrategy.getInsertionIds(GeometryUtils.basicConstraintsFromPoint(
                lonLat.getY(),
                lonLat.getX()).getIndexConstraints(
                indexStrategy));
        // this should be of size one if its a simple spatial index
        if (!rowIdsForPixel.isEmpty()) {
            final ByteArrayId rowId = rowIdsForPixel.get(0);
            return new Key(
                    new Text(
                            rowId.getBytes()));
        }
        return null;
    }

    /**
     * Maps a pixel coordinate to its linear cell index (column-major over the
     * cell grid when a pixelSize is configured, over raw pixels otherwise).
     */
    protected int getPixelIndex(
            final int x,
            final int y ) {
        if ((pixelSize != null) && (pixelSize != 1)) {
            return ((x / pixelSize) * (int) Math.ceil((double) height / (double) pixelSize)) + (y / pixelSize);
        }
        return (x * height) + y;
    }

    /**
     * Returns the first tracked row ID not before the given row, pruning any
     * tracked rows that precede it; null when none remain.
     */
    public Key getNextRow(
            final Key currentRow ) {
        final Iterator<Key> rowIdIt = rowIdToPixelsMap.keySet().iterator();
        while (rowIdIt.hasNext()) {
            final Key rowId = rowIdIt.next();
            if (rowId.compareTo(currentRow) < 0) {
                rowIdIt.remove();
            }
            else {
                return rowId;
            }
        }
        return null;
    }

    /**
     * True when the row falls in an already-painted range. As a side effect
     * may decimate the row-ID range spanning the current row (at most once
     * per row, tracked via lastRow).
     */
    public boolean isPainted(
            final Key rowId ) {
        for (final Range range : paintedPixelRanges) {
            if (range.contains(rowId)) {
                return true;
            }
        }
        if ((currentRow != null) && ((lastRow == null) || !lastRow.equals(currentRow))) {
            // this case occurs if the pixel has already been decimated, to
            // prevent over sampling when the rows do not match the pixels
            // tracking the last row just is a performance improvement to ensure
            // this doesn't happen many times continuously (typically a single
            // row/feature results in many pixels being painted, we only need to
            // perform this decimation once per row)
            final Set<Integer> pixelIds = rowIdToPixelsMap.get(currentRow);
            lastRow = currentRow;
            if ((pixelIds != null) && (pixelIds.size() > ((pixelSize != null) ? pixelSize : 1))) {
                // this row ID spans multiple pixels, and more pixels than our
                // threshold on pixel size
                // we cannot decimate
                return false;
            }
            // otherwise decimate the range of row IDs spanning the pixel for
            // the current row
            final Key start = rowIdToPixelsMap.floorKey(currentRow);
            final Key stop = rowIdToPixelsMap.higherKey(currentRow);
            final Range decimatedRange = new Range(
                    start,
                    true,
                    stop,
                    false);
            incorporatePaintedPixel(decimatedRange);
        }
        return false;
    }

    public void notifyPixelPainted(
            final int x,
            final int y,
            final boolean shouldDecimate ) {
        // pass all final decisions to decimate into this function so that if
        // counts need to be applied, they can be applied here
        if (shouldDecimate) {
            final int pixelIndex = getPixelIndex(
                    x,
                    y);
            decimate(pixelIndex);
        }
    }

    /**
     * Drops a pixel from the tracking maps; when this removes the last pixel
     * of its row ID, the row-ID range up to the next tracked row is marked
     * painted. Returns true if a range was decimated.
     */
    protected boolean decimate(
            final int pixelIndex ) {
        final Key rowId = pixelToRowIdMap.remove(pixelIndex);
        if (rowId != null) {
            final Set<Integer> pixelIds = rowIdToPixelsMap.get(rowId);
            if (pixelIds != null) {
                pixelIds.remove(pixelIndex);
                if (pixelIds.isEmpty()) {
                    rowIdToPixelsMap.remove(rowId);
                    // mark all rows between this pixel and the next pixel;
                    // a null upper bound represents positive infinity
                    final Range decimatedRange = new Range(
                            rowId,
                            true,
                            rowIdToPixelsMap.higherKey(rowId),
                            false);
                    incorporatePaintedPixel(decimatedRange);
                    return true;
                }
            }
        }
        return false;
    }

    protected void incorporatePaintedPixel(
            final Range newRange ) {
        // keep paintedPixelRanges normalized so isPainted() stays cheap
        paintedPixelRanges.add(newRange);
        paintedPixelRanges = Range.mergeOverlapping(paintedPixelRanges);
    }

    public boolean setCurrentRow(
            final Key currentRow ) {
        this.currentRow = currentRow;
        return true;
    }
}
| |
package com.devnatres.dashproject.debug;
import com.badlogic.gdx.graphics.Camera;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.g2d.Batch;
import com.badlogic.gdx.graphics.g2d.BitmapFont;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.graphics.glutils.ShapeRenderer;
import com.badlogic.gdx.math.Rectangle;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.utils.Array;
import com.devnatres.dashproject.DashGame;
import com.devnatres.dashproject.dnagdx.DnaCamera;
import java.util.ArrayList;
/**
 * Debug supporting class: collects points, rectangles and collision cells
 * during a frame and renders them as overlays. All features are compiled out
 * when {@link #DEBUG} is false. <br>
 * <br>
 * Created by DevNatres on 08/12/2014.
 */
public abstract class Debug {
    public static final boolean DEBUG = false;
    private static final boolean DEBUG_FRAMES = DEBUG && true;
    private static final boolean DEBUG_POINTS = DEBUG && false;
    private static final boolean DEBUG_RECTANGLES = DEBUG && false;
    private static final boolean DEBUG_COLLISIONS = DEBUG && false;
    public static final boolean IMMORTAL = false;

    private static ArrayList<Vector2> points;
    private static ArrayList<Color> pointColors;
    private static ArrayList<Rectangle> rectangles;
    private static ArrayList<Color> rectangleColors;
    private static ShapeRenderer shape;
    private static Batch batch;
    private static DnaCamera gameCamera;
    private static DnaCamera debugCamera;
    private static boolean initialized;
    private static BitmapFont font;
    private static Array<DebugCell> testCells;
    private static int count;

    /** Allocates all debug resources; must be paired with {@link #end()}. */
    public static void begin(DnaCamera gameCamera) {
        if (!DEBUG) return;
        points = new ArrayList<Vector2>();
        pointColors = new ArrayList<Color>();
        rectangles = new ArrayList<Rectangle>();
        rectangleColors = new ArrayList<Color>();
        testCells = new Array<DebugCell>();
        Debug.gameCamera = gameCamera;
        batch = new SpriteBatch();
        shape = new ShapeRenderer();
        // NOTE(review): the returned instance used to be stored in an unused
        // local; only the call is kept in case getInstance() has an
        // initialization side effect — confirm it is needed at all.
        DashGame.getInstance();
        debugCamera = new DnaCamera();
        font = new BitmapFont();
        font.setColor(Color.GREEN);
        count = 0;
        DebugFPS.start();
        initialized = true;
    }

    /** Disposes the resources created by {@link #begin(DnaCamera)}. */
    public static void end() {
        if (!DEBUG) return;
        font.dispose();
        shape.dispose();
        batch.dispose();
        initialized = false;
    }

    /** Renders the FPS overlay (current/avg/min/max) in debug-camera space. */
    public static void drawFrames() {
        if (!DEBUG_FRAMES) return;
        DebugFPS.update();
        debugCamera.update();
        batch.setProjectionMatrix(debugCamera.combined);
        batch.begin();
        font.draw(batch,"ALPHA v0.8 - FPS: " + DebugFPS.measuredFPS
                + " AVG(" + DebugFPS.measuredFpsList.length + "s): " + DebugFPS.avgFps
                + " MIN(" + DebugFPS.measuredFpsList.length + "s): " + DebugFPS.minFps
                + " MAX(" + DebugFPS.measuredFpsList.length + "s): " + DebugFPS.maxFps
                , 10, 15);
        batch.end();
    }

    public static void addPoint(float x, float y, Color color) {
        if (!DEBUG_POINTS) return;
        points.add(new Vector2(x, y));
        pointColors.add(color);
    }

    /** Tracks the rectangle by reference (duplicates are ignored). */
    public static void addRectangleRef(Rectangle rectangle, Color color) {
        if (!DEBUG_RECTANGLES) return;
        if (!rectangles.contains(rectangle)) {
            rectangles.add(rectangle);
            rectangleColors.add(color);
        }
    }

    public static void removeRectangleRef() {
        if (!DEBUG_RECTANGLES) return;
        rectangles.clear();
        rectangleColors.clear();
    }

    /** Records a visited collision cell; extra cells are added on segment change. */
    public static void addTestCell(int column, int row, boolean segmentChange, int stepColumn, int stepRow) {
        if (!DEBUG_COLLISIONS) return;
        testCells.add(new DebugCell(column, row, 0));
        if (segmentChange) {
            testCells.add(new DebugCell(column - stepColumn, row, 1));
            testCells.add(new DebugCell(column, row - stepRow, 1));
        }
    }

    /** Draws an error message over a black box in debug-camera space. */
    public static void drawError(String errorMessage) {
        if (!(initialized && DEBUG)) return;
        shape.setProjectionMatrix(debugCamera.combined);
        shape.setColor(Color.BLACK);
        shape.begin(ShapeRenderer.ShapeType.Filled);
        shape.box(20, 10, 0, 440, 50, 0);
        shape.end();
        batch.setProjectionMatrix(debugCamera.combined);
        batch.begin();
        font.draw(batch, errorMessage, 50, 50);
        batch.end();
    }

    /** Draws all enabled overlays for the current frame. */
    public static void draw() {
        if (!(initialized && DEBUG)) return;
        if (DEBUG_POINTS) drawPoints(gameCamera);
        if (DEBUG_RECTANGLES) drawRectangles(gameCamera);
        if (DEBUG_COLLISIONS) drawCollisions(gameCamera);
    }

    private static void drawPoints(Camera gameCamera) {
        shape.setProjectionMatrix(gameCamera.combined);
        for (int i = 0, n = points.size(); i < n; i++) {
            Vector2 vector = points.get(i);
            Color color = pointColors.get(i);
            shape.setColor(color);
            shape.begin(ShapeRenderer.ShapeType.Filled);
            shape.circle(vector.x, vector.y, 5);
            shape.end();
        }
    }

    private static void drawRectangles(Camera gameCamera) {
        shape.setProjectionMatrix(gameCamera.combined);
        for (int i = 0, n = rectangles.size(); i < n; i++) {
            Rectangle rectangle = rectangles.get(i);
            Color color = rectangleColors.get(i);
            shape.setColor(color);
            shape.begin(ShapeRenderer.ShapeType.Line);
            shape.rect(rectangle.x, rectangle.y, rectangle.width, rectangle.height);
            shape.end();
        }
    }

    private static void drawCollisions(Camera gameCamera) {
        shape.setProjectionMatrix(gameCamera.combined);
        for (int i = 0; i < testCells.size; i++) {
            DebugCell debugCell = testCells.get(i);
            if (debugCell.getType() == 0) {
                shape.setColor(255, 255, 255, .7f);
            } else {
                shape.setColor(128, 0, 128, .7f);
            }
            shape.begin(ShapeRenderer.ShapeType.Filled);
            shape.circle((debugCell.getColumn() * DebugCell.CELL_PIXEL_WIDTH) + DebugCell.CELL_PIXEL_WIDTH / 2,
                    (debugCell.getRow() * DebugCell.CELL_PIXEL_HEIGHT) + DebugCell.CELL_PIXEL_HEIGHT / 2,
                    DebugCell.CELL_PIXEL_WIDTH / 2);
            shape.end();
        }
        // cells are per-frame; clear after drawing
        testCells.clear();
    }

    public static void addCount() {
        if (!DEBUG) return;
        count++;
    }

    public static int getCount() {
        return count;
    }

    public static void resetCount() {
        count = 0;
    }

    public static void doNothing() {}

    // Utility class: not instantiable (stray empty statement removed).
    private Debug() {}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.LongBitSet;
import org.elasticsearch.common.lucene.IndexCacheableQuery;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.NoopCollector;
import org.elasticsearch.index.fielddata.AtomicParentChildFieldData;
import org.elasticsearch.index.fielddata.IndexParentChildFieldData;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import java.io.IOException;
import java.util.List;
import java.util.Set;
/**
 * A query that only returns child documents linked to the parent documents
 * that matched the inner (parent) query. Matching children all receive the
 * same constant score.
 */
public class ParentConstantScoreQuery extends IndexCacheableQuery {
    // Source of global parent/child ordinals.
    private final ParentChildIndexFieldData parentChildIndexFieldData;
    // Selects the parent documents; replaced by a rewritten copy in rewrite().
    private Query parentQuery;
    private final String parentType;
    // Restricts the candidate child documents.
    private final Filter childrenFilter;

    public ParentConstantScoreQuery(ParentChildIndexFieldData parentChildIndexFieldData, Query parentQuery, String parentType, Filter childrenFilter) {
        this.parentChildIndexFieldData = parentChildIndexFieldData;
        this.parentQuery = parentQuery;
        this.parentType = parentType;
        this.childrenFilter = childrenFilter;
    }

    @Override
    public Query rewrite(IndexReader reader) throws IOException {
        Query parentRewritten = parentQuery.rewrite(reader);
        if (parentRewritten != parentQuery) {
            // Wrap the rewritten parent query, carrying the boost over.
            Query rewritten = new ParentConstantScoreQuery(parentChildIndexFieldData, parentRewritten, parentType, childrenFilter);
            rewritten.setBoost(getBoost());
            return rewritten;
        }
        return super.rewrite(reader);
    }

    @Override
    public Weight doCreateWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
        IndexParentChildFieldData globalIfd = parentChildIndexFieldData.loadGlobal(searcher.getIndexReader());
        final long maxOrd;
        List<LeafReaderContext> leaves = searcher.getIndexReader().leaves();
        if (globalIfd == null || leaves.isEmpty()) {
            // No parent/child data at all: an empty BooleanQuery matches nothing.
            return new BooleanQuery().createWeight(searcher, needsScores);
        } else {
            AtomicParentChildFieldData afd = globalIfd.load(leaves.get(0));
            SortedDocValues globalValues = afd.getOrdinalsValues(parentType);
            maxOrd = globalValues.getValueCount();
        }
        if (maxOrd == 0) {
            return new BooleanQuery().createWeight(searcher, needsScores);
        }
        // First pass: collect the global ordinals of every matching parent.
        ParentOrdsCollector collector = new ParentOrdsCollector(globalIfd, maxOrd, parentType);
        searcher.search(parentQuery, collector);
        if (collector.parentCount() == 0) {
            return new BooleanQuery().createWeight(searcher, needsScores);
        }
        return new ChildrenWeight(this, childrenFilter, collector, globalIfd);
    }

    @Override
    public int hashCode() {
        int result = super.hashCode();
        result = 31 * result + parentQuery.hashCode();
        result = 31 * result + parentType.hashCode();
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        // NOTE(review): childrenFilter is not part of equals()/hashCode();
        // since this query is cacheable (IndexCacheableQuery), confirm two
        // instances differing only in that filter can never be conflated.
        if (this == obj) {
            return true;
        }
        if (super.equals(obj) == false) {
            return false;
        }
        ParentConstantScoreQuery that = (ParentConstantScoreQuery) obj;
        if (!parentQuery.equals(that.parentQuery)) {
            return false;
        }
        if (!parentType.equals(that.parentType)) {
            return false;
        }
        return true;
    }

    @Override
    public String toString(String field) {
        return "parent_filter[" + parentType + "](" + parentQuery + ')';
    }

    /**
     * Second-pass weight: matches child documents whose parent ordinal was
     * collected during the parent-query pass.
     */
    private final class ChildrenWeight extends Weight {
        private final IndexParentChildFieldData globalIfd;
        private final Filter childrenFilter;
        // Global ordinals of the matching parents.
        private final LongBitSet parentOrds;
        private float queryNorm;
        private float queryWeight;

        private ChildrenWeight(Query query, Filter childrenFilter, ParentOrdsCollector collector, IndexParentChildFieldData globalIfd) {
            super(query);
            this.globalIfd = globalIfd;
            this.childrenFilter = childrenFilter;
            this.parentOrds = collector.parentOrds;
        }

        @Override
        public void extractTerms(Set<Term> terms) {
        }

        @Override
        public Explanation explain(LeafReaderContext context, int doc) throws IOException {
            return Explanation.match(getBoost(), "not implemented yet...");
        }

        @Override
        public float getValueForNormalization() throws IOException {
            queryWeight = getBoost();
            return queryWeight * queryWeight;
        }

        @Override
        public void normalize(float norm, float topLevelBoost) {
            this.queryNorm = norm * topLevelBoost;
            queryWeight *= this.queryNorm;
        }

        @Override
        public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
            DocIdSet childrenDocIdSet = childrenFilter.getDocIdSet(context, acceptDocs);
            if (Lucene.isEmpty(childrenDocIdSet)) {
                return null;
            }
            SortedDocValues globalValues = globalIfd.load(context).getOrdinalsValues(parentType);
            if (globalValues != null) {
                DocIdSetIterator innerIterator = childrenDocIdSet.iterator();
                if (innerIterator != null) {
                    // Filter the children down to those whose parent ordinal
                    // is in parentOrds; all matches share the constant score.
                    ChildrenDocIdIterator childrenDocIdIterator = new ChildrenDocIdIterator(
                            innerIterator, parentOrds, globalValues
                    );
                    return ConstantScorer.create(childrenDocIdIterator, this, queryWeight);
                }
            }
            return null;
        }
    }

    /** Accepts a child doc only when its parent's global ordinal was collected. */
    private final class ChildrenDocIdIterator extends FilteredDocIdSetIterator {
        private final LongBitSet parentOrds;
        private final SortedDocValues globalOrdinals;

        ChildrenDocIdIterator(DocIdSetIterator innerIterator, LongBitSet parentOrds, SortedDocValues globalOrdinals) {
            super(innerIterator);
            this.parentOrds = parentOrds;
            this.globalOrdinals = globalOrdinals;
        }

        @Override
        protected boolean match(int docId) {
            int globalOrd = globalOrdinals.getOrd(docId);
            // A negative ordinal means the doc has no value for this field.
            if (globalOrd >= 0) {
                return parentOrds.get(globalOrd);
            } else {
                return false;
            }
        }
    }

    /** Records the global ordinal of every parent document the query matches. */
    private final static class ParentOrdsCollector extends NoopCollector {
        private final LongBitSet parentOrds;
        private final IndexParentChildFieldData globalIfd;
        private final String parentType;
        private SortedDocValues globalOrdinals;

        ParentOrdsCollector(IndexParentChildFieldData globalIfd, long maxOrd, String parentType) {
            this.parentOrds = new LongBitSet(maxOrd);
            this.globalIfd = globalIfd;
            this.parentType = parentType;
        }

        @Override
        public void collect(int doc) throws IOException {
            // It can happen that for particular segment no document exist for an specific type. This prevents NPE
            if (globalOrdinals != null) {
                long globalOrd = globalOrdinals.getOrd(doc);
                if (globalOrd >= 0) {
                    parentOrds.set(globalOrd);
                }
            }
        }

        @Override
        public void doSetNextReader(LeafReaderContext readerContext) throws IOException {
            globalOrdinals = globalIfd.load(readerContext).getOrdinalsValues(parentType);
        }

        public long parentCount() {
            return parentOrds.cardinality();
        }
    }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.application.options;
import com.intellij.ide.highlighter.JavaHighlightingColors;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CommonShortcuts;
import com.intellij.openapi.actionSystem.ShortcutSet;
import com.intellij.openapi.application.ApplicationBundle;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.psi.codeStyle.PackageEntry;
import com.intellij.psi.codeStyle.PackageEntryTable;
import com.intellij.ui.*;
import com.intellij.ui.components.JBCheckBox;
import com.intellij.ui.table.JBTable;
import com.intellij.util.IconUtil;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.CellEditorListener;
import javax.swing.event.ChangeEvent;
import javax.swing.table.AbstractTableModel;
import javax.swing.table.TableCellEditor;
import javax.swing.table.TableColumn;
import javax.swing.table.TableModel;
import java.awt.*;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
/**
 * Panel for configuring the import-layout code style: an ordered list of package entries,
 * blank-line separators and (optionally) separately laid out static imports, backed by a
 * {@link PackageEntryTable}. Subclasses implement {@link #refresh()} to update any
 * dependent UI whenever the layout model changes.
 *
 * @author Max Medvedev
 */
public abstract class ImportLayoutPanel extends JPanel {
  private final JBCheckBox myCbLayoutStaticImportsSeparately = new JBCheckBox("Layout static imports separately");
  private final JBTable myImportLayoutTable;
  private final PackageEntryTable myImportLayoutList = new PackageEntryTable();

  public JBTable getImportLayoutTable() {
    return myImportLayoutTable;
  }

  public PackageEntryTable getImportLayoutList() {
    return myImportLayoutList;
  }

  public JBCheckBox getCbLayoutStaticImportsSeparately() {
    return myCbLayoutStaticImportsSeparately;
  }

  public ImportLayoutPanel() {
    super(new BorderLayout());
    setBorder(IdeBorderFactory.createTitledBorder(ApplicationBundle.message("title.import.layout"), false, JBUI.emptyInsets()));

    myCbLayoutStaticImportsSeparately.addItemListener(new ItemListener() {
      public void itemStateChanged(ItemEvent e) {
        if (areStaticImportsEnabled()) {
          // Ensure the mandatory "all other static imports" entry exists, preceded by a
          // blank line unless the list is empty or already ends with one.
          boolean found = false;
          for (int i = myImportLayoutList.getEntryCount() - 1; i >= 0; i--) {
            PackageEntry entry = myImportLayoutList.getEntryAt(i);
            if (entry == PackageEntry.ALL_OTHER_STATIC_IMPORTS_ENTRY) {
              found = true;
              break;
            }
          }
          if (!found) {
            int index = myImportLayoutList.getEntryCount();
            if (index != 0 && myImportLayoutList.getEntryAt(index - 1) != PackageEntry.BLANK_LINE_ENTRY) {
              myImportLayoutList.addEntry(PackageEntry.BLANK_LINE_ENTRY);
            }
            myImportLayoutList.addEntry(PackageEntry.ALL_OTHER_STATIC_IMPORTS_ENTRY);
          }
        }
        else {
          // Static imports are no longer laid out separately: drop every static entry.
          for (int i = myImportLayoutList.getEntryCount() - 1; i >= 0; i--) {
            PackageEntry entry = myImportLayoutList.getEntryAt(i);
            if (entry.isStatic()) {
              myImportLayoutList.removeEntryAt(i);
            }
          }
        }
        refresh();
      }
    });
    add(myCbLayoutStaticImportsSeparately, BorderLayout.NORTH);

    JPanel importLayoutPanel = ToolbarDecorator.createDecorator(myImportLayoutTable = createTableForPackageEntries(myImportLayoutList, this))
      .addExtraAction(new DumbAwareActionButton(ApplicationBundle.message("button.add.package"), IconUtil.getAddPackageIcon()) {
        @Override
        public void actionPerformed(AnActionEvent e) {
          addPackageToImportLayouts();
        }

        @Override
        public ShortcutSet getShortcut() {
          return CommonShortcuts.getNewForDialogs();
        }
      })
      .addExtraAction(new DumbAwareActionButton(ApplicationBundle.message("button.add.blank"), IconUtil.getAddBlankLineIcon()) {
        @Override
        public void actionPerformed(AnActionEvent e) {
          addBlankLine();
        }
      })
      .setRemoveAction(new AnActionButtonRunnable() {
        @Override
        public void run(AnActionButton button) {
          removeEntryFromImportLayouts();
        }
      })
      .setMoveUpAction(new AnActionButtonRunnable() {
        @Override
        public void run(AnActionButton button) {
          moveRowUp();
        }
      })
      .setMoveDownAction(new AnActionButtonRunnable() {
        @Override
        public void run(AnActionButton button) {
          moveRowDown();
        }
      })
      .setRemoveActionUpdater(new AnActionButtonUpdater() {
        @Override
        public boolean isEnabled(AnActionEvent e) {
          // The two "all other imports" entries are mandatory and must never be removable.
          int selectedImport = myImportLayoutTable.getSelectedRow();
          PackageEntry entry = selectedImport < 0 ? null : myImportLayoutList.getEntryAt(selectedImport);
          return entry != null && entry != PackageEntry.ALL_OTHER_STATIC_IMPORTS_ENTRY && entry != PackageEntry.ALL_OTHER_IMPORTS_ENTRY;
        }
      })
      .setButtonComparator(ApplicationBundle.message("button.add.package"), ApplicationBundle.message("button.add.blank"), "Remove", "Up", "Down")
      .setPreferredSize(new Dimension(-1, 100)).createPanel();
    add(importLayoutPanel, BorderLayout.CENTER);
  }

  /** Re-applies the current model to dependent UI; called after every model change. */
  public abstract void refresh();

  private void addPackageToImportLayouts() {
    // Insert right below the current selection, or append when nothing is selected.
    // (Previously `getSelectedRow() + 1` was compared to < 0, which could never be
    // true, so a no-selection add landed at row 0 instead of at the end.)
    int selectedRow = myImportLayoutTable.getSelectedRow();
    int selected = selectedRow < 0 ? myImportLayoutList.getEntryCount() : selectedRow + 1;
    PackageEntry entry = new PackageEntry(false, "", true);
    myImportLayoutList.insertEntryAt(entry, selected);
    refreshTableModel(selected, myImportLayoutTable);
  }

  private void addBlankLine() {
    // Insert right below the current selection, or append when nothing is selected.
    int selectedRow = myImportLayoutTable.getSelectedRow();
    int selected = selectedRow < 0 ? myImportLayoutList.getEntryCount() : selectedRow + 1;
    myImportLayoutList.insertEntryAt(PackageEntry.BLANK_LINE_ENTRY, selected);
    AbstractTableModel model = (AbstractTableModel)myImportLayoutTable.getModel();
    model.fireTableRowsInserted(selected, selected);
    myImportLayoutTable.setRowSelectionInterval(selected, selected);
  }

  private void removeEntryFromImportLayouts() {
    int selected = myImportLayoutTable.getSelectedRow();
    if (selected < 0) {
      return;
    }
    PackageEntry entry = myImportLayoutList.getEntryAt(selected);
    // The mandatory "all other imports" entries cannot be deleted.
    if (entry == PackageEntry.ALL_OTHER_STATIC_IMPORTS_ENTRY || entry == PackageEntry.ALL_OTHER_IMPORTS_ENTRY) {
      return;
    }
    TableUtil.stopEditing(myImportLayoutTable);
    myImportLayoutList.removeEntryAt(selected);
    AbstractTableModel model = (AbstractTableModel)myImportLayoutTable.getModel();
    model.fireTableRowsDeleted(selected, selected);
    // Keep a sensible selection after the removal.
    if (selected >= myImportLayoutList.getEntryCount()) {
      selected--;
    }
    if (selected >= 0) {
      myImportLayoutTable.setRowSelectionInterval(selected, selected);
    }
  }

  private void moveRowUp() {
    int selected = myImportLayoutTable.getSelectedRow();
    if (selected < 1) {
      return;
    }
    TableUtil.stopEditing(myImportLayoutTable);
    // Swap the selected entry with its predecessor and move the selection with it.
    PackageEntry entry = myImportLayoutList.getEntryAt(selected);
    PackageEntry previousEntry = myImportLayoutList.getEntryAt(selected - 1);
    myImportLayoutList.setEntryAt(previousEntry, selected);
    myImportLayoutList.setEntryAt(entry, selected - 1);
    AbstractTableModel model = (AbstractTableModel)myImportLayoutTable.getModel();
    model.fireTableRowsUpdated(selected - 1, selected);
    myImportLayoutTable.setRowSelectionInterval(selected - 1, selected - 1);
  }

  private void moveRowDown() {
    int selected = myImportLayoutTable.getSelectedRow();
    if (selected >= myImportLayoutList.getEntryCount() - 1) {
      return;
    }
    TableUtil.stopEditing(myImportLayoutTable);
    // Swap the selected entry with its successor and move the selection with it.
    PackageEntry entry = myImportLayoutList.getEntryAt(selected);
    PackageEntry nextEntry = myImportLayoutList.getEntryAt(selected + 1);
    myImportLayoutList.setEntryAt(nextEntry, selected);
    myImportLayoutList.setEntryAt(entry, selected + 1);
    AbstractTableModel model = (AbstractTableModel)myImportLayoutTable.getModel();
    model.fireTableRowsUpdated(selected, selected + 1);
    myImportLayoutTable.setRowSelectionInterval(selected + 1, selected + 1);
  }

  public boolean areStaticImportsEnabled() {
    return myCbLayoutStaticImportsSeparately.isSelected();
  }

  /**
   * Creates a table view over {@code packageTable}. When static imports are laid out
   * separately the table shows three columns (static flag, package, with-subpackages),
   * otherwise two. Model column indices are shifted to a canonical 3-column layout
   * (0 = static, 1 = package, 2 = subpackages) before dispatching.
   */
  public static JBTable createTableForPackageEntries(final PackageEntryTable packageTable, final ImportLayoutPanel panel) {
    final String[] names = {
      ApplicationBundle.message("listbox.import.package"),
      ApplicationBundle.message("listbox.import.with.subpackages"),
    };
    // Create a model of the data.
    TableModel dataModel = new AbstractTableModel() {
      public int getColumnCount() {
        return names.length + (panel.areStaticImportsEnabled() ? 1 : 0);
      }

      public int getRowCount() {
        return packageTable.getEntryCount();
      }

      @Nullable
      public Object getValueAt(int row, int col) {
        PackageEntry entry = packageTable.getEntryAt(row);
        if (entry == null || !isCellEditable(row, col)) return null;
        // Shift to the canonical 3-column layout when the static column is hidden.
        col += panel.areStaticImportsEnabled() ? 0 : 1;
        if (col == 0) {
          return entry.isStatic();
        }
        if (col == 1) {
          return entry.getPackageName();
        }
        if (col == 2) {
          return entry.isWithSubpackages();
        }
        throw new IllegalArgumentException(String.valueOf(col));
      }

      public String getColumnName(int column) {
        if (panel.areStaticImportsEnabled() && column == 0) return "Static";
        column -= panel.areStaticImportsEnabled() ? 1 : 0;
        return names[column];
      }

      public Class getColumnClass(int col) {
        col += panel.areStaticImportsEnabled() ? 0 : 1;
        if (col == 0) {
          return Boolean.class;
        }
        if (col == 1) {
          return String.class;
        }
        if (col == 2) {
          return Boolean.class;
        }
        throw new IllegalArgumentException(String.valueOf(col));
      }

      public boolean isCellEditable(int row, int col) {
        PackageEntry packageEntry = packageTable.getEntryAt(row);
        return !packageEntry.isSpecial();
      }

      public void setValueAt(Object aValue, int row, int col) {
        PackageEntry packageEntry = packageTable.getEntryAt(row);
        col += panel.areStaticImportsEnabled() ? 0 : 1;
        // PackageEntry is replaced wholesale on each edit.
        if (col == 0) {
          PackageEntry newPackageEntry = new PackageEntry((Boolean)aValue, packageEntry.getPackageName(), packageEntry.isWithSubpackages());
          packageTable.setEntryAt(newPackageEntry, row);
        }
        else if (col == 1) {
          PackageEntry newPackageEntry =
            new PackageEntry(packageEntry.isStatic(), ((String)aValue).trim(), packageEntry.isWithSubpackages());
          packageTable.setEntryAt(newPackageEntry, row);
        }
        else if (col == 2) {
          PackageEntry newPackageEntry =
            new PackageEntry(packageEntry.isStatic(), packageEntry.getPackageName(), ((Boolean)aValue).booleanValue());
          packageTable.setEntryAt(newPackageEntry, row);
        }
        else {
          throw new IllegalArgumentException(String.valueOf(col));
        }
      }
    };

    // Create the table
    final JBTable result = new JBTable(dataModel);
    result.getSelectionModel().setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
    resizeColumns(packageTable, result, panel.areStaticImportsEnabled());

    // Single click starts editing the package-name cell.
    TableCellEditor editor = result.getDefaultEditor(String.class);
    if (editor instanceof DefaultCellEditor) {
      ((DefaultCellEditor)editor).setClickCountToStart(1);
    }

    TableCellEditor beditor = result.getDefaultEditor(Boolean.class);
    beditor.addCellEditorListener(new CellEditorListener() {
      public void editingStopped(ChangeEvent e) {
        if (panel.areStaticImportsEnabled()) {
          result.repaint(); // add/remove static keyword
        }
      }

      public void editingCanceled(ChangeEvent e) {
      }
    });
    return result;
  }

  public static void resizeColumns(final PackageEntryTable packageTable, JBTable result, boolean areStaticImportsEnabled) {
    ColoredTableCellRenderer packageRenderer = new ColoredTableCellRenderer() {
      @Override
      protected void customizeCellRenderer(JTable table, Object value, boolean selected, boolean hasFocus, int row, int column) {
        PackageEntry entry = packageTable.getEntryAt(row);
        if (entry == PackageEntry.BLANK_LINE_ENTRY) {
          append("<blank line>", SimpleTextAttributes.GRAYED_ATTRIBUTES);
        }
        else {
          // Render the entry as the import statement it will produce.
          TextAttributes attributes = JavaHighlightingColors.KEYWORD.getDefaultAttributes();
          append("import", SimpleTextAttributes.fromTextAttributes(attributes));
          if (entry.isStatic()) {
            append(" ", SimpleTextAttributes.REGULAR_ATTRIBUTES);
            append("static", SimpleTextAttributes.fromTextAttributes(attributes));
          }
          append(" ", SimpleTextAttributes.REGULAR_ATTRIBUTES);
          if (entry == PackageEntry.ALL_OTHER_IMPORTS_ENTRY || entry == PackageEntry.ALL_OTHER_STATIC_IMPORTS_ENTRY) {
            append("all other imports", SimpleTextAttributes.REGULAR_ATTRIBUTES);
          }
          else {
            append(entry.getPackageName() + ".*", SimpleTextAttributes.REGULAR_ATTRIBUTES);
          }
        }
      }
    };

    if (areStaticImportsEnabled) {
      fixColumnWidthToHeader(result, 0);
      fixColumnWidthToHeader(result, 2);
      result.getColumnModel().getColumn(1).setCellRenderer(packageRenderer);
      result.getColumnModel().getColumn(0).setCellRenderer(new BooleanTableCellRenderer());
      result.getColumnModel().getColumn(2).setCellRenderer(new BooleanTableCellRenderer());
    }
    else {
      fixColumnWidthToHeader(result, 1);
      result.getColumnModel().getColumn(0).setCellRenderer(packageRenderer);
      result.getColumnModel().getColumn(1).setCellRenderer(new BooleanTableCellRenderer());
    }
  }

  private static void fixColumnWidthToHeader(JBTable result, int columnIdx) {
    // Lock checkbox columns to the width of their header text plus padding.
    final TableColumn column = result.getColumnModel().getColumn(columnIdx);
    final int width =
      15 + result.getTableHeader().getFontMetrics(result.getTableHeader().getFont()).stringWidth(result.getColumnName(columnIdx));
    column.setMinWidth(width);
    column.setMaxWidth(width);
  }

  public static void refreshTableModel(int selectedRow, JBTable table) {
    // Notify the model of the insertion, select the new row, and start editing it.
    AbstractTableModel model = (AbstractTableModel)table.getModel();
    model.fireTableRowsInserted(selectedRow, selectedRow);
    table.setRowSelectionInterval(selectedRow, selectedRow);
    TableUtil.editCellAt(table, selectedRow, 0);
    Component editorComp = table.getEditorComponent();
    if (editorComp != null) {
      IdeFocusManager.getGlobalInstance().doWhenFocusSettlesDown(() -> {
        IdeFocusManager.getGlobalInstance().requestFocus(editorComp, true);
      });
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sling.ide.test.impl;
import static org.apache.sling.ide.test.impl.helpers.jcr.JcrMatchers.hasChildrenCount;
import static org.apache.sling.ide.test.impl.helpers.jcr.JcrMatchers.hasFileContent;
import static org.apache.sling.ide.test.impl.helpers.jcr.JcrMatchers.hasPath;
import static org.apache.sling.ide.test.impl.helpers.jcr.JcrMatchers.hasPrimaryType;
import static org.apache.sling.ide.test.impl.helpers.jcr.JcrMatchers.hasPropertyValue;
import static org.hamcrest.CoreMatchers.allOf;
import static org.hamcrest.CoreMatchers.nullValue;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.concurrent.Callable;
import javax.jcr.Node;
import javax.jcr.RepositoryException;
import org.apache.commons.httpclient.HttpException;
import org.apache.commons.httpclient.URIException;
import org.apache.sling.ide.test.impl.helpers.DisableDebugStatusHandlers;
import org.apache.sling.ide.test.impl.helpers.ExternalSlingLaunchpad;
import org.apache.sling.ide.test.impl.helpers.FailOnUnsuccessfulEventsRule;
import org.apache.sling.ide.test.impl.helpers.LaunchpadConfig;
import org.apache.sling.ide.test.impl.helpers.Poller;
import org.apache.sling.ide.test.impl.helpers.ProjectAdapter;
import org.apache.sling.ide.test.impl.helpers.RepositoryAccessor;
import org.apache.sling.ide.test.impl.helpers.ServerAdapter;
import org.apache.sling.ide.test.impl.helpers.SlingWstServer;
import org.apache.sling.ide.test.impl.helpers.TemporaryProject;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.Path;
import org.eclipse.jdt.core.JavaCore;
import org.hamcrest.Matcher;
import org.junit.After;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.RuleChain;
import org.junit.rules.TestRule;
/**
 * The <tt>ContentDeploymentTest</tt> validates simple content deployment based on resource changes in the
 * workspace.
 *
 */
public class ContentDeploymentTest {

    // Integration-test fixture: these rules start/attach to an externally running
    // Sling launchpad and tear the project down between tests.
    private final LaunchpadConfig config = LaunchpadConfig.getInstance();

    private final SlingWstServer wstServer = new SlingWstServer(config);

    @Rule
    public TestRule chain = RuleChain.outerRule(new ExternalSlingLaunchpad(config)).around(wstServer);

    @Rule
    public TemporaryProject projectRule = new TemporaryProject();

    @Rule
    public FailOnUnsuccessfulEventsRule failOnEventsRule = new FailOnUnsuccessfulEventsRule();

    @Rule
    public DisableDebugStatusHandlers disableDebugHandlers = new DisableDebugStatusHandlers();

    @Test
    public void deployFile() throws CoreException, InterruptedException, URIException, HttpException, IOException {

        wstServer.waitForServerToStart();

        // create faceted project
        IProject contentProject = projectRule.getProject();

        ProjectAdapter project = new ProjectAdapter(contentProject);
        project.addNatures(JavaCore.NATURE_ID, "org.eclipse.wst.common.project.facet.core.nature");

        // install bundle facet
        project.installFacet("sling.content", "1.0");

        ServerAdapter server = new ServerAdapter(wstServer.getServer());
        server.installModule(contentProject);

        project.createOrUpdateFile(Path.fromPortableString("jcr_root/test/hello.txt"), new ByteArrayInputStream(
                "hello, world".getBytes()));

        // verify that file is created
        final RepositoryAccessor repo = new RepositoryAccessor(config);
        Poller poller = new Poller();
        poller.pollUntil(new Callable<Void>() {
            @Override
            public Void call() throws HttpException, IOException {
                repo.assertGetIsSuccessful("test/hello.txt", "hello, world");
                return null;
            }
        }, nullValue(Void.class));

        project.createOrUpdateFile(Path.fromPortableString("jcr_root/test/hello.txt"), new ByteArrayInputStream(
                "goodbye, world".getBytes()));

        // verify that file is updated
        poller.pollUntil(new Callable<Void>() {
            @Override
            public Void call() throws HttpException, IOException {
                repo.assertGetIsSuccessful("test/hello.txt", "goodbye, world");
                return null;
            }
        }, nullValue(Void.class));

        project.deleteMember(Path.fromPortableString("jcr_root/test/hello.txt"));

        // verify that file is deleted
        poller.pollUntil(new Callable<Void>() {
            @Override
            public Void call() throws HttpException, IOException {
                repo.assertGetReturns404("test/hello.txt");
                return null;
            }
        }, nullValue(Void.class));
    }

    @Test
    public void changeNodePrimaryType() throws Exception {

        wstServer.waitForServerToStart();

        // create faceted project
        IProject contentProject = projectRule.getProject();

        ProjectAdapter project = new ProjectAdapter(contentProject);
        project.addNatures("org.eclipse.wst.common.project.facet.core.nature");

        // install bundle facet
        project.installFacet("sling.content", "1.0");

        ServerAdapter server = new ServerAdapter(wstServer.getServer());
        server.installModule(contentProject);

        project.createOrUpdateFile(Path.fromPortableString("jcr_root/test/hello.txt"), new ByteArrayInputStream(
                "hello, world".getBytes()));

        // verifications
        final RepositoryAccessor repo = new RepositoryAccessor(config);
        Poller poller = new Poller();
        assertThatNode(repo, poller, "/test", allOf(hasPath("/test"), hasPrimaryType("nt:folder"), hasChildrenCount(1)));

        // change node type to sling:Folder
        InputStream contentXml = getClass().getResourceAsStream("sling-folder-nodetype.xml");
        project.createOrUpdateFile(Path.fromPortableString("jcr_root/test/.content.xml"), contentXml);

        // verifications (2)
        assertThatNode(repo, poller, "/test",
                allOf(hasPath("/test"), hasPrimaryType("sling:Folder"), hasChildrenCount(1)));
    }

    @Test
    public void deployFileWithAttachedMetadata() throws Exception {

        wstServer.waitForServerToStart();

        // create faceted project
        IProject contentProject = projectRule.getProject();

        ProjectAdapter project = new ProjectAdapter(contentProject);
        project.addNatures(JavaCore.NATURE_ID, "org.eclipse.wst.common.project.facet.core.nature");

        // install bundle facet
        project.installFacet("sling.content", "1.0");

        ServerAdapter server = new ServerAdapter(wstServer.getServer());
        server.installModule(contentProject);

        project.createOrUpdateFile(Path.fromPortableString("jcr_root/test/hello.esp"), new ByteArrayInputStream(
                "// not really javascript".getBytes()));

        // verify that file is created
        final RepositoryAccessor repo = new RepositoryAccessor(config);
        Poller poller = new Poller();
        assertThatNode(repo, poller, "/test/hello.esp", hasPrimaryType("nt:file"));

        // the ".dir/.content.xml" sidecar attaches extra metadata to hello.esp
        InputStream contentXml = getClass().getResourceAsStream("file-custom-mimetype.xml");
        project.createOrUpdateFile(Path.fromPortableString("jcr_root/test/hello.esp.dir/.content.xml"), contentXml);

        assertThatNode(repo, poller, "/test/hello.esp/jcr:content", hasPropertyValue("jcr:mimeType", "text/javascript"));
    }

    @Test
    public void fileDeployedBeforeAddingModuleToServerIsPublished() throws Throwable {

        wstServer.waitForServerToStart();

        // create faceted project
        IProject contentProject = projectRule.getProject();

        ProjectAdapter project = new ProjectAdapter(contentProject);
        project.addNatures(JavaCore.NATURE_ID, "org.eclipse.wst.common.project.facet.core.nature");

        // the file exists in the workspace BEFORE the module is added to the server
        project.createOrUpdateFile(Path.fromPortableString("jcr_root/test/hello.txt"), new ByteArrayInputStream(
                "hello, world".getBytes()));

        // install bundle facet
        project.installFacet("sling.content", "1.0");

        ServerAdapter server = new ServerAdapter(wstServer.getServer());
        server.installModule(contentProject);

        // verify that file is created
        final RepositoryAccessor repo = new RepositoryAccessor(config);
        Poller poller = new Poller();
        poller.pollUntil(new Callable<Node>() {
            @Override
            public Node call() throws RepositoryException {
                return repo.getNode("/test/hello.txt");
            }
        }, hasFileContent("hello, world"));
    }

    /**
     * This test validates that if the parent of a resource does not exist in the repository, the resource is
     * still successfully created
     *
     * @throws Exception
     */
    @Test
    public void deployFileWithMissingParentFromRepository() throws Exception {

        wstServer.waitForServerToStart();

        // create faceted project
        IProject contentProject = projectRule.getProject();

        ProjectAdapter project = new ProjectAdapter(contentProject);
        project.addNatures(JavaCore.NATURE_ID, "org.eclipse.wst.common.project.facet.core.nature");

        // install bundle facet
        project.installFacet("sling.content", "1.0");

        ServerAdapter server = new ServerAdapter(wstServer.getServer());
        server.installModule(contentProject);

        // create filter.xml
        project.createVltFilterWithRoots("/test");

        // create file
        project.createOrUpdateFile(Path.fromPortableString("jcr_root/test/demo/nested/structure/hello.txt"),
                new ByteArrayInputStream("hello, world".getBytes()));

        // verify that file is created
        final RepositoryAccessor repo = new RepositoryAccessor(config);
        Poller poller = new Poller();
        poller.pollUntil(new Callable<Node>() {
            @Override
            public Node call() throws RepositoryException {
                return repo.getNode("/test/demo/nested/structure/hello.txt");
            }
        }, hasFileContent("hello, world"));
    }

    // NOTE(review): method name typo — "filedDeployed" should presumably be "fileDeployed".
    @Test
    public void filedDeployedWithFullCoverageSiblingDoesNotCauseSpuriousDeletion() throws Exception {

        wstServer.waitForServerToStart();

        // create faceted project
        IProject contentProject = projectRule.getProject();

        ProjectAdapter project = new ProjectAdapter(contentProject);
        project.addNatures(JavaCore.NATURE_ID, "org.eclipse.wst.common.project.facet.core.nature");

        // install bundle facet
        project.installFacet("sling.content", "1.0");

        ServerAdapter server = new ServerAdapter(wstServer.getServer());
        server.installModule(contentProject);

        // create sling:Folder at /test/folder
        project.createOrUpdateFile(Path.fromPortableString("jcr_root/test/folder/.content.xml"), getClass()
                .getResourceAsStream("sling-folder-nodetype.xml"));

        // create nt:file at /test/folder/hello.esp
        project.createOrUpdateFile(Path.fromPortableString("jcr_root/test/folder/hello.esp"), new ByteArrayInputStream(
                "// not really javascript".getBytes()));

        // create sling:OsgiConfig at /test/folder/config.xml
        project.createOrUpdateFile(Path.fromPortableString("jcr_root/test/folder/config.xml"), getClass()
                .getResourceAsStream("com.example.some.Component.xml"));

        // verify that config node is created
        final RepositoryAccessor repo = new RepositoryAccessor(config);
        Poller poller = new Poller();
        assertThatNode(repo, poller, "/test/folder/config", hasPrimaryType("sling:OsgiConfig"));

        // update file at /test/folder/hello.esp
        project.createOrUpdateFile(Path.fromPortableString("jcr_root/test/folder/hello.esp"), new ByteArrayInputStream(
                "// maybe javascript".getBytes()));

        // wait until the file is updated
        assertThatNode(repo, poller, "/test/folder/hello.esp", hasFileContent("// maybe javascript"));

        // verify that the sling:OsgiConfig node is still present
        assertThatNode(repo, poller, "/test/folder/config", hasPrimaryType("sling:OsgiConfig"));
    }

    /**
     * Polls the repository until the node at {@code nodePath} satisfies {@code matcher}.
     */
    private void assertThatNode(final RepositoryAccessor repo, Poller poller, final String nodePath, Matcher<Node> matcher)
            throws InterruptedException {
        poller.pollUntil(new Callable<Node>() {
            @Override
            public Node call() throws RepositoryException {
                Node node = repo.getNode(nodePath);
                return node;
            }
        }, matcher);
    }

    /** Best-effort removal of the shared /test subtree between tests. */
    @After
    public void cleanUp() throws Exception {
        new RepositoryAccessor(config).tryDeleteResource("/test");
    }
}
| |
/*
* Copyright 2021 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.plugin.access.common.settings;
import com.thoughtworks.go.plugin.access.common.AbstractExtension;
import com.thoughtworks.go.plugin.access.configrepo.ConfigRepoExtension;
import com.thoughtworks.go.plugin.access.notification.NotificationExtension;
import com.thoughtworks.go.plugin.access.packagematerial.PackageRepositoryExtension;
import com.thoughtworks.go.plugin.access.pluggabletask.TaskExtension;
import com.thoughtworks.go.plugin.access.scm.SCMExtension;
import com.thoughtworks.go.plugin.api.config.Property;
import com.thoughtworks.go.plugin.domain.common.PluginConstants;
import com.thoughtworks.go.plugin.infra.PluginManager;
import com.thoughtworks.go.plugin.infra.plugininfo.GoPluginDescriptor;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;

import java.util.List;
import java.util.UUID;

import static java.util.Arrays.asList;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.Mockito.*;
@ExtendWith(MockitoExtension.class)
public class PluginSettingsMetadataLoaderTest {
@Mock
private PackageRepositoryExtension packageRepositoryExtension;
@Mock(lenient = true)
private SCMExtension scmExtension;
@Mock(lenient = true)
private TaskExtension taskExtension;
@Mock(lenient = true)
private NotificationExtension notificationExtension;
@Mock(lenient = true)
private ConfigRepoExtension configRepoExtension;
@Mock
private PluginManager pluginManager;
private PluginSettingsMetadataLoader metadataLoader;
@BeforeEach
public void setUp() {
    // Each extension must report its name so the loader can tell them apart.
    when(packageRepositoryExtension.extensionName()).thenReturn(PluginConstants.PACKAGE_MATERIAL_EXTENSION);
    when(scmExtension.extensionName()).thenReturn(PluginConstants.SCM_EXTENSION);
    when(notificationExtension.extensionName()).thenReturn(PluginConstants.NOTIFICATION_EXTENSION);
    when(configRepoExtension.extensionName()).thenReturn(PluginConstants.CONFIG_REPO_EXTENSION);
    when(taskExtension.extensionName()).thenReturn(PluginConstants.PLUGGABLE_TASK_EXTENSION);
    List<GoPluginExtension> extensions = asList(packageRepositoryExtension, scmExtension, notificationExtension, configRepoExtension, taskExtension);
    metadataLoader = new PluginSettingsMetadataLoader(extensions, pluginManager);
    // Start from a clean store so earlier tests cannot leak state in.
    PluginSettingsMetadataStore.getInstance().clear();
}
@AfterEach
public void tearDown() {
    // The metadata store is a singleton; clear it so this test cannot leak state out.
    PluginSettingsMetadataStore.getInstance().clear();
}
@Test
public void shouldFetchPluginSettingsMetadataForPluginBasedOnPluginId() throws Exception {
    // Every non-task extension should have its settings metadata fetched and stored.
    List<AbstractExtension> everyExtensionExceptTask = asList(packageRepositoryExtension, scmExtension, notificationExtension, configRepoExtension);

    for (GoPluginExtension extension : everyExtensionExceptTask) {
        PluginSettingsConfiguration configuration = new PluginSettingsConfiguration();
        configuration.add(new PluginSettingsProperty("k1").with(Property.REQUIRED, true).with(Property.SECURE, false));

        // Fresh random plugin id per extension so the store entries do not collide.
        GoPluginDescriptor pluginDescriptor = GoPluginDescriptor.builder().id(UUID.randomUUID().toString()).isBundledPlugin(true).build();
        setupSettingsResponses(extension, pluginDescriptor.id(), configuration, "template");

        metadataLoader.fetchPluginSettingsMetaData(pluginDescriptor);

        verifyMetadataForPlugin(pluginDescriptor.id());
    }
}
@Test
public void shouldNotFetchPluginSettingsMetadataForTaskPlugin() throws Exception {
    PluginSettingsConfiguration configuration = new PluginSettingsConfiguration();
    configuration.add(new PluginSettingsProperty("k1").with(Property.REQUIRED, true).with(Property.SECURE, false));

    GoPluginDescriptor pluginDescriptor = GoPluginDescriptor.builder().id(UUID.randomUUID().toString()).isBundledPlugin(true).build();
    setupSettingsResponses(taskExtension, pluginDescriptor.id(), configuration, "template");

    metadataLoader.fetchPluginSettingsMetaData(pluginDescriptor);

    // Task plugins are excluded: the extension must never be queried and nothing stored.
    verify(taskExtension, never()).getPluginSettingsConfiguration(pluginDescriptor.id());
    verify(taskExtension, never()).getPluginSettingsView(pluginDescriptor.id());
    assertThat(PluginSettingsMetadataStore.getInstance().configuration(pluginDescriptor.id()), is(nullValue()));
}
@Test
public void shouldNotStoreMetadataIfConfigurationIsMissing() {
    // The extension answers with a view template but a null configuration:
    // nothing must be stored. (A locally built configuration that was never
    // passed anywhere has been removed as dead code.)
    GoPluginDescriptor pluginDescriptor = GoPluginDescriptor.builder().id("plugin-id").isBundledPlugin(true).build();
    setupSettingsResponses(packageRepositoryExtension, pluginDescriptor.id(), null, "template");

    metadataLoader.fetchPluginSettingsMetaData(pluginDescriptor);

    assertThat(PluginSettingsMetadataStore.getInstance().hasPlugin(pluginDescriptor.id()), is(false));
}
@Test
public void shouldNotStoreMetadataIfViewTemplateIsMissing() {
    // Neither configuration nor view template is provided: nothing must be stored.
    GoPluginDescriptor pluginDescriptor = GoPluginDescriptor.builder().id("plugin-id").isBundledPlugin(true).build();
    setupSettingsResponses(packageRepositoryExtension, pluginDescriptor.id(), null, null);

    metadataLoader.fetchPluginSettingsMetaData(pluginDescriptor);

    assertThat(PluginSettingsMetadataStore.getInstance().hasPlugin(pluginDescriptor.id()), is(false));
}
@Test
public void shouldRegisterAsPluginFrameworkStartListener() throws Exception {
    // The loader registers itself with the plugin manager during construction (in setUp).
    verify(pluginManager).addPluginChangeListener(metadataLoader);
}
@Test
public void shouldRemoveMetadataOnPluginUnLoadedCallback() throws Exception {
    // Pre-populate the store, then unload the plugin and expect its metadata removed.
    GoPluginDescriptor pluginDescriptor = GoPluginDescriptor.builder().id("plugin-id").isBundledPlugin(true).build();
    PluginSettingsMetadataStore.getInstance().addMetadataFor(pluginDescriptor.id(), PluginConstants.NOTIFICATION_EXTENSION, new PluginSettingsConfiguration(), "template");

    metadataLoader.pluginUnLoaded(pluginDescriptor);

    assertThat(PluginSettingsMetadataStore.getInstance().hasPlugin(pluginDescriptor.id()), is(false));
}
@Test
public void shouldFailWhenAPluginWithMultipleExtensionsHasMoreThanOneExtensionRespondingWithSettings() throws Exception {
    // Two different extensions both answer with valid settings for the same plugin id.
    // That is ambiguous, so the loader must fail and store nothing.
    PluginSettingsConfiguration configuration = new PluginSettingsConfiguration();
    configuration.add(new PluginSettingsProperty("k1").with(Property.REQUIRED, true).with(Property.SECURE, false));

    String pluginID = "plugin-id";
    GoPluginDescriptor pluginDescriptor = GoPluginDescriptor.builder().id(pluginID).build();
    setupSettingsResponses(notificationExtension, pluginID, configuration, "view");
    setupSettingsResponses(packageRepositoryExtension, pluginID, configuration, "view");

    // JUnit 5 idiom: assertThrows instead of the try { ...; fail(); } catch pattern.
    Exception e = assertThrows(Exception.class, () -> metadataLoader.fetchPluginSettingsMetaData(pluginDescriptor),
            "Should have failed since multiple extensions support plugin settings.");

    assertThat(e.getMessage(), containsString("Plugin with ID: plugin-id has more than one extension which supports plugin settings"));
    assertThat(PluginSettingsMetadataStore.getInstance().hasPlugin(pluginDescriptor.id()), is(false));
}
@Test
public void shouldNotFailWhenAPluginWithMultipleExtensionsHasMoreThanOneExtensionRespondingWithSettings_BUT_OnlyOneIsValid() throws Exception {
    String pluginId = "plugin-id";
    PluginSettingsConfiguration settings = new PluginSettingsConfiguration();
    settings.add(new PluginSettingsProperty("k1").with(Property.REQUIRED, true).with(Property.SECURE, false));
    GoPluginDescriptor descriptor = GoPluginDescriptor.builder().id(pluginId).build();

    // Notification extension lacks a view template (incomplete); package repository has both.
    setupSettingsResponses(notificationExtension, pluginId, settings, null);
    setupSettingsResponses(packageRepositoryExtension, pluginId, settings, "view");

    metadataLoader.fetchPluginSettingsMetaData(descriptor);

    // The single complete extension wins and its metadata lands in the store.
    assertThat(PluginSettingsMetadataStore.getInstance().hasPlugin(pluginId), is(true));
    assertThat(PluginSettingsMetadataStore.getInstance().configuration(pluginId), is(settings));
    assertThat(PluginSettingsMetadataStore.getInstance().template(pluginId), is("view"));
    assertThat(PluginSettingsMetadataStore.getInstance().extensionWhichCanHandleSettings(pluginId), is(PluginConstants.PACKAGE_MATERIAL_EXTENSION));
}
@Test
public void shouldNotFailWhenAPluginWithMultipleExtensionsHasMoreThanOneExtensionRespondingWithSettings_BUT_NoneIsValid() throws Exception {
    String pluginId = "plugin-id";
    PluginSettingsConfiguration settings = new PluginSettingsConfiguration();
    settings.add(new PluginSettingsProperty("k1").with(Property.REQUIRED, true).with(Property.SECURE, false));
    GoPluginDescriptor descriptor = GoPluginDescriptor.builder().id(pluginId).build();

    // One extension is missing the view, the other the configuration — neither is complete.
    setupSettingsResponses(notificationExtension, pluginId, settings, null);
    setupSettingsResponses(packageRepositoryExtension, pluginId, null, "view");

    metadataLoader.fetchPluginSettingsMetaData(descriptor);

    assertThat(PluginSettingsMetadataStore.getInstance().hasPlugin(pluginId), is(false));
}
@Test
public void shouldNotFailWhenAPluginWithMultipleExtensionsHasMoreThanOneExtensionRespondingWithSettings_BUT_OneIsValidAndOtherThrowsException() throws Exception {
    String pluginId = "plugin-id";
    PluginSettingsConfiguration settings = new PluginSettingsConfiguration();
    settings.add(new PluginSettingsProperty("k1").with(Property.REQUIRED, true).with(Property.SECURE, false));
    GoPluginDescriptor descriptor = GoPluginDescriptor.builder().id(pluginId).build();

    // The notification extension responds with complete settings metadata...
    setupSettingsResponses(notificationExtension, pluginId, settings, "view");
    // ...while the SCM extension claims the plugin but blows up when queried,
    // and the package repository extension does not claim it at all.
    when(packageRepositoryExtension.canHandlePlugin(pluginId)).thenReturn(false);
    when(scmExtension.canHandlePlugin(pluginId)).thenReturn(true);
    when(scmExtension.getPluginSettingsConfiguration(pluginId)).thenThrow(new RuntimeException("Ouch!"));
    when(scmExtension.getPluginSettingsView(pluginId)).thenReturn("view");

    metadataLoader.fetchPluginSettingsMetaData(descriptor);

    // The valid extension's metadata is kept despite the other one throwing.
    assertThat(PluginSettingsMetadataStore.getInstance().hasPlugin(pluginId), is(true));
    // An extension which cannot handle the plugin must never be queried for settings.
    verify(packageRepositoryExtension, never()).getPluginSettingsConfiguration(pluginId);
    verify(packageRepositoryExtension, never()).getPluginSettingsView(pluginId);
}
@Test
public void shouldReturnNullForExtensionWhichCanHandleSettingsIfPluginDoesNotExist() throws Exception {
    PluginSettingsMetadataStore store = PluginSettingsMetadataStore.getInstance();
    // Neither an unknown plugin id nor an empty one should resolve to an extension.
    assertThat(store.extensionWhichCanHandleSettings("INVALID-PLUGIN"), is(nullValue()));
    assertThat(store.extensionWhichCanHandleSettings(""), is(nullValue()));
}
// Asserts that the store holds exactly the "k1" property (required, not secure)
// and the "template" view for the given plugin.
private void verifyMetadataForPlugin(String pluginId) {
    PluginSettingsMetadataStore store = PluginSettingsMetadataStore.getInstance();

    PluginSettingsConfiguration storedConfiguration = store.configuration(pluginId);
    assertThat(storedConfiguration.size(), is(1));

    PluginSettingsProperty property = (PluginSettingsProperty) storedConfiguration.get("k1");
    assertThat(property.getKey(), is("k1"));
    assertThat(property.getOption(Property.REQUIRED), is(true));
    assertThat(property.getOption(Property.SECURE), is(false));

    assertThat(store.template(pluginId), is("template"));
}
// Stubs the extension so that it claims the plugin and serves the given settings
// configuration and view template (either may be null to simulate an incomplete response).
private void setupSettingsResponses(GoPluginExtension extension, String pluginID, PluginSettingsConfiguration configuration, String viewTemplate) {
    when(extension.getPluginSettingsConfiguration(pluginID)).thenReturn(configuration);
    when(extension.getPluginSettingsView(pluginID)).thenReturn(viewTemplate);
    when(extension.canHandlePlugin(pluginID)).thenReturn(true);
}
}
| |
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.listeners;
import androidx.annotation.NonNull;
import android.text.TextUtils;
import org.matrix.androidsdk.core.Log;
import org.matrix.androidsdk.data.Room;
import org.matrix.androidsdk.data.RoomState;
import org.matrix.androidsdk.rest.model.Event;
import org.matrix.androidsdk.rest.model.User;
import java.util.List;
/**
 * A listener which filters events for a specific room.
 *
 * <p>Each callback forwards to the wrapped {@link IMXEventListener} only when the event
 * belongs to the room this listener was constructed with; any exception thrown by the
 * delegate is caught and logged so a misbehaving listener cannot break event dispatching.
 */
public class MXRoomEventListener extends MXEventListener {
    private static final String LOG_TAG = MXRoomEventListener.class.getSimpleName();

    /** Identifier of the room whose events are forwarded. */
    private final String mRoomId;

    /** The delegate which receives the filtered events. */
    private final IMXEventListener mEventListener;

    private final Room mRoom;

    /**
     * @param room          the room to filter events for
     * @param eventListener the delegate to forward matching events to
     */
    public MXRoomEventListener(@NonNull Room room,
                               @NonNull IMXEventListener eventListener) {
        mRoom = room;
        mRoomId = room.getRoomId();
        mEventListener = eventListener;
    }

    @Override
    public void onPresenceUpdate(Event event, User user) {
        // Only pass event through if the user is a member of the room
        // FIXME LazyLoading. We cannot rely on getMember nullity anymore
        if (mRoom.getMember(user.user_id) != null) {
            try {
                mEventListener.onPresenceUpdate(event, user);
            } catch (Exception e) {
                Log.e(LOG_TAG, "onPresenceUpdate exception " + e.getMessage(), e);
            }
        }
    }

    @Override
    public void onLiveEvent(Event event, RoomState roomState) {
        // Filter out events for other rooms and events while we are joining (before the room is ready)
        if (TextUtils.equals(mRoomId, event.roomId) && mRoom.isReady()) {
            try {
                mEventListener.onLiveEvent(event, roomState);
            } catch (Exception e) {
                Log.e(LOG_TAG, "onLiveEvent exception " + e.getMessage(), e);
            }
        }
    }

    @Override
    public void onLiveEventsChunkProcessed(String fromToken, String toToken) {
        // Not room-specific: always forwarded.
        try {
            mEventListener.onLiveEventsChunkProcessed(fromToken, toToken);
        } catch (Exception e) {
            Log.e(LOG_TAG, "onLiveEventsChunkProcessed exception " + e.getMessage(), e);
        }
    }

    @Override
    public void onEventSentStateUpdated(Event event) {
        // Filter out events for other rooms
        if (TextUtils.equals(mRoomId, event.roomId)) {
            try {
                mEventListener.onEventSentStateUpdated(event);
            } catch (Exception e) {
                Log.e(LOG_TAG, "onEventSentStateUpdated exception " + e.getMessage(), e);
            }
        }
    }

    @Override
    public void onEventDecrypted(String roomId, String eventId) {
        // Filter out events for other rooms
        if (TextUtils.equals(mRoomId, roomId)) {
            try {
                mEventListener.onEventDecrypted(roomId, eventId);
            } catch (Exception e) {
                // Fixed: previously logged the wrong tag "onDecryptedEvent"; every handler
                // logs its own method name for consistency.
                Log.e(LOG_TAG, "onEventDecrypted exception " + e.getMessage(), e);
            }
        }
    }

    @Override
    public void onEventSent(final Event event, final String prevEventId) {
        // Filter out events for other rooms
        if (TextUtils.equals(mRoomId, event.roomId)) {
            try {
                mEventListener.onEventSent(event, prevEventId);
            } catch (Exception e) {
                Log.e(LOG_TAG, "onEventSent exception " + e.getMessage(), e);
            }
        }
    }

    @Override
    public void onRoomInternalUpdate(String roomId) {
        // Filter out events for other rooms
        if (TextUtils.equals(mRoomId, roomId)) {
            try {
                mEventListener.onRoomInternalUpdate(roomId);
            } catch (Exception e) {
                Log.e(LOG_TAG, "onRoomInternalUpdate exception " + e.getMessage(), e);
            }
        }
    }

    @Override
    public void onNotificationCountUpdate(String roomId) {
        // Filter out events for other rooms
        if (TextUtils.equals(mRoomId, roomId)) {
            try {
                mEventListener.onNotificationCountUpdate(roomId);
            } catch (Exception e) {
                Log.e(LOG_TAG, "onNotificationCountUpdate exception " + e.getMessage(), e);
            }
        }
    }

    @Override
    public void onNewRoom(String roomId) {
        // Filter out events for other rooms
        if (TextUtils.equals(mRoomId, roomId)) {
            try {
                mEventListener.onNewRoom(roomId);
            } catch (Exception e) {
                Log.e(LOG_TAG, "onNewRoom exception " + e.getMessage(), e);
            }
        }
    }

    @Override
    public void onJoinRoom(String roomId) {
        // Filter out events for other rooms
        if (TextUtils.equals(mRoomId, roomId)) {
            try {
                mEventListener.onJoinRoom(roomId);
            } catch (Exception e) {
                Log.e(LOG_TAG, "onJoinRoom exception " + e.getMessage(), e);
            }
        }
    }

    @Override
    public void onReceiptEvent(String roomId, List<String> senderIds) {
        // Filter out events for other rooms
        if (TextUtils.equals(mRoomId, roomId)) {
            try {
                mEventListener.onReceiptEvent(roomId, senderIds);
            } catch (Exception e) {
                Log.e(LOG_TAG, "onReceiptEvent exception " + e.getMessage(), e);
            }
        }
    }

    @Override
    public void onRoomTagEvent(String roomId) {
        // Filter out events for other rooms
        if (TextUtils.equals(mRoomId, roomId)) {
            try {
                mEventListener.onRoomTagEvent(roomId);
            } catch (Exception e) {
                Log.e(LOG_TAG, "onRoomTagEvent exception " + e.getMessage(), e);
            }
        }
    }

    @Override
    public void onBingRulesUpdate() {
        // Not room-specific: always forwarded.
        try {
            mEventListener.onBingRulesUpdate();
        } catch (Exception e) {
            Log.e(LOG_TAG, "onBingRulesUpdate exception " + e.getMessage(), e);
        }
    }

    @Override
    public void onTaggedEventsEvent(String roomId) {
        // Filter out events for other rooms
        if (TextUtils.equals(mRoomId, roomId)) {
            try {
                mEventListener.onTaggedEventsEvent(roomId);
            } catch (Exception e) {
                Log.e(LOG_TAG, "onTaggedEventsEvent exception " + e.getMessage(), e);
            }
        }
    }

    @Override
    public void onReadMarkerEvent(String roomId) {
        // Filter out events for other rooms
        if (TextUtils.equals(mRoomId, roomId)) {
            try {
                mEventListener.onReadMarkerEvent(roomId);
            } catch (Exception e) {
                Log.e(LOG_TAG, "onReadMarkerEvent exception " + e.getMessage(), e);
            }
        }
    }

    @Override
    public void onRoomFlush(String roomId) {
        // Filter out events for other rooms
        if (TextUtils.equals(mRoomId, roomId)) {
            try {
                mEventListener.onRoomFlush(roomId);
            } catch (Exception e) {
                Log.e(LOG_TAG, "onRoomFlush exception " + e.getMessage(), e);
            }
        }
    }

    @Override
    public void onLeaveRoom(String roomId) {
        // Filter out events for other rooms
        if (TextUtils.equals(mRoomId, roomId)) {
            try {
                mEventListener.onLeaveRoom(roomId);
            } catch (Exception e) {
                Log.e(LOG_TAG, "onLeaveRoom exception " + e.getMessage(), e);
            }
        }
    }

    @Override
    public void onRoomKick(String roomId) {
        // Filter out events for other rooms
        if (TextUtils.equals(mRoomId, roomId)) {
            try {
                mEventListener.onRoomKick(roomId);
            } catch (Exception e) {
                Log.e(LOG_TAG, "onRoomKick exception " + e.getMessage(), e);
            }
        }
    }
}
| |
/*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.rules.coercer;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.rules.CellPathResolver;
import com.facebook.buck.rules.Hint;
import com.facebook.buck.util.MoreSuppliers;
import com.facebook.buck.util.Types;
import com.facebook.buck.util.exceptions.BuckUncheckedExecutionException;
import com.google.common.base.CaseFormat;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.nio.file.Path;
import java.util.Collection;
import java.util.Map;
import java.util.function.Supplier;
import javax.annotation.Nullable;
/** Represents a single field that can be represented in buck build files. */
public class ParamInfo implements Comparable<ParamInfo> {
// Converts raw build-file values into the type of the setter's single parameter.
private final TypeCoercer<?> typeCoercer;
// Attribute name derived from the setter name: "setFooBar" -> "fooBar".
private final String name;
// Builder setter used to write coerced values onto a DTO.
private final Method setter;
/**
* Holds the closest getter for this property defined on the abstract class or interface.
*
* <p>Note that this may not be abstract, for instance if a @Value.Default is specified.
*/
private final Supplier<Method> closestGetterOnAbstractClassOrInterface;
/** Holds the getter for the concrete Immutable class. */
private final Supplier<Method> concreteGetter;
// Lazily computed: whether this param may be omitted in a build file (Optional-like
// return type, collection/map, or a non-abstract getter implying a default).
private final Supplier<Boolean> isOptional;
@SuppressWarnings("PMD.EmptyCatchBlock")
/**
* Builds a ParamInfo from a Builder setter method. The setter must be a one-argument
* method named "setXxx"; the param name, getters and coercer are all derived from it.
*
* @param typeCoercerFactory factory used to look up a coercer for the setter's parameter type
* @param setter the Builder setter backing this param
*/
public ParamInfo(TypeCoercerFactory typeCoercerFactory, Method setter) {
Preconditions.checkArgument(
setter.getParameterCount() == 1,
"Setter is expected to have exactly one parameter but had %s",
setter.getParameterCount());
Preconditions.checkArgument(
setter.getName().startsWith("set"),
"Setter is expected to have name starting with 'set' but was %s",
setter.getName());
Preconditions.checkArgument(
setter.getName().length() > 3,
"Setter must have name longer than just 'set' but was %s",
setter.getName());
this.setter = setter;
this.closestGetterOnAbstractClassOrInterface =
MoreSuppliers.memoize(this::findClosestGetterOnAbstractClassOrInterface);
this.concreteGetter =
MoreSuppliers.memoize(
() -> {
// This needs to get (and invoke) the concrete Immutable class's getter, not the
// abstract getter from a superclass. Accordingly, we manually find the getter
// there, rather than using closestGetterOnAbstractClassOrInterface.
Class<?> enclosingClass = setter.getDeclaringClass().getEnclosingClass();
if (enclosingClass == null) {
throw new IllegalStateException(
String.format(
"Couldn't find enclosing class of Builder %s", setter.getDeclaringClass()));
}
Iterable<String> getterNames = getGetterNames();
for (String possibleGetterName : getterNames) {
try {
return enclosingClass.getMethod(possibleGetterName);
} catch (NoSuchMethodException e) {
// Handled below
}
}
throw new IllegalStateException(
String.format(
"Couldn't find declared getter for %s#%s. Tried enclosing class %s methods: %s",
setter.getDeclaringClass(), setter.getName(), enclosingClass, getterNames));
});
this.isOptional =
MoreSuppliers.memoize(
() -> {
Method getter = closestGetterOnAbstractClassOrInterface.get();
Class<?> type = getter.getReturnType();
if (CoercedTypeCache.OPTIONAL_TYPES.contains(type)) {
return true;
}
// Collections and maps can always default to empty, so they are optional too.
if (Collection.class.isAssignableFrom(type) || Map.class.isAssignableFrom(type)) {
return true;
}
// Unfortunately @Value.Default isn't retained at runtime, so we use abstract-ness
// as a proxy for whether something has a default value.
return !Modifier.isAbstract(getter.getModifiers());
});
// Derive the param name by stripping the "set" prefix and lower-casing the first letter.
StringBuilder builder = new StringBuilder();
builder.append(setter.getName().substring(3, 4).toLowerCase());
if (setter.getName().length() > 4) {
builder.append(setter.getName().substring(4));
}
this.name = builder.toString();
try {
this.typeCoercer =
typeCoercerFactory.typeCoercerForType(setter.getGenericParameterTypes()[0]);
} catch (Exception e) {
throw new BuckUncheckedExecutionException(
e, "When getting ParamInfo for %s.%s.", setter.getDeclaringClass().getName(), name);
}
}
/** Returns the build-file attribute name ("fooBar" for setter "setFooBar"). */
public String getName() {
return name;
}
/** Returns the coercer used to convert raw values into this param's type. */
public TypeCoercer<?> getTypeCoercer() {
return typeCoercer;
}
/** Returns true when this param may be omitted in a build file. */
public boolean isOptional() {
return this.isOptional.get();
}
/** Returns the snake_case name used in Python build files (e.g. "foo_bar"). */
public String getPythonName() {
return CaseFormat.LOWER_CAMEL.to(CaseFormat.LOWER_UNDERSCORE, getName());
}
/** Returns true when this param contributes to the rule's dependencies (via @Hint). */
public boolean isDep() {
Hint hint = getHint();
if (hint != null) {
return hint.isDep();
}
return Hint.DEFAULT_IS_DEP;
}
/** @see Hint#isTargetGraphOnlyDep() */
public boolean isTargetGraphOnlyDep() {
Hint hint = getHint();
if (hint != null && hint.isTargetGraphOnlyDep()) {
Preconditions.checkState(hint.isDep(), "Conditional deps are only applicable for deps.");
return true;
}
return Hint.DEFAULT_IS_TARGET_GRAPH_ONLY_DEP;
}
/** Returns true when this param is treated as an input (via @Hint). */
public boolean isInput() {
Hint hint = getHint();
if (hint != null) {
return hint.isInput();
}
return Hint.DEFAULT_IS_INPUT;
}
// Reads the @Hint annotation from the closest abstract getter; null when absent.
private Hint getHint() {
return this.closestGetterOnAbstractClassOrInterface.get().getAnnotation(Hint.class);
}
/**
* Returns the type that input values will be coerced to. Return the type parameter of Optional if
* wrapped in Optional.
*/
public Class<?> getResultClass() {
return typeCoercer.getOutputClass();
}
/**
* Traverse the value of the field on {@code dto} that is represented by this instance.
*
* <p>If this field has a top level Optional type, traversal begins at the Optional value, or not
* at all if the field is empty.
*
* @param traversal traversal to apply on the values.
* @param dto the object whose field will be traversed.
* @see TypeCoercer#traverse(CellPathResolver, Object, TypeCoercer.Traversal)
*/
public void traverse(CellPathResolver cellPathResolver, Traversal traversal, Object dto) {
traverseHelper(cellPathResolver, typeCoercer, traversal, dto);
}
@SuppressWarnings("unchecked")
// Generic helper so the (unchecked) cast from Object to the coercer's type U is localized.
private <U> void traverseHelper(
CellPathResolver cellPathResolver,
TypeCoercer<U> typeCoercer,
Traversal traversal,
Object dto) {
U object = (U) get(dto);
if (object != null) {
typeCoercer.traverse(cellPathResolver, object, traversal);
}
}
/** Get the value of this param as set on dto. */
public Object get(Object dto) {
Method getter = this.concreteGetter.get();
try {
return getter.invoke(dto);
} catch (InvocationTargetException | IllegalAccessException e) {
throw new IllegalStateException(
String.format(
"Error invoking getter %s on class %s", getter.getName(), getter.getDeclaringClass()),
e);
}
}
/** Returns true when the coerced type (or one of its element types) matches any of {@code types}. */
public boolean hasElementTypes(Class<?>... types) {
return typeCoercer.hasElementClass(types);
}
/** Coerces and sets this param on {@code arg} from the raw value in {@code instance} keyed by name. */
public void setFromParams(
CellPathResolver cellRoots,
ProjectFilesystem filesystem,
BuildTarget buildTarget,
Object arg,
Map<String, ?> instance)
throws ParamInfoException {
set(cellRoots, filesystem, buildTarget.getBasePath(), arg, instance.get(name));
}
/**
* Sets a single property of the {@code dto}, coercing types as necessary.
*
* @param cellRoots
* @param filesystem {@link ProjectFilesystem} used to ensure {@link Path}s exist.
* @param pathRelativeToProjectRoot The path relative to the project root that this DTO is for.
* @param dto The constructor DTO on which the value should be set.
* @param value The value, which may be coerced depending on the type on {@code dto}.
*/
public void set(
CellPathResolver cellRoots,
ProjectFilesystem filesystem,
Path pathRelativeToProjectRoot,
Object dto,
@Nullable Object value)
throws ParamInfoException {
// A null raw value means "not specified": leave the dto's default in place.
if (value == null) {
return;
}
try {
setCoercedValue(
dto, typeCoercer.coerce(cellRoots, filesystem, pathRelativeToProjectRoot, value));
} catch (CoerceFailedException e) {
throw new ParamInfoException(name, e.getMessage(), e);
}
}
/**
* Set the param on dto to value, assuming value has already been coerced.
*
* <p>This is useful for things like making copies of dtos.
*/
public void setCoercedValue(Object dto, Object value) {
try {
setter.invoke(dto, value);
} catch (IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException(e);
}
}
/** Returns the most-overridden getter on the abstract Immutable. */
@SuppressWarnings("PMD.EmptyCatchBlock")
private Method findClosestGetterOnAbstractClassOrInterface() {
// Skip(…, 1) drops the enclosing class itself; search supertypes in order for getXxx/isXxx.
Iterable<Class<?>> superClasses =
Iterables.skip(Types.getSupertypes(setter.getDeclaringClass().getEnclosingClass()), 1);
ImmutableList<String> getterNames = getGetterNames();
for (Class<?> clazz : superClasses) {
for (String getterName : getterNames) {
try {
return clazz.getDeclaredMethod(getterName);
} catch (NoSuchMethodException e) {
// Handled below
}
}
}
throw new IllegalStateException(
String.format(
"Couldn't find declared getter for %s#%s. Tried parent classes %s methods: %s",
setter.getDeclaringClass(), setter.getName(), superClasses, getterNames));
}
// Candidate getter names for setter "setXxx": "getXxx" then "isXxx".
private ImmutableList<String> getGetterNames() {
String suffix = setter.getName().substring(3);
return ImmutableList.of("get" + suffix, "is" + suffix);
}
/** Only valid when comparing {@link ParamInfo} instances from the same description. */
@Override
public int compareTo(ParamInfo that) {
if (this == that) {
return 0;
}
return this.name.compareTo(that.name);
}
@Override
public int hashCode() {
return name.hashCode();
}
// Equality is by param name only, consistent with compareTo and hashCode.
@Override
public boolean equals(Object obj) {
if (!(obj instanceof ParamInfo)) {
return false;
}
ParamInfo that = (ParamInfo) obj;
return name.equals(that.getName());
}
public interface Traversal extends TypeCoercer.Traversal {}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.client.tools;
import java.io.File;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import org.apache.airavata.client.api.AiravataAPI;
import org.apache.airavata.client.api.exception.AiravataAPIInvocationException;
import org.apache.airavata.commons.gfac.type.ApplicationDescription;
import org.apache.airavata.commons.gfac.type.HostDescription;
import org.apache.airavata.commons.gfac.type.ServiceDescription;
import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
import org.apache.airavata.schemas.gfac.DataType;
import org.apache.airavata.schemas.gfac.GsisshHostType;
import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
import org.apache.airavata.schemas.gfac.InputParameterType;
import org.apache.airavata.schemas.gfac.JobTypeType;
import org.apache.airavata.schemas.gfac.OutputParameterType;
import org.apache.airavata.schemas.gfac.ParameterType;
import org.apache.airavata.schemas.gfac.ProjectAccountType;
import org.apache.airavata.schemas.gfac.QueueType;
public class UltrascanDocumentCreator {
// Airavata client API used to persist the generated host/service/application descriptors.
private AiravataAPI airavataAPI = null;
// Address of the Trestles HPC cluster the descriptors target.
private String hpcHostAddress = "trestles.sdsc.edu";
// Logical host names used when registering GSISSH host descriptions.
private String gsiSshHostNameTrestles = "gsissh-trestles";
private String gsiSshHostNameStampede = "gsissh-stampede";
/**
* @param airavataAPI the Airavata client API used to save the generated descriptors.
*/
public UltrascanDocumentCreator(AiravataAPI airavataAPI) {
this.airavataAPI = airavataAPI;
}
/**
* Registers the "US3EchoTrestles" test service on the Trestles cluster: a GSISSH host
* description, an echo service with one string input and one string output, and a
* serial PBS application descriptor pointing at /bin/echo.
*
* <p>Each Airavata API failure is printed and swallowed, so registration is best-effort
* and later steps still run. (NOTE: the method name misspells "Trestles" as "Testles";
* kept as-is since renaming would break callers.)
*/
public void createEchoPBSDocsforTestles() {
// Host description: GSISSH access to Trestles on port 22 with the torque tooling path.
HostDescription host = new HostDescription(GsisshHostType.type);
host.getType().setHostAddress(hpcHostAddress);
host.getType().setHostName(gsiSshHostNameTrestles);
((GsisshHostType) host.getType()).setPort(22);
((GsisshHostType) host.getType()).setInstalledPath("/opt/torque/bin/");
try {
airavataAPI.getApplicationManager().saveHostDescription(host);
} catch (AiravataAPIInvocationException e) {
e.printStackTrace(); // NOTE(review): failure is deliberately swallowed; registration continues.
}
/*
* Service Description creation and saving
*/
String serviceName = "US3EchoTrestles";
ServiceDescription serv = new ServiceDescription();
serv.getType().setName(serviceName);
List<InputParameterType> inputList = new ArrayList<InputParameterType>();
List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
// Single string input parameter "echo_input".
InputParameterType input = InputParameterType.Factory.newInstance();
input.setParameterName("echo_input");
ParameterType parameterType = input.addNewParameterType();
parameterType.setType(DataType.STRING);
parameterType.setName("String");
// Single string output parameter "echo_output".
OutputParameterType output = OutputParameterType.Factory.newInstance();
output.setParameterName("echo_output");
ParameterType parameterType1 = output.addNewParameterType();
parameterType1.setType(DataType.STRING);
parameterType1.setName("String");
inputList.add(input);
outputList.add(output);
InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList.size()]);
OutputParameterType[] outputParamList = outputList.toArray(new OutputParameterType[outputList.size()]);
serv.getType().setInputParametersArray(inputParamList);
serv.getType().setOutputParametersArray(outputParamList);
try {
airavataAPI.getApplicationManager().saveServiceDescription(serv);
} catch (AiravataAPIInvocationException e) {
e.printStackTrace(); // NOTE(review): failure is deliberately swallowed; registration continues.
}
/*
Application descriptor creation and saving
*/
ApplicationDescription appDesc = new ApplicationDescription(HpcApplicationDeploymentType.type);
HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) appDesc.getType();
ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
name.setStringValue(serviceName);
app.setApplicationName(name);
ProjectAccountType projectAccountType = app.addNewProjectAccount();
projectAccountType.setProjectAccountNumber("uot111");
QueueType queueType = app.addNewQueue();
queueType.setQueueName("normal");
// Serial job: one CPU on one node, 10 minute wall time (presumably minutes — TODO confirm units).
app.setCpuCount(1);
app.setJobType(JobTypeType.SERIAL);
app.setNodeCount(1);
app.setProcessorsPerNode(1);
app.setMaxWallTime(10);
/*
* Use bat file if it is compiled on Windows
*/
app.setExecutableLocation("/bin/echo");
/*
* Default tmp location
*/
String tempDir = "/oasis/projects/nsf/uot111/us3/airavata-workdirs/";
app.setScratchWorkingDirectory(tempDir);
app.setInstalledParentPath("/opt/torque/bin/");
try {
airavataAPI.getApplicationManager().saveApplicationDescription(serviceName, gsiSshHostNameTrestles, appDesc);
} catch (AiravataAPIInvocationException e) {
e.printStackTrace(); // NOTE(review): failure is deliberately swallowed.
}
}
/**
* Registers the "US3AppTrestles" analysis service on the Trestles cluster: a GSISSH host
* description, a service with one URI input and URI/stdout/stderr outputs, and an MPI PBS
* application descriptor for us_mpi_analysis launched via mpiexec.
*
* <p>Each Airavata API failure is printed and swallowed, so registration is best-effort.
*/
public void createMPIPBSDocsTrestles() {
// Host description: GSISSH access to Trestles on port 22 with the torque tooling path.
HostDescription host = new HostDescription(GsisshHostType.type);
host.getType().setHostAddress(hpcHostAddress);
host.getType().setHostName(gsiSshHostNameTrestles);
((GsisshHostType) host.getType()).setPort(22);
((GsisshHostType) host.getType()).setInstalledPath("/opt/torque/bin/");
try {
airavataAPI.getApplicationManager().saveHostDescription(host);
} catch (AiravataAPIInvocationException e) {
e.printStackTrace(); // NOTE(review): failure is deliberately swallowed; registration continues.
}
/*
* Service Description creation and saving
*/
String serviceName = "US3AppTrestles";
ServiceDescription serv = new ServiceDescription();
serv.getType().setName(serviceName);
List<InputParameterType> inputList = new ArrayList<InputParameterType>();
List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
// Single URI input parameter "input".
InputParameterType input = InputParameterType.Factory.newInstance();
input.setParameterName("input");
ParameterType parameterType = input.addNewParameterType();
parameterType.setType(DataType.URI);
parameterType.setName("URI");
// Outputs: a URI result plus the job's stdout and stderr streams.
OutputParameterType output = OutputParameterType.Factory.newInstance();
output.setParameterName("output");
ParameterType parameterType1 = output.addNewParameterType();
parameterType1.setType(DataType.URI);
parameterType1.setName("URI");
OutputParameterType output1 = OutputParameterType.Factory.newInstance();
output1.setParameterName("stdout");
ParameterType parameterType2 = output1.addNewParameterType();
parameterType2.setType(DataType.STD_OUT);
parameterType2.setName("StdOut");
OutputParameterType output2 = OutputParameterType.Factory.newInstance();
output2.setParameterName("stderr");
ParameterType parameterType3 = output2.addNewParameterType();
parameterType3.setType(DataType.STD_ERR);
parameterType3.setName("StdErr");
inputList.add(input);
outputList.add(output);
outputList.add(output1);
outputList.add(output2);
InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList.size()]);
OutputParameterType[] outputParamList = outputList.toArray(new OutputParameterType[outputList.size()]);
serv.getType().setInputParametersArray(inputParamList);
serv.getType().setOutputParametersArray(outputParamList);
try {
airavataAPI.getApplicationManager().saveServiceDescription(serv);
} catch (AiravataAPIInvocationException e) {
e.printStackTrace(); // NOTE(review): failure is deliberately swallowed; registration continues.
}
/*
Application descriptor creation and saving
*/
ApplicationDescription appDesc = new ApplicationDescription(HpcApplicationDeploymentType.type);
HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) appDesc.getType();
ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
name.setStringValue(serviceName);
app.setApplicationName(name);
ProjectAccountType projectAccountType = app.addNewProjectAccount();
projectAccountType.setProjectAccountNumber("uot111");
QueueType queueType = app.addNewQueue();
queueType.setQueueName("normal");
// MPI job across 32 nodes, 2 processors per node; cpuCount of 1 kept as in original
// (presumably ignored for MPI jobs — TODO confirm against the GFac scheduler).
app.setCpuCount(1);
app.setJobType(JobTypeType.MPI);
app.setNodeCount(32);
app.setProcessorsPerNode(2);
app.setMaxWallTime(10);
/*
* Use bat file if it is compiled on Windows
*/
app.setExecutableLocation("/home/us3/trestles/bin/us_mpi_analysis");
/*
* Default tmp location
*/
String tempDir = "/oasis/projects/nsf/uot111/us3/airavata-workdirs/";
app.setScratchWorkingDirectory(tempDir);
app.setInstalledParentPath("/opt/torque/bin/");
app.setJobSubmitterCommand("/opt/mvapich2/pgi/ib/bin/mpiexec");
try {
airavataAPI.getApplicationManager().saveApplicationDescription(serviceName, gsiSshHostNameTrestles, appDesc);
} catch (AiravataAPIInvocationException e) {
e.printStackTrace(); // NOTE(review): failure is deliberately swallowed.
}
}
/**
 * Registers the descriptors needed to run the US3 MPI analysis on the XSEDE
 * Stampede cluster: a GSISSH/SLURM host, a service with one URI input and
 * URI/stdout/stderr outputs, and a 32-node MPI application deployment.
 * Save failures are reported but not propagated.
 */
public void createMPISLURMDocsStampede() {
    // Host: Stampede is reached over GSISSH and schedules through SLURM.
    HostDescription stampedeHost = new HostDescription(GsisshHostType.type);
    stampedeHost.getType().setHostAddress("stampede.tacc.xsede.org");
    stampedeHost.getType().setHostName("gsissh-stampede");
    GsisshHostType gsisshType = (GsisshHostType) stampedeHost.getType();
    gsisshType.setJobManager("slurm");
    gsisshType.setInstalledPath("/usr/bin/");
    gsisshType.setPort(2222);
    try {
        airavataAPI.getApplicationManager().saveHostDescription(stampedeHost);
    } catch (AiravataAPIInvocationException e) {
        e.printStackTrace(); // registration is best-effort; failure is only reported
    }

    // Service: one URI input plus URI, stdout and stderr outputs.
    String serviceName = "US3AppStampede";
    ServiceDescription serviceDescription = new ServiceDescription();
    serviceDescription.getType().setName(serviceName);
    List<InputParameterType> inputs = new ArrayList<InputParameterType>();
    List<OutputParameterType> outputs = new ArrayList<OutputParameterType>();

    InputParameterType uriInput = InputParameterType.Factory.newInstance();
    uriInput.setParameterName("input");
    ParameterType uriInputType = uriInput.addNewParameterType();
    uriInputType.setType(DataType.URI);
    uriInputType.setName("URI");
    inputs.add(uriInput);

    OutputParameterType uriOutput = OutputParameterType.Factory.newInstance();
    uriOutput.setParameterName("output");
    ParameterType uriOutputType = uriOutput.addNewParameterType();
    uriOutputType.setType(DataType.URI);
    uriOutputType.setName("URI");
    outputs.add(uriOutput);

    OutputParameterType stdoutOutput = OutputParameterType.Factory.newInstance();
    stdoutOutput.setParameterName("stdout");
    ParameterType stdoutParamType = stdoutOutput.addNewParameterType();
    stdoutParamType.setType(DataType.STD_OUT);
    stdoutParamType.setName("StdOut");
    outputs.add(stdoutOutput);

    OutputParameterType stderrOutput = OutputParameterType.Factory.newInstance();
    stderrOutput.setParameterName("stderr");
    ParameterType stderrParamType = stderrOutput.addNewParameterType();
    stderrParamType.setType(DataType.STD_ERR);
    stderrParamType.setName("StdErr");
    outputs.add(stderrOutput);

    serviceDescription.getType().setInputParametersArray(
            inputs.toArray(new InputParameterType[inputs.size()]));
    serviceDescription.getType().setOutputParametersArray(
            outputs.toArray(new OutputParameterType[outputs.size()]));
    try {
        airavataAPI.getApplicationManager().saveServiceDescription(serviceDescription);
    } catch (AiravataAPIInvocationException e) {
        e.printStackTrace(); // best-effort registration
    }

    // Application deployment: 32-node MPI run under the TG-MCB070039N allocation.
    ApplicationDescription deployment = new ApplicationDescription(HpcApplicationDeploymentType.type);
    HpcApplicationDeploymentType hpcApp = (HpcApplicationDeploymentType) deployment.getType();
    ApplicationDeploymentDescriptionType.ApplicationName appName =
            ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
    appName.setStringValue(serviceName);
    hpcApp.setApplicationName(appName);
    hpcApp.addNewProjectAccount().setProjectAccountNumber("TG-MCB070039N");
    hpcApp.addNewQueue().setQueueName("normal");
    hpcApp.setCpuCount(1);
    hpcApp.setJobType(JobTypeType.MPI);
    hpcApp.setNodeCount(32);
    hpcApp.setProcessorsPerNode(2);
    hpcApp.setMaxWallTime(10);
    hpcApp.setExecutableLocation("/home1/01623/us3/bin/us_mpi_analysis");
    // Default scratch/tmp location on Stampede.
    hpcApp.setScratchWorkingDirectory("/home1/01623/us3");
    hpcApp.setInstalledParentPath("/usr/bin/");
    hpcApp.setJobSubmitterCommand("/usr/local/bin/ibrun");
    try {
        airavataAPI.getApplicationManager().saveApplicationDescription(serviceName, gsiSshHostNameStampede, deployment);
    } catch (AiravataAPIInvocationException e) {
        e.printStackTrace(); // best-effort registration
    }
}
/**
 * Registers the descriptors for a simple echo test job on Stampede: a
 * GSISSH/SLURM host in push monitoring mode, a service with one string input
 * and one string output, and a single-core serial deployment of /bin/echo.
 * Save failures are reported but not propagated.
 */
public void createEchoSlurmDocsofStampede() {
    // Host: same Stampede endpoint, but registered under a separate host name
    // and with push-mode monitoring enabled.
    HostDescription stampedeHost = new HostDescription(GsisshHostType.type);
    stampedeHost.getType().setHostAddress("stampede.tacc.xsede.org");
    stampedeHost.getType().setHostName("stampede-host");
    GsisshHostType gsisshType = (GsisshHostType) stampedeHost.getType();
    gsisshType.setJobManager("slurm");
    gsisshType.setInstalledPath("/usr/bin/");
    gsisshType.setPort(2222);
    gsisshType.setMonitorMode("push");
    try {
        airavataAPI.getApplicationManager().saveHostDescription(stampedeHost);
    } catch (AiravataAPIInvocationException e) {
        e.printStackTrace(); // registration is best-effort; failure is only reported
    }

    // Service: a single string in, a single string out.
    String serviceName = "US3EchoStampede";
    ServiceDescription serviceDescription = new ServiceDescription();
    serviceDescription.getType().setName(serviceName);
    List<InputParameterType> inputs = new ArrayList<InputParameterType>();
    List<OutputParameterType> outputs = new ArrayList<OutputParameterType>();

    InputParameterType echoInput = InputParameterType.Factory.newInstance();
    echoInput.setParameterName("echo_input");
    ParameterType echoInputType = echoInput.addNewParameterType();
    echoInputType.setType(DataType.STRING);
    echoInputType.setName("String");
    inputs.add(echoInput);

    OutputParameterType echoOutput = OutputParameterType.Factory.newInstance();
    echoOutput.setParameterName("echo_output");
    ParameterType echoOutputType = echoOutput.addNewParameterType();
    echoOutputType.setType(DataType.STRING);
    echoOutputType.setName("String");
    outputs.add(echoOutput);

    serviceDescription.getType().setInputParametersArray(
            inputs.toArray(new InputParameterType[inputs.size()]));
    serviceDescription.getType().setOutputParametersArray(
            outputs.toArray(new OutputParameterType[outputs.size()]));
    try {
        airavataAPI.getApplicationManager().saveServiceDescription(serviceDescription);
    } catch (AiravataAPIInvocationException e) {
        e.printStackTrace(); // best-effort registration
    }

    // Application deployment: serial single-core echo job.
    ApplicationDescription deployment = new ApplicationDescription(HpcApplicationDeploymentType.type);
    HpcApplicationDeploymentType hpcApp = (HpcApplicationDeploymentType) deployment.getType();
    ApplicationDeploymentDescriptionType.ApplicationName appName =
            ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
    appName.setStringValue(serviceName);
    hpcApp.setApplicationName(appName);
    hpcApp.addNewProjectAccount().setProjectAccountNumber("TG-MCB070039N");
    hpcApp.addNewQueue().setQueueName("normal");
    hpcApp.setCpuCount(1);
    hpcApp.setJobType(JobTypeType.SERIAL);
    hpcApp.setNodeCount(1);
    hpcApp.setProcessorsPerNode(1);
    hpcApp.setMaxWallTime(10);
    hpcApp.setExecutableLocation("/bin/echo");
    // Default scratch/tmp location on Stampede.
    hpcApp.setScratchWorkingDirectory("/home1/01623/us3");
    hpcApp.setInstalledParentPath("/usr/bin/");
    try {
        airavataAPI.getApplicationManager().saveApplicationDescription(serviceName, gsiSshHostNameStampede, deployment);
    } catch (AiravataAPIInvocationException e) {
        e.printStackTrace(); // best-effort registration
    }
}
/**
 * Returns the Airavata API client used by the descriptor-creation methods.
 *
 * @return the current {@code AiravataAPI} instance
 */
public AiravataAPI getAiravataAPI() {
return airavataAPI;
}
/**
 * Injects the Airavata API client used by the descriptor-creation methods.
 *
 * @param airavataAPI the API client to use for saving descriptors
 */
public void setAiravataAPI(AiravataAPI airavataAPI) {
this.airavataAPI = airavataAPI;
}
}
| |
/*
* Copyright (C) 2009 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.SortedLists.KeyAbsentBehavior.INVERTED_INSERTION_INDEX;
import static com.google.common.collect.SortedLists.KeyAbsentBehavior.NEXT_HIGHER;
import static com.google.common.collect.SortedLists.KeyPresentBehavior.ANY_PRESENT;
import static com.google.common.collect.SortedLists.KeyPresentBehavior.FIRST_AFTER;
import static com.google.common.collect.SortedLists.KeyPresentBehavior.FIRST_PRESENT;
import com.google.common.annotations.GwtCompatible;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.Set;
import javax.annotation.Nullable;
/**
* An immutable sorted set with one or more elements. TODO(jlevy): Consider
* separate class for a single-element sorted set.
*
* @author Jared Levy
* @author Louis Wasserman
*/
@GwtCompatible(serializable = true, emulated = true)
@SuppressWarnings("serial")
final class RegularImmutableSortedSet<E> extends ImmutableSortedSet<E> {
// The elements, held in the order imposed by the comparator.
// Invariant: never empty (enforced in the constructor; the empty case has
// its own implementation, see emptySet()).
private transient final ImmutableList<E> elements;
RegularImmutableSortedSet(
ImmutableList<E> elements, Comparator<? super E> comparator) {
super(comparator);
this.elements = elements;
checkArgument(!elements.isEmpty());
}
@Override public UnmodifiableIterator<E> iterator() {
return elements.iterator();
}
@Override public boolean isEmpty() {
// Guaranteed by the constructor's checkArgument.
return false;
}
@Override
public int size() {
return elements.size();
}
@Override public boolean contains(Object o) {
if (o == null) {
return false;
}
try {
return binarySearch(o) >= 0;
} catch (ClassCastException e) {
// o is not comparable to the elements, so it cannot be present.
return false;
}
}
@Override public boolean containsAll(Collection<?> targets) {
// TODO(jlevy): For optimal performance, use a binary search when
// targets.size() < size() / log(size())
// TODO(kevinb): see if we can share code with OrderedIterator after it
// graduates from labs.
if (!SortedIterables.hasSameComparator(comparator(), targets)
|| (targets.size() <= 1)) {
return super.containsAll(targets);
}
/*
* If targets is a sorted set with the same comparator, containsAll can run
* in O(n) time stepping through the two collections.
*/
Iterator<E> thisIterator = iterator();
Iterator<?> thatIterator = targets.iterator();
Object target = thatIterator.next();
try {
// Walk both sorted sequences in lockstep, advancing "target" only when a
// matching element of this set is found.
while (thisIterator.hasNext()) {
int cmp = unsafeCompare(thisIterator.next(), target);
if (cmp == 0) {
if (!thatIterator.hasNext()) {
// Every target has been matched.
return true;
}
target = thatIterator.next();
} else if (cmp > 0) {
// We stepped past the target without seeing it; it is absent.
return false;
}
}
} catch (NullPointerException e) {
return false;
} catch (ClassCastException e) {
return false;
}
// This set was exhausted with at least one target still unmatched.
return false;
}
private int binarySearch(Object key) {
// TODO(kevinb): split this into binarySearch(E) and
// unsafeBinarySearch(Object), use each appropriately. name all methods that
// might throw CCE "unsafe*".
// Pretend the comparator can compare anything. If it turns out it can't
// compare a and b, we should get a CCE on the subsequent line. Only methods
// that are spec'd to throw CCE should call this.
@SuppressWarnings("unchecked")
Comparator<Object> unsafeComparator = (Comparator<Object>) comparator;
return Collections.binarySearch(elements, key, unsafeComparator);
}
@Override boolean isPartialView() {
return elements.isPartialView();
}
@Override public Object[] toArray() {
return elements.toArray();
}
@Override public <T> T[] toArray(T[] array) {
return elements.toArray(array);
}
@Override public boolean equals(@Nullable Object object) {
if (object == this) {
return true;
}
if (!(object instanceof Set)) {
return false;
}
Set<?> that = (Set<?>) object;
if (size() != that.size()) {
return false;
}
if (SortedIterables.hasSameComparator(comparator, that)) {
// Same ordering: compare element-by-element in one pass over both sets.
Iterator<?> otherIterator = that.iterator();
try {
Iterator<E> iterator = iterator();
while (iterator.hasNext()) {
Object element = iterator.next();
Object otherElement = otherIterator.next();
if (otherElement == null
|| unsafeCompare(element, otherElement) != 0) {
return false;
}
}
return true;
} catch (ClassCastException e) {
return false;
} catch (NoSuchElementException e) {
return false; // concurrent change to other set
}
}
// Different (or unknown) ordering: fall back to a containment check.
return this.containsAll(that);
}
@Override
public E first() {
return elements.get(0);
}
@Override
public E last() {
return elements.get(size() - 1);
}
@Override
ImmutableSortedSet<E> headSetImpl(E toElement, boolean inclusive) {
int index;
if (inclusive) {
// Keep elements equal to toElement: cut after the last equal element.
index = SortedLists.binarySearch(
elements, checkNotNull(toElement), comparator(), FIRST_AFTER, NEXT_HIGHER);
} else {
// Exclude elements equal to toElement: cut at the first equal element.
index = SortedLists.binarySearch(
elements, checkNotNull(toElement), comparator(), FIRST_PRESENT, NEXT_HIGHER);
}
return createSubset(0, index);
}
@Override
ImmutableSortedSet<E> subSetImpl(
E fromElement, boolean fromInclusive, E toElement, boolean toInclusive) {
// Compose the two one-sided views.
return tailSetImpl(fromElement, fromInclusive)
.headSetImpl(toElement, toInclusive);
}
@Override
ImmutableSortedSet<E> tailSetImpl(E fromElement, boolean inclusive) {
int index;
if (inclusive) {
// Keep elements equal to fromElement: start at the first equal element.
index = SortedLists.binarySearch(
elements, checkNotNull(fromElement), comparator(), FIRST_PRESENT, NEXT_HIGHER);
} else {
// Exclude elements equal to fromElement: start after the last equal element.
index = SortedLists.binarySearch(
elements, checkNotNull(fromElement), comparator(), FIRST_AFTER, NEXT_HIGHER);
}
return createSubset(index, size());
}
// Pretend the comparator can compare anything. If it turns out it can't
// compare two elements, it'll throw a CCE. Only methods that are specified to
// throw CCE should call this.
@SuppressWarnings("unchecked")
Comparator<Object> unsafeComparator() {
return (Comparator<Object>) comparator;
}
// Returns the view [newFromIndex, newToIndex) of this set, reusing this
// instance for the full range and the shared empty set for an empty range.
private ImmutableSortedSet<E> createSubset(int newFromIndex, int newToIndex) {
if (newFromIndex == 0 && newToIndex == size()) {
return this;
} else if (newFromIndex < newToIndex) {
return new RegularImmutableSortedSet<E>(
elements.subList(newFromIndex, newToIndex), comparator);
} else {
return emptySet(comparator);
}
}
@SuppressWarnings("unchecked")
@Override int indexOf(@Nullable Object target) {
if (target == null) {
return -1;
}
int position;
try {
position = SortedLists.binarySearch(elements, (E) target, comparator(),
ANY_PRESENT, INVERTED_INSERTION_INDEX);
} catch (ClassCastException e) {
// target is not comparable to the elements, so it cannot be present.
return -1;
}
// TODO(kevinb): reconsider if it's really worth making feeble attempts at
// sanity for inconsistent comparators.
// The equals() check is needed when the comparator isn't compatible with
// equals().
return (position >= 0 && elements.get(position).equals(target))
? position : -1;
}
@Override ImmutableList<E> createAsList() {
return new ImmutableSortedAsList<E>(this, elements);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.webmonitor;
import akka.actor.ActorSystem;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import java.net.URI;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.JobManagerOptions;
import org.apache.flink.core.fs.Path;
import org.apache.flink.runtime.blob.BlobView;
import org.apache.flink.runtime.execution.ExecutionState;
import org.apache.flink.runtime.executiongraph.AccessExecutionGraph;
import org.apache.flink.runtime.executiongraph.AccessExecutionJobVertex;
import org.apache.flink.runtime.executiongraph.AccessExecutionVertex;
import org.apache.flink.runtime.highavailability.HighAvailabilityServices;
import org.apache.flink.runtime.jobgraph.JobStatus;
import org.apache.flink.runtime.leaderretrieval.LeaderRetrievalService;
import org.apache.flink.runtime.messages.webmonitor.JobDetails;
import org.apache.flink.runtime.webmonitor.history.JsonArchivist;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
/**
* Utilities for the web runtime monitor. This class contains for example methods to build
* messages with aggregate information about the state of an execution graph, to be send
* to the web server.
*/
public final class WebMonitorUtils {
private static final Logger LOG = LoggerFactory.getLogger(WebMonitorUtils.class);
/**
* Holder for the locations of the JobManager log file and stdout file.
* Either field may be null when the corresponding file could not be found
* or read.
*/
public static class LogFileLocation {
public final File logFile;
public final File stdOutFile;
private LogFileLocation(File logFile, File stdOutFile) {
this.logFile = logFile;
this.stdOutFile = stdOutFile;
}
/**
* Finds the Flink log directory using log.file Java property that is set during startup.
*/
public static LogFileLocation find(Configuration config) {
// NOTE(review): "log.file" is read as a Java system property here, even
// though the log messages below refer to it as an environment variable.
final String logEnv = "log.file";
String logFilePath = System.getProperty(logEnv);
if (logFilePath == null) {
LOG.warn("Log file environment variable '{}' is not set.", logEnv);
logFilePath = config.getString(JobManagerOptions.WEB_LOG_PATH);
}
// not configured, cannot serve log files
// (the length check also guards the substring(length - 3) call below)
if (logFilePath == null || logFilePath.length() < 4) {
LOG.warn("JobManager log files are unavailable in the web dashboard. " +
"Log file location not found in environment variable '{}' or configuration key '{}'.",
logEnv, JobManagerOptions.WEB_LOG_PATH.key());
return new LogFileLocation(null, null);
}
// Derive the stdout file next to the log file by replacing the trailing
// "log" of the path with "out".
String outFilePath = logFilePath.substring(0, logFilePath.length() - 3).concat("out");
LOG.info("Determined location of JobManager log file: {}", logFilePath);
LOG.info("Determined location of JobManager stdout file: {}", outFilePath);
return new LogFileLocation(resolveFileLocation(logFilePath), resolveFileLocation(outFilePath));
}
/**
* Verify log file location.
* @param logFilePath Path to log file
* @return File, or null if the file does not exist or is not readable
*/
private static File resolveFileLocation(String logFilePath) {
File logFile = new File(logFilePath);
return (logFile.exists() && logFile.canRead()) ? logFile : null;
}
}
/**
* Starts the web runtime monitor. Because the actual implementation of the runtime monitor is
* in another project, we load the runtime monitor dynamically.
* <p>
* Because failure to start the web runtime monitor is not considered fatal, this method does
* not throw any exceptions, but only logs them.
*
* @param config The configuration for the runtime monitor.
* @param highAvailabilityServices HighAvailabilityServices used to start the WebRuntimeMonitor
* @param actorSystem ActorSystem used to connect to the JobManager
* @return the created WebMonitor, or null if it could not be loaded or instantiated
*/
public static WebMonitor startWebRuntimeMonitor(
Configuration config,
HighAvailabilityServices highAvailabilityServices,
ActorSystem actorSystem) {
// try to load and instantiate the class
try {
String classname = "org.apache.flink.runtime.webmonitor.WebRuntimeMonitor";
Class<? extends WebMonitor> clazz = Class.forName(classname).asSubclass(WebMonitor.class);
Constructor<? extends WebMonitor> constructor = clazz.getConstructor(Configuration.class,
LeaderRetrievalService.class,
BlobView.class,
ActorSystem.class);
return constructor.newInstance(
config,
highAvailabilityServices.getJobManagerLeaderRetriever(HighAvailabilityServices.DEFAULT_JOB_ID),
highAvailabilityServices.createBlobStore(),
actorSystem);
} catch (ClassNotFoundException e) {
LOG.error("Could not load web runtime monitor. " +
"Probably reason: flink-runtime-web is not in the classpath");
LOG.debug("Caught exception", e);
return null;
} catch (InvocationTargetException e) {
// Unwrap so the constructor's own failure cause is what gets logged.
LOG.error("WebServer could not be created", e.getTargetException());
return null;
} catch (Throwable t) {
LOG.error("Failed to instantiate web runtime monitor.", t);
return null;
}
}
/**
* Loads the WebRuntimeMonitor class reflectively and invokes its static
* getJsonArchivists() method.
*
* @return the monitor's JsonArchivist array, or an empty array if the
* flink-runtime-web classes are not on the classpath or the call fails
*/
public static JsonArchivist[] getJsonArchivists() {
try {
String classname = "org.apache.flink.runtime.webmonitor.WebRuntimeMonitor";
Class<? extends WebMonitor> clazz = Class.forName(classname).asSubclass(WebMonitor.class);
Method method = clazz.getMethod("getJsonArchivists");
// Static method, hence the null receiver.
JsonArchivist[] result = (JsonArchivist[]) method.invoke(null);
return result;
} catch (ClassNotFoundException e) {
LOG.error("Could not load web runtime monitor. " +
"Probably reason: flink-runtime-web is not in the classpath");
LOG.debug("Caught exception", e);
return new JsonArchivist[0];
} catch (Throwable t) {
LOG.error("Failed to retrieve archivers from web runtime monitor.", t);
return new JsonArchivist[0];
}
}
/**
* Parses a JSON array of objects with "key" and "value" fields into a map.
*
* @param jsonString the JSON array text
* @return a map from each entry's "key" text to its "value" text
* @throws RuntimeException if the string cannot be parsed as such an array
*/
public static Map<String, String> fromKeyValueJsonArray(String jsonString) {
try {
Map<String, String> map = new HashMap<>();
ObjectMapper m = new ObjectMapper();
ArrayNode array = (ArrayNode) m.readTree(jsonString);
Iterator<JsonNode> elements = array.elements();
while (elements.hasNext()) {
JsonNode node = elements.next();
String key = node.get("key").asText();
String value = node.get("value").asText();
map.put(key, value);
}
return map;
}
catch (Exception e) {
throw new RuntimeException(e.getMessage(), e);
}
}
/**
* Builds an aggregate JobDetails summary (timestamps, per-state task counts,
* total task count) for the given execution graph.
*
* @param job the execution graph to summarize
* @return the aggregated job details
*/
public static JobDetails createDetailsForJob(AccessExecutionGraph job) {
JobStatus status = job.getState();
long started = job.getStatusTimestamp(JobStatus.CREATED);
// Only globally terminal jobs have a meaningful finish time; -1 otherwise.
long finished = status.isGloballyTerminalState() ? job.getStatusTimestamp(status) : -1L;
int[] countsPerStatus = new int[ExecutionState.values().length];
long lastChanged = 0;
int numTotalTasks = 0;
for (AccessExecutionJobVertex ejv : job.getVerticesTopologically()) {
AccessExecutionVertex[] vertices = ejv.getTaskVertices();
numTotalTasks += vertices.length;
for (AccessExecutionVertex vertex : vertices) {
ExecutionState state = vertex.getExecutionState();
countsPerStatus[state.ordinal()]++;
lastChanged = Math.max(lastChanged, vertex.getStateTimestamp(state));
}
}
lastChanged = Math.max(lastChanged, finished);
return new JobDetails(job.getJobID(), job.getJobName(),
started, finished, status, lastChanged,
countsPerStatus, numTotalTasks);
}
/**
* Checks and normalizes the given URI. This method first checks the validity of the
* URI (scheme and path are not null) and then normalizes the URI to a path.
*
* @param archiveDirUri The URI to check and normalize.
* @return A normalized URI as a Path.
*
* @throws IllegalArgumentException Thrown, if the URI misses scheme or path.
*/
public static Path validateAndNormalizeUri(URI archiveDirUri) {
final String scheme = archiveDirUri.getScheme();
final String path = archiveDirUri.getPath();
// some validity checks
if (scheme == null) {
throw new IllegalArgumentException("The scheme (hdfs://, file://, etc) is null. " +
"Please specify the file system scheme explicitly in the URI.");
}
if (path == null) {
throw new IllegalArgumentException("The path to store the job archive data in is null. " +
"Please specify a directory path for the archiving the job data.");
}
return new Path(archiveDirUri);
}
/**
* Private constructor to prevent instantiation.
*/
private WebMonitorUtils() {
throw new RuntimeException();
}
}
| |
package org.spongycastle.openpgp;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.spongycastle.bcpg.BCPGInputStream;
import org.spongycastle.bcpg.BCPGObject;
import org.spongycastle.bcpg.BCPGOutputStream;
import org.spongycastle.bcpg.ContainedPacket;
import org.spongycastle.bcpg.DSASecretBCPGKey;
import org.spongycastle.bcpg.ElGamalSecretBCPGKey;
import org.spongycastle.bcpg.HashAlgorithmTags;
import org.spongycastle.bcpg.PublicKeyPacket;
import org.spongycastle.bcpg.RSASecretBCPGKey;
import org.spongycastle.bcpg.S2K;
import org.spongycastle.bcpg.SecretKeyPacket;
import org.spongycastle.bcpg.SecretSubkeyPacket;
import org.spongycastle.bcpg.SymmetricKeyAlgorithmTags;
import org.spongycastle.bcpg.UserAttributePacket;
import org.spongycastle.bcpg.UserIDPacket;
import org.spongycastle.openpgp.operator.PBESecretKeyDecryptor;
import org.spongycastle.openpgp.operator.PBESecretKeyEncryptor;
import org.spongycastle.openpgp.operator.PGPContentSignerBuilder;
import org.spongycastle.openpgp.operator.PGPDigestCalculator;
/**
* general class to handle a PGP secret key object.
*/
public class PGPSecretKey
{
// The (possibly encrypted) secret key packet held by this key.
SecretKeyPacket secret;
// The public key corresponding to this secret key.
PGPPublicKey pub;
/**
* Wrap an already-built secret key packet together with its public key.
*
* @param secret the secret key packet
* @param pub the matching public key
*/
PGPSecretKey(
SecretKeyPacket secret,
PGPPublicKey pub)
{
this.secret = secret;
this.pub = pub;
}
/**
* Build a secret key from a private/public key pair, treating it as a
* subkey (delegates with isMasterKey = false).
*
* @param privKey the private key material
* @param pubKey the matching public key
* @param checksumCalculator calculator for the key-data checksum (SHA-1), or null for a simple 2-byte sum
* @param keyEncryptor encryptor used to protect the key material
* @throws PGPException on encryption failure
*/
PGPSecretKey(
PGPPrivateKey privKey,
PGPPublicKey pubKey,
PGPDigestCalculator checksumCalculator,
PBESecretKeyEncryptor keyEncryptor)
throws PGPException
{
this(privKey, pubKey, checksumCalculator, false, keyEncryptor);
}
/**
* Build a secret key packet from the raw private key material: the material
* is serialized, a checksum is appended, and the result is encrypted with
* the supplied encryptor (unless its algorithm is NULL, in which case the
* material is stored in the clear).
*
* @param privKey the private key material
* @param pubKey the matching public key
* @param checksumCalculator SHA-1 checksum calculator, or null for a simple 2-byte sum
* @param isMasterKey true to emit a SecretKeyPacket, false for a SecretSubkeyPacket
* @param keyEncryptor encryptor used to protect the key material
* @throws PGPException if a non-SHA-1 checksum calculator is supplied, or encryption fails
*/
PGPSecretKey(
PGPPrivateKey privKey,
PGPPublicKey pubKey,
PGPDigestCalculator checksumCalculator,
boolean isMasterKey,
PBESecretKeyEncryptor keyEncryptor)
throws PGPException
{
this.pub = pubKey;
BCPGObject secKey = (BCPGObject)privKey.getPrivateKeyDataPacket();
try
{
// Serialize the private key material, then append its checksum.
ByteArrayOutputStream bOut = new ByteArrayOutputStream();
BCPGOutputStream pOut = new BCPGOutputStream(bOut);
pOut.writeObject(secKey);
byte[] keyData = bOut.toByteArray();
pOut.write(checksum(checksumCalculator, keyData, keyData.length));
int encAlgorithm = keyEncryptor.getAlgorithm();
if (encAlgorithm != SymmetricKeyAlgorithmTags.NULL)
{
keyData = bOut.toByteArray(); // include checksum
byte[] encData = keyEncryptor.encryptKeyData(keyData, 0, keyData.length);
byte[] iv = keyEncryptor.getCipherIV();
S2K s2k = keyEncryptor.getS2K();
// The s2k usage octet records which checksum form was appended above.
int s2kUsage;
if (checksumCalculator != null)
{
if (checksumCalculator.getAlgorithm() != HashAlgorithmTags.SHA1)
{
throw new PGPException("only SHA1 supported for key checksum calculations.");
}
s2kUsage = SecretKeyPacket.USAGE_SHA1;
}
else
{
s2kUsage = SecretKeyPacket.USAGE_CHECKSUM;
}
if (isMasterKey)
{
this.secret = new SecretKeyPacket(pub.publicPk, encAlgorithm, s2kUsage, s2k, iv, encData);
}
else
{
this.secret = new SecretSubkeyPacket(pub.publicPk, encAlgorithm, s2kUsage, s2k, iv, encData);
}
}
else
{
// No encryption: store the serialized material (with checksum) as-is.
if (isMasterKey)
{
this.secret = new SecretKeyPacket(pub.publicPk, encAlgorithm, null, null, bOut.toByteArray());
}
else
{
this.secret = new SecretSubkeyPacket(pub.publicPk, encAlgorithm, null, null, bOut.toByteArray());
}
}
}
catch (PGPException e)
{
throw e;
}
catch (Exception e)
{
throw new PGPException("Exception encrypting key", e);
}
}
/**
* Construct a certified master secret key for the given key pair and user id,
* using no checksum calculator (delegates with a null checksumCalculator,
* which selects the simple 2-byte checksum).
*
* @param certificationLevel the type of certification to attach to the user id
* @param keyPair the key pair to certify and wrap
* @param id the user id to certify
* @param hashedPcks hashed signature subpackets, may be null
* @param unhashedPcks unhashed signature subpackets, may be null
* @param certificationSignerBuilder builder for the certification signature
* @param keyEncryptor encryptor used to protect the key material
* @throws PGPException on certification or encryption failure
*/
public PGPSecretKey(
int certificationLevel,
PGPKeyPair keyPair,
String id,
PGPSignatureSubpacketVector hashedPcks,
PGPSignatureSubpacketVector unhashedPcks,
PGPContentSignerBuilder certificationSignerBuilder,
PBESecretKeyEncryptor keyEncryptor)
throws PGPException
{
this(certificationLevel, keyPair, id, null, hashedPcks, unhashedPcks, certificationSignerBuilder, keyEncryptor);
}
/**
* Construct a certified master secret key: the public key is first
* self-certified for the given user id, then wrapped as a master key
* (isMasterKey = true) with the encrypting constructor.
*
* @param certificationLevel the type of certification to attach to the user id
* @param keyPair the key pair to certify and wrap
* @param id the user id to certify
* @param checksumCalculator SHA-1 checksum calculator, or null for a simple 2-byte sum
* @param hashedPcks hashed signature subpackets, may be null
* @param unhashedPcks unhashed signature subpackets, may be null
* @param certificationSignerBuilder builder for the certification signature
* @param keyEncryptor encryptor used to protect the key material
* @throws PGPException on certification or encryption failure
*/
public PGPSecretKey(
int certificationLevel,
PGPKeyPair keyPair,
String id,
PGPDigestCalculator checksumCalculator,
PGPSignatureSubpacketVector hashedPcks,
PGPSignatureSubpacketVector unhashedPcks,
PGPContentSignerBuilder certificationSignerBuilder,
PBESecretKeyEncryptor keyEncryptor)
throws PGPException
{
this(keyPair.getPrivateKey(), certifiedPublicKey(certificationLevel, keyPair, id, hashedPcks, unhashedPcks, certificationSignerBuilder), checksumCalculator, true, keyEncryptor);
}
/**
* Generate a certification signature for the given user id using the key
* pair's own private key, and return the public key with that user id and
* certification attached.
*
* @param certificationLevel the type of certification to generate
* @param keyPair the key pair whose public key is certified
* @param id the user id to certify
* @param hashedPcks hashed signature subpackets, may be null
* @param unhashedPcks unhashed signature subpackets, may be null
* @param certificationSignerBuilder builder for the certification signature
* @return the public key carrying the new certification
* @throws PGPException if the signature generator cannot be created or certification fails
*/
private static PGPPublicKey certifiedPublicKey(
int certificationLevel,
PGPKeyPair keyPair,
String id,
PGPSignatureSubpacketVector hashedPcks,
PGPSignatureSubpacketVector unhashedPcks,
PGPContentSignerBuilder certificationSignerBuilder)
throws PGPException
{
PGPSignatureGenerator sGen;
try
{
sGen = new PGPSignatureGenerator(certificationSignerBuilder);
}
catch (Exception e)
{
throw new PGPException("creating signature generator: " + e, e);
}
//
// generate the certification
//
sGen.init(certificationLevel, keyPair.getPrivateKey());
sGen.setHashedSubpackets(hashedPcks);
sGen.setUnhashedSubpackets(unhashedPcks);
try
{
PGPSignature certification = sGen.generateCertification(id, keyPair.getPublicKey());
return PGPPublicKey.addCertification(keyPair.getPublicKey(), id, certification);
}
catch (Exception e)
{
throw new PGPException("exception doing certification: " + e, e);
}
}
/**
 * Return true if this key has an algorithm type that makes it suitable to use for signing.
 * <p>
 * Note: with version 4 keys KeyFlags subpackets should also be considered when present for
 * determining the preferred use of the key.
 *
 * @return true if this key algorithm is suitable for use with signing.
 */
public boolean isSigningKey()
{
    switch (pub.getAlgorithm())
    {
    case PGPPublicKey.RSA_GENERAL:
    case PGPPublicKey.RSA_SIGN:
    case PGPPublicKey.DSA:
    case PGPPublicKey.ECDSA:
    case PGPPublicKey.ELGAMAL_GENERAL:
        return true;
    default:
        return false;
    }
}
/**
* Return true if this is a master key.
* (Delegates to the associated public key.)
* @return true if a master key.
*/
public boolean isMasterKey()
{
return pub.isMasterKey();
}
/**
 * Detect if the Secret Key's Private Key is empty or not.
 *
 * @return boolean whether or not the private key is empty
 */
public boolean isPrivateKeyEmpty()
{
    byte[] keyData = secret.getSecretKeyData();

    return keyData == null || keyData.length == 0;
}
/**
* Return the algorithm the key is encrypted with.
*
* @return the algorithm used to encrypt the secret key.
*/
public int getKeyEncryptionAlgorithm()
{
return secret.getEncAlgorithm();
}
/**
* Return the keyID of the public key associated with this key.
*
* @return the keyID associated with this key.
*/
public long getKeyID()
{
return pub.getKeyID();
}
/**
* Return the public key associated with this key.
*
* @return the public key for this key.
*/
public PGPPublicKey getPublicKey()
{
return pub;
}
/**
* Return any userIDs associated with the key.
* (Delegates to the associated public key.)
*
* @return an iterator of Strings.
*/
public Iterator getUserIDs()
{
return pub.getUserIDs();
}
/**
* Return the S2K object used to encrypt this secret key.
*
* @return this secret key's s2k object
*/
public S2K getS2K()
{
return secret.getS2K();
}
/**
* Return any user attribute vectors associated with the key.
* (Delegates to the associated public key.)
*
* @return an iterator over the key's user attribute vectors.
*/
public Iterator getUserAttributes()
{
return pub.getUserAttributes();
}
/**
* Decrypt (if necessary) and return the raw secret key material held in the
* secret key packet, verifying its trailing checksum.
*
* @param decryptorFactory source of the passphrase-derived key and cipher
* @return the plaintext key data (still including the trailing checksum bytes)
* @throws PGPException if decryption fails or the checksum does not match
*/
private byte[] extractKeyData(
PBESecretKeyDecryptor decryptorFactory)
throws PGPException
{
byte[] encData = secret.getSecretKeyData();
byte[] data = null;
// A NULL encryption algorithm means the key material is stored in the clear.
if (secret.getEncAlgorithm() != SymmetricKeyAlgorithmTags.NULL)
{
try
{
if (secret.getPublicKeyPacket().getVersion() == 4)
{
// v4 keys: the whole key data block is encrypted in one piece.
byte[] key = decryptorFactory.makeKeyFromPassPhrase(secret.getEncAlgorithm(), secret.getS2K());
data = decryptorFactory.recoverKeyData(secret.getEncAlgorithm(), key, secret.getIV(), encData, 0, encData.length);
// The trailer is a 20-byte SHA-1 hash (USAGE_SHA1) or a 2-byte sum.
boolean useSHA1 = secret.getS2KUsage() == SecretKeyPacket.USAGE_SHA1;
byte[] check = checksum(useSHA1 ? decryptorFactory.getChecksumCalculator(HashAlgorithmTags.SHA1) : null, data, (useSHA1) ? data.length - 20 : data.length - 2);
for (int i = 0; i != check.length; i++)
{
if (check[i] != data[data.length - check.length + i])
{
throw new PGPException("checksum mismatch at " + i + " of " + check.length);
}
}
}
else // version 2 or 3, RSA only.
{
byte[] key = decryptorFactory.makeKeyFromPassPhrase(secret.getEncAlgorithm(), secret.getS2K());
data = new byte[encData.length];
byte[] iv = new byte[secret.getIV().length];
System.arraycopy(secret.getIV(), 0, iv, 0, iv.length);
//
// read in the four numbers
//
// Each number is stored as a 2-byte big-endian bit count (kept in the
// clear and copied through) followed by that many bits of encrypted data.
int pos = 0;
for (int i = 0; i != 4; i++)
{
// Convert the bit count to a byte count, rounding up.
int encLen = (((encData[pos] << 8) | (encData[pos + 1] & 0xff)) + 7) / 8;
data[pos] = encData[pos];
data[pos + 1] = encData[pos + 1];
byte[] tmp = decryptorFactory.recoverKeyData(secret.getEncAlgorithm(), key, iv, encData, pos + 2, encLen);
System.arraycopy(tmp, 0, data, pos + 2, tmp.length);
pos += 2 + encLen;
if (i != 3)
{
// The next segment's IV is the trailing iv.length ciphertext bytes
// of the segment just processed.
System.arraycopy(encData, pos - iv.length, iv, 0, iv.length);
}
}
//
// verify and copy checksum
//
data[pos] = encData[pos];
data[pos + 1] = encData[pos + 1];
// 2-byte checksum: the low 16 bits of the sum of all plaintext bytes.
int cs = ((encData[pos] << 8) & 0xff00) | (encData[pos + 1] & 0xff);
int calcCs = 0;
for (int j = 0; j < data.length - 2; j++)
{
calcCs += data[j] & 0xff;
}
calcCs &= 0xffff;
if (calcCs != cs)
{
throw new PGPException("checksum mismatch: passphrase wrong, expected "
+ Integer.toHexString(cs)
+ " found " + Integer.toHexString(calcCs));
}
}
}
catch (PGPException e)
{
throw e;
}
catch (Exception e)
{
throw new PGPException("Exception decrypting key", e);
}
}
else
{
data = encData;
}
return data;
}
/**
 * Extract a PGPPrivateKey from the SecretKey's encrypted contents.
 *
 * @param decryptorFactory factory to use to generate a decryptor for the passed in secretKey.
 * @return PGPPrivateKey the unencrypted private key, or null if this
 *         object carries no private key material.
 * @throws PGPException on failure.
 */
public PGPPrivateKey extractPrivateKey(
    PBESecretKeyDecryptor decryptorFactory)
    throws PGPException
{
    if (isPrivateKeyEmpty())
    {
        return null;
    }

    PublicKeyPacket pubPk = secret.getPublicKeyPacket();

    try
    {
        // Decrypt the key material, then parse it according to the
        // public key algorithm.
        BCPGInputStream keyIn = new BCPGInputStream(
            new ByteArrayInputStream(extractKeyData(decryptorFactory)));

        switch (pubPk.getAlgorithm())
        {
        case PGPPublicKey.RSA_ENCRYPT:
        case PGPPublicKey.RSA_GENERAL:
        case PGPPublicKey.RSA_SIGN:
            return new PGPPrivateKey(this.getKeyID(), pubPk, new RSASecretBCPGKey(keyIn));
        case PGPPublicKey.DSA:
            return new PGPPrivateKey(this.getKeyID(), pubPk, new DSASecretBCPGKey(keyIn));
        case PGPPublicKey.ELGAMAL_ENCRYPT:
        case PGPPublicKey.ELGAMAL_GENERAL:
            return new PGPPrivateKey(this.getKeyID(), pubPk, new ElGamalSecretBCPGKey(keyIn));
        default:
            throw new PGPException("unknown public key algorithm encountered");
        }
    }
    catch (PGPException e)
    {
        // Propagate our own exception type unchanged.
        throw e;
    }
    catch (Exception e)
    {
        throw new PGPException("Exception constructing key", e);
    }
}
/**
 * Compute the integrity trailer over the first {@code length} bytes of
 * {@code bytes}: a digest when a calculator is supplied, otherwise the
 * classic 16-bit additive checksum.
 */
private static byte[] checksum(PGPDigestCalculator digCalc, byte[] bytes, int length)
    throws PGPException
{
    if (digCalc == null)
    {
        // Simple big-endian 16-bit sum of all byte values.
        int sum = 0;
        for (int i = 0; i < length; i++)
        {
            sum += bytes[i] & 0xff;
        }

        return new byte[] { (byte)(sum >> 8), (byte)sum };
    }

    // Digest-based check: stream the data through the calculator.
    OutputStream dOut = digCalc.getOutputStream();
    try
    {
        dOut.write(bytes, 0, length);
        dOut.close();
    }
    catch (Exception e)
    {
        throw new PGPException("checksum digest calculation failed: " + e.getMessage(), e);
    }

    return digCalc.getDigest();
}
/**
 * Return the packet-encoded form of this secret key as a byte array.
 *
 * @throws IOException if encoding fails.
 */
public byte[] getEncoded()
    throws IOException
{
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();

    encode(buffer);

    return buffer.toByteArray();
}
/**
 * Write this secret key, its trust packet (if any), and its signatures
 * to the given stream in packet form.
 *
 * @param outStream stream to write to; wrapped in a BCPGOutputStream if
 *        it is not one already.
 * @throws IOException if writing fails.
 */
public void encode(
    OutputStream outStream)
    throws IOException
{
    BCPGOutputStream out = (outStream instanceof BCPGOutputStream)
        ? (BCPGOutputStream)outStream
        : new BCPGOutputStream(outStream);

    out.writePacket(secret);
    if (pub.trustPk != null)
    {
        out.writePacket(pub.trustPk);
    }

    if (pub.subSigs != null)
    {
        // Sub key: only the subkey binding signatures follow.
        for (int idx = 0; idx != pub.subSigs.size(); idx++)
        {
            ((PGPSignature)pub.subSigs.get(idx)).encode(out);
        }
        return;
    }

    // Master key: direct key signatures first.
    for (int idx = 0; idx != pub.keySigs.size(); idx++)
    {
        ((PGPSignature)pub.keySigs.get(idx)).encode(out);
    }

    // Then each user ID / attribute, with its trust packet and
    // certification signatures. ids, idTrusts and idSigs are parallel
    // lists indexed together.
    for (int idx = 0; idx != pub.ids.size(); idx++)
    {
        Object id = pub.ids.get(idx);
        if (id instanceof UserIDPacket)
        {
            out.writePacket((UserIDPacket)id);
        }
        else
        {
            PGPUserAttributeSubpacketVector v = (PGPUserAttributeSubpacketVector)id;
            out.writePacket(new UserAttributePacket(v.toSubpacketArray()));
        }

        if (pub.idTrusts.get(idx) != null)
        {
            out.writePacket((ContainedPacket)pub.idTrusts.get(idx));
        }

        List sigs = (ArrayList)pub.idSigs.get(idx);
        for (int j = 0; j != sigs.size(); j++)
        {
            ((PGPSignature)sigs.get(j)).encode(out);
        }
    }
}
/**
 * Return a copy of the passed in secret key, encrypted using a new
 * password and the passed in algorithm.
 *
 * @param key the PGPSecretKey to be copied.
 * @param oldKeyDecryptor the current decryptor based on the current password for key.
 * @param newKeyEncryptor a new encryptor based on a new password for encrypting the secret key material.
 * @return a new PGPSecretKey carrying the same key material re-encrypted
 *         (or stored in the clear if newKeyEncryptor is null/NULL-algorithm).
 * @throws PGPException if the key holds no private material, or if
 *         decryption/re-encryption fails.
 */
public static PGPSecretKey copyWithNewPassword(
    PGPSecretKey key,
    PBESecretKeyDecryptor oldKeyDecryptor,
    PBESecretKeyEncryptor newKeyEncryptor)
    throws PGPException
{
    if (key.isPrivateKeyEmpty())
    {
        throw new PGPException("no private key in this SecretKey - public key present only.");
    }

    // Plaintext key material, integrity trailer included.
    byte[] rawKeyData = key.extractKeyData(oldKeyDecryptor);
    int s2kUsage = key.secret.getS2KUsage();
    byte[] iv = null;
    S2K s2k = null;
    byte[] keyData;
    int newEncAlgorithm = SymmetricKeyAlgorithmTags.NULL;

    if (newKeyEncryptor == null || newKeyEncryptor.getAlgorithm() == SymmetricKeyAlgorithmTags.NULL)
    {
        // Target is an unencrypted key.
        s2kUsage = SecretKeyPacket.USAGE_NONE;
        if (key.secret.getS2KUsage() == SecretKeyPacket.USAGE_SHA1) // SHA-1 hash, need to rewrite checksum
        {
            // Drop the 20-byte SHA-1 trailer and append a 2-byte additive
            // checksum instead (hence length - 18).
            keyData = new byte[rawKeyData.length - 18];

            System.arraycopy(rawKeyData, 0, keyData, 0, keyData.length - 2);

            byte[] check = checksum(null, keyData, keyData.length - 2);

            keyData[keyData.length - 2] = check[0];
            keyData[keyData.length - 1] = check[1];
        }
        else
        {
            keyData = rawKeyData;
        }
    }
    else
    {
        if (key.secret.getPublicKeyPacket().getVersion() < 4)
        {
            // Version 2 or 3 - RSA Keys only
            // Each MPI is encrypted separately; the 2-byte bit-count
            // headers stay in the clear.
            byte[] encKey = newKeyEncryptor.getKey();
            keyData = new byte[rawKeyData.length];

            if (newKeyEncryptor.getS2K() != null)
            {
                throw new PGPException("MD5 Digest Calculator required for version 3 key encryptor.");
            }

            //
            // process 4 numbers
            //
            int pos = 0;
            for (int i = 0; i != 4; i++)
            {
                // Byte length of this MPI's magnitude, from its bit count.
                int encLen = (((rawKeyData[pos] << 8) | (rawKeyData[pos + 1] & 0xff)) + 7) / 8;

                keyData[pos] = rawKeyData[pos];
                keyData[pos + 1] = rawKeyData[pos + 1];

                byte[] tmp;
                if (i == 0)
                {
                    // First MPI establishes the cipher IV used by the packet.
                    tmp = newKeyEncryptor.encryptKeyData(encKey, rawKeyData, pos + 2, encLen);
                    iv = newKeyEncryptor.getCipherIV();
                }
                else
                {
                    // Subsequent MPIs chain: IV is the tail of the previous
                    // ciphertext (mirrors extractKeyData's decryption).
                    byte[] tmpIv = new byte[iv.length];

                    System.arraycopy(keyData, pos - iv.length, tmpIv, 0, tmpIv.length);
                    tmp = newKeyEncryptor.encryptKeyData(encKey, tmpIv, rawKeyData, pos + 2, encLen);
                }

                System.arraycopy(tmp, 0, keyData, pos + 2, tmp.length);

                pos += 2 + encLen;
            }

            //
            // copy in checksum.
            //
            keyData[pos] = rawKeyData[pos];
            keyData[pos + 1] = rawKeyData[pos + 1];

            s2k = newKeyEncryptor.getS2K();
            newEncAlgorithm = newKeyEncryptor.getAlgorithm();
        }
        else
        {
            // v4 keys: encrypt the whole body in one pass.
            keyData = newKeyEncryptor.encryptKeyData(rawKeyData, 0, rawKeyData.length);

            iv = newKeyEncryptor.getCipherIV();

            s2k = newKeyEncryptor.getS2K();

            newEncAlgorithm = newKeyEncryptor.getAlgorithm();
        }
    }

    // Rebuild the packet with the same concrete type as the original.
    SecretKeyPacket secret;
    if (key.secret instanceof SecretSubkeyPacket)
    {
        secret = new SecretSubkeyPacket(key.secret.getPublicKeyPacket(),
            newEncAlgorithm, s2kUsage, s2k, iv, keyData);
    }
    else
    {
        secret = new SecretKeyPacket(key.secret.getPublicKeyPacket(),
            newEncAlgorithm, s2kUsage, s2k, iv, keyData);
    }

    return new PGPSecretKey(secret, key.pub);
}
/**
 * Replace the public key carried by the passed in secret key.
 *
 * @param secretKey secret key to change
 * @param publicKey new public key.
 * @return a new secret key.
 * @throws IllegalArgumentException if keyIDs do not match.
 */
public static PGPSecretKey replacePublicKey(PGPSecretKey secretKey, PGPPublicKey publicKey)
{
    // The public half must describe the same key as the secret half.
    if (secretKey.getKeyID() != publicKey.getKeyID())
    {
        throw new IllegalArgumentException("keyIDs do not match");
    }

    return new PGPSecretKey(secretKey.secret, publicKey);
}
}
| |
/* Copyright (c) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.gdata.model;
import com.google.gdata.util.Version;
import com.google.gdata.wireformats.AltFormat;
/**
* A context that metadata is operating under. Currently this contains the
* alt format, projection and version of the current request. Immutable.
*
* <p>This class also contains static initializers for the transforms required
* by each of our alt formats, which guarantees they will be loaded. We may
* want to put them somewhere else, but we need to move the constants for the
* contexts along with them.
*
*
*/
public final class MetadataContext implements Comparable<MetadataContext> {

  /** The ATOM metadata context. */
  public static final MetadataContext ATOM = MetadataContext.forAlt(
      AltFormat.ATOM);

  /** The RSS metadata context. */
  public static final MetadataContext RSS = MetadataContext.forAlt(
      AltFormat.RSS);

  /** Alt format of the request; may be null. */
  private final AltFormat altFormat;

  /** Projection of the request; may be null. */
  private final String projection;

  /** Version of the request; may be null. */
  private final Version version;

  /**
   * Creates a new immutable metadata context with just an alt format. The
   * format must not be null or this will throw a null pointer exception.
   *
   * @param format the alt format for the context, not {@code null}.
   * @return a metadata context for the alt format.
   */
  public static MetadataContext forAlt(AltFormat format) {
    return forContext(format, null, null);
  }

  /**
   * Creates a new immutable metadata context with just a projection. A null
   * projection yields null, the default context.
   *
   * @param projection the projection of the context.
   * @return an immutable metadata context with the given projection, or null
   *     if the projection was null.
   */
  public static MetadataContext forProjection(String projection) {
    return forContext(null, projection, null);
  }

  /**
   * Creates a new immutable metadata context with just a version. A null
   * version yields null, the default context.
   *
   * @param version the version of the context.
   * @return an immutable metadata context with the given version, or null if
   *     the version was null.
   */
  public static MetadataContext forVersion(Version version) {
    return forContext(null, null, version);
  }

  /**
   * Creates a new immutable metadata context. When format, projection and
   * version are all null this returns null, which is the default context.
   *
   * @param format the alt format of the context.
   * @param projection the projection for the context.
   * @param version the version of the context.
   * @return an immutable metadata context with the given alt type, projection
   *     and version, or null if all parameters are null.
   */
  public static MetadataContext forContext(AltFormat format, String projection,
      Version version) {
    return (format == null && projection == null && version == null)
        ? null
        : new MetadataContext(format, projection, version);
  }

  /**
   * Private constructor, callers must use the static factory methods.
   */
  private MetadataContext(
      AltFormat format, String projection, Version version) {
    this.altFormat = format;
    this.projection = projection;
    this.version = version;
  }

  /**
   * Returns true if this context is a match for the given context, i.e. it
   * is a subset of that context. Null properties are ignored; each non-null
   * property must match the corresponding property on the other context.
   */
  public boolean matches(MetadataContext other) {
    if (other == null) {
      return false;
    }
    if (altFormat != null && !altFormat.equals(other.altFormat)) {
      return false;
    }
    if (projection != null && !projection.equals(other.projection)) {
      return false;
    }
    return version == null
        || (other.version != null && other.version.isCompatible(version));
  }

  /**
   * The alt format the context represents.
   *
   * @return the alt format or null if the context doesn't have an alt format.
   */
  public AltFormat getAltFormat() {
    return altFormat;
  }

  /**
   * The projection the context represents.
   *
   * @return the projection or null if the context doesn't have a projection.
   */
  public String getProjection() {
    return projection;
  }

  /**
   * The version the context represents.
   *
   * @return the version or null if no version exists in this context.
   */
  public Version getVersion() {
    return version;
  }

  /**
   * Orders two contexts by alt format, then projection, then version. A null
   * field sorts low. Throws {@link NullPointerException} if the given
   * context is {@code null}.
   */
  public int compareTo(MetadataContext other) {
    if (other == this) {
      return 0;
    }
    int result = compareAltFormat(altFormat, other.altFormat);
    if (result == 0) {
      result = compareString(projection, other.projection);
    }
    if (result == 0) {
      result = compareVersion(version, other.version);
    }
    return result;
  }

  /**
   * Compares two alt formats by name, where either may be null.
   */
  static int compareAltFormat(AltFormat a, AltFormat b) {
    String aName = (a == null) ? null : a.getName();
    String bName = (b == null) ? null : b.getName();
    return compareString(aName, bName);
  }

  /**
   * Compares two strings, where either may be null; null sorts first.
   */
  static int compareString(String a, String b) {
    if (a == b) {
      return 0;
    }
    return (a == null) ? -1 : (b == null) ? 1 : a.compareTo(b);
  }

  /**
   * Compares two versions, where either may be null; null sorts first.
   * Non-null versions order by service class, then major, then minor.
   */
  static int compareVersion(Version a, Version b) {
    if (a == b) {
      return 0;
    }
    if (a == null) {
      return -1;
    }
    if (b == null) {
      return 1;
    }
    int result = MetadataKey.compareClass(
        a.getServiceClass(), b.getServiceClass());
    if (result != 0) {
      return result;
    }
    if (a.getMajor() != b.getMajor()) {
      return (a.getMajor() < b.getMajor()) ? -1 : 1;
    }
    if (a.getMinor() != b.getMinor()) {
      return (a.getMinor() < b.getMinor()) ? -1 : 1;
    }
    return 0;
  }

  /**
   * The hashCode for a context is combined from its non-null parts.
   */
  @Override
  public int hashCode() {
    // Keep the exact mixing of the original implementation: altFormat
    // contributes directly, projection/version each fold in with * 37
    // only when present.
    int result = (altFormat == null) ? 0 : altFormat.hashCode();
    if (projection != null) {
      result = result * 37 + projection.hashCode();
    }
    if (version != null) {
      result = result * 37 + version.hashCode();
    }
    return result;
  }

  /**
   * Checks that the other object is a metadata context with the same alt
   * format, projection and version.
   */
  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof MetadataContext)) {
      return false;
    }
    MetadataContext other = (MetadataContext) obj;
    return eq(altFormat, other.altFormat)
        && eq(projection, other.projection)
        && eq(version, other.version);
  }

  /** Null-safe equality helper. */
  private static boolean eq(Object a, Object b) {
    return (a == null) ? (b == null) : a.equals(b);
  }

  @Override
  public String toString() {
    return "{MetadataContext(" + altFormat + ',' + projection + ','
        + version + ")}";
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.